lang
stringclasses 1
value | license
stringclasses 13
values | stderr
stringlengths 0
350
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 7
45.1k
| new_contents
stringlengths 0
1.87M
| new_file
stringlengths 6
292
| old_contents
stringlengths 0
1.87M
| message
stringlengths 6
9.26k
| old_file
stringlengths 6
292
| subject
stringlengths 0
4.45k
|
|---|---|---|---|---|---|---|---|---|---|---|---|
Java
|
apache-2.0
|
20955c9514a332c84f5cdfaa24bca3b2cf71d330
| 0
|
XHidamariSketchX/xmemcached,mbrukman/xmemcached,mbrukman/xmemcached,XHidamariSketchX/xmemcached,bmahe/xmemcached,killme2008/xmemcached,killme2008/xmemcached,springning/xmemcached,fengshao0907/xmemcached,springning/xmemcached,fengshao0907/xmemcached,bmahe/xmemcached
|
/**
*Copyright [2009-2010] [dennis zhuang(killme2008@gmail.com)]
*Licensed under the Apache License, Version 2.0 (the "License");
*you may not use this file except in compliance with the License.
*You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*Unless required by applicable law or agreed to in writing,
*software distributed under the License is distributed on an "AS IS" BASIS,
*WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
*either express or implied. See the License for the specific language governing permissions and limitations under the License
*/
package net.rubyeye.xmemcached.utils;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import net.rubyeye.xmemcached.codec.MemcachedDecoder;
import net.rubyeye.xmemcached.monitor.Constants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.code.yanf4j.buffer.IoBuffer;
/**
* Utilities for byte process
*
* @author dennis
*
*/
public final class ByteUtils {
public static final Logger log = LoggerFactory.getLogger(ByteUtils.class);
public static final String DEFAULT_CHARSET_NAME = "utf-8";
public static final Charset DEFAULT_CHARSET = Charset.forName(DEFAULT_CHARSET_NAME);
public static final ByteBuffer SPLIT = ByteBuffer.wrap(Constants.CRLF);
/**
* if it is testing,check key argument even if use binary protocol. The user
* must never change this value at all.
*/
public static boolean testing;
private ByteUtils() {
}
public static final byte[] getBytes(String k) {
if (k == null || k.length() == 0) {
throw new IllegalArgumentException("Key must not be blank");
}
try {
return k.getBytes(DEFAULT_CHARSET_NAME);
}
catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
}
public static final void setArguments(IoBuffer bb, Object... args) {
boolean wasFirst = true;
for (Object o : args) {
if (wasFirst) {
wasFirst = false;
}
else {
bb.put(Constants.SPACE);
}
if (o instanceof byte[]) {
bb.put((byte[]) o);
}
else {
bb.put(getBytes(String.valueOf(o)));
}
}
bb.put(Constants.CRLF);
}
public static final int setArguments(byte[] bb, int index, Object... args) {
boolean wasFirst = true;
int s = index;
for (Object o : args) {
if (wasFirst) {
wasFirst = false;
}
else {
bb[s++] = Constants.SPACE;
}
if (o instanceof byte[]) {
byte[] tmp = (byte[]) o;
System.arraycopy(tmp, 0, bb, s, tmp.length);
s += tmp.length;
}
else if (o instanceof Integer) {
int v = ((Integer) o).intValue();
s += stringSize(v);
getBytes(v, s, bb);
}
else if (o instanceof String) {
byte[] tmp = getBytes((String) o);
System.arraycopy(tmp, 0, bb, s, tmp.length);
s += tmp.length;
}
else if (o instanceof Long) {
long v = ((Long) o).longValue();
s += stringSize(v);
getBytes(v, s, bb);
}
}
System.arraycopy(Constants.CRLF, 0, bb, s, 2);
s += 2;
return s;
}
public static final void checkKey(final byte[] keyBytes) {
if (keyBytes.length > ByteUtils.maxKeyLength) {
throw new IllegalArgumentException("Key is too long (maxlen = " + ByteUtils.maxKeyLength + ")");
}
// Validate the key
if (memcachedProtocol == Protocol.Text || testing) {
for (byte b : keyBytes) {
if (b == ' ' || b == '\n' || b == '\r' || b == 0) {
try {
throw new IllegalArgumentException("Key contains invalid characters: "
+ new String(keyBytes, "utf-8"));
}
catch (UnsupportedEncodingException e) {
}
}
}
}
}
public static final void checkKey(final String key) {
if (key == null || key.length() == 0) {
throw new IllegalArgumentException("Key must not be blank");
}
byte[] keyBytes = getBytes(key);
if (keyBytes.length > ByteUtils.maxKeyLength) {
throw new IllegalArgumentException("Key is too long (maxlen = " + ByteUtils.maxKeyLength + ")");
}
if (memcachedProtocol == Protocol.Text || testing) {
// Validate the key
for (byte b : keyBytes) {
if (b == ' ' || b == '\n' || b == '\r' || b == 0) {
try {
throw new IllegalArgumentException("Key contains invalid characters:"
+ new String(keyBytes, "utf-8"));
}
catch (UnsupportedEncodingException e) {
}
}
}
}
}
private static Protocol memcachedProtocol = Protocol.Text;
private static int maxKeyLength = 250;
public static void setProtocol(Protocol protocol) {
if (protocol == null) {
throw new NullPointerException("Null Protocol");
}
memcachedProtocol = protocol;
// if (protocol == Protocol.Text) {
// maxKeyLength = 250;
// }
// else {
// maxKeyLength = 65535;
// }
}
public static final int normalizeCapacity(int requestedCapacity) {
switch (requestedCapacity) {
case 0:
case 1 << 0:
case 1 << 1:
case 1 << 2:
case 1 << 3:
case 1 << 4:
case 1 << 5:
case 1 << 6:
case 1 << 7:
case 1 << 8:
case 1 << 9:
case 1 << 10:
case 1 << 11:
case 1 << 12:
case 1 << 13:
case 1 << 14:
case 1 << 15:
case 1 << 16:
case 1 << 17:
case 1 << 18:
case 1 << 19:
case 1 << 21:
case 1 << 22:
case 1 << 23:
case 1 << 24:
case 1 << 25:
case 1 << 26:
case 1 << 27:
case 1 << 28:
case 1 << 29:
case 1 << 30:
case Integer.MAX_VALUE:
return requestedCapacity;
}
int newCapacity = 1;
while (newCapacity < requestedCapacity) {
newCapacity <<= 1;
if (newCapacity < 0) {
return Integer.MAX_VALUE;
}
}
return newCapacity;
}
public static final boolean stepBuffer(ByteBuffer buffer, int remaining) {
if (buffer.remaining() >= remaining) {
buffer.position(buffer.position() + remaining);
return true;
}
else {
return false;
}
}
/**
* �峰�涓��琛�
*
* @param buffer
*/
public static final String nextLine(ByteBuffer buffer) {
if (buffer == null) {
return null;
}
/**
* 娴��琛ㄦ���� Shift-And绠���归� >BM绠���归���� > �寸��归� > KMP�归�锛�
* 濡��浣���村ソ��缓璁��璇�mail缁��(killme2008@gmail.com)
*/
int index = MemcachedDecoder.SPLIT_MATCHER.matchFirst(com.google.code.yanf4j.buffer.IoBuffer.wrap(buffer));
if (index >= 0) {
int limit = buffer.limit();
buffer.limit(index);
byte[] bytes = new byte[buffer.remaining()];
buffer.get(bytes);
buffer.limit(limit);
buffer.position(index + ByteUtils.SPLIT.remaining());
return getString(bytes);
}
return null;
}
public static String getString(byte[] bytes) {
try {
return new String(bytes, DEFAULT_CHARSET_NAME);
}
catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
}
public static void byte2hex(byte b, StringBuffer buf) {
char[] hexChars = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' };
int high = ((b & 0xf0) >> 4);
int low = (b & 0x0f);
buf.append(hexChars[high]);
buf.append(hexChars[low]);
}
public static void int2hex(int a, StringBuffer str) {
str.append(Integer.toHexString(a));
}
public static void short2hex(int a, StringBuffer str) {
str.append(Integer.toHexString(a));
}
public static void getBytes(long i, int index, byte[] buf) {
long q;
int r;
int pos = index;
byte sign = 0;
if (i < 0) {
sign = '-';
i = -i;
}
// Get 2 digits/iteration using longs until quotient fits into an int
while (i > Integer.MAX_VALUE) {
q = i / 100;
// really: r = i - (q * 100);
r = (int) (i - ((q << 6) + (q << 5) + (q << 2)));
i = q;
buf[--pos] = DigitOnes[r];
buf[--pos] = DigitTens[r];
}
// Get 2 digits/iteration using ints
int q2;
int i2 = (int) i;
while (i2 >= 65536) {
q2 = i2 / 100;
// really: r = i2 - (q * 100);
r = i2 - ((q2 << 6) + (q2 << 5) + (q2 << 2));
i2 = q2;
buf[--pos] = DigitOnes[r];
buf[--pos] = DigitTens[r];
}
// Fall thru to fast mode for smaller numbers
// assert(i2 <= 65536, i2);
for (;;) {
q2 = (i2 * 52429) >>> (16 + 3);
r = i2 - ((q2 << 3) + (q2 << 1)); // r = i2-(q2*10) ...
buf[--pos] = digits[r];
i2 = q2;
if (i2 == 0)
break;
}
if (sign != 0) {
buf[--pos] = sign;
}
}
/**
* Places characters representing the integer i into the character array
* buf. The characters are placed into the buffer backwards starting with
* the least significant digit at the specified index (exclusive), and
* working backwards from there.
*
* Will fail if i == Integer.MIN_VALUE
*/
static void getBytes(int i, int index, byte[] buf) {
int q, r;
int pos = index;
byte sign = 0;
if (i < 0) {
sign = '-';
i = -i;
}
// Generate two digits per iteration
while (i >= 65536) {
q = i / 100;
// really: r = i - (q * 100);
r = i - ((q << 6) + (q << 5) + (q << 2));
i = q;
buf[--pos] = DigitOnes[r];
buf[--pos] = DigitTens[r];
}
// Fall thru to fast mode for smaller numbers
// assert(i <= 65536, i);
for (;;) {
q = (i * 52429) >>> (16 + 3);
r = i - ((q << 3) + (q << 1)); // r = i-(q*10) ...
buf[--pos] = digits[r];
i = q;
if (i == 0)
break;
}
if (sign != 0) {
buf[--pos] = sign;
}
}
/**
* All possible chars for representing a number as a String
*/
final static byte[] digits = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', 'g',
'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x',
'y', 'z' };
final static byte[] DigitTens = { '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1',
'1', '1', '1', '1', '2', '2', '2', '2', '2', '2', '2', '2', '2', '2', '3', '3',
'3', '3', '3', '3', '3', '3', '3', '3', '4', '4', '4', '4', '4', '4', '4', '4',
'4', '4', '5', '5', '5', '5', '5', '5', '5', '5', '5', '5', '6', '6', '6', '6',
'6', '6', '6', '6', '6', '6', '7', '7', '7', '7', '7', '7', '7', '7', '7', '7',
'8', '8', '8', '8', '8', '8', '8', '8', '8', '8', '9', '9', '9', '9', '9', '9',
'9', '9', '9', '9', };
final static byte[] DigitOnes = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '0', '1', '2', '3', '4', '5',
'6', '7', '8', '9', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '0', '1',
'2', '3', '4', '5', '6', '7', '8', '9', '0', '1', '2', '3', '4', '5', '6', '7',
'8', '9', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '0', '1', '2', '3',
'4', '5', '6', '7', '8', '9', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '0', '1', '2', '3', '4', '5',
'6', '7', '8', '9', };
final static int[] sizeTable = { 9, 99, 999, 9999, 99999, 999999, 9999999, 99999999, 999999999, Integer.MAX_VALUE };
// Requires positive x
public static final int stringSize(int x) {
for (int i = 0;; i++)
if (x <= sizeTable[i])
return i + 1;
}
// Requires positive x
public static final int stringSize(long x) {
long p = 10;
for (int i = 1; i < 19; i++) {
if (x < p)
return i;
p = 10 * p;
}
return 19;
}
final static int[] byte_len_array = new int[256];
static {
for (int i = Byte.MIN_VALUE; i <= Byte.MAX_VALUE; ++i) {
int size = (i < 0) ? stringSize(-i) + 1 : stringSize(i);
byte_len_array[i & 0xFF] = size;
}
}
}
|
src/main/java/net/rubyeye/xmemcached/utils/ByteUtils.java
|
/**
*Copyright [2009-2010] [dennis zhuang(killme2008@gmail.com)]
*Licensed under the Apache License, Version 2.0 (the "License");
*you may not use this file except in compliance with the License.
*You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*Unless required by applicable law or agreed to in writing,
*software distributed under the License is distributed on an "AS IS" BASIS,
*WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
*either express or implied. See the License for the specific language governing permissions and limitations under the License
*/
package net.rubyeye.xmemcached.utils;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import net.rubyeye.xmemcached.codec.MemcachedDecoder;
import net.rubyeye.xmemcached.monitor.Constants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.code.yanf4j.buffer.IoBuffer;
/**
* Utilities for byte process
*
* @author dennis
*
*/
public final class ByteUtils {
public static final Logger log = LoggerFactory.getLogger(ByteUtils.class);
public static final String DEFAULT_CHARSET_NAME = "utf-8";
public static final Charset DEFAULT_CHARSET = Charset.forName(DEFAULT_CHARSET_NAME);
public static final ByteBuffer SPLIT = ByteBuffer.wrap(Constants.CRLF);
/**
* if it is testing,check key argument even if use binary protocol. The user
* must never change this value at all.
*/
public static boolean testing;
private ByteUtils() {
}
public static final byte[] getBytes(String k) {
if (k == null || k.length() == 0) {
throw new IllegalArgumentException("Key must not be blank");
}
try {
return k.getBytes(DEFAULT_CHARSET_NAME);
}
catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
}
public static final void setArguments(IoBuffer bb, Object... args) {
boolean wasFirst = true;
for (Object o : args) {
if (wasFirst) {
wasFirst = false;
}
else {
bb.put(Constants.SPACE);
}
if (o instanceof byte[]) {
bb.put((byte[]) o);
}
else {
bb.put(getBytes(String.valueOf(o)));
}
}
bb.put(Constants.CRLF);
}
public static final int setArguments(byte[] bb, int index, Object... args) {
boolean wasFirst = true;
int s = index;
for (Object o : args) {
if (wasFirst) {
wasFirst = false;
}
else {
bb[s++] = Constants.SPACE;
}
if (o instanceof byte[]) {
byte[] tmp = (byte[]) o;
System.arraycopy(tmp, 0, bb, s, tmp.length);
s += tmp.length;
}
else if (o instanceof Integer) {
int v = ((Integer) o).intValue();
s += stringSize(v);
getBytes(v, s, bb);
}
else if (o instanceof String) {
byte[] tmp = getBytes((String) o);
System.arraycopy(tmp, 0, bb, s, tmp.length);
s += tmp.length;
}
else if (o instanceof Long) {
long v = ((Long) o).longValue();
s += stringSize(v);
getBytes(v, s, bb);
}
}
System.arraycopy(Constants.CRLF, 0, bb, s, 2);
s += 2;
return s;
}
public static final void checkKey(final byte[] keyBytes) {
if (keyBytes.length > ByteUtils.maxKeyLength) {
throw new IllegalArgumentException("Key is too long (maxlen = " + ByteUtils.maxKeyLength + ")");
}
// Validate the key
if (memcachedProtocol == Protocol.Text || testing) {
for (byte b : keyBytes) {
if (b == ' ' || b == '\n' || b == '\r' || b == 0) {
try {
throw new IllegalArgumentException("Key contains invalid characters: "
+ new String(keyBytes, "utf-8"));
}
catch (UnsupportedEncodingException e) {
}
}
}
}
}
public static final void checkKey(final String key) {
if (key == null || key.length() == 0) {
throw new IllegalArgumentException("Key must not be blank");
}
byte[] keyBytes = getBytes(key);
if (keyBytes.length > ByteUtils.maxKeyLength) {
throw new IllegalArgumentException("Key is too long (maxlen = " + ByteUtils.maxKeyLength + ")");
}
if (memcachedProtocol == Protocol.Text || testing) {
// Validate the key
for (byte b : keyBytes) {
if (b == ' ' || b == '\n' || b == '\r' || b == 0) {
try {
throw new IllegalArgumentException("Key contains invalid characters:"
+ new String(keyBytes, "utf-8"));
}
catch (UnsupportedEncodingException e) {
}
}
}
}
}
private static Protocol memcachedProtocol = Protocol.Text;
private static int maxKeyLength = 250;
public static void setProtocol(Protocol protocol) {
if (protocol == null) {
throw new NullPointerException("Null Protocol");
}
memcachedProtocol = protocol;
if (protocol == Protocol.Text) {
maxKeyLength = 250;
}
else {
maxKeyLength = 65535;
}
}
public static final int normalizeCapacity(int requestedCapacity) {
switch (requestedCapacity) {
case 0:
case 1 << 0:
case 1 << 1:
case 1 << 2:
case 1 << 3:
case 1 << 4:
case 1 << 5:
case 1 << 6:
case 1 << 7:
case 1 << 8:
case 1 << 9:
case 1 << 10:
case 1 << 11:
case 1 << 12:
case 1 << 13:
case 1 << 14:
case 1 << 15:
case 1 << 16:
case 1 << 17:
case 1 << 18:
case 1 << 19:
case 1 << 21:
case 1 << 22:
case 1 << 23:
case 1 << 24:
case 1 << 25:
case 1 << 26:
case 1 << 27:
case 1 << 28:
case 1 << 29:
case 1 << 30:
case Integer.MAX_VALUE:
return requestedCapacity;
}
int newCapacity = 1;
while (newCapacity < requestedCapacity) {
newCapacity <<= 1;
if (newCapacity < 0) {
return Integer.MAX_VALUE;
}
}
return newCapacity;
}
public static final boolean stepBuffer(ByteBuffer buffer, int remaining) {
if (buffer.remaining() >= remaining) {
buffer.position(buffer.position() + remaining);
return true;
}
else {
return false;
}
}
/**
* �峰�涓��琛�
*
* @param buffer
*/
public static final String nextLine(ByteBuffer buffer) {
if (buffer == null) {
return null;
}
/**
* 娴��琛ㄦ���� Shift-And绠���归� >BM绠���归���� > �寸��归� > KMP�归�锛�
* 濡��浣���村ソ��缓璁��璇�mail缁��(killme2008@gmail.com)
*/
int index = MemcachedDecoder.SPLIT_MATCHER.matchFirst(com.google.code.yanf4j.buffer.IoBuffer.wrap(buffer));
if (index >= 0) {
int limit = buffer.limit();
buffer.limit(index);
byte[] bytes = new byte[buffer.remaining()];
buffer.get(bytes);
buffer.limit(limit);
buffer.position(index + ByteUtils.SPLIT.remaining());
return getString(bytes);
}
return null;
}
public static String getString(byte[] bytes) {
try {
return new String(bytes, DEFAULT_CHARSET_NAME);
}
catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
}
public static void byte2hex(byte b, StringBuffer buf) {
char[] hexChars = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' };
int high = ((b & 0xf0) >> 4);
int low = (b & 0x0f);
buf.append(hexChars[high]);
buf.append(hexChars[low]);
}
public static void int2hex(int a, StringBuffer str) {
str.append(Integer.toHexString(a));
}
public static void short2hex(int a, StringBuffer str) {
str.append(Integer.toHexString(a));
}
public static void getBytes(long i, int index, byte[] buf) {
long q;
int r;
int pos = index;
byte sign = 0;
if (i < 0) {
sign = '-';
i = -i;
}
// Get 2 digits/iteration using longs until quotient fits into an int
while (i > Integer.MAX_VALUE) {
q = i / 100;
// really: r = i - (q * 100);
r = (int) (i - ((q << 6) + (q << 5) + (q << 2)));
i = q;
buf[--pos] = DigitOnes[r];
buf[--pos] = DigitTens[r];
}
// Get 2 digits/iteration using ints
int q2;
int i2 = (int) i;
while (i2 >= 65536) {
q2 = i2 / 100;
// really: r = i2 - (q * 100);
r = i2 - ((q2 << 6) + (q2 << 5) + (q2 << 2));
i2 = q2;
buf[--pos] = DigitOnes[r];
buf[--pos] = DigitTens[r];
}
// Fall thru to fast mode for smaller numbers
// assert(i2 <= 65536, i2);
for (;;) {
q2 = (i2 * 52429) >>> (16 + 3);
r = i2 - ((q2 << 3) + (q2 << 1)); // r = i2-(q2*10) ...
buf[--pos] = digits[r];
i2 = q2;
if (i2 == 0)
break;
}
if (sign != 0) {
buf[--pos] = sign;
}
}
/**
* Places characters representing the integer i into the character array
* buf. The characters are placed into the buffer backwards starting with
* the least significant digit at the specified index (exclusive), and
* working backwards from there.
*
* Will fail if i == Integer.MIN_VALUE
*/
static void getBytes(int i, int index, byte[] buf) {
int q, r;
int pos = index;
byte sign = 0;
if (i < 0) {
sign = '-';
i = -i;
}
// Generate two digits per iteration
while (i >= 65536) {
q = i / 100;
// really: r = i - (q * 100);
r = i - ((q << 6) + (q << 5) + (q << 2));
i = q;
buf[--pos] = DigitOnes[r];
buf[--pos] = DigitTens[r];
}
// Fall thru to fast mode for smaller numbers
// assert(i <= 65536, i);
for (;;) {
q = (i * 52429) >>> (16 + 3);
r = i - ((q << 3) + (q << 1)); // r = i-(q*10) ...
buf[--pos] = digits[r];
i = q;
if (i == 0)
break;
}
if (sign != 0) {
buf[--pos] = sign;
}
}
/**
* All possible chars for representing a number as a String
*/
final static byte[] digits = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', 'g',
'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x',
'y', 'z' };
final static byte[] DigitTens = { '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '1', '1', '1', '1', '1',
'1', '1', '1', '1', '2', '2', '2', '2', '2', '2', '2', '2', '2', '2', '3', '3',
'3', '3', '3', '3', '3', '3', '3', '3', '4', '4', '4', '4', '4', '4', '4', '4',
'4', '4', '5', '5', '5', '5', '5', '5', '5', '5', '5', '5', '6', '6', '6', '6',
'6', '6', '6', '6', '6', '6', '7', '7', '7', '7', '7', '7', '7', '7', '7', '7',
'8', '8', '8', '8', '8', '8', '8', '8', '8', '8', '9', '9', '9', '9', '9', '9',
'9', '9', '9', '9', };
final static byte[] DigitOnes = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '0', '1', '2', '3', '4', '5',
'6', '7', '8', '9', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '0', '1',
'2', '3', '4', '5', '6', '7', '8', '9', '0', '1', '2', '3', '4', '5', '6', '7',
'8', '9', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '0', '1', '2', '3',
'4', '5', '6', '7', '8', '9', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '0', '1', '2', '3', '4', '5',
'6', '7', '8', '9', };
final static int[] sizeTable = { 9, 99, 999, 9999, 99999, 999999, 9999999, 99999999, 999999999, Integer.MAX_VALUE };
// Requires positive x
public static final int stringSize(int x) {
for (int i = 0;; i++)
if (x <= sizeTable[i])
return i + 1;
}
// Requires positive x
public static final int stringSize(long x) {
long p = 10;
for (int i = 1; i < 19; i++) {
if (x < p)
return i;
p = 10 * p;
}
return 19;
}
final static int[] byte_len_array = new int[256];
static {
for (int i = Byte.MIN_VALUE; i <= Byte.MAX_VALUE; ++i) {
int size = (i < 0) ? stringSize(-i) + 1 : stringSize(i);
byte_len_array[i & 0xFF] = size;
}
}
}
|
Limit key length in 250 both in text and binary protocol
|
src/main/java/net/rubyeye/xmemcached/utils/ByteUtils.java
|
Limit key length in 250 both in text and binary protocol
|
|
Java
|
apache-2.0
|
e7c1ce8662466deb6f722f3df27a597c46a19590
| 0
|
yukuku/androidbible,yukuku/androidbible,infojulio/androidbible,infojulio/androidbible,yukuku/androidbible,yukuku/androidbible,yukuku/androidbible,yukuku/androidbible,infojulio/androidbible,infojulio/androidbible,infojulio/androidbible,infojulio/androidbible,infojulio/androidbible,infojulio/androidbible,yukuku/androidbible,yukuku/androidbible
|
package yuku.alkitab.base;
import android.content.Context;
import android.content.res.Configuration;
import android.os.Build;
import android.preference.PreferenceManager;
import android.support.multidex.MultiDex;
import android.support.v4.content.LocalBroadcastManager;
import android.util.Log;
import android.view.ViewConfiguration;
import com.google.android.gms.analytics.GoogleAnalytics;
import com.google.android.gms.analytics.Tracker;
import com.google.gson.Gson;
import com.squareup.leakcanary.LeakCanary;
import com.squareup.okhttp.Call;
import com.squareup.okhttp.Interceptor;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.internal.Version;
import yuku.afw.storage.Preferences;
import yuku.alkitab.base.model.SyncShadow;
import yuku.alkitab.base.model.VersionImpl;
import yuku.alkitab.base.storage.Prefkey;
import yuku.alkitab.base.sync.Gcm;
import yuku.alkitab.base.sync.Sync;
import yuku.alkitab.debug.R;
import yuku.alkitab.reminder.util.DevotionReminder;
import yuku.alkitabfeedback.FeedbackSender;
import yuku.alkitabintegration.display.Launcher;
import yuku.kirimfidbek.CrashReporter;
import yuku.stethoshim.StethoShim;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
public class App extends yuku.afw.App {
public static final String TAG = App.class.getSimpleName();
private static boolean initted = false;
private static Tracker APP_TRACKER;
enum OkHttpClientWrapper {
INSTANCE;
OkHttpClient defaultClient = new OkHttpClient();
OkHttpClient longTimeoutClient = new OkHttpClient();
{
final Interceptor userAgent = chain -> {
final Request originalRequest = chain.request();
final Request requestWithUserAgent = originalRequest.newBuilder()
.removeHeader("User-Agent")
.addHeader("User-Agent", Version.userAgent() + " " + App.context.getPackageName() + "/" + App.getVersionName())
.build();
return chain.proceed(requestWithUserAgent);
};
defaultClient.networkInterceptors().add(userAgent);
longTimeoutClient.networkInterceptors().add(userAgent);
}
{ // init longTimeoutClient
longTimeoutClient.setConnectTimeout(300, TimeUnit.SECONDS);
longTimeoutClient.setReadTimeout(300, TimeUnit.SECONDS);
longTimeoutClient.setWriteTimeout(600, TimeUnit.SECONDS);
}
{ // init stetho interceptor
StethoShim.addNetworkInterceptor(defaultClient);
StethoShim.addNetworkInterceptor(longTimeoutClient);
}
}
enum GsonWrapper {
INSTANCE;
Gson gson = new Gson();
}
public static String downloadString(String url) throws IOException {
return OkHttpClientWrapper.INSTANCE.defaultClient.newCall(new Request.Builder().url(url).build()).execute().body().string();
}
public static byte[] downloadBytes(String url) throws IOException {
return OkHttpClientWrapper.INSTANCE.defaultClient.newCall(new Request.Builder().url(url).build()).execute().body().bytes();
}
public static Call downloadCall(String url) {
return OkHttpClientWrapper.INSTANCE.defaultClient.newCall(new Request.Builder().url(url).build());
}
public static OkHttpClient getOkHttpClient() {
return OkHttpClientWrapper.INSTANCE.defaultClient;
}
public static OkHttpClient getLongTimeoutOkHttpClient() {
return OkHttpClientWrapper.INSTANCE.longTimeoutClient;
}
@Override public void onCreate() {
super.onCreate();
staticInit();
{ // Google Analytics V4
// This can't be in staticInit because we need the Application instance.
final GoogleAnalytics analytics = GoogleAnalytics.getInstance(context);
final Tracker t = analytics.newTracker(context.getString(R.string.ga_trackingId));
t.enableAutoActivityTracking(true);
t.enableAdvertisingIdCollection(true);
APP_TRACKER = t;
analytics.enableAutoActivityReports(this);
}
{ // LeakCanary, also we need the Application instance.
LeakCanary.install(this);
}
{ // Stetho call through proxy
StethoShim.initializeWithDefaults(this);
}
}
public synchronized static void staticInit() {
if (initted) return;
initted = true;
final CrashReporter cr = new CrashReporter();
cr.activateDefaultUncaughtExceptionHandler();
cr.trySend();
final FeedbackSender fs = FeedbackSender.getInstance(context);
fs.trySend();
PreferenceManager.setDefaultValues(context, R.xml.settings_display, false);
PreferenceManager.setDefaultValues(context, R.xml.settings_usage, false);
PreferenceManager.setDefaultValues(context, R.xml.secret_settings, false);
PreferenceManager.setDefaultValues(context, R.xml.sync_settings, false);
updateConfigurationWithPreferencesLocale();
// all activities need at least the activeVersion from S, so initialize it here.
synchronized (S.class) {
if (S.activeVersion == null) {
S.activeVersion = VersionImpl.getInternalVersion();
}
}
// also pre-calculate calculated preferences value here
S.calculateAppliedValuesBasedOnPreferences();
{ // GCM
Gcm.renewGcmRegistrationIdIfNeeded(Sync::notifyNewGcmRegistrationId);
}
DevotionReminder.scheduleAlarm(context);
forceOverflowMenu();
// make sure launcher do not open other variants of the app
Launcher.setAppPackageName(context.getPackageName());
// sync on app start, if we are logged in
if (Preferences.contains(Prefkey.sync_simpleToken)) {
Sync.notifySyncNeeded(SyncShadow.ALL_SYNC_SET_NAMES);
}
}
private static void forceOverflowMenu() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
return; // no need to do anything, it is already forced on KitKat
}
final ViewConfiguration config = ViewConfiguration.get(context);
try {
final Field sHasPermanentMenuKey = ViewConfiguration.class.getDeclaredField("sHasPermanentMenuKey");
sHasPermanentMenuKey.setAccessible(true);
sHasPermanentMenuKey.setBoolean(config, false);
} catch (Exception e) {
Log.w(TAG, "ViewConfiguration has no sHasPermanentMenuKey field", e);
}
try {
final Field sHasPermanentMenuKeySet = ViewConfiguration.class.getDeclaredField("sHasPermanentMenuKeySet");
sHasPermanentMenuKeySet.setAccessible(true);
sHasPermanentMenuKeySet.setBoolean(config, true);
} catch (Exception e) {
Log.w(TAG, "ViewConfiguration has no sHasPermanentMenuKeySet field", e);
}
}
private static Locale getLocaleFromPreferences() {
final String lang = Preferences.getString(context.getString(R.string.pref_language_key), context.getString(R.string.pref_language_default));
if (lang == null || "DEFAULT".equals(lang)) {
return Locale.getDefault();
}
switch (lang) {
case "zh-CN":
return Locale.SIMPLIFIED_CHINESE;
case "zh-TW":
return Locale.TRADITIONAL_CHINESE;
default:
return new Locale(lang);
}
}
@Override public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
Log.d(TAG, "@@onConfigurationChanged: config changed to: " + newConfig); //$NON-NLS-1$
updateConfigurationWithPreferencesLocale();
}
public static void updateConfigurationWithPreferencesLocale() {
final Configuration config = context.getResources().getConfiguration();
final Locale locale = getLocaleFromPreferences();
if (!U.equals(config.locale.getLanguage(), locale.getLanguage()) || !U.equals(config.locale.getCountry(), locale.getCountry())) {
Log.d(TAG, "@@updateConfigurationWithPreferencesLocale: locale will be updated to: " + locale); //$NON-NLS-1$
config.locale = locale;
context.getResources().updateConfiguration(config, null);
}
}
public static LocalBroadcastManager getLbm() {
return LocalBroadcastManager.getInstance(context);
}
public static Gson getDefaultGson() {
return GsonWrapper.INSTANCE.gson;
}
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
public synchronized static Tracker getTracker() {
return APP_TRACKER;
}
}
|
Alkitab/src/main/java/yuku/alkitab/base/App.java
|
package yuku.alkitab.base;
import android.content.Context;
import android.content.res.Configuration;
import android.os.Build;
import android.preference.PreferenceManager;
import android.support.multidex.MultiDex;
import android.support.v4.content.LocalBroadcastManager;
import android.util.Log;
import android.view.ViewConfiguration;
import com.google.android.gms.analytics.GoogleAnalytics;
import com.google.android.gms.analytics.Tracker;
import com.google.gson.Gson;
import com.squareup.leakcanary.LeakCanary;
import com.squareup.okhttp.Call;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.okhttp.Request;
import yuku.afw.storage.Preferences;
import yuku.alkitab.base.model.SyncShadow;
import yuku.alkitab.base.model.VersionImpl;
import yuku.alkitab.base.storage.Prefkey;
import yuku.alkitab.base.sync.Gcm;
import yuku.alkitab.base.sync.Sync;
import yuku.alkitab.debug.R;
import yuku.alkitab.reminder.util.DevotionReminder;
import yuku.alkitabfeedback.FeedbackSender;
import yuku.alkitabintegration.display.Launcher;
import yuku.kirimfidbek.CrashReporter;
import yuku.stethoshim.StethoShim;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
public class App extends yuku.afw.App {
public static final String TAG = App.class.getSimpleName();
private static boolean initted = false;
private static Tracker APP_TRACKER;
enum OkHttpClientWrapper {
INSTANCE;
OkHttpClient defaultClient = new OkHttpClient();
OkHttpClient longTimeoutClient = new OkHttpClient();
{ // init longTimeoutClient
longTimeoutClient.setConnectTimeout(300, TimeUnit.SECONDS);
longTimeoutClient.setReadTimeout(300, TimeUnit.SECONDS);
longTimeoutClient.setWriteTimeout(600, TimeUnit.SECONDS);
}
{ // init stetho interceptor
StethoShim.addNetworkInterceptor(defaultClient);
StethoShim.addNetworkInterceptor(longTimeoutClient);
}
}
enum GsonWrapper {
INSTANCE;
Gson gson = new Gson();
}
public static String downloadString(String url) throws IOException {
return OkHttpClientWrapper.INSTANCE.defaultClient.newCall(new Request.Builder().url(url).build()).execute().body().string();
}
public static byte[] downloadBytes(String url) throws IOException {
return OkHttpClientWrapper.INSTANCE.defaultClient.newCall(new Request.Builder().url(url).build()).execute().body().bytes();
}
public static Call downloadCall(String url) {
return OkHttpClientWrapper.INSTANCE.defaultClient.newCall(new Request.Builder().url(url).build());
}
public static OkHttpClient getOkHttpClient() {
return OkHttpClientWrapper.INSTANCE.defaultClient;
}
public static OkHttpClient getLongTimeoutOkHttpClient() {
return OkHttpClientWrapper.INSTANCE.longTimeoutClient;
}
@Override public void onCreate() {
super.onCreate();
staticInit();
{ // Google Analytics V4
// This can't be in staticInit because we need the Application instance.
final GoogleAnalytics analytics = GoogleAnalytics.getInstance(context);
final Tracker t = analytics.newTracker(context.getString(R.string.ga_trackingId));
t.enableAutoActivityTracking(true);
t.enableAdvertisingIdCollection(true);
APP_TRACKER = t;
analytics.enableAutoActivityReports(this);
}
{ // LeakCanary, also we need the Application instance.
LeakCanary.install(this);
}
{ // Stetho call through proxy
StethoShim.initializeWithDefaults(this);
}
}
public synchronized static void staticInit() {
if (initted) return;
initted = true;
final CrashReporter cr = new CrashReporter();
cr.activateDefaultUncaughtExceptionHandler();
cr.trySend();
final FeedbackSender fs = FeedbackSender.getInstance(context);
fs.trySend();
PreferenceManager.setDefaultValues(context, R.xml.settings_display, false);
PreferenceManager.setDefaultValues(context, R.xml.settings_usage, false);
PreferenceManager.setDefaultValues(context, R.xml.secret_settings, false);
PreferenceManager.setDefaultValues(context, R.xml.sync_settings, false);
updateConfigurationWithPreferencesLocale();
// all activities need at least the activeVersion from S, so initialize it here.
synchronized (S.class) {
if (S.activeVersion == null) {
S.activeVersion = VersionImpl.getInternalVersion();
}
}
// also pre-calculate calculated preferences value here
S.calculateAppliedValuesBasedOnPreferences();
{ // GCM
Gcm.renewGcmRegistrationIdIfNeeded(Sync::notifyNewGcmRegistrationId);
}
DevotionReminder.scheduleAlarm(context);
forceOverflowMenu();
// make sure launcher do not open other variants of the app
Launcher.setAppPackageName(context.getPackageName());
// sync on app start, if we are logged in
if (Preferences.contains(Prefkey.sync_simpleToken)) {
Sync.notifySyncNeeded(SyncShadow.ALL_SYNC_SET_NAMES);
}
}
private static void forceOverflowMenu() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
return; // no need to do anything, it is already forced on KitKat
}
final ViewConfiguration config = ViewConfiguration.get(context);
try {
final Field sHasPermanentMenuKey = ViewConfiguration.class.getDeclaredField("sHasPermanentMenuKey");
sHasPermanentMenuKey.setAccessible(true);
sHasPermanentMenuKey.setBoolean(config, false);
} catch (Exception e) {
Log.w(TAG, "ViewConfiguration has no sHasPermanentMenuKey field", e);
}
try {
final Field sHasPermanentMenuKeySet = ViewConfiguration.class.getDeclaredField("sHasPermanentMenuKeySet");
sHasPermanentMenuKeySet.setAccessible(true);
sHasPermanentMenuKeySet.setBoolean(config, true);
} catch (Exception e) {
Log.w(TAG, "ViewConfiguration has no sHasPermanentMenuKeySet field", e);
}
}
private static Locale getLocaleFromPreferences() {
final String lang = Preferences.getString(context.getString(R.string.pref_language_key), context.getString(R.string.pref_language_default));
if (lang == null || "DEFAULT".equals(lang)) {
return Locale.getDefault();
}
switch (lang) {
case "zh-CN":
return Locale.SIMPLIFIED_CHINESE;
case "zh-TW":
return Locale.TRADITIONAL_CHINESE;
default:
return new Locale(lang);
}
}
@Override public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
Log.d(TAG, "@@onConfigurationChanged: config changed to: " + newConfig); //$NON-NLS-1$
updateConfigurationWithPreferencesLocale();
}
public static void updateConfigurationWithPreferencesLocale() {
final Configuration config = context.getResources().getConfiguration();
final Locale locale = getLocaleFromPreferences();
if (!U.equals(config.locale.getLanguage(), locale.getLanguage()) || !U.equals(config.locale.getCountry(), locale.getCountry())) {
Log.d(TAG, "@@updateConfigurationWithPreferencesLocale: locale will be updated to: " + locale); //$NON-NLS-1$
config.locale = locale;
context.getResources().updateConfiguration(config, null);
}
}
public static LocalBroadcastManager getLbm() {
return LocalBroadcastManager.getInstance(context);
}
public static Gson getDefaultGson() {
return GsonWrapper.INSTANCE.gson;
}
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
public synchronized static Tracker getTracker() {
return APP_TRACKER;
}
}
|
Add user-agent header to all http requests for debugging
|
Alkitab/src/main/java/yuku/alkitab/base/App.java
|
Add user-agent header to all http requests for debugging
|
|
Java
|
apache-2.0
|
7fdede154172bc5da00090f65bc2bd67736e466f
| 0
|
ffafara/gocd,ffafara/gocd,ffafara/gocd,ffafara/gocd,ffafara/gocd
|
/*************************GO-LICENSE-START*********************************
* Copyright 2014 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.server.domain.xml;
import com.thoughtworks.go.domain.JobInstance;
import com.thoughtworks.go.domain.RunDuration;
import com.thoughtworks.go.domain.Stage;
import com.thoughtworks.go.domain.StageIdentifier;
import com.thoughtworks.go.domain.XmlRepresentable;
import com.thoughtworks.go.domain.XmlWriterContext;
import com.thoughtworks.go.util.DateUtils;
import org.dom4j.Document;
import org.dom4j.Element;
import org.dom4j.dom.DOMDocument;
import org.dom4j.dom.DOMElement;
/**
 * @understands rendering the xml representation of a Stage (including its duration and jobs)
 */
public class StageXmlViewModel implements XmlRepresentable {

    private final Stage stage;

    public StageXmlViewModel(Stage stage) {
        this.stage = stage;
    }

    public static String httpUrlFor(String baseUrl, final long id) {
        return baseUrl + "/api/stages/" + id + ".xml";
    }

    public Document toXml(XmlWriterContext writerContext) {
        final String baseUrl = writerContext.getBaseUrl();

        final DOMElement root = new DOMElement("stage");
        root.addAttribute("name", stage.getName()).addAttribute("counter", String.valueOf(stage.getCounter()));
        final Document document = new DOMDocument(root);

        // self link first, then identity, then pipeline context
        root.addElement("link").addAttribute("rel", "self").addAttribute("href", httpUrl(baseUrl));

        final StageIdentifier identifier = stage.getIdentifier();
        root.addElement("id").addCDATA(identifier.asURN());

        final String pipelineName = identifier.getPipelineName();
        root.addElement("pipeline").addAttribute("name", pipelineName)
                .addAttribute("counter", String.valueOf(identifier.getPipelineCounter()))
                .addAttribute("label", identifier.getPipelineLabel())
                .addAttribute("href", baseUrl + "/api/pipelines/" + pipelineName + "/" + stage.getPipelineId() + ".xml");

        // status and timing summary of the stage
        root.addElement("updated").addText(DateUtils.formatISO8601(stage.latestTransitionDate()));
        root.addElement("result").addText(stage.getResult().toString());
        root.addElement("state").addText(stage.status());
        root.addElement("approvedBy").addCDATA(stage.getApprovedBy());
        root.addElement("duration").addText(stage.getDuration().duration(RunDuration.PERIOD_FORMATTER));

        addJobLinks(root.addElement("jobs"), baseUrl);
        return document;
    }

    // Appends one <job href="..."/> child per job instance of this stage.
    private void addJobLinks(Element jobs, String baseUrl) {
        for (JobInstance jobInstance : stage.getJobInstances()) {
            jobs.addElement("job").addAttribute("href", baseUrl + "/api/jobs/" + jobInstance.getId() + ".xml");
        }
    }

    public String httpUrl(String baseUrl) {
        return httpUrlFor(baseUrl, stage.getId());
    }
}
|
server/src/com/thoughtworks/go/server/domain/xml/StageXmlViewModel.java
|
/*************************GO-LICENSE-START*********************************
* Copyright 2014 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.server.domain.xml;
import com.thoughtworks.go.domain.JobInstance;
import com.thoughtworks.go.domain.Stage;
import com.thoughtworks.go.domain.StageIdentifier;
import com.thoughtworks.go.domain.XmlRepresentable;
import com.thoughtworks.go.domain.XmlWriterContext;
import com.thoughtworks.go.util.DateUtils;
import org.dom4j.Document;
import org.dom4j.Element;
import org.dom4j.dom.DOMDocument;
import org.dom4j.dom.DOMElement;
/**
 * @understands rendering the xml representation of a Stage
 */
public class StageXmlViewModel implements XmlRepresentable {

    private final Stage stage;

    public StageXmlViewModel(Stage stage) {
        this.stage = stage;
    }

    public static String httpUrlFor(String baseUrl, final long id) {
        return baseUrl + "/api/stages/" + id + ".xml";
    }

    public Document toXml(XmlWriterContext writerContext) {
        final DOMElement stageElement = new DOMElement("stage");
        stageElement.addAttribute("name", stage.getName()).addAttribute("counter", String.valueOf(stage.getCounter()));
        final Document document = new DOMDocument(stageElement);

        // self link, identity, and owning-pipeline context
        stageElement.addElement("link").addAttribute("rel", "self").addAttribute("href", httpUrl(writerContext.getBaseUrl()));

        final StageIdentifier identifier = stage.getIdentifier();
        stageElement.addElement("id").addCDATA(identifier.asURN());

        final String pipelineName = identifier.getPipelineName();
        stageElement.addElement("pipeline").addAttribute("name", pipelineName)
                .addAttribute("counter", String.valueOf(identifier.getPipelineCounter()))
                .addAttribute("label", identifier.getPipelineLabel())
                .addAttribute("href", writerContext.getBaseUrl() + "/api/pipelines/" + pipelineName + "/" + stage.getPipelineId() + ".xml");

        // stage status summary
        stageElement.addElement("updated").addText(DateUtils.formatISO8601(stage.latestTransitionDate()));
        stageElement.addElement("result").addText(stage.getResult().toString());
        stageElement.addElement("state").addText(stage.status());
        stageElement.addElement("approvedBy").addCDATA(stage.getApprovedBy());

        // one <job href="..."/> entry per job instance
        final Element jobs = stageElement.addElement("jobs");
        for (JobInstance jobInstance : stage.getJobInstances()) {
            jobs.addElement("job").addAttribute("href", writerContext.getBaseUrl() + "/api/jobs/" + jobInstance.getId() + ".xml");
        }

        return document;
    }

    public String httpUrl(String baseUrl) {
        return httpUrlFor(baseUrl, stage.getId());
    }
}
|
Modified StageXmlViewModel to expose duration property via API xml format
|
server/src/com/thoughtworks/go/server/domain/xml/StageXmlViewModel.java
|
Modified StageXmlViewModel to expose duration property via API xml format
|
|
Java
|
apache-2.0
|
b752d82035288e36cc750d39826ee05bc32dddf4
| 0
|
apixandru/rummikub-java,apixandru/rummikub-java,apixandru/rummikub-java
|
/**
*
*/
package com.github.apixandru.games.rummikub;
import java.util.Arrays;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
/**
 * Tests for the {@code Cards} helper predicates, with particular attention to how
 * jokers (cards with null color and null rank) act as wildcards.
 *
 * @author Alexandru-Constantin Bledea
 * @since Sep 17, 2015
 */
@SuppressWarnings ("static-method")
public final class CardsTest {

    /** A joker: no color, no rank — treated as a wildcard by the predicates under test. */
    private static final Card joker = new Card(null, null);

    /** Plain consecutive ranks are recognized as ascending. */
    @Test
    public void testIsAscendingRanks() {
        final List<Card> cards = Arrays.asList(new Card(Color.RED, Rank.ONE), new Card(Color.RED, Rank.TWO), new Card(Color.RED, Rank.THREE));
        Assert.assertTrue("Cards should be in ascending order", Cards.isAscendingRanks(cards));
    }

    /** A joker can fill a single-rank gap in a run. */
    @Test
    public void testIsAscendingRanksWithJoker() {
        final List<Card> cards = Arrays.asList(new Card(Color.RED, Rank.ONE), joker, new Card(Color.RED, Rank.THREE));
        Assert.assertTrue("Cards should be in ascending order", Cards.isAscendingRanks(cards));
    }

    /** A single joker cannot bridge a two-rank gap (ONE, ?, FOUR). */
    @Test
    public void testIsAscendingRanksWithJokerBad() {
        final List<Card> cards = Arrays.asList(new Card(Color.RED, Rank.ONE), joker, new Card(Color.RED, Rank.FOUR));
        Assert.assertFalse("Cards should not be in ascending order", Cards.isAscendingRanks(cards));
    }

    /** Runs do not wrap around past the highest rank. */
    @Test
    public void testIsAscendingRanksBad() {
        final List<Card> cards = Arrays.asList(new Card(Color.RED, Rank.TWELVE), new Card(Color.RED, Rank.THIRTEEN), new Card(Color.RED, Rank.ONE));
        Assert.assertFalse("Cards should not be in ascending order", Cards.isAscendingRanks(cards));
    }

    /** Two leading jokers before THREE can stand for ONE and TWO. */
    @Test
    public void testIsAscendingRanksDoubleJoker() {
        final List<Card> cards = Arrays.asList(joker, joker, new Card(Color.RED, Rank.THREE));
        Assert.assertTrue("Jokers should map to 1 and 2", Cards.isAscendingRanks(cards));
    }

    /** Two leading jokers cannot precede ONE — there is no room below it. */
    @Test
    public void testIsAscendingRanksDoubleJokerBad() {
        final List<Card> cards = Arrays.asList(joker, joker, new Card(Color.RED, Rank.ONE));
        Assert.assertFalse("Jokers should map to 1 and 2, third card cannot be 1", Cards.isAscendingRanks(cards));
    }

    /** Jokers alone satisfy the same-color predicate. */
    @Test
    public void testIsSameColorJokers() {
        final List<Card> cards = Arrays.asList(joker, joker);
        Assert.assertTrue("Jokers are actually 'the same' color", Cards.isAllSameColor(cards));
    }

    /** Jokers alone satisfy the same-rank predicate. */
    @Test
    public void testIsSameRankJokers() {
        final List<Card> cards = Arrays.asList(joker, joker);
        // Fixed copy-pasted failure message: this test checks ranks, not colors.
        Assert.assertTrue("Jokers are actually 'the same' rank", Cards.isSameRanks(cards));
    }
}
|
src/test/java/com/github/apixandru/games/rummikub/CardsTest.java
|
/**
*
*/
package com.github.apixandru.games.rummikub;
import java.util.Arrays;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
/**
* @author Alexandru-Constantin Bledea
* @since Sep 17, 2015
*/
@SuppressWarnings ("static-method")
public final class CardsTest {
private static final Card joker = new Card(null, null);
/**
*
*/
@Test
public void testIsAscendingRanks() {
final List<Card> cards = Arrays.asList(new Card(Color.RED, Rank.ONE), new Card(Color.RED, Rank.TWO), new Card(Color.RED, Rank.THREE));
Assert.assertTrue("Cards should be in ascending order", Cards.isAscendingRanks(cards));
}
/**
*
*/
@Test
public void testIsAscendingRanksWithJoker() {
final List<Card> cards = Arrays.asList(new Card(Color.RED, Rank.ONE), joker, new Card(Color.RED, Rank.THREE));
Assert.assertTrue("Cards should be in ascending order", Cards.isAscendingRanks(cards));
}
/**
*
*/
@Test
public void testIsAscendingRanksWithJokerBad() {
final List<Card> cards = Arrays.asList(new Card(Color.RED, Rank.ONE), joker, new Card(Color.RED, Rank.FOUR));
Assert.assertFalse("Cards should not be in ascending order", Cards.isAscendingRanks(cards));
}
/**
*
*/
@Test
public void testIsAscendingRanksBad() {
final List<Card> cards = Arrays.asList(new Card(Color.RED, Rank.TWELVE), new Card(Color.RED, Rank.THIRTEEN), new Card(Color.RED, Rank.ONE));
Assert.assertFalse("Cards should not be in ascending order", Cards.isAscendingRanks(cards));
}
/**
*
*/
@Test
public void testIsAscendingRanksDoubleJoker() {
final List<Card> cards = Arrays.asList(joker, joker, new Card(Color.RED, Rank.THREE));
Assert.assertTrue("Jokers should map to 1 and 2", Cards.isAscendingRanks(cards));
}
/**
*
*/
@Test
public void testIsAscendingRanksDoubleJokerBad() {
final List<Card> cards = Arrays.asList(joker, joker, new Card(Color.RED, Rank.ONE));
Assert.assertFalse("Jokers should map to 1 and 2, third card cannot be 1", Cards.isAscendingRanks(cards));
}
/**
*
*/
@Test
public void testIsSameJokers() {
final List<Card> cards = Arrays.asList(joker, joker);
Assert.assertTrue("Jokers are actually 'the same' color", Cards.isAllSameColor(cards));
}
}
|
add another test, rename old one
|
src/test/java/com/github/apixandru/games/rummikub/CardsTest.java
|
add another test, rename old one
|
|
Java
|
apache-2.0
|
872c6ff840c4cb73e1149da1534e04f7947c3b87
| 0
|
pCloud/pcloud-networking-java
|
/*
* Copyright (c) 2017 pCloud AG
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pcloud.networking.api;
import com.pcloud.networking.client.PCloudAPIClient;
import com.pcloud.networking.serialization.Transformer;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Composes implementations for interfaces in which you have provided methods that describe your network calls
 *
 * @see PCloudAPIClient
 * @see ResponseInterceptor
 */
@SuppressWarnings({"WeakerAccess", "unused"})
public class ApiComposer {

    /**
     * Creates and returns a {@linkplain Builder} to build the {@linkplain ApiComposer} with
     *
     * @return A new instance of a {@linkplain Builder} to build the {@linkplain ApiComposer} with
     */
    public static Builder create() {
        return new Builder();
    }

    /**
     * Internal list of {@linkplain ApiMethod.Factory} factories
     * <p>
     * Holds the list of available {@linkplain ApiMethod.Factory} implementations.
     * <p>
     * <b>Note that the order of elements is important, please consider it before modification.</b>
     */
    private static final List<ApiMethod.Factory> BUILT_IN_API_METHOD_FACTORIES = Arrays.asList(
            MultiCallWrappedApiMethod.FACTORY,
            CallWrappedApiMethod.FACTORY,
            DirectApiMethod.FACTORY
    );

    /**
     * The built-in set of {@linkplain CallAdapter.Factory} implementations
     * <p>
     * <b>Order matters here as well: factories are queried first-to-last and the first match wins.</b>
     */
    private static final List<CallAdapter.Factory> BUILT_IN_CALL_ADAPTER_FACTORIES = Arrays.asList(
            CallWrappedCallAdapter.FACTORY,
            DirectCallAdapter.FACTORY
    );

    private PCloudAPIClient apiClient;
    private Transformer transformer;
    private List<ResponseInterceptor> interceptors;
    private boolean loadEagerly;

    // Cache of resolved method implementations; concurrent because proxies returned
    // by compose() may be invoked from multiple threads.
    private final Map<Method, ApiMethod<?>> apiMethodsCache = new ConcurrentHashMap<>();
    private final List<ApiMethod.Factory> factories = new ArrayList<>(BUILT_IN_API_METHOD_FACTORIES);
    private final List<CallAdapter.Factory> callAdapterFactories = new ArrayList<>(BUILT_IN_CALL_ADAPTER_FACTORIES);

    private ApiComposer(Builder builder) {
        if (builder.apiClient == null) {
            throw new IllegalArgumentException("PCloudAPIClient instance cannot be null.");
        }
        this.apiClient = builder.apiClient;
        // Fall back to a default Transformer when none was configured on the builder.
        this.transformer = builder.transformer != null ? builder.transformer : Transformer.create().build();
        this.interceptors = new ArrayList<>(builder.interceptors);
        // User-supplied adapter factories are consulted after the built-in ones.
        this.callAdapterFactories.addAll(builder.callAdapterFactories);
        this.loadEagerly = builder.loadEagerly;
    }

    /**
     * Returns the {@linkplain Transformer} you provided in the {@linkplain Builder}
     *
     * @return The {@linkplain Transformer} you provided in the {@linkplain Builder}
     */
    public Transformer transformer() {
        return transformer;
    }

    /**
     * Returns the {@linkplain PCloudAPIClient} you provided in the {@linkplain Builder}
     *
     * @return The {@linkplain PCloudAPIClient} you provided in the {@linkplain Builder}
     */
    public PCloudAPIClient apiClient() {
        return apiClient;
    }

    /**
     * Returns a {@linkplain List} of all the {@linkplain ResponseInterceptor}
     * objects you provided in the {@linkplain Builder}
     *
     * @return A {@linkplain List} of all the {@linkplain ResponseInterceptor}
     * objects you provided in the {@linkplain Builder}
     */
    public List<ResponseInterceptor> interceptors() {
        return interceptors;
    }

    /**
     * Composes an instance of the java interface which houses the network call methods
     * <p>
     * Note that you should provide an interface which does not extend anything or else this wont work!
     *
     * @param apiType The class of the java interface in which you have
     *                written the methods to represent the network calls
     * @param <T>     The generic type of the returned instance
     * @return An instance of a class of the same generic type as the class you provided as an argument
     * implementing the interface in which you have written methods to represent your network calls
     * @throws RuntimeException if {@linkplain #loadEagerly} is set to true and if the instantiation fails
     */
    @SuppressWarnings("unchecked")
    public <T> T compose(Class<T> apiType) {
        validateApiInterface(apiType);

        // Eager mode: resolve (and validate) every declared method up front so
        // configuration errors surface here rather than on first invocation.
        if (loadEagerly) {
            Method[] methods = apiType.getDeclaredMethods();
            for (Method method : methods) {
                loadApiMethod(method);
            }
        }

        return (T) Proxy.newProxyInstance(apiType.getClassLoader(), new Class<?>[]{apiType},
                new InvocationHandler() {
                    @Override
                    public Object invoke(Object proxy, java.lang.reflect.Method method, Object[] args)
                            throws Throwable {
                        // If the method is a method from Object then defer to normal invocation.
                        if (method.getDeclaringClass() == Object.class) {
                            return method.invoke(this, args);
                        }

                        if (args == null) {
                            args = ApiMethod.EMPTY_ARGS;
                        }

                        ApiMethod<Object> apiMethod = (ApiMethod<Object>) loadApiMethod(method);
                        return apiMethod.invoke(ApiComposer.this, args);
                    }
                });
    }

    // Returns the first CallAdapter that accepts the given method, or null when
    // none of the registered factories can handle it.
    CallAdapter<?, ?> nextCallAdapter(Method method) {
        CallAdapter<?, ?> callAdapter = null;
        for (CallAdapter.Factory adapterFactory : callAdapterFactories) {
            if ((callAdapter = adapterFactory.get(this, method)) != null) {
                break;
            }
        }
        return callAdapter;
    }

    // Resolves (and caches) the ApiMethod implementation for a Java interface method.
    // NOTE(review): get-then-put is not atomic, so two threads may race to create the
    // same ApiMethod; the last write wins, which appears harmless here — confirm
    // ApiMethod creation has no side effects before relying on it.
    private ApiMethod<?> loadApiMethod(Method javaMethod) {
        ApiMethod<?> apiMethod = apiMethodsCache.get(javaMethod);
        if (apiMethod == null) {
            apiMethod = createApiMethod(javaMethod);
            if (apiMethod == null) {
                throw ApiMethod.Factory.apiMethodError(javaMethod,
                        "Cannot adapt method, return type '%s' is not supported.",
                        javaMethod.getReturnType().getName());
            }

            apiMethodsCache.put(javaMethod, apiMethod);
        }

        return apiMethod;
    }

    // Asks each ApiMethod.Factory in order to adapt the method; first match wins,
    // null when no factory supports the method's shape.
    private ApiMethod<?> createApiMethod(Method javaMethod) {
        ApiMethod<?> apiMethod = null;
        Type[] argumentTypes = javaMethod.getGenericParameterTypes();
        Annotation[][] argumentAnnotations = javaMethod.getParameterAnnotations();
        for (ApiMethod.Factory factory : factories) {
            if ((apiMethod = factory.create(this, javaMethod, argumentTypes, argumentAnnotations)) != null) {
                break;
            }
        }
        return apiMethod;
    }

    // Rejects non-interfaces and interfaces that extend other interfaces.
    private static <T> void validateApiInterface(Class<T> apiInterface) {
        if (!apiInterface.isInterface()) {
            throw new IllegalArgumentException("API declaration '" + apiInterface + "' is not an interface.");
        }
        // Avoid a bug in Android's Dalvik VM for versions 4.x, http://b.android.com/58753
        if (apiInterface.getInterfaces().length > 0) {
            throw new IllegalArgumentException("API declarations must not extend other interfaces.");
        }
    }

    /**
     * Returns a new instance of a {@linkplain Builder} to build a new {@linkplain ApiComposer}
     *
     * @return A new instance of a {@linkplain Builder} to build a new {@linkplain ApiComposer}
     */
    public Builder newBuilder() {
        return new Builder(this);
    }

    /**
     * A builder to build instances of {@linkplain ApiComposer}
     */
    public static class Builder {

        private PCloudAPIClient apiClient;
        private Transformer transformer;
        private List<ResponseInterceptor> interceptors = new LinkedList<>();
        private List<CallAdapter.Factory> callAdapterFactories = new ArrayList<>();
        private boolean loadEagerly;

        private Builder() {
        }

        private Builder(ApiComposer composer) {
            this.apiClient = composer.apiClient;
            this.transformer = composer.transformer;
            this.interceptors = new ArrayList<>(composer.interceptors);
            this.callAdapterFactories = new ArrayList<>(composer.callAdapterFactories);
            // Strip the built-ins so they are not duplicated when the new
            // ApiComposer re-adds them in its constructor.
            this.callAdapterFactories.removeAll(BUILT_IN_CALL_ADAPTER_FACTORIES);
        }

        /**
         * Sets the {@linkplain PCloudAPIClient} for the {@linkplain ApiComposer}
         *
         * @param apiClient The {@linkplain PCloudAPIClient} to be set to the {@linkplain ApiComposer}
         * @return A reference to the {@linkplain Builder} object
         * @throws IllegalArgumentException on a null {@linkplain PCloudAPIClient} argument
         */
        public Builder apiClient(PCloudAPIClient apiClient) {
            if (apiClient == null) {
                throw new IllegalArgumentException("PCloudAPIClient argument cannot be null.");
            }
            this.apiClient = apiClient;
            return this;
        }

        /**
         * Sets the {@linkplain Transformer} for this {@linkplain ApiComposer}
         *
         * @param transformer The {@linkplain Transformer} to be set to the {@linkplain ApiComposer}
         * @return A reference to the {@linkplain Builder} object
         * @throws IllegalArgumentException on a null {@linkplain Transformer} argument
         */
        public Builder transformer(Transformer transformer) {
            if (transformer == null) {
                throw new IllegalArgumentException("Transformer argument cannot be null.");
            }
            this.transformer = transformer;
            return this;
        }

        /**
         * Sets a single {@linkplain ResponseInterceptor} for this {@linkplain ApiComposer}
         *
         * @param interceptor The {@linkplain ResponseInterceptor} to be set to the {@linkplain ApiComposer}
         * @return A reference to the {@linkplain Builder} object
         * @throws IllegalArgumentException on a null {@linkplain ResponseInterceptor} argument
         */
        public Builder addInterceptor(ResponseInterceptor interceptor) {
            if (interceptor == null) {
                throw new IllegalArgumentException("ResponseInterceptor argument cannot be null.");
            }
            this.interceptors.add(interceptor);
            return this;
        }

        /**
         * Removes a single {@linkplain ResponseInterceptor} from the {@linkplain Builder}
         *
         * @param interceptor The {@linkplain ResponseInterceptor} to be removed from the {@linkplain Builder}
         * @return A reference to the {@linkplain Builder} object
         * @throws IllegalArgumentException on a null {@linkplain ResponseInterceptor} argument
         */
        public Builder removeInterceptor(ResponseInterceptor interceptor) {
            if (interceptor == null) {
                throw new IllegalArgumentException("ResponseInterceptor argument cannot be null.");
            }
            this.interceptors.remove(interceptor);
            return this;
        }

        /**
         * Removes a {@linkplain Collection} of {@linkplain ResponseInterceptor} from the {@linkplain Builder}
         * <p>
         * Does the same thing as {@linkplain #removeInterceptor(ResponseInterceptor)}
         * for each {@linkplain ResponseInterceptor} in the {@linkplain Collection}
         *
         * @param interceptors A {@linkplain Collection} of {@linkplain ResponseInterceptor}
         *                     objects to be removed from the {@linkplain Builder}
         * @return A reference to the {@linkplain Builder} object
         * @throws IllegalArgumentException on a null {@linkplain Collection} argument
         */
        public Builder removeInterceptors(Collection<ResponseInterceptor> interceptors) {
            if (interceptors == null) {
                throw new IllegalArgumentException("ResponseInterceptor collection argument cannot be null.");
            }
            for (ResponseInterceptor r : interceptors) {
                removeInterceptor(r);
            }
            return this;
        }

        /**
         * Adds a {@linkplain Collection} of {@linkplain ResponseInterceptor} objects for the {@linkplain Builder}
         * <p>
         * Does the same thing as {@linkplain #addInterceptor(ResponseInterceptor)} for each
         * {@linkplain ResponseInterceptor} in the {@linkplain Collection}
         *
         * @param interceptors A {@linkplain Collection} of {@linkplain ResponseInterceptor}
         *                     objects to be added to the {@linkplain ApiComposer}
         * @return A reference to the {@linkplain Builder} object
         * @throws IllegalArgumentException on a null {@linkplain Collection} argument
         */
        public Builder addInterceptors(Collection<ResponseInterceptor> interceptors) {
            if (interceptors == null) {
                throw new IllegalArgumentException("ResponseInterceptor collection argument cannot be null.");
            }
            for (ResponseInterceptor r : interceptors) {
                addInterceptor(r);
            }
            return this;
        }

        /**
         * Sets a single {@linkplain CallAdapter.Factory} for this {@linkplain ApiComposer}
         *
         * @param adapterFactory The {@linkplain CallAdapter.Factory} to be set to the {@linkplain ApiComposer}
         * @return A reference to the {@linkplain Builder} object
         * @throws IllegalArgumentException on a null {@linkplain CallAdapter.Factory} argument
         */
        public Builder addAdapterFactory(CallAdapter.Factory adapterFactory) {
            if (adapterFactory == null) {
                throw new IllegalArgumentException("CallAdapter.Factory argument cannot be null.");
            }
            this.callAdapterFactories.add(adapterFactory);
            return this;
        }

        /**
         * Adds an array of {@linkplain CallAdapter.Factory} objects for the {@linkplain Builder}
         * <p>
         * Does the same thing as {@linkplain #addAdapterFactory(CallAdapter.Factory)} for each
         * {@linkplain CallAdapter.Factory} in the {@linkplain Collection}
         *
         * @param adapterFactories An array of {@linkplain CallAdapter.Factory}
         *                         objects to be added to the {@linkplain ApiComposer}
         * @return A reference to the {@linkplain Builder} object
         * @throws IllegalArgumentException on a null array argument
         */
        public Builder addAdapterFactories(CallAdapter.Factory... adapterFactories) {
            if (adapterFactories == null) {
                throw new IllegalArgumentException("CallAdapter.Factory collection argument cannot be null.");
            }
            return addAdapterFactories(Arrays.asList(adapterFactories));
        }

        /**
         * Adds a {@linkplain Collection} of {@linkplain CallAdapter.Factory} objects for the {@linkplain Builder}
         * <p>
         * Does the same thing as {@linkplain #addAdapterFactory(CallAdapter.Factory)} for each
         * {@linkplain CallAdapter.Factory} in the {@linkplain Collection}
         *
         * @param adapterFactories A {@linkplain Collection} of {@linkplain CallAdapter.Factory}
         *                         objects to be added to the {@linkplain ApiComposer}
         * @return A reference to the {@linkplain Builder} object
         * @throws IllegalArgumentException on a null {@linkplain Iterable} argument
         */
        public Builder addAdapterFactories(Iterable<CallAdapter.Factory> adapterFactories) {
            if (adapterFactories == null) {
                throw new IllegalArgumentException("CallAdapter.Factory collection argument cannot be null.");
            }
            for (CallAdapter.Factory factory : adapterFactories) {
                addAdapterFactory(factory);
            }
            return this;
        }

        /**
         * Determine if the interface implementation will be lazy or eager.
         * <p>
         * If set to {@code true} the {@linkplain ApiComposer} created from this builder will initialize
         * all interface method implementations immediately after the {@linkplain ApiComposer#compose(Class)} is called.
         * <p>
         * If set to {@code false} the {@linkplain ApiComposer#compose(Class)} will only check
         * basic conditions for the supplied interface class and will actually resolve
         * the interface method implementations on-demand (when the methods are called for the first time).
         *
         * @param loadEagerly the condition for whether the interface implementation should be lazy or eager
         * @return A reference to the {@linkplain Builder} object
         */
        public Builder loadEagerly(boolean loadEagerly) {
            this.loadEagerly = loadEagerly;
            return this;
        }

        /**
         * Creates and returns a new instance of the {@linkplain ApiComposer}
         * with the parameters set via the {@linkplain Builder}
         *
         * @return A new instance of the {@linkplain ApiComposer} with the parameters set via the {@linkplain Builder}
         */
        public ApiComposer create() {
            return new ApiComposer(this);
        }
    }
}
|
composer/src/main/java/com/pcloud/networking/api/ApiComposer.java
|
/*
* Copyright (c) 2017 pCloud AG
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pcloud.networking.api;
import com.pcloud.networking.client.PCloudAPIClient;
import com.pcloud.networking.serialization.Transformer;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* Composes implementations for interfaces in which you have provided methods that describe your network calls
*
* @see PCloudAPIClient
* @see ResponseInterceptor
*/
@SuppressWarnings({"WeakerAccess", "unused"})
public class ApiComposer {
/**
* Creates and returns a {@linkplain Builder} to build the {@linkplain ApiComposer} with
*
* @return A new instance of a {@linkplain Builder} to build the {@linkplain ApiComposer} with
*/
public static Builder create() {
return new Builder();
}
/**
* Internal list of {@linkplain ApiMethod.Factory} factories
* <p>
* Holds the list of available {@linkplain ApiMethod.Factory} implementations.
* <p>
* <b>Note that the order of elements is important, please consider it before modification.</b>
*/
private static final List<ApiMethod.Factory> BUILT_IN_API_METHOD_FACTORIES = Arrays.asList(
MultiCallWrappedApiMethod.FACTORY,
CallWrappedApiMethod.FACTORY,
DirectApiMethod.FACTORY
);
/**
* The built-in set of {@linkplain CallAdapter.Factory} implementations
*/
private static final List<CallAdapter.Factory> BUILD_IN_CALL_ADAPTER_FACTORIES = Arrays.asList(
CallWrappedCallAdapter.FACTORY,
DirectCallAdapter.FACTORY
);
private PCloudAPIClient apiClient;
private Transformer transformer;
private List<ResponseInterceptor> interceptors;
private boolean loadEagerly;
private Map<Method, ApiMethod<?>> apiMethodsCache = new ConcurrentHashMap<>();
private List<ApiMethod.Factory> factories = new ArrayList<>(BUILT_IN_API_METHOD_FACTORIES);
private List<CallAdapter.Factory> callAdapterFactories = new ArrayList<>(BUILD_IN_CALL_ADAPTER_FACTORIES);
private ApiComposer(Builder builder) {
if (builder.apiClient == null) {
throw new IllegalArgumentException("PCloudAPIClient instance cannot be null.");
}
this.apiClient = builder.apiClient;
this.transformer = builder.transformer != null ? builder.transformer : Transformer.create().build();
this.interceptors = new ArrayList<>(builder.interceptors);
this.callAdapterFactories.addAll(builder.callAdapterFactories);
this.loadEagerly = builder.loadEagerly;
}
/**
* Returns the {@linkplain Transformer} you provided in the {@linkplain Builder}
*
* @return The {@linkplain Transformer} you provided in the {@linkplain Builder}
*/
public Transformer transformer() {
return transformer;
}
/**
* Returns the {@linkplain PCloudAPIClient} you provided in the {@linkplain Builder}
*
* @return The {@linkplain PCloudAPIClient} you provided in the {@linkplain Builder}
*/
public PCloudAPIClient apiClient() {
return apiClient;
}
/**
* Returns a {@linkplain List} of all the {@linkplain ResponseInterceptor}
* objects you provided in the {@linkplain Builder}
*
* @return A {@linkplain List} of all the {@linkplain ResponseInterceptor}
* objects you provided in the {@linkplain Builder}
*/
public List<ResponseInterceptor> interceptors() {
return interceptors;
}
/**
* Composes an instance of the java interface which houses the network call methods
* <p>
* Note that you should provide an interface which does not extend anything or else this wont work!
*
* @param apiType The class of the java interface in which you have
* written the methods to represent the network calls
* @param <T> The generic type of the returned instance
* @return An instance of a class of the same generic type as the class you provided as an argument
* implementing the interface in which you have written methods to represent your network calls
* @throws RuntimeException if {@linkplain #loadEagerly} is set to true and if the instantiation fails
*/
@SuppressWarnings("unchecked")
public <T> T compose(Class<T> apiType) {
validateApiInterface(apiType);
if (loadEagerly) {
Method[] methods = apiType.getDeclaredMethods();
for (Method method : methods) {
loadApiMethod(method);
}
}
return (T) Proxy.newProxyInstance(apiType.getClassLoader(), new Class<?>[]{apiType},
new InvocationHandler() {
@Override
public Object invoke(Object proxy, java.lang.reflect.Method method, Object[] args)
throws Throwable {
// If the method is a method from Object then defer to normal invocation.
if (method.getDeclaringClass() == Object.class) {
return method.invoke(this, args);
}
if (args == null) {
args = ApiMethod.EMPTY_ARGS;
}
ApiMethod<Object> apiMethod = (ApiMethod<Object>) loadApiMethod(method);
return apiMethod.invoke(ApiComposer.this, args);
}
});
}
CallAdapter<?, ?> nextCallAdapter(Method method) {
CallAdapter<?, ?> callAdapter = null;
for (CallAdapter.Factory adapterFactory : callAdapterFactories) {
if ((callAdapter = adapterFactory.get(this, method)) != null) {
break;
}
}
return callAdapter;
}
private ApiMethod<?> loadApiMethod(Method javaMethod) {
ApiMethod<?> apiMethod = apiMethodsCache.get(javaMethod);
if (apiMethod == null) {
apiMethod = createApiMethod(javaMethod);
if (apiMethod == null) {
throw ApiMethod.Factory.apiMethodError(javaMethod,
"Cannot adapt method, return type '%s' is not supported.",
javaMethod.getReturnType().getName());
}
apiMethodsCache.put(javaMethod, apiMethod);
}
return apiMethod;
}
private ApiMethod<?> createApiMethod(Method javaMethod) {
ApiMethod<?> apiMethod = null;
Type[] argumentTypes = javaMethod.getGenericParameterTypes();
Annotation[][] argumentAnnotations = javaMethod.getParameterAnnotations();
for (ApiMethod.Factory factory : factories) {
if ((apiMethod = factory.create(this, javaMethod, argumentTypes, argumentAnnotations)) != null) {
break;
}
}
return apiMethod;
}
private static <T> void validateApiInterface(Class<T> apiInterface) {
if (!apiInterface.isInterface()) {
throw new IllegalArgumentException("API declaration '" + apiInterface + "' is not an interface.");
}
// Avoid a bug in Android's Dalvik VM for versions 4.x, http://b.android.com/58753
if (apiInterface.getInterfaces().length > 0) {
throw new IllegalArgumentException("API declarations must not extend other interfaces.");
}
}
/**
* Returns a new instance of a {@linkplain Builder} to build a new {@linkplain ApiComposer}
*
* @return A new instance of a {@linkplain Builder} to build a new {@linkplain ApiComposer}
*/
public Builder newBuilder() {
this.callAdapterFactories.removeAll(BUILD_IN_CALL_ADAPTER_FACTORIES);
return new Builder(this);
}
/**
* A builder to build instances of {@linkplain ApiComposer}
*/
public static class Builder {
private PCloudAPIClient apiClient;
private Transformer transformer;
private List<ResponseInterceptor> interceptors = new LinkedList<>();
private List<CallAdapter.Factory> callAdapterFactories = new ArrayList<>();
private boolean loadEagerly;
private Builder() {
}
private Builder(ApiComposer composer) {
this.apiClient = composer.apiClient;
this.transformer = composer.transformer;
this.interceptors = new ArrayList<>(composer.interceptors);
this.callAdapterFactories = new ArrayList<>(composer.callAdapterFactories);
}
/**
* Sets the {@linkplain PCloudAPIClient} for the {@linkplain ApiComposer}
*
* @param apiClient The {@linkplain PCloudAPIClient} to be set to the {@linkplain ApiComposer}
* @return A reference to the {@linkplain Builder} object
* @throws IllegalArgumentException on a null {@linkplain PCloudAPIClient} argument
*/
public Builder apiClient(PCloudAPIClient apiClient) {
if (apiClient == null) {
throw new IllegalArgumentException("PCloudAPIClient argument cannot be null.");
}
this.apiClient = apiClient;
return this;
}
/**
* Sets the {@linkplain Transformer} for this {@linkplain ApiComposer}
*
* @param transformer The {@linkplain Transformer} to be set to the {@linkplain ApiComposer}
* @return A reference to the {@linkplain Builder} object
* @throws IllegalArgumentException on a null {@linkplain Transformer} argument
*/
public Builder transformer(Transformer transformer) {
if (transformer == null) {
throw new IllegalArgumentException("Transformer argument cannot be null.");
}
this.transformer = transformer;
return this;
}
/**
* Sets a single {@linkplain ResponseInterceptor} for this {@linkplain ApiComposer}
*
* @param interceptor The {@linkplain ResponseInterceptor} to be set to the {@linkplain ApiComposer}
* @return A reference to the {@linkplain Builder} object
* @throws IllegalArgumentException on a null {@linkplain ResponseInterceptor} argument
*/
public Builder addInterceptor(ResponseInterceptor interceptor) {
if (interceptor == null) {
throw new IllegalArgumentException("ResponseInterceptor argument cannot be null.");
}
this.interceptors.add(interceptor);
return this;
}
/**
* Removes a single {@linkplain ResponseInterceptor} from the {@linkplain Builder}
*
* @param interceptor The {@linkplain ResponseInterceptor} to be removed from the {@linkplain Builder}
* @return A reference to the {@linkplain Builder} object
* @throws IllegalArgumentException on a null {@linkplain ResponseInterceptor} argument
*/
public Builder removeInterceptor(ResponseInterceptor interceptor) {
if (interceptor == null) {
throw new IllegalArgumentException("ResponseInterceptor argument cannot be null.");
}
this.interceptors.remove(interceptor);
return this;
}
/**
* Removes a {@linkplain Collection} of {@linkplain ResponseInterceptor} from the {@linkplain Builder}
* <p>
* Does the same thing as {@linkplain #removeInterceptor(ResponseInterceptor)}
* for each {@linkplain ResponseInterceptor} in the {@linkplain Collection}
*
* @param interceptors A {@linkplain Collection} of {@linkplain ResponseInterceptor}
* objects to be removed from the {@linkplain Builder}
* @return A reference to the {@linkplain Builder} object
* @throws IllegalArgumentException on a null {@linkplain Collection} argument
*/
public Builder removeInterceptors(Collection<ResponseInterceptor> interceptors) {
if (interceptors == null) {
throw new IllegalArgumentException("ResponseInterceptor collection argument cannot be null.");
}
for (ResponseInterceptor r : interceptors) {
removeInterceptor(r);
}
return this;
}
/**
* Adds a {@linkplain Collection} of {@linkplain ResponseInterceptor} objects for the {@linkplain Builder}
* <p>
* Does the same thing as {@linkplain #addInterceptor(ResponseInterceptor)} for each
* {@linkplain ResponseInterceptor} in the {@linkplain Collection}
*
* @param interceptors A {@linkplain Collection} of {@linkplain ResponseInterceptor}
* objects to be added to the {@linkplain ApiComposer}
* @return A reference to the {@linkplain Builder} object
*/
public Builder addInterceptors(Collection<ResponseInterceptor> interceptors) {
if (interceptors == null) {
throw new IllegalArgumentException("ResponseInterceptor collection argument cannot be null.");
}
for (ResponseInterceptor r : interceptors) {
addInterceptor(r);
}
return this;
}
/**
* Sets a single {@linkplain CallAdapter.Factory} for this {@linkplain ApiComposer}
*
* @param adapterFactory The {@linkplain CallAdapter.Factory} to be set to the {@linkplain ApiComposer}
* @return A reference to the {@linkplain Builder} object
* @throws IllegalArgumentException on a null {@linkplain CallAdapter.Factory} argument
*/
public Builder addAdapterFactory(CallAdapter.Factory adapterFactory) {
if (adapterFactory == null) {
throw new IllegalArgumentException("CallAdapter.Factory argument cannot be null.");
}
this.callAdapterFactories.add(adapterFactory);
return this;
}
/**
* Adds an array of {@linkplain CallAdapter.Factory} objects for the {@linkplain Builder}
* <p>
* Does the same thing as {@linkplain #addAdapterFactory(CallAdapter.Factory)} for each
* {@linkplain CallAdapter.Factory} in the {@linkplain Collection}
*
* @param adapterFactories An array of {@linkplain CallAdapter.Factory}
* objects to be added to the {@linkplain ApiComposer}
* @return A reference to the {@linkplain Builder} object
*/
public Builder addAdapterFactories(CallAdapter.Factory... adapterFactories) {
if (adapterFactories == null) {
throw new IllegalArgumentException("CallAdapter.Factory collection argument cannot be null.");
}
return addAdapterFactories(Arrays.asList(adapterFactories));
}
/**
* Adds a {@linkplain Collection} of {@linkplain CallAdapter.Factory} objects for the {@linkplain Builder}
* <p>
* Does the same thing as {@linkplain #addAdapterFactory(CallAdapter.Factory)} for each
* {@linkplain CallAdapter.Factory} in the {@linkplain Collection}
*
* @param adapterFactories A {@linkplain Collection} of {@linkplain CallAdapter.Factory}
* objects to be added to the {@linkplain ApiComposer}
* @return A reference to the {@linkplain Builder} object
*/
public Builder addAdapterFactories(Iterable<CallAdapter.Factory> adapterFactories) {
if (adapterFactories == null) {
throw new IllegalArgumentException("CallAdapter.Factory collection argument cannot be null.");
}
for (CallAdapter.Factory factory : adapterFactories) {
addAdapterFactory(factory);
}
return this;
}
/**
* Determine if the interface implementation will be lazy or eager.
* <p>
* If set to {@code true }the {@linkplain ApiComposer} created from this builder will initialize
* all interface method implementations immediately after the {@linkplain ApiComposer#compose(Class)} is called.
* <p>
* If set to {@code false} the {@linkplain ApiComposer#compose(Class)} will only
* basic conditions for the supplied interface class and will actually resolve
* the interface method implementations on-demand (when the methods are called for the first time).
*
* @param loadEagerly the condition for whether the interface implementation should be lazy or eager
* @return A reference to the {@linkplain Builder} object
*/
public Builder loadEagerly(boolean loadEagerly) {
this.loadEagerly = loadEagerly;
return this;
}
/**
* Creates and returns a new instance of the {@linkplain ApiComposer}
* with the parameters set via the {@linkplain Builder}
*
* @return A new instance of the {@linkplain ApiComposer} with the parameters set via the {@linkplain Builder}
*/
public ApiComposer create() {
return new ApiComposer(this);
}
}
}
|
(Composer): Fix a `ConcurrentModificationException` whencreating `ApiComposer.Builder` instances
|
composer/src/main/java/com/pcloud/networking/api/ApiComposer.java
|
(Composer): Fix a `ConcurrentModificationException` whencreating `ApiComposer.Builder` instances
|
|
Java
|
apache-2.0
|
419af2c7fd44899bb04c13981f2d9c73db4f6da5
| 0
|
softindex/datakernel,softindex/datakernel,softindex/datakernel
|
/*
* Copyright (C) 2015-2018 SoftIndex LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.datakernel.async;
import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.function.BiConsumer;
import java.util.stream.Collector;
public interface IndexedCollector<T, A, R> {
A accumulator(int stages);
void accumulate(A accumulator, int stageIndex, T stageResult);
R finish(A accumulator);
default R resultOf() {
return resultOf(Collections.emptyList());
}
default R resultOf(T value1) {
//noinspection ArraysAsListWithZeroOrOneArgument - using asList instead of singletonList() to allow mutability
return resultOf(Arrays.asList(value1));
}
default R resultOf(T value1, T value2) {
return resultOf(Arrays.asList(value1, value2));
}
default R resultOf(List<? extends T> values) {
A accumulator = accumulator(values.size());
for (int i = 0; i < values.size(); i++) {
accumulate(accumulator, i, values.get(i));
}
return finish(accumulator);
}
static <T, A, R> IndexedCollector<T, A, R> ofCollector(Collector<T, A, R> collector) {
return new IndexedCollector<T, A, R>() {
final BiConsumer<A, T> accumulator = collector.accumulator();
@Override
public A accumulator(int stages) {
return collector.supplier().get();
}
@Override
public void accumulate(A accumulator, int stageIndex, T stageResult) {
this.accumulator.accept(accumulator, stageResult);
}
@Override
public R finish(A accumulator) {
return collector.finisher().apply(accumulator);
}
};
}
IndexedCollector<Object, Object[], List<Object>> TO_LIST = new IndexedCollector<Object, Object[], List<Object>>() {
@Override
public Object[] accumulator(int stages) {
return new Object[stages];
}
@Override
public void accumulate(Object[] accumulator, int stageIndex, Object stageResult) {
accumulator[stageIndex] = stageResult;
}
@SuppressWarnings("unchecked")
@Override
public List<Object> finish(Object[] accumulator) {
return Arrays.asList(accumulator);
}
@Override
public List<Object> resultOf() {
return Collections.emptyList();
}
@Override
public List<Object> resultOf(Object value1) {
return Arrays.asList(value1);
}
@Override
public List<Object> resultOf(Object value1, Object value2) {
return Arrays.asList(value1, value2);
}
@SuppressWarnings("unchecked")
@Override
public List<Object> resultOf(List<?> values) {
return (List<Object>) values;
}
};
@SuppressWarnings("unchecked")
static <T> IndexedCollector<T, Object[], List<T>> toList() {
return (IndexedCollector) TO_LIST;
}
@SuppressWarnings("unchecked")
static <T> IndexedCollector<T, T[], T[]> toArray(Class<T> type) {
return new IndexedCollector<T, T[], T[]>() {
@Override
public T[] accumulator(int stages) {
return (T[]) Array.newInstance(type, stages);
}
@Override
public void accumulate(T[] accumulator, int stageIndex, T stageResult) {
accumulator[stageIndex] = stageResult;
}
@SuppressWarnings("unchecked")
@Override
public T[] finish(T[] accumulator) {
return (T[]) accumulator;
}
@Override
public T[] resultOf() {
return (T[]) Array.newInstance(type, 0);
}
@Override
public T[] resultOf(T value1) {
T[] array = (T[]) Array.newInstance(type, 1);
array[0] = (T) value1;
return array;
}
@Override
public T[] resultOf(T value1, T value2) {
T[] array = (T[]) Array.newInstance(type, 2);
array[0] = (T) value1;
array[1] = (T) value2;
return array;
}
@Override
public T[] resultOf(List<? extends T> values) {
return values.toArray(accumulator(0));
}
};
}
}
|
eventloop/src/main/java/io/datakernel/async/IndexedCollector.java
|
package io.datakernel.async;
import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.function.BiConsumer;
import java.util.stream.Collector;
public interface IndexedCollector<T, A, R> {
A accumulator(int stages);
void accumulate(A accumulator, int stageIndex, T stageResult);
R finish(A accumulator);
default R resultOf() {
return resultOf(Collections.emptyList());
}
default R resultOf(T value1) {
//noinspection ArraysAsListWithZeroOrOneArgument - using asList instead of singletonList() to allow mutability
return resultOf(Arrays.asList(value1));
}
default R resultOf(T value1, T value2) {
return resultOf(Arrays.asList(value1, value2));
}
default R resultOf(List<? extends T> values) {
A accumulator = accumulator(values.size());
for (int i = 0; i < values.size(); i++) {
accumulate(accumulator, i, values.get(i));
}
return finish(accumulator);
}
static <T, A, R> IndexedCollector<T, A, R> ofCollector(Collector<T, A, R> collector) {
return new IndexedCollector<T, A, R>() {
final BiConsumer<A, T> accumulator = collector.accumulator();
@Override
public A accumulator(int stages) {
return collector.supplier().get();
}
@Override
public void accumulate(A accumulator, int stageIndex, T stageResult) {
this.accumulator.accept(accumulator, stageResult);
}
@Override
public R finish(A accumulator) {
return collector.finisher().apply(accumulator);
}
};
}
IndexedCollector<Object, Object[], List<Object>> TO_LIST = new IndexedCollector<Object, Object[], List<Object>>() {
@Override
public Object[] accumulator(int stages) {
return new Object[stages];
}
@Override
public void accumulate(Object[] accumulator, int stageIndex, Object stageResult) {
accumulator[stageIndex] = stageResult;
}
@SuppressWarnings("unchecked")
@Override
public List<Object> finish(Object[] accumulator) {
return Arrays.asList(accumulator);
}
@Override
public List<Object> resultOf() {
return Collections.emptyList();
}
@Override
public List<Object> resultOf(Object value1) {
return Arrays.asList(value1);
}
@Override
public List<Object> resultOf(Object value1, Object value2) {
return Arrays.asList(value1, value2);
}
@SuppressWarnings("unchecked")
@Override
public List<Object> resultOf(List<?> values) {
return (List<Object>) values;
}
};
@SuppressWarnings("unchecked")
static <T> IndexedCollector<T, Object[], List<T>> toList() {
return (IndexedCollector) TO_LIST;
}
@SuppressWarnings("unchecked")
static <T> IndexedCollector<T, T[], T[]> toArray(Class<T> type) {
return (IndexedCollector) new IndexedCollector<Object, Object[], Object[]>() {
@Override
public T[] accumulator(int stages) {
return (T[]) Array.newInstance(type, stages);
}
@Override
public void accumulate(Object[] accumulator, int stageIndex, Object stageResult) {
accumulator[stageIndex] = stageResult;
}
@SuppressWarnings("unchecked")
@Override
public T[] finish(Object[] accumulator) {
return (T[]) accumulator;
}
@Override
public T[] resultOf() {
return (T[]) Array.newInstance(type, 0);
}
@Override
public T[] resultOf(Object value1) {
T[] array = (T[]) Array.newInstance(type, 1);
array[0] = (T) value1;
return array;
}
@Override
public T[] resultOf(Object value1, Object value2) {
T[] array = (T[]) Array.newInstance(type, 2);
array[0] = (T) value1;
array[1] = (T) value2;
return array;
}
@Override
public T[] resultOf(List<?> values) {
return (T[]) values.toArray();
}
};
}
}
|
Fix IndexedCollector.toArray() bug
|
eventloop/src/main/java/io/datakernel/async/IndexedCollector.java
|
Fix IndexedCollector.toArray() bug
|
|
Java
|
apache-2.0
|
0d7c1adedb4b85353cef366f41ce2d030a9fb9fb
| 0
|
leungmanhin/relex,AmeBel/relex,ainishdave/relex,anitzkin/relex,virneo/relex,williampma/relex,linas/relex,anitzkin/relex,rodsol/relex,linas/relex,virneo/relex,ainishdave/relex,leungmanhin/relex,anitzkin/relex,ainishdave/relex,AmeBel/relex,rodsol/relex,linas/relex,williampma/relex,williampma/relex,anitzkin/relex,ainishdave/relex,rodsol/relex,opencog/relex,opencog/relex,leungmanhin/relex,rodsol/relex,anitzkin/relex,opencog/relex,virneo/relex,AmeBel/relex,williampma/relex,virneo/relex
|
/*
* Copyright 2009 Linas Vepstas
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package relex.test;
import java.util.ArrayList;
import java.util.Collections;
import relex.ParsedSentence;
import relex.RelationExtractor;
import relex.Sentence;
import relex.output.SimpleView;
public class TestRelEx
{
private RelationExtractor re;
private int pass;
private int fail;
private int subpass;
private int subfail;
private static ArrayList<String> sentfail= new ArrayList<String>();
public TestRelEx()
{
re = new RelationExtractor();
pass = 0;
fail = 0;
subpass = 0;
subfail = 0;
}
public ArrayList<String> split(String a)
{
String[] sa = a.split("\n");
ArrayList<String> saa = new ArrayList<String>();
for (String s : sa) {
saa.add(s);
}
Collections.sort (saa);
return saa;
}
/**
* First argument is the sentence.
* Second argument is a list of the relations that RelEx
* should be generating.
* Return true if RelEx generates the same dependencies
* as the second argument.
*/
public boolean test_sentence (String sent, String sf)
{
re.do_penn_tagging = false;
re.setMaxParses(1);
Sentence sntc = re.processSentence(sent);
ParsedSentence parse = sntc.getParses().get(0);
String rs = SimpleView.printBinaryRelations(parse);
String urs = SimpleView.printUnaryRelations(parse);
ArrayList<String> exp = split(sf);
ArrayList<String> brgot = split(rs);
ArrayList<String> urgot = split(urs);
//add number of binary relations from parser-output, to total number of relationships got
int sizeOfGotRelations= brgot.size();
//check expected binary and unary relations
//the below for-loop checks whether all expected binary relations are
//contained in the parser-binary-relation-output arrayList "brgot".
//if any unary relations are expected in the output it checks the
//parser-unary-relation-output arrayList "urgot" for unary relationships
for (int i=0; i< exp.size(); i++)
{
if(!brgot.contains((String)exp.get(i)))
{
if(!urgot.contains(exp.get(i)))
{
System.err.println("Error: content miscompare:\n" +
"\tExpected = " + exp + "\n" +
"\tGot Binary Relations = " + brgot + "\n" +
"\tGot Unary Relations = " + urgot + "\n" +
"\tSentence = " + sent);
subfail ++;
fail ++;
sentfail.add(sent);
return false;
}
//add the unary relation, count to totoal number of binary relations
sizeOfGotRelations++;
}
}
//The size checking of the expected relationships vs output relationships
//is done here purposefully, to accommodate if there is any unary relationships present
//in the expected output(see above for-loop also).
//However it only checks whether parser-output resulted more relationships(binary+unary) than expected relations
//If the parser-output resulted less relationships(binary+unary) than expected it would
//catch that in the above for-loop
if (exp.size() < sizeOfGotRelations)
{
System.err.println("Error: size miscompare:\n" +
"\tExpected = " + exp + "\n" +
"\tGot Binary Relations = " + brgot + "\n" +
"\tGot Unary Relations = " + urgot + "\n" +
"\tSentence = " + sent);
subfail ++;
fail ++;
sentfail.add(sent);
return false;
}
subpass ++;
pass ++;
return true;
}
public void report(boolean rc, String subsys)
{
if (rc) {
System.err.println(subsys + ": Tested " + pass + " sentences, test passed OK");
} else {
System.err.println(subsys + ": Test failed\n\t" +
fail + " sentences failed\n\t" +
pass + " sentences passed");
}
subpass = 0;
subfail = 0;
}
public boolean test_comparatives()
{
boolean rc = true;
rc &= test_sentence ("Some people like pigs less than dogs.",
"_advmod(like, less)\n" +
"_obj(like, pig)\n" +
"_quantity(people, some)\n" +
"_subj(like, people)\n" +
"than(pig, dog)\n");
rc &= test_sentence ("Some people like pigs more than dogs.",
"_advmod(like, more)\n" +
"_obj(like, pig)\n" +
"_quantity(people, some)\n" +
"_subj(like, people)\n" +
"than(pig, dog)\n");
//Non-equal Gradable : Two entities one feature "more/less"
rc &= test_sentence ("He is more intelligent than John.",
"than(he, John)\n" +
"more(intelligent,he)\n" +
"degree(intelligent,comparative)\n"+
"_predadj(he, intelligent)\n");
rc &= test_sentence ("He is less intelligent than John.",
"than(he, John)\n" +
"_more(intelligent,he)\n" +
"degree(intelligent,comparative)\n"+
"_advmod(intelligent, less)\n"+
"_predadj(he, intelligent)\n");
rc &= test_sentence ("He runs more quickly than John.",
"_advmod(run, quickly)\n" +
"_subj(run, he)\n" +
"than(he, John)\n" +
"more(quickly, run)\n" +
"degree(quickly, comparative)\n");
rc &= test_sentence ("He runs less quickly than John.",
"_advmod(run, quickly)\n" +
"_subj(run, he)\n" +
"_advmod(quickly, less)\n"+
"than(he, John)\n" +
"_more(quickly, run)\n" +
"degree(quickly, comparative)\n");
rc &= test_sentence ("He runs more quickly than John does.",
"_advmod(run, quickly)\n" +
"_subj(run, he)\n" +
"_subj(do, John)\n"+
"than(he, John)\n" +
"more(quickly, run)\n" +
"degree(quickly, comparative)\n");
rc &= test_sentence ("He runs less quickly than John does.",
"_advmod(run, quickly)\n" +
"_subj(run, he)\n" +
"_subj(do, John)\n"+
"_advmod(quickly, less)\n"+
"than(he, John)\n" +
"_more(quickly, run)\n" +
"degree(quickly, comparative)\n");
rc &= test_sentence ("He runs more than John.",
"_obj(run, more)\n" +
"_subj(run, he)\n" +
"than(he,John)\n"+
"more(more,run)\n"+
"degree(quickly, comparative)\n");
rc &= test_sentence ("He runs less than John.",
"_obj(run, less)\n" +
"_subj(run, he)\n" +
"than(he,John)\n"+
"_more(more,run)\n"+
"degree(quickly, comparative)\n");
report(rc, "Comparatives");
return rc;
}
public boolean test_Conjunction()
{
boolean rc = true;
//conjoined verbs
rc &= test_sentence ("Scientists make observations and ask questions.",
"_obj(make, observation)\n" +
"_obj(ask, question)\n" +
"_subj(make, scientist)\n" +
"_subj(ask, scientist)\n" +
"conj_and(make, ask)\n");
//conjoined nouns
rc &= test_sentence ("She is a student and an employee.",
"_obj(be, student)\n" +
"_obj(be, employee)\n" +
"_subj(be, she)\n" +
"conj_and(student, employee)\n");
//conjoined adjectives
rc &= test_sentence ("I hailed a black and white taxi.",
"_obj(hail, taxi)\n" +
"_subj(hail, I)\n" +
"_amod(taxi, black)\n" +
"_amod(taxi, white)\n" +
"conj_and(black, white)\n");
//conjoined adverbs
rc &= test_sentence ("She ran quickly and quietly.",
"_advmod(run, quickly)\n" +
"_advmod(run, quietly)\n" +
"_subj(run, she)\n" +
"conj_and(quickly, quietly)\n");
//adjectival modifiers on conjoined subject
rc &= test_sentence ("The big truck and the little car collided.",
"_amod(car, little)\n" +
"_amod(truck, big)\n" +
"_subj(collide, truck)\n" +
"_subj(collide, car)\n" +
"conj_and(truck, car)\n");
//verbs with modifiers
rc &= test_sentence ( "We ate dinner at home and went to the movies.",
"_obj(eat, dinner)\n" +
"conj_and(eat, go)\n" +
"at(eat, home)\n" +
"_subj(eat, we)\n" +
"to(go, movie)\n" +
"_subj(go, we)\n");
//verb with more modifiers
rc &= test_sentence ("We ate a late dinner at home and went out to the movies afterwards.",
"_obj(eat, dinner)\n" +
"conj_and(eat, go_out)\n" +
"at(eat, home)\n" +
"_subj(eat, we)\n" +
"to(go_out, movie)\n" +
"_advmod(go_out, afterwards)\n" +
"_subj(go_out, we)\n" +
"_amod(dinner, late)\n");
//conjoined ditransitive verbs
rc &= test_sentence ("She baked him a cake and sang him a song.",
"_iobj(sing, him)\n" +
"_obj(sing, song)\n" +
"_subj(sing, she)\n" +
"_iobj(bake, him)\n" +
"_obj(bake, cake)\n" +
"conj_and(bake, sing)\n" +
"_subj(bake, she)\n");
//conjoined adverbs with modifiers
rc &= test_sentence ("she ran very quickly and extremely quietly.",
"_advmod(run, quickly)\n" +
"_advmod(run, quietly)\n" +
"_subj(run, she)\n" +
"_advmod(quietly, extremely)\n" +
"conj_and(quickly, quietly)\n" +
"_advmod(quickly, very)\n");
//conjoined adverbs with out modifiers
rc &= test_sentence ("She handled it quickly and gracefully.",
"_obj(handle, quickly)\n" +
"_obj(handle, gracefully)\n" +
"_advmod(handle, quickly)\n" +
"_advmod(handle, gracefully)\n" +
"_subj(handle, she)\n" +
"conj_and(quickly, gracefully)\n");
//modifiers on conjoined adjectives
rc &= test_sentence ("He had very long and very white hair.",
"_obj(have, hair)\n" +
"_subj(have, he)\n" +
"_amod(hair, long)\n" +
"_amod(hair, white)\n" +
"_advmod(white, very)\n" +
"conj_and(long, white)\n" +
"_advmod(long, very)\n");
//adjectival modifiers on conjoined object
rc &= test_sentence ("The collision was between the little car and the big truck.",
"_pobj(between, car)\n" +
"_pobj(between, truck)\n" +
"_psubj(between, collision)\n" +
"_amod(truck, big)\n" +
"_amod(car, little)\n" +
"conj_and(car, truck)\n");
//Names Modifiers and conjunction
rc &= test_sentence ("Big Tom and Angry Sue went to the movies.",
"to(go, movie)\n" +
"_subj(go, Big_Tom)\n" +
"_subj(go, Angry_Sue)\n" +
"conj_and(Big_Tom, Angry_Sue)\n");
report(rc, "Conjunction");
return rc;
}
/**
 * Tests extraction of relations from sentences containing relative
 * clauses (extraposition): who/which/whose/that constructions.
 *
 * @return true when every test sentence produced exactly the expected relations
 */
public boolean test_extraposition()
{
	boolean rc = true;
	rc &= test_sentence ("The woman who lives next door is a registered nurse.",
		"_obj(be, nurse)\n" +
		"_subj(be, woman)\n" +
		"_amod(nurse, registered)\n" +
		"_advmod(live, next_door)\n" +
		"_subj(live, woman)\n" +
		"who(woman, live)\n");
	rc &= test_sentence ("A player who is injured has to leave the field.",
		"_to-do(have, leave)\n" +
		"_subj(have, player)\n" +
		"_obj(leave, field)\n" +
		"_predadj(player, injured)\n" +
		"who(player, injured)\n" );
	rc &= test_sentence ("Pizza, which most people love, is not very healthy.",
		"_advmod(very, not)\n" +
		"_advmod(healthy, very)\n" +
		"_obj(love, Pizza)\n" +
		"_quantity(people, most)\n" +
		"which(Pizza, love)\n" +
		"_subj(love, people)\n" +
		"_predadj(Pizza, healthy)\n" );
	rc &= test_sentence ("The restaurant which belongs to my aunt is very famous.",
		"_advmod(famous, very)\n" +
		"to(belong, aunt)\n" +
		"_subj(belong, restaurant)\n" +
		"_poss(aunt, me)\n" +
		"which(restaurant, belong)\n" +
		"_predadj(restaurant, famous)\n");
	rc &= test_sentence ("The books which I read in the library were written by Charles Dickens.",
		"_obj(write, book)\n" +
		"by(write, Charles_Dickens)\n" +
		"_obj(read, book)\n" +
		"in(read, library)\n" +
		"_subj(read, I)\n" +
		"which(book, read)\n");
	rc &= test_sentence("This is the book whose author I met in a library.",
		"_obj(be, book)\n" +
		"_subj(be, this)\n" +
		"_obj(meet, author)\n" +
		"in(meet, library)\n" +
		"_subj(meet, I)\n" +
		"whose(book, author)\n");
	rc &= test_sentence("The book that Jack lent me is very boring.",
		"_advmod(boring, very)\n" +
		"_iobj(lend, book)\n" +
		"_obj(lend, me)\n" +
		"_subj(lend, Jack)\n" +
		"that_adj(book, lend)\n" +
		"_predadj(book, boring)\n");
	rc &= test_sentence("They ate a special curry which was recommended by the restaurant’s owner.",
		"_obj(eat, curry)\n" +
		"_subj(eat, they)\n" +
		"_obj(recommend, curry)\n" +
		"by(recommend, owner)\n" +
		"_poss(owner, restaurant)\n" +
		"which(curry, recommend)\n" +
		"_amod(curry, special)\n");
	rc &= test_sentence("The dog who Jack said chased me was black.",
		"_obj(chase, me)\n" +
		"_subj(chase, dog)\n" +
		"_subj(say, Jack)\n" +
		"_predadj(dog, black)\n" +
		"who(dog, chase)\n");
	rc &= test_sentence("Jack, who hosted the party, is my cousin.",
		"_obj(be, cousin)\n" +
		"_subj(be, Jack)\n" +
		"_poss(cousin, me)\n" +
		"_obj(host, party)\n" +
		"_subj(host, Jack)\n" +
		"who(Jack, host)\n");
	rc &= test_sentence("Jack, whose name is in that book, is the student near the window.",
		"near(be, window)\n" +
		"_obj(be, student)\n" +
		"_subj(be, Jack)\n" +
		"_obj(near, window)\n" +
		"_pobj(in, book)\n" +
		"_psubj(in, name)\n" +
		"_det(book, that)\n" +
		"whose(Jack, name)\n");
	rc &= test_sentence("Jack stopped the police car that was driving fast.",
		"_obj(stop, car)\n" +
		"_subj(stop, Jack)\n" +
		"_advmod(drive, fast)\n" +
		"_subj(drive, car)\n" +
		"that_adj(car, drive)\n" +
		"_nn(car, police)\n");
	rc &= test_sentence("Just before the crossroads, the car was stopped by a traffic sign that stood on the street.",
		"_obj(stop, car)\n" +
		"by(stop, sign)\n" +
		"_advmod(stop, just)\n" +
		"on(stand, street)\n" +
		"_subj(stand, sign)\n" +
		"that_adj(sign, stand)\n" +
		"_nn(sign, traffic)\n" +
		"before(just, crossroads)\n");
	// FIX: report label was misspelled "Extrapostion".
	report(rc, "Extraposition");
	return rc;
}
/**
 * Entry point: runs every test subsystem, prints an overall summary to
 * stderr, then lists any sentences that failed.
 */
public static void main(String[] args)
{
	TestRelEx tester = new TestRelEx();
	boolean allPassed = true;
	allPassed &= tester.test_comparatives();
	allPassed &= tester.test_extraposition();
	allPassed &= tester.test_Conjunction();
	if (allPassed) {
		System.err.println("Tested " + tester.pass + " sentences, test passed OK");
	} else {
		System.err.println("Test failed\n\t" +
			tester.fail + " sentences failed\n\t" +
			tester.pass + " sentences passed");
	}
	System.err.println("******************************");
	System.err.println("Failed test sentences on Relex");
	System.err.println("******************************");
	if (sentfail.isEmpty()) {
		System.err.println("All test sentences passed");
	}
	for (String sentence : sentfail) {
		System.err.println(sentence);
	}
	System.err.println("******************************\n");
}
}
|
src/java_test/relex/test/TestRelEx.java
|
/*
* Copyright 2009 Linas Vepstas
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package relex.test;
import java.util.ArrayList;
import java.util.Collections;
import relex.ParsedSentence;
import relex.RelationExtractor;
import relex.Sentence;
import relex.output.SimpleView;
/**
 * Regression tests for RelEx relation extraction.  Each test_* method
 * runs a list of sentences through {@link #test_sentence} and compares
 * the binary/unary relations RelEx produced against a hand-written
 * expected set; {@link #report} prints a summary per subsystem.
 */
public class TestRelEx
{
	private RelationExtractor re;
	// Cumulative pass/fail sentence counts across all subsystems.
	private int pass;
	private int fail;
	// Per-subsystem counts; reset by report() after each subsystem runs.
	private int subpass;
	private int subfail;
	// Sentences that failed, collected for the final summary in main().
	private static ArrayList<String> sentfail= new ArrayList<String>();

	public TestRelEx()
	{
		re = new RelationExtractor();
		pass = 0;
		fail = 0;
		subpass = 0;
		subfail = 0;
	}

	/** Splits a newline-separated relation string into a sorted list. */
	public ArrayList<String> split(String a)
	{
		String[] sa = a.split("\n");
		ArrayList<String> saa = new ArrayList<String>();
		for (String s : sa) {
			saa.add(s);
		}
		Collections.sort (saa);
		return saa;
	}

	/**
	 * First argument is the sentence.
	 * Second argument is a list of the relations that RelEx
	 * should be generating.
	 * Return true if RelEx generates the same dependencies
	 * as the second argument.
	 */
	public boolean test_sentence (String sent, String sf)
	{
		re.do_penn_tagging = false;
		re.setMaxParses(1);
		Sentence sntc = re.processSentence(sent);
		ParsedSentence parse = sntc.getParses().get(0);
		String rs = SimpleView.printBinaryRelations(parse);
		String urs = SimpleView.printUnaryRelations(parse);
		ArrayList<String> exp = split(sf);
		ArrayList<String> brgot = split(rs);
		ArrayList<String> urgot = split(urs);
		//add number of binary relations from parser-output, to total number of relationships got
		int sizeOfGotRelations= brgot.size();
		//check expected binary and unary relations
		//the below for-loop checks whether all expected binary relations are
		//contained in the parser-binary-relation-output arrayList "brgot".
		//if any unary relations are expected in the output it checks the
		//parser-unary-relation-output arrayList "urgot" for unary relationships
		for (int i=0; i< exp.size(); i++)
		{
			if(!brgot.contains((String)exp.get(i)))
			{
				if(!urgot.contains(exp.get(i)))
				{
					System.err.println("Error: content miscompare:\n" +
						"\tExpected = " + exp + "\n" +
						"\tGot Binary Relations = " + brgot + "\n" +
						"\tGot Unary Relations = " + urgot + "\n" +
						"\tSentence = " + sent);
					subfail ++;
					fail ++;
					sentfail.add(sent);
					return false;
				}
				//add the unary relation count to total number of binary relations
				sizeOfGotRelations++;
			}
		}
		//The size checking of the expected relationships vs output relationships
		//is done here purposefully, to accommodate if there is any unary relationships present
		//in the expected output(see above for-loop also).
		//However it only checks whether parser-output resulted more relationships(binary+unary) than expected relations
		//If the parser-output resulted less relationships(binary+unary) than expected it would
		//catch that in the above for-loop
		if (exp.size() < sizeOfGotRelations)
		{
			System.err.println("Error: size miscompare:\n" +
				"\tExpected = " + exp + "\n" +
				"\tGot Binary Relations = " + brgot + "\n" +
				"\tGot Unary Relations = " + urgot + "\n" +
				"\tSentence = " + sent);
			subfail ++;
			fail ++;
			sentfail.add(sent);
			return false;
		}
		subpass ++;
		pass ++;
		return true;
	}

	/**
	 * Prints a pass/fail summary for one subsystem and resets the
	 * per-subsystem counters.
	 * NOTE(review): the message prints the cumulative 'pass'/'fail'
	 * totals rather than 'subpass'/'subfail' -- confirm whether the
	 * per-subsystem counts were intended here.
	 */
	public void report(boolean rc, String subsys)
	{
		if (rc) {
			System.err.println(subsys + ": Tested " + pass + " sentences, test passed OK");
		} else {
			System.err.println(subsys + ": Test failed\n\t" +
				fail + " sentences failed\n\t" +
				pass + " sentences passed");
		}
		subpass = 0;
		subfail = 0;
	}

	/** Tests comparative constructions (more/less/than). */
	public boolean test_comparatives()
	{
		boolean rc = true;
		rc &= test_sentence ("Some people like pigs less than dogs.",
			"_advmod(like, less)\n" +
			"_obj(like, pig)\n" +
			"_quantity(people, some)\n" +
			"_subj(like, people)\n" +
			"than(pig, dog)\n");
		rc &= test_sentence ("Some people like pigs more than dogs.",
			"_advmod(like, more)\n" +
			"_obj(like, pig)\n" +
			"_quantity(people, some)\n" +
			"_subj(like, people)\n" +
			"than(pig, dog)\n");
		//Non-equal Gradable : Two entities one feature "more/less"
		rc &= test_sentence ("He is more intelligent than John.",
			"than(he, John)\n" +
			"more(intelligent,he)\n" +
			"degree(intelligent,comparative)\n"+
			"_predadj(he, intelligent)\n");
		rc &= test_sentence ("He is less intelligent than John.",
			"than(he, John)\n" +
			"_more(intelligent,he)\n" +
			"degree(intelligent,comparative)\n"+
			"_advmod(intelligent, less)\n"+
			"_predadj(he, intelligent)\n");
		rc &= test_sentence ("He runs more quickly than John.",
			"_advmod(run, quickly)\n" +
			"_subj(run, he)\n" +
			"than(he, John)\n" +
			"more(quickly, run)\n" +
			"degree(quickly, comparative)\n");
		rc &= test_sentence ("He runs less quickly than John.",
			"_advmod(run, quickly)\n" +
			"_subj(run, he)\n" +
			"_advmod(quickly, less)\n"+
			"than(he, John)\n" +
			"_more(quickly, run)\n" +
			"degree(quickly, comparative)\n");
		rc &= test_sentence ("He runs more quickly than John does.",
			"_advmod(run, quickly)\n" +
			"_subj(run, he)\n" +
			"_subj(do, John)\n"+
			"than(he, John)\n" +
			"more(quickly, run)\n" +
			"degree(quickly, comparative)\n");
		rc &= test_sentence ("He runs less quickly than John does.",
			"_advmod(run, quickly)\n" +
			"_subj(run, he)\n" +
			"_subj(do, John)\n"+
			"_advmod(quickly, less)\n"+
			"than(he, John)\n" +
			"_more(quickly, run)\n" +
			"degree(quickly, comparative)\n");
		rc &= test_sentence ("He runs more than John.",
			"_obj(run, more)\n" +
			"_subj(run, he)\n" +
			"than(he,John)\n"+
			"more(more,run)\n"+
			// NOTE(review): "quickly" does not occur in this sentence;
			// this expectation looks like a copy/paste slip from the
			// tests above -- confirm the intended relation.
			"degree(quickly, comparative)\n");
		report(rc, "Comparatives");
		return rc;
	}

	/** Tests conjunctions of verbs, nouns, adjectives, and adverbs. */
	public boolean test_Conjunction()
	{
		boolean rc = true;
		//conjoined verbs
		rc &= test_sentence ("Scientists make observations and ask questions.",
			"_obj(make, observation)\n" +
			"_obj(ask, question)\n" +
			"_subj(make, scientist)\n" +
			"_subj(ask, scientist)\n" +
			"conj_and(make, ask)\n");
		//conjoined nouns
		rc &= test_sentence ("She is a student and an employee.",
			"_obj(be, student)\n" +
			"_obj(be, employee)\n" +
			"_subj(be, she)\n" +
			"conj_and(student, employee)\n");
		//conjoined adjectives
		rc &= test_sentence ("I hailed a black and white taxi.",
			"_obj(hail, taxi)\n" +
			"_subj(hail, I)\n" +
			"_amod(taxi, black)\n" +
			"_amod(taxi, white)\n" +
			"conj_and(black, white)\n");
		//conjoined adverbs
		rc &= test_sentence ("She ran quickly and quietly.",
			"_advmod(run, quickly)\n" +
			"_advmod(run, quietly)\n" +
			"_subj(run, she)\n" +
			"conj_and(quickly, quietly)\n");
		//adjectival modifiers on conjoined subject
		rc &= test_sentence ("The big truck and the little car collided.",
			"_amod(car, little)\n" +
			"_amod(truck, big)\n" +
			"_subj(collide, truck)\n" +
			"_subj(collide, car)\n" +
			"conj_and(truck, car)\n");
		//verbs with modifiers
		rc &= test_sentence ( "We ate dinner at home and went to the movies.",
			"_obj(eat, dinner)\n" +
			"conj_and(eat, go)\n" +
			"at(eat, home)\n" +
			"_subj(eat, we)\n" +
			"to(go, movie)\n" +
			"_subj(go, we)\n");
		//verb with more modifiers
		rc &= test_sentence ("We ate a late dinner at home and went out to the movies afterwards.",
			"_obj(eat, dinner)\n" +
			"conj_and(eat, go_out)\n" +
			"at(eat, home)\n" +
			"_subj(eat, we)\n" +
			"to(go_out, movie)\n" +
			"_advmod(go_out, afterwards)\n" +
			"_subj(go_out, we)\n" +
			"_amod(dinner, late)\n");
		//conjoined ditransitive verbs
		rc &= test_sentence ("She baked him a cake and sang him a song.",
			"_iobj(sing, him)\n" +
			"_obj(sing, song)\n" +
			"_subj(sing, she)\n" +
			"_iobj(bake, him)\n" +
			"_obj(bake, cake)\n" +
			"conj_and(bake, sing)\n" +
			"_subj(bake, she)\n");
		//conjoined adverbs with modifiers
		rc &= test_sentence ("she ran very quickly and extremely quietly.",
			"_advmod(run, quickly)\n" +
			"_advmod(run, quietly)\n" +
			"_subj(run, she)\n" +
			"_advmod(quietly, extremely)\n" +
			"conj_and(quickly, quietly)\n" +
			"_advmod(quickly, very)\n");
		//conjoined adverbs with out modifiers
		rc &= test_sentence ("She handled it quickly and gracefully.",
			"_obj(handle, quickly)\n" +
			"_obj(handle, gracefully)\n" +
			"_advmod(handle, quickly)\n" +
			"_advmod(handle, gracefully)\n" +
			"_subj(handle, she)\n" +
			"conj_and(quickly, gracefully)\n");
		//modifiers on conjoined adjectives
		rc &= test_sentence ("He had very long and very white hair.",
			"_obj(have, hair)\n" +
			"_subj(have, he)\n" +
			"_amod(hair, long)\n" +
			"_amod(hair, white)\n" +
			"_advmod(white, very)\n" +
			"conj_and(long, white)\n" +
			"_advmod(long, very)\n");
		//adjectival modifiers on conjoined object
		rc &= test_sentence ("The collision was between the little car and the big truck.",
			"_pobj(between, car)\n" +
			"_pobj(between, truck)\n" +
			"_psubj(between, collision)\n" +
			"_amod(truck, big)\n" +
			"_amod(car, little)\n" +
			"conj_and(car, truck)\n");
		//Names Modifiers and conjunction
		rc &= test_sentence ("Big Tom and Angry Sue went to the movies.",
			"to(go, movie)\n" +
			"_subj(go, Big_Tom)\n" +
			"_subj(go, Angry_Sue)\n" +
			"conj_and(Big_Tom, Angry_Sue)\n");
		report(rc, "Conjunction");
		return rc;
	}

	/** Tests relative clauses (who/which/whose/that extraposition). */
	public boolean test_extraposition()
	{
		boolean rc = true;
		rc &= test_sentence ("The woman who lives next door is a registered nurse.",
			"_obj(be, nurse)\n" +
			"_subj(be, woman)\n" +
			"_amod(nurse, registered)\n" +
			"_advmod(live, next_door)\n" +
			"_subj(live, woman)\n" +
			"who(woman, live)\n");
		rc &= test_sentence ("A player who is injured has to leave the field.",
			"_to-do(have, leave)\n" +
			"_subj(have, player)\n" +
			"_obj(leave, field)\n" +
			"_predadj(player, injured)\n" +
			"who(player, injured)\n" );
		rc &= test_sentence ("Pizza, which most people love, is not very healthy.",
			"_advmod(very, not)\n" +
			"_advmod(healthy, very)\n" +
			"_obj(love, Pizza)\n" +
			"_quantity(people, most)\n" +
			"which(Pizza, love)\n" +
			"_subj(love, people)\n" +
			"_predadj(Pizza, healthy)\n" );
		rc &= test_sentence ("The restaurant which belongs to my aunt is very famous.",
			"_advmod(famous, very)\n" +
			"to(belong, aunt)\n" +
			"_subj(belong, restaurant)\n" +
			"_poss(aunt, me)\n" +
			"which(restaurant, belong)\n" +
			"_predadj(restaurant, famous)\n");
		rc &= test_sentence ("The books which I read in the library were written by Charles Dickens.",
			"_obj(write, book)\n" +
			"by(write, Charles_Dickens)\n" +
			"_obj(read, book)\n" +
			"in(read, library)\n" +
			"_subj(read, I)\n" +
			"which(book, read)\n");
		rc &= test_sentence("This is the book whose author I met in a library.",
			"_obj(be, book)\n" +
			"_subj(be, this)\n" +
			"_obj(meet, author)\n" +
			"in(meet, library)\n" +
			"_subj(meet, I)\n" +
			"whose(book, author)\n");
		rc &= test_sentence("The book that Jack lent me is very boring.",
			"_advmod(boring, very)\n" +
			"_iobj(lend, book)\n" +
			"_obj(lend, me)\n" +
			"_subj(lend, Jack)\n" +
			"that_adj(book, lend)\n" +
			"_predadj(book, boring)\n");
		rc &= test_sentence("They ate a special curry which was recommended by the restaurant’s owner.",
			"_obj(eat, curry)\n" +
			"_subj(eat, they)\n" +
			"_obj(recommend, curry)\n" +
			"by(recommend, owner)\n" +
			"_poss(owner, restaurant)\n" +
			"which(curry, recommend)\n" +
			"_amod(curry, special)\n");
		rc &= test_sentence("The dog who Jack said chased me was black.",
			"_obj(chase, me)\n" +
			"_subj(chase, dog)\n" +
			"_subj(say, Jack)\n" +
			"_predadj(dog, black)\n" +
			"who(dog, chase)\n");
		rc &= test_sentence("Jack, who hosted the party, is my cousin.",
			"_obj(be, cousin)\n" +
			"_subj(be, Jack)\n" +
			"_poss(cousin, me)\n" +
			"_obj(host, party)\n" +
			"_subj(host, Jack)\n" +
			"who(Jack, host)\n");
		rc &= test_sentence("Jack, whose name is in that book, is the student near the window.",
			"near(be, window)\n" +
			"_obj(be, student)\n" +
			"_subj(be, Jack)\n" +
			"_obj(near, window)\n" +
			"_pobj(in, book)\n" +
			"_psubj(in, name)\n" +
			"_det(book, that)\n" +
			"whose(Jack, name)\n");
		rc &= test_sentence("Jack stopped the police car that was driving fast.",
			"_obj(stop, car)\n" +
			"_subj(stop, Jack)\n" +
			"_advmod(drive, fast)\n" +
			"_subj(drive, car)\n" +
			"that_adj(car, drive)\n" +
			"_nn(car, police)\n");
		rc &= test_sentence("Just before the crossroads, the car was stopped by a traffic sign that stood on the street.",
			"_obj(stop, car)\n" +
			"by(stop, sign)\n" +
			"_advmod(stop, just)\n" +
			"on(stand, street)\n" +
			"_subj(stand, sign)\n" +
			"that_adj(sign, stand)\n" +
			"_nn(sign, traffic)\n" +
			"before(just, crossroads)\n");
		// FIX: report label was misspelled "Extrapostion".
		report(rc, "Extraposition");
		return rc;
	}

	/** Runs all subsystems and prints an overall summary to stderr. */
	public static void main(String[] args)
	{
		TestRelEx ts = new TestRelEx();
		boolean rc = true;
		rc &= ts.test_comparatives();
		rc &= ts.test_extraposition();
		rc &= ts.test_Conjunction();
		if (rc) {
			System.err.println("Tested " + ts.pass + " sentences, test passed OK");
		} else {
			System.err.println("Test failed\n\t" +
				ts.fail + " sentences failed\n\t" +
				ts.pass + " sentences passed");
		}
		System.err.println("******************************");
		System.err.println("Failed test sentences on Relex");
		System.err.println("******************************");
		if(sentfail.isEmpty())
			System.err.println("All test sentences passed");
		for(String temp : sentfail){
			System.err.println(temp);
		}
		System.err.println("******************************\n");
	}
}
|
Adding test sentence "He runs less than John."
|
src/java_test/relex/test/TestRelEx.java
|
Adding test sentence "He runs less than John."
|
|
Java
|
apache-2.0
|
f35cb194e272b6ad6c9271107ac4eaf1f4446169
| 0
|
davidw/hecl
|
/*
* Copyright 2009
* DedaSys LLC - http://www.dedasys.com
*
* Author: David N. Welton <davidw@dedasys.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hecl.files;
import java.io.IOException;
import javax.microedition.io.Connector;
import javax.microedition.io.file.FileConnection;
import javax.microedition.io.file.FileSystemRegistry;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Vector;
import org.hecl.DoubleThing;
import org.hecl.HashThing;
import org.hecl.HeclException;
import org.hecl.Interp;
import org.hecl.IntThing;
import org.hecl.ListThing;
import org.hecl.LongThing;
import org.hecl.ObjectThing;
import org.hecl.Operator;
import org.hecl.StringThing;
import org.hecl.Thing;
/**
 * The <code>FileCmds</code> class implements various file handling
 * commands (attribute queries, existence/size checks, deletion, listing,
 * mkdir, rename, truncate, disk usage), but not actual opening/closing
 * of file streams.
 *
 * @author <a href="mailto:davidw@dedasys.com">David N. Welton</a>
 * @version 1.0
 */
public class FileCmds extends Operator {
	// Command codes dispatched by operate().
	public static final int READABLE = 10;
	public static final int WRITABLE = 20;
	public static final int HIDDEN = 30;
	public static final int EXISTS = 40;
	public static final int DELETE = 50;
	public static final int SIZE = 60;
	public static final int BASENAME = 70;
	public static final int MTIME = 80;
	public static final int ISDIRECTORY = 90;
	public static final int ISOPEN = 100;
	public static final int LIST = 110;
	public static final int MKDIR = 120;
	public static final int RENAME = 130;
	public static final int TRUNCATE = 140;
	public static final int LISTROOTS = 150;
	public static final int DU = 160;

	/**
	 * Dispatches one file command.  For every command except LISTROOTS,
	 * argv[1] is the file URL; a FileConnection is opened for it and is
	 * always closed before returning.
	 *
	 * @param cmd one of the command codes declared above
	 * @param interp the interpreter
	 * @param argv argv[0] is the command name, argv[1] the file URL,
	 *        argv[2] an optional extra argument (flag value, new name,
	 *        or truncate length, depending on the command)
	 * @return the command result
	 * @throws HeclException on I/O errors or an unknown command code
	 */
	public Thing operate(int cmd, Interp interp, Thing[] argv) throws HeclException {
		String fname = null;
		FileConnection fconn = null;
		if (cmd != LISTROOTS) {
			fname = StringThing.get(argv[1]);
			try {
				fconn = (FileConnection)Connector.open(fname);
			} catch (IOException e) {
				throw new HeclException("IO Exception in " +
					argv[0].toString() + ": " + e.toString());
			}
		}
		try {
			switch(cmd) {
			case READABLE:
			{
				if (argv.length == 3) {
					boolean readable = IntThing.get(argv[2]) == 1;
					fconn.setReadable(readable);
				}
				return IntThing.create(fconn.canRead());
			}
			case WRITABLE:
			{
				if (argv.length == 3) {
					boolean writable = IntThing.get(argv[2]) == 1;
					fconn.setWritable(writable);
				}
				return IntThing.create(fconn.canWrite());
			}
			case HIDDEN:
			{
				if (argv.length == 3) {
					boolean hidden = IntThing.get(argv[2]) == 1;
					fconn.setHidden(hidden);
				}
				return IntThing.create(fconn.isHidden());
			}
			case EXISTS:
			{
				return IntThing.create(fconn.exists());
			}
			case DELETE:
			{
				// FIX: the DELETE code was declared but never dispatched.
				fconn.delete();
				return new Thing(fname);
			}
			case SIZE:
			{
				return LongThing.create(fconn.fileSize());
			}
			case BASENAME:
			{
				return new Thing(fconn.getName());
			}
			case MTIME:
			{
				return LongThing.create(fconn.lastModified());
			}
			case ISDIRECTORY:
			{
				return IntThing.create(fconn.isDirectory());
			}
			case ISOPEN:
			{
				return IntThing.create(fconn.isOpen());
			}
			case LIST: {
				Vector v = new Vector();
				for (Enumeration e = fconn.list(); e.hasMoreElements();) {
					v.addElement(new Thing((String)e.nextElement()));
				}
				return ListThing.create(v);
			}
			case LISTROOTS: {
				Vector v = new Vector();
				for (Enumeration e = FileSystemRegistry.listRoots(); e.hasMoreElements();) {
					v.addElement(new Thing((String)e.nextElement()));
				}
				return ListThing.create(v);
			}
			case MKDIR: {
				fconn.mkdir();
				return new Thing(fname);
			}
			case RENAME: {
				fconn.rename(argv[2].toString());
				return argv[2];
			}
			case TRUNCATE: {
				fconn.truncate(LongThing.get(argv[2]));
				// FIX: this case previously fell through into DU and
				// returned the disk-usage hash instead of a result.
				return new Thing(fname);
			}
			case DU: {
				Hashtable du = new Hashtable();
				du.put("total", LongThing.create(fconn.totalSize()));
				du.put("used", LongThing.create(fconn.usedSize()));
				return HashThing.create(du);
			}
			default:
				throw new HeclException("Unknown file command '"
					+ argv[0].toString() + "' with code '"
					+ cmd + "'.");
			}
		} catch (IOException e) {
			throw new HeclException("IO Exception in " +
				argv[0].toString() + ": " + e.toString());
		} finally {
			// FIX: the connection was previously never closed (leak).
			if (fconn != null) {
				try {
					fconn.close();
				} catch (IOException ignored) {
					// Best-effort close; the command result/error
					// has already been determined.
				}
			}
		}
	}

	/** Registers all file.* commands with the interpreter. */
	public static void load(Interp ip) throws HeclException {
		Operator.load(ip,cmdtable);
	}

	/** Removes all file.* commands from the interpreter. */
	public static void unload(Interp ip) throws HeclException {
		Operator.unload(ip,cmdtable);
	}

	protected FileCmds(int cmdcode,int minargs,int maxargs) {
		super(cmdcode,minargs,maxargs);
	}

	private static Hashtable cmdtable = new Hashtable();

	static {
		try {
			cmdtable.put("file.readable", new FileCmds(READABLE,1,2));
			cmdtable.put("file.writable", new FileCmds(WRITABLE,1,2));
			cmdtable.put("file.hidden", new FileCmds(HIDDEN,1,2));
			// FIX: "file.exists" was registered twice.
			cmdtable.put("file.exists", new FileCmds(EXISTS,1,1));
			cmdtable.put("file.delete", new FileCmds(DELETE,1,1));
			cmdtable.put("file.size", new FileCmds(SIZE,1,1));
			cmdtable.put("file.basename", new FileCmds(BASENAME,1,1));
			cmdtable.put("file.mtime", new FileCmds(MTIME,1,1));
			cmdtable.put("file.isdirectory", new FileCmds(ISDIRECTORY,1,1));
			cmdtable.put("file.isopen", new FileCmds(ISOPEN,1,1));
			cmdtable.put("file.mkdir", new FileCmds(MKDIR,1,1));
			// FIX: truncate reads argv[2] (the length), so it requires
			// two arguments, like rename -- it was registered as (1,1).
			cmdtable.put("file.truncate", new FileCmds(TRUNCATE,2,2));
			cmdtable.put("file.rename", new FileCmds(RENAME,2,2));
			cmdtable.put("file.list", new FileCmds(LIST,1,1));
			cmdtable.put("file.devs", new FileCmds(LISTROOTS,0,0));
			cmdtable.put("file.du", new FileCmds(DU,1,1));
		} catch (Exception e) {
			e.printStackTrace();
			System.out.println("Can't create file commands.");
		}
	}
}
|
newfiles/org/hecl/files/FileCmds.java
|
/*
* Copyright 2009
* DedaSys LLC - http://www.dedasys.com
*
* Author: David N. Welton <davidw@dedasys.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hecl.files;
import java.io.IOException;
import javax.microedition.io.Connector;
import javax.microedition.io.file.FileConnection;
import javax.microedition.io.file.FileSystemRegistry;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Vector;
import org.hecl.DoubleThing;
import org.hecl.HeclException;
import org.hecl.Interp;
import org.hecl.IntThing;
import org.hecl.ListThing;
import org.hecl.LongThing;
import org.hecl.ObjectThing;
import org.hecl.Operator;
import org.hecl.StringThing;
import org.hecl.Thing;
/**
 * The <code>FileCmds</code> class implements various file handling
 * commands (attribute queries, existence/size checks, listing), but not
 * actual opening/closing of file streams.
 */
public class FileCmds extends Operator {
	// Command codes dispatched by operate().  Note: DELETE, MKDIR,
	// RENAME and TRUNCATE are declared but not implemented yet.
	public static final int READABLE = 10;
	public static final int WRITABLE = 20;
	public static final int HIDDEN = 30;
	public static final int EXISTS = 40;
	public static final int DELETE = 50;
	public static final int SIZE = 60;
	public static final int BASENAME = 70;
	public static final int MTIME = 80;
	public static final int ISDIRECTORY = 90;
	public static final int ISOPEN = 100;
	public static final int LIST = 110;
	public static final int MKDIR = 120;
	public static final int RENAME = 130;
	public static final int TRUNCATE = 140;
	public static final int LISTROOTS = 150;

	/**
	 * Dispatches one file command.  For every command except LISTROOTS,
	 * argv[1] is the file URL; a FileConnection is opened for it and is
	 * always closed before returning.
	 *
	 * @param cmd one of the command codes declared above
	 * @param interp the interpreter
	 * @param argv argv[0] is the command name, argv[1] the file URL,
	 *        argv[2] an optional flag value for readable/writable/hidden
	 * @return the command result
	 * @throws HeclException on I/O errors or an unknown command code
	 */
	public Thing operate(int cmd, Interp interp, Thing[] argv) throws HeclException {
		String fname = null;
		FileConnection fconn = null;
		if (cmd != LISTROOTS) {
			fname = StringThing.get(argv[1]);
			try {
				fconn = (FileConnection)Connector.open(fname);
			} catch (IOException e) {
				throw new HeclException("IO Exception in " +
					argv[0].toString() + ": " + e.toString());
			}
		}
		try {
			switch(cmd) {
			case READABLE:
			{
				if (argv.length == 3) {
					boolean readable = IntThing.get(argv[2]) == 1;
					fconn.setReadable(readable);
				}
				return IntThing.create(fconn.canRead());
			}
			case WRITABLE:
			{
				if (argv.length == 3) {
					boolean writable = IntThing.get(argv[2]) == 1;
					fconn.setWritable(writable);
				}
				return IntThing.create(fconn.canWrite());
			}
			case HIDDEN:
			{
				if (argv.length == 3) {
					boolean hidden = IntThing.get(argv[2]) == 1;
					fconn.setHidden(hidden);
				}
				return IntThing.create(fconn.isHidden());
			}
			case EXISTS:
			{
				return IntThing.create(fconn.exists());
			}
			case SIZE:
			{
				return LongThing.create(fconn.fileSize());
			}
			case BASENAME:
			{
				return new Thing(fconn.getName());
			}
			case MTIME:
			{
				return LongThing.create(fconn.lastModified());
			}
			case ISDIRECTORY:
			{
				return IntThing.create(fconn.isDirectory());
			}
			case ISOPEN:
			{
				return IntThing.create(fconn.isOpen());
			}
			case LIST: {
				Vector v = new Vector();
				for (Enumeration e = fconn.list(); e.hasMoreElements();) {
					v.addElement(new Thing((String)e.nextElement()));
				}
				return ListThing.create(v);
			}
			case LISTROOTS: {
				Vector v = new Vector();
				for (Enumeration e = FileSystemRegistry.listRoots(); e.hasMoreElements();) {
					v.addElement(new Thing((String)e.nextElement()));
				}
				return ListThing.create(v);
			}
			default:
				throw new HeclException("Unknown file command '"
					+ argv[0].toString() + "' with code '"
					+ cmd + "'.");
			}
		} catch (IOException e) {
			throw new HeclException("IO Exception in " +
				argv[0].toString() + ": " + e.toString());
		} finally {
			// FIX: the connection was previously never closed (leak).
			if (fconn != null) {
				try {
					fconn.close();
				} catch (IOException ignored) {
					// Best-effort close.
				}
			}
		}
	}

	/** Registers all file.* commands with the interpreter. */
	public static void load(Interp ip) throws HeclException {
		Operator.load(ip,cmdtable);
	}

	/** Removes all file.* commands from the interpreter. */
	public static void unload(Interp ip) throws HeclException {
		Operator.unload(ip,cmdtable);
	}

	protected FileCmds(int cmdcode,int minargs,int maxargs) {
		super(cmdcode,minargs,maxargs);
	}

	private static Hashtable cmdtable = new Hashtable();

	static {
		try {
			cmdtable.put("file.readable", new FileCmds(READABLE,1,2));
			cmdtable.put("file.writable", new FileCmds(WRITABLE,1,2));
			cmdtable.put("file.hidden", new FileCmds(HIDDEN,1,2));
			// FIX: "file.exists" was registered twice.
			cmdtable.put("file.exists", new FileCmds(EXISTS,1,1));
			cmdtable.put("file.size", new FileCmds(SIZE,1,1));
			cmdtable.put("file.basename", new FileCmds(BASENAME,1,1));
			cmdtable.put("file.mtime", new FileCmds(MTIME,1,1));
			cmdtable.put("file.isdirectory", new FileCmds(ISDIRECTORY,1,1));
			cmdtable.put("file.isopen", new FileCmds(ISOPEN,1,1));
			cmdtable.put("file.list", new FileCmds(LIST,1,1));
			cmdtable.put("file.devs", new FileCmds(LISTROOTS,0,0));
		} catch (Exception e) {
			e.printStackTrace();
			System.out.println("Can't create file commands.");
		}
	}
}
|
Implemented a few more file commands.
|
newfiles/org/hecl/files/FileCmds.java
|
Implemented a few more file commands.
|
|
Java
|
apache-2.0
|
55a162885b7577b8037b98e9fea289fc02e5d680
| 0
|
mtransitapps/commons-android,mtransitapps/commons-android,mtransitapps/commons-android
|
package org.mtransit.android.commons;
import java.util.regex.Pattern;
/**
 * Static helpers for producing and cleaning up HTML snippets
 * (bold/font-color wrapping, link anchors, newline-to-BR conversion,
 * and regex-based markup cleanup).
 */
public final class HtmlUtils implements MTLog.Loggable {
private static final String TAG = HtmlUtils.class.getSimpleName();
@Override
public String getLogTag() {
return TAG;
}
// URL query-string separators.
public static final String URL_PARAM_AND = "&";
public static final String URL_PARAM_EQ = "=";
// Basic HTML fragments reused by the format strings below.
public static final String BR = "<BR/>";
public static final String B1 = "<B>";
public static final String B2 = "</B>";
private static final String BOLD_FORMAT = B1 + "%s" + B2;
// Wraps the given HTML in <B>...</B>.
public static String applyBold(CharSequence html) {
return String.format(BOLD_FORMAT, html);
}
private static final String FONT_COLOR_1_FORMAT = "<FONT COLOR=\"#%s\">";
private static final String FONT2 = "</FONT>";
private static final String FONT_COLOR_FORMAT = FONT_COLOR_1_FORMAT + "%s" + FONT2;
// Wraps the given HTML in a FONT tag; 'color' is a hex color string
// without a leading '#' (the format adds it).
public static String applyFontColor(CharSequence html, CharSequence color) {
return String.format(FONT_COLOR_FORMAT, color, html);
}
private static final String LINKIFY = "<A HREF=\"%s\">%s</A>";
// Turns a URL into an anchor whose visible label is the URL itself.
public static String linkify(CharSequence url) {
return String.format(LINKIFY, url, url);
}
private static final Pattern NEW_LINE_REGEX = Pattern.compile("(\n)");
// Replaces every newline character with a <BR/> tag.
public static String toHTML(String html) {
return NEW_LINE_REGEX.matcher(html).replaceAll(BR);
}
// Bold-producing markup, matched case-insensitively: <strong>, any
// heading tag h1-h6, <span> open/close tags, and inline
// "font-weight: bold" style fragments.
private static final Pattern REMOVE_BOLD = Pattern.compile(
"(<strong[^>]*>|</strong>|<h[1-6]{1}>|</h[1-6]{1}>|<span[^>]*>|</span>|font\\-weight\\:[\\s]*bold[;]?)", Pattern.CASE_INSENSITIVE);
private static final String REMOVE_BOLD_REPLACEMENT = StringUtils.EMPTY;
// Strips the markup above; on any failure the input is logged and
// returned unchanged (best effort).
public static String removeBold(String html) {
try {
return REMOVE_BOLD.matcher(html).replaceAll(REMOVE_BOLD_REPLACEMENT);
} catch (Exception e) {
MTLog.w(TAG, e, "Error while removing bold!");
return html;
}
}
// Closing list/heading markup is turned into <BR/>, and opening <li>
// tags into a "- " prefix -- presumably because the target TextView
// rendering does not honor these tags (method name suggests so; confirm).
private static final Pattern FIX_TEXT_VIEW_BR = Pattern.compile("(<ul[^>]*>|</ul>|</li>|</h[1-6]{1}>)", Pattern.CASE_INSENSITIVE);
private static final String FIX_TEXT_VIEW_BR_REPLACEMENT = BR;
private static final Pattern FIX_TEXT_VIEW_BR2 = Pattern.compile("(<li[^>]*>)", Pattern.CASE_INSENSITIVE);
private static final String FIX_TEXT_VIEW_BR_REPLACEMENT2 = "- ";
// Applies both substitutions above; on any failure the input is
// logged and returned unchanged (best effort).
public static String fixTextViewBR(String html) {
try {
html = FIX_TEXT_VIEW_BR.matcher(html).replaceAll(FIX_TEXT_VIEW_BR_REPLACEMENT);
html = FIX_TEXT_VIEW_BR2.matcher(html).replaceAll(FIX_TEXT_VIEW_BR_REPLACEMENT2);
return html;
} catch (Exception e) {
MTLog.w(TAG, e, "Error while fixing TextView BR!");
return html;
}
}
}
|
src/org/mtransit/android/commons/HtmlUtils.java
|
package org.mtransit.android.commons;
import java.util.regex.Pattern;
/**
 * Static helpers for producing small HTML snippets: bold and
 * font-color wrapping, link anchors, and newline-to-BR conversion.
 */
public final class HtmlUtils {

	// URL query-string separators.
	public static final String URL_PARAM_AND = "&";
	public static final String URL_PARAM_EQ = "=";

	// Reusable HTML fragments.
	public static final String BR = "<BR/>";
	public static final String B1 = "<B>";
	public static final String B2 = "</B>";

	/** Wraps the given HTML in {@code <B>...</B>}. */
	public static String applyBold(CharSequence html) {
		return B1 + html + B2;
	}

	/** Wraps the given HTML in a FONT tag; {@code color} is a hex string without a leading '#'. */
	public static String applyFontColor(CharSequence html, CharSequence color) {
		return "<FONT COLOR=\"#" + color + "\">" + html + "</FONT>";
	}

	/** Turns a URL into an anchor whose visible label is the URL itself. */
	public static String linkify(CharSequence url) {
		return "<A HREF=\"" + url + "\">" + url + "</A>";
	}

	/** Replaces every newline character with a {@link #BR} tag. */
	public static String toHTML(String html) {
		return html.replace("\n", BR);
	}
}
|
HTML cleaning methods.
|
src/org/mtransit/android/commons/HtmlUtils.java
|
HTML cleaning methods.
|
|
Java
|
apache-2.0
|
9126eadb7c23f8d72e66d66a3d8aaf20d4bc37d5
| 0
|
soomla/android-profile,soomla/android-profile,vedi/android-profile,vedi/android-profile
|
/*
* Copyright (C) 2012-2014 Soomla Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.soomla.profile;
import android.app.Activity;
import android.os.Handler;
import android.os.Looper;
import com.soomla.BusProvider;
import com.soomla.SoomlaUtils;
import com.soomla.data.KeyValueStorage;
import com.soomla.profile.auth.AuthCallbacks;
import com.soomla.profile.auth.IAuthProvider;
import com.soomla.profile.data.UserProfileStorage;
import com.soomla.profile.domain.IProvider;
import com.soomla.profile.domain.UserProfile;
import com.soomla.profile.events.auth.LoginCancelledEvent;
import com.soomla.profile.events.auth.LoginFailedEvent;
import com.soomla.profile.events.auth.LoginFinishedEvent;
import com.soomla.profile.events.auth.LoginStartedEvent;
import com.soomla.profile.events.auth.LogoutFailedEvent;
import com.soomla.profile.events.auth.LogoutFinishedEvent;
import com.soomla.profile.events.auth.LogoutStartedEvent;
import com.soomla.profile.exceptions.ProviderNotFoundException;
import com.soomla.rewards.Reward;
import java.util.Map;
/**
* A class that loads all authentication providers and performs authentication
* actions on with them. This class wraps the provider's authentication
* actions in order to connect them to user profile data and rewards.
*/
public class AuthController<T extends IAuthProvider> extends ProviderLoader<T> {
private static final String DB_KEY_PREFIX = "soomla.profile";
/**
* Constructor
*
* Loads all authentication providers
* @param usingExternalProvider {@link SoomlaProfile#initialize}
*/
public AuthController(boolean usingExternalProvider, Map<IProvider.Provider, ? extends Map<String, String>> profileParams) {
if(usingExternalProvider) {
SoomlaUtils.LogDebug(TAG, "usingExternalProvider");
}
else if (!loadProviders(profileParams)) {
String msg = "You don't have a IAuthProvider service attached. " +
"Decide which IAuthProvider you want, add it to AndroidManifest.xml " +
"and add its jar to the path.";
SoomlaUtils.LogDebug(TAG, msg);
}
}
private final Handler mainThread = new Handler(Looper.getMainLooper());
protected void runOnMainThread(Runnable toRun) {
if (Looper.myLooper() == Looper.getMainLooper()) {
toRun.run();
} else {
mainThread.post(toRun);
}
}
/**
* Logs into the given provider and grants the user a reward.
*
* @param activity The parent activity
* @param provider The provider to login with
* @param autoLogin comes "true" if user login automatically
* @param payload a String to receive when the function returns.
* @param reward The reward to grant the user for logging in @throws ProviderNotFoundException
*/
public void login(final Activity activity, final IProvider.Provider provider, final boolean autoLogin, final String payload, final Reward reward) throws ProviderNotFoundException {
final IAuthProvider authProvider = getProvider(provider);
runOnMainThread(new Runnable() {
@Override
public void run() {
setLoggedInForProvider(provider, false);
BusProvider.getInstance().post(new LoginStartedEvent(provider, autoLogin, payload));
authProvider.login(activity, new AuthCallbacks.LoginListener() {
@Override
public void success(final IProvider.Provider provider) {
afterLogin(provider, authProvider, autoLogin, payload, reward);
}
@Override
public void fail(String message) {
BusProvider.getInstance().post(new LoginFailedEvent(provider, message, autoLogin, payload));
}
@Override
public void cancel() {
BusProvider.getInstance().post(new LoginCancelledEvent(provider, autoLogin, payload));
}
});
}
});
}
private void afterLogin(final IProvider.Provider provider,
IAuthProvider authProvider, final boolean autoLogin, final String payload, final Reward reward) {
authProvider.getUserProfile(new AuthCallbacks.UserProfileListener() {
@Override
public void success(UserProfile userProfile) {
UserProfileStorage.setUserProfile(userProfile);
setLoggedInForProvider(provider, true);
BusProvider.getInstance().post(new LoginFinishedEvent(userProfile, autoLogin, payload));
if (reward != null) {
reward.give();
}
}
@Override
public void fail(String message) {
BusProvider.getInstance().post(new LoginFailedEvent(provider, message, autoLogin, payload));
}
});
}
/**
* Logs out of the given provider
*
* @param provider The provider to logout from
* @throws ProviderNotFoundException
*/
public void logout(final IProvider.Provider provider) throws ProviderNotFoundException {
final IAuthProvider authProvider = getProvider(provider);
final UserProfile userProfile = getStoredUserProfile(provider);
BusProvider.getInstance().post(new LogoutStartedEvent(provider));
setLoggedInForProvider(provider, false);
if (!isLoggedIn(provider)) {
if (userProfile != null) {
UserProfileStorage.removeUserProfile(userProfile);
}
BusProvider.getInstance().post(new LogoutFinishedEvent(provider));
return;
}
authProvider.logout(new AuthCallbacks.LogoutListener() {
@Override
public void success() {
if (userProfile != null) {
UserProfileStorage.removeUserProfile(userProfile);
}
// if caller needs stuff from the user, they should get it before logout
// pass only the provider here
BusProvider.getInstance().post(new LogoutFinishedEvent(provider));
}
@Override
public void fail(String message) {
BusProvider.getInstance().post(new LogoutFailedEvent(provider, message));
}
});
}
/**
* Fetches the user profile for the given provider from the device's storage.
*
* @param provider The provider to get the stored user profile for
* @return The user profile for the given provider
*/
public UserProfile getStoredUserProfile(IProvider.Provider provider) {
UserProfile userProfile = UserProfileStorage.getUserProfile(provider);
if (userProfile == null) {
return null;
}
return UserProfileStorage.getUserProfile(provider);
}
/**
* Checks if the user is logged in the given provider
*
* @param activity The parent activity
* @param provider The provider to check
* @return true if the user is logged in, false otherwise
* @throws ProviderNotFoundException if the given provider is not loaded
*/
public boolean isLoggedIn(final Activity activity, IProvider.Provider provider) throws ProviderNotFoundException {
final IAuthProvider authProvider = getProvider(provider);
return authProvider.isLoggedIn(activity);
}
/**
* Checks if the user is logged in the given provider
*
* @param provider The provider to check
* @return true if the user is logged in, false otherwise
* @throws ProviderNotFoundException if the given provider is not loaded
*/
public boolean isLoggedIn(IProvider.Provider provider) throws ProviderNotFoundException {
final IAuthProvider authProvider = getProvider(provider);
return authProvider.isLoggedIn();
}
/**
* Perform login to providers, if (where) it's needed.
*/
@SuppressWarnings("ConstantConditions")
public void settleAutoLogin(Activity activity) {
for (Map.Entry<IProvider.Provider, T> entry : this.mProviders.entrySet()) {
T authProvider = entry.getValue();
if (authProvider.isAutoLogin()) {
IProvider.Provider provider = entry.getKey();
if (this.wasLoggedInWithProvider(provider)) {
String payload = "";
Reward reward = null;
if (authProvider.isLoggedIn(activity)) {
setLoggedInForProvider(provider, false);
BusProvider.getInstance().post(new LoginStartedEvent(provider, true, payload));
afterLogin(provider, authProvider, true, payload, reward);
} else {
login(activity, provider, true, payload, reward);
}
}
}
}
}
private void setLoggedInForProvider(IProvider.Provider provider, boolean value) {
String key = getLoggedInStorageKeyForProvider(provider);
if (value) {
KeyValueStorage.setValue(key, "true");
} else {
KeyValueStorage.deleteKeyValue(key);
}
}
private boolean wasLoggedInWithProvider(IProvider.Provider provider) {
return "true".equals(KeyValueStorage.getValue(getLoggedInStorageKeyForProvider(provider)));
}
private String getLoggedInStorageKeyForProvider(IProvider.Provider provider) {
return String.format("%s.%s.%s", DB_KEY_PREFIX, provider.toString(), "loggedIn");
}
private static final String TAG = "SOOMLA AuthController";
}
|
SoomlaAndroidProfile/src/com/soomla/profile/AuthController.java
|
/*
* Copyright (C) 2012-2014 Soomla Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.soomla.profile;
import android.app.Activity;
import android.os.Handler;
import android.os.Looper;
import com.soomla.BusProvider;
import com.soomla.SoomlaUtils;
import com.soomla.data.KeyValueStorage;
import com.soomla.profile.auth.AuthCallbacks;
import com.soomla.profile.auth.IAuthProvider;
import com.soomla.profile.data.UserProfileStorage;
import com.soomla.profile.domain.IProvider;
import com.soomla.profile.domain.UserProfile;
import com.soomla.profile.events.auth.LoginCancelledEvent;
import com.soomla.profile.events.auth.LoginFailedEvent;
import com.soomla.profile.events.auth.LoginFinishedEvent;
import com.soomla.profile.events.auth.LoginStartedEvent;
import com.soomla.profile.events.auth.LogoutFailedEvent;
import com.soomla.profile.events.auth.LogoutFinishedEvent;
import com.soomla.profile.events.auth.LogoutStartedEvent;
import com.soomla.profile.exceptions.ProviderNotFoundException;
import com.soomla.rewards.Reward;
import java.util.Map;
/**
* A class that loads all authentication providers and performs authentication
* actions on with them. This class wraps the provider's authentication
* actions in order to connect them to user profile data and rewards.
*/
public class AuthController<T extends IAuthProvider> extends ProviderLoader<T> {
private static final String DB_KEY_PREFIX = "soomla.profile";
/**
* Constructor
*
* Loads all authentication providers
* @param usingExternalProvider {@link SoomlaProfile#initialize}
*/
public AuthController(boolean usingExternalProvider, Map<IProvider.Provider, ? extends Map<String, String>> profileParams) {
if(usingExternalProvider) {
SoomlaUtils.LogDebug(TAG, "usingExternalProvider");
}
else if (!loadProviders(profileParams)) {
String msg = "You don't have a IAuthProvider service attached. " +
"Decide which IAuthProvider you want, add it to AndroidManifest.xml " +
"and add its jar to the path.";
SoomlaUtils.LogDebug(TAG, msg);
}
}
private final Handler mainThread = new Handler(Looper.getMainLooper());
protected void runOnMainThread(Runnable toRun) {
if (Looper.myLooper() == Looper.getMainLooper()) {
toRun.run();
} else {
mainThread.post(toRun);
}
}
/**
* Logs into the given provider and grants the user a reward.
*
* @param activity The parent activity
* @param provider The provider to login with
* @param autoLogin comes "true" if user login automatically
* @param payload a String to receive when the function returns.
* @param reward The reward to grant the user for logging in @throws ProviderNotFoundException
*/
public void login(final Activity activity, final IProvider.Provider provider, final boolean autoLogin, final String payload, final Reward reward) throws ProviderNotFoundException {
final IAuthProvider authProvider = getProvider(provider);
runOnMainThread(new Runnable() {
@Override
public void run() {
setLoggedInForProvider(provider, false);
BusProvider.getInstance().post(new LoginStartedEvent(provider, autoLogin, payload));
authProvider.login(activity, new AuthCallbacks.LoginListener() {
@Override
public void success(final IProvider.Provider provider) {
afterLogin(provider, authProvider, autoLogin, payload, reward);
}
@Override
public void fail(String message) {
BusProvider.getInstance().post(new LoginFailedEvent(provider, message, autoLogin, payload));
}
@Override
public void cancel() {
BusProvider.getInstance().post(new LoginCancelledEvent(provider, autoLogin, payload));
}
});
}
});
}
private void afterLogin(final IProvider.Provider provider,
IAuthProvider authProvider, final boolean autoLogin, final String payload, final Reward reward) {
authProvider.getUserProfile(new AuthCallbacks.UserProfileListener() {
@Override
public void success(UserProfile userProfile) {
UserProfileStorage.setUserProfile(userProfile);
setLoggedInForProvider(provider, true);
BusProvider.getInstance().post(new LoginFinishedEvent(userProfile, autoLogin, payload));
if (reward != null) {
reward.give();
}
}
@Override
public void fail(String message) {
BusProvider.getInstance().post(new LoginFailedEvent(provider, message, autoLogin, payload));
}
});
}
/**
* Logs out of the given provider
*
* @param provider The provider to logout from
* @throws ProviderNotFoundException
*/
public void logout(final IProvider.Provider provider) throws ProviderNotFoundException {
final IAuthProvider authProvider = getProvider(provider);
final UserProfile userProfile = getStoredUserProfile(provider);
BusProvider.getInstance().post(new LogoutStartedEvent(provider));
authProvider.logout(new AuthCallbacks.LogoutListener() {
@Override
public void success() {
if (userProfile != null) {
UserProfileStorage.removeUserProfile(userProfile);
}
// if caller needs stuff from the user, they should get it before logout
// pass only the provider here
BusProvider.getInstance().post(new LogoutFinishedEvent(provider));
}
@Override
public void fail(String message) {
BusProvider.getInstance().post(new LogoutFailedEvent(provider, message));
}
});
}
/**
* Fetches the user profile for the given provider from the device's storage.
*
* @param provider The provider to get the stored user profile for
* @return The user profile for the given provider
*/
public UserProfile getStoredUserProfile(IProvider.Provider provider) {
UserProfile userProfile = UserProfileStorage.getUserProfile(provider);
if (userProfile == null) {
return null;
}
return UserProfileStorage.getUserProfile(provider);
}
/**
* Checks if the user is logged in the given provider
*
* @param activity The parent activity
* @param provider The provider to check
* @return true if the user is logged in, false otherwise
* @throws ProviderNotFoundException if the given provider is not loaded
*/
public boolean isLoggedIn(final Activity activity, IProvider.Provider provider) throws ProviderNotFoundException {
final IAuthProvider authProvider = getProvider(provider);
return authProvider.isLoggedIn(activity);
}
/**
* Checks if the user is logged in the given provider
*
* @param provider The provider to check
* @return true if the user is logged in, false otherwise
* @throws ProviderNotFoundException if the given provider is not loaded
*/
public boolean isLoggedIn(IProvider.Provider provider) throws ProviderNotFoundException {
final IAuthProvider authProvider = getProvider(provider);
return authProvider.isLoggedIn();
}
/**
* Perform login to providers, if (where) it's needed.
*/
@SuppressWarnings("ConstantConditions")
public void settleAutoLogin(Activity activity) {
for (Map.Entry<IProvider.Provider, T> entry : this.mProviders.entrySet()) {
T authProvider = entry.getValue();
if (authProvider.isAutoLogin()) {
IProvider.Provider provider = entry.getKey();
if (this.wasLoggedInWithProvider(provider)) {
String payload = "";
Reward reward = null;
if (authProvider.isLoggedIn(activity)) {
setLoggedInForProvider(provider, false);
BusProvider.getInstance().post(new LoginStartedEvent(provider, true, payload));
afterLogin(provider, authProvider, true, payload, reward);
} else {
login(activity, provider, true, payload, reward);
}
}
}
}
}
private void setLoggedInForProvider(IProvider.Provider provider, boolean value) {
String key = getLoggedInStorageKeyForProvider(provider);
if (value) {
KeyValueStorage.setValue(key, "true");
} else {
KeyValueStorage.deleteKeyValue(key);
}
}
private boolean wasLoggedInWithProvider(IProvider.Provider provider) {
return "true".equals(KeyValueStorage.getValue(getLoggedInStorageKeyForProvider(provider)));
}
private String getLoggedInStorageKeyForProvider(IProvider.Provider provider) {
return String.format("%s.%s.%s", DB_KEY_PREFIX, provider.toString(), "loggedIn");
}
private static final String TAG = "SOOMLA AuthController";
}
|
Fix #112 Clear user profile from storage when calling logout
|
SoomlaAndroidProfile/src/com/soomla/profile/AuthController.java
|
Fix #112 Clear user profile from storage when calling logout
|
|
Java
|
apache-2.0
|
f86dc0a117976b7463bf7dabf119b537644e7a9a
| 0
|
nectec-wisru/android-TanrabadSurvey,nectec-wisru/android-TanlabadSurvey,tanrabad/survey
|
/*
* Copyright (c) 2016 NECTEC
* National Electronics and Computer Technology Center, Thailand
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tanrabad.survey.presenter;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.animation.Animation;
import android.widget.CheckBox;
import com.google.android.gms.appindexing.Action;
import com.google.android.gms.appindexing.AppIndex;
import com.google.android.gms.appindexing.Thing;
import com.google.android.gms.common.api.GoogleApiClient;
import org.tanrabad.survey.BuildConfig;
import org.tanrabad.survey.R;
import org.tanrabad.survey.TanrabadApp;
import org.tanrabad.survey.entity.User;
import org.tanrabad.survey.job.AbsJobRunner;
import org.tanrabad.survey.job.DeleteUserDataJob;
import org.tanrabad.survey.job.SetTrialModeAndSelectApiServerJob;
import org.tanrabad.survey.job.UploadJobRunner;
import org.tanrabad.survey.presenter.authen.AuthenActivity;
import org.tanrabad.survey.repository.BrokerUserRepository;
import org.tanrabad.survey.service.PlaceRestService;
import org.tanrabad.survey.service.ServiceLastUpdatePreference;
import org.tanrabad.survey.service.TrialModePreference;
import org.tanrabad.survey.utils.alert.Alert;
import org.tanrabad.survey.utils.android.InternetConnection;
import org.tanrabad.survey.utils.showcase.ShowcasePreference;
import static android.view.WindowManager.LayoutParams.FLAG_FULLSCREEN;
import static android.view.animation.AnimationUtils.loadAnimation;
public class LoginActivity extends TanrabadActivity {
private static final int AUTHEN_REQUEST_CODE = 1232;
private CheckBox needShowcase;
private ShowcasePreference showcasePreference;
private TrialModePreference trialModePreference;
private GoogleApiClient appIndexClient;
@Override
protected void onCreate(Bundle savedInstanceState) {
getWindow().setFlags(FLAG_FULLSCREEN, FLAG_FULLSCREEN);
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_login);
appIndexClient = new GoogleApiClient.Builder(this).addApi(AppIndex.API).build();
setupPreferences();
setupShowcaseOption();
findViewById(R.id.trial).setOnClickListener(new OnClickListener() {
@Override
public void onClick(View view) {
trialLogin();
}
});
findViewById(R.id.authentication_button).setOnClickListener(new OnClickListener() {
@Override
public void onClick(View view) {
openAuthenWeb();
}
});
startAnimation();
}
private void setupPreferences() {
trialModePreference = new TrialModePreference(this);
showcasePreference = new ShowcasePreference(this);
}
private void setupShowcaseOption() {
needShowcase = (CheckBox) findViewById(R.id.need_showcase);
needShowcase.setChecked(showcasePreference.get());
}
private void trialLogin() {
if (isFirstTime() && !InternetConnection.isAvailable(this)) {
Alert.highLevel().show(R.string.connect_internet_when_use_for_first_time);
TanrabadApp.action().firstTimeWithoutInternet();
return;
}
if (!trialModePreference.isUsingTrialMode()) {
if (!InternetConnection.isAvailable(this)) {
Alert.highLevel().show(R.string.connect_internet_before_using_trial_mode);
return;
}
AccountUtils.setUser(BrokerUserRepository.getInstance().findByUsername(BuildConfig.TRIAL_USER));
AbsJobRunner jobRunner = new UploadJobRunner();
jobRunner.addJob(new DeleteUserDataJob(this));
jobRunner.addJob(new SetTrialModeAndSelectApiServerJob(this, true));
jobRunner.addJob(new StartInitialActivityJob(this));
jobRunner.start();
} else {
AccountUtils.setUser(BrokerUserRepository.getInstance().findByUsername(BuildConfig.TRIAL_USER));
startInitialActivity();
}
showcasePreference.save(needShowcase.isChecked());
}
private void openAuthenWeb() {
User lastLoginUser = AccountUtils.getLastLoginUser();
if (lastLoginUser != null
&& !AccountUtils.isTrialUser(lastLoginUser)) {
AccountUtils.setUser(lastLoginUser);
InitialActivity.open(this);
finish();
} else if (InternetConnection.isAvailable(this)) {
Intent intent = new Intent(this, AuthenActivity.class);
startActivityForResult(intent, AUTHEN_REQUEST_CODE);
} else {
Alert.highLevel().show(R.string.connect_internet_before_authen);
}
}
private void startAnimation() {
findViewById(R.id.bg_blue).startAnimation(loadAnimation(this, R.anim.login_bg_blue));
Animation dropIn = loadAnimation(this, R.anim.logo);
dropIn.setStartOffset(1200);
View logoTrb = findViewById(R.id.logo_tabrabad);
logoTrb.startAnimation(dropIn);
logoTrb.setOnLongClickListener(new View.OnLongClickListener() {
@Override
public boolean onLongClick(View view) {
Intent intent = new Intent(LoginActivity.this, SplashScreenActivity.class);
startActivity(intent);
return true;
}
});
}
private boolean isFirstTime() {
String placeTimeStamp = new ServiceLastUpdatePreference(this, PlaceRestService.PATH).get();
return TextUtils.isEmpty(placeTimeStamp);
}
private void startInitialActivity() {
InitialActivity.open(LoginActivity.this);
overridePendingTransition(R.anim.drop_in, R.anim.drop_out);
finish();
}
@Override
protected void onStop() {
appIndexClient.disconnect();
AppIndex.AppIndexApi.end(appIndexClient, getAppIndexAction());
super.onStop();
}
public Action getAppIndexAction() {
Thing thing = new Thing.Builder()
.setName("ทันระบาดสำรวจ")
.setDescription("ประสบการณ์ใหม่ สำหรับการสำรวจลูกน้ำยุง")
.setUrl(Uri.parse("http://www.tanrabad.org/survey"))
.build();
return new Action.Builder(Action.TYPE_VIEW)
.setObject(thing)
.setActionStatus(Action.STATUS_TYPE_COMPLETED)
.build();
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == AUTHEN_REQUEST_CODE) {
if (resultCode == RESULT_OK) {
if (trialModePreference.isUsingTrialMode()) {
AbsJobRunner jobRunner = new UploadJobRunner();
jobRunner.addJob(new DeleteUserDataJob(this));
jobRunner.addJob(new SetTrialModeAndSelectApiServerJob(this, false));
jobRunner.addJob(new StartInitialActivityJob(this));
jobRunner.start();
} else {
startInitialActivity();
}
showcasePreference.save(needShowcase.isChecked());
} else if (resultCode == AuthenActivity.RESULT_ERROR) {
Alert.highLevel().show(R.string.authen_error_response);
}
}
super.onActivityResult(requestCode, resultCode, data);
}
@Override
protected void onStart() {
super.onStart();
appIndexClient.connect();
AppIndex.AppIndexApi.start(appIndexClient, getAppIndexAction());
}
}
|
app/src/main/java/org/tanrabad/survey/presenter/LoginActivity.java
|
/*
* Copyright (c) 2016 NECTEC
* National Electronics and Computer Technology Center, Thailand
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tanrabad.survey.presenter;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.animation.Animation;
import android.widget.CheckBox;
import com.google.android.gms.appindexing.Action;
import com.google.android.gms.appindexing.AppIndex;
import com.google.android.gms.appindexing.Thing;
import com.google.android.gms.common.api.GoogleApiClient;
import org.tanrabad.survey.BuildConfig;
import org.tanrabad.survey.R;
import org.tanrabad.survey.TanrabadApp;
import org.tanrabad.survey.entity.User;
import org.tanrabad.survey.job.AbsJobRunner;
import org.tanrabad.survey.job.DeleteUserDataJob;
import org.tanrabad.survey.job.SetTrialModeAndSelectApiServerJob;
import org.tanrabad.survey.job.UploadJobRunner;
import org.tanrabad.survey.presenter.authen.AuthenActivity;
import org.tanrabad.survey.repository.BrokerUserRepository;
import org.tanrabad.survey.service.PlaceRestService;
import org.tanrabad.survey.service.ServiceLastUpdatePreference;
import org.tanrabad.survey.service.TrialModePreference;
import org.tanrabad.survey.utils.alert.Alert;
import org.tanrabad.survey.utils.android.InternetConnection;
import org.tanrabad.survey.utils.showcase.ShowcasePreference;
import static android.view.WindowManager.LayoutParams.FLAG_FULLSCREEN;
import static android.view.animation.AnimationUtils.loadAnimation;
public class LoginActivity extends TanrabadActivity {
private static final int AUTHEN_REQUEST_CODE = 1232;
private CheckBox needShowcase;
private ShowcasePreference showcasePreference;
private TrialModePreference trialModePreference;
private GoogleApiClient appIndexClient;
@Override
protected void onCreate(Bundle savedInstanceState) {
getWindow().setFlags(FLAG_FULLSCREEN, FLAG_FULLSCREEN);
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_login);
appIndexClient = new GoogleApiClient.Builder(this).addApi(AppIndex.API).build();
setupPreferences();
setupShowcaseOption();
findViewById(R.id.trial).setOnClickListener(new OnClickListener() {
@Override
public void onClick(View view) {
trialLogin();
}
});
findViewById(R.id.authentication_button).setOnClickListener(new OnClickListener() {
@Override
public void onClick(View view) {
openAuthenWeb();
}
});
startAnimation();
}
private void setupPreferences() {
trialModePreference = new TrialModePreference(this);
showcasePreference = new ShowcasePreference(this);
}
private void setupShowcaseOption() {
needShowcase = (CheckBox) findViewById(R.id.need_showcase);
needShowcase.setChecked(showcasePreference.get());
}
private void trialLogin() {
if (isFirstTime() && !InternetConnection.isAvailable(this)) {
Alert.highLevel().show(R.string.connect_internet_when_use_for_first_time);
TanrabadApp.action().firstTimeWithoutInternet();
return;
}
if (!trialModePreference.isUsingTrialMode()) {
if (!InternetConnection.isAvailable(this)) {
Alert.highLevel().show(R.string.connect_internet_before_using_trial_mode);
return;
}
AccountUtils.setUser(BrokerUserRepository.getInstance().findByUsername(BuildConfig.TRIAL_USER));
AbsJobRunner jobRunner = new UploadJobRunner();
jobRunner.addJob(new DeleteUserDataJob(this));
jobRunner.addJob(new StartInitialActivityJob(this));
jobRunner.start();
} else {
AccountUtils.setUser(BrokerUserRepository.getInstance().findByUsername(BuildConfig.TRIAL_USER));
startInitialActivity();
}
showcasePreference.save(needShowcase.isChecked());
}
private void openAuthenWeb() {
User lastLoginUser = AccountUtils.getLastLoginUser();
if (lastLoginUser != null
&& !AccountUtils.isTrialUser(lastLoginUser)) {
AccountUtils.setUser(lastLoginUser);
InitialActivity.open(this);
finish();
} else if (InternetConnection.isAvailable(this)) {
Intent intent = new Intent(this, AuthenActivity.class);
startActivityForResult(intent, AUTHEN_REQUEST_CODE);
} else {
Alert.highLevel().show(R.string.connect_internet_before_authen);
}
}
private void startAnimation() {
findViewById(R.id.bg_blue).startAnimation(loadAnimation(this, R.anim.login_bg_blue));
Animation dropIn = loadAnimation(this, R.anim.logo);
dropIn.setStartOffset(1200);
View logoTrb = findViewById(R.id.logo_tabrabad);
logoTrb.startAnimation(dropIn);
logoTrb.setOnLongClickListener(new View.OnLongClickListener() {
@Override
public boolean onLongClick(View view) {
Intent intent = new Intent(LoginActivity.this, SplashScreenActivity.class);
startActivity(intent);
return true;
}
});
}
private boolean isFirstTime() {
String placeTimeStamp = new ServiceLastUpdatePreference(this, PlaceRestService.PATH).get();
return TextUtils.isEmpty(placeTimeStamp);
}
private void startInitialActivity() {
InitialActivity.open(LoginActivity.this);
overridePendingTransition(R.anim.drop_in, R.anim.drop_out);
finish();
}
@Override
protected void onStop() {
appIndexClient.disconnect();
AppIndex.AppIndexApi.end(appIndexClient, getAppIndexAction());
super.onStop();
}
public Action getAppIndexAction() {
Thing thing = new Thing.Builder()
.setName("ทันระบาดสำรวจ")
.setDescription("ประสบการณ์ใหม่ สำหรับการสำรวจลูกน้ำยุง")
.setUrl(Uri.parse("http://www.tanrabad.org/survey"))
.build();
return new Action.Builder(Action.TYPE_VIEW)
.setObject(thing)
.setActionStatus(Action.STATUS_TYPE_COMPLETED)
.build();
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == AUTHEN_REQUEST_CODE) {
if (resultCode == RESULT_OK) {
if (trialModePreference.isUsingTrialMode()) {
AbsJobRunner jobRunner = new UploadJobRunner();
jobRunner.addJob(new DeleteUserDataJob(this));
jobRunner.addJob(new SetTrialModeAndSelectApiServerJob(this, false));
jobRunner.addJob(new StartInitialActivityJob(this));
jobRunner.start();
} else {
startInitialActivity();
}
showcasePreference.save(needShowcase.isChecked());
} else if (resultCode == AuthenActivity.RESULT_ERROR) {
Alert.highLevel().show(R.string.authen_error_response);
}
}
super.onActivityResult(requestCode, resultCode, data);
}
@Override
protected void onStart() {
super.onStart();
appIndexClient.connect();
AppIndex.AppIndexApi.start(appIndexClient, getAppIndexAction());
}
}
|
แก้บัคแอพล่าข้อมูลทุกครั้งที่ทดลองใช้งาน
|
app/src/main/java/org/tanrabad/survey/presenter/LoginActivity.java
|
แก้บัคแอพล่าข้อมูลทุกครั้งที่ทดลองใช้งาน
|
|
Java
|
apache-2.0
|
356147c06f7ce94bb3dc180bae75cc605313862d
| 0
|
Kerbores/Falsework,Kerbores/Falsework,Kerbores/Falsework,Kerbores/Falsework,Kerbores/Falsework
|
package com.sino.scaffold.utils;
import java.util.Iterator;
import java.util.Map;
import org.nutz.json.Json;
import org.nutz.json.JsonFormat;
import org.nutz.lang.util.NutMap;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* @author kerbores
*
*/
@ApiModel("响应结果")
public class Result {
/**
* 创建一个异常结果
*
* @return 一个异常结果实例,不携带异常信息
*/
public static Result exception() {
return Result.me().setOperationState(OperationState.EXCEPTION);
}
/**
* 未登录
*
* @return
*/
public static Result unlogin() {
return Result.me().setOperationState(OperationState.UNLOGINED);
}
/**
* 创建一个异常结果
*
* @param e
* 异常
* @return 一个异常结果实例,包含参数异常的信息
*/
public static Result exception(Exception e) {
return Result.exception(e.getMessage());
}
/**
* 创建一个异常结果
*
* @param msg
* 异常信息
* @return 一个异常结果实例,不携带异常信息
*/
public static Result exception(String msg) {
return Result.exception().addData("reason", msg);
}
/**
* 创建一个带失败信息的result
*
* @param reason
* 失败原因
* @return result实例
*/
public static Result fail(String reason) {
Map data = new NutMap();
data.put("reason", reason);
return Result.me().setOperationState(OperationState.FAIL).setData(data);
}
/**
* 获取一个result实例
*
* @return 一个不携带任何信息的result实例
*/
public static Result me() {
return new Result();
}
/**
* 创建一个成功结果
*
* @return result实例状态为成功无数据携带
*/
public static Result success() {
return Result.me().setOperationState(OperationState.SUCCESS);
}
/**
* 创建一个成功结果
*
* @param data
* 需要携带的数据
* @return result实例状态为成功数据位传入参数
*/
public static Result success(Map data) {
return Result.success().setData(data);
}
/**
* 操作结果数据 假设一个操作要返回很多的数据 一个用户名 一个产品 一个相关产品列表 一个产品的评论信息列表 我们以key
* value形式进行保存,页面获取data对象读取其对于的value即可
*/
@ApiModelProperty("数据")
private NutMap data = new NutMap();
/**
* 带状态的操作 比如登录有成功和失败
*/
@ApiModelProperty("状态")
private OperationState operationState = OperationState.DEFAULT;
/**
* 用于在jsp中显示标题的字段 title
*/
@ApiModelProperty("标题")
private String title;
public Result() {
super();
}
public Result(OperationState operationState, Map data, String title) {
super();
this.operationState = operationState;
this.data = NutMap.WRAP(data);
this.title = title;
}
/**
* 添加更多的数据
*
* @param data
* 待添加的数据
* @return 结果实例
*/
public Result addData(Map<String, Object> data) {
Iterator iterator = data.keySet().iterator();
while (iterator.hasNext()) {
String key = iterator.next().toString();
this.data.put(key, data.get(key));
}
return this;
}
/**
* 添加数据
*
* @param key
* @param object
* @return
*/
public Result addData(String key, Object object) {
if (this.data == null) {
data = new NutMap();
}
data.put(key, object);
return this;
}
/**
* 清空结果
*/
public Result clear() {
this.operationState = OperationState.DEFAULT;
if (data != null) {
this.data.clear();
}
this.title = "";
return this;
}
public NutMap getData() {
return data;
}
public OperationState getOperationState() {
return operationState;
}
/**
* 获取错误获取异常原因
*
* @return
*/
@ApiModelProperty("失败原因")
public String getReason() {
return getData().getString("reason");
}
public String getTitle() {
return title;
}
/**
* 是否成功
*
* @return
*/
@ApiModelProperty("是否业务处理成功标识")
public boolean isSuccess() {
return getOperationState() == OperationState.SUCCESS;
}
public Result setData(Map<String, Object> data) {
this.data = NutMap.WRAP(data);
return this;
}
public Result setOperationState(OperationState operationState) {
this.operationState = operationState;
return this;
}
public Result setTitle(String title) {
this.title = title;
return this;
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return Json.toJson(this, JsonFormat.forLook());
}
}
|
boot-nutz-vue/src/main/java/com/sino/scaffold/utils/Result.java
|
package com.sino.scaffold.utils;
import java.util.Iterator;
import java.util.Map;
import org.nutz.json.Json;
import org.nutz.json.JsonFormat;
import org.nutz.lang.util.NutMap;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
* @author kerbores
*
*/
@ApiModel("响应结果")
public class Result {
/**
* 创建一个异常结果
*
* @return 一个异常结果实例,不携带异常信息
*/
public static Result exception() {
return Result.me().setOperationState(OperationState.EXCEPTION);
}
/**
* 未登录
*
* @return
*/
public static Result unlogin() {
return Result.me().setOperationState(OperationState.UNLOGINED);
}
/**
* 创建一个异常结果
*
* @param e
* 异常
* @return 一个异常结果实例,包含参数异常的信息
*/
public static Result exception(Exception e) {
return Result.exception(e.getMessage());
}
/**
* 创建一个异常结果
*
* @param msg
* 异常信息
* @return 一个异常结果实例,不携带异常信息
*/
public static Result exception(String msg) {
return Result.exception().addData("reason", msg);
}
/**
* 创建一个带失败信息的result
*
* @param reason
* 失败原因
* @return result实例
*/
public static Result fail(String reason) {
Map data = new NutMap();
data.put("reason", reason);
return Result.me().setOperationState(OperationState.FAIL).setData(data);
}
/**
* 获取一个result实例
*
* @return 一个不携带任何信息的result实例
*/
public static Result me() {
return new Result();
}
/**
* 创建一个成功结果
*
* @return result实例状态为成功无数据携带
*/
public static Result success() {
return Result.me().setOperationState(OperationState.SUCCESS);
}
/**
* 创建一个成功结果
*
* @param data
* 需要携带的数据
* @return result实例状态为成功数据位传入参数
*/
public static Result success(Map data) {
return Result.success().setData(data);
}
/**
* 操作结果数据 假设一个操作要返回很多的数据 一个用户名 一个产品 一个相关产品列表 一个产品的评论信息列表 我们以key
* value形式进行保存,页面获取data对象读取其对于的value即可
*/
@ApiModelProperty("数据")
private NutMap data = new NutMap();
/**
* 带状态的操作 比如登录有成功和失败
*/
@ApiModelProperty("状态")
private OperationState operationState = OperationState.DEFAULT;
/**
* 用于在jsp中显示标题的字段 title
*/
@ApiModelProperty("标题")
private String title;
public Result() {
super();
}
public Result(OperationState operationState, Map data, String title) {
super();
this.operationState = operationState;
this.data = NutMap.WRAP(data);
this.title = title;
}
/**
* 添加更多的数据
*
* @param data
* 待添加的数据
* @return 结果实例
*/
public Result addData(Map data) {
Iterator iterator = data.keySet().iterator();
while (iterator.hasNext()) {
String key = iterator.next().toString();
this.data.put(key, data.get(key));
}
return this;
}
/**
* 添加数据
*
* @param key
* @param object
* @return
*/
public Result addData(String key, Object object) {
if (this.data == null) {
data = new NutMap();
}
data.put(key, object);
return this;
}
/**
* 清空结果
*/
public Result clear() {
this.operationState = OperationState.DEFAULT;
if (data != null) {
this.data.clear();
}
this.title = "";
return this;
}
public NutMap getData() {
return data;
}
/**
* 以nutmap包装数据
*
* @return
*/
@ApiModelProperty("数据的nutMap封装")
public NutMap getNutMapData() {
return NutMap.WRAP(data);
}
public OperationState getOperationState() {
return operationState;
}
/**
* 获取错误获取异常原因
*
* @return
*/
@ApiModelProperty("失败原因")
public String getReason() {
return getData().getString("reason");
}
public String getTitle() {
return title;
}
/**
* 是否成功
*
* @return
*/
@ApiModelProperty("是否业务处理成功标识")
public boolean isSuccess() {
return getOperationState() == OperationState.SUCCESS;
}
public Result setData(Map data) {
this.data = NutMap.WRAP(data);
return this;
}
public Result setOperationState(OperationState operationState) {
this.operationState = operationState;
return this;
}
public Result setTitle(String title) {
this.title = title;
return this;
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return Json.toJson(this, JsonFormat.forLook());
}
}
|
fix:编译器的泛型检查
|
boot-nutz-vue/src/main/java/com/sino/scaffold/utils/Result.java
|
fix:编译器的泛型检查
|
|
Java
|
apache-2.0
|
9dd59e0aa6a16771a315732af95ee47fa3a7f5e9
| 0
|
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
|
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ui.mac;
import com.apple.eawt.*;
import com.intellij.ide.ActiveWindowsWatcher;
import com.intellij.jdkEx.JdkEx;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.openapi.wm.impl.IdeFrameDecorator;
import com.intellij.openapi.wm.impl.IdeRootPane;
import com.intellij.openapi.wm.impl.ProjectFrameHelper;
import com.intellij.ui.components.panels.NonOpaquePanel;
import com.intellij.ui.mac.foundation.Foundation;
import com.intellij.ui.mac.foundation.ID;
import com.intellij.ui.mac.foundation.MacUtil;
import com.intellij.util.EventDispatcher;
import com.intellij.util.ui.JBDimension;
import com.intellij.util.ui.UIUtil;
import com.sun.jna.Callback;
import com.sun.jna.Pointer;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.concurrency.AsyncPromise;
import org.jetbrains.concurrency.Promise;
import org.jetbrains.concurrency.Promises;
import javax.swing.*;
import java.awt.*;
import java.lang.reflect.Method;
import java.util.EventListener;
import java.util.LinkedList;
import java.util.Queue;
public final class MacMainFrameDecorator extends IdeFrameDecorator {
private interface FSListener extends FullScreenListener, EventListener {}
private static class FSAdapter extends FullScreenAdapter implements FSListener {}
private static class FullScreenQueue {
private final Queue<Runnable> myQueue = new LinkedList<>();
private boolean myWaitingForAppKit = false;
synchronized void runOrEnqueue(Runnable runnable) {
if (myWaitingForAppKit) {
myQueue.add(runnable);
}
else {
ApplicationManager.getApplication().invokeLater(runnable);
myWaitingForAppKit = true;
}
}
synchronized void runFromQueue() {
if (!myQueue.isEmpty()) {
myQueue.remove().run();
myWaitingForAppKit = true;
}
else {
myWaitingForAppKit = false;
}
}
}
private void enterFullScreen() {
myInFullScreen = true;
storeFullScreenStateIfNeeded();
myFullScreenQueue.runFromQueue();
updateTabBar(myFrame, 0);
}
private void exitFullScreen() {
myInFullScreen = false;
storeFullScreenStateIfNeeded();
JRootPane rootPane = myFrame.getRootPane();
if (rootPane != null) rootPane.putClientProperty(FULL_SCREEN, null);
myFullScreenQueue.runFromQueue();
updateTabBarOnExitFromFullScreen();
}
private void storeFullScreenStateIfNeeded() {
// todo should we really check that frame has not null project as it was implemented previously?
myFrame.doLayout();
}
public static final String FULL_SCREEN = "Idea.Is.In.FullScreen.Mode.Now";
private static Method toggleFullScreenMethod;
private static Method enterFullScreenMethod;
private static Method leaveFullScreenMethod;
static {
try {
//noinspection SpellCheckingInspection
Class.forName("com.apple.eawt.FullScreenUtilities");
try {
//noinspection JavaReflectionMemberAccess
enterFullScreenMethod = Application.class.getMethod("requestEnterFullScreen", Window.class);
//noinspection JavaReflectionMemberAccess
leaveFullScreenMethod = Application.class.getMethod("requestLeaveFullScreen", Window.class);
}
catch (NoSuchMethodException e) {
// temporary solution for the old runtime
//noinspection JavaReflectionMemberAccess
toggleFullScreenMethod = Application.class.getMethod("requestToggleFullScreen", Window.class);
}
}
catch (Exception e) {
Logger.getInstance(MacMainFrameDecorator.class).debug(e);
}
}
private final FullScreenQueue myFullScreenQueue = new FullScreenQueue();
private final EventDispatcher<FSListener> myDispatcher = EventDispatcher.create(FSListener.class);
private boolean myInFullScreen;
private boolean myShowFrame;
private boolean myInitFrame;
private static final String WIN_TAB_FILLER = "WIN_TAB_FILLER_KEY";
private static int DEFAULT_WIN_TAB_HEIGHT() {
return Registry.intValue("ide.mac.bigsur.window.with.tabs.height", 28);
}
@SuppressWarnings("FieldCanBeLocal")
private static Callback myObserverCallback; // don't convert to local var
private static ID myObserverDelegate;
private static void initTabObserver(@NotNull ID window) {
ID tabBar = Foundation.invoke(window, "tabGroup");
if (!ID.NIL.equals(Foundation.invoke(tabBar, "observationInfo"))) {
return;
}
if (myObserverDelegate == null) {
myObserverCallback = new Callback() {
@SuppressWarnings("unused")
public void callback(ID self, Pointer selector, ID ofObject, ID change, Pointer context) {
ApplicationManager.getApplication().invokeLater(() -> updateTabBars(null));
}
};
ID delegateClass = Foundation.allocateObjcClassPair(Foundation.getObjcClass("NSObject"), "MyWindowTabGroupObserver");
Foundation.addMethod(delegateClass, Foundation.createSelector("observeValueForKeyPath:ofObject:change:context:"),
myObserverCallback, "v*");
Foundation.registerObjcClassPair(delegateClass);
myObserverDelegate = Foundation.invoke("MyWindowTabGroupObserver", "new");
}
Foundation.invoke(tabBar, "addObserver:forKeyPath:options:context:", myObserverDelegate, Foundation.nsString("windows"), 0, ID.NIL);
}
@NotNull
public static JComponent _wrapRootPaneNorthSide(@NotNull JRootPane rootPane, @NotNull JComponent northComponent) {
if (!JdkEx.isTabbingModeAvailable()) {
return northComponent;
}
NonOpaquePanel panel = new NonOpaquePanel(new BorderLayout());
NonOpaquePanel filler = new NonOpaquePanel();
filler.setVisible(false);
panel.add(filler, BorderLayout.NORTH);
panel.add(northComponent);
rootPane.putClientProperty(WIN_TAB_FILLER, filler);
return panel;
}
public MacMainFrameDecorator(@NotNull JFrame frame, @NotNull Disposable parentDisposable) {
super(frame);
if (JdkEx.setTabbingMode(frame, () -> updateTabBars(null))) {
Disposer.register(parentDisposable, new Disposable() { // don't convert to lambda
@Override
public void dispose() {
updateTabBars(null);
}
});
}
if (leaveFullScreenMethod != null || toggleFullScreenMethod != null) {
FullScreenUtilities.setWindowCanFullScreen(frame, true);
// Native full screen listener can be set only once
FullScreenUtilities.addFullScreenListenerTo(frame, new FullScreenListener() {
@Override
public void windowEnteringFullScreen(AppEvent.FullScreenEvent event) {
myDispatcher.getMulticaster().windowEnteringFullScreen(event);
}
@Override
public void windowEnteredFullScreen(AppEvent.FullScreenEvent event) {
myDispatcher.getMulticaster().windowEnteredFullScreen(event);
}
@Override
public void windowExitingFullScreen(AppEvent.FullScreenEvent event) {
myDispatcher.getMulticaster().windowExitingFullScreen(event);
}
@Override
public void windowExitedFullScreen(AppEvent.FullScreenEvent event) {
myDispatcher.getMulticaster().windowExitedFullScreen(event);
}
});
myDispatcher.addListener(new FSAdapter() {
@Override
public void windowEnteringFullScreen(AppEvent.FullScreenEvent event) {
JRootPane rootPane = myFrame.getRootPane();
if (rootPane != null && rootPane.getBorder() != null && Registry.is("ide.mac.transparentTitleBarAppearance")) {
rootPane.setBorder(null);
}
updateTabBar(myFrame, 0);
}
@Override
public void windowEnteredFullScreen(AppEvent.FullScreenEvent event) {
// We can get the notification when the frame has been disposed
JRootPane rootPane = myFrame.getRootPane();
if (rootPane != null) rootPane.putClientProperty(FULL_SCREEN, Boolean.TRUE);
enterFullScreen();
myFrame.validate();
}
@Override
public void windowExitedFullScreen(AppEvent.FullScreenEvent event) {
// We can get the notification when the frame has been disposed
JRootPane rootPane = myFrame.getRootPane();
if (rootPane instanceof IdeRootPane && Registry.is("ide.mac.transparentTitleBarAppearance")) {
IdeRootPane ideRootPane = (IdeRootPane)rootPane;
UIUtil.setCustomTitleBar(myFrame, ideRootPane, runnable -> {
Disposer.register(parentDisposable, () -> runnable.run());
});
}
exitFullScreen();
ActiveWindowsWatcher.addActiveWindow(myFrame);
myFrame.validate();
}
});
}
}
@Override
public void frameShow() {
myShowFrame = true;
if (myInitFrame) {
initUpdateTabBars();
}
}
@Override
public void setProject() {
myInitFrame = true;
if (myShowFrame) {
initUpdateTabBars();
}
}
private void initUpdateTabBars() {
// update tab logic only after call [NSWindow makeKeyAndOrderFront] and after add frame to window manager
ApplicationManager.getApplication().invokeLater(() -> updateTabBars(myFrame));
}
private static void updateTabBars(@Nullable JFrame newFrame) {
if (!JdkEx.isTabbingModeAvailable()) {
return;
}
IdeFrame[] frames = WindowManager.getInstance().getAllProjectFrames();
if (frames.length < 2) {
if (frames.length == 1) {
updateTabBar(frames[0], 0);
if (newFrame != null) {
Foundation.executeOnMainThread(true, false, () -> {
initTabObserver(MacUtil.getWindowFromJavaWindow(newFrame));
});
}
}
return;
}
ApplicationManager.getApplication().invokeLater(() -> {
Integer[] visibleAndHeights = new Integer[frames.length];
boolean callInAppkit = false;
int newIndex = -1;
for (int i = 0; i < frames.length; i++) {
ProjectFrameHelper helper = (ProjectFrameHelper)frames[i];
if (newFrame == helper.getFrame()) {
newIndex = i;
}
if (helper.isInFullScreen()) {
visibleAndHeights[i] = 0;
}
else {
callInAppkit = true;
}
}
if (callInAppkit) {
// call only for shown window and only in Appkit
Foundation.executeOnMainThread(true, false, () -> {
if (newFrame != null) {
initTabObserver(MacUtil.getWindowFromJavaWindow(newFrame));
}
for (int i = 0; i < frames.length; i++) {
if (visibleAndHeights[i] == null) {
ID window = MacUtil.getWindowFromJavaWindow(((ProjectFrameHelper)frames[i]).getFrame());
int styleMask = Foundation.invoke(window, "styleMask").intValue();
if ((styleMask & (1 << 14)) != 0) { // NSWindowStyleMaskFullScreen
visibleAndHeights[i] = 0;
}
else {
visibleAndHeights[i] = (int)Foundation.invoke_fpret(window, "getTabBarVisibleAndHeight");
if (visibleAndHeights[i] == -1) {
visibleAndHeights[i] = DEFAULT_WIN_TAB_HEIGHT();
}
}
}
}
ApplicationManager.getApplication().invokeLater(() -> {
for (int i = 0; i < frames.length; i++) {
updateTabBar(frames[i], visibleAndHeights[i]);
}
});
});
}
else {
if (newFrame != null) {
Foundation.executeOnMainThread(true, false, () -> {
initTabObserver(MacUtil.getWindowFromJavaWindow(newFrame));
});
}
if (newIndex != -1) {
visibleAndHeights[newIndex] = 0;
}
for (int i = 0; i < frames.length; i++) {
updateTabBar(frames[i], visibleAndHeights[i]);
}
}
});
}
private void updateTabBarOnExitFromFullScreen() {
if (!JdkEx.isTabbingModeAvailable()) {
return;
}
Foundation.executeOnMainThread(true, false, () -> {
ID window = MacUtil.getWindowFromJavaWindow(myFrame);
int visibleAndHeight = (int)Foundation.invoke_fpret(window, "getTabBarVisibleAndHeight");
ApplicationManager.getApplication()
.invokeLater(() -> updateTabBar(myFrame, visibleAndHeight == -1 ? DEFAULT_WIN_TAB_HEIGHT() : visibleAndHeight));
});
}
private static void updateTabBar(@NotNull Object frameObject, int height) {
JFrame frame = null;
if (frameObject instanceof JFrame) {
frame = (JFrame)frameObject;
}
else if (frameObject instanceof ProjectFrameHelper) {
frame = ((ProjectFrameHelper)frameObject).getFrame();
}
if (frame == null) {
return;
}
JComponent filler = (JComponent)frame.getRootPane().getClientProperty(WIN_TAB_FILLER);
if (filler == null) {
return;
}
boolean visible = height > 0;
boolean oldVisible = filler.isVisible();
filler.setVisible(visible);
filler.setPreferredSize(new JBDimension(-1, height));
Container parent = filler.getParent();
if (parent == null || oldVisible == visible) {
return;
}
parent.doLayout();
parent.revalidate();
parent.repaint();
}
@Override
public boolean isInFullScreen() {
return myInFullScreen;
}
@Override
public @NotNull Promise<Boolean> toggleFullScreen(boolean state) {
if (myInFullScreen == state) {
return Promises.resolvedPromise(state);
}
AsyncPromise<Boolean> promise = new AsyncPromise<>();
myDispatcher.addListener(new FSAdapter() {
@Override
public void windowExitedFullScreen(AppEvent.FullScreenEvent event) {
promise.setResult(false);
myDispatcher.removeListener(this);
}
@Override
public void windowEnteredFullScreen(AppEvent.FullScreenEvent event) {
promise.setResult(true);
myDispatcher.removeListener(this);
}
});
// temporary solution for the old runtime
if (toggleFullScreenMethod != null) {
myFullScreenQueue.runOrEnqueue(() -> invokeAppMethod(toggleFullScreenMethod));
}
else if (state) {
myFullScreenQueue.runOrEnqueue(() -> invokeAppMethod(enterFullScreenMethod));
}
else {
myFullScreenQueue.runOrEnqueue(() -> invokeAppMethod(leaveFullScreenMethod));
}
return promise;
}
private void invokeAppMethod(Method method) {
try {
method.invoke(Application.getApplication(), myFrame);
}
catch (Exception e) {
Logger.getInstance(MacMainFrameDecorator.class).warn(e);
}
}
}
|
platform/platform-impl/src/com/intellij/ui/mac/MacMainFrameDecorator.java
|
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ui.mac;
import com.apple.eawt.*;
import com.intellij.ide.ActiveWindowsWatcher;
import com.intellij.jdkEx.JdkEx;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.openapi.wm.impl.IdeFrameDecorator;
import com.intellij.openapi.wm.impl.IdeRootPane;
import com.intellij.openapi.wm.impl.ProjectFrameHelper;
import com.intellij.ui.components.panels.NonOpaquePanel;
import com.intellij.ui.mac.foundation.Foundation;
import com.intellij.ui.mac.foundation.ID;
import com.intellij.ui.mac.foundation.MacUtil;
import com.intellij.util.EventDispatcher;
import com.intellij.util.ui.JBDimension;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.concurrency.AsyncPromise;
import org.jetbrains.concurrency.Promise;
import org.jetbrains.concurrency.Promises;
import javax.swing.*;
import java.awt.*;
import java.lang.reflect.Method;
import java.util.EventListener;
import java.util.LinkedList;
import java.util.Queue;
public final class MacMainFrameDecorator extends IdeFrameDecorator {
private interface FSListener extends FullScreenListener, EventListener {}
private static class FSAdapter extends FullScreenAdapter implements FSListener {}
private static class FullScreenQueue {
private final Queue<Runnable> myQueue = new LinkedList<>();
private boolean myWaitingForAppKit = false;
synchronized void runOrEnqueue(Runnable runnable) {
if (myWaitingForAppKit) {
myQueue.add(runnable);
}
else {
ApplicationManager.getApplication().invokeLater(runnable);
myWaitingForAppKit = true;
}
}
synchronized void runFromQueue() {
if (!myQueue.isEmpty()) {
myQueue.remove().run();
myWaitingForAppKit = true;
}
else {
myWaitingForAppKit = false;
}
}
}
private void enterFullScreen() {
myInFullScreen = true;
storeFullScreenStateIfNeeded();
myFullScreenQueue.runFromQueue();
updateTabBar(myFrame, 0);
}
private void exitFullScreen() {
myInFullScreen = false;
storeFullScreenStateIfNeeded();
JRootPane rootPane = myFrame.getRootPane();
if (rootPane != null) rootPane.putClientProperty(FULL_SCREEN, null);
myFullScreenQueue.runFromQueue();
updateTabBarOnExitFromFullScreen();
}
private void storeFullScreenStateIfNeeded() {
// todo should we really check that frame has not null project as it was implemented previously?
myFrame.doLayout();
}
public static final String FULL_SCREEN = "Idea.Is.In.FullScreen.Mode.Now";
private static Method toggleFullScreenMethod;
private static Method enterFullScreenMethod;
private static Method leaveFullScreenMethod;
static {
try {
//noinspection SpellCheckingInspection
Class.forName("com.apple.eawt.FullScreenUtilities");
try {
//noinspection JavaReflectionMemberAccess
enterFullScreenMethod = Application.class.getMethod("requestEnterFullScreen", Window.class);
//noinspection JavaReflectionMemberAccess
leaveFullScreenMethod = Application.class.getMethod("requestLeaveFullScreen", Window.class);
}
catch (NoSuchMethodException e) {
// temporary solution for the old runtime
//noinspection JavaReflectionMemberAccess
toggleFullScreenMethod = Application.class.getMethod("requestToggleFullScreen", Window.class);
}
}
catch (Exception e) {
Logger.getInstance(MacMainFrameDecorator.class).debug(e);
}
}
private final FullScreenQueue myFullScreenQueue = new FullScreenQueue();
private final EventDispatcher<FSListener> myDispatcher = EventDispatcher.create(FSListener.class);
private boolean myInFullScreen;
private boolean myShowFrame;
private boolean myInitFrame;
private static final String WIN_TAB_FILLER = "WIN_TAB_FILLER_KEY";
private static int DEFAULT_WIN_TAB_HEIGHT() {
return Registry.intValue("ide.mac.bigsur.window.with.tabs.height", 28);
}
@NotNull
public static JComponent _wrapRootPaneNorthSide(@NotNull JRootPane rootPane, @NotNull JComponent northComponent) {
if (!JdkEx.isTabbingModeAvailable()) {
return northComponent;
}
NonOpaquePanel panel = new NonOpaquePanel(new BorderLayout());
NonOpaquePanel filler = new NonOpaquePanel();
filler.setVisible(false);
panel.add(filler, BorderLayout.NORTH);
panel.add(northComponent);
rootPane.putClientProperty(WIN_TAB_FILLER, filler);
return panel;
}
public MacMainFrameDecorator(@NotNull JFrame frame, @NotNull Disposable parentDisposable) {
super(frame);
if (JdkEx.setTabbingMode(frame, () -> updateTabBars(null))) {
Disposer.register(parentDisposable, new Disposable() { // don't convert to lambda
@Override
public void dispose() {
updateTabBars(null);
}
});
}
if (leaveFullScreenMethod != null || toggleFullScreenMethod != null) {
FullScreenUtilities.setWindowCanFullScreen(frame, true);
// Native full screen listener can be set only once
FullScreenUtilities.addFullScreenListenerTo(frame, new FullScreenListener() {
@Override
public void windowEnteringFullScreen(AppEvent.FullScreenEvent event) {
myDispatcher.getMulticaster().windowEnteringFullScreen(event);
}
@Override
public void windowEnteredFullScreen(AppEvent.FullScreenEvent event) {
myDispatcher.getMulticaster().windowEnteredFullScreen(event);
}
@Override
public void windowExitingFullScreen(AppEvent.FullScreenEvent event) {
myDispatcher.getMulticaster().windowExitingFullScreen(event);
}
@Override
public void windowExitedFullScreen(AppEvent.FullScreenEvent event) {
myDispatcher.getMulticaster().windowExitedFullScreen(event);
}
});
myDispatcher.addListener(new FSAdapter() {
@Override
public void windowEnteringFullScreen(AppEvent.FullScreenEvent event) {
JRootPane rootPane = myFrame.getRootPane();
if (rootPane != null && rootPane.getBorder() != null && Registry.is("ide.mac.transparentTitleBarAppearance")) {
rootPane.setBorder(null);
}
updateTabBar(myFrame, 0);
}
@Override
public void windowEnteredFullScreen(AppEvent.FullScreenEvent event) {
// We can get the notification when the frame has been disposed
JRootPane rootPane = myFrame.getRootPane();
if (rootPane != null) rootPane.putClientProperty(FULL_SCREEN, Boolean.TRUE);
enterFullScreen();
myFrame.validate();
}
@Override
public void windowExitedFullScreen(AppEvent.FullScreenEvent event) {
// We can get the notification when the frame has been disposed
JRootPane rootPane = myFrame.getRootPane();
if (rootPane instanceof IdeRootPane && Registry.is("ide.mac.transparentTitleBarAppearance")) {
IdeRootPane ideRootPane = (IdeRootPane)rootPane;
UIUtil.setCustomTitleBar(myFrame, ideRootPane, runnable -> {
Disposer.register(parentDisposable, () -> runnable.run());
});
}
exitFullScreen();
ActiveWindowsWatcher.addActiveWindow(myFrame);
myFrame.validate();
}
});
}
}
@Override
public void frameShow() {
myShowFrame = true;
if (myInitFrame) {
initUpdateTabBars();
}
}
@Override
public void setProject() {
myInitFrame = true;
if (myShowFrame) {
initUpdateTabBars();
}
}
private void initUpdateTabBars() {
// update tab logic only after call [NSWindow makeKeyAndOrderFront] and after add frame to window manager
ApplicationManager.getApplication().invokeLater(() -> updateTabBars(myFrame));
}
private static void updateTabBars(@Nullable JFrame newFrame) {
if (!JdkEx.isTabbingModeAvailable()) {
return;
}
IdeFrame[] frames = WindowManager.getInstance().getAllProjectFrames();
if (frames.length < 2) {
if (frames.length == 1) {
updateTabBar(frames[0], 0);
}
return;
}
ApplicationManager.getApplication().invokeLater(() -> {
Integer[] visibleAndHeights = new Integer[frames.length];
boolean callInAppkit = false;
int newIndex = -1;
for (int i = 0; i < frames.length; i++) {
ProjectFrameHelper helper = (ProjectFrameHelper)frames[i];
if (newFrame == helper.getFrame()) {
newIndex = i;
}
if (helper.isInFullScreen()) {
visibleAndHeights[i] = 0;
}
else {
callInAppkit = true;
}
}
if (callInAppkit) {
// call only for shown window and only in Appkit
Foundation.executeOnMainThread(true, false, () -> {
for (int i = 0; i < frames.length; i++) {
if (visibleAndHeights[i] == null) {
ID window = MacUtil.getWindowFromJavaWindow(((ProjectFrameHelper)frames[i]).getFrame());
int styleMask = Foundation.invoke(window, "styleMask").intValue();
if ((styleMask & (1 << 14)) != 0) { // NSWindowStyleMaskFullScreen
visibleAndHeights[i] = 0;
}
else {
visibleAndHeights[i] = (int)Foundation.invoke_fpret(window, "getTabBarVisibleAndHeight");
if (visibleAndHeights[i] == -1) {
visibleAndHeights[i] = DEFAULT_WIN_TAB_HEIGHT();
}
}
}
}
ApplicationManager.getApplication().invokeLater(() -> {
for (int i = 0; i < frames.length; i++) {
updateTabBar(frames[i], visibleAndHeights[i]);
}
});
});
}
else {
if (newIndex != -1) {
visibleAndHeights[newIndex] = 0;
}
for (int i = 0; i < frames.length; i++) {
updateTabBar(frames[i], visibleAndHeights[i]);
}
}
});
}
private void updateTabBarOnExitFromFullScreen() {
if (!JdkEx.isTabbingModeAvailable()) {
return;
}
Foundation.executeOnMainThread(true, false, () -> {
ID window = MacUtil.getWindowFromJavaWindow(myFrame);
int visibleAndHeight = (int)Foundation.invoke_fpret(window, "getTabBarVisibleAndHeight");
ApplicationManager.getApplication()
.invokeLater(() -> updateTabBar(myFrame, visibleAndHeight == -1 ? DEFAULT_WIN_TAB_HEIGHT() : visibleAndHeight));
});
}
private static void updateTabBar(@NotNull Object frameObject, int height) {
JFrame frame = null;
if (frameObject instanceof JFrame) {
frame = (JFrame)frameObject;
}
else if (frameObject instanceof ProjectFrameHelper) {
frame = ((ProjectFrameHelper)frameObject).getFrame();
}
if (frame == null) {
return;
}
JComponent filler = (JComponent)frame.getRootPane().getClientProperty(WIN_TAB_FILLER);
if (filler == null) {
return;
}
boolean visible = height > 0;
boolean oldVisible = filler.isVisible();
filler.setVisible(visible);
filler.setPreferredSize(new JBDimension(-1, height));
Container parent = filler.getParent();
if (parent == null || oldVisible == visible) {
return;
}
parent.doLayout();
parent.revalidate();
parent.repaint();
}
@Override
public boolean isInFullScreen() {
return myInFullScreen;
}
@Override
public @NotNull Promise<Boolean> toggleFullScreen(boolean state) {
if (myInFullScreen == state) {
return Promises.resolvedPromise(state);
}
AsyncPromise<Boolean> promise = new AsyncPromise<>();
myDispatcher.addListener(new FSAdapter() {
@Override
public void windowExitedFullScreen(AppEvent.FullScreenEvent event) {
promise.setResult(false);
myDispatcher.removeListener(this);
}
@Override
public void windowEnteredFullScreen(AppEvent.FullScreenEvent event) {
promise.setResult(true);
myDispatcher.removeListener(this);
}
});
// temporary solution for the old runtime
if (toggleFullScreenMethod != null) {
myFullScreenQueue.runOrEnqueue(() -> invokeAppMethod(toggleFullScreenMethod));
}
else if (state) {
myFullScreenQueue.runOrEnqueue(() -> invokeAppMethod(enterFullScreenMethod));
}
else {
myFullScreenQueue.runOrEnqueue(() -> invokeAppMethod(leaveFullScreenMethod));
}
return promise;
}
private void invokeAppMethod(Method method) {
try {
method.invoke(Application.getApplication(), myFrame);
}
catch (Exception e) {
Logger.getInstance(MacMainFrameDecorator.class).warn(e);
}
}
}
|
JBR-2893 Big Sur: Add support of opening project as tabs
IDEA-257810 Big Sur: Empty grey line remains a project window after drag-and-drop a project tab out of the window
GitOrigin-RevId: 90da6f34915784eb76df8f9bd727951fdc6757cd
|
platform/platform-impl/src/com/intellij/ui/mac/MacMainFrameDecorator.java
|
JBR-2893 Big Sur: Add support of opening project as tabs
|
|
Java
|
apache-2.0
|
e57aa921f5baca5424e6d9201f51f46bf24823c1
| 0
|
gabemontero/jenkins-sync-plugin,gabemontero/jenkins-sync-plugin,gabemontero/jenkins-sync-plugin,gabemontero/jenkins-sync-plugin
|
/**
* Copyright (C) 2016 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.fabric8.jenkins.openshiftsync;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import io.fabric8.kubernetes.api.model.HasMetadata;
import io.fabric8.kubernetes.api.model.ObjectMeta;
import io.fabric8.kubernetes.api.model.ReplicationController;
import io.fabric8.kubernetes.api.model.ReplicationControllerStatus;
import io.fabric8.kubernetes.api.model.Service;
import io.fabric8.kubernetes.api.model.ServiceSpec;
import io.fabric8.kubernetes.client.Config;
import io.fabric8.openshift.api.model.Build;
import io.fabric8.openshift.api.model.BuildConfig;
import io.fabric8.openshift.api.model.BuildConfigSpec;
import io.fabric8.openshift.api.model.BuildSource;
import io.fabric8.openshift.api.model.BuildStatus;
import io.fabric8.openshift.api.model.GitBuildSource;
import io.fabric8.openshift.api.model.Route;
import io.fabric8.openshift.api.model.RouteList;
import io.fabric8.openshift.api.model.RouteSpec;
import io.fabric8.openshift.client.DefaultOpenShiftClient;
import io.fabric8.openshift.client.OpenShiftClient;
import io.fabric8.openshift.client.OpenShiftConfigBuilder;
import org.apache.commons.lang.StringUtils;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import static io.fabric8.jenkins.openshiftsync.BuildPhases.NEW;
import static io.fabric8.jenkins.openshiftsync.BuildPhases.PENDING;
import static io.fabric8.jenkins.openshiftsync.BuildPhases.RUNNING;
import static io.fabric8.jenkins.openshiftsync.Constants.OPENSHIFT_DEFAULT_NAMESPACE;
import static java.util.logging.Level.INFO;
/**
*/
public class OpenShiftUtils {
private final static Logger logger = Logger.getLogger(OpenShiftUtils.class.getName());
private static OpenShiftClient openShiftClient;
private static final DateTimeFormatter dateFormatter = ISODateTimeFormat.dateTimeNoMillis();
/**
 * Initializes the shared {@link OpenShiftClient}, optionally pointing it at an
 * explicit API server.
 *
 * @param serverUrl the optional URL of where the OpenShift cluster API server is
 *                  running; when null or empty the client-side defaults are used
 */
public synchronized static void initializeOpenShiftClient(String serverUrl) {
    final OpenShiftConfigBuilder builder = new OpenShiftConfigBuilder();
    final boolean hasExplicitServer = serverUrl != null && !serverUrl.isEmpty();
    if (hasExplicitServer) {
        builder.withMasterUrl(serverUrl);
    }
    openShiftClient = new DefaultOpenShiftClient(builder.build());
}
/**
 * Returns the shared {@link OpenShiftClient}, or {@code null} if
 * {@link #initializeOpenShiftClient(String)} has not been called yet or the
 * client has been shut down.
 */
public synchronized static OpenShiftClient getOpenShiftClient() {
    return openShiftClient;
}
/**
 * Closes the shared {@link OpenShiftClient} (if any) and clears the reference
 * so a later {@link #initializeOpenShiftClient(String)} can recreate it.
 */
public synchronized static void shutdownOpenShiftClient() {
    if (openShiftClient == null) {
        return; // nothing to release
    }
    openShiftClient.close();
    openShiftClient = null;
}
/**
 * Checks if a {@link BuildConfig} relates to a Jenkins build.
 *
 * A BuildConfig qualifies either by using the JenkinsPipeline build strategy
 * or by carrying the fabric8 job-link annotation.
 *
 * @param bc the BuildConfig
 * @return true if this is an OpenShift BuildConfig which should be mirrored to
 *         a Jenkins Job
 */
public static boolean isJenkinsBuildConfig(BuildConfig bc) {
    boolean usesPipelineStrategy =
        bc.getSpec().getStrategy().getJenkinsPipelineStrategy() != null
            && BuildConfigToJobMapper.JENKINS_PIPELINE_BUILD_STRATEGY.equalsIgnoreCase(bc.getSpec().getStrategy().getType());
    if (usesPipelineStrategy) {
        return true;
    }
    ObjectMeta metadata = bc.getMetadata();
    if (metadata == null || metadata.getAnnotations() == null) {
        return false;
    }
    return metadata.getAnnotations().get("fabric8.link.jenkins.job/label") != null;
}
/**
 * Finds the Jenkins job name for the given {@link BuildConfig}.
 *
 * @param bc the BuildConfig
 * @return the jenkins job name for the given BuildConfig
 */
public static String jenkinsJobName(BuildConfig bc) {
    ObjectMeta meta = bc.getMetadata();
    return jenkinsJobName(meta.getNamespace(), meta.getName());
}
/**
 * Creates the Jenkins Job name for the given buildConfigName.
 *
 * The mapping is simply {@code "<namespace>-<buildConfigName>"}.
 *
 * @param namespace the namespace of the build
 * @param buildConfigName the name of the {@link BuildConfig} in the namespace
 * @return the jenkins job name for the given namespace and name
 */
public static String jenkinsJobName(String namespace, String buildConfigName) {
    return namespace + "-" + buildConfigName;
}
/**
 * Finds the Jenkins job display name for the given {@link BuildConfig}.
 *
 * @param bc the BuildConfig
 * @return the jenkins job display name for the given BuildConfig
 */
public static String jenkinsJobDisplayName(BuildConfig bc) {
    ObjectMeta meta = bc.getMetadata();
    return jenkinsJobDisplayName(meta.getNamespace(), meta.getName());
}
/**
 * Creates the Jenkins Job display name for the given buildConfigName.
 *
 * The mapping is {@code "<namespace>/<buildConfigName>"}.
 *
 * @param namespace the namespace of the build
 * @param buildConfigName the name of the {@link BuildConfig} in the namespace
 * @return the jenkins job display name for the given namespace and name
 */
public static String jenkinsJobDisplayName(String namespace, String buildConfigName) {
    return namespace + "/" + buildConfigName;
}
/**
 * Gets the current namespace running Jenkins inside or returns a reasonable default.
 *
 * @param configuredNamespace the optional configured namespace
 * @param client the OpenShift client
 * @return the configured value when present, else the client's namespace,
 *         else {@code OPENSHIFT_DEFAULT_NAMESPACE}
 */
public static String getNamespaceOrUseDefault(String configuredNamespace, OpenShiftClient client) {
    if (!StringUtils.isBlank(configuredNamespace)) {
        return configuredNamespace;
    }
    String clientNamespace = client.getNamespace();
    return StringUtils.isBlank(clientNamespace) ? OPENSHIFT_DEFAULT_NAMESPACE : clientNamespace;
}
/**
 * Returns the public URL of the given service.
 *
 * Resolution order: (1) a Route whose "to" targets the service — https when a
 * TLS block is present, else http; (2) the service's portalIP prefixed with
 * {@code defaultProtocolText}; (3) the plain service DNS name. Lookup failures
 * are logged at WARNING and the next fallback is tried.
 *
 * @param openShiftClient the OpenShiftClient to use
 * @param defaultProtocolText the protocol text part of a URL such as <code>http://</code>
 * @param namespace the Kubernetes namespace
 * @param serviceName the service name
 * @return the external URL of the service
 */
public static String getExternalServiceUrl(OpenShiftClient openShiftClient, String defaultProtocolText, String namespace, String serviceName) {
    try {
        RouteList routes = openShiftClient.routes().inNamespace(namespace).list();
        for (Route route : routes.getItems()) {
            RouteSpec spec = route.getSpec();
            if (spec != null && spec.getTo() != null && "Service".equalsIgnoreCase(spec.getTo().getKind()) && serviceName.equalsIgnoreCase(spec.getTo().getName())) {
                String host = spec.getHost();
                if (host != null && host.length() > 0) {
                    // A TLS config on the route means the edge serves HTTPS.
                    if (spec.getTls() != null) {
                        return "https://" + host;
                    }
                    return "http://" + host;
                }
            }
        }
    } catch (Exception e) {
        logger.log(Level.WARNING, "Could not find Route for service " + namespace + "/" + serviceName + ". " + e, e);
    }
    // lets try the portalIP instead
    try {
        Service service = openShiftClient.services().inNamespace(namespace).withName(serviceName).get();
        if (service != null) {
            ServiceSpec spec = service.getSpec();
            if (spec != null) {
                String host = spec.getPortalIP();
                if (host != null && host.length() > 0) {
                    return defaultProtocolText + host;
                }
            }
        }
    } catch (Exception e) {
        logger.log(Level.WARNING, "Could not find Route for service " + namespace + "/" + serviceName + ". " + e, e);
    }
    // lets default to the service DNS name
    return defaultProtocolText + serviceName;
}
/**
 * Calculates the external URL to access Jenkins.
 *
 * Delegates to {@link #getExternalServiceUrl} for the well-known "jenkins" service.
 *
 * @param namespace the namespace Jenkins is running inside
 * @param openShiftClient the OpenShift client
 * @return the external URL to access Jenkins
 */
public static String getJenkinsURL(OpenShiftClient openShiftClient, String namespace) {
    return getExternalServiceUrl(openShiftClient, "http://", namespace, "jenkins");
}
/**
 * Lazily creates the BuildConfig's source/git structures if need be, then
 * points the git source at the given URL and ref.
 *
 * @param buildConfig the BuildConfig to update
 * @param gitUrl the URL to the git repo
 * @param ref the git ref to build
 */
public static void updateGitSourceUrl(BuildConfig buildConfig, String gitUrl, String ref) {
    BuildConfigSpec cfgSpec = buildConfig.getSpec();
    if (cfgSpec == null) {
        cfgSpec = new BuildConfigSpec();
        buildConfig.setSpec(cfgSpec);
    }
    BuildSource src = cfgSpec.getSource();
    if (src == null) {
        src = new BuildSource();
        cfgSpec.setSource(src);
    }
    src.setType("Git");
    GitBuildSource git = src.getGit();
    if (git == null) {
        git = new GitBuildSource();
        src.setGit(git);
    }
    git.setUri(gitUrl);
    git.setRef(ref);
}
/**
 * Transitions an OpenShift {@link Build} to the given phase via the builds API.
 *
 * @param build the build to update
 * @param phase the target build phase
 */
public static void updateOpenShiftBuildPhase(Build build, String phase) {
    // Bug fix: java.util.logging substitutes indexed MessageFormat placeholders
    // ({0}, {1}, ...), not SLF4J-style "{}"; the previous message logged the
    // braces literally and never showed the parameters.
    logger.log(INFO, "setting build to {0} in namespace {1}/{2}",
        new Object[]{phase, build.getMetadata().getNamespace(), build.getMetadata().getName()});
    getOpenShiftClient().builds().inNamespace(build.getMetadata().getNamespace()).withName(build.getMetadata().getName())
        .edit()
        .editStatus().withPhase(phase).endStatus()
        .done();
}
/**
 * Maps a Jenkins Job name to an OpenShift BuildConfig name.
 *
 * @return the namespaced name for the BuildConfig
 * @param jobName the job to associate to a BuildConfig name
 * @param namespace the default namespace that Jenkins is running inside
 */
public static NamespaceName buildConfigNameFromJenkinsJobName(String jobName, String namespace) {
    // TODO lets detect the namespace separator in the jobName for cases where a jenkins is used for
    // BuildConfigs in multiple namespaces?
    return new NamespaceName(namespace, jobName);
}
/**
 * Parses the numeric resource version of the given resource's metadata;
 * unparsable values yield 0 (see {@link #parseResourceVersion(String)}).
 */
public static long parseResourceVersion(HasMetadata obj) {
    return parseResourceVersion(obj.getMetadata().getResourceVersion());
}
/**
 * Parses a resource version string into a long, treating any unparsable
 * value (including null) as 0.
 */
public static long parseResourceVersion(String resourceVersion) {
    try {
        return Long.parseLong(resourceVersion);
    } catch (NumberFormatException ignored) {
        // Resource versions are opaque strings; non-numeric ones count as 0.
        return 0;
    }
}
/**
 * Formats epoch milliseconds as an ISO-8601 date-time string without millis.
 */
public static String formatTimestamp(long timestamp) {
    return dateFormatter.print(new DateTime(timestamp));
}
/**
 * Parses an ISO-8601 date-time string (no millis) into epoch milliseconds.
 */
public static long parseTimestamp(String timestamp) {
    return dateFormatter.parseMillis(timestamp);
}
/**
 * Compares two resources while ignoring runtime state by hashing their
 * state-stripped YAML dumps (see {@link #dumpWithoutRuntimeStateAsYaml}).
 *
 * MD5 is used only as a fast fingerprint here, not for any security purpose.
 *
 * @param oldObj the first resource
 * @param newObj the second resource
 * @return true when the state-free representations are identical
 */
public static boolean isResourceWithoutStateEqual(HasMetadata oldObj, HasMetadata newObj) {
    try {
        byte[] oldDigest = MessageDigest.getInstance("MD5").digest(dumpWithoutRuntimeStateAsYaml(oldObj).getBytes(StandardCharsets.UTF_8));
        byte[] newDigest = MessageDigest.getInstance("MD5").digest(dumpWithoutRuntimeStateAsYaml(newObj).getBytes(StandardCharsets.UTF_8));
        return Arrays.equals(oldDigest, newDigest);
    } catch (NoSuchAlgorithmException | JsonProcessingException e) {
        // MD5 is guaranteed by the JDK; a serialization failure indicates a bug.
        throw new RuntimeException(e);
    }
}
/**
 * Serializes a resource to YAML with runtime state suppressed via the Jackson
 * mix-ins declared in this class.
 *
 * NOTE(review): a fresh ObjectMapper is built per call; if this becomes hot it
 * could be cached as a static final — confirm thread-safety expectations first.
 *
 * @param obj the resource to dump
 * @return the state-free YAML representation
 * @throws JsonProcessingException when serialization fails
 */
public static String dumpWithoutRuntimeStateAsYaml(HasMetadata obj) throws JsonProcessingException {
    ObjectMapper statelessMapper = new ObjectMapper(new YAMLFactory());
    statelessMapper.addMixInAnnotations(ObjectMeta.class, ObjectMetaMixIn.class);
    statelessMapper.addMixInAnnotations(ReplicationController.class, StatelessReplicationControllerMixIn.class);
    return statelessMapper.writeValueAsString(obj);
}
/**
 * Returns true when the build is in a phase that can still be cancelled
 * (New, Pending or Running).
 *
 * @param buildStatus the status to inspect
 */
public static boolean isCancellable(BuildStatus buildStatus) {
    String phase = buildStatus.getPhase();
    // Constant-first equals: an unset (null) phase now yields false instead of an NPE.
    return NEW.equals(phase) || PENDING.equals(phase) || RUNNING.equals(phase);
}
/**
 * Returns true when the build is in the New phase.
 */
public static boolean isNew(BuildStatus buildStatus) {
    // Constant-first equals avoids an NPE when the phase is not yet set.
    return NEW.equals(buildStatus.getPhase());
}
/**
 * Returns true when the build status carries an explicit cancelled flag;
 * Boolean.TRUE.equals is null-safe against an unset flag.
 */
public static boolean isCancelled(BuildStatus status) {
    return Boolean.TRUE.equals(status.getCancelled());
}
abstract class StatelessReplicationControllerMixIn extends ReplicationController {
@JsonIgnore
private ReplicationControllerStatus status;
StatelessReplicationControllerMixIn() {
}
@JsonIgnore
public abstract ReplicationControllerStatus getStatus();
}
abstract class ObjectMetaMixIn extends ObjectMeta {
@JsonIgnore
private String creationTimestamp;
@JsonIgnore
private String deletionTimestamp;
@JsonIgnore
private Long generation;
@JsonIgnore
private String resourceVersion;
@JsonIgnore
private String selfLink;
@JsonIgnore
private String uid;
ObjectMetaMixIn() {
}
@JsonIgnore
public abstract String getCreationTimestamp();
@JsonIgnore
public abstract String getDeletionTimestamp();
@JsonIgnore
public abstract Long getGeneration();
@JsonIgnore
public abstract String getResourceVersion();
@JsonIgnore
public abstract String getSelfLink();
@JsonIgnore
public abstract String getUid();
}
}
|
src/main/java/io/fabric8/jenkins/openshiftsync/OpenShiftUtils.java
|
/**
* Copyright (C) 2016 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.fabric8.jenkins.openshiftsync;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import io.fabric8.kubernetes.api.model.HasMetadata;
import io.fabric8.kubernetes.api.model.ObjectMeta;
import io.fabric8.kubernetes.api.model.ReplicationController;
import io.fabric8.kubernetes.api.model.ReplicationControllerStatus;
import io.fabric8.kubernetes.api.model.Service;
import io.fabric8.kubernetes.api.model.ServiceSpec;
import io.fabric8.kubernetes.client.Config;
import io.fabric8.openshift.api.model.Build;
import io.fabric8.openshift.api.model.BuildConfig;
import io.fabric8.openshift.api.model.BuildConfigSpec;
import io.fabric8.openshift.api.model.BuildSource;
import io.fabric8.openshift.api.model.BuildStatus;
import io.fabric8.openshift.api.model.GitBuildSource;
import io.fabric8.openshift.api.model.Route;
import io.fabric8.openshift.api.model.RouteList;
import io.fabric8.openshift.api.model.RouteSpec;
import io.fabric8.openshift.client.DefaultOpenShiftClient;
import io.fabric8.openshift.client.OpenShiftClient;
import io.fabric8.openshift.client.OpenShiftConfigBuilder;
import org.apache.commons.lang.StringUtils;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import static io.fabric8.jenkins.openshiftsync.BuildPhases.NEW;
import static io.fabric8.jenkins.openshiftsync.BuildPhases.PENDING;
import static io.fabric8.jenkins.openshiftsync.BuildPhases.RUNNING;
import static io.fabric8.jenkins.openshiftsync.Constants.OPENSHIFT_DEFAULT_NAMESPACE;
/**
*/
public class OpenShiftUtils {
private final static Logger logger = Logger.getLogger(OpenShiftUtils.class.getName());
private static OpenShiftClient openShiftClient;
private static final DateTimeFormatter dateFormatter = ISODateTimeFormat.dateTimeNoMillis();
/**
* Initializes an {@link OpenShiftClient}
*
* @param serverUrl the optional URL of where the OpenShift cluster API server is running
*/
public synchronized static void initializeOpenShiftClient(String serverUrl) {
OpenShiftConfigBuilder configBuilder = new OpenShiftConfigBuilder();
if (serverUrl != null && !serverUrl.isEmpty()) {
configBuilder.withMasterUrl(serverUrl);
}
Config config = configBuilder.build();
openShiftClient = new DefaultOpenShiftClient(config);
}
public synchronized static OpenShiftClient getOpenShiftClient() {
return openShiftClient;
}
public synchronized static void shutdownOpenShiftClient() {
if (openShiftClient != null) {
openShiftClient.close();
openShiftClient = null;
}
}
/**
* Checks if a {@link BuildConfig} relates to a Jenkins build
*
* @param bc the BuildConfig
* @return true if this is an OpenShift BuildConfig which should be mirrored to
* a Jenkins Job
*/
public static boolean isJenkinsBuildConfig(BuildConfig bc) {
if (BuildConfigToJobMapper.JENKINS_PIPELINE_BUILD_STRATEGY.equalsIgnoreCase(bc.getSpec().getStrategy().getType()) &&
bc.getSpec().getStrategy().getJenkinsPipelineStrategy() != null) {
return true;
}
ObjectMeta metadata = bc.getMetadata();
if (metadata != null) {
Map<String, String> annotations = metadata.getAnnotations();
if (annotations != null) {
if (annotations.get("fabric8.link.jenkins.job/label") != null) {
return true;
}
}
}
return false;
}
/**
* Finds the Jenkins job name for the given {@link BuildConfig}.
*
* @param bc the BuildConfig
* @return the jenkins job name for the given BuildConfig
*/
public static String jenkinsJobName(BuildConfig bc) {
String namespace = bc.getMetadata().getNamespace();
String name = bc.getMetadata().getName();
return jenkinsJobName(namespace, name);
}
/**
* Creates the Jenkins Job name for the given buildConfigName
*
* @param namespace the namespace of the build
* @param buildConfigName the name of the {@link BuildConfig} in the namespace
* @return the jenkins job name for the given namespace and name
*/
public static String jenkinsJobName(String namespace, String buildConfigName) {
return namespace + "-" + buildConfigName;
}
/**
* Finds the Jenkins job display name for the given {@link BuildConfig}.
*
* @param bc the BuildConfig
* @return the jenkins job display name for the given BuildConfig
*/
public static String jenkinsJobDisplayName(BuildConfig bc) {
String namespace = bc.getMetadata().getNamespace();
String name = bc.getMetadata().getName();
return jenkinsJobDisplayName(namespace, name);
}
/**
* Creates the Jenkins Job display name for the given buildConfigName
*
* @param namespace the namespace of the build
* @param buildConfigName the name of the {@link BuildConfig} in the namespace
* @return the jenkins job display name for the given namespace and name
*/
public static String jenkinsJobDisplayName(String namespace, String buildConfigName) {
return namespace + "/" + buildConfigName;
}
/**
* Gets the current namespace running Jenkins inside or returns a reasonable default
*
* @param configuredNamespace the optional configured namespace
* @param client the OpenShift client
* @return the default namespace using either the configuration value, the default namespace on the client or "default"
*/
public static String getNamespaceOrUseDefault(String configuredNamespace, OpenShiftClient client) {
String namespace = configuredNamespace;
if (StringUtils.isBlank(namespace)) {
namespace = client.getNamespace();
if (StringUtils.isBlank(namespace)) {
namespace = OPENSHIFT_DEFAULT_NAMESPACE;
}
}
return namespace;
}
/**
* Returns the public URL of the given service
*
* @param openShiftClient the OpenShiftClient to use
* @param defaultProtocolText the protocol text part of a URL such as <code>http://</code>
* @param namespace the Kubernetes namespace
* @param serviceName the service name
* @return the external URL of the service
*/
public static String getExternalServiceUrl(OpenShiftClient openShiftClient, String defaultProtocolText, String namespace, String serviceName) {
try {
RouteList routes = openShiftClient.routes().inNamespace(namespace).list();
for (Route route : routes.getItems()) {
RouteSpec spec = route.getSpec();
if (spec != null && spec.getTo() != null && "Service".equalsIgnoreCase(spec.getTo().getKind()) && serviceName.equalsIgnoreCase(spec.getTo().getName())) {
String host = spec.getHost();
if (host != null && host.length() > 0) {
if (spec.getTls() != null) {
return "https://" + host;
}
return "http://" + host;
}
}
}
} catch (Exception e) {
logger.log(Level.WARNING, "Could not find Route for namespace " + namespace + " service " + serviceName + ". " + e, e);
}
// lets try the portalIP instead
try {
Service service = openShiftClient.services().inNamespace(namespace).withName(serviceName).get();
if (service != null) {
ServiceSpec spec = service.getSpec();
if (spec != null) {
String host = spec.getPortalIP();
if (host != null && host.length() > 0) {
return defaultProtocolText + host;
}
}
}
} catch (Exception e) {
logger.log(Level.WARNING, "Could not find Route for namespace " + namespace + " service " + serviceName + ". " + e, e);
}
// lets default to the service DNS name
return defaultProtocolText + serviceName;
}
/**
* Calculates the external URL to access Jenkins
*
* @param namespace the namespace Jenkins is running inside
* @param openShiftClient the OpenShift client
* @return the external URL to access Jenkins
*/
public static String getJenkinsURL(OpenShiftClient openShiftClient, String namespace) {
return getExternalServiceUrl(openShiftClient, "http://", namespace ,"jenkins");
}
/**
* Lazily creates the GitSource if need be then updates the git URL
* @param buildConfig the BuildConfig to update
* @param gitUrl the URL to the git repo
* @param ref
*/
public static void updateGitSourceUrl(BuildConfig buildConfig, String gitUrl, String ref) {
BuildConfigSpec spec = buildConfig.getSpec();
if (spec == null) {
spec = new BuildConfigSpec();
buildConfig.setSpec(spec);
}
BuildSource source = spec.getSource();
if (source == null) {
source = new BuildSource();
spec.setSource(source);
}
source.setType("Git");
GitBuildSource gitSource = source.getGit();
if (gitSource == null) {
gitSource = new GitBuildSource();
source.setGit(gitSource);
}
gitSource.setUri(gitUrl);
gitSource.setRef(ref);
}
/**
 * Transitions an OpenShift {@link Build} to the given phase via the builds API.
 *
 * @param build the build to update
 * @param phase the target build phase
 */
public static void updateOpenShiftBuildPhase(Build build, String phase) {
    // Bug fix: the message hard-coded "pending" even though any phase can be
    // set; log the actual target phase instead.
    logger.info("setting build to " + phase + " in namespace " + build.getMetadata().getNamespace() + " with name: " + build.getMetadata().getName());
    getOpenShiftClient().builds().inNamespace(build.getMetadata().getNamespace()).withName(build.getMetadata().getName())
        .edit()
        .editStatus().withPhase(phase).endStatus()
        .done();
}
/**
* Maps a Jenkins Job name to an OpenShift BuildConfig name
*
* @return the namespaced name for the BuildConfig
* @param jobName the job to associate to a BuildConfig name
* @param namespace the default namespace that Jenkins is running inside
*/
public static NamespaceName buildConfigNameFromJenkinsJobName(String jobName, String namespace) {
// TODO lets detect the namespace separator in the jobName for cases where a jenkins is used for
// BuildConfigs in multiple namespaces?
return new NamespaceName(namespace, jobName);
}
public static long parseResourceVersion(HasMetadata obj) {
return parseResourceVersion(obj.getMetadata().getResourceVersion());
}
public static long parseResourceVersion(String resourceVersion) {
try {
return Long.parseLong(resourceVersion);
} catch (NumberFormatException e) {
return 0;
}
}
public static String formatTimestamp(long timestamp) {
return dateFormatter.print(new DateTime(timestamp));
}
public static long parseTimestamp(String timestamp) {
return dateFormatter.parseMillis(timestamp);
}
public static boolean isResourceWithoutStateEqual(HasMetadata oldObj, HasMetadata newObj) {
try {
byte[] oldDigest = MessageDigest.getInstance("MD5").digest(dumpWithoutRuntimeStateAsYaml(oldObj).getBytes(StandardCharsets.UTF_8));
byte[] newDigest = MessageDigest.getInstance("MD5").digest(dumpWithoutRuntimeStateAsYaml(newObj).getBytes(StandardCharsets.UTF_8));
return Arrays.equals(oldDigest, newDigest);
} catch (NoSuchAlgorithmException | JsonProcessingException e) {
throw new RuntimeException(e);
}
}
public static String dumpWithoutRuntimeStateAsYaml(HasMetadata obj) throws JsonProcessingException {
ObjectMapper statelessMapper = new ObjectMapper(new YAMLFactory());
statelessMapper.addMixInAnnotations(ObjectMeta.class, ObjectMetaMixIn.class);
statelessMapper.addMixInAnnotations(ReplicationController.class, StatelessReplicationControllerMixIn.class);
return statelessMapper.writeValueAsString(obj);
}
public static boolean isCancellable(BuildStatus buildStatus) {
String phase = buildStatus.getPhase();
return phase.equals(NEW) || phase.equals(PENDING) || phase.equals(RUNNING);
}
public static boolean isNew(BuildStatus buildStatus) {
return buildStatus.getPhase().equals(NEW);
}
public static boolean isCancelled(BuildStatus status) {
return Boolean.TRUE.equals(status.getCancelled());
}
abstract class StatelessReplicationControllerMixIn extends ReplicationController {
@JsonIgnore
private ReplicationControllerStatus status;
StatelessReplicationControllerMixIn() {
}
@JsonIgnore
public abstract ReplicationControllerStatus getStatus();
}
abstract class ObjectMetaMixIn extends ObjectMeta {
@JsonIgnore
private String creationTimestamp;
@JsonIgnore
private String deletionTimestamp;
@JsonIgnore
private Long generation;
@JsonIgnore
private String resourceVersion;
@JsonIgnore
private String selfLink;
@JsonIgnore
private String uid;
ObjectMetaMixIn() {
}
@JsonIgnore
public abstract String getCreationTimestamp();
@JsonIgnore
public abstract String getDeletionTimestamp();
@JsonIgnore
public abstract Long getGeneration();
@JsonIgnore
public abstract String getResourceVersion();
@JsonIgnore
public abstract String getSelfLink();
@JsonIgnore
public abstract String getUid();
}
}
|
Fix up logging messages
|
src/main/java/io/fabric8/jenkins/openshiftsync/OpenShiftUtils.java
|
Fix up logging messages
|
|
Java
|
apache-2.0
|
74b91a982d901f0abdb0489a07aef87a0d0713f6
| 0
|
chenchulakshmig/scsb-etl,angelindayana/scsb-etl,chenchulakshmig/scsb-etl,angelindayana/scsb-etl,chenchulakshmig/scsb-etl,premkumarbalu/scsb-etl,angelindayana/scsb-etl,premkumarbalu/scsb-etl,premkumarbalu/scsb-etl
|
package org.recap.model.csv;
import org.apache.camel.dataformat.bindy.annotation.CsvRecord;
import org.apache.camel.dataformat.bindy.annotation.DataField;
/**
 * Bindy CSV record describing a per-file load-success report: counts of
 * records, bibs, holdings and items loaded for a given input file.
 * Values are quoted with '"' so separators inside values stay intact.
 *
 * Created by angelind on 22/7/16.
 */
@CsvRecord(generateHeaderColumns = true, separator = ",", quote = "\"", crlf = "UNIX")
public class SuccessReportReCAPCSVRecord {
    // Column 1: name of the processed input file.
    @DataField(pos = 1)
    private String fileName;
    // Column 2: total records found in the file.
    @DataField(pos = 2)
    private Integer totalRecordsInFile;
    // Column 3: bibliographic records successfully loaded.
    @DataField(pos = 3)
    private Integer totalBibsLoaded;
    // Column 4: holdings records successfully loaded.
    @DataField(pos = 4)
    private Integer totalHoldingsLoaded;
    // Column 5: item records successfully loaded.
    @DataField(pos = 5)
    private Integer totalItemsLoaded;

    public String getFileName() {
        return fileName;
    }
    public void setFileName(String fileName) {
        this.fileName = fileName;
    }
    public Integer getTotalRecordsInFile() {
        return totalRecordsInFile;
    }
    public void setTotalRecordsInFile(Integer totalRecordsInFile) {
        this.totalRecordsInFile = totalRecordsInFile;
    }
    public Integer getTotalBibsLoaded() {
        return totalBibsLoaded;
    }
    public void setTotalBibsLoaded(Integer totalBibsLoaded) {
        this.totalBibsLoaded = totalBibsLoaded;
    }
    public Integer getTotalHoldingsLoaded() {
        return totalHoldingsLoaded;
    }
    public void setTotalHoldingsLoaded(Integer totalHoldingsLoaded) {
        this.totalHoldingsLoaded = totalHoldingsLoaded;
    }
    public Integer getTotalItemsLoaded() {
        return totalItemsLoaded;
    }
    public void setTotalItemsLoaded(Integer totalItemsLoaded) {
        this.totalItemsLoaded = totalItemsLoaded;
    }
}
|
src/main/java/org/recap/model/csv/SuccessReportReCAPCSVRecord.java
|
package org.recap.model.csv;
import org.apache.camel.dataformat.bindy.annotation.CsvRecord;
import org.apache.camel.dataformat.bindy.annotation.DataField;
/**
 * Bindy CSV record describing a per-file load-success report: counts of
 * records, bibs, holdings and items loaded for a given input file.
 *
 * Created by angelind on 22/7/16.
 */
// Fix: without an explicit quote character, field values containing the
// separator (e.g. commas inside file names) corrupt the generated CSV.
@CsvRecord(generateHeaderColumns = true, separator = ",", quote = "\"", crlf = "UNIX")
public class SuccessReportReCAPCSVRecord {
    // Column 1: name of the processed input file.
    @DataField(pos = 1)
    private String fileName;
    // Column 2: total records found in the file.
    @DataField(pos = 2)
    private Integer totalRecordsInFile;
    // Column 3: bibliographic records successfully loaded.
    @DataField(pos = 3)
    private Integer totalBibsLoaded;
    // Column 4: holdings records successfully loaded.
    @DataField(pos = 4)
    private Integer totalHoldingsLoaded;
    // Column 5: item records successfully loaded.
    @DataField(pos = 5)
    private Integer totalItemsLoaded;

    public String getFileName() {
        return fileName;
    }
    public void setFileName(String fileName) {
        this.fileName = fileName;
    }
    public Integer getTotalRecordsInFile() {
        return totalRecordsInFile;
    }
    public void setTotalRecordsInFile(Integer totalRecordsInFile) {
        this.totalRecordsInFile = totalRecordsInFile;
    }
    public Integer getTotalBibsLoaded() {
        return totalBibsLoaded;
    }
    public void setTotalBibsLoaded(Integer totalBibsLoaded) {
        this.totalBibsLoaded = totalBibsLoaded;
    }
    public Integer getTotalHoldingsLoaded() {
        return totalHoldingsLoaded;
    }
    public void setTotalHoldingsLoaded(Integer totalHoldingsLoaded) {
        this.totalHoldingsLoaded = totalHoldingsLoaded;
    }
    public Integer getTotalItemsLoaded() {
        return totalItemsLoaded;
    }
    public void setTotalItemsLoaded(Integer totalItemsLoaded) {
        this.totalItemsLoaded = totalItemsLoaded;
    }
}
|
added "" to encapsute the csv values.
|
src/main/java/org/recap/model/csv/SuccessReportReCAPCSVRecord.java
|
added "" to encapsute the csv values.
|
|
Java
|
apache-2.0
|
41f54cc0163430439e45b0f1fe8b3c2b88222c90
| 0
|
alexryndin/ambari,alexryndin/ambari,sekikn/ambari,alexryndin/ambari,radicalbit/ambari,arenadata/ambari,alexryndin/ambari,radicalbit/ambari,arenadata/ambari,arenadata/ambari,arenadata/ambari,alexryndin/ambari,alexryndin/ambari,arenadata/ambari,radicalbit/ambari,radicalbit/ambari,arenadata/ambari,arenadata/ambari,sekikn/ambari,sekikn/ambari,arenadata/ambari,alexryndin/ambari,radicalbit/ambari,radicalbit/ambari,sekikn/ambari,alexryndin/ambari,arenadata/ambari,sekikn/ambari,arenadata/ambari,alexryndin/ambari,alexryndin/ambari,sekikn/ambari,radicalbit/ambari,sekikn/ambari,arenadata/ambari,sekikn/ambari,radicalbit/ambari,radicalbit/ambari,radicalbit/ambari,alexryndin/ambari,radicalbit/ambari,sekikn/ambari
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.utils;
import org.apache.commons.lang.StringUtils;
import com.google.common.base.Joiner;
import com.google.gson.Gson;
import com.google.inject.Inject;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.Role;
import org.apache.ambari.server.RoleCommand;
import org.apache.ambari.server.actionmanager.Stage;
import org.apache.ambari.server.actionmanager.StageFactory;
import org.apache.ambari.server.agent.ExecutionCommand;
import org.apache.ambari.server.controller.ActionExecutionContext;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.HostComponentAdminState;
import org.apache.ambari.server.state.Service;
import org.apache.ambari.server.state.ServiceComponent;
import org.apache.ambari.server.state.ServiceComponentHost;
import org.apache.ambari.server.state.svccomphost.ServiceComponentHostInstallEvent;
import org.apache.ambari.server.topology.TopologyManager;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.SerializationConfig;
import javax.xml.bind.JAXBException;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
/**
 * Static helpers for building Ambari stages, (de)serializing command objects
 * to/from JSON, and assembling the compact "cluster host info" structure
 * (host lists plus ping ports, racks and IPv4 addresses encoded as index
 * ranges) that is shipped to agents with every command.
 */
public class StageUtils {

  public static final Integer DEFAULT_PING_PORT = 8670;
  public static final String DEFAULT_RACK = "/default-rack";
  public static final String DEFAULT_IPV4_ADDRESS = "127.0.0.1";

  private static final Log LOG = LogFactory.getLog(StageUtils.class);

  static final String AMBARI_SERVER_HOST = "ambari_server_host";
  private static final String HOSTS_LIST = "all_hosts";
  private static final String PORTS = "all_ping_ports";
  private static final String RACKS = "all_racks";
  private static final String IPV4_ADDRESSES = "all_ipv4_ips";

  // Component name -> key under which that component's host indexes are
  // published in cluster host info (populated in a static block below).
  private static Map<String, String> componentToClusterInfoKeyMap =
      new HashMap<String, String>();

  // Component name -> key for that component's decommissioned hosts.
  private static Map<String, String> decommissionedToClusterInfoKeyMap =
      new HashMap<String, String>();

  // Shared, lazily created Gson; volatile so the double-checked locking in
  // getGson() is safe.
  private volatile static Gson gson;

  @Inject
  private static StageFactory stageFactory;

  @Inject
  private static TopologyManager topologyManager;

  @Inject
  public StageUtils(StageFactory stageFactory) {
    StageUtils.stageFactory = stageFactory;
  }

  // Canonical hostname of the ambari-server process, resolved once at load.
  private static String server_hostname;

  static {
    try {
      server_hostname = InetAddress.getLocalHost().getCanonicalHostName().toLowerCase();
    } catch (UnknownHostException e) {
      LOG.warn("Could not find canonical hostname ", e);
      server_hostname = "localhost";
    }
  }

  /**
   * Returns the shared Gson instance, creating it lazily on first use.
   * Double-checked locking; the LOG object doubles as the lock.
   */
  public static Gson getGson() {
    if (gson != null) {
      return gson;
    } else {
      synchronized (LOG) {
        if (gson == null) {
          gson = new Gson();
        }
        return gson;
      }
    }
  }

  /**
   * Injects an externally created Gson instance. The instance is only
   * accepted while none has been set yet, so an already-initialized
   * serializer is never replaced mid-flight.
   */
  public static void setGson(Gson gson) {
    // BUGFIX: the previous condition (gson == null) could only ever assign
    // null here, so every real instance passed to this setter was dropped.
    if (StageUtils.gson == null) {
      StageUtils.gson = gson;
    }
  }

  //todo: proper static injection
  public static void setTopologyManager(TopologyManager topologyManager) {
    StageUtils.topologyManager = topologyManager;
  }

  static {
    componentToClusterInfoKeyMap.put("NAMENODE", "namenode_host");
    componentToClusterInfoKeyMap.put("JOBTRACKER", "jtnode_host");
    componentToClusterInfoKeyMap.put("SECONDARY_NAMENODE", "snamenode_host");
    componentToClusterInfoKeyMap.put("RESOURCEMANAGER", "rm_host");
    componentToClusterInfoKeyMap.put("NODEMANAGER", "nm_hosts");
    componentToClusterInfoKeyMap.put("HISTORYSERVER", "hs_host");
    componentToClusterInfoKeyMap.put("JOURNALNODE", "journalnode_hosts");
    componentToClusterInfoKeyMap.put("ZKFC", "zkfc_hosts");
    componentToClusterInfoKeyMap.put("ZOOKEEPER_SERVER", "zookeeper_hosts");
    componentToClusterInfoKeyMap.put("FLUME_HANDLER", "flume_hosts");
    componentToClusterInfoKeyMap.put("HBASE_MASTER", "hbase_master_hosts");
    componentToClusterInfoKeyMap.put("HBASE_REGIONSERVER", "hbase_rs_hosts");
    componentToClusterInfoKeyMap.put("HIVE_SERVER", "hive_server_host");
    componentToClusterInfoKeyMap.put("HIVE_METASTORE", "hive_metastore_host");
    componentToClusterInfoKeyMap.put("OOZIE_SERVER", "oozie_server");
    componentToClusterInfoKeyMap.put("WEBHCAT_SERVER", "webhcat_server_host");
    componentToClusterInfoKeyMap.put("MYSQL_SERVER", "hive_mysql_host");
    componentToClusterInfoKeyMap.put("DASHBOARD", "dashboard_host");
    componentToClusterInfoKeyMap.put("GANGLIA_SERVER", "ganglia_server_host");
    componentToClusterInfoKeyMap.put("DATANODE", "slave_hosts");
    componentToClusterInfoKeyMap.put("TASKTRACKER", "mapred_tt_hosts");
    // Note: a duplicate HBASE_REGIONSERVER entry (identical key and value)
    // was removed here.
    componentToClusterInfoKeyMap.put("KERBEROS_SERVER", "kdc_host");
    componentToClusterInfoKeyMap.put("KERBEROS_ADMIN_CLIENT", "kerberos_adminclient_host");
  }

  static {
    decommissionedToClusterInfoKeyMap.put("DATANODE", "decom_dn_hosts");
    decommissionedToClusterInfoKeyMap.put("TASKTRACKER", "decom_tt_hosts");
    decommissionedToClusterInfoKeyMap.put("NODEMANAGER", "decom_nm_hosts");
    decommissionedToClusterInfoKeyMap.put("HBASE_REGIONSERVER", "decom_hbase_rs_hosts");
  }

  /** Builds the "requestId-stageId" action id string. */
  public static String getActionId(long requestId, long stageId) {
    return requestId + "-" + stageId;
  }

  public static Map<String, String> getComponentToClusterInfoKeyMap() {
    return componentToClusterInfoKeyMap;
  }

  /**
   * Parses an action id of the form "requestId-stageId" back into its two
   * numeric parts: element 0 is the request id, element 1 the stage id.
   */
  public static long[] getRequestStage(String actionId) {
    String[] fields = actionId.split("-");
    long[] requestStageIds = new long[2];
    requestStageIds[0] = Long.parseLong(fields[0]);
    requestStageIds[1] = Long.parseLong(fields[1]);
    return requestStageIds;
  }

  /** Test helper: builds a canned stage using the local hostname. */
  public static Stage getATestStage(long requestId, long stageId, String clusterHostInfo, String commandParamsStage, String hostParamsStage) {
    String hostname;
    try {
      hostname = InetAddress.getLocalHost().getHostName();
    } catch (UnknownHostException e) {
      hostname = "host-dummy";
    }
    return getATestStage(requestId, stageId, hostname, clusterHostInfo, commandParamsStage, hostParamsStage);
  }

  //For testing only
  @Inject
  public static Stage getATestStage(long requestId, long stageId, String hostname, String clusterHostInfo, String commandParamsStage, String hostParamsStage) {
    Stage s = stageFactory.createNew(requestId, "/tmp", "cluster1", 1L, "context", clusterHostInfo, commandParamsStage, hostParamsStage);
    s.setStageId(stageId);
    long now = System.currentTimeMillis();
    s.addHostRoleExecutionCommand(hostname, Role.NAMENODE, RoleCommand.INSTALL,
        new ServiceComponentHostInstallEvent("NAMENODE", hostname, now, "HDP-1.2.0"),
        "cluster1", "HDFS", false);
    ExecutionCommand execCmd = s.getExecutionCommandWrapper(hostname, "NAMENODE").getExecutionCommand();
    execCmd.setCommandId(s.getActionId());
    List<String> slaveHostList = new ArrayList<String>();
    slaveHostList.add(hostname);
    slaveHostList.add("host2");
    Map<String, String> hdfsSite = new TreeMap<String, String>();
    hdfsSite.put("dfs.block.size", "2560000000");
    Map<String, Map<String, String>> configurations =
        new TreeMap<String, Map<String, String>>();
    configurations.put("hdfs-site", hdfsSite);
    execCmd.setConfigurations(configurations);
    Map<String, Map<String, Map<String, String>>> configurationAttributes =
        new TreeMap<String, Map<String, Map<String, String>>>();
    Map<String, Map<String, String>> hdfsSiteAttributes = new TreeMap<String, Map<String, String>>();
    Map<String, String> finalAttribute = new TreeMap<String, String>();
    finalAttribute.put("dfs.block.size", "true");
    hdfsSiteAttributes.put("final", finalAttribute);
    configurationAttributes.put("hdfsSite", hdfsSiteAttributes);
    execCmd.setConfigurationAttributes(configurationAttributes);
    Map<String, String> params = new TreeMap<String, String>();
    params.put("jdklocation", "/x/y/z");
    params.put("stack_version", "1.2.0");
    params.put("stack_name", "HDP");
    execCmd.setHostLevelParams(params);
    Map<String, String> roleParams = new TreeMap<String, String>();
    roleParams.put("format", "false");
    execCmd.setRoleParams(roleParams);
    Map<String, String> commandParams = new TreeMap<String, String>();
    commandParams.put(ExecutionCommand.KeyNames.COMMAND_TIMEOUT, "600");
    execCmd.setCommandParams(commandParams);
    return s;
  }

  /** Serializes the given object to JSON using the shared Gson instance. */
  public static String jaxbToString(Object jaxbObj) throws JAXBException,
      JsonGenerationException, JsonMappingException, IOException {
    return getGson().toJson(jaxbObj);
  }

  /** Deserializes a JSON string into an instance of {@code clazz} via Jackson. */
  public static <T> T fromJson(String json, Class<T> clazz) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    mapper.configure(SerializationConfig.Feature.INDENT_OUTPUT, true);
    mapper.configure(SerializationConfig.Feature.USE_ANNOTATIONS, true);
    InputStream is = new ByteArrayInputStream(json.getBytes(Charset.forName("UTF8")));
    return mapper.readValue(is, clazz);
  }

  public static Map<String, String> getCommandParamsStage(ActionExecutionContext actionExecContext) throws AmbariException {
    return actionExecContext.getParameters() != null ? actionExecContext.getParameters() : new TreeMap<String, String>();
  }

  /**
   * Builds the cluster host info structure sent to agents.
   * All host-level lists (ping ports, racks, IPv4 addresses) and the
   * per-component host sets are index-encoded against the master
   * {@code all_hosts} list, so every list must stay in lock-step with it —
   * including hosts that are still pending in the topology manager.
   */
  public static Map<String, Set<String>> getClusterHostInfo(Cluster cluster) throws AmbariException {
    //Fill hosts and ports lists
    Set<String> hostsSet = new LinkedHashSet<String>();
    List<Integer> portsList = new ArrayList<Integer>();
    List<String> rackList = new ArrayList<String>();
    List<String> ipV4List = new ArrayList<String>();
    Collection<Host> allHosts = cluster.getHosts();
    for (Host host : allHosts) {
      hostsSet.add(host.getHostName());
      Integer currentPingPort = host.getCurrentPingPort();
      portsList.add(currentPingPort == null ? DEFAULT_PING_PORT : currentPingPort);
      String rackInfo = host.getRackInfo();
      rackList.add(StringUtils.isEmpty(rackInfo) ? DEFAULT_RACK : rackInfo );
      String iPv4 = host.getIPv4();
      ipV4List.add(StringUtils.isEmpty(iPv4) ? DEFAULT_IPV4_ADDRESS : iPv4 );
    }
    // add hosts from topology manager; defaults keep all lists aligned with hostsSet
    Map<String, Collection<String>> pendingHostComponents = topologyManager.getProjectedTopology();
    for (String hostname : pendingHostComponents.keySet()) {
      if (!hostsSet.contains(hostname)) {
        hostsSet.add(hostname);
        portsList.add(DEFAULT_PING_PORT);
        rackList.add(DEFAULT_RACK);
        ipV4List.add(DEFAULT_IPV4_ADDRESS);
      }
    }
    List<String> hostsList = new ArrayList<String>(hostsSet);
    // keys synthesized for components not in componentToClusterInfoKeyMap
    Map<String, String> additionalComponentToClusterInfoKeyMap = new HashMap<String, String>();
    // Fill hosts for services
    Map<String, SortedSet<Integer>> hostRolesInfo = new HashMap<String, SortedSet<Integer>>();
    for (Map.Entry<String, Service> serviceEntry : cluster.getServices().entrySet()) {
      Service service = serviceEntry.getValue();
      for (Map.Entry<String, ServiceComponent> serviceComponentEntry : service.getServiceComponents().entrySet()) {
        ServiceComponent serviceComponent = serviceComponentEntry.getValue();
        String componentName = serviceComponent.getName();
        String roleName = componentToClusterInfoKeyMap.get(componentName);
        if(null == roleName) {
          roleName = additionalComponentToClusterInfoKeyMap.get(componentName);
        }
        if (null == roleName && !serviceComponent.isClientComponent()) {
          // synthesize a "<component>_hosts" key for unknown non-client components
          roleName = componentName.toLowerCase() + "_hosts";
          additionalComponentToClusterInfoKeyMap.put(componentName, roleName);
        }
        String decomRoleName = decommissionedToClusterInfoKeyMap.get(componentName);
        if (roleName == null && decomRoleName == null) {
          continue;
        }
        for (String hostName : serviceComponent.getServiceComponentHosts().keySet()) {
          if (roleName != null) {
            SortedSet<Integer> hostsForComponentsHost = hostRolesInfo.get(roleName);
            if (hostsForComponentsHost == null) {
              hostsForComponentsHost = new TreeSet<Integer>();
              hostRolesInfo.put(roleName, hostsForComponentsHost);
            }
            int hostIndex = hostsList.indexOf(hostName);
            //Add index of host to current host role
            hostsForComponentsHost.add(hostIndex);
          }
          if (decomRoleName != null) {
            ServiceComponentHost scHost = serviceComponent.getServiceComponentHost(hostName);
            if (scHost.getComponentAdminState() == HostComponentAdminState.DECOMMISSIONED) {
              SortedSet<Integer> hostsForComponentsHost = hostRolesInfo.get(decomRoleName);
              if (hostsForComponentsHost == null) {
                hostsForComponentsHost = new TreeSet<Integer>();
                hostRolesInfo.put(decomRoleName, hostsForComponentsHost);
              }
              int hostIndex = hostsList.indexOf(hostName);
              //Add index of host to current host role
              hostsForComponentsHost.add(hostIndex);
            }
          }
        }
      }
    }
    // add components from topology manager
    for (Map.Entry<String, Collection<String>> entry : pendingHostComponents.entrySet()) {
      String hostname = entry.getKey();
      Collection<String> hostComponents = entry.getValue();
      for (String hostComponent : hostComponents) {
        String roleName = componentToClusterInfoKeyMap.get(hostComponent);
        if (null == roleName) {
          roleName = additionalComponentToClusterInfoKeyMap.get(hostComponent);
        }
        if (null == roleName) {
          // even though all mappings are being added, componentToClusterInfoKeyMap is
          // a higher priority lookup
          for (Service service : cluster.getServices().values()) {
            for (ServiceComponent sc : service.getServiceComponents().values()) {
              if (!sc.isClientComponent() && sc.getName().equals(hostComponent)) {
                roleName = hostComponent.toLowerCase() + "_hosts";
                additionalComponentToClusterInfoKeyMap.put(hostComponent, roleName);
              }
            }
          }
        }
        if (roleName != null) {
          SortedSet<Integer> hostsForComponentsHost = hostRolesInfo.get(roleName);
          if (hostsForComponentsHost == null) {
            hostsForComponentsHost = new TreeSet<Integer>();
            hostRolesInfo.put(roleName, hostsForComponentsHost);
          }
          int hostIndex = hostsList.indexOf(hostname);
          if (hostIndex != -1) {
            if (!hostsForComponentsHost.contains(hostIndex)) {
              hostsForComponentsHost.add(hostIndex);
            }
          } else {
            //todo: I don't think that this can happen
            //todo: determine if it can and if so, handle properly
            //todo: if it 'cant' should probably enforce invariant
            throw new RuntimeException("Unable to get host index for host: " + hostname);
          }
        }
      }
    }
    Map<String, Set<String>> clusterHostInfo = new HashMap<String, Set<String>>();
    for (Map.Entry<String, SortedSet<Integer>> entry : hostRolesInfo.entrySet()) {
      TreeSet<Integer> sortedSet = new TreeSet<Integer>(entry.getValue());
      Set<String> replacedRangesSet = replaceRanges(sortedSet);
      clusterHostInfo.put(entry.getKey(), replacedRangesSet);
    }
    clusterHostInfo.put(HOSTS_LIST, hostsSet);
    clusterHostInfo.put(PORTS, replaceMappedRanges(portsList));
    clusterHostInfo.put(IPV4_ADDRESSES, replaceMappedRanges(ipV4List));
    clusterHostInfo.put(RACKS, replaceMappedRanges(rackList));
    // Fill server host
    /*
     * Note: We don't replace server host name by an index (like we do
     * with component hostnames), because if ambari-agent is not installed
     * at ambari-server host, then allHosts map will not contain
     * ambari-server hostname.
     */
    TreeSet<String> serverHost = new TreeSet<String>();
    serverHost.add(getHostName());
    clusterHostInfo.put(AMBARI_SERVER_HOST, serverHost);
    return clusterHostInfo;
  }

  /**
   * Finds ranges in sorted set and replaces ranges by compact notation
   * <p/>
   * <p>For example, suppose <tt>set</tt> comprises<tt> [1, 2, 3, 4, 7]</tt>.
   * After invoking <tt>rangedSet = StageUtils.replaceRanges(set)</tt>
   * <tt>rangedSet</tt> will comprise
   * <tt>["1-4", "7"]</tt>..
   *
   * @param set the source set to be ranged
   */
  public static Set<String> replaceRanges(SortedSet<Integer> set) {
    if (set == null) {
      return null;
    }
    Set<String> rangedSet = new HashSet<String>();
    Integer prevElement = null;
    Integer startOfRange = set.first();
    for (Integer i : set) {
      // A gap greater than 1 terminates the current run.
      if (prevElement != null && (i - prevElement) > 1) {
        String rangeItem = getRangedItem(startOfRange, prevElement);
        rangedSet.add(rangeItem);
        startOfRange = i;
      }
      prevElement = i;
    }
    rangedSet.add(getRangedItem(startOfRange, prevElement));
    return rangedSet;
  }

  /**
   * Finds ranges in list and replaces ranges by compact notation
   * <p/>
   * <p>For example, suppose <tt>list</tt> comprises<tt> [1, 1, 2, 2, 1, 3]</tt>.
   * After invoking <tt>rangedMappedSet = StageUtils.replaceMappedRanges(list)</tt>
   * <tt>rangedMappedSet</tt> will comprise
   * <tt>["1:0-1,4", "2:2-3", "3:5"]</tt>..
   *
   * @param values the source list to be ranged
   */
  public static <T> Set<String> replaceMappedRanges(List<T> values) {
    // Group the list indexes by value, then range-encode each index set.
    Map<T, SortedSet<Integer>> convolutedValues = new HashMap<T, SortedSet<Integer>>();
    int valueIndex = 0;
    for (T value : values) {
      SortedSet<Integer> correspValues = convolutedValues.get(value);
      if (correspValues == null) {
        correspValues = new TreeSet<Integer>();
        convolutedValues.put(value, correspValues);
      }
      correspValues.add(valueIndex);
      valueIndex++;
    }
    Set<String> result = new HashSet<String>();
    for (Entry<T, SortedSet<Integer>> entry : convolutedValues.entrySet()) {
      Set<String> replacedRanges = replaceRanges(entry.getValue());
      result.add(entry.getKey() + ":" + Joiner.on(",").join(replacedRanges));
    }
    return result;
  }

  /** Formats one run as "n", "a,b" (adjacent pair) or "a-b" (longer run). */
  private static String getRangedItem(Integer startOfRange, Integer endOfRange) {
    String separator = (endOfRange - startOfRange) > 1 ? "-" : ",";
    String rangeItem = endOfRange.equals(startOfRange) ?
        endOfRange.toString() :
        startOfRange + separator + endOfRange;
    return rangeItem;
  }

  /** Returns the cached canonical hostname of the ambari-server process. */
  public static String getHostName() {
    return server_hostname;
  }
}
|
ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.utils;
import com.google.common.base.Joiner;
import com.google.gson.Gson;
import com.google.inject.Inject;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.Role;
import org.apache.ambari.server.RoleCommand;
import org.apache.ambari.server.actionmanager.Stage;
import org.apache.ambari.server.actionmanager.StageFactory;
import org.apache.ambari.server.agent.ExecutionCommand;
import org.apache.ambari.server.controller.ActionExecutionContext;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.HostComponentAdminState;
import org.apache.ambari.server.state.Service;
import org.apache.ambari.server.state.ServiceComponent;
import org.apache.ambari.server.state.ServiceComponentHost;
import org.apache.ambari.server.state.svccomphost.ServiceComponentHostInstallEvent;
import org.apache.ambari.server.topology.TopologyManager;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.SerializationConfig;
import javax.xml.bind.JAXBException;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
/**
 * Static helpers for building Ambari stages, (de)serializing command objects
 * to/from JSON, and assembling the compact "cluster host info" structure
 * (host lists plus ping ports, racks and IPv4 addresses encoded as index
 * ranges) that is shipped to agents with every command.
 */
public class StageUtils {

  public static final Integer DEFAULT_PING_PORT = 8670;
  public static final String DEFAULT_RACK = "/default-rack";
  public static final String DEFAULT_IPV4_ADDRESS = "127.0.0.1";

  private static final Log LOG = LogFactory.getLog(StageUtils.class);

  static final String AMBARI_SERVER_HOST = "ambari_server_host";
  private static final String HOSTS_LIST = "all_hosts";
  private static final String PORTS = "all_ping_ports";
  private static final String RACKS = "all_racks";
  private static final String IPV4_ADDRESSES = "all_ipv4_ips";

  // Component name -> key under which that component's host indexes are
  // published in cluster host info (populated in a static block below).
  private static Map<String, String> componentToClusterInfoKeyMap =
      new HashMap<String, String>();

  // Component name -> key for that component's decommissioned hosts.
  private static Map<String, String> decommissionedToClusterInfoKeyMap =
      new HashMap<String, String>();

  // Shared, lazily created Gson; volatile so the double-checked locking in
  // getGson() is safe.
  private volatile static Gson gson;

  @Inject
  private static StageFactory stageFactory;

  @Inject
  private static TopologyManager topologyManager;

  @Inject
  public StageUtils(StageFactory stageFactory) {
    StageUtils.stageFactory = stageFactory;
  }

  // Canonical hostname of the ambari-server process, resolved once at load.
  private static String server_hostname;

  static {
    try {
      server_hostname = InetAddress.getLocalHost().getCanonicalHostName().toLowerCase();
    } catch (UnknownHostException e) {
      LOG.warn("Could not find canonical hostname ", e);
      server_hostname = "localhost";
    }
  }

  /**
   * Returns the shared Gson instance, creating it lazily on first use.
   * Double-checked locking; the LOG object doubles as the lock.
   */
  public static Gson getGson() {
    if (gson != null) {
      return gson;
    } else {
      synchronized (LOG) {
        if (gson == null) {
          gson = new Gson();
        }
        return gson;
      }
    }
  }

  /**
   * Injects an externally created Gson instance. The instance is only
   * accepted while none has been set yet, so an already-initialized
   * serializer is never replaced mid-flight.
   */
  public static void setGson(Gson gson) {
    // BUGFIX: the previous condition (gson == null) could only ever assign
    // null here, so every real instance passed to this setter was dropped.
    if (StageUtils.gson == null) {
      StageUtils.gson = gson;
    }
  }

  //todo: proper static injection
  public static void setTopologyManager(TopologyManager topologyManager) {
    StageUtils.topologyManager = topologyManager;
  }

  static {
    componentToClusterInfoKeyMap.put("NAMENODE", "namenode_host");
    componentToClusterInfoKeyMap.put("JOBTRACKER", "jtnode_host");
    componentToClusterInfoKeyMap.put("SECONDARY_NAMENODE", "snamenode_host");
    componentToClusterInfoKeyMap.put("RESOURCEMANAGER", "rm_host");
    componentToClusterInfoKeyMap.put("NODEMANAGER", "nm_hosts");
    componentToClusterInfoKeyMap.put("HISTORYSERVER", "hs_host");
    componentToClusterInfoKeyMap.put("JOURNALNODE", "journalnode_hosts");
    componentToClusterInfoKeyMap.put("ZKFC", "zkfc_hosts");
    componentToClusterInfoKeyMap.put("ZOOKEEPER_SERVER", "zookeeper_hosts");
    componentToClusterInfoKeyMap.put("FLUME_HANDLER", "flume_hosts");
    componentToClusterInfoKeyMap.put("HBASE_MASTER", "hbase_master_hosts");
    componentToClusterInfoKeyMap.put("HBASE_REGIONSERVER", "hbase_rs_hosts");
    componentToClusterInfoKeyMap.put("HIVE_SERVER", "hive_server_host");
    componentToClusterInfoKeyMap.put("HIVE_METASTORE", "hive_metastore_host");
    componentToClusterInfoKeyMap.put("OOZIE_SERVER", "oozie_server");
    componentToClusterInfoKeyMap.put("WEBHCAT_SERVER", "webhcat_server_host");
    componentToClusterInfoKeyMap.put("MYSQL_SERVER", "hive_mysql_host");
    componentToClusterInfoKeyMap.put("DASHBOARD", "dashboard_host");
    componentToClusterInfoKeyMap.put("GANGLIA_SERVER", "ganglia_server_host");
    componentToClusterInfoKeyMap.put("DATANODE", "slave_hosts");
    componentToClusterInfoKeyMap.put("TASKTRACKER", "mapred_tt_hosts");
    // Note: a duplicate HBASE_REGIONSERVER entry (identical key and value)
    // was removed here.
    componentToClusterInfoKeyMap.put("KERBEROS_SERVER", "kdc_host");
    componentToClusterInfoKeyMap.put("KERBEROS_ADMIN_CLIENT", "kerberos_adminclient_host");
  }

  static {
    decommissionedToClusterInfoKeyMap.put("DATANODE", "decom_dn_hosts");
    decommissionedToClusterInfoKeyMap.put("TASKTRACKER", "decom_tt_hosts");
    decommissionedToClusterInfoKeyMap.put("NODEMANAGER", "decom_nm_hosts");
    decommissionedToClusterInfoKeyMap.put("HBASE_REGIONSERVER", "decom_hbase_rs_hosts");
  }

  /** Builds the "requestId-stageId" action id string. */
  public static String getActionId(long requestId, long stageId) {
    return requestId + "-" + stageId;
  }

  public static Map<String, String> getComponentToClusterInfoKeyMap() {
    return componentToClusterInfoKeyMap;
  }

  /**
   * Parses an action id of the form "requestId-stageId" back into its two
   * numeric parts: element 0 is the request id, element 1 the stage id.
   */
  public static long[] getRequestStage(String actionId) {
    String[] fields = actionId.split("-");
    long[] requestStageIds = new long[2];
    requestStageIds[0] = Long.parseLong(fields[0]);
    requestStageIds[1] = Long.parseLong(fields[1]);
    return requestStageIds;
  }

  /** Test helper: builds a canned stage using the local hostname. */
  public static Stage getATestStage(long requestId, long stageId, String clusterHostInfo, String commandParamsStage, String hostParamsStage) {
    String hostname;
    try {
      hostname = InetAddress.getLocalHost().getHostName();
    } catch (UnknownHostException e) {
      hostname = "host-dummy";
    }
    return getATestStage(requestId, stageId, hostname, clusterHostInfo, commandParamsStage, hostParamsStage);
  }

  //For testing only
  @Inject
  public static Stage getATestStage(long requestId, long stageId, String hostname, String clusterHostInfo, String commandParamsStage, String hostParamsStage) {
    Stage s = stageFactory.createNew(requestId, "/tmp", "cluster1", 1L, "context", clusterHostInfo, commandParamsStage, hostParamsStage);
    s.setStageId(stageId);
    long now = System.currentTimeMillis();
    s.addHostRoleExecutionCommand(hostname, Role.NAMENODE, RoleCommand.INSTALL,
        new ServiceComponentHostInstallEvent("NAMENODE", hostname, now, "HDP-1.2.0"),
        "cluster1", "HDFS", false);
    ExecutionCommand execCmd = s.getExecutionCommandWrapper(hostname, "NAMENODE").getExecutionCommand();
    execCmd.setCommandId(s.getActionId());
    List<String> slaveHostList = new ArrayList<String>();
    slaveHostList.add(hostname);
    slaveHostList.add("host2");
    Map<String, String> hdfsSite = new TreeMap<String, String>();
    hdfsSite.put("dfs.block.size", "2560000000");
    Map<String, Map<String, String>> configurations =
        new TreeMap<String, Map<String, String>>();
    configurations.put("hdfs-site", hdfsSite);
    execCmd.setConfigurations(configurations);
    Map<String, Map<String, Map<String, String>>> configurationAttributes =
        new TreeMap<String, Map<String, Map<String, String>>>();
    Map<String, Map<String, String>> hdfsSiteAttributes = new TreeMap<String, Map<String, String>>();
    Map<String, String> finalAttribute = new TreeMap<String, String>();
    finalAttribute.put("dfs.block.size", "true");
    hdfsSiteAttributes.put("final", finalAttribute);
    configurationAttributes.put("hdfsSite", hdfsSiteAttributes);
    execCmd.setConfigurationAttributes(configurationAttributes);
    Map<String, String> params = new TreeMap<String, String>();
    params.put("jdklocation", "/x/y/z");
    params.put("stack_version", "1.2.0");
    params.put("stack_name", "HDP");
    execCmd.setHostLevelParams(params);
    Map<String, String> roleParams = new TreeMap<String, String>();
    roleParams.put("format", "false");
    execCmd.setRoleParams(roleParams);
    Map<String, String> commandParams = new TreeMap<String, String>();
    commandParams.put(ExecutionCommand.KeyNames.COMMAND_TIMEOUT, "600");
    execCmd.setCommandParams(commandParams);
    return s;
  }

  /** Serializes the given object to JSON using the shared Gson instance. */
  public static String jaxbToString(Object jaxbObj) throws JAXBException,
      JsonGenerationException, JsonMappingException, IOException {
    return getGson().toJson(jaxbObj);
  }

  /** Deserializes a JSON string into an instance of {@code clazz} via Jackson. */
  public static <T> T fromJson(String json, Class<T> clazz) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    mapper.configure(SerializationConfig.Feature.INDENT_OUTPUT, true);
    mapper.configure(SerializationConfig.Feature.USE_ANNOTATIONS, true);
    InputStream is = new ByteArrayInputStream(json.getBytes(Charset.forName("UTF8")));
    return mapper.readValue(is, clazz);
  }

  public static Map<String, String> getCommandParamsStage(ActionExecutionContext actionExecContext) throws AmbariException {
    return actionExecContext.getParameters() != null ? actionExecContext.getParameters() : new TreeMap<String, String>();
  }

  /**
   * Builds the cluster host info structure sent to agents.
   * All host-level lists (ping ports, racks, IPv4 addresses) and the
   * per-component host sets are index-encoded against the master
   * {@code all_hosts} list, so every list must stay in lock-step with it —
   * including hosts that are still pending in the topology manager.
   */
  public static Map<String, Set<String>> getClusterHostInfo(Cluster cluster) throws AmbariException {
    //Fill hosts and ports lists
    Set<String> hostsSet = new LinkedHashSet<String>();
    List<Integer> portsList = new ArrayList<Integer>();
    List<String> rackList = new ArrayList<String>();
    List<String> ipV4List = new ArrayList<String>();
    Collection<Host> allHosts = cluster.getHosts();
    for (Host host : allHosts) {
      hostsSet.add(host.getHostName());
      Integer currentPingPort = host.getCurrentPingPort();
      portsList.add(currentPingPort == null ? DEFAULT_PING_PORT : currentPingPort);
      String rackInfo = host.getRackInfo();
      rackList.add(rackInfo == null ? DEFAULT_RACK : rackInfo );
      String iPv4 = host.getIPv4();
      ipV4List.add(iPv4 == null ? DEFAULT_IPV4_ADDRESS : iPv4 );
    }
    // add hosts from topology manager
    Map<String, Collection<String>> pendingHostComponents = topologyManager.getProjectedTopology();
    for (String hostname : pendingHostComponents.keySet()) {
      if (! hostsSet.contains(hostname)) {
        hostsSet.add(hostname);
        // these values are only set in heartbeat handler and since these hosts
        // haven't yet been provisioned, use the defaults.
        // BUGFIX: rack and IPv4 defaults must be appended too, otherwise the
        // index-encoded all_racks/all_ipv4_ips lists fall out of sync with
        // all_hosts when pending hosts are added.
        portsList.add(DEFAULT_PING_PORT);
        rackList.add(DEFAULT_RACK);
        ipV4List.add(DEFAULT_IPV4_ADDRESS);
      }
    }
    List<String> hostsList = new ArrayList<String>(hostsSet);
    // Fill hosts for services
    Map<String, SortedSet<Integer>> hostRolesInfo = new HashMap<String, SortedSet<Integer>>();
    for (Map.Entry<String, Service> serviceEntry : cluster.getServices().entrySet()) {
      Service service = serviceEntry.getValue();
      for (Map.Entry<String, ServiceComponent> serviceComponentEntry : service.getServiceComponents().entrySet()) {
        ServiceComponent serviceComponent = serviceComponentEntry.getValue();
        String componentName = serviceComponent.getName();
        String roleName = componentToClusterInfoKeyMap.get(componentName);
        if (null == roleName && !serviceComponent.isClientComponent()) {
          // synthesize a "<component>_hosts" key for unknown non-client components
          roleName = componentName.toLowerCase() + "_hosts";
        }
        String decomRoleName = decommissionedToClusterInfoKeyMap.get(componentName);
        if (roleName == null && decomRoleName == null) {
          continue;
        }
        for (String hostName : serviceComponent.getServiceComponentHosts().keySet()) {
          if (roleName != null) {
            SortedSet<Integer> hostsForComponentsHost = hostRolesInfo.get(roleName);
            if (hostsForComponentsHost == null) {
              hostsForComponentsHost = new TreeSet<Integer>();
              hostRolesInfo.put(roleName, hostsForComponentsHost);
            }
            int hostIndex = hostsList.indexOf(hostName);
            //Add index of host to current host role
            hostsForComponentsHost.add(hostIndex);
          }
          if (decomRoleName != null) {
            ServiceComponentHost scHost = serviceComponent.getServiceComponentHost(hostName);
            if (scHost.getComponentAdminState() == HostComponentAdminState.DECOMMISSIONED) {
              SortedSet<Integer> hostsForComponentsHost = hostRolesInfo.get(decomRoleName);
              if (hostsForComponentsHost == null) {
                hostsForComponentsHost = new TreeSet<Integer>();
                hostRolesInfo.put(decomRoleName, hostsForComponentsHost);
              }
              int hostIndex = hostsList.indexOf(hostName);
              //Add index of host to current host role
              hostsForComponentsHost.add(hostIndex);
            }
          }
        }
      }
    }
    // add components from topology manager
    for (Map.Entry<String, Collection<String>> entry : pendingHostComponents.entrySet()) {
      String hostname = entry.getKey();
      Collection<String> hostComponents = entry.getValue();
      for (String hostComponent : hostComponents) {
        String roleName = componentToClusterInfoKeyMap.get(hostComponent);
        if (roleName == null) {
          // BUGFIX: unknown components previously fell through and registered
          // a literal null key in hostRolesInfo; skip them instead.
          continue;
        }
        SortedSet<Integer> hostsForComponentsHost = hostRolesInfo.get(roleName);
        if (hostsForComponentsHost == null) {
          hostsForComponentsHost = new TreeSet<Integer>();
          hostRolesInfo.put(roleName, hostsForComponentsHost);
        }
        int hostIndex = hostsList.indexOf(hostname);
        if (hostIndex != -1) {
          if (! hostsForComponentsHost.contains(hostIndex)) {
            hostsForComponentsHost.add(hostIndex);
          }
        } else {
          //todo: I don't think that this can happen
          //todo: determine if it can and if so, handle properly
          //todo: if it 'cant' should probably enforce invariant
          throw new RuntimeException("Unable to get host index for host: " + hostname);
        }
      }
    }
    Map<String, Set<String>> clusterHostInfo = new HashMap<String, Set<String>>();
    for (Map.Entry<String, SortedSet<Integer>> entry : hostRolesInfo.entrySet()) {
      TreeSet<Integer> sortedSet = new TreeSet<Integer>(entry.getValue());
      Set<String> replacedRangesSet = replaceRanges(sortedSet);
      clusterHostInfo.put(entry.getKey(), replacedRangesSet);
    }
    clusterHostInfo.put(HOSTS_LIST, hostsSet);
    clusterHostInfo.put(PORTS, replaceMappedRanges(portsList));
    clusterHostInfo.put(IPV4_ADDRESSES, replaceMappedRanges(ipV4List));
    clusterHostInfo.put(RACKS, replaceMappedRanges(rackList));
    // Fill server host
    /*
     * Note: We don't replace server host name by an index (like we do
     * with component hostnames), because if ambari-agent is not installed
     * at ambari-server host, then allHosts map will not contain
     * ambari-server hostname.
     */
    TreeSet<String> serverHost = new TreeSet<String>();
    serverHost.add(getHostName());
    clusterHostInfo.put(AMBARI_SERVER_HOST, serverHost);
    return clusterHostInfo;
  }

  /**
   * Finds ranges in sorted set and replaces ranges by compact notation
   * <p/>
   * <p>For example, suppose <tt>set</tt> comprises<tt> [1, 2, 3, 4, 7]</tt>.
   * After invoking <tt>rangedSet = StageUtils.replaceRanges(set)</tt>
   * <tt>rangedSet</tt> will comprise
   * <tt>["1-4", "7"]</tt>..
   *
   * @param set the source set to be ranged
   */
  public static Set<String> replaceRanges(SortedSet<Integer> set) {
    if (set == null) {
      return null;
    }
    Set<String> rangedSet = new HashSet<String>();
    Integer prevElement = null;
    Integer startOfRange = set.first();
    for (Integer i : set) {
      // A gap greater than 1 terminates the current run.
      if (prevElement != null && (i - prevElement) > 1) {
        String rangeItem = getRangedItem(startOfRange, prevElement);
        rangedSet.add(rangeItem);
        startOfRange = i;
      }
      prevElement = i;
    }
    rangedSet.add(getRangedItem(startOfRange, prevElement));
    return rangedSet;
  }

  /**
   * Finds ranges in list and replaces ranges by compact notation
   * <p/>
   * <p>For example, suppose <tt>list</tt> comprises<tt> [1, 1, 2, 2, 1, 3]</tt>.
   * After invoking <tt>rangedMappedSet = StageUtils.replaceMappedRanges(list)</tt>
   * <tt>rangedMappedSet</tt> will comprise
   * <tt>["1:0-1,4", "2:2-3", "3:5"]</tt>..
   *
   * @param values the source list to be ranged
   */
  public static <T> Set<String> replaceMappedRanges(List<T> values) {
    // Group the list indexes by value, then range-encode each index set.
    Map<T, SortedSet<Integer>> convolutedValues = new HashMap<T, SortedSet<Integer>>();
    int valueIndex = 0;
    for (T value : values) {
      SortedSet<Integer> correspValues = convolutedValues.get(value);
      if (correspValues == null) {
        correspValues = new TreeSet<Integer>();
        convolutedValues.put(value, correspValues);
      }
      correspValues.add(valueIndex);
      valueIndex++;
    }
    Set<String> result = new HashSet<String>();
    for (Entry<T, SortedSet<Integer>> entry : convolutedValues.entrySet()) {
      Set<String> replacedRanges = replaceRanges(entry.getValue());
      result.add(entry.getKey() + ":" + Joiner.on(",").join(replacedRanges));
    }
    return result;
  }

  /** Formats one run as "n", "a,b" (adjacent pair) or "a-b" (longer run). */
  private static String getRangedItem(Integer startOfRange, Integer endOfRange) {
    String separator = (endOfRange - startOfRange) > 1 ? "-" : ",";
    String rangeItem = endOfRange.equals(startOfRange) ?
        endOfRange.toString() :
        startOfRange + separator + endOfRange;
    return rangeItem;
  }

  /** Returns the cached canonical hostname of the ambari-server process. */
  public static String getHostName() {
    return server_hostname;
  }
}
|
AMBARI-11011. all_hosts list is out of sync with all_ipv4_ips and all_racks when hosts are being added progressively
|
ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
|
AMBARI-11011. all_hosts list is out of sync with all_ipv4_ips and all_racks when hosts are being added progressively
|
|
Java
|
apache-2.0
|
bf1ad8f451246eb7f2c6787cf4edb5670dcf0b1e
| 0
|
Gameeso/openkit-android,Gameeso/openkit-android,OpenKit/openkit-android,nagyistoce/openkit-android
|
/**
* Copyright 2012 OpenKit
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.openkit;
import io.openkit.asynchttp.*;
import io.openkit.leaderboards.*;
import io.openkit.user.OKUserUtilities;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.json.*;
import android.content.Context;
import android.content.Intent;
import android.os.Parcel;
import android.os.Parcelable;
import android.util.Log;
public class OKLeaderboard implements Parcelable{
private String name;
private int OKAPP_id;
private int OKLeaderboard_id;
private LeaderboardSortType sortType;
private String icon_url;
private int playerCount;
//This isn't parcelable since it's only used in display
private OKLeaderboardTimeRange displayTimeRange = OKLeaderboardTimeRange.AllTime;
public static final String LEADERBOARD_KEY = "OKLeaderboard";
public static final String LEADERBOARD_ID_KEY = "OKLeaderboardID";
public static final int NUM_SCORES_PER_PAGE = 25;
private static final String DEFAULT_LEADERBOARD_LIST_TAG = "v1";
@Override
public void writeToParcel(Parcel out, int flags)
{
//private String name;
out.writeString(name);
//private int OKAPP_id;
out.writeInt(OKAPP_id);
//private int OKLeaderboard_id;
out.writeInt(OKLeaderboard_id);
//private LeaderboardSortType sortType;
out.writeString((sortType == null) ? "" : sortType.name());
//private String icon_url;
out.writeString(icon_url);
//private int playerCount;
out.writeInt(playerCount);
}
private OKLeaderboard(Parcel in)
{
//private String name;
name = in.readString();
//private int OKAPP_id;
OKAPP_id = in.readInt();
//private int OKLeaderboard_id;
OKLeaderboard_id = in.readInt();
//private LeaderboardSortType sortType;
try{
sortType = LeaderboardSortType.valueOf(in.readString());
}
catch (Exception e) {
sortType = null;
}
//private String icon_url;
icon_url = in.readString();
//private int playerCount;
playerCount = in.readInt();
}
/*
private static int boolToInt(boolean a)
{
return (a) ? 1 : 0;
}
private static boolean intToBool(int a)
{
return a == 1;
}
*/
/**
* Creates OKLeaderboard from JSON
*/
public OKLeaderboard(JSONObject leaderboardJSON)
{
super();
initFromJSON(leaderboardJSON);
}
public OKLeaderboard(int leaderboardID)
{
super();
this.OKLeaderboard_id = leaderboardID;
}
public String getName(){
return name;
}
public void setName(String aName)
{
this.name = aName;
}
public int getOKAPP_id()
{
return OKAPP_id;
}
public void setOKAPP_id(int aID)
{
this.OKAPP_id = aID;
}
public int getOKLeaderboard_id()
{
return OKLeaderboard_id;
}
public void setOKLeaderboard_id(int aID)
{
this.OKLeaderboard_id = aID;
}
public LeaderboardSortType getSortType()
{
return sortType;
}
public void setSortType(LeaderboardSortType aType)
{
this.sortType = aType;
}
public String getIconURL()
{
return icon_url;
}
public void setIconURL(String aURL)
{
this.icon_url = aURL;
}
public int getPlayerCount()
{
return playerCount;
}
public String getPlayerCountString()
{
return Integer.toString(playerCount);
}
public void setPlayerCount(int aCount)
{
this.playerCount = aCount;
}
public OKLeaderboardTimeRange getDisplayedTimeRange()
{
return displayTimeRange;
}
public void setDisplayedTimeRange(OKLeaderboardTimeRange range)
{
this.displayTimeRange = range;
}
private void initFromJSON(JSONObject leaderboardJSON)
{
try{
this.name = leaderboardJSON.optString("name");
this.OKLeaderboard_id = leaderboardJSON.getInt("id");
this.OKAPP_id = leaderboardJSON.optInt("app_id");
this.icon_url = leaderboardJSON.optString("icon_url");
this.playerCount = leaderboardJSON.optInt("player_count");
String sortTypeString = leaderboardJSON.optString("sort_type");
if(sortTypeString.equalsIgnoreCase("HighValue")){
this.sortType = LeaderboardSortType.HighValue;
}
else {
this.sortType = LeaderboardSortType.LowValue;
}
}
catch(JSONException e){
Log.e("OpenKit", "Error parsing JSON for leaderboard: " + e.toString());
}
}
/**
* Required for parcelable
* @return
*/
@Override
public int describeContents()
{
return 0;
}
public static final Parcelable.Creator<OKLeaderboard> CREATOR
= new Parcelable.Creator<OKLeaderboard>() {
@Override
public OKLeaderboard createFromParcel(Parcel in){
return new OKLeaderboard(in);
}
@Override
public OKLeaderboard[] newArray(int size) {
return new OKLeaderboard[size];
}
};
/**
* @return Get a score Comparator for this leaderboard based on leaderboard's sortType
*/
public Comparator<OKScore> getScoreComparator()
{
if(getSortType() == LeaderboardSortType.HighValue) {
return new Comparator<OKScore>() {
@Override
public int compare(OKScore s1, OKScore s2) {
return (s1.getScoreValue()>s2.getScoreValue() ? -1 : (s1.getScoreValue()==s2.getScoreValue() ? 0 : 1));
}
};
} else {
return new Comparator<OKScore>() {
@Override
public int compare(OKScore s1, OKScore s2) {
return (s1.getScoreValue()>s2.getScoreValue() ? 1 : (s1.getScoreValue()==s2.getScoreValue() ? 0 : -1));
}
};
}
}
public static Intent getLeaderboardIntent(Context ctx, int leaderboardID)
{
Intent leaderboardIntent = new Intent(ctx, OKScoresActivity.class);
leaderboardIntent.putExtra(OKLeaderboard.LEADERBOARD_ID_KEY, leaderboardID);
return leaderboardIntent;
}
/** Gets info for one leaderboard **/
public static void getLeaderboard(int leaderboardID, final OKLeaderboardsListResponseHandler responseHandler)
{
OKLog.d("Getting leaderboard ID:" + leaderboardID);
RequestParams params = new RequestParams();
String requestPath = "leaderboards/" + leaderboardID;
OKHTTPClient.get(requestPath, params, new OKJsonHttpResponseHandler() {
@Override
public void onSuccess(JSONObject object) {
OKLeaderboard leaderboard = new OKLeaderboard(object);
List<OKLeaderboard> leaderboardList = new ArrayList<OKLeaderboard>(1);
leaderboardList.add(leaderboard);
responseHandler.onSuccess(leaderboardList, leaderboard.playerCount);
}
@Override
public void onSuccess(JSONArray array) {
responseHandler.onFailure(new IllegalArgumentException("Got back an array of leaderboards when expecting a single one"), null);
}
@Override
public void onFailure(Throwable error, String content) {
responseHandler.onFailure(error, null);
}
@Override
public void onFailure(Throwable e, JSONArray errorResponse) {
responseHandler.onFailure(e, null);
}
@Override
public void onFailure(Throwable e, JSONObject errorResponse) {
responseHandler.onFailure(e, errorResponse);
}
});
}
/**
* Gets a list of leaderboards for the app
* @param responseHandler Response handler interface with callbacks to be overridden, typically anonymously
*/
public static void getLeaderboards(OKLeaderboardsListResponseHandler responseHandler)
{
// By default, if a leaderboard list tag is not defined through OKManger, we
// load the leaderboards with the tag = 'v1'. In the OK Dashboard, new leaderboards
// have a default tag of v1. This sets up future proofing so a developer can issue
// a set of leaderboards in the first version of their game, and then change the leaderboards
// in a future version of their game
if(OKManager.INSTANCE.getLeaderboardListTag() == null) {
getLeaderboards(DEFAULT_LEADERBOARD_LIST_TAG, responseHandler);
} else {
getLeaderboards(OKManager.INSTANCE.getLeaderboardListTag(), responseHandler);
}
}
private static void getLeaderboards(String leaderboardListTag, OKLeaderboardsListResponseHandler responseHandler)
{
RequestParams params = new RequestParams();
params.put("tag", leaderboardListTag);
OKLog.d("Getting list of leaderboards for tag: " + leaderboardListTag);
final OKLeaderboardsListResponseHandler finalResponseHandler = responseHandler;
OKHTTPClient.get("leaderboards", params, new OKJsonHttpResponseHandler() {
@Override
public void onSuccess(JSONObject object) {
// This method should never be called, because the server always returns an array.
// If there are no leaderboards defined, the server returns an empty array.
OKLog.d("Parsed JSON from server with object");
finalResponseHandler.onFailure(new IllegalArgumentException("Server returned a single JSON object when expecting an Array"), object);
}
@Override
public void onSuccess(JSONArray array) {
//OKLog.d("Received leaderboards JSON array from server: " + array.toString());
int maxPlayerCount = 0;
int numLeaderboards = array.length();
List<OKLeaderboard> leaderboards = new ArrayList<OKLeaderboard>(numLeaderboards);
for(int x = 0; x < numLeaderboards; x++)
{
try {
JSONObject leaderBoard = array.getJSONObject(x);
leaderboards.add(new OKLeaderboard(leaderBoard));
if(leaderboards.get(x).getPlayerCount() > maxPlayerCount) {
maxPlayerCount = leaderboards.get(x).getPlayerCount();
}
} catch (JSONException e) {
OKLog.d("Error parsing list of leaderboards JSON: " + e.toString());
}
}
finalResponseHandler.onSuccess(leaderboards, maxPlayerCount);
}
@Override
public void onFailure(Throwable error, String content) {
OKLog.d("Failure to connect");
finalResponseHandler.onFailure(error, null);
}
@Override
public void onFailure(Throwable e, JSONArray errorResponse) {
OKLog.d("Failure from server with object");
finalResponseHandler.onFailure(e, null);
}
@Override
public void onFailure(Throwable e, JSONObject errorResponse) {
OKLog.d("Failure from server with object");
finalResponseHandler.onFailure(e, errorResponse);
}
});
}
public List<OKScore> getPlayerTopScoreFromCache()
{
List<OKScore> topScoreList = new ArrayList<OKScore>();
List<OKScore> localScores = OKManager.INSTANCE.getSharedCache().getCachedScoresForLeaderboardID(this.OKLeaderboard_id, false);
if(localScores.size() > 0) {
Comparator<OKScore> comparator = getScoreComparator();
Collections.sort(localScores, comparator);
OKScore topScore = localScores.get(0);
topScore.setOKUser(OKUserUtilities.getGuestUser());
topScoreList.add(topScore);
}
return topScoreList;
}
/**
* Gets the current user's top score for this leaderboard. If the user is not logged in, calls onFailure and returns.
* @param responseHandler Response handler for the request.
*/
public void getUsersTopScoreForLeaderboard(final OKScoresResponseHandler responseHandler)
{
OKUser currentUser = OpenKit.getCurrentUser();
if(currentUser == null) {
responseHandler.onSuccess(getPlayerTopScoreFromCache());
return;
}
RequestParams params = new RequestParams();
params.put("leaderboard_id", Integer.toString(this.OKLeaderboard_id));
params.put("user_id", Integer.toString(currentUser.getOKUserID()));
params.put("leaderboard_range", getParamForLeaderboardDisplayRange());
OKHTTPClient.get("best_scores/user", params, new OKJsonHttpResponseHandler() {
@Override
public void onSuccess(JSONObject object) {
OKScore topScore = new OKScore(object);
List<OKScore> list = new ArrayList<OKScore>();
list.add(topScore);
responseHandler.onSuccess(list);
}
//We expect a single OKScore object from the API, not an array
@Override
public void onSuccess(JSONArray array) {
responseHandler.onFailure(new Throwable("Received an array when getting users top score. Expected a single object!"), null);
}
@Override
public void onFailure(Throwable error, String content) {
responseHandler.onFailure(error, null);
}
@Override
public void onFailure(Throwable e, JSONArray errorResponse) {
responseHandler.onFailure(e, null);
}
@Override
public void onFailure(Throwable e, JSONObject errorResponse) {
responseHandler.onFailure(e, errorResponse);
}
});
}
/**
* Returns the correct parameter to use with the server REST API for the leaderboard time range
* @return
*/
private String getParamForLeaderboardDisplayRange()
{
switch (displayTimeRange) {
case OneDay:
return "today";
case OneWeek:
return "this_week";
default:
return "all_time";
}
}
/**
* Get scores for the given leaderboard (only the first page of scores)
* @param responseHandler ResponseHandler called on success/fail
*/
public void getLeaderboardScores(OKScoresResponseHandler responseHandler)
{
getLeaderboardScores(1, responseHandler);
}
private List<OKScore> parseScoresJSONArray(JSONArray arrayOfScores)
{
int numScores = arrayOfScores.length();
List<OKScore> scoresList = new ArrayList<OKScore>(numScores);
for(int x = 0; x < numScores; x++)
{
JSONObject score;
try {
score = arrayOfScores.getJSONObject(x);
scoresList.add(new OKScore(score));
} catch (JSONException e) {
OKLog.d("Error parsing list of scores for a leaderboard: " + e.toString());
}
}
return scoresList;
}
/**
* Get scores for the given leaderboard and page number
* @param pageNumber Page number in leaderboard scores pagination
* @param responseHandler ResponseHandler called on success/fail
*/
public void getLeaderboardScores(int pageNumber, OKScoresResponseHandler responseHandler)
{
RequestParams params = new RequestParams();
params.put("leaderboard_id", Integer.toString(this.OKLeaderboard_id));
params.put("leaderboard_range", getParamForLeaderboardDisplayRange());
params.put("page_num", Integer.toString(pageNumber));
params.put("num_per_page",Integer.toString(NUM_SCORES_PER_PAGE));
OKLog.d("Getting leaderboard scores for range: " + getParamForLeaderboardDisplayRange());
final OKScoresResponseHandler finalResponseHandler = responseHandler;
OKHTTPClient.get("best_scores", params, new OKJsonHttpResponseHandler() {
@Override
public void onSuccess(JSONObject object) {
//This should never be called because the server
//always responds with an array of scores, even if it's empty
// or only has 1 score
finalResponseHandler.onFailure(new IllegalArgumentException("Server returned only one JSON object instead of an array"), null);
}
@Override
public void onSuccess(JSONArray array)
{
List<OKScore> scoresList = parseScoresJSONArray(array);
finalResponseHandler.onSuccess(scoresList);
}
@Override
public void onFailure(Throwable error, String content) {
finalResponseHandler.onFailure(error, null);
}
@Override
public void onFailure(Throwable e, JSONArray errorResponse) {
finalResponseHandler.onFailure(e, null);
}
@Override
public void onFailure(Throwable e, JSONObject errorResponse) {
finalResponseHandler.onFailure(e, errorResponse);
}
});
}
public void getFacebookFriendsScoresWithFacebookFriends(ArrayList<Long> friendsArray, final OKScoresResponseHandler responseHandler)
{
JSONObject requestParams = new JSONObject();
try {
requestParams.put("leaderboard_id", Integer.toString(this.OKLeaderboard_id));
JSONArray array = new JSONArray(friendsArray);
requestParams.put("fb_friends", array);
} catch (JSONException e) {
OKLog.v("Error formatting JSON params for getting social scores from OpenKit");
responseHandler.onFailure(e, null);
e.printStackTrace();
}
OKLog.d("Getting fb friends scores");
OKHTTPClient.postJSON("/best_scores/social", requestParams, new OKJsonHttpResponseHandler() {
@Override
public void onSuccess(JSONObject object) {
//This should never be called because the server
//always responds with an array of scores, even if it's empty
// or only has 1 score
responseHandler.onFailure(new IllegalArgumentException("Server returned only one JSON object instead of an array"), null);
}
@Override
public void onSuccess(JSONArray array) {
List<OKScore> scoresList = parseScoresJSONArray(array);
responseHandler.onSuccess(scoresList);
}
@Override
public void onFailure(Throwable error, String content) {
responseHandler.onFailure(error, null);
}
@Override
public void onFailure(Throwable e, JSONArray errorResponse) {
responseHandler.onFailure(e, null);
}
@Override
public void onFailure(Throwable e, JSONObject errorResponse) {
responseHandler.onFailure(e, errorResponse);
}
});
}
public void getScoresFromGPG()
{
//TODO
}
public void getSocialScoresFromGPG()
{
//TODO
}
public void getUsersTopScoreFromGPG()
{
//TODO
}
public void showGlobalScoresFromGPG()
{
//TODO
}
}
|
OpenKitSDK/src/io/openkit/OKLeaderboard.java
|
/**
* Copyright 2012 OpenKit
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.openkit;
import io.openkit.asynchttp.*;
import io.openkit.leaderboards.*;
import io.openkit.user.OKUserUtilities;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.json.*;
import android.content.Context;
import android.content.Intent;
import android.os.Parcel;
import android.os.Parcelable;
import android.util.Log;
public class OKLeaderboard implements Parcelable{
private String name;
private int OKAPP_id;
private int OKLeaderboard_id;
private LeaderboardSortType sortType;
private String icon_url;
private int playerCount;
//This isn't parcelable since it's only used in display
private OKLeaderboardTimeRange displayTimeRange = OKLeaderboardTimeRange.AllTime;
public static final String LEADERBOARD_KEY = "OKLeaderboard";
public static final String LEADERBOARD_ID_KEY = "OKLeaderboardID";
public static final int NUM_SCORES_PER_PAGE = 25;
private static final String DEFAULT_LEADERBOARD_LIST_TAG = "v1";
@Override
public void writeToParcel(Parcel out, int flags)
{
//private String name;
out.writeString(name);
//private int OKAPP_id;
out.writeInt(OKAPP_id);
//private int OKLeaderboard_id;
out.writeInt(OKLeaderboard_id);
//private LeaderboardSortType sortType;
out.writeString((sortType == null) ? "" : sortType.name());
//private String icon_url;
out.writeString(icon_url);
//private int playerCount;
out.writeInt(playerCount);
}
private OKLeaderboard(Parcel in)
{
//private String name;
name = in.readString();
//private int OKAPP_id;
OKAPP_id = in.readInt();
//private int OKLeaderboard_id;
OKLeaderboard_id = in.readInt();
//private LeaderboardSortType sortType;
try{
sortType = LeaderboardSortType.valueOf(in.readString());
}
catch (Exception e) {
sortType = null;
}
//private String icon_url;
icon_url = in.readString();
//private int playerCount;
playerCount = in.readInt();
}
/*
private static int boolToInt(boolean a)
{
return (a) ? 1 : 0;
}
private static boolean intToBool(int a)
{
return a == 1;
}
*/
/**
* Creates OKLeaderboard from JSON
*/
public OKLeaderboard(JSONObject leaderboardJSON)
{
super();
initFromJSON(leaderboardJSON);
}
public String getName(){
return name;
}
public void setName(String aName)
{
this.name = aName;
}
public int getOKAPP_id()
{
return OKAPP_id;
}
public void setOKAPP_id(int aID)
{
this.OKAPP_id = aID;
}
public int getOKLeaderboard_id()
{
return OKLeaderboard_id;
}
public void setOKLeaderboard_id(int aID)
{
this.OKLeaderboard_id = aID;
}
public LeaderboardSortType getSortType()
{
return sortType;
}
public void setSortType(LeaderboardSortType aType)
{
this.sortType = aType;
}
public String getIconURL()
{
return icon_url;
}
public void setIconURL(String aURL)
{
this.icon_url = aURL;
}
public int getPlayerCount()
{
return playerCount;
}
public String getPlayerCountString()
{
return Integer.toString(playerCount);
}
public void setPlayerCount(int aCount)
{
this.playerCount = aCount;
}
public OKLeaderboardTimeRange getDisplayedTimeRange()
{
return displayTimeRange;
}
public void setDisplayedTimeRange(OKLeaderboardTimeRange range)
{
this.displayTimeRange = range;
}
private void initFromJSON(JSONObject leaderboardJSON)
{
try{
this.name = leaderboardJSON.optString("name");
this.OKLeaderboard_id = leaderboardJSON.getInt("id");
this.OKAPP_id = leaderboardJSON.optInt("app_id");
this.icon_url = leaderboardJSON.optString("icon_url");
this.playerCount = leaderboardJSON.optInt("player_count");
String sortTypeString = leaderboardJSON.optString("sort_type");
if(sortTypeString.equalsIgnoreCase("HighValue")){
this.sortType = LeaderboardSortType.HighValue;
}
else {
this.sortType = LeaderboardSortType.LowValue;
}
}
catch(JSONException e){
Log.e("OpenKit", "Error parsing JSON for leaderboard: " + e.toString());
}
}
/**
* Required for parcelable
* @return
*/
@Override
public int describeContents()
{
return 0;
}
public static final Parcelable.Creator<OKLeaderboard> CREATOR
= new Parcelable.Creator<OKLeaderboard>() {
@Override
public OKLeaderboard createFromParcel(Parcel in){
return new OKLeaderboard(in);
}
@Override
public OKLeaderboard[] newArray(int size) {
return new OKLeaderboard[size];
}
};
/**
* @return Get a score Comparator for this leaderboard based on leaderboard's sortType
*/
public Comparator<OKScore> getScoreComparator()
{
if(getSortType() == LeaderboardSortType.HighValue) {
return new Comparator<OKScore>() {
@Override
public int compare(OKScore s1, OKScore s2) {
return (s1.getScoreValue()>s2.getScoreValue() ? -1 : (s1.getScoreValue()==s2.getScoreValue() ? 0 : 1));
}
};
} else {
return new Comparator<OKScore>() {
@Override
public int compare(OKScore s1, OKScore s2) {
return (s1.getScoreValue()>s2.getScoreValue() ? 1 : (s1.getScoreValue()==s2.getScoreValue() ? 0 : -1));
}
};
}
}
public static Intent getLeaderboardIntent(Context ctx, int leaderboardID)
{
Intent leaderboardIntent = new Intent(ctx, OKScoresActivity.class);
leaderboardIntent.putExtra(OKLeaderboard.LEADERBOARD_ID_KEY, leaderboardID);
return leaderboardIntent;
}
/** Gets info for one leaderboard **/
public static void getLeaderboard(int leaderboardID, final OKLeaderboardsListResponseHandler responseHandler)
{
OKLog.d("Getting leaderboard ID:" + leaderboardID);
RequestParams params = new RequestParams();
String requestPath = "leaderboards/" + leaderboardID;
OKHTTPClient.get(requestPath, params, new OKJsonHttpResponseHandler() {
@Override
public void onSuccess(JSONObject object) {
OKLeaderboard leaderboard = new OKLeaderboard(object);
List<OKLeaderboard> leaderboardList = new ArrayList<OKLeaderboard>(1);
leaderboardList.add(leaderboard);
responseHandler.onSuccess(leaderboardList, leaderboard.playerCount);
}
@Override
public void onSuccess(JSONArray array) {
responseHandler.onFailure(new IllegalArgumentException("Got back an array of leaderboards when expecting a single one"), null);
}
@Override
public void onFailure(Throwable error, String content) {
responseHandler.onFailure(error, null);
}
@Override
public void onFailure(Throwable e, JSONArray errorResponse) {
responseHandler.onFailure(e, null);
}
@Override
public void onFailure(Throwable e, JSONObject errorResponse) {
responseHandler.onFailure(e, errorResponse);
}
});
}
/**
* Gets a list of leaderboards for the app
* @param responseHandler Response handler interface with callbacks to be overridden, typically anonymously
*/
public static void getLeaderboards(OKLeaderboardsListResponseHandler responseHandler)
{
// By default, if a leaderboard list tag is not defined through OKManger, we
// load the leaderboards with the tag = 'v1'. In the OK Dashboard, new leaderboards
// have a default tag of v1. This sets up future proofing so a developer can issue
// a set of leaderboards in the first version of their game, and then change the leaderboards
// in a future version of their game
if(OKManager.INSTANCE.getLeaderboardListTag() == null) {
getLeaderboards(DEFAULT_LEADERBOARD_LIST_TAG, responseHandler);
} else {
getLeaderboards(OKManager.INSTANCE.getLeaderboardListTag(), responseHandler);
}
}
private static void getLeaderboards(String leaderboardListTag, OKLeaderboardsListResponseHandler responseHandler)
{
RequestParams params = new RequestParams();
params.put("tag", leaderboardListTag);
OKLog.d("Getting list of leaderboards for tag: " + leaderboardListTag);
final OKLeaderboardsListResponseHandler finalResponseHandler = responseHandler;
OKHTTPClient.get("leaderboards", params, new OKJsonHttpResponseHandler() {
@Override
public void onSuccess(JSONObject object) {
// This method should never be called, because the server always returns an array.
// If there are no leaderboards defined, the server returns an empty array.
OKLog.d("Parsed JSON from server with object");
finalResponseHandler.onFailure(new IllegalArgumentException("Server returned a single JSON object when expecting an Array"), object);
}
@Override
public void onSuccess(JSONArray array) {
//OKLog.d("Received leaderboards JSON array from server: " + array.toString());
int maxPlayerCount = 0;
int numLeaderboards = array.length();
List<OKLeaderboard> leaderboards = new ArrayList<OKLeaderboard>(numLeaderboards);
for(int x = 0; x < numLeaderboards; x++)
{
try {
JSONObject leaderBoard = array.getJSONObject(x);
leaderboards.add(new OKLeaderboard(leaderBoard));
if(leaderboards.get(x).getPlayerCount() > maxPlayerCount) {
maxPlayerCount = leaderboards.get(x).getPlayerCount();
}
} catch (JSONException e) {
OKLog.d("Error parsing list of leaderboards JSON: " + e.toString());
}
}
finalResponseHandler.onSuccess(leaderboards, maxPlayerCount);
}
@Override
public void onFailure(Throwable error, String content) {
OKLog.d("Failure to connect");
finalResponseHandler.onFailure(error, null);
}
@Override
public void onFailure(Throwable e, JSONArray errorResponse) {
OKLog.d("Failure from server with object");
finalResponseHandler.onFailure(e, null);
}
@Override
public void onFailure(Throwable e, JSONObject errorResponse) {
OKLog.d("Failure from server with object");
finalResponseHandler.onFailure(e, errorResponse);
}
});
}
public List<OKScore> getPlayerTopScoreFromCache()
{
List<OKScore> topScoreList = new ArrayList<OKScore>();
List<OKScore> localScores = OKManager.INSTANCE.getSharedCache().getCachedScoresForLeaderboardID(this.OKLeaderboard_id, false);
if(localScores.size() > 0) {
Comparator<OKScore> comparator = getScoreComparator();
Collections.sort(localScores, comparator);
OKScore topScore = localScores.get(0);
topScore.setOKUser(OKUserUtilities.getGuestUser());
topScoreList.add(topScore);
}
return topScoreList;
}
/**
* Gets the current user's top score for this leaderboard. If the user is not logged in, calls onFailure and returns.
* @param responseHandler Response handler for the request.
*/
public void getUsersTopScoreForLeaderboard(final OKScoresResponseHandler responseHandler)
{
OKUser currentUser = OpenKit.getCurrentUser();
if(currentUser == null) {
responseHandler.onSuccess(getPlayerTopScoreFromCache());
return;
}
RequestParams params = new RequestParams();
params.put("leaderboard_id", Integer.toString(this.OKLeaderboard_id));
params.put("user_id", Integer.toString(currentUser.getOKUserID()));
params.put("leaderboard_range", getParamForLeaderboardDisplayRange());
OKHTTPClient.get("best_scores/user", params, new OKJsonHttpResponseHandler() {
@Override
public void onSuccess(JSONObject object) {
OKScore topScore = new OKScore(object);
List<OKScore> list = new ArrayList<OKScore>();
list.add(topScore);
responseHandler.onSuccess(list);
}
//We expect a single OKScore object from the API, not an array
@Override
public void onSuccess(JSONArray array) {
responseHandler.onFailure(new Throwable("Received an array when getting users top score. Expected a single object!"), null);
}
@Override
public void onFailure(Throwable error, String content) {
responseHandler.onFailure(error, null);
}
@Override
public void onFailure(Throwable e, JSONArray errorResponse) {
responseHandler.onFailure(e, null);
}
@Override
public void onFailure(Throwable e, JSONObject errorResponse) {
responseHandler.onFailure(e, errorResponse);
}
});
}
/**
* Returns the correct parameter to use with the server REST API for the leaderboard time range
* @return
*/
private String getParamForLeaderboardDisplayRange()
{
switch (displayTimeRange) {
case OneDay:
return "today";
case OneWeek:
return "this_week";
default:
return "all_time";
}
}
/**
* Get scores for the given leaderboard (only the first page of scores)
* @param responseHandler ResponseHandler called on success/fail
*/
public void getLeaderboardScores(OKScoresResponseHandler responseHandler)
{
getLeaderboardScores(1, responseHandler);
}
private List<OKScore> parseScoresJSONArray(JSONArray arrayOfScores)
{
int numScores = arrayOfScores.length();
List<OKScore> scoresList = new ArrayList<OKScore>(numScores);
for(int x = 0; x < numScores; x++)
{
JSONObject score;
try {
score = arrayOfScores.getJSONObject(x);
scoresList.add(new OKScore(score));
} catch (JSONException e) {
OKLog.d("Error parsing list of scores for a leaderboard: " + e.toString());
}
}
return scoresList;
}
/**
* Get scores for the given leaderboard and page number
* @param pageNumber Page number in leaderboard scores pagination
* @param responseHandler ResponseHandler called on success/fail
*/
public void getLeaderboardScores(int pageNumber, OKScoresResponseHandler responseHandler)
{
RequestParams params = new RequestParams();
params.put("leaderboard_id", Integer.toString(this.OKLeaderboard_id));
params.put("leaderboard_range", getParamForLeaderboardDisplayRange());
params.put("page_num", Integer.toString(pageNumber));
params.put("num_per_page",Integer.toString(NUM_SCORES_PER_PAGE));
OKLog.d("Getting leaderboard scores for range: " + getParamForLeaderboardDisplayRange());
final OKScoresResponseHandler finalResponseHandler = responseHandler;
OKHTTPClient.get("best_scores", params, new OKJsonHttpResponseHandler() {
@Override
public void onSuccess(JSONObject object) {
//This should never be called because the server
//always responds with an array of scores, even if it's empty
// or only has 1 score
finalResponseHandler.onFailure(new IllegalArgumentException("Server returned only one JSON object instead of an array"), null);
}
@Override
public void onSuccess(JSONArray array)
{
List<OKScore> scoresList = parseScoresJSONArray(array);
finalResponseHandler.onSuccess(scoresList);
}
@Override
public void onFailure(Throwable error, String content) {
finalResponseHandler.onFailure(error, null);
}
@Override
public void onFailure(Throwable e, JSONArray errorResponse) {
finalResponseHandler.onFailure(e, null);
}
@Override
public void onFailure(Throwable e, JSONObject errorResponse) {
finalResponseHandler.onFailure(e, errorResponse);
}
});
}
/**
 * Retrieves this leaderboard's scores for the given list of Facebook friend
 * IDs by POSTing to the OpenKit "/best_scores/social" endpoint.
 *
 * @param friendsArray    Facebook user IDs of the current user's friends
 * @param responseHandler callback invoked with the parsed score list on
 *                        success, or with the error on failure
 */
public void getFacebookFriendsScoresWithFacebookFriends(ArrayList<Long> friendsArray, final OKScoresResponseHandler responseHandler)
{
    JSONObject requestParams = new JSONObject();

    try {
        requestParams.put("leaderboard_id", Integer.toString(this.OKLeaderboard_id));
        JSONArray array = new JSONArray(friendsArray);
        requestParams.put("fb_friends", array);
    } catch (JSONException e) {
        OKLog.v("Error formatting JSON params for getting social scores from OpenKit");
        responseHandler.onFailure(e, null);
        e.printStackTrace();
        // FIX: stop once the caller has been notified of the failure.
        // Previously execution fell through and still issued the HTTP
        // request with incomplete parameters, invoking the handler twice.
        return;
    }

    OKLog.d("Getting fb friends scores");

    OKHTTPClient.postJSON("/best_scores/social", requestParams, new OKJsonHttpResponseHandler() {
        @Override
        public void onSuccess(JSONObject object) {
            // The server always responds with an array of scores (possibly
            // empty or of length 1), so a bare JSON object is a protocol
            // violation and is reported as a failure.
            responseHandler.onFailure(new IllegalArgumentException("Server returned only one JSON object instead of an array"), null);
        }

        @Override
        public void onSuccess(JSONArray array) {
            List<OKScore> scoresList = parseScoresJSONArray(array);
            responseHandler.onSuccess(scoresList);
        }

        @Override
        public void onFailure(Throwable error, String content) {
            responseHandler.onFailure(error, null);
        }

        @Override
        public void onFailure(Throwable e, JSONArray errorResponse) {
            responseHandler.onFailure(e, null);
        }

        @Override
        public void onFailure(Throwable e, JSONObject errorResponse) {
            responseHandler.onFailure(e, errorResponse);
        }
    });
}
/** Not yet implemented: fetch this leaderboard's scores from Google Play Games. */
public void getScoresFromGPG()
{
//TODO
}
/** Not yet implemented: fetch friends' (social) scores from Google Play Games. */
public void getSocialScoresFromGPG()
{
//TODO
}
/** Not yet implemented: fetch the current user's top score from Google Play Games. */
public void getUsersTopScoreFromGPG()
{
//TODO
}
/** Not yet implemented: display the global scores UI provided by Google Play Games. */
public void showGlobalScoresFromGPG()
{
//TODO
}
}
|
add initializer on OKLeaderboard to init with ID
|
OpenKitSDK/src/io/openkit/OKLeaderboard.java
|
add initializer on OKLeaderboard to init with ID
|
|
Java
|
apache-2.0
|
87cf4c496ca686501f58ee6ff95d7be3b7812719
| 0
|
esoco/esoco-business
|
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// This file is a part of the 'esoco-business' project.
// Copyright 2016 Elmar Sonnenschein, esoco GmbH, Flensburg, Germany
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
package de.esoco.data.element;
import de.esoco.lib.model.DataSet;
import de.esoco.lib.property.PropertyName;
/********************************************************************
* A data element that stores a {@link DataSet} for a chart.
*
* @author eso
*/
public class DataSetDataElement extends DataElement<DataSet<?>>
{
	//~ Enums ------------------------------------------------------------------

	/********************************************************************
	 * Redefinition of chart types to make them available on the server side.
	 */
	public enum ChartType { AREA, BAR, COLUMN, GEO_MAP, LINE, PIE, GAUGE }

	/********************************************************************
	 * Redefinition of legend positions to make them available on the server
	 * side.
	 */
	public enum LegendPosition { TOP, BOTTOM, LEFT, RIGHT, NONE }

	//~ Static fields/initializers ---------------------------------------------

	private static final long serialVersionUID = 1L;

	/**
	 * Enum property: the type of the chart to render (for possible values see
	 * {@link ChartType})
	 */
	public static final PropertyName<ChartType> CHART_TYPE =
		PropertyName.newEnumName("CHART_TYPE", ChartType.class);

	/**
	 * Enum property: the legend position of the chart (for possible values see
	 * {@link LegendPosition})
	 */
	public static final PropertyName<LegendPosition> CHART_LEGEND_POSITION =
		PropertyName.newEnumName("CHART_LEGEND_POSITION", LegendPosition.class);

	/** String property: the chart background color */
	public static final PropertyName<String> CHART_BACKGROUND =
		PropertyName.newStringName("CHART_BACKGROUND");

	/** Boolean property: display chart in 3D */
	public static final PropertyName<Boolean> CHART_3D =
		PropertyName.newBooleanName("CHART_3D");

	//~ Instance fields --------------------------------------------------------

	// The wrapped chart data; replaced wholesale by updateValue().
	private DataSet<?> rDataSet;

	//~ Constructors -----------------------------------------------------------

	/***************************************
	 * Creates a new instance.
	 *
	 * @param sName    The name of the data element
	 * @param rDataSet The data set for the chart
	 */
	public DataSetDataElement(String sName, DataSet<?> rDataSet)
	{
		// DISPLAY_FLAGS is inherited from DataElement — presumably marks
		// this as a display-only element; confirm in DataElement.
		super(sName, null, DISPLAY_FLAGS);
		this.rDataSet = rDataSet;
	}

	/***************************************
	 * Creates a new instance with certain display properties set.
	 *
	 * @param sName            The name of the data element
	 * @param aDataSet         The chart data
	 * @param eChartType       The chart type
	 * @param eLegendPosition  The legend position
	 * @param sBackgroundColor The chart background color
	 * @param b3D              TRUE for a 3D chart
	 */
	public DataSetDataElement(String sName,
	DataSet<?> aDataSet,
	ChartType eChartType,
	LegendPosition eLegendPosition,
	String sBackgroundColor,
	boolean b3D)
	{
		this(sName, aDataSet);
		setProperty(CHART_BACKGROUND, sBackgroundColor);
		setProperty(CHART_LEGEND_POSITION, eLegendPosition);
		setProperty(CHART_TYPE, eChartType);
		if (b3D)
		{
			setFlag(CHART_3D);
		}
	}

	/***************************************
	 * Default constructor for GWT serialization.
	 */
	DataSetDataElement()
	{
	}

	//~ Static methods ---------------------------------------------------------

	/***************************************
	 * This method should be invoked to initialize the property name constants
	 * for de-serialization.
	 */
	public static void init()
	{
	}

	//~ Methods ----------------------------------------------------------------

	/***************************************
	 * @see DataElement#getValue()
	 */
	@Override
	public DataSet<?> getValue()
	{
		return rDataSet;
	}

	/***************************************
	 * @see DataElement#updateValue(Object)
	 */
	@Override
	protected void updateValue(DataSet<?> rNewDataSet)
	{
		rDataSet = rNewDataSet;
	}
}
|
src/main/java/de/esoco/data/element/DataSetDataElement.java
|
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// This file is a part of the 'esoco-business' project.
// Copyright 2016 Elmar Sonnenschein, esoco GmbH, Flensburg, Germany
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
package de.esoco.data.element;
import de.esoco.lib.model.DataSet;
import de.esoco.lib.property.PropertyName;
/********************************************************************
* A data element that stores a {@link DataSet} for a chart.
*
* @author eso
*/
public class DataSetDataElement extends DataElement<DataSet<?>>
{
	//~ Enums ------------------------------------------------------------------

	/********************************************************************
	 * Redefinition of chart types to make them available on the server side.
	 */
	public enum ChartType { AREA, BAR, COLUMN, GEO_MAP, LINE, PIE, NETWORK }

	/********************************************************************
	 * Redefinition of legend positions to make them available on the server
	 * side.
	 */
	public enum LegendPosition { TOP, BOTTOM, LEFT, RIGHT, NONE }

	//~ Static fields/initializers ---------------------------------------------

	private static final long serialVersionUID = 1L;

	/**
	 * Enum property: the type of the chart to render (for possible values see
	 * {@link ChartType})
	 */
	public static final PropertyName<ChartType> CHART_TYPE =
		PropertyName.newEnumName("CHART_TYPE", ChartType.class);

	/**
	 * Enum property: the legend position of the chart (for possible values see
	 * {@link LegendPosition})
	 */
	public static final PropertyName<LegendPosition> CHART_LEGEND_POSITION =
		PropertyName.newEnumName("CHART_LEGEND_POSITION", LegendPosition.class);

	/** String property: the chart background color */
	public static final PropertyName<String> CHART_BACKGROUND =
		PropertyName.newStringName("CHART_BACKGROUND");

	/** Boolean property: display chart in 3D */
	public static final PropertyName<Boolean> CHART_3D =
		PropertyName.newBooleanName("CHART_3D");

	//~ Instance fields --------------------------------------------------------

	// The wrapped chart data; replaced wholesale by updateValue().
	private DataSet<?> rDataSet;

	//~ Constructors -----------------------------------------------------------

	/***************************************
	 * Creates a new instance.
	 *
	 * @param sName    The name of the data element
	 * @param rDataSet The data set for the chart
	 */
	public DataSetDataElement(String sName, DataSet<?> rDataSet)
	{
		super(sName, null, DISPLAY_FLAGS);
		this.rDataSet = rDataSet;
	}

	/***************************************
	 * Creates a new instance with certain display properties set.
	 *
	 * @param sName            The name of the data element
	 * @param aDataSet         The chart data
	 * @param eChartType       The chart type
	 * @param eLegendPosition  The legend position
	 * @param sBackgroundColor The chart background color
	 * @param b3D              TRUE for a 3D chart
	 */
	public DataSetDataElement(String sName,
	DataSet<?> aDataSet,
	ChartType eChartType,
	LegendPosition eLegendPosition,
	String sBackgroundColor,
	boolean b3D)
	{
		this(sName, aDataSet);
		setProperty(CHART_BACKGROUND, sBackgroundColor);
		setProperty(CHART_LEGEND_POSITION, eLegendPosition);
		setProperty(CHART_TYPE, eChartType);
		if (b3D)
		{
			setFlag(CHART_3D);
		}
	}

	/***************************************
	 * Default constructor for GWT serialization.
	 */
	DataSetDataElement()
	{
	}

	//~ Static methods ---------------------------------------------------------

	/***************************************
	 * This method should be invoked to initialize the property name constants
	 * for de-serialization.
	 */
	public static void init()
	{
	}

	//~ Methods ----------------------------------------------------------------

	/***************************************
	 * @see DataElement#getValue()
	 */
	@Override
	public DataSet<?> getValue()
	{
		return rDataSet;
	}

	/***************************************
	 * @see DataElement#updateValue(Object)
	 */
	@Override
	protected void updateValue(DataSet<?> rNewDataSet)
	{
		rDataSet = rNewDataSet;
	}
}
|
SDACK-15: esoco-charts: adapt project to use gwt-charts
|
src/main/java/de/esoco/data/element/DataSetDataElement.java
|
SDACK-15: esoco-charts: adapt project to use gwt-charts
|
|
Java
|
apache-2.0
|
1752b9629fc4151ef8b11c18b909adfed0c01eb8
| 0
|
subchen/jetbrick-template-1x
|
/**
* jetbrick-template
* http://subchen.github.io/jetbrick-template/
*
* Copyright 2010-2014 Guoqiang Chen. All rights reserved.
* Email: subchen@gmail.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jetbrick.template.runtime;
import java.lang.reflect.Array;
import java.util.*;
import jetbrick.template.utils.*;
/**
* 提供给 #for指令用的内部 Iterator 包装器
*
* @since 1.1.3
* @author Guoqiang Chen
*/
public final class JetForIterator<T> implements Iterator<T>, JetForStatus {
    // The underlying iterator; never null (an empty iterator is used for null input).
    protected final Iterator<?> iterator;
    // Element count; may be -1 when a LoopIterator reports an unknown size (see empty()).
    protected final int size;
    // 1-based index of the element most recently returned by next(); 0 before iteration.
    protected int index;
    // NOTE(review): never assigned or read in this class — possibly vestigial.
    protected JetForStatus status;

    /**
     * Wraps an arbitrary "items" value for use by the #for directive.
     *
     * Branch order matters: Iterator is tested before Iterable, and
     * Collection/Map supply their size cheaply, while other shapes
     * (plain Iterator/Iterable, Enumeration) are materialized into a
     * List so the total size is known up front.
     */
    public JetForIterator(Object items) {
        if (items == null) {
            // null iterates as an empty sequence.
            iterator = EmptyIterator.INSTANCE;
            size = 0;
        } else if (items instanceof Iterator) {
            if (items instanceof LoopIterator) {
                // LoopIterator already knows its size; no need to copy.
                iterator = (Iterator<?>) items;
                size = ((LoopIterator) items).getSize();
            } else {
                List<?> list = asList((Iterator<?>) items);
                iterator = list.iterator();
                size = list.size();
            }
        } else if (items instanceof Iterable) {
            if (items instanceof Collection) {
                // Collections expose size() directly.
                iterator = ((Iterable<?>) items).iterator();
                size = ((Collection<?>) items).size();
            } else {
                List<?> list = asList(((Iterable<?>) items).iterator());
                iterator = list.iterator();
                size = list.size();
            }
        } else if (items instanceof Map) {
            // Maps iterate over their entry set.
            iterator = ((Map<?, ?>) items).entrySet().iterator();
            size = ((Map<?, ?>) items).size();
        } else if (items instanceof Enumeration) {
            ArrayList<?> list = Collections.list((Enumeration<?>) items);
            iterator = list.iterator();
            size = list.size();
        } else if (items.getClass().isArray()) {
            iterator = new ArrayIterator(items);
            size = Array.getLength(items);
        } else if ((items instanceof Class) && ((Class<?>) items).isEnum()) {
            // An enum Class iterates over its constants.
            List<?> list = Arrays.asList(((Class<?>) items).getEnumConstants());
            iterator = list.iterator();
            size = list.size();
        } else {
            // Any other object iterates as a single-element sequence.
            iterator = Collections.singleton(items).iterator();
            size = 1;
        }
        this.index = 0;
    }

    // Drains an iterator into a List so its size can be determined.
    private List<?> asList(Iterator<?> it) {
        List<Object> list = new ArrayList<Object>();
        while (it.hasNext()) {
            list.add(it.next());
        }
        return list;
    }

    @Override
    public boolean hasNext() {
        return iterator.hasNext();
    }

    @Override
    @SuppressWarnings("unchecked")
    public T next() {
        Object value = iterator.next();
        index++;
        return (T) value;
    }

    @Override
    public void remove() {
        throw new UnsupportedOperationException();
    }

    /**
     * Used by #for-else to decide whether there were no elements at all
     * (size 0, or unknown size with nothing yet consumed).
     */
    public boolean empty() {
        return size == 0 || (size == -1 && index == 0);
    }

    @Override
    public int getIndex() {
        return index;
    }

    @Override
    public int getSize() {
        return size;
    }

    @Override
    public boolean isFirst() {
        return index == 1;
    }

    @Override
    public boolean isLast() {
        return !iterator.hasNext();
    }

    @Override
    public boolean isOdd() {
        return index % 2 != 0;
    }

    @Override
    public boolean isEven() {
        return index % 2 == 0;
    }
}
|
src/main/java/jetbrick/template/runtime/JetForIterator.java
|
/**
* jetbrick-template
* http://subchen.github.io/jetbrick-template/
*
* Copyright 2010-2014 Guoqiang Chen. All rights reserved.
* Email: subchen@gmail.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jetbrick.template.runtime;
import java.lang.reflect.Array;
import java.util.*;
import jetbrick.template.utils.*;
/**
* 提供给 #for指令用的内部 Iterator 包装器
*
* @since 1.1.3
* @author Guoqiang Chen
*/
public final class JetForIterator<T> implements Iterator<T>, JetForStatus {
    // The underlying iterator; never null (an empty iterator is used for null input).
    protected final Iterator<?> iterator;
    // Element count; may be -1 when a LoopIterator reports an unknown size (see empty()).
    protected final int size;
    // 1-based index of the element most recently returned by next(); 0 before iteration.
    protected int index;
    protected JetForStatus status;

    /**
     * Wraps an arbitrary "items" value for use by the #for directive.
     * Branch order matters: Iterator is tested before Iterable.
     */
    public JetForIterator(Object items) {
        if (items == null) {
            iterator = EmptyIterator.INSTANCE;
            size = 0;
        } else if (items instanceof Iterator) {
            if (items instanceof LoopIterator) {
                iterator = (Iterator<?>) items;
                size = ((LoopIterator) items).getSize();
            } else {
                // FIX: the previous code cast the Iterator to Iterable here
                // (((Iterable<?>) items).iterator()), which threw
                // ClassCastException for any plain Iterator input.
                List<?> list = asList((Iterator<?>) items);
                iterator = list.iterator();
                size = list.size();
            }
        } else if (items instanceof Iterable) {
            if (items instanceof Collection) {
                iterator = ((Iterable<?>) items).iterator();
                size = ((Collection<?>) items).size();
            } else {
                List<?> list = asList(((Iterable<?>) items).iterator());
                iterator = list.iterator();
                size = list.size();
            }
        } else if (items instanceof Map) {
            iterator = ((Map<?, ?>) items).entrySet().iterator();
            size = ((Map<?, ?>) items).size();
        } else if (items instanceof Enumeration) {
            ArrayList<?> list = Collections.list((Enumeration<?>) items);
            iterator = list.iterator();
            size = list.size();
        } else if (items.getClass().isArray()) {
            iterator = new ArrayIterator(items);
            size = Array.getLength(items);
        } else if ((items instanceof Class) && ((Class<?>) items).isEnum()) {
            List<?> list = Arrays.asList(((Class<?>) items).getEnumConstants());
            iterator = list.iterator();
            size = list.size();
        } else {
            // Any other object iterates as a single-element sequence.
            iterator = Collections.singleton(items).iterator();
            size = 1;
        }
        this.index = 0;
    }

    // Drains an iterator into a List so its size can be determined.
    private List<?> asList(Iterator<?> it) {
        List<Object> list = new ArrayList<Object>();
        while (it.hasNext()) {
            list.add(it.next());
        }
        return list;
    }

    @Override
    public boolean hasNext() {
        return iterator.hasNext();
    }

    @Override
    @SuppressWarnings("unchecked")
    public T next() {
        Object value = iterator.next();
        index++;
        return (T) value;
    }

    @Override
    public void remove() {
        throw new UnsupportedOperationException();
    }

    /**
     * Used by #for-else to decide whether there were no elements at all
     * (size 0, or unknown size with nothing yet consumed).
     */
    public boolean empty() {
        return size == 0 || (size == -1 && index == 0);
    }

    @Override
    public int getIndex() {
        return index;
    }

    @Override
    public int getSize() {
        return size;
    }

    @Override
    public boolean isFirst() {
        return index == 1;
    }

    @Override
    public boolean isLast() {
        return !iterator.hasNext();
    }

    @Override
    public boolean isOdd() {
        return index % 2 != 0;
    }

    @Override
    public boolean isEven() {
        return index % 2 == 0;
    }
}
|
Fixed for.size support for iterator
|
src/main/java/jetbrick/template/runtime/JetForIterator.java
|
Fixed for.size support for iterator
|
|
Java
|
bsd-3-clause
|
a9580ef1360925d909765e64d499c887214e6619
| 0
|
bmc/javautil,bmc/javautil
|
/*---------------------------------------------------------------------------*\
$Id: ClassUtil.java 5607 2005-11-25 04:32:30Z bmc $
---------------------------------------------------------------------------
This software is released under a Berkeley-style license:
Copyright (c) 2006 Brian M. Clapper. All rights reserved.
Redistribution and use in source and binary forms are permitted provided
that: (1) source distributions retain this entire copyright notice and
comment; and (2) modifications made to the software are prominently
mentioned, and a copy of the original software (or a pointer to its
location) are included. The name of the author may not be used to endorse
or promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
Effectively, this means you can do what you want with the software except
remove this notice or take advantage of the author's name. If you modify
the software and redistribute your modified version, you must indicate that
your version is a modification of the original, and you must provide either
a pointer to or a copy of the original.
\*---------------------------------------------------------------------------*/
package org.clapper.util.classutil;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.LinkedHashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.jar.Attributes;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import org.clapper.util.logging.Logger;
import org.clapper.util.io.AndFileFilter;
import org.clapper.util.io.FileOnlyFilter;
import org.clapper.util.io.FileFilterMatchType;
import org.clapper.util.io.RegexFileFilter;
import org.clapper.util.io.RecursiveFileFinder;
/**
* <p>A <tt>ClassFinder</tt> object is used to find classes. By default, an
* instantiated <tt>ClassFinder</tt> won't find any classes; you have to
* add the classpath (via a call to {@link #addClassPath}), add jar files,
* add zip files, and/or add directories to the <tt>ClassFinder</tt> so it
* knows where to look. Adding a jar file to a <tt>ClassFinder</tt> causes
* the <tt>ClassFinder</tt> to look at the jar's manifest for a
* "Class-Path" entry; if the <tt>ClassFinder</tt> finds such an entry, it
* adds the contents to the search path, as well.</p>
*
* <p>The following example illustrates how you might use a
* <tt>ClassFinder</tt> to locate all non-abstract classes that implement
* the <tt>ClassFilter</tt> interface, searching the classpath as well
* as anything specified on the command line.</p>
*
* <blockquote><pre>
* import org.clapper.util.classutil.*;
*
* public class Test
* {
* public static void main (String[] args) throws Throwable
* {
* ClassFinder finder = new ClassFinder();
* for (String arg : args)
* finder.add (arg);
*
* ClassFilter filter =
* new AndClassFilter
* // Must not be an interface
* (new NotClassFilter (new InterfaceOnlyClassFilter()),
*
* // Must implement the interface class
* new SubclassClassFilter (ClassFilter.class),
*
* // Must not be abstract
* new NotClassFilter (new AbstractClassFilter()));
*
* Collection<String> classNames = new ArrayList<String>();
* finder.findClasses (classNames, filter);
*
* for (String className : classNames)
* System.out.println ("Found " + className);
* }
* }
* </pre></blockquote>
*
* @version <tt>$Revision: 5607 $</tt>
*
* @author Copyright © 2006 Brian M. Clapper
*/
public class ClassFinder
{
    /*----------------------------------------------------------------------*\
                            Private Data Items
    \*----------------------------------------------------------------------*/

    /**
     * Places to look for classes, keyed by absolute path so the same jar,
     * zip file or directory is never searched twice. A LinkedHashMap keeps
     * the search order identical to the order of addition.
     */
    private LinkedHashMap<String,File> placesToSearch =
        new LinkedHashMap<String,File>();

    /**
     * For logging
     */
    private static final Logger log = new Logger (ClassFinder.class);

    /*----------------------------------------------------------------------*\
                                Constructor
    \*----------------------------------------------------------------------*/

    /**
     * Create a new <tt>ClassFinder</tt> that will search for classes
     * using the default class loader.
     */
    public ClassFinder()
    {
    }

    /*----------------------------------------------------------------------*\
                              Public Methods
    \*----------------------------------------------------------------------*/

    /**
     * Add the contents of the system classpath for classes.
     */
    public void addClassPath()
    {
        String path = null;

        try
        {
            path = System.getProperty ("java.class.path");
        }

        catch (Exception ex)
        {
            // e.g. a SecurityException in a restricted environment; fall
            // back to an empty path rather than failing.
            path = "";
            log.error ("Unable to get class path", ex);
        }

        StringTokenizer tok = new StringTokenizer (path, File.pathSeparator);

        while (tok.hasMoreTokens())
            add (new File (tok.nextToken()));
    }

    /**
     * Add a jar file, zip file or directory to the list of places to search
     * for classes. If the file is a jar, its manifest's "Class-Path"
     * entries are added as well.
     *
     * @param file the jar file, zip file or directory
     *
     * @return <tt>true</tt> if the file was suitable for adding;
     *         <tt>false</tt> if it was not a jar file, zip file, or
     *         directory.
     */
    public boolean add (File file)
    {
        boolean added = false;

        if (ClassUtil.fileCanContainClasses (file))
        {
            String absPath = file.getAbsolutePath();
            if (placesToSearch.get (absPath) == null)
            {
                placesToSearch.put (absPath, file);
                if (isJar (absPath))
                    loadJarClassPathEntries (file);
            }

            added = true;
        }

        return added;
    }

    /**
     * Clear the finder's notion of where to search.
     */
    public void clear()
    {
        placesToSearch.clear();
    }

    /**
     * Find all classes in the search areas, implicitly accepting all of
     * them.
     *
     * @param classNames where to store the resulting matches
     *
     * @return the number of matched classes added to the collection
     */
    public int findClasses (Collection<String> classNames)
    {
        return findClasses (classNames,
                            new ClassFilter()
                            {
                                public boolean accept (String className)
                                {
                                    return true;
                                }
                            });
    }

    /**
     * Search all classes in the search areas, keeping only those that
     * pass the specified filter.
     *
     * @param classNames where to store the resulting matches
     * @param filter     the filter
     *
     * @return the number of matched classes added to the collection
     */
    public int findClasses (Collection<String> classNames,
                            ClassFilter        filter)
    {
        int total = 0;

        // Dump them into a set, so we don't put the same class in the set
        // twice, even if we find it twice. Can't use the caller's
        // Collection, because it might not be a Set. Use a LinkedHashSet,
        // because we want to maintain the order of the classes as we find
        // them. (Let the caller re-order them, if desired.)

        Set<String> foundClasses = new LinkedHashSet<String>();

        for (File file : placesToSearch.values())
        {
            String name = file.getPath();

            log.debug ("Finding classes in " + name);
            if (isJar (name))
                total += processJar (name, filter, foundClasses);
            else if (isZip (name))
                total += processZip (name, filter, foundClasses);
            else
                total += processDirectory (file, filter, foundClasses);
        }

        classNames.addAll (foundClasses);
        return total;
    }

    /*----------------------------------------------------------------------*\
                              Private Methods
    \*----------------------------------------------------------------------*/

    /**
     * Scan one jar file, adding matching class names to the collection.
     * I/O errors are logged, not propagated.
     */
    private int processJar (String             jarName,
                            ClassFilter        filter,
                            Collection<String> classNames)
    {
        int total = 0;

        try
        {
            total = processOpenZip (new JarFile (jarName), filter, classNames);
        }

        catch (IOException ex)
        {
            log.error ("Can't open jar file \"" + jarName + "\"", ex);
        }

        return total;
    }

    /**
     * Scan one zip file, adding matching class names to the collection.
     * I/O errors are logged, not propagated.
     */
    private int processZip (String             zipName,
                            ClassFilter        filter,
                            Collection<String> classNames)
    {
        int total = 0;

        try
        {
            total = processOpenZip (new ZipFile (zipName), filter, classNames);
        }

        catch (IOException ex)
        {
            log.error ("Can't open jar file \"" + zipName + "\"", ex);
        }

        return total;
    }

    /**
     * Walk the entries of an already-opened zip/jar, collecting the names
     * of all ".class" entries that pass the filter.
     */
    private int processOpenZip (ZipFile            zip,
                                ClassFilter        filter,
                                Collection<String> classNames)
    {
        int total = 0;

        for (Enumeration<? extends ZipEntry> e = zip.entries();
             e.hasMoreElements(); )
        {
            ZipEntry entry = e.nextElement();

            if ((! entry.isDirectory()) &&
                (entry.getName().toLowerCase().endsWith (".class")))
            {
                String className = getClassNameFrom (entry.getName());

                if (filter.accept (className))
                {
                    classNames.add (className);
                    total++;
                }
            }
        }

        return total;
    }

    /**
     * Recursively scan a directory tree for ".class" files, collecting the
     * names of those that pass the filter. Class names are derived from
     * paths relative to the directory root.
     */
    private int processDirectory (File               dir,
                                  ClassFilter        classFilter,
                                  Collection<String> classNames)
    {
        int total = 0;

        RecursiveFileFinder finder = new RecursiveFileFinder();
        RegexFileFilter nameFilter =
            new RegexFileFilter ("\\.class$", FileFilterMatchType.FILENAME);
        AndFileFilter fileFilter = new AndFileFilter (nameFilter,
                                                      new FileOnlyFilter());
        Collection<File> files = new ArrayList<File>();

        finder.findFiles (dir, fileFilter, files);

        for (File f : files)
        {
            String path = f.getPath();
            // Strip the directory prefix so the remainder maps to a
            // package-qualified class name.
            path = path.replaceFirst ("^" + dir.getPath() + "/?", "");
            String className = getClassNameFrom (path);

            if (classFilter.accept (className))
            {
                classNames.add (className);
                total++;
            }
        }

        return total;
    }

    /**
     * Read a jar's manifest and add every element of its "Class-Path"
     * attribute (resolved against the jar's parent directory) to the list
     * of places to search. I/O errors are logged, not propagated.
     */
    private void loadJarClassPathEntries (File jarFile)
    {
        try
        {
            JarFile jar = new JarFile (jarFile);

            try
            {
                Manifest manifest = jar.getManifest();
                if (manifest == null)
                    return;

                Attributes attrs = manifest.getMainAttributes();
                Set<Object> keys = attrs.keySet();

                for (Object key : keys)
                {
                    String value = (String) attrs.get (key);

                    if (key.toString().equals ("Class-Path"))
                    {
                        String jarName = jar.getName();
                        log.debug ("Adding Class-Path from jar " + jarName);

                        StringTokenizer tok = new StringTokenizer (value);
                        while (tok.hasMoreTokens())
                        {
                            StringBuilder buf = new StringBuilder();
                            String element = tok.nextToken();
                            String parent = jarFile.getParent();
                            if (parent != null)
                            {
                                buf.append (parent);
                                buf.append (File.separator);
                            }

                            buf.append (element);

                            // BUG FIX: add every Class-Path element. The
                            // previous code called add() after the loop,
                            // so only the LAST element was ever added.
                            String path = buf.toString();
                            log.debug ("From " + jarName + ": " + path);
                            add (new File (path));
                        }
                    }
                }
            }

            finally
            {
                // FIX: close the jar; it was previously leaked.
                jar.close();
            }
        }

        catch (IOException ex)
        {
            log.error ("I/O error processing jar file \""
                     + jarFile.getPath()
                     + "\"",
                       ex);
        }
    }

    /**
     * Convert a zip/jar entry path or relative file path (e.g.
     * "org/example/Foo.class") into a class name ("org.example.Foo").
     */
    private String getClassNameFrom (String entryName)
    {
        String s = entryName.replace ('/', '.').replace ('\\', '.');
        return s.substring (0, s.lastIndexOf ('.'));
    }

    /** Case-insensitive check for a ".jar" file name. */
    private boolean isJar (String fileName)
    {
        return fileName.toLowerCase().endsWith (".jar");
    }

    /** Case-insensitive check for a ".zip" file name. */
    private boolean isZip (String fileName)
    {
        return fileName.toLowerCase().endsWith (".zip");
    }
}
|
src/org/clapper/util/classutil/ClassFinder.java
|
/*---------------------------------------------------------------------------*\
$Id: ClassUtil.java 5607 2005-11-25 04:32:30Z bmc $
---------------------------------------------------------------------------
This software is released under a Berkeley-style license:
Copyright (c) 2006 Brian M. Clapper. All rights reserved.
Redistribution and use in source and binary forms are permitted provided
that: (1) source distributions retain this entire copyright notice and
comment; and (2) modifications made to the software are prominently
mentioned, and a copy of the original software (or a pointer to its
location) are included. The name of the author may not be used to endorse
or promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
Effectively, this means you can do what you want with the software except
remove this notice or take advantage of the author's name. If you modify
the software and redistribute your modified version, you must indicate that
your version is a modification of the original, and you must provide either
a pointer to or a copy of the original.
\*---------------------------------------------------------------------------*/
package org.clapper.util.classutil;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.LinkedHashSet;
import java.util.LinkedHashMap;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.jar.JarFile;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import org.clapper.util.logging.Logger;
import org.clapper.util.io.AndFileFilter;
import org.clapper.util.io.FileOnlyFilter;
import org.clapper.util.io.FileFilterMatchType;
import org.clapper.util.io.RegexFileFilter;
import org.clapper.util.io.RecursiveFileFinder;
/**
* A <tt>ClassFinder</tt> object is used to find classes. By default, an
* instantiated <tt>ClassFinder</tt> won't find any classes; you have to
* add the classpath (via a call to {@link #addClassPath}), add jar
* files, add zip files, and/or add directories to the <tt>ClassFinder</tt>
* so it knows where to look.
*
* @version <tt>$Revision: 5607 $</tt>
*
* @author Copyright © 2006 Brian M. Clapper
*/
public class ClassFinder
{
/*----------------------------------------------------------------------*\
Private Data Items
\*----------------------------------------------------------------------*/
private LinkedHashMap<String,File> placesToSearch =
new LinkedHashMap<String,File>();
/**
* For logging
*/
private static final Logger log = new Logger (ClassFinder.class);
/*----------------------------------------------------------------------*\
Constructor
\*----------------------------------------------------------------------*/
/**
* Create a new <tt>ClassFinder</tt> that will search for classes
* using the default class loader.
*/
public ClassFinder()
{
}
/*----------------------------------------------------------------------*\
Public Methods
\*----------------------------------------------------------------------*/
/**
* Add the contents of the system classpath for classes.
*/
public void addClassPath()
{
String path = null;
try
{
path = System.getProperty ("java.class.path");
}
catch (Exception ex)
{
path= "";
log.error ("Unable to get class path", ex);
}
StringTokenizer tok = new StringTokenizer (path, File.pathSeparator);
while (tok.hasMoreTokens())
add (new File (tok.nextToken()));
}
/**
* Add a jar file, zip file or directory to the list of places to search
* for classes.
*
* @param file the jar file, zip file or directory
*
* @return <tt>true</tt> if the file was suitable for adding;
* <tt>false</tt> if it was not a jar file, zip file, or
* directory.
*/
public boolean add (File file)
{
boolean added = false;
if (ClassUtil.fileCanContainClasses (file))
{
String absPath = file.getAbsolutePath();
if (placesToSearch.get (absPath) == null)
placesToSearch.put (absPath, file);
added = true;
}
return added;
}
/**
* Clear the finder's notion of where to search.
*/
public void clear()
{
placesToSearch.clear();
}
/**
 * Find all classes in the search areas, implicitly accepting all of
 * them.
 *
 * @param classNames where to store the resulting matches
 *
 * @return the number of matched classes added to the collection
 */
public int findClasses (Collection<String> classNames)
{
    // Delegate to the filtering variant with a filter that accepts
    // every class name.
    ClassFilter acceptAll = new ClassFilter()
    {
        public boolean accept (String className)
        {
            return true;
        }
    };

    return findClasses (classNames, acceptAll);
}
/**
 * Search all classes in the search areas, keeping only those that
 * pass the specified filter.
 *
 * @param classNames where to store the resulting matches
 * @param filter     the filter
 *
 * @return the number of matched classes added to the collection
 */
public int findClasses (Collection<String> classNames,
                        ClassFilter filter)
{
    // Accumulate into a LinkedHashSet first: it de-duplicates classes
    // found more than once while preserving discovery order. We cannot
    // rely on the caller's Collection being a Set.
    Set<String> matches = new LinkedHashSet<String>();
    int total = 0;

    for (File place : placesToSearch.values())
    {
        String path = place.getPath();
        log.debug ("Finding classes in " + path);

        // Dispatch on the kind of search place.
        if (path.endsWith (".jar"))
            total += processJar (path, filter, matches);
        else if (path.endsWith (".zip"))
            total += processZip (path, filter, matches);
        else
            total += processDirectory (place, filter, matches);
    }

    classNames.addAll (matches);
    return total;
}
/*----------------------------------------------------------------------*\
Private Methods
\*----------------------------------------------------------------------*/
/**
 * Scan one jar file for class entries, adding matching class names to
 * the collection.
 *
 * @param jarName    path to the jar file
 * @param filter     filter deciding which class names to keep
 * @param classNames receives the accepted class names
 *
 * @return the number of classes accepted from this jar
 */
private int processJar (String jarName,
                        ClassFilter filter,
                        Collection<String> classNames)
{
    int total = 0;
    JarFile jar = null;
    try
    {
        jar = new JarFile (jarName);
        total = processOpenZip (jar, filter, classNames);
    }
    catch (IOException ex)
    {
        log.error ("Can't open jar file \"" + jarName + "\"", ex);
    }
    finally
    {
        // Fix: the jar file was previously never closed, leaking a file
        // handle per scanned jar.
        if (jar != null)
        {
            try
            {
                jar.close();
            }
            catch (IOException ex)
            {
                // Best effort on close; nothing useful to do.
            }
        }
    }
    return total;
}
/**
 * Scan one zip file for class entries, adding matching class names to
 * the collection.
 *
 * @param zipName    path to the zip file
 * @param filter     filter deciding which class names to keep
 * @param classNames receives the accepted class names
 *
 * @return the number of classes accepted from this zip file
 */
private int processZip (String zipName,
                        ClassFilter filter,
                        Collection<String> classNames)
{
    int total = 0;
    ZipFile zip = null;
    try
    {
        zip = new ZipFile (zipName);
        total = processOpenZip (zip, filter, classNames);
    }
    catch (IOException ex)
    {
        // Fix: the message previously said "jar file" for a zip.
        log.error ("Can't open zip file \"" + zipName + "\"", ex);
    }
    finally
    {
        // Fix: the zip file was previously never closed, leaking a file
        // handle per scanned zip.
        if (zip != null)
        {
            try
            {
                zip.close();
            }
            catch (IOException ex)
            {
                // Best effort on close; nothing useful to do.
            }
        }
    }
    return total;
}
/**
 * Scan an already-open zip (or jar) file, adding the names of accepted
 * ".class" entries to the collection.
 *
 * @param zip        the open zip file (caller retains ownership)
 * @param filter     filter deciding which class names to keep
 * @param classNames receives the accepted class names
 *
 * @return the number of classes accepted
 */
private int processOpenZip (ZipFile zip,
                            ClassFilter filter,
                            Collection<String> classNames)
{
    int total = 0;
    Enumeration<? extends ZipEntry> entries = zip.entries();
    while (entries.hasMoreElements())
    {
        ZipEntry entry = entries.nextElement();

        // Only plain ".class" entries are of interest.
        if (entry.isDirectory())
            continue;

        String entryName = entry.getName();
        if (! entryName.toLowerCase().endsWith (".class"))
            continue;

        String className = getClassNameFrom (entryName);
        if (filter.accept (className))
        {
            classNames.add (className);
            total++;
        }
    }
    return total;
}
/**
 * Recursively scan a directory tree for ".class" files, adding the
 * accepted class names to the collection.
 *
 * @param dir         root directory to scan
 * @param classFilter filter deciding which class names to keep
 * @param classNames  receives the accepted class names
 *
 * @return the number of classes accepted
 */
private int processDirectory (File dir,
                              ClassFilter classFilter,
                              Collection<String> classNames)
{
    int total = 0;
    RecursiveFileFinder finder = new RecursiveFileFinder();
    RegexFileFilter nameFilter =
        new RegexFileFilter ("\\.class$", FileFilterMatchType.FILENAME);
    AndFileFilter fileFilter = new AndFileFilter (nameFilter,
                                                  new FileOnlyFilter());
    Collection<File> files = new ArrayList<File>();
    finder.findFiles (dir, fileFilter, files);

    String dirPath = dir.getPath();
    for (File f : files)
    {
        // Fix: the old code stripped the prefix with
        // path.replaceFirst("^" + dir.getPath() + "/?", ""), which broke
        // whenever the directory path contained regex metacharacters
        // (e.g. Windows "\" separators, "(", "+"). Strip it with plain
        // string operations instead.
        String path = f.getPath();
        if (path.startsWith (dirPath))
        {
            path = path.substring (dirPath.length());
            // Drop a single leading separator, if present.
            if ((path.length() > 0) &&
                ((path.charAt (0) == '/') ||
                 (path.charAt (0) == File.separatorChar)))
            {
                path = path.substring (1);
            }
        }

        String className = getClassNameFrom (path);
        if (classFilter.accept (className))
        {
            classNames.add (className);
            total++;
        }
    }
    return total;
}
/**
 * Convert a file or zip entry name (e.g. "org/foo/Bar.class") into a
 * fully-qualified class name (e.g. "org.foo.Bar"): path separators
 * become dots and the trailing ".class" extension is cut off at the
 * last dot.
 *
 * @param entryName the entry or relative file name; must contain at
 *                  least one '.', '/' or '\\' (always true for
 *                  ".class" entries)
 *
 * @return the dotted class name without the ".class" suffix
 */
private String getClassNameFrom (String entryName)
{
    // Fix: dropped the redundant "new String(entryName)" copy; replace()
    // already returns a new string.
    String name = entryName.replace ('/', '.').replace ('\\', '.');
    return name.substring (0, name.lastIndexOf ('.'));
}
}
|
1. Now loads Class-Path from jar manifest, if present.
2. Enhanced class javadocs.
|
src/org/clapper/util/classutil/ClassFinder.java
|
1. Now loads Class-Path from jar manifest, if present. 2. Enhanced class javadocs.
|
|
Java
|
bsd-3-clause
|
4c266a0314e830f0850003daa8da996ef487f235
| 0
|
NCIP/iso21090
|
package gov.nih.nci.iso21090;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
 * Represents the iso TEL.URL data type.
 * No use codes
 * TEL.URL constrains TEL so that it must point to a locatable resource that returns binary content.
 * The URL scheme must be file, nfs, ftp, cid (for SOAP attachments), http, or https
 * @author lpower
 */
@SuppressWarnings("PMD.CyclomaticComplexity")
public final class TelUrl extends Tel implements Cloneable {

    private static final long serialVersionUID = 1L;

    /** scheme. */
    public static final String SCHEME_FILE = "file";
    /** scheme. */
    public static final String SCHEME_NFS = "nfs";
    /** scheme. */
    public static final String SCHEME_FTP = "ftp";
    /** scheme. */
    public static final String SCHEME_HTTP = "http";
    /** scheme. */
    public static final String SCHEME_HTTPS = "https";

    /** set of allowed URI schemes. */
    public static final List<String> SCHEMES;

    static {
        // Build the read-only list of permitted schemes once.
        List<String> allowed = Arrays.asList(
                SCHEME_FILE,
                SCHEME_NFS,
                SCHEME_FTP,
                SCHEME_HTTP,
                SCHEME_HTTPS);
        SCHEMES = Collections.unmodifiableList(allowed);
    }

    /** {@inheritDoc} */
    @Override
    protected List<String> getAllowedSchemes() {
        return SCHEMES;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public TelUrl clone() {
        return (TelUrl) super.clone();
    }
}
|
code/localization/src/main/java/gov/nih/nci/iso21090/TelUrl.java
|
package gov.nih.nci.iso21090;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
 * Represents the iso TEL.URL data type.
 * No use codes
 * TEL.URL constrains TEL so that it must point to a locatable resource that returns binary content.
 * The URL scheme must be file, nfs, ftp, cid (for SOAP attachments), http, or https
 * @author lpower
 */
@SuppressWarnings("PMD.CyclomaticComplexity")
public final class TelUrl extends Tel implements Cloneable {

    private static final long serialVersionUID = 1L;

    /** scheme. */
    public static final String SCHEME_FILE = "file";
    /** scheme. */
    public static final String SCHEME_NFS = "nfs";
    /** scheme. */
    public static final String SCHEME_FTP = "ftp";
    /** scheme. */
    public static final String SCHEME_CID = "cid";
    /** scheme. */
    public static final String SCHEME_HTTP = "http";
    /** scheme. */
    public static final String SCHEME_HTTPS = "https";

    /** set of allowed URI schemes. */
    public static final List<String> SCHEMES;

    static {
        // Build the read-only list of permitted schemes once.
        List<String> allowed = Arrays.asList(
                SCHEME_FILE,
                SCHEME_NFS,
                SCHEME_FTP,
                SCHEME_CID,
                SCHEME_HTTP,
                SCHEME_HTTPS);
        SCHEMES = Collections.unmodifiableList(allowed);
    }

    /** {@inheritDoc} */
    @Override
    protected List<String> getAllowedSchemes() {
        return SCHEMES;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public TelUrl clone() {
        return (TelUrl) super.clone();
    }
}
|
Removed cid scheme to be consistent with XSD
SVN-Revision: 233
|
code/localization/src/main/java/gov/nih/nci/iso21090/TelUrl.java
|
Removed cid scheme to be consistent with XSD
|
|
Java
|
bsd-3-clause
|
a979be909d4c3027751860dbc5a4c1541d30fac4
| 0
|
EsotericSoftware/kryo,EsotericSoftware/kryo,EsotericSoftware/kryo,EsotericSoftware/kryo
|
/* Copyright (c) 2008-2018, Nathan Sweet
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
* - Neither the name of Esoteric Software nor the names of its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */
package com.esotericsoftware.kryo.unsafe;
import static com.esotericsoftware.kryo.unsafe.UnsafeUtil.*;
import com.esotericsoftware.kryo.KryoException;
import com.esotericsoftware.kryo.io.ByteBufferOutput;
import com.esotericsoftware.kryo.util.Util;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import sun.nio.ch.DirectBuffer;
/** A {@link ByteBufferOutput} that writes data to a direct ByteBuffer (off-heap memory) using sun.misc.Unsafe. Multi-byte
 * primitive types use native byte order, so the native byte order on different computers which read and write the data must be
 * the same.
 * <p>
 * Not available on all JVMs. {@link Util#unsafe} can be checked before using this class.
 * <p>
 * This class may be much faster when {@link #setVariableLengthEncoding(boolean)} is false.
 * @author Roman Levenstein <romixlev@gmail.com>
 * @author Nathan Sweet */
public class UnsafeByteBufferOutput extends ByteBufferOutput {
    /** Start address of the memory buffer. It must be non-movable, which normally means that it is allocated off-heap. */
    private long bufferAddress;

    /** Creates an uninitialized Output, {@link #setBuffer(ByteBuffer)} must be called before the Output is used. */
    public UnsafeByteBufferOutput () {
    }

    /** Creates a new Output for writing to a direct {@link ByteBuffer}.
     * @param bufferSize The size of the buffer. An exception is thrown if more bytes than this are written and {@link #flush()}
     *           does not empty the buffer. */
    public UnsafeByteBufferOutput (int bufferSize) {
        super(bufferSize);
        updateBufferAddress();
    }

    /** Creates a new Output for writing to a direct ByteBuffer.
     * @param bufferSize The initial size of the buffer.
     * @param maxBufferSize If {@link #flush()} does not empty the buffer, the buffer is doubled as needed until it exceeds
     *           maxBufferSize and an exception is thrown. Can be -1 for no maximum. */
    public UnsafeByteBufferOutput (int bufferSize, int maxBufferSize) {
        super(bufferSize, maxBufferSize);
        updateBufferAddress();
    }

    /** Creates a new Output for writing to an OutputStream. A buffer size of 4096 is used. */
    public UnsafeByteBufferOutput (OutputStream outputStream) {
        super(outputStream);
        updateBufferAddress();
    }

    /** Creates a new Output for writing to an OutputStream with the specified buffer size. */
    public UnsafeByteBufferOutput (OutputStream outputStream, int bufferSize) {
        super(outputStream, bufferSize);
        updateBufferAddress();
    }

    /** Creates a new Output for writing to a ByteBuffer representing the memory region at the specified address and size. @throws
     * UnsupportedOperationException if creating a ByteBuffer this way is not available. */
    public UnsafeByteBufferOutput (long address, int size) {
        super(newDirectBuffer(address, size));
        updateBufferAddress();
    }

    public void setBuffer (ByteBuffer buffer, int maxBufferSize) {
        if (!(buffer instanceof DirectBuffer)) throw new IllegalArgumentException("buffer must be direct.");
        if (buffer != byteBuffer) UnsafeUtil.dispose(byteBuffer);
        super.setBuffer(buffer, maxBufferSize);
        updateBufferAddress();
    }

    /** Refreshes the cached native address of the current direct buffer. Must be called whenever byteBuffer is replaced. */
    private void updateBufferAddress () {
        bufferAddress = ((DirectBuffer)byteBuffer).address();
    }

    protected boolean require (int required) throws KryoException {
        ByteBuffer oldBuffer = byteBuffer;
        boolean result = super.require(required);
        if (byteBuffer != oldBuffer) {
            // The buffer was grown/replaced: release the old one and re-cache the native address.
            UnsafeUtil.dispose(oldBuffer);
            updateBufferAddress();
        }
        return result;
    }

    /** Releases the byte buffer immediately, rather than waiting for GC. This output can no longer be used until a new byte buffer
     * is set. */
    public void dispose () {
        UnsafeUtil.dispose(byteBuffer);
        byteBuffer = null;
        bufferAddress = 0;
    }

    public void write (int value) throws KryoException {
        if (position == capacity) require(1);
        unsafe.putByte(bufferAddress + position++, (byte)value);
        byteBuffer.position(position);
    }

    public void writeByte (byte value) throws KryoException {
        if (position == capacity) require(1);
        unsafe.putByte(bufferAddress + position++, value);
        byteBuffer.position(position);
    }

    public void writeByte (int value) throws KryoException {
        if (position == capacity) require(1);
        unsafe.putByte(bufferAddress + position++, (byte)value);
        byteBuffer.position(position);
    }

    public void writeInt (int value) throws KryoException {
        require(4);
        unsafe.putInt(bufferAddress + position, value);
        position += 4;
        byteBuffer.position(position);
    }

    public void writeLong (long value) throws KryoException {
        require(8);
        unsafe.putLong(bufferAddress + position, value);
        position += 8;
        byteBuffer.position(position);
    }

    public void writeFloat (float value) throws KryoException {
        require(4);
        unsafe.putFloat(bufferAddress + position, value);
        position += 4;
        byteBuffer.position(position);
    }

    public void writeDouble (double value) throws KryoException {
        require(8);
        unsafe.putDouble(bufferAddress + position, value);
        position += 8;
        byteBuffer.position(position);
    }

    public void writeShort (int value) throws KryoException {
        require(2);
        unsafe.putShort(bufferAddress + position, (short)value);
        position += 2;
        byteBuffer.position(position);
    }

    public void writeChar (char value) throws KryoException {
        require(2);
        unsafe.putChar(bufferAddress + position, value);
        position += 2;
        byteBuffer.position(position);
    }

    public void writeBoolean (boolean value) throws KryoException {
        if (position == capacity) require(1);
        unsafe.putByte(bufferAddress + position++, value ? (byte)1 : 0);
        byteBuffer.position(position);
    }

    // Bulk primitive writes. Fix: the offset and count parameters were previously
    // ignored -- the whole array (array.length elements, from index 0) was written
    // regardless of what the caller asked for. The element offset/count are scaled
    // to bytes (shift = log2 of the element size) and applied to the base offset.

    public void writeInts (int[] array, int offset, int count) throws KryoException {
        writeBytes(array, intArrayBaseOffset + ((long)offset << 2), count << 2);
    }

    public void writeLongs (long[] array, int offset, int count) throws KryoException {
        writeBytes(array, longArrayBaseOffset + ((long)offset << 3), count << 3);
    }

    public void writeFloats (float[] array, int offset, int count) throws KryoException {
        writeBytes(array, floatArrayBaseOffset + ((long)offset << 2), count << 2);
    }

    public void writeDoubles (double[] array, int offset, int count) throws KryoException {
        writeBytes(array, doubleArrayBaseOffset + ((long)offset << 3), count << 3);
    }

    public void writeShorts (short[] array, int offset, int count) throws KryoException {
        writeBytes(array, shortArrayBaseOffset + ((long)offset << 1), count << 1);
    }

    public void writeChars (char[] array, int offset, int count) throws KryoException {
        writeBytes(array, charArrayBaseOffset + ((long)offset << 1), count << 1);
    }

    public void writeBooleans (boolean[] array, int offset, int count) throws KryoException {
        writeBytes(array, booleanArrayBaseOffset + offset, count);
    }

    public void writeBytes (byte[] array, int offset, int count) throws KryoException {
        writeBytes(array, byteArrayBaseOffset + offset, count);
    }

    /** Write count bytes to the byte buffer, reading from the given offset inside the in-memory representation of the object. */
    public void writeBytes (Object from, long offset, int count) throws KryoException {
        int copyCount = Math.min(capacity - position, count);
        while (true) {
            unsafe.copyMemory(from, offset, null, bufferAddress + position, copyCount);
            position += copyCount;
            count -= copyCount;
            if (count == 0) break;
            offset += copyCount;
            copyCount = Math.min(capacity, count);
            require(copyCount);
        }
        byteBuffer.position(position);
    }
}
|
src/com/esotericsoftware/kryo/unsafe/UnsafeByteBufferOutput.java
|
/* Copyright (c) 2008-2018, Nathan Sweet
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
* - Neither the name of Esoteric Software nor the names of its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */
package com.esotericsoftware.kryo.unsafe;
import static com.esotericsoftware.kryo.unsafe.UnsafeUtil.*;
import com.esotericsoftware.kryo.KryoException;
import com.esotericsoftware.kryo.io.ByteBufferOutput;
import com.esotericsoftware.kryo.util.Util;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import sun.nio.ch.DirectBuffer;
/** A {@link ByteBufferOutput} that writes data to a direct ByteBuffer (off-heap memory) using sun.misc.Unsafe. Multi-byte
* primitive types use native byte order, so the native byte order on different computers which read and write the data must be
* the same.
* <p>
* Not available on all JVMs. {@link Util#unsafe} can be checked before using this class.
* <p>
* This class may be much faster when {@link #setVariableLengthEncoding(boolean)} is false.
* @author Roman Levenstein <romixlev@gmail.com>
* @author Nathan Sweet */
public class UnsafeByteBufferOutput extends ByteBufferOutput {
/** Start address of the memory buffer. It must be non-movable, which normally means that is is allocated off-heap. */
private long bufferAddress;
/** Creates an uninitialized Output, {@link #setBuffer(ByteBuffer)} must be called before the Output is used. */
public UnsafeByteBufferOutput () {
}
/** Creates a new Output for writing to a direct {@link ByteBuffer}.
* @param bufferSize The size of the buffer. An exception is thrown if more bytes than this are written and {@link #flush()}
* does not empty the buffer. */
public UnsafeByteBufferOutput (int bufferSize) {
super(bufferSize);
updateBufferAddress();
}
/** Creates a new Output for writing to a direct ByteBuffer.
* @param bufferSize The initial size of the buffer.
* @param maxBufferSize If {@link #flush()} does not empty the buffer, the buffer is doubled as needed until it exceeds
* maxBufferSize and an exception is thrown. Can be -1 for no maximum. */
public UnsafeByteBufferOutput (int bufferSize, int maxBufferSize) {
super(bufferSize, maxBufferSize);
updateBufferAddress();
}
/** Creates a new Output for writing to an OutputStream. A buffer size of 4096 is used. */
public UnsafeByteBufferOutput (OutputStream outputStream) {
super(outputStream);
updateBufferAddress();
}
/** Creates a new Output for writing to an OutputStream with the specified buffer size. */
public UnsafeByteBufferOutput (OutputStream outputStream, int bufferSize) {
super(outputStream, bufferSize);
updateBufferAddress();
}
/** Creates a new Output for writing to a ByteBuffer representing the memory region at the specified address and size. @throws
* UnsupportedOperationException if creating a ByteBuffer this way is not available. */
public UnsafeByteBufferOutput (long address, int size) {
super(newDirectBuffer(address, size));
updateBufferAddress();
}
public void setBuffer (ByteBuffer buffer, int maxBufferSize) {
if (!(buffer instanceof DirectBuffer)) throw new IllegalArgumentException("buffer must be direct.");
if (buffer != byteBuffer) UnsafeUtil.dispose(byteBuffer);
super.setBuffer(buffer, maxBufferSize);
updateBufferAddress();
}
private void updateBufferAddress () {
bufferAddress = ((DirectBuffer)byteBuffer).address();
}
protected boolean require (int required) throws KryoException {
ByteBuffer oldBuffer = byteBuffer;
boolean result = super.require(required);
if (byteBuffer != oldBuffer) UnsafeUtil.dispose(oldBuffer);
return result;
}
/** Releases the byte buffer immediately, rather than waiting for GC. This output can no longer be used until a new byte buffer
* is set. */
public void dispose () {
UnsafeUtil.dispose(byteBuffer);
byteBuffer = null;
bufferAddress = 0;
}
public void write (int value) throws KryoException {
if (position == capacity) require(1);
unsafe.putByte(bufferAddress + position++, (byte)value);
byteBuffer.position(position);
}
public void writeByte (byte value) throws KryoException {
if (position == capacity) require(1);
unsafe.putByte(bufferAddress + position++, value);
byteBuffer.position(position);
}
public void writeByte (int value) throws KryoException {
if (position == capacity) require(1);
unsafe.putByte(bufferAddress + position++, (byte)value);
byteBuffer.position(position);
}
public void writeInt (int value) throws KryoException {
require(4);
unsafe.putInt(bufferAddress + position, value);
position += 4;
byteBuffer.position(position);
}
public void writeLong (long value) throws KryoException {
require(8);
unsafe.putLong(bufferAddress + position, value);
position += 8;
byteBuffer.position(position);
}
public void writeFloat (float value) throws KryoException {
require(4);
unsafe.putFloat(bufferAddress + position, value);
position += 4;
byteBuffer.position(position);
}
public void writeDouble (double value) throws KryoException {
require(8);
unsafe.putDouble(bufferAddress + position, value);
position += 8;
byteBuffer.position(position);
}
public void writeShort (int value) throws KryoException {
require(2);
unsafe.putShort(bufferAddress + position, (short)value);
position += 2;
byteBuffer.position(position);
}
public void writeChar (char value) throws KryoException {
require(2);
unsafe.putChar(bufferAddress + position, value);
position += 2;
byteBuffer.position(position);
}
public void writeBoolean (boolean value) throws KryoException {
if (position == capacity) require(1);
unsafe.putByte(bufferAddress + position++, value ? (byte)1 : 0);
byteBuffer.position(position);
}
public void writeInts (int[] array, int offset, int count) throws KryoException {
writeBytes(array, intArrayBaseOffset, array.length << 2);
}
public void writeLongs (long[] array, int offset, int count) throws KryoException {
writeBytes(array, longArrayBaseOffset, array.length << 3);
}
public void writeFloats (float[] array, int offset, int count) throws KryoException {
writeBytes(array, floatArrayBaseOffset, array.length << 2);
}
public void writeDoubles (double[] array, int offset, int count) throws KryoException {
writeBytes(array, doubleArrayBaseOffset, array.length << 3);
}
public void writeShorts (short[] array, int offset, int count) throws KryoException {
writeBytes(array, shortArrayBaseOffset, array.length << 1);
}
public void writeChars (char[] array, int offset, int count) throws KryoException {
writeBytes(array, charArrayBaseOffset, array.length << 1);
}
public void writeBooleans (boolean[] array, int offset, int count) throws KryoException {
writeBytes(array, booleanArrayBaseOffset, array.length);
}
public void writeBytes (byte[] array, int offset, int count) throws KryoException {
writeBytes(array, byteArrayBaseOffset + offset, count);
}
/** Write count bytes to the byte buffer, reading from the given offset inside the in-memory representation of the object. */
public void writeBytes (Object from, long offset, int count) throws KryoException {
int copyCount = Math.min(capacity - position, count);
while (true) {
unsafe.copyMemory(from, offset, null, bufferAddress + position, copyCount);
position += copyCount;
count -= copyCount;
if (count == 0) break;
offset += copyCount;
copyCount = Math.min(capacity, count);
require(copyCount);
}
byteBuffer.position(position);
}
}
|
Update the UnsafeByteBufferOutput buffer address when it changes.
|
src/com/esotericsoftware/kryo/unsafe/UnsafeByteBufferOutput.java
|
Update the UnsafeByteBufferOutput buffer address when it changes.
|
|
Java
|
bsd-3-clause
|
6c8a1c736009189aaf6797a7b8af58c42d01e556
| 0
|
iSnow/sqlWebservice
|
package de.isnow.sqlws.resources;
import com.fasterxml.jackson.core.JsonProcessingException;
import de.isnow.sqlws.model.*;
import de.isnow.sqlws.model.viewModel.VmColumn;
import de.isnow.sqlws.model.viewModel.VmForeignKey;
import de.isnow.sqlws.model.viewModel.VmObject;
import de.isnow.sqlws.model.viewModel.VmTable;
import de.isnow.sqlws.util.DbUtil;
import de.isnow.sqlws.util.RestUtils;
import lombok.SneakyThrows;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.PathSegment;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
import java.util.stream.Collectors;
@Path("/data")
public class TableContentService {

    /**
     * Read a page of rows from a table.
     *
     * @param tableId       id of the table to read
     * @param startRecord   first record to return (0-based)
     * @param maxRecords    maximum number of records to return
     * @param columnsToShow names of the columns to include; null/empty means all
     * @param filters       filter expressions of the form "filter-&lt;colId&gt;:&lt;value&gt;"
     * @return a JSON wrapper map containing the rows, the table id and the
     *         column model, or null if the table does not exist
     */
    @GET
    @Path("/table/{tableid}")
    @Produces(MediaType.APPLICATION_JSON)
    @SneakyThrows
    public Map getContents(
            @PathParam("tableid") String tableId,
            @QueryParam ("startrecord") @DefaultValue("0") Long startRecord,
            @QueryParam ("maxrecords") @DefaultValue("100") Long maxRecords,
            @QueryParam("columnsToShow") Set<String> columnsToShow,
            @QueryParam("filters") Set<String> filters
    ) throws JsonProcessingException {
        Map<WsColumn, String> colFilters = new HashMap<>();
        WsTable table = WsTable.get(tableId);
        if (null == table) {
            return null;
        }
        // Each filter is encoded as "filter-<columnId>:<value>".
        filters.forEach((f) -> {
            String parts[] = f.split(":");
            if (parts.length > 1) {
                WsColumn col = WsColumn.get(parts[0].replace("filter-", ""));
                colFilters.put(col, parts[1]);
            }
        });
        WsSchema schema = table.getOwningSchema();
        WsCatalog catalog = schema.getOwningCatalog();
        WsConnection conn = catalog.getOwningConnection();
        if ((null == columnsToShow) || (columnsToShow.isEmpty()))
            columnsToShow = table.getColumnsByName().keySet();

        List<List<Object>> retVal = new ArrayList<>();
        // Fix: the PreparedStatement and ResultSet were previously never closed
        // (connection resource leak per request); close them with
        // try-with-resources.
        try (PreparedStatement p = DbUtil.createLimitedReadQuery(
                table,
                startRecord,
                maxRecords,
                columnsToShow,
                colFilters,
                conn.getNativeConnection());
             ResultSet rs = p.executeQuery()) {
            int cnt = 0;
            // NOTE(review): maxRecords has @DefaultValue("100"), so the null
            // branch appears unreachable via JAX-RS; kept for direct callers.
            if (null == maxRecords) {
                while (rs.next()) {
                    List<Object> row = new ArrayList<>();
                    retVal.add(row);
                    for (String name : columnsToShow) {
                        row.add(rs.getObject(name));
                    }
                }
            } else {
                while ((rs.next() && (cnt < maxRecords))) {
                    List<Object> row = new ArrayList<>();
                    retVal.add(row);
                    for (String name : columnsToShow) {
                        row.add(rs.getObject(name));
                    }
                    cnt++;
                }
            }
        }
        Map<String, Object> response = RestUtils.createJsonWrapperForCollection(retVal);
        response.put("id", tableId);
        response.put("model", table.getColumns());
        return response;
    }

    /**
     * Read a single row (identified by its primary key values) plus, to the
     * requested depth, the related child-table rows.
     *
     * @param schemaId      id of the schema the table must belong to
     * @param tableId       id of the table
     * @param primaryKeys   alternating key-name/key-value path segments
     * @param columnsToShow names of the columns to include; null/empty means all
     * @param depth         relation depth to resolve
     * @return a JSON wrapper map with the view-model table, or null if the
     *         schema/table do not exist or do not match
     */
    @GET
    @Path("schema/{schemaid}/table/{tableid}/pk/{pks: .*}")
    @Produces(MediaType.APPLICATION_JSON)
    @SneakyThrows
    public Map getContentsOfRow(
            @PathParam("schemaid") String schemaId,
            @PathParam("tableid") String tableId,
            @PathParam("pks") List<PathSegment> primaryKeys,
            @QueryParam("columnsToShow") Set<String> columnsToShow,
            @QueryParam("depth") int depth) {
        WsSchema schema = WsSchema.get(schemaId);
        if (null == schema)
            return null;
        WsTable table = WsTable.get(tableId);
        if (null == table) {
            return null;
        }
        WsSchema schema2 = table.getOwningSchema();
        if (!schema2.equals(schema))
            return null;
        WsCatalog catalog = schema.getOwningCatalog();
        WsConnection conn = catalog.getOwningConnection();
        final Map<String, String> pks = getKeyValues(primaryKeys);
        //testPksMatch(table, pks);
        VmTable tableToReturn = VmTable.fromWsTable(table, columnsToShow, depth, true);
        transformTable(table, tableToReturn, pks, conn);
        if (tableToReturn.getChildren().size() > 0) {
            // Resolve each child table's foreign-key columns from the parent
            // row's primary-key values, then load the child row(s).
            for (VmTable child : tableToReturn.getChildren()) {
                WsTable wstChild = null;
                Map<String, String> childPks = new HashMap<>();
                for (VmForeignKey rel : child.getRelations().values()) {
                    if (null == wstChild)
                        wstChild = WsTable.get(rel.getChildTableKey());
                    if (rel.getParentTableKey().equals(table.getId())) {
                        for (Map<String, Object> r : rel.getPrimaryForeignKeyRelationships()) {
                            String fk = (String) r.get("fk");
                            String pk = (String) r.get("pk");
                            String fkColName = child.getColumnByFullName(fk).getName();
                            // NOTE(review): getValue() may be null for an
                            // unset parent column; val.toString() would then
                            // NPE -- confirm upstream guarantees.
                            Object val = tableToReturn.getColumnByFullName(pk).getValue();
                            childPks.put(fkColName, val.toString());
                        }
                    }
                }
                transformTable(wstChild, child, childPks, conn);
            }
        }
        Map<String, Object> response = RestUtils.createJsonWrapper(new Object[]{tableToReturn});
        response.put("id", tableId);
        return response;
    }

    /**
     * Update a single row. Not yet implemented: currently validates the
     * schema/table pair and always returns null.
     */
    @POST
    @Path("schema/{schemaid}/table/{tableid}/pk/{pks: .*}")
    @Produces(MediaType.APPLICATION_JSON)
    @SneakyThrows
    public Map updatefRow(
            @PathParam("schemaid") String schemaId,
            @PathParam("tableid") String tableId,
            @PathParam("pks") List<PathSegment> primaryKeys,
            @FormParam("data") String payload) {
        WsSchema schema = WsSchema.get(schemaId);
        if (null == schema)
            return null;
        WsTable table = WsTable.get(tableId);
        if (null == table) {
            return null;
        }
        WsSchema schema2 = table.getOwningSchema();
        if (!schema2.equals(schema))
            return null;
        return null;
    }

    /**
     * Load one row of {@code table} (selected by {@code pks}) and copy the
     * column values into the view-model table's columns.
     */
    private static void transformTable(
            WsTable table,
            VmTable tableToReturn,
            Map<String, String> pks,
            WsConnection conn) {
        Set<String> colNames = tableToReturn
                .getColumns()
                .stream()
                .map(VmObject::getName).collect(Collectors.toSet());
        final Set<WsColumn> lColumnsToShow = table.getColumnsToShow(colNames);
        final Map<String, Object> row = readTableRow(
                table, pks, lColumnsToShow, conn);
        tableToReturn.getColumns().forEach((c) -> c.setValue(row.get(c.getName())));
    }

    /**
     * Verify that the supplied primary-key names match the table's primary-key
     * columns (order-sensitive). Currently unused (call site is commented out).
     *
     * @throws IllegalArgumentException if the key lists differ
     */
    private void testPksMatch(WsTable table, Map<String, String> pks) {
        List<String> pkColNames = table
                .getPrimaryKeyColumns()
                .stream()
                .map(WsObject::getFullName)
                .collect(Collectors.toList());
        List<String> inPkNames = new ArrayList<>(pks.keySet());
        if (!pkColNames.equals(inPkNames)) {
            throw new IllegalArgumentException("list of primary keys mismatches table primary keys");
        }
    }

    /**
     * Decode alternating name/value path segments into an ordered map.
     * A trailing name without a value is ignored.
     */
    private static Map<String, String> getKeyValues(List<PathSegment> pathSegments) {
        Map<String, String> kvs = new LinkedHashMap<>();
        String key = null;
        for (PathSegment seg : pathSegments) {
            if (null == key)
                key = seg.getPath();
            else {
                kvs.put(key, seg.getPath());
                key = null;
            }
        }
        return kvs;
    }

    /**
     * Read the rows of a child table that reference the given parent row via
     * its foreign keys.
     */
    @SneakyThrows
    private static List<Map<String, Object>> readDependentTable (
            WsTable parentTable,
            WsTable childTable,
            Map<String, String> pkVals,
            Set<String> columnsToShow,
            WsConnection conn) {
        List<Map<String, Object>> retVal = new ArrayList<>();
        final Set<String> lColumnsToShow =
                ((null == columnsToShow) || (columnsToShow.isEmpty())) ?
                        childTable.getColumnsByName().keySet()
                        : columnsToShow;
        final Map<WsColumn, WsColumn> childPks = new HashMap<>();
        Set<WsTable.WsForeignKey> fkCols = childTable.getForeignKeys();
        fkCols.forEach((c) -> {
            if (c.getParentTableKey().equals(parentTable.getFullName())) {
                c.getPrimaryForeignKeyRelationships().forEach((f) -> {
                    WsColumn pfkCol = parentTable.getColumnByFullName(f.getPkColumnName());
                    WsColumn ffkCol = childTable.getColumnByFullName(f.getFkColumnName());
                    childPks.put(pfkCol, ffkCol);
                    // Fix: statement/result set were previously leaked; close
                    // them with try-with-resources.
                    try (PreparedStatement s = DbUtil.createChildTableReadQuery(
                            childTable, childPks, pkVals,
                            null, conn.getNativeConnection());
                         ResultSet rs = s.executeQuery()) {
                        while (rs.next()) {
                            Map<String, Object> row = new LinkedHashMap<>();
                            for (String name : lColumnsToShow) {
                                row.put(name, rs.getObject(name));
                            }
                            retVal.add(row);
                        }
                    } catch (SQLException ex) {
                        ex.printStackTrace();
                    }
                });
            }
        });
        return retVal;
    }

    /**
     * Read a single row of {@code table} selected by the primary-key values.
     * Returns an empty map when no row matches.
     */
    @SneakyThrows
    private static Map<String, Object> readTableRow (
            WsTable table,
            Map<String, String> pks,
            Set<WsColumn> columnsToShow,
            WsConnection conn) {
        Set<String> lColumnsToShow;
        if ((null == columnsToShow) || (columnsToShow.isEmpty())) {
            lColumnsToShow = table.getColumnsByName().keySet();
        } else {
            lColumnsToShow = columnsToShow
                    .stream()
                    .map(WsObject::getName)
                    .collect(Collectors.toSet());
        }
        Map<WsColumn, String> pkCols = new HashMap<>();
        pks.keySet().forEach((k) -> {
            pkCols.put(table.getColumnByName(k), pks.get(k));
        });
        Map<String, Object> row = new LinkedHashMap<>();
        // Fix: statement/result set were previously leaked; close them with
        // try-with-resources.
        try (PreparedStatement p = DbUtil.createSingleReadQuery(
                table,
                pkCols,
                lColumnsToShow,
                conn.getNativeConnection());
             ResultSet rs = p.executeQuery()) {
            if (rs.next()) {
                for (String name : lColumnsToShow) {
                    row.put(name, rs.getObject(name));
                }
            }
        }
        return row;
    }
}
|
src/main/java/de/isnow/sqlws/resources/TableContentService.java
|
package de.isnow.sqlws.resources;
import com.fasterxml.jackson.core.JsonProcessingException;
import de.isnow.sqlws.model.*;
import de.isnow.sqlws.model.viewModel.VmColumn;
import de.isnow.sqlws.model.viewModel.VmForeignKey;
import de.isnow.sqlws.model.viewModel.VmObject;
import de.isnow.sqlws.model.viewModel.VmTable;
import de.isnow.sqlws.util.DbUtil;
import de.isnow.sqlws.util.RestUtils;
import lombok.SneakyThrows;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.PathSegment;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
import java.util.stream.Collectors;
@Path("/data")
public class TableContentService {
@GET
@Path("/table/{tableid}")
@Produces(MediaType.APPLICATION_JSON)
@SneakyThrows
public Map getContents(
@PathParam("tableid") String tableId,
@QueryParam ("startrecord") @DefaultValue("0") Long startRecord,
@QueryParam ("maxrecords") @DefaultValue("100") Long maxRecords,
@QueryParam("columnsToShow") Set<String> columnsToShow,
@QueryParam("filters") Set<String> filters
) throws JsonProcessingException {
Map<WsColumn, String> colFilters = new HashMap<>();
WsTable table = WsTable.get(tableId);
if (null == table) {
return null;
}
filters.forEach((f) -> {
String parts[] = f.split(":");
if (parts.length > 1) {
WsColumn col = WsColumn.get(parts[0].replace("filter-", ""));
colFilters.put(col, parts[1]);
}
});
WsSchema schema = table.getOwningSchema();
WsCatalog catalog = schema.getOwningCatalog();
WsConnection conn = catalog.getOwningConnection();
if ((null == columnsToShow) || (columnsToShow.isEmpty()))
columnsToShow = table.getColumnsByName().keySet();
PreparedStatement p = DbUtil.createLimitedReadQuery(
table,
startRecord,
maxRecords,
columnsToShow,
colFilters,
conn.getNativeConnection());
ResultSet rs = p.executeQuery();
int cnt = 0;
List retVal = new ArrayList();
if (null == maxRecords) {
while (rs.next()) {
List row = new ArrayList();
retVal.add(row);
for (String name : columnsToShow) {
row.add(rs.getObject(name));
}
}
} else {
while ((rs.next() && (cnt < maxRecords))) {
List row = new ArrayList();
retVal.add(row);
for (String name : columnsToShow) {
row.add(rs.getObject(name));
}
cnt++;
}
}
Map<String, Object> response = RestUtils.createJsonWrapperForCollection(retVal);
response.put("id", tableId);
response.put("model", table.getColumns());
return response;
}
    /**
     * Returns one table row, identified by alternating name/value primary-key
     * path segments, converted to a view-model table. Child tables reachable
     * via foreign keys have their column values populated as well.
     *
     * @param schemaId      schema the table must belong to
     * @param tableId       table to read from
     * @param primaryKeys   alternating name/value path segments (see getKeyValues)
     * @param columnsToShow optional projection passed to the view-model builder
     * @param depth         recursion depth for VmTable.fromWsTable
     * @return JSON wrapper map containing the view-model table plus "id", or
     *         null when schema/table are unknown or the table is in a
     *         different schema
     */
    @GET
    @Path("schema/{schemaid}/table/{tableid}/pk/{pks: .*}")
    //@Consumes({MediaType.MULTIPART_FORM_DATA})
    @Produces(MediaType.APPLICATION_JSON)
    @SneakyThrows
    public Map getContentsOfRow(
            @PathParam("schemaid") String schemaId,
            @PathParam("tableid") String tableId,
            @PathParam("pks") List<PathSegment> primaryKeys,
            @QueryParam("columnsToShow") Set<String> columnsToShow,
            @QueryParam("depth") int depth) {
        WsSchema schema = WsSchema.get(schemaId);
        if (null == schema)
            return null;
        WsTable table = WsTable.get(tableId);
        if (null == table) {
            return null;
        }
        // reject a table that exists but belongs to a different schema
        WsSchema schema2 = table.getOwningSchema();
        if (!schema2.equals(schema))
            return null;
        WsCatalog catalog = schema.getOwningCatalog();
        WsConnection conn = catalog.getOwningConnection();
        final Map<String, String> pks = getKeyValues(primaryKeys);
        //testPksMatch(table, pks);
        VmTable tableToReturn = VmTable.fromWsTable(table, columnsToShow, depth, true);
        // fill in the values of the requested row itself
        transformTable(table, tableToReturn, pks, conn);
        if (tableToReturn.getChildren().size() > 0) {
            for (VmTable child : tableToReturn.getChildren()) {
                WsTable wstChild = null;
                Map<String, String> childPks = new HashMap<>();
                for (VmForeignKey rel : child.getRelations().values()) {
                    if (null == wstChild)
                        wstChild = WsTable.get(rel.getChildTableKey());
                    // NOTE(review): this unconditional assignment makes the
                    // null-guarded assignment above redundant — looks like a
                    // leftover; confirm which of the two is intended.
                    wstChild = WsTable.get(rel.getChildTableKey());
                    if (rel.getParentTableKey().equals(table.getId())) {
                        // Map each child FK column name to the parent's PK
                        // value that was just read, keying the child lookup.
                        for (Map<String, Object> r : rel.getPrimaryForeignKeyRelationships()) {
                            String fk = (String) r.get("fk");
                            String pk = (String) r.get("pk");
                            String fkColName = child.getColumnByFullName(fk).getName();
                            Object val = tableToReturn.getColumnByFullName(pk).getValue();
                            childPks.put(fkColName, val.toString());
                        }
                    }
                }
                transformTable(wstChild, child, childPks, conn);
            }
        }
        Map<String, Object> response = RestUtils.createJsonWrapper(new Object[]{tableToReturn});
        response.put("id", tableId);
        return response;
    }
private static void transformTable(
WsTable table,
VmTable tableToReturn,
Map<String, String> pks,
WsConnection conn) {
Set<String> colNames = tableToReturn
.getColumns()
.stream()
.map(VmObject::getName).collect(Collectors.toSet());
final Set<WsColumn> lColumnsToShow = table.getColumnsToShow(colNames);
final Map<String, Object> row = readTableRow(
table, pks, lColumnsToShow, conn);
tableToReturn.getColumns().forEach((c) -> c.setValue(row.get(c.getName())));
}
private static void transformSubTable(
WsTable table,
VmTable tableToReturn,
Map<String, String> pks,
WsConnection conn) {
Set<String> colNames = tableToReturn
.getColumns()
.stream()
.map(VmObject::getName).collect(Collectors.toSet());
final Set<WsColumn> lColumnsToShow = table.getColumnsToShow(colNames);
final Map<String, Object> row = readTableRow(
table, pks, lColumnsToShow, conn);
tableToReturn.getColumns().forEach((c) -> c.setValue(row.get(c.getName())));
}
/*private static Set<VmColumn> transformTable(
WsTable table,
Set<String> columnsToShow,
Map<String, String> pks,
WsConnection conn) {
final Set<WsColumn> lColumnsToShow = getColumnsToShow(table, columnsToShow);
final Set<VmColumn> lColumnsToReturn = lColumnsToShow
.stream()
.map(VmColumn::fromWsColumn)
.collect(Collectors.toSet());
final Map<String, Object> row = readTableRow(
table, pks, lColumnsToShow, conn);
lColumnsToReturn.forEach((c) -> c.setValue(row.get(c.getName())));
return lColumnsToReturn;
}
private static Set<WsColumn> getColumnsToShow(WsTable table, Set<String> columnsToShow) {
Set<String> lColumnsToShow =
((null == columnsToShow) || (columnsToShow.isEmpty())) ?
table.getColumnsByName().keySet()
: columnsToShow;
return lColumnsToShow.stream().map((name) ->
table.getColumnByName(name))
.collect(Collectors.toSet());
}*/
private void testPksMatch(WsTable table, Map<String, String> pks) {
List<String> pkColNames = table
.getPrimaryKeyColumns()
.stream()
.map(WsObject::getFullName)
.collect(Collectors.toList());
List<String> inPkNames = new ArrayList<>(pks.keySet());
if (!pkColNames.equals(inPkNames)) {
throw new IllegalArgumentException("list of primary keys mismatches table primary keys");
}
}
private static Map<String, String> getKeyValues(List<PathSegment> pathSegments) {
Map<String, String> kvs = new LinkedHashMap<>();
String key = null;
for (PathSegment seg : pathSegments) {
if (null == key)
key = seg.getPath();
else {
kvs.put(key, seg.getPath());
key = null;
}
}
return kvs;
}
@SneakyThrows
private static List<Map<String, Object>> readDependentTable (
WsTable parentTable,
WsTable childTable,
Map<String, String> pkVals,
Set<String> columnsToShow,
WsConnection conn) {
List<Map<String, Object>> retVal = new ArrayList<>();
final Set<String> lColumnsToShow =
((null == columnsToShow) || (columnsToShow.isEmpty())) ?
childTable.getColumnsByName().keySet()
: columnsToShow;
final Map<WsColumn, WsColumn> childPks = new HashMap<>();
Set<WsTable.WsForeignKey> fkCols = childTable.getForeignKeys();
fkCols.forEach((c) -> {
if (c.getParentTableKey().equals(parentTable.getFullName())) {
c.getPrimaryForeignKeyRelationships().forEach((f) -> {
WsColumn pfkCol = parentTable.getColumnByFullName(f.getPkColumnName());
WsColumn ffkCol = childTable.getColumnByFullName(f.getFkColumnName());
childPks.put(pfkCol, ffkCol);
try {
PreparedStatement s = DbUtil.createChildTableReadQuery(
childTable, childPks, pkVals,
null, conn.getNativeConnection());
ResultSet rs = s.executeQuery();
while (rs.next()) {
Map<String, Object> row = new LinkedHashMap<>();
for (String name : lColumnsToShow) {
row.put(name, rs.getObject(name));
}
retVal.add(row);
}
} catch (SQLException ex) {
ex.printStackTrace();
}
});
}
});
return retVal;
}
/*@SneakyThrows
private static Map<String, Object> readDependentTableRow(
WsTable table,
Map<String, String> fks,
Set<String> columnsToShow,
WsConnection conn) {
final Set<String> lColumnsToShow =
((null == columnsToShow) || (columnsToShow.isEmpty())) ?
table.getColumnsByName().keySet()
: columnsToShow;
Map<String, WsColumn> colsDict = table.getColumnsByFullName();
Map<WsColumn, String> keyCols = new HashMap<>();
fks.keySet().stream().forEach((k) -> {
keyCols.put(colsDict.get(k), fks.get(k));
});
PreparedStatement p = DbUtil.createSingleReadQuery(
table,
keyCols,
lColumnsToShow,
conn.getNativeConnection());
ResultSet rs = p.executeQuery();
Map<String, Object> row = new LinkedHashMap<>();
if (rs.next()) {
for (String name : lColumnsToShow) {
row.put(name, rs.getObject(name));
}
}
return row;
}*/
/*@SneakyThrows
private static Map<String, Object> readTableRow(
WsTable table,
Map<String, String> pks,
Set<String> columnsToShow,
WsConnection conn) {
final Set<String> lColumnsToShow =
((null == columnsToShow) || (columnsToShow.isEmpty())) ?
table.getColumnsByName().keySet()
: columnsToShow;
Map<String, WsColumn> colsDict = table.getColumnsByName();
Map<WsColumn, String> pkCols = new HashMap<>();
pks.keySet().stream().forEach((k) -> {
pkCols.put(colsDict.get(k), pks.get(k));
});
PreparedStatement p = DbUtil.createSingleReadQuery(
table,
pkCols,
lColumnsToShow,
conn.getNativeConnection());
ResultSet rs = p.executeQuery();
Map<String, Object> row = new LinkedHashMap<>();
if (rs.next()) {
for (String name : lColumnsToShow) {
row.put(name, rs.getObject(name));
}
}
return row;
}*/
@SneakyThrows
private static Map<String, Object> readTableRow (
WsTable table,
Map<String, String> pks,
Set<WsColumn> columnsToShow,
WsConnection conn) {
Set<String> lColumnsToShow = null;
if ((null == columnsToShow) || (columnsToShow.isEmpty())) {
lColumnsToShow = table.getColumnsByName().keySet();
} else {
lColumnsToShow = columnsToShow
.stream()
.map(WsObject::getName)
.collect(Collectors.toSet());
}
Map<WsColumn, String> pkCols = new HashMap<>();
pks.keySet().forEach((k) -> {
pkCols.put(table.getColumnByName(k), pks.get(k));
});
PreparedStatement p = DbUtil.createSingleReadQuery(
table,
pkCols,
lColumnsToShow,
conn.getNativeConnection());
ResultSet rs = p.executeQuery();
Map<String, Object> row = new LinkedHashMap<>();
if (rs.next()) {
for (String name : lColumnsToShow) {
row.put(name, rs.getObject(name));
}
}
return row;
}
/*
@SneakyThrows
private static Map<String, Object> readTableRow(
WsTable table,
Map<WsColumn, String> pks,
Set<String> columnsToShow,
WsConnection conn,
int depth) {
PreparedStatement p = DbUtil.createSingleReadQuery(
table,
pks,
columnsToShow,
conn.getNativeConnection());
ResultSet rs = p.executeQuery();
Map<String, Object> row = new LinkedHashMap<>();
if (rs.next()) {
for (String name : columnsToShow) {
row.put(name, rs.getObject(name));
}
}
return row;
}*/
}
|
Including pk/fk-relationships
|
src/main/java/de/isnow/sqlws/resources/TableContentService.java
|
Including pk/fk-relationships
|
|
Java
|
mit
|
e5b4d500367c7e5b87b8f3816b1077b1bfa5dc26
| 0
|
kreneskyp/openconferenceware-android
|
package org.osb;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.*;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.content.Intent;
import android.content.res.Resources;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.text.Html;
import android.text.method.LinkMovementMethod;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SubMenu;
import android.view.View;
import android.view.ViewGroup;
import android.view.View.OnClickListener;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.ScrollView;
import android.widget.TextView;
import android.widget.ViewFlipper;
public class ScheduleActivity extends AbstractActivity {
private static final int MENU_DATE_BASE = 1000;
private static final int MENU_NEXT = 5;
private static final int MENU_PREV = 6;
private static final int MENU_ABOUT = 7;
private static final int MENU_NOW = 8;
private static final int MENU_REFRESH = 9;
// state
Date mCurrentDate;
TextView mDate;
boolean mDetail = false;
Handler mHandler;
// general conference data
Conference mConference;
Date[] mDates;
HashMap<Date, Schedule> mSchedule;
// session list
EventAdapter mAdapter;
ListView mEvents;
// screen animation
ViewFlipper mFlipper;
Animation mInLeft;
Animation mInRight;
Animation mOutLeft;
Animation mOutRight;
// session details
Event mEvent = null;
HashMap<Integer, Speaker> mSpeakers;
View mHeader;
TextView mTitle;
TextView mTime;
TextView mLocation;
View mTimeLocation;
TextView mSpeaker;
ScrollView mDescriptionScroller;
TextView mDescription;
ImageView mMapImage;
LinearLayout mBio;
// session detail actions
Button mFoursquare;
Button mShare;
Button mMap;
Button mShowDescription;
Button mShowBio;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
mHandler = new Handler();
mSpeakers = new HashMap<Integer, Speaker>();
mSchedule = new HashMap<Date, Schedule>();
mDate = (TextView) findViewById(R.id.date);
mEvents = (ListView) findViewById(R.id.events);
mFlipper = (ViewFlipper) findViewById(R.id.flipper);
Context context = getApplicationContext();
mInLeft = AnimationUtils.loadAnimation(context, R.anim.slide_in_left);
mInRight = AnimationUtils.loadAnimation(context, R.anim.slide_in_right);
mOutLeft = AnimationUtils.loadAnimation(context, R.anim.slide_out_left);
mOutRight = AnimationUtils.loadAnimation(context, R.anim.slide_out_right);
// grab views for details
View detail = findViewById(R.id.detail);
mHeader = findViewById(R.id.detail_header);
mSpeaker = (TextView) findViewById(R.id.speaker);
mTitle = (TextView) detail.findViewById(R.id.title);
mTimeLocation = detail.findViewById(R.id.time_location);
mTime = (TextView) detail.findViewById(R.id.time);
mLocation = (TextView) detail.findViewById(R.id.location);
mDescription = (TextView) detail.findViewById(R.id.description);
mDescriptionScroller = (ScrollView) detail.findViewById(R.id.description_scroller);
mMapImage = (ImageView) detail.findViewById(R.id.map_image);
mBio = (LinearLayout) detail.findViewById(R.id.bio);
// detail action buttons
mFoursquare = (Button) findViewById(R.id.foursquare);
mShare = (Button) findViewById(R.id.share);
mMap = (Button) findViewById(R.id.map);
mShowDescription = (Button) findViewById(R.id.show_description);
mShowBio = (Button) findViewById(R.id.show_bio);
mEvents.setOnItemClickListener(new ListView.OnItemClickListener() {
public void onItemClick(AdapterView<?> adapterview, View view, int position, long id) {
Object item = mAdapter.mFiltered.get(position);
if (item instanceof Date) {
return;// ignore clicks on the dates
}
Event event = (Event) item;
Track track = mConference.tracks.get(event.track);
Location location = mConference.locations.get(event.location);
if (!event.details){
// load detailed info for this session
DataService service = getDataService();
event = service.getEvent(event.id, false);
mAdapter.mFiltered.set(position, event);
}
String speaker_names = "";
Speaker speaker;
for(Integer sid: event.speaker_ids){
if (mSpeakers.containsKey(sid)){
speaker = mSpeakers.get(sid);
} else {
speaker = getDataService().getSpeaker(sid, false);
mSpeakers.put(sid, speaker);
}
if (speaker_names == "") {
speaker_names = speaker.name;
} else {
speaker_names = speaker_names + ", " + speaker.name;
}
}
mSpeaker.setText(speaker_names);
mHeader.setBackgroundColor(Color.parseColor(track.color));
mTitle.setText(event.title);
mTitle.setTextColor(Color.parseColor(track.color_text));
mLocation.setText(location.name);
DateFormat startFormat = new SimpleDateFormat("E, h:mm");
DateFormat endFormat = new SimpleDateFormat("h:mm a");
String timeString = startFormat.format(event.start) + " - " + endFormat.format(event.end);
mTime.setText(timeString);
mTimeLocation.setBackgroundColor(Color.parseColor(track.color_dark));
mDescription.setMovementMethod(LinkMovementMethod.getInstance());
mDescription.setText(Html.fromHtml(event.description));
show_description();
mDescriptionScroller.scrollTo(0, 0);
mFlipper.setInAnimation(mInRight);
mFlipper.setOutAnimation(mOutLeft);
mFlipper.showNext();
mEvent = event;
mDetail = true;
}
});
mShowDescription.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
show_description();
}
});
mMap.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
int id = getResources().getIdentifier("map_"+mEvent.location,"drawable",getPackageName());
mDescription.setVisibility(View.GONE);
mBio.setVisibility(View.GONE);
// only set&show image if a map image was found
if (id!=0){
mMapImage.setImageResource(id);
mMapImage.setVisibility(View.VISIBLE);
}
}
});
mShowBio.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
mBio.removeAllViews();
Integer[] speaker_ids = mEvent.speaker_ids;
if (speaker_ids != null) {
for (int i=0; i<speaker_ids.length; i++) {
View view = loadBioView(speaker_ids[i]);
if (view != null) {
if (i>0){
view.setPadding(0, 30, 0, 0);
}
mBio.addView(view);
}
}
mDescription.setVisibility(View.GONE);
mMapImage.setVisibility(View.GONE);
mBio.setVisibility(View.VISIBLE);
}
}
/**
* loads a view populated with the speakers info
* @param id
* @return
*/
private View loadBioView(int sid) {
Integer id = new Integer(sid);
Speaker speaker = null;
View view = null;
// check memory to see if speaker had already been loaded
// else load the speaker from persistent storage
if (mSpeakers.containsKey(id)){
speaker = mSpeakers.get(id);
} else {
speaker = getDataService().getSpeaker(id, false);
mSpeakers.put(id, speaker);
}
// create view
if (speaker != null) {
LayoutInflater vi = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
view = vi.inflate(R.layout.bio, null);
TextView name = (TextView) view.findViewById(R.id.name);
name.setText(speaker.name);
TextView biography = (TextView) view.findViewById(R.id.biography);
biography.setMovementMethod(LinkMovementMethod.getInstance());
biography.setText(Html.fromHtml(speaker.biography));
String twitter = speaker.twitter;
if (twitter != null && twitter != "" && twitter != "null"){
TextView text = (TextView) view.findViewById(R.id.twitter);
text.setText(twitter);
View parent = (View) text.getParent();
parent.setVisibility(View.VISIBLE);
}
String website = speaker.website;
if (website != null && website != "" && website != "null"){
TextView text = (TextView) view.findViewById(R.id.website);
text.setText(speaker.website);
View parent = (View) text.getParent();
parent.setVisibility(View.VISIBLE);
}
String blog = speaker.blog;
if (blog != null && blog != "" && blog != "null"){
TextView text = (TextView) view.findViewById(R.id.blog);
text.setText(speaker.blog);
View parent = (View) text.getParent();
parent.setVisibility(View.VISIBLE);
}
if (speaker.affiliation != null){
TextView text = (TextView) view.findViewById(R.id.affiliation);
text.setText(speaker.affiliation);
}
String identica = speaker.identica;
if (identica != null && identica != "" && identica != "null"){
TextView text = (TextView) view.findViewById(R.id.identica);
text.setText(speaker.identica);
View parent = (View) text.getParent();
parent.setVisibility(View.VISIBLE);
}
}
return view;
}
});
mFoursquare.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
String url = mapRoomNameToFqUrl((mLocation).getText().toString());
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(url));
startActivity(intent);
}
private String mapRoomNameToFqUrl(String roomName) {
String vid = "";
if (roomName.equals("Hawthorne")) {
vid = "4281683";
} else if (roomName.equals("Burnside")) {
vid = "4281826";
} else if (roomName.equals("St. Johns")) {
vid = "4281970";
} else if (roomName.equals("Broadway")) {
vid = "4281777";
} else if (roomName.equals("Morrison")) {
vid = "4281923";
} else if (roomName.equals("Fremont")) {
vid = "4281874";
} else if (roomName.equals("Steel")) {
vid = "4282004";
}
return "http://m.foursquare.com/checkin?vid="+vid;
}
});
mShare.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
Intent intent = new Intent(android.content.Intent.ACTION_SEND);
intent.setType("text/plain");
Resources r = getApplicationContext().getResources();
intent.putExtra(Intent.EXTRA_SUBJECT, r.getString(R.string.share_subject));
intent.putExtra(Intent.EXTRA_TEXT, r.getString(R.string.share_text) + mTitle.getText() + r.getString(R.string.share_text2));
startActivity(Intent.createChooser(intent, "Share"));
}
});
// spawn loading into separate thread
mHandler.post(new Runnable() {
public void run() {
loadSchedule(false);
now();
}
});
}
/**
* Shows the session description, hides all other subviews
*/
private void show_description(){
mMapImage.setVisibility(View.GONE);
mBio.setVisibility(View.GONE);
mDescription.setVisibility(View.VISIBLE);
}
/**
* overridden to hook back button when on the detail page
*/
public boolean onKeyDown(int keyCode, KeyEvent event){
if (mDetail && keyCode == KeyEvent.KEYCODE_BACK){
showList();
return true;
}
return super.onKeyDown(keyCode, event);
}
    /**
     * Creates the option menu: prev/next day, a "Day" submenu with one entry
     * per conference day, now, refresh and about.
     * NOTE(review): assumes mDates has been populated (loadSchedule() ran)
     * before the menu is first opened — confirm, otherwise this NPEs.
     */
    public boolean onCreateOptionsMenu(Menu menu) {
        menu.add(0, MENU_PREV, 0, "Previous Day").setIcon(R.drawable.ic_menu_back);
        SubMenu dayMenu = menu.addSubMenu("Day").setIcon(android.R.drawable.ic_menu_today);
        DateFormat formatter = new SimpleDateFormat("EEEE, MMMM d");
        Date date;
        for (int i=0; i<mDates.length; i++){
            date = mDates[i];
            // date entries encode their index as MENU_DATE_BASE + i
            dayMenu.add(0, MENU_DATE_BASE+i, 0, formatter.format(date));
        }
        menu.add(0, MENU_NEXT, 0, "Next Day").setIcon(R.drawable.ic_menu_forward);
        menu.add(0, MENU_NOW, 0, "Now").setIcon(R.drawable.time);
        menu.add(0, MENU_REFRESH, 0, "Refresh").setIcon(R.drawable.ic_menu_refresh);
        menu.add(0, MENU_ABOUT, 0, "About").setIcon(android.R.drawable.ic_menu_info_details);
        return true;
    }
/* Handles item selections */
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
switch (id) {
case MENU_NOW:
now();
return true;
case MENU_PREV:
previous();
return true;
case MENU_NEXT:
next();
return true;
case MENU_ABOUT:
showDialog(0);
return true;
case MENU_REFRESH:
mHandler.post(new Runnable() {
public void run() {
loadSchedule(true);
now();
}
});
return true;
default:
if (id >= MENU_DATE_BASE) {
// must be a date menu option. all dates
// menu options are an index offset by MENU_DATE_BASE
setDay(mDates[item.getItemId()-MENU_DATE_BASE]);
return true;
}
}
return false;
}
    /**
     * Sets the current day. If the date is already displayed only the scroll
     * position moves to that time; otherwise the list is re-filtered for the
     * new day. Always returns the user to the list view afterwards.
     * @param date day (and optionally time-of-day) to display
     */
    public void setDay(Date date) {
        if (isSameDay(mCurrentDate, date)) {
            // same day, just jump to current time
            mAdapter.now(date);
        } else {
            // different day, update the list. Load the date requested
            // if it is not already loaded
            mCurrentDate = date;
            mAdapter.filterDay(date);
            DateFormat formatter = new SimpleDateFormat("E, MMMM d");
            mDate.setText(formatter.format(mCurrentDate));
        }
        // take user back to the listings if not already there
        showList();
    }
    /**
     * Jumps the user to right now in the event list:
     *
     * - if its before or after the conference, it shows the beginning
     *   of day 1
     * - if its during the conference it will show the first event
     *   currently underway
     */
    public void now(){
        Date now = new Date();
        if (now.before(mDates[0]) || now.after(mConference.end)) {
            // outside the conference window: show the first day from its start
            setDay((Date) mDates[0].clone());
        } else {
            // use now, since it will have the time of day for
            // jumping to the right time
            setDay(now);
        }
    }
/**
* Jumps to the next day, if not already at the end
*/
public void next() {
try {
if (!isSameDay(mCurrentDate, mConference.end)) {
Date load = new Date(mCurrentDate.getYear(), mCurrentDate.getMonth(), mCurrentDate.getDate()+1);
setDay(load);
}
} catch (Exception e) {
e.printStackTrace();
}
}
/**
* Jumps to the previous day if now already at the beginning
*/
public void previous() {
if (!isSameDay(mCurrentDate, mConference.start)) {
Date load = new Date(mCurrentDate.getYear(), mCurrentDate.getMonth(), mCurrentDate.getDate()-1);
setDay(load);
}
}
/**
* Shows the event listing
*/
public void showList() {
if (mDetail) {
mFlipper.setInAnimation(mInLeft);
mFlipper.setOutAnimation(mOutRight);
mFlipper.showPrevious();
mDetail=false;
}
}
    /**
     * Loads the osbridge schedule from a combination of ICal and json data
     * and installs a fresh adapter on the event list.
     * @param force - force reload instead of using cached data
     */
    private void loadSchedule(boolean force) {
        System.out.println("================Schedule================");
        //XXX set date to a day that is definitely, not now.
        // This will cause it to update the list immediately.
        mCurrentDate = new Date(1900, 0, 0);
        DataService service = getDataService();
        mConference = service.getConference(force);
        try {
            mDates = mConference.getDates();
            mAdapter = new EventAdapter(this, R.layout.listevent, new ArrayList<Event>());
            mEvents.setAdapter(mAdapter);
        } catch (Exception e) {
            // best-effort: on failure the previous adapter (if any) stays in place
            e.printStackTrace();
        }
        System.out.println("----------------Schedule----------------");
    }
protected Dialog onCreateDialog(int id){
Context context = getApplicationContext();
LayoutInflater inflater = (LayoutInflater) context.getSystemService(LAYOUT_INFLATER_SERVICE);
View v = inflater.inflate(R.layout.about, null);
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("About");
builder.setCancelable(true);
builder.setView(v);
builder.setIcon(android.R.drawable.ic_dialog_info);
final AlertDialog alert = builder.create();
return alert;
}
    /**
     * EventAdapter used for displaying a list of events, interleaved with
     * Date items acting as time-slot headers.
     */
    private class EventAdapter extends ArrayAdapter<Event> {
        // events of the currently displayed day
        private List<Event> mItems;
        // mItems interleaved with Date "time slot" marker objects for display
        private List<Object> mFiltered;
        public EventAdapter(Context context, int textViewResourceId,
                List<Event> items) {
            super(context, textViewResourceId, items);
            mItems = items;
            mFiltered = new ArrayList<Object>();
        }
        /**
         * Sets elements to the current schedule. This will use
         * cached data if already loaded. Else it will load it from
         * the dataservice
         * @param date - date to filter by
         */
        public void filterDay(Date date){
            // Load the data for the requested day, load it from dataservice if needed
            // construct a new date with just year,month,day since keys only have that set
            // XXX adjust for timezone by setting time to noon
            Date load = new Date(date.getYear(), date.getMonth(), date.getDate(), 12, 0);
            if (mSchedule.containsKey(load)){
                mItems = mSchedule.get(load).events;
            } else {
                DataService service = getDataService();
                Schedule schedule = service.getSchedule(load, false);
                mSchedule.put(load, schedule);
                mItems = schedule.events;
            }
            // Interleave a Date marker before the first event of each new
            // start time. NOTE(review): assumes events are sorted by start
            // time — confirm the dataservice guarantees that.
            List<Event> items = mItems;
            List<Object> filtered = new ArrayList<Object>();
            int size = mItems.size();
            Date currentStart = null;
            for (int i=0; i<size; i++){
                Event event = items.get(i);
                if(currentStart == null || event.start.after(currentStart)) {
                    currentStart = event.start;
                    filtered.add(currentStart);
                }
                filtered.add(event);
            }
            mFiltered = filtered;
            notifyDataSetChanged();
            now(date);
        }
        /**
         * Scrolls the list to the entry matching the given time.
         * @param date time to position the list at
         */
        public void now(Date date) {
            List<Object> filtered = mFiltered;
            int size = filtered.size();
            for (int i=0; i<size; i++){
                Object item = filtered.get(i);
                // find either the first session that hasn't ended yet
                // or the first time marker that hasn't occured yet.
                if (item instanceof Date ){
                    Date slot = (Date) item;
                    if (date.before(slot)) {
                        mEvents.setSelection(i);
                        return;
                    }
                } else {
                    Event event = (Event) item;
                    if (event.end.after(date)) {
                        // should display the time marker instead of the
                        // session (a marker always precedes the first event,
                        // so i-1 cannot go below 0 here)
                        mEvents.setSelection(i-1);
                        return;
                    }
                }
            }
            // no current event was found, jump to the next day
            next();
        }
        public int getCount(){
            return mFiltered.size();
        }
        /**
         * Renders either a time-slot header (for Date items) or an event row.
         * NOTE(review): convertView is never recycled — every row is
         * re-inflated, which hurts scroll performance.
         */
        public View getView(int position, View convertView, ViewGroup parent) {
            View v = convertView;
            LayoutInflater vi = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            Object item = mFiltered.get(position);
            if (item instanceof Date) {
                Date date = (Date)item;
                v = vi.inflate(R.layout.list_slot, null);
                TextView time = (TextView) v.findViewById(R.id.time);
                DateFormat formatter = new SimpleDateFormat("h:mm a");
                time.setText(formatter.format(date));
            } else {
                Event e = (Event) item;
                v = vi.inflate(R.layout.listevent, null);
                if (e != null) {
                    TextView title = (TextView) v.findViewById(R.id.title);
                    TextView locationView = (TextView) v.findViewById(R.id.location);
                    TextView time = (TextView) v.findViewById(R.id.time);
                    if (title != null) {
                        title.setText(e.title);
                    }
                    // -1 marks "no location" / "no track" sentinel values
                    if (e.location != -1) {
                        Location location = mConference.locations.get(e.location);
                        locationView.setText(location.name);
                    }
                    if (time != null) {
                        DateFormat formatter = new SimpleDateFormat("h:mm");
                        time.setText(formatter.format(e.start) + "-" + formatter.format(e.end));
                    }
                    if (e.track != -1) {
                        TextView track_view = (TextView) v.findViewById(R.id.track);
                        Track track = mConference.tracks.get(e.track);
                        track_view.setTextColor(Color.parseColor(track.color));
                        track_view.setText(track.name);
                    }
                }
            }
            return v;
        }
    }
/**
* Checks if two dates are the same day
* @param date1
* @param date2
* @return
*/
public static boolean isSameDay(Date date1, Date date2) {
if (date1 == null || date2 == null) {
throw new IllegalArgumentException("The date must not be null");
}
Calendar cal1 = Calendar.getInstance();
cal1.setTime(date1);
Calendar cal2 = Calendar.getInstance();
cal2.setTime(date2);
return isSameDay(cal1, cal2);
}
/**
* Checks if two calendars are the same day
* @param cal1
* @param cal2
* @return
*/
public static boolean isSameDay(Calendar cal1, Calendar cal2) {
if (cal1 == null || cal2 == null) {
throw new IllegalArgumentException("The date must not be null");
}
return (cal1.get(Calendar.ERA) == cal2.get(Calendar.ERA) &&
cal1.get(Calendar.YEAR) == cal2.get(Calendar.YEAR) &&
cal1.get(Calendar.DAY_OF_YEAR) == cal2.get(Calendar.DAY_OF_YEAR));
}
}
|
src/org/osb/ScheduleActivity.java
|
package org.osb;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.*;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.content.Intent;
import android.content.res.Resources;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.text.Html;
import android.text.method.LinkMovementMethod;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SubMenu;
import android.view.View;
import android.view.ViewGroup;
import android.view.View.OnClickListener;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.ScrollView;
import android.widget.TextView;
import android.widget.ViewFlipper;
/**
 * Main schedule screen: a ViewFlipper holding the per-day event list on
 * one face and a session-detail pane on the other.
 */
public class ScheduleActivity extends AbstractActivity {
    // Option-menu ids. Per-day entries use MENU_DATE_BASE + index into mDates,
    // so MENU_DATE_BASE must stay above every fixed id below.
    private static final int MENU_DATE_BASE = 1000;
    private static final int MENU_NEXT = 5;
    private static final int MENU_PREV = 6;
    private static final int MENU_ABOUT = 7;
    private static final int MENU_NOW = 8;
    private static final int MENU_REFRESH = 9;
    // state
    Date mCurrentDate;          // day currently shown in the list
    TextView mDate;             // header label showing mCurrentDate
    boolean mDetail = false;    // true while the detail pane is visible
    Handler mHandler;           // UI-thread handler used for posted loads
    // general conference data
    Conference mConference;
    Date[] mDates;                          // conference days, in order
    HashMap<Date, Schedule> mSchedule;      // per-day schedule cache (key is noon of the day)
    // session list
    EventAdapter mAdapter;
    ListView mEvents;
    // screen animation
    ViewFlipper mFlipper;
    Animation mInLeft;
    Animation mInRight;
    Animation mOutLeft;
    Animation mOutRight;
    // session details
    Event mEvent = null;                    // session shown in the detail pane
    HashMap<Integer, Speaker> mSpeakers;    // speaker cache keyed by speaker id
    View mHeader;
    TextView mTitle;
    TextView mTime;
    TextView mLocation;
    View mTimeLocation;
    TextView mSpeaker;
    ScrollView mDescriptionScroller;
    TextView mDescription;
    ImageView mMapImage;
    LinearLayout mBio;
    // session detail actions
    Button mFoursquare;
    Button mShare;
    Button mMap;
    Button mShowDescription;
    Button mShowBio;
    /**
     * Called when the activity is first created. Wires up all views,
     * installs the list/detail click handlers, and kicks off the initial
     * schedule load via the UI handler.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        mHandler = new Handler();
        // in-memory caches for speakers and per-day schedules
        mSpeakers = new HashMap<Integer, Speaker>();
        mSchedule = new HashMap<Date, Schedule>();
        mDate = (TextView) findViewById(R.id.date);
        mEvents = (ListView) findViewById(R.id.events);
        mFlipper = (ViewFlipper) findViewById(R.id.flipper);
        Context context = getApplicationContext();
        // slide animations for flipping between the list and detail panes
        mInLeft = AnimationUtils.loadAnimation(context, R.anim.slide_in_left);
        mInRight = AnimationUtils.loadAnimation(context, R.anim.slide_in_right);
        mOutLeft = AnimationUtils.loadAnimation(context, R.anim.slide_out_left);
        mOutRight = AnimationUtils.loadAnimation(context, R.anim.slide_out_right);
        // grab views for details
        View detail = findViewById(R.id.detail);
        mHeader = findViewById(R.id.detail_header);
        mSpeaker = (TextView) findViewById(R.id.speaker);
        mTitle = (TextView) detail.findViewById(R.id.title);
        mTimeLocation = detail.findViewById(R.id.time_location);
        mTime = (TextView) detail.findViewById(R.id.time);
        mLocation = (TextView) detail.findViewById(R.id.location);
        mDescription = (TextView) detail.findViewById(R.id.description);
        mDescriptionScroller = (ScrollView) detail.findViewById(R.id.description_scroller);
        mMapImage = (ImageView) detail.findViewById(R.id.map_image);
        mBio = (LinearLayout) detail.findViewById(R.id.bio);
        // detail action buttons
        mFoursquare = (Button) findViewById(R.id.foursquare);
        mShare = (Button) findViewById(R.id.share);
        mMap = (Button) findViewById(R.id.map);
        mShowDescription = (Button) findViewById(R.id.show_description);
        mShowBio = (Button) findViewById(R.id.show_bio);
        // tapping a list row opens the detail pane for that session
        mEvents.setOnItemClickListener(new ListView.OnItemClickListener() {
            public void onItemClick(AdapterView<?> adapterview, View view, int position, long id) {
                Object item = mAdapter.mFiltered.get(position);
                if (item instanceof Date) {
                    return;// ignore clicks on the dates
                }
                Event event = (Event) item;
                Track track = mConference.tracks.get(event.track);
                Location location = mConference.locations.get(event.location);
                if (!event.details){
                    // load detailed info for this session
                    DataService service = getDataService();
                    event = service.getEvent(event.id, false);
                    mAdapter.mFiltered.set(position, event);
                }
                // build a comma-separated speaker list, caching Speaker lookups
                String speaker_names = "";
                Speaker speaker;
                for(Integer sid: event.speaker_ids){
                    if (mSpeakers.containsKey(sid)){
                        speaker = mSpeakers.get(sid);
                    } else {
                        speaker = getDataService().getSpeaker(sid, false);
                        mSpeakers.put(sid, speaker);
                    }
                    // NOTE(review): reference comparison — works here only because ""
                    // is the interned literal assigned above; isEmpty() would be safer
                    if (speaker_names == "") {
                        speaker_names = speaker.name;
                    } else {
                        speaker_names = speaker_names + ", " + speaker.name;
                    }
                }
                mSpeaker.setText(speaker_names);
                // color the header and title text from the session's track
                mHeader.setBackgroundColor(Color.parseColor(track.color));
                mTitle.setText(event.title);
                mTitle.setTextColor(Color.parseColor(track.color_text));
                mLocation.setText(location.name);
                DateFormat startFormat = new SimpleDateFormat("E, h:mm");
                DateFormat endFormat = new SimpleDateFormat("h:mm a");
                String timeString = startFormat.format(event.start) + " - " + endFormat.format(event.end);
                mTime.setText(timeString);
                mTimeLocation.setBackgroundColor(Color.parseColor(track.color_dark));
                mDescription.setMovementMethod(LinkMovementMethod.getInstance());
                mDescription.setText(Html.fromHtml(event.description));
                show_description();
                mDescriptionScroller.scrollTo(0, 0);
                // flip from the list to the detail pane
                mFlipper.setInAnimation(mInRight);
                mFlipper.setOutAnimation(mOutLeft);
                mFlipper.showNext();
                mEvent = event;
                mDetail = true;
            }
        });
        mShowDescription.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                show_description();
            }
        });
        // "map" button: swap the description for a static map image of the room
        mMap.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                int id = getResources().getIdentifier("map_"+mEvent.location,"drawable",getPackageName());
                mDescription.setVisibility(View.GONE);
                mBio.setVisibility(View.GONE);
                // only set&show image if a map image was found
                if (id!=0){
                    mMapImage.setImageResource(id);
                    mMapImage.setVisibility(View.VISIBLE);
                }
            }
        });
        // "bio" button: replace the description with one bio view per speaker
        mShowBio.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                mBio.removeAllViews();
                Integer[] speaker_ids = mEvent.speaker_ids;
                if (speaker_ids != null) {
                    for (int i=0; i<speaker_ids.length; i++) {
                        View view = loadBioView(speaker_ids[i]);
                        if (view != null) {
                            if (i>0){
                                // visually separate consecutive speaker bios
                                view.setPadding(0, 30, 0, 0);
                            }
                            mBio.addView(view);
                        }
                    }
                    mDescription.setVisibility(View.GONE);
                    mMapImage.setVisibility(View.GONE);
                    mBio.setVisibility(View.VISIBLE);
                }
            }
            /**
             * loads a view populated with the speakers info
             * @param sid speaker id to look up (cached in mSpeakers)
             * @return the inflated bio view, or null if the speaker is unknown
             */
            private View loadBioView(int sid) {
                Integer id = new Integer(sid);
                Speaker speaker = null;
                View view = null;
                // check memory to see if speaker had already been loaded
                // else load the speaker from persistent storage
                if (mSpeakers.containsKey(id)){
                    speaker = mSpeakers.get(id);
                } else {
                    speaker = getDataService().getSpeaker(id, false);
                    mSpeakers.put(id, speaker);
                }
                // create view
                if (speaker != null) {
                    LayoutInflater vi = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
                    view = vi.inflate(R.layout.bio, null);
                    TextView name = (TextView) view.findViewById(R.id.name);
                    name.setText(speaker.name);
                    TextView biography = (TextView) view.findViewById(R.id.biography);
                    biography.setMovementMethod(LinkMovementMethod.getInstance());
                    biography.setText(Html.fromHtml(speaker.biography));
                    String twitter = speaker.twitter;
                    // NOTE(review): the != "" and != "null" checks below compare
                    // references, not content; they only catch interned literals —
                    // confirm whether the data layer guarantees that
                    if (twitter != null && twitter != "" && twitter != "null"){
                        TextView text = (TextView) view.findViewById(R.id.twitter);
                        text.setText(twitter);
                        View parent = (View) text.getParent();
                        parent.setVisibility(View.VISIBLE);
                    }
                    String website = speaker.website;
                    if (website != null && website != "" && website != "null"){
                        TextView text = (TextView) view.findViewById(R.id.website);
                        text.setText(speaker.website);
                        View parent = (View) text.getParent();
                        parent.setVisibility(View.VISIBLE);
                    }
                    String blog = speaker.blog;
                    if (blog != null && blog != "" && blog != "null"){
                        TextView text = (TextView) view.findViewById(R.id.blog);
                        text.setText(speaker.blog);
                        View parent = (View) text.getParent();
                        parent.setVisibility(View.VISIBLE);
                    }
                    if (speaker.affiliation != null){
                        TextView text = (TextView) view.findViewById(R.id.affiliation);
                        text.setText(speaker.affiliation);
                    }
                    String identica = speaker.identica;
                    if (identica != null && identica != "" && identica != "null"){
                        TextView text = (TextView) view.findViewById(R.id.identica);
                        text.setText(speaker.identica);
                        View parent = (View) text.getParent();
                        parent.setVisibility(View.VISIBLE);
                    }
                }
                return view;
            }
        });
        // "foursquare" button: open a check-in URL for the session's room
        mFoursquare.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                String url = mapRoomNameToFqUrl((mLocation).getText().toString());
                Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(url));
                startActivity(intent);
            }
            /** Maps a room name to its foursquare venue check-in URL. */
            private String mapRoomNameToFqUrl(String roomName) {
                String vid = "";
                if (roomName.equals("Hawthorne")) {
                    vid = "4281683";
                } else if (roomName.equals("Burnside")) {
                    vid = "4281826";
                } else if (roomName.equals("St. Johns")) {
                    vid = "4281970";
                } else if (roomName.equals("Broadway")) {
                    vid = "4281777";
                } else if (roomName.equals("Morrison")) {
                    vid = "4281923";
                } else if (roomName.equals("Fremont")) {
                    vid = "4281874";
                } else if (roomName.equals("Steel")) {
                    vid = "4282004";
                }
                return "http://m.foursquare.com/checkin?vid="+vid;
            }
        });
        // "share" button: hand the session title off to any ACTION_SEND app
        mShare.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                Intent intent = new Intent(android.content.Intent.ACTION_SEND);
                intent.setType("text/plain");
                Resources r = getApplicationContext().getResources();
                intent.putExtra(Intent.EXTRA_SUBJECT, r.getString(R.string.share_subject));
                intent.putExtra(Intent.EXTRA_TEXT, r.getString(R.string.share_text) + mTitle.getText() + r.getString(R.string.share_text2));
                startActivity(Intent.createChooser(intent, "Share"));
            }
        });
        // spawn loading into separate thread
        // NOTE(review): Handler.post runs on this (UI) thread, not a separate one
        mHandler.post(new Runnable() {
            public void run() {
                loadSchedule(false);
                now();
            }
        });
    }
/**
* Shows the session description, hides all other subviews
*/
private void show_description(){
mMapImage.setVisibility(View.GONE);
mBio.setVisibility(View.GONE);
mDescription.setVisibility(View.VISIBLE);
}
/**
* overridden to hook back button when on the detail page
*/
public boolean onKeyDown(int keyCode, KeyEvent event){
if (mDetail && keyCode == KeyEvent.KEYCODE_BACK){
showList();
return true;
}
return super.onKeyDown(keyCode, event);
}
/* Creates the menu items */
public boolean onCreateOptionsMenu(Menu menu) {
menu.add(0, MENU_PREV, 0, "Previous Day").setIcon(R.drawable.ic_menu_back);
SubMenu dayMenu = menu.addSubMenu("Day").setIcon(android.R.drawable.ic_menu_today);
DateFormat formatter = new SimpleDateFormat("EEEE, MMMM d");
Date date;
for (int i=0; i<mDates.length; i++){
date = mDates[i];
dayMenu.add(0, MENU_DATE_BASE+i, 0, formatter.format(date));
}
menu.add(0, MENU_NEXT, 0, "Next Day").setIcon(R.drawable.ic_menu_forward);
menu.add(0, MENU_NOW, 0, "Now").setIcon(R.drawable.time);
menu.add(0, MENU_REFRESH, 0, "Refresh").setIcon(R.drawable.ic_menu_refresh);
menu.add(0, MENU_ABOUT, 0, "About").setIcon(android.R.drawable.ic_menu_info_details);
return true;
}
/* Handles item selections */
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
switch (id) {
case MENU_NOW:
now();
return true;
case MENU_PREV:
previous();
return true;
case MENU_NEXT:
next();
return true;
case MENU_ABOUT:
showDialog(0);
return true;
case MENU_REFRESH:
mHandler.post(new Runnable() {
public void run() {
loadSchedule(true);
now();
}
});
return true;
default:
if (id >= MENU_DATE_BASE) {
// must be a date menu option. all dates
// menu options are an index offset by MENU_DATE_BASE
setDay(mDates[item.getItemId()-MENU_DATE_BASE]);
return true;
}
}
return false;
}
/* sets the current day, filtering the list if need be */
public void setDay(Date date) {
if (isSameDay(mCurrentDate, date)) {
// same day, just jump to current time
mAdapter.now(date);
} else {
// different day, update the list. Load the date requested
// if it is not already loaded
mCurrentDate = date;
mAdapter.filterDay(date);
DateFormat formatter = new SimpleDateFormat("E, MMMM d");
mDate.setText(formatter.format(mCurrentDate));
}
// take user back to the listings if not already there
showList();
}
/**
* Jumps the user to right now in the event list:
*
* - if its before or after the conference, it shows the beginning
* of day 1
* - if its during the conference it will show the first event
* currently underway
*/
public void now(){
Date now = new Date();
if (now.before(mDates[0]) || now.after(mConference.end)) {
setDay((Date) mDates[0].clone());
} else {
// use now, since it will have the time of day for
// jumping to the right time
setDay(now);
}
}
/**
* Jumps to the next day, if not already at the end
*/
public void next() {
try {
if (!isSameDay(mCurrentDate, mConference.end)) {
Date load = new Date(mCurrentDate.getYear(), mCurrentDate.getMonth(), mCurrentDate.getDate()+1);
setDay(load);
}
} catch (Exception e) {
e.printStackTrace();
}
}
/**
* Jumps to the previous day if now already at the beginning
*/
public void previous() {
if (!isSameDay(mCurrentDate, mConference.start)) {
Date load = new Date(mCurrentDate.getYear(), mCurrentDate.getMonth(), mCurrentDate.getDate()-1);
setDay(load);
}
}
/**
* Shows the event listing
*/
public void showList() {
if (mDetail) {
mFlipper.setInAnimation(mInLeft);
mFlipper.setOutAnimation(mOutRight);
mFlipper.showPrevious();
mDetail=false;
}
}
    /**
     * Loads the osbridge schedule from a combination of ICal and json data
     * @param force - force reload
     */
    private void loadSchedule(boolean force) {
        System.out.println("================Schedule================");
        //XXX set date to a day that is definitely, not now.
        // This will cause it to update the list immediately.
        // (setDay compares against mCurrentDate via isSameDay, so a
        // guaranteed-stale sentinel forces the first filter to run)
        mCurrentDate = new Date(1900, 0, 0);
        DataService service = getDataService();
        mConference = service.getConference(force);
        try {
            // build a fresh adapter over an initially empty event list;
            // filterDay() populates it later
            mDates = mConference.getDates();
            mAdapter = new EventAdapter(this, R.layout.listevent, new ArrayList<Event>());
            mEvents.setAdapter(mAdapter);
        } catch (Exception e) {
            // NOTE(review): failures are only logged; the UI keeps whatever
            // adapter/dates it had before — confirm that is the intent
            e.printStackTrace();
        }
        System.out.println("----------------Schedule----------------");
    }
protected Dialog onCreateDialog(int id){
Context context = getApplicationContext();
LayoutInflater inflater = (LayoutInflater) context.getSystemService(LAYOUT_INFLATER_SERVICE);
View v = inflater.inflate(R.layout.about, null);
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("About");
builder.setCancelable(true);
builder.setView(v);
builder.setIcon(android.R.drawable.ic_dialog_info);
final AlertDialog alert = builder.create();
return alert;
}
    /**
     * EventAdapter used for displaying a list of events
     *
     * Backs the main ListView with a mixed display list (mFiltered) that
     * interleaves Date objects (time-slot headers) with Event rows for the
     * currently selected day.
     */
    private class EventAdapter extends ArrayAdapter<Event> {
        // raw events for the loaded day
        private List<Event> mItems;
        // display list: Date entries act as time-slot headers between events
        private List<Object> mFiltered;
        public EventAdapter(Context context, int textViewResourceId,
                List<Event> items) {
            super(context, textViewResourceId, items);
            mItems = items;
            mFiltered = new ArrayList<Object>();
        }
        /**
         * Sets elements to the current schedule. This will use
         * cached data if already loaded. Else it will load it from
         * the dataservice
         * @param date - date to filter by
         */
        public void filterDay(Date date){
            // Load the data for the requested day, load it from dataservice if needed
            // construct a new date with just year,month,day since keys only have that set
            // XXX adjust for timezone by setting time to noon
            Date load = new Date(date.getYear(), date.getMonth(), date.getDate(), 12, 0);
            if (mSchedule.containsKey(load)){
                mItems = mSchedule.get(load).events;
            } else {
                DataService service = getDataService();
                Schedule schedule = service.getSchedule(load, false);
                mSchedule.put(load, schedule);
                mItems = schedule.events;
            }
            // interleave a Date marker before each new start time
            List<Event> items = mItems;
            List<Object> filtered = new ArrayList<Object>();
            int size = mItems.size();
            Date currentStart = null;
            for (int i=0; i<size; i++){
                Event event = items.get(i);
                if(isSameDay(date, event.start)){
                    // assumes events are ordered by start time — TODO confirm
                    // the data service guarantees that
                    if(currentStart == null || event.start.after(currentStart)) {
                        currentStart = event.start;
                        filtered.add(currentStart);
                    }
                    filtered.add(event);
                }
            }
            mFiltered = filtered;
            notifyDataSetChanged();
            // scroll the freshly filtered list to the requested time
            now(date);
        }
        /**
         * sets the position to the current time
         * @param date
         */
        public void now(Date date) {
            List<Object> filtered = mFiltered;
            int size = filtered.size();
            for (int i=0; i<size; i++){
                Object item = filtered.get(i);
                // find either the first session that hasn't ended yet
                // or the first time marker that hasn't occured yet.
                if (item instanceof Date ){
                    Date slot = (Date) item;
                    if (date.before(slot)) {
                        mEvents.setSelection(i);
                        return;
                    }
                } else {
                    Event event = (Event) item;
                    if (event.end.after(date)) {
                        // should display the time marker instead of the
                        // session
                        // NOTE(review): i-1 relies on every event being
                        // preceded by its time marker (so i >= 1 here) —
                        // verify filterDay always maintains that invariant
                        mEvents.setSelection(i-1);
                        return;
                    }
                }
            }
            // no current event was found, jump to the next day
            next();
        }
        // size of the mixed display list, not of the raw event list
        public int getCount(){
            return mFiltered.size();
        }
        /** Inflates either a time-slot header row (Date) or an event row. */
        public View getView(int position, View convertView, ViewGroup parent) {
            View v = convertView;
            // NOTE(review): convertView is never reused — every call inflates
            // a fresh row, which defeats ListView recycling
            LayoutInflater vi = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            Object item = mFiltered.get(position);
            if (item instanceof Date) {
                Date date = (Date)item;
                v = vi.inflate(R.layout.list_slot, null);
                TextView time = (TextView) v.findViewById(R.id.time);
                DateFormat formatter = new SimpleDateFormat("h:mm a");
                time.setText(formatter.format(date));
            } else {
                Event e = (Event) item;
                v = vi.inflate(R.layout.listevent, null);
                if (e != null) {
                    TextView title = (TextView) v.findViewById(R.id.title);
                    TextView locationView = (TextView) v.findViewById(R.id.location);
                    TextView time = (TextView) v.findViewById(R.id.time);
                    if (title != null) {
                        title.setText(e.title);
                    }
                    if (e.location != -1) {
                        Location location = mConference.locations.get(e.location);
                        locationView.setText(location.name);
                    }
                    if (time != null) {
                        DateFormat formatter = new SimpleDateFormat("h:mm");
                        time.setText(formatter.format(e.start) + "-" + formatter.format(e.end));
                    }
                    if (e.track != -1) {
                        // tint the track label with the track's color
                        TextView track_view = (TextView) v.findViewById(R.id.track);
                        Track track = mConference.tracks.get(e.track);
                        track_view.setTextColor(Color.parseColor(track.color));
                        track_view.setText(track.name);
                    }
                }
            }
            return v;
        }
    }
/**
* Checks if two dates are the same day
* @param date1
* @param date2
* @return
*/
public static boolean isSameDay(Date date1, Date date2) {
if (date1 == null || date2 == null) {
throw new IllegalArgumentException("The date must not be null");
}
Calendar cal1 = Calendar.getInstance();
cal1.setTime(date1);
Calendar cal2 = Calendar.getInstance();
cal2.setTime(date2);
return isSameDay(cal1, cal2);
}
/**
* Checks if two calendars are the same day
* @param cal1
* @param cal2
* @return
*/
public static boolean isSameDay(Calendar cal1, Calendar cal2) {
if (cal1 == null || cal2 == null) {
throw new IllegalArgumentException("The date must not be null");
}
return (cal1.get(Calendar.ERA) == cal2.get(Calendar.ERA) &&
cal1.get(Calendar.YEAR) == cal2.get(Calendar.YEAR) &&
cal1.get(Calendar.DAY_OF_YEAR) == cal2.get(Calendar.DAY_OF_YEAR));
}
}
|
filterDay no longer checks issameday since the list is already filtered by day
|
src/org/osb/ScheduleActivity.java
|
filterDay no longer checks issameday since the list is already filtered by day
|
|
Java
|
mit
|
8ed2d4cd1169ea60e218171463c86a61e0cd148e
| 0
|
nyee/RMG-Java,jwallen/RMG-Java,keceli/RMG-Java,keceli/RMG-Java,enochd/RMG-Java,nyee/RMG-Java,KEHANG/RMG-Java,connie/RMG-Java,connie/RMG-Java,KEHANG/RMG-Java,ReactionMechanismGenerator/RMG-Java,faribas/RMG-Java,enochd/RMG-Java,ReactionMechanismGenerator/RMG-Java,jwallen/RMG-Java,connie/RMG-Java,nyee/RMG-Java,faribas/RMG-Java,rwest/RMG-Java,enochd/RMG-Java,keceli/RMG-Java,keceli/RMG-Java,KEHANG/RMG-Java,keceli/RMG-Java,keceli/RMG-Java,jwallen/RMG-Java,KEHANG/RMG-Java,faribas/RMG-Java,rwest/RMG-Java,ReactionMechanismGenerator/RMG-Java,enochd/RMG-Java,jwallen/RMG-Java,enochd/RMG-Java,nyee/RMG-Java,ReactionMechanismGenerator/RMG-Java,nyee/RMG-Java,rwest/RMG-Java,ReactionMechanismGenerator/RMG-Java,connie/RMG-Java,KEHANG/RMG-Java,rwest/RMG-Java,nyee/RMG-Java,enochd/RMG-Java,jwallen/RMG-Java,faribas/RMG-Java,KEHANG/RMG-Java,rwest/RMG-Java,connie/RMG-Java,jwallen/RMG-Java
|
////////////////////////////////////////////////////////////////////////////////
//
// RMG - Reaction Mechanism Generator
//
// Copyright (c) 2002-2009 Prof. William H. Green (whgreen@mit.edu) and the
// RMG Team (rmg_dev@mit.edu)
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
////////////////////////////////////////////////////////////////////////////////
package jing.chem;
import java.util.*;
import jing.chemUtil.*;
import jing.param.*;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
//quantum mechanics thermo property estimator; analog of GATP
public class QMTP implements GeneralGAPP {
    // enthalpy contribution per hydrogen atom used by the HBI correction
    // (presumably kcal/mol — confirm against ThermoGAValue's units)
    final protected static double ENTHALPY_HYDROGEN = 52.1; //needed for HBI
    // eagerly-created singleton instance
    private static QMTP INSTANCE = new QMTP(); //## attribute INSTANCE
    protected static PrimaryThermoLibrary primaryLibrary;//Note: may be able to separate this out into GeneralGAPP, as this is common to both GATP and QMTP
    // directory where QM input/output files are written and read back
    public static String qmfolder= "QMfiles/";
    // protected static HashMap library; //as above, may be able to move this and associated functions to GeneralGAPP (and possibly change from "x implements y" to "x extends y"), as it is common to both GATP and QMTP
    // group-additivity library consulted for radical (HBI) corrections
    protected ThermoGAGroupLibrary thermoLibrary; //needed for HBI
    public static String qmprogram= "both";//the qmprogram can be "mopac", "gaussian03", "both" (MOPAC and Gaussian), or "mm4"
    public static boolean usePolar = false; //use polar keyword in MOPAC
    public static boolean useCanTherm = false; //whether to use CanTherm in MM4 cases for interpreting output via force-constant matrix; this will hopefully avoid zero frequency issues
    public static boolean useHindRot = false;//whether to use HinderedRotor scans with MM4 (requires useCanTherm=true)
    // Constructors
    //## operation QMTP()
    /** Private constructor; a single INSTANCE is created statically above. */
    private QMTP() {
        // initializeLibrary(); //gmagoon 72509: commented out in GATP, so I am mirroring the change here; other library functions below also commented out
        initializePrimaryThermoLibrary();
    }
    //## operation generateThermoData(ChemGraph)
    /**
     * Returns thermo data for the species: the primary-thermo-library value
     * when available, otherwise a QM/MM calculation. Species with more
     * radicals than Global.maxRadNumForQM are handled by saturating the
     * radical sites with H, computing thermo for the saturated molecule,
     * and applying HBI radical corrections; the chem graph is mutated in
     * place during this process and restored before returning.
     */
    public ThermoData generateThermoData(ChemGraph p_chemGraph) {
        //#[ operation generateThermoData(ChemGraph)
        //first, check for thermo data in the primary thermo library and library (?); if it is there, use it
        ThermoData result = primaryLibrary.getThermoData(p_chemGraph.getGraph());
        //System.out.println(result);
        if (result != null) {
            p_chemGraph.fromprimarythermolibrary = true;
            return result;
        }
        // result = getFromLibrary(p_chemGraph.getChemicalFormula());//gmagoon 72509: commented out in GATP, so I am mirroring the change here
        // if (result != null) return result;
        result=new ThermoData();
        int maxRadNumForQM = Global.maxRadNumForQM;
        if (p_chemGraph.getRadicalNumber() > maxRadNumForQM)//use HBI if the molecule has more radicals than maxRadNumForQM; this is helpful because ; also MM4 (and MM3) look like they may have issues with radicals
        {//this code is based closely off of GATP saturation (in getGAGroup()), but there are some modifications, particularly for symmetry correction
            //find the initial symmetry number
            int sigmaRadical = p_chemGraph.getSymmetryNumber();
            Graph g = p_chemGraph.getGraph();
            HashMap oldCentralNode = (HashMap)(p_chemGraph.getCentralNode()).clone();
            // saturate radical site
            int max_radNum_molecule = ChemGraph.getMAX_RADICAL_NUM();
            int max_radNum_atom = Math.min(8,max_radNum_molecule);
            int [] idArray = new int[max_radNum_molecule];
            Atom [] atomArray = new Atom[max_radNum_molecule];
            Node [][] newnode = new Node[max_radNum_molecule][max_radNum_atom];
            int radicalSite = 0;
            Iterator iter = p_chemGraph.getNodeList();
            // "satuated" (sic) zero-free-electron state used to replace radicals
            FreeElectron satuated = FreeElectron.make("0");
            while (iter.hasNext()) {
                Node node = (Node)iter.next();
                Atom atom = (Atom)node.getElement();
                if (atom.isRadical()) {
                    radicalSite ++;
                    // save the old radical atom
                    idArray[radicalSite-1] = node.getID().intValue();
                    atomArray[radicalSite-1] = atom;
                    // new a satuated atom and replace the old one
                    Atom newAtom = new Atom(atom.getChemElement(),satuated);
                    node.setElement(newAtom);
                    node.updateFeElement();
                }
            }
            // add H to saturate chem graph
            Atom H = Atom.make(ChemElement.make("H"),satuated);
            Bond S = Bond.make("S");
            for (int i=0;i<radicalSite;i++) {
                Node node = p_chemGraph.getNodeAt(idArray[i]);
                Atom atom = atomArray[i];
                int HNum = atom.getRadicalNumber();
                for (int j=0;j<HNum;j++) {
                    newnode[i][j] = g.addNode(H);
                    g.addArcBetween(node,S,newnode[i][j]);
                }
                node.updateFgElement();
            }
            //find the saturated symmetry number
            int sigmaSaturated = p_chemGraph.getSymmetryNumber();
            // result = generateThermoData(g);//I'm not sure what GATP does, but this recursive calling will use HBIs on saturated species if it exists in PrimaryThermoLibrary
            //check the primary thermo library for the saturated graph
            // (p_chemGraph has been saturated in place at this point)
            result = primaryLibrary.getThermoData(p_chemGraph.getGraph());
            //System.out.println(result);
            if (result != null) {
                p_chemGraph.fromprimarythermolibrary = true;
            }
            else{
                result=generateQMThermoData(p_chemGraph);
            }
            // find the BDE for all radical groups
            if(thermoLibrary == null) initGAGroupLibrary();
            for (int i=0; i<radicalSite; i++) {
                // temporarily restore the radical at site i to look up its
                // HBI group value, then re-saturate before the next site
                int id = idArray[i];
                Node node = g.getNodeAt(id);
                Atom old = (Atom)node.getElement();
                node.setElement(atomArray[i]);
                node.updateFeElement();
                // get rid of the extra H at ith site
                int HNum = atomArray[i].getRadicalNumber();
                for (int j=0;j<HNum;j++) {
                    g.removeNode(newnode[i][j]);
                }
                node.updateFgElement();
                p_chemGraph.resetThermoSite(node);
                ThermoGAValue thisGAValue = thermoLibrary.findRadicalGroup(p_chemGraph);
                if (thisGAValue == null) {
                    System.err.println("Radical group not found: " + node.getID());
                }
                else {
                    //System.out.println(node.getID() + " radical correction: " + thisGAValue.getName() + " "+thisGAValue.toString());
                    result.plus(thisGAValue);
                }
                //recover the saturated site for next radical site calculation
                node.setElement(old);
                node.updateFeElement();
                for (int j=0;j<HNum;j++) {
                    newnode[i][j] = g.addNode(H);
                    g.addArcBetween(node,S,newnode[i][j]);
                }
                node.updateFgElement();
            }
            // recover the chem graph structure
            // recover the radical
            for (int i=0; i<radicalSite; i++) {
                int id = idArray[i];
                Node node = g.getNodeAt(id);
                node.setElement(atomArray[i]);
                node.updateFeElement();
                int HNum = atomArray[i].getRadicalNumber();
                //get rid of extra H
                for (int j=0;j<HNum;j++) {
                    g.removeNode(newnode[i][j]);
                }
                node.updateFgElement();
            }
            // subtract the enthalphy of H from the result
            int rad_number = p_chemGraph.getRadicalNumber();
            ThermoGAValue enthalpy_H = new ThermoGAValue(ENTHALPY_HYDROGEN * rad_number, 0,0,0,0,0,0,0,0,0,0,0,null);
            result.minus(enthalpy_H);
            //correct the symmetry number based on the relative radical and saturated symmetry number; this should hopefully sidestep potential complications based on the fact that certain symmetry effects could be included in HBI value itself, and the fact that the symmetry number correction for saturated molecule has already been implemented, and it is likely to be different than symmetry number considered here, since the correction for the saturated molecule will have been external symmetry number, whereas RMG's ChemGraph symmetry number estimator includes both internal and external symmetry contributions; even so, I don't know if this will handle a change from chiral to achiral (or vice versa) properly
            ThermoGAValue symmetryNumberCorrection = new ThermoGAValue(0,-1*GasConstant.getCalMolK()*Math.log((double)(sigmaRadical)/(double)(sigmaSaturated)),0,0,0,0,0,0,0,0,0,0,null);
            result.plus(symmetryNumberCorrection);
            p_chemGraph.setCentralNode(oldCentralNode);
            //display corrected thermo to user
            String [] InChInames = getQMFileName(p_chemGraph);//determine the filename (InChIKey) and InChI with appended info for triplets, etc.
            String name = InChInames[0];
            String InChIaug = InChInames[1];
            System.out.println("HBI-based thermo for " + name + "("+InChIaug+"): "+ result.toString());//print result, at least for debugging purposes
        }
        else{
            result = generateQMThermoData(p_chemGraph);
        }
        return result;
        //#]
    }
public ThermoData generateQMThermoData(ChemGraph p_chemGraph){
//if there is no data in the libraries, calculate the result based on QM or MM calculations; the below steps will be generalized later to allow for other quantum mechanics packages, etc.
String qmProgram = qmprogram;
String qmMethod = "";
if(qmProgram.equals("mm4")){
qmMethod = "mm4";
}
else{
qmMethod="pm3"; //may eventually want to pass this to various functions to choose which "sub-function" to call
}
ThermoData result = new ThermoData();
String [] InChInames = getQMFileName(p_chemGraph);//determine the filename (InChIKey) and InChI with appended info for triplets, etc.
String name = InChInames[0];
String InChIaug = InChInames[1];
String directory = qmfolder;
File dir=new File(directory);
directory = dir.getAbsolutePath();//this and previous three lines get the absolute path for the directory
if(qmMethod.equals("pm3")){
//first, check to see if the result already exists and the job terminated successfully
boolean gaussianResultExists = successfulGaussianResultExistsQ(name,directory,InChIaug);
boolean mopacResultExists = successfulMopacResultExistsQ(name,directory,InChIaug);
if(!gaussianResultExists && !mopacResultExists){//if a successful result doesn't exist from previous run (or from this run), run the calculation; if a successful result exists, we will skip directly to parsing the file
//steps 1 and 2: create 2D and 3D mole files
molFile p_3dfile = create3Dmolfile(name, p_chemGraph);
//3. create the Gaussian or MOPAC input file
directory = qmfolder;
dir=new File(directory);
directory = dir.getAbsolutePath();//this and previous three lines get the absolute path for the directory
int attemptNumber=1;//counter for attempts using different keywords
int successFlag=0;//flag for success of Gaussian run; 0 means it failed, 1 means it succeeded
int maxAttemptNumber=1;
int multiplicity = p_chemGraph.getRadicalNumber()+1; //multiplicity = radical number + 1
while(successFlag==0 && attemptNumber <= maxAttemptNumber){
//IF block to check which program to use
if (qmProgram.equals("gaussian03")){
if(p_chemGraph.getAtomNumber() > 1){
maxAttemptNumber = createGaussianPM3Input(name, directory, p_3dfile, attemptNumber, InChIaug, multiplicity);
}
else{
maxAttemptNumber = createGaussianPM3Input(name, directory, p_3dfile, -1, InChIaug, multiplicity);//use -1 for attemptNumber for monoatomic case
}
//4. run Gaussian
successFlag = runGaussian(name, directory);
}
else if (qmProgram.equals("mopac") || qmProgram.equals("both")){
maxAttemptNumber = createMopacPM3Input(name, directory, p_3dfile, attemptNumber, InChIaug, multiplicity);
successFlag = runMOPAC(name, directory);
}
else{
System.out.println("Unsupported quantum chemistry program");
System.exit(0);
}
//new IF block to check success
if(successFlag==1){
System.out.println("Attempt #"+attemptNumber + " on species " + name + " ("+InChIaug+") succeeded.");
}
else if(successFlag==0){
if(attemptNumber==maxAttemptNumber){//if this is the last possible attempt, and the calculation fails, exit with an error message
if(qmProgram.equals("both")){ //if we are running with "both" option and all keywords fail, try with Gaussian
qmProgram = "gaussian03";
System.out.println("*****Final MOPAC attempt (#" + maxAttemptNumber + ") on species " + name + " ("+InChIaug+") failed. Trying to use Gaussian.");
attemptNumber=0;//this needs to be 0 so that when we increment attemptNumber below, it becomes 1 when returning to the beginning of the for loop
maxAttemptNumber=1;
}
else{
System.out.println("*****Final attempt (#" + maxAttemptNumber + ") on species " + name + " ("+InChIaug+") failed.");
System.out.print(p_chemGraph.toString());
System.exit(0);
// return new ThermoData(1000,0,0,0,0,0,0,0,0,0,0,0,"failed calculation");
}
}
System.out.println("*****Attempt #"+attemptNumber + " on species " + name + " ("+InChIaug+") failed. Will attempt a new keyword.");
attemptNumber++;//try again with new keyword
}
}
}
//5. parse QM output and record as thermo data (function includes symmetry/point group calcs, etc.); if both Gaussian and MOPAC results exist, Gaussian result is used
if (gaussianResultExists || (qmProgram.equals("gaussian03") && !mopacResultExists)){
result = parseGaussianPM3(name, directory, p_chemGraph);
}
else if (mopacResultExists || qmProgram.equals("mopac") || qmProgram.equals("both")){
result = parseMopacPM3(name, directory, p_chemGraph);
}
else{
System.out.println("Unexpected situation in QMTP thermo estimation");
System.exit(0);
}
}
else{//mm4 case
//first, check to see if the result already exists and the job terminated successfully
boolean mm4ResultExists = successfulMM4ResultExistsQ(name,directory,InChIaug);
if(!mm4ResultExists){//if a successful result doesn't exist from previous run (or from this run), run the calculation; if a successful result exists, we will skip directly to parsing the file
//steps 1 and 2: create 2D and 3D mole files
molFile p_3dfile = create3Dmolfile(name, p_chemGraph);
//3. create the MM4 input file
directory = qmfolder;
dir=new File(directory);
directory = dir.getAbsolutePath();//this and previous three lines get the absolute path for the directory
int attemptNumber=1;//counter for attempts using different keywords
int successFlag=0;//flag for success of MM4 run; 0 means it failed, 1 means it succeeded
int maxAttemptNumber=1;
int multiplicity = p_chemGraph.getRadicalNumber()+1; //multiplicity = radical number + 1
while(successFlag==0 && attemptNumber <= maxAttemptNumber){
maxAttemptNumber = createMM4Input(name, directory, p_3dfile, attemptNumber, InChIaug, multiplicity);
//4. run MM4
successFlag = runMM4(name, directory);
//new IF block to check success
if(successFlag==1){
System.out.println("Attempt #"+attemptNumber + " on species " + name + " ("+InChIaug+") succeeded.");
}
else if(successFlag==0){
if(attemptNumber==maxAttemptNumber){//if this is the last possible attempt, and the calculation fails, exit with an error message
System.out.println("*****Final attempt (#" + maxAttemptNumber + ") on species " + name + " ("+InChIaug+") failed.");
System.out.print(p_chemGraph.toString());
System.exit(0);
//return new ThermoData(1000,0,0,0,0,0,0,0,0,0,0,0,"failed calculation");
}
System.out.println("*****Attempt #"+attemptNumber + " on species " + name + " ("+InChIaug+") failed. Will attempt a new keyword.");
attemptNumber++;//try again with new keyword
}
}
}
//5. parse MM4 output and record as thermo data (function includes symmetry/point group calcs, etc.); if both Gaussian and MOPAC results exist, Gaussian result is used
if(!useCanTherm) result = parseMM4(name, directory, p_chemGraph);
else result = parseMM4withForceMat(name, directory, p_chemGraph);
}
return result;
}
//returns the shared QMTP singleton held in the static INSTANCE field
protected static QMTP getINSTANCE() {
return INSTANCE;
}
//points the primaryLibrary field at the PrimaryThermoLibrary singleton; called before thermo estimation so library values can take precedence
public void initializePrimaryThermoLibrary(){//svp
primaryLibrary = PrimaryThermoLibrary.getINSTANCE();
}
//creates a 3D molFile; for monoatomic species, it just returns the 2D molFile
public molFile create3Dmolfile(String name, ChemGraph p_chemGraph){
//step 1: write the 2D mol file
//the absolute path is used so we can easily reference it from other directories in command-line paths
//(RMG.workingDirectory cannot be used here since it basically holds the RMG environment variable, not the working directory)
File twoDdir = new File("2Dmolfiles/");
String twoDpath = twoDdir.getAbsolutePath();
molFile twoDfile = new molFile(name, twoDpath, p_chemGraph);
molFile threeDfile = new molFile();//it seems this must be initialized, so we initialize to an empty object
//step 2: convert from 2D to 3D using RDKit when the molecule has two or more atoms;
//a single atom needs no embedding, so its 2D file is returned unchanged
int atomCount = p_chemGraph.getAtomNumber();
if(atomCount <= 1){
return twoDfile;
}
//the number of conformer attempts scales linearly with molecule size, due to time considerations;
//in practice, it is probably more like 3^(n-3) or something like that
int embedAttempts = 1;
if(atomCount > 3){//this check prevents the number of attempts from being negative
embedAttempts = 5*(atomCount-3);
}
threeDfile = embed3D(twoDfile, embedAttempts);
return threeDfile;
}
//embed a molecule in 3D, using RDKit
//twoDmolFile: the 2D mol file to embed; numConfAttempts: number of conformer-embedding attempts passed to the RDKit script
//returns a molFile handle for the newly written 3D file (same base name, in the 3Dmolfiles directory)
//NOTE(review): a Python-side error is reported on stderr/stdout but does not abort; the returned handle may point at a file that was never written -- confirm callers tolerate this
public molFile embed3D(molFile twoDmolFile, int numConfAttempts){
//convert to 3D MOL file using RDKit script
int flag=0;
String directory = "3Dmolfiles/";
File dir=new File(directory);
directory = dir.getAbsolutePath();//this uses the absolute path for the directory
String name = twoDmolFile.getName();
try{
File runningdir=new File(directory);
String command="";
//build the python command line; Windows paths may contain spaces, so they are quoted on that platform only
if (System.getProperty("os.name").toLowerCase().contains("windows")){//special windows case where paths can have spaces and are allowed to be surrounded by quotes
command = "python \""+System.getProperty("RMG.workingDirectory")+"/scripts/distGeomScriptMolLowestEnergyConf.py\" ";
String twoDmolpath=twoDmolFile.getPath();
command=command.concat("\""+twoDmolpath+"\" ");
command=command.concat("\""+name+".mol\" ");//this is the target file name; use the same name as the twoDmolFile (but it will be in he 3Dmolfiles folder
command=command.concat("\""+name+".cmol\" ");//this is the target file name for crude coordinates (corresponding to the minimum energy conformation based on UFF refinement); use the same name as the twoDmolFile (but it will be in he 3Dmolfiles folder) and have suffix .cmol
command=command.concat(numConfAttempts + " ");
command=command.concat("\"" + System.getenv("RDBASE")+"\"");//pass the $RDBASE environment variable to the script so it can use the approprate directory when importing rdkit
}
else{//non-Windows case
command = "python "+System.getProperty("RMG.workingDirectory")+"/scripts/distGeomScriptMolLowestEnergyConf.py ";
String twoDmolpath=twoDmolFile.getPath();
command=command.concat(""+twoDmolpath+" ");
command=command.concat(name+".mol ");//this is the target file name; use the same name as the twoDmolFile (but it will be in he 3Dmolfiles folder
command=command.concat(name+".cmol ");//this is the target file name for crude coordinates (corresponding to the minimum energy conformation based on UFF refinement); use the same name as the twoDmolFile (but it will be in he 3Dmolfiles folder) and have suffix .cmol
command=command.concat(numConfAttempts + " ");
command=command.concat(System.getenv("RDBASE"));//pass the $RDBASE environment variable to the script so it can use the approprate directory when importing rdkit
}
Process pythonProc = Runtime.getRuntime().exec(command, null, runningdir);
String killmsg= "Python process for "+twoDmolFile.getName()+" did not complete within 120 seconds, and the process was killed. File was probably not written.";//message to print if the process times out
Thread timeoutThread = new TimeoutKill(pythonProc, killmsg, 120000L); //create a timeout thread to handle cases where the UFF optimization get's locked up (cf. Ch. 16 of "Ivor Horton's Beginning Java 2: JDK 5 Edition"); once we use the updated version of RDKit, we should be able to get rid of this
timeoutThread.start();//start the thread
//check for errors and display the error if there is one
//draining stderr here also prevents the child process from blocking on a full pipe buffer
InputStream is = pythonProc.getErrorStream();
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
String line=null;
while ( (line = br.readLine()) != null) {
line = line.trim();
System.err.println(line);
flag=1;
}
//if there was an error, indicate the file and InChI
if(flag==1){
System.out.println("RDKit received error (see above) on " + twoDmolFile.getName()+". File was probably not written.");
}
int exitValue = pythonProc.waitFor();
if(timeoutThread.isAlive())//if the timeout thread is still alive (indicating that the process has completed in a timely manner), stop the timeout thread
timeoutThread.interrupt();
}
catch (Exception e) {
//NOTE(review): err is built but never printed, and the process exits with status 0 on failure; the stack trace is the only diagnostic
String err = "Error in running RDKit Python process \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
// gmagoon 6/3/09 comment out InChI checking for now; in any case, the code will need to be updated, as it is copied from my testing code
//	//check whether the original InChI is reproduced
//	if(flag==0){
//	    try{
//		File f=new File("c:/Python25/"+molfilename);
//		File newFile= new File("c:/Python25/mol3d.mol");
//		if(newFile.exists()){
//		    newFile.delete();//apparently renaming will not work unless target file does not exist (at least on Vista)
//		}
//		f.renameTo(newFile);
//		String command = "c:/Users/User1/Documents/InChI-1/cInChI-1.exe c:/Python25/mol3d.mol inchi3d.inchi /AuxNone /DoNotAddH";//DoNotAddH used to prevent adding Hs to radicals (this would be a problem for current RDKit output which doesn't use M  RAD notation)
//		Process inchiProc = Runtime.getRuntime().exec(command);
//		//	int exitValue = inchiProc.waitFor();
//		Thread.sleep(200);//****update: can probably eliminate this using buffered reader
//		inchiProc.destroy();
//
//		//read output file
//		File outputFile = new File("inchi3d.inchi");
//		FileReader fr = new FileReader(outputFile);
//		BufferedReader br = new BufferedReader(fr);
//		String line=null;
//		String inchi3d=null;
//		while ( (line = br.readLine()) != null) {
//		    line = line.trim();
//		    if(line.startsWith("InChI="))
//		    {
//			inchi3d=line;
//		    }
//		}
//		fr.close();
//
//		//return file to original name:
//		File f2=new File("c:/Python25/mol3d.mol");
//		File newFile2= new File("c:/Python25/"+molfilename);
//		if(newFile2.exists()){
//		    newFile2.delete();
//		}
//		f2.renameTo(newFile2);
//
//		//compare inchi3d with input inchi and print a message if they don't match
//		if(!inchi3d.equals(inchiString)){
//		    if(inchi3d.startsWith(inchiString)&&inchiString.length()>10){//second condition ensures 1/C does not match 1/CH4; 6 characters for InChI=, 2 characters for 1/, 2 characters for atom layer
//			System.out.println("(probably minor) For File: "+ molfilename+" , 3D InChI (" + inchi3d+") begins with, but does not match original InChI ("+inchiString+"). SMILES string: "+ smilesString);
//
//		    }
//		    else{
//			System.out.println("For File: "+ molfilename+" , 3D InChI (" + inchi3d+") does not match original InChI ("+inchiString+"). SMILES string: "+ smilesString);
//		    }
//		}
//	    }
//	    catch (Exception e) {
//		String err = "Error in running InChI process \n";
//		err += e.toString();
//		e.printStackTrace();
//		System.exit(0);
//	    }
//	}
//construct molFile pointer to new file (name will be same as 2D mol file
return new molFile(name, directory);
}
//creates Gaussian PM3 input file in directory with filename name.gjf by using OpenBabel to convert p_molfile
//attemptNumber determines which keywords to try
//the function returns the maximum number of keywords that can be attempted; this will be the same throughout the evaluation of the code, so it may be more appropriate to have this as a "constant" attribute of some sort
//attemptNumber=-1 will call a special set of keywords for the monoatomic case
//NOTE(review): the multiplicity parameter is not used in the active code of this method (only in the commented-out guess=mix line) -- confirm whether the route section should depend on it
public int createGaussianPM3Input(String name, String directory, molFile p_molfile, int attemptNumber, String InChIaug, int multiplicity){
//write a file with the input keywords
int maxAttemptNumber=18;//update this if additional keyword options are added or removed
try{
File inpKey=new File(directory+"/inputkeywords.txt");
String inpKeyStr="%chk="+directory+"/RMGrunCHKfile.chk\n";
inpKeyStr+="%mem=6MW\n";
inpKeyStr+="%nproc=1\n";
//each attemptNumber maps to one fixed Gaussian route section; the chain below is ordered roughly from preferred to last-resort keyword sets
if(attemptNumber==-1) inpKeyStr+="# pm3 freq";//keywords for monoatomic case (still need to use freq keyword to get molecular mass)
else if(attemptNumber==1) inpKeyStr+="# pm3 opt=(verytight,gdiis) freq IOP(2/16=3)";//added IOP option to avoid aborting when symmetry changes; 3 is supposed to be default according to documentation, but it seems that 0 (the default) is the only option that doesn't work from 0-4; also, it is interesting to note that all 4 options seem to work for test case with z-matrix input rather than xyz coords; cf. http://www.ccl.net/cgi-bin/ccl/message-new?2006+10+17+005 for original idea for solution
else if(attemptNumber==2) inpKeyStr+="# pm3 opt=(verytight,gdiis) freq IOP(2/16=3) IOP(4/21=2)";//use different SCF method; this addresses at least one case of failure for a C4H7J species
else if(attemptNumber==3) inpKeyStr+="# pm3 opt=(verytight,calcfc,maxcyc=200) freq IOP(2/16=3) nosymm";//try multiple different options (no gdiis, use calcfc, nosymm); 7/21/09: added maxcyc option to fix case of MPTBUKVAJYJXDE-UHFFFAOYAPmult3 (InChI=1/C4H10O5Si/c1-3-7-9-10(5,6)8-4-2/h4-5H,3H2,1-2H3/mult3) (file manually copied to speed things along)
else if(attemptNumber==4) inpKeyStr+="# pm3 opt=(verytight,calcfc,maxcyc=200) freq=numerical IOP(2/16=3) nosymm";//7/8/09: numerical frequency keyword version of keyword #3; used to address GYFVJYRUZAKGFA-UHFFFAOYALmult3 (InChI=1/C6H14O6Si/c1-3-10-13(8,11-4-2)12-6-5-9-7/h6-7H,3-5H2,1-2H3/mult3) case; (none of the existing Gaussian or MOPAC combinations worked with it)
else if(attemptNumber==5) inpKeyStr+="# pm3 opt=(verytight,gdiis,small) freq IOP(2/16=3)";//7/10/09: somehow, this worked for problematic case of ZGAWAHRALACNPM-UHFFFAOYAF (InChI=1/C8H17O5Si/c1-3-11-14(10,12-4-2)13-8-5-7(9)6-8/h7-9H,3-6H2,1-2H3); (was otherwise giving l402 errors); even though I had a keyword that worked for this case, I manually copied the fixed log file to QMfiles folder to speed things along; note that there are a couple of very low frequencies (~5-6 cm^-1 for this case)
else if(attemptNumber==6) inpKeyStr+="# pm3 opt=(verytight,nolinear,calcfc,small) freq IOP(2/16=3)";//used for troublesome C5H7J2 case (similar error to C5H7J below); calcfc is not necessary for this particular species, but it speeds convergence and probably makes it more robust for other species
else if(attemptNumber==7) inpKeyStr+="# pm3 opt=(verytight,gdiis,maxcyc=200) freq=numerical IOP(2/16=3)"; //use numerical frequencies; this takes a relatively long time, so should only be used as one of the last resorts; this seemed to address at least one case of failure for a C6H10JJ species; 7/15/09: maxcyc=200 added to address GVCMURUDAUQXEY-UHFFFAOYAVmult3 (InChI=1/C3H4O7Si/c1-2(9-6)10-11(7,8)3(4)5/h6-7H,1H2/mult3)...however, result was manually pasted in QMfiles folder to speed things along
else if(attemptNumber==8) inpKeyStr+="# pm3 opt=tight freq IOP(2/16=3)";//7/10/09: this worked for problematic case of SZSSHFMXPBKYPR-UHFFFAOYAF (InChI=1/C7H15O5Si/c1-3-10-13(8,11-4-2)12-7-5-6-9-7/h7H,3-6H2,1-2H3) (otherwise, it had l402.exe errors); corrected log file was manually copied to QMfiles to speed things along; we could also add a freq=numerical version of this keyword combination for added robustness; UPDATE: see below
else if(attemptNumber==9) inpKeyStr+="# pm3 opt=tight freq=numerical IOP(2/16=3)";//7/10/09: used for problematic case of CIKDVMUGTARZCK-UHFFFAOYAImult4 (InChI=1/C8H15O6Si/c1-4-12-15(10,13-5-2)14-7-6-11-8(7,3)9/h7H,3-6H2,1-2H3/mult4 (most other cases had l402.exe errors); corrected log file was manually copied to QMfiles to speed things along
else if(attemptNumber==10) inpKeyStr+="# pm3 opt=(tight,nolinear,calcfc,small,maxcyc=200) freq IOP(2/16=3)";//7/8/09: similar to existing #5, but uses tight rather than verytight; used for ADMPQLGIEMRGAT-UHFFFAOYAUmult3 (InChI=1/C6H14O5Si/c1-4-9-12(8,10-5-2)11-6(3)7/h6-7H,3-5H2,1-2H3/mult3)
else if(attemptNumber==11) inpKeyStr+="# pm3 opt freq IOP(2/16=3)"; //use default (not verytight) convergence criteria; use this as last resort
else if(attemptNumber==12) inpKeyStr+="# pm3 opt=(verytight,gdiis) freq=numerical IOP(2/16=3) IOP(4/21=200)";//to address problematic C10H14JJ case
else if(attemptNumber==13) inpKeyStr+="# pm3 opt=(calcfc,verytight,newton,notrustupdate,small,maxcyc=100,maxstep=100) freq=(numerical,step=10) IOP(2/16=3) nosymm";// added 6/10/09 for very troublesome RRMZRNPRCUANER-UHFFFAOYAQ (InChI=1/C5H7/c1-3-5-4-2/h3H,1-2H3) case...there were troubles with negative frequencies, where I don't think they should have been; step size of numerical frequency was adjusted to give positive result; accuracy of result is questionable; it is possible that not all of these keywords are needed; note that for this and other nearly free rotor cases, I think heat capacity will be overestimated by R/2 (R vs. R/2) (but this is a separate issue)
else if(attemptNumber==14) inpKeyStr+="# pm3 opt=(tight,gdiis,small,maxcyc=200,maxstep=100) freq=numerical IOP(2/16=3) nosymm";// added 6/22/09 for troublesome QDERTVAGQZYPHT-UHFFFAOYAHmult3(InChI=1/C6H14O4Si/c1-4-8-11(7,9-5-2)10-6-3/h4H,5-6H2,1-3H3/mult3); key aspects appear to be tight (rather than verytight) convergence criteria, no calculation of frequencies during optimization, use of numerical frequencies, and probably also the use of opt=small
//gmagoon 7/9/09: commented out since although this produces a "reasonable" result for the problematic case, there is a large amount of spin contamination, apparently introducing 70+ kcal/mol of instability  else if(attemptNumber==12) inpKeyStr+="# pm3 opt=(verytight,gdiis,small) freq=numerical IOP(2/16=3) IOP(4/21=200)";//7/9/09: similar to current number 9 with keyword small; this addresses case of VCSJVABXVCFDRA-UHFFFAOYAI (InChI=1/C8H19O5Si/c1-5-10-8(4)13-14(9,11-6-2)12-7-3/h8H,5-7H2,1-4H3)
else if(attemptNumber==15) inpKeyStr+="# pm3 opt=(verytight,gdiis,calcall) IOP(2/16=3)";//used for troublesome C5H7J case; note that before fixing, I got errors like the following: "Incomplete coordinate system.  Try restarting with Geom=Check Guess=Read Opt=(ReadFC,NewRedundant) Incomplete coordinate system.  Error termination via Lnk1e in l103.exe"; we could try to restart, but it is probably preferrable to have each keyword combination standalone; another keyword that may be helpful if additional problematic cases are encountered is opt=small; 6/9/09 note: originally, this had # pm3 opt=(verytight,gdiis,calcall) freq IOP(2/16=3)" (with freq keyword), but I discovered that in this case, there are two thermochemistry sections and cclib parses frequencies twice, giving twice the number of desired frequencies and hence produces incorrect thermo; this turned up on C5H6JJ isomer
//gmagoon 7/3/09: it is probably best to retire this keyword combination in light of the similar combination below //else if(attemptNumber==6) inpKeyStr+="# pm3 opt=(verytight,gdiis,calcall,small) IOP(2/16=3) IOP(4/21=2)";//6/10/09: worked for OJZYSFFHCAPVGA-UHFFFAOYAK (InChI=1/C5H7/c1-3-5-4-2/h1,4H2,2H3) case; IOP(4/21) keyword was key
else if(attemptNumber==16) inpKeyStr+="# pm3 opt=(verytight,gdiis,calcall,small,maxcyc=200) IOP(2/16=3) IOP(4/21=2) nosymm";//6/29/09: worked for troublesome ketene case: CCGKOQOJPYTBIH-UHFFFAOYAO (InChI=1/C2H2O/c1-2-3/h1H2) (could just increase number of iterations for similar keyword combination above (#6 at the time of this writing), allowing symmetry, but nosymm seemed to reduce # of iterations; I think one of nosymm or higher number of iterations would allow the similar keyword combination to converge; both are included here for robustness)
else if(attemptNumber==17) inpKeyStr+="# pm3 opt=(verytight,gdiis,calcall,small) IOP(2/16=3) nosymm";//7/1/09: added for case of ZWMVZWMBTVHPBS-UHFFFAOYAEmult3 (InChI=1/C4H4O2/c1-3-5-6-4-2/h1-2H2/mult3)
else if(attemptNumber==18) inpKeyStr+="# pm3 opt=(calcall,small,maxcyc=100) IOP(2/16=3)"; //6/10/09: used to address troublesome FILUFGAZMJGNEN-UHFFFAOYAImult3 case (InChI=1/C5H6/c1-3-5-4-2/h3H,1H2,2H3/mult3)
else throw new Exception();//this point should not be reached
//	    if(multiplicity == 3) inpKeyStr+= " guess=mix"; //assumed to be triplet biradical...use guess=mix to perform unrestricted ; nevermind...I think this would only be for singlet biradicals based on http://www.gaussian.com/g_tech/g_ur/k_guess.htm
if (usePolar) inpKeyStr += " polar";
FileWriter fw = new FileWriter(inpKey);
fw.write(inpKeyStr);
fw.close();
}
catch(Exception e){
//NOTE(review): err is built but never printed, and the process exits with status 0 on failure
String err = "Error in writing inputkeywords.txt \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
//call the OpenBabel process (note that this requires OpenBabel environment variable)
try{
File runningdir=new File(directory);
String command=null;
if (System.getProperty("os.name").toLowerCase().contains("windows")){//special windows case
command = "babel -imol \""+ p_molfile.getPath()+ "\" -ogjf \"" + name+".gjf\" -xf inputkeywords.txt --title \""+InChIaug+"\"";
}
else{
command = "babel -imol "+ p_molfile.getPath()+ " -ogjf " + name+".gjf -xf inputkeywords.txt --title "+InChIaug;
}
Process babelProc = Runtime.getRuntime().exec(command, null, runningdir);
//read in output
//draining stdout keeps babel from blocking on a full pipe buffer before waitFor()
InputStream is = babelProc.getInputStream();
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
String line=null;
while ( (line = br.readLine()) != null) {
//do nothing
}
int exitValue = babelProc.waitFor();
}
catch(Exception e){
String err = "Error in running OpenBabel MOL to GJF process \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
return maxAttemptNumber;
}
//creates MM4 input file and MM4 batch file in directory with filenames name.mm4 and name.com, respectively using MoleCoor
//attemptNumber determines which keywords to try
//the function returns the maximum number of keywords that can be attempted; this will be the same throughout the evaluation of the code, so it may be more appropriate to have this as a "constant" attribute of some sort
//NOTE(review): the multiplicity parameter is not referenced anywhere in this method body -- confirm whether MM4 input should depend on it
public int createMM4Input(String name, String directory, molFile p_molfile, int attemptNumber, String InChIaug, int multiplicity){
//Step 1: write the script for MM4 batch operation
//	    Example script file:
//	    #! /bin/csh
//	    cp testEthylene.mm4 CPD.MM4
//	    cp $MM4_DATDIR/BLANK.DAT PARA.MM4
//	    cp $MM4_DATDIR/CONST.MM4 .
//	    $MM4_EXEDIR/mm4 <<%
//	    1
//	    2
//	    0
//	    %
//	    mv TAPE4.MM4 testEthyleneBatch.out
//	    mv TAPE9.MM4 testEthyleneBatch.opt
//	    exit
int scriptAttempts = 2;//the number of script permutations available; update as additional options are added
int maxAttemptNumber=2*scriptAttempts;//we will try a second time with crude coordinates if the UFF refined coordinates do not work
try{
//create batch file with executable permissions: cf. http://java.sun.com/docs/books/tutorial/essential/io/fileAttr.html#posix
File inpKey = new File(directory+"/"+name+".com");
String inpKeyStr="#! /bin/csh\n";
inpKeyStr+="cp "+name+".mm4 CPD.MM4\n";
inpKeyStr+="cp $MM4_DATDIR/BLANK.DAT PARA.MM4\n";
inpKeyStr+="cp $MM4_DATDIR/CONST.MM4 .\n";
inpKeyStr+="$MM4_EXEDIR/mm4 <<%\n";
inpKeyStr+="1\n";//read from first line of .mm4 file
//the menu responses fed to mm4 depend on whether CanTherm post-processing (FORCE.MAT output) is wanted
if (!useCanTherm){
if(attemptNumber%scriptAttempts==1) inpKeyStr+="2\n"; //Block-Diagonal Method then Full-Matrix Method
else if(attemptNumber%scriptAttempts==2) inpKeyStr+="3\n"; //Full-Matrix Method only
else throw new Exception();//this point should not be reached
inpKeyStr+="\n";//<RETURN> for temperature
inpKeyStr+="4\n";//unofficial option 4 for vibrational eigenvector printout to generate Cartesian force constant matrix in FORCE.MAT file
inpKeyStr+="0\n";//no vibrational amplitude printout
}
else{//CanTherm case: write the FORCE.MAT file
if(attemptNumber%scriptAttempts==1) inpKeyStr+="4\n"; //Block-Diagonal Method then Full-Matrix Method
else if(attemptNumber%scriptAttempts==2) inpKeyStr+="5\n"; //Full-Matrix Method only
else throw new Exception();//this point should not be reached
}
inpKeyStr+="0\n";//terminate the job
inpKeyStr+="%\n";
inpKeyStr+="mv TAPE4.MM4 "+name+".mm4out\n";
inpKeyStr+="mv TAPE9.MM4 "+name+".mm4opt\n";
if(useCanTherm){
inpKeyStr+="mv FORCE.MAT "+name+".fmat\n";
}
inpKeyStr+="exit\n";
FileWriter fw = new FileWriter(inpKey);
fw.write(inpKeyStr);
fw.close();
}
catch(Exception e){
//NOTE(review): err is built but never printed, and the process exits with status 0 on failure
String err = "Error in writing MM4 script file\n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
//Step 2: call the MoleCoor process to create the MM4 input file from the mole file
try{
File runningdir=new File(directory);
//this will only be run on Linux so we don't have to worry about Linux vs. Windows issues
String command = "python "+System.getenv("RMG")+"/scripts/MM4InputFileMaker.py ";
//first argument: input file path; for the first attempts, we will use UFF refined coordinates; if that doesn't work, (e.g. case of cyclopropene, InChI=1/C3H4/c1-2-3-1/h1-2H,3H2 OOXWYYGXTJLWHA-UHFFFAOYAJ) we will try crude coordinates (.cmol suffix)
if(attemptNumber<=scriptAttempts){
command=command.concat(p_molfile.getPath() + " ");
}
else{
command=command.concat(p_molfile.getCrudePath() + " ");
}
//second argument: output path
String inpfilepath=directory+"/"+name+".mm4";
command=command.concat(inpfilepath+ " ");
//third argument: molecule name (the augmented InChI)
command=command.concat(InChIaug+ " ");
//fourth argument: PYTHONPATH
command=command.concat(System.getenv("RMG")+"/source/MoleCoor");//this will pass $RMG/source/MoleCoor to the script (in order to get the appropriate path for importing
Process molecoorProc = Runtime.getRuntime().exec(command, null, runningdir);
//read in output
//draining stdout keeps the child from blocking on a full pipe buffer before waitFor()
InputStream is = molecoorProc.getInputStream();
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
String line=null;
while ( (line = br.readLine()) != null) {
//do nothing
}
int exitValue = molecoorProc.waitFor();
}
catch(Exception e){
String err = "Error in running MoleCoor MOL to .MM4 process \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
return maxAttemptNumber;
}
//returns the extra Mopac keywords to use for radical species, given the spin multiplicity (radical number + 1)
public String getMopacRadicalString(int multiplicity){
//a closed-shell singlet needs no extra keywords; open-shell species use uhf plus the multiplicity name
switch(multiplicity){
case 1: return "";
case 2: return "uhf doublet";
case 3: return "uhf triplet";
case 4: return "uhf quartet";
case 5: return "uhf quintet";
case 6: return "uhf sextet";
case 7: return "uhf septet";
case 8: return "uhf octet";
case 9: return "uhf nonet";
default:
//multiplicities outside 1-9 are unsupported: report the problem and terminate
System.out.println("Invalid multiplicity encountered: "+multiplicity);
System.exit(0);
}
return "this should not be returned: error associated with getMopacRadicalString()";
}
//creates MOPAC PM3 input file in directory with filename name.mop by using OpenBabel to convert p_molfile
//attemptNumber determines which keywords to try
//the function returns the maximum number of keywords that can be attempted; this will be the same throughout the evaluation of the code, so it may be more appropriate to have this as a "constant" attribute of some sort
//unlike createGaussianPM3 input, this requires an additional input specifying the spin multiplicity (radical number + 1) for the species
public int createMopacPM3Input(String name, String directory, molFile p_molfile, int attemptNumber, String InChIaug, int multiplicity){
//write a file with the input keywords
int maxAttemptNumber=5;//update this if additional keyword options are added or removed
//three keyword strings are assembled: the shared part goes at both the top (optimization) and bottom (thermo/force) of the .mop file
String inpKeyStrBoth = "";//this string will be written at both the top (for optimization) and the bottom (for thermo/force calc)
String inpKeyStrTop = "";//this string will be written only at the top
String inpKeyStrBottom = "";//this string will be written at the bottom
String radicalString = getMopacRadicalString(multiplicity);
try{
//        File inpKey=new File(directory+"/inputkeywords.txt");
//        String inpKeyStr="%chk="+directory+"\\RMGrunCHKfile.chk\n";
//        inpKeyStr+="%mem=6MW\n";
//        inpKeyStr+="%nproc=1\n";
if(attemptNumber==1){
inpKeyStrBoth="pm3 "+radicalString;
inpKeyStrTop=" precise nosym";
inpKeyStrBottom="oldgeo thermo nosym precise ";//7/10/09: based on a quick review of recent results, keyword combo #1 rarely works, and when it did (CJAINEUZFLXGFA-UHFFFAOYAUmult3 (InChI=1/C8H16O5Si/c1-4-11-14(9,12-5-2)13-8-6-10-7(8)3/h7-8H,3-6H2,1-2H3/mult3)), the grad. norm on the force step was about 1.7 (too large); I manually removed this result and re-ran...the entropy was increased by nearly 20 cal/mol-K...perhaps we should add a check for the "WARNING" that MOPAC prints out when the gradient is high; 7/22/09: for the case of FUGDBSHZYPTWLG-UHFFFAOYADmult3 (InChI=1/C5H8/c1-4-3-5(4)2/h4-5H,1-3H2/mult3), adding nosym seemed to resolve 1. large discrepancies from Gaussian and 2. negative frequencies in mass-weighted coordinates and possibly related issue in discrepancies between regular and mass-weighted coordinate frequencies
}
else if(attemptNumber==2){//7/9/09: used for VCSJVABXVCFDRA-UHFFFAOYAI (InChI=1/C8H19O5Si/c1-5-10-8(4)13-14(9,11-6-2)12-7-3/h8H,5-7H2,1-4H3); all existing Gaussian keywords also failed; the Gaussian result was also rectified, but the resulting molecule was over 70 kcal/mol less stable, probably due to a large amount of spin contamination (~1.75 in fixed Gaussian result vs. 0.754 for MOPAC)
inpKeyStrBoth="pm3 "+radicalString;
inpKeyStrTop=" precise nosym gnorm=0.0 nonr";
inpKeyStrBottom="oldgeo thermo nosym precise ";
}
else if(attemptNumber==3){//7/8/09: used for ADMPQLGIEMRGAT-UHFFFAOYAUmult3 (InChI=1/C6H14O5Si/c1-4-9-12(8,10-5-2)11-6(3)7/h6-7H,3-5H2,1-2H3/mult3); all existing Gaussian keywords also failed; however, the Gaussian result was also rectified, and the resulting conformation was about 1.0 kcal/mol more stable than the one resulting from this, so fixed Gaussian result was manually copied to QMFiles folder
inpKeyStrBoth="pm3 "+radicalString;
inpKeyStrTop=" precise nosym gnorm=0.0";
inpKeyStrBottom="oldgeo thermo nosym precise "; //precise appeared to be necessary for the problematic case (to avoid negative frequencies);
}
else if(attemptNumber==4){//7/8/09: used for GYFVJYRUZAKGFA-UHFFFAOYALmult3 (InChI=1/C6H14O6Si/c1-3-10-13(8,11-4-2)12-6-5-9-7/h6-7H,3-5H2,1-2H3/mult3) case (negative frequency issues in MOPAC) (also, none of the existing Gaussian combinations worked with it); note that the Gaussian result appears to be a different conformation as it is about 0.85 kcal/mol more stable, so the Gaussian result was manually copied to QMFiles directory; note that the MOPAC output included a very low frequency (4-5 cm^-1)
inpKeyStrBoth="pm3 "+radicalString;
inpKeyStrTop=" precise nosym gnorm=0.0 bfgs";
inpKeyStrBottom="oldgeo thermo nosym precise "; //precise appeared to be necessary for the problematic case (to avoid negative frequencies)
}
//	    else if(attemptNumber==5){
//		inpKeyStrBoth="pm3 "+radicalString;
//		inpKeyStrTop=" precise nosym gnorm=0.0 ddmin=0.0";
//		inpKeyStrBottom="oldgeo thermo nosym precise ";
//	    }
//	    else if(attemptNumber==6){
//		inpKeyStrBoth="pm3 "+radicalString;
//		inpKeyStrTop=" precise nosym gnorm=0.0 nonr ddmin=0.0";
//		inpKeyStrBottom="oldgeo thermo nosym precise ";
//	    }
//	    else if(attemptNumber==7){
//		inpKeyStrBoth="pm3 "+radicalString;
//		inpKeyStrTop=" precise nosym bfgs gnorm=0.0 ddmin=0.0";
//		inpKeyStrBottom="oldgeo thermo nosym precise ";
//	    }
else if(attemptNumber==5){//used for troublesome HGRZRPHFLAXXBT-UHFFFAOYAVmult3 (InChI=1/C3H2O4/c4-2(5)1-3(6)7/h1H2/mult3) case (negative frequency and large gradient issues)
inpKeyStrBoth="pm3 "+radicalString;
inpKeyStrTop=" precise nosym recalc=10 dmax=0.10 nonr cycles=2000 t=2000";
inpKeyStrBottom="oldgeo thermo nosym precise ";
}
//	    else if(attemptNumber==9){//used for troublesome CMARQPBQDRXBTN-UHFFFAOYAAmult3 (InChI=1/C3H2O4/c1-3(5)7-6-2-4/h1H2/mult3) case (negative frequency issues)
//		inpKeyStrBoth="pm3 "+radicalString;
//		inpKeyStrTop=" precise nosym recalc=1 dmax=0.05 gnorm=0.0 cycles=1000 t=1000";
//		inpKeyStrBottom="oldgeo thermo nosym precise ";
//	    }
//	    else if(attemptNumber==10){//used for ATCYLHQLTOSVFK-UHFFFAOYAMmult4 (InChI=1/C4H5O5/c1-3(5)8-9-4(2)7-6/h6H,1-2H2/mult4) case (timeout issue; also, negative frequency issues); note that this is very similar to the keyword below, so we may want to consolidate
//		inpKeyStrBoth="pm3 "+radicalString;
//		inpKeyStrTop=" precise nosym recalc=1 dmax=0.05 gnorm=0.2 cycles=1000 t=1000";
//		inpKeyStrBottom="oldgeo thermo nosym precise ";
//	    }
else throw new Exception();//this point should not be reached
//            FileWriter fw = new FileWriter(inpKey);
//            fw.write(inpKeyStr);
//            fw.close();
}
catch(Exception e){
//NOTE(review): err is built but never printed, and the process exits with status 0 on failure
String err = "Error in writing inputkeywords.txt \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
//optionally request a polarizability calculation as an extra job section ("polar" for singlets, "static" for open-shell species)
String polarString = "";
if (usePolar){
if(multiplicity == 1) polarString = System.getProperty("line.separator") + System.getProperty("line.separator") + System.getProperty("line.separator")+ "oldgeo polar nosym precise " + inpKeyStrBoth;
else polarString = System.getProperty("line.separator") + System.getProperty("line.separator") + System.getProperty("line.separator")+ "oldgeo static nosym precise " + inpKeyStrBoth;
}
//call the OpenBabel process (note that this requires OpenBabel environment variable)
try{
File runningdir=new File(directory);
String inpKeyStrTopCombined = inpKeyStrBoth + inpKeyStrTop;
String command = "babel -imol "+ p_molfile.getPath()+ " -omop " + name+".mop -xk \"" + inpKeyStrTopCombined + "\" --title \""+InChIaug+"\"";
Process babelProc = Runtime.getRuntime().exec(command, null, runningdir);
//read in output
//draining stdout keeps babel from blocking on a full pipe buffer before waitFor()
InputStream is = babelProc.getInputStream();
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
String line=null;
while ( (line = br.readLine()) != null) {
//do nothing
}
int exitValue = babelProc.waitFor();
//append the final keywords to the end of the file just written
//            File mopacInpFile = new File(directory+"/"+name+".mop");
FileWriter fw = new FileWriter(directory+"/"+name+".mop", true);//filewriter with append = true
fw.write(System.getProperty("line.separator") + inpKeyStrBottom + inpKeyStrBoth + polarString);//on Windows Vista, "\n" appears correctly in WordPad view, but not Notepad view (cf. http://forums.sun.com/thread.jspa?threadID=5386822)
fw.close();
}
catch(Exception e){
String err = "Error in running OpenBabel MOL to MOP process \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
return maxAttemptNumber;
}
//name and directory are the name and directory for the input (and output) file;
//input is assumed to be preexisting and have the .gjf suffix
//returns an integer indicating success or failure of the Gaussian calculation: 1 for success, 0 for failure;
public int runGaussian(String name, String directory){
    int flag = 0;//set to 1 if the g03 process writes anything to stderr
    int successFlag = 0;
    //1. launch the Gaussian (g03) process and echo anything it writes to stderr
    try{
        String command = "g03 ";
        command = command.concat(qmfolder+"/"+name+".gjf ");//specify the input file; trailing space is important
        command = command.concat(qmfolder+"/"+name+".log");//specify the output file
        Process gaussianProc = Runtime.getRuntime().exec(command);
        //check for errors and display the error if there is one
        InputStream is = gaussianProc.getErrorStream();
        InputStreamReader isr = new InputStreamReader(is);
        BufferedReader br = new BufferedReader(isr);
        String line = null;
        while ( (line = br.readLine()) != null) {
            line = line.trim();
            System.err.println(line);
            flag = 1;
        }
        br.close();//close the stderr reader (previously leaked)
        //if there was an error, indicate that an error was obtained
        if(flag == 1){
            System.out.println("Gaussian process received error (see above) on " + name);
        }
        int exitValue = gaussianProc.waitFor();
    }
    catch(Exception e){
        String err = "Error in running Gaussian process \n";
        err += e.toString();
        e.printStackTrace();
        System.exit(0);
    }
    //2. look in the output file to check for the successful termination of the Gaussian calculation
    //failed jobs will contain a line beginning with " Error termination" near the end of the file;
    //jobs reporting imaginary frequencies are also treated as failures
    //NOTE(review): the job above is run in qmfolder but the log is read from 'directory';
    //this assumes the two refer to the same location -- confirm with callers
    int failureFlag = 0;
    String errorLine = "";//string to store the error
    try{
        FileReader in = new FileReader(directory+"/"+name+".log");
        BufferedReader reader = new BufferedReader(in);
        String line = reader.readLine();
        while(line != null){
            if (line.startsWith(" Error termination ")){
                failureFlag = 1;
                errorLine = line.trim();
                System.out.println("*****Error in Gaussian log file: "+errorLine);//print the error (note that in general, I think two lines will be printed)
            }
            else if (line.startsWith(" ******")){//also look for imaginary frequencies
                if (line.contains("imaginary frequencies")){
                    System.out.println("*****Imaginary freqencies found:");
                    failureFlag = 1;
                }
            }
            line = reader.readLine();
        }
        reader.close();//close the log-file reader (previously leaked)
    }
    catch(Exception e){
        String err = "Error in reading Gaussian log file \n";
        err += e.toString();
        e.printStackTrace();
        System.exit(0);
    }
    //if the failure flag is still 0, the process should have been successful
    if (failureFlag == 0) successFlag = 1;
    return successFlag;
}
//name and directory are the name and directory for the input (and output) file;
//input script is assumed to be preexisting and have the .com suffix
//returns an integer indicating success or failure of the calculation: 1 for success, 0 for failure
public int runMM4(String name, String directory){
    int successFlag = 0;
    //1. launch the MM4 shell script and drain its stdout so the process does not block
    try{
        File runningDirectory = new File(qmfolder);
        String command = name+".com";
        //NOTE(review): qmfolder is assumed to already end with a path separator here
        //(compare runGaussian, which inserts "/" explicitly) -- confirm how qmfolder is set
        File script = new File(qmfolder+command);
        command = "./"+command;
        script.setExecutable(true);//the .com file is a shell script and must be executable
        Process mm4Proc = Runtime.getRuntime().exec(command, null, runningDirectory);
        //stderr checking was removed here because MM4 routinely prints
        //"STOP statement executed" to stderr on normal termination
        InputStream is = mm4Proc.getInputStream();
        InputStreamReader isr = new InputStreamReader(is);
        BufferedReader br = new BufferedReader(isr);
        String line = null;
        while ( (line = br.readLine()) != null) {
            //do nothing; just consume the output
        }
        int exitValue = mm4Proc.waitFor();
        br.close();//close the stdout reader (previously leaked)
    }
    catch(Exception e){
        String err = "Error in running MM4 process \n";
        err += e.toString();
        e.printStackTrace();
        System.exit(0);
    }
    //2. look in the output file to check for the successful termination of the MM4 calculation (cf. successfulMM4ResultExistsQ)
    File file = new File(directory+"/"+name+".mm4out");
    int failureFlag = 1;//flag (1 or 0) indicating whether the MM4 job failed
    int failureOverrideFlag = 0;//flag (1 or 0) to override success as measured by failureFlag
    if(file.exists()){//if the file exists, do further checks; otherwise, we will skip to final statement and return false
        try{
            FileReader in = new FileReader(file);
            BufferedReader reader = new BufferedReader(in);
            String line = reader.readLine();
            while(line != null){
                String trimLine = line.trim();
                if (trimLine.equals("STATISTICAL THERMODYNAMICS ANALYSIS")){
                    //presence of the thermo analysis section indicates normal completion
                    failureFlag = 0;
                }
                else if (trimLine.endsWith("imaginary frequencies,")){//read the number of imaginary frequencies and make sure it is zero
                    String[] split = trimLine.split("\\s+");
                    if (Integer.parseInt(split[3])>0){
                        System.out.println("*****Imaginary freqencies found:");
                        failureOverrideFlag = 1;
                    }
                }
                else if (trimLine.contains("  0.0     (fir )")){
                    if (useCanTherm){//zero frequencies are only acceptable when CanTherm is used
                        System.out.println("*****Warning: zero freqencies found (values lower than 7.7 cm^-1 are rounded to zero in MM4 output); CanTherm should hopefully correct this:");
                    }
                    else{
                        System.out.println("*****Zero freqencies found:");
                        failureOverrideFlag = 1;
                    }
                }
                line = reader.readLine();
            }
            reader.close();//close the output-file reader (previously leaked)
        }
        catch(Exception e){
            String err = "Error in reading MM4 output file \n";
            err += e.toString();
            e.printStackTrace();
            System.exit(0);
        }
    }
    //job will be considered a failure if there are imaginary frequencies or if job terminates due to excess time/cycles
    if(failureOverrideFlag == 1) failureFlag = 1;
    //if the failure flag is 0 and there are no negative frequencies, the process should have been successful
    if (failureFlag == 0) successFlag = 1;
    return successFlag;
}
//name and directory are the name and directory for the input (and output) file;
//input is assumed to be preexisting and have the .mop suffix
//returns an integer indicating success or failure of the MOPAC calculation: 1 for success, 0 for failure;
//this function is based on the Gaussian analogue
public int runMOPAC(String name, String directory){
    int flag = 0;//set to 1 if the MOPAC process writes anything to stderr
    int successFlag = 0;
    //1. launch the MOPAC2009 process (located via the MOPAC_LICENSE environment variable)
    //and echo anything it writes to stderr
    try{
        String command = System.getenv("MOPAC_LICENSE")+"MOPAC2009.exe ";
        command = command.concat(directory+"/"+name+".mop ");//specify the input file; trailing space is important
        command = command.concat(directory+"/"+name+".out");//specify the output file
        Process mopacProc = Runtime.getRuntime().exec(command);
        //check for errors and display the error if there is one
        InputStream is = mopacProc.getErrorStream();
        InputStreamReader isr = new InputStreamReader(is);
        BufferedReader br = new BufferedReader(isr);
        String line = null;
        while ( (line = br.readLine()) != null) {
            line = line.trim();
            System.err.println(line);
            flag = 1;
        }
        br.close();//close the stderr reader (previously leaked)
        //if there was an error, indicate that an error was obtained
        if(flag == 1){
            System.out.println("MOPAC process received error (see above) on " + name);
        }
        int exitValue = mopacProc.waitFor();
    }
    catch(Exception e){
        String err = "Error in running MOPAC process \n";
        err += e.toString();
        e.printStackTrace();
        System.exit(0);
    }
    //2. look in the output file to check for the successful termination of the calculation
    //(this is a trimmed down version of what appears in successfulMOPACResultExistsQ; it doesn't have the InChI check)
    File file = new File(directory+"/"+name+".out");
    int failureFlag = 1;//flag (1 or 0) indicating whether the MOPAC job failed
    int failureOverrideFlag = 0;//flag (1 or 0) to override success as measured by failureFlag
    if(file.exists()){//if the file exists, do further checks; otherwise, we will skip to final statement and return false
        try{
            FileReader in = new FileReader(file);
            BufferedReader reader = new BufferedReader(in);
            String line = reader.readLine();
            while(line != null){
                String trimLine = line.trim();
                if (trimLine.equals("DESCRIPTION OF VIBRATIONS")){//presence of this section indicates normal completion
                    failureFlag = 0;
                }
                //negative frequencies notice example:
                //         NOTE: SYSTEM IS NOT A GROUND STATE, THEREFORE ZERO POINT
                //         ENERGY IS NOT MEANINGFULL. ZERO POINT ENERGY PRINTED
                //         DOES NOT INCLUDE THE  2 IMAGINARY FREQUENCIES
                else if (trimLine.endsWith("IMAGINARY FREQUENCIES")){
                    System.out.println("*****Imaginary freqencies found:");
                    failureOverrideFlag = 1;
                }
                else if (trimLine.equals("EXCESS NUMBER OF OPTIMIZATION CYCLES")){//exceeding max cycles error
                    failureOverrideFlag = 1;
                }
                else if (trimLine.equals("NOT ENOUGH TIME FOR ANOTHER CYCLE")){//timeout error
                    failureOverrideFlag = 1;
                }
                line = reader.readLine();
            }
            reader.close();//close the output-file reader (previously leaked)
        }
        catch(Exception e){
            String err = "Error in reading MOPAC output file \n";
            err += e.toString();
            e.printStackTrace();
            System.exit(0);
        }
    }
    //job will be considered a failure if there are imaginary frequencies or if job terminates due to excess time/cycles
    if(failureOverrideFlag == 1) failureFlag = 1;
    //if the failure flag is 0 and there are no negative frequencies, the process should have been successful
    if (failureFlag == 0) successFlag = 1;
    return successFlag;
}
//parse the results using cclib and return a ThermoData object; name and directory indicate the location of the Gaussian .log file
//(an earlier inline implementation of the cclib parsing lived here as commented-out code; it was
//superseded by the shared getPM3MM4ThermoDataUsingCCLib routine and has been removed)
public ThermoData parseGaussianPM3(String name, String directory, ChemGraph p_chemGraph){
    //build the command that invokes the cclib-based parsing script;
    //the final $RMG/source argument gives the script the path it needs for importing
    String command = null;
    if (System.getProperty("os.name").toLowerCase().contains("windows")){//special windows case where paths can have spaces and are allowed to be surrounded by quotes
        command = "python \""+ System.getProperty("RMG.workingDirectory")+"/scripts/GaussianPM3ParsingScript.py\" ";
        String logfilepath = "\""+directory+"/"+name+".log\"";
        command = command.concat(logfilepath);
        command = command.concat(" \""+ System.getenv("RMG")+"/source\"");//this will pass $RMG/source to the script (in order to get the appropriate path for importing
    }
    else{//non-Windows case
        command = "python "+ System.getProperty("RMG.workingDirectory")+"/scripts/GaussianPM3ParsingScript.py ";
        String logfilepath = directory+"/"+name+".log";
        command = command.concat(logfilepath);
        command = command.concat(" "+ System.getenv("RMG")+"/source");//this will pass $RMG/source to the script (in order to get the appropriate path for importing
    }
    //delegate the actual parsing and thermo calculation to the shared cclib-based routine
    ThermoData result = getPM3MM4ThermoDataUsingCCLib(name, directory, p_chemGraph, command);
    System.out.println("Thermo for " + name + ": "+ result.toString());//print result, at least for debugging purposes
    return result;
}
//parse the results using cclib and return a ThermoData object; name and directory indicate the location of the MM4 .mm4out file
public ThermoData parseMM4(String name, String directory, ChemGraph p_chemGraph){
    //assemble the command line: python <workingDir>/scripts/MM4ParsingScript.py <logfile> <RMG>/source
    //(the trailing $RMG/source argument tells the script where to find the modules it imports)
    StringBuilder cmd = new StringBuilder("python ");
    cmd.append(System.getProperty("RMG.workingDirectory")).append("/scripts/MM4ParsingScript.py ");
    cmd.append(directory).append("/").append(name).append(".mm4out");
    cmd.append(" ").append(System.getenv("RMG")).append("/source");
    //delegate the actual parsing and thermo calculation to the shared cclib-based routine
    ThermoData result = getPM3MM4ThermoDataUsingCCLib(name, directory, p_chemGraph, cmd.toString());
    System.out.println("Thermo for " + name + ": "+ result.toString());//print result, at least for debugging purposes
    return result;
}
//parse the results using cclib and CanTherm and return a ThermoData object; name and directory indicate the location of the MM4 .mm4out file
//NOTE(review): this routine is only partially implemented -- several steps are marked "to be written",
//the CanTherm output parsing is a placeholder, and the symmetry-corrected S298 is computed but never
//folded into the returned result
public ThermoData parseMM4withForceMat(String name, String directory, ChemGraph p_chemGraph){
    //1. parse the MM4 file with cclib to get atomic numbers, geometry, energy, frequencies, and rotational constants
    String command = "python "+System.getProperty("RMG.workingDirectory")+"/scripts/MM4ParsingScript.py ";
    String logfilepath = directory+"/"+name+".mm4out";
    command = command.concat(logfilepath);
    command = command.concat(" "+ System.getenv("RMG")+"/source");//this will pass $RMG/source to the script (in order to get the appropriate path for importing
    ///////////beginning of block taken from the bulk of getPM3MM4ThermoDataUsingCCLib////////////
    int natoms = 0; //number of atoms from the MM4 file; in principle, this should agree with number of chemGraph atoms
    ArrayList<Integer> atomicNumber = new ArrayList<Integer>(); //atomic numbers, in file order
    ArrayList<Double> x_coor = new ArrayList<Double>(); //x-, y-, and z-coordinates (Angstroms), in order corresponding to atomicNumber
    ArrayList<Double> y_coor = new ArrayList<Double>();
    ArrayList<Double> z_coor = new ArrayList<Double>();
    double energy = 0; //energy (Hf298) in Hartree
    double molmass = 0; //molecular mass in amu
    ArrayList<Double> freqs = new ArrayList<Double>(); //list of frequencies in units of cm^-1
    double rotCons_1 = 0;//rotational constants in (1/s)
    double rotCons_2 = 0;
    double rotCons_3 = 0;
    int gdStateDegen = p_chemGraph.getRadicalNumber()+1;//calculate ground state degeneracy from the number of radicals; this should give the same result as spin multiplicity in Gaussian input file (and output file), but we do not explicitly check this; note that this is not always correct, as there can apparently be additional spatial degeneracy for non-symmetric linear molecules like OH radical (cf. http://cccbdb.nist.gov/thermo.asp)
    try{
        Process cclibProc = Runtime.getRuntime().exec(command);
        //read the stdout of the process; the script prints, in order: natoms, the atomic-number
        //vector, one coordinate line per atom, energy, molecular mass, then (for polyatomics)
        //the frequency vector and the three rotational constants in GHz
        InputStream is = cclibProc.getInputStream();
        InputStreamReader isr = new InputStreamReader(is);
        BufferedReader br = new BufferedReader(isr);
        String line = null;
        String[] stringArray;
        natoms = Integer.parseInt(br.readLine());//read line 1: number of atoms
        stringArray = br.readLine().replace("[", "").replace("]","").trim().split(",\\s+");//read line 2: the atomic numbers (first removing braces)
        for(int i=0; i < natoms; i++){
            atomicNumber.add(i,Integer.parseInt(stringArray[i]));
        }
        for(int i=0; i < natoms; i++){
            stringArray = br.readLine().replace("[", "").replace("]","").trim().split("\\s+");//read line 3+: coordinates for atom i; used \s+ for split since splitting on spaces with default limit of 0 was giving empty strings
            x_coor.add(i,Double.parseDouble(stringArray[0]));
            y_coor.add(i,Double.parseDouble(stringArray[1]));
            z_coor.add(i,Double.parseDouble(stringArray[2]));
        }
        energy = Double.parseDouble(br.readLine());//read next line: energy
        molmass = Double.parseDouble(br.readLine());//read next line: molecular mass (in amu)
        if (natoms>1){//read additional info for non-monoatomic species
            stringArray = br.readLine().replace("[", "").replace("]","").trim().split(",\\s+");//read next line: frequencies
            for(int i=0; i < stringArray.length; i++){
                freqs.add(i,Double.parseDouble(stringArray[i]));
            }
            stringArray = br.readLine().replace("[", "").replace("]","").trim().split("\\s+");//read next line: rotational constants (converting from GHz to Hz in the process)
            rotCons_1 = Double.parseDouble(stringArray[0])*1000000000;
            rotCons_2 = Double.parseDouble(stringArray[1])*1000000000;
            rotCons_3 = Double.parseDouble(stringArray[2])*1000000000;
        }
        while ( (line = br.readLine()) != null) {
            //do nothing (there shouldn't be any more information, but this is included to get all the output)
        }
        int exitValue = cclibProc.waitFor();
        br.close();//close the stdout reader (previously leaked)
    }
    catch (Exception e) {
        String err = "Error in running ccLib Python process \n";
        err += e.toString();
        e.printStackTrace();
        System.exit(0);
    }
    ///////////end of block taken from the bulk of getPM3MM4ThermoDataUsingCCLib////////////
    //2. compute H0; note that we will pass H0 to CanTherm by H0=H298(harmonicMM4)-(H298-H0)harmonicMM4, where harmonicMM4 values come from cclib parsing and since it is enthalpy, it should not be NaN due to zero frequencies
    //*** to be written
    //3. write CanTherm input file
    String canInp = "Thermo\n";
    canInp += "Trange 300 100 13\n";//temperatures from 300 to 1500 in increments of 100
    canInp += "Scale: 1.0\n";//scale factor of 1
    canInp += "Mol 1\n";
    if(p_chemGraph.getAtomNumber()==1) canInp += "ATOM\n";
    else if (p_chemGraph.isLinear()) canInp+="LINEAR\n";
    else canInp+="NONLINEAR\n";
    canInp += "GEOM MM4File " + name+".mm4out\n";//geometry file; ***special MM4 treatment in CanTherm; another option would be to use mm4opt file, but CanTherm code would need to be modified accordingly
    canInp += "FORCEC MM4File "+name+".fmat\n";//force constant file; ***special MM4 treatment in CanTherm
    canInp += "ENERGY "+ energy +" MM4\n";//***special MM4 treatment in CanTherm
    canInp+="EXTSYM 1\n";//use external symmetry of 1; it will be corrected for below
    canInp+="NELEC 1\n";//multiplicity = 1; all cases we consider will be non-radicals
    if(!useHindRot) canInp += "ROTORS 0\n";//do not consider hindered rotors
    else{
        //this section still needs to be written
        canInp+="";
    }
    try{
        File canFile = new File(directory+"/"+name+".can");
        FileWriter fw = new FileWriter(canFile);
        fw.write(canInp);
        fw.close();
    }
    catch(Exception e){
        String err = "Error in writing CanTherm input \n";
        err += e.toString();
        e.printStackTrace();
        System.exit(0);
    }
    //4. call CanTherm
    try{
        File runningDirectory = new File(qmfolder);
        //BUGFIX: a space was missing between the script path and the input-file argument,
        //which produced an invalid command like ".../CanTherm.pyNAME.can"
        String canCommand = "python " + System.getenv("RMG")+"/source/CanTherm/source/CanTherm.py "+name+".can";
        Process canProc = Runtime.getRuntime().exec(canCommand, null, runningDirectory);
        InputStream is = canProc.getInputStream();
        InputStreamReader isr = new InputStreamReader(is);
        BufferedReader br = new BufferedReader(isr);
        String line = null;
        while ( (line = br.readLine()) != null) {
            //do nothing; just consume the output
        }
        int exitValue = canProc.waitFor();
        br.close();//close the stdout reader (previously leaked)
    }
    catch(Exception e){
        String err = "Error in running CanTherm process \n";
        err += e.toString();
        e.printStackTrace();
        System.exit(0);
    }
    //5. read in CanTherm output
    double S298 = 0;
    try{
        File outFile = new File(directory+"/cantherm.out");
        FileReader in = new FileReader(outFile);
        BufferedReader reader = new BufferedReader(in);
        String line = reader.readLine();
        while(line!=null){
            line = reader.readLine();//****needs to be updated once I figure out what output looks like
        }
        reader.close();//close the output reader (previously leaked)
    }
    catch(Exception e){
        String err = "Error in reading CanTherm output \n";
        err += e.toString();
        e.printStackTrace();
        System.exit(0);
    }
    //6. correct the output for symmetry number (everything has been assumed to be one):
    //useHindRot=true case: use ChemGraph symmetry number; otherwise, we use SYMMETRY
    if(!useHindRot){
        double R = 1.9872; //ideal gas constant in cal/mol-K (does this appear elsewhere in RMG, so I don't need to reuse it?)
        //determine point group using the SYMMETRY Program
        String geom = natoms + "\n";
        for(int i=0; i < natoms; i++){
            geom += atomicNumber.get(i) + " "+ x_coor.get(i) + " " + y_coor.get(i) + " " +z_coor.get(i) + "\n";
        }
        String pointGroup = determinePointGroupUsingSYMMETRYProgram(geom);
        double sigmaCorr = getSigmaCorr(pointGroup);
        S298 += R*sigmaCorr;//NOTE(review): S298 is never used below -- wiring it into 'result' is still TODO
    }
    else{
        //***to be written
    }
    ThermoData result = getPM3MM4ThermoDataUsingCCLib(name, directory, p_chemGraph, command);
    System.out.println("Thermo for " + name + ": "+ result.toString());//print result, at least for debugging purposes
    return result;
}
//parse the results using cclib and return a ThermoData object; name and directory indicate the location of the MOPAC .out file
public ThermoData parseMopacPM3(String name, String directory, ChemGraph p_chemGraph){
    //build the parsing-script invocation; the trailing $RMG/source argument tells the
    //script where to find the modules it imports
    boolean onWindows = System.getProperty("os.name").toLowerCase().contains("windows");
    String command;
    if (onWindows){
        //on Windows, paths may contain spaces, so each path argument is wrapped in quotes
        command = "python \"" + System.getProperty("RMG.workingDirectory") + "/scripts/MopacPM3ParsingScript.py\" "
                + "\"" + directory + "/" + name + ".out\""
                + " \"" + System.getenv("RMG") + "/source\"";
    }
    else{
        //non-Windows case: no quoting needed
        command = "python " + System.getProperty("RMG.workingDirectory") + "/scripts/MopacPM3ParsingScript.py "
                + directory + "/" + name + ".out"
                + " " + System.getenv("RMG") + "/source";
    }
    //delegate the actual parsing and thermo calculation to the shared cclib-based routine
    ThermoData result = getPM3MM4ThermoDataUsingCCLib(name, directory, p_chemGraph, command);
    System.out.println("Thermo for " + name + ": "+ result.toString());//print result, at least for debugging purposes
    return result;
}
//separated from parseMopacPM3, since the function was originally based off of parseGaussianPM3 and was very similar (differences being command and logfilepath variables);
//runs the given cclib-based parsing command as an external process, reads the parsed quantum-chemistry
//results from its stdout, and converts them into a ThermoData via calculateThermoFromPM3MM4Calc.
//name/directory identify the quantum output file being parsed (used only for the downstream calculation and messages);
//command is the fully-assembled shell command for the parsing script.
//NOTE(review): any parse/IO error terminates the whole program via System.exit(0) — exit code 0 despite failure; confirm this is intended.
public ThermoData getPM3MM4ThermoDataUsingCCLib(String name, String directory, ChemGraph p_chemGraph, String command){
    //parse the Mopac file using cclib
    int natoms = 0; //number of atoms from Mopac file; in principle, this should agree with number of chemGraph atoms
    ArrayList atomicNumber = new ArrayList(); //list of atomic numbers (Integers), one per atom
    ArrayList x_coor = new ArrayList(); //lists of x-, y-, and z-coordinates (Doubles) (Angstroms) (in order corresponding to above atomic numbers)
    ArrayList y_coor = new ArrayList();
    ArrayList z_coor = new ArrayList();
    double energy = 0; //PM3 energy (Hf298) in Hartree (***note: in the case of MOPAC, the MOPAC file will contain in units of kcal/mol, but modified ccLib will return in Hartree)
    double molmass = 0; //molecular mass in amu
    ArrayList freqs = new ArrayList(); //list of frequencies in units of cm^-1
    double rotCons_1 = 0;//rotational constants in (1/s)
    double rotCons_2 = 0;
    double rotCons_3 = 0;
    int gdStateDegen = p_chemGraph.getRadicalNumber()+1;//calculate ground state degeneracy from the number of radicals; this should give the same result as spin multiplicity in Gaussian input file (and output file), but we do not explicitly check this (we could use "mult" which cclib reads in if we wanted to do so); also, note that this is not always correct, as there can apparently be additional spatial degeneracy for non-symmetric linear molecules like OH radical (cf. http://cccbdb.nist.gov/thermo.asp)
    try{
        Process cclibProc = Runtime.getRuntime().exec(command);
        //read the stdout of the process, which should contain the desired information in a particular format
        InputStream is = cclibProc.getInputStream();
        InputStreamReader isr = new InputStreamReader(is);
        BufferedReader br = new BufferedReader(isr);
        String line=null;
        //expected stdout format from the parsing script, in order:
        //  line 1: number of atoms
        //  line 2: bracketed, comma-separated atomic numbers, e.g. [ 6, 6, 8, ...]
        //  next natoms lines: bracketed whitespace-separated x y z coordinates, one atom per line
        //  next line: energy (Hartree)
        //  next line: molecular mass (amu)
        //  (only if natoms > 1) next line: bracketed, comma-separated frequencies (cm^-1)
        //  (only if natoms > 1) next line: bracketed rotational constants (GHz)
        String [] stringArray;
        natoms = Integer.parseInt(br.readLine());//read line 1: number of atoms
        stringArray = br.readLine().replace("[", "").replace("]","").trim().split(",\\s+");//read line 2: the atomic numbers (first removing braces)
        for(int i=0; i < natoms; i++){
            atomicNumber.add(i,Integer.parseInt(stringArray[i]));
        }
        for(int i=0; i < natoms; i++){
            stringArray = br.readLine().replace("[", "").replace("]","").trim().split("\\s+");//read line 3+: coordinates for atom i; used \s+ for split; using spaces with default limit of 0 was giving empty string
            x_coor.add(i,Double.parseDouble(stringArray[0]));
            y_coor.add(i,Double.parseDouble(stringArray[1]));
            z_coor.add(i,Double.parseDouble(stringArray[2]));
        }
        energy = Double.parseDouble(br.readLine());//read next line: energy
        molmass = Double.parseDouble(br.readLine());//read next line: molecular mass (in amu)
        if (natoms>1){//read additional info for non-monoatomic species
            stringArray = br.readLine().replace("[", "").replace("]","").trim().split(",\\s+");//read next line: frequencies
            for(int i=0; i < stringArray.length; i++){
                freqs.add(i,Double.parseDouble(stringArray[i]));
            }
            stringArray = br.readLine().replace("[", "").replace("]","").trim().split("\\s+");//read next line rotational constants (converting from GHz to Hz in the process)
            rotCons_1 = Double.parseDouble(stringArray[0])*1000000000;
            rotCons_2 = Double.parseDouble(stringArray[1])*1000000000;
            rotCons_3 = Double.parseDouble(stringArray[2])*1000000000;
        }
        while ( (line = br.readLine()) != null) {
            //do nothing (there shouldn't be any more information, but this is included to get all the output so the child process does not block on a full pipe)
        }
        int exitValue = cclibProc.waitFor();
    }
    catch (Exception e) {
        String err = "Error in running ccLib Python process \n";
        err += e.toString();
        e.printStackTrace();
        System.exit(0);
    }
    ThermoData result = calculateThermoFromPM3MM4Calc(natoms, atomicNumber, x_coor, y_coor, z_coor, energy, molmass, freqs, rotCons_1, rotCons_2, rotCons_3, gdStateDegen);
    return result;
}
//returns a thermo result, given results from quantum PM3 calculation or MM4 calculation (originally, this was in parseGaussianPM3 function)
//computes Hf298, S298 and Cp(300..1500 K) from molecular geometry, energy, mass, frequencies, and
//rotational constants using rigid-rotor/harmonic-oscillator statistical mechanics (cf. http://cccbdb.nist.gov/thermo.asp);
//symmetry/chirality corrections come from the point group determined by the external SYMMETRY program.
public ThermoData calculateThermoFromPM3MM4Calc(int natoms, ArrayList atomicNumber, ArrayList x_coor, ArrayList y_coor, ArrayList z_coor, double energy, double molmass, ArrayList freqs, double rotCons_1, double rotCons_2, double rotCons_3, int gdStateDegen){
    //determine point group using the SYMMETRY Program; its input is natoms followed by one "atomicNumber x y z" line per atom
    String geom = natoms + "\n";
    for(int i=0; i < natoms; i++){
        geom += atomicNumber.get(i) + " "+ x_coor.get(i) + " " + y_coor.get(i) + " " +z_coor.get(i) + "\n";
    }
    String pointGroup = determinePointGroupUsingSYMMETRYProgram(geom);
    //calculate thermo quantities using stat. mech. equations
    double R = 1.9872; //ideal gas constant in cal/mol-K (does this appear elsewhere in RMG, so I don't need to reuse it?)
    double Hartree_to_kcal = 627.5095; //conversion from Hartree to kcal/mol taken from Gaussian thermo white paper
    double Na = 6.02214179E23;//Avagadro's number; cf. http://physics.nist.gov/cgi-bin/cuu/Value?na|search_for=physchem_in!
    double k = 1.3806504E-23;//Boltzmann's constant in J/K; cf. http://physics.nist.gov/cgi-bin/cuu/Value?na|search_for=physchem_in!
    double h = 6.62606896E-34;//Planck's constant in J-s; cf. http://physics.nist.gov/cgi-bin/cuu/Value?h|search_for=universal_in!
    double c = 299792458. *100;//speed of light in vacuum in cm/s, cf. http://physics.nist.gov/cgi-bin/cuu/Value?c|search_for=universal_in!
    //determine linearity from the 3D point group rather than the chemGraph; changed to correctly consider linear ketene radical case
    boolean linearity = false;
    if (pointGroup.equals("Cinfv")||pointGroup.equals("Dinfh")) linearity=true;//determine linearity from 3D-geometry; changed to correctly consider linear ketene radical case
    //we will use number of atoms from above (alternatively, we could use the chemGraph); this is needed to test whether the species is monoatomic
    double Hf298, S298, Cp300, Cp400, Cp500, Cp600, Cp800, Cp1000, Cp1500;
    double sigmaCorr = getSigmaCorr(pointGroup);
    Hf298 = energy*Hartree_to_kcal;
    //electronic + translational contributions (valid for any species, including monoatomic):
    S298 = R*Math.log(gdStateDegen)+R*(3./2.*Math.log(2.*Math.PI*molmass/(1000.*Na*Math.pow(h,2.)))+5./2.*Math.log(k*298.15)-Math.log(100000.)+5./2.);//electronic + translation; note use of 10^5 Pa for standard pressure; also note that molecular mass needs to be divided by 1000 for kg units
    //translational Cp = 5/2 R at all temperatures
    Cp300 = 5./2.*R;
    Cp400 = 5./2.*R;
    Cp500 = 5./2.*R;
    Cp600 = 5./2.*R;
    Cp800 = 5./2.*R;
    Cp1000 = 5./2.*R;
    Cp1500 = 5./2.*R;
    if(natoms>1){//include statistical correction and rotational (without symmetry number), vibrational contributions if species is polyatomic
        if(linearity){//linear case: one rotational degree of freedom pair, Cp_rot = R
            //determine the rotational constant (note that one of the rotcons will be zero)
            double rotCons;
            if(rotCons_1 > 0.0001) rotCons = rotCons_1;
            else rotCons = rotCons_2;
            S298 += R*sigmaCorr+R*(Math.log(k*298.15/(h*rotCons))+1)+R*calcVibS(freqs, 298.15, h, k, c);
            Cp300 += R + R*calcVibCp(freqs, 300., h, k, c);
            Cp400 += R + R*calcVibCp(freqs, 400., h, k, c);
            Cp500 += R + R*calcVibCp(freqs, 500., h, k, c);
            Cp600 += R + R*calcVibCp(freqs, 600., h, k, c);
            Cp800 += R + R*calcVibCp(freqs, 800., h, k, c);
            Cp1000 += R + R*calcVibCp(freqs, 1000., h, k, c);
            Cp1500 += R + R*calcVibCp(freqs, 1500., h, k, c);
        }
        else{//nonlinear case: three rotational degrees of freedom, Cp_rot = 3/2 R
            S298 += R*sigmaCorr+R*(3./2.*Math.log(k*298.15/h)-1./2.*Math.log(rotCons_1*rotCons_2*rotCons_3/Math.PI)+3./2.)+R*calcVibS(freqs, 298.15, h, k, c);
            Cp300 += 3./2.*R + R*calcVibCp(freqs, 300., h, k, c);
            Cp400 += 3./2.*R + R*calcVibCp(freqs, 400., h, k, c);
            Cp500 += 3./2.*R + R*calcVibCp(freqs, 500., h, k, c);
            Cp600 += 3./2.*R + R*calcVibCp(freqs, 600., h, k, c);
            Cp800 += 3./2.*R + R*calcVibCp(freqs, 800., h, k, c);
            Cp1000 += 3./2.*R + R*calcVibCp(freqs, 1000., h, k, c);
            Cp1500 += 3./2.*R + R*calcVibCp(freqs, 1500., h, k, c);
        }
    }
    ThermoData result = new ThermoData(Hf298,S298,Cp300,Cp400,Cp500,Cp600,Cp800,Cp1000,Cp1500,5,1,1,"PM3 calculation");//this includes rough estimates of uncertainty
    return result;
}
//gets the statistical correction for S in dimensionless units (divided by R)
//the correction is +ln(2) if the point group is chiral (adds R*ln2 to entropy) minus ln(sigma),
//where sigma is the external rotational symmetry number (which affects the rotational partition function)
//ref: http://cccbdb.nist.gov/thermo.asp
//assumptions for Sn, T, Th, O, I seem to be in line with expectations based on order reported at: http://en.wikipedia.org/w/index.php?title=List_of_character_tables_for_chemically_important_3D_point_groups&oldid=287261611 (assuming order = symmetry number * 2 (/2 if chiral))...this appears to be true for all point groups I "know" to be correct
//minor concern: does SYMMETRY appropriately calculate all Sn groups considering 2007 discovery of previous errors in character tables (cf. Wikipedia article above)
//the table below replaces the original long if/else chain; entries are (point group, rotational symmetry number, chiral?)
private static final java.util.Map<String,Double> SIGMA_CORR_BY_POINT_GROUP = buildSigmaCorrTable();

//helper: stores sigmaCorr = (ln2 if chiral) - ln(rotSym) for one point group
private static void putSigmaCorr(java.util.Map<String,Double> table, String pointGroup, double rotSym, boolean chiral){
    double corr = -Math.log(rotSym);
    if (chiral) corr += Math.log(2.);
    table.put(pointGroup, Double.valueOf(corr));
}

//helper: builds the complete point-group -> sigmaCorr lookup table (built once, at class load)
private static java.util.Map<String,Double> buildSigmaCorrTable(){
    java.util.Map<String,Double> t = new java.util.HashMap<String,Double>();
    putSigmaCorr(t, "C1", 1., true);  //rot. sym. = 1, chiral
    putSigmaCorr(t, "Cs", 1., false); //rot. sym. = 1
    putSigmaCorr(t, "Ci", 1., false); //rot. sym. = 1
    for (int n = 2; n <= 8; n++){
        putSigmaCorr(t, "C"+n, n, true);        //Cn: rot. sym. = n, chiral (for C2 the corrections cancel)
        putSigmaCorr(t, "D"+n, 2*n, true);      //Dn: rot. sym. = 2n, chiral
        putSigmaCorr(t, "C"+n+"v", n, false);   //Cnv: rot. sym. = n
        putSigmaCorr(t, "C"+n+"h", n, false);   //Cnh: rot. sym. = n
        putSigmaCorr(t, "D"+n+"h", 2*n, false); //Dnh: rot. sym. = 2n
        putSigmaCorr(t, "D"+n+"d", 2*n, false); //Dnd: rot. sym. = 2n
    }
    putSigmaCorr(t, "S4", 2., false);     //rot. sym. = 2 ;*** assumed achiral
    putSigmaCorr(t, "S6", 3., false);     //rot. sym. = 3 ;*** assumed achiral
    putSigmaCorr(t, "S8", 4., false);     //rot. sym. = 4 ;*** assumed achiral
    putSigmaCorr(t, "T", 12., true);      //rot. sym. = 12, *** assumed chiral
    putSigmaCorr(t, "Th", 12., false);    //***assumed rot. sym. = 12
    putSigmaCorr(t, "Td", 12., false);    //rot. sym. = 12
    putSigmaCorr(t, "O", 24., true);      //***assumed rot. sym. = 24, chiral
    putSigmaCorr(t, "Oh", 24., false);    //rot. sym. = 24
    putSigmaCorr(t, "Cinfv", 1., false);  //rot. sym. = 1
    putSigmaCorr(t, "Dinfh", 2., false);  //rot. sym. = 2
    putSigmaCorr(t, "I", 60., true);      //***assumed rot. sym. = 60, chiral
    putSigmaCorr(t, "Ih", 60., false);    //rot. sym. = 60
    putSigmaCorr(t, "Kh", 1., false);     //arbitrarily set to zero (-ln 1)...one could argue that it is infinite; apparently this is the point group of a single atom; this should not have a rotational partition function, and we should not use the symmetry correction in this case
    return t;
}

//gets the statistical correction for S in dimensionless units (divided by R) for the given point group;
//terminates the program on an unrecognized point group (this point should not be reached, based on checks performed in determinePointGroupUsingSYMMETRYProgram)
public double getSigmaCorr(String pointGroup){
    Double sigmaCorr = SIGMA_CORR_BY_POINT_GROUP.get(pointGroup);
    if (sigmaCorr == null){
        System.out.println("Unrecognized point group: "+ pointGroup);
        System.exit(0);
    }
    return sigmaCorr.doubleValue();
}
//determine the point group using the SYMMETRY program (http://www.cobalt.chem.ucalgary.ca/ps/symmetry/)
//required input is a line with number of atoms followed by lines for each atom including atom number and x,y,z coordinates
//up to two attempts are made: the first with the default (0.02) tolerance, the second with a looser (0.1) tolerance;
//the tolerance determines how loose the point group criteria are; values are comparable to those specified in the GaussView point group interface
//terminates the program if no recognized point group is found after the final attempt, or on any I/O error
public String determinePointGroupUsingSYMMETRYProgram(String geom){
    int attemptNumber = 1;
    int maxAttemptNumber = 2;
    boolean pointGroupFound=false;
    //write the input file for the SYMMETRY program
    try {
        File inputFile=new File(qmfolder+"symminput.txt");//SYMMETRY program directory
        FileWriter fw = new FileWriter(inputFile);
        fw.write(geom);
        fw.close();
    } catch (IOException e) {
        String err = "Error writing input file for point group calculation";
        err += e.toString();
        System.out.println(err);
        System.exit(0);
    }
    String result = "";
    String command = "";
    while (attemptNumber<=maxAttemptNumber && !pointGroupFound){
        //call the program and read the result
        result = "";
        String [] lineArray;
        try{
            if (System.getProperty("os.name").toLowerCase().contains("windows")){//the Windows case where the precompiled executable seems to need to be called from a batch script
                if(attemptNumber==1) command = "\""+System.getProperty("RMG.workingDirectory")+"/scripts/symmetryDefault2.bat\" "+qmfolder+ "symminput.txt";//12/1/09 gmagoon: switched to use slightly looser criteria of 0.02 rather than 0.01 to handle methylperoxyl radical result from MOPAC
                else if (attemptNumber==2) command = "\""+System.getProperty("RMG.workingDirectory")+"/scripts/symmetryLoose.bat\" " +qmfolder+ "symminput.txt";//looser criteria (0.1 instead of 0.01) to properly identify C2v group in VBURLMBUVWIEMQ-UHFFFAOYAVmult5 (InChI=1/C3H4O2/c1-3(2,4)5/h1-2H2/mult5) MOPAC result; C2 and sigma were identified with default, but it should be C2 and sigma*2
                else{
                    System.out.println("Invalid attemptNumber: "+ attemptNumber);
                    System.exit(0);
                }
            }
            else{//in other (non-Windows) cases, where it is compiled from scratch, we should be able to run this directly
                if(attemptNumber==1) command = System.getProperty("RMG.workingDirectory")+"/bin/SYMMETRY.EXE -final 0.02 " +qmfolder+ "symminput.txt";//12/1/09 gmagoon: switched to use slightly looser criteria of 0.02 rather than 0.01 to handle methylperoxyl radical result from MOPAC
                else if (attemptNumber==2) command = System.getProperty("RMG.workingDirectory")+"/bin/SYMMETRY.EXE -final 0.1 " +qmfolder+ "symminput.txt";//looser criteria (0.1 instead of 0.01) to properly identify C2v group in VBURLMBUVWIEMQ-UHFFFAOYAVmult5 (InChI=1/C3H4O2/c1-3(2,4)5/h1-2H2/mult5) MOPAC result; C2 and sigma were identified with default, but it should be C2 and sigma*2
                else{
                    System.out.println("Invalid attemptNumber: "+ attemptNumber);
                    System.exit(0);
                }
            }
            Process symmProc = Runtime.getRuntime().exec(command);
            //read the program's stdout; the last relevant line, "It seems to be the [x] point group", carries the answer
            InputStream is = symmProc.getInputStream();
            InputStreamReader isr = new InputStreamReader(is);
            BufferedReader br = new BufferedReader(isr);
            String line=null;
            while ( (line = br.readLine()) != null) {
                if(line.startsWith("It seems to be the ")){//last line, ("It seems to be the [x] point group") indicates point group
                    lineArray = line.split(" ");//split the line around spaces
                    result = lineArray[5];//point group string should be the 6th word
                }
            }
            int exitValue = symmProc.waitFor();
        }
        catch(Exception e){
            String err = "Error in running point group calculation process using SYMMETRY \n";
            err += e.toString();
            e.printStackTrace();
            System.exit(0);
        }
        //check for a recognized point group; only these names are meaningful to getSigmaCorr
        if (result.equals("C1")||result.equals("Cs")||result.equals("Ci")||result.equals("C2")||result.equals("C3")||result.equals("C4")||result.equals("C5")||result.equals("C6")||result.equals("C7")||result.equals("C8")||result.equals("D2")||result.equals("D3")||result.equals("D4")||result.equals("D5")||result.equals("D6")||result.equals("D7")||result.equals("D8")||result.equals("C2v")||result.equals("C3v")||result.equals("C4v")||result.equals("C5v")||result.equals("C6v")||result.equals("C7v")||result.equals("C8v")||result.equals("C2h")||result.equals("C3h")||result.equals("C4h")||result.equals("C5h")||result.equals("C6h")||result.equals("C7h")||result.equals("C8h")||result.equals("D2h")||result.equals("D3h")||result.equals("D4h")||result.equals("D5h")||result.equals("D6h")||result.equals("D7h")||result.equals("D8h")||result.equals("D2d")||result.equals("D3d")||result.equals("D4d")||result.equals("D5d")||result.equals("D6d")||result.equals("D7d")||result.equals("D8d")||result.equals("S4")||result.equals("S6")||result.equals("S8")||result.equals("T")||result.equals("Th")||result.equals("Td")||result.equals("O")||result.equals("Oh")||result.equals("Cinfv")||result.equals("Dinfh")||result.equals("I")||result.equals("Ih")||result.equals("Kh")) pointGroupFound=true;
        else{
            if(attemptNumber < maxAttemptNumber) System.out.println("Attempt number "+attemptNumber+" did not identify a recognized point group (" +result+"). Will retry with looser point group criteria.");
            else{
                System.out.println("Final attempt number "+attemptNumber+" did not identify a recognized point group (" +result+"). Exiting.");
                System.exit(0);
            }
            attemptNumber++;
        }
    }
    System.out.println("Point group: "+ result);//print result, at least for debugging purposes
    return result;
}
//gmagoon 6/8/09
//calculate the vibrational contribution (divided by R, dimensionless) at temperature, T, in Kelvin to entropy
//p_freqs: harmonic frequencies in cm^-1; c in cm/s; k in J/K; h in J-s
//ref.: http://cccbdb.nist.gov/thermo.asp
public double calcVibS(ArrayList p_freqs, double p_T, double h, double k, double c){
    double total = 0;
    for (Object freqObj : p_freqs){
        double nu = (Double)freqObj;      //one vibrational frequency in cm^-1
        double ratio = h*c*nu/(k*p_T);    //dimensionless h*nu/(k*T)
        double boltz = Math.exp(-ratio);  //Boltzmann factor for this mode
        //harmonic-oscillator entropy term: -ln(1 - e^-x) + x*e^-x/(1 - e^-x)
        total = total - Math.log(1.-boltz)+ratio*boltz/(1.-boltz);
    }
    return total;
}
//gmagoon 6/8/09
//calculate the vibrational contribution (divided by R, dimensionless) at temperature, T, in Kelvin to heat capacity, Cp
//p_freqs: harmonic frequencies in cm^-1; c in cm/s; k in J/K; h in J-s
//ref.: http://cccbdb.nist.gov/thermo.asp
public double calcVibCp(ArrayList p_freqs, double p_T, double h, double k, double c){
    double total = 0;
    for (Object freqObj : p_freqs){
        double nu = (Double)freqObj;      //one vibrational frequency in cm^-1
        double ratio = h*c*nu/(k*p_T);    //dimensionless h*nu/(k*T)
        double boltz = Math.exp(-ratio);  //Boltzmann factor for this mode
        //harmonic-oscillator heat-capacity term: x^2 * e^-x / (1 - e^-x)^2
        total = total + Math.pow(ratio, 2.)*boltz/Math.pow(1.-boltz,2.);
    }
    return total;
}
//determine the QM filename (element 0) and augmented InChI (element 1) for a ChemGraph
//QM filename is InChIKey appended with mult3, mult4, mult5, or mult6 for multiplicities of 3 or higher
//augmented InChI is InChI appended with /mult3, /mult4, /mult5, or /mult6 for multiplicities of 3 or higher
public String [] getQMFileName(ChemGraph p_chemGraph){
    //generate InChI and key anew because ChemGraph may have changed (in particular, adding/removing hydrogens in HBI process)
    String fileName = p_chemGraph.getModifiedInChIKeyAnew();
    String augmentedInChI = p_chemGraph.getModifiedInChIAnew();
    return new String [] {fileName, augmentedInChI};
}
//returns true if a Gaussian file for the given name and directory (.log suffix) exists and indicates successful completion (same criteria as used after calculation runs); terminates if the InChI doesn't match the InChI in the file or if there is no InChI in the file; returns false otherwise
//name: the InChIKey-based base filename; directory: where the .log (and possibly .gjf) file lives; InChIaug: the augmented InChI RMG expects to find in the file
public boolean successfulGaussianResultExistsQ(String name, String directory, String InChIaug){
    //part of the code is taken from runGaussian code above
    //look in the output file to check for the successful termination of the Gaussian calculation
    //failed jobs will contain the a line beginning with " Error termination" near the end of the file
    File file = new File(directory+"/"+name+".log");
    if(file.exists()){//if the file exists, do further checks; otherwise, we will skip to final statement and return false
        int failureFlag=0;//flag (1 or 0) indicating whether the Gaussian job failed
        int InChIMatch=0;//flag (1 or 0) indicating whether the InChI in the file matches InChIaug; this can only be 1 if InChIFound is also 1;
        int InChIFound=0;//flag (1 or 0) indicating whether an InChI was found in the log file
        int InChIPartialMatch=0;//flag (1 or 0) indicating whether the InChI in the log file is a substring of the InChI in RMG's memory
        String logFileInChI="";
        try{
            FileReader in = new FileReader(file);
            BufferedReader reader = new BufferedReader(in);
            String line=reader.readLine();
            //scan the whole log file, setting the failure/InChI flags as the relevant lines are seen
            while(line!=null){
                if (line.startsWith(" Error termination ")) failureFlag=1;
                else if (line.startsWith(" ******")){//also look for imaginary frequencies
                    if (line.contains("imaginary frequencies")) failureFlag=1;
                }
                else if(line.startsWith(" InChI=")){
                    logFileInChI = line.trim();
                    //continue reading lines until a line of dashes is found (in this way, we can read InChIs that span multiple lines)
                    line=reader.readLine();
                    while (!line.startsWith(" --------")){
                        logFileInChI += line.trim();
                        line=reader.readLine();
                    }
                    InChIFound=1;
                    if(logFileInChI.equals(InChIaug)) InChIMatch=1;
                    else if(InChIaug.startsWith(logFileInChI)) InChIPartialMatch=1;//log-file InChI may be truncated (apparently limited to 79 characters)
                }
                line=reader.readLine();
            }
        }
        catch(Exception e){
            String err = "Error in reading preexisting Gaussian log file \n";
            err += e.toString();
            e.printStackTrace();
            System.exit(0);
        }
        //if the failure flag is still 0, the process should have been successful
        if (failureFlag==0&&InChIMatch==1){
            System.out.println("Pre-existing successful quantum result for " + name + " ("+InChIaug+") has been found. This log file will be used.");
            return true;
        }
        else if (InChIFound==1 && InChIMatch == 0){//InChIs do not match (most likely due to limited name length mirrored in log file (79 characters), but possibly due to a collision)
            if(InChIPartialMatch == 1){//case where the InChI in memory begins with the InChI in the log file; we will continue and check the input file, printing a warning if there is no match
                File inputFile = new File(directory+"/"+name+".gjf");
                if(inputFile.exists()){//read the Gaussian inputFile, which holds the full (untruncated) InChI
                    String inputFileInChI="";
                    try{
                        FileReader inI = new FileReader(inputFile);
                        BufferedReader readerI = new BufferedReader(inI);
                        String lineI=readerI.readLine();
                        while(lineI!=null){
                            if(lineI.startsWith(" InChI=")){
                                inputFileInChI = lineI.trim();
                            }
                            lineI=readerI.readLine();
                        }
                    }
                    catch(Exception e){
                        String err = "Error in reading preexisting Gaussian gjf file \n";
                        err += e.toString();
                        e.printStackTrace();
                        System.exit(0);
                    }
                    if(inputFileInChI.equals(InChIaug)){
                        if(failureFlag==0){
                            System.out.println("Pre-existing successful quantum result for " + name + " ("+InChIaug+") has been found. This log file will be used. *Note that input file was read to confirm lack of InChIKey collision (InChI probably more than 79 characters)");
                            return true;
                        }
                        else{//otherwise, failureFlag==1
                            System.out.println("Pre-existing quantum result for " + name + " ("+InChIaug+") has been found, but the result was apparently unsuccessful. The file will be overwritten with a new calculation. *Note that input file was read to confirm lack of InChIKey collision (InChI probably more than 79 characters)");
                            return false;
                        }
                    }
                    else{
                        if(inputFileInChI.equals("")){//InChI was not found in input file
                            System.out.println("*****Warning: potential InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " Log file Augmented InChI = "+logFileInChI + " . InChI could not be found in the Gaussian input file. You should manually check that the log file contains the intended species.");
                            return true;
                        }
                        else{//InChI was found but doesn't match: a genuine InChIKey collision, which is fatal
                            System.out.println("Congratulations! You appear to have discovered the first recorded instance of an InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " Gaussian input file Augmented InChI = "+inputFileInChI);
                            System.exit(0);
                        }
                    }
                }
                else{//no input file available to confirm; warn and trust the partial match
                    System.out.println("*****Warning: potential InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " Log file Augmented InChI = "+logFileInChI + " . Gaussian input file could not be found to check full InChI. You should manually check that the log file contains the intended species.");
                    return true;
                }
            }
            else{//not even a partial match: a genuine InChIKey collision, which is fatal
                System.out.println("Congratulations! You appear to have discovered the first recorded instance of an InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " Log file Augmented InChI = "+logFileInChI);
                System.exit(0);
            }
        }
        else if (InChIFound==0){//no InChI in the log file at all: fatal
            System.out.println("An InChI was not found in file: " +name+".log");
            System.exit(0);
        }
        else if (failureFlag==1){//note these should cover all possible results for this block, and if the file.exists block is entered, it should return from within the block and should not reach the return statement below
            System.out.println("Pre-existing quantum result for " + name + " ("+InChIaug+") has been found, but the result was apparently unsuccessful. The file will be overwritten with a new calculation.");
            return false;
        }
    }
    //we could print a line here for cases where the file doesn't exist, but this would probably be too verbose
    return false;
}
//returns true if a successful result exists (either Gaussian or MOPAC)
// public boolean [] successfulResultExistsQ(String name, String directory, String InChIaug){
// boolean gaussianResult=successfulGaussianResultExistsQ(name, directory, InChIaug);
// boolean mopacResult=successfulMOPACResultExistsQ(name, directory, InChIaug);
// return (gaussianResult || mopacResult);// returns true if either a successful Gaussian or MOPAC result exists
// }
//returns true if a MOPAC output file for the given name and directory (.out suffix) exists and indicates successful completion (same criteria as used after calculation runs); terminates if the InChI doesn't match the InChI in the file or if there is no InChI in the file; returns false otherwise
public boolean successfulMopacResultExistsQ(String name, String directory, String InChIaug){
//part of the code is taken from analogous code for Gaussian
//look in the output file to check for the successful termination of the calculation (assumed to be successful if "description of vibrations appears)
File file = new File(directory+"/"+name+".out");
if(file.exists()){//if the file exists, do further checks; otherwise, we will skip to final statement and return false
int failureFlag=1;//flag (1 or 0) indicating whether the MOPAC job failed
int failureOverrideFlag=0;//flag (1 or 0) to override success as measured by failureFlag
int InChIMatch=0;//flag (1 or 0) indicating whether the InChI in the file matches InChIaug; this can only be 1 if InChIFound is also 1;
int InChIFound=0;//flag (1 or 0) indicating whether an InChI was found in the log file
int InChIPartialMatch=0;//flag (1 or 0) indicating whether the InChI in the log file is a substring of the InChI in RMG's memory
String logFileInChI="";
try{
FileReader in = new FileReader(file);
BufferedReader reader = new BufferedReader(in);
String line=reader.readLine();
while(line!=null){
String trimLine= line.trim();
if (trimLine.equals("DESCRIPTION OF VIBRATIONS")){
// if(!MopacFileContainsNegativeFreqsQ(name, directory)) failureFlag=0;//check for this line; if it is here, check for negative frequencies
failureFlag = 0;
}
//negative frequencies notice example:
// NOTE: SYSTEM IS NOT A GROUND STATE, THEREFORE ZERO POINT
// ENERGY IS NOT MEANINGFULL. ZERO POINT ENERGY PRINTED
// DOES NOT INCLUDE THE 2 IMAGINARY FREQUENCIES
else if (trimLine.endsWith("IMAGINARY FREQUENCIES")){
// System.out.println("*****Imaginary freqencies found:");
failureOverrideFlag=1;
}
else if (trimLine.equals("EXCESS NUMBER OF OPTIMIZATION CYCLES")){//exceeding max cycles error
failureOverrideFlag=1;
}
else if (trimLine.equals("NOT ENOUGH TIME FOR ANOTHER CYCLE")){//timeout error
failureOverrideFlag=1;
}
else if(line.startsWith(" InChI=")){
logFileInChI = line.trim();//output files should take up to 240 characters of the name in the input file
InChIFound=1;
if(logFileInChI.equals(InChIaug)) InChIMatch=1;
else if(InChIaug.startsWith(logFileInChI)) InChIPartialMatch=1;
}
line=reader.readLine();
}
}
catch(Exception e){
String err = "Error in reading preexisting MOPAC output file \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
if(failureOverrideFlag==1) failureFlag=1; //job will be considered a failure if there are imaginary frequencies or if job terminates to to excess time/cycles
//if the failure flag is still 0, the process should have been successful
if (failureFlag==0&&InChIMatch==1){
System.out.println("Pre-existing successful MOPAC quantum result for " + name + " ("+InChIaug+") has been found. This log file will be used.");
return true;
}
else if (InChIFound==1 && InChIMatch == 0){//InChIs do not match (most likely due to limited name length mirrored in log file (240 characters), but possibly due to a collision)
// if(InChIPartialMatch == 1){//case where the InChI in memory begins with the InChI in the log file; we will continue and check the input file, printing a warning if there is no match
//look in the input file if the InChI doesn't match (apparently, certain characters can be deleted in MOPAC output file for long InChIs)
File inputFile = new File(directory+"/"+name+".mop");
if(inputFile.exists()){//read the MOPAC inputFile
String inputFileInChI="";
try{
FileReader inI = new FileReader(inputFile);
BufferedReader readerI = new BufferedReader(inI);
String lineI=readerI.readLine();
while(lineI!=null){
if(lineI.startsWith("InChI=")){
inputFileInChI = lineI.trim();
}
lineI=readerI.readLine();
}
}
catch(Exception e){
String err = "Error in reading preexisting MOPAC input file \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
if(inputFileInChI.equals(InChIaug)){
if(failureFlag==0){
System.out.println("Pre-existing successful MOPAC quantum result for " + name + " ("+InChIaug+") has been found. This log file will be used. *Note that input file was read to confirm lack of InChIKey collision (InChI probably more than 240 characters or characters probably deleted from InChI in .out file)");
return true;
}
else{//otherwise, failureFlag==1
System.out.println("Pre-existing MOPAC quantum result for " + name + " ("+InChIaug+") has been found, but the result was apparently unsuccessful. The file will be overwritten with a new calculation or Gaussian result (if available) will be used. *Note that input file was read to confirm lack of InChIKey collision (InChI probably more than 240 characters or characters probably deleted from InChI in .out file)");
return false;
}
}
else{
if(inputFileInChI.equals("")){//InChI was not found in input file
System.out.println("*****Warning: potential InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " Log file Augmented InChI = "+logFileInChI + " . InChI could not be found in the MOPAC input file. You should manually check that the output file contains the intended species.");
return true;
}
else{//InChI was found but doesn't match
System.out.println("Congratulations! You appear to have discovered the first recorded instance of an InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " MOPAC input file Augmented InChI = " + inputFileInChI + " Log file Augmented InChI = "+logFileInChI);
System.exit(0);
}
}
}
else{
System.out.println("*****Warning: potential InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " Log file Augmented InChI = "+logFileInChI + " . MOPAC input file could not be found to check full InChI. You should manually check that the log file contains the intended species.");
return true;
}
// }
// else{
// System.out.println("Congratulations! You appear to have discovered the first recorded instance of an InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " MOPAC output file Augmented InChI = "+logFileInChI);
// System.exit(0);
// }
}
else if (InChIFound==0){
System.out.println("An InChI was not found in file: " +name+".out");
System.exit(0);
}
else if (failureFlag==1){//note these should cover all possible results for this block, and if the file.exists block is entered, it should return from within the block and should not reach the return statement below
System.out.println("Pre-existing MOPAC quantum result for " + name + " ("+InChIaug+") has been found, but the result was apparently unsuccessful. The file will be overwritten with a new calculation or Gaussian result (if available) will be used.");
return false;
}
}
//we could print a line here for cases where the file doesn't exist, but this would probably be too verbose
return false;
}
//returns true if an MM4 output file for the given name and directory (.mm4out suffix) exists and indicates successful completion (same criteria as used after calculation runs); terminates if the InChI doesn't match the InChI in the file or if there is no InChI in the file; returns false otherwise
public boolean successfulMM4ResultExistsQ(String name, String directory, String InChIaug){
//part of the code is taken from analogous code for MOPAC (first ~half) and Gaussian (second ~half)
//look in the output file to check for the successful termination of the calculation (assumed to be successful if "description of vibrations appears)
int failureFlag=1;//flag (1 or 0) indicating whether the MM4 job failed
int failureOverrideFlag=0;//flag (1 or 0) to override success as measured by failureFlag
File file = new File(directory+"/"+name+".mm4out");
int InChIMatch=0;//flag (1 or 0) indicating whether the InChI in the file matches InChIaug; this can only be 1 if InChIFound is also 1;
int InChIFound=0;//flag (1 or 0) indicating whether an InChI was found in the log file
int InChIPartialMatch=0;//flag (1 or 0) indicating whether the InChI in the log file is a substring of the InChI in RMG's memory
if(file.exists()){//if the file exists, do further checks; otherwise, we will skip to final statement and return false
String logFileInChI="";
try{
FileReader in = new FileReader(file);
BufferedReader reader = new BufferedReader(in);
String line=reader.readLine();
while(line!=null){
String trimLine= line.trim();
if (trimLine.equals("STATISTICAL THERMODYNAMICS ANALYSIS")){
failureFlag = 0;
}
else if (trimLine.endsWith("imaginary frequencies,")){//read the number of imaginary frequencies and make sure it is zero
String[] split = trimLine.split("\\s+");
if (Integer.parseInt(split[3])>0){
System.out.println("*****Imaginary freqencies found:");
failureOverrideFlag=1;
}
}
else if (trimLine.contains(" 0.0 (fir )")){
if (useCanTherm){//zero frequencies are only acceptable when CanTherm is used
System.out.println("*****Warning: zero freqencies found (values lower than 7.7 cm^-1 are rounded to zero in MM4 output); CanTherm should hopefully correct this:");
}
else{
System.out.println("*****Zero freqencies found:");
failureOverrideFlag=1;
}
}
else if(trimLine.startsWith("InChI=")){
logFileInChI = line.trim();//output files should take up to about 60 (?) characters of the name in the input file
InChIFound=1;
if(logFileInChI.equals(InChIaug)) InChIMatch=1;
else if(InChIaug.startsWith(logFileInChI)) InChIPartialMatch=1;
}
line=reader.readLine();
}
}
catch(Exception e){
String err = "Error in reading preexisting MM4 output file \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
if(failureOverrideFlag==1) failureFlag=1; //job will be considered a failure if there are imaginary frequencies or if job terminates to to excess time/cycles
//if the failure flag is still 0, the process should have been successful
if (failureFlag==0&&InChIMatch==1){
System.out.println("Pre-existing successful MM4 result for " + name + " ("+InChIaug+") has been found. This log file will be used.");
return true;
}
else if (InChIFound==1 && InChIMatch == 0){//InChIs do not match (most likely due to limited name length mirrored in log file (79 characters), but possibly due to a collision)
if(InChIPartialMatch == 1){//case where the InChI in memory begins with the InChI in the log file; we will continue and check the input file, printing a warning if there is no match
File inputFile = new File(directory+"/"+name+".mm4");
if(inputFile.exists()){//read the MM4 inputFile
String inputFileInChI="";
try{
FileReader inI = new FileReader(inputFile);
BufferedReader readerI = new BufferedReader(inI);
String lineI=readerI.readLine();
//InChI should be repeated after in the first line of the input file
inputFileInChI = lineI.trim().substring(80);//extract the string starting with character 81
}
catch(Exception e){
String err = "Error in reading preexisting MM4 .mm4 file \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
if(inputFileInChI.equals(InChIaug)){
if(failureFlag==0){
System.out.println("Pre-existing successful MM4 result for " + name + " ("+InChIaug+") has been found. This log file will be used. *Note that input file was read to confirm lack of InChIKey collision (InChI probably more than ~60 characters)");
return true;
}
else{//otherwise, failureFlag==1
System.out.println("Pre-existing MM4 result for " + name + " ("+InChIaug+") has been found, but the result was apparently unsuccessful. The file will be overwritten with a new calculation. *Note that input file was read to confirm lack of InChIKey collision (InChI probably more than ~60 characters)");
return false;
}
}
else{
if(inputFileInChI.equals("")){//InChI was not found in input file
System.out.println("*****Warning: potential InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " Log file Augmented InChI = "+logFileInChI + " . InChI could not be found in the MM4 input file. You should manually check that the log file contains the intended species.");
return true;
}
else{//InChI was found but doesn't match
System.out.println("Congratulations! You appear to have discovered the first recorded instance of an InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " MM4 input file Augmented InChI = "+inputFileInChI);
System.exit(0);
}
}
}
else{
System.out.println("*****Warning: potential InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " Log file Augmented InChI = "+logFileInChI + " . MM4 input file could not be found to check full InChI. You should manually check that the log file contains the intended species.");
return true;
}
}
else{
System.out.println("Congratulations! You appear to have discovered the first recorded instance of an InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " Log file Augmented InChI = "+logFileInChI);
System.exit(0);
}
}
else if (InChIFound==0){
System.out.println("An InChI was not found in file: " +name+".mm4out");
System.exit(0);
}
else if (failureFlag==1){//note these should cover all possible results for this block, and if the file.exists block is entered, it should return from within the block and should not reach the return statement below
System.out.println("Pre-existing MM4 result for " + name + " ("+InChIaug+") has been found, but the result was apparently unsuccessful. The file will be overwritten with a new calculation.");
return false;
}
}
//we could print a line here for cases where the file doesn't exist, but this would probably be too verbose
return false;
}
// //checks the MOPAC file for negative frequencies
// public boolean MopacFileContainsNegativeFreqsQ(String name, String directory){
// boolean negativeFreq=false;
//
// //code below copied from parseMopacPM3()
// String command = "c:/Python25/python.exe c:/Python25/MopacPM3ParsingScript.py ";//this should eventually be modified for added generality
// String logfilepath=directory+"/"+name+".out";
// command=command.concat(logfilepath);
//
// //much of code below is copied from calculateThermoFromPM3Calc()
// //parse the Mopac file using cclib
// int natoms = 0; //number of atoms from Mopac file; in principle, this should agree with number of chemGraph atoms
// ArrayList atomicNumber = new ArrayList(); //vector of atomic numbers (integers) (apparently Vector is thread-safe; cf. http://answers.yahoo.com/question/index?qid=20081214065127AArZDT3; ...should I be using this instead?)
// ArrayList x_coor = new ArrayList(); //vectors of x-, y-, and z-coordinates (doubles) (Angstroms) (in order corresponding to above atomic numbers)
// ArrayList y_coor = new ArrayList();
// ArrayList z_coor = new ArrayList();
// double energy = 0; //PM3 energy (Hf298) in Hartree (***note: in the case of MOPAC, the MOPAC file will contain in units of kcal/mol, but modified ccLib will return in Hartree)
// double molmass = 0; //molecular mass in amu
// ArrayList freqs = new ArrayList(); //list of frequencies in units of cm^-1
// double rotCons_1 = 0;//rotational constants in (1/s)
// double rotCons_2 = 0;
// double rotCons_3 = 0;
// //int gdStateDegen = p_chemGraph.getRadicalNumber()+1;//calculate ground state degeneracy from the number of radicals; this should give the same result as spin multiplicity in Gaussian input file (and output file), but we do not explicitly check this (we could use "mult" which cclib reads in if we wanted to do so); also, note that this is not always correct, as there can apparently be additional spatial degeneracy for non-symmetric linear molecules like OH radical (cf. http://cccbdb.nist.gov/thermo.asp)
// try{
// File runningdir=new File(directory);
// Process cclibProc = Runtime.getRuntime().exec(command, null, runningdir);
// //read the stdout of the process, which should contain the desired information in a particular format
// InputStream is = cclibProc.getInputStream();
// InputStreamReader isr = new InputStreamReader(is);
// BufferedReader br = new BufferedReader(isr);
// String line=null;
// //example output:
//// C:\Python25>python.exe GaussianPM3ParsingScript.py TEOS.out
//// 33
//// [ 6 6 8 14 8 6 6 8 6 6 8 6 6 1 1 1 1 1 1 1 1 1 1 1 1
//// 1 1 1 1 1 1 1 1]
//// [[ 2.049061 -0.210375 3.133106]
//// [ 1.654646 0.321749 1.762752]
//// [ 0.359284 -0.110429 1.471465]
//// [-0.201871 -0.013365 -0.12819 ]
//// [ 0.086307 1.504918 -0.82893 ]
//// [-0.559186 2.619928 -0.284003]
//// [-0.180246 3.839463 -1.113029]
//// [ 0.523347 -1.188305 -1.112765]
//// [ 1.857584 -1.018167 -1.495088]
//// [ 2.375559 -2.344392 -2.033403]
//// [-1.870397 -0.297297 -0.075427]
//// [-2.313824 -1.571765 0.300245]
//// [-3.83427 -1.535927 0.372171]
//// [ 1.360346 0.128852 3.917699]
//// [ 2.053945 -1.307678 3.160474]
//// [ 3.055397 0.133647 3.403037]
//// [ 1.677262 1.430072 1.750899]
//// [ 2.372265 -0.029237 0.985204]
//// [-0.245956 2.754188 0.771433]
//// [-1.656897 2.472855 -0.287156]
//// [-0.664186 4.739148 -0.712606]
//// [-0.489413 3.734366 -2.161038]
//// [ 0.903055 4.016867 -1.112198]
//// [ 1.919521 -0.229395 -2.269681]
//// [ 2.474031 -0.680069 -0.629949]
//// [ 2.344478 -3.136247 -1.273862]
//// [ 1.786854 -2.695974 -2.890647]
//// [ 3.41648 -2.242409 -2.365094]
//// [-1.884889 -1.858617 1.28054 ]
//// [-1.976206 -2.322432 -0.440995]
//// [-4.284706 -1.26469 -0.591463]
//// [-4.225999 -2.520759 0.656131]
//// [-4.193468 -0.809557 1.112677]]
//// -14.1664924726
//// [ 9.9615 18.102 27.0569 31.8459 39.0096 55.0091
//// 66.4992 80.4552 86.4912 123.3551 141.6058 155.5448
//// 159.4747 167.0013 178.5676 207.3738 237.3201 255.3487
//// 264.5649 292.867 309.4248 344.6503 434.8231 470.2074
//// 488.9717 749.1722 834.257 834.6594 837.7292 839.6352
//// 887.9767 892.9538 899.5374 992.1851 1020.6164 1020.8671
//// 1028.3897 1046.7945 1049.1768 1059.4704 1065.1505 1107.4001
//// 1108.1567 1109.0466 1112.6677 1122.7785 1124.4315 1128.4163
//// 1153.3438 1167.6705 1170.9627 1174.9613 1232.1826 1331.8459
//// 1335.3932 1335.8677 1343.9556 1371.37 1372.8127 1375.5428
//// 1396.0344 1402.4082 1402.7554 1403.2463 1403.396 1411.6946
//// 1412.2456 1412.3519 1414.5982 1415.3613 1415.5698 1415.7993
//// 1418.5409 2870.7446 2905.3132 2907.0361 2914.1662 2949.2646
//// 2965.825 2967.7667 2971.5223 3086.3849 3086.3878 3086.6448
//// 3086.687 3089.2274 3089.4105 3089.4743 3089.5841 3186.0753
//// 3186.1375 3186.3511 3186.365 ]
//// [ 0.52729 0.49992 0.42466]
////note: above example has since been updated to print molecular mass; also frequency and atomic number format has been updated
// String [] stringArray;
// natoms = Integer.parseInt(br.readLine());//read line 1: number of atoms
// stringArray = br.readLine().replace("[", "").replace("]","").trim().split(",\\s+");//read line 2: the atomic numbers (first removing braces)
// // line = br.readLine().replace("[", "").replace("]","");//read line 2: the atomic numbers (first removing braces)
// // StringTokenizer st = new StringTokenizer(line); //apprently the stringTokenizer class is deprecated, but I am having trouble getting the regular expressions to work properly
// for(int i=0; i < natoms; i++){
// // atomicNumber.add(i,Integer.parseInt(stringArray[i]));
// atomicNumber.add(i,Integer.parseInt(stringArray[i]));
// }
// for(int i=0; i < natoms; i++){
// stringArray = br.readLine().replace("[", "").replace("]","").trim().split("\\s+");//read line 3+: coordinates for atom i; used /s+ for split; using spaces with default limit of 0 was giving empty string
// x_coor.add(i,Double.parseDouble(stringArray[0]));
// y_coor.add(i,Double.parseDouble(stringArray[1]));
// z_coor.add(i,Double.parseDouble(stringArray[2]));
// }
// energy = Double.parseDouble(br.readLine());//read next line: energy
// molmass = Double.parseDouble(br.readLine());//read next line: molecular mass (in amu)
// if (natoms>1){//read additional info for non-monoatomic species
// stringArray = br.readLine().replace("[", "").replace("]","").trim().split(",\\s+");//read next line: frequencies
// for(int i=0; i < stringArray.length; i++){
// freqs.add(i,Double.parseDouble(stringArray[i]));
// }
// stringArray = br.readLine().replace("[", "").replace("]","").trim().split("\\s+");//read next line rotational constants (converting from GHz to Hz in the process)
// rotCons_1 = Double.parseDouble(stringArray[0])*1000000000;
// rotCons_2 = Double.parseDouble(stringArray[1])*1000000000;
// rotCons_3 = Double.parseDouble(stringArray[2])*1000000000;
// }
// while ( (line = br.readLine()) != null) {
// //do nothing (there shouldn't be any more information, but this is included to get all the output)
// }
// int exitValue = cclibProc.waitFor();
// }
// catch (Exception e) {
// String err = "Error in running ccLib Python process \n";
// err += e.toString();
// e.printStackTrace();
// System.exit(0);
// }
//
// //start of code "new" to this function (aside from initialization of negativeFreq)
// if(natoms > 0){
// for (int i=0; i<freqs.size(); i++){
// if((Double)freqs.get(i) < 0) negativeFreq = true;
// }
// }
// return negativeFreq;
// }
    //## operation initGAGroupLibrary()
    /**
     * Obtains the shared ThermoGAGroupLibrary instance and stores it in the
     * thermoLibrary field. Called lazily (when thermoLibrary is null) before
     * hydrogen-bond-increment (HBI) radical corrections are looked up.
     */
    protected void initGAGroupLibrary() {
        //#[ operation initGAGroupLibrary()
        thermoLibrary = ThermoGAGroupLibrary.getINSTANCE();
        //#]
    }
}
/*********************************************************************
File Path : RMG\RMG\jing\chem\QMTP.java
*********************************************************************/
|
source/RMG/jing/chem/QMTP.java
|
////////////////////////////////////////////////////////////////////////////////
//
// RMG - Reaction Mechanism Generator
//
// Copyright (c) 2002-2009 Prof. William H. Green (whgreen@mit.edu) and the
// RMG Team (rmg_dev@mit.edu)
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
////////////////////////////////////////////////////////////////////////////////
package jing.chem;
import java.util.*;
import jing.chemUtil.*;
import jing.param.*;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
//quantum mechanics thermo property estimator; analog of GATP
public class QMTP implements GeneralGAPP {
    // ---- Class attributes -------------------------------------------------
    // ENTHALPY_HYDROGEN: enthalpy contribution per H atom subtracted during
    // HBI (hydrogen-bond-increment) corrections; units presumably kcal/mol to
    // match ThermoGAValue -- TODO confirm
    final protected static double ENTHALPY_HYDROGEN = 52.1; //needed for HBI
    // Eagerly-created singleton instance (private constructor below).
    private static QMTP INSTANCE = new QMTP(); //## attribute INSTANCE
    protected static PrimaryThermoLibrary primaryLibrary;//Note: may be able to separate this out into GeneralGAPP, as this is common to both GATP and QMTP
    // Relative path of the directory holding QM input/output files.
    public static String qmfolder= "QMfiles/";
    // protected static HashMap library; //as above, may be able to move this and associated functions to GeneralGAPP (and possibly change from "x implements y" to "x extends y"), as it is common to both GATP and QMTP
    protected ThermoGAGroupLibrary thermoLibrary; //needed for HBI
    public static String qmprogram= "both";//the qmprogram can be "mopac", "gaussian03", "both" (MOPAC and Gaussian), or "mm4"
    public static boolean usePolar = false; //use polar keyword in MOPAC
    public static boolean useCanTherm = false; //whether to use CanTherm in MM4 cases for interpreting output via force-constant matrix; this will hopefully avoid zero frequency issues
    public static boolean useHindRot = false;//whether to use HinderedRotor scans with MM4 (requires useCanTherm=true)
    // Constructors
    //## operation QMTP()
    /**
     * Private constructor (singleton pattern; the single instance is held in
     * INSTANCE). Initializes the primary thermo library, which is consulted
     * before any QM calculation is attempted.
     */
    private QMTP() {
        // initializeLibrary(); //gmagoon 72509: commented out in GATP, so I am mirroring the change here; other library functions below also commented out
        initializePrimaryThermoLibrary();
    }
    //## operation generateThermoData(ChemGraph)
    /**
     * Generates thermo data for the given ChemGraph, preferring (in order):
     * the primary thermo library, then a QM/MM calculation. Species with more
     * radicals than Global.maxRadNumForQM are handled by saturating the
     * radical sites with H, computing thermo for the saturated species, and
     * applying HBI (hydrogen-bond-increment) radical corrections plus a
     * symmetry-number correction.
     *
     * NOTE(review): this method temporarily mutates p_chemGraph's underlying
     * Graph in place (adding/removing H nodes) and restores it before
     * returning; statement order is load-bearing throughout.
     *
     * @param p_chemGraph the species to estimate thermo for
     * @return a ThermoData for the species (library value or QM/HBI estimate)
     */
    public ThermoData generateThermoData(ChemGraph p_chemGraph) {
        //#[ operation generateThermoData(ChemGraph)
        //first, check for thermo data in the primary thermo library and library (?); if it is there, use it
        ThermoData result = primaryLibrary.getThermoData(p_chemGraph.getGraph());
        //System.out.println(result);
        if (result != null) {
            p_chemGraph.fromprimarythermolibrary = true;
            return result;
        }
        // result = getFromLibrary(p_chemGraph.getChemicalFormula());//gmagoon 72509: commented out in GATP, so I am mirroring the change here
        // if (result != null) return result;
        result=new ThermoData();
        int maxRadNumForQM = Global.maxRadNumForQM;
        if (p_chemGraph.getRadicalNumber() > maxRadNumForQM)//use HBI if the molecule has more radicals than maxRadNumForQM; this is helpful because ; also MM4 (and MM3) look like they may have issues with radicals
        {//this code is based closely off of GATP saturation (in getGAGroup()), but there are some modifications, particularly for symmetry correction
            //find the initial symmetry number
            int sigmaRadical = p_chemGraph.getSymmetryNumber();
            Graph g = p_chemGraph.getGraph();
            HashMap oldCentralNode = (HashMap)(p_chemGraph.getCentralNode()).clone();
            // saturate radical site
            int max_radNum_molecule = ChemGraph.getMAX_RADICAL_NUM();
            int max_radNum_atom = Math.min(8,max_radNum_molecule);
            int [] idArray = new int[max_radNum_molecule];
            Atom [] atomArray = new Atom[max_radNum_molecule];
            Node [][] newnode = new Node[max_radNum_molecule][max_radNum_atom];
            int radicalSite = 0;
            Iterator iter = p_chemGraph.getNodeList();
            FreeElectron satuated = FreeElectron.make("0");
            // Pass 1: replace each radical atom with a saturated copy,
            // remembering the original atom and node id for later restoration.
            while (iter.hasNext()) {
                Node node = (Node)iter.next();
                Atom atom = (Atom)node.getElement();
                if (atom.isRadical()) {
                    radicalSite ++;
                    // save the old radical atom
                    idArray[radicalSite-1] = node.getID().intValue();
                    atomArray[radicalSite-1] = atom;
                    // new a satuated atom and replace the old one
                    Atom newAtom = new Atom(atom.getChemElement(),satuated);
                    node.setElement(newAtom);
                    node.updateFeElement();
                }
            }
            // add H to saturate chem graph
            Atom H = Atom.make(ChemElement.make("H"),satuated);
            Bond S = Bond.make("S");
            for (int i=0;i<radicalSite;i++) {
                Node node = p_chemGraph.getNodeAt(idArray[i]);
                Atom atom = atomArray[i];
                int HNum = atom.getRadicalNumber();
                for (int j=0;j<HNum;j++) {
                    newnode[i][j] = g.addNode(H);
                    g.addArcBetween(node,S,newnode[i][j]);
                }
                node.updateFgElement();
            }
            //find the saturated symmetry number
            int sigmaSaturated = p_chemGraph.getSymmetryNumber();
            // result = generateThermoData(g);//I'm not sure what GATP does, but this recursive calling will use HBIs on saturated species if it exists in PrimaryThermoLibrary
            //check the primary thermo library for the saturated graph
            result = primaryLibrary.getThermoData(p_chemGraph.getGraph());
            //System.out.println(result);
            if (result != null) {
                p_chemGraph.fromprimarythermolibrary = true;
            }
            else{
                result=generateQMThermoData(p_chemGraph);
            }
            // find the BDE for all radical groups
            if(thermoLibrary == null) initGAGroupLibrary();
            // Pass 2: for each saturated site, temporarily restore the radical
            // (removing the added H atoms), look up the HBI correction, add it
            // to result, then re-saturate for the next iteration.
            for (int i=0; i<radicalSite; i++) {
                int id = idArray[i];
                Node node = g.getNodeAt(id);
                Atom old = (Atom)node.getElement();
                node.setElement(atomArray[i]);
                node.updateFeElement();
                // get rid of the extra H at ith site
                int HNum = atomArray[i].getRadicalNumber();
                for (int j=0;j<HNum;j++) {
                    g.removeNode(newnode[i][j]);
                }
                node.updateFgElement();
                p_chemGraph.resetThermoSite(node);
                ThermoGAValue thisGAValue = thermoLibrary.findRadicalGroup(p_chemGraph);
                if (thisGAValue == null) {
                    System.err.println("Radical group not found: " + node.getID());
                }
                else {
                    //System.out.println(node.getID() + " radical correction: " + thisGAValue.getName() + " "+thisGAValue.toString());
                    result.plus(thisGAValue);
                }
                //recover the saturated site for next radical site calculation
                node.setElement(old);
                node.updateFeElement();
                for (int j=0;j<HNum;j++) {
                    newnode[i][j] = g.addNode(H);
                    g.addArcBetween(node,S,newnode[i][j]);
                }
                node.updateFgElement();
            }
            // recover the chem graph structure
            // recover the radical
            for (int i=0; i<radicalSite; i++) {
                int id = idArray[i];
                Node node = g.getNodeAt(id);
                node.setElement(atomArray[i]);
                node.updateFeElement();
                int HNum = atomArray[i].getRadicalNumber();
                //get rid of extra H
                for (int j=0;j<HNum;j++) {
                    g.removeNode(newnode[i][j]);
                }
                node.updateFgElement();
            }
            // subtract the enthalphy of H from the result
            int rad_number = p_chemGraph.getRadicalNumber();
            ThermoGAValue enthalpy_H = new ThermoGAValue(ENTHALPY_HYDROGEN * rad_number, 0,0,0,0,0,0,0,0,0,0,0,null);
            result.minus(enthalpy_H);
            //correct the symmetry number based on the relative radical and saturated symmetry number; this should hopefully sidestep potential complications based on the fact that certain symmetry effects could be included in HBI value itself, and the fact that the symmetry number correction for saturated molecule has already been implemented, and it is likely to be different than symmetry number considered here, since the correction for the saturated molecule will have been external symmetry number, whereas RMG's ChemGraph symmetry number estimator includes both internal and external symmetry contributions; even so, I don't know if this will handle a change from chiral to achiral (or vice versa) properly
            ThermoGAValue symmetryNumberCorrection = new ThermoGAValue(0,-1*GasConstant.getCalMolK()*Math.log((double)(sigmaRadical)/(double)(sigmaSaturated)),0,0,0,0,0,0,0,0,0,0,null);
            result.plus(symmetryNumberCorrection);
            p_chemGraph.setCentralNode(oldCentralNode);
            //display corrected thermo to user
            String [] InChInames = getQMFileName(p_chemGraph);//determine the filename (InChIKey) and InChI with appended info for triplets, etc.
            String name = InChInames[0];
            String InChIaug = InChInames[1];
            System.out.println("HBI-based thermo for " + name + "("+InChIaug+"): "+ result.toString());//print result, at least for debugging purposes
        }
        else{
            result = generateQMThermoData(p_chemGraph);
        }
        return result;
        //#]
    }
public ThermoData generateQMThermoData(ChemGraph p_chemGraph){
//if there is no data in the libraries, calculate the result based on QM or MM calculations; the below steps will be generalized later to allow for other quantum mechanics packages, etc.
String qmProgram = qmprogram;
String qmMethod = "";
if(qmProgram.equals("mm4")){
qmMethod = "mm4";
}
else{
qmMethod="pm3"; //may eventually want to pass this to various functions to choose which "sub-function" to call
}
ThermoData result = new ThermoData();
String [] InChInames = getQMFileName(p_chemGraph);//determine the filename (InChIKey) and InChI with appended info for triplets, etc.
String name = InChInames[0];
String InChIaug = InChInames[1];
String directory = qmfolder;
File dir=new File(directory);
directory = dir.getAbsolutePath();//this and previous three lines get the absolute path for the directory
if(qmMethod.equals("pm3")){
//first, check to see if the result already exists and the job terminated successfully
boolean gaussianResultExists = successfulGaussianResultExistsQ(name,directory,InChIaug);
boolean mopacResultExists = successfulMopacResultExistsQ(name,directory,InChIaug);
if(!gaussianResultExists && !mopacResultExists){//if a successful result doesn't exist from previous run (or from this run), run the calculation; if a successful result exists, we will skip directly to parsing the file
//steps 1 and 2: create 2D and 3D mole files
molFile p_3dfile = create3Dmolfile(name, p_chemGraph);
//3. create the Gaussian or MOPAC input file
directory = qmfolder;
dir=new File(directory);
directory = dir.getAbsolutePath();//this and previous three lines get the absolute path for the directory
int attemptNumber=1;//counter for attempts using different keywords
int successFlag=0;//flag for success of Gaussian run; 0 means it failed, 1 means it succeeded
int maxAttemptNumber=1;
int multiplicity = p_chemGraph.getRadicalNumber()+1; //multiplicity = radical number + 1
while(successFlag==0 && attemptNumber <= maxAttemptNumber){
//IF block to check which program to use
if (qmProgram.equals("gaussian03")){
if(p_chemGraph.getAtomNumber() > 1){
maxAttemptNumber = createGaussianPM3Input(name, directory, p_3dfile, attemptNumber, InChIaug, multiplicity);
}
else{
maxAttemptNumber = createGaussianPM3Input(name, directory, p_3dfile, -1, InChIaug, multiplicity);//use -1 for attemptNumber for monoatomic case
}
//4. run Gaussian
successFlag = runGaussian(name, directory);
}
else if (qmProgram.equals("mopac") || qmProgram.equals("both")){
maxAttemptNumber = createMopacPM3Input(name, directory, p_3dfile, attemptNumber, InChIaug, multiplicity);
successFlag = runMOPAC(name, directory);
}
else{
System.out.println("Unsupported quantum chemistry program");
System.exit(0);
}
//new IF block to check success
if(successFlag==1){
System.out.println("Attempt #"+attemptNumber + " on species " + name + " ("+InChIaug+") succeeded.");
}
else if(successFlag==0){
if(attemptNumber==maxAttemptNumber){//if this is the last possible attempt, and the calculation fails, exit with an error message
if(qmProgram.equals("both")){ //if we are running with "both" option and all keywords fail, try with Gaussian
qmProgram = "gaussian03";
System.out.println("*****Final MOPAC attempt (#" + maxAttemptNumber + ") on species " + name + " ("+InChIaug+") failed. Trying to use Gaussian.");
attemptNumber=0;//this needs to be 0 so that when we increment attemptNumber below, it becomes 1 when returning to the beginning of the for loop
maxAttemptNumber=1;
}
else{
System.out.println("*****Final attempt (#" + maxAttemptNumber + ") on species " + name + " ("+InChIaug+") failed.");
System.out.print(p_chemGraph.toString());
System.exit(0);
// return new ThermoData(1000,0,0,0,0,0,0,0,0,0,0,0,"failed calculation");
}
}
System.out.println("*****Attempt #"+attemptNumber + " on species " + name + " ("+InChIaug+") failed. Will attempt a new keyword.");
attemptNumber++;//try again with new keyword
}
}
}
//5. parse QM output and record as thermo data (function includes symmetry/point group calcs, etc.); if both Gaussian and MOPAC results exist, Gaussian result is used
if (gaussianResultExists || (qmProgram.equals("gaussian03") && !mopacResultExists)){
result = parseGaussianPM3(name, directory, p_chemGraph);
}
else if (mopacResultExists || qmProgram.equals("mopac") || qmProgram.equals("both")){
result = parseMopacPM3(name, directory, p_chemGraph);
}
else{
System.out.println("Unexpected situation in QMTP thermo estimation");
System.exit(0);
}
}
else{//mm4 case
//first, check to see if the result already exists and the job terminated successfully
boolean mm4ResultExists = successfulMM4ResultExistsQ(name,directory,InChIaug);
if(!mm4ResultExists){//if a successful result doesn't exist from previous run (or from this run), run the calculation; if a successful result exists, we will skip directly to parsing the file
//steps 1 and 2: create 2D and 3D mole files
molFile p_3dfile = create3Dmolfile(name, p_chemGraph);
//3. create the MM4 input file
directory = qmfolder;
dir=new File(directory);
directory = dir.getAbsolutePath();//this and previous three lines get the absolute path for the directory
int attemptNumber=1;//counter for attempts using different keywords
int successFlag=0;//flag for success of MM4 run; 0 means it failed, 1 means it succeeded
int maxAttemptNumber=1;
int multiplicity = p_chemGraph.getRadicalNumber()+1; //multiplicity = radical number + 1
while(successFlag==0 && attemptNumber <= maxAttemptNumber){
maxAttemptNumber = createMM4Input(name, directory, p_3dfile, attemptNumber, InChIaug, multiplicity);
//4. run MM4
successFlag = runMM4(name, directory);
//new IF block to check success
if(successFlag==1){
System.out.println("Attempt #"+attemptNumber + " on species " + name + " ("+InChIaug+") succeeded.");
}
else if(successFlag==0){
if(attemptNumber==maxAttemptNumber){//if this is the last possible attempt, and the calculation fails, exit with an error message
System.out.println("*****Final attempt (#" + maxAttemptNumber + ") on species " + name + " ("+InChIaug+") failed.");
System.out.print(p_chemGraph.toString());
System.exit(0);
//return new ThermoData(1000,0,0,0,0,0,0,0,0,0,0,0,"failed calculation");
}
System.out.println("*****Attempt #"+attemptNumber + " on species " + name + " ("+InChIaug+") failed. Will attempt a new keyword.");
attemptNumber++;//try again with new keyword
}
}
}
//5. parse MM4 output and record as thermo data (function includes symmetry/point group calcs, etc.); if both Gaussian and MOPAC results exist, Gaussian result is used
if(!useCanTherm) result = parseMM4(name, directory, p_chemGraph);
else result = parseMM4withForceMat(name, directory, p_chemGraph);
}
return result;
}
//Returns the singleton QMTP instance held in the INSTANCE field.
//NOTE(review): accessor name violates Java naming conventions (getInstance), but is kept as-is for API compatibility with existing callers.
protected static QMTP getINSTANCE() {
    return INSTANCE;
}
//Points this object's primaryLibrary field at the PrimaryThermoLibrary singleton.
//Must be called before thermo estimation if primary-library lookups are desired.
public void initializePrimaryThermoLibrary(){//svp
    primaryLibrary = PrimaryThermoLibrary.getINSTANCE();
}
//creates a 3D molFile; for monoatomic species, it just returns the 2D molFile
public molFile create3Dmolfile(String name, ChemGraph p_chemGraph){
    //Step 1: write the 2D mol file.
    //The absolute path is used so the file can easily be referenced from other
    //directories in command-line paths; RMG.workingDirectory is not usable here
    //since it basically holds the RMG environment variable, not the working directory.
    String twoDdirectory = new File("2Dmolfiles/").getAbsolutePath();
    molFile twoDfile = new molFile(name, twoDdirectory, p_chemGraph);
    int atomCount = p_chemGraph.getAtomNumber();
    //Step 2: a monoatomic species needs no 3D embedding; hand the 2D file back directly
    if(atomCount <= 1){
        return twoDfile;
    }
    //Number of conformer attempts scales linearly with molecule size, due to time
    //considerations; in practice it is probably more like 3^(n-3) or something like that.
    //The atomCount > 3 check prevents the number of attempts from being negative.
    int conformerAttempts = (atomCount > 3) ? 5*(atomCount-3) : 1;
    //Step 3: convert from 2D to 3D using RDKit
    return embed3D(twoDfile, conformerAttempts);
}
//embed a molecule in 3D, using RDKit
//twoDmolFile: the 2D mol file to convert; numConfAttempts: number of conformer embedding attempts passed to the RDKit script
//Returns a molFile pointer to the newly written 3D file (same base name as the 2D file, located in the 3Dmolfiles directory).
//Side effects: runs an external Python/RDKit process (killed after 120 s if it hangs) and echoes any of its stderr output to System.err.
public molFile embed3D(molFile twoDmolFile, int numConfAttempts){
    //convert to 3D MOL file using RDKit script
    int flag=0;//set to 1 if the Python process writes anything to stderr (i.e. the script failed)
    String directory = "3Dmolfiles/";
    File dir=new File(directory);
    directory = dir.getAbsolutePath();//this uses the absolute path for the directory
    String name = twoDmolFile.getName();
    try{
        File runningdir=new File(directory);
        String command="";
        //NOTE(review): the command is built by string concatenation and run via Runtime.exec(String...);
        //on non-Windows systems a path containing spaces would be mis-tokenized — consider ProcessBuilder with an argument list.
        if (System.getProperty("os.name").toLowerCase().contains("windows")){//special windows case where paths can have spaces and are allowed to be surrounded by quotes
            command = "python \""+System.getProperty("RMG.workingDirectory")+"/scripts/distGeomScriptMolLowestEnergyConf.py\" ";
            String twoDmolpath=twoDmolFile.getPath();
            command=command.concat("\""+twoDmolpath+"\" ");
            command=command.concat("\""+name+".mol\" ");//this is the target file name; use the same name as the twoDmolFile (but it will be in the 3Dmolfiles folder)
            command=command.concat("\""+name+".cmol\" ");//this is the target file name for crude coordinates (corresponding to the minimum energy conformation based on UFF refinement); use the same name as the twoDmolFile (but it will be in the 3Dmolfiles folder) and have suffix .cmol
            command=command.concat(numConfAttempts + " ");
            command=command.concat("\"" + System.getenv("RDBASE")+"\"");//pass the $RDBASE environment variable to the script so it can use the appropriate directory when importing rdkit
        }
        else{//non-Windows case
            command = "python "+System.getProperty("RMG.workingDirectory")+"/scripts/distGeomScriptMolLowestEnergyConf.py ";
            String twoDmolpath=twoDmolFile.getPath();
            command=command.concat(""+twoDmolpath+" ");
            command=command.concat(name+".mol ");//this is the target file name; use the same name as the twoDmolFile (but it will be in the 3Dmolfiles folder)
            command=command.concat(name+".cmol ");//this is the target file name for crude coordinates (corresponding to the minimum energy conformation based on UFF refinement); use the same name as the twoDmolFile (but it will be in the 3Dmolfiles folder) and have suffix .cmol
            command=command.concat(numConfAttempts + " ");
            command=command.concat(System.getenv("RDBASE"));//pass the $RDBASE environment variable to the script so it can use the appropriate directory when importing rdkit
        }
        Process pythonProc = Runtime.getRuntime().exec(command, null, runningdir);
        String killmsg= "Python process for "+twoDmolFile.getName()+" did not complete within 120 seconds, and the process was killed. File was probably not written.";//message to print if the process times out
        Thread timeoutThread = new TimeoutKill(pythonProc, killmsg, 120000L); //create a timeout thread to handle cases where the UFF optimization get's locked up (cf. Ch. 16 of "Ivor Horton's Beginning Java 2: JDK 5 Edition"); once we use the updated version of RDKit, we should be able to get rid of this
        timeoutThread.start();//start the thread
        //check for errors and display the error if there is one
        //(draining stderr also prevents the child process from blocking on a full pipe)
        InputStream is = pythonProc.getErrorStream();
        InputStreamReader isr = new InputStreamReader(is);
        BufferedReader br = new BufferedReader(isr);
        String line=null;
        while ( (line = br.readLine()) != null) {
            line = line.trim();
            System.err.println(line);
            flag=1;
        }
        //if there was an error, indicate the file and InChI
        if(flag==1){
            System.out.println("RDKit received error (see above) on " + twoDmolFile.getName()+". File was probably not written.");
        }
        int exitValue = pythonProc.waitFor();
        if(timeoutThread.isAlive())//if the timeout thread is still alive (indicating that the process has completed in a timely manner), stop the timeout thread
            timeoutThread.interrupt();
    }
    catch (Exception e) {
        String err = "Error in running RDKit Python process \n";
        err += e.toString();
        e.printStackTrace();
        System.exit(0);
    }
    // gmagoon 6/3/09 comment out InChI checking for now; in any case, the code will need to be updated, as it is copied from my testing code
    //	//check whether the original InChI is reproduced
    //	if(flag==0){
    //	    try{
    //		File f=new File("c:/Python25/"+molfilename);
    //		File newFile= new File("c:/Python25/mol3d.mol");
    //		if(newFile.exists()){
    //		    newFile.delete();//apparently renaming will not work unless target file does not exist (at least on Vista)
    //		}
    //		f.renameTo(newFile);
    //		String command = "c:/Users/User1/Documents/InChI-1/cInChI-1.exe c:/Python25/mol3d.mol inchi3d.inchi /AuxNone /DoNotAddH";//DoNotAddH used to prevent adding Hs to radicals (this would be a problem for current RDKit output which doesn't use M RAD notation)
    //		Process inchiProc = Runtime.getRuntime().exec(command);
    //		// int exitValue = inchiProc.waitFor();
    //		Thread.sleep(200);//****update: can probably eliminate this using buffered reader
    //		inchiProc.destroy();
    //
    //		//read output file
    //		File outputFile = new File("inchi3d.inchi");
    //		FileReader fr = new FileReader(outputFile);
    //		BufferedReader br = new BufferedReader(fr);
    //		String line=null;
    //		String inchi3d=null;
    //		while ( (line = br.readLine()) != null) {
    //		    line = line.trim();
    //		    if(line.startsWith("InChI="))
    //		    {
    //			inchi3d=line;
    //		    }
    //		}
    //		fr.close();
    //
    //		//return file to original name:
    //		File f2=new File("c:/Python25/mol3d.mol");
    //		File newFile2= new File("c:/Python25/"+molfilename);
    //		if(newFile2.exists()){
    //		    newFile2.delete();
    //		}
    //		f2.renameTo(newFile2);
    //
    //		//compare inchi3d with input inchi and print a message if they don't match
    //		if(!inchi3d.equals(inchiString)){
    //		    if(inchi3d.startsWith(inchiString)&&inchiString.length()>10){//second condition ensures 1/C does not match 1/CH4; 6 characters for InChI=, 2 characters for 1/, 2 characters for atom layer
    //			System.out.println("(probably minor) For File: "+ molfilename+" , 3D InChI (" + inchi3d+") begins with, but does not match original InChI ("+inchiString+"). SMILES string: "+ smilesString);
    //
    //		    }
    //		    else{
    //			System.out.println("For File: "+ molfilename+" , 3D InChI (" + inchi3d+") does not match original InChI ("+inchiString+"). SMILES string: "+ smilesString);
    //		    }
    //		}
    //	    }
    //	    catch (Exception e) {
    //		String err = "Error in running InChI process \n";
    //		err += e.toString();
    //		e.printStackTrace();
    //		System.exit(0);
    //	    }
    //	}
    //construct molFile pointer to new file (name will be same as 2D mol file)
    return new molFile(name, directory);
}
//creates Gaussian PM3 input file in directory with filename name.gjf by using OpenBabel to convert p_molfile
//attemptNumber determines which keywords to try
//the function returns the maximum number of keywords that can be attempted; this will be the same throughout the evaluation of the code, so it may be more appropriate to have this as a "constant" attribute of some sort
//attemptNumber=-1 will call a special set of keywords for the monoatomic case
//Side effects: writes directory/inputkeywords.txt and runs OpenBabel to produce directory/name.gjf; exits the JVM on any failure.
public int createGaussianPM3Input(String name, String directory, molFile p_molfile, int attemptNumber, String InChIaug, int multiplicity){
    //write a file with the input keywords
    int maxAttemptNumber=18;//update this if additional keyword options are added or removed
    try{
        File inpKey=new File(directory+"/inputkeywords.txt");
        String inpKeyStr="%chk="+directory+"/RMGrunCHKfile.chk\n";
        inpKeyStr+="%mem=6MW\n";
        inpKeyStr+="%nproc=1\n";
        //each attemptNumber maps to one Gaussian route line; the chain must stay in sync with maxAttemptNumber above
        if(attemptNumber==-1) inpKeyStr+="# pm3 freq";//keywords for monoatomic case (still need to use freq keyword to get molecular mass)
        else if(attemptNumber==1) inpKeyStr+="# pm3 opt=(verytight,gdiis) freq IOP(2/16=3)";//added IOP option to avoid aborting when symmetry changes; 3 is supposed to be default according to documentation, but it seems that 0 (the default) is the only option that doesn't work from 0-4; also, it is interesting to note that all 4 options seem to work for test case with z-matrix input rather than xyz coords; cf. http://www.ccl.net/cgi-bin/ccl/message-new?2006+10+17+005 for original idea for solution
        else if(attemptNumber==2) inpKeyStr+="# pm3 opt=(verytight,gdiis) freq IOP(2/16=3) IOP(4/21=2)";//use different SCF method; this addresses at least one case of failure for a C4H7J species
        else if(attemptNumber==3) inpKeyStr+="# pm3 opt=(verytight,calcfc,maxcyc=200) freq IOP(2/16=3) nosymm";//try multiple different options (no gdiis, use calcfc, nosymm); 7/21/09: added maxcyc option to fix case of MPTBUKVAJYJXDE-UHFFFAOYAPmult3 (InChI=1/C4H10O5Si/c1-3-7-9-10(5,6)8-4-2/h4-5H,3H2,1-2H3/mult3) (file manually copied to speed things along)
        else if(attemptNumber==4) inpKeyStr+="# pm3 opt=(verytight,calcfc,maxcyc=200) freq=numerical IOP(2/16=3) nosymm";//7/8/09: numerical frequency keyword version of keyword #3; used to address GYFVJYRUZAKGFA-UHFFFAOYALmult3 (InChI=1/C6H14O6Si/c1-3-10-13(8,11-4-2)12-6-5-9-7/h6-7H,3-5H2,1-2H3/mult3) case; (none of the existing Gaussian or MOPAC combinations worked with it)
        else if(attemptNumber==5) inpKeyStr+="# pm3 opt=(verytight,gdiis,small) freq IOP(2/16=3)";//7/10/09: somehow, this worked for problematic case of ZGAWAHRALACNPM-UHFFFAOYAF (InChI=1/C8H17O5Si/c1-3-11-14(10,12-4-2)13-8-5-7(9)6-8/h7-9H,3-6H2,1-2H3); (was otherwise giving l402 errors); even though I had a keyword that worked for this case, I manually copied the fixed log file to QMfiles folder to speed things along; note that there are a couple of very low frequencies (~5-6 cm^-1 for this case)
        else if(attemptNumber==6) inpKeyStr+="# pm3 opt=(verytight,nolinear,calcfc,small) freq IOP(2/16=3)";//used for troublesome C5H7J2 case (similar error to C5H7J below); calcfc is not necessary for this particular species, but it speeds convergence and probably makes it more robust for other species
        else if(attemptNumber==7) inpKeyStr+="# pm3 opt=(verytight,gdiis,maxcyc=200) freq=numerical IOP(2/16=3)"; //use numerical frequencies; this takes a relatively long time, so should only be used as one of the last resorts; this seemed to address at least one case of failure for a C6H10JJ species; 7/15/09: maxcyc=200 added to address GVCMURUDAUQXEY-UHFFFAOYAVmult3 (InChI=1/C3H4O7Si/c1-2(9-6)10-11(7,8)3(4)5/h6-7H,1H2/mult3)...however, result was manually pasted in QMfiles folder to speed things along
        else if(attemptNumber==8) inpKeyStr+="# pm3 opt=tight freq IOP(2/16=3)";//7/10/09: this worked for problematic case of SZSSHFMXPBKYPR-UHFFFAOYAF (InChI=1/C7H15O5Si/c1-3-10-13(8,11-4-2)12-7-5-6-9-7/h7H,3-6H2,1-2H3) (otherwise, it had l402.exe errors); corrected log file was manually copied to QMfiles to speed things along; we could also add a freq=numerical version of this keyword combination for added robustness; UPDATE: see below
        else if(attemptNumber==9) inpKeyStr+="# pm3 opt=tight freq=numerical IOP(2/16=3)";//7/10/09: used for problematic case of CIKDVMUGTARZCK-UHFFFAOYAImult4 (InChI=1/C8H15O6Si/c1-4-12-15(10,13-5-2)14-7-6-11-8(7,3)9/h7H,3-6H2,1-2H3/mult4 (most other cases had l402.exe errors); corrected log file was manually copied to QMfiles to speed things along
        else if(attemptNumber==10) inpKeyStr+="# pm3 opt=(tight,nolinear,calcfc,small,maxcyc=200) freq IOP(2/16=3)";//7/8/09: similar to existing #5, but uses tight rather than verytight; used for ADMPQLGIEMRGAT-UHFFFAOYAUmult3 (InChI=1/C6H14O5Si/c1-4-9-12(8,10-5-2)11-6(3)7/h6-7H,3-5H2,1-2H3/mult3)
        else if(attemptNumber==11) inpKeyStr+="# pm3 opt freq IOP(2/16=3)"; //use default (not verytight) convergence criteria; use this as last resort
        else if(attemptNumber==12) inpKeyStr+="# pm3 opt=(verytight,gdiis) freq=numerical IOP(2/16=3) IOP(4/21=200)";//to address problematic C10H14JJ case
        else if(attemptNumber==13) inpKeyStr+="# pm3 opt=(calcfc,verytight,newton,notrustupdate,small,maxcyc=100,maxstep=100) freq=(numerical,step=10) IOP(2/16=3) nosymm";// added 6/10/09 for very troublesome RRMZRNPRCUANER-UHFFFAOYAQ (InChI=1/C5H7/c1-3-5-4-2/h3H,1-2H3) case...there were troubles with negative frequencies, where I don't think they should have been; step size of numerical frequency was adjusted to give positive result; accuracy of result is questionable; it is possible that not all of these keywords are needed; note that for this and other nearly free rotor cases, I think heat capacity will be overestimated by R/2 (R vs. R/2) (but this is a separate issue)
        else if(attemptNumber==14) inpKeyStr+="# pm3 opt=(tight,gdiis,small,maxcyc=200,maxstep=100) freq=numerical IOP(2/16=3) nosymm";// added 6/22/09 for troublesome QDERTVAGQZYPHT-UHFFFAOYAHmult3(InChI=1/C6H14O4Si/c1-4-8-11(7,9-5-2)10-6-3/h4H,5-6H2,1-3H3/mult3); key aspects appear to be tight (rather than verytight) convergence criteria, no calculation of frequencies during optimization, use of numerical frequencies, and probably also the use of opt=small
        //gmagoon 7/9/09: commented out since although this produces a "reasonable" result for the problematic case, there is a large amount of spin contamination, apparently introducing 70+ kcal/mol of instability else if(attemptNumber==12) inpKeyStr+="# pm3 opt=(verytight,gdiis,small) freq=numerical IOP(2/16=3) IOP(4/21=200)";//7/9/09: similar to current number 9 with keyword small; this addresses case of VCSJVABXVCFDRA-UHFFFAOYAI (InChI=1/C8H19O5Si/c1-5-10-8(4)13-14(9,11-6-2)12-7-3/h8H,5-7H2,1-4H3)
        else if(attemptNumber==15) inpKeyStr+="# pm3 opt=(verytight,gdiis,calcall) IOP(2/16=3)";//used for troublesome C5H7J case; note that before fixing, I got errors like the following: "Incomplete coordinate system.  Try restarting with Geom=Check Guess=Read Opt=(ReadFC,NewRedundant) Incomplete coordinate system. Error termination via Lnk1e in l103.exe"; we could try to restart, but it is probably preferrable to have each keyword combination standalone; another keyword that may be helpful if additional problematic cases are encountered is opt=small; 6/9/09 note: originally, this had # pm3 opt=(verytight,gdiis,calcall) freq IOP(2/16=3)" (with freq keyword), but I discovered that in this case, there are two thermochemistry sections and cclib parses frequencies twice, giving twice the number of desired frequencies and hence produces incorrect thermo; this turned up on C5H6JJ isomer
        //gmagoon 7/3/09: it is probably best to retire this keyword combination in light of the similar combination below //else if(attemptNumber==6) inpKeyStr+="# pm3 opt=(verytight,gdiis,calcall,small) IOP(2/16=3) IOP(4/21=2)";//6/10/09: worked for OJZYSFFHCAPVGA-UHFFFAOYAK (InChI=1/C5H7/c1-3-5-4-2/h1,4H2,2H3) case; IOP(4/21) keyword was key
        else if(attemptNumber==16) inpKeyStr+="# pm3 opt=(verytight,gdiis,calcall,small,maxcyc=200) IOP(2/16=3) IOP(4/21=2) nosymm";//6/29/09: worked for troublesome ketene case: CCGKOQOJPYTBIH-UHFFFAOYAO (InChI=1/C2H2O/c1-2-3/h1H2) (could just increase number of iterations for similar keyword combination above (#6 at the time of this writing), allowing symmetry, but nosymm seemed to reduce # of iterations; I think one of nosymm or higher number of iterations would allow the similar keyword combination to converge; both are included here for robustness)
        else if(attemptNumber==17) inpKeyStr+="# pm3 opt=(verytight,gdiis,calcall,small) IOP(2/16=3) nosymm";//7/1/09: added for case of ZWMVZWMBTVHPBS-UHFFFAOYAEmult3 (InChI=1/C4H4O2/c1-3-5-6-4-2/h1-2H2/mult3)
        else if(attemptNumber==18) inpKeyStr+="# pm3 opt=(calcall,small,maxcyc=100) IOP(2/16=3)"; //6/10/09: used to address troublesome FILUFGAZMJGNEN-UHFFFAOYAImult3 case (InChI=1/C5H6/c1-3-5-4-2/h3H,1H2,2H3/mult3)
        else throw new Exception();//this point should not be reached: attemptNumber out of the supported range
        // if(multiplicity == 3) inpKeyStr+= " guess=mix"; //assumed to be triplet biradical...use guess=mix to perform unrestricted ; nevermind...I think this would only be for singlet biradicals based on http://www.gaussian.com/g_tech/g_ur/k_guess.htm
        if (usePolar) inpKeyStr += " polar";
        FileWriter fw = new FileWriter(inpKey);
        fw.write(inpKeyStr);
        fw.close();
    }
    catch(Exception e){
        String err = "Error in writing inputkeywords.txt \n";
        err += e.toString();
        e.printStackTrace();
        System.exit(0);
    }
    //call the OpenBabel process (note that this requires OpenBabel environment variable)
    try{
        File runningdir=new File(directory);
        String command=null;
        if (System.getProperty("os.name").toLowerCase().contains("windows")){//special windows case: quote paths since they may contain spaces
            command = "babel -imol \""+ p_molfile.getPath()+ "\" -ogjf \"" + name+".gjf\" -xf inputkeywords.txt --title \""+InChIaug+"\"";
        }
        else{
            command = "babel -imol "+ p_molfile.getPath()+ " -ogjf " + name+".gjf -xf inputkeywords.txt --title "+InChIaug;
        }
        Process babelProc = Runtime.getRuntime().exec(command, null, runningdir);
        //read in output (draining stdout prevents the child process from blocking on a full pipe)
        InputStream is = babelProc.getInputStream();
        InputStreamReader isr = new InputStreamReader(is);
        BufferedReader br = new BufferedReader(isr);
        String line=null;
        while ( (line = br.readLine()) != null) {
            //do nothing
        }
        int exitValue = babelProc.waitFor();
    }
    catch(Exception e){
        String err = "Error in running OpenBabel MOL to GJF process \n";
        err += e.toString();
        e.printStackTrace();
        System.exit(0);
    }
    return maxAttemptNumber;
}
//creates MM4 input file and MM4 batch file in directory with filenames name.mm4 and name.com, respectively using MoleCoor
//attemptNumber determines which keywords to try
//the function returns the maximum number of keywords that can be attempted; this will be the same throughout the evaluation of the code, so it may be more appropriate to have this as a "constant" attribute of some sort
//Side effects: writes directory/name.com (csh batch script) and runs MoleCoor to produce directory/name.mm4; exits the JVM on any failure.
public int createMM4Input(String name, String directory, molFile p_molfile, int attemptNumber, String InChIaug, int multiplicity){
    //Step 1: write the script for MM4 batch operation
    //	Example script file:
    //	#! /bin/csh
    //	cp testEthylene.mm4 CPD.MM4
    //	cp $MM4_DATDIR/BLANK.DAT PARA.MM4
    //	cp $MM4_DATDIR/CONST.MM4 .
    //	$MM4_EXEDIR/mm4 <<%
    //	1
    //	2
    //	0
    //	%
    //	mv TAPE4.MM4 testEthyleneBatch.out
    //	mv TAPE9.MM4 testEthyleneBatch.opt
    //	exit
    int scriptAttempts = 2;//the number of script permutations available; update as additional options are added
    int maxAttemptNumber=2*scriptAttempts;//we will try a second time with crude coordinates if the UFF refined coordinates do not work
    try{
        //create batch file with executable permissions: cf. http://java.sun.com/docs/books/tutorial/essential/io/fileAttr.html#posix
        File inpKey = new File(directory+"/"+name+".com");
        String inpKeyStr="#! /bin/csh\n";
        inpKeyStr+="cp "+name+".mm4 CPD.MM4\n";
        inpKeyStr+="cp $MM4_DATDIR/BLANK.DAT PARA.MM4\n";
        inpKeyStr+="cp $MM4_DATDIR/CONST.MM4 .\n";
        inpKeyStr+="$MM4_EXEDIR/mm4 <<%\n";
        inpKeyStr+="1\n";//read from first line of .mm4 file
        //the MM4 menu option chosen depends on attemptNumber (mod scriptAttempts) and on whether CanTherm needs a force matrix
        if (!useCanTherm){
            if(attemptNumber%scriptAttempts==1) inpKeyStr+="2\n"; //Block-Diagonal Method then Full-Matrix Method
            else if(attemptNumber%scriptAttempts==2) inpKeyStr+="3\n"; //Full-Matrix Method only
            else throw new Exception();//this point should not be reached
            inpKeyStr+="\n";//<RETURN> for temperature
            inpKeyStr+="4\n";//unofficial option 4 for vibrational eigenvector printout to generate Cartesian force constant matrix in FORCE.MAT file
            inpKeyStr+="0\n";//no vibrational amplitude printout
        }
        else{//CanTherm case: write the FORCE.MAT file
            if(attemptNumber%scriptAttempts==1) inpKeyStr+="4\n"; //Block-Diagonal Method then Full-Matrix Method
            else if(attemptNumber%scriptAttempts==2) inpKeyStr+="5\n"; //Full-Matrix Method only
            else throw new Exception();//this point should not be reached
        }
        inpKeyStr+="0\n";//terminate the job
        inpKeyStr+="%\n";
        inpKeyStr+="mv TAPE4.MM4 "+name+".mm4out\n";
        inpKeyStr+="mv TAPE9.MM4 "+name+".mm4opt\n";
        if(useCanTherm){
            inpKeyStr+="mv FORCE.MAT "+name+".fmat\n";
        }
        inpKeyStr+="exit\n";
        FileWriter fw = new FileWriter(inpKey);
        fw.write(inpKeyStr);
        fw.close();
    }
    catch(Exception e){
        String err = "Error in writing MM4 script file\n";
        err += e.toString();
        e.printStackTrace();
        System.exit(0);
    }
    //Step 2: call the MoleCoor process to create the MM4 input file from the mole file
    try{
        File runningdir=new File(directory);
        //this will only be run on Linux so we don't have to worry about Linux vs. Windows issues
        String command = "python "+System.getenv("RMG")+"/scripts/MM4InputFileMaker.py ";
        //first argument: input file path; for the first attempts, we will use UFF refined coordinates; if that doesn't work, (e.g. case of cyclopropene, InChI=1/C3H4/c1-2-3-1/h1-2H,3H2 OOXWYYGXTJLWHA-UHFFFAOYAJ) we will try crude coordinates (.cmol suffix)
        if(attemptNumber<=scriptAttempts){
            command=command.concat(p_molfile.getPath() + " ");
        }
        else{
            command=command.concat(p_molfile.getCrudePath() + " ");
        }
        //second argument: output path
        String inpfilepath=directory+"/"+name+".mm4";
        command=command.concat(inpfilepath+ " ");
        //third argument: molecule name (the augmented InChI)
        command=command.concat(InChIaug+ " ");
        //fourth argument: PYTHONPATH
        command=command.concat(System.getenv("RMG")+"/source/MoleCoor");//this will pass $RMG/source/MoleCoor to the script (in order to get the appropriate path for importing)
        Process molecoorProc = Runtime.getRuntime().exec(command, null, runningdir);
        //read in output (draining stdout prevents the child process from blocking on a full pipe)
        InputStream is = molecoorProc.getInputStream();
        InputStreamReader isr = new InputStreamReader(is);
        BufferedReader br = new BufferedReader(isr);
        String line=null;
        while ( (line = br.readLine()) != null) {
            //do nothing
        }
        int exitValue = molecoorProc.waitFor();
    }
    catch(Exception e){
        String err = "Error in running MoleCoor MOL to .MM4 process \n";
        err += e.toString();
        e.printStackTrace();
        System.exit(0);
    }
    return maxAttemptNumber;
}
//returns the extra Mopac keywords to use for radical species, given the spin multiplicity (radical number + 1)
//valid multiplicities are 1 (empty string) through 9; anything else prints a message and terminates the program
public String getMopacRadicalString(int multiplicity){
    switch(multiplicity){
        case 1: return "";
        case 2: return "uhf doublet";
        case 3: return "uhf triplet";
        case 4: return "uhf quartet";
        case 5: return "uhf quintet";
        case 6: return "uhf sextet";
        case 7: return "uhf septet";
        case 8: return "uhf octet";
        case 9: return "uhf nonet";
        default:
            System.out.println("Invalid multiplicity encountered: "+multiplicity);
            System.exit(0);
    }
    //unreachable (System.exit above), but required to satisfy the compiler
    return "this should not be returned: error associated with getMopacRadicalString()";
}
//creates MOPAC PM3 input file in directory with filename name.mop by using OpenBabel to convert p_molfile
//attemptNumber determines which keywords to try
//the function returns the maximum number of keywords that can be attempted; this will be the same throughout the evaluation of the code, so it may be more appropriate to have this as a "constant" attribute of some sort
//unlike createGaussianPM3 input, this requires an additional input specifying the spin multiplicity (radical number + 1) for the species
public int createMopacPM3Input(String name, String directory, molFile p_molfile, int attemptNumber, String InChIaug, int multiplicity){
//write a file with the input keywords
int maxAttemptNumber=5;//update this if additional keyword options are added or removed
String inpKeyStrBoth = "";//this string will be written at both the top (for optimization) and the bottom (for thermo/force calc)
String inpKeyStrTop = "";//this string will be written only at the top
String inpKeyStrBottom = "";//this string will be written at the bottom
String radicalString = getMopacRadicalString(multiplicity);
try{
// File inpKey=new File(directory+"/inputkeywords.txt");
// String inpKeyStr="%chk="+directory+"\\RMGrunCHKfile.chk\n";
// inpKeyStr+="%mem=6MW\n";
// inpKeyStr+="%nproc=1\n";
if(attemptNumber==1){
inpKeyStrBoth="pm3 "+radicalString;
inpKeyStrTop=" precise nosym";
inpKeyStrBottom="oldgeo thermo nosym precise ";//7/10/09: based on a quick review of recent results, keyword combo #1 rarely works, and when it did (CJAINEUZFLXGFA-UHFFFAOYAUmult3 (InChI=1/C8H16O5Si/c1-4-11-14(9,12-5-2)13-8-6-10-7(8)3/h7-8H,3-6H2,1-2H3/mult3)), the grad. norm on the force step was about 1.7 (too large); I manually removed this result and re-ran...the entropy was increased by nearly 20 cal/mol-K...perhaps we should add a check for the "WARNING" that MOPAC prints out when the gradient is high; 7/22/09: for the case of FUGDBSHZYPTWLG-UHFFFAOYADmult3 (InChI=1/C5H8/c1-4-3-5(4)2/h4-5H,1-3H2/mult3), adding nosym seemed to resolve 1. large discrepancies from Gaussian and 2. negative frequencies in mass-weighted coordinates and possibly related issue in discrepancies between regular and mass-weighted coordinate frequencies
}
else if(attemptNumber==2){//7/9/09: used for VCSJVABXVCFDRA-UHFFFAOYAI (InChI=1/C8H19O5Si/c1-5-10-8(4)13-14(9,11-6-2)12-7-3/h8H,5-7H2,1-4H3); all existing Gaussian keywords also failed; the Gaussian result was also rectified, but the resulting molecule was over 70 kcal/mol less stable, probably due to a large amount of spin contamination (~1.75 in fixed Gaussian result vs. 0.754 for MOPAC)
inpKeyStrBoth="pm3 "+radicalString;
inpKeyStrTop=" precise nosym gnorm=0.0 nonr";
inpKeyStrBottom="oldgeo thermo nosym precise ";
}
else if(attemptNumber==3){//7/8/09: used for ADMPQLGIEMRGAT-UHFFFAOYAUmult3 (InChI=1/C6H14O5Si/c1-4-9-12(8,10-5-2)11-6(3)7/h6-7H,3-5H2,1-2H3/mult3); all existing Gaussian keywords also failed; however, the Gaussian result was also rectified, and the resulting conformation was about 1.0 kcal/mol more stable than the one resulting from this, so fixed Gaussian result was manually copied to QMFiles folder
inpKeyStrBoth="pm3 "+radicalString;
inpKeyStrTop=" precise nosym gnorm=0.0";
inpKeyStrBottom="oldgeo thermo nosym precise "; //precise appeared to be necessary for the problematic case (to avoid negative frequencies);
}
else if(attemptNumber==4){//7/8/09: used for GYFVJYRUZAKGFA-UHFFFAOYALmult3 (InChI=1/C6H14O6Si/c1-3-10-13(8,11-4-2)12-6-5-9-7/h6-7H,3-5H2,1-2H3/mult3) case (negative frequency issues in MOPAC) (also, none of the existing Gaussian combinations worked with it); note that the Gaussian result appears to be a different conformation as it is about 0.85 kcal/mol more stable, so the Gaussian result was manually copied to QMFiles directory; note that the MOPAC output included a very low frequency (4-5 cm^-1)
inpKeyStrBoth="pm3 "+radicalString;
inpKeyStrTop=" precise nosym gnorm=0.0 bfgs";
inpKeyStrBottom="oldgeo thermo nosym precise "; //precise appeared to be necessary for the problematic case (to avoid negative frequencies)
}
// else if(attemptNumber==5){
// inpKeyStrBoth="pm3 "+radicalString;
// inpKeyStrTop=" precise nosym gnorm=0.0 ddmin=0.0";
// inpKeyStrBottom="oldgeo thermo nosym precise ";
// }
// else if(attemptNumber==6){
// inpKeyStrBoth="pm3 "+radicalString;
// inpKeyStrTop=" precise nosym gnorm=0.0 nonr ddmin=0.0";
// inpKeyStrBottom="oldgeo thermo nosym precise ";
// }
// else if(attemptNumber==7){
// inpKeyStrBoth="pm3 "+radicalString;
// inpKeyStrTop=" precise nosym bfgs gnorm=0.0 ddmin=0.0";
// inpKeyStrBottom="oldgeo thermo nosym precise ";
// }
else if(attemptNumber==5){//used for troublesome HGRZRPHFLAXXBT-UHFFFAOYAVmult3 (InChI=1/C3H2O4/c4-2(5)1-3(6)7/h1H2/mult3) case (negative frequency and large gradient issues)
inpKeyStrBoth="pm3 "+radicalString;
inpKeyStrTop=" precise nosym recalc=10 dmax=0.10 nonr cycles=2000 t=2000";
inpKeyStrBottom="oldgeo thermo nosym precise ";
}
// else if(attemptNumber==9){//used for troublesome CMARQPBQDRXBTN-UHFFFAOYAAmult3 (InChI=1/C3H2O4/c1-3(5)7-6-2-4/h1H2/mult3) case (negative frequency issues)
// inpKeyStrBoth="pm3 "+radicalString;
// inpKeyStrTop=" precise nosym recalc=1 dmax=0.05 gnorm=0.0 cycles=1000 t=1000";
// inpKeyStrBottom="oldgeo thermo nosym precise ";
// }
// else if(attemptNumber==10){//used for ATCYLHQLTOSVFK-UHFFFAOYAMmult4 (InChI=1/C4H5O5/c1-3(5)8-9-4(2)7-6/h6H,1-2H2/mult4) case (timeout issue; also, negative frequency issues); note that this is very similar to the keyword below, so we may want to consolidate
// inpKeyStrBoth="pm3 "+radicalString;
// inpKeyStrTop=" precise nosym recalc=1 dmax=0.05 gnorm=0.2 cycles=1000 t=1000";
// inpKeyStrBottom="oldgeo thermo nosym precise ";
// }
else throw new Exception();//this point should not be reached
// FileWriter fw = new FileWriter(inpKey);
// fw.write(inpKeyStr);
// fw.close();
}
catch(Exception e){
String err = "Error in writing inputkeywords.txt \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
String polarString = "";
if (usePolar){
if(multiplicity == 1) polarString = System.getProperty("line.separator") + System.getProperty("line.separator") + System.getProperty("line.separator")+ "oldgeo polar nosym precise " + inpKeyStrBoth;
else polarString = System.getProperty("line.separator") + System.getProperty("line.separator") + System.getProperty("line.separator")+ "oldgeo static nosym precise " + inpKeyStrBoth;
}
//call the OpenBabel process (note that this requires OpenBabel environment variable)
try{
File runningdir=new File(directory);
String inpKeyStrTopCombined = inpKeyStrBoth + inpKeyStrTop;
String command = "babel -imol "+ p_molfile.getPath()+ " -omop " + name+".mop -xk \"" + inpKeyStrTopCombined + "\" --title \""+InChIaug+"\"";
Process babelProc = Runtime.getRuntime().exec(command, null, runningdir);
//read in output
InputStream is = babelProc.getInputStream();
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
String line=null;
while ( (line = br.readLine()) != null) {
//do nothing
}
int exitValue = babelProc.waitFor();
//append the final keywords to the end of the file just written
// File mopacInpFile = new File(directory+"/"+name+".mop");
FileWriter fw = new FileWriter(directory+"/"+name+".mop", true);//filewriter with append = true
fw.write(System.getProperty("line.separator") + inpKeyStrBottom + inpKeyStrBoth + polarString);//on Windows Vista, "\n" appears correctly in WordPad view, but not Notepad view (cf. http://forums.sun.com/thread.jspa?threadID=5386822)
fw.close();
}
catch(Exception e){
String err = "Error in running OpenBabel MOL to MOP process \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
return maxAttemptNumber;
}
//name and directory are the name and directory for the input (and output) file;
//input is assumed to be preexisting and have the .gjf suffix
//returns an integer indicating success or failure of the Gaussian calculation: 1 for success, 0 for failure;
public int runGaussian(String name, String directory){
int flag = 0;
int successFlag=0;
try{
String command = "g03 ";
command=command.concat(qmfolder+"/"+name+".gjf ");//specify the input file; space is important
command=command.concat(qmfolder+"/"+name+".log");//specify the output file
Process gaussianProc = Runtime.getRuntime().exec(command);
//check for errors and display the error if there is one
InputStream is = gaussianProc.getErrorStream();
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
String line=null;
while ( (line = br.readLine()) != null) {
line = line.trim();
System.err.println(line);
flag=1;
}
//if there was an error, indicate that an error was obtained
if(flag==1){
System.out.println("Gaussian process received error (see above) on " + name);
}
int exitValue = gaussianProc.waitFor();
}
catch(Exception e){
String err = "Error in running Gaussian process \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
//look in the output file to check for the successful termination of the Gaussian calculation
//failed jobs will contain the a line beginning with " Error termination" near the end of the file
int failureFlag=0;
String errorLine = "";//string to store the error
try{
FileReader in = new FileReader(directory+"/"+name+".log");
BufferedReader reader = new BufferedReader(in);
String line=reader.readLine();
while(line!=null){
if (line.startsWith(" Error termination ")){
failureFlag=1;
errorLine = line.trim();
System.out.println("*****Error in Gaussian log file: "+errorLine);//print the error (note that in general, I think two lines will be printed)
}
else if (line.startsWith(" ******")){//also look for imaginary frequencies
if (line.contains("imaginary frequencies")){
System.out.println("*****Imaginary freqencies found:");
failureFlag=1;
}
}
line=reader.readLine();
}
}
catch(Exception e){
String err = "Error in reading Gaussian log file \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
//if the failure flag is still 0, the process should have been successful
if (failureFlag==0) successFlag=1;
return successFlag;
}
//name and directory are the name and directory for the input (and output) file;
//input script is assumed to be preexisting and have the .com suffix
//returns an integer indicating success or failure of the calculation: 1 for success, 0 for failure
public int runMM4(String name, String directory){
int successFlag=0;
//int flag = 0;
try{
File runningDirectory = new File(qmfolder);
String command=name+".com";
File script = new File(qmfolder+command);
command = "./"+command;
script.setExecutable(true);
Process mm4Proc = Runtime.getRuntime().exec(command, null, runningDirectory);
//check for errors and display the error if there is one
// InputStream is = mm4Proc.getErrorStream();
// InputStreamReader isr = new InputStreamReader(is);
// BufferedReader br = new BufferedReader(isr);
// String line=null;
// while ( (line = br.readLine()) != null) {
// line = line.trim();
// if(!line.equals("STOP statement executed")){//string listed here seems to be typical
// System.err.println(line);
// flag=1;
// }
// }
InputStream is = mm4Proc.getInputStream();
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
String line=null;
while ( (line = br.readLine()) != null) {
//do nothing
}
//if there was an error, indicate that an error was obtained
// if(flag==1){
// System.out.println("MM4 process received error (see above) on " + name);
// }
int exitValue = mm4Proc.waitFor();
}
catch(Exception e){
String err = "Error in running MM4 process \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
//look in the output file to check for the successful termination of the MM4 calculation (cf. successfulMM4ResultExistsQ)
File file = new File(directory+"/"+name+".mm4out");
int failureFlag=1;//flag (1 or 0) indicating whether the MM4 job failed
int failureOverrideFlag=0;//flag (1 or 0) to override success as measured by failureFlag
if(file.exists()){//if the file exists, do further checks; otherwise, we will skip to final statement and return false
try{
FileReader in = new FileReader(file);
BufferedReader reader = new BufferedReader(in);
String line=reader.readLine();
while(line!=null){
String trimLine= line.trim();
if (trimLine.equals("STATISTICAL THERMODYNAMICS ANALYSIS")){
failureFlag = 0;
}
else if (trimLine.endsWith("imaginary frequencies,")){//read the number of imaginary frequencies and make sure it is zero
String[] split = trimLine.split("\\s+");
if (Integer.parseInt(split[3])>0){
System.out.println("*****Imaginary freqencies found:");
failureOverrideFlag=1;
}
}
else if (trimLine.contains(" 0.0 (fir )")){
if (useCanTherm){//zero frequencies are only acceptable when CanTherm is used
System.out.println("*****Warning: zero freqencies found (values lower than 7.7 cm^-1 are rounded to zero in MM4 output); CanTherm should hopefully correct this:");
}
else{
System.out.println("*****Zero freqencies found:");
failureOverrideFlag=1;
}
}
line=reader.readLine();
}
}
catch(Exception e){
String err = "Error in reading MM4 output file \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
}
//if the failure flag is still 0, the process should have been successful
if(failureOverrideFlag==1) failureFlag=1; //job will be considered a failure if there are imaginary frequencies or if job terminates to to excess time/cycles
//if the failure flag is 0 and there are no negative frequencies, the process should have been successful
if (failureFlag==0) successFlag=1;
return successFlag;
}
//name and directory are the name and directory for the input (and output) file;
//input is assumed to be preexisting and have the .mop suffix
//returns an integer indicating success or failure of the MOPAC calculation: 1 for success, 0 for failure;
//this function is based on the Gaussian analogue
public int runMOPAC(String name, String directory){
int flag = 0;
int successFlag=0;
try{
String command = System.getenv("MOPAC_LICENSE")+"MOPAC2009.exe ";
command=command.concat(directory+"/"+name+".mop ");//specify the input file; space is important
command=command.concat(directory+"/"+name+".out");//specify the output file
Process mopacProc = Runtime.getRuntime().exec(command);
//check for errors and display the error if there is one
InputStream is = mopacProc.getErrorStream();
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
String line=null;
while ( (line = br.readLine()) != null) {
line = line.trim();
System.err.println(line);
flag=1;
}
//if there was an error, indicate that an error was obtained
if(flag==1){
System.out.println("MOPAC process received error (see above) on " + name);
}
int exitValue = mopacProc.waitFor();
}
catch(Exception e){
String err = "Error in running MOPAC process \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
//look in the output file to check for the successful termination of the calculation (this is a trimmed down version of what appears in successfulMOPACResultExistsQ (it doesn't have the InChI check)
File file = new File(directory+"/"+name+".out");
int failureFlag=1;//flag (1 or 0) indicating whether the MOPAC job failed
int failureOverrideFlag=0;//flag (1 or 0) to override success as measured by failureFlag
if(file.exists()){//if the file exists, do further checks; otherwise, we will skip to final statement and return false
try{
FileReader in = new FileReader(file);
BufferedReader reader = new BufferedReader(in);
String line=reader.readLine();
while(line!=null){
String trimLine = line.trim();
if (trimLine.equals("DESCRIPTION OF VIBRATIONS")){//check for this line; if it is here, check for negative frequencies
//if(!MopacFileContainsNegativeFreqsQ(name, directory)) failureFlag=0;
failureFlag=0;
}
//negative frequencies notice example:
// NOTE: SYSTEM IS NOT A GROUND STATE, THEREFORE ZERO POINT
// ENERGY IS NOT MEANINGFULL. ZERO POINT ENERGY PRINTED
// DOES NOT INCLUDE THE 2 IMAGINARY FREQUENCIES
else if (trimLine.endsWith("IMAGINARY FREQUENCIES")){
System.out.println("*****Imaginary freqencies found:");
failureOverrideFlag=1;
}
else if (trimLine.equals("EXCESS NUMBER OF OPTIMIZATION CYCLES")){//exceeding max cycles error
failureOverrideFlag=1;
}
else if (trimLine.equals("NOT ENOUGH TIME FOR ANOTHER CYCLE")){//timeout error
failureOverrideFlag=1;
}
line=reader.readLine();
}
}
catch(Exception e){
String err = "Error in reading MOPAC output file \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
}
if(failureOverrideFlag==1) failureFlag=1; //job will be considered a failure if there are imaginary frequencies or if job terminates to to excess time/cycles
//if the failure flag is 0 and there are no negative frequencies, the process should have been successful
if (failureFlag==0) successFlag=1;
return successFlag;
}
//parse the results using cclib and return a ThermoData object; name and directory indicate the location of the Gaussian .log file
//may want to split this into several functions
public ThermoData parseGaussianPM3(String name, String directory, ChemGraph p_chemGraph){
// //parse the Gaussian file using cclib
// int natoms = 0; //number of atoms from Gaussian file; in principle, this should agree with number of chemGraph atoms
// ArrayList atomicNumber = new ArrayList(); //vector of atomic numbers (integers) (apparently Vector is thread-safe; cf. http://answers.yahoo.com/question/index?qid=20081214065127AArZDT3; ...should I be using this instead?)
// ArrayList x_coor = new ArrayList(); //vectors of x-, y-, and z-coordinates (doubles) (Angstroms) (in order corresponding to above atomic numbers)
// ArrayList y_coor = new ArrayList();
// ArrayList z_coor = new ArrayList();
// double energy = 0; //PM3 energy (Hf298) in Hartree
// double molmass = 0; //molecular mass in amu
// ArrayList freqs = new ArrayList(); //list of frequencies in units of cm^-1
// double rotCons_1 = 0;//rotational constants in (1/s)
// double rotCons_2 = 0;
// double rotCons_3 = 0;
// int gdStateDegen = p_chemGraph.getRadicalNumber()+1;//calculate ground state degeneracy from the number of radicals; this should give the same result as spin multiplicity in Gaussian input file (and output file), but we do not explicitly check this (we could use "mult" which cclib reads in if we wanted to do so); also, note that this is not always correct, as there can apparently be additional spatial degeneracy for non-symmetric linear molecules like OH radical (cf. http://cccbdb.nist.gov/thermo.asp)
// try{
// File runningdir=new File(directory);
// String command = "c:/Python25/python.exe c:/Python25/GaussianPM3ParsingScript.py ";//this should eventually be modified for added generality
// String logfilepath=directory+"/"+name+".log";
// command=command.concat(logfilepath);
// Process cclibProc = Runtime.getRuntime().exec(command, null, runningdir);
// //read the stdout of the process, which should contain the desired information in a particular format
// InputStream is = cclibProc.getInputStream();
// InputStreamReader isr = new InputStreamReader(is);
// BufferedReader br = new BufferedReader(isr);
// String line=null;
// //example output:
//// C:\Python25>python.exe GaussianPM3ParsingScript.py TEOS.out
//// 33
//// [ 6 6 8 14 8 6 6 8 6 6 8 6 6 1 1 1 1 1 1 1 1 1 1 1 1
//// 1 1 1 1 1 1 1 1]
//// [[ 2.049061 -0.210375 3.133106]
//// [ 1.654646 0.321749 1.762752]
//// [ 0.359284 -0.110429 1.471465]
//// [-0.201871 -0.013365 -0.12819 ]
//// [ 0.086307 1.504918 -0.82893 ]
//// [-0.559186 2.619928 -0.284003]
//// [-0.180246 3.839463 -1.113029]
//// [ 0.523347 -1.188305 -1.112765]
//// [ 1.857584 -1.018167 -1.495088]
//// [ 2.375559 -2.344392 -2.033403]
//// [-1.870397 -0.297297 -0.075427]
//// [-2.313824 -1.571765 0.300245]
//// [-3.83427 -1.535927 0.372171]
//// [ 1.360346 0.128852 3.917699]
//// [ 2.053945 -1.307678 3.160474]
//// [ 3.055397 0.133647 3.403037]
//// [ 1.677262 1.430072 1.750899]
//// [ 2.372265 -0.029237 0.985204]
//// [-0.245956 2.754188 0.771433]
//// [-1.656897 2.472855 -0.287156]
//// [-0.664186 4.739148 -0.712606]
//// [-0.489413 3.734366 -2.161038]
//// [ 0.903055 4.016867 -1.112198]
//// [ 1.919521 -0.229395 -2.269681]
//// [ 2.474031 -0.680069 -0.629949]
//// [ 2.344478 -3.136247 -1.273862]
//// [ 1.786854 -2.695974 -2.890647]
//// [ 3.41648 -2.242409 -2.365094]
//// [-1.884889 -1.858617 1.28054 ]
//// [-1.976206 -2.322432 -0.440995]
//// [-4.284706 -1.26469 -0.591463]
//// [-4.225999 -2.520759 0.656131]
//// [-4.193468 -0.809557 1.112677]]
//// -14.1664924726
//// [ 9.9615 18.102 27.0569 31.8459 39.0096 55.0091
//// 66.4992 80.4552 86.4912 123.3551 141.6058 155.5448
//// 159.4747 167.0013 178.5676 207.3738 237.3201 255.3487
//// 264.5649 292.867 309.4248 344.6503 434.8231 470.2074
//// 488.9717 749.1722 834.257 834.6594 837.7292 839.6352
//// 887.9767 892.9538 899.5374 992.1851 1020.6164 1020.8671
//// 1028.3897 1046.7945 1049.1768 1059.4704 1065.1505 1107.4001
//// 1108.1567 1109.0466 1112.6677 1122.7785 1124.4315 1128.4163
//// 1153.3438 1167.6705 1170.9627 1174.9613 1232.1826 1331.8459
//// 1335.3932 1335.8677 1343.9556 1371.37 1372.8127 1375.5428
//// 1396.0344 1402.4082 1402.7554 1403.2463 1403.396 1411.6946
//// 1412.2456 1412.3519 1414.5982 1415.3613 1415.5698 1415.7993
//// 1418.5409 2870.7446 2905.3132 2907.0361 2914.1662 2949.2646
//// 2965.825 2967.7667 2971.5223 3086.3849 3086.3878 3086.6448
//// 3086.687 3089.2274 3089.4105 3089.4743 3089.5841 3186.0753
//// 3186.1375 3186.3511 3186.365 ]
//// [ 0.52729 0.49992 0.42466]
////note: above example has since been updated to print molecular mass; also frequency and atomic number format has been updated
// String [] stringArray;
// natoms = Integer.parseInt(br.readLine());//read line 1: number of atoms
// stringArray = br.readLine().replace("[", "").replace("]","").trim().split(",\\s+");//read line 2: the atomic numbers (first removing braces)
// // line = br.readLine().replace("[", "").replace("]","");//read line 2: the atomic numbers (first removing braces)
// // StringTokenizer st = new StringTokenizer(line); //apprently the stringTokenizer class is deprecated, but I am having trouble getting the regular expressions to work properly
// for(int i=0; i < natoms; i++){
// // atomicNumber.add(i,Integer.parseInt(stringArray[i]));
// atomicNumber.add(i,Integer.parseInt(stringArray[i]));
// }
// for(int i=0; i < natoms; i++){
// stringArray = br.readLine().replace("[", "").replace("]","").trim().split("\\s+");//read line 3+: coordinates for atom i; used /s+ for split; using spaces with default limit of 0 was giving empty string
// x_coor.add(i,Double.parseDouble(stringArray[0]));
// y_coor.add(i,Double.parseDouble(stringArray[1]));
// z_coor.add(i,Double.parseDouble(stringArray[2]));
// }
// energy = Double.parseDouble(br.readLine());//read next line: energy
// molmass = Double.parseDouble(br.readLine());//read next line: molecular mass (in amu)
// if (natoms>1){//read additional info for non-monoatomic species
// stringArray = br.readLine().replace("[", "").replace("]","").trim().split(",\\s+");//read next line: frequencies
// for(int i=0; i < stringArray.length; i++){
// freqs.add(i,Double.parseDouble(stringArray[i]));
// }
// stringArray = br.readLine().replace("[", "").replace("]","").trim().split("\\s+");//read next line rotational constants (converting from GHz to Hz in the process)
// rotCons_1 = Double.parseDouble(stringArray[0])*1000000000;
// rotCons_2 = Double.parseDouble(stringArray[1])*1000000000;
// rotCons_3 = Double.parseDouble(stringArray[2])*1000000000;
// }
// while ( (line = br.readLine()) != null) {
// //do nothing (there shouldn't be any more information, but this is included to get all the output)
// }
// int exitValue = cclibProc.waitFor();
// }
// catch (Exception e) {
// String err = "Error in running ccLib Python process \n";
// err += e.toString();
// e.printStackTrace();
// System.exit(0);
// }
//
// ThermoData result = calculateThermoFromPM3Calc(natoms, atomicNumber, x_coor, y_coor, z_coor, energy, molmass, freqs, rotCons_1, rotCons_2, rotCons_3, gdStateDegen);
// System.out.println("Thermo for " + name + ": "+ result.toString());//print result, at least for debugging purposes
// return result;
String command = null;
if (System.getProperty("os.name").toLowerCase().contains("windows")){//special windows case where paths can have spaces and are allowed to be surrounded by quotes
command = "python \""+ System.getProperty("RMG.workingDirectory")+"/scripts/GaussianPM3ParsingScript.py\" ";
String logfilepath="\""+directory+"/"+name+".log\"";
command=command.concat(logfilepath);
command=command.concat(" \""+ System.getenv("RMG")+"/source\"");//this will pass $RMG/source to the script (in order to get the appropriate path for importing
}
else{//non-Windows case
command = "python "+ System.getProperty("RMG.workingDirectory")+"/scripts/GaussianPM3ParsingScript.py ";
String logfilepath=directory+"/"+name+".log";
command=command.concat(logfilepath);
command=command.concat(" "+ System.getenv("RMG")+"/source");//this will pass $RMG/source to the script (in order to get the appropriate path for importing
}
ThermoData result = getPM3MM4ThermoDataUsingCCLib(name, directory, p_chemGraph, command);
System.out.println("Thermo for " + name + ": "+ result.toString());//print result, at least for debugging purposes
return result;
}
//parse the results using cclib and return a ThermoData object; name and directory indicate the location of the MM4 .mm4out file
public ThermoData parseMM4(String name, String directory, ChemGraph p_chemGraph){
String command = "python "+System.getProperty("RMG.workingDirectory")+"/scripts/MM4ParsingScript.py ";
String logfilepath=directory+"/"+name+".mm4out";
command=command.concat(logfilepath);
command=command.concat(" "+ System.getenv("RMG")+"/source");//this will pass $RMG/source to the script (in order to get the appropriate path for importing
ThermoData result = getPM3MM4ThermoDataUsingCCLib(name, directory, p_chemGraph, command);
System.out.println("Thermo for " + name + ": "+ result.toString());//print result, at least for debugging purposes
return result;
}
//parse the results using cclib and CanTherm and return a ThermoData object; name and directory indicate the location of the MM4 .mm4out file
public ThermoData parseMM4withForceMat(String name, String directory, ChemGraph p_chemGraph){
//1. parse the MM4 file with cclib to get atomic number vector and geometry
String command = "python "+System.getProperty("RMG.workingDirectory")+"/scripts/MM4ParsingScript.py ";
String logfilepath=directory+"/"+name+".mm4out";
command=command.concat(logfilepath);
command=command.concat(" "+ System.getenv("RMG")+"/source");//this will pass $RMG/source to the script (in order to get the appropriate path for importing
///////////beginning of block taken from the bulk of getPM3MM4ThermoDataUsingCCLib////////////
//parse the file using cclib
int natoms = 0; //number of atoms from Mopac file; in principle, this should agree with number of chemGraph atoms
ArrayList atomicNumber = new ArrayList(); //vector of atomic numbers (integers) (apparently Vector is thread-safe; cf. http://answers.yahoo.com/question/index?qid=20081214065127AArZDT3; ...should I be using this instead?)
ArrayList x_coor = new ArrayList(); //vectors of x-, y-, and z-coordinates (doubles) (Angstroms) (in order corresponding to above atomic numbers)
ArrayList y_coor = new ArrayList();
ArrayList z_coor = new ArrayList();
double energy = 0; // energy (Hf298) in Hartree
double molmass = 0; //molecular mass in amu
ArrayList freqs = new ArrayList(); //list of frequencies in units of cm^-1
double rotCons_1 = 0;//rotational constants in (1/s)
double rotCons_2 = 0;
double rotCons_3 = 0;
int gdStateDegen = p_chemGraph.getRadicalNumber()+1;//calculate ground state degeneracy from the number of radicals; this should give the same result as spin multiplicity in Gaussian input file (and output file), but we do not explicitly check this (we could use "mult" which cclib reads in if we wanted to do so); also, note that this is not always correct, as there can apparently be additional spatial degeneracy for non-symmetric linear molecules like OH radical (cf. http://cccbdb.nist.gov/thermo.asp)
try{
Process cclibProc = Runtime.getRuntime().exec(command);
//read the stdout of the process, which should contain the desired information in a particular format
InputStream is = cclibProc.getInputStream();
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
String line=null;
//example output:
// C:\Python25>python.exe GaussianPM3ParsingScript.py TEOS.out
// 33
// [ 6 6 8 14 8 6 6 8 6 6 8 6 6 1 1 1 1 1 1 1 1 1 1 1 1
// 1 1 1 1 1 1 1 1]
// [[ 2.049061 -0.210375 3.133106]
// [ 1.654646 0.321749 1.762752]
// [ 0.359284 -0.110429 1.471465]
// [-0.201871 -0.013365 -0.12819 ]
// [ 0.086307 1.504918 -0.82893 ]
// [-0.559186 2.619928 -0.284003]
// [-0.180246 3.839463 -1.113029]
// [ 0.523347 -1.188305 -1.112765]
// [ 1.857584 -1.018167 -1.495088]
// [ 2.375559 -2.344392 -2.033403]
// [-1.870397 -0.297297 -0.075427]
// [-2.313824 -1.571765 0.300245]
// [-3.83427 -1.535927 0.372171]
// [ 1.360346 0.128852 3.917699]
// [ 2.053945 -1.307678 3.160474]
// [ 3.055397 0.133647 3.403037]
// [ 1.677262 1.430072 1.750899]
// [ 2.372265 -0.029237 0.985204]
// [-0.245956 2.754188 0.771433]
// [-1.656897 2.472855 -0.287156]
// [-0.664186 4.739148 -0.712606]
// [-0.489413 3.734366 -2.161038]
// [ 0.903055 4.016867 -1.112198]
// [ 1.919521 -0.229395 -2.269681]
// [ 2.474031 -0.680069 -0.629949]
// [ 2.344478 -3.136247 -1.273862]
// [ 1.786854 -2.695974 -2.890647]
// [ 3.41648 -2.242409 -2.365094]
// [-1.884889 -1.858617 1.28054 ]
// [-1.976206 -2.322432 -0.440995]
// [-4.284706 -1.26469 -0.591463]
// [-4.225999 -2.520759 0.656131]
// [-4.193468 -0.809557 1.112677]]
// -14.1664924726
// [ 9.9615 18.102 27.0569 31.8459 39.0096 55.0091
// 66.4992 80.4552 86.4912 123.3551 141.6058 155.5448
// 159.4747 167.0013 178.5676 207.3738 237.3201 255.3487
// 264.5649 292.867 309.4248 344.6503 434.8231 470.2074
// 488.9717 749.1722 834.257 834.6594 837.7292 839.6352
// 887.9767 892.9538 899.5374 992.1851 1020.6164 1020.8671
// 1028.3897 1046.7945 1049.1768 1059.4704 1065.1505 1107.4001
// 1108.1567 1109.0466 1112.6677 1122.7785 1124.4315 1128.4163
// 1153.3438 1167.6705 1170.9627 1174.9613 1232.1826 1331.8459
// 1335.3932 1335.8677 1343.9556 1371.37 1372.8127 1375.5428
// 1396.0344 1402.4082 1402.7554 1403.2463 1403.396 1411.6946
// 1412.2456 1412.3519 1414.5982 1415.3613 1415.5698 1415.7993
// 1418.5409 2870.7446 2905.3132 2907.0361 2914.1662 2949.2646
// 2965.825 2967.7667 2971.5223 3086.3849 3086.3878 3086.6448
// 3086.687 3089.2274 3089.4105 3089.4743 3089.5841 3186.0753
// 3186.1375 3186.3511 3186.365 ]
// [ 0.52729 0.49992 0.42466]
//note: above example has since been updated to print molecular mass; also frequency and atomic number format has been updated
String [] stringArray;
natoms = Integer.parseInt(br.readLine());//read line 1: number of atoms
stringArray = br.readLine().replace("[", "").replace("]","").trim().split(",\\s+");//read line 2: the atomic numbers (first removing braces)
// line = br.readLine().replace("[", "").replace("]","");//read line 2: the atomic numbers (first removing braces)
// StringTokenizer st = new StringTokenizer(line); //apprently the stringTokenizer class is deprecated, but I am having trouble getting the regular expressions to work properly
for(int i=0; i < natoms; i++){
// atomicNumber.add(i,Integer.parseInt(stringArray[i]));
atomicNumber.add(i,Integer.parseInt(stringArray[i]));
}
for(int i=0; i < natoms; i++){
stringArray = br.readLine().replace("[", "").replace("]","").trim().split("\\s+");//read line 3+: coordinates for atom i; used /s+ for split; using spaces with default limit of 0 was giving empty string
x_coor.add(i,Double.parseDouble(stringArray[0]));
y_coor.add(i,Double.parseDouble(stringArray[1]));
z_coor.add(i,Double.parseDouble(stringArray[2]));
}
energy = Double.parseDouble(br.readLine());//read next line: energy
molmass = Double.parseDouble(br.readLine());//read next line: molecular mass (in amu)
if (natoms>1){//read additional info for non-monoatomic species
stringArray = br.readLine().replace("[", "").replace("]","").trim().split(",\\s+");//read next line: frequencies
for(int i=0; i < stringArray.length; i++){
freqs.add(i,Double.parseDouble(stringArray[i]));
}
stringArray = br.readLine().replace("[", "").replace("]","").trim().split("\\s+");//read next line rotational constants (converting from GHz to Hz in the process)
rotCons_1 = Double.parseDouble(stringArray[0])*1000000000;
rotCons_2 = Double.parseDouble(stringArray[1])*1000000000;
rotCons_3 = Double.parseDouble(stringArray[2])*1000000000;
}
while ( (line = br.readLine()) != null) {
//do nothing (there shouldn't be any more information, but this is included to get all the output)
}
int exitValue = cclibProc.waitFor();
}
catch (Exception e) {
String err = "Error in running ccLib Python process \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
///////////end of block taken from the bulk of getPM3MM4ThermoDataUsingCCLib////////////
//2. compute H0; note that we will pass H0 to CanTherm by H0=H298(harmonicMM4)-(H298-H0)harmonicMM4, where harmonicMM4 values come from cclib parsing and since it is enthalpy, it should not be NaN due to zero frequencies
//*** to be written
//3. write CanTherm input file
String canInp = "ThermoMM4\n";//***special MM4 treatment in CanTherm;
canInp += "Trange 300 100 13\n";//temperatures from 300 to 1500 in increments of 100
canInp += "Scale: 1.0\n";//scale factor of 1
canInp += "Mol 1\n";
if(p_chemGraph.getAtomNumber()==1) canInp += "ATOM\n";
else if (p_chemGraph.isLinear()) canInp+="LINEAR\n";
else canInp+="NONLINEAR\n";
canInp += "GEOM File " + name+".mm4ou\n";//geometry file; ***special MM4 treatment in CanTherm; another option would be to use mm4opt file, but CanTherm code would need to be modified accordingly
canInp += "FORCEC File "+name+".fmat\n";//force constant file; ***special MM4 treatment in CanTherm
canInp += "ENERGY "+ energy +" MM4\n";//***special MM4 treatment in CanTherm
canInp+="EXTSYM 1\n";//use external symmetry of 1; it will be corrected for below
canInp+="NELEC 1\n";//multiplicity = 1; all cases we consider will be non-radicals
if(!useHindRot) canInp += "ROTORS 0\n";//do not consider hindered rotors
else{
//this section still needs to be written
canInp+="";
}
try{
File canFile=new File(directory+"/"+name+".can");
FileWriter fw = new FileWriter(canFile);
fw.write(canInp);
fw.close();
}
catch(Exception e){
String err = "Error in writing CanTherm input \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
//4. call CanTherm
try{
File runningDirectory = new File(qmfolder);
String canCommand="python " + System.getenv("RMG")+"/source/CanTherm/CanTherm.py"+name+".can";
Process canProc = Runtime.getRuntime().exec(canCommand, null, runningDirectory);
InputStream is = canProc.getInputStream();
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
String line=null;
while ( (line = br.readLine()) != null) {
//do nothing
}
int exitValue = canProc.waitFor();
}
catch(Exception e){
String err = "Error in running CanTherm process \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
//5. read in CanTherm output
try{
File outFile=new File(directory+"/cantherm.out");
FileReader in = new FileReader(outFile);
BufferedReader reader = new BufferedReader(in);
String line=reader.readLine();
while(line!=null){
line=reader.readLine();//****needs to be updated once I figure out what output looks like
}
}
catch(Exception e){
String err = "Error in reading CanTherm output \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
//6. correct the output for symmetry number (everything has been assumed to be one) : useHindRot=true case: use ChemGraph symmetry number; otherwise, we use SYMMETRY
//*** to be written
if(!useHindRot){
}
else{
}
ThermoData result = getPM3MM4ThermoDataUsingCCLib(name, directory, p_chemGraph, command);
System.out.println("Thermo for " + name + ": "+ result.toString());//print result, at least for debugging purposes
return result;
}
//parse the results using cclib and return a ThermoData object; name and directory indicate the location of the MOPAC .out file
public ThermoData parseMopacPM3(String name, String directory, ChemGraph p_chemGraph){
String command=null;
if (System.getProperty("os.name").toLowerCase().contains("windows")){//special windows case where paths can have spaces and are allowed to be surrounded by quotes
command = "python \""+System.getProperty("RMG.workingDirectory")+"/scripts/MopacPM3ParsingScript.py\" ";
String logfilepath="\""+directory+"/"+name+".out\"";
command=command.concat(logfilepath);
command=command.concat(" \""+ System.getenv("RMG")+"/source\"");//this will pass $RMG/source to the script (in order to get the appropriate path for importing
}
else{//non-Windows case
command = "python "+System.getProperty("RMG.workingDirectory")+"/scripts/MopacPM3ParsingScript.py ";
String logfilepath=directory+"/"+name+".out";
command=command.concat(logfilepath);
command=command.concat(" "+ System.getenv("RMG")+"/source");//this will pass $RMG/source to the script (in order to get the appropriate path for importing
}
ThermoData result = getPM3MM4ThermoDataUsingCCLib(name, directory, p_chemGraph, command);
System.out.println("Thermo for " + name + ": "+ result.toString());//print result, at least for debugging purposes
return result;
}
//separated from parseMopacPM3, since the function was originally based off of parseGaussianPM3 and was very similar (differences being command and logfilepath variables);
public ThermoData getPM3MM4ThermoDataUsingCCLib(String name, String directory, ChemGraph p_chemGraph, String command){
//parse the Mopac file using cclib
int natoms = 0; //number of atoms from Mopac file; in principle, this should agree with number of chemGraph atoms
ArrayList atomicNumber = new ArrayList(); //vector of atomic numbers (integers) (apparently Vector is thread-safe; cf. http://answers.yahoo.com/question/index?qid=20081214065127AArZDT3; ...should I be using this instead?)
ArrayList x_coor = new ArrayList(); //vectors of x-, y-, and z-coordinates (doubles) (Angstroms) (in order corresponding to above atomic numbers)
ArrayList y_coor = new ArrayList();
ArrayList z_coor = new ArrayList();
double energy = 0; //PM3 energy (Hf298) in Hartree (***note: in the case of MOPAC, the MOPAC file will contain in units of kcal/mol, but modified ccLib will return in Hartree)
double molmass = 0; //molecular mass in amu
ArrayList freqs = new ArrayList(); //list of frequencies in units of cm^-1
double rotCons_1 = 0;//rotational constants in (1/s)
double rotCons_2 = 0;
double rotCons_3 = 0;
int gdStateDegen = p_chemGraph.getRadicalNumber()+1;//calculate ground state degeneracy from the number of radicals; this should give the same result as spin multiplicity in Gaussian input file (and output file), but we do not explicitly check this (we could use "mult" which cclib reads in if we wanted to do so); also, note that this is not always correct, as there can apparently be additional spatial degeneracy for non-symmetric linear molecules like OH radical (cf. http://cccbdb.nist.gov/thermo.asp)
try{
Process cclibProc = Runtime.getRuntime().exec(command);
//read the stdout of the process, which should contain the desired information in a particular format
InputStream is = cclibProc.getInputStream();
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
String line=null;
//example output:
// C:\Python25>python.exe GaussianPM3ParsingScript.py TEOS.out
// 33
// [ 6 6 8 14 8 6 6 8 6 6 8 6 6 1 1 1 1 1 1 1 1 1 1 1 1
// 1 1 1 1 1 1 1 1]
// [[ 2.049061 -0.210375 3.133106]
// [ 1.654646 0.321749 1.762752]
// [ 0.359284 -0.110429 1.471465]
// [-0.201871 -0.013365 -0.12819 ]
// [ 0.086307 1.504918 -0.82893 ]
// [-0.559186 2.619928 -0.284003]
// [-0.180246 3.839463 -1.113029]
// [ 0.523347 -1.188305 -1.112765]
// [ 1.857584 -1.018167 -1.495088]
// [ 2.375559 -2.344392 -2.033403]
// [-1.870397 -0.297297 -0.075427]
// [-2.313824 -1.571765 0.300245]
// [-3.83427 -1.535927 0.372171]
// [ 1.360346 0.128852 3.917699]
// [ 2.053945 -1.307678 3.160474]
// [ 3.055397 0.133647 3.403037]
// [ 1.677262 1.430072 1.750899]
// [ 2.372265 -0.029237 0.985204]
// [-0.245956 2.754188 0.771433]
// [-1.656897 2.472855 -0.287156]
// [-0.664186 4.739148 -0.712606]
// [-0.489413 3.734366 -2.161038]
// [ 0.903055 4.016867 -1.112198]
// [ 1.919521 -0.229395 -2.269681]
// [ 2.474031 -0.680069 -0.629949]
// [ 2.344478 -3.136247 -1.273862]
// [ 1.786854 -2.695974 -2.890647]
// [ 3.41648 -2.242409 -2.365094]
// [-1.884889 -1.858617 1.28054 ]
// [-1.976206 -2.322432 -0.440995]
// [-4.284706 -1.26469 -0.591463]
// [-4.225999 -2.520759 0.656131]
// [-4.193468 -0.809557 1.112677]]
// -14.1664924726
// [ 9.9615 18.102 27.0569 31.8459 39.0096 55.0091
// 66.4992 80.4552 86.4912 123.3551 141.6058 155.5448
// 159.4747 167.0013 178.5676 207.3738 237.3201 255.3487
// 264.5649 292.867 309.4248 344.6503 434.8231 470.2074
// 488.9717 749.1722 834.257 834.6594 837.7292 839.6352
// 887.9767 892.9538 899.5374 992.1851 1020.6164 1020.8671
// 1028.3897 1046.7945 1049.1768 1059.4704 1065.1505 1107.4001
// 1108.1567 1109.0466 1112.6677 1122.7785 1124.4315 1128.4163
// 1153.3438 1167.6705 1170.9627 1174.9613 1232.1826 1331.8459
// 1335.3932 1335.8677 1343.9556 1371.37 1372.8127 1375.5428
// 1396.0344 1402.4082 1402.7554 1403.2463 1403.396 1411.6946
// 1412.2456 1412.3519 1414.5982 1415.3613 1415.5698 1415.7993
// 1418.5409 2870.7446 2905.3132 2907.0361 2914.1662 2949.2646
// 2965.825 2967.7667 2971.5223 3086.3849 3086.3878 3086.6448
// 3086.687 3089.2274 3089.4105 3089.4743 3089.5841 3186.0753
// 3186.1375 3186.3511 3186.365 ]
// [ 0.52729 0.49992 0.42466]
//note: above example has since been updated to print molecular mass; also frequency and atomic number format has been updated
String [] stringArray;
natoms = Integer.parseInt(br.readLine());//read line 1: number of atoms
stringArray = br.readLine().replace("[", "").replace("]","").trim().split(",\\s+");//read line 2: the atomic numbers (first removing braces)
// line = br.readLine().replace("[", "").replace("]","");//read line 2: the atomic numbers (first removing braces)
// StringTokenizer st = new StringTokenizer(line); //apprently the stringTokenizer class is deprecated, but I am having trouble getting the regular expressions to work properly
for(int i=0; i < natoms; i++){
// atomicNumber.add(i,Integer.parseInt(stringArray[i]));
atomicNumber.add(i,Integer.parseInt(stringArray[i]));
}
for(int i=0; i < natoms; i++){
stringArray = br.readLine().replace("[", "").replace("]","").trim().split("\\s+");//read line 3+: coordinates for atom i; used /s+ for split; using spaces with default limit of 0 was giving empty string
x_coor.add(i,Double.parseDouble(stringArray[0]));
y_coor.add(i,Double.parseDouble(stringArray[1]));
z_coor.add(i,Double.parseDouble(stringArray[2]));
}
energy = Double.parseDouble(br.readLine());//read next line: energy
molmass = Double.parseDouble(br.readLine());//read next line: molecular mass (in amu)
if (natoms>1){//read additional info for non-monoatomic species
stringArray = br.readLine().replace("[", "").replace("]","").trim().split(",\\s+");//read next line: frequencies
for(int i=0; i < stringArray.length; i++){
freqs.add(i,Double.parseDouble(stringArray[i]));
}
stringArray = br.readLine().replace("[", "").replace("]","").trim().split("\\s+");//read next line rotational constants (converting from GHz to Hz in the process)
rotCons_1 = Double.parseDouble(stringArray[0])*1000000000;
rotCons_2 = Double.parseDouble(stringArray[1])*1000000000;
rotCons_3 = Double.parseDouble(stringArray[2])*1000000000;
}
while ( (line = br.readLine()) != null) {
//do nothing (there shouldn't be any more information, but this is included to get all the output)
}
int exitValue = cclibProc.waitFor();
}
catch (Exception e) {
String err = "Error in running ccLib Python process \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
ThermoData result = calculateThermoFromPM3MM4Calc(natoms, atomicNumber, x_coor, y_coor, z_coor, energy, molmass, freqs, rotCons_1, rotCons_2, rotCons_3, gdStateDegen);
return result;
}
//returns a thermo result, given results from quantum PM3 calculation or MM4 calculation (originally, this was in parseGaussianPM3 function
public ThermoData calculateThermoFromPM3MM4Calc(int natoms, ArrayList atomicNumber, ArrayList x_coor, ArrayList y_coor, ArrayList z_coor, double energy, double molmass, ArrayList freqs, double rotCons_1, double rotCons_2, double rotCons_3, int gdStateDegen){
//determine point group using the SYMMETRY Program
String geom = natoms + "\n";
for(int i=0; i < natoms; i++){
geom += atomicNumber.get(i) + " "+ x_coor.get(i) + " " + y_coor.get(i) + " " +z_coor.get(i) + "\n";
}
// String pointGroup = determinePointGroupUsingSYMMETRYProgram(geom, 0.01);
String pointGroup = determinePointGroupUsingSYMMETRYProgram(geom);
//calculate thermo quantities using stat. mech. equations
double R = 1.9872; //ideal gas constant in cal/mol-K (does this appear elsewhere in RMG, so I don't need to reuse it?)
double Hartree_to_kcal = 627.5095; //conversion from Hartree to kcal/mol taken from Gaussian thermo white paper
double Na = 6.02214179E23;//Avagadro's number; cf. http://physics.nist.gov/cgi-bin/cuu/Value?na|search_for=physchem_in!
double k = 1.3806504E-23;//Boltzmann's constant in J/K; cf. http://physics.nist.gov/cgi-bin/cuu/Value?na|search_for=physchem_in!
double h = 6.62606896E-34;//Planck's constant in J-s; cf. http://physics.nist.gov/cgi-bin/cuu/Value?h|search_for=universal_in!
double c = 299792458. *100;//speed of light in vacuum in cm/s, cf. http://physics.nist.gov/cgi-bin/cuu/Value?c|search_for=universal_in!
//boolean linearity = p_chemGraph.isLinear();//determine linearity (perhaps it would be more appropriate to determine this from point group?)
boolean linearity = false;
if (pointGroup.equals("Cinfv")||pointGroup.equals("Dinfh")) linearity=true;//determine linearity from 3D-geometry; changed to correctly consider linear ketene radical case
//we will use number of atoms from above (alternatively, we could use the chemGraph); this is needed to test whether the species is monoatomic
double Hf298, S298, Cp300, Cp400, Cp500, Cp600, Cp800, Cp1000, Cp1500;
double sigmaCorr=0;//statistical correction for S in dimensionless units (divided by R)
//determine statistical correction factor for 1. external rotational symmetry (affects rotational partition function) and 2. chirality (will add R*ln2 to entropy) based on point group
//ref: http://cccbdb.nist.gov/thermo.asp
//assumptions below for Sn, T, Th, O, I seem to be in line with expectations based on order reported at: http://en.wikipedia.org/w/index.php?title=List_of_character_tables_for_chemically_important_3D_point_groups&oldid=287261611 (assuming order = symmetry number * 2 (/2 if chiral))...this appears to be true for all point groups I "know" to be correct
//minor concern: does SYMMETRY appropriately calculate all Sn groups considering 2007 discovery of previous errors in character tables (cf. Wikipedia article above)
if (pointGroup.equals("C1")) sigmaCorr=+Math.log(2.);//rot. sym. = 1, chiral
else if (pointGroup.equals("Cs")) sigmaCorr=0; //rot. sym. = 1
else if (pointGroup.equals("Ci")) sigmaCorr=0; //rot. sym. = 1
else if (pointGroup.equals("C2")) sigmaCorr=0;//rot. sym. = 2, chiral (corrections cancel)
else if (pointGroup.equals("C3")) sigmaCorr=+Math.log(2.)-Math.log(3.);//rot. sym. = 3, chiral
else if (pointGroup.equals("C4")) sigmaCorr=+Math.log(2.)-Math.log(4.);//rot. sym. = 4, chiral
else if (pointGroup.equals("C5")) sigmaCorr=+Math.log(2.)-Math.log(5.);//rot. sym. = 5, chiral
else if (pointGroup.equals("C6")) sigmaCorr=+Math.log(2.)-Math.log(6.);//rot. sym. = 6, chiral
else if (pointGroup.equals("C7")) sigmaCorr=+Math.log(2.)-Math.log(7.);//rot. sym. = 7, chiral
else if (pointGroup.equals("C8")) sigmaCorr=+Math.log(2.)-Math.log(8.);//rot. sym. = 8, chiral
else if (pointGroup.equals("D2")) sigmaCorr=+Math.log(2.)-Math.log(4.);//rot. sym. = 4, chiral
else if (pointGroup.equals("D3")) sigmaCorr=+Math.log(2.)-Math.log(6.);//rot. sym. = 6, chiral
else if (pointGroup.equals("D4")) sigmaCorr=+Math.log(2.)-Math.log(8.);//rot. sym. = 8, chiral
else if (pointGroup.equals("D5")) sigmaCorr=+Math.log(2.)-Math.log(10.);//rot. sym. = 10, chiral
else if (pointGroup.equals("D6")) sigmaCorr=+Math.log(2.)-Math.log(12.);//rot. sym. = 12, chiral
else if (pointGroup.equals("D7")) sigmaCorr=+Math.log(2.)-Math.log(14.);//rot. sym. = 14, chiral
else if (pointGroup.equals("D8")) sigmaCorr=+Math.log(2.)-Math.log(16.);//rot. sym. = 16, chiral
else if (pointGroup.equals("C2v")) sigmaCorr=-Math.log(2.);//rot. sym. = 2
else if (pointGroup.equals("C3v")) sigmaCorr=-Math.log(3.);//rot. sym. = 3
else if (pointGroup.equals("C4v")) sigmaCorr=-Math.log(4.);//rot. sym. = 4
else if (pointGroup.equals("C5v")) sigmaCorr=-Math.log(5.);//rot. sym. = 5
else if (pointGroup.equals("C6v")) sigmaCorr=-Math.log(6.);//rot. sym. = 6
else if (pointGroup.equals("C7v")) sigmaCorr=-Math.log(7.);//rot. sym. = 7
else if (pointGroup.equals("C8v")) sigmaCorr=-Math.log(8.);//rot. sym. = 8
else if (pointGroup.equals("C2h")) sigmaCorr=-Math.log(2.);//rot. sym. = 2
else if (pointGroup.equals("C3h")) sigmaCorr=-Math.log(3.);//rot. sym. = 3
else if (pointGroup.equals("C4h")) sigmaCorr=-Math.log(4.);//rot. sym. = 4
else if (pointGroup.equals("C5h")) sigmaCorr=-Math.log(5.);//rot. sym. = 5
else if (pointGroup.equals("C6h")) sigmaCorr=-Math.log(6.);//rot. sym. = 6
else if (pointGroup.equals("C7h")) sigmaCorr=-Math.log(7.);//rot. sym. = 7
else if (pointGroup.equals("C8h")) sigmaCorr=-Math.log(8.);//rot. sym. = 8
else if (pointGroup.equals("D2h")) sigmaCorr=-Math.log(4.);//rot. sym. = 4
else if (pointGroup.equals("D3h")) sigmaCorr=-Math.log(6.);//rot. sym. = 6
else if (pointGroup.equals("D4h")) sigmaCorr=-Math.log(8.);//rot. sym. = 8
else if (pointGroup.equals("D5h")) sigmaCorr=-Math.log(10.);//rot. sym. = 10
else if (pointGroup.equals("D6h")) sigmaCorr=-Math.log(12.);//rot. sym. = 12
else if (pointGroup.equals("D7h")) sigmaCorr=-Math.log(14.);//rot. sym. = 14
else if (pointGroup.equals("D8h")) sigmaCorr=-Math.log(16.);//rot. sym. = 16
else if (pointGroup.equals("D2d")) sigmaCorr=-Math.log(4.);//rot. sym. = 4
else if (pointGroup.equals("D3d")) sigmaCorr=-Math.log(6.);//rot. sym. = 6
else if (pointGroup.equals("D4d")) sigmaCorr=-Math.log(8.);//rot. sym. = 8
else if (pointGroup.equals("D5d")) sigmaCorr=-Math.log(10.);//rot. sym. = 10
else if (pointGroup.equals("D6d")) sigmaCorr=-Math.log(12.);//rot. sym. = 12
else if (pointGroup.equals("D7d")) sigmaCorr=-Math.log(14.);//rot. sym. = 14
else if (pointGroup.equals("D8d")) sigmaCorr=-Math.log(16.);//rot. sym. = 16
else if (pointGroup.equals("S4")) sigmaCorr=-Math.log(2.);//rot. sym. = 2 ;*** assumed achiral
else if (pointGroup.equals("S6")) sigmaCorr=-Math.log(3.);//rot. sym. = 3 ;*** assumed achiral
else if (pointGroup.equals("S8")) sigmaCorr=-Math.log(4.);//rot. sym. = 4 ;*** assumed achiral
else if (pointGroup.equals("T")) sigmaCorr=+Math.log(2.)-Math.log(12.);//rot. sym. = 12, *** assumed chiral
else if (pointGroup.equals("Th")) sigmaCorr=-Math.log(12.);//***assumed rot. sym. = 12
else if (pointGroup.equals("Td")) sigmaCorr=-Math.log(12.);//rot. sym. = 12
else if (pointGroup.equals("O")) sigmaCorr=+Math.log(2.)-Math.log(24.);//***assumed rot. sym. = 24, chiral
else if (pointGroup.equals("Oh")) sigmaCorr=-Math.log(24.);//rot. sym. = 24
else if (pointGroup.equals("Cinfv")) sigmaCorr=0;//rot. sym. = 1
else if (pointGroup.equals("Dinfh")) sigmaCorr=-Math.log(2.);//rot. sym. = 2
else if (pointGroup.equals("I")) sigmaCorr=+Math.log(2.)-Math.log(60.);//***assumed rot. sym. = 60, chiral
else if (pointGroup.equals("Ih")) sigmaCorr=-Math.log(60.);//rot. sym. = 60
else if (pointGroup.equals("Kh")) sigmaCorr=0;//arbitrarily set to zero...one could argue that it is infinite; apparently this is the point group of a single atom (cf. http://www.cobalt.chem.ucalgary.ca/ps/symmetry/tests/G_Kh); this should not have a rotational partition function, and we should not use the symmetry correction in this case
else{//this point should not be reached, based on checks performed in determinePointGroupUsingSYMMETRYProgram
System.out.println("Unrecognized point group: "+ pointGroup);
System.exit(0);
}
Hf298 = energy*Hartree_to_kcal;
S298 = R*Math.log(gdStateDegen)+R*(3./2.*Math.log(2.*Math.PI*molmass/(1000.*Na*Math.pow(h,2.)))+5./2.*Math.log(k*298.15)-Math.log(100000.)+5./2.);//electronic + translation; note use of 10^5 Pa for standard pressure; also note that molecular mass needs to be divided by 1000 for kg units
Cp300 = 5./2.*R;
Cp400 = 5./2.*R;
Cp500 = 5./2.*R;
Cp600 = 5./2.*R;
Cp800 = 5./2.*R;
Cp1000 = 5./2.*R;
Cp1500 = 5./2.*R;
if(natoms>1){//include statistical correction and rotational (without symmetry number, vibrational contributions if species is polyatomic
if(linearity){//linear case
//determine the rotational constant (note that one of the rotcons will be zero)
double rotCons;
if(rotCons_1 > 0.0001) rotCons = rotCons_1;
else rotCons = rotCons_2;
S298 += R*sigmaCorr+R*(Math.log(k*298.15/(h*rotCons))+1)+R*calcVibS(freqs, 298.15, h, k, c);
Cp300 += R + R*calcVibCp(freqs, 300., h, k, c);
Cp400 += R + R*calcVibCp(freqs, 400., h, k, c);
Cp500 += R + R*calcVibCp(freqs, 500., h, k, c);
Cp600 += R + R*calcVibCp(freqs, 600., h, k, c);
Cp800 += R + R*calcVibCp(freqs, 800., h, k, c);
Cp1000 += R + R*calcVibCp(freqs, 1000., h, k, c);
Cp1500 += R + R*calcVibCp(freqs, 1500., h, k, c);
}
else{//nonlinear case
S298 += R*sigmaCorr+R*(3./2.*Math.log(k*298.15/h)-1./2.*Math.log(rotCons_1*rotCons_2*rotCons_3/Math.PI)+3./2.)+R*calcVibS(freqs, 298.15, h, k, c);
Cp300 += 3./2.*R + R*calcVibCp(freqs, 300., h, k, c);
Cp400 += 3./2.*R + R*calcVibCp(freqs, 400., h, k, c);
Cp500 += 3./2.*R + R*calcVibCp(freqs, 500., h, k, c);
Cp600 += 3./2.*R + R*calcVibCp(freqs, 600., h, k, c);
Cp800 += 3./2.*R + R*calcVibCp(freqs, 800., h, k, c);
Cp1000 += 3./2.*R + R*calcVibCp(freqs, 1000., h, k, c);
Cp1500 += 3./2.*R + R*calcVibCp(freqs, 1500., h, k, c);
}
}
ThermoData result = new ThermoData(Hf298,S298,Cp300,Cp400,Cp500,Cp600,Cp800,Cp1000,Cp1500,5,1,1,"PM3 calculation");//this includes rough estimates of uncertainty
return result;
}
//determine the point group using the SYMMETRY program (http://www.cobalt.chem.ucalgary.ca/ps/symmetry/)
//required input is a line with number of atoms followed by lines for each atom including atom number and x,y,z coordinates
//finalTol determines how loose the point group criteria are; values are comparable to those specifed in the GaussView point group interface
//public String determinePointGroupUsingSYMMETRYProgram(String geom, double finalTol){
public String determinePointGroupUsingSYMMETRYProgram(String geom){
int attemptNumber = 1;
int maxAttemptNumber = 2;
boolean pointGroupFound=false;
//write the input file
try {
File inputFile=new File(qmfolder+"symminput.txt");//SYMMETRY program directory
FileWriter fw = new FileWriter(inputFile);
fw.write(geom);
fw.close();
} catch (IOException e) {
String err = "Error writing input file for point group calculation";
err += e.toString();
System.out.println(err);
System.exit(0);
}
String result = "";
String command = "";
while (attemptNumber<=maxAttemptNumber && !pointGroupFound){
//call the program and read the result
result = "";
String [] lineArray;
try{
if (System.getProperty("os.name").toLowerCase().contains("windows")){//the Windows case where the precompiled executable seems to need to be called from a batch script
if(attemptNumber==1) command = "\""+System.getProperty("RMG.workingDirectory")+"/scripts/symmetryDefault2.bat\" "+qmfolder+ "symminput.txt";//12/1/09 gmagoon: switched to use slightly looser criteria of 0.02 rather than 0.01 to handle methylperoxyl radical result from MOPAC
else if (attemptNumber==2) command = "\""+System.getProperty("RMG.workingDirectory")+"/scripts/symmetryLoose.bat\" " +qmfolder+ "symminput.txt";//looser criteria (0.1 instead of 0.01) to properly identify C2v group in VBURLMBUVWIEMQ-UHFFFAOYAVmult5 (InChI=1/C3H4O2/c1-3(2,4)5/h1-2H2/mult5) MOPAC result; C2 and sigma were identified with default, but it should be C2 and sigma*2
else{
System.out.println("Invalid attemptNumber: "+ attemptNumber);
System.exit(0);
}
}
else{//in other (non-Windows) cases, where it is compiled from scratch, we should be able to run this directly
if(attemptNumber==1) command = System.getProperty("RMG.workingDirectory")+"/bin/SYMMETRY.EXE -final 0.02 " +qmfolder+ "symminput.txt";//12/1/09 gmagoon: switched to use slightly looser criteria of 0.02 rather than 0.01 to handle methylperoxyl radical result from MOPAC
else if (attemptNumber==2) command = System.getProperty("RMG.workingDirectory")+"/bin/SYMMETRY.EXE -final 0.1 " +qmfolder+ "symminput.txt";//looser criteria (0.1 instead of 0.01) to properly identify C2v group in VBURLMBUVWIEMQ-UHFFFAOYAVmult5 (InChI=1/C3H4O2/c1-3(2,4)5/h1-2H2/mult5) MOPAC result; C2 and sigma were identified with default, but it should be C2 and sigma*2
else{
System.out.println("Invalid attemptNumber: "+ attemptNumber);
System.exit(0);
}
}
Process symmProc = Runtime.getRuntime().exec(command);
//check for errors and display the error if there is one
InputStream is = symmProc.getInputStream();
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
String line=null;
while ( (line = br.readLine()) != null) {
if(line.startsWith("It seems to be the ")){//last line, ("It seems to be the [x] point group") indicates point group
lineArray = line.split(" ");//split the line around spaces
result = lineArray[5];//point group string should be the 6th word
}
}
int exitValue = symmProc.waitFor();
}
catch(Exception e){
String err = "Error in running point group calculation process using SYMMETRY \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
//check for a recognized point group
if (result.equals("C1")||result.equals("Cs")||result.equals("Ci")||result.equals("C2")||result.equals("C3")||result.equals("C4")||result.equals("C5")||result.equals("C6")||result.equals("C7")||result.equals("C8")||result.equals("D2")||result.equals("D3")||result.equals("D4")||result.equals("D5")||result.equals("D6")||result.equals("D7")||result.equals("D8")||result.equals("C2v")||result.equals("C3v")||result.equals("C4v")||result.equals("C5v")||result.equals("C6v")||result.equals("C7v")||result.equals("C8v")||result.equals("C2h")||result.equals("C3h")||result.equals("C4h")||result.equals("C5h")||result.equals("C6h")||result.equals("C7h")||result.equals("C8h")||result.equals("D2h")||result.equals("D3h")||result.equals("D4h")||result.equals("D5h")||result.equals("D6h")||result.equals("D7h")||result.equals("D8h")||result.equals("D2d")||result.equals("D3d")||result.equals("D4d")||result.equals("D5d")||result.equals("D6d")||result.equals("D7d")||result.equals("D8d")||result.equals("S4")||result.equals("S6")||result.equals("S8")||result.equals("T")||result.equals("Th")||result.equals("Td")||result.equals("O")||result.equals("Oh")||result.equals("Cinfv")||result.equals("Dinfh")||result.equals("I")||result.equals("Ih")||result.equals("Kh")) pointGroupFound=true;
else{
if(attemptNumber < maxAttemptNumber) System.out.println("Attempt number "+attemptNumber+" did not identify a recognized point group (" +result+"). Will retry with looser point group criteria.");
else{
System.out.println("Final attempt number "+attemptNumber+" did not identify a recognized point group (" +result+"). Exiting.");
System.exit(0);
}
attemptNumber++;
}
}
System.out.println("Point group: "+ result);//print result, at least for debugging purposes
return result;
}
//gmagoon 6/8/09
//calculate the vibrational contribution (divided by R, dimensionless) at temperature, T, in Kelvin to entropy
//p_freqs in cm^-1; c in cm/s; k in J/K; h in J-s
//ref.: http://cccbdb.nist.gov/thermo.asp
public double calcVibS(ArrayList p_freqs, double p_T, double h, double k, double c){
double Scontrib = 0;
double dr;
for(int i=0; i < p_freqs.size(); i++){
double freq = (Double)p_freqs.get(i);
dr = h*c*freq/(k*p_T); //frequently used dimensionless ratio
Scontrib = Scontrib - Math.log(1.-Math.exp(-dr))+dr*Math.exp(-dr)/(1.-Math.exp(-dr));
}
return Scontrib;
}
//gmagoon 6/8/09
//calculate the vibrational contribution (divided by R, dimensionless) at temperature, T, in Kelvin to heat capacity, Cp
//p_freqs in cm^-1; c in cm/s; k in J/K; h in J-s
//ref.: http://cccbdb.nist.gov/thermo.asp
public double calcVibCp(ArrayList p_freqs, double p_T, double h, double k, double c){
double Cpcontrib = 0;
double dr;
for(int i=0; i < p_freqs.size(); i++){
double freq = (Double)p_freqs.get(i);
dr = h*c*freq/(k*p_T); //frequently used dimensionless ratio
Cpcontrib = Cpcontrib + Math.pow(dr, 2.)*Math.exp(-dr)/Math.pow(1.-Math.exp(-dr),2.);
}
return Cpcontrib;
}
//determine the QM filename (element 0) and augmented InChI (element 1) for a ChemGraph
//QM filename is InChIKey appended with mult3, mult4, mult5, or mult6 for multiplicities of 3 or higher
//augmented InChI is InChI appended with /mult3, /mult4, /mult5, or /mult6 for multiplicities of 3 or higher
public String [] getQMFileName(ChemGraph p_chemGraph){
String [] result = new String[2];
result[0] = p_chemGraph.getModifiedInChIKeyAnew();//need to generate InChI and key anew because ChemGraph may have changed (in particular, adding/removing hydrogens in HBI process)
result[1] = p_chemGraph.getModifiedInChIAnew();
return result;
}
//returns true if a Gaussian file for the given name and directory (.log suffix) exists and indicates successful completion (same criteria as used after calculation runs); terminates if the InChI doesn't match the InChI in the file or if there is no InChI in the file; returns false otherwise
public boolean successfulGaussianResultExistsQ(String name, String directory, String InChIaug){
//part of the code is taken from runGaussian code above
//look in the output file to check for the successful termination of the Gaussian calculation
//failed jobs will contain the a line beginning with " Error termination" near the end of the file
File file = new File(directory+"/"+name+".log");
if(file.exists()){//if the file exists, do further checks; otherwise, we will skip to final statement and return false
int failureFlag=0;//flag (1 or 0) indicating whether the Gaussian job failed
int InChIMatch=0;//flag (1 or 0) indicating whether the InChI in the file matches InChIaug; this can only be 1 if InChIFound is also 1;
int InChIFound=0;//flag (1 or 0) indicating whether an InChI was found in the log file
int InChIPartialMatch=0;//flag (1 or 0) indicating whether the InChI in the log file is a substring of the InChI in RMG's memory
String logFileInChI="";
try{
FileReader in = new FileReader(file);
BufferedReader reader = new BufferedReader(in);
String line=reader.readLine();
while(line!=null){
if (line.startsWith(" Error termination ")) failureFlag=1;
else if (line.startsWith(" ******")){//also look for imaginary frequencies
if (line.contains("imaginary frequencies")) failureFlag=1;
}
else if(line.startsWith(" InChI=")){
logFileInChI = line.trim();
//continue reading lines until a line of dashes is found (in this way, we can read InChIs that span multiple lines)
line=reader.readLine();
while (!line.startsWith(" --------")){
logFileInChI += line.trim();
line=reader.readLine();
}
InChIFound=1;
if(logFileInChI.equals(InChIaug)) InChIMatch=1;
else if(InChIaug.startsWith(logFileInChI)) InChIPartialMatch=1;
}
line=reader.readLine();
}
}
catch(Exception e){
String err = "Error in reading preexisting Gaussian log file \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
//if the failure flag is still 0, the process should have been successful
if (failureFlag==0&&InChIMatch==1){
System.out.println("Pre-existing successful quantum result for " + name + " ("+InChIaug+") has been found. This log file will be used.");
return true;
}
else if (InChIFound==1 && InChIMatch == 0){//InChIs do not match (most likely due to limited name length mirrored in log file (79 characters), but possibly due to a collision)
if(InChIPartialMatch == 1){//case where the InChI in memory begins with the InChI in the log file; we will continue and check the input file, printing a warning if there is no match
File inputFile = new File(directory+"/"+name+".gjf");
if(inputFile.exists()){//read the Gaussian inputFile
String inputFileInChI="";
try{
FileReader inI = new FileReader(inputFile);
BufferedReader readerI = new BufferedReader(inI);
String lineI=readerI.readLine();
while(lineI!=null){
if(lineI.startsWith(" InChI=")){
inputFileInChI = lineI.trim();
}
lineI=readerI.readLine();
}
}
catch(Exception e){
String err = "Error in reading preexisting Gaussian gjf file \n";
err += e.toString();
e.printStackTrace();
System.exit(0);
}
if(inputFileInChI.equals(InChIaug)){
if(failureFlag==0){
System.out.println("Pre-existing successful quantum result for " + name + " ("+InChIaug+") has been found. This log file will be used. *Note that input file was read to confirm lack of InChIKey collision (InChI probably more than 79 characters)");
return true;
}
else{//otherwise, failureFlag==1
System.out.println("Pre-existing quantum result for " + name + " ("+InChIaug+") has been found, but the result was apparently unsuccessful. The file will be overwritten with a new calculation. *Note that input file was read to confirm lack of InChIKey collision (InChI probably more than 79 characters)");
return false;
}
}
else{
if(inputFileInChI.equals("")){//InChI was not found in input file
System.out.println("*****Warning: potential InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " Log file Augmented InChI = "+logFileInChI + " . InChI could not be found in the Gaussian input file. You should manually check that the log file contains the intended species.");
return true;
}
else{//InChI was found but doesn't match
System.out.println("Congratulations! You appear to have discovered the first recorded instance of an InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " Gaussian input file Augmented InChI = "+inputFileInChI);
System.exit(0);
}
}
}
else{
System.out.println("*****Warning: potential InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " Log file Augmented InChI = "+logFileInChI + " . Gaussian input file could not be found to check full InChI. You should manually check that the log file contains the intended species.");
return true;
}
}
else{
System.out.println("Congratulations! You appear to have discovered the first recorded instance of an InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " Log file Augmented InChI = "+logFileInChI);
System.exit(0);
}
}
else if (InChIFound==0){
System.out.println("An InChI was not found in file: " +name+".log");
System.exit(0);
}
else if (failureFlag==1){//note these should cover all possible results for this block, and if the file.exists block is entered, it should return from within the block and should not reach the return statement below
System.out.println("Pre-existing quantum result for " + name + " ("+InChIaug+") has been found, but the result was apparently unsuccessful. The file will be overwritten with a new calculation.");
return false;
}
}
//we could print a line here for cases where the file doesn't exist, but this would probably be too verbose
return false;
}
//returns true if a successful result exists (either Gaussian or MOPAC)
// public boolean [] successfulResultExistsQ(String name, String directory, String InChIaug){
// boolean gaussianResult=successfulGaussianResultExistsQ(name, directory, InChIaug);
// boolean mopacResult=successfulMOPACResultExistsQ(name, directory, InChIaug);
// return (gaussianResult || mopacResult);// returns true if either a successful Gaussian or MOPAC result exists
// }
/**
 * Returns true if a MOPAC output file for the given name and directory
 * ({@code name}.out) exists and indicates successful completion, using the same
 * criteria applied immediately after a calculation runs.
 *
 * Success requires the "DESCRIPTION OF VIBRATIONS" section to be present, with
 * no imaginary frequencies, no excess-optimization-cycles error, and no timeout.
 * If the InChI recorded in the .out file does not match {@code InChIaug}, the
 * .mop input file is consulted (characters can apparently be deleted from long
 * InChIs in MOPAC output).
 *
 * @param name      InChIKey-derived base file name (no extension)
 * @param directory directory holding the MOPAC .out/.mop files
 * @param InChIaug  augmented InChI for the species in RMG's memory
 * @return true if a successful, InChI-matching result exists; false if the file
 *         is absent or the recorded run failed (caller should redo the job).
 *         NOTE: calls System.exit(0) if no InChI is found in the file or if a
 *         confirmed InChIKey collision is detected.
 */
public boolean successfulMopacResultExistsQ(String name, String directory, String InChIaug){
    //part of the code is taken from analogous code for Gaussian
    //look in the output file to check for the successful termination of the calculation (assumed to be successful if "DESCRIPTION OF VIBRATIONS" appears)
    File file = new File(directory+"/"+name+".out");
    if(file.exists()){//if the file exists, do further checks; otherwise, we will skip to final statement and return false
        int failureFlag=1;//flag (1 or 0) indicating whether the MOPAC job failed
        int failureOverrideFlag=0;//flag (1 or 0) to override success as measured by failureFlag
        int InChIMatch=0;//flag (1 or 0) indicating whether the InChI in the file matches InChIaug; this can only be 1 if InChIFound is also 1
        int InChIFound=0;//flag (1 or 0) indicating whether an InChI was found in the log file
        int InChIPartialMatch=0;//flag (1 or 0) indicating whether the InChI in the log file is a substring of the InChI in RMG's memory
        String logFileInChI="";
        try{
            FileReader in = new FileReader(file);
            BufferedReader reader = new BufferedReader(in);
            try{
                String line=reader.readLine();
                while(line!=null){
                    String trimLine= line.trim();
                    if (trimLine.equals("DESCRIPTION OF VIBRATIONS")){
                        //presence of this section marks a completed vibrational analysis
                        failureFlag = 0;
                    }
                    //negative frequencies notice example:
                    //         NOTE: SYSTEM IS NOT A GROUND STATE, THEREFORE ZERO POINT
                    //         ENERGY IS NOT MEANINGFULL. ZERO POINT ENERGY PRINTED
                    //         DOES NOT INCLUDE THE  2 IMAGINARY FREQUENCIES
                    else if (trimLine.endsWith("IMAGINARY FREQUENCIES")){
                        failureOverrideFlag=1;
                    }
                    else if (trimLine.equals("EXCESS NUMBER OF OPTIMIZATION CYCLES")){//exceeding max cycles error
                        failureOverrideFlag=1;
                    }
                    else if (trimLine.equals("NOT ENOUGH TIME FOR ANOTHER CYCLE")){//timeout error
                        failureOverrideFlag=1;
                    }
                    else if(line.startsWith(" InChI=")){
                        logFileInChI = line.trim();//output files should take up to 240 characters of the name in the input file
                        InChIFound=1;
                        if(logFileInChI.equals(InChIaug)) InChIMatch=1;
                        else if(InChIaug.startsWith(logFileInChI)) InChIPartialMatch=1;
                    }
                    line=reader.readLine();
                }
            }
            finally{//close the reader so the file handle is not leaked (was previously never closed)
                try{ reader.close(); } catch(Exception ignored){ }//best-effort close
            }
        }
        catch(Exception e){
            String err = "Error in reading preexisting MOPAC output file \n";
            err += e.toString();
            System.out.println(err);//err was previously built but never printed
            e.printStackTrace();
            System.exit(0);//NOTE(review): exits with status 0 even though this is an error path
        }
        if(failureOverrideFlag==1) failureFlag=1; //job will be considered a failure if there are imaginary frequencies or if the job terminates due to excess time/cycles
        //if the failure flag is still 0, the process should have been successful
        if (failureFlag==0&&InChIMatch==1){
            System.out.println("Pre-existing successful MOPAC quantum result for " + name + " ("+InChIaug+") has been found. This log file will be used.");
            return true;
        }
        else if (InChIFound==1 && InChIMatch == 0){//InChIs do not match (most likely due to limited name length mirrored in log file (240 characters), but possibly due to a collision)
            //look in the input file if the InChI doesn't match (apparently, certain characters can be deleted in MOPAC output file for long InChIs)
            File inputFile = new File(directory+"/"+name+".mop");
            if(inputFile.exists()){//read the MOPAC inputFile
                String inputFileInChI="";
                try{
                    FileReader inI = new FileReader(inputFile);
                    BufferedReader readerI = new BufferedReader(inI);
                    try{
                        String lineI=readerI.readLine();
                        while(lineI!=null){
                            if(lineI.startsWith("InChI=")){
                                inputFileInChI = lineI.trim();
                            }
                            lineI=readerI.readLine();
                        }
                    }
                    finally{//close the reader so the file handle is not leaked (was previously never closed)
                        try{ readerI.close(); } catch(Exception ignored){ }//best-effort close
                    }
                }
                catch(Exception e){
                    String err = "Error in reading preexisting MOPAC input file \n";
                    err += e.toString();
                    System.out.println(err);//err was previously built but never printed
                    e.printStackTrace();
                    System.exit(0);
                }
                if(inputFileInChI.equals(InChIaug)){
                    if(failureFlag==0){
                        System.out.println("Pre-existing successful MOPAC quantum result for " + name + " ("+InChIaug+") has been found. This log file will be used. *Note that input file was read to confirm lack of InChIKey collision (InChI probably more than 240 characters or characters probably deleted from InChI in .out file)");
                        return true;
                    }
                    else{//otherwise, failureFlag==1
                        System.out.println("Pre-existing MOPAC quantum result for " + name + " ("+InChIaug+") has been found, but the result was apparently unsuccessful. The file will be overwritten with a new calculation or Gaussian result (if available) will be used. *Note that input file was read to confirm lack of InChIKey collision (InChI probably more than 240 characters or characters probably deleted from InChI in .out file)");
                        return false;
                    }
                }
                else{
                    if(inputFileInChI.equals("")){//InChI was not found in input file
                        System.out.println("*****Warning: potential InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " Log file Augmented InChI = "+logFileInChI + " . InChI could not be found in the MOPAC input file. You should manually check that the output file contains the intended species.");
                        return true;
                    }
                    else{//InChI was found but doesn't match
                        System.out.println("Congratulations! You appear to have discovered the first recorded instance of an InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " MOPAC input file Augmented InChI = " + inputFileInChI + " Log file Augmented InChI = "+logFileInChI);
                        System.exit(0);
                    }
                }
            }
            else{
                System.out.println("*****Warning: potential InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " Log file Augmented InChI = "+logFileInChI + " . MOPAC input file could not be found to check full InChI. You should manually check that the log file contains the intended species.");
                return true;
            }
        }
        else if (InChIFound==0){
            System.out.println("An InChI was not found in file: " +name+".out");
            System.exit(0);
        }
        else if (failureFlag==1){//note these should cover all possible results for this block, and if the file.exists block is entered, it should return from within the block and should not reach the return statement below
            System.out.println("Pre-existing MOPAC quantum result for " + name + " ("+InChIaug+") has been found, but the result was apparently unsuccessful. The file will be overwritten with a new calculation or Gaussian result (if available) will be used.");
            return false;
        }
    }
    //we could print a line here for cases where the file doesn't exist, but this would probably be too verbose
    return false;
}
/**
 * Returns true if an MM4 output file for the given name and directory
 * ({@code name}.mm4out) exists and indicates successful completion, using the
 * same criteria applied immediately after a calculation runs.
 *
 * Success requires the "STATISTICAL THERMODYNAMICS ANALYSIS" section, zero
 * imaginary frequencies, and (unless CanTherm is in use) no zero frequencies.
 * If the InChI in the .mm4out file is only a prefix of {@code InChIaug}
 * (MM4 mirrors a limited name length), the .mm4 input file is consulted.
 *
 * @param name      InChIKey-derived base file name (no extension)
 * @param directory directory holding the MM4 .mm4out/.mm4 files
 * @param InChIaug  augmented InChI for the species in RMG's memory
 * @return true if a successful, InChI-matching result exists; false if the file
 *         is absent or the recorded run failed (caller should redo the job).
 *         NOTE: calls System.exit(0) if no InChI is found in the file or if a
 *         confirmed InChIKey collision is detected.
 */
public boolean successfulMM4ResultExistsQ(String name, String directory, String InChIaug){
    //part of the code is taken from analogous code for MOPAC (first ~half) and Gaussian (second ~half)
    //look in the output file to check for the successful termination of the calculation
    int failureFlag=1;//flag (1 or 0) indicating whether the MM4 job failed
    int failureOverrideFlag=0;//flag (1 or 0) to override success as measured by failureFlag
    File file = new File(directory+"/"+name+".mm4out");
    int InChIMatch=0;//flag (1 or 0) indicating whether the InChI in the file matches InChIaug; this can only be 1 if InChIFound is also 1
    int InChIFound=0;//flag (1 or 0) indicating whether an InChI was found in the log file
    int InChIPartialMatch=0;//flag (1 or 0) indicating whether the InChI in the log file is a substring of the InChI in RMG's memory
    if(file.exists()){//if the file exists, do further checks; otherwise, we will skip to final statement and return false
        String logFileInChI="";
        try{
            FileReader in = new FileReader(file);
            BufferedReader reader = new BufferedReader(in);
            try{
                String line=reader.readLine();
                while(line!=null){
                    String trimLine= line.trim();
                    if (trimLine.equals("STATISTICAL THERMODYNAMICS ANALYSIS")){
                        //presence of this section marks a completed thermo analysis
                        failureFlag = 0;
                    }
                    else if (trimLine.endsWith("imaginary frequencies,")){//read the number of imaginary frequencies and make sure it is zero
                        String[] split = trimLine.split("\\s+");
                        if (Integer.parseInt(split[3])>0){
                            System.out.println("*****Imaginary freqencies found:");
                            failureOverrideFlag=1;
                        }
                    }
                    else if (trimLine.contains(" 0.0 (fir )")){
                        if (useCanTherm){//zero frequencies are only acceptable when CanTherm is used
                            System.out.println("*****Warning: zero freqencies found (values lower than 7.7 cm^-1 are rounded to zero in MM4 output); CanTherm should hopefully correct this:");
                        }
                        else{
                            System.out.println("*****Zero freqencies found:");
                            failureOverrideFlag=1;
                        }
                    }
                    else if(trimLine.startsWith("InChI=")){
                        logFileInChI = line.trim();//output files should take up to about 60 (?) characters of the name in the input file
                        InChIFound=1;
                        if(logFileInChI.equals(InChIaug)) InChIMatch=1;
                        else if(InChIaug.startsWith(logFileInChI)) InChIPartialMatch=1;
                    }
                    line=reader.readLine();
                }
            }
            finally{//close the reader so the file handle is not leaked (was previously never closed)
                try{ reader.close(); } catch(Exception ignored){ }//best-effort close
            }
        }
        catch(Exception e){
            String err = "Error in reading preexisting MM4 output file \n";
            err += e.toString();
            System.out.println(err);//err was previously built but never printed
            e.printStackTrace();
            System.exit(0);//NOTE(review): exits with status 0 even though this is an error path
        }
        if(failureOverrideFlag==1) failureFlag=1; //job will be considered a failure if there are imaginary frequencies or if the job terminates due to excess time/cycles
        //if the failure flag is still 0, the process should have been successful
        if (failureFlag==0&&InChIMatch==1){
            System.out.println("Pre-existing successful MM4 result for " + name + " ("+InChIaug+") has been found. This log file will be used.");
            return true;
        }
        else if (InChIFound==1 && InChIMatch == 0){//InChIs do not match (most likely due to limited name length mirrored in log file (79 characters), but possibly due to a collision)
            if(InChIPartialMatch == 1){//case where the InChI in memory begins with the InChI in the log file; we will continue and check the input file, printing a warning if there is no match
                File inputFile = new File(directory+"/"+name+".mm4");
                if(inputFile.exists()){//read the MM4 inputFile
                    String inputFileInChI="";
                    try{
                        FileReader inI = new FileReader(inputFile);
                        BufferedReader readerI = new BufferedReader(inI);
                        try{
                            String lineI=readerI.readLine();
                            //InChI should be repeated after in the first line of the input file
                            //NOTE(review): throws (and hence exits via the catch below) if the first line is absent or shorter than 80 characters
                            inputFileInChI = lineI.trim().substring(80);//extract the string starting with character 81
                        }
                        finally{//close the reader so the file handle is not leaked (was previously never closed)
                            try{ readerI.close(); } catch(Exception ignored){ }//best-effort close
                        }
                    }
                    catch(Exception e){
                        String err = "Error in reading preexisting MM4 .mm4 file \n";
                        err += e.toString();
                        System.out.println(err);//err was previously built but never printed
                        e.printStackTrace();
                        System.exit(0);
                    }
                    if(inputFileInChI.equals(InChIaug)){
                        if(failureFlag==0){
                            System.out.println("Pre-existing successful MM4 result for " + name + " ("+InChIaug+") has been found. This log file will be used. *Note that input file was read to confirm lack of InChIKey collision (InChI probably more than ~60 characters)");
                            return true;
                        }
                        else{//otherwise, failureFlag==1
                            System.out.println("Pre-existing MM4 result for " + name + " ("+InChIaug+") has been found, but the result was apparently unsuccessful. The file will be overwritten with a new calculation. *Note that input file was read to confirm lack of InChIKey collision (InChI probably more than ~60 characters)");
                            return false;
                        }
                    }
                    else{
                        if(inputFileInChI.equals("")){//InChI was not found in input file
                            System.out.println("*****Warning: potential InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " Log file Augmented InChI = "+logFileInChI + " . InChI could not be found in the MM4 input file. You should manually check that the log file contains the intended species.");
                            return true;
                        }
                        else{//InChI was found but doesn't match
                            System.out.println("Congratulations! You appear to have discovered the first recorded instance of an InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " MM4 input file Augmented InChI = "+inputFileInChI);
                            System.exit(0);
                        }
                    }
                }
                else{
                    System.out.println("*****Warning: potential InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " Log file Augmented InChI = "+logFileInChI + " . MM4 input file could not be found to check full InChI. You should manually check that the log file contains the intended species.");
                    return true;
                }
            }
            else{
                System.out.println("Congratulations! You appear to have discovered the first recorded instance of an InChIKey collision: InChIKey(augmented) = " + name + " RMG Augmented InChI = "+ InChIaug + " Log file Augmented InChI = "+logFileInChI);
                System.exit(0);
            }
        }
        else if (InChIFound==0){
            System.out.println("An InChI was not found in file: " +name+".mm4out");
            System.exit(0);
        }
        else if (failureFlag==1){//note these should cover all possible results for this block, and if the file.exists block is entered, it should return from within the block and should not reach the return statement below
            System.out.println("Pre-existing MM4 result for " + name + " ("+InChIaug+") has been found, but the result was apparently unsuccessful. The file will be overwritten with a new calculation.");
            return false;
        }
    }
    //we could print a line here for cases where the file doesn't exist, but this would probably be too verbose
    return false;
}
// //checks the MOPAC file for negative frequencies
// public boolean MopacFileContainsNegativeFreqsQ(String name, String directory){
// boolean negativeFreq=false;
//
// //code below copied from parseMopacPM3()
// String command = "c:/Python25/python.exe c:/Python25/MopacPM3ParsingScript.py ";//this should eventually be modified for added generality
// String logfilepath=directory+"/"+name+".out";
// command=command.concat(logfilepath);
//
// //much of code below is copied from calculateThermoFromPM3Calc()
// //parse the Mopac file using cclib
// int natoms = 0; //number of atoms from Mopac file; in principle, this should agree with number of chemGraph atoms
// ArrayList atomicNumber = new ArrayList(); //vector of atomic numbers (integers) (apparently Vector is thread-safe; cf. http://answers.yahoo.com/question/index?qid=20081214065127AArZDT3; ...should I be using this instead?)
// ArrayList x_coor = new ArrayList(); //vectors of x-, y-, and z-coordinates (doubles) (Angstroms) (in order corresponding to above atomic numbers)
// ArrayList y_coor = new ArrayList();
// ArrayList z_coor = new ArrayList();
// double energy = 0; //PM3 energy (Hf298) in Hartree (***note: in the case of MOPAC, the MOPAC file will contain in units of kcal/mol, but modified ccLib will return in Hartree)
// double molmass = 0; //molecular mass in amu
// ArrayList freqs = new ArrayList(); //list of frequencies in units of cm^-1
// double rotCons_1 = 0;//rotational constants in (1/s)
// double rotCons_2 = 0;
// double rotCons_3 = 0;
// //int gdStateDegen = p_chemGraph.getRadicalNumber()+1;//calculate ground state degeneracy from the number of radicals; this should give the same result as spin multiplicity in Gaussian input file (and output file), but we do not explicitly check this (we could use "mult" which cclib reads in if we wanted to do so); also, note that this is not always correct, as there can apparently be additional spatial degeneracy for non-symmetric linear molecules like OH radical (cf. http://cccbdb.nist.gov/thermo.asp)
// try{
// File runningdir=new File(directory);
// Process cclibProc = Runtime.getRuntime().exec(command, null, runningdir);
// //read the stdout of the process, which should contain the desired information in a particular format
// InputStream is = cclibProc.getInputStream();
// InputStreamReader isr = new InputStreamReader(is);
// BufferedReader br = new BufferedReader(isr);
// String line=null;
// //example output:
//// C:\Python25>python.exe GaussianPM3ParsingScript.py TEOS.out
//// 33
//// [ 6 6 8 14 8 6 6 8 6 6 8 6 6 1 1 1 1 1 1 1 1 1 1 1 1
//// 1 1 1 1 1 1 1 1]
//// [[ 2.049061 -0.210375 3.133106]
//// [ 1.654646 0.321749 1.762752]
//// [ 0.359284 -0.110429 1.471465]
//// [-0.201871 -0.013365 -0.12819 ]
//// [ 0.086307 1.504918 -0.82893 ]
//// [-0.559186 2.619928 -0.284003]
//// [-0.180246 3.839463 -1.113029]
//// [ 0.523347 -1.188305 -1.112765]
//// [ 1.857584 -1.018167 -1.495088]
//// [ 2.375559 -2.344392 -2.033403]
//// [-1.870397 -0.297297 -0.075427]
//// [-2.313824 -1.571765 0.300245]
//// [-3.83427 -1.535927 0.372171]
//// [ 1.360346 0.128852 3.917699]
//// [ 2.053945 -1.307678 3.160474]
//// [ 3.055397 0.133647 3.403037]
//// [ 1.677262 1.430072 1.750899]
//// [ 2.372265 -0.029237 0.985204]
//// [-0.245956 2.754188 0.771433]
//// [-1.656897 2.472855 -0.287156]
//// [-0.664186 4.739148 -0.712606]
//// [-0.489413 3.734366 -2.161038]
//// [ 0.903055 4.016867 -1.112198]
//// [ 1.919521 -0.229395 -2.269681]
//// [ 2.474031 -0.680069 -0.629949]
//// [ 2.344478 -3.136247 -1.273862]
//// [ 1.786854 -2.695974 -2.890647]
//// [ 3.41648 -2.242409 -2.365094]
//// [-1.884889 -1.858617 1.28054 ]
//// [-1.976206 -2.322432 -0.440995]
//// [-4.284706 -1.26469 -0.591463]
//// [-4.225999 -2.520759 0.656131]
//// [-4.193468 -0.809557 1.112677]]
//// -14.1664924726
//// [ 9.9615 18.102 27.0569 31.8459 39.0096 55.0091
//// 66.4992 80.4552 86.4912 123.3551 141.6058 155.5448
//// 159.4747 167.0013 178.5676 207.3738 237.3201 255.3487
//// 264.5649 292.867 309.4248 344.6503 434.8231 470.2074
//// 488.9717 749.1722 834.257 834.6594 837.7292 839.6352
//// 887.9767 892.9538 899.5374 992.1851 1020.6164 1020.8671
//// 1028.3897 1046.7945 1049.1768 1059.4704 1065.1505 1107.4001
//// 1108.1567 1109.0466 1112.6677 1122.7785 1124.4315 1128.4163
//// 1153.3438 1167.6705 1170.9627 1174.9613 1232.1826 1331.8459
//// 1335.3932 1335.8677 1343.9556 1371.37 1372.8127 1375.5428
//// 1396.0344 1402.4082 1402.7554 1403.2463 1403.396 1411.6946
//// 1412.2456 1412.3519 1414.5982 1415.3613 1415.5698 1415.7993
//// 1418.5409 2870.7446 2905.3132 2907.0361 2914.1662 2949.2646
//// 2965.825 2967.7667 2971.5223 3086.3849 3086.3878 3086.6448
//// 3086.687 3089.2274 3089.4105 3089.4743 3089.5841 3186.0753
//// 3186.1375 3186.3511 3186.365 ]
//// [ 0.52729 0.49992 0.42466]
////note: above example has since been updated to print molecular mass; also frequency and atomic number format has been updated
// String [] stringArray;
// natoms = Integer.parseInt(br.readLine());//read line 1: number of atoms
// stringArray = br.readLine().replace("[", "").replace("]","").trim().split(",\\s+");//read line 2: the atomic numbers (first removing braces)
// // line = br.readLine().replace("[", "").replace("]","");//read line 2: the atomic numbers (first removing braces)
// // StringTokenizer st = new StringTokenizer(line); //apprently the stringTokenizer class is deprecated, but I am having trouble getting the regular expressions to work properly
// for(int i=0; i < natoms; i++){
// // atomicNumber.add(i,Integer.parseInt(stringArray[i]));
// atomicNumber.add(i,Integer.parseInt(stringArray[i]));
// }
// for(int i=0; i < natoms; i++){
// stringArray = br.readLine().replace("[", "").replace("]","").trim().split("\\s+");//read line 3+: coordinates for atom i; used /s+ for split; using spaces with default limit of 0 was giving empty string
// x_coor.add(i,Double.parseDouble(stringArray[0]));
// y_coor.add(i,Double.parseDouble(stringArray[1]));
// z_coor.add(i,Double.parseDouble(stringArray[2]));
// }
// energy = Double.parseDouble(br.readLine());//read next line: energy
// molmass = Double.parseDouble(br.readLine());//read next line: molecular mass (in amu)
// if (natoms>1){//read additional info for non-monoatomic species
// stringArray = br.readLine().replace("[", "").replace("]","").trim().split(",\\s+");//read next line: frequencies
// for(int i=0; i < stringArray.length; i++){
// freqs.add(i,Double.parseDouble(stringArray[i]));
// }
// stringArray = br.readLine().replace("[", "").replace("]","").trim().split("\\s+");//read next line rotational constants (converting from GHz to Hz in the process)
// rotCons_1 = Double.parseDouble(stringArray[0])*1000000000;
// rotCons_2 = Double.parseDouble(stringArray[1])*1000000000;
// rotCons_3 = Double.parseDouble(stringArray[2])*1000000000;
// }
// while ( (line = br.readLine()) != null) {
// //do nothing (there shouldn't be any more information, but this is included to get all the output)
// }
// int exitValue = cclibProc.waitFor();
// }
// catch (Exception e) {
// String err = "Error in running ccLib Python process \n";
// err += e.toString();
// e.printStackTrace();
// System.exit(0);
// }
//
// //start of code "new" to this function (aside from initialization of negativeFreq)
// if(natoms > 0){
// for (int i=0; i<freqs.size(); i++){
// if((Double)freqs.get(i) < 0) negativeFreq = true;
// }
// }
// return negativeFreq;
// }
//## operation initGAGroupLibrary()
protected void initGAGroupLibrary() {
    //#[ operation initGAGroupLibrary()
    // Caches the singleton group-additivity thermo library in the thermoLibrary
    // field for later use by this class.
    thermoLibrary = ThermoGAGroupLibrary.getINSTANCE();
    //#]
}
}
/*********************************************************************
File Path : RMG\RMG\jing\chem\QMTP.java
*********************************************************************/
|
adding to and tidying up Java code for using CanTherm
|
source/RMG/jing/chem/QMTP.java
|
adding to and tidying up Java code for using CanTherm
|
|
Java
|
mit
|
a4b0cfc55944198521336355ce4acd9058ee4002
| 0
|
ponderousmad/pipevo,ponderousmad/pipevo,ponderousmad/pipevo
|
pipevo/src/pipes/gui/RunAIWorker.java
|
/* ---------------------------------------------------------------
* Copyright Adrian Smith.
* Licensed under the MIT license. See license.txt at project root.
* --------------------------------------------------------------- */
package pipes.gui;
import java.awt.Dimension;
import java.awt.FlowLayout;
import javax.swing.JFrame;
import javax.swing.JProgressBar;
import com.sun.java.SwingWorker;
import pipes.ai.PipeFollower;
import pipes.ai.players.PipeAI;
import pipes.root.GamePlay;
import pipes.stats.StatsManager;
/**
 * Background worker that runs a pipe-game AI {@code mRunCount} times (one game
 * per run, seed incremented after each), records each run's pipe length in the
 * stats session, and shows a progress dialog while running. Closing the dialog
 * aborts the remaining runs.
 */
class RunAIWorker extends SwingWorker {
    private int mRunCount;          // total number of runs requested
    private int mCurrent;           // number of runs completed so far
    private PipeAI mAI;             // the AI under test
    private long mSeed;             // seed for the next game; incremented per run
    private JFrame mProgressFrame;
    private JProgressBar mProgress;
    // Written on the EDT (setupUI) and read on the worker thread (construct);
    // volatile so the worker thread is guaranteed to see the update.
    private volatile boolean mUIShown = false;

    /** Builds and shows the progress dialog. Must be called on the EDT. */
    void setupUI()
    {
        mProgressFrame = new JFrame();
        mProgressFrame.setDefaultCloseOperation( JFrame.DISPOSE_ON_CLOSE );
        mProgressFrame.setLayout( new FlowLayout() );
        mProgress = new JProgressBar( 0, mRunCount );
        mProgress.setStringPainted(true);
        mProgress.setPreferredSize( new Dimension( 300, 40 ) );
        mProgressFrame.add( mProgress );
        mProgressFrame.pack();
        mProgressFrame.setTitle( "Performing " + mRunCount + " runs of " + mAI.player().toString() );
        mProgressFrame.setVisible( true );
        mProgressFrame.toFront();
        mUIShown = true;
    }

    /** Plays one full game with the given AI, records the result, and advances the seed. */
    private void runAI( PipeAI ai ) {
        GamePlay game = GamePlay.create( mSeed );
        ai.setGame( game, mSeed );
        while( !game.isGameOver() ) {
            if( !ai.performMove() ) {
                break;
            }
        }
        PipeFollower follower = new PipeFollower( game );
        follower.follow();
        StatsManager.getSession().addRun( ai.player(), follower.length(), mSeed );
        ++mSeed;
    }

    RunAIWorker( PipeAI ai, int runCount, long seed ) {
        mAI = ai;
        mRunCount = runCount;
        mCurrent = 0;
        mSeed = seed;
        setName(ai.player().name() + "Worker");
    }

    /** @return the number of runs completed so far. */
    public int getCurrent() {
        return mCurrent;
    }

    /**
     * Worker-thread entry point: shows the UI (on the EDT), then performs the
     * runs. Returns null if aborted by closing the dialog, otherwise the run count.
     */
    public Object construct() {
        javax.swing.SwingUtilities.invokeLater(
            new Runnable() {
                public void run() {
                    setupUI();
                }
            }
        );
        for( ; mCurrent < mRunCount; ++mCurrent ) {
            // NOTE(review): isVisible() is read off the EDT here; this matches the
            // original behavior but is technically outside Swing's threading rules.
            if( mUIShown && !mProgressFrame.isVisible() ) {
                return null;
            }
            runAI( mAI );
            if( mUIShown ) {
                // Swing components must be mutated on the EDT; construct() runs on
                // the worker thread, so hand the update off via invokeLater.
                final int completed = mCurrent;
                javax.swing.SwingUtilities.invokeLater(
                    new Runnable() {
                        public void run() {
                            mProgress.setValue( completed );
                        }
                    }
                );
            }
        }
        return mCurrent;
    }

    /** Called when construct() finishes: reports the outcome and tears down the dialog. */
    public void finished() {
        Integer result = (Integer) get();
        if( result != null ) {
            System.out.println( "Task finished:" + result.toString() );
        } else {
            System.out.println( "Task Aborted" );
        }
        mProgressFrame.setVisible( false );
        mProgressFrame.dispose();
    }
}
|
Remove unneeded file.
|
pipevo/src/pipes/gui/RunAIWorker.java
|
Remove unneeded file.
|
||
Java
|
mit
|
24e862dfdd41dd3620e3cebb53cb38cc900d126b
| 0
|
InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service
|
package org.innovateuk.ifs.application.transactional;
import org.apache.commons.lang3.tuple.Pair;
import org.innovateuk.ifs.application.domain.Application;
import org.innovateuk.ifs.application.domain.FormInputResponse;
import org.innovateuk.ifs.application.repository.ApplicationRepository;
import org.innovateuk.ifs.application.repository.FormInputResponseRepository;
import org.innovateuk.ifs.application.resource.FormInputResponseFileEntryId;
import org.innovateuk.ifs.application.resource.FormInputResponseFileEntryResource;
import org.innovateuk.ifs.commons.service.ServiceResult;
import org.innovateuk.ifs.competition.domain.Competition;
import org.innovateuk.ifs.competition.resource.CompetitionStatus;
import org.innovateuk.ifs.file.domain.FileEntry;
import org.innovateuk.ifs.file.resource.FileEntryResource;
import org.innovateuk.ifs.file.transactional.FileService;
import org.innovateuk.ifs.form.builder.QuestionBuilder;
import org.innovateuk.ifs.form.domain.FormInput;
import org.innovateuk.ifs.form.domain.Question;
import org.innovateuk.ifs.form.domain.Section;
import org.innovateuk.ifs.form.repository.FormInputRepository;
import org.innovateuk.ifs.form.resource.FormInputType;
import org.innovateuk.ifs.organisation.domain.Organisation;
import org.innovateuk.ifs.organisation.domain.OrganisationType;
import org.innovateuk.ifs.organisation.repository.OrganisationRepository;
import org.innovateuk.ifs.organisation.resource.OrganisationTypeEnum;
import org.innovateuk.ifs.user.domain.ProcessRole;
import org.innovateuk.ifs.user.repository.ProcessRoleRepository;
import org.innovateuk.ifs.user.resource.Role;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import java.io.File;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;
import static com.google.common.collect.Lists.newArrayList;
import static java.util.Collections.emptyList;
import static java.util.Optional.of;
import static org.innovateuk.ifs.application.builder.ApplicationBuilder.newApplication;
import static org.innovateuk.ifs.application.builder.FormInputResponseBuilder.newFormInputResponse;
import static org.innovateuk.ifs.base.amend.BaseBuilderAmendFunctions.id;
import static org.innovateuk.ifs.commons.error.CommonErrors.internalServerErrorError;
import static org.innovateuk.ifs.commons.error.CommonErrors.notFoundError;
import static org.innovateuk.ifs.commons.error.CommonFailureKeys.FILES_ALREADY_UPLOADED;
import static org.innovateuk.ifs.commons.service.ServiceResult.serviceFailure;
import static org.innovateuk.ifs.commons.service.ServiceResult.serviceSuccess;
import static org.innovateuk.ifs.competition.builder.CompetitionBuilder.newCompetition;
import static org.innovateuk.ifs.file.builder.FileEntryBuilder.newFileEntry;
import static org.innovateuk.ifs.file.builder.FileEntryResourceBuilder.newFileEntryResource;
import static org.innovateuk.ifs.form.builder.FormInputBuilder.newFormInput;
import static org.innovateuk.ifs.form.builder.QuestionBuilder.newQuestion;
import static org.innovateuk.ifs.form.builder.SectionBuilder.newSection;
import static org.innovateuk.ifs.organisation.builder.OrganisationBuilder.newOrganisation;
import static org.innovateuk.ifs.organisation.builder.OrganisationTypeBuilder.newOrganisationType;
import static org.innovateuk.ifs.user.builder.ProcessRoleBuilder.newProcessRole;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import static org.mockito.MockitoAnnotations.initMocks;
public class ApplicationFormInputUploadServiceImplTest {
@Mock
private FileService fileServiceMock;
@Mock
private FormInputRepository formInputRepositoryMock;
@Mock
private FormInputResponseRepository formInputResponseRepositoryMock;
@Mock
private ProcessRoleRepository processRoleRepositoryMock;
@Mock
private ApplicationRepository applicationRepositoryMock;
@Mock
private OrganisationRepository organisationRepositoryMock;
@InjectMocks
private ApplicationFormInputUploadService service = new ApplicationFormInputUploadServiceImpl();
private FormInput formInput;
private FormInputType formInputType;
private Question question;
private FileEntryResource fileEntryResource;
private FormInputResponseFileEntryResource formInputResponseFileEntryResource;
private List<FileEntry> existingFileEntry;
private FormInputResponse existingFormInputResponse;
private List<FormInputResponse> existingFormInputResponses;
private FormInputResponse unlinkedFormInputFileEntry;
private Long organisationId = 456L;
private Question multiAnswerQuestion;
private Question leadAnswerQuestion;
private OrganisationType orgType;
private Organisation org1;
private Organisation org2;
private Organisation org3;
private ProcessRole[] roles;
private Section section;
private Competition comp;
private Application app;
private Application openApplication;
@Before
public void setUp() throws Exception {
    // Attach the @Mock/@InjectMocks fields declared on this test class.
    initMocks(this);
    // --- File-upload form input fixture: a FILEUPLOAD input attached to a question ---
    question = QuestionBuilder.newQuestion().build();
    formInputType = FormInputType.FILEUPLOAD;
    formInput = newFormInput().withType(formInputType).withWordCount(2).build();
    formInput.setId(123L);
    formInput.setQuestion(question);
    question.setFormInputs(newArrayList(formInput));
    // --- File entry fixtures: an uploaded file (id 999) and the response that links to it ---
    fileEntryResource = newFileEntryResource().with(id(999L)).build();
    formInputResponseFileEntryResource = new FormInputResponseFileEntryResource(fileEntryResource, 123L, 456L, 789L, 999L);
    existingFileEntry = newArrayList(newFileEntry().with(id(999L)).build());
    existingFormInputResponse = newFormInputResponse().withFileEntries(existingFileEntry).withFormInputs(formInput).build();
    existingFormInputResponses = newArrayList(existingFormInputResponse);
    // Same response id as above but with its file entries emptied (post-delete state).
    unlinkedFormInputFileEntry = newFormInputResponse().with(id(existingFormInputResponse.getId())).withFileEntries(emptyList()).build();
    // --- Default application lookup: any id resolves to an application in an OPEN competition ---
    final Competition openCompetition = newCompetition().withCompetitionStatus(CompetitionStatus.OPEN).build();
    openApplication = newApplication().withCompetition(openCompetition).build();
    when(applicationRepositoryMock.findById(anyLong())).thenReturn(Optional.of(openApplication));
    // --- Application fixture with three organisations and a lead/applicant/collaborator role each ---
    multiAnswerQuestion = newQuestion().withMarksAsCompleteEnabled(Boolean.TRUE).withMultipleStatuses(Boolean.TRUE).withId(123L).build();
    leadAnswerQuestion = newQuestion().withMarksAsCompleteEnabled(Boolean.TRUE).withMultipleStatuses(Boolean.FALSE).withId(321L).build();
    orgType = newOrganisationType().withOrganisationType(OrganisationTypeEnum.BUSINESS).build();
    org1 = newOrganisation().withOrganisationType(orgType).withId(234L).build();
    org2 = newOrganisation().withId(345L).build();
    org3 = newOrganisation().withId(456L).build();
    roles = newProcessRole().withRole(Role.LEADAPPLICANT, Role.APPLICANT, Role.COLLABORATOR).withOrganisationId(234L, 345L, 456L).build(3).toArray(new ProcessRole[0]);
    section = newSection().withQuestions(Arrays.asList(multiAnswerQuestion, leadAnswerQuestion)).build();
    comp = newCompetition().withSections(Arrays.asList(section)).withMaxResearchRatio(30).build();
    app = newApplication().withCompetition(comp).withProcessRoles(roles).build();
    // More specific stubbings (registered after the anyLong() default above).
    when(applicationRepositoryMock.findById(app.getId())).thenReturn(Optional.of(app));
    when(organisationRepositoryMock.findById(234L)).thenReturn(Optional.of(org1));
    when(organisationRepositoryMock.findById(345L)).thenReturn(Optional.of(org2));
    when(organisationRepositoryMock.findById(456L)).thenReturn(Optional.of(org3));
}
@Test
public void createFormInputResponseFileUpload() {
    // given: a new file entry to upload against form input 123 / application 456 / process role 789
    FileEntryResource fileEntryResource = newFileEntryResource().build();
    FormInputResponseFileEntryResource fileEntry = new FormInputResponseFileEntryResource(fileEntryResource, 123L, 456L, 789L, 999L);
    Supplier<InputStream> inputStreamSupplier = () -> null;
    // file service will successfully persist the file as entry 999
    File fileFound = mock(File.class);
    FileEntry newFileEntry = newFileEntry().with(id(999L)).build();
    when(fileServiceMock.createFile(fileEntryResource, inputStreamSupplier)).
            thenReturn(serviceSuccess(Pair.of(fileFound, newFileEntry)));
    // no pre-existing response for this (application, updater, form input) triple
    when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(null);
    when(processRoleRepositoryMock.findById(789L)).thenReturn(Optional.of(newProcessRole().build()));
    when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(newFormInput().withQuestion(newQuestion().withMultipleStatuses(true).build()).build()));
    when(applicationRepositoryMock.findById(456L)).thenReturn(Optional.of(openApplication));
    // when: the upload is performed
    ServiceResult<FormInputResponseFileEntryResource> result =
            service.uploadResponse(fileEntry, inputStreamSupplier);
    // then: the upload succeeds and the returned resource carries the persisted file entry id
    assertTrue(result.isSuccess());
    FormInputResponseFileEntryResource resultParts = result.getSuccess();
    assertEquals(Long.valueOf(999), resultParts.getFileEntryResource().getId());
    verify(formInputResponseRepositoryMock).findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L);
}
@Test
// Uploading a further file when the response already links one file entry should
// succeed for a FILEUPLOAD input (the maximum is evidently more than one here).
public void createFormInputResponseFileUploadMultipleFiles() {
FileEntryResource fileEntryResource = newFileEntryResource().with(id(987L)).build();
FormInputResponseFileEntryResource fileEntry = new FormInputResponseFileEntryResource(fileEntryResource, 123L, 456L, 789L, 999L);
Supplier<InputStream> inputStreamSupplier = () -> null;
// An existing response that already references one uploaded file (id 987)
FileEntry alreadyExistingFileEntry = newFileEntry().with(id(987L)).build();
FormInputResponse existingFormInputResponseWithLinkedFileEntry = newFormInputResponse().withFileEntries(newArrayList(alreadyExistingFileEntry)).withFormInputs(formInput).build();
File fileFound = mock(File.class);
// Local question/form-input pair configured as a multi-status FILEUPLOAD input with id 123
Question question = QuestionBuilder.newQuestion().build();
question.setMultipleStatuses(true);
FormInput formInputLocal = newFormInput().withType(FormInputType.FILEUPLOAD).build();
formInputLocal.setId(123L);
formInputLocal.setQuestion(question);
question.setFormInputs(newArrayList(formInputLocal));
// Stub file service and repositories so every lookup the service performs succeeds
when(fileServiceMock.deleteFileIgnoreNotFound(alreadyExistingFileEntry.getId())).thenReturn(serviceSuccess(alreadyExistingFileEntry));
when(fileServiceMock.createFile(fileEntryResource, inputStreamSupplier)).thenReturn(serviceSuccess(Pair.of(fileFound, alreadyExistingFileEntry)));
when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(existingFormInputResponseWithLinkedFileEntry);
when(formInputResponseRepositoryMock.save(existingFormInputResponseWithLinkedFileEntry)).thenReturn(existingFormInputResponseWithLinkedFileEntry);
when(processRoleRepositoryMock.findById(789L)).thenReturn(Optional.of(newProcessRole().build()));
when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(formInputLocal));
when(applicationRepositoryMock.findById(456L)).thenReturn(Optional.of(openApplication));
when(fileServiceMock.getFileByFileEntryId(987L)).thenReturn(serviceSuccess(inputStreamSupplier));
ServiceResult<FormInputResponseFileEntryResource> result =
service.uploadResponse(fileEntry, inputStreamSupplier);
// Then the second upload succeeds and the existing response was looked up exactly once
assertTrue(result.isSuccess());
verify(formInputResponseRepositoryMock, times(1)).findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L);
}
@Test
// When the response already links three file entries, a further upload to a
// FILEUPLOAD input is rejected with FILES_ALREADY_UPLOADED.
public void createFormInputResponseFileUploadButMaximumFilesAlready() {
FileEntryResource fileEntryResource = newFileEntryResource().with(id(987L)).build();
FormInputResponseFileEntryResource fileEntry = new FormInputResponseFileEntryResource(fileEntryResource, 123L, 456L, 789L, 999L);
Supplier<InputStream> inputStreamSupplier = () -> null;
// Existing response already holds THREE file entries - at/over the allowed maximum
FileEntry alreadyExistingFileEntry = newFileEntry().with(id(987L)).build();
FormInputResponse existingFormInputResponseWithLinkedFileEntry = newFormInputResponse().withFileEntries(newArrayList(alreadyExistingFileEntry, alreadyExistingFileEntry, alreadyExistingFileEntry)).withFormInputs(formInput).build();
File fileFound = mock(File.class);
// Local question/form-input pair: multi-status FILEUPLOAD input with id 123
Question question = QuestionBuilder.newQuestion().build();
question.setMultipleStatuses(true);
FormInput formInputLocal = newFormInput().withType(FormInputType.FILEUPLOAD).build();
formInputLocal.setId(123L);
formInputLocal.setQuestion(question);
question.setFormInputs(newArrayList(formInputLocal));
// All collaborator calls are stubbed to succeed - only the file-count check should fail
when(fileServiceMock.deleteFileIgnoreNotFound(alreadyExistingFileEntry.getId())).thenReturn(serviceSuccess(alreadyExistingFileEntry));
when(fileServiceMock.createFile(fileEntryResource, inputStreamSupplier)).thenReturn(serviceSuccess(Pair.of(fileFound, alreadyExistingFileEntry)));
when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(existingFormInputResponseWithLinkedFileEntry);
when(formInputResponseRepositoryMock.save(existingFormInputResponseWithLinkedFileEntry)).thenReturn(existingFormInputResponseWithLinkedFileEntry);
when(processRoleRepositoryMock.findById(789L)).thenReturn(Optional.of(newProcessRole().build()));
when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(formInputLocal));
when(applicationRepositoryMock.findById(456L)).thenReturn(Optional.of(openApplication));
when(fileServiceMock.getFileByFileEntryId(987L)).thenReturn(serviceSuccess(inputStreamSupplier));
ServiceResult<FormInputResponseFileEntryResource> result =
service.uploadResponse(fileEntry, inputStreamSupplier);
// Then the upload is rejected because the maximum number of files is already present
assertTrue(result.isFailure());
assertTrue(result.getFailure().is(FILES_ALREADY_UPLOADED));
verify(formInputResponseRepositoryMock, times(1)).findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L);
}
@Test
// Same maximum-files scenario as above but for a TEMPLATE_DOCUMENT input -
// also expected to fail with FILES_ALREADY_UPLOADED.
public void createFormInputResponseFileUploadTemplate() {
FileEntryResource fileEntryResource = newFileEntryResource().with(id(987L)).build();
FormInputResponseFileEntryResource fileEntry = new FormInputResponseFileEntryResource(fileEntryResource, 123L, 456L, 789L, 999L);
Supplier<InputStream> inputStreamSupplier = () -> null;
File fileFound = mock(File.class);
// Local question/form-input pair: multi-status TEMPLATE_DOCUMENT input with id 123
Question question = QuestionBuilder.newQuestion().build();
question.setMultipleStatuses(true);
FormInput formInputLocal = newFormInput().withType(FormInputType.TEMPLATE_DOCUMENT).build();
formInputLocal.setId(123L);
formInputLocal.setQuestion(question);
question.setFormInputs(newArrayList(formInputLocal));
// Existing response already holds three file entries
FileEntry alreadyExistingFileEntry = newFileEntry().with(id(987L)).build();
FormInputResponse existingFormInputResponseWithLinkedFileEntry = newFormInputResponse().withFileEntries(newArrayList(alreadyExistingFileEntry, alreadyExistingFileEntry, alreadyExistingFileEntry)).withFormInputs(formInputLocal).build();
// Collaborators stubbed to succeed so only the file-count rule can fail
when(fileServiceMock.deleteFileIgnoreNotFound(alreadyExistingFileEntry.getId())).thenReturn(serviceSuccess(alreadyExistingFileEntry));
when(fileServiceMock.createFile(fileEntryResource, inputStreamSupplier)).thenReturn(serviceSuccess(Pair.of(fileFound, alreadyExistingFileEntry)));
when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(existingFormInputResponseWithLinkedFileEntry);
when(formInputResponseRepositoryMock.save(existingFormInputResponseWithLinkedFileEntry)).thenReturn(existingFormInputResponseWithLinkedFileEntry);
when(processRoleRepositoryMock.findById(789L)).thenReturn(Optional.of(newProcessRole().build()));
when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(formInputLocal));
when(applicationRepositoryMock.findById(456L)).thenReturn(Optional.of(openApplication));
when(fileServiceMock.getFileByFileEntryId(987L)).thenReturn(serviceSuccess(inputStreamSupplier));
ServiceResult<FormInputResponseFileEntryResource> result =
service.uploadResponse(fileEntry, inputStreamSupplier);
assertTrue(result.isFailure());
assertTrue(result.getFailure().is(FILES_ALREADY_UPLOADED));
verify(formInputResponseRepositoryMock, times(1)).findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L);
}
@Test
public void createFormInputResponseFileUploadButFileServiceCallFails() {
    // Given the underlying file service rejects creation of the new file
    FileEntryResource uploadedResource = newFileEntryResource().build();
    FormInputResponseFileEntryResource compoundEntry =
            new FormInputResponseFileEntryResource(uploadedResource, 123L, 456L, 789L, 999L);
    Supplier<InputStream> contentSupplier = () -> null;

    when(fileServiceMock.createFile(uploadedResource, contentSupplier))
            .thenReturn(serviceFailure(internalServerErrorError()));

    // When / Then - the file-service failure propagates out of uploadResponse unchanged
    ServiceResult<FormInputResponseFileEntryResource> result = service.uploadResponse(compoundEntry, contentSupplier);
    assertTrue(result.isFailure());
    assertTrue(result.getFailure().is(internalServerErrorError()));
}
@Test
// For a single-status question (multipleStatuses = false), an upload should attach
// the new file entry to the already-existing shared response rather than creating one.
public void createFormInputResponseFileUploadWithAlreadyExistingFormInputResponse() {
FileEntryResource fileEntryResource = newFileEntryResource().build();
FormInputResponseFileEntryResource fileEntry = new FormInputResponseFileEntryResource(fileEntryResource, 123L, 456L, 789L, 987L);
Supplier<InputStream> inputStreamSupplier = () -> null;
File fileFound = mock(File.class);
FileEntry newFileEntry = newFileEntry().with(id(999L)).build();
when(fileServiceMock.createFile(fileEntryResource, inputStreamSupplier)).
thenReturn(serviceSuccess(Pair.of(fileFound, newFileEntry)));
// An existing response with no linked files yet; looked up by (application, form input)
// because the question is not multi-status
FormInputResponse existingFormInputResponse = newFormInputResponse().withFormInputs(formInput).build();
when(formInputResponseRepositoryMock.findOneByApplicationIdAndFormInputId(456L, 123L)).thenReturn(of(existingFormInputResponse));
when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(newFormInput().withQuestion(newQuestion().withMultipleStatuses(false).build()).build()));
when(processRoleRepositoryMock.findById(789L)).thenReturn(Optional.of(newProcessRole().build()));
ServiceResult<FormInputResponseFileEntryResource> result =
service.uploadResponse(fileEntry, inputStreamSupplier);
assertTrue(result.isSuccess());
FormInputResponseFileEntryResource resultParts = result.getSuccess();
assertEquals(Long.valueOf(999), resultParts.getFileEntryResource().getId());
// The freshly created file entry was linked onto the pre-existing response
assertEquals(newFileEntry, existingFormInputResponse.getFileEntries().get(0));
}
@Test
public void createFormInputResponseFileUploadButProcessRoleNotFound() {
    // Given the file can be created but the uploading process role does not exist
    FileEntryResource uploadedResource = newFileEntryResource().build();
    FormInputResponseFileEntryResource compoundEntry =
            new FormInputResponseFileEntryResource(uploadedResource, 123L, 456L, 789L, 999L);
    Supplier<InputStream> contentSupplier = () -> null;

    File createdFile = mock(File.class);
    FileEntry persistedFileEntry = newFileEntry().with(id(999L)).build();

    when(fileServiceMock.createFile(uploadedResource, contentSupplier)).thenReturn(serviceSuccess(Pair.of(createdFile, persistedFileEntry)));
    when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(null);
    when(processRoleRepositoryMock.findById(789L)).thenReturn(Optional.empty());

    // When / Then - a not-found error for ProcessRole 789 is reported
    ServiceResult<FormInputResponseFileEntryResource> result = service.uploadResponse(compoundEntry, contentSupplier);
    assertTrue(result.isFailure());
    assertTrue(result.getFailure().is(notFoundError(ProcessRole.class, 789L)));
}
@Test
public void createFormInputResponseFileUploadButFormInputNotFound() {
    // Given the file can be created and the process role resolves,
    // but the target form input does not exist
    FileEntryResource uploadedResource = newFileEntryResource().build();
    FormInputResponseFileEntryResource compoundEntry =
            new FormInputResponseFileEntryResource(uploadedResource, 123L, 456L, 789L, 999L);
    Supplier<InputStream> contentSupplier = () -> null;

    File createdFile = mock(File.class);
    FileEntry persistedFileEntry = newFileEntry().with(id(999L)).build();

    when(fileServiceMock.createFile(uploadedResource, contentSupplier)).thenReturn(serviceSuccess(Pair.of(createdFile, persistedFileEntry)));
    when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(null);
    when(processRoleRepositoryMock.findById(789L)).thenReturn(Optional.of(newProcessRole().build()));
    when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.empty());

    // When / Then - a not-found error for FormInput 123 is reported
    ServiceResult<FormInputResponseFileEntryResource> result = service.uploadResponse(compoundEntry, contentSupplier);
    assertTrue(result.isFailure());
    assertTrue(result.getFailure().is(notFoundError(FormInput.class, 123L)));
}
@Test
public void createFormInputResponseFileUploadButApplicationNotFound() {
    // Given file creation, process role and form input all resolve,
    // but the target application does not exist
    FileEntryResource uploadedResource = newFileEntryResource().build();
    FormInputResponseFileEntryResource compoundEntry =
            new FormInputResponseFileEntryResource(uploadedResource, 123L, 456L, 789L, 999L);
    Supplier<InputStream> contentSupplier = () -> null;

    File createdFile = mock(File.class);
    FileEntry persistedFileEntry = newFileEntry().with(id(999L)).build();

    when(fileServiceMock.createFile(uploadedResource, contentSupplier)).thenReturn(serviceSuccess(Pair.of(createdFile, persistedFileEntry)));
    when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(null);
    when(processRoleRepositoryMock.findById(789L)).thenReturn(Optional.of(newProcessRole().build()));
    when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(newFormInput().build()));
    when(applicationRepositoryMock.findById(456L)).thenReturn(Optional.empty());

    // When / Then - a not-found error for Application 456 is reported
    ServiceResult<FormInputResponseFileEntryResource> result = service.uploadResponse(compoundEntry, contentSupplier);
    assertTrue(result.isFailure());
    assertTrue(result.getFailure().is(notFoundError(Application.class, 456L)));
}
@Test
// Happy-path deletion: the linked file entry is removed from the response,
// the response is saved, and the unlinked response is returned.
public void deleteFormInputResponseFileUpload() {
Supplier<InputStream> inputStreamSupplier = () -> null;
// Shared fixture state from setUp(): existingFormInputResponse links existingFileEntry (id 999)
when(formInputResponseRepositoryMock.findOneByApplicationIdAndFormInputId(456L, 123L)).thenReturn(of(existingFormInputResponse));
when(fileServiceMock.getFileByFileEntryId(existingFileEntry.get(0).getId())).thenReturn(serviceSuccess(inputStreamSupplier));
when(formInputResponseRepositoryMock.save(existingFormInputResponse)).thenReturn(unlinkedFormInputFileEntry);
when(fileServiceMock.deleteFileIgnoreNotFound(999L)).thenReturn(serviceSuccess(existingFileEntry.get(0)));
when(formInputRepositoryMock.findById(formInputResponseFileEntryResource.getCompoundId().getFormInputId())).thenReturn
(Optional.of(newFormInput().withQuestion(question).build()));
ServiceResult<FormInputResponse> result =
service.deleteFormInputResponseFileUpload(formInputResponseFileEntryResource.getCompoundId());
assertTrue(result.isSuccess());
assertEquals(unlinkedFormInputFileEntry, result.getSuccess());
// The file entry was unlinked from the in-memory response as a side effect
assertTrue(existingFormInputResponse.getFileEntries().isEmpty());
// The service looks the response up twice (once to fetch, once to unlink) then saves it
verify(formInputResponseRepositoryMock, times(2)).findOneByApplicationIdAndFormInputId(456L, 123L);
verify(formInputResponseRepositoryMock).save(existingFormInputResponse);
}
@Test
// When the file service fails to delete the underlying file,
// the failure propagates out of deleteFormInputResponseFileUpload.
public void deleteFormInputResponseFileUploadButFileServiceCallFails() {
Supplier<InputStream> inputStreamSupplier = () -> null;
when(formInputResponseRepositoryMock.findOneByApplicationIdAndFormInputId(456L, 123L)).thenReturn(of(existingFormInputResponse));
when(fileServiceMock.getFileByFileEntryId(existingFileEntry.get(0).getId())).thenReturn(serviceSuccess(inputStreamSupplier));
// The actual file deletion is stubbed to fail with an internal server error
when(fileServiceMock.deleteFileIgnoreNotFound(999L)).thenReturn(serviceFailure(internalServerErrorError()));
when(formInputRepositoryMock.findById(formInputResponseFileEntryResource.getCompoundId().getFormInputId())).thenReturn(
Optional.of(newFormInput().withQuestion(question).withType(FormInputType.FILEUPLOAD).build()));
// Deletion is only permitted while the competition is open, so resolve an OPEN application
when(applicationRepositoryMock.findById(formInputResponseFileEntryResource.getCompoundId().getApplicationId())).thenReturn(
Optional.of(newApplication().withCompetition(newCompetition().withCompetitionStatus(CompetitionStatus.OPEN).build()).build()));
ServiceResult<FormInputResponse> result =
service.deleteFormInputResponseFileUpload(formInputResponseFileEntryResource.getCompoundId());
assertTrue(result.isFailure());
assertTrue(result.getFailure().is(internalServerErrorError()));
}
@Test
public void deleteFormInputResponseFileUploadButUnableToFindFormInputResponse() {
    // Given a response exists for the application, but the form input itself cannot be resolved
    FormInputResponseFileEntryId compoundId = formInputResponseFileEntryResource.getCompoundId();
    when(formInputResponseRepositoryMock.findByApplicationIdAndFormInputId(456L, 123L))
            .thenReturn(newArrayList(existingFormInputResponse));
    when(formInputRepositoryMock.findById(compoundId.getFormInputId())).thenReturn(Optional.empty());

    // When / Then - deletion fails with a FormInput not-found error
    ServiceResult<FormInputResponse> result = service.deleteFormInputResponseFileUpload(compoundId);
    assertTrue(result.isFailure());
    assertTrue(result.getFailure().is(notFoundError(FormInput.class, 123L)));
}
@Test
// When the underlying file entry cannot be found by the file service,
// deletion fails with that not-found error.
public void deleteFormInputResponseFileUploadButFileEntryNotFound() {
FileEntryResource fileEntryResource = newFileEntryResource().with(id(999L)).build();
FormInputResponseFileEntryResource fileEntry = new FormInputResponseFileEntryResource(fileEntryResource, 123L, 456L, 789L, 999L);
// A response that links file entry 999
FileEntry existingFileEntry = newFileEntry().with(id(999L)).build();
FormInputResponse existingFormInputResponse = newFormInputResponse().withFileEntries(newArrayList(existingFileEntry)).build();
// Local question/form-input pair: multi-status FILEUPLOAD input with id 123
Question question = QuestionBuilder.newQuestion().build();
question.setMultipleStatuses(true);
FormInput formInputLocal = newFormInput().withType(FormInputType.FILEUPLOAD).build();
formInputLocal.setId(123L);
formInputLocal.setQuestion(question);
question.setFormInputs(newArrayList(formInputLocal));
// Both file-service calls report that the file/entry is missing
when(fileServiceMock.deleteFileIgnoreNotFound(999L)).thenReturn(serviceFailure(notFoundError(FileEntry.class, 999L)));
when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(formInputLocal));
when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(existingFormInputResponse);
when(fileServiceMock.getFileByFileEntryId(existingFileEntry.getId())).thenReturn(serviceFailure(notFoundError(File.class, 999L)));
ServiceResult<FormInputResponse> result = service.deleteFormInputResponseFileUpload(fileEntry.getCompoundId());
assertTrue(result.isFailure());
assertTrue(result.getFailure().is(notFoundError(FileEntry.class, 999L)));
}
@Test
// Happy-path retrieval: returns the contents supplier plus a resource
// describing the file entry and its compound id.
public void getFormInputResponseFileUpload() {
FileEntry fileEntry = newFileEntry().with(id(999L)).build();
FormInputResponse formInputResponse = newFormInputResponse().withFileEntries(newArrayList(fileEntry)).build();
Supplier<InputStream> inputStreamSupplier = () -> null;
// Local question/form-input pair: multi-status FILEUPLOAD input with id 123
Question question = QuestionBuilder.newQuestion().build();
question.setMultipleStatuses(true);
FormInput formInputLocal = newFormInput().withType(FormInputType.FILEUPLOAD).build();
formInputLocal.setId(123L);
formInputLocal.setQuestion(question);
question.setFormInputs(newArrayList(formInputLocal));
when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(formInputLocal));
when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(formInputResponse);
when(fileServiceMock.getFileByFileEntryId(fileEntry.getId())).thenReturn(serviceSuccess(inputStreamSupplier));
ServiceResult<FormInputResponseFileAndContents> result =
service.getFormInputResponseFileUpload(new FormInputResponseFileEntryId(123L, 456L, 789L, 999L));
assertTrue(result.isSuccess());
// The exact supplier stubbed on the file service is handed back
assertEquals(inputStreamSupplier, result.getSuccess().getContentsSupplier());
FileEntryResource fileEntryResource = newFileEntryResource().with(id(999L)).build();
FormInputResponseFileEntryResource formInputResponseFile = result.getSuccess().getFormInputResponseFileEntry();
// The returned resource echoes the file entry id and the compound id components
assertEquals(fileEntryResource.getId(), formInputResponseFile.getFileEntryResource().getId());
assertEquals(123L, formInputResponseFile.getCompoundId().getFormInputId());
assertEquals(456L, formInputResponseFile.getCompoundId().getApplicationId());
assertEquals(789L, formInputResponseFile.getCompoundId().getProcessRoleId());
}
@Test
public void getFormInputResponseFileUploadButFileServiceCallFails() {
    // Given a response with a linked file entry whose contents cannot be retrieved
    FileEntry linkedFileEntry = newFileEntry().withId(999L).build();
    FormInputResponse response = newFormInputResponse().withFileEntries(newArrayList(linkedFileEntry)).withFormInputs(formInput).build();

    // A multi-status FILEUPLOAD form input with id 123 for the lookup to resolve
    Question fileQuestion = QuestionBuilder.newQuestion().build();
    fileQuestion.setMultipleStatuses(true);
    FormInput uploadInput = newFormInput().withType(FormInputType.FILEUPLOAD).build();
    uploadInput.setId(123L);
    uploadInput.setQuestion(fileQuestion);
    fileQuestion.setFormInputs(newArrayList(uploadInput));

    when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(uploadInput));
    when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(response);
    when(fileServiceMock.getFileByFileEntryId(linkedFileEntry.getId())).thenReturn(serviceFailure(internalServerErrorError()));

    // When / Then - the file-service failure propagates
    ServiceResult<FormInputResponseFileAndContents> result =
            service.getFormInputResponseFileUpload(new FormInputResponseFileEntryId(123L, 456L, 789L, 999L));
    assertTrue(result.isFailure());
    assertTrue(result.getFailure().is(internalServerErrorError()));
}
@Test
public void getFormInputResponseFileUploadButUnableToFindFormInputResponse() {
    // Given the form input exists but no response has been recorded against it
    Question fileQuestion = QuestionBuilder.newQuestion().build();
    fileQuestion.setMultipleStatuses(true);
    FormInput uploadInput = newFormInput().withType(FormInputType.FILEUPLOAD).build();
    uploadInput.setId(123L);
    uploadInput.setQuestion(fileQuestion);
    fileQuestion.setFormInputs(newArrayList(uploadInput));

    when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(uploadInput));
    when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(null);

    // When / Then - a FormInputResponse not-found error keyed on (application, role, input) is returned
    ServiceResult<FormInputResponseFileAndContents> result =
            service.getFormInputResponseFileUpload(new FormInputResponseFileEntryId(123L, 456L, 789L, 999L));
    assertTrue(result.isFailure());
    assertTrue(result.getFailure().is(notFoundError(FormInputResponse.class, 456L, 789L, 123L)));
}
}
|
ifs-data-layer/ifs-data-service/src/test/java/org/innovateuk/ifs/application/transactional/ApplicationFormInputUploadServiceImplTest.java
|
package org.innovateuk.ifs.application.transactional;
import org.apache.commons.lang3.tuple.Pair;
import org.innovateuk.ifs.application.domain.Application;
import org.innovateuk.ifs.application.domain.FormInputResponse;
import org.innovateuk.ifs.application.repository.ApplicationRepository;
import org.innovateuk.ifs.application.repository.FormInputResponseRepository;
import org.innovateuk.ifs.application.resource.FormInputResponseFileEntryId;
import org.innovateuk.ifs.application.resource.FormInputResponseFileEntryResource;
import org.innovateuk.ifs.commons.service.ServiceResult;
import org.innovateuk.ifs.competition.domain.Competition;
import org.innovateuk.ifs.competition.resource.CompetitionStatus;
import org.innovateuk.ifs.file.domain.FileEntry;
import org.innovateuk.ifs.file.resource.FileEntryResource;
import org.innovateuk.ifs.file.transactional.FileService;
import org.innovateuk.ifs.form.builder.QuestionBuilder;
import org.innovateuk.ifs.form.domain.FormInput;
import org.innovateuk.ifs.form.domain.Question;
import org.innovateuk.ifs.form.domain.Section;
import org.innovateuk.ifs.form.repository.FormInputRepository;
import org.innovateuk.ifs.form.resource.FormInputType;
import org.innovateuk.ifs.organisation.domain.Organisation;
import org.innovateuk.ifs.organisation.domain.OrganisationType;
import org.innovateuk.ifs.organisation.repository.OrganisationRepository;
import org.innovateuk.ifs.organisation.resource.OrganisationTypeEnum;
import org.innovateuk.ifs.user.domain.ProcessRole;
import org.innovateuk.ifs.user.repository.ProcessRoleRepository;
import org.innovateuk.ifs.user.resource.Role;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import java.io.File;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;
import static com.google.common.collect.Lists.newArrayList;
import static java.util.Collections.emptyList;
import static java.util.Optional.of;
import static org.innovateuk.ifs.application.builder.ApplicationBuilder.newApplication;
import static org.innovateuk.ifs.application.builder.FormInputResponseBuilder.newFormInputResponse;
import static org.innovateuk.ifs.base.amend.BaseBuilderAmendFunctions.id;
import static org.innovateuk.ifs.commons.error.CommonErrors.internalServerErrorError;
import static org.innovateuk.ifs.commons.error.CommonErrors.notFoundError;
import static org.innovateuk.ifs.commons.error.CommonFailureKeys.FILES_ALREADY_UPLOADED;
import static org.innovateuk.ifs.commons.service.ServiceResult.serviceFailure;
import static org.innovateuk.ifs.commons.service.ServiceResult.serviceSuccess;
import static org.innovateuk.ifs.competition.builder.CompetitionBuilder.newCompetition;
import static org.innovateuk.ifs.file.builder.FileEntryBuilder.newFileEntry;
import static org.innovateuk.ifs.file.builder.FileEntryResourceBuilder.newFileEntryResource;
import static org.innovateuk.ifs.form.builder.FormInputBuilder.newFormInput;
import static org.innovateuk.ifs.form.builder.QuestionBuilder.newQuestion;
import static org.innovateuk.ifs.form.builder.SectionBuilder.newSection;
import static org.innovateuk.ifs.organisation.builder.OrganisationBuilder.newOrganisation;
import static org.innovateuk.ifs.organisation.builder.OrganisationTypeBuilder.newOrganisationType;
import static org.innovateuk.ifs.user.builder.ProcessRoleBuilder.newProcessRole;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import static org.mockito.MockitoAnnotations.initMocks;
// Unit tests for ApplicationFormInputUploadServiceImpl, covering upload, retrieval
// and deletion of files attached to form input responses. All collaborators are
// Mockito mocks; shared fixture state is prepared in setUp().
public class ApplicationFormInputUploadServiceImplTest {
// Mocked collaborators injected into the service under test
@Mock
private FileService fileServiceMock;
@Mock
private FormInputRepository formInputRepositoryMock;
@Mock
private FormInputResponseRepository formInputResponseRepositoryMock;
@Mock
private ProcessRoleRepository processRoleRepositoryMock;
@Mock
private ApplicationRepository applicationRepositoryMock;
@Mock
private OrganisationRepository organisationRepositoryMock;
// Service under test, with the mocks above injected by Mockito
@InjectMocks
private ApplicationFormInputUploadService service = new ApplicationFormInputUploadServiceImpl();
// Shared fixture state populated in setUp() and reused across tests
private FormInput formInput;
private FormInputType formInputType;
private Question question;
private FileEntryResource fileEntryResource;
private FormInputResponseFileEntryResource formInputResponseFileEntryResource;
private List<FileEntry> existingFileEntry;
private FormInputResponse existingFormInputResponse;
private List<FormInputResponse> existingFormInputResponses;
private FormInputResponse unlinkedFormInputFileEntry;
private Long organisationId = 456L;
private Question multiAnswerQuestion;
private Question leadAnswerQuestion;
private OrganisationType orgType;
private Organisation org1;
private Organisation org2;
private Organisation org3;
private ProcessRole[] roles;
private Section section;
private Competition comp;
private Application app;
// An application attached to an OPEN competition - the default for upload tests
private Application openApplication;
@Before
// Builds the shared fixture: a FILEUPLOAD form input (id 123) with an existing
// response linking file entry 999, plus an application/competition graph used
// by the progress-related tests. Also installs default repository stubs.
public void setUp() throws Exception {
initMocks(this);
// FILEUPLOAD form input with id 123, attached to a question
question = QuestionBuilder.newQuestion().build();
formInputType = FormInputType.FILEUPLOAD;
formInput = newFormInput().withType(formInputType).withWordCount(2).build();
formInput.setId(123L);
formInput.setQuestion(question);
question.setFormInputs(newArrayList(formInput));
// Compound file entry resource for (form input 123, application 456, process role 789, file 999)
fileEntryResource = newFileEntryResource().with(id(999L)).build();
formInputResponseFileEntryResource = new FormInputResponseFileEntryResource(fileEntryResource, 123L, 456L, 789L, 999L);
// Existing response linking file entry 999, plus the same response after unlinking
existingFileEntry = newArrayList(newFileEntry().with(id(999L)).build())
existingFormInputResponse = newFormInputResponse().withFileEntries(existingFileEntry).withFormInputs(formInput).build();
existingFormInputResponses = newArrayList(existingFormInputResponse);
unlinkedFormInputFileEntry = newFormInputResponse().with(id(existingFormInputResponse.getId())).withFileEntries(emptyList()).build();
// By default any application lookup resolves to an application in an OPEN competition
final Competition openCompetition = newCompetition().withCompetitionStatus(CompetitionStatus.OPEN).build();
openApplication = newApplication().withCompetition(openCompetition).build();
when(applicationRepositoryMock.findById(anyLong())).thenReturn(Optional.of(openApplication));
// Questions, organisations and process roles making up a small competition graph
multiAnswerQuestion = newQuestion().withMarksAsCompleteEnabled(Boolean.TRUE).withMultipleStatuses(Boolean.TRUE).withId(123L).build();
leadAnswerQuestion = newQuestion().withMarksAsCompleteEnabled(Boolean.TRUE).withMultipleStatuses(Boolean.FALSE).withId(321L).build();
orgType = newOrganisationType().withOrganisationType(OrganisationTypeEnum.BUSINESS).build();
org1 = newOrganisation().withOrganisationType(orgType).withId(234L).build();
org2 = newOrganisation().withId(345L).build();
org3 = newOrganisation().withId(456L).build();
roles = newProcessRole().withRole(Role.LEADAPPLICANT, Role.APPLICANT, Role.COLLABORATOR).withOrganisationId(234L, 345L, 456L).build(3).toArray(new ProcessRole[0]);
section = newSection().withQuestions(Arrays.asList(multiAnswerQuestion, leadAnswerQuestion)).build();
comp = newCompetition().withSections(Arrays.asList(section)).withMaxResearchRatio(30).build();
app = newApplication().withCompetition(comp).withProcessRoles(roles).build();
// More specific stubs override the anyLong() default above for known ids
when(applicationRepositoryMock.findById(app.getId())).thenReturn(Optional.of(app));
when(organisationRepositoryMock.findById(234L)).thenReturn(Optional.of(org1));
when(organisationRepositoryMock.findById(345L)).thenReturn(Optional.of(org2));
when(organisationRepositoryMock.findById(456L)).thenReturn(Optional.of(org3));
}
@Test
// Happy path: a brand new upload succeeds when no prior response exists
// and every referenced entity (process role, form input, application) resolves.
public void createFormInputResponseFileUpload() {
FileEntryResource fileEntryResource = newFileEntryResource().build();
FormInputResponseFileEntryResource fileEntry = new FormInputResponseFileEntryResource(fileEntryResource, 123L, 456L, 789L, 999L);
Supplier<InputStream> inputStreamSupplier = () -> null;
File fileFound = mock(File.class);
FileEntry newFileEntry = newFileEntry().with(id(999L)).build();
// File service persists the file and returns the created FileEntry (id 999)
when(fileServiceMock.createFile(fileEntryResource, inputStreamSupplier)).
thenReturn(serviceSuccess(Pair.of(fileFound, newFileEntry)));
// No existing response for this (application, updater, form input) combination
when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(null);
when(processRoleRepositoryMock.findById(789L)).thenReturn(Optional.of(newProcessRole().build()));
when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(newFormInput().withQuestion(newQuestion().withMultipleStatuses(true).build()).build()));
when(applicationRepositoryMock.findById(456L)).thenReturn(Optional.of(openApplication));
ServiceResult<FormInputResponseFileEntryResource> result =
service.uploadResponse(fileEntry, inputStreamSupplier);
assertTrue(result.isSuccess());
FormInputResponseFileEntryResource resultParts = result.getSuccess();
assertEquals(Long.valueOf(999), resultParts.getFileEntryResource().getId());
verify(formInputResponseRepositoryMock).findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L);
}
@Test
// Uploading a further file when the response already links one file entry should
// succeed for a FILEUPLOAD input (the maximum is evidently more than one here).
public void createFormInputResponseFileUploadMultipleFiles() {
FileEntryResource fileEntryResource = newFileEntryResource().with(id(987L)).build();
FormInputResponseFileEntryResource fileEntry = new FormInputResponseFileEntryResource(fileEntryResource, 123L, 456L, 789L, 999L);
Supplier<InputStream> inputStreamSupplier = () -> null;
// An existing response that already references one uploaded file (id 987)
FileEntry alreadyExistingFileEntry = newFileEntry().with(id(987L)).build();
FormInputResponse existingFormInputResponseWithLinkedFileEntry = newFormInputResponse().withFileEntries(newArrayList(alreadyExistingFileEntry)).withFormInputs(formInput).build();
File fileFound = mock(File.class);
// Local question/form-input pair configured as a multi-status FILEUPLOAD input with id 123
Question question = QuestionBuilder.newQuestion().build();
question.setMultipleStatuses(true);
FormInput formInputLocal = newFormInput().withType(FormInputType.FILEUPLOAD).build();
formInputLocal.setId(123L);
formInputLocal.setQuestion(question);
question.setFormInputs(newArrayList(formInputLocal));
// Stub file service and repositories so every lookup the service performs succeeds
when(fileServiceMock.deleteFileIgnoreNotFound(alreadyExistingFileEntry.getId())).thenReturn(serviceSuccess(alreadyExistingFileEntry));
when(fileServiceMock.createFile(fileEntryResource, inputStreamSupplier)).thenReturn(serviceSuccess(Pair.of(fileFound, alreadyExistingFileEntry)));
when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(existingFormInputResponseWithLinkedFileEntry);
when(formInputResponseRepositoryMock.save(existingFormInputResponseWithLinkedFileEntry)).thenReturn(existingFormInputResponseWithLinkedFileEntry);
when(processRoleRepositoryMock.findById(789L)).thenReturn(Optional.of(newProcessRole().build()));
when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(formInputLocal));
when(applicationRepositoryMock.findById(456L)).thenReturn(Optional.of(openApplication));
when(fileServiceMock.getFileByFileEntryId(987L)).thenReturn(serviceSuccess(inputStreamSupplier));
ServiceResult<FormInputResponseFileEntryResource> result =
service.uploadResponse(fileEntry, inputStreamSupplier);
// Then the second upload succeeds and the existing response was looked up exactly once
assertTrue(result.isSuccess());
verify(formInputResponseRepositoryMock, times(1)).findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L);
}
/**
 * Uploading a file to a multiple-status file-upload form input that already
 * holds the maximum number of linked files must fail with FILES_ALREADY_UPLOADED.
 */
@Test
public void createFormInputResponseFileUploadButMaximumFilesAlready() {
FileEntryResource fileEntryResource = newFileEntryResource().with(id(987L)).build();
FormInputResponseFileEntryResource fileEntry = new FormInputResponseFileEntryResource(fileEntryResource, 123L, 456L, 789L, 999L);
Supplier<InputStream> inputStreamSupplier = () -> null;
FileEntry alreadyExistingFileEntry = newFileEntry().with(id(987L)).build();
// The existing response already links three file entries - presumably the upload limit (TODO confirm against service).
FormInputResponse existingFormInputResponseWithLinkedFileEntry = newFormInputResponse().withFileEntries(newArrayList(alreadyExistingFileEntry, alreadyExistingFileEntry, alreadyExistingFileEntry)).withFormInputs(formInput).build();
File fileFound = mock(File.class);
Question question = QuestionBuilder.newQuestion().build();
question.setMultipleStatuses(true);
FormInput formInputLocal = newFormInput().withType(FormInputType.FILEUPLOAD).build();
formInputLocal.setId(123L);
formInputLocal.setQuestion(question);
question.setFormInputs(newArrayList(formInputLocal));
when(fileServiceMock.deleteFileIgnoreNotFound(alreadyExistingFileEntry.getId())).thenReturn(serviceSuccess(alreadyExistingFileEntry));
when(fileServiceMock.createFile(fileEntryResource, inputStreamSupplier)).thenReturn(serviceSuccess(Pair.of(fileFound, alreadyExistingFileEntry)));
when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(existingFormInputResponseWithLinkedFileEntry);
when(formInputResponseRepositoryMock.save(existingFormInputResponseWithLinkedFileEntry)).thenReturn(existingFormInputResponseWithLinkedFileEntry);
when(processRoleRepositoryMock.findById(789L)).thenReturn(Optional.of(newProcessRole().build()));
when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(formInputLocal));
when(applicationRepositoryMock.findById(456L)).thenReturn(Optional.of(openApplication));
when(fileServiceMock.getFileByFileEntryId(987L)).thenReturn(serviceSuccess(inputStreamSupplier));
ServiceResult<FormInputResponseFileEntryResource> result =
service.uploadResponse(fileEntry, inputStreamSupplier);
// The service must reject the upload rather than linking a fourth file.
assertTrue(result.isFailure());
assertTrue(result.getFailure().is(FILES_ALREADY_UPLOADED));
verify(formInputResponseRepositoryMock, times(1)).findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L);
}
/**
 * A failure from the underlying file service during file creation must be
 * propagated unchanged by uploadResponse.
 */
@Test
public void createFormInputResponseFileUploadButFileServiceCallFails() {
FileEntryResource fileEntryResource = newFileEntryResource().build();
FormInputResponseFileEntryResource fileEntry = new FormInputResponseFileEntryResource(fileEntryResource, 123L, 456L, 789L, 999L);
Supplier<InputStream> inputStreamSupplier = () -> null;
// Simulate the file service failing to create the file on disk.
when(fileServiceMock.createFile(fileEntryResource, inputStreamSupplier)).
thenReturn(serviceFailure(internalServerErrorError()));
ServiceResult<FormInputResponseFileEntryResource> result =
service.uploadResponse(fileEntry, inputStreamSupplier);
assertTrue(result.isFailure());
assertTrue(result.getFailure().is(internalServerErrorError()));
}
/**
 * When a FormInputResponse already exists for the application/form-input pair,
 * uploading a file must attach the new FileEntry to that existing response
 * rather than creating a new one.
 */
@Test
public void createFormInputResponseFileUploadWithAlreadyExistingFormInputResponse() {
FileEntryResource fileEntryResource = newFileEntryResource().build();
FormInputResponseFileEntryResource fileEntry = new FormInputResponseFileEntryResource(fileEntryResource, 123L, 456L, 789L, 987L);
Supplier<InputStream> inputStreamSupplier = () -> null;
File fileFound = mock(File.class);
FileEntry newFileEntry = newFileEntry().with(id(999L)).build();
when(fileServiceMock.createFile(fileEntryResource, inputStreamSupplier)).
thenReturn(serviceSuccess(Pair.of(fileFound, newFileEntry)));
// An existing response is found for this application + form input.
FormInputResponse existingFormInputResponse = newFormInputResponse().withFormInputs(formInput).build();
when(formInputResponseRepositoryMock.findOneByApplicationIdAndFormInputId(456L, 123L)).thenReturn(of(existingFormInputResponse));
when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(newFormInput().withQuestion(newQuestion().withMultipleStatuses(false).build()).build()));
when(processRoleRepositoryMock.findById(789L)).thenReturn(Optional.of(newProcessRole().build()));
ServiceResult<FormInputResponseFileEntryResource> result =
service.uploadResponse(fileEntry, inputStreamSupplier);
assertTrue(result.isSuccess());
FormInputResponseFileEntryResource resultParts = result.getSuccess();
assertEquals(Long.valueOf(999), resultParts.getFileEntryResource().getId());
// The newly created FileEntry must have been linked onto the pre-existing response.
assertEquals(newFileEntry, existingFormInputResponse.getFileEntries().get(0));
}
/**
 * An unknown ProcessRole id must make uploadResponse fail with a
 * ProcessRole not-found error.
 */
@Test
public void createFormInputResponseFileUploadButProcessRoleNotFound() {
FileEntryResource fileEntryResource = newFileEntryResource().build();
FormInputResponseFileEntryResource fileEntry = new FormInputResponseFileEntryResource(fileEntryResource, 123L, 456L, 789L, 999L);
Supplier<InputStream> inputStreamSupplier = () -> null;
File fileFound = mock(File.class);
FileEntry newFileEntry = newFileEntry().with(id(999L)).build();
when(fileServiceMock.createFile(fileEntryResource, inputStreamSupplier)).
thenReturn(serviceSuccess(Pair.of(fileFound, newFileEntry)));
when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(null);
// The process role lookup comes back empty - this is the failure under test.
when(processRoleRepositoryMock.findById(789L)).thenReturn(Optional.empty());
ServiceResult<FormInputResponseFileEntryResource> result =
service.uploadResponse(fileEntry, inputStreamSupplier);
assertTrue(result.isFailure());
assertTrue(result.getFailure().is(notFoundError(ProcessRole.class, 789L)));
}
/**
 * An unknown FormInput id must make uploadResponse fail with a
 * FormInput not-found error.
 */
@Test
public void createFormInputResponseFileUploadButFormInputNotFound() {
FileEntryResource fileEntryResource = newFileEntryResource().build();
FormInputResponseFileEntryResource fileEntry = new FormInputResponseFileEntryResource(fileEntryResource, 123L, 456L, 789L, 999L);
Supplier<InputStream> inputStreamSupplier = () -> null;
File fileFound = mock(File.class);
FileEntry newFileEntry = newFileEntry().with(id(999L)).build();
when(fileServiceMock.createFile(fileEntryResource, inputStreamSupplier)).
thenReturn(serviceSuccess(Pair.of(fileFound, newFileEntry)));
when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(null);
when(processRoleRepositoryMock.findById(789L)).thenReturn(Optional.of(newProcessRole().build()));
// The form input lookup comes back empty - this is the failure under test.
when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.empty());
ServiceResult<FormInputResponseFileEntryResource> result =
service.uploadResponse(fileEntry, inputStreamSupplier);
assertTrue(result.isFailure());
assertTrue(result.getFailure().is(notFoundError(FormInput.class, 123L)));
}
/**
 * An unknown Application id must make uploadResponse fail with an
 * Application not-found error.
 */
@Test
public void createFormInputResponseFileUploadButApplicationNotFound() {
FileEntryResource fileEntryResource = newFileEntryResource().build();
FormInputResponseFileEntryResource fileEntry = new FormInputResponseFileEntryResource(fileEntryResource, 123L, 456L, 789L, 999L);
Supplier<InputStream> inputStreamSupplier = () -> null;
File fileFound = mock(File.class);
FileEntry newFileEntry = newFileEntry().with(id(999L)).build();
when(fileServiceMock.createFile(fileEntryResource, inputStreamSupplier)).
thenReturn(serviceSuccess(Pair.of(fileFound, newFileEntry)));
when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(null);
when(processRoleRepositoryMock.findById(789L)).thenReturn(Optional.of(newProcessRole().build()));
when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(newFormInput().build()));
// The application lookup comes back empty - this is the failure under test.
when(applicationRepositoryMock.findById(456L)).thenReturn(Optional.empty());
ServiceResult<FormInputResponseFileEntryResource> result =
service.uploadResponse(fileEntry, inputStreamSupplier);
assertTrue(result.isFailure());
assertTrue(result.getFailure().is(notFoundError(Application.class, 456L)));
}
/**
 * Happy-path delete: the linked file is removed via the file service, the
 * FileEntry is unlinked from the response, and the updated response is saved.
 * NOTE(review): relies on class-level fixtures (existingFormInputResponse,
 * existingFileEntry, unlinkedFormInputFileEntry, formInputResponseFileEntryResource,
 * question) set up elsewhere in this test class.
 */
@Test
public void deleteFormInputResponseFileUpload() {
Supplier<InputStream> inputStreamSupplier = () -> null;
when(formInputResponseRepositoryMock.findOneByApplicationIdAndFormInputId(456L, 123L)).thenReturn(of(existingFormInputResponse));
when(fileServiceMock.getFileByFileEntryId(existingFileEntry.get(0).getId())).thenReturn(serviceSuccess(inputStreamSupplier));
when(formInputResponseRepositoryMock.save(existingFormInputResponse)).thenReturn(unlinkedFormInputFileEntry);
when(fileServiceMock.deleteFileIgnoreNotFound(999L)).thenReturn(serviceSuccess(existingFileEntry.get(0)));
when(formInputRepositoryMock.findById(formInputResponseFileEntryResource.getCompoundId().getFormInputId())).thenReturn
(Optional.of(newFormInput().withQuestion(question).build()));
ServiceResult<FormInputResponse> result =
service.deleteFormInputResponseFileUpload(formInputResponseFileEntryResource.getCompoundId());
assertTrue(result.isSuccess());
assertEquals(unlinkedFormInputFileEntry, result.getSuccess());
// Deleting the file must leave the response with no linked file entries.
assertTrue(existingFormInputResponse.getFileEntries().isEmpty());
verify(formInputResponseRepositoryMock, times(2)).findOneByApplicationIdAndFormInputId(456L, 123L);
verify(formInputResponseRepositoryMock).save(existingFormInputResponse);
}
/**
 * A failure from the file service while deleting the physical file must be
 * propagated unchanged by deleteFormInputResponseFileUpload.
 */
@Test
public void deleteFormInputResponseFileUploadButFileServiceCallFails() {
Supplier<InputStream> inputStreamSupplier = () -> null;
when(formInputResponseRepositoryMock.findOneByApplicationIdAndFormInputId(456L, 123L)).thenReturn(of(existingFormInputResponse));
when(fileServiceMock.getFileByFileEntryId(existingFileEntry.get(0).getId())).thenReturn(serviceSuccess(inputStreamSupplier));
// Simulate the file service failing to delete the stored file.
when(fileServiceMock.deleteFileIgnoreNotFound(999L)).thenReturn(serviceFailure(internalServerErrorError()));
when(formInputRepositoryMock.findById(formInputResponseFileEntryResource.getCompoundId().getFormInputId())).thenReturn(
Optional.of(newFormInput().withQuestion(question).withType(FormInputType.FILEUPLOAD).build()));
when(applicationRepositoryMock.findById(formInputResponseFileEntryResource.getCompoundId().getApplicationId())).thenReturn(
Optional.of(newApplication().withCompetition(newCompetition().withCompetitionStatus(CompetitionStatus.OPEN).build()).build()));
ServiceResult<FormInputResponse> result =
service.deleteFormInputResponseFileUpload(formInputResponseFileEntryResource.getCompoundId());
assertTrue(result.isFailure());
assertTrue(result.getFailure().is(internalServerErrorError()));
}
/**
 * If the FormInput referenced by the compound id cannot be found, the delete
 * must fail with a FormInput not-found error.
 */
@Test
public void deleteFormInputResponseFileUploadButUnableToFindFormInputResponse() {
when(formInputResponseRepositoryMock.findByApplicationIdAndFormInputId(456L, 123L)).thenReturn(newArrayList
(existingFormInputResponse));
// The form input lookup comes back empty - this is the failure under test.
when(formInputRepositoryMock.findById(formInputResponseFileEntryResource.getCompoundId().getFormInputId())).thenReturn(Optional.empty());
ServiceResult<FormInputResponse> result =
service.deleteFormInputResponseFileUpload(formInputResponseFileEntryResource.getCompoundId());
assertTrue(result.isFailure());
assertTrue(result.getFailure().is(notFoundError(FormInput.class, 123L)));
}
/**
 * If the FileEntry's stored file cannot be found, the delete must fail with a
 * FileEntry not-found error.
 */
@Test
public void deleteFormInputResponseFileUploadButFileEntryNotFound() {
FileEntryResource fileEntryResource = newFileEntryResource().with(id(999L)).build();
FormInputResponseFileEntryResource fileEntry = new FormInputResponseFileEntryResource(fileEntryResource, 123L, 456L, 789L, 999L);
FileEntry existingFileEntry = newFileEntry().with(id(999L)).build();
FormInputResponse existingFormInputResponse = newFormInputResponse().withFileEntries(newArrayList(existingFileEntry)).build();
Question question = QuestionBuilder.newQuestion().build();
question.setMultipleStatuses(true);
FormInput formInputLocal = newFormInput().withType(FormInputType.FILEUPLOAD).build();
formInputLocal.setId(123L);
formInputLocal.setQuestion(question);
question.setFormInputs(newArrayList(formInputLocal));
// Both the delete and the content fetch report the file as missing.
when(fileServiceMock.deleteFileIgnoreNotFound(999L)).thenReturn(serviceFailure(notFoundError(FileEntry.class, 999L)));
when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(formInputLocal));
when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(existingFormInputResponse);
when(fileServiceMock.getFileByFileEntryId(existingFileEntry.getId())).thenReturn(serviceFailure(notFoundError(File.class, 999L)));
ServiceResult<FormInputResponse> result = service.deleteFormInputResponseFileUpload(fileEntry.getCompoundId());
assertTrue(result.isFailure());
assertTrue(result.getFailure().is(notFoundError(FileEntry.class, 999L)));
}
/**
 * Happy-path retrieval: fetching an uploaded file returns both the content
 * supplier and a FormInputResponseFileEntryResource echoing the compound id.
 */
@Test
public void getFormInputResponseFileUpload() {
FileEntry fileEntry = newFileEntry().with(id(999L)).build();
FormInputResponse formInputResponse = newFormInputResponse().withFileEntries(newArrayList(fileEntry)).build();
Supplier<InputStream> inputStreamSupplier = () -> null;
Question question = QuestionBuilder.newQuestion().build();
question.setMultipleStatuses(true);
FormInput formInputLocal = newFormInput().withType(FormInputType.FILEUPLOAD).build();
formInputLocal.setId(123L);
formInputLocal.setQuestion(question);
question.setFormInputs(newArrayList(formInputLocal));
when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(formInputLocal));
when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(formInputResponse);
when(fileServiceMock.getFileByFileEntryId(fileEntry.getId())).thenReturn(serviceSuccess(inputStreamSupplier));
ServiceResult<FormInputResponseFileAndContents> result =
service.getFormInputResponseFileUpload(new FormInputResponseFileEntryId(123L, 456L, 789L, 999L));
assertTrue(result.isSuccess());
// The exact supplier handed out by the file service must be passed through.
assertEquals(inputStreamSupplier, result.getSuccess().getContentsSupplier());
FileEntryResource fileEntryResource = newFileEntryResource().with(id(999L)).build();
FormInputResponseFileEntryResource formInputResponseFile = result.getSuccess().getFormInputResponseFileEntry();
assertEquals(fileEntryResource.getId(), formInputResponseFile.getFileEntryResource().getId());
// The compound id components must round-trip unchanged.
assertEquals(123L, formInputResponseFile.getCompoundId().getFormInputId());
assertEquals(456L, formInputResponseFile.getCompoundId().getApplicationId());
assertEquals(789L, formInputResponseFile.getCompoundId().getProcessRoleId());
}
/**
 * A failure from the file service while fetching file contents must be
 * propagated unchanged by getFormInputResponseFileUpload.
 */
@Test
public void getFormInputResponseFileUploadButFileServiceCallFails() {
FileEntry fileEntry = newFileEntry().withId(999L).build();
FormInputResponse formInputResponse = newFormInputResponse().withFileEntries(newArrayList(fileEntry)).withFormInputs(formInput).build();
Question question = QuestionBuilder.newQuestion().build();
question.setMultipleStatuses(true);
FormInput formInputLocal = newFormInput().withType(FormInputType.FILEUPLOAD).build();
formInputLocal.setId(123L);
formInputLocal.setQuestion(question);
question.setFormInputs(newArrayList(formInputLocal));
when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(formInputLocal));
when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(formInputResponse);
// Simulate the file service failing to read the stored file.
when(fileServiceMock.getFileByFileEntryId(fileEntry.getId())).thenReturn(serviceFailure(internalServerErrorError()));
ServiceResult<FormInputResponseFileAndContents> result =
service.getFormInputResponseFileUpload(new FormInputResponseFileEntryId(123L, 456L, 789L, 999L));
assertTrue(result.isFailure());
assertTrue(result.getFailure().is(internalServerErrorError()));
}
/**
 * If no FormInputResponse exists for the compound id, the retrieval must fail
 * with a FormInputResponse not-found error keyed on (application, role, input).
 */
@Test
public void getFormInputResponseFileUploadButUnableToFindFormInputResponse() {
Question question = QuestionBuilder.newQuestion().build();
question.setMultipleStatuses(true);
FormInput formInputLocal = newFormInput().withType(FormInputType.FILEUPLOAD).build();
formInputLocal.setId(123L);
formInputLocal.setQuestion(question);
question.setFormInputs(newArrayList(formInputLocal));
when(formInputRepositoryMock.findById(123L)).thenReturn(Optional.of(formInputLocal));
// The response lookup comes back null - this is the failure under test.
when(formInputResponseRepositoryMock.findByApplicationIdAndUpdatedByIdAndFormInputId(456L, 789L, 123L)).thenReturn(null);
ServiceResult<FormInputResponseFileAndContents> result =
service.getFormInputResponseFileUpload(new FormInputResponseFileEntryId(123L, 456L, 789L, 999L));
assertTrue(result.isFailure());
assertTrue(result.getFailure().is(notFoundError(FormInputResponse.class, 456L, 789L, 123L)));
}
}
|
IFS-7943 test.
|
ifs-data-layer/ifs-data-service/src/test/java/org/innovateuk/ifs/application/transactional/ApplicationFormInputUploadServiceImplTest.java
|
IFS-7943 test.
|
|
Java
|
mit
|
6a553e1edd174a80059df5e3edd3a1197e43c284
| 0
|
CyclopsMC/CyclopsCore
|
package org.cyclops.cyclopscore.inventory.container;
import com.google.common.collect.Lists;
import net.minecraft.entity.player.InventoryPlayer;
import org.apache.commons.lang3.tuple.Pair;
import org.cyclops.cyclopscore.inventory.IGuiContainerProvider;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
/**
 * An inventory container that has a scrollbar and searchfield.
 * Terminology:
 * row: The row index from visible elements.
 * elementIndex: The element index in all available elements
 * visible: Currently on-screen by the user, maximum amount of elements is determined by the pageSize
 * filtered: All items that are browsable by the user, might be more than the pageSize allows what leads to a scrollbar.
 * unfiltered: All items, pattern searching will happen in this list.
 * @author rubensworks
 */
public abstract class ScrollingInventoryContainer<E> extends ExtendedInventoryContainer {

    private final List<E> unfilteredItems;
    private List<Pair<Integer, E>> filteredItems; // Pair: original index - item
    private final List<E> visibleItems; // fixed-size, one slot per visible row; null = empty slot
    private final IItemPredicate<E> itemSearchPredicate;

    /**
     * Make a new instance.
     *
     * @param inventory The player inventory.
     * @param guiProvider The gui provider.
     * @param items All items to potentially show in this list.
     * @param filterer The predicate that is used to filter on the given items.
     */
    @SuppressWarnings("unchecked")
    public ScrollingInventoryContainer(InventoryPlayer inventory, IGuiContainerProvider guiProvider, List<E> items,
                                       IItemPredicate<E> filterer) {
        super(inventory, guiProvider);
        this.unfilteredItems = Lists.newArrayList(items);
        this.filteredItems = Lists.newLinkedList();
        // Arrays.asList over a fresh Object[] yields a fixed-size list whose
        // elements are already all null, so no explicit clearing loop is needed.
        // NOTE(review): getPageSize() is an overridable method invoked from the
        // constructor - subclasses must return a constant from it.
        this.visibleItems = (List<E>) Arrays.asList(new Object[getPageSize()]);
        this.itemSearchPredicate = filterer;
    }

    /** @return All items, before any filtering. */
    protected List<E> getUnfilteredItems() {
        return this.unfilteredItems;
    }

    /** @return The items matching the current filter, paired with their original index. */
    protected List<Pair<Integer, E>> getFilteredItems() {
        return this.filteredItems;
    }

    public int getUnfilteredItemCount() {
        return getUnfilteredItems().size();
    }

    public int getFilteredItemCount() {
        return getFilteredItems().size();
    }

    /**
     * @return The maximum amount of columns to show.
     */
    public int getColumns() {
        return 1;
    }

    /**
     * Scroll to the given relative position.
     * @param scroll A value between 0 and 1.
     */
    public void scrollTo(float scroll) {
        onScroll();
        // Number of scrollable rows beyond the first page (may be <= 0 when
        // everything fits on one page; firstRow is clamped below).
        int rows = (getFilteredItemCount() + getColumns() - 1) / getColumns() - getPageSize();
        int firstRow = (int)((double)(scroll * (float)rows) + 0.5D);
        if(firstRow < 0) firstRow = 0;
        for(int i = 0; i < getPageSize(); i++) {
            for(int j = 0; j < getColumns(); j++) {
                int index = i * getColumns() + j;
                int elementIndex = index + firstRow;
                // Clear the slot first so rows past the end of the filtered list stay empty.
                this.visibleItems.set(index, null);
                if(elementIndex < getFilteredItemCount()) {
                    Pair<Integer, E> filteredItem = getFilteredItems().get(elementIndex);
                    enableElementAt(index, filteredItem.getLeft(), filteredItem.getRight());
                }
            }
        }
    }

    /** Hook called at the start of every {@link #scrollTo(float)}; no-op by default. */
    protected void onScroll() {

    }

    /**
     * @return The allowed page size.
     */
    public abstract int getPageSize();

    /**
     * After scrolling, this will be called to make items visible.
     * @param row The row to show the given element at.
     * @param elementIndex The absolute element index.
     * @param element The element to show.
     */
    protected void enableElementAt(int row, int elementIndex, E element) {
        this.visibleItems.set(row, element);
    }

    /**
     * Check if the given element is visible.
     * @param row The row the given element is at.
     * @return If it is visible.
     */
    public boolean isElementVisible(int row) {
        return row < getPageSize() && getVisibleElement(row) != null;
    }

    /**
     * Get the currently visible element at the given row.
     * @param row The row the given element is at.
     * @return The element, or null if the row is empty or out of range.
     */
    public E getVisibleElement(int row) {
        if(row >= visibleItems.size()) return null;
        return this.visibleItems.get(row);
    }

    /**
     * Update the filtered items.
     * @param searchString The input string to search by.
     */
    public void updateFilter(String searchString) {
        // The search string is interpreted as a (lower-cased) regex fragment;
        // an invalid pattern degrades gracefully to match-all.
        Pattern pattern;
        try {
            pattern = Pattern.compile(".*" + searchString.toLowerCase() + ".*");
        } catch (PatternSyntaxException e) {
            pattern = Pattern.compile(".*");
        }
        this.filteredItems = filter(unfilteredItems, itemSearchPredicate, pattern);
        scrollTo(0); // Reset scroll, will also refresh items on-screen.
    }

    /**
     * Filter the input items with the given predicate, retaining each match's
     * original index in the input list.
     */
    protected static <E> List<Pair<Integer, E>> filter(List<E> input, IItemPredicate<E> predicate, Pattern pattern) {
        List<Pair<Integer, E>> filtered = Lists.newLinkedList();
        int i = 0;
        for(E item : input) {
            if(predicate.apply(item, pattern)) {
                filtered.add(Pair.of(i, item));
            }
            i++;
        }
        return filtered;
    }

    /**
     * Predicate for matching items used to search.
     * @param <E> The type of item.
     */
    public interface IItemPredicate<E> {

        /**
         * Check if the given item matches a string pattern.
         * @param item The item to check.
         * @param pattern The pattern to check.
         * @return If the item matches
         */
        boolean apply(E item, Pattern pattern);

    }

}
|
src/main/java/org/cyclops/cyclopscore/inventory/container/ScrollingInventoryContainer.java
|
package org.cyclops.cyclopscore.inventory.container;
import com.google.common.collect.Lists;
import net.minecraft.entity.player.InventoryPlayer;
import org.apache.commons.lang3.tuple.Pair;
import org.cyclops.cyclopscore.inventory.IGuiContainerProvider;
import scala.actors.threadpool.Arrays;
import java.util.List;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
/**
 * An inventory container that has a scrollbar and searchfield.
 * Terminology:
 * row: The row index from visible elements.
 * elementIndex: The element index in all available elements
 * visible: Currently on-screen by the user, maximum amount of elements is determined by the pageSize
 * filtered: All items that are browsable by the user, might be more than the pageSize allows what leads to a scrollbar.
 * unfiltered: All items, pattern searching will happen in this list.
 * @author rubensworks
 */
public abstract class ScrollingInventoryContainer<E> extends ExtendedInventoryContainer {
private final List<E> unfilteredItems;
private List<Pair<Integer, E>> filteredItems; // Pair: original index - item
// Fixed-size slot list, one entry per visible row; null means an empty slot.
private final List<E> visibleItems;
private final IItemPredicate<E> itemSearchPredicate;
/**
 * Make a new instance.
 *
 * @param inventory The player inventory.
 * @param guiProvider The gui provider.
 * @param items All items to potentially show in this list.
 * @param filterer The predicate that is used to filter on the given items.
 */
@SuppressWarnings("unchecked")
public ScrollingInventoryContainer(InventoryPlayer inventory, IGuiContainerProvider guiProvider, List<E> items,
IItemPredicate<E> filterer) {
super(inventory, guiProvider);
this.unfilteredItems = Lists.newArrayList(items);
this.filteredItems = Lists.newLinkedList();
// NOTE(review): "Arrays" here resolves to scala.actors.threadpool.Arrays (see
// imports), a non-JDK class returning a raw List - prefer java.util.Arrays.
// The loop below re-sets elements that are already null and is redundant.
this.visibleItems = Arrays.asList(new Object[getPageSize()]);
for(int i = 0; i < getPageSize(); i++) {
this.visibleItems.set(i, null);
}
this.itemSearchPredicate = filterer;
}
// All items, before any filtering.
protected List<E> getUnfilteredItems() {
return this.unfilteredItems;
}
// Items matching the current filter, paired with their original index.
protected List<Pair<Integer, E>> getFilteredItems() {
return this.filteredItems;
}
public int getUnfilteredItemCount() {
return getUnfilteredItems().size();
}
public int getFilteredItemCount() {
return getFilteredItems().size();
}
/**
 * @return The maximum amount of columns to show.
 */
public int getColumns() {
return 1;
}
/**
 * Scroll to the given relative position.
 * @param scroll A value between 0 and 1.
 */
public void scrollTo(float scroll) {
onScroll();
// Rows beyond the first page; may be <= 0 when everything fits on one page.
int rows = (getFilteredItemCount() + getColumns() - 1) / getColumns() - getPageSize();
int firstRow = (int)((double)(scroll * (float)rows) + 0.5D);
if(firstRow < 0) firstRow = 0;
for(int i = 0; i < getPageSize(); i++) {
for(int j = 0; j < getColumns(); j++) {
int index = i * getColumns() + j;
int elementIndex = index + firstRow;
// Clear first so rows past the end of the filtered list stay empty.
this.visibleItems.set(index, null);
if(elementIndex < getFilteredItemCount()) {
Pair<Integer, E> filteredItem = getFilteredItems().get(elementIndex);
enableElementAt(index, filteredItem.getLeft(), filteredItem.getRight());
}
}
}
}
// Hook called at the start of every scrollTo; no-op by default.
protected void onScroll() {
}
/**
 * @return The allowed page size.
 */
public abstract int getPageSize();
/**
 * After scrolling, this will be called to make items visible.
 * @param row The row to show the given element at.
 * @param elementIndex The absolute element index.
 * @param element The element to show.
 */
protected void enableElementAt(int row, int elementIndex, E element) {
this.visibleItems.set(row, element);
}
/**
 * Check if the given element is visible.
 * @param row The row the the given element is at.
 * @return If it is visible.
 */
public boolean isElementVisible(int row) {
return row < getPageSize() && getVisibleElement(row) != null;
}
/**
 * Get the currently visible element at the given row.
 * @param row The row the the given element is at.
 * @return The elements
 */
public E getVisibleElement(int row) {
if(row >= visibleItems.size()) return null;
return this.visibleItems.get(row);
}
/**
 * Update the filtered items.
 * @param searchString The input string to search by.
 */
public void updateFilter(String searchString) {
// Search string is used as a lower-cased regex fragment; invalid patterns
// degrade gracefully to match-all.
Pattern pattern;
try {
pattern = Pattern.compile(".*" + searchString.toLowerCase() + ".*");
} catch (PatternSyntaxException e) {
pattern = Pattern.compile(".*");
}
this.filteredItems = filter(unfilteredItems, itemSearchPredicate, pattern);
scrollTo(0); // Reset scroll, will also refresh items on-screen.
}
// Filters input with the predicate, keeping each match's original index.
protected static <E> List<Pair<Integer, E>> filter(List<E> input, IItemPredicate<E> predicate, Pattern pattern) {
List<Pair<Integer, E>> filtered = Lists.newLinkedList();
int i = 0;
for(E item : input) {
if(predicate.apply(item, pattern)) {
filtered.add(Pair.of(i, item));
}
i++;
}
return filtered;
}
/**
 * Predicate for matching items used to search.
 * @param <E> The type of item.
 */
public static interface IItemPredicate<E> {
/**
 * Check if the given item matches a string pattern.
 * @param item The item to check.
 * @param pattern The pattern to check.
 * @return If the item matches
 */
public boolean apply(E item, Pattern pattern);
}
}
|
Don't use Scala classes
|
src/main/java/org/cyclops/cyclopscore/inventory/container/ScrollingInventoryContainer.java
|
Don't use Scala classes
|
|
Java
|
mit
|
eb107f25c6c6047934a13c7b2907f4539f85f66c
| 0
|
carrot/android-animation-arsenal
|
package com.android_animation_arsenal;
import android.animation.Animator;
import android.content.Context;
import android.graphics.Point;
import android.os.Build;
import android.transition.Explode;
import android.transition.Fade;
import android.transition.Slide;
import android.transition.Transition;
import android.transition.TransitionSet;
import android.util.DisplayMetrics;
import android.view.View;
import android.view.ViewAnimationUtils;
import android.view.ViewTreeObserver;
import android.view.Window;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.RotateAnimation;
import android.view.animation.ScaleAnimation;
/**
* Created by angelsolis on 11/20/15.
*/
public class AnimationArsenal
{
private static int HALF_SECOND_DURATION = 500;
/**
 * Builds an Explode transition (requires API 21+).
 *
 * @param listener transition listener to attach
 * @param duration duration for the transition, in milliseconds
 * @return the configured Explode transition, or null below Lollipop
 */
public static Transition getExplodeTransition(Transition.TransitionListener listener, int
        duration)
{
    // Transitions are unavailable before Lollipop.
    if(Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP)
    {
        return null;
    }
    Explode transition = new Explode();
    transition.setDuration(duration);
    transition.addListener(listener);
    return transition;
}
/**
 * Builds a Slide transition in the given direction (requires API 21+).
 *
 * @param listener transition listener to attach
 * @param duration duration for the transition, in milliseconds
 * @param gravity  edge the slide moves toward/from
 * @return the configured Slide transition, or null below Lollipop
 */
public static Transition getSlideTransition(Transition.TransitionListener listener, int
        duration, int gravity)
{
    // Transitions are unavailable before Lollipop.
    if(Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP)
    {
        return null;
    }
    Slide transition = new Slide(gravity);
    transition.setDuration(duration);
    transition.addListener(listener);
    return transition;
}
/**
 * Enum for Reveal Transition
 * Identifies the corner (or center) from which a circular reveal expands.
 */
public enum RevealGravity
{
BOTTOM_LEFT,
BOTTOM_RIGHT,
TOP_LEFT,
TOP_RIGHT,
// Reveal starts at the view's on-screen center.
CENTER
}
/**
 * starts Circular Reveal transition
 *
 * @param view view for circular reveal transition
 * @param context application context
 * @param gravity position where the reveal starts
 */
public static void circularReveal(final View view, Context context, RevealGravity gravity)
{
// ViewAnimationUtils.createCircularReveal requires API 21+; silently no-op otherwise.
if(Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP)
{
return;
}
// Resolve the reveal origin (x, y) from the requested gravity, in the view's coordinates.
int x = 0;
int y = 0;
switch(gravity)
{
case CENTER:
Point point = getViewEpicenter(view);
x = point.x;
y = point.y;
break;
case BOTTOM_LEFT:
x = view.getLeft();
y = view.getBottom();
break;
case BOTTOM_RIGHT:
x = view.getRight();
y = view.getBottom();
break;
case TOP_LEFT:
x = view.getLeft();
y = view.getTop();
break;
case TOP_RIGHT:
x = view.getRight();
y = view.getTop();
break;
}
DisplayMetrics displayMetrics = context.getResources().getDisplayMetrics();
// big radius to cover view
// NOTE(review): this uses the larger screen dimension, not the view's diagonal,
// so the reveal can overshoot small views - confirm this is intended.
int bigRadius = Math.max(displayMetrics.widthPixels, displayMetrics
.heightPixels);
// initialize animator with circular reveal
final Animator animatorReveal = ViewAnimationUtils.createCircularReveal
(view, x, y, 0, bigRadius);
// Duration is fixed at HALF_SECOND_DURATION (500 ms); not configurable by callers.
animatorReveal.setDuration(HALF_SECOND_DURATION);
// add listener to change view visibility
animatorReveal.addListener(new Animator.AnimatorListener()
{
@Override
public void onAnimationStart(Animator animation)
{
// Make the view visible just as the reveal begins.
view.setVisibility(View.VISIBLE);
}
@Override
public void onAnimationEnd(Animator animation)
{
}
@Override
public void onAnimationCancel(Animator animation)
{
}
@Override
public void onAnimationRepeat(Animator animation)
{
}
}
);
animatorReveal.start();
}
/**
 * Computes the on-screen center point of a view.
 *
 * @param view the view to measure; may be null
 * @return the view's center in window coordinates, or (0, 0) for a null view
 */
public static Point getViewEpicenter(View view)
{
    if(view == null)
    {
        return new Point(0, 0);
    }
    // Top-left corner of the view in window coordinates.
    int[] position = new int[2];
    view.getLocationInWindow(position);
    int centerX = position[0] + view.getMeasuredWidth() / 2;
    int centerY = position[1] + view.getMeasuredHeight() / 2;
    return new Point(centerX, centerY);
}
/**
 * Builds a Fade transition (requires API 21+).
 *
 * @param listener transition listener to attach
 * @param fadeMode The behavior of this transition: Fade.IN or Fade.OUT
 * @return the configured Fade transition, or null below Lollipop
 */
public static Transition getFadeTransition(Transition.TransitionListener listener, int fadeMode)
{
    // Transitions are unavailable before Lollipop.
    if(Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP)
    {
        return null;
    }
    Fade transition = new Fade(fadeMode);
    transition.addListener(listener);
    return transition;
}
/**
 * explode animation moving view to specific sides using 'Slide Transition'
 * Builds a TransitionSet that slides four view containers out toward the four
 * screen edges simultaneously (requires API 21+).
 *
 * @param duration duration for the combined transition, in milliseconds
 * @param topViewContainer view slid toward the top edge
 * @param bottomViewContainer view slid toward the bottom edge
 * @param leftViewContainer view slid toward the left edge
 * @param rightViewContainer view slid toward the right edge
 * @return returns transition - could be null (null below Lollipop)
 */
public static Transition getSlideExplosionTransition(int duration,
View topViewContainer,
View bottomViewContainer,
View leftViewContainer,
View rightViewContainer)
{
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP)
{
// One Slide per edge, each targeting exactly one container.
TransitionSet set = new TransitionSet();
Slide slideTop = new Slide(android.view.Gravity.TOP);
slideTop.addTarget(topViewContainer);
set.addTransition(slideTop);
Slide slideBottom = new Slide(android.view.Gravity.BOTTOM);
slideBottom.addTarget(bottomViewContainer);
set.addTransition(slideBottom);
Slide slideLeft = new Slide(android.view.Gravity.LEFT);
slideLeft.addTarget(leftViewContainer);
set.addTransition(slideLeft);
Slide slideRight = new Slide(android.view.Gravity.RIGHT);
slideRight.addTarget(rightViewContainer);
set.addTransition(slideRight);
set.setDuration(duration);
return set;
}
return null;
}
/**
 * Sets window enter transition (requires API 21+).
 *
 * @param window application window
 * @param transition specific transition for enter transition
 * @return boolean (true if transition is available, false if not)
 */
public static boolean setEnterTransition(Window window, Transition transition)
{
    // Window transitions are unavailable before Lollipop.
    if(Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP)
    {
        return false;
    }
    if(window == null || transition == null)
    {
        return false;
    }
    window.setEnterTransition(transition);
    return true;
}
/**
* Sets window exit transition
*
* @param window application window
* @param transition specific transition for exit transition
* @return boolean (true if transition is available, false if not)
*/
public static boolean setExitTransition(Window window, Transition transition)
{
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP)
{
if(window == null || transition == null)
{
return false;
}
window.setExitTransition(transition);
return true;
}
return false;
}
/**
* starts animation from resource for specific view
*
* @param context application context
* @param view specific view for animation
* @param animationId specific animation resource
* @param duration duration of animation
* @param animationListener animation listener
*/
public static Animation playAnimationFromResource(Context context, View view,
int animationId, int duration,
Animation.AnimationListener animationListener)
{
return animate(context, view, animationId, duration, animationListener);
}
/**
* starts fade in animation for specific view
*
* @param context application context
* @param view specific view for animation
* @param duration duration of animation
* @param animationListener animation listener
*/
public static Animation playAnimationFadeIn(Context context, View view, int duration,
Animation.AnimationListener animationListener)
{
return animate(context, view, android.R.anim.fade_in, duration, animationListener);
}
/**
* starts fade out animation for specific view
*
* @param context application context
* @param view specific view for animation
* @param duration duration of animation
* @param animationListener animation listener
*/
public static Animation playAnimationFadeOut(Context context, View view, int duration,
Animation.AnimationListener animationListener)
{
return animate(context, view, android.R.anim.fade_out, duration, animationListener);
}
/**
* starts slide left animation for specific view
*
* @param context application context
* @param view specific view for animation
* @param duration duration of animation
* @param animationListener animation listener
*/
public static Animation playAnimationSlideLeft(Context context, View view, int duration,
Animation.AnimationListener animationListener)
{
return animate(context, view, duration, android.R.anim.slide_in_left, animationListener);
}
/**
* starts slide right animation for specific view
*
* @param context application context
* @param view specific view for animation
* @param duration duration of animation
* @param animationListener animation listener
*/
public static Animation playAnimationSlideRight(Context context, View view, int duration,
Animation.AnimationListener animationListener)
{
return animate(context, view, duration, android.R.anim.slide_out_right, animationListener);
}
    /**
     * Loads an animation from the given resource, configures it and starts it
     * on the given view. Shared helper for all playAnimation* methods.
     *
     * @param context     application context used to load the resource
     * @param view        specific view for animation (must be non-null; no guard here)
     * @param animationId animation resource id to load
     * @param duration    duration of animation in milliseconds (overrides the resource's)
     * @param listener    animation listener
     * @return the started animation
     */
    private static Animation animate(Context context, View view, int animationId,
                                     int duration, Animation.AnimationListener listener)
    {
        final Animation anim = AnimationUtils.loadAnimation(context, animationId);
        anim.setAnimationListener(listener);
        anim.setDuration(duration);
        view.startAnimation(anim);
        return anim;
    }
/**
* starts scale animation for specific view
*
* @param view specific view for animation
* @param duration duration of animation
* @param startScale starting scale size
* @param endScale ending scale size
*/
public static Animation playAnimationScale(View view, int duration, float startScale,
float endScale, Animation.AnimationListener listener)
{
final Animation anim = new ScaleAnimation(
startScale, endScale, // Start and end values for the X axis scaling
startScale, endScale, // Start and end values for the Y axis scaling
Animation.RELATIVE_TO_SELF, 0.5f, // Pivot point of X scaling
Animation.RELATIVE_TO_SELF, 0.5f); // Pivot point of Y scaling
anim.setFillAfter(true); // Needed to keep the result of the animation
anim.setDuration(duration);
anim.setAnimationListener(listener);
view.startAnimation(anim);
return anim;
}
/**
* starts rotate animation for specific view
*
* @param view specific view for animation
* @param duration duration of animation
* @param fromDegrees starting rotation
* @param toDegrees ending rotation
*/
public static Animation playAnimationRotate(View view, int duration, float fromDegrees,
float toDegrees, int repeat, Animation
.AnimationListener listener)
{
final Animation anim = new RotateAnimation(
fromDegrees, toDegrees,
Animation.RELATIVE_TO_SELF, 0.5f, // Pivot point of X scaling
Animation.RELATIVE_TO_SELF, 0.5f); // Pivot point of Y scaling
anim.setFillAfter(true); // Needed to keep the result of the animation
anim.setDuration(duration); // set animation duration
anim.setRepeatCount(repeat); // -1 = infinite repeated
anim.setAnimationListener(listener);
view.startAnimation(anim);
return anim;
}
    /**
     * Animate Enter of Shared Element in < 21 (Min SDK 12).
     * Use this in onCreate() with a safety check.
     * Set 'compatExitSharedElement' in onBackPressed() as well with a safety check to control
     * the shared element animation at exiting activity.
     *
     * The target view is first scaled/translated so it visually overlaps the
     * start view, then animated back to its natural position and size.
     *
     * @param startView  starting view from calling Activity
     * @param targetView Targeted view from actual activity
     */
    public static void compatEnterSharedElement(final View startView, final View targetView)
    {
        // View.animate()/setScaleX etc. require API 12 (HONEYCOMB_MR1)
        if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR1)
        {
            // wait until layout is done so on-screen locations are valid
            targetView.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver
                    .OnPreDrawListener()
            {
                @Override
                public boolean onPreDraw()
                {
                    // run only once
                    targetView.getViewTreeObserver().removeOnPreDrawListener(this);
                    //get image view locations
                    int[] targetImageLocation = new int[2];
                    targetView.getLocationOnScreen(targetImageLocation);
                    int[] startImageLocation = new int[2];
                    startView.getLocationOnScreen(startImageLocation);
                    int xLeft = targetImageLocation[0];
                    int yTop = targetImageLocation[1];
                    // offsets that move the target on top of the start view
                    int leftDelta = startImageLocation[0] - xLeft;
                    int topDelta = startImageLocation[1] - yTop;
                    // scale factors that shrink the target to the start view's size
                    float widthScale = (float) startView.getWidth() / targetView.getWidth();
                    float heightScale = (float) startView.getHeight() / targetView.getHeight();
                    //set values for animation: jump to the start view's frame...
                    targetView.setPivotX(0);
                    targetView.setPivotY(0);
                    targetView.setScaleX(widthScale);
                    targetView.setScaleY(heightScale);
                    targetView.setTranslationX(leftDelta);
                    targetView.setTranslationY(topDelta);
                    // ...then animate back to the natural position/size
                    targetView.animate().setDuration(HALF_SECOND_DURATION)
                            .scaleX(1).scaleY(1)
                            .translationX(0).translationY(0)
                            .setInterpolator(new DecelerateInterpolator());
                    return true; // proceed with the current draw pass
                }
            });
        }
    }
/**
* Animate Exit of Shared Element in < 21 (Min SDK 12)
* Use this in onBackPressed() with a safety check
*
* @param startView starting view from calling Activity
* @param targetView Targeted view from actual activity
* @param runnable Runnable for ending action
*/
public static void compatExitSharedElement(View startView, View targetView, Runnable runnable)
{
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR1)
{
float mWidthScale = (float) startView.getWidth() / targetView.getWidth();
float mHeightScale = (float) startView.getHeight() / targetView.getHeight();
int[] startImageLocation = new int[2];
startView.getLocationOnScreen(startImageLocation);
int[] targetImageLocation = new int[2];
targetView.getLocationOnScreen(targetImageLocation);
int leftDelta = startImageLocation[0] - targetImageLocation[0];
int topDelta = startImageLocation[1] - targetImageLocation[1];
targetView.animate().setDuration(HALF_SECOND_DURATION)
.scaleX(mWidthScale).scaleY(mHeightScale)
.translationX(leftDelta).translationY(topDelta)
.withEndAction(runnable);
}
}
}
|
android-animation-arsenal/src/main/java/com/android_animation_arsenal/AnimationArsenal.java
|
package com.android_animation_arsenal;
import android.animation.Animator;
import android.app.Activity;
import android.content.Context;
import android.graphics.Point;
import android.os.Build;
import android.transition.Explode;
import android.transition.Fade;
import android.transition.Slide;
import android.transition.Transition;
import android.transition.TransitionSet;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Gravity;
import android.view.View;
import android.view.ViewAnimationUtils;
import android.view.ViewTreeObserver;
import android.view.Window;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.RotateAnimation;
import android.view.animation.ScaleAnimation;
/**
* Created by angelsolis on 11/20/15.
*/
public class AnimationArsenal
{
private static int HALF_SECOND_DURATION = 500;
    /**
     * returns Explode Transition
     *
     * @param listener transition listener
     * @param duration duration for transition in milliseconds
     * @return the explode transition, or {@code null} below API 21
     */
    public static Transition getExplodeTransition(Transition.TransitionListener listener, int
            duration)
    {
        // Transitions framework is only available from Lollipop on
        if(Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP)
        {
            return null;
        }
        Explode explode = new Explode();
        explode.setDuration(duration);
        explode.addListener(listener);
        return explode;
    }
    /**
     * returns Slide Transition
     *
     * @param listener transition listener
     * @param duration duration for transition in milliseconds
     * @param gravity  direction of slide transition (android.view.Gravity constant)
     * @return the slide transition, or {@code null} below API 21
     */
    public static Transition getSlideTransition(Transition.TransitionListener listener, int
            duration, int gravity)
    {
        // Transitions framework is only available from Lollipop on
        if(Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP)
        {
            return null;
        }
        Slide slide = new Slide(gravity);
        slide.setDuration(duration);
        slide.addListener(listener);
        return slide;
    }
/**
* Enum for Reveal Transition
*/
public enum RevealGravity
{
BOTTOM_LEFT,
BOTTOM_RIGHT,
TOP_LEFT,
TOP_RIGHT,
CENTER
}
    /**
     * Starts a circular reveal on the given view, expanding from the chosen
     * corner (or center) to a radius large enough to cover the screen, and
     * makes the view visible when the animation starts. No-op on a null view
     * or below API 21.
     *
     * @param view    view for circular reveal transition
     * @param context application context (used to read display metrics)
     * @param gravity position where the reveal starts
     */
    public static void circularReveal(final View view, Context context, RevealGravity gravity)
    {
        if(view == null)
        {
            return;
        }
        // ViewAnimationUtils.createCircularReveal requires API 21
        if(Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP)
        {
            return;
        }
        // pick the reveal origin according to the requested gravity
        int x = 0;
        int y = 0;
        switch(gravity)
        {
            case CENTER:
                Point point = getViewEpicenter(view);
                x = point.x;
                y = point.y;
                break;
            case BOTTOM_LEFT:
                x = view.getLeft();
                y = view.getBottom();
                break;
            case BOTTOM_RIGHT:
                x = view.getRight();
                y = view.getBottom();
                break;
            case TOP_LEFT:
                x = view.getLeft();
                y = view.getTop();
                break;
            case TOP_RIGHT:
                x = view.getRight();
                y = view.getTop();
                break;
        }
        DisplayMetrics displayMetrics = context.getResources().getDisplayMetrics();
        // big radius to cover view: the larger screen dimension guarantees full coverage
        int bigRadius = Math.max(displayMetrics.widthPixels, displayMetrics
                .heightPixels);
        // initialize animator with circular reveal, growing from radius 0
        final Animator animatorReveal = ViewAnimationUtils.createCircularReveal
                (view, x, y, 0, bigRadius);
        animatorReveal.setDuration(HALF_SECOND_DURATION);
        // add listener to change view visibility as soon as the reveal starts
        animatorReveal.addListener(new Animator.AnimatorListener()
        {
            @Override
            public void onAnimationStart(Animator animation)
            {
                view.setVisibility(View.VISIBLE);
            }
            @Override
            public void onAnimationEnd(Animator animation)
            {
            }
            @Override
            public void onAnimationCancel(Animator animation)
            {
            }
            @Override
            public void onAnimationRepeat(Animator animation)
            {
            }
        }
        );
        animatorReveal.start();
    }
    /**
     * Computes the center point of a view in window coordinates.
     *
     * @param view the view to measure; may be null
     * @return the view's center, or (0, 0) when the view is null
     */
    public static Point getViewEpicenter(View view)
    {
        if(view == null)
        {
            return new Point(0, 0);
        }
        // get location of view in its window, then offset to its measured center
        int[] location = new int[2];
        view.getLocationInWindow(location);
        return new Point(location[0] + view.getMeasuredWidth() / 2,
                location[1] + view.getMeasuredHeight() / 2);
    }
    /**
     * returns Fade transition
     *
     * @param listener transition listener
     * @param fadeMode The behavior of this transition: Fade.IN or Fade.OUT
     * @return the fade transition, or {@code null} below API 21
     */
    public static Transition getFadeTransition(Transition.TransitionListener listener, int fadeMode)
    {
        // Transitions framework is only available from Lollipop on
        if(Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP)
        {
            return null;
        }
        Fade fade = new Fade(fadeMode);
        fade.addListener(listener);
        return fade;
    }
    /**
     * Builds an "explosion" effect by sliding four views towards the four edges
     * of the screen, using one Slide transition per view.
     *
     * @param duration            duration for the whole transition set
     * @param topViewContainer    view slid towards the top edge
     * @param bottomViewContainer view slid towards the bottom edge
     * @param leftViewContainer   view slid towards the left edge
     * @param rightViewContainer  view slid towards the right edge
     * @return the combined transition set, or {@code null} below API 21
     */
    public static Transition getSlideExplosionTransition(int duration,
                                                         View topViewContainer,
                                                         View bottomViewContainer,
                                                         View leftViewContainer,
                                                         View rightViewContainer)
    {
        // Transitions framework is only available from Lollipop on
        if(Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP)
        {
            return null;
        }
        TransitionSet set = new TransitionSet();
        Slide slideTop = new Slide(android.view.Gravity.TOP);
        slideTop.addTarget(topViewContainer);
        set.addTransition(slideTop);
        Slide slideBottom = new Slide(android.view.Gravity.BOTTOM);
        slideBottom.addTarget(bottomViewContainer);
        set.addTransition(slideBottom);
        Slide slideLeft = new Slide(android.view.Gravity.LEFT);
        slideLeft.addTarget(leftViewContainer);
        set.addTransition(slideLeft);
        Slide slideRight = new Slide(android.view.Gravity.RIGHT);
        slideRight.addTarget(rightViewContainer);
        set.addTransition(slideRight);
        set.setDuration(duration);
        return set;
    }
    /**
     * Sets window enter transition.
     *
     * @param window application window
     * @param transition specific transition for enter transition
     * @return boolean (true if transition was applied; false on null argument
     * or below API 21)
     */
    public static boolean setEnterTransition(Window window, Transition transition)
    {
        // null checks first; both failure paths return false either way
        if(window == null || transition == null)
        {
            return false;
        }
        // Window transitions require API 21
        if(Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP)
        {
            return false;
        }
        window.setEnterTransition(transition);
        return true;
    }
    /**
     * Sets window exit transition.
     *
     * @param window application window
     * @param transition specific transition for exit transition
     * @return boolean (true if transition was applied; false on null argument
     * or below API 21)
     */
    public static boolean setExitTransition(Window window, Transition transition)
    {
        // null checks first; both failure paths return false either way
        if(window == null || transition == null)
        {
            return false;
        }
        // Window transitions require API 21
        if(Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP)
        {
            return false;
        }
        window.setExitTransition(transition);
        return true;
    }
    /**
     * starts animation from resource for specific view
     *
     * @param context application context
     * @param view specific view for animation
     * @param animationId specific animation resource
     * @param duration duration of animation in milliseconds
     * @param animationListener animation listener
     */
    public static void playAnimationFromResource(Context context, View view,
                                                 int animationId, int duration,
                                                 Animation.AnimationListener animationListener)
    {
        // thin wrapper around the shared helper
        createAnimation(context, view, animationId, duration, animationListener);
    }
    /**
     * starts fade in animation for specific view
     *
     * @param context application context
     * @param view specific view for animation
     * @param duration duration of animation in milliseconds
     * @param animationListener animation listener
     */
    public static void playAnimationFadeIn(Context context, View view, int duration,
                                           Animation.AnimationListener animationListener)
    {
        createAnimation(context, view, android.R.anim.fade_in, duration, animationListener);
    }
    /**
     * starts fade out animation for specific view
     *
     * @param context application context
     * @param view specific view for animation
     * @param duration duration of animation in milliseconds
     * @param animationListener animation listener
     */
    public static void playAnimationFadeOut(Context context, View view, int duration,
                                            Animation.AnimationListener animationListener)
    {
        createAnimation(context, view, android.R.anim.fade_out, duration, animationListener);
    }
/**
* starts slide left animation for specific view
*
* @param context application context
* @param view specific view for animation
* @param duration duration of animation
* @param animationListener animation listener
*/
public static void playAnimationSlideLeft(Context context, View view, int duration,
Animation.AnimationListener animationListener)
{
createAnimation(context, view, duration, android.R.anim.slide_in_left, animationListener);
}
/**
* starts slide right animation for specific view
*
* @param context application context
* @param view specific view for animation
* @param duration duration of animation
* @param animationListener animation listener
*/
public static void playAnimationSlideRight(Context context, View view, int duration,
Animation.AnimationListener animationListener)
{
createAnimation(context, view, duration, android.R.anim.slide_out_right, animationListener);
}
    /**
     * Loads an animation from the given resource, configures it and starts it
     * on the given view. Shared helper for all playAnimation* methods.
     * Silently does nothing when the view is null.
     *
     * @param context     application context used to load the resource
     * @param view        specific view for animation; may be null (no-op)
     * @param animationId animation resource id to load
     * @param duration    duration of animation in milliseconds (overrides the resource's)
     * @param listener    animation listener
     */
    private static void createAnimation(Context context, View view, int animationId,
                                        int duration, Animation.AnimationListener listener)
    {
        if(view == null)
        {
            return;
        }
        final Animation anim = AnimationUtils.loadAnimation(context, animationId);
        anim.setAnimationListener(listener);
        anim.setDuration(duration);
        view.startAnimation(anim);
    }
    /**
     * starts scale animation for specific view, pivoting on its center
     *
     * @param view specific view for animation
     * @param duration duration of animation in milliseconds
     * @param startScale starting scale factor (applied to both axes)
     * @param endScale ending scale factor (applied to both axes)
     * @param listener animation listener
     */
    public static void playAnimationScale(View view, int duration, float startScale,
                                          float endScale, Animation.AnimationListener listener)
    {
        final Animation anim = new ScaleAnimation(
                startScale, endScale, // Start and end values for the X axis scaling
                startScale, endScale, // Start and end values for the Y axis scaling
                Animation.RELATIVE_TO_SELF, 0.5f, // Pivot point of X scaling
                Animation.RELATIVE_TO_SELF, 0.5f); // Pivot point of Y scaling
        anim.setFillAfter(true); // Needed to keep the result of the animation
        anim.setDuration(duration);
        anim.setAnimationListener(listener);
        view.startAnimation(anim);
    }
    /**
     * starts rotate animation for specific view, pivoting on its center
     *
     * @param view specific view for animation
     * @param duration duration of animation in milliseconds
     * @param fromDegrees starting rotation in degrees
     * @param toDegrees ending rotation in degrees
     * @param repeat repeat count; -1 repeats indefinitely
     * @param listener animation listener
     */
    public static void playAnimationRotate(View view, int duration, float fromDegrees,
                                           float toDegrees, int repeat, Animation
                                                   .AnimationListener listener)
    {
        final Animation anim = new RotateAnimation(
                fromDegrees, toDegrees,
                Animation.RELATIVE_TO_SELF, 0.5f, // Pivot point of X scaling
                Animation.RELATIVE_TO_SELF, 0.5f); // Pivot point of Y scaling
        anim.setFillAfter(true); // Needed to keep the result of the animation
        anim.setDuration(duration); // set animation duration
        anim.setRepeatCount(repeat); // -1 = infinite repeated
        anim.setAnimationListener(listener);
        view.startAnimation(anim);
    }
    /**
     * Animate Enter of Shared Element in < 21
     * Use this in onCreate() with a safety check.
     * Set 'compatExitSharedElement' in onBackPressed() as well with a safety check to control
     * the shared element animation at exiting activity
     *
     * NOTE(review): no API-level guard here, yet View#animate()/setScaleX
     * require API 11+/12 — confirm the library's minSdk covers this.
     *
     * @param startView starting view from calling Activity
     * @param targetView Targeted view from actual activity
     */
    public static void compatEnterSharedElement(final View startView, final View targetView)
    {
        // wait until layout is done so on-screen locations are valid
        targetView.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver
                .OnPreDrawListener()
        {
            @Override
            public boolean onPreDraw()
            {
                // run only once
                targetView.getViewTreeObserver().removeOnPreDrawListener(this);
                //get image view locations
                int[] targetImageLocation = new int[2];
                targetView.getLocationOnScreen(targetImageLocation);
                int[] startImageLocation = new int[2];
                startView.getLocationOnScreen(startImageLocation);
                int xLeft = targetImageLocation[0];
                int yTop = targetImageLocation[1];
                // offsets that move the target on top of the start view
                int leftDelta = startImageLocation[0] - xLeft;
                int topDelta = startImageLocation[1] - yTop;
                // scale factors that shrink the target to the start view's size
                float widthScale = (float) startView.getWidth() / targetView.getWidth();
                float heightScale = (float) startView.getHeight() / targetView.getHeight();
                //set values for animation: jump to the start view's frame...
                targetView.setPivotX(0);
                targetView.setPivotY(0);
                targetView.setScaleX(widthScale);
                targetView.setScaleY(heightScale);
                targetView.setTranslationX(leftDelta);
                targetView.setTranslationY(topDelta);
                // ...then animate back to the natural position/size
                targetView.animate().setDuration(HALF_SECOND_DURATION)
                        .scaleX(1).scaleY(1)
                        .translationX(0).translationY(0)
                        .setInterpolator(new DecelerateInterpolator());
                return true; // proceed with the current draw pass
            }
        });
    }
    /**
     * Animate Exit of Shared Element in < 21
     * Use this in onBackPressed() with a safety check
     *
     * NOTE(review): no API-level guard here, yet View#animate() requires
     * API 11+ and withEndAction API 16 — confirm the library's minSdk.
     *
     * @param startView starting view from calling Activity
     * @param targetView Targeted view from actual activity
     * @param runnable Runnable for ending action
     */
    public static void compatExitSharedElement(View startView, View targetView, Runnable runnable)
    {
        // scale factors that shrink the target to the start view's size
        float mWidthScale = (float) startView.getWidth() / targetView.getWidth();
        float mHeightScale = (float) startView.getHeight() / targetView.getHeight();
        int[] startImageLocation = new int[2];
        startView.getLocationOnScreen(startImageLocation);
        int[] targetImageLocation = new int[2];
        targetView.getLocationOnScreen(targetImageLocation);
        // offsets that move the target on top of the start view
        int leftDelta = startImageLocation[0] - targetImageLocation[0];
        int topDelta = startImageLocation[1] - targetImageLocation[1];
        targetView.animate().setDuration(HALF_SECOND_DURATION)
                .scaleX(mWidthScale).scaleY(mHeightScale)
                .translationX(leftDelta).translationY(topDelta)
                .withEndAction(runnable);
    }
}
|
refactor resource animations, add library to build gradle for example, update readme
|
android-animation-arsenal/src/main/java/com/android_animation_arsenal/AnimationArsenal.java
|
refactor resource animations, add library to build gradle for example, update readme
|
|
Java
|
mit
|
3752d809cc0e4198775d770cb3473663e9a5e28a
| 0
|
jenkinsci/kiuwan-plugin,jenkinsci/kiuwan-plugin,kiuwan/jenkins-plugin,kiuwan/jenkins-plugin
|
package com.kiuwan.plugins.kiuwanJenkinsPlugin;
import hudson.EnvVars;
import hudson.Extension;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Launcher.ProcStarter;
import hudson.Proc;
import hudson.model.BuildListener;
import hudson.model.Result;
import hudson.model.TaskListener;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.Computer;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.BuildStepMonitor;
import hudson.tasks.Publisher;
import hudson.tasks.Recorder;
import hudson.util.FormValidation;
import hudson.util.FormValidation.Kind;
import hudson.util.Secret;
import hudson.util.ListBoxModel;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import jenkins.model.Jenkins;
import net.sf.json.JSONObject;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import com.kiuwan.client.KiuwanClientException;
import com.kiuwan.client.KiuwanRestApiClient;
import com.kiuwan.client.model.ApplicationResults;
public class KiuwanRecorder extends Recorder {
    // Relative directory (under the node root) where the analyzer is installed.
    public static final String INSTALL_DIR = "tools/kiuwan";
    // Directory name of the unpacked Kiuwan Local Analyzer distribution.
    public static final String AGENT_HOME = "KiuwanLocalAnalyzer";
    // Selectable "measure" values used for the unstable/failure thresholds.
    public static final String QUALITY_INDICATOR = "QUALITY_INDICATOR";
    public static final String EFFORT_TO_TARGET = "EFFORT_TO_TARGET";
    public static final String RISK_INDEX = "RISK_INDEX";
    private String applicationName;   // Kiuwan application name (defaults to the job name)
    private String label;             // analysis label (defaults to "#" + build number)
    private String encoding;          // source encoding (defaults to UTF-8)
    private String includes;          // include patterns passed to the analyzer
    private String excludes;          // exclude patterns passed to the analyzer
    private int timeout;              // overall analysis timeout, in minutes
    private double unstableThreshold; // measure threshold that marks the build UNSTABLE
    private double failureThreshold;  // measure threshold that marks the build FAILURE
    private String measure;           // which measure the thresholds apply to (constants above)
    // Poll interval (ms) for the timeout watchdog loop in perform().
    private final static Long TIMEOUT_MARGIN = 5000L;
    /**
     * Bound from the job configuration form by Jenkins.
     *
     * @param applicationName   Kiuwan application name; empty means "use job name"
     * @param label             analysis label; empty means "#&lt;build number&gt;"
     * @param encoding          source encoding; empty means UTF-8
     * @param includes          analyzer include patterns
     * @param excludes          analyzer exclude patterns
     * @param timeout           analysis timeout in minutes
     * @param measure           measure the thresholds apply to
     * @param unstableThreshold threshold marking the build UNSTABLE
     * @param failureThreshold  threshold marking the build FAILURE
     */
    @DataBoundConstructor
    public KiuwanRecorder(String applicationName, String label, String encoding, String includes, String excludes, int timeout, String measure, double unstableThreshold, double failureThreshold) {
        this.applicationName = applicationName;
        this.label = label;
        this.encoding = encoding;
        this.timeout = timeout;
        this.includes = includes;
        this.excludes = excludes;
        this.measure = measure;
        this.unstableThreshold = unstableThreshold;
        this.failureThreshold = failureThreshold;
    }
    /**
     * @return the configured Kiuwan application name (may be empty)
     */
    public String getApplicationName() {
        return applicationName;
    }
    /**
     * @return the configured analysis label (may be empty)
     */
    public String getLabel() {
        return label;
    }
    /**
     * @return the configured source encoding (may be empty)
     */
    public String getEncoding() {
        return encoding;
    }
    /**
     * @return the analyzer include patterns
     */
    public String getIncludes() {
        return includes;
    }
    /**
     * @return the analyzer exclude patterns
     */
    public String getExcludes() {
        return excludes;
    }
    /**
     * @return the measure the thresholds apply to
     */
    public String getMeasure() {
        return measure;
    }
    /**
     * @return the threshold that marks the build UNSTABLE
     */
    public double getUnstableThreshold() {
        return unstableThreshold;
    }
    /**
     * @return the threshold that marks the build FAILURE
     */
    public double getFailureThreshold() {
        return failureThreshold;
    }
    /**
     * @return the analysis timeout in minutes
     */
    public int getTimeout() {
        return timeout;
    }
    // This publisher runs as part of the build, not after it is finalized.
    @Override
    public boolean needsToRunAfterFinalized() {
        return false;
    }
    // Serialize with other build steps of the same build.
    public BuildStepMonitor getRequiredMonitorService() {
        return BuildStepMonitor.BUILD;
    }
    /**
     * Runs the Kiuwan analysis on a worker thread and watches it against the
     * configured timeout. Results and exceptions are handed back from the
     * worker via the two AtomicReferences.
     *
     * @see hudson.tasks.BuildStepCompatibilityLayer#perform(hudson.model.AbstractBuild,
     *      hudson.Launcher, hudson.model.BuildListener)
     */
    @Override
    public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException {
        long startTime = System.currentTimeMillis();
        // absolute deadline: configured timeout is in minutes
        long endTime = startTime+TimeUnit.MILLISECONDS.convert(timeout, TimeUnit.MINUTES);
        AtomicReference<Throwable> exceptionReference = new AtomicReference<Throwable>();
        AtomicReference<Result> resultReference = new AtomicReference<Result>();
        Thread thread = createExecutionThread(build, launcher, listener, resultReference, exceptionReference);
        thread.start();
        long currentTime = System.currentTimeMillis();
        try{
            // watchdog loop: poll the worker every TIMEOUT_MARGIN ms until it
            // finishes or the deadline passes
            while(thread.isAlive() && currentTime < endTime){
                TimeUnit.MILLISECONDS.sleep(TIMEOUT_MARGIN);
                currentTime = System.currentTimeMillis();
            }
        }
        catch(InterruptedException interruptedException){
            // build was aborted from Jenkins: stop the worker and propagate
            if(thread.isAlive()){
                thread.interrupt();
            }
            build.setResult(Result.ABORTED);
            throw interruptedException;
        }
        if(thread.isAlive()){
            // NOTE(review): on timeout the worker thread is only reported and
            // left running (it is not interrupted here) — confirm intended.
            listener.getLogger().println("Aborted by timeout.");
            build.setResult(Result.ABORTED);
        }
        Throwable throwable = exceptionReference.get();
        if(throwable != null){
            // rethrow the checked exceptions the contract allows; anything else
            // just fails the build
            if(throwable instanceof InterruptedException){
                throw (InterruptedException) throwable;
            }
            else if(throwable instanceof IOException){
                throw (IOException) throwable;
            }
            else{
                build.setResult(Result.FAILURE);
            }
        }
        // the worker's verdict (if it produced one) wins over defaults above
        Result result = resultReference.get();
        if(result != null){
            build.setResult(result);
        }
        return true;
    }
    /**
     * Builds (but does not start) the worker thread that runs the analysis.
     * The worker first validates the Kiuwan credentials, then runs the scan;
     * it communicates its outcome through the two AtomicReferences instead of
     * throwing, since exceptions cannot cross the thread boundary.
     *
     * @param build              current build
     * @param launcher           process launcher for the build node
     * @param listener           build log listener
     * @param resultReference    receives the build result decided by the worker
     * @param exceptionReference receives any exception raised by the worker
     * @return the (unstarted) worker thread
     */
    private Thread createExecutionThread(final AbstractBuild<?, ?> build, final Launcher launcher, final BuildListener listener, final AtomicReference<Result> resultReference, final AtomicReference<Throwable> exceptionReference) {
        Runnable runnable = new Runnable() {
            public void run() {
                try {
                    DescriptorImpl descriptor = getDescriptor();
                    // verify credentials before spending time on the scan
                    FormValidation connectionTestResult = descriptor.doTestConnection(descriptor.getUsername(), descriptor.getPassword());
                    if(Kind.OK.equals(connectionTestResult.kind)){
                        performScan(build, launcher, listener, resultReference);
                    }
                    else{
                        // bad credentials: point the user at the global config page
                        listener.getLogger().print("Could not get authorization from Kiuwan. Verify your ");
                        listener.hyperlink(Jenkins.getInstance().getRootUrl()+"/configure", "Kiuwan account settings");
                        listener.getLogger().println(".");
                        resultReference.set(Result.NOT_BUILT);
                    }
                } catch (KiuwanException e) {
                    // plugin-level failure: log the full stack trace to the build log
                    listener.getLogger().println(e.getMessage());
                    listener.fatalError(e.getMessage());
                    StringWriter sw = new StringWriter();
                    PrintWriter pw = new PrintWriter(sw);
                    e.printStackTrace(pw);
                    listener.getLogger().println(sw.toString());
                    resultReference.set(Result.NOT_BUILT);
                    exceptionReference.set(e);
                } catch (IOException e) {
                    listener.getLogger().println(e.toString());
                    exceptionReference.set(e);
                    resultReference.set(Result.NOT_BUILT);
                } catch (InterruptedException e) {
                    listener.getLogger().println("Analysis interrupted.");
                    exceptionReference.set(e);
                    resultReference.set(Result.ABORTED);
                }
                catch(Throwable throwable){
                    // last-resort catch so the worker never dies silently
                    listener.getLogger().println(ExceptionUtils.getFullStackTrace(throwable));
                    resultReference.set(Result.NOT_BUILT);
                }
            }
        };
        Thread thread = new Thread(runnable);
        return thread;
    }
    /**
     * Runs the Kiuwan Local Analyzer on the build's sources, scrapes the
     * analysis code from its stdout, then polls the Kiuwan REST API until the
     * analysis finishes and applies the configured thresholds.
     *
     * @param build           current build (source of workspace, variables, number)
     * @param launcher        process launcher for the build node
     * @param listener        build log listener
     * @param resultReference receives the build result decided here
     * @throws KiuwanException      on plugin-level failures
     * @throws IOException          on process or remoting failures
     * @throws InterruptedException if the analysis is interrupted
     */
    private void performScan(AbstractBuild<?, ?> build, Launcher launcher, final BuildListener listener, AtomicReference<Result> resultReference) throws KiuwanException, IOException, InterruptedException {
        // apply the documented defaults for empty configuration values
        String name = this.applicationName;
        if (StringUtils.isEmpty(name)) {
            name = build.getProject().getName();
        }
        String analysisLabel = this.label;
        if (StringUtils.isEmpty(analysisLabel)) {
            analysisLabel = "#" + build.getNumber();
        }
        String analysisEncoding = this.encoding;
        if (StringUtils.isEmpty(analysisEncoding)) {
            analysisEncoding = "UTF-8";
        }
        FilePath srcFolder = build.getModuleRoot();
        // node root is two levels above the module root; the analyzer lives there
        FilePath rootPath = srcFolder.getParent().getParent();
        FilePath remoteDir = rootPath.child(KiuwanComputerListener.INSTALL_DIR);
        FilePath agentHome = remoteDir.child(KiuwanComputerListener.AGENT_HOME);
        if (!agentHome.exists()) {
            // lazily install the analyzer on this node if missing
            installLocalAnalyzer(rootPath, listener);
        }
        DescriptorImpl descriptor = getDescriptor();
        String command = launcher.isUnix() ? "agent.sh" : "agent.cmd";
        FilePath agentBinDir = agentHome.child("bin");
        FilePath script = agentBinDir.child(command);
        // environment of the remote node, overlaid with the build's variables
        EnvVars env = agentBinDir.act(new KiuwanRemoteEnvironment());
        env.overrideAll(build.getBuildVariables());
        final PrintStream loggerStream = listener.getLogger();
        if (launcher.isUnix()) {
            // the unpacked script may not be executable yet
            loggerStream.println("Changing "+command+" permission");
            agentBinDir.child("agent.sh").chmod(0755);
        }
        saveCredentials(build, launcher, descriptor, agentBinDir, script, env, listener);
        printExecutionConfiguration(listener, name, analysisLabel, analysisEncoding, srcFolder, script);
        String analysisCode = null;
        int result = -1;
        List<String> args = buildAgentCommand(launcher, name, analysisLabel, analysisEncoding, srcFolder, command, agentBinDir, listener);
        ProcStarter procStarter = null;
        if(launcher.isUnix()){
            procStarter = launcher.launch().cmds(args);
        }
        else{
            // on Windows, run through cmd /s /c with the whole line as one string
            StringBuilder stringBuilder = new StringBuilder();
            stringBuilder.append(" ");
            for (String arg : args) {
                stringBuilder.append(arg+" ");
            }
            procStarter = launcher.launch().cmds(new String[]{"cmd","/s","/c", stringBuilder.toString()});
        }
        procStarter = procStarter.envs(env).readStdout().pwd(script.getParent());
        Proc process = procStarter.start();
        BufferedReader bufferedReader = null;
        // scrape the analysis code out of the agent's console output
        Pattern pattern = Pattern.compile(".*Analysis created in Kiuwan with code: (.*)$");
        bufferedReader = new BufferedReader(new InputStreamReader(process.getStdout()));
        String line = null;
        while((line = bufferedReader.readLine()) != null){
            Matcher matcher = pattern.matcher(line);
            boolean found = matcher.find();
            if(found){
                analysisCode = matcher.group(1);
            }
            listener.getLogger().println(line);
        }
        result = process.join();
        if (result != 0 || analysisCode == null) {
            // agent failed or never reported an analysis code
            resultReference.set(Result.NOT_BUILT);
        } else {
            double qualityIndicator = -1d;
            double effortToTarget = -1d;
            double riskIndex = -1d;
            boolean buildFailedInKiuwan = false;
            boolean end = false;
            KiuwanRestApiClient client = new KiuwanRestApiClient(descriptor.getUsername(), descriptor.getPassword());
            int retries = 3;
            // poll the Kiuwan REST API (once a minute) until the analysis ends,
            // tolerating up to 3 transient client errors
            do {
                try {
                    loggerStream.println("Query for result: "+analysisCode);
                    ApplicationResults results = client.getApplicationResultsByAnalysisCode(analysisCode);
                    loggerStream.println("Analysis status in Kiuwan: "+results.getAnalysisStatus());
                    if ("FINISHED".equalsIgnoreCase(results.getAnalysisStatus())) {
                        qualityIndicator = results.getQualityIndicator().getValue();
                        effortToTarget = results.getEffortToTarget().getValue();
                        // round the risk index to 2 decimals, half-up
                        BigDecimal rawRiskIndex = new BigDecimal(results.getRiskIndex().getValue());
                        rawRiskIndex = rawRiskIndex.setScale(2, RoundingMode.HALF_UP);
                        riskIndex = rawRiskIndex.doubleValue();
                        end = true;
                    } else if ("FINISHED_WITH_ERROR".equalsIgnoreCase(results.getAnalysisStatus())) {
                        buildFailedInKiuwan = true;
                        end = true;
                    }
                } catch (KiuwanClientException e) {
                    if(retries > 0){
                        // Re-initializes the client.
                        client = new KiuwanRestApiClient(descriptor.getUsername(), descriptor.getPassword());
                        retries--;
                    }
                    else{
                        // retries exhausted: give up and fail the build
                        loggerStream.println(e.getMessage());
                        buildFailedInKiuwan = true;
                        end = true;
                    }
                }
                if(!end){
                    Thread.sleep(60000);
                }
            }while (!end);
            if (buildFailedInKiuwan) {
                loggerStream.println("Build failed in Kiuwan");
                resultReference.set(Result.NOT_BUILT);
            } else {
                // report the measures, apply thresholds, and link the summary
                printAnalysisSummary(listener, qualityIndicator, effortToTarget, riskIndex);
                checkThresholds(build, listener, qualityIndicator, effortToTarget, riskIndex, resultReference);
                KiuwanBuildSummaryAction link = new KiuwanBuildSummaryAction(name, analysisLabel);
                build.addAction(link);
            }
        }
    }
/**
 * Resolves the absolute path of the given file on the node where it resides.
 *
 * @param filePath the (possibly remote) file to resolve
 * @param listener used to report a fatal error when the file cannot be found
 * @return the absolute remote path, or {@code null} if the file does not exist
 */
private String getRemoteFileAbsolutePath(FilePath filePath, TaskListener listener) throws IOException, InterruptedException {
    String path = filePath.act(new KiuwanRemoteFilePath());
    if (path == null) {
        // Bug fix: the original code recursed into this same method to build the
        // error message, which recurses forever (StackOverflowError) whenever the
        // file is missing. Use the FilePath's own remote representation instead.
        listener.fatalError("File: \"" + filePath.getRemote() + "\", not found.");
    }
    return path;
}
// Persists the Kiuwan credentials for the local analyzer in two steps whose
// order matters: first the agent's own "-e" mode encrypts the password
// interactively, then the username is written into its agent.properties file.
private void saveCredentials(AbstractBuild<?, ?> build, Launcher launcher, DescriptorImpl descriptor, FilePath agentBinDir, FilePath script, EnvVars env, TaskListener listener) throws IOException, InterruptedException {
encryptSecret(build, launcher, descriptor, agentBinDir, script, env, listener);
saveCredentialsInProperties(descriptor, agentBinDir, listener);
}
// Writes the globally-configured username into <agent home>/conf/agent.properties
// on the node where the analyzer is installed (executed remotely via FilePath.act).
// NOTE(review): the 'listener' parameter is unused here.
private void saveCredentialsInProperties(DescriptorImpl descriptor, FilePath agentBinDir, TaskListener listener) throws FileNotFoundException, IOException, InterruptedException {
agentBinDir.getParent().child("conf").child("agent.properties").act(new KiuwanAgentProperties(descriptor.getUsername()));
}
// Runs the analyzer script with "-e" so that it encrypts the Kiuwan password
// itself. The script expects an interactive console, so we wire up two pipe
// pairs: one simulating the user's keyboard (we type the password) and one
// capturing the script's console prompt. A helper thread reads the prompt and
// answers with the password; the main thread runs the script and waits.
private void encryptSecret(AbstractBuild<?, ?> build, Launcher launcher, DescriptorImpl descriptor, FilePath agentBinDir, FilePath script, EnvVars env, TaskListener listener) throws IOException, InterruptedException {
// stdin of the script <- what we "type" through userKeyboard.
PipedInputStream userInput = new PipedInputStream();
final PipedOutputStream userKeyboard = new PipedOutputStream(userInput);
// stdout of the script -> read back through consoleReaderStream.
final PipedInputStream consoleReaderStream = new PipedInputStream();
PipedOutputStream console = new PipedOutputStream(consoleReaderStream);
try{
final PrintStream loggerStream = listener.getLogger();
final String password = descriptor.getPassword();
// Answers the script's single prompt line with the plain-text password.
Runnable consoleRunnable = new Runnable() {
public void run() {
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(consoleReaderStream));
OutputStreamWriter outputStreamWriter = new OutputStreamWriter(userKeyboard);
try {
// Wait for the prompt, echo it to the build log, then send the password.
String line = bufferedReader.readLine();
loggerStream.println(line);
outputStreamWriter.write(password);
outputStreamWriter.flush();
} catch (IOException e) {
loggerStream.println(ExceptionUtils.getFullStackTrace(e));
}
finally{
IOUtils.closeQuietly(outputStreamWriter);
}
}
};
Thread thread = new Thread(consoleRunnable);
thread.start();
// Launch "<script> -e" with our pipes as its stdin/stdout and wait for it.
launcher.launch().cmds(getRemoteFileAbsolutePath(script, listener), "-e").envs(env).stdin(userInput).stdout(console).pwd(script.getParent()).join();
// Give the answering thread a bounded grace period to finish.
thread.join(5000L);
}
finally{
IOUtils.closeQuietly(console);
IOUtils.closeQuietly(userInput);
}
}
/**
 * Logs the effective analysis configuration (after defaulting) to the build console.
 */
private void printExecutionConfiguration(BuildListener listener, String name, String analysisLabel, String analysisEncoding, FilePath srcFolder, FilePath script) throws IOException, InterruptedException {
    PrintStream logger = listener.getLogger();
    logger.println("Analyze folder: " + getRemoteFileAbsolutePath(srcFolder, listener));
    logger.println("Script: " + getRemoteFileAbsolutePath(script, listener));
    logger.println("kiuwan app name: " + name);
    logger.println("Analysis label: " + analysisLabel);
    logger.println("Threshold measure: " + this.measure);
    logger.println("Unstable threshold: " + this.unstableThreshold);
    logger.println("Failure threshold: " + this.failureThreshold);
    logger.println("encoding: " + analysisEncoding);
    logger.println("includes pattern: " + includes);
    logger.println("excludes pattern: " + excludes);
    logger.println("timeout: " + timeout + " minutes");
}
/**
 * Applies the configured thresholds to the analysis results and downgrades the
 * build result accordingly. For the quality indicator a LOWER value is worse;
 * for effort-to-target and risk index a HIGHER value is worse.
 */
private void checkThresholds(AbstractBuild<?, ?> build, BuildListener listener, double qualityIndicator, double effortToTarget, double riskIndex, AtomicReference<Result> resultReference) {
    PrintStream log = listener.getLogger();
    if (QUALITY_INDICATOR.equalsIgnoreCase(this.measure)) {
        if (qualityIndicator < this.failureThreshold) {
            resultReference.set(Result.FAILURE);
            log.println("Quality indicator is lower than " + this.failureThreshold);
        } else if (qualityIndicator < this.unstableThreshold) {
            resultReference.set(Result.UNSTABLE);
            log.println("Quality indicator is lower than " + this.unstableThreshold);
        }
    } else if (EFFORT_TO_TARGET.equalsIgnoreCase(this.measure)) {
        if (effortToTarget > this.failureThreshold) {
            resultReference.set(Result.FAILURE);
            log.println("Effort to target is greater than " + this.failureThreshold);
        } else if (effortToTarget > this.unstableThreshold) {
            resultReference.set(Result.UNSTABLE);
            log.println("Effort to target is greater than " + this.unstableThreshold);
        }
    } else if (RISK_INDEX.equalsIgnoreCase(this.measure)) {
        if (riskIndex > this.failureThreshold) {
            resultReference.set(Result.FAILURE);
            log.println("Risk index is greater than " + this.failureThreshold);
        } else if (riskIndex > this.unstableThreshold) {
            resultReference.set(Result.UNSTABLE);
            log.println("Risk index is greater than " + this.unstableThreshold);
        }
    }
}
/**
 * Prints a short, framed summary of the three analysis indicators to the build log.
 */
private void printAnalysisSummary(BuildListener listener, double qualityIndicator, double effortToTarget, double riskIndex) {
    PrintStream log = listener.getLogger();
    String rule = "==========================================================================";
    log.println(rule);
    log.println(" Kiuwan Static Analysis Summary ");
    log.println(rule);
    log.println(" - Quality indicator: " + qualityIndicator);
    log.println(" - Effort to target: " + effortToTarget);
    log.println(" - Risk index: " + riskIndex);
    log.println();
}
/**
 * Builds the command line for the Kiuwan local analyzer agent.
 *
 * @param launcher used only to decide Unix vs Windows argument quoting
 * @param name application name passed with {@code -n}
 * @param analysisLabel label passed with {@code -l}
 * @param analysisEncoding source encoding passed as an extra {@code -c} parameter
 * @param srcFolder folder to analyze, passed with {@code -s}
 * @param command agent script name (agent.sh / agent.cmd)
 * @param agentBinDir the agent's bin directory on the build node
 * @return the argument list, quoted as required by the target platform
 */
private List<String> buildAgentCommand(Launcher launcher, String name, String analysisLabel, String analysisEncoding, FilePath srcFolder, String command, FilePath agentBinDir, TaskListener listener) throws IOException, InterruptedException {
    // Give the agent slightly less time than the plugin's own timeout so it can
    // stop gracefully before the build is aborted.
    String timeoutAsString = Long.toString(TimeUnit.MILLISECONDS.convert(this.timeout, TimeUnit.MINUTES) - TIMEOUT_MARGIN);
    String commandAbsolutePath = getRemoteFileAbsolutePath(agentBinDir.child(command), listener);
    List<String> args = new ArrayList<String>();
    args.add(buildArgument(launcher, commandAbsolutePath));
    args.add("-s");
    args.add(buildArgument(launcher, getRemoteFileAbsolutePath(srcFolder, listener)));
    args.add("-n");
    args.add(buildArgument(launcher, name));
    args.add("-l");
    args.add(buildArgument(launcher, analysisLabel));
    args.add("-c");
    args.add(buildAdditionalParameterExpression(launcher, "timeout", timeoutAsString));
    args.add(buildAdditionalParameterExpression(launcher, "encoding", analysisEncoding));
    if (StringUtils.isNotBlank(includes)) {
        args.add(buildAdditionalParameterExpression(launcher, "include.patterns", includes));
    }
    if (StringUtils.isNotBlank(excludes)) {
        args.add(buildAdditionalParameterExpression(launcher, "exclude.patterns", excludes));
    }
    return args;
}
/**
 * Quotes an argument for Windows shells; on Unix the argument is returned
 * unchanged. Embedded double quotes are rejected because they would break
 * the quoting scheme.
 *
 * @throws IllegalArgumentException if the argument contains a double quote
 */
private String buildArgument(Launcher launcher, String argument) {
    if (argument.indexOf('"') != -1) {
        throw new IllegalArgumentException("Double quote is not allowed in parameters: " + argument);
    }
    return launcher.isUnix() ? argument : "\"" + argument + "\"";
}
/**
 * Builds a {@code name=value} expression for the agent's {@code -c} option,
 * quoting the value on Windows. Embedded double quotes are rejected.
 *
 * @throws IllegalArgumentException if the value contains a double quote
 */
private String buildAdditionalParameterExpression(Launcher launcher, String parameterName, String parameterValue) {
    if (parameterValue.indexOf('"') != -1) {
        throw new IllegalArgumentException("Double quote is not allowed in parameters: " + parameterValue);
    }
    if (launcher.isUnix()) {
        return parameterName + "=" + parameterValue;
    }
    return parameterName + "=\"" + parameterValue + "\"";
}
// Downloads and unpacks the Kiuwan Local Analyzer distribution into
// <root>/tools/kiuwan on the build node, choosing the artifact from the node's
// OS name and JVM data model (32/64 bit).
// NOTE(review): Computer.currentComputer() assumes this runs on an executor
// thread of the target node — confirm for the timeout/worker thread path.
private void installLocalAnalyzer(FilePath root, BuildListener listener) throws IOException, InterruptedException {
KiuwanDownloadable kiuwanDownloadable = new KiuwanDownloadable();
FilePath remoteDir = root.child(INSTALL_DIR);
listener.getLogger().println("Installing KiuwanLocalAnalyzer in " + remoteDir);
Map<Object, Object> props = Computer.currentComputer().getSystemProperties();
File zip = kiuwanDownloadable.resolve((String) props.get("os.name"), (String) props.get("sun.arch.data.model"), listener);
remoteDir.mkdirs();
new FilePath(zip).unzip(remoteDir);
}
// Narrows the inherited descriptor type; safe because DescriptorImpl is the
// only descriptor registered (via @Extension) for this publisher.
@Override
public DescriptorImpl getDescriptor() {
return (DescriptorImpl) super.getDescriptor();
}
// Global (Jenkins-wide) descriptor: holds the Kiuwan account credentials and
// implements the form validation for the job-level threshold configuration.
@Extension
public static class DescriptorImpl extends BuildStepDescriptor<Publisher> {
// Parallel arrays backing the "measure" drop-down: internal value + display name.
private final static String[] comboValues = { QUALITY_INDICATOR, RISK_INDEX, EFFORT_TO_TARGET };
private final static String[] comboNames = { "Quality indicator", "Risk index", "Effort to target" };
// Kiuwan account user name (stored as-is).
private String username;
// Kiuwan password, stored in Secret-encrypted form (see configure()).
private String password;
public DescriptorImpl() {
// Restore previously persisted global configuration.
load();
}
// Binds and persists the global configuration submitted from the system page.
@Override
public boolean configure(StaplerRequest req, JSONObject json) throws FormException {
// to persist global configuration information,
// set that to properties and call save().
String username = (String) json.get("username");
String password = (String) json.get("password");
this.username = username;
// Never persist the password in plain text.
Secret secret = Secret.fromString(password);
this.password = secret.getEncryptedValue();
save();
return true;
}
@Override
public String getDisplayName() {
return "Analyze your source code with Kiuwan!";
}
// This post-build step can be added to any project type.
@Override
public boolean isApplicable(Class<? extends AbstractProject> item) {
return true;
}
/**
* @return the username
*/
public String getUsername() {
return this.username;
}
/**
* @return the password, decrypted from its persisted Secret form.
* NOTE(review): behavior when no password was ever configured depends on
* Secret.decrypt(null)/Secret.toString(null) semantics — verify.
*/
public String getPassword() {
return Secret.toString(Secret.decrypt(this.password));
}
// "Test connection" button: performs a lightweight authenticated API call.
public FormValidation doTestConnection(@QueryParameter String username, @QueryParameter String password) {
KiuwanRestApiClient client = new KiuwanRestApiClient(username, password);
try {
client.getApplications();
return FormValidation.ok("Authentication completed successfully!");
} catch (KiuwanClientException kiuwanClientException) {
return FormValidation.error("Authentication failed.");
} catch (Throwable throwable) {
return FormValidation.warning("Could not initiate the authentication process. Reason: " + throwable.getMessage());
}
}
// Populates the measure drop-down, pre-selecting the currently saved value.
public ListBoxModel doFillMeasureItems(@QueryParameter("measure") String measure) {
ListBoxModel items = new ListBoxModel();
for (int i = 0; i < comboNames.length; i++) {
if (comboValues[i].equalsIgnoreCase(measure)) {
items.add(new ListBoxModel.Option(comboNames[i], comboValues[i], true));
} else {
items.add(comboNames[i], comboValues[i]);
}
}
return items;
}
public FormValidation doCheckTimeout(@QueryParameter("timeout") int timeout) {
if(timeout < 1){
return FormValidation.error("Timeout must be greater than 0.");
}
else{
return FormValidation.ok();
}
}
// Validates both thresholds together: unstable first, then failure.
public FormValidation doCheckThresholds(@QueryParameter("unstableThreshold") String unstableThreshold, @QueryParameter("failureThreshold") String failureThreshold, @QueryParameter("measure") String measure) {
FormValidation unstableThresholdValidationResult = doCheckUnstableThreshold(unstableThreshold, failureThreshold, measure);
if(Kind.OK.equals(unstableThresholdValidationResult.kind)){
return doCheckFailureThreshold(failureThreshold, unstableThreshold, measure);
}
else{
return unstableThresholdValidationResult;
}
}
// Validates the unstable threshold against the selected measure. For the
// quality indicator (higher is better) failure must stay BELOW unstable; for
// risk index and effort-to-target (lower is better) failure must stay ABOVE it.
// A non-numeric sibling threshold is ignored here (it gets its own validation).
public FormValidation doCheckUnstableThreshold(@QueryParameter("unstableThreshold") String unstableThreshold, @QueryParameter("failureThreshold") String failureThreshold, @QueryParameter("measure") String measure) {
double unstable = 0;
try {
unstable = Double.parseDouble(unstableThreshold);
if (unstable < 0) {
return FormValidation.error("Unstable threshold must be a positive number.");
}
} catch (Throwable throwable) {
return FormValidation.error("Unstable threshold must be a non-negative numeric value.");
}
if (QUALITY_INDICATOR.equalsIgnoreCase(measure)) {
if (unstable >= 100) {
return FormValidation.error("Unstable threshold must be lower than 100.");
} else {
try {
double failure = Double.parseDouble(failureThreshold);
if (failure >= unstable) {
return FormValidation.error("Unstable threshold can not be lower or equal than failure threshold.");
}
} catch (Throwable throwable) {
// Ignore
}
}
} else if (RISK_INDEX.equalsIgnoreCase(measure)) {
if (unstable <= 0) {
return FormValidation.error("Unstable threshold must be greater than 0.");
} else {
try {
double failure = Double.parseDouble(failureThreshold);
if (failure <= unstable) {
return FormValidation.error("Unstable threshold can not be greater or equal than failure threshold.");
}
} catch (Throwable throwable) {
// Ignore
}
}
} else if (EFFORT_TO_TARGET.equalsIgnoreCase(measure)) {
try {
double failed = Double.parseDouble(failureThreshold);
if (failed <= unstable) {
return FormValidation.error("Unstable threshold can not be greater or equal than failure threshold.");
}
} catch (Throwable throwable) {
// Ignore
}
}
return FormValidation.ok();
}
// Mirror of doCheckUnstableThreshold from the failure threshold's point of view.
public FormValidation doCheckFailureThreshold(@QueryParameter("failureThreshold") String failureThreshold, @QueryParameter("unstableThreshold") String unstableThreshold, @QueryParameter("measure") String measure) {
double failure = 0;
try {
failure = Double.parseDouble(failureThreshold);
if (failure < 0) {
return FormValidation.error("Failure threshold must be a positive number.");
}
} catch (Throwable throwable) {
return FormValidation.error("Failure threshold must be a non-negative numeric value.");
}
if (QUALITY_INDICATOR.equalsIgnoreCase(measure)) {
try {
double unstable = Double.parseDouble(unstableThreshold);
if (failure >= unstable) {
return FormValidation.error("Failure threshold can not be greater or equal than unstable threshold.");
}
} catch (Throwable throwable) {
// Ignore
}
} else if (RISK_INDEX.equalsIgnoreCase(measure)) {
if (failure > 100) {
return FormValidation.error("Failure threshold must be lower or equal than 100.");
} else {
try {
double unstable = Double.parseDouble(unstableThreshold);
if (failure <= unstable) {
return FormValidation.error("Failure threshold can not be lower or equal than unstable threshold.");
}
} catch (Throwable throwable) {
// Ignore
}
}
} else if (EFFORT_TO_TARGET.equalsIgnoreCase(measure)) {
try {
double unstable = Double.parseDouble(unstableThreshold);
if (failure <= unstable) {
return FormValidation.error("Failure threshold can not be lower or equal than unstable threshold.");
}
} catch (Throwable throwable) {
// Ignore
}
}
return FormValidation.ok();
}
}
}
|
src/main/java/com/kiuwan/plugins/kiuwanJenkinsPlugin/KiuwanRecorder.java
|
package com.kiuwan.plugins.kiuwanJenkinsPlugin;
import hudson.EnvVars;
import hudson.Extension;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Launcher.ProcStarter;
import hudson.Proc;
import hudson.model.BuildListener;
import hudson.model.Result;
import hudson.model.TaskListener;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.Computer;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.BuildStepMonitor;
import hudson.tasks.Publisher;
import hudson.tasks.Recorder;
import hudson.util.FormValidation;
import hudson.util.FormValidation.Kind;
import hudson.util.Secret;
import hudson.util.ListBoxModel;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import jenkins.model.Jenkins;
import net.sf.json.JSONObject;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import com.kiuwan.client.KiuwanClientException;
import com.kiuwan.client.KiuwanRestApiClient;
import com.kiuwan.client.model.ApplicationResults;
public class KiuwanRecorder extends Recorder {
// Relative path (under the node root) where the local analyzer gets installed.
public static final String INSTALL_DIR = "tools/kiuwan";
// Directory name of the unpacked Kiuwan Local Analyzer distribution.
public static final String AGENT_HOME = "KiuwanLocalAnalyzer";
// Identifiers of the measure the configured thresholds are applied to.
public static final String QUALITY_INDICATOR = "QUALITY_INDICATOR";
public static final String EFFORT_TO_TARGET = "EFFORT_TO_TARGET";
public static final String RISK_INDEX = "RISK_INDEX";
// Job-level configuration, bound through the @DataBoundConstructor.
private String applicationName;
private String label;
private String encoding;
private String includes;
private String excludes;
private int timeout;
private double unstableThreshold;
private double failureThreshold;
private String measure;
// Milliseconds: polling period in perform() and the slack subtracted from the
// agent's timeout so it can finish before the plugin aborts the build.
private final static Long TIMEOUT_MARGIN = 5000L;
// Data-bound constructor invoked by Jenkins when the job configuration is saved;
// simply captures the form values (defaults are applied later, in performScan).
@DataBoundConstructor
public KiuwanRecorder(String applicationName, String label, String encoding, String includes, String excludes, int timeout, String measure, double unstableThreshold, double failureThreshold) {
this.applicationName = applicationName;
this.label = label;
this.encoding = encoding;
this.timeout = timeout;
this.includes = includes;
this.excludes = excludes;
this.measure = measure;
this.unstableThreshold = unstableThreshold;
this.failureThreshold = failureThreshold;
}
/**
* @return the configured application name (may be empty; performScan falls back
* to the Jenkins project name)
*/
public String getApplicationName() {
return applicationName;
}
/**
* @return the analysis label (may be empty; performScan falls back to "#" + build number)
*/
public String getLabel() {
return label;
}
/**
* @return the source encoding (may be empty; performScan falls back to UTF-8)
*/
public String getEncoding() {
return encoding;
}
/**
* @return the include patterns passed to the analyzer, or blank for none
*/
public String getIncludes() {
return includes;
}
/**
* @return the exclude patterns passed to the analyzer, or blank for none
*/
public String getExcludes() {
return excludes;
}
/**
* @return the measure identifier the thresholds apply to (see QUALITY_INDICATOR etc.)
*/
public String getMeasure() {
return measure;
}
/**
* @return the unstableThreshold
*/
public double getUnstableThreshold() {
return unstableThreshold;
}
/**
* @return the failureThreshold
*/
public double getFailureThreshold() {
return failureThreshold;
}
/**
* @return the timeout, in minutes
*/
public int getTimeout() {
return timeout;
}
// This recorder does not need to run after the build result is finalized.
@Override
public boolean needsToRunAfterFinalized() {
return false;
}
// Serialize executions per build so concurrent builds do not interleave analyses.
public BuildStepMonitor getRequiredMonitorService() {
return BuildStepMonitor.BUILD;
}
/**
 * Runs the Kiuwan analysis in a background thread and enforces the configured
 * timeout, mapping the worker's outcome onto the build result.
 *
 * @see hudson.tasks.BuildStepCompatibilityLayer#perform(hudson.model.AbstractBuild,
 *      hudson.Launcher, hudson.model.BuildListener)
 */
@Override
public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException {
    long startTime = System.currentTimeMillis();
    long endTime = startTime + TimeUnit.MILLISECONDS.convert(timeout, TimeUnit.MINUTES);
    AtomicReference<Throwable> exceptionReference = new AtomicReference<Throwable>();
    AtomicReference<Result> resultReference = new AtomicReference<Result>();
    Thread thread = createExecutionThread(build, launcher, listener, resultReference, exceptionReference);
    thread.start();
    long currentTime = System.currentTimeMillis();
    try {
        // Poll until the worker finishes or the timeout expires.
        while (thread.isAlive() && currentTime < endTime) {
            TimeUnit.MILLISECONDS.sleep(TIMEOUT_MARGIN);
            currentTime = System.currentTimeMillis();
        }
    } catch (InterruptedException interruptedException) {
        // The build itself was aborted: stop the worker and propagate.
        if (thread.isAlive()) {
            thread.interrupt();
        }
        build.setResult(Result.ABORTED);
        throw interruptedException;
    }
    if (thread.isAlive()) {
        listener.getLogger().println("Aborted by timeout.");
        // Bug fix: the worker thread was previously left running after the
        // timeout expired; interrupt it so it does not keep polling Kiuwan.
        thread.interrupt();
        build.setResult(Result.ABORTED);
    }
    // Re-throw exceptions the worker recorded; anything else marks a failure.
    Throwable throwable = exceptionReference.get();
    if (throwable != null) {
        if (throwable instanceof InterruptedException) {
            throw (InterruptedException) throwable;
        } else if (throwable instanceof IOException) {
            throw (IOException) throwable;
        } else {
            build.setResult(Result.FAILURE);
        }
    }
    // The worker's result (if any) takes precedence.
    Result result = resultReference.get();
    if (result != null) {
        build.setResult(result);
    }
    return true;
}
// Builds (but does not start) the worker thread that verifies the Kiuwan
// credentials and runs the scan. Outcomes are communicated back to perform()
// through the two AtomicReference out-parameters: resultReference carries the
// build result to set, exceptionReference carries exceptions to re-throw.
private Thread createExecutionThread(final AbstractBuild<?, ?> build, final Launcher launcher, final BuildListener listener, final AtomicReference<Result> resultReference, final AtomicReference<Throwable> exceptionReference) {
Runnable runnable = new Runnable() {
public void run() {
try {
DescriptorImpl descriptor = getDescriptor();
// Fail fast with a helpful link if the stored credentials are rejected.
FormValidation connectionTestResult = descriptor.doTestConnection(descriptor.getUsername(), descriptor.getPassword());
if(Kind.OK.equals(connectionTestResult.kind)){
performScan(build, launcher, listener, resultReference);
}
else{
listener.getLogger().print("Could not get authorization from Kiuwan. Verify your ");
listener.hyperlink(Jenkins.getInstance().getRootUrl()+"/configure", "Kiuwan account settings");
listener.getLogger().println(".");
resultReference.set(Result.NOT_BUILT);
}
} catch (KiuwanException e) {
// Domain failure: log message plus full stack trace, mark NOT_BUILT.
listener.getLogger().println(e.getMessage());
listener.fatalError(e.getMessage());
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);
listener.getLogger().println(sw.toString());
resultReference.set(Result.NOT_BUILT);
exceptionReference.set(e);
} catch (IOException e) {
// Handed back to perform(), which re-throws it on the executor thread.
listener.getLogger().println(e.toString());
exceptionReference.set(e);
resultReference.set(Result.NOT_BUILT);
} catch (InterruptedException e) {
// Timeout or build abort interrupted the scan.
listener.getLogger().println("Analysis interrupted.");
exceptionReference.set(e);
resultReference.set(Result.ABORTED);
}
catch(Throwable throwable){
// Last-resort catch so an unexpected error never kills the thread silently.
listener.getLogger().println(ExceptionUtils.getFullStackTrace(throwable));
resultReference.set(Result.NOT_BUILT);
}
}
};
Thread thread = new Thread(runnable);
return thread;
}
/**
 * Runs the Kiuwan local analyzer against the build's workspace, scrapes the
 * analysis code from the agent's output, then polls the Kiuwan REST API until
 * the analysis finishes and applies the configured thresholds to the result.
 *
 * @param resultReference out-parameter receiving the build result to set
 */
private void performScan(AbstractBuild<?, ?> build, Launcher launcher, final BuildListener listener, AtomicReference<Result> resultReference) throws KiuwanException, IOException, InterruptedException {
    // Apply defaults for unset job configuration values.
    String name = this.applicationName;
    if (StringUtils.isEmpty(name)) {
        name = build.getProject().getName();
    }
    String analysisLabel = this.label;
    if (StringUtils.isEmpty(analysisLabel)) {
        analysisLabel = "#" + build.getNumber();
    }
    String analysisEncoding = this.encoding;
    if (StringUtils.isEmpty(analysisEncoding)) {
        analysisEncoding = "UTF-8";
    }
    FilePath srcFolder = build.getModuleRoot();
    FilePath rootPath = srcFolder.getParent().getParent();
    FilePath remoteDir = rootPath.child(KiuwanComputerListener.INSTALL_DIR);
    FilePath agentHome = remoteDir.child(KiuwanComputerListener.AGENT_HOME);
    // Install the local analyzer on this node on first use.
    if (!agentHome.exists()) {
        installLocalAnalyzer(rootPath, listener);
    }
    DescriptorImpl descriptor = getDescriptor();
    String command = launcher.isUnix() ? "agent.sh" : "agent.cmd";
    FilePath agentBinDir = agentHome.child("bin");
    FilePath script = agentBinDir.child(command);
    EnvVars env = agentBinDir.act(new KiuwanRemoteEnvironment());
    env.overrideAll(build.getBuildVariables());
    final PrintStream loggerStream = listener.getLogger();
    if (launcher.isUnix()) {
        loggerStream.println("Changing " + command + " permission");
        agentBinDir.child("agent.sh").chmod(0755);
    }
    saveCredentials(build, launcher, descriptor, agentBinDir, script, env, listener);
    printExecutionConfiguration(listener, name, analysisLabel, analysisEncoding, srcFolder, script);
    String analysisCode = null;
    int result = -1;
    List<String> args = buildAgentCommand(launcher, name, analysisLabel, analysisEncoding, srcFolder, command, agentBinDir, listener);
    ProcStarter procStarter = null;
    if (launcher.isUnix()) {
        procStarter = launcher.launch().cmds(args);
    } else {
        // On Windows the whole command line is handed to "cmd /s /c" as one string.
        StringBuilder stringBuilder = new StringBuilder();
        for (String arg : args) {
            stringBuilder.append(arg + " ");
        }
        procStarter = launcher.launch().cmds(new String[]{"cmd", "/s", "/c", stringBuilder.toString()});
    }
    procStarter = procStarter.envs(env).readStdout().pwd(script.getParent());
    Proc process = procStarter.start();
    // Echo the agent's output to the build log while scraping the analysis code.
    Pattern pattern = Pattern.compile(".*Analysis created in Kiuwan with code: (.*)$");
    BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(process.getStdout()));
    try {
        String line = null;
        while ((line = bufferedReader.readLine()) != null) {
            Matcher matcher = pattern.matcher(line);
            if (matcher.find()) {
                analysisCode = matcher.group(1);
            }
            listener.getLogger().println(line);
        }
    } finally {
        // Bug fix: the reader over the process stdout was never closed before.
        IOUtils.closeQuietly(bufferedReader);
    }
    result = process.join();
    if (result != 0 || analysisCode == null) {
        resultReference.set(Result.NOT_BUILT);
    } else {
        double qualityIndicator = -1d;
        double effortToTarget = -1d;
        double riskIndex = -1d;
        boolean buildFailedInKiuwan = false;
        boolean end = false;
        KiuwanRestApiClient client = new KiuwanRestApiClient(descriptor.getUsername(), descriptor.getPassword());
        int retries = 3;
        // Poll Kiuwan once a minute until the analysis reaches a terminal state,
        // tolerating up to 3 transient API failures.
        do {
            try {
                loggerStream.println("Query for result: " + analysisCode);
                ApplicationResults results = client.getApplicationResultsByAnalysisCode(analysisCode);
                loggerStream.println("Analysis status in Kiuwan: " + results.getAnalysisStatus());
                if ("FINISHED".equalsIgnoreCase(results.getAnalysisStatus())) {
                    qualityIndicator = results.getQualityIndicator().getValue();
                    effortToTarget = results.getEffortToTarget().getValue();
                    // Round the risk index to two decimals for reporting.
                    BigDecimal rawRiskIndex = new BigDecimal(results.getRiskIndex().getValue());
                    rawRiskIndex = rawRiskIndex.setScale(2, RoundingMode.HALF_UP);
                    riskIndex = rawRiskIndex.doubleValue();
                    end = true;
                } else if ("FINISHED_WITH_ERROR".equalsIgnoreCase(results.getAnalysisStatus())) {
                    buildFailedInKiuwan = true;
                    end = true;
                }
            } catch (KiuwanClientException e) {
                if (retries > 0) {
                    // Re-initializes the client.
                    client = new KiuwanRestApiClient(descriptor.getUsername(), descriptor.getPassword());
                    retries--;
                } else {
                    loggerStream.println(e.getMessage());
                    buildFailedInKiuwan = true;
                    end = true;
                }
            }
            if (!end) {
                Thread.sleep(60000);
            }
        } while (!end);
        if (buildFailedInKiuwan) {
            loggerStream.println("Build failed in Kiuwan");
            resultReference.set(Result.NOT_BUILT);
        } else {
            printAnalysisSummary(listener, qualityIndicator, effortToTarget, riskIndex);
            checkThresholds(build, listener, qualityIndicator, effortToTarget, riskIndex, resultReference);
            KiuwanBuildSummaryAction link = new KiuwanBuildSummaryAction(name, analysisLabel);
            build.addAction(link);
        }
    }
}
/**
 * Resolves the absolute path of the given file on the node where it resides.
 *
 * @param filePath the (possibly remote) file to resolve
 * @param listener used to report a fatal error when the file cannot be found
 * @return the absolute remote path, or {@code null} if the file does not exist
 */
private String getRemoteFileAbsolutePath(FilePath filePath, TaskListener listener) throws IOException, InterruptedException {
    String path = filePath.act(new KiuwanRemoteFilePath());
    if (path == null) {
        // Bug fix: the original code recursed into this same method to build the
        // error message, which recurses forever (StackOverflowError) whenever the
        // file is missing. Use the FilePath's own remote representation instead.
        listener.fatalError("File: \"" + filePath.getRemote() + "\", not found.");
    }
    return path;
}
// Persists the Kiuwan credentials for the local analyzer in two steps whose
// order matters: first the agent's own "-e" mode encrypts the password
// interactively, then the username is written into its agent.properties file.
private void saveCredentials(AbstractBuild<?, ?> build, Launcher launcher, DescriptorImpl descriptor, FilePath agentBinDir, FilePath script, EnvVars env, TaskListener listener) throws IOException, InterruptedException {
encryptSecret(build, launcher, descriptor, agentBinDir, script, env, listener);
saveCredentialsInProperties(descriptor, agentBinDir, listener);
}
// Writes the globally-configured username into <agent home>/conf/agent.properties
// on the node where the analyzer is installed (executed remotely via FilePath.act).
// NOTE(review): the 'listener' parameter is unused here.
private void saveCredentialsInProperties(DescriptorImpl descriptor, FilePath agentBinDir, TaskListener listener) throws FileNotFoundException, IOException, InterruptedException {
agentBinDir.getParent().child("conf").child("agent.properties").act(new KiuwanAgentProperties(descriptor.getUsername()));
}
// Runs the analyzer script with "-e" so that it encrypts the Kiuwan password
// itself. The script expects an interactive console, so we wire up two pipe
// pairs: one simulating the user's keyboard (we type the password) and one
// capturing the script's console prompt. A helper thread reads the prompt and
// answers with the password; the main thread runs the script and waits.
private void encryptSecret(AbstractBuild<?, ?> build, Launcher launcher, DescriptorImpl descriptor, FilePath agentBinDir, FilePath script, EnvVars env, TaskListener listener) throws IOException, InterruptedException {
// stdin of the script <- what we "type" through userKeyboard.
PipedInputStream userInput = new PipedInputStream();
final PipedOutputStream userKeyboard = new PipedOutputStream(userInput);
// stdout of the script -> read back through consoleReaderStream.
final PipedInputStream consoleReaderStream = new PipedInputStream();
PipedOutputStream console = new PipedOutputStream(consoleReaderStream);
try{
final PrintStream loggerStream = listener.getLogger();
final String password = descriptor.getPassword();
// Answers the script's single prompt line with the plain-text password.
Runnable consoleRunnable = new Runnable() {
public void run() {
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(consoleReaderStream));
OutputStreamWriter outputStreamWriter = new OutputStreamWriter(userKeyboard);
try {
// Wait for the prompt, echo it to the build log, then send the password.
String line = bufferedReader.readLine();
loggerStream.println(line);
outputStreamWriter.write(password);
outputStreamWriter.flush();
} catch (IOException e) {
loggerStream.println(ExceptionUtils.getFullStackTrace(e));
}
finally{
IOUtils.closeQuietly(outputStreamWriter);
}
}
};
Thread thread = new Thread(consoleRunnable);
thread.start();
// Launch "<script> -e" with our pipes as its stdin/stdout and wait for it.
launcher.launch().cmds(getRemoteFileAbsolutePath(script, listener), "-e").envs(env).stdin(userInput).stdout(console).pwd(script.getParent()).join();
// Give the answering thread a bounded grace period to finish.
thread.join(5000L);
}
finally{
IOUtils.closeQuietly(console);
IOUtils.closeQuietly(userInput);
}
}
/**
 * Logs the effective analysis configuration (after defaulting) to the build console.
 */
private void printExecutionConfiguration(BuildListener listener, String name, String analysisLabel, String analysisEncoding, FilePath srcFolder, FilePath script) throws IOException, InterruptedException {
    PrintStream logger = listener.getLogger();
    logger.println("Analyze folder: " + getRemoteFileAbsolutePath(srcFolder, listener));
    logger.println("Script: " + getRemoteFileAbsolutePath(script, listener));
    logger.println("kiuwan app name: " + name);
    logger.println("Analysis label: " + analysisLabel);
    logger.println("Threshold measure: " + this.measure);
    logger.println("Unstable threshold: " + this.unstableThreshold);
    logger.println("Failure threshold: " + this.failureThreshold);
    logger.println("encoding: " + analysisEncoding);
    logger.println("includes pattern: " + includes);
    logger.println("excludes pattern: " + excludes);
    logger.println("timeout: " + timeout + " minutes");
}
/**
 * Applies the configured thresholds to the analysis results and downgrades the
 * build result accordingly. For the quality indicator a LOWER value is worse;
 * for effort-to-target and risk index a HIGHER value is worse.
 */
private void checkThresholds(AbstractBuild<?, ?> build, BuildListener listener, double qualityIndicator, double effortToTarget, double riskIndex, AtomicReference<Result> resultReference) {
    PrintStream log = listener.getLogger();
    if (QUALITY_INDICATOR.equalsIgnoreCase(this.measure)) {
        if (qualityIndicator < this.failureThreshold) {
            resultReference.set(Result.FAILURE);
            log.println("Quality indicator is lower than " + this.failureThreshold);
        } else if (qualityIndicator < this.unstableThreshold) {
            resultReference.set(Result.UNSTABLE);
            log.println("Quality indicator is lower than " + this.unstableThreshold);
        }
    } else if (EFFORT_TO_TARGET.equalsIgnoreCase(this.measure)) {
        if (effortToTarget > this.failureThreshold) {
            resultReference.set(Result.FAILURE);
            log.println("Effort to target is greater than " + this.failureThreshold);
        } else if (effortToTarget > this.unstableThreshold) {
            resultReference.set(Result.UNSTABLE);
            log.println("Effort to target is greater than " + this.unstableThreshold);
        }
    } else if (RISK_INDEX.equalsIgnoreCase(this.measure)) {
        if (riskIndex > this.failureThreshold) {
            resultReference.set(Result.FAILURE);
            log.println("Risk index is greater than " + this.failureThreshold);
        } else if (riskIndex > this.unstableThreshold) {
            resultReference.set(Result.UNSTABLE);
            log.println("Risk index is greater than " + this.unstableThreshold);
        }
    }
}
private void printAnalysisSummary(BuildListener listener, double qualityIndicator, double effortToTarget, double riskIndex) {
    // Prints a banner with the three Kiuwan measures to the build console.
    final java.io.PrintStream console = listener.getLogger();
    final String rule = "==========================================================================";
    console.println(rule);
    console.println("     Kiuwan Static Analysis Summary                   ");
    console.println(rule);
    console.println(" - Quality indicator: " + qualityIndicator);
    console.println(" - Effort to target: " + effortToTarget);
    console.println(" - Risk index: " + riskIndex);
    console.println();
}
private List<String> buildAgentCommand(Launcher launcher, String name, String analysisLabel, String analysisEncoding, FilePath srcFolder, String command, FilePath agentBinDir, TaskListener listener) throws IOException, InterruptedException {
    // Assembles the command line used to invoke the Kiuwan Local Analyzer
    // agent on the build node. Every value that may contain spaces goes
    // through buildArgument()/buildAdditionalParameterExpression(), which
    // quote it on Windows.
    //
    // The timeout handed to the agent is the configured timeout (minutes)
    // in milliseconds, minus a margin so the agent gives up before Jenkins
    // aborts the build.
    final long timeoutMillis = TimeUnit.MILLISECONDS.convert(this.timeout, TimeUnit.MINUTES) - TIMEOUT_MARGIN;
    final String agentExecutable = getRemoteFileAbsolutePath(agentBinDir.child(command), listener);
    final List<String> arguments = new ArrayList<String>();
    arguments.add(buildArgument(launcher, agentExecutable));
    arguments.add("-s");
    arguments.add(buildArgument(launcher, getRemoteFileAbsolutePath(srcFolder, listener)));
    arguments.add("-n");
    arguments.add(buildArgument(launcher, name));
    arguments.add("-l");
    arguments.add(buildArgument(launcher, analysisLabel));
    arguments.add("-c");
    arguments.add(buildAdditionalParameterExpression(launcher, "timeout", Long.toString(timeoutMillis)));
    arguments.add(buildAdditionalParameterExpression(launcher, "encoding", analysisEncoding));
    // Include/exclude patterns are optional; blank means "agent default".
    if (StringUtils.isNotBlank(includes)) {
        arguments.add(buildAdditionalParameterExpression(launcher, "include.patterns", includes));
    }
    if (StringUtils.isNotBlank(excludes)) {
        arguments.add(buildAdditionalParameterExpression(launcher, "exclude.patterns", excludes));
    }
    return arguments;
}
private String buildArgument(Launcher launcher, String argument) {
if(launcher.isUnix()){
return argument;
}
else{
return "\""+argument+"\"";
}
}
private String buildAdditionalParameterExpression(Launcher launcher, String parameterName, String parameterValue) {
    // Renders a name=value pair for the agent command line. As with
    // buildArgument(), only the value is quoted, and only on Windows.
    if (launcher.isUnix()) {
        return parameterName + "=" + parameterValue;
    }
    return parameterName + "=\"" + parameterValue + "\"";
}
private void installLocalAnalyzer(FilePath root, BuildListener listener) throws IOException, InterruptedException {
    // Installs the KiuwanLocalAnalyzer distribution into INSTALL_DIR under
    // the given root, then unpacks it there.
    // NOTE(review): resolve(...) appears to pick/download the artifact
    // matching the OS name and JVM data model (32/64 bit) — confirm against
    // KiuwanDownloadable.
    KiuwanDownloadable kiuwanDownloadable = new KiuwanDownloadable();
    FilePath remoteDir = root.child(INSTALL_DIR);
    listener.getLogger().println("Installing KiuwanLocalAnalyzer in " + remoteDir);
    // System properties are read from the current Computer, so the chosen
    // artifact matches the node the build runs on.
    Map<Object, Object> props = Computer.currentComputer().getSystemProperties();
    File zip = kiuwanDownloadable.resolve((String) props.get("os.name"), (String) props.get("sun.arch.data.model"), listener);
    remoteDir.mkdirs();
    new FilePath(zip).unzip(remoteDir);
}
@Override
public DescriptorImpl getDescriptor() {
    // Covariant override: narrows the return type so callers get this
    // recorder's own descriptor without casting.
    return (DescriptorImpl) super.getDescriptor();
}
@Extension
public static class DescriptorImpl extends BuildStepDescriptor<Publisher> {

    // Parallel arrays backing the "measure" combo box: comboValues holds the
    // internal identifiers, comboNames the labels shown to the user. The two
    // arrays must stay index-aligned.
    private final static String[] comboValues = { QUALITY_INDICATOR, RISK_INDEX, EFFORT_TO_TARGET };
    private final static String[] comboNames = { "Quality indicator", "Risk index", "Effort to target" };

    // Kiuwan account credentials, persisted via save()/load(). The password
    // is stored in encrypted form (see configure()).
    private String username;
    private String password;

    public DescriptorImpl() {
        // Restore previously saved global configuration, if any.
        load();
    }

    @Override
    public boolean configure(StaplerRequest req, JSONObject json) throws FormException {
        // to persist global configuration information,
        // set that to properties and call save().
        String username = (String) json.get("username");
        String password = (String) json.get("password");
        this.username = username;
        // Only the encrypted form of the password is kept on disk.
        Secret secret = Secret.fromString(password);
        this.password = secret.getEncryptedValue();
        save();
        return true;
    }

    @Override
    public String getDisplayName() {
        return "Analyze your source code with Kiuwan!";
    }

    @Override
    public boolean isApplicable(Class<? extends AbstractProject> item) {
        // The recorder can be added to any project type.
        return true;
    }

    /**
     * @return the username
     */
    public String getUsername() {
        return this.username;
    }

    /**
     * @return the password, decrypted from its persisted form
     */
    public String getPassword() {
        return Secret.toString(Secret.decrypt(this.password));
    }

    // Form callback for the "Test connection" button: authenticates against
    // the Kiuwan REST API with the credentials typed into the form (not the
    // saved ones).
    public FormValidation doTestConnection(@QueryParameter String username, @QueryParameter String password) {
        KiuwanRestApiClient client = new KiuwanRestApiClient(username, password);
        try {
            client.getApplications();
            return FormValidation.ok("Authentication completed successfully!");
        } catch (KiuwanClientException kiuwanClientException) {
            return FormValidation.error("Authentication failed.");
        } catch (Throwable throwable) {
            // Anything other than a clean API rejection (e.g. network
            // trouble) is reported as a warning, not an error.
            return FormValidation.warning("Could not initiate the authentication process. Reason: " + throwable.getMessage());
        }
    }

    // Populates the measure combo box, pre-selecting the currently saved
    // measure.
    public ListBoxModel doFillMeasureItems(@QueryParameter("measure") String measure) {
        ListBoxModel items = new ListBoxModel();
        for (int i = 0; i < comboNames.length; i++) {
            if (comboValues[i].equalsIgnoreCase(measure)) {
                items.add(new ListBoxModel.Option(comboNames[i], comboValues[i], true));
            } else {
                items.add(comboNames[i], comboValues[i]);
            }
        }
        return items;
    }

    // Validates the timeout form field (minutes, must be >= 1).
    public FormValidation doCheckTimeout(@QueryParameter("timeout") int timeout) {
        if(timeout < 1){
            return FormValidation.error("Timeout must be greater than 0.");
        }
        else{
            return FormValidation.ok();
        }
    }

    // Cross-validates both thresholds at once: the unstable-threshold check
    // runs first and, only if it passes, the failure-threshold check runs.
    public FormValidation doCheckThresholds(@QueryParameter("unstableThreshold") String unstableThreshold, @QueryParameter("failureThreshold") String failureThreshold, @QueryParameter("measure") String measure) {
        FormValidation unstableThresholdValidationResult = doCheckUnstableThreshold(unstableThreshold, failureThreshold, measure);
        if(Kind.OK.equals(unstableThresholdValidationResult.kind)){
            return doCheckFailureThreshold(failureThreshold, unstableThreshold, measure);
        }
        else{
            return unstableThresholdValidationResult;
        }
    }

    // Validates the unstable threshold. The relation to the failure
    // threshold depends on the measure: for the quality indicator (lower is
    // worse) unstable must be strictly greater than failure and below 100;
    // for risk index and effort to target (higher is worse) unstable must be
    // strictly lower than failure.
    public FormValidation doCheckUnstableThreshold(@QueryParameter("unstableThreshold") String unstableThreshold, @QueryParameter("failureThreshold") String failureThreshold, @QueryParameter("measure") String measure) {
        double unstable = 0;
        try {
            unstable = Double.parseDouble(unstableThreshold);
            if (unstable < 0) {
                return FormValidation.error("Unstable threshold must be a positive number.");
            }
        } catch (Throwable throwable) {
            return FormValidation.error("Unstable threshold must be a non-negative numeric value.");
        }
        if (QUALITY_INDICATOR.equalsIgnoreCase(measure)) {
            if (unstable >= 100) {
                return FormValidation.error("Unstable threshold must be lower than 100.");
            } else {
                try {
                    double failure = Double.parseDouble(failureThreshold);
                    if (failure >= unstable) {
                        return FormValidation.error("Unstable threshold can not be lower or equal than failure threshold.");
                    }
                } catch (Throwable throwable) {
                    // Ignore — a malformed failure threshold is reported by
                    // its own validator, not here.
                }
            }
        } else if (RISK_INDEX.equalsIgnoreCase(measure)) {
            if (unstable <= 0) {
                return FormValidation.error("Unstable threshold must be greater than 0.");
            } else {
                try {
                    double failure = Double.parseDouble(failureThreshold);
                    if (failure <= unstable) {
                        return FormValidation.error("Unstable threshold can not be greater or equal than failure threshold.");
                    }
                } catch (Throwable throwable) {
                    // Ignore — see note above.
                }
            }
        } else if (EFFORT_TO_TARGET.equalsIgnoreCase(measure)) {
            try {
                double failed = Double.parseDouble(failureThreshold);
                if (failed <= unstable) {
                    return FormValidation.error("Unstable threshold can not be greater or equal than failure threshold.");
                }
            } catch (Throwable throwable) {
                // Ignore — see note above.
            }
        }
        return FormValidation.ok();
    }

    // Validates the failure threshold; mirror image of
    // doCheckUnstableThreshold (same measure-dependent relations, checked
    // from the failure side).
    public FormValidation doCheckFailureThreshold(@QueryParameter("failureThreshold") String failureThreshold, @QueryParameter("unstableThreshold") String unstableThreshold, @QueryParameter("measure") String measure) {
        double failure = 0;
        try {
            failure = Double.parseDouble(failureThreshold);
            if (failure < 0) {
                return FormValidation.error("Failure threshold must be a positive number.");
            }
        } catch (Throwable throwable) {
            return FormValidation.error("Failure threshold must be a non-negative numeric value.");
        }
        if (QUALITY_INDICATOR.equalsIgnoreCase(measure)) {
            try {
                double unstable = Double.parseDouble(unstableThreshold);
                if (failure >= unstable) {
                    return FormValidation.error("Failure threshold can not be greater or equal than unstable threshold.");
                }
            } catch (Throwable throwable) {
                // Ignore — a malformed unstable threshold is reported by its
                // own validator, not here.
            }
        } else if (RISK_INDEX.equalsIgnoreCase(measure)) {
            if (failure > 100) {
                return FormValidation.error("Failure threshold must be lower or equal than 100.");
            } else {
                try {
                    double unstable = Double.parseDouble(unstableThreshold);
                    if (failure <= unstable) {
                        return FormValidation.error("Failure threshold can not be lower or equal than unstable threshold.");
                    }
                } catch (Throwable throwable) {
                    // Ignore — see note above.
                }
            }
        } else if (EFFORT_TO_TARGET.equalsIgnoreCase(measure)) {
            try {
                double unstable = Double.parseDouble(unstableThreshold);
                if (failure <= unstable) {
                    return FormValidation.error("Failure threshold can not be lower or equal than unstable threshold.");
                }
            } catch (Throwable throwable) {
                // Ignore — see note above.
            }
        }
        return FormValidation.ok();
    }
}
}
|
Fixed windows cmd problem.
|
src/main/java/com/kiuwan/plugins/kiuwanJenkinsPlugin/KiuwanRecorder.java
|
Fixed windows cmd problem.
|
|
Java
|
epl-1.0
|
189d6c84a220c75eb148021d18f8ddfcd7eacb8b
| 0
|
cbrun/jstuart
|
package fr.obeo.tools.stuart;
import java.io.File;
import java.net.URL;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.hash.Hashing;
import fr.obeo.tools.stuart.bugzilla.BugzillaLogger;
import fr.obeo.tools.stuart.eclipseforum.EclipseForumsLogger;
import fr.obeo.tools.stuart.gerrit.GerritLogger;
import fr.obeo.tools.stuart.git.GitLogger;
import fr.obeo.tools.stuart.jenkins.JenkinsLogger;
import fr.obeo.tools.stuart.mattermost.MattermostEmitter;
import fr.obeo.tools.stuart.rss.RssLogger;
/**
 * Scheduled "tests" that aggregate posts from several Eclipse sources
 * (forums, Bugzilla, Gerrit, Jenkins, RSS feeds) and forward them to
 * Mattermost channels. Each method reads its target channel id from an
 * environment variable and keeps an on-disk trace of already-sent posts so
 * reruns do not repost.
 */
public class EclipseMattermostInstanceTest {

	private static final String SO_ICON = "https://veithen.github.io/images/icon-stackoverflow.svg";

	/**
	 * Orders posts chronologically (oldest first) so channels receive events
	 * in creation order. Shared by every aggregation method instead of four
	 * identical anonymous comparators.
	 */
	private static final Comparator<Post> BY_CREATION_DATE = new Comparator<Post>() {
		public int compare(Post m1, Post m2) {
			return m1.getCreatedAt().compareTo(m2.getCreatedAt());
		}
	};

	private String host = "mattermost-test.eclipse.org";

	/**
	 * Root folder for clones and emitter trace files: the Jenkins workspace
	 * when available, the current directory otherwise.
	 */
	private static String storageDir() {
		String storage = System.getenv("WORKSPACE");
		return storage != null ? storage : ".";
	}

	@Test
	public void eclipseAnnounces() throws Exception {
		String storage = storageDir();
		String channel = System.getenv("NEWS_CHANNEL");
		if (channel != null) {
			MattermostEmitter emitter = new MattermostEmitter("https", host, channel);
			Date daysAgo = getDateXDaysAgo(15);
			EmitterTrace traceFile = new EmitterTrace(
					new File(storage + "/" + host + "_" + Hashing.sha256().hashString(channel) + "_trace.json"));
			Map<String, Date> trace = traceFile.load();
			List<Post> posts = Lists.newArrayList();
			posts.addAll(
					new RssLogger(new URL("https://dev.eclipse.org/mhonarc/lists/eclipse.org-committers/maillist.rss"),
							daysAgo).get());
			posts.addAll(new RssLogger(new URL("http://planet.eclipse.org/planet/rss20.xml"), daysAgo).get());
			Collections.sort(posts, BY_CREATION_DATE);
			for (Post post : posts) {
				send(emitter, trace, post);
			}
			traceFile.evictOldEvents(trace, 60);
			traceFile.save(trace);
		} else {
			Assert.fail("Expecting the NEWS_CHANNEL environment variable to be set");
		}
	}

	@Test
	public void sendEventsToPlatformChans() throws Exception {
		String storage = storageDir();
		String bug_Channel = System.getenv("PLATFORM_BUG_CHANNEL");
		String qa_Channel = System.getenv("PLATFORM_QA_CHANNEL");
		String patch_Channel = System.getenv("PLATFORM_PATCHES_CHANNEL");
		if (qa_Channel != null && bug_Channel != null && patch_Channel != null) {
			MattermostEmitter qaEmitter = new MattermostEmitter("https", host, qa_Channel);
			Date daysAgo = getDateXDaysAgo(3);
			EmitterTrace traceFile = new EmitterTrace(new File(storage + "/" + host + "_platform" + "_trace.json"));
			Map<String, Date> trace = traceFile.load();
			List<Post> posts = Lists.newArrayList();
			posts.addAll(new EclipseForumsLogger(11, daysAgo).forumLog());
			posts.addAll(new EclipseForumsLogger(116, daysAgo).forumLog());
			posts.addAll(new EclipseForumsLogger(106, daysAgo).forumLog());
			posts.addAll(new EclipseForumsLogger(12, daysAgo).forumLog());
			posts.addAll(new EclipseForumsLogger(100, daysAgo).forumLog());
			posts.addAll(new EclipseForumsLogger(15, daysAgo).forumLog());
			// FIX: the Stack Overflow feeds were previously appended AFTER
			// Collections.sort, so they were sent out of chronological order.
			// Collect every source first, then sort once, then send.
			posts.addAll(new RssLogger(new URL("http://stackoverflow.com/feeds/tag/eclipse-plugin"), daysAgo)
					.setIcon(SO_ICON).get());
			posts.addAll(new RssLogger(new URL("http://stackoverflow.com/feeds/tag/eclipse-rcp"), daysAgo)
					.setIcon(SO_ICON).get());
			posts.addAll(
					new RssLogger(new URL("http://stackoverflow.com/feeds/tag/swt"), daysAgo).setIcon(SO_ICON).get());
			posts.addAll(
					new RssLogger(new URL("http://stackoverflow.com/feeds/tag/jface"), daysAgo).setIcon(SO_ICON).get());
			posts.addAll(
					new RssLogger(new URL("http://stackoverflow.com/feeds/tag/e4"), daysAgo).setIcon(SO_ICON).get());
			Collections.sort(posts, BY_CREATION_DATE);
			for (Post post : posts) {
				send(qaEmitter, trace, post);
			}
			MattermostEmitter bugEmitter = new MattermostEmitter("https", host, bug_Channel);
			List<Post> bugzillas = Lists.newArrayList();
			bugzillas.addAll(
					new BugzillaLogger("https://bugs.eclipse.org/bugs", Sets.newHashSet("genie", "genie@eclipse.org"))
							.bugzillaLog(3, Sets.newHashSet("Platform")));
			for (Post post : bugzillas) {
				send(bugEmitter, trace, post);
			}
			List<Post> patches = Lists.newArrayList();
			patches.addAll(new GerritLogger("https://git.eclipse.org/r", 1).groupReviews(false)
					.getPatchsets(Sets.newHashSet("platform/eclipse.platform", "platform/eclipse.platform.common",
							"platform/eclipse.platform.debug", "platform/eclipse.platform.images",
							"platform/eclipse.platform.news", "platform/eclipse.platform.resources",
							"platform/eclipse.platform.runtime", "platform/eclipse.platform.swt",
							"platform/eclipse.platform.team", "platform/eclipse.platform.text",
							"platform/eclipse.platform.ua", "platform/eclipse.platform.ui",
							"platform/eclipse.platform.tools")));
			MattermostEmitter patchesEmitter = new MattermostEmitter("https", host, patch_Channel);
			for (Post post : patches) {
				send(patchesEmitter, trace, post);
			}
			traceFile.evictOldEvents(trace, 60);
			traceFile.save(trace);
		} else {
			Assert.fail(
					"Expecting the PLATFORM_QA_CHANNEL, PLATFORM_BUG_CHANNEL,PLATFORM_PATCHES_CHANNEL environment variable to be set");
		}
	}

	@Test
	public void sendEventsToCDTChans() throws Exception {
		String storage = storageDir();
		String qa_Channel = System.getenv("CDT_CHANNEL");
		if (qa_Channel != null) {
			MattermostEmitter qaEmitter = new MattermostEmitter("https", host, qa_Channel);
			Date daysAgo = getDateXDaysAgo(3);
			EmitterTrace traceFile = new EmitterTrace(new File(storage + "/" + host + "_cdtgeneral" + "_trace.json"));
			Map<String, Date> trace = traceFile.load();
			List<Post> posts = Lists.newArrayList();
			posts.addAll(new EclipseForumsLogger(80, daysAgo).forumLog());
			posts.addAll(
					new RssLogger(new URL("http://stackoverflow.com/feeds/tag/cdt"), daysAgo).setIcon(SO_ICON).get());
			posts.addAll(new RssLogger(new URL("http://stackoverflow.com/feeds/tag/eclipse-cdt"), daysAgo)
					.setIcon(SO_ICON).get());
			Collections.sort(posts, BY_CREATION_DATE);
			for (Post post : posts) {
				send(qaEmitter, trace, post);
			}
			traceFile.evictOldEvents(trace, 60);
			traceFile.save(trace);
		} else {
			Assert.fail("Expecting the CDT_CHANNEL environment variable to be set");
		}
	}

	@Test
	public void sendEventsToPackageDrone() throws Exception {
		String storage = storageDir();
		String mainChannelID = System.getenv("PACKAGEDRONE_MAIN_CHANNEL");
		String log_Channel = System.getenv("PACKAGEDRONE_CHANNEL");
		if (log_Channel != null && mainChannelID != null) {
			MattermostEmitter emitter = new MattermostEmitter("https", host, log_Channel);
			Date daysAgo = getDateXDaysAgo(3);
			EmitterTrace traceFile = new EmitterTrace(new File(storage + "/" + host + "_mattermost" + "_trace.json"));
			Map<String, Date> trace = traceFile.load();
			List<Post> posts = Lists.newArrayList();
			posts.addAll(
					new BugzillaLogger("https://bugs.eclipse.org/bugs", Sets.newHashSet("genie", "genie@eclipse.org"))
							.bugzillaLog(3, Sets.newHashSet("Package-Drone")));
			posts.addAll(new GitLogger(new File(storage + "/clones/")).getMergedCommits(daysAgo,
					"https://github.com/eclipse/packagedrone.git", "https://github.com/eclipse/packagedrone/commit/"));
			posts.addAll(new EclipseForumsLogger(318, daysAgo).forumLog());
			posts.addAll(new JenkinsLogger("https://hudson.eclipse.org/package-drone/", daysAgo).getBuildResults());
			Collections.sort(posts, BY_CREATION_DATE);
			for (Post post : posts) {
				send(emitter, trace, post);
			}
			// Project news goes to the main channel rather than the log channel.
			MattermostEmitter mainChannelEmitter = new MattermostEmitter("https", host, mainChannelID);
			for (Post post : new RssLogger(new URL("http://packagedrone.org/feed/"), daysAgo).get()) {
				send(mainChannelEmitter, trace, post);
			}
			traceFile.evictOldEvents(trace, 60);
			traceFile.save(trace);
		} else {
			Assert.fail("Expecting the PACKAGEDRONE_CHANNEL environment variable to be set");
		}
	}

	/**
	 * @param nbDays number of days to go back from now
	 * @return the instant exactly {@code nbDays} days before the current time
	 */
	public static Date getDateXDaysAgo(int nbDays) {
		Calendar cal = Calendar.getInstance();
		cal.add(Calendar.DATE, -nbDays);
		Date daysAgo = cal.getTime();
		return daysAgo;
	}

	@Test
	public void sendEventsToSiriusPrivateChan() throws Exception {
		String storage = storageDir();
		String channel = System.getenv("MATTERMOST_CHANNEL");
		if (channel != null) {
			MattermostEmitter emitter = new MattermostEmitter("https", host, channel);
			int nbDays = 3;
			Date daysAgo = getDateXDaysAgo(nbDays);
			EmitterTrace traceFile = new EmitterTrace(
					new File(storage + "/" + host + "_" + Hashing.sha256().hashString(channel) + "_trace.json"));
			Map<String, Date> trace = traceFile.load();
			List<Post> posts = Lists.newArrayList();
			posts.addAll(new GitLogger(new File(storage + "/clones/")).getMergedCommits(daysAgo,
					"https://git.eclipse.org/r/sirius/org.eclipse.sirius",
					"https://git.eclipse.org/c/sirius/org.eclipse.sirius.git/commit/?id="));
			posts.addAll(new EclipseForumsLogger(262, daysAgo).forumLog());
			posts.addAll(
					new JenkinsLogger("https://hudson.eclipse.org/sirius/", daysAgo).getBuildResults(trace.keySet()));
			posts.addAll(new GerritLogger("https://git.eclipse.org/r", nbDays)
					.getPatchsets(Sets.newHashSet("sirius/org.eclipse.sirius")));
			posts.addAll(
					new BugzillaLogger("https://bugs.eclipse.org/bugs", Sets.newHashSet("genie", "genie@eclipse.org"))
							.bugzillaLog(3, Sets.newHashSet("Sirius")));
			Collections.sort(posts, BY_CREATION_DATE);
			for (Post post : posts) {
				send(emitter, trace, post);
			}
			traceFile.evictOldEvents(trace, 60);
			traceFile.save(trace);
		} else {
			Assert.fail("Expecting the MATTERMOST_CHANNEL environment variable to be set");
		}
	}

	/**
	 * Emits a single post unless the trace shows it was already sent.
	 * Successful sends are recorded in the trace; individual failures are
	 * logged and swallowed so one bad post does not abort the whole batch.
	 * The short sleep throttles the posting rate.
	 */
	private void send(MattermostEmitter emitter, Map<String, Date> trace, Post post) {
		if (!trace.containsKey(post.getKey())) {
			try {
				System.err.println("Sending :" + post.getKey());
				emitter.accept(MattermostPost.fromGenericPost(post));
				trace.put(post.getKey(), new Date());
				Thread.sleep(500);
			} catch (Exception e) {
				e.printStackTrace();
			}
		}
	}
}
|
stuart/src/test/java/fr/obeo/tools/stuart/EclipseMattermostInstanceTest.java
|
package fr.obeo.tools.stuart;
import java.io.File;
import java.net.URL;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.hash.Hashing;
import fr.obeo.tools.stuart.bugzilla.BugzillaLogger;
import fr.obeo.tools.stuart.eclipseforum.EclipseForumsLogger;
import fr.obeo.tools.stuart.gerrit.GerritLogger;
import fr.obeo.tools.stuart.git.GitLogger;
import fr.obeo.tools.stuart.jenkins.JenkinsLogger;
import fr.obeo.tools.stuart.mattermost.MattermostEmitter;
import fr.obeo.tools.stuart.rss.RssLogger;
public class EclipseMattermostInstanceTest {
private static final String SO_ICON = "https://veithen.github.io/images/icon-stackoverflow.svg";
private String host = "mattermost-test.eclipse.org";
@Test
public void eclipseAnnounces() throws Exception {
String storage = System.getenv("WORKSPACE");
if (storage == null) {
storage = ".";
}
String channel = System.getenv("NEWS_CHANNEL");
if (channel != null) {
MattermostEmitter emitter = new MattermostEmitter("https", host, channel);
Date daysAgo = getDateXDaysAgo(15);
EmitterTrace traceFile = new EmitterTrace(
new File(storage + "/" + host + "_" + Hashing.sha256().hashString(channel) + "_trace.json"));
Map<String, Date> trace = traceFile.load();
List<Post> posts = Lists.newArrayList();
posts.addAll(
new RssLogger(new URL("https://dev.eclipse.org/mhonarc/lists/eclipse.org-committers/maillist.rss"),
daysAgo).get());
posts.addAll(new RssLogger(new URL("http://planet.eclipse.org/planet/rss20.xml"), daysAgo).get());
Collections.sort(posts, new Comparator<Post>() {
public int compare(Post m1, Post m2) {
return m1.getCreatedAt().compareTo(m2.getCreatedAt());
}
});
for (Post post : posts) {
send(emitter, trace, post);
}
traceFile.evictOldEvents(trace, 60);
traceFile.save(trace);
} else {
Assert.fail("Expecting the NEWS_CHANNEL environment variable to be set");
}
}
@Test
public void sendEventsToPlatformChans() throws Exception {
String storage = System.getenv("WORKSPACE");
if (storage == null) {
storage = ".";
}
String bug_Channel = System.getenv("PLATFORM_BUG_CHANNEL");
String qa_Channel = System.getenv("PLATFORM_QA_CHANNEL");
String patch_Channel = System.getenv("PLATFORM_PATCHES_CHANNEL");
if (qa_Channel != null && bug_Channel != null && patch_Channel != null) {
MattermostEmitter qaEmitter = new MattermostEmitter("https", host, qa_Channel);
Date daysAgo = getDateXDaysAgo(3);
EmitterTrace traceFile = new EmitterTrace(new File(storage + "/" + host + "_platform" + "_trace.json"));
Map<String, Date> trace = traceFile.load();
List<Post> posts = Lists.newArrayList();
posts.addAll(new EclipseForumsLogger(11, daysAgo).forumLog());
posts.addAll(new EclipseForumsLogger(116, daysAgo).forumLog());
posts.addAll(new EclipseForumsLogger(106, daysAgo).forumLog());
posts.addAll(new EclipseForumsLogger(12, daysAgo).forumLog());
posts.addAll(new EclipseForumsLogger(100, daysAgo).forumLog());
posts.addAll(new EclipseForumsLogger(15, daysAgo).forumLog());
Collections.sort(posts, new Comparator<Post>() {
public int compare(Post m1, Post m2) {
return m1.getCreatedAt().compareTo(m2.getCreatedAt());
}
});
for (Post post : posts) {
send(qaEmitter, trace, post);
}
posts.addAll(new RssLogger(new URL("http://stackoverflow.com/feeds/tag/eclipse-plugin"), daysAgo)
.setIcon(SO_ICON).get());
posts.addAll(new RssLogger(new URL("http://stackoverflow.com/feeds/tag/eclipse-rcp"), daysAgo)
.setIcon(SO_ICON).get());
posts.addAll(
new RssLogger(new URL("http://stackoverflow.com/feeds/tag/swt"), daysAgo).setIcon(SO_ICON).get());
posts.addAll(
new RssLogger(new URL("http://stackoverflow.com/feeds/tag/jface"), daysAgo).setIcon(SO_ICON).get());
posts.addAll(
new RssLogger(new URL("http://stackoverflow.com/feeds/tag/e4"), daysAgo).setIcon(SO_ICON).get());
MattermostEmitter bugEmitter = new MattermostEmitter("https", host, bug_Channel);
List<Post> bugzillas = Lists.newArrayList();
bugzillas.addAll(
new BugzillaLogger("https://bugs.eclipse.org/bugs", Sets.newHashSet("genie", "genie@eclipse.org"))
.bugzillaLog(3, Sets.newHashSet("Platform")));
for (Post post : bugzillas) {
send(bugEmitter, trace, post);
}
List<Post> patches = Lists.newArrayList();
patches.addAll(new GerritLogger("https://git.eclipse.org/r", 1).groupReviews(false)
.getPatchsets(Sets.newHashSet("platform/eclipse.platform", "platform/eclipse.platform.common",
"platform/eclipse.platform.debug", "platform/eclipse.platform.images",
"platform/eclipse.platform.news", "platform/eclipse.platform.resources",
"platform/eclipse.platform.runtime", "platform/eclipse.platform.swt",
"platform/eclipse.platform.team", "platform/eclipse.platform.text",
"platform/eclipse.platform.ua", "platform/eclipse.platform.ui",
"platform/eclipse.platform.tools")));
MattermostEmitter patchesEmitter = new MattermostEmitter("https", host, patch_Channel);
for (Post post : patches) {
send(patchesEmitter, trace, post);
}
traceFile.evictOldEvents(trace, 60);
traceFile.save(trace);
} else {
Assert.fail(
"Expecting the PLATFORM_QA_CHANNEL, PLATFORM_BUG_CHANNEL,PLATFORM_PATCHES_CHANNEL environment variable to be set");
}
}
@Test
public void sendEventsToCDTChans() throws Exception {
String storage = System.getenv("WORKSPACE");
if (storage == null) {
storage = ".";
}
String qa_Channel = System.getenv("CDT_CHANNEL");
if (qa_Channel != null) {
MattermostEmitter qaEmitter = new MattermostEmitter("https", host, qa_Channel);
Date daysAgo = getDateXDaysAgo(3);
EmitterTrace traceFile = new EmitterTrace(new File(storage + "/" + host + "_cdtgeneral" + "_trace.json"));
Map<String, Date> trace = traceFile.load();
List<Post> posts = Lists.newArrayList();
posts.addAll(new EclipseForumsLogger(80, daysAgo).forumLog());
posts.addAll(
new RssLogger(new URL("http://stackoverflow.com/feeds/tag/cdt"), daysAgo).setIcon(SO_ICON).get());
posts.addAll(new RssLogger(new URL("http://stackoverflow.com/feeds/tag/eclipse-cdt"), daysAgo)
.setIcon(SO_ICON).get());
Collections.sort(posts, new Comparator<Post>() {
public int compare(Post m1, Post m2) {
return m1.getCreatedAt().compareTo(m2.getCreatedAt());
}
});
for (Post post : posts) {
send(qaEmitter, trace, post);
}
traceFile.evictOldEvents(trace, 60);
traceFile.save(trace);
} else {
Assert.fail("Expecting the CDT_CHANNEL environment variable to be set");
}
}
@Test
public void sendEventsToPackageDrone() throws Exception {
String storage = System.getenv("WORKSPACE");
if (storage == null) {
storage = ".";
}
String mainChannelID = System.getenv("PACKAGEDRONE_MAIN_CHANNEL");
String log_Channel = System.getenv("PACKAGEDRONE_CHANNEL");
if (log_Channel != null && mainChannelID != null) {
MattermostEmitter emitter = new MattermostEmitter("https", host, log_Channel);
Date daysAgo = getDateXDaysAgo(3);
EmitterTrace traceFile = new EmitterTrace(new File(storage + "/" + host + "_mattermost" + "_trace.json"));
Map<String, Date> trace = traceFile.load();
List<Post> posts = Lists.newArrayList();
posts.addAll(
new BugzillaLogger("https://bugs.eclipse.org/bugs", Sets.newHashSet("genie", "genie@eclipse.org"))
.bugzillaLog(3, Sets.newHashSet("Package-Drone")));
posts.addAll(new GitLogger(new File(storage + "/clones/")).getMergedCommits(daysAgo,
"https://github.com/eclipse/packagedrone.git", "https://github.com/eclipse/packagedrone/commit/"));
posts.addAll(new EclipseForumsLogger(318, daysAgo).forumLog());
posts.addAll(new JenkinsLogger("https://hudson.eclipse.org/package-drone/", daysAgo).getBuildResults());
Collections.sort(posts, new Comparator<Post>() {
public int compare(Post m1, Post m2) {
return m1.getCreatedAt().compareTo(m2.getCreatedAt());
}
});
for (Post post : posts) {
send(emitter, trace, post);
}
MattermostEmitter mainChannelEmitter = new MattermostEmitter("https", host, mainChannelID);
for (Post post : new RssLogger(new URL("http://packagedrone.org/feed/"), daysAgo).get()) {
send(mainChannelEmitter, trace, post);
}
traceFile.evictOldEvents(trace, 60);
traceFile.save(trace);
} else {
Assert.fail("Expecting the PACKAGEDRONE_CHANNEL environment variable to be set");
}
}
public static Date getDateXDaysAgo(int nbDays) {
Calendar cal = Calendar.getInstance();
cal.add(Calendar.DATE, -nbDays);
Date daysAgo = cal.getTime();
return daysAgo;
}
@Test
public void sendEventsToSiriusPrivateChan() throws Exception {
String storage = System.getenv("WORKSPACE");
if (storage == null) {
storage = ".";
}
String channel = System.getenv("MATTERMOST_CHANNEL");
if (channel != null) {
MattermostEmitter emitter = new MattermostEmitter("https", host, channel);
int nbDays = 3;
Date daysAgo = getDateXDaysAgo(nbDays);
EmitterTrace traceFile = new EmitterTrace(
new File(storage + "/" + host + "_" + Hashing.sha256().hashString(channel) + "_trace.json"));
Map<String, Date> trace = traceFile.load();
List<Post> posts = Lists.newArrayList();
posts.addAll(new GitLogger(new File(storage + "/clones/")).getMergedCommits(daysAgo,
"https://git.eclipse.org/r/sirius/org.eclipse.sirius",
"https://git.eclipse.org/c/sirius/org.eclipse.sirius.git/commit/?id="));
posts.addAll(new EclipseForumsLogger(262, daysAgo).forumLog());
posts.addAll(
new JenkinsLogger("https://hudson.eclipse.org/sirius/", daysAgo).getBuildResults(trace.keySet()));
posts.addAll(new GerritLogger("https://git.eclipse.org/r", nbDays)
.getPatchsets(Sets.newHashSet("sirius/org.eclipse.sirius")));
posts.addAll(
new BugzillaLogger("https://bugs.eclipse.org/bugs", Sets.newHashSet("genie", "genie@eclipse.org"))
.bugzillaLog(3, Sets.newHashSet("Sirius")));
Collections.sort(posts, new Comparator<Post>() {
public int compare(Post m1, Post m2) {
return m1.getCreatedAt().compareTo(m2.getCreatedAt());
}
});
for (Post post : posts) {
send(emitter, trace, post);
}
traceFile.evictOldEvents(trace, 60);
traceFile.save(trace);
} else {
Assert.fail("Expecting the MATTERMOST_CHANNEL environment variable to be set");
}
}
/**
 * Sends one post to Mattermost unless its key is already present in the
 * emission trace, then records it in the trace so it is not re-sent.
 * Sending is best-effort: a failure for one post is logged and does not
 * abort the remaining posts.
 *
 * @param emitter the Mattermost endpoint to post to
 * @param trace   map of already-emitted post keys to their emission date (mutated)
 * @param post    the post to send
 */
private void send(MattermostEmitter emitter, Map<String, Date> trace, Post post) {
    if (!trace.containsKey(post.getKey())) {
        try {
            System.err.println("Sending :" + post.getKey());
            emitter.accept(MattermostPost.fromGenericPost(post));
            trace.put(post.getKey(), new Date());
            // Throttle so we do not hammer the Mattermost endpoint.
            Thread.sleep(500);
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing it, so callers
            // can observe that the thread was interrupted.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } catch (Exception e) {
            // Best effort: log and continue with the next post.
            e.printStackTrace();
        }
    }
}
}
|
make sure we **send** the RSS posts at some time...
|
stuart/src/test/java/fr/obeo/tools/stuart/EclipseMattermostInstanceTest.java
|
make sure we **send** the RSS posts at some time...
|
|
Java
|
agpl-3.0
|
3a565b054bb3a5396027556b138c9fd1c4e5dd93
| 0
|
paraita/scheduling,ow2-proactive/scheduling,mbenguig/scheduling,yinan-liu/scheduling,fviale/scheduling,paraita/scheduling,yinan-liu/scheduling,fviale/scheduling,yinan-liu/scheduling,marcocast/scheduling,ow2-proactive/scheduling,tobwiens/scheduling,sandrineBeauche/scheduling,laurianed/scheduling,youribonnaffe/scheduling,ow2-proactive/scheduling,ow2-proactive/scheduling,lpellegr/scheduling,fviale/scheduling,fviale/scheduling,ShatalovYaroslav/scheduling,fviale/scheduling,zeineb/scheduling,paraita/scheduling,tobwiens/scheduling,marcocast/scheduling,laurianed/scheduling,ow2-proactive/scheduling,laurianed/scheduling,youribonnaffe/scheduling,zeineb/scheduling,youribonnaffe/scheduling,youribonnaffe/scheduling,laurianed/scheduling,marcocast/scheduling,yinan-liu/scheduling,lpellegr/scheduling,ow2-proactive/scheduling,lpellegr/scheduling,sgRomaric/scheduling,jrochas/scheduling,mbenguig/scheduling,youribonnaffe/scheduling,sandrineBeauche/scheduling,sgRomaric/scheduling,lpellegr/scheduling,tobwiens/scheduling,fviale/scheduling,lpellegr/scheduling,jrochas/scheduling,sgRomaric/scheduling,ShatalovYaroslav/scheduling,ow2-proactive/scheduling,laurianed/scheduling,lpellegr/scheduling,sandrineBeauche/scheduling,yinan-liu/scheduling,paraita/scheduling,marcocast/scheduling,sandrineBeauche/scheduling,youribonnaffe/scheduling,sgRomaric/scheduling,yinan-liu/scheduling,youribonnaffe/scheduling,tobwiens/scheduling,mbenguig/scheduling,yinan-liu/scheduling,mbenguig/scheduling,mbenguig/scheduling,jrochas/scheduling,zeineb/scheduling,sgRomaric/scheduling,marcocast/scheduling,jrochas/scheduling,laurianed/scheduling,fviale/scheduling,sandrineBeauche/scheduling,tobwiens/scheduling,marcocast/scheduling,sgRomaric/scheduling,ShatalovYaroslav/scheduling,sandrineBeauche/scheduling,paraita/scheduling,mbenguig/scheduling,lpellegr/scheduling,paraita/scheduling,zeineb/scheduling,ShatalovYaroslav/scheduling,ShatalovYaroslav/scheduling,sandrineBeauche/scheduling,mbenguig/scheduling,jrochas/scheduling,tobwiens/
scheduling,tobwiens/scheduling,zeineb/scheduling,marcocast/scheduling,zeineb/scheduling,sgRomaric/scheduling,ShatalovYaroslav/scheduling,jrochas/scheduling,jrochas/scheduling,laurianed/scheduling,ShatalovYaroslav/scheduling,zeineb/scheduling,paraita/scheduling
|
/*
* ################################################################
*
* ProActive: The Java(TM) library for Parallel, Distributed,
* Concurrent computing with Security and Mobility
*
* Copyright (C) 1997-2002 INRIA/University of Nice-Sophia Antipolis
* Contact: proactive-support@inria.fr
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA
*
* Initial developer(s): The ProActive Team
* http://www.inria.fr/oasis/ProActive/contacts.html
* Contributor(s):
*
* ################################################################
*/
package org.objectweb.proactive.core.body.proxy;
import org.objectweb.proactive.Body;
import org.objectweb.proactive.ProActive;
import org.objectweb.proactive.core.Constants;
import org.objectweb.proactive.core.ProActiveRuntimeException;
import org.objectweb.proactive.core.UniqueID;
import org.objectweb.proactive.core.body.future.Future;
import org.objectweb.proactive.core.body.future.FutureProxy;
import org.objectweb.proactive.core.exceptions.NonFunctionalException;
import org.objectweb.proactive.core.exceptions.communication.SendRequestCommunicationException;
import org.objectweb.proactive.core.exceptions.creation.FutureCreationException;
import org.objectweb.proactive.core.exceptions.handler.Handler;
import org.objectweb.proactive.core.mop.MOP;
import org.objectweb.proactive.core.mop.MOPException;
import org.objectweb.proactive.core.mop.MethodCall;
import org.objectweb.proactive.core.mop.MethodCallExecutionFailedException;
import org.objectweb.proactive.core.mop.StubObject;
import org.objectweb.proactive.ext.security.RenegotiateSessionException;
/**
 * Base class for body proxies: reifies method calls made on a stub into
 * requests sent to the (possibly remote) active object's body, implementing
 * asynchronous message-passing with future objects where possible.
 * Communication failures are routed through the non-functional-exception
 * (NFE) handler mechanism rather than thrown directly.
 */
public abstract class AbstractBodyProxy extends AbstractProxy
    implements BodyProxy, java.io.Serializable {
    //
    // -- STATIC MEMBERS -----------------------------------------------
    //
    //
    // -- PROTECTED MEMBERS -----------------------------------------------
    //
    // Identifier of the body this proxy forwards calls to.
    protected UniqueID bodyID;
    //
    // -- CONSTRUCTORS -----------------------------------------------
    //
    public AbstractBodyProxy() {
    }
    AbstractBodyProxy(UniqueID bodyID) {
        this.bodyID = bodyID;
    }
    //
    // -- PUBLIC METHODS -----------------------------------------------
    //
    //
    // -- implements BodyProxy -----------------------------------------------
    //
    public UniqueID getBodyID() {
        return bodyID;
    }
    //
    // -- implements Proxy -----------------------------------------------
    //
    /**
     * Performs operations on the Call object created by the stub, thus changing
     * the semantics of message-passing to asynchronous message-passing with
     * future objects
     *
     *
     * The semantics of message-passing implemented by this proxy class
     * may be defined as follows :<UL>
     * <LI>Asynchronous message-passing
     * <LI>Creation of future objects where possible (which leads to
     * wait-by-necessity).
     * <LI>Synchronous, blocking calls where futures are not available.
     * <LI>The Call <code>methodCall</code> is passed to the skeleton for execution.
     * </UL>
     */
    public Object reify(MethodCall methodCall) throws Throwable {
        // Special-case equals: two stubs are equal iff they proxy the same body ID.
        if (methodCall.getName().equals("equals")) {
            //there is only one argument to this method
            Object arg = methodCall.getParameter(0);
            if (MOP.isReifiedObject(arg)) {
                AbstractBodyProxy bodyProxy = (AbstractBodyProxy) ((StubObject) arg).getProxy();
                return new Boolean(bodyID.equals(bodyProxy.bodyID));
            } else {
                // Comparing a stub against a non-reified object is never equal.
                return new Boolean(false);
            }
        }
        // Now gives the MethodCall object to the body
        try {
            // Dispatch by call kind: one-way (no result), asynchronous (future),
            // or synchronous (blocking) as a fallback.
            if (isOneWayCall(methodCall)) {
                reifyAsOneWay(methodCall);
                return null;
            }
            if (isAsynchronousCall(methodCall)) {
                return reifyAsAsynchronous(methodCall);
            }
            return reifyAsSynchronous(methodCall);
        } catch (MethodCallExecutionFailedException e) {
            throw new ProActiveRuntimeException(e.getMessage(),
                e.getTargetException());
        } catch (Throwable t) {
            // Unchecked throwables propagate as-is.
            if (t instanceof RuntimeException) {
                throw (RuntimeException) t;
            } else if (t instanceof Error) {
                throw (Error) t;
            } else {
                // check now which exception can be safely thrown
                // (only rethrow checked exceptions declared by the reified method)
                Class[] declaredExceptions = methodCall.getReifiedMethod()
                                                       .getExceptionTypes();
                for (int i = 0; i < declaredExceptions.length; i++) {
                    Class exceptionClass = declaredExceptions[i];
                    if (exceptionClass.isAssignableFrom(t.getClass())) {
                        throw t;
                    }
                }
                // Here we should extend the behavior to accept exception Handler
                throw new ProActiveRuntimeException(t);
            }
        }
    }
    /**
     * Sends the call as a one-way request (no future, no result).
     * A communication failure is wrapped in a SendRequestCommunicationException
     * and delegated to the matching NFE handler instead of being rethrown.
     */
    protected void reifyAsOneWay(MethodCall methodCall)
        throws MethodCallExecutionFailedException, RenegotiateSessionException {
        try {
            sendRequest(methodCall, null);
        } catch (java.io.IOException e) {
            // old stuff
            // throw new MethodCallExecutionFailedException("Exception occured in reifyAsOneWay while sending request for methodcall ="+methodCall.getName(), e);
            // Create a non functional exception encapsulating the network exception
            NonFunctionalException nfe = new SendRequestCommunicationException(
                    "Exception occured in reifyAsOneWay while sending request for methodcall =" +
                    methodCall.getName(), e);
            // Retrieve the right handler for the given exception
            Handler handler = ProActive.searchExceptionHandler(nfe, this);
            handler.handle(nfe);
        }
    }
    /**
     * Sends the call asynchronously and returns a stub wrapping a FutureProxy
     * that will receive the result (wait-by-necessity).
     * Future-creation and communication failures go through the NFE handlers.
     */
    protected Object reifyAsAsynchronous(MethodCall methodCall)
        throws MethodCallExecutionFailedException, RenegotiateSessionException {
        StubObject futureobject = null;
        // Creates a stub + FutureProxy for representing the result
        try {
            //futureobject = (StubObject)MOP.newInstance(methodCall.getReifiedMethod().getReturnType().getName(), null, Constants.DEFAULT_FUTURE_PROXY_CLASS_NAME, null);
            futureobject = (StubObject) MOP.newInstance(methodCall.getReifiedMethod()
                                                                  .getReturnType(),
                    null, Constants.DEFAULT_FUTURE_PROXY_CLASS_NAME, null);
        } catch (MOPException e) {
            // Create a non functional exception encapsulating the network exception
            NonFunctionalException nfe = new FutureCreationException(
                    "Exception occured in reifyAsAsynchronous while creating future for methodcall =" +
                    methodCall.getName(), e);
            // Retrieve the right handler for the given exception
            Handler handler = ProActive.searchExceptionHandler(nfe, this);
            handler.handle(nfe);
            // Check if problem is resolved
            // NOTE(review): handle(nfe) cannot assign futureobject, so when
            // MOP.newInstance failed this check always escalates — confirm
            // whether handlers are expected to recover in some other way.
            if (futureobject == null) {
                throw new MethodCallExecutionFailedException(
                    "Exception occured in reifyAsAsynchronous while creating future for methodcall =" +
                    methodCall.getName(), e);
            }
        } catch (ClassNotFoundException e) {
            // Create a non functional exception encapsulating the network exception
            NonFunctionalException nfe = new FutureCreationException(
                    "Exception occured in reifyAsAsynchronous while creating future for methodcall =" +
                    methodCall.getName(), e);
            // Retrieve the right handler for the given exception
            Handler handler = ProActive.searchExceptionHandler(nfe, this);
            handler.handle(nfe);
            // Check if problem is resolved
            if (futureobject == null) {
                throw new MethodCallExecutionFailedException(
                    "Exception occured in reifyAsAsynchronous while creating future for methodcall =" +
                    methodCall.getName(), e);
            }
        }
        // Set the id of the body creator in the created future
        FutureProxy fp = (FutureProxy) (futureobject.getProxy());
        fp.setCreatorID(bodyID);
        // Send the request
        try {
            sendRequest(methodCall, (Future) futureobject.getProxy());
        } catch (java.io.IOException e) {
            // old stuff
            // throw new MethodCallExecutionFailedException("Exception occured in reifyAsAsynchronous while sending request for methodcall ="+methodCall.getName(), e);
            // Create a non functional exception encapsulating the network exception
            NonFunctionalException nfe = new SendRequestCommunicationException(
                    "Exception occured in reifyAsAsynchronous while sending request for methodcall =" +
                    methodCall.getName(), e);
            // Retrieve the right handler for the given exception
            Handler handler = ProActive.searchExceptionHandler(nfe, this);
            handler.handle(nfe);
        }
        // And return the future object
        return futureobject;
    }
    /**
     * Sends the call and blocks until the result (or a raised exception)
     * arrives in the future, then returns/throws it.
     */
    protected Object reifyAsSynchronous(MethodCall methodCall)
        throws Throwable, MethodCallExecutionFailedException,
            RenegotiateSessionException {
        // Setting methodCall.res to null means that we do not use the future mechanism
        Future f = FutureProxy.getFutureProxy();
        f.setCreatorID(bodyID);
        // Set it as the 'thing' to send results to methodCall.res = f;
        // Send the request
        try {
            sendRequest(methodCall, f);
        } catch (java.io.IOException e) {
            // old stuff
            // throw new MethodCallExecutionFailedException("Exception occured in reifyAsSynchronous while sending request for methodcall ="+methodCall.getName(), e);
            // Create a non functional exception encapsulating the network exception
            NonFunctionalException nfe = new SendRequestCommunicationException(
                    "Exception occured in reifyAsSynchronous while sending request for methodcall =" +
                    methodCall.getName(), e);
            // Retrieve the right handler for the given exception
            Handler handler = ProActive.searchExceptionHandler(nfe, this);
            handler.handle(nfe);
        }
        // Returns the result
        // note : it seems that a functional exception is returned -> NFE are not necessary
        if (f.getRaisedException() != null) {
            throw f.getRaisedException();
        } else {
            return f.getResult();
        }
    }
    // Transport-specific request delivery, implemented by concrete proxies.
    protected abstract void sendRequest(MethodCall methodCall, Future future)
        throws java.io.IOException, RenegotiateSessionException;
    protected abstract void sendRequest(MethodCall methodCall, Future future,
        Body sourceBody)
        throws java.io.IOException, RenegotiateSessionException;
}
|
src/org/objectweb/proactive/core/body/proxy/AbstractBodyProxy.java
|
/*
* ################################################################
*
* ProActive: The Java(TM) library for Parallel, Distributed,
* Concurrent computing with Security and Mobility
*
* Copyright (C) 1997-2002 INRIA/University of Nice-Sophia Antipolis
* Contact: proactive-support@inria.fr
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA
*
* Initial developer(s): The ProActive Team
* http://www.inria.fr/oasis/ProActive/contacts.html
* Contributor(s):
*
* ################################################################
*/
package org.objectweb.proactive.core.body.proxy;
import org.objectweb.proactive.Body;
import org.objectweb.proactive.core.Constants;
import org.objectweb.proactive.core.ProActiveRuntimeException;
import org.objectweb.proactive.core.UniqueID;
import org.objectweb.proactive.core.body.future.Future;
import org.objectweb.proactive.core.body.future.FutureProxy;
import org.objectweb.proactive.core.mop.MOP;
import org.objectweb.proactive.core.mop.MOPException;
import org.objectweb.proactive.core.mop.MethodCall;
import org.objectweb.proactive.core.mop.MethodCallExecutionFailedException;
import org.objectweb.proactive.core.mop.StubObject;
import org.objectweb.proactive.ext.security.RenegotiateSessionException;
/**
 * Base class for body proxies: reifies method calls made on a stub into
 * requests sent to the (possibly remote) active object's body, implementing
 * asynchronous message-passing with future objects where possible.
 * In this version communication failures are rethrown directly as
 * MethodCallExecutionFailedException (no NFE handler indirection).
 */
public abstract class AbstractBodyProxy extends AbstractProxy
    implements BodyProxy,
        java.io.Serializable {
    //
    // -- STATIC MEMBERS -----------------------------------------------
    //
    //
    // -- PROTECTED MEMBERS -----------------------------------------------
    //
    // Identifier of the body this proxy forwards calls to.
    protected UniqueID bodyID;
    //
    // -- CONSTRUCTORS -----------------------------------------------
    //
    public AbstractBodyProxy() {
    }
    AbstractBodyProxy(UniqueID bodyID) {
        this.bodyID = bodyID;
    }
    //
    // -- PUBLIC METHODS -----------------------------------------------
    //
    //
    // -- implements BodyProxy -----------------------------------------------
    //
    public UniqueID getBodyID() {
        return bodyID;
    }
    //
    // -- implements Proxy -----------------------------------------------
    //
    /**
     * Performs operations on the Call object created by the stub, thus changing
     * the semantics of message-passing to asynchronous message-passing with
     * future objects
     *
     *
     * The semantics of message-passing implemented by this proxy class
     * may be defined as follows :<UL>
     * <LI>Asynchronous message-passing
     * <LI>Creation of future objects where possible (which leads to
     * wait-by-necessity).
     * <LI>Synchronous, blocking calls where futures are not available.
     * <LI>The Call <code>methodCall</code> is passed to the skeleton for execution.
     * </UL>
     */
    public Object reify(MethodCall methodCall)
        throws Throwable {
        // Special-case equals: two stubs are equal iff they proxy the same body ID.
        if (methodCall.getName().equals("equals")) {
            //there is only one argument to this method
            Object arg = methodCall.getParameter(0);
            if (MOP.isReifiedObject(arg)) {
                AbstractBodyProxy bodyProxy = (AbstractBodyProxy)((StubObject)arg).getProxy();
                return new Boolean(bodyID.equals(bodyProxy.bodyID));
            } else {
                // Comparing a stub against a non-reified object is never equal.
                return new Boolean(false);
            }
        }
        // Now gives the MethodCall object to the body
        try {
            // Dispatch by call kind: one-way, asynchronous (future), or synchronous.
            if (isOneWayCall(methodCall)) {
                reifyAsOneWay(methodCall);
                return null;
            }
            if (isAsynchronousCall(methodCall)) {
                return reifyAsAsynchronous(methodCall);
            }
            return reifyAsSynchronous(methodCall);
        } catch (MethodCallExecutionFailedException e) {
            throw new ProActiveRuntimeException(e.getMessage(),
                e.getTargetException());
        }
        catch (Throwable t) {
            // Unchecked throwables propagate as-is.
            if (t instanceof RuntimeException) {
                throw (RuntimeException)t;
            } else if (t instanceof Error) {
                throw (Error)t;
            } else {
                // check now which exception can be safely thrown
                // (only rethrow checked exceptions declared by the reified method)
                Class[] declaredExceptions = methodCall.getReifiedMethod().getExceptionTypes();
                for (int i = 0; i < declaredExceptions.length; i++) {
                    Class exceptionClass = declaredExceptions[i];
                    if (exceptionClass.isAssignableFrom(t.getClass())) {
                        throw t;
                    }
                }
                // Here we should extend the behavior to accept exception Handler
                throw new ProActiveRuntimeException(t);
            }
        }
    }
    /**
     * Sends the call as a one-way request (no future, no result).
     */
    protected void reifyAsOneWay(MethodCall methodCall) throws MethodCallExecutionFailedException, RenegotiateSessionException {
        try {
            sendRequest(methodCall, null);
        } catch (java.io.IOException e) {
            throw new MethodCallExecutionFailedException("Exception occured in reifyAsOneWay while sending request for methodcall ="+methodCall.getName(), e);
        }
    }
    /**
     * Sends the call asynchronously and returns a stub wrapping a FutureProxy
     * that will receive the result (wait-by-necessity).
     */
    protected Object reifyAsAsynchronous(MethodCall methodCall) throws MethodCallExecutionFailedException, RenegotiateSessionException {
        StubObject futureobject;
        // Creates a stub + FutureProxy for representing the result
        try {
            //futureobject = (StubObject)MOP.newInstance(methodCall.getReifiedMethod().getReturnType().getName(), null, Constants.DEFAULT_FUTURE_PROXY_CLASS_NAME, null);
            futureobject = (StubObject)MOP.newInstance(methodCall.getReifiedMethod().getReturnType(), null, Constants.DEFAULT_FUTURE_PROXY_CLASS_NAME, null);
        } catch (MOPException e) {
            throw new MethodCallExecutionFailedException("Exception occured in reifyAsAsynchronous while creating future for methodcall ="+methodCall.getName(), e);
        } catch (ClassNotFoundException e) {
            throw new MethodCallExecutionFailedException("Exception occured in reifyAsAsynchronous while creating future for methodcall ="+methodCall.getName(), e);
        }
        // Set the id of the body creator in the created future
        FutureProxy fp = (FutureProxy)(futureobject.getProxy());
        fp.setCreatorID(bodyID);
        // Send the request
        try {
            sendRequest(methodCall, (Future)futureobject.getProxy());
        } catch (java.io.IOException e) {
            throw new MethodCallExecutionFailedException("Exception occured in reifyAsAsynchronous while sending request for methodcall ="+methodCall.getName(), e);
        }
        // And return the future object
        return futureobject;
    }
    /**
     * Sends the call and blocks until the result (or a raised exception)
     * arrives in the future, then returns/throws it.
     */
    protected Object reifyAsSynchronous(MethodCall methodCall) throws Throwable, MethodCallExecutionFailedException , RenegotiateSessionException{
        // Setting methodCall.res to null means that we do not use the future mechanism
        Future f = FutureProxy.getFutureProxy();
        f.setCreatorID(bodyID);
        // Set it as the 'thing' to send results to methodCall.res = f;
        // Send the request
        try {
            sendRequest(methodCall, f);
        } catch (java.io.IOException e) {
            throw new MethodCallExecutionFailedException("Exception occured in reifyAsSynchronous while sending request for methodcall ="+methodCall.getName(), e);
        }
        // Returns the result
        if (f.getRaisedException() != null) {
            throw f.getRaisedException();
        } else {
            return f.getResult();
        }
    }
    // Transport-specific request delivery, implemented by concrete proxies.
    protected abstract void sendRequest(MethodCall methodCall, Future future) throws java.io.IOException, RenegotiateSessionException;
    protected abstract void sendRequest(MethodCall methodCall, Future future, Body sourceBody) throws java.io.IOException, RenegotiateSessionException;
}
|
NFE handle communication error
git-svn-id: 9146c88ff6d39b48099bf954d15d68f687b3fa69@1198 28e8926c-6b08-0410-baaa-805c5e19b8d6
|
src/org/objectweb/proactive/core/body/proxy/AbstractBodyProxy.java
|
NFE handle communication error
|
|
Java
|
agpl-3.0
|
575ee3311d6148fbc7acf6aed6f8dae248ad369d
| 0
|
deepstupid/sphinx5
|
/*
* Copyright 1999-2002 Carnegie Mellon University.
* Portions Copyright 2002 Sun Microsystems, Inc.
* Portions Copyright 2002 Mitsubishi Electronic Research Laboratories.
* All Rights Reserved. Use is subject to license terms.
*
* See the file "license.terms" for information on usage and
* redistribution of this file, and for a DISCLAIMER OF ALL
* WARRANTIES.
*
*/
package edu.cmu.sphinx.knowledge.acoustic;
import edu.cmu.sphinx.util.SphinxProperties;
import edu.cmu.sphinx.util.Timer;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.logging.Logger;
import java.util.logging.Level;
import java.util.StringTokenizer;
/**
* Represents the generic interface to the Acoustic
* Model for sphinx4
*/
/**
 * Represents the generic interface to the Acoustic
 * Model for sphinx4
 *
 * Trainer-specific variant of AcousticModel: adds accumulation/normalization
 * buffers (via HMMPoolManager) used during Baum-Welch training, plus
 * save/load of the model data. Instances are cached per (name, context)
 * key in the inherited contextMap.
 */
public class TrainerAcousticModel extends AcousticModel {
    /**
     * Prefix for acoustic model SphinxProperties.
     */
    public final static String PROP_PREFIX
        = "edu.cmu.sphinx.knowledge.acoustic.";
    /**
     * The directory where the acoustic model data can be found.
     */
    public final static String PROP_LOCATION_SAVE =
        PROP_PREFIX + "location.save";
    /**
     * The default value of PROP_LOCATION_SAVE.
     */
    public final static String PROP_LOCATION_SAVE_DEFAULT = ".";
    /**
     * The save format for the acoustic model data. Current supported
     * formats are:
     *
     * sphinx3_ascii
     * sphinx3_binary
     */
    public final static String PROP_FORMAT_SAVE = PROP_PREFIX + "format.save";
    /**
     * The default value of PROP_FORMAT_SAVE.
     */
    public final static String PROP_FORMAT_SAVE_DEFAULT = "sphinx3.binary";
    /**
     * The file containing the phone list.
     */
    public final static String PROP_PHONE_LIST = "phone_list";
    /**
     * The default value of PROP_PHONE_LIST.
     */
    public final static String PROP_PHONE_LIST_DEFAULT = "phonelist";
    /**
     * Flag indicating all models should be operated on.
     */
    public final static int ALL_MODELS = -1;
    /**
     * The logger for this class
     */
    private static Logger logger =
        Logger.getLogger(PROP_PREFIX + "TrainerAcousticModel");
    /**
     * The pool manager
     */
    private HMMPoolManager hmmPoolManager;
    /**
     * Initializes an acoustic model of a given context. This method
     * should be called once per context. It is used to associate a
     * particular context with an acoustic model resource. This
     * method should be called only when one acoustic model is
     * specified in the properties file. Otherwise, use the method
     * <code>getAcousticModel(name, context)</code>.
     *
     * @param context the context of interest
     *
     * @return the acoustic model associated with the context or null
     * if the given context has no associated acoustic model
     *
     * @throws IOException if the model could not be loaded
     * @throws FileNotFoundException if the model does not exist
     */
    public static TrainerAcousticModel getTrainerAcousticModel(String context)
        throws IOException, FileNotFoundException {
        // This bit of code simply checks if there are any acoustic
        // model names specified in the props file. If there is one
        // name, use that name. If there are more than one, then flag
        // an error.
        SphinxProperties props =
            SphinxProperties.getSphinxProperties(context);
        String amNames = props.getString(PROP_NAMES, PROP_NAMES_DEFAULT);
        if (amNames != null) {
            StringTokenizer tokenizer = new StringTokenizer(amNames);
            if (tokenizer.countTokens() == 0) {
                // Property was set but blank: treat as unnamed model.
                amNames = null;
            } else if (tokenizer.countTokens() == 1) {
                amNames = amNames.trim();
            } else if (tokenizer.countTokens() > 1) {
                throw new Error
                    ("TrainerAcousticModel: more than one acoustic model" +
                     " specified. "+
                     "Instead of method getAcousticModel(context), " +
                     "use method getAcousticModel(name, context).");
            }
        }
        return getTrainerAcousticModel(amNames, context);
    }
    /**
     * Returns the acoustic model of the given name and context.
     * If the acoustic model of the given name and context has not
     * been loaded, it will be loaded, and returned.
     * If there is only one acoustic model for this context,
     * "name" can be null.
     *
     * @param name the name of the acoustic model, or null if
     * the acoustic model has no name.
     * @param context the context of interest
     *
     * @return the name acoustic model in the given context, or
     * null if no such acoustic model is found
     *
     * @throws IOException if the model count not be loaded
     * @throws FileNotFoundException if the model does not exist
     */
    public static TrainerAcousticModel getTrainerAcousticModel(String name,
        String context) throws IOException, FileNotFoundException {
        // Lazily create and cache one instance per (name, context) key.
        String key = getModelKey(name, context);
        if (contextMap.get(key) == null) {
            TrainerAcousticModel model =
                new TrainerAcousticModel(name, context);
            contextMap.put(key, model);
        }
        return (TrainerAcousticModel) contextMap.get(key);
    }
    /**
     * Creates an acoustic model with the given name and context.
     * Since acoustic models are only created by the factory method,
     * getAcousticModel(), this constructor is private.
     *
     * @param name the name of the acoustic model
     * @param context the context for this acoustic model
     *
     * @throws IOException if the model could not be loaded
     *
     * @see #getAcousticModel
     */
    private TrainerAcousticModel(String name, String context)
        throws IOException {
        this.name = name;
        this.context = context;
        this.props = SphinxProperties.getSphinxProperties(context);
        this.loadTimer = Timer.getTimer(context, TIMER_LOAD);
        logInfo();
    }
    /**
     * Creates an acoustic model. Since acoustic models are only
     * created by the factory method <code> getAcousticModel </code>,
     * this contructor is <code> private </code>. This constructor
     * is used when there is only one acoustic model for the given
     * context, which is why the acoustic model has no name.
     * Note that an acoustic model can have a name even if it is
     * the only acoustic model in this context.
     *
     * @param context the context for this acoustic model
     *
     * @throws IOException if the model could not be loaded
     *
     * @see #getAcousticModel
     */
    private TrainerAcousticModel(String context)
        throws IOException {
        this(null, context);
    }
    /**
     * Initializes the acoustic model
     *
     * @throws IOException if the model could not be created
     */
    public void initialize() throws IOException {
        loader = new ModelInitializerLoader(name, props);
        hmmPoolManager = new HMMPoolManager(loader, props);
    }
    /**
     * Saves the acoustic model with a given name and format
     *
     * @param name the name of the acoustic model
     *
     * @throws IOException if the model could not be loaded
     * @throws FileNotFoundException if the model does not exist
     */
    public void save(String name) throws IOException, FileNotFoundException {
        Saver saver;
        // Per-model format property overrides the global one when a name is given.
        String formatProp = PROP_FORMAT_SAVE;
        if (name != null) {
            formatProp = PROP_PREFIX + name + ".format.save";
        }
        String format = props.getString(formatProp, PROP_FORMAT_SAVE_DEFAULT);
        if (format.equals("sphinx3.ascii")) {
            saver = new Sphinx3Saver(name, props, false, loader);
        } else if (format.equals("sphinx3.binary")) {
            saver = new Sphinx3Saver(name, props, true, loader);
        } else { // add new saving code here.
            saver = null;
            logger.severe("Unsupported acoustic model format " + format);
        }
        // NOTE(review): the Saver instance is never invoked after construction
        // in this method — presumably Sphinx3Saver writes the data in its
        // constructor; confirm, otherwise save() is a no-op.
    }
    /**
     * Loads the acoustic models. This has to be explicitly requested
     * in this class.
     *
     * @throws IOException if the model could not be loaded
     * @throws FileNotFoundException if the model does not exist
     */
    public void load() throws IOException, FileNotFoundException {
        loadTimer.start();
        super.load();
        loadTimer.stop();
        logInfo();
        // Rebuild the accumulation buffers against the freshly loaded pools.
        hmmPoolManager = new HMMPoolManager(loader, props);
    }
    /**
     * Accumulate the current TrainerScore into the buffers.
     *
     * @param index the current index into the TrainerScore vector
     * @param trainerScore the TrainerScore in the current frame
     * @param nextTrainerScore the TrainerScore in the next frame
     */
    public void accumulate(int index, TrainerScore[] trainerScore,
                           TrainerScore[] nextTrainerScore) {
        hmmPoolManager.accumulate(index, trainerScore, nextTrainerScore);
    }
    /**
     * Accumulate the current TrainerScore into the buffers.
     *
     * @param index the current index into the TrainerScore vector
     * @param trainerScore the TrainerScore
     */
    public void accumulate(int index, TrainerScore[] trainerScore) {
        hmmPoolManager.accumulate(index, trainerScore);
    }
    /**
     * Update the log likelihood. This should be called at the end of
     * each utterance.
     */
    public void updateLogLikelihood() {
        hmmPoolManager.updateLogLikelihood();
    }
    /**
     * Normalize the buffers and update the models.
     *
     * @return the log likelihood for the whole training set
     */
    public float normalize() {
        float logLikelihood = hmmPoolManager.normalize();
        hmmPoolManager.update();
        return logLikelihood;
    }
}
|
edu/cmu/sphinx/knowledge/acoustic/TrainerAcousticModel.java
|
/*
* Copyright 1999-2002 Carnegie Mellon University.
* Portions Copyright 2002 Sun Microsystems, Inc.
* Portions Copyright 2002 Mitsubishi Electronic Research Laboratories.
* All Rights Reserved. Use is subject to license terms.
*
* See the file "license.terms" for information on usage and
* redistribution of this file, and for a DISCLAIMER OF ALL
* WARRANTIES.
*
*/
package edu.cmu.sphinx.knowledge.acoustic;
import edu.cmu.sphinx.util.SphinxProperties;
import edu.cmu.sphinx.util.Timer;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.logging.Logger;
import java.util.logging.Level;
import java.util.StringTokenizer;
/**
* Represents the generic interface to the Acoustic
* Model for sphinx4
*/
public class TrainerAcousticModel extends AcousticModel {
/**
* Prefix for acoustic model SphinxProperties.
*/
public final static String PROP_PREFIX
= "edu.cmu.sphinx.knowledge.acoustic.";
/**
* The directory where the acoustic model data can be found.
*/
public final static String PROP_LOCATION_SAVE =
PROP_PREFIX + "location.save";
/**
* The default value of PROP_LOCATION_SAVE.
*/
public final static String PROP_LOCATION_SAVE_DEFAULT = ".";
/**
* The save format for the acoustic model data. Current supported
* formats are:
*
* sphinx3_ascii
* sphinx3_binary
*/
public final static String PROP_FORMAT_SAVE = PROP_PREFIX + "format.save";
/**
* The default value of PROP_FORMAT_SAVE.
*/
public final static String PROP_FORMAT_SAVE_DEFAULT = "sphinx3.binary";
/**
* The file containing the phone list.
*/
public final static String PROP_PHONE_LIST = "phone_list";
/**
* The default value of PROP_PHONE_LIST.
*/
public final static String PROP_PHONE_LIST_DEFAULT = "phonelist";
/**
* Flag indicating all models should be operated on.
*/
public final static int ALL_MODELS = -1;
/**
* The logger for this class
*/
private static Logger logger =
Logger.getLogger(PROP_PREFIX + "TrainerAcousticModel");
/**
* The pool manager
*/
private HMMPoolManager hmmPoolManager;
/**
 * Returns the trainer acoustic model for a context that has at most one
 * acoustic model configured. When several model names are listed in the
 * properties file this is ambiguous, and callers must use
 * {@code getTrainerAcousticModel(name, context)} instead.
 *
 * @param context the context of interest
 *
 * @return the acoustic model associated with the context or null
 *    if the given context has no associated acoustic model
 *
 * @throws IOException if the model could not be loaded
 * @throws FileNotFoundException if the model does not exist
 */
public static TrainerAcousticModel getTrainerAcousticModel(String context)
        throws IOException, FileNotFoundException {
    SphinxProperties props =
        SphinxProperties.getSphinxProperties(context);
    String amNames = props.getString(PROP_NAMES, PROP_NAMES_DEFAULT);
    if (amNames != null) {
        // Count whitespace-separated names to decide how to proceed.
        int nameCount = new StringTokenizer(amNames).countTokens();
        if (nameCount == 0) {
            // Whitespace-only value: treat as "no name given".
            amNames = null;
        } else if (nameCount == 1) {
            amNames = amNames.trim();
        } else {
            throw new Error
                ("TrainerAcousticModel: more than one acoustic model" +
                 " specified. "+
                 "Instead of method getAcousticModel(context), " +
                 "use method getAcousticModel(name, context).");
        }
    }
    return getTrainerAcousticModel(amNames, context);
}
/**
 * Returns the acoustic model of the given name and context, loading
 * and caching it on first request. If there is only one acoustic model
 * for this context, {@code name} can be null.
 *
 * @param name the name of the acoustic model, or null if
 *     the acoustic model has no name.
 * @param context the context of interest
 *
 * @return the named acoustic model in the given context, or
 *     null if no such acoustic model is found
 *
 * @throws IOException if the model could not be loaded
 * @throws FileNotFoundException if the model does not exist
 */
public static TrainerAcousticModel getTrainerAcousticModel(String name,
        String context) throws IOException, FileNotFoundException {
    String key = getModelKey(name, context);
    // NOTE(review): check-then-act on the shared contextMap is not
    // thread-safe; presumably factory calls are single-threaded -- confirm.
    TrainerAcousticModel cached = (TrainerAcousticModel) contextMap.get(key);
    if (cached == null) {
        cached = new TrainerAcousticModel(name, context);
        contextMap.put(key, cached);
    }
    return cached;
}
/**
 * Creates an acoustic model with the given name and context.
 * Since acoustic models are only created by the factory method,
 * getAcousticModel(), this constructor is private.
 *
 * @param name the name of the acoustic model
 * @param context the context for this acoustic model
 *
 * @throws IOException if the model could not be loaded
 *
 * @see #getAcousticModel
 */
private TrainerAcousticModel(String name, String context)
    throws IOException {
    this.name = name;
    this.context = context;
    this.props = SphinxProperties.getSphinxProperties(context);
    // Timer used by load() to measure how long model loading takes.
    this.loadTimer = Timer.getTimer(context, TIMER_LOAD);
    logInfo();
}
/**
 * Creates an acoustic model. Since acoustic models are only
 * created by the factory method <code> getAcousticModel </code>,
 * this constructor is <code> private </code>. This constructor
 * is used when there is only one acoustic model for the given
 * context, which is why the acoustic model has no name.
 * Note that an acoustic model can have a name even if it is
 * the only acoustic model in this context.
 *
 * @param context the context for this acoustic model
 *
 * @throws IOException if the model could not be loaded
 *
 * @see #getAcousticModel
 */
private TrainerAcousticModel(String context)
    throws IOException {
    // Delegate to the two-argument constructor with a null (absent) name.
    this(null, context);
}
/**
 * Initializes the acoustic model: creates the model-initializer loader
 * and the HMM pool manager that will accumulate training statistics.
 *
 * @throws IOException if the model could not be created
 */
public void initialize() throws IOException {
    loader = new ModelInitializerLoader(name, props);
    hmmPoolManager = new HMMPoolManager(loader, props);
}
/**
 * Saves the acoustic model with a given name and format. The format is
 * read from the per-model property "&lt;name&gt;.format.save", falling back
 * to PROP_FORMAT_SAVE when name is null.
 *
 * NOTE(review): the constructed Saver is never used after construction --
 * presumably Sphinx3Saver performs the save inside its constructor;
 * confirm before refactoring.
 *
 * @param name the name of the acoustic model
 *
 * @throws IOException if the model could not be saved
 * @throws FileNotFoundException if the model does not exist
 */
public void save(String name) throws IOException, FileNotFoundException {
    Saver saver;
    String formatProp = PROP_FORMAT_SAVE;
    if (name != null) {
        formatProp = PROP_PREFIX + name + ".format.save";
    }
    String format = props.getString(formatProp, PROP_FORMAT_SAVE_DEFAULT);
    if (format.equals("sphinx3.ascii")) {
        // false => ASCII (non-binary) output
        saver = new Sphinx3Saver(name, props, false, loader);
    } else if (format.equals("sphinx3.binary")) {
        // true => binary output
        saver = new Sphinx3Saver(name, props, true, loader);
    } else { // add new saving code here.
        saver = null;
        logger.severe("Unsupported acoustic model format " + format);
    }
}
/**
 * Loads the acoustic models (timed via the load timer) and recreates
 * the HMM pool manager from the freshly loaded data. This has to be
 * explicitly requested in this class.
 *
 * @throws IOException if the model could not be loaded
 * @throws FileNotFoundException if the model does not exist
 */
public void load() throws IOException, FileNotFoundException {
    loadTimer.start();
    super.load();
    loadTimer.stop();
    logInfo();
    // Rebuild the pool manager so it reflects the newly loaded model data.
    hmmPoolManager = new HMMPoolManager(loader, props);
}
/**
 * Accumulates the current TrainerScore into the training buffers.
 * The scores for the next frame are needed as well (e.g. for
 * non-emitting states); delegation to the pool manager does the work.
 *
 * @param trainerScore the TrainerScore
 * @param nextTrainerScore the TrainerScores in the next frame
 */
public void accumulate(TrainerScore trainerScore,
                       TrainerScore[] nextTrainerScore) {
    hmmPoolManager.accumulate(trainerScore, nextTrainerScore);
}
/**
 * Accumulates the current TrainerScore into the training buffers,
 * without next-frame information.
 *
 * @param trainerScore the TrainerScore
 */
public void accumulate(TrainerScore trainerScore) {
    hmmPoolManager.accumulate(trainerScore);
}
/**
 * Normalizes the accumulated buffers and then applies the resulting
 * update to the models.
 *
 * @return the log likelihood for the whole training set
 */
public float normalize() {
    final float totalLogLikelihood = hmmPoolManager.normalize();
    hmmPoolManager.update();
    return totalLogLikelihood;
}
}
|
Instead of passing just the information about the current TrainerScore,
we now pass the whole vector of TrainerScore, and an index. We need all
TrainerScore in the current frame for the non-emitting states.
git-svn-id: a8b04003a33e1d3e001b9d20391fa392a9f62d91@1937 94700074-3cef-4d97-a70e-9c8c206c02f5
|
edu/cmu/sphinx/knowledge/acoustic/TrainerAcousticModel.java
|
Instead of passing just the information about the current TrainerScore, we now pass the whole vector of TrainerScore, and an index. We need all TrainerScore in the current frame for the non-emitting states.
|
|
Java
|
agpl-3.0
|
c4e1aadc4967013e0089a5f6fc8c8bfa51216ebc
| 0
|
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
|
22feba58-2e61-11e5-9284-b827eb9e62be
|
hello.java
|
22f955c2-2e61-11e5-9284-b827eb9e62be
|
22feba58-2e61-11e5-9284-b827eb9e62be
|
hello.java
|
22feba58-2e61-11e5-9284-b827eb9e62be
|
|
Java
|
agpl-3.0
|
aed0380ac5efa12417347b7ad61586894053135b
| 0
|
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
|
fbf4536e-2e60-11e5-9284-b827eb9e62be
|
hello.java
|
fbeeead2-2e60-11e5-9284-b827eb9e62be
|
fbf4536e-2e60-11e5-9284-b827eb9e62be
|
hello.java
|
fbf4536e-2e60-11e5-9284-b827eb9e62be
|
|
Java
|
lgpl-2.1
|
4c6804b883156e905f6c1543df31f5fed1fff1e9
| 0
|
darranl/wildfly-core,aloubyansky/wildfly-core,jfdenise/wildfly-core,darranl/wildfly-core,JiriOndrusek/wildfly-core,jamezp/wildfly-core,ivassile/wildfly-core,darranl/wildfly-core,bstansberry/wildfly-core,JiriOndrusek/wildfly-core,JiriOndrusek/wildfly-core,jfdenise/wildfly-core,aloubyansky/wildfly-core,luck3y/wildfly-core,soul2zimate/wildfly-core,luck3y/wildfly-core,soul2zimate/wildfly-core,aloubyansky/wildfly-core,yersan/wildfly-core,yersan/wildfly-core,jamezp/wildfly-core,luck3y/wildfly-core,jfdenise/wildfly-core,ivassile/wildfly-core,ivassile/wildfly-core,soul2zimate/wildfly-core,bstansberry/wildfly-core,bstansberry/wildfly-core,yersan/wildfly-core,jamezp/wildfly-core
|
/*
* JBoss, Home of Professional Open Source.
* Copyright 2013, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.as.domain.management.access;
import org.jboss.as.controller.OperationContext;
import org.jboss.as.controller.OperationFailedException;
import org.jboss.as.controller.OperationStepHandler;
import org.jboss.as.controller.PathAddress;
import org.jboss.dmr.ModelNode;
/**
* A {@link OperationStepHandler} for removing principals from the include / exclude list.
*
* @author <a href="mailto:darran.lofthouse@jboss.com">Darran Lofthouse</a>
*/
public class PrincipalRemove implements OperationStepHandler {
    // The handler is stateless, so a single shared instance suffices.
    public static final PrincipalRemove INSTANCE = new PrincipalRemove();
    private PrincipalRemove() {
    }
    @Override
    public void execute(OperationContext context, ModelNode operation) throws OperationFailedException {
        // Remove the principal resource addressed by this operation step.
        context.removeResource(PathAddress.EMPTY_ADDRESS);
        // Explicitly mark the step as complete (per the accompanying
        // commit message, WFLY-1618 / WFLY-490).
        context.stepCompleted();
    }
}
|
domain-management/src/main/java/org/jboss/as/domain/management/access/PrincipalRemove.java
|
/*
* JBoss, Home of Professional Open Source.
* Copyright 2013, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.as.domain.management.access;
import org.jboss.as.controller.OperationContext;
import org.jboss.as.controller.OperationFailedException;
import org.jboss.as.controller.OperationStepHandler;
import org.jboss.as.controller.PathAddress;
import org.jboss.dmr.ModelNode;
/**
* A {@link OperationStepHandler} for removing principals from the include / exclude list.
*
* @author <a href="mailto:darran.lofthouse@jboss.com">Darran Lofthouse</a>
*/
public class PrincipalRemove implements OperationStepHandler {
    // The handler is stateless, so a single shared instance suffices.
    public static final PrincipalRemove INSTANCE = new PrincipalRemove();
    private PrincipalRemove() {
    }
    @Override
    public void execute(OperationContext context, ModelNode operation) throws OperationFailedException {
        // Remove the principal resource addressed by this operation step.
        context.removeResource(PathAddress.EMPTY_ADDRESS);
    }
}
|
[WFLY-1618] / [WFLY-490] Mark principal removal as being complete.
was: 0062a890915f66f7c43934ee848735ff45052eb9
|
domain-management/src/main/java/org/jboss/as/domain/management/access/PrincipalRemove.java
|
[WFLY-1618] / [WFLY-490] Mark principal removal as being complete.
|
|
Java
|
apache-2.0
|
ae54111685de9acb79cb3a1c16586afb98f68da0
| 0
|
JetBrains/intellij-scala,JetBrains/intellij-scala
|
package org.jetbrains.plugins.scala.nailgun;
import com.facebook.nailgun.Alias;
import com.facebook.nailgun.NGConstants;
import com.facebook.nailgun.NGServer;
import java.io.File;
import java.io.IOException;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.Arrays;
import java.util.HashSet;
import java.util.UUID;
import java.util.function.Function;
import java.util.stream.Stream;
import static java.util.Arrays.asList;
/**
* used from {@link org.jetbrains.plugins.scala.compiler.CompileServerLauncher}
*/
@SuppressWarnings("JavadocReference")
public class NailgunRunner {
    /** NOTE: set of commands should be equal to the commands from {@link org.jetbrains.jps.incremental.scala.remote.CommandIds} */
    private static final String[] COMMANDS = {
            "compile",
            "compile-jps",
            "get-metrics",
            "start-metering",
            "end-metering"
    };
    private static final String SERVER_DESCRIPTION = "Scala compile server";
    private static final String STOP_ALIAS_START = "stop_";
    private static final String STOP_CLASS_NAME = "com.facebook.nailgun.builtins.NGStop";
    /** System property that caps the nailgun session pool size (JPS parallel compilation threads). */
    private static final String JPS_COMPILATION_MAX_THREADS_KEY = "compile.parallel.max.threads";
    /**
     * An alternative to default nailgun main {@link com.facebook.nailgun.NGServer#main(java.lang.String[])}
     */
    public static void main(String[] args) throws Exception {
        if (args.length != 4)
            throw new IllegalArgumentException("Usage: NailgunRunner [port] [id] [classpath] [system-dir-path]");
        int port = Integer.parseInt(args[0]);
        String id = args[1];
        String classpath = args[2];
        Path scalaCompileServerSystemDir = Paths.get(args[3]);
        URLClassLoader classLoader = constructClassLoader(classpath);
        // Write the auth token for this port before the server starts listening.
        TokensGenerator.generateAndWriteTokenFor(scalaCompileServerSystemDir, port);
        // getByName(null) resolves to the loopback address.
        InetAddress address = InetAddress.getByName(null);
        NGServer server = createServer(address, port, id, scalaCompileServerSystemDir, classLoader);
        Thread thread = new Thread(server);
        thread.setName("NGServer(" + address.toString() + ", " + port + "," + id + ")");
        thread.setContextClassLoader(classLoader);
        thread.start();
        Runtime.getRuntime().addShutdownHook(new ShutdownHook(server, scalaCompileServerSystemDir));
    }
    /**
     * Extra class loader is required due to several reasons:
     * <p>
     * 1. Dotty compiler interfaces (used inside Main during compilation)
     * casts classloader to a URLClassloader, and in JRE 11 AppClassLoader is not an instance of URLClassloader.<br>
     * <p>
     * 2. In order to run REPL instances (aka iLoopWrapper) (for Worksheet in REPL mode) in compiler server process.
     * REPL instances can use arbitrary scala versions in runtime. However, Compile Server uses fixed scala version.
     * In order to interact with REPL in compile server `repl-interface` is used. It's written in java to avoid any
     * scala binary incompatibility errors at runtime.
     * <p>
     * Final classloader hierarchy looks like this
     * <pre>
     *                            [PlatformClassLoader]
     *                                     |
     *                            [AppClassLoader] (pure java)
     *                                     |
     *                            [repl-interface] (pure java)
     *                            /        |      \
     *                           /         |   [JPS & Compiler Server jars] (scala version A)
     *                          /          |
     *                         /   [REPL instance 1] (scala version B)
     *                        /
     *               [REPL instance 2] (scala version C)
     * </pre>
     * Where:<br>
     * [PlatformClassLoader] - contains all jvm classes<br>
     * [AppClassLoader] - contains jars required to run Nailgun itself (scala-nailgun-runner, nailgun.jar)<br>
     * [repl-interface] - contains jars through which repl instances interact with main CompileServer<br>
     * [JPS & Compiler Server jars] - contains main compiler server jars with fixed scala version (2.13.2 at this moment)<br>
     * [REPL instance N] - classloaders created for each REPL instance with arbitrary scala version
     *
     * @see org.jetbrains.jps.incremental.scala.local.worksheet.ILoopWrapperFactoryHandler#createClassLoader(org.jetbrains.plugins.scala.compiler.data.CompilerJars)
     */
    public static URLClassLoader constructClassLoader(String classpath) {
        Function<String, URL> pathToUrl = path -> {
            try {
                return new File(path).toURI().toURL();
            } catch (MalformedURLException e) {
                throw new RuntimeException(e);
            }
        };
        URL[] urls = Stream.of(classpath.split(File.pathSeparator))
                .map(pathToUrl)
                .toArray(URL[]::new);
        //noinspection Convert2MethodRef
        URL[] replInterfaceUrls = Arrays.stream(urls).filter(it -> isReplInterfaceJar(it)).toArray(URL[]::new);
        if (replInterfaceUrls.length == 0) {
            throw new IllegalStateException("repl interface jar not found");
        }
        URL[] otherUrls = Arrays.stream(urls).filter(it -> !isReplInterfaceJar(it)).toArray(URL[]::new);
        // repl-interface sits above the compiler/JPS jars in the hierarchy
        // (see the javadoc diagram above).
        URLClassLoader replLoader = new URLClassLoader(replInterfaceUrls, NailgunRunner.class.getClassLoader());
        return new URLClassLoader(otherUrls, replLoader);
    }
    private static boolean isReplInterfaceJar(URL url) {
        String urlString = url.toString();
        return urlString.contains("repl-interface.jar");
    }
    private static NGServer createServer(InetAddress address, int port, String id, Path scalaCompileServerSystemDir, URLClassLoader classLoader)
            throws Exception {
        final String nailgunDefault = Integer.toString(NGServer.DEFAULT_SESSIONPOOLSIZE);
        // NOTE(review): a malformed property value makes Integer.parseInt throw
        // NumberFormatException and aborts server startup -- confirm intended.
        final int sessionPoolSize = Integer.parseInt(System.getProperty(JPS_COMPILATION_MAX_THREADS_KEY, nailgunDefault));
        NGServer server = new NGServer(
                address,
                port,
                sessionPoolSize,
                NGConstants.HEARTBEAT_TIMEOUT_MILLIS
        );
        server.setAllowNailsByClassName(false);
        Class<?> mainNailClass = Utils.loadAndSetupServerMainNailClass(classLoader, scalaCompileServerSystemDir);
        Utils.setupServerShutdownTimer(mainNailClass, server);
        // All compile-server commands are served by the same main nail class.
        for (String command : COMMANDS) {
            server.getAliasManager().addAlias(new Alias(command, SERVER_DESCRIPTION, mainNailClass));
        }
        // TODO: token should be checked
        Class<?> stopClass = classLoader.loadClass(STOP_CLASS_NAME);
        String stopAlias = STOP_ALIAS_START + id;
        server.getAliasManager().addAlias(new Alias(stopAlias, "", stopClass));
        return server;
    }
    /** Deletes the auth token and waits briefly for the server to shut down cleanly. */
    private static class ShutdownHook extends Thread {
        private static final int WAIT_FOR_SERVER_TERMINATION_TIMEOUT_MS = 3000;
        private final NGServer myServer;
        private final Path scalaCompileServerSystemDir;
        ShutdownHook(NGServer server, Path scalaCompileServerSystemDir) {
            myServer = server;
            this.scalaCompileServerSystemDir = scalaCompileServerSystemDir;
        }
        @Override
        public void run() {
            TokensGenerator.deleteTokenFor(scalaCompileServerSystemDir, myServer.getPort());
            myServer.shutdown();
            long waitStart = System.currentTimeMillis();
            while (System.currentTimeMillis() - waitStart < WAIT_FOR_SERVER_TERMINATION_TIMEOUT_MS) {
                if (!myServer.isRunning())
                    break;
                try {
                    //noinspection BusyWait
                    Thread.sleep(100);
                } catch (InterruptedException e) {
                    // do nothing
                }
            }
            // copied from com.facebook.nailgun.NGServer.NGServerShutdowner
            if (myServer.isRunning()) {
                System.err.println("Unable to cleanly shutdown server. Exiting JVM Anyway.");
            } else {
                System.out.println("NGServer shut down.");
            }
            System.err.flush();
            System.out.flush();
        }
    }
    /** Creates/deletes the per-port authentication token files under the system dir. */
    private static class TokensGenerator {
        static void generateAndWriteTokenFor(Path scalaCompileServerSystemDir, int port) throws IOException {
            Path path = tokenPathFor(scalaCompileServerSystemDir, port);
            writeTokenTo(path, UUID.randomUUID());
        }
        /** duplicated in {@link org.jetbrains.plugins.scala.server.CompileServerToken} */
        static Path tokenPathFor(Path scalaCompileServerSystemDir, int port) {
            return scalaCompileServerSystemDir.resolve("tokens").resolve(Integer.toString(port));
        }
        @SuppressWarnings("ResultOfMethodCallIgnored")
        static void writeTokenTo(Path path, UUID uuid) throws IOException {
            File directory = path.getParent().toFile();
            if (!directory.exists()) {
                if (!directory.mkdirs()) {
                    throw new IOException("Cannot create directory: " + directory);
                }
            }
            // Restrict the token file to owner read/write only.
            final boolean isPosix = path.getFileSystem().supportedFileAttributeViews().contains("posix");
            if (isPosix) {
                Files.createFile(path, PosixFilePermissions.asFileAttribute(
                        new HashSet<>(asList(PosixFilePermission.OWNER_READ, PosixFilePermission.OWNER_WRITE))
                ));
            } else {
                // Windows
                final File file = path.toFile();
                file.createNewFile();
                file.setExecutable(false);
                file.setReadable(/* readable */ true, /* ownerOnly */ true);
                file.setWritable(/* writable */ true, /* ownerOnly */ true);
            }
            Files.write(path, uuid.toString().getBytes(StandardCharsets.UTF_8));
        }
        public static void deleteTokenFor(Path scalaCompileServerSystemDir, int port) {
            File tokenFile = tokenPathFor(scalaCompileServerSystemDir, port).toFile();
            if (!tokenFile.delete()) {
                // Fall back to JVM-exit deletion if the file is still in use.
                tokenFile.deleteOnExit();
            }
        }
    }
}
|
scala/nailgun/src/org/jetbrains/plugins/scala/nailgun/NailgunRunner.java
|
package org.jetbrains.plugins.scala.nailgun;
import com.facebook.nailgun.Alias;
import com.facebook.nailgun.NGConstants;
import com.facebook.nailgun.NGServer;
import java.io.File;
import java.io.IOException;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.Arrays;
import java.util.HashSet;
import java.util.UUID;
import java.util.function.Function;
import java.util.stream.Stream;
import static java.util.Arrays.asList;
/**
* used from {@link org.jetbrains.plugins.scala.compiler.CompileServerLauncher}
*/
@SuppressWarnings("JavadocReference")
public class NailgunRunner {
    /** NOTE: set of commands should be equal to the commands from {@link org.jetbrains.jps.incremental.scala.remote.CommandIds} */
    private static final String[] COMMANDS = {
            "compile",
            "compile-jps",
            "get-metrics",
            "start-metering",
            "end-metering"
    };
    private static final String SERVER_DESCRIPTION = "Scala compile server";
    private static final String STOP_ALIAS_START = "stop_";
    private static final String STOP_CLASS_NAME = "com.facebook.nailgun.builtins.NGStop";
    /**
     * An alternative to default nailgun main {@link com.facebook.nailgun.NGServer#main(java.lang.String[])}
     */
    public static void main(String[] args) throws Exception {
        if (args.length != 4)
            throw new IllegalArgumentException("Usage: NailgunRunner [port] [id] [classpath] [system-dir-path]");
        int port = Integer.parseInt(args[0]);
        String id = args[1];
        String classpath = args[2];
        Path scalaCompileServerSystemDir = Paths.get(args[3]);
        URLClassLoader classLoader = constructClassLoader(classpath);
        // Write the auth token for this port before the server starts listening.
        TokensGenerator.generateAndWriteTokenFor(scalaCompileServerSystemDir, port);
        // getByName(null) resolves to the loopback address.
        InetAddress address = InetAddress.getByName(null);
        NGServer server = createServer(address, port, id, scalaCompileServerSystemDir, classLoader);
        Thread thread = new Thread(server);
        thread.setName("NGServer(" + address.toString() + ", " + port + "," + id + ")");
        thread.setContextClassLoader(classLoader);
        thread.start();
        Runtime.getRuntime().addShutdownHook(new ShutdownHook(server, scalaCompileServerSystemDir));
    }
    /**
     * Extra class loader is required due to several reasons:
     * <p>
     * 1. Dotty compiler interfaces (used inside Main during compilation)
     * casts classloader to a URLClassloader, and in JRE 11 AppClassLoader is not an instance of URLClassloader.<br>
     * <p>
     * 2. In order to run REPL instances (aka iLoopWrapper) (for Worksheet in REPL mode) in compiler server process.
     * REPL instances can use arbitrary scala versions in runtime. However, Compile Server uses fixed scala version.
     * In order to interact with REPL in compile server `repl-interface` is used. It's written in java to avoid any
     * scala binary incompatibility errors at runtime.
     * <p>
     * Final classloader hierarchy looks like this
     * <pre>
     *                            [PlatformClassLoader]
     *                                     |
     *                            [AppClassLoader] (pure java)
     *                                     |
     *                            [repl-interface] (pure java)
     *                            /        |      \
     *                           /         |   [JPS & Compiler Server jars] (scala version A)
     *                          /          |
     *                         /   [REPL instance 1] (scala version B)
     *                        /
     *               [REPL instance 2] (scala version C)
     * </pre>
     * Where:<br>
     * [PlatformClassLoader] - contains all jvm classes<br>
     * [AppClassLoader] - contains jars required to run Nailgun itself (scala-nailgun-runner, nailgun.jar)<br>
     * [repl-interface] - contains jars through which repl instances interact with main CompileServer<br>
     * [JPS & Compiler Server jars] - contains main compiler server jars with fixed scala version (2.13.2 at this moment)<br>
     * [REPL instance N] - classloaders created for each REPL instance with arbitrary scala version
     *
     * @see org.jetbrains.jps.incremental.scala.local.worksheet.ILoopWrapperFactoryHandler#createClassLoader(org.jetbrains.plugins.scala.compiler.data.CompilerJars)
     */
    public static URLClassLoader constructClassLoader(String classpath) {
        Function<String, URL> pathToUrl = path -> {
            try {
                return new File(path).toURI().toURL();
            } catch (MalformedURLException e) {
                throw new RuntimeException(e);
            }
        };
        URL[] urls = Stream.of(classpath.split(File.pathSeparator))
                .map(pathToUrl)
                .toArray(URL[]::new);
        //noinspection Convert2MethodRef
        URL[] replInterfaceUrls = Arrays.stream(urls).filter(it -> isReplInterfaceJar(it)).toArray(URL[]::new);
        if (replInterfaceUrls.length == 0) {
            throw new IllegalStateException("repl interface jar not found");
        }
        URL[] otherUrls = Arrays.stream(urls).filter(it -> !isReplInterfaceJar(it)).toArray(URL[]::new);
        // repl-interface sits above the compiler/JPS jars in the hierarchy
        // (see the javadoc diagram above).
        URLClassLoader replLoader = new URLClassLoader(replInterfaceUrls, NailgunRunner.class.getClassLoader());
        return new URLClassLoader(otherUrls, replLoader);
    }
    private static boolean isReplInterfaceJar(URL url) {
        String urlString = url.toString();
        return urlString.contains("repl-interface.jar");
    }
    private static NGServer createServer(InetAddress address, int port, String id, Path scalaCompileServerSystemDir, URLClassLoader classLoader)
            throws Exception {
        NGServer server = new NGServer(
                address,
                port,
                // explicitly pass default argument values to remind their existence
                NGServer.DEFAULT_SESSIONPOOLSIZE,
                NGConstants.HEARTBEAT_TIMEOUT_MILLIS
        );
        server.setAllowNailsByClassName(false);
        Class<?> mainNailClass = Utils.loadAndSetupServerMainNailClass(classLoader, scalaCompileServerSystemDir);
        Utils.setupServerShutdownTimer(mainNailClass, server);
        // All compile-server commands are served by the same main nail class.
        for (String command : COMMANDS) {
            server.getAliasManager().addAlias(new Alias(command, SERVER_DESCRIPTION, mainNailClass));
        }
        // TODO: token should be checked
        Class<?> stopClass = classLoader.loadClass(STOP_CLASS_NAME);
        String stopAlias = STOP_ALIAS_START + id;
        server.getAliasManager().addAlias(new Alias(stopAlias, "", stopClass));
        return server;
    }
    /** Deletes the auth token and waits briefly for the server to shut down cleanly. */
    private static class ShutdownHook extends Thread {
        private static final int WAIT_FOR_SERVER_TERMINATION_TIMEOUT_MS = 3000;
        private final NGServer myServer;
        private final Path scalaCompileServerSystemDir;
        ShutdownHook(NGServer server, Path scalaCompileServerSystemDir) {
            myServer = server;
            this.scalaCompileServerSystemDir = scalaCompileServerSystemDir;
        }
        @Override
        public void run() {
            TokensGenerator.deleteTokenFor(scalaCompileServerSystemDir, myServer.getPort());
            myServer.shutdown();
            long waitStart = System.currentTimeMillis();
            while (System.currentTimeMillis() - waitStart < WAIT_FOR_SERVER_TERMINATION_TIMEOUT_MS) {
                if (!myServer.isRunning())
                    break;
                try {
                    //noinspection BusyWait
                    Thread.sleep(100);
                } catch (InterruptedException e) {
                    // do nothing
                }
            }
            // copied from com.facebook.nailgun.NGServer.NGServerShutdowner
            if (myServer.isRunning()) {
                System.err.println("Unable to cleanly shutdown server. Exiting JVM Anyway.");
            } else {
                System.out.println("NGServer shut down.");
            }
            System.err.flush();
            System.out.flush();
        }
    }
    /** Creates/deletes the per-port authentication token files under the system dir. */
    private static class TokensGenerator {
        static void generateAndWriteTokenFor(Path scalaCompileServerSystemDir, int port) throws IOException {
            Path path = tokenPathFor(scalaCompileServerSystemDir, port);
            writeTokenTo(path, UUID.randomUUID());
        }
        /** duplicated in {@link org.jetbrains.plugins.scala.server.CompileServerToken} */
        static Path tokenPathFor(Path scalaCompileServerSystemDir, int port) {
            return scalaCompileServerSystemDir.resolve("tokens").resolve(Integer.toString(port));
        }
        @SuppressWarnings("ResultOfMethodCallIgnored")
        static void writeTokenTo(Path path, UUID uuid) throws IOException {
            File directory = path.getParent().toFile();
            if (!directory.exists()) {
                if (!directory.mkdirs()) {
                    throw new IOException("Cannot create directory: " + directory);
                }
            }
            // Restrict the token file to owner read/write only.
            final boolean isPosix = path.getFileSystem().supportedFileAttributeViews().contains("posix");
            if (isPosix) {
                Files.createFile(path, PosixFilePermissions.asFileAttribute(
                        new HashSet<>(asList(PosixFilePermission.OWNER_READ, PosixFilePermission.OWNER_WRITE))
                ));
            } else {
                // Windows
                final File file = path.toFile();
                file.createNewFile();
                file.setExecutable(false);
                file.setReadable(/* readable */ true, /* ownerOnly */ true);
                file.setWritable(/* writable */ true, /* ownerOnly */ true);
            }
            Files.write(path, uuid.toString().getBytes(StandardCharsets.UTF_8));
        }
        public static void deleteTokenFor(Path scalaCompileServerSystemDir, int port) {
            File tokenFile = tokenPathFor(scalaCompileServerSystemDir, port).toFile();
            if (!tokenFile.delete()) {
                // Fall back to JVM-exit deletion if the file is still in use.
                tokenFile.deleteOnExit();
            }
        }
    }
}
|
Read the JPS server max threads configuration and use it to configure the nailgun max session pool size #SCL-20516
|
scala/nailgun/src/org/jetbrains/plugins/scala/nailgun/NailgunRunner.java
|
Read the JPS server max threads configuration and use it to configure the nailgun max session pool size #SCL-20516
|
|
Java
|
apache-2.0
|
b02b88ea3296a893c2c557dfaade7788019ac0f4
| 0
|
LiuShengchieh/hengweather
|
package com.hengweather.android.service;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.IBinder;
import android.os.SystemClock;
import android.preference.PreferenceManager;
import com.hengweather.android.gson.Weather;
import com.hengweather.android.util.HttpUtil;
import com.hengweather.android.util.StaticClass;
import com.hengweather.android.util.Utility;
import java.io.IOException;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Response;
public class AutoUpdateService extends Service {
    @Override
    public IBinder onBind(Intent intent) {
        // This service is started, never bound, so no binder is provided.
        // TODO: Return the communication channel to the service.
        //throw new UnsupportedOperationException("Not yet implemented");
        return null;
    }
    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        updateWeather();
        AlarmManager manager = (AlarmManager) getSystemService(ALARM_SERVICE);
        // One hour, in milliseconds.
        int anHour = 60 * 60 * 1000;
        long triggerAtTime = SystemClock.elapsedRealtime() + anHour;
        Intent i = new Intent(this, AutoUpdateService.class);
        PendingIntent pi = PendingIntent.getService(this, 0, i, 0);
        // Cancel the previously scheduled alarm before scheduling the next run.
        manager.cancel(pi);
        manager.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, triggerAtTime, pi);
        return super.onStartCommand(intent, flags, startId);
    }
    /*
     * Refreshes the cached weather data: re-fetches the weather for the
     * cached city id and overwrites the "weather" preference on success.
     * Does nothing when no weather data has been cached yet.
     */
    private void updateWeather() {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
        String weatherString = prefs.getString("weather", null);
        if (weatherString != null) {
            // Parse the cached weather data directly to recover the city id.
            Weather weather = Utility.handleWeatherResponse(weatherString);
            final String weatherId = weather.basic.weatherId;
            String weatherUrl = "https://free-api.heweather.com/v5/weather?city=" +
                    weatherId + "&key=" + StaticClass.HE_WEATHER_KEY;
            HttpUtil.sendOkHttpRequest(weatherUrl, new Callback() {
                @Override
                public void onFailure(Call call, IOException e) {
                    e.printStackTrace();
                }
                @Override
                public void onResponse(Call call, Response response) throws IOException {
                    String responseText = response.body().string();
                    Weather weather = Utility.handleWeatherResponse(responseText);
                    // Only cache well-formed responses with status "ok".
                    if (weather != null && "ok".equals(weather.status)) {
                        SharedPreferences.Editor editor = PreferenceManager.getDefaultSharedPreferences(
                                AutoUpdateService.this).edit();
                        editor.putString("weather", responseText);
                        editor.apply();
                    }
                }
            });
        }
    }
}
|
app/src/main/java/com/hengweather/android/service/AutoUpdateService.java
|
package com.hengweather.android.service;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.IBinder;
import android.os.SystemClock;
import android.preference.PreferenceManager;
import com.hengweather.android.gson.Weather;
import com.hengweather.android.util.HttpUtil;
import com.hengweather.android.util.StaticClass;
import com.hengweather.android.util.Utility;
import java.io.IOException;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Response;
public class AutoUpdateService extends Service {
    @Override
    public IBinder onBind(Intent intent) {
        // This service is started, never bound, so no binder is provided.
        // TODO: Return the communication channel to the service.
        //throw new UnsupportedOperationException("Not yet implemented");
        return null;
    }
    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        updateWeather();
        AlarmManager manager = (AlarmManager) getSystemService(ALARM_SERVICE);
        // One hour, in milliseconds.
        int anHour = 60 * 60 * 1000;
        long triggerAtTime = SystemClock.elapsedRealtime() + anHour;
        Intent i = new Intent(this, AutoUpdateService.class);
        PendingIntent pi = PendingIntent.getService(this, 0, i, 0);
        // NOTE(review): alarm cancellation is commented out here; without it,
        // each restart may stack alarms -- presumably fixed in a later revision.
        //manager.cancel(pi);
        manager.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, triggerAtTime, pi);
        return super.onStartCommand(intent, flags, startId);
    }
    /*
     * Refreshes the cached weather data: re-fetches the weather for the
     * cached city id and overwrites the "weather" preference on success.
     * Does nothing when no weather data has been cached yet.
     */
    private void updateWeather() {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
        String weatherString = prefs.getString("weather", null);
        if (weatherString != null) {
            // Parse the cached weather data directly to recover the city id.
            Weather weather = Utility.handleWeatherResponse(weatherString);
            final String weatherId = weather.basic.weatherId;
            String weatherUrl = "https://free-api.heweather.com/v5/weather?city=" +
                    weatherId + "&key=" + StaticClass.HE_WEATHER_KEY;
            HttpUtil.sendOkHttpRequest(weatherUrl, new Callback() {
                @Override
                public void onFailure(Call call, IOException e) {
                    e.printStackTrace();
                }
                @Override
                public void onResponse(Call call, Response response) throws IOException {
                    String responseText = response.body().string();
                    Weather weather = Utility.handleWeatherResponse(responseText);
                    // Only cache well-formed responses with status "ok".
                    if (weather != null && "ok".equals(weather.status)) {
                        SharedPreferences.Editor editor = PreferenceManager.getDefaultSharedPreferences(
                                AutoUpdateService.this).edit();
                        editor.putString("weather", responseText);
                        editor.apply();
                    }
                }
            });
        }
    }
}
|
取消定时器
|
app/src/main/java/com/hengweather/android/service/AutoUpdateService.java
|
取消定时器
|
|
Java
|
apache-2.0
|
d9d029a25fa8cecf618b50ab059875989aeac601
| 0
|
Vitaliy-Yakovchuk/foreign-reader-libs
|
package com.reader.mapdb;
import java.io.File;
import java.util.Arrays;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentNavigableMap;
import org.mapdb.DB;
import org.mapdb.DBMaker;
import com.reader.common.AbstractDatabase;
import com.reader.common.ColorConstants;
import com.reader.common.Word;
import com.reader.common.persist.WordAttributes;
public class MapDBDatabase extends AbstractDatabase {
private final File file;
private DB db;
private ConcurrentNavigableMap<String, byte[]> wordsMap;
private ConcurrentNavigableMap<String, Date> wordsDated;
private ConcurrentNavigableMap<String, Date> wordsKnowDated;
public MapDBDatabase(File dbFile) {
this.file = dbFile;
db = DBMaker.newFileDB(dbFile).make();
wordsMap = db.getTreeMap("words");
wordsDated = db.getTreeMap("dates");
wordsKnowDated = db.getTreeMap("know_dates");
}
@Override
public void putA(String word, WordAttributes wordAttributes) {
wordsMap.put(word, bytes(wordAttributes));
wordsDated.put(word, new Date());
if (wordAttributes.getColor().equals(ColorConstants.WHITE)) {
WordAttributes wa = getA(word);
if (wa != null && wa.getColor().equals(ColorConstants.BLUE))
wordsKnowDated.put(word, new Date());
}
}
@Override
public WordAttributes getA(String word) {
return fromBytes(wordsMap.get(word));
}
@Override
public void removeA(String word) {
db.delete(word);
}
public void closeAndRemove() {
db.close();
file.delete();
}
@Override
public void commit() {
db.commit();
}
@Override
public void rollback() {
db.rollback();
}
@Override
public List<Word> loadWords(String color) {
List<Word> l = new LinkedList<Word>();
WordAttributes attr = new WordAttributes();
attr.setColor(color);
byte[] bs = bytes(attr);
for (Entry<String, byte[]> entry : wordsMap.entrySet()) {
byte[] bs2 = entry.getValue();
bs[bs.length - 1] = bs2[bs.length - 1];
bs[bs.length - 2] = bs2[bs.length - 2];
if (Arrays.equals(bs, bs2)) {
Word word = new Word();
String text = entry.getKey();
word.setText(text);
word.setColor(color);
word.setDate(wordsDated.get(text));
l.add(word);
}
}
return l;
}
@Override
public Word toWord(String word) {
Word word2 = new Word();
WordAttributes attributes = get(word);
word2.setColor(attributes.getColor());
word2.setText(word);
word2.setDate(wordsDated.get(word));
return word2;
}
public DB getDB() {
return db;
}
}
|
MapDB/src/com/reader/mapdb/MapDBDatabase.java
|
package com.reader.mapdb;
import java.io.File;
import java.util.Arrays;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentNavigableMap;
import org.mapdb.DB;
import org.mapdb.DBMaker;
import com.reader.common.AbstractDatabase;
import com.reader.common.ColorConstants;
import com.reader.common.Word;
import com.reader.common.persist.WordAttributes;
public class MapDBDatabase extends AbstractDatabase {
private final File file;
private DB db;
private ConcurrentNavigableMap<String, byte[]> wordsMap;
private ConcurrentNavigableMap<String, Date> wordsDated;
public MapDBDatabase(File dbFile) {
this.file = dbFile;
db = DBMaker.newFileDB(dbFile).make();
wordsMap = db.getTreeMap("words");
wordsDated = db.getTreeMap("dates");
}
@Override
public void putA(String word, WordAttributes wordAttributes) {
wordsMap.put(word, bytes(wordAttributes));
if (wordAttributes.getColor().equals(ColorConstants.WHITE)) {
WordAttributes wa = getA(word);
if (wa != null && wa.getColor().equals(ColorConstants.BLUE))
wordsDated.put(word, new Date());
}
}
@Override
public WordAttributes getA(String word) {
return fromBytes(wordsMap.get(word));
}
@Override
public void removeA(String word) {
db.delete(word);
}
public void closeAndRemove() {
db.close();
file.delete();
}
@Override
public void commit() {
db.commit();
}
@Override
public void rollback() {
db.rollback();
}
@Override
public List<Word> loadWords(String color) {
List<Word> l = new LinkedList<Word>();
WordAttributes attr = new WordAttributes();
attr.setColor(color);
byte[] bs = bytes(attr);
for (Entry<String, byte[]> entry : wordsMap.entrySet()) {
byte[] bs2 = entry.getValue();
bs[bs.length - 1] = bs2[bs.length - 1];
bs[bs.length - 2] = bs2[bs.length - 2];
if (Arrays.equals(bs, bs2)) {
Word word = new Word();
String text = entry.getKey();
word.setText(text);
word.setColor(color);
word.setDate(wordsDated.get(text));
l.add(word);
}
}
return l;
}
@Override
public Word toWord(String word) {
Word word2 = new Word();
WordAttributes attributes = get(word);
word2.setColor(attributes.getColor());
word2.setText(word);
word2.setDate(wordsDated.get(word));
return word2;
}
public DB getDB() {
return db;
}
}
|
Small fix
|
MapDB/src/com/reader/mapdb/MapDBDatabase.java
|
Small fix
|
|
Java
|
apache-2.0
|
5770b82053bdac661981870a10ff4c682d315d11
| 0
|
jwcarman/Wicketopia,jwcarman/Wicketopia
|
/*
* Copyright (c) 2011 Carman Consulting, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wicketopia.joda.util.format;
import org.apache.wicket.Session;
import org.apache.wicket.core.request.ClientInfo;
import org.apache.wicket.protocol.http.request.WebClientInfo;
import org.apache.wicket.util.convert.ConversionException;
import org.apache.wicket.util.io.IClusterable;
import org.apache.wicket.util.string.Strings;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.MutableDateTime;
import org.joda.time.format.DateTimeFormatter;
import org.wicketopia.joda.util.translator.DateTimeTranslator;
import java.text.ParseException;
import java.util.Locale;
import java.util.TimeZone;
public class JodaFormatSupport<T> implements IClusterable
{
//----------------------------------------------------------------------------------------------------------------------
// Fields
//----------------------------------------------------------------------------------------------------------------------
public static final int DEFAULT_PIVOT_YEAR = 2000;
private final DateTimeTranslator<T> translator;
private final FormatProvider formatProvider;
private boolean applyTimeZoneDifference = true;
private int pivotYear = DEFAULT_PIVOT_YEAR;
//----------------------------------------------------------------------------------------------------------------------
// Constructors
//----------------------------------------------------------------------------------------------------------------------
public JodaFormatSupport(DateTimeTranslator<T> translator, String defaultStyle)
{
this(translator, new StyleFormatProvider(defaultStyle));
}
public JodaFormatSupport(DateTimeTranslator<T> translator, FormatProvider formatProvider)
{
this.translator = translator;
this.formatProvider = formatProvider;
}
public JodaFormatSupport(DateTimeTranslator<T> translator, FormatProvider formatProvider, boolean applyTimeZoneDifference, int pivotYear)
{
this.translator = translator;
this.formatProvider = formatProvider;
this.applyTimeZoneDifference = applyTimeZoneDifference;
this.pivotYear = pivotYear;
}
//----------------------------------------------------------------------------------------------------------------------
// Getter/Setter Methods
//----------------------------------------------------------------------------------------------------------------------
public int getPivotYear()
{
return pivotYear;
}
public void setPivotYear(int pivotYear)
{
this.pivotYear = pivotYear;
}
public boolean isApplyTimeZoneDifference()
{
return applyTimeZoneDifference;
}
public void setApplyTimeZoneDifference(boolean applyTimeZoneDifference)
{
this.applyTimeZoneDifference = applyTimeZoneDifference;
}
//----------------------------------------------------------------------------------------------------------------------
// Other Methods
//----------------------------------------------------------------------------------------------------------------------
public T convertToObject(String value, Locale locale)
{
if (Strings.isEmpty(value))
{
return null;
}
DateTimeFormatter format = formatProvider.getFormatter();
if (format == null)
{
throw new IllegalStateException("format must be not null");
}
format = format.withLocale(locale).withPivotYear(pivotYear);
if (applyTimeZoneDifference)
{
TimeZone zone = getClientTimeZone();
// instantiate now/ current time
MutableDateTime dt = new MutableDateTime();
if (zone != null)
{
// set time zone for client
format = format.withZone(DateTimeZone.forTimeZone(zone));
dt.setZone(DateTimeZone.forTimeZone(zone));
}
try
{
// parse date retaining the time of the submission
int result = format.parseInto(dt, value, 0);
if (result < 0)
{
throw new ConversionException(new ParseException("unable to parse date " +
value, ~result));
}
}
catch (RuntimeException e)
{
throw new ConversionException(e);
}
// apply the server time zone to the parsed value
dt.setZone(getServerTimeZone());
return translator.fromDateTime(dt.toDateTime());
}
else
{
try
{
DateTime date = format.parseDateTime(value);
return date == null ? null : translator.fromDateTime(date);
}
catch (RuntimeException e)
{
throw new ConversionException(e);
}
}
}
private static TimeZone getClientTimeZone()
{
ClientInfo info = Session.get().getClientInfo();
if (info instanceof WebClientInfo)
{
return ((WebClientInfo) info).getProperties().getTimeZone();
}
return null;
}
/**
* Gets the server time zone. Override this method if you want to fix to a certain time zone,
* regardless of what actual time zone the server is in.
*
* @return The server time zone
*/
private static DateTimeZone getServerTimeZone()
{
return DateTimeZone.getDefault();
}
@SuppressWarnings("unchecked")
public String convertToString(T object, Locale locale)
{
if(object == null)
{
return "";
}
DateTime dt = translator.toDateTime((T) object);
DateTimeFormatter format = formatProvider.getFormatter();
format = format.withPivotYear(pivotYear).withLocale(locale);
if (applyTimeZoneDifference)
{
TimeZone zone = getClientTimeZone();
if (zone != null)
{
format = format.withZone(DateTimeZone.forTimeZone(zone));
}
}
return format.print(dt);
}
public JodaFormatSupport<T> withProvider(FormatProvider formatProvider)
{
return new JodaFormatSupport<T>(translator, formatProvider);
}
}
|
joda/src/main/java/org/wicketopia/joda/util/format/JodaFormatSupport.java
|
/*
* Copyright (c) 2011 Carman Consulting, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wicketopia.joda.util.format;
import org.apache.wicket.Session;
import org.apache.wicket.core.request.ClientInfo;
import org.apache.wicket.protocol.http.request.WebClientInfo;
import org.apache.wicket.util.convert.ConversionException;
import org.apache.wicket.util.io.IClusterable;
import org.apache.wicket.util.string.Strings;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.MutableDateTime;
import org.joda.time.format.DateTimeFormatter;
import org.wicketopia.joda.util.translator.DateTimeTranslator;
import java.text.ParseException;
import java.util.Locale;
import java.util.TimeZone;
public class JodaFormatSupport<T> implements IClusterable
{
//----------------------------------------------------------------------------------------------------------------------
// Fields
//----------------------------------------------------------------------------------------------------------------------
public static final int DEFAULT_PIVOT_YEAR = 2000;
private final DateTimeTranslator<T> translator;
private final FormatProvider formatProvider;
private boolean applyTimeZoneDifference = true;
private int pivotYear = DEFAULT_PIVOT_YEAR;
//----------------------------------------------------------------------------------------------------------------------
// Constructors
//----------------------------------------------------------------------------------------------------------------------
public JodaFormatSupport(DateTimeTranslator<T> translator, String defaultStyle)
{
this(translator, new StyleFormatProvider(defaultStyle));
}
public JodaFormatSupport(DateTimeTranslator<T> translator, FormatProvider formatProvider)
{
this.translator = translator;
this.formatProvider = formatProvider;
}
public JodaFormatSupport(DateTimeTranslator<T> translator, FormatProvider formatProvider, boolean applyTimeZoneDifference, int pivotYear)
{
this.translator = translator;
this.formatProvider = formatProvider;
this.applyTimeZoneDifference = applyTimeZoneDifference;
this.pivotYear = pivotYear;
}
//----------------------------------------------------------------------------------------------------------------------
// Getter/Setter Methods
//----------------------------------------------------------------------------------------------------------------------
public int getPivotYear()
{
return pivotYear;
}
public void setPivotYear(int pivotYear)
{
this.pivotYear = pivotYear;
}
public boolean isApplyTimeZoneDifference()
{
return applyTimeZoneDifference;
}
public void setApplyTimeZoneDifference(boolean applyTimeZoneDifference)
{
this.applyTimeZoneDifference = applyTimeZoneDifference;
}
//----------------------------------------------------------------------------------------------------------------------
// Other Methods
//----------------------------------------------------------------------------------------------------------------------
public T convertToObject(String value, Locale locale)
{
if (Strings.isEmpty(value))
{
return null;
}
DateTimeFormatter format = formatProvider.getFormatter();
if (format == null)
{
throw new IllegalStateException("format must be not null");
}
format = format.withLocale(locale).withPivotYear(pivotYear);
if (applyTimeZoneDifference)
{
TimeZone zone = getClientTimeZone();
// instantiate now/ current time
MutableDateTime dt = new MutableDateTime();
if (zone != null)
{
// set time zone for client
format = format.withZone(DateTimeZone.forTimeZone(zone));
dt.setZone(DateTimeZone.forTimeZone(zone));
}
try
{
// parse date retaining the time of the submission
int result = format.parseInto(dt, value, 0);
if (result < 0)
{
throw new ConversionException(new ParseException("unable to parse date " +
value, ~result));
}
}
catch (RuntimeException e)
{
throw new ConversionException(e);
}
// apply the server time zone to the parsed value
dt.setZone(getServerTimeZone());
return translator.fromDateTime(dt.toDateTime());
}
else
{
try
{
DateTime date = format.parseDateTime(value);
return date == null ? null : translator.fromDateTime(date);
}
catch (RuntimeException e)
{
throw new ConversionException(e);
}
}
}
private static TimeZone getClientTimeZone()
{
ClientInfo info = Session.get().getClientInfo();
if (info instanceof WebClientInfo)
{
return ((WebClientInfo) info).getProperties().getTimeZone();
}
return null;
}
/**
* Gets the server time zone. Override this method if you want to fix to a certain time zone,
* regardless of what actual time zone the server is in.
*
* @return The server time zone
*/
private static DateTimeZone getServerTimeZone()
{
return DateTimeZone.getDefault();
}
@SuppressWarnings("unchecked")
public String convertToString(T object, Locale locale)
{
if(object == null)
{
return "";
}
DateTime dt = translator.toDateTime((T) object);
DateTimeFormatter format = formatProvider.getFormatter();
format.withPivotYear(pivotYear).withLocale(locale);
if (applyTimeZoneDifference)
{
TimeZone zone = getClientTimeZone();
if (zone != null)
{
format = format.withZone(DateTimeZone.forTimeZone(zone));
}
}
return format.print(dt);
}
public JodaFormatSupport<T> withProvider(FormatProvider formatProvider)
{
return new JodaFormatSupport<T>(translator, formatProvider);
}
}
|
Fixing unused return value.
|
joda/src/main/java/org/wicketopia/joda/util/format/JodaFormatSupport.java
|
Fixing unused return value.
|
|
Java
|
apache-2.0
|
79c941284095cb8db7dcba90f71dd78b72cac7f3
| 0
|
apache/geronimo,vibe13/geronimo,vibe13/geronimo,apache/geronimo,vibe13/geronimo,apache/geronimo,vibe13/geronimo,meetdestiny/geronimo-trader,meetdestiny/geronimo-trader,meetdestiny/geronimo-trader,apache/geronimo
|
/* ====================================================================
* The Apache Software License, Version 1.1
*
* Copyright (c) 2003 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution,
* if any, must include the following acknowledgment:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgment may appear in the software itself,
* if and wherever such third-party acknowledgments normally appear.
*
* 4. The names "Apache" and "Apache Software Foundation" and
* "Apache Geronimo" must not be used to endorse or promote products
* derived from this software without prior written permission. For
* written permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache",
* "Apache Geronimo", nor may "Apache" appear in their name, without
* prior written permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
* ====================================================================
*/
package org.apache.geronimo.ejb;
import java.security.Identity;
import java.security.Principal;
import java.util.Properties;
import javax.ejb.EJBException;
import javax.ejb.EJBHome;
import javax.ejb.EJBLocalHome;
import javax.ejb.EJBLocalObject;
import javax.ejb.EJBObject;
import javax.ejb.SessionContext;
import javax.ejb.TimerService;
import javax.transaction.Status;
import javax.transaction.SystemException;
import javax.transaction.TransactionManager;
import javax.transaction.UserTransaction;
import javax.xml.rpc.handler.MessageContext;
import org.apache.geronimo.core.service.RPCContainer;
import org.apache.geronimo.ejb.container.EJBPlugins;
import org.apache.geronimo.ejb.context.GeronimoUserTransaction;
import org.apache.geronimo.ejb.metadata.EJBMetadata;
import org.apache.geronimo.security.util.ContextManager;
/**
*
*
*
* @version $Revision: 1.8 $ $Date: 2003/11/12 04:35:39 $
*/
public class GeronimoSessionContext implements SessionContext {
private final RPCContainer container;
private final UserTransaction userTransaction;
private final TransactionManager transactionManager;
// private String state;
public GeronimoSessionContext(RPCContainer container) {
this.container = container;
EJBMetadata ejbMetadata = EJBPlugins.getEJBMetadata(container);
transactionManager = EJBPlugins.getTransactionManager(container);
if (ejbMetadata.getTransactionDemarcation().isBean()) {
this.userTransaction = new GeronimoUserTransaction(transactionManager);
} else {
this.userTransaction = null;
}
}
public EJBHome getEJBHome() throws IllegalStateException {
EJBMetadata ejbMetadata = EJBPlugins.getEJBMetadata(container);
if (ejbMetadata.getHomeInterface() == null) {
throw new IllegalStateException("getEJBHome is not allowed for a bean without a (remote) home interface");
}
return (EJBHome) EJBPlugins.getEJBProxyFactoryManager(container).getThreadEJBProxyFactory().getEJBHome();
}
public EJBLocalHome getEJBLocalHome() throws IllegalStateException {
EJBMetadata ejbMetadata = EJBPlugins.getEJBMetadata(container);
if (ejbMetadata.getLocalHomeInterface() == null) {
throw new IllegalStateException("getEJBLocalHome is not allowed for a bean without a local home interface");
}
return (EJBLocalHome) EJBPlugins.getEJBProxyFactoryManager(container).getEJBProxyFactory("local").getEJBHome();
}
public EJBObject getEJBObject() throws IllegalStateException {
EJBMetadata ejbMetadata = EJBPlugins.getEJBMetadata(container);
if (ejbMetadata.getRemoteInterface() == null) {
throw new IllegalStateException("getEJBObject is not allowed for a bean without a remote interface");
}
// if (state.equals("not-exits")) {
// throw new IllegalStateException("getEJBObject is not allowed until the bean has identity");
// }
return (EJBObject) EJBPlugins.getEJBProxyFactoryManager(container).getThreadEJBProxyFactory().getEJBObject();
}
public EJBLocalObject getEJBLocalObject() throws IllegalStateException {
EJBMetadata ejbMetadata = EJBPlugins.getEJBMetadata(container);
if (ejbMetadata.getLocalHomeInterface() == null) {
throw new IllegalStateException("getEJBLocalObject is not allowed for a bean without a local interface");
}
// if (state.equals("not-exits")) {
// throw new IllegalStateException("getEJBLocalObject is not allowed until the bean has identity");
// }
return (EJBLocalObject) EJBPlugins.getEJBProxyFactoryManager(container).getEJBProxyFactory("local").getEJBObject();
}
public Principal getCallerPrincipal() {
return ContextManager.getCallerPrincipal();
}
public boolean isCallerInRole(String roleName) {
EJBMetadata ejbMetadata = EJBPlugins.getEJBMetadata(container);
return ContextManager.isCallerInRole(ejbMetadata.getName(), roleName);
}
public UserTransaction getUserTransaction() throws IllegalStateException {
if (userTransaction == null) {
throw new IllegalStateException("getUserTransaction is not allowed for bean with container-managed transaction demarcation.");
}
return userTransaction;
}
public boolean getRollbackOnly() throws IllegalStateException {
if (userTransaction != null) {
throw new IllegalStateException("getRollbackOnly is not allowed for beans with bean-managed transaction demarcation.");
}
// if (!state.equals("method-ready")) {
// throw new IllegalStateException("getRollbackOnly is only allowed in the method ready state");
// }
try {
int status = transactionManager.getStatus();
if (status == Status.STATUS_NO_TRANSACTION) {
throw new IllegalStateException("getRollbackOnly is only allowed during a transaction");
}
return status == Status.STATUS_MARKED_ROLLBACK;
} catch (SystemException e) {
throw new EJBException("Could not get transaction status", e);
}
}
public void setRollbackOnly() throws IllegalStateException {
if (userTransaction != null) {
throw new IllegalStateException("setRollbackOnly is not allowed for beans with bean-managed transaction demarcation.");
}
// if (!state.equals("method-ready")) {
// throw new IllegalStateException("setRollbackOnly is only allowed in the method ready state");
// }
try {
if (transactionManager.getStatus() == Status.STATUS_NO_TRANSACTION) {
throw new IllegalStateException("setRollbackOnly is only allowed during a transaction");
}
transactionManager.setRollbackOnly();
} catch (SystemException e) {
throw new EJBException("Could not get transaction status", e);
}
}
/**
* @deprecated Use JNDI instead
* @throws EJBException always
*/
public Properties getEnvironment() {
throw new EJBException("getEnvironment is no longer supported; use JNDI instead");
}
/**
* @deprecated Use getCallerPrincipal()
* @throws EJBException always
*/
public Identity getCallerIdentity() {
throw new EJBException("getCallerIdentity is no longer supported; use getCallerPrincipal instead");
}
/**
* @deprecated Use isCallerInRole(String roleName)
* @throws EJBException always
*/
public boolean isCallerInRole(Identity role) {
throw new EJBException("isCallerInRole(Identity role) is no longer supported; use isCallerInRole(String roleName) instead");
}
public TimerService getTimerService() throws IllegalStateException {
throw new UnsupportedOperationException("Not implemented yet");
}
public MessageContext getMessageContext() throws IllegalStateException {
throw new UnsupportedOperationException("Not implemented yet");
}
}
|
modules/core/src/java/org/apache/geronimo/ejb/GeronimoSessionContext.java
|
/* ====================================================================
* The Apache Software License, Version 1.1
*
* Copyright (c) 2003 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution,
* if any, must include the following acknowledgment:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgment may appear in the software itself,
* if and wherever such third-party acknowledgments normally appear.
*
* 4. The names "Apache" and "Apache Software Foundation" and
* "Apache Geronimo" must not be used to endorse or promote products
* derived from this software without prior written permission. For
* written permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache",
* "Apache Geronimo", nor may "Apache" appear in their name, without
* prior written permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
* ====================================================================
*/
package org.apache.geronimo.ejb;
import java.security.Identity;
import java.security.Principal;
import java.util.Properties;
import javax.ejb.EJBException;
import javax.ejb.EJBHome;
import javax.ejb.EJBLocalHome;
import javax.ejb.EJBLocalObject;
import javax.ejb.EJBObject;
import javax.ejb.SessionContext;
import javax.ejb.TimerService;
import javax.transaction.Status;
import javax.transaction.SystemException;
import javax.transaction.TransactionManager;
import javax.transaction.UserTransaction;
import javax.xml.rpc.handler.MessageContext;
import org.apache.geronimo.core.service.RPCContainer;
import org.apache.geronimo.ejb.container.EJBPlugins;
import org.apache.geronimo.ejb.context.GeronimoUserTransaction;
import org.apache.geronimo.ejb.metadata.EJBMetadata;
/**
*
*
*
* @version $Revision: 1.7 $ $Date: 2003/09/08 04:28:26 $
*/
/**
 * {@link SessionContext} implementation backed by a Geronimo {@link RPCContainer}.
 * <p>
 * Transaction-demarcation rules from the EJB specification are enforced here:
 * beans with bean-managed transactions get a {@link UserTransaction} and may not
 * call {@code getRollbackOnly}/{@code setRollbackOnly}; container-managed beans
 * get the reverse. Home/object accessors are only legal when the corresponding
 * interface is actually declared in the bean's metadata.
 * <p>
 * NOTE(review): security integration is not implemented yet —
 * {@code getCallerPrincipal()} always returns {@code null} and
 * {@code isCallerInRole(String)} always returns {@code false}.
 */
public class GeronimoSessionContext implements SessionContext {
    /** Container this context is bound to; used to look up metadata and proxy factories. */
    private final RPCContainer container;
    /** Non-null only for beans with bean-managed transaction demarcation. */
    private final UserTransaction userTransaction;
    private final TransactionManager transactionManager;
    // TODO: track bean lifecycle state ("not-exists", "method-ready", ...) so the
    // commented-out state checks below can be enforced per the EJB spec.

    /**
     * Creates a context for the given container, wiring a UserTransaction only
     * when the bean uses bean-managed transaction demarcation.
     *
     * @param container the RPC container hosting the session bean
     */
    public GeronimoSessionContext(RPCContainer container) {
        this.container = container;
        EJBMetadata ejbMetadata = EJBPlugins.getEJBMetadata(container);
        transactionManager = EJBPlugins.getTransactionManager(container);
        if (ejbMetadata.getTransactionDemarcation().isBean()) {
            this.userTransaction = new GeronimoUserTransaction(transactionManager);
        } else {
            this.userTransaction = null;
        }
    }

    /**
     * @return the bean's remote home proxy
     * @throws IllegalStateException if the bean declares no remote home interface
     */
    public EJBHome getEJBHome() throws IllegalStateException {
        EJBMetadata ejbMetadata = EJBPlugins.getEJBMetadata(container);
        if (ejbMetadata.getHomeInterface() == null) {
            throw new IllegalStateException("getEJBHome is not allowed for a bean without a (remote) home interface");
        }
        return (EJBHome) EJBPlugins.getEJBProxyFactoryManager(container).getThreadEJBProxyFactory().getEJBHome();
    }

    /**
     * @return the bean's local home proxy
     * @throws IllegalStateException if the bean declares no local home interface
     */
    public EJBLocalHome getEJBLocalHome() throws IllegalStateException {
        EJBMetadata ejbMetadata = EJBPlugins.getEJBMetadata(container);
        if (ejbMetadata.getLocalHomeInterface() == null) {
            throw new IllegalStateException("getEJBLocalHome is not allowed for a bean without a local home interface");
        }
        return (EJBLocalHome) EJBPlugins.getEJBProxyFactoryManager(container).getEJBProxyFactory("local").getEJBHome();
    }

    /**
     * @return the remote object proxy for the current invocation's bean identity
     * @throws IllegalStateException if the bean declares no remote interface
     */
    public EJBObject getEJBObject() throws IllegalStateException {
        EJBMetadata ejbMetadata = EJBPlugins.getEJBMetadata(container);
        if (ejbMetadata.getRemoteInterface() == null) {
            throw new IllegalStateException("getEJBObject is not allowed for a bean without a remote interface");
        }
        // TODO: once lifecycle state is tracked, reject this call until the bean has identity:
        // if (state.equals("not-exits")) {
        //     throw new IllegalStateException("getEJBObject is not allowed until the bean has identity");
        // }
        return (EJBObject) EJBPlugins.getEJBProxyFactoryManager(container).getThreadEJBProxyFactory().getEJBObject();
    }

    /**
     * @return the local object proxy for the current invocation's bean identity
     * @throws IllegalStateException if the bean declares no local interface
     */
    public EJBLocalObject getEJBLocalObject() throws IllegalStateException {
        EJBMetadata ejbMetadata = EJBPlugins.getEJBMetadata(container);
        if (ejbMetadata.getLocalHomeInterface() == null) {
            throw new IllegalStateException("getEJBLocalObject is not allowed for a bean without a local interface");
        }
        // TODO: once lifecycle state is tracked, reject this call until the bean has identity:
        // if (state.equals("not-exits")) {
        //     throw new IllegalStateException("getEJBLocalObject is not allowed until the bean has identity");
        // }
        return (EJBLocalObject) EJBPlugins.getEJBProxyFactoryManager(container).getEJBProxyFactory("local").getEJBObject();
    }

    /** Not implemented: always returns {@code null} until security integration exists. */
    public Principal getCallerPrincipal() {
        return null;
    }

    /** Not implemented: always returns {@code false} until security integration exists. */
    public boolean isCallerInRole(String roleName) {
        return false;
    }

    /**
     * @return the bean's UserTransaction
     * @throws IllegalStateException for beans with container-managed transactions
     */
    public UserTransaction getUserTransaction() throws IllegalStateException {
        if (userTransaction == null) {
            throw new IllegalStateException("getUserTransaction is not allowed for bean with container-managed transaction demarcation.");
        }
        return userTransaction;
    }

    /**
     * @return {@code true} if the current transaction is marked rollback-only
     * @throws IllegalStateException for bean-managed beans, or outside a transaction
     */
    public boolean getRollbackOnly() throws IllegalStateException {
        // A non-null userTransaction means bean-managed demarcation, where this call is forbidden.
        if (userTransaction != null) {
            throw new IllegalStateException("getRollbackOnly is not allowed for beans with bean-managed transaction demarcation.");
        }
        // TODO: enforce method-ready state once lifecycle state is tracked:
        // if (!state.equals("method-ready")) {
        //     throw new IllegalStateException("getRollbackOnly is only allowed in the method ready state");
        // }
        try {
            int status = transactionManager.getStatus();
            if (status == Status.STATUS_NO_TRANSACTION) {
                throw new IllegalStateException("getRollbackOnly is only allowed during a transaction");
            }
            return status == Status.STATUS_MARKED_ROLLBACK;
        } catch (SystemException e) {
            throw new EJBException("Could not get transaction status", e);
        }
    }

    /**
     * Marks the current transaction for rollback.
     *
     * @throws IllegalStateException for bean-managed beans, or outside a transaction
     */
    public void setRollbackOnly() throws IllegalStateException {
        if (userTransaction != null) {
            throw new IllegalStateException("setRollbackOnly is not allowed for beans with bean-managed transaction demarcation.");
        }
        // TODO: enforce method-ready state once lifecycle state is tracked:
        // if (!state.equals("method-ready")) {
        //     throw new IllegalStateException("setRollbackOnly is only allowed in the method ready state");
        // }
        try {
            if (transactionManager.getStatus() == Status.STATUS_NO_TRANSACTION) {
                throw new IllegalStateException("setRollbackOnly is only allowed during a transaction");
            }
            transactionManager.setRollbackOnly();
        } catch (SystemException e) {
            // Fix: this catch also covers the setRollbackOnly() call itself, so the
            // message must not claim the failure was only in reading the status.
            throw new EJBException("Could not mark transaction for rollback", e);
        }
    }

    /**
     * @deprecated Use JNDI instead
     * @throws EJBException always
     */
    public Properties getEnvironment() {
        throw new EJBException("getEnvironment is no longer supported; use JNDI instead");
    }

    /**
     * @deprecated Use getCallerPrincipal()
     * @throws EJBException always
     */
    public Identity getCallerIdentity() {
        throw new EJBException("getCallerIdentity is no longer supported; use getCallerPrincipal instead");
    }

    /**
     * @deprecated Use isCallerInRole(String roleName)
     * @throws EJBException always
     */
    public boolean isCallerInRole(Identity role) {
        throw new EJBException("isCallerInRole(Identity role) is no longer supported; use isCallerInRole(String roleName) instead");
    }

    /** EJB 2.1 timer service — not yet supported by this container. */
    public TimerService getTimerService() throws IllegalStateException {
        throw new UnsupportedOperationException("Not implemented yet");
    }

    /** JAX-RPC message context — not yet supported by this container. */
    public MessageContext getMessageContext() throws IllegalStateException {
        throw new UnsupportedOperationException("Not implemented yet");
    }
}
|
Fill in implementation of getCallerPrincipal() and isCallerInRole()
git-svn-id: d69ffe4ccc4861bf06065bd0072b85c931fba7ed@44493 13f79535-47bb-0310-9956-ffa450edef68
|
modules/core/src/java/org/apache/geronimo/ejb/GeronimoSessionContext.java
|
Fill in implementation of getCallerPrincipal() and isCallerInRole()
|
|
Java
|
apache-2.0
|
fbfdaa6aacd4257a78ddd1183f6eac81c1a34e8f
| 0
|
maduhu/head,jpodeszwik/mifos,vorburger/mifos-head,maduhu/mifos-head,AArhin/head,AArhin/head,maduhu/head,jpodeszwik/mifos,vorburger/mifos-head,vorburger/mifos-head,AArhin/head,maduhu/mifos-head,maduhu/head,jpodeszwik/mifos,maduhu/mifos-head,maduhu/head,vorburger/mifos-head,AArhin/head,maduhu/mifos-head,AArhin/head,maduhu/head,jpodeszwik/mifos,maduhu/mifos-head
|
package org.mifos.application.personnel.struts.action;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.mifos.application.customer.business.CustomFieldView;
import org.mifos.application.customer.center.struts.actionforms.CenterCustActionForm;
import org.mifos.application.customer.util.helpers.CustomerConstants;
import org.mifos.application.office.business.OfficeBO;
import org.mifos.application.personnel.business.PersonnelBO;
import org.mifos.application.personnel.business.service.PersonnelBusinessService;
import org.mifos.application.personnel.persistence.PersonnelPersistence;
import org.mifos.application.personnel.struts.actionforms.PersonActionForm;
import org.mifos.application.personnel.util.helpers.PersonnelConstants;
import org.mifos.application.personnel.util.helpers.PersonnelLevel;
import org.mifos.application.util.helpers.ActionForwards;
import org.mifos.application.util.helpers.Methods;
import org.mifos.framework.MifosMockStrutsTestCase;
import org.mifos.framework.business.util.Address;
import org.mifos.framework.business.util.Name;
import org.mifos.framework.components.fieldConfiguration.util.helpers.FieldConfigImplementer;
import org.mifos.framework.components.fieldConfiguration.util.helpers.FieldConfigItf;
import org.mifos.framework.exceptions.PageExpiredException;
import org.mifos.framework.exceptions.ServiceException;
import org.mifos.framework.hibernate.helper.HibernateUtil;
import org.mifos.framework.security.util.ActivityContext;
import org.mifos.framework.security.util.UserContext;
import org.mifos.framework.struts.plugin.helper.EntityMasterData;
import org.mifos.framework.struts.tags.DateHelper;
import org.mifos.framework.util.helpers.Constants;
import org.mifos.framework.util.helpers.Flow;
import org.mifos.framework.util.helpers.FlowManager;
import org.mifos.framework.util.helpers.ResourceLoader;
import org.mifos.framework.util.helpers.SessionUtils;
import org.mifos.framework.util.helpers.TestObjectFactory;
/**
 * Mock-Struts tests for the personnel (system user) create/edit flows of
 * {@code PersonAction}. Each test dispatches to {@code /PersonAction.do} with a
 * {@code method} request parameter and verifies forwards, action errors, and
 * session-scoped master data.
 */
public class TestPersonAction extends MifosMockStrutsTestCase {
    // Flow key used by the Mifos FlowManager to correlate multi-request flows.
    private String flowKey;
    private UserContext userContext;
    private OfficeBO createdBranchOffice;
    // Personnel created by createPersonnelAndSetInSession(); cleaned up in tearDown().
    PersonnelBO personnel;

    /**
     * Configures the mock Struts container (web.xml + personnel struts-config),
     * installs a UserContext/ActivityContext, registers a fresh flow, initializes
     * entity/field-configuration master data, and seeds the session with the
     * office and role lists the action expects.
     */
    @Override
    protected void setUp() throws Exception {
        super.setUp();
        try {
            setServletConfigFile(ResourceLoader.getURI("WEB-INF/web.xml")
                    .getPath());
            setConfigFile(ResourceLoader.getURI(
                    "org/mifos/application/personnel/struts-config.xml")
                    .getPath());
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
        userContext = TestObjectFactory.getUserContext();
        request.getSession().setAttribute(Constants.USERCONTEXT, userContext);
        addRequestParameter("recordLoanOfficerId", "1");
        addRequestParameter("recordOfficeId", "1");
        ActivityContext ac = new ActivityContext((short) 0, userContext
                .getBranchId().shortValue(), userContext.getId().shortValue());
        request.getSession(false).setAttribute("ActivityContext", ac);
        Flow flow = new Flow();
        // Timestamp-based key keeps each test run's flow unique.
        flowKey = String.valueOf(System.currentTimeMillis());
        FlowManager flowManager = new FlowManager();
        flowManager.addFLow(flowKey, flow);
        request.getSession(false).setAttribute(Constants.FLOWMANAGER,
                flowManager);
        EntityMasterData.getInstance().init();
        FieldConfigItf fieldConfigItf = FieldConfigImplementer.getInstance();
        fieldConfigItf.init();
        FieldConfigImplementer.getInstance();
        getActionServlet().getServletContext().setAttribute(
                Constants.FIELD_CONFIGURATION,
                fieldConfigItf.getEntityMandatoryFieldMap());
        request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
        addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
        addRequestParameter("input", "CreateUser");
        PersonnelBusinessService personnelBusinessService = new PersonnelBusinessService();
        SessionUtils.setAttribute(PersonnelConstants.OFFICE,personnelBusinessService.getOffice(Short.valueOf("1")), request);
        SessionUtils.setAttribute(PersonnelConstants.ROLES_LIST, personnelBusinessService.getRoles(), request);
        SessionUtils.setAttribute(PersonnelConstants.ROLEMASTERLIST,
                personnelBusinessService.getRoles(), request);
        personnelBusinessService=null;
    }

    /** Removes any personnel/office fixtures and closes the Hibernate session. */
    @Override
    protected void tearDown() throws Exception {
        userContext = null;
        TestObjectFactory.cleanUp(personnel);
        TestObjectFactory.cleanUp(createdBranchOffice);
        HibernateUtil.closeSession();
        super.tearDown();
    }

    /** chooseOffice should forward to its success page without errors. */
    public void testChooseOffice() {
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.chooseOffice.toString());
        actionPerform();
        verifyNoActionErrors();
        verifyNoActionMessages();
        verifyForward(ActionForwards.chooseOffice_success.toString());
    }

    /**
     * load for the head office (id 1) should place the office and master data in
     * session, prepare the form (custom fields, date-of-joining defaulted to
     * today), and forward to load_success.
     */
    public void testLoad() throws Exception {
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.load.toString());
        addRequestParameter("officeId", "1");
        actionPerform();
        verifyNoActionErrors();
        verifyNoActionMessages();
        OfficeBO office=(OfficeBO)SessionUtils.getAttribute(PersonnelConstants.OFFICE,
                request);
        assertNotNull(office);
        assertEquals(1,office.getOfficeId().intValue());
        verifyMasterData();
        PersonActionForm personActionForm = (PersonActionForm) request
                .getSession().getAttribute("personActionForm");
        assertNotNull(personActionForm);
        assertEquals(1, personActionForm.getCustomFields().size());
        verifyForward(ActionForwards.load_success.toString());
        PersonActionForm actionForm = (PersonActionForm)request.getSession().getAttribute("personActionForm");
        // Date-of-joining-MFI should default to today in the user's preferred locale.
        String currentDate = DateHelper.getCurrentDate(TestObjectFactory.getUserContext().getPereferedLocale());
        assertEquals(currentDate,actionForm.getDateOfJoiningMFI());
    }

    /**
     * load for a branch office (id 3) should additionally restrict the
     * personnel-level list to the two levels valid for a branch.
     */
    public void testLoadWithBranchOffice() throws Exception {
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.load.toString());
        addRequestParameter("officeId", "3");
        actionPerform();
        verifyNoActionErrors();
        verifyNoActionMessages();
        OfficeBO office=(OfficeBO)SessionUtils.getAttribute(PersonnelConstants.OFFICE,
                request);
        assertNotNull(office);
        assertEquals(3,office.getOfficeId().intValue());
        verifyMasterData();
        PersonActionForm personActionForm = (PersonActionForm) request
                .getSession().getAttribute("personActionForm");
        assertNotNull(personActionForm);
        assertEquals(1, personActionForm.getCustomFields().size());
        assertNotNull(SessionUtils.getAttribute(
                PersonnelConstants.PERSONNEL_LEVEL_LIST, request));
        assertEquals(2,((List)SessionUtils.getAttribute(
                PersonnelConstants.PERSONNEL_LEVEL_LIST, request)).size());
        verifyForward(ActionForwards.load_success.toString());
    }

    /** preview with no data should report one validation error per mandatory field. */
    public void testPreviewFailure() throws Exception {
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.preview.toString());
        actionPerform();
        assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_FIRSTNAME));
        assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_LASTNAME));
        assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_GENDER));
        assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_LEVEL));
        assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_USER_NAME));
        assertEquals(1, getErrrorSize(PersonnelConstants.PASSWORD));
        assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_DOB));
        verifyInputForward();
    }

    /** A too-short password must produce a password-length error. */
    public void testPreviewFailureWrongPasswordLength() throws Exception {
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.preview.toString());
        setRequestData();
        addRequestParameter("userPassword", "XXX");
        actionPerform();
        assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_PASSWORD_LENGTH));
        verifyInputForward();
    }

    /** Mismatched password / repeat-password must produce a password error. */
    public void testPreviewFailureWrongPasswordAndReaptPassword()
            throws Exception {
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.preview.toString());
        setRequestData();
        addRequestParameter("userPassword", "XXXXXX");
        addRequestParameter("passwordRepeat", "XXXXXZ");
        actionPerform();
        assertEquals(1, getErrrorSize(PersonnelConstants.PASSWORD));
        verifyInputForward();
    }

    /** preview with complete, valid data should forward to preview_success. */
    public void testPreviewSucess() throws Exception {
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.preview.toString());
        addRequestParameter("userPassword", "XXXXXXXX");
        addRequestParameter("passwordRepeat", "XXXXXXXX");
        setRequestData();
        actionPerform();
        verifyNoActionErrors();
        verifyNoActionMessages();
        verifyForward(ActionForwards.preview_success.toString());
    }

    /** previous (back navigation) should forward to previous_success. */
    public void testPreviousSucess() throws Exception {
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.previous.toString());
        actionPerform();
        verifyNoActionErrors();
        verifyNoActionMessages();
        verifyForward(ActionForwards.previous_success.toString());
    }

    /**
     * create with valid data should persist the personnel, expose its global
     * number and display name as request attributes, and match the submitted
     * first name, last name and gender when re-read from the database.
     */
    public void testCreateSucess() throws Exception {
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.create.toString());
        setRequestData();
        addRequestParameter("userPassword", "XXXXXXXX");
        addRequestParameter("passwordRepeat", "XXXXXXXX");
        actionPerform();
        verifyNoActionErrors();
        verifyNoActionMessages();
        verifyForward(ActionForwards.create_success.toString());
        assertNotNull(request.getAttribute("globalPersonnelNum"));
        assertNotNull(request.getAttribute("displayName"));
        PersonnelBO personnelBO =
            new PersonnelPersistence().getPersonnelByGlobalPersonnelNum(
                (String)request.getAttribute("globalPersonnelNum")
            );
        assertNotNull(personnelBO);
        //assert few values
        assertEquals("Jim",personnelBO.getPersonnelDetails().getName().getFirstName());
        assertEquals("khan",personnelBO.getPersonnelDetails().getName().getLastName());
        assertEquals(1,personnelBO.getPersonnelDetails().getGender().intValue());
        TestObjectFactory.cleanUp(personnelBO);
    }

    /** Same as testCreateSucess but without assigning any roles to the new user. */
    public void testCreateSucessWithNoRoles() throws Exception {
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.create.toString());
        addRequestParameter("firstName", "Jim");
        addRequestParameter("lastName", "khan");
        addRequestParameter("gender", "1");
        addRequestParameter("level", "1");
        addRequestParameter("title", "1");
        addRequestParameter("emailId", "1@1.com");
        addRequestParameter("dob", "20/03/76");
        addRequestParameter("loginName", "tarzen");
        addRequestParameter("preferredLocale","189");
        addRequestParameter("userPassword", "XXXXXXXX");
        addRequestParameter("passwordRepeat", "XXXXXXXX");
        actionPerform();
        verifyNoActionErrors();
        verifyNoActionMessages();
        verifyForward(ActionForwards.create_success.toString());
        assertNotNull(request.getAttribute("globalPersonnelNum"));
        assertNotNull(request.getAttribute("displayName"));
        PersonnelBO personnelBO = new PersonnelPersistence().getPersonnelByGlobalPersonnelNum((String)request.getAttribute("globalPersonnelNum"));
        assertNotNull(personnelBO);
        //assert few values
        assertEquals("Jim",personnelBO.getPersonnelDetails().getName().getFirstName());
        assertEquals("khan",personnelBO.getPersonnelDetails().getName().getLastName());
        assertEquals(1,personnelBO.getPersonnelDetails().getGender().intValue());
        TestObjectFactory.cleanUp(personnelBO);
        personnelBO=null;
    }

    /** get for an existing personnel should load master data and forward to get_success. */
    public void testGetSucess()throws Exception{
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.get.toString());
        addRequestParameter("globalPersonnelNum", "1");
        actionPerform();
        verifyNoActionErrors();
        verifyNoActionMessages();
        verifyMasterData();
        verifyForward(ActionForwards.get_success.toString());
    }

    /** Asserts all master-data lists the person screens need are present in session. */
    private void verifyMasterData()throws Exception{
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.TITLE_LIST,
                request));
        assertNotNull(SessionUtils.getAttribute(
                PersonnelConstants.PERSONNEL_LEVEL_LIST, request));
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.GENDER_LIST,
                request));
        assertNotNull(SessionUtils.getAttribute(
                PersonnelConstants.MARITAL_STATUS_LIST, request));
        assertNotNull(SessionUtils.getAttribute(
                PersonnelConstants.LANGUAGE_LIST, request));
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.ROLES_LIST,
                request));
        assertNotNull(SessionUtils.getAttribute(
                CustomerConstants.CUSTOM_FIELDS_LIST, request));
    }

    /** manage (edit screen) for an existing loan officer should load all master data. */
    public void testManage() throws Exception{
        request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
        createPersonnelAndSetInSession(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.manage.toString());
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        actionPerform();
        verifyNoActionErrors();
        verifyNoActionMessages();
        verifyForward(ActionForwards.manage_success.toString());
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.TITLE_LIST, request));
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.PERSONNEL_LEVEL_LIST, request));
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.GENDER_LIST, request));
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.MARITAL_STATUS_LIST, request));
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.LANGUAGE_LIST, request));
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.ROLES_LIST, request));
        assertNotNull(SessionUtils.getAttribute(CustomerConstants.CUSTOM_FIELDS_LIST, request));
    }

    /** Full manage → previewManage flow should end at previewManage_success. */
    public void testPreviewManage() throws Exception{
        request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
        createPersonnelAndSetInSession(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.manage.toString());
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        actionPerform();
        verifyNoActionErrors();
        verifyNoActionMessages();
        verifyForward(ActionForwards.manage_success.toString());
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.TITLE_LIST, request));
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.PERSONNEL_LEVEL_LIST, request));
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.GENDER_LIST, request));
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.MARITAL_STATUS_LIST, request));
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.LANGUAGE_LIST, request));
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.ROLES_LIST, request));
        assertNotNull(SessionUtils.getAttribute(CustomerConstants.CUSTOM_FIELDS_LIST, request));
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.previewManage.toString());
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        addRequestParameter("userPassword", "abcdef");
        addRequestParameter("passwordRepeat", "abcdef");
        addRequestParameter("personnelRoles", "1");
        actionPerform();
        verifyNoActionErrors();
        verifyNoActionMessages();
        verifyForward(ActionForwards.previewManage_success.toString());
    }

    /** previewManage with no data should report all mandatory-field errors, including office. */
    public void testManagePreviewFailure() throws Exception {
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.previewManage.toString());
        actionPerform();
        assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_FIRSTNAME));
        assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_LASTNAME));
        assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_GENDER));
        assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_LEVEL));
        assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_USER_NAME));
        assertEquals(1, getErrrorSize(PersonnelConstants.PASSWORD));
        assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_DOB));
        assertEquals(1, getErrrorSize(PersonnelConstants.OFFICE));
        verifyInputForward();
    }

    // NOTE(review): despite the "Manage" name, this dispatches Methods.preview rather
    // than Methods.previewManage — confirm whether previewManage was intended.
    public void testManagePreviewFailureWrongPasswordLength() throws Exception {
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.preview.toString());
        setRequestData();
        addRequestParameter("userPassword", "XXX");
        actionPerform();
        assertEquals(1, getErrrorSize("password"));
        verifyInputForward();
    }

    // NOTE(review): same as above — dispatches Methods.preview, not previewManage.
    public void testManagePreviewFailureWrongPasswordAndReaptPassword()
            throws Exception {
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.preview.toString());
        setRequestData();
        addRequestParameter("userPassword", "XXXXXX");
        addRequestParameter("passwordRepeat", "XXXXXZ");
        actionPerform();
        assertEquals(1, getErrrorSize("password"));
        verifyInputForward();
    }

    /**
     * End-to-end edit: manage → previewManage (gender changed 1 → 2) → update;
     * verifies the persisted gender changed.
     */
    public void testUpdateSuccess() throws Exception{
        request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
        createPersonnelAndSetInSession(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
        // NOTE(review): the next assertion is duplicated verbatim — likely copy-paste.
        assertEquals(1, personnel.getPersonnelDetails().getGender().intValue());
        assertEquals(1, personnel.getPersonnelDetails().getGender().intValue());
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.manage.toString());
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        actionPerform();
        verifyNoActionErrors();
        verifyNoActionMessages();
        verifyForward(ActionForwards.manage_success.toString());
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.TITLE_LIST, request));
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.PERSONNEL_LEVEL_LIST, request));
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.GENDER_LIST, request));
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.MARITAL_STATUS_LIST, request));
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.LANGUAGE_LIST, request));
        assertNotNull(SessionUtils.getAttribute(PersonnelConstants.ROLES_LIST, request));
        assertNotNull(SessionUtils.getAttribute(CustomerConstants.CUSTOM_FIELDS_LIST, request));
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.previewManage.toString());
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        addRequestParameter("personnelRoles", "1");
        addRequestParameter("gender", "2");
        addRequestParameter("maritalStatus", "2");
        addRequestParameter("userPassword", "abcdef");
        addRequestParameter("passwordRepeat", "abcdef");
        actionPerform();
        verifyNoActionErrors();
        verifyNoActionMessages();
        verifyForward(ActionForwards.previewManage_success.toString());
        setRequestPathInfo("/PersonAction.do");
        addRequestParameter("method", Methods.update.toString());
        addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
        actionPerform();
        verifyNoActionErrors();
        verifyNoActionMessages();
        verifyForward(ActionForwards.update_success.toString());
        // NOTE(review): duplicated assertion again, as above.
        assertEquals(2, personnel.getPersonnelDetails().getGender().intValue());
        assertEquals(2, personnel.getPersonnelDetails().getGender().intValue());
        personnel = (PersonnelBO)TestObjectFactory.getObject(PersonnelBO.class,personnel.getPersonnelId());
    }

    /**
     * Persists a PersonnelBO fixture at the given office/level and stores it in
     * the session under Constants.BUSINESS_KEY for the manage/update flows.
     */
    private void createPersonnelAndSetInSession(OfficeBO office, PersonnelLevel personnelLevel) throws Exception{
        List<CustomFieldView> customFieldView = new ArrayList<CustomFieldView>();
        customFieldView.add(new CustomFieldView(Short.valueOf("1"), "123456",
                Short.valueOf("1")));
        Address address = new Address("abcd","abcd","abcd","abcd","abcd","abcd","abcd","abcd");
        Name name = new Name("XYZ", null, null, "Last Name");
        Date date =new Date();
        personnel = new PersonnelBO(personnelLevel,
                office, Integer.valueOf("1"), Short.valueOf("1"),
                "ABCD", "XYZ", "xyz@yahoo.com", null,
                customFieldView, name, "111111", date, Integer
                        .valueOf("1"), Integer.valueOf("1"), date, date, address, userContext.getId());
        personnel.save();
        HibernateUtil.commitTransaction();
        // Close and re-read so the test works against a fresh session/instance.
        HibernateUtil.closeSession();
        personnel=(PersonnelBO)HibernateUtil.getSessionTL().get(PersonnelBO.class,personnel.getPersonnelId());
        SessionUtils.setAttribute(Constants.BUSINESS_KEY, personnel, request);
    }

    /** @return the branch office fixture (office id 3). */
    public OfficeBO getBranchOffice(){
        return TestObjectFactory.getOffice(Short.valueOf("3"));
    }

    /** Populates the request with a complete, valid set of person-creation fields. */
    private void setRequestData() throws PageExpiredException, ServiceException {
        addRequestParameter("firstName", "Jim");
        addRequestParameter("lastName", "khan");
        addRequestParameter("gender", "1");
        addRequestParameter("level", "1");
        addRequestParameter("title", "1");
        addRequestParameter("emailId", "1@1.com");
        addRequestParameter("dob", "20/03/76");
        addRequestParameter("loginName", "tarzen");
        addRequestParameter("personnelRoles", "1");
        addRequestParameter("preferredLocale","189");
    }
}
|
mifos/test/org/mifos/application/personnel/struts/action/TestPersonAction.java
|
package org.mifos.application.personnel.struts.action;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.mifos.application.customer.business.CustomFieldView;
import org.mifos.application.customer.util.helpers.CustomerConstants;
import org.mifos.application.office.business.OfficeBO;
import org.mifos.application.personnel.business.PersonnelBO;
import org.mifos.application.personnel.business.service.PersonnelBusinessService;
import org.mifos.application.personnel.persistence.PersonnelPersistence;
import org.mifos.application.personnel.struts.actionforms.PersonActionForm;
import org.mifos.application.personnel.util.helpers.PersonnelConstants;
import org.mifos.application.personnel.util.helpers.PersonnelLevel;
import org.mifos.application.util.helpers.ActionForwards;
import org.mifos.application.util.helpers.Methods;
import org.mifos.framework.MifosMockStrutsTestCase;
import org.mifos.framework.business.util.Address;
import org.mifos.framework.business.util.Name;
import org.mifos.framework.components.fieldConfiguration.util.helpers.FieldConfigImplementer;
import org.mifos.framework.components.fieldConfiguration.util.helpers.FieldConfigItf;
import org.mifos.framework.exceptions.PageExpiredException;
import org.mifos.framework.exceptions.ServiceException;
import org.mifos.framework.hibernate.helper.HibernateUtil;
import org.mifos.framework.security.util.ActivityContext;
import org.mifos.framework.security.util.UserContext;
import org.mifos.framework.struts.plugin.helper.EntityMasterData;
import org.mifos.framework.util.helpers.Constants;
import org.mifos.framework.util.helpers.Flow;
import org.mifos.framework.util.helpers.FlowManager;
import org.mifos.framework.util.helpers.ResourceLoader;
import org.mifos.framework.util.helpers.SessionUtils;
import org.mifos.framework.util.helpers.TestObjectFactory;
public class TestPersonAction extends MifosMockStrutsTestCase {
private String flowKey;
private UserContext userContext;
private OfficeBO createdBranchOffice;
PersonnelBO personnel;
@Override
protected void setUp() throws Exception {
super.setUp();
try {
setServletConfigFile(ResourceLoader.getURI("WEB-INF/web.xml")
.getPath());
setConfigFile(ResourceLoader.getURI(
"org/mifos/application/personnel/struts-config.xml")
.getPath());
} catch (URISyntaxException e) {
e.printStackTrace();
}
userContext = TestObjectFactory.getUserContext();
request.getSession().setAttribute(Constants.USERCONTEXT, userContext);
addRequestParameter("recordLoanOfficerId", "1");
addRequestParameter("recordOfficeId", "1");
ActivityContext ac = new ActivityContext((short) 0, userContext
.getBranchId().shortValue(), userContext.getId().shortValue());
request.getSession(false).setAttribute("ActivityContext", ac);
Flow flow = new Flow();
flowKey = String.valueOf(System.currentTimeMillis());
FlowManager flowManager = new FlowManager();
flowManager.addFLow(flowKey, flow);
request.getSession(false).setAttribute(Constants.FLOWMANAGER,
flowManager);
EntityMasterData.getInstance().init();
FieldConfigItf fieldConfigItf = FieldConfigImplementer.getInstance();
fieldConfigItf.init();
FieldConfigImplementer.getInstance();
getActionServlet().getServletContext().setAttribute(
Constants.FIELD_CONFIGURATION,
fieldConfigItf.getEntityMandatoryFieldMap());
request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
addRequestParameter(Constants.CURRENTFLOWKEY, flowKey);
addRequestParameter("input", "CreateUser");
PersonnelBusinessService personnelBusinessService = new PersonnelBusinessService();
SessionUtils.setAttribute(PersonnelConstants.OFFICE,personnelBusinessService.getOffice(Short.valueOf("1")), request);
SessionUtils.setAttribute(PersonnelConstants.ROLES_LIST, personnelBusinessService.getRoles(), request);
SessionUtils.setAttribute(PersonnelConstants.ROLEMASTERLIST,
personnelBusinessService.getRoles(), request);
personnelBusinessService=null;
}
@Override
protected void tearDown() throws Exception {
userContext = null;
TestObjectFactory.cleanUp(personnel);
TestObjectFactory.cleanUp(createdBranchOffice);
HibernateUtil.closeSession();
super.tearDown();
}
public void testChooseOffice() {
setRequestPathInfo("/PersonAction.do");
addRequestParameter("method", Methods.chooseOffice.toString());
actionPerform();
verifyNoActionErrors();
verifyNoActionMessages();
verifyForward(ActionForwards.chooseOffice_success.toString());
}
public void testLoad() throws Exception {
setRequestPathInfo("/PersonAction.do");
addRequestParameter("method", Methods.load.toString());
addRequestParameter("officeId", "1");
actionPerform();
verifyNoActionErrors();
verifyNoActionMessages();
OfficeBO office=(OfficeBO)SessionUtils.getAttribute(PersonnelConstants.OFFICE,
request);
assertNotNull(office);
assertEquals(1,office.getOfficeId().intValue());
verifyMasterData();
PersonActionForm personActionForm = (PersonActionForm) request
.getSession().getAttribute("personActionForm");
assertNotNull(personActionForm);
assertEquals(1, personActionForm.getCustomFields().size());
verifyForward(ActionForwards.load_success.toString());
}
public void testLoadWithBranchOffice() throws Exception {
setRequestPathInfo("/PersonAction.do");
addRequestParameter("method", Methods.load.toString());
addRequestParameter("officeId", "3");
actionPerform();
verifyNoActionErrors();
verifyNoActionMessages();
OfficeBO office=(OfficeBO)SessionUtils.getAttribute(PersonnelConstants.OFFICE,
request);
assertNotNull(office);
assertEquals(3,office.getOfficeId().intValue());
verifyMasterData();
PersonActionForm personActionForm = (PersonActionForm) request
.getSession().getAttribute("personActionForm");
assertNotNull(personActionForm);
assertEquals(1, personActionForm.getCustomFields().size());
assertNotNull(SessionUtils.getAttribute(
PersonnelConstants.PERSONNEL_LEVEL_LIST, request));
assertEquals(2,((List)SessionUtils.getAttribute(
PersonnelConstants.PERSONNEL_LEVEL_LIST, request)).size());
verifyForward(ActionForwards.load_success.toString());
}
public void testPreviewFailure() throws Exception {
setRequestPathInfo("/PersonAction.do");
addRequestParameter("method", Methods.preview.toString());
actionPerform();
assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_FIRSTNAME));
assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_LASTNAME));
assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_GENDER));
assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_LEVEL));
assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_USER_NAME));
assertEquals(1, getErrrorSize(PersonnelConstants.PASSWORD));
assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_DOB));
verifyInputForward();
}
// Submits otherwise-valid data with a 3-character password and expects a
// single password-length validation error.
public void testPreviewFailureWrongPasswordLength() throws Exception {
setRequestPathInfo("/PersonAction.do");
addRequestParameter("method", Methods.preview.toString());
setRequestData();
addRequestParameter("userPassword", "XXX");
actionPerform();
assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_PASSWORD_LENGTH));
verifyInputForward();
}
// Submits mismatched password / repeat-password values and expects exactly
// one password validation error.
public void testPreviewFailureWrongPasswordAndReaptPassword()
throws Exception {
setRequestPathInfo("/PersonAction.do");
addRequestParameter("method", Methods.preview.toString());
setRequestData();
addRequestParameter("userPassword", "XXXXXX");
addRequestParameter("passwordRepeat", "XXXXXZ");
actionPerform();
assertEquals(1, getErrrorSize(PersonnelConstants.PASSWORD));
verifyInputForward();
}
// Happy-path preview: valid form data plus matching passwords should pass
// validation and forward to preview_success.
public void testPreviewSucess() throws Exception {
setRequestPathInfo("/PersonAction.do");
addRequestParameter("method", Methods.preview.toString());
addRequestParameter("userPassword", "XXXXXXXX");
addRequestParameter("passwordRepeat", "XXXXXXXX");
setRequestData();
actionPerform();
verifyNoActionErrors();
verifyNoActionMessages();
verifyForward(ActionForwards.preview_success.toString());
}
// The "previous" navigation action should always succeed and forward to
// previous_success.
public void testPreviousSucess() throws Exception {
setRequestPathInfo("/PersonAction.do");
addRequestParameter("method", Methods.previous.toString());
actionPerform();
verifyNoActionErrors();
verifyNoActionMessages();
verifyForward(ActionForwards.previous_success.toString());
}
// Creates a personnel record with full valid data (including a role, via
// setRequestData) and verifies the record can be loaded back by the global
// personnel number published in the request, with name/gender round-tripped.
public void testCreateSucess() throws Exception {
setRequestPathInfo("/PersonAction.do");
addRequestParameter("method", Methods.create.toString());
setRequestData();
addRequestParameter("userPassword", "XXXXXXXX");
addRequestParameter("passwordRepeat", "XXXXXXXX");
actionPerform();
verifyNoActionErrors();
verifyNoActionMessages();
verifyForward(ActionForwards.create_success.toString());
// The action exposes the new record's identifiers as request attributes.
assertNotNull(request.getAttribute("globalPersonnelNum"));
assertNotNull(request.getAttribute("displayName"));
PersonnelBO personnelBO =
new PersonnelPersistence().getPersonnelByGlobalPersonnelNum(
(String)request.getAttribute("globalPersonnelNum")
);
assertNotNull(personnelBO);
//assert few values
assertEquals("Jim",personnelBO.getPersonnelDetails().getName().getFirstName());
assertEquals("khan",personnelBO.getPersonnelDetails().getName().getLastName());
assertEquals(1,personnelBO.getPersonnelDetails().getGender().intValue());
TestObjectFactory.cleanUp(personnelBO);
}
/**
 * Creates a personnel record without assigning any roles (parameters are set
 * inline instead of via {@code setRequestData()}, which would add a role) and
 * verifies that creation still succeeds and the record round-trips by its
 * global personnel number.
 */
public void testCreateSucessWithNoRoles() throws Exception {
    setRequestPathInfo("/PersonAction.do");
    addRequestParameter("method", Methods.create.toString());
    addRequestParameter("firstName", "Jim");
    addRequestParameter("lastName", "khan");
    addRequestParameter("gender", "1");
    addRequestParameter("level", "1");
    addRequestParameter("title", "1");
    addRequestParameter("emailId", "1@1.com");
    addRequestParameter("dob", "20/03/76");
    addRequestParameter("loginName", "tarzen");
    addRequestParameter("preferredLocale","189");
    addRequestParameter("userPassword", "XXXXXXXX");
    addRequestParameter("passwordRepeat", "XXXXXXXX");
    actionPerform();
    verifyNoActionErrors();
    verifyNoActionMessages();
    verifyForward(ActionForwards.create_success.toString());
    // The action exposes the new record's identifiers as request attributes.
    assertNotNull(request.getAttribute("globalPersonnelNum"));
    assertNotNull(request.getAttribute("displayName"));
    PersonnelBO personnelBO = new PersonnelPersistence().getPersonnelByGlobalPersonnelNum((String)request.getAttribute("globalPersonnelNum"));
    assertNotNull(personnelBO);
    // Assert a few of the submitted values round-tripped.
    assertEquals("Jim",personnelBO.getPersonnelDetails().getName().getFirstName());
    assertEquals("khan",personnelBO.getPersonnelDetails().getName().getLastName());
    assertEquals(1,personnelBO.getPersonnelDetails().getGender().intValue());
    TestObjectFactory.cleanUp(personnelBO);
    // Removed dead store "personnelBO = null;" -- the local goes out of
    // scope immediately after, so the assignment had no effect.
}
// Fetches an existing personnel record by global personnel number and
// verifies the master data is loaded into session for the detail page.
public void testGetSucess()throws Exception{
setRequestPathInfo("/PersonAction.do");
addRequestParameter("method", Methods.get.toString());
addRequestParameter("globalPersonnelNum", "1");
actionPerform();
verifyNoActionErrors();
verifyNoActionMessages();
verifyMasterData();
verifyForward(ActionForwards.get_success.toString());
}
// Shared assertion helper: verifies that every master-data list the person
// pages depend on (titles, levels, genders, marital statuses, languages,
// roles, custom fields) has been placed in session by the action.
private void verifyMasterData()throws Exception{
assertNotNull(SessionUtils.getAttribute(PersonnelConstants.TITLE_LIST,
request));
assertNotNull(SessionUtils.getAttribute(
PersonnelConstants.PERSONNEL_LEVEL_LIST, request));
assertNotNull(SessionUtils.getAttribute(PersonnelConstants.GENDER_LIST,
request));
assertNotNull(SessionUtils.getAttribute(
PersonnelConstants.MARITAL_STATUS_LIST, request));
assertNotNull(SessionUtils.getAttribute(
PersonnelConstants.LANGUAGE_LIST, request));
assertNotNull(SessionUtils.getAttribute(PersonnelConstants.ROLES_LIST,
request));
assertNotNull(SessionUtils.getAttribute(
CustomerConstants.CUSTOM_FIELDS_LIST, request));
}
// Opens the edit ("manage") workflow for a freshly created loan officer and
// verifies all master-data lists are loaded into session for the edit page.
public void testManage() throws Exception{
request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
createPersonnelAndSetInSession(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
setRequestPathInfo("/PersonAction.do");
addRequestParameter("method", Methods.manage.toString());
addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
actionPerform();
verifyNoActionErrors();
verifyNoActionMessages();
verifyForward(ActionForwards.manage_success.toString());
assertNotNull(SessionUtils.getAttribute(PersonnelConstants.TITLE_LIST, request));
assertNotNull(SessionUtils.getAttribute(PersonnelConstants.PERSONNEL_LEVEL_LIST, request));
assertNotNull(SessionUtils.getAttribute(PersonnelConstants.GENDER_LIST, request));
assertNotNull(SessionUtils.getAttribute(PersonnelConstants.MARITAL_STATUS_LIST, request));
assertNotNull(SessionUtils.getAttribute(PersonnelConstants.LANGUAGE_LIST, request));
assertNotNull(SessionUtils.getAttribute(PersonnelConstants.ROLES_LIST, request));
assertNotNull(SessionUtils.getAttribute(CustomerConstants.CUSTOM_FIELDS_LIST, request));
}
// Runs the manage step (same setup as testManage) and then submits the
// previewManage action with a valid password pair and a role, expecting a
// forward to previewManage_success.
public void testPreviewManage() throws Exception{
request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
createPersonnelAndSetInSession(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
setRequestPathInfo("/PersonAction.do");
addRequestParameter("method", Methods.manage.toString());
addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
actionPerform();
verifyNoActionErrors();
verifyNoActionMessages();
verifyForward(ActionForwards.manage_success.toString());
assertNotNull(SessionUtils.getAttribute(PersonnelConstants.TITLE_LIST, request));
assertNotNull(SessionUtils.getAttribute(PersonnelConstants.PERSONNEL_LEVEL_LIST, request));
assertNotNull(SessionUtils.getAttribute(PersonnelConstants.GENDER_LIST, request));
assertNotNull(SessionUtils.getAttribute(PersonnelConstants.MARITAL_STATUS_LIST, request));
assertNotNull(SessionUtils.getAttribute(PersonnelConstants.LANGUAGE_LIST, request));
assertNotNull(SessionUtils.getAttribute(PersonnelConstants.ROLES_LIST, request));
assertNotNull(SessionUtils.getAttribute(CustomerConstants.CUSTOM_FIELDS_LIST, request));
// Second request in the same flow: preview the edits.
setRequestPathInfo("/PersonAction.do");
addRequestParameter("method", Methods.previewManage.toString());
addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
addRequestParameter("userPassword", "abcdef");
addRequestParameter("passwordRepeat", "abcdef");
addRequestParameter("personnelRoles", "1");
actionPerform();
verifyNoActionErrors();
verifyNoActionMessages();
verifyForward(ActionForwards.previewManage_success.toString());
}
// Submits previewManage with no data and expects one validation error per
// mandatory field (including OFFICE, which the create preview does not
// require) and a forward back to the input page.
public void testManagePreviewFailure() throws Exception {
setRequestPathInfo("/PersonAction.do");
addRequestParameter("method", Methods.previewManage.toString());
actionPerform();
assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_FIRSTNAME));
assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_LASTNAME));
assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_GENDER));
assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_LEVEL));
assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_USER_NAME));
assertEquals(1, getErrrorSize(PersonnelConstants.PASSWORD));
assertEquals(1, getErrrorSize(PersonnelConstants.ERROR_DOB));
assertEquals(1, getErrrorSize(PersonnelConstants.OFFICE));
verifyInputForward();
}
// NOTE(review): despite the "Manage" in the name, this dispatches
// Methods.preview rather than Methods.previewManage -- looks like a
// copy-paste from testPreviewFailureWrongPasswordLength. Confirm whether
// the manage path was meant to be exercised here.
public void testManagePreviewFailureWrongPasswordLength() throws Exception {
setRequestPathInfo("/PersonAction.do");
addRequestParameter("method", Methods.preview.toString());
setRequestData();
addRequestParameter("userPassword", "XXX");
actionPerform();
assertEquals(1, getErrrorSize("password"));
verifyInputForward();
}
// NOTE(review): as with testManagePreviewFailureWrongPasswordLength, this
// dispatches Methods.preview rather than Methods.previewManage -- confirm
// whether the manage path was intended.
public void testManagePreviewFailureWrongPasswordAndReaptPassword()
throws Exception {
setRequestPathInfo("/PersonAction.do");
addRequestParameter("method", Methods.preview.toString());
setRequestData();
addRequestParameter("userPassword", "XXXXXX");
addRequestParameter("passwordRepeat", "XXXXXZ");
actionPerform();
assertEquals(1, getErrrorSize("password"));
verifyInputForward();
}
/**
 * Drives the full edit workflow for a loan officer (manage -> previewManage
 * -> update) changing gender and marital status, and verifies the gender
 * change is reflected afterwards. Duplicated identical assertions (the same
 * gender assert appeared twice before and after the update) were removed.
 */
public void testUpdateSuccess() throws Exception{
    request.setAttribute(Constants.CURRENTFLOWKEY, flowKey);
    createPersonnelAndSetInSession(getBranchOffice(), PersonnelLevel.LOAN_OFFICER);
    // Precondition: the freshly created personnel has gender 1.
    assertEquals(1, personnel.getPersonnelDetails().getGender().intValue());
    setRequestPathInfo("/PersonAction.do");
    addRequestParameter("method", Methods.manage.toString());
    addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
    actionPerform();
    verifyNoActionErrors();
    verifyNoActionMessages();
    verifyForward(ActionForwards.manage_success.toString());
    assertNotNull(SessionUtils.getAttribute(PersonnelConstants.TITLE_LIST, request));
    assertNotNull(SessionUtils.getAttribute(PersonnelConstants.PERSONNEL_LEVEL_LIST, request));
    assertNotNull(SessionUtils.getAttribute(PersonnelConstants.GENDER_LIST, request));
    assertNotNull(SessionUtils.getAttribute(PersonnelConstants.MARITAL_STATUS_LIST, request));
    assertNotNull(SessionUtils.getAttribute(PersonnelConstants.LANGUAGE_LIST, request));
    assertNotNull(SessionUtils.getAttribute(PersonnelConstants.ROLES_LIST, request));
    assertNotNull(SessionUtils.getAttribute(CustomerConstants.CUSTOM_FIELDS_LIST, request));
    // Preview the edits: gender and marital status changed to 2.
    setRequestPathInfo("/PersonAction.do");
    addRequestParameter("method", Methods.previewManage.toString());
    addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
    addRequestParameter("personnelRoles", "1");
    addRequestParameter("gender", "2");
    addRequestParameter("maritalStatus", "2");
    addRequestParameter("userPassword", "abcdef");
    addRequestParameter("passwordRepeat", "abcdef");
    actionPerform();
    verifyNoActionErrors();
    verifyNoActionMessages();
    verifyForward(ActionForwards.previewManage_success.toString());
    // Commit the edits.
    setRequestPathInfo("/PersonAction.do");
    addRequestParameter("method", Methods.update.toString());
    addRequestParameter(Constants.CURRENTFLOWKEY, (String) request.getAttribute(Constants.CURRENTFLOWKEY));
    actionPerform();
    verifyNoActionErrors();
    verifyNoActionMessages();
    verifyForward(ActionForwards.update_success.toString());
    // NOTE(review): this asserts on the in-session instance; the reload below
    // only refreshes the field afterwards. Confirm the intent was not to
    // assert on the reloaded copy instead.
    assertEquals(2, personnel.getPersonnelDetails().getGender().intValue());
    personnel = (PersonnelBO)TestObjectFactory.getObject(PersonnelBO.class,personnel.getPersonnelId());
}
// Creates and persists a PersonnelBO with one custom field at the given
// office/level, commits, reloads it in a fresh Hibernate session, and stores
// it in the test's session under the business key so manage/update actions
// can find it. Assigns the result to the test's "personnel" field.
private void createPersonnelAndSetInSession(OfficeBO office, PersonnelLevel personnelLevel) throws Exception{
List<CustomFieldView> customFieldView = new ArrayList<CustomFieldView>();
customFieldView.add(new CustomFieldView(Short.valueOf("1"), "123456",
Short.valueOf("1")));
Address address = new Address("abcd","abcd","abcd","abcd","abcd","abcd","abcd","abcd");
Name name = new Name("XYZ", null, null, "Last Name");
Date date =new Date();
personnel = new PersonnelBO(personnelLevel,
office, Integer.valueOf("1"), Short.valueOf("1"),
"ABCD", "XYZ", "xyz@yahoo.com", null,
customFieldView, name, "111111", date, Integer
.valueOf("1"), Integer.valueOf("1"), date, date, address, userContext.getId());
personnel.save();
HibernateUtil.commitTransaction();
// Reload from a fresh session so the instance is attached for later reads.
HibernateUtil.closeSession();
personnel=(PersonnelBO)HibernateUtil.getSessionTL().get(PersonnelBO.class,personnel.getPersonnelId());
SessionUtils.setAttribute(Constants.BUSINESS_KEY, personnel, request);
}
// Returns the branch office with id 3 used as the test fixture office.
public OfficeBO getBranchOffice(){
return TestObjectFactory.getOffice(Short.valueOf("3"));
}
// Populates the request with a valid baseline set of person-form fields
// (including one role). Passwords are intentionally left out so individual
// tests can exercise password validation.
private void setRequestData() throws PageExpiredException, ServiceException {
addRequestParameter("firstName", "Jim");
addRequestParameter("lastName", "khan");
addRequestParameter("gender", "1");
addRequestParameter("level", "1");
addRequestParameter("title", "1");
addRequestParameter("emailId", "1@1.com");
addRequestParameter("dob", "20/03/76");
addRequestParameter("loginName", "tarzen");
addRequestParameter("personnelRoles", "1");
addRequestParameter("preferredLocale","189");
}
}
|
Issue 793 fixed. test case testLoad() in TestPersonAction modified with additional asserts
Issue 794 fixed . UI Issue. no test case updated
Issue 687 fixed. UI issue. No test case updated
git-svn-id: 6bd94cac40bd5c1df74b384d972046d926de6ffa@10759 a8845c50-7012-0410-95d3-8e1449b9b1e4
|
mifos/test/org/mifos/application/personnel/struts/action/TestPersonAction.java
|
Issue 793 fixed. test case testLoad() in TestPersonAction modified with additional asserts Issue 794 fixed . UI Issue. no test case updated Issue 687 fixed. UI issue. No test case updated
|
|
Java
|
apache-2.0
|
07c0ebcfa9852c8c7a926deb1dbe905e146a440c
| 0
|
JanewzhWang/dasein-cloud-test,dasein-cloud/dasein-cloud-test,maksimov/dasein-cloud-test,daniellemayne/dasein-cloud-test,jeffrey-yan/dasein-cloud-test,greese/dasein-cloud-test,unwin/dasein-cloud-test,vladmunthiu/dasein-cloud-test,maksimov/dasein-cloud-test-old
|
/**
* Copyright (C) 2009-2014 Dell, Inc.
* See annotations for authorship information
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud.test.compute;
import java.util.*;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.apache.log4j.Logger;
import org.dasein.cloud.CloudException;
import org.dasein.cloud.CloudProvider;
import org.dasein.cloud.InternalException;
import org.dasein.cloud.Requirement;
import org.dasein.cloud.compute.*;
import org.dasein.cloud.dc.DataCenter;
import org.dasein.cloud.network.NetworkServices;
import org.dasein.cloud.network.Subnet;
import org.dasein.cloud.network.SubnetCreateOptions;
import org.dasein.cloud.network.VLAN;
import org.dasein.cloud.network.VLANSupport;
import org.dasein.cloud.test.DaseinTestManager;
import org.dasein.cloud.test.identity.IdentityResources;
import org.dasein.cloud.test.network.NetworkResources;
import org.dasein.util.CalendarWrapper;
import org.dasein.util.uom.storage.Gigabyte;
import org.dasein.util.uom.storage.Storage;
/**
* Handles the shared compute resources for executing various tests.
* <p>Created by George Reese: 2/17/13 8:35 PM</p>
*
* @author George Reese
* @version 2013.04
* @version 2014.08 limited architectures to those supported in the cloud
* @since 2013.02
*/
public class ComputeResources {
static private final Logger logger = Logger.getLogger(ComputeResources.class);
static private final Random random = new Random();
// Provider connection shared across the test run; set once in the constructor.
private CloudProvider provider;
// Test label (e.g. DaseinTestManager.STATEFUL / STATELESS) -> provider
// resource ID for each kind of compute resource provisioned or discovered.
private final Map<String, String> testMachineImages = new HashMap<String, String>();
private final Map<String, String> testSnapshots = new HashMap<String, String>();
private final Map<String, String> testVMs = new HashMap<String, String>();
private final Map<String, String> testVolumes = new HashMap<String, String>();
//defaults
// Lazily-discovered defaults used when provisioning test resources.
private String testDataCenterId;
private Platform testImagePlatform;
private String testVMProductId;
private String testVolumeProductId;
/**
 * @param provider the cloud provider connection shared by the test run
 */
public ComputeResources( @Nonnull CloudProvider provider ) {
this.provider = provider;
}
/**
 * Logs any stateful compute resources provisioned during the test run and
 * returns how many remain to be cleaned up. Entries registered under the
 * STATELESS label are discovered (not provisioned) resources and are removed
 * before reporting.
 *
 * @return the number of provisioned (stateful) compute resources
 */
public int report() {
    int count = 0;
    count += reportResources("---> Machine Images", testMachineImages, count);
    count += reportResources("---> Snapshots", testSnapshots, count);
    count += reportResources("---> Virtual Machines", testVMs, count);
    count += reportResources("---> Volumes", testVolumes, count);
    return count;
}

/**
 * Drops the stateless entry from {@code resources} and, if any entries
 * remain, logs the category -- printing the section header before the first
 * non-empty category. (This also fixes an inconsistency in the previous
 * inlined version, where the volumes branch omitted the header bookkeeping.)
 *
 * @param label      display label for the resource category
 * @param resources  label-to-id map for one resource category (mutated:
 *                   the STATELESS entry is removed)
 * @param countSoFar resources reported before this category; zero means the
 *                   section header has not been printed yet
 * @return the number of resources reported for this category
 */
private int reportResources( @Nonnull String label, @Nonnull Map<String, String> resources, int countSoFar ) {
    resources.remove(DaseinTestManager.STATELESS);
    if( resources.isEmpty() ) {
        return 0;
    }
    if( countSoFar == 0 ) {
        logger.info("Provisioned Compute Resources:");
    }
    DaseinTestManager.out(logger, null, label, resources.size() + " " + resources);
    return resources.size();
}
/**
 * Tears down every stateful resource registered during the run (VMs, images,
 * snapshots, then volumes) and returns how many were processed. Entries under
 * the STATELESS label are skipped -- they were discovered, not provisioned.
 * Failures are logged and do not stop the remaining cleanup.
 *
 * @return the number of resources handled (counted even when the resource
 *         was already gone)
 */
public int close() {
ComputeServices computeServices = provider.getComputeServices();
int count = 0;
if( computeServices != null ) {
// --- Virtual machines: wait out PENDING state before terminating. ---
VirtualMachineSupport vmSupport = computeServices.getVirtualMachineSupport();
if( vmSupport != null ) {
for( Map.Entry<String, String> entry : testVMs.entrySet() ) {
if( !entry.getKey().equals(DaseinTestManager.STATELESS) ) {
try {
// Sometimes VMs don't have enough time to start before they are terminated
// by tests, this causes stuck unterminated VMs. Let's try to take care of
// that:
long timeout = System.currentTimeMillis() + 5 * 60 * 1000;
VirtualMachine vm = null;
while(System.currentTimeMillis() < timeout) {
vm = vmSupport.getVirtualMachine(entry.getValue());
if( vm == null || !VmState.PENDING.equals(vm.getCurrentState()) ) {
break;
}
Thread.sleep(10000);
};
if( vm != null ) {
vmSupport.terminate(entry.getValue());
count++;
}
else {
count++;
}
} catch( Throwable t ) {
logger.warn("Failed to de-provision test VM " + entry.getValue() + ": " + t.getMessage());
}
}
}
}
// --- Machine images. ---
MachineImageSupport imageSupport = computeServices.getImageSupport();
if( imageSupport != null ) {
for( Map.Entry<String, String> entry : testMachineImages.entrySet() ) {
if( !entry.getKey().equals(DaseinTestManager.STATELESS) ) {
try {
MachineImage img = imageSupport.getImage(entry.getValue());
if( img != null ) {
imageSupport.remove(entry.getValue());
count++;
}
else {
count++;
}
} catch( Throwable t ) {
logger.warn("Failed to de-provision test image " + entry.getValue() + ": " + t.getMessage());
}
}
}
}
// --- Snapshots. ---
SnapshotSupport snapshotSupport = computeServices.getSnapshotSupport();
if( snapshotSupport != null ) {
for( Map.Entry<String, String> entry : testSnapshots.entrySet() ) {
if( !entry.getKey().equals(DaseinTestManager.STATELESS) ) {
try {
Snapshot snapshot = snapshotSupport.getSnapshot(entry.getValue());
if( snapshot != null ) {
snapshotSupport.remove(entry.getValue());
count++;
}
else {
count++;
}
} catch( Throwable t ) {
logger.warn("Failed to de-provision test snapshot " + entry.getValue() + " post-test: " + t.getMessage());
}
}
}
}
// --- Volumes: detach all first, give the provider a minute, then remove. ---
VolumeSupport volumeSupport = computeServices.getVolumeSupport();
if( volumeSupport != null ) {
for( Map.Entry<String, String> entry : testVolumes.entrySet() ) {
if( !entry.getKey().equals(DaseinTestManager.STATELESS) ) {
try {
Volume volume = volumeSupport.getVolume(entry.getValue());
if( volume != null ) {
volumeSupport.detach(entry.getValue(), true);
}
} catch( Throwable ignore ) {
// IGNORE
}
}
}
// Allow detach operations to settle before removal.
try {
Thread.sleep(60000L);
} catch( InterruptedException ignore ) {
}
for( Map.Entry<String, String> entry : testVolumes.entrySet() ) {
if( !entry.getKey().equals(DaseinTestManager.STATELESS) ) {
try {
Volume volume = volumeSupport.getVolume(entry.getValue());
if( volume != null ) {
volumeSupport.remove(entry.getValue());
count++;
}
else {
count++;
}
} catch( Throwable t ) {
logger.warn("Failed to de-provision test volume " + entry.getValue() + ": " + t.getMessage());
}
}
}
}
}
return count;
}
/**
 * Looks for an existing snapshot usable by stateless tests, preferring the
 * first one in the AVAILABLE state and otherwise falling back to the first
 * snapshot listed. A match is cached under the STATELESS label.
 *
 * @return the provider snapshot ID of a usable snapshot, or {@code null}
 *         when snapshot support is absent, unsubscribed, empty, or errors
 */
private @Nullable String findStatelessSnapshot() {
    ComputeServices services = provider.getComputeServices();
    if( services == null ) {
        return null;
    }
    SnapshotSupport support = services.getSnapshotSupport();
    try {
        if( support == null || !support.isSubscribed() ) {
            return null;
        }
        Snapshot candidate = null;
        for( Snapshot snapshot : support.listSnapshots() ) {
            if( snapshot.getCurrentState().equals(SnapshotState.AVAILABLE) ) {
                candidate = snapshot;
                break;
            }
            if( candidate == null ) {
                candidate = snapshot;
            }
        }
        if( candidate == null ) {
            return null;
        }
        String snapshotId = candidate.getProviderSnapshotId();
        if( snapshotId != null ) {
            testSnapshots.put(DaseinTestManager.STATELESS, snapshotId);
        }
        return snapshotId;
    } catch( Throwable ignore ) {
        // Best-effort lookup: any provider error means "no snapshot found".
        return null;
    }
}
/**
 * Returns a data center ID to run tests against, caching the result in
 * {@code testDataCenterId} once known.
 *
 * @param stateless when {@code true}, pick an existing active+available data
 *                  center (falling back to the first one listed); when
 *                  {@code false}, provision a stateful test VM and use the
 *                  data center it landed in
 * @return a usable data center ID, or {@code null} if none could be found
 */
public @Nullable String getTestDataCenterId( boolean stateless ) {
if( testDataCenterId != null ) {
return testDataCenterId;
}
if( stateless ) {
try {
DataCenter defaultDC = null;
//noinspection ConstantConditions
for( DataCenter dc : provider.getDataCenterServices().listDataCenters(provider.getContext().getRegionId()) ) {
if( defaultDC == null ) {
defaultDC = dc;
}
// Prefer the first DC that is both active and available.
if( dc.isActive() && dc.isAvailable() ) {
return dc.getProviderDataCenterId();
}
}
if( defaultDC != null ) {
return defaultDC.getProviderDataCenterId();
}
} catch( Throwable ignore ) {
// ignore
}
}
else {
ComputeServices services = provider.getComputeServices();
if( services != null ) {
VirtualMachineSupport support = services.getVirtualMachineSupport();
if( support != null ) {
try {
// Provision a VM and adopt whatever data center it was placed in.
String id = provisionVM(support, DaseinTestManager.STATEFUL, "Dasein Stateless VM", "dsnstfvm", null);
VirtualMachine vm = support.getVirtualMachine(id);
if( vm != null ) {
testDataCenterId = vm.getProviderDataCenterId();
return testDataCenterId;
}
} catch( Throwable ignore ) {
// ignore me
}
}
}
}
return null;
}
/**
 * Returns the test machine image registered under {@code label}. For the
 * STATELESS label, any registered image (except removed ones) qualifies.
 * Otherwise, optionally provisions a new image when none is registered.
 *
 * @param label           the test label to look up
 * @param provisionIfNull whether to provision an image when none exists
 * @return an image ID, or {@code null} when none is available
 */
public @Nullable String getTestImageId( @Nonnull String label, boolean provisionIfNull ) {
String id = testMachineImages.get(label);
if( id == null ) {
if( label.equals(DaseinTestManager.STATELESS) ) {
for( Map.Entry<String, String> entry : testMachineImages.entrySet() ) {
// NOTE(review): this uses equals(REMOVED) while the snapshot/VM
// lookups use startsWith(REMOVED) -- confirm which matching is
// intended for removed-resource labels.
if( !entry.getKey().equals(DaseinTestManager.REMOVED) ) {
id = entry.getValue();
if( id != null ) {
return id;
}
}
}
return null;
}
if( provisionIfNull ) {
ComputeServices services = provider.getComputeServices();
if( services != null ) {
MachineImageSupport support = services.getImageSupport();
if( support != null ) {
try {
return provisionImage(support, label, "dsnimg", null);
} catch( Throwable ignore ) {
return null;
}
}
}
}
}
return id;
}
/**
 * Returns the test snapshot registered under {@code label}. For the
 * STATELESS label, any non-removed registered snapshot qualifies, falling
 * back to discovery via {@link #findStatelessSnapshot()}. Otherwise,
 * optionally provisions a new snapshot when none is registered.
 *
 * @param label           the test label to look up
 * @param provisionIfNull whether to provision a snapshot when none exists
 * @return a snapshot ID, or {@code null} when none is available
 */
public @Nullable String getTestSnapshotId( @Nonnull String label, boolean provisionIfNull ) {
    if( label.equals(DaseinTestManager.STATELESS) ) {
        for( Map.Entry<String, String> entry : testSnapshots.entrySet() ) {
            if( entry.getKey().startsWith(DaseinTestManager.REMOVED) ) {
                continue;
            }
            String registered = entry.getValue();
            if( registered != null ) {
                return registered;
            }
        }
        return findStatelessSnapshot();
    }
    String existing = testSnapshots.get(label);
    if( existing != null ) {
        return existing;
    }
    if( !provisionIfNull ) {
        return null;
    }
    ComputeServices services = provider.getComputeServices();
    if( services == null ) {
        return null;
    }
    SnapshotSupport support = services.getSnapshotSupport();
    if( support == null ) {
        return null;
    }
    try {
        return provisionSnapshot(support, label, "dsnsnap" + ( System.currentTimeMillis() % 10000 ), null);
    } catch( Throwable ignore ) {
        return null;
    }
}
/**
 * Convenience overload of {@code getTestVmId} using the default VM name
 * prefix "dsnvm".
 */
public @Nullable String getTestVmId(@Nonnull String label, @Nullable VmState desiredState, boolean provisionIfNull, @Nullable String preferredDataCenterId) {
return getTestVmId(label, "dsnvm", desiredState, provisionIfNull, preferredDataCenterId);
}
/**
 * Returns the ID of a test VM registered under {@code label}, optionally
 * provisioning one and transitioning it to a desired state. For the
 * STATELESS label, any registered non-terminated VM qualifies.
 *
 * @param label                 the test label to look up
 * @param vmName                host name to use when provisioning
 * @param desiredState          state to transition the VM into, or null
 * @param provisionIfNull       whether to provision when none is registered
 * @param preferredDataCenterId preferred data center for provisioning
 * @return a VM ID, or {@code null} when none is available
 */
public @Nullable String getTestVmId( @Nonnull String label, @Nonnull String vmName, @Nullable VmState desiredState, boolean provisionIfNull, @Nullable String preferredDataCenterId ) {
if( label.equals(DaseinTestManager.STATELESS) ) {
for( Map.Entry<String, String> entry : testVMs.entrySet() ) {
if( !entry.getKey().startsWith(DaseinTestManager.REMOVED) ) {
String id = entry.getValue();
if( id != null ) {
try {
@SuppressWarnings("ConstantConditions") VirtualMachine vm = provider.getComputeServices().getVirtualMachineSupport().getVirtualMachine(id);
if( vm != null && !VmState.TERMINATED.equals(vm.getCurrentState()) ) {
return id;
}
} catch( Throwable ignore ) {
// ignore
}
}
}
}
return null;
}
String id = testVMs.get(label);
if( id == null && !provisionIfNull ) {
return null;
}
ComputeServices services = provider.getComputeServices();
if( services != null ) {
VirtualMachineSupport support = services.getVirtualMachineSupport();
if( support != null ) {
try {
VirtualMachine vm = ( id == null ? null : support.getVirtualMachine(id) );
// Provision a replacement when the registered VM is gone or terminated.
if( ( vm == null || VmState.TERMINATED.equals(vm.getCurrentState()) ) && provisionIfNull ) {
id = provisionVM(support, label, "testvm-" + label, vmName, preferredDataCenterId);
vm = support.getVirtualMachine(id);
}
if( vm != null && desiredState != null ) {
setState(support, vm, desiredState);
}
return id;
} catch( Throwable t ) {
try {
if( support.isSubscribed() ) {
logger.warn("Unable to provision test virtual machine under label " + label + ": " + t.getMessage());
}
} catch( Throwable ignore ) {
// ignore
}
}
}
}
return null;
}
/**
 * Returns the ID of a test VM attached to the given VLAN (any VLAN when
 * {@code vlanId} is null), optionally provisioning one into an existing or
 * freshly created subnet. For the STATELESS label, any registered live VM
 * with a matching VLAN qualifies.
 *
 * <p>Fix: the final VLAN check previously dereferenced {@code vm} (and its
 * VLAN ID) without a null guard; the resulting NPE was swallowed by the
 * catch block and surfaced only as a misleading "unable to provision"
 * warning. The check is now null-safe.</p>
 *
 * @param label                 the test label to look up
 * @param desiredState          state to transition the VM into, or null
 * @param vlanId                VLAN the VM must belong to, or null for any
 * @param provisionIfNull       whether to provision when no match exists
 * @param preferredDataCenterId preferred data center for provisioning
 * @return a matching VM ID, or {@code null} when none is available
 */
public @Nullable String getTestVLANVmId(@Nonnull String label, @Nullable VmState desiredState, @Nullable String vlanId, boolean provisionIfNull, @Nullable String preferredDataCenterId) {
    if( label.equals(DaseinTestManager.STATELESS) ) {
        for( Map.Entry<String,String> entry : testVMs.entrySet() ) {
            if( !entry.getKey().startsWith(DaseinTestManager.REMOVED) ) {
                String id = entry.getValue();
                if( id != null ) {
                    try {
                        @SuppressWarnings("ConstantConditions") VirtualMachine vm = provider.getComputeServices().getVirtualMachineSupport().getVirtualMachine(id);
                        if( vm != null && !VmState.TERMINATED.equals(vm.getCurrentState()) && vm.getProviderVlanId() != null ) {
                            if( vlanId == null || vm.getProviderVlanId().equalsIgnoreCase(vlanId) ) {
                                return id;
                            }
                        }
                    } catch( Throwable ignore ) {
                        // ignore
                    }
                }
            }
        }
        return null;
    }
    String id = testVMs.get(label);
    if( id == null && !provisionIfNull ) {
        return null;
    }
    ComputeServices services = provider.getComputeServices();
    if( services != null ) {
        VirtualMachineSupport support = services.getVirtualMachineSupport();
        if( support != null ) {
            try {
                VirtualMachine vm = (id == null ? null : support.getVirtualMachine(id));
                // Provision a replacement when the registered VM is gone,
                // terminated, or not in the requested VLAN.
                if( (vm == null || VmState.TERMINATED.equals(vm.getCurrentState()) || vm.getProviderVlanId() == null || !vm.getProviderVlanId().equalsIgnoreCase(vlanId)) && provisionIfNull ) {
                    String testImageId = getTestImageId(DaseinTestManager.STATELESS, false);
                    if( testImageId == null ) {
                        throw new CloudException("No test image exists for provisioning a virtual machine");
                    }
                    long now = System.currentTimeMillis();
                    String name = "dasein-test-" + label + " " + now;
                    String host = "dsnvm" + (now%10000);
                    VMLaunchOptions vmOpts = VMLaunchOptions.getInstance(testVMProductId, testImageId, name, host, "Test VM for stateful integration tests for Dasein Cloud").withExtendedAnalytics();
                    NetworkResources network = DaseinTestManager.getNetworkResources();
                    if( vlanId != null ) {
                        // Launch into an existing subnet of the requested VLAN,
                        // creating one if the VLAN has no subnets yet.
                        NetworkServices ns = provider.getNetworkServices();
                        VLANSupport vs = ns.getVlanSupport();
                        VLAN v = vs.getVlan(vlanId);
                        Iterable<Subnet> subnets = vs.listSubnets(vlanId);
                        if( subnets.iterator().hasNext() ) {
                            Subnet sub = subnets.iterator().next();
                            vmOpts.inSubnet( null, v.getProviderDataCenterId(), sub.getProviderVlanId(), sub.getProviderSubnetId());
                        } else {
                            Subnet sub = vs.createSubnet(SubnetCreateOptions.getInstance(vlanId, "192.168.50.0/24", "dsnsub", "dasein test create vm for vlan"));
                            vmOpts.inSubnet( null, v.getProviderDataCenterId(), sub.getProviderVlanId(), sub.getProviderSubnetId());
                        }
                    } else {
                        // No VLAN requested: use (or create) the shared test
                        // network and subnet from NetworkResources.
                        if( network != null ) {
                            String networkId = network.getTestVLANId(DaseinTestManager.STATEFUL, true, preferredDataCenterId);
                            if( networkId == null ) {
                                networkId = network.getTestVLANId(DaseinTestManager.STATELESS, false, preferredDataCenterId);
                            }
                            // wait for network to be ready
                            try {
                                Thread.sleep(10000L);
                            }
                            catch( InterruptedException ignore ) {
                            }
                            if( networkId != null ) {
                                String subnetId = network.getTestSubnetId(DaseinTestManager.STATEFUL, true, networkId, preferredDataCenterId);
                                if( subnetId == null ) {
                                    subnetId = network.getTestSubnetId(DaseinTestManager.STATELESS, true, networkId, preferredDataCenterId);
                                }
                                if( subnetId != null ) {
                                    // wait for subnet to be ready
                                    try { Thread.sleep(10000L); }
                                    catch( InterruptedException ignore ) { }
                                    @SuppressWarnings("ConstantConditions") Subnet subnet = provider.getNetworkServices().getVlanSupport().getSubnet(subnetId);
                                    if( subnet != null ) {
                                        String dcId = subnet.getProviderDataCenterId();
                                        if( dcId == null ) {
                                            // Subnet is not DC-bound: pick the last
                                            // active+available data center.
                                            for( DataCenter dc : provider.getDataCenterServices().listDataCenters(provider.getContext().getRegionId()) ) {
                                                if( (dc.isActive() && dc.isAvailable()) || dcId == null ) {
                                                    dcId = dc.getProviderDataCenterId();
                                                }
                                            }
                                        }
                                        vmOpts.inSubnet(null, dcId, vlanId, subnetId);
                                    }
                                }
                            }
                        }
                    }
                    id = provisionVM(support, label, vmOpts, preferredDataCenterId);
                    vm = support.getVirtualMachine(id);
                }
                if( vm != null && desiredState != null ) {
                    setState(support, vm, desiredState);
                }
                // Null-safe final check: only return the ID when the VM exists
                // and sits in the requested VLAN (or no VLAN was requested).
                if( id != null && vlanId != null ) {
                    return ( vm != null && vlanId.equalsIgnoreCase(vm.getProviderVlanId()) ) ? id : null;
                }
                return id;
            }
            catch( Throwable t ) {
                try {
                    if( support.isSubscribed() ) {
                        logger.warn("Unable to provision test virtual machine under label " + label + ": " + t.getMessage());
                    }
                }
                catch( Throwable ignore ) {
                    // ignore
                }
            }
        }
    }
    return null;
}
/**
 * @return the default VM product ID selected for this test run, or
 *         {@code null} if none has been chosen yet
 */
public @Nullable String getTestVMProductId() {
return testVMProductId;
}
/**
 * Returns the test volume registered under {@code label}. For the STATELESS
 * label, any non-removed registered volume qualifies. Otherwise, optionally
 * provisions a new volume when none is registered.
 *
 * @param label                 the test label to look up
 * @param provisionIfNull       whether to provision when none exists
 * @param desiredFormat         volume format to request when provisioning
 * @param preferredDataCenterId preferred data center for provisioning
 * @return a volume ID, or {@code null} when none is available
 */
public @Nullable String getTestVolumeId( @Nonnull String label, boolean provisionIfNull, @Nullable VolumeFormat desiredFormat, @Nullable String preferredDataCenterId ) {
if( label.equals(DaseinTestManager.STATELESS) ) {
for( Map.Entry<String, String> entry : testVolumes.entrySet() ) {
// NOTE(review): equals(REMOVED) here vs startsWith(REMOVED) in the
// snapshot/VM lookups -- confirm which matching is intended.
if( !entry.getKey().equals(DaseinTestManager.REMOVED) ) {
String id = entry.getValue();
if( id != null ) {
return id;
}
}
}
return null;
}
String id = testVolumes.get(label);
if( id != null ) {
return id;
}
if( provisionIfNull ) {
ComputeServices services = provider.getComputeServices();
if( services != null ) {
VolumeSupport support = services.getVolumeSupport();
if( support != null ) {
try {
return provisionVolume(support, label, "dsnvol" + ( System.currentTimeMillis() % 10000 ), desiredFormat, preferredDataCenterId);
} catch( Throwable ignore ) {
return null;
}
}
}
}
return null;
}
/**
 * @return the default volume product ID selected for this test run, or
 *         {@code null} if none has been chosen yet
 */
public @Nullable String getTestVolumeProductId() {
return testVolumeProductId;
}
/**
 * Discovers default compute resources for the test run: a default VM product per supported
 * architecture, a default machine image (preferring clean, ACTIVE images on common platforms),
 * a default volume product, and pre-existing stateless VM/volume candidates. All discovery is
 * best-effort; failures are swallowed and simply leave the corresponding default unset.
 */
public void init() {
    ComputeServices computeServices = provider.getComputeServices();
    // initialise available architectures
    Iterable<Architecture> architectures = Collections.emptyList();
    if( computeServices != null && computeServices.getVirtualMachineSupport() != null ) {
        try {
            architectures = computeServices.getVirtualMachineSupport().getCapabilities().listSupportedArchitectures();
        } catch( InternalException e ) {
            // best-effort: fall back to an empty architecture list
        } catch( CloudException e ) {
            // best-effort: fall back to an empty architecture list
        }
    }
    // optional test-scoped data center constraint (may be null)
    String dataCenterId = System.getProperty("test.dataCenter");
    if( computeServices != null ) {
        Map<Architecture, VirtualMachineProduct> productMap = new HashMap<Architecture, VirtualMachineProduct>();
        VirtualMachineSupport vmSupport = computeServices.getVirtualMachineSupport();
        if( vmSupport != null ) {
            try {
                // Pick a "reasonable" default product per architecture: CURRENT status only,
                // biased toward modest RAM/CPU (heuristics below favor ~1-2GB RAM products)
                for( Architecture architecture : architectures ) {
                    VirtualMachineProduct defaultProduct = null;
                    try {
                        VirtualMachineProductFilterOptions options = VirtualMachineProductFilterOptions.getInstance().withDataCenterId(dataCenterId);
                        for( VirtualMachineProduct product : vmSupport.listProducts(options, architecture) ) {
                            if( !product.getStatus().equals(VirtualMachineProduct.Status.CURRENT) ) {
                                continue;
                            }
                            if( defaultProduct == null ) {
                                defaultProduct = product;
                            }
                            else if( defaultProduct.getRamSize().intValue() > product.getRamSize().intValue() ) {
                                // prefer the smaller product as long as it still has > 1000MB RAM
                                if( product.getRamSize().intValue() > 1000 ) {
                                    defaultProduct = product;
                                }
                            }
                            else {
                                if( defaultProduct.getRamSize().intValue() < 1024 && product.getRamSize().intValue() < 2200 ) {
                                    defaultProduct = product;
                                }
                                else if( defaultProduct.getCpuCount() > product.getCpuCount() ) {
                                    if( ( defaultProduct.getRamSize().intValue() * 2 ) > product.getRamSize().intValue() ) {
                                        defaultProduct = product;
                                    }
                                }
                            }
                        }
                    } catch( Throwable ignore ) {
                        // ignore
                    }
                    productMap.put(architecture, defaultProduct);
                }
            } catch( Throwable ignore ) {
                // ignore
            }
        }
        MachineImageSupport imageSupport = computeServices.getImageSupport();
        if( imageSupport != null ) {
            // does this cloud support volume-backed images? if so, prefer those below
            boolean volumeBased = false;
            try {
                for( MachineImageType type : imageSupport.getCapabilities().listSupportedImageTypes() ) {
                    if( type.equals(MachineImageType.VOLUME) ) {
                        volumeBased = true;
                        break;
                    }
                }
            } catch( Throwable ignore ) {
                // ignore
            }
            for( Architecture architecture : architectures ) {
                VirtualMachineProduct currentProduct = productMap.get(architecture);
                if( currentProduct != null ) {
                    // Let WINDOWS come first for a greater chance of StatelessVMTests#getVMPassword to work
                    for( Platform platform : new Platform[]{Platform.UBUNTU, Platform.WINDOWS, Platform.COREOS, Platform.CENT_OS, Platform.RHEL} ) {
                        ImageFilterOptions options = ImageFilterOptions.getInstance(ImageClass.MACHINE).withArchitecture(architecture).onPlatform(platform);
                        try {
                            // first try the account's own/visible images
                            for( MachineImage image : imageSupport.listImages(options) ) {
                                // "".equals(getSoftware()) filters out images with bundled software
                                if( MachineImageState.ACTIVE.equals(image.getCurrentState()) && "".equals(image.getSoftware()) ) {
                                    testVMProductId = currentProduct.getProviderProductId();
                                    testMachineImages.put(DaseinTestManager.STATELESS, image.getProviderMachineImageId());
                                    testImagePlatform = image.getPlatform();
                                    if( !volumeBased || image.getType().equals(MachineImageType.VOLUME) ) {
                                        break;
                                    }
                                }
                            }
                        } catch( Throwable ignore ) {
                            // ignore
                        }
                        if( testVMProductId != null ) {
                            break;
                        }
                        // fall back to a public image search for the same criteria
                        options = ImageFilterOptions.getInstance(ImageClass.MACHINE).withArchitecture(architecture).onPlatform(platform);
                        try {
                            for( MachineImage image : imageSupport.searchPublicImages(options) ) {
                                if( MachineImageState.ACTIVE.equals(image.getCurrentState()) && "".equals(image.getSoftware()) ) {
                                    testVMProductId = currentProduct.getProviderProductId();
                                    testMachineImages.put(DaseinTestManager.STATELESS, image.getProviderMachineImageId());
                                    testImagePlatform = image.getPlatform();
                                    if( !volumeBased || image.getType().equals(MachineImageType.VOLUME) ) {
                                        break;
                                    }
                                }
                            }
                        } catch( Throwable ignore ) {
                            // ignore
                        }
                    }
                    if( testVMProductId != null ) {
                        break;
                    }
                }
            }
        }
        VolumeSupport volumeSupport = computeServices.getVolumeSupport();
        if( volumeSupport != null ) {
            try {
                // Pick a default volume product: smallest size >= 20GB when size is fixed by
                // product, otherwise the cheapest per-GB monthly cost
                VolumeProduct defaultProduct = null;
                for( VolumeProduct product : volumeSupport.listVolumeProducts() ) {
                    if( defaultProduct == null ) {
                        defaultProduct = product;
                    }
                    else {
                        if( volumeSupport.getCapabilities().isVolumeSizeDeterminedByProduct() ) {
                            if( product.getVolumeSize().intValue() < defaultProduct.getVolumeSize().intValue() && product.getVolumeSize().intValue() >= 20 ) {
                                defaultProduct = product;
                            }
                        }
                        else {
                            if( product.getMonthlyGigabyteCost() > 0.00 ) {
                                if( product.getMonthlyGigabyteCost() < defaultProduct.getMonthlyGigabyteCost() ) {
                                    defaultProduct = product;
                                }
                            }
                        }
                    }
                }
                if( defaultProduct != null ) {
                    testVolumeProductId = defaultProduct.getProviderProductId();
                }
            } catch( Throwable ignore ) {
                // ignore me
            }
        }
        if( vmSupport != null ) {
            try {
                // register a pre-existing RUNNING VM in the test data center for stateless tests
                for( VirtualMachine vm : vmSupport.listVirtualMachines() ) {
                    if (( vm.getProviderDataCenterId().equals(dataCenterId)) && ( VmState.RUNNING.equals(vm.getCurrentState()) )) { // no guarantee of being in the same datacenter
                        testVMs.put(DaseinTestManager.STATELESS, vm.getProviderVirtualMachineId());
                        break;
                    }
                }
            } catch( Throwable ignore ) {
                // ignore
            }
        }
        if( volumeSupport != null ) {
            try {
                // register a pre-existing volume, preferring one that is both AVAILABLE and attached
                Volume defaultVolume = null;
                for( Volume volume : volumeSupport.listVolumes() ) {
                    if (( volume.getProviderDataCenterId().equals(dataCenterId)) && ( VolumeState.AVAILABLE.equals(volume.getCurrentState()) || defaultVolume == null )) {
                        if( defaultVolume == null || volume.isAttached() ) {
                            defaultVolume = volume;
                        }
                        if( VolumeState.AVAILABLE.equals(defaultVolume.getCurrentState()) && defaultVolume.isAttached() ) {
                            break;
                        }
                    }
                }
                if( defaultVolume != null ) {
                    testVolumes.put(DaseinTestManager.STATELESS, defaultVolume.getProviderVolumeId());
                }
            } catch( Throwable ignore ) {
                // ignore
            }
        }
    }
}
/**
 * Provisions a machine image from a virtual machine, either by direct image capture or — when
 * capture is unsupported but remote bundling is — by bundling. The new image is tracked under
 * the given label for teardown at the end of the suite.
 *
 * @param support    the machine image support used to create the image
 * @param label      the label under which to track the new image (uniquified if already taken)
 * @param namePrefix prefix for the new image's name
 * @param vmId       the VM to image, or {@code null} to use/provision a stateful test VM
 * @return the ID of the newly created machine image
 * @throws CloudException    no VM could be identified, or no supported imaging mechanism exists
 * @throws InternalException an error occurred within Dasein Cloud while imaging
 */
public @Nonnull String provisionImage( @Nonnull MachineImageSupport support, @Nonnull String label, @Nonnull String namePrefix, @Nullable String vmId ) throws CloudException, InternalException {
    VirtualMachineSupport vmSupport = null;
    ComputeServices services = provider.getComputeServices();
    if( services != null ) {
        vmSupport = services.getVirtualMachineSupport();
    }
    if( vmSupport == null ) {
        throw new CloudException("Unable to provision a machine image because Dasein Cloud is showing no VM support");
    }
    if( vmId == null ) {
        vmId = getTestVmId(DaseinTestManager.STATEFUL, VmState.RUNNING, true, null);
        if( vmId == null ) {
            throw new CloudException("Could not identify a VM for imaging");
        }
    }
    VirtualMachine vm = vmSupport.getVirtualMachine(vmId);
    if( vm == null ) {
        throw new CloudException("Could not identify a VM for imaging");
    }
    String imageId = vm.getProviderMachineImageId();
    MachineImage image = support.getImage(imageId);
    // NOTE(review): when the source image cannot be resolved (image == null) this still
    // attempts direct capture — presumably an optimistic default; confirm intent
    if( image == null || support.getCapabilities().supportsImageCapture(image.getType()) ) {
        String id = ImageCreateOptions.getInstance(vm, namePrefix + ( System.currentTimeMillis() % 10000 ), "Test machine image with label " + label).build(provider);
        synchronized ( testMachineImages ) {
            // uniquify the label if it is already in use
            while( testMachineImages.containsKey(label) ) {
                label = label + random.nextInt(9);
            }
            testMachineImages.put(label, id);
        }
        return id;
    }
    else if( !support.getCapabilities().identifyLocalBundlingRequirement().equals(Requirement.REQUIRED) ) {
        // capture unsupported: try bundling with the first supported format, if any
        Iterator<MachineImageFormat> formats = support.getCapabilities().listSupportedFormatsForBundling().iterator();
        MachineImageFormat format = ( formats.hasNext() ? formats.next() : null );
        if( format != null ) {
            String id = support.bundleVirtualMachine(vmId, format, "dsnimg" + ( System.currentTimeMillis() % 100000 ), "dsnimg");
            synchronized ( testMachineImages ) {
                while( testMachineImages.containsKey(label) ) {
                    label = label + random.nextInt(9);
                }
                testMachineImages.put(label, id);
            }
            return id;
        }
    }
    throw new CloudException("No mechanism exists for provisioning images from a virtual machine");
}
/**
 * Provisions a snapshot from a volume, tracking it under the given label for teardown. When the
 * cloud requires the source volume to be attached, this also attaches it to a test VM first.
 *
 * @param support    the snapshot support (used only for capability checks)
 * @param label      the label under which to track the snapshot (uniquified if already taken)
 * @param namePrefix prefix for the new snapshot's name
 * @param volumeId   the source volume, or {@code null} to use/provision a stateful test volume
 * @return the ID of the newly created snapshot
 * @throws CloudException    no source volume was available or snapshot creation failed
 * @throws InternalException an error occurred within Dasein Cloud while snapshotting
 */
public @Nonnull String provisionSnapshot( @SuppressWarnings("UnusedParameters") @Nonnull SnapshotSupport support, @Nonnull String label, @Nonnull String namePrefix, @Nullable String volumeId ) throws CloudException, InternalException {
    SnapshotCreateOptions options;
    if( volumeId == null ) {
        volumeId = getTestVolumeId(DaseinTestManager.STATEFUL + ( System.currentTimeMillis() % 1000 ), true, null, null);
        if( volumeId == null ) {
            throw new CloudException("No volume from which to create a snapshot");
        }
    }
    @SuppressWarnings("ConstantConditions") VolumeSupport vs = provider.getComputeServices().getVolumeSupport();
    if( vs != null ) {
        Volume volume = vs.getVolume(volumeId);
        if( volume != null ) {
            // poll (up to 20 minutes) until the volume settles into a usable terminal state
            long timeout = System.currentTimeMillis() + ( CalendarWrapper.MINUTE * 20L );
            while( timeout > System.currentTimeMillis() ) {
                try {
                    Thread.sleep(15000L);
                } catch( InterruptedException ignore ) {
                    // deliberate: polling continues despite interruption
                }
                try {
                    volume = vs.getVolume(volumeId);
                } catch( Throwable ignore ) {
                    // transient lookup failure; retry on next iteration
                }
                if( volume == null || volume.getCurrentState().equals(VolumeState.AVAILABLE) || volume.getCurrentState().equals(VolumeState.DELETED) ) {
                    break;
                }
            }
        }
        // some clouds require the volume to be attached to a VM before snapshotting
        if( volume != null && volume.getProviderVirtualMachineId() == null && support.getCapabilities().identifyAttachmentRequirement().equals(Requirement.REQUIRED) ) {
            String vmId = getTestVmId(DaseinTestManager.STATEFUL, VmState.RUNNING, true, volume.getProviderDataCenterId());
            if( vmId != null ) {
                @SuppressWarnings("ConstantConditions") VirtualMachine vm = provider.getComputeServices().getVirtualMachineSupport().getVirtualMachine(vmId);
                if( vm != null ) {
                    // try each candidate device ID until one attach succeeds
                    for( String deviceId : vs.getCapabilities().listPossibleDeviceIds(vm.getPlatform()) ) {
                        try {
                            vs.attach(volumeId, vmId, deviceId);
                            break;
                        } catch( Throwable ignore ) {
                            // ignore
                        }
                    }
                }
            }
        }
    }
    options = SnapshotCreateOptions.getInstanceForCreate(volumeId, namePrefix + ( System.currentTimeMillis() % 10000 ), "Dasein Snapshot Test " + label);
    String id = options.build(provider);
    if( id == null ) {
        throw new CloudException("Unable to create a snapshot");
    }
    synchronized ( testSnapshots ) {
        // uniquify the label if it is already in use
        while( testSnapshots.containsKey(label) ) {
            label = label + random.nextInt(9);
        }
        testSnapshots.put(label, id);
    }
    return id;
}
/**
 * Provisions multiple virtual machines from a single set of launch options, filling in any
 * cloud-required settings (bootstrap credentials, VLAN/subnet, static IPs, root volume product)
 * that the caller left unset. Each resulting VM is tracked for teardown at the end of the suite.
 *
 * @param support             the virtual machine support used to launch the VMs
 * @param label               base label under which the VMs are tracked (uniquified per VM)
 * @param options             the launch options; mutated in place to satisfy cloud requirements
 * @param preferredDataCenter the data center, if any is preferred, for the VMs
 * @param count               the number of VMs to launch
 * @return the IDs of the newly launched VMs
 * @throws CloudException    an error occurred with the cloud provider while launching
 * @throws InternalException an error occurred within Dasein Cloud while launching
 */
public @Nonnull Iterable<String> provisionManyVMs( @Nonnull VirtualMachineSupport support, @Nonnull String label, @Nonnull VMLaunchOptions options, @Nullable String preferredDataCenter, int count ) throws CloudException, InternalException {
    if( preferredDataCenter != null ) {
        options.inDataCenter(preferredDataCenter);
    }
    if( options.getBootstrapUser() == null && Requirement.REQUIRED.equals(support.getCapabilities().identifyPasswordRequirement(testImagePlatform)) ) {
        options.withBootstrapUser("dasein", "x" + random.nextInt(100000) + System.currentTimeMillis());
    }
    if( options.getBootstrapKey() == null && Requirement.REQUIRED.equals(support.getCapabilities().identifyShellKeyRequirement(testImagePlatform)) ) {
        IdentityResources identity = DaseinTestManager.getIdentityResources();
        if( identity != null ) {
            String keypairId = identity.getTestKeypairId(DaseinTestManager.STATEFUL, true);
            if( keypairId != null ) {
                // fixed: was withBoostrapKey (deprecated misspelled variant); use the
                // correctly-spelled method for consistency with provisionVM()
                options.withBootstrapKey(keypairId);
            }
        }
    }
    if( options.getVlanId() == null && Requirement.REQUIRED.equals(support.getCapabilities().identifyVlanRequirement()) ) {
        NetworkResources network = DaseinTestManager.getNetworkResources();
        if( network != null ) {
            String networkId = network.getTestVLANId(DaseinTestManager.STATEFUL, true, preferredDataCenter);
            if( networkId == null ) {
                networkId = network.getTestVLANId(DaseinTestManager.STATELESS, false, preferredDataCenter);
            }
            String subnetId = network.getTestSubnetId(DaseinTestManager.STATEFUL, true, networkId, preferredDataCenter);
            try {
                if( networkId != null || subnetId != null ) {
                    if( subnetId != null ) {
                        @SuppressWarnings("ConstantConditions") Subnet subnet = provider.getNetworkServices().getVlanSupport().getSubnet(subnetId);
                        if( subnet != null ) {
                            String dcId = subnet.getProviderDataCenterId();
                            if( dcId == null ) {
                                // no DC on the subnet: fall back to the last active+available DC in the region
                                for( DataCenter dc : provider.getDataCenterServices().listDataCenters(provider.getContext().getRegionId()) ) {
                                    if( ( dc.isActive() && dc.isAvailable() ) || dcId == null ) {
                                        dcId = dc.getProviderDataCenterId();
                                    }
                                }
                            }
                            options.inSubnet(null, dcId, networkId, subnetId);
                        }
                    }
                    else {
                        @SuppressWarnings("ConstantConditions") VLAN vlan = provider.getNetworkServices().getVlanSupport().getVlan(networkId);
                        if( vlan != null ) {
                            String dcId = vlan.getProviderDataCenterId();
                            if( dcId == null ) {
                                for( DataCenter dc : provider.getDataCenterServices().listDataCenters(provider.getContext().getRegionId()) ) {
                                    if( ( dc.isActive() && dc.isAvailable() ) || dcId == null ) {
                                        dcId = dc.getProviderDataCenterId();
                                        if (dcId.equals(preferredDataCenter)) // Go with preferred one, else go with last one.
                                            break;
                                    }
                                }
                            }
                            options.inVlan(null, dcId, networkId);
                        }
                    }
                }
            } catch( NullPointerException ignore ) {
                // ignore the fiasco
            }
        }
    }
    if( options.getStaticIpIds().length < 1 && Requirement.REQUIRED.equals(support.getCapabilities().identifyStaticIPRequirement()) ) {
        NetworkResources network = DaseinTestManager.getNetworkResources();
        if( network != null ) {
            String ipId;
            if( options.getVlanId() != null ) {
                ipId = network.getTestStaticIpId(label, true, null, true, options.getVlanId());
            }
            else {
                ipId = network.getTestStaticIpId(label, true, null, false, null);
            }
            if( ipId != null ) {
                options.withStaticIps(ipId);
            }
        }
    }
    if( options.getRootVolumeProductId() == null && Requirement.REQUIRED.equals(support.getCapabilities().identifyRootVolumeRequirement()) && testVolumeProductId != null ) {
        options.withRootVolumeProduct(testVolumeProductId);
    }
    options.withMetaData("dsntestcase", "true");
    Iterable<String> ids = options.buildMany(provider, count);
    for( String id : ids ) {
        synchronized ( testVMs ) {
            // uniquify the label per VM so each gets its own tracking entry
            while( testVMs.containsKey(label) ) {
                label = label + random.nextInt(9);
            }
            testVMs.put(label, id);
        }
    }
    return ids;
}
/**
 * Provisions a single virtual machine from launch options, filling in any cloud-required
 * settings (bootstrap credentials, VLAN/subnet, static IP, root volume product) the caller
 * left unset. The resulting VM is tracked under the label for teardown at the end of the suite.
 *
 * @param support             the virtual machine support used to launch the VM
 * @param label               the label under which the VM is tracked (uniquified if already taken)
 * @param options             the launch options; mutated in place to satisfy cloud requirements
 * @param preferredDataCenter the data center, if any is preferred, for the VM
 * @return the ID of the newly launched VM
 * @throws CloudException    an error occurred with the cloud provider while launching
 * @throws InternalException an error occurred within Dasein Cloud while launching
 */
public @Nonnull String provisionVM( @Nonnull VirtualMachineSupport support, @Nonnull String label, @Nonnull VMLaunchOptions options, @Nullable String preferredDataCenter ) throws CloudException, InternalException {
    if( preferredDataCenter != null ) {
        options.inDataCenter(preferredDataCenter);
    }
    if( options.getBootstrapUser() == null && Requirement.REQUIRED.equals(support.getCapabilities().identifyPasswordRequirement(testImagePlatform)) ) {
        options.withBootstrapUser("dasein", "x" + random.nextInt(100000) + System.currentTimeMillis());
    }
    if( options.getBootstrapKey() == null && Requirement.REQUIRED.equals(support.getCapabilities().identifyShellKeyRequirement(testImagePlatform)) ) {
        IdentityResources identity = DaseinTestManager.getIdentityResources();
        if( identity != null ) {
            String keypairId = identity.getTestKeypairId(DaseinTestManager.STATEFUL, true);
            if( keypairId != null ) {
                options.withBootstrapKey(keypairId);
            }
        }
    }
    if( options.getVlanId() == null && Requirement.REQUIRED.equals(support.getCapabilities().identifyVlanRequirement()) ) {
        NetworkResources network = DaseinTestManager.getNetworkResources();
        if( network != null ) {
            String networkId = network.getTestVLANId(label, true, preferredDataCenter);
            if( networkId == null ) {
                networkId = network.getTestVLANId(DaseinTestManager.STATELESS, false, preferredDataCenter);
            }
            String subnetId = network.getTestSubnetId(DaseinTestManager.STATEFUL, true, networkId, preferredDataCenter);
            try {
                if( networkId != null || subnetId != null ) {
                    if( subnetId != null ) {
                        @SuppressWarnings("ConstantConditions") Subnet subnet = provider.getNetworkServices().getVlanSupport().getSubnet(subnetId);
                        if( subnet != null ) {
                            String dcId = subnet.getProviderDataCenterId();
                            if( dcId == null ) {
                                // no DC on the subnet: fall back to the last active+available DC in the region
                                for( DataCenter dc : provider.getDataCenterServices().listDataCenters(provider.getContext().getRegionId()) ) {
                                    if( ( dc.isActive() && dc.isAvailable() ) || dcId == null ) {
                                        dcId = dc.getProviderDataCenterId();
                                    }
                                }
                            }
                            options.inSubnet(null, dcId, networkId, subnetId);
                        }
                    }
                    else {
                        @SuppressWarnings("ConstantConditions") VLAN vlan = provider.getNetworkServices().getVlanSupport().getVlan(networkId);
                        if( vlan != null ) {
                            String dcId = vlan.getProviderDataCenterId();
                            if( dcId == null ) {
                                if( preferredDataCenter != null ) // If we have a preferredDataCenter, lets run with it!
                                    dcId = preferredDataCenter;
                                else
                                    // fall back to scanning the region and keeping the last active+available DC found
                                    for( DataCenter dc : provider.getDataCenterServices().listDataCenters(provider.getContext().getRegionId()) ) {
                                        if( (dc.isActive() && dc.isAvailable()) || dcId == null ) {
                                            dcId = dc.getProviderDataCenterId();
                                        }
                                    }
                            }
                            options.inVlan(null, dcId, networkId);
                        }
                    }
                }
            } catch( NullPointerException ignore ) {
                // ignore the fiasco
            }
        }
    }
    if( options.getStaticIpIds().length < 1 && Requirement.REQUIRED.equals(support.getCapabilities().identifyStaticIPRequirement()) ) {
        NetworkResources network = DaseinTestManager.getNetworkResources();
        if( network != null ) {
            String ipId;
            if( options.getVlanId() != null ) {
                ipId = network.getTestStaticIpId(label, true, null, true, options.getVlanId());
            }
            else {
                ipId = network.getTestStaticIpId(label, true, null, false, null);
            }
            if( ipId != null ) {
                options.withStaticIps(ipId);
            }
        }
    }
    if( options.getRootVolumeProductId() == null && Requirement.REQUIRED.equals(support.getCapabilities().identifyRootVolumeRequirement()) && testVolumeProductId != null ) {
        options.withRootVolumeProduct(testVolumeProductId);
    }
    options.withMetaData("dsntestcase", "true");
    String id = options.build(provider);
    synchronized ( testVMs ) {
        // uniquify the label if it is already in use
        while( testVMs.containsKey(label) ) {
            label = label + random.nextInt(9);
        }
        testVMs.put(label, id);
    }
    return id;
}
/**
 * Provisions a virtual machine and returns the ID of the new virtual machine. This method tracks
 * the newly provisioned virtual machine and will tear it down at the end of the test suite. It
 * delegates to {@link #provisionVM(VirtualMachineSupport, String, VMLaunchOptions, String)} with
 * default launch options built from the stateless test image and test VM product.
 *
 * @param support             the virtual machine support object used to provision the VM
 * @param label               the label to store the VM under for re-use
 * @param namePrefix          a prefix for the friendly name of the VM
 * @param hostPrefix          a prefix for the host name of the VM
 * @param preferredDataCenter the data center, if any is preferred, in which the VM should be provisioned
 * @return the ID for the new VM
 * @throws CloudException    an error occurred with the cloud provider in provisioning the VM
 * @throws InternalException an error occurred within Dasein Cloud provisioning the VM
 */
public @Nonnull String provisionVM( @Nonnull VirtualMachineSupport support, @Nonnull String label, @Nonnull String namePrefix, @Nonnull String hostPrefix, @Nullable String preferredDataCenter ) throws CloudException, InternalException {
    String imageId = getTestImageId(DaseinTestManager.STATELESS, false);
    if( imageId == null ) {
        throw new CloudException("No test image exists for provisioning a virtual machine");
    }
    long timestamp = System.currentTimeMillis();
    String friendlyName = namePrefix + "-" + timestamp;
    String hostName = hostPrefix + ( timestamp % 10000 );
    // exercise null, empty, and regular tag values in the metadata round-trip
    Map<String, Object> tags = new HashMap<String, Object>();
    tags.put("dsnNullTag", null);
    tags.put("dsnEmptyTag", "");
    tags.put("dsnExtraTag", "extra");
    VMLaunchOptions launchOptions = VMLaunchOptions.getInstance(testVMProductId, imageId, friendlyName, hostName, "Test VM for stateful integration tests for Dasein Cloud");
    launchOptions = launchOptions.withExtendedAnalytics().withMetaData(tags).withUserData("#!/bin/bash\necho \"dasein\"");
    return provisionVM(support, label, launchOptions, preferredDataCenter);
}
/**
 * Provisions multiple virtual machines with default launch options built from the stateless
 * test image and test VM product, delegating to
 * {@link #provisionManyVMs(VirtualMachineSupport, String, VMLaunchOptions, String, int)}.
 *
 * @param support             the virtual machine support object used to provision the VMs
 * @param label               the base label under which the VMs are tracked
 * @param namePrefix          a prefix for the friendly names of the VMs
 * @param hostPrefix          a prefix for the host names of the VMs
 * @param preferredDataCenter the data center, if any is preferred, for the VMs
 * @param count               the number of VMs to launch
 * @return the IDs of the newly launched VMs
 * @throws CloudException    an error occurred with the cloud provider in provisioning
 * @throws InternalException an error occurred within Dasein Cloud in provisioning
 */
public @Nonnull Iterable<String> provisionManyVMs( @Nonnull VirtualMachineSupport support, @Nonnull String label, @Nonnull String namePrefix, @Nonnull String hostPrefix, @Nullable String preferredDataCenter, int count ) throws CloudException, InternalException {
    String imageId = getTestImageId(DaseinTestManager.STATELESS, false);
    if( imageId == null ) {
        throw new CloudException("No test image exists for provisioning a virtual machine");
    }
    long timestamp = System.currentTimeMillis();
    String friendlyName = namePrefix + " " + timestamp;
    String hostName = hostPrefix + ( timestamp % 10000 );
    VMLaunchOptions launchOptions = VMLaunchOptions.getInstance(testVMProductId, imageId, friendlyName, hostName, "Test VM for stateful integration tests for Dasein Cloud");
    return provisionManyVMs(support, label, launchOptions.withExtendedAnalytics(), preferredDataCenter, count);
}
/**
 * Provisions a volume for test use, choosing a format (preferring BLOCK), sizing it according to
 * the cloud's product/minimum-size rules, and attaching a test VLAN for network (NFS) volumes.
 * The new volume is tracked under the label for teardown at the end of the suite.
 *
 * @param support               the volume support used to create the volume
 * @param label                 the label under which to track the volume (uniquified if already taken)
 * @param namePrefix            prefix for the new volume's name
 * @param desiredFormat         the desired format, or {@code null} to auto-select (BLOCK if supported, else NFS)
 * @param preferredDataCenterId the data center in which the volume should preferably be created, if any
 * @return the ID of the newly created volume
 * @throws CloudException    an error occurred with the cloud provider while creating the volume
 * @throws InternalException an error occurred within Dasein Cloud while creating the volume
 */
public @Nonnull String provisionVolume( @Nonnull VolumeSupport support, @Nonnull String label, @Nonnull String namePrefix, @Nullable VolumeFormat desiredFormat, @Nullable String preferredDataCenterId ) throws CloudException, InternalException {
    VolumeCreateOptions options;
    if( desiredFormat == null ) {
        // prefer BLOCK when the cloud supports it; otherwise fall back to NFS
        for( VolumeFormat fmt : support.getCapabilities().listSupportedFormats() ) {
            if( fmt.equals(VolumeFormat.BLOCK) ) {
                desiredFormat = VolumeFormat.BLOCK;
                break;
            }
        }
        if( desiredFormat == null ) {
            desiredFormat = VolumeFormat.NFS;
        }
    }
    if( support.getCapabilities().getVolumeProductRequirement().equals(Requirement.REQUIRED) && testVolumeProductId != null ) {
        // a volume product is required: size comes from the product when the cloud fixes size
        // by product, otherwise the cloud's minimum volume size
        Storage<Gigabyte> size;
        if( support.getCapabilities().isVolumeSizeDeterminedByProduct() ) {
            VolumeProduct prd = null;
            for( VolumeProduct product : support.listVolumeProducts() ) {
                if( product.getProviderProductId().equals(testVolumeProductId) ) {
                    prd = product;
                    break;
                }
            }
            if( prd != null ) {
                size = prd.getVolumeSize();
                if( size == null ) {
                    size = support.getCapabilities().getMinimumVolumeSize();
                }
            }
            else {
                size = support.getCapabilities().getMinimumVolumeSize();
            }
        }
        else {
            size = support.getCapabilities().getMinimumVolumeSize();
        }
        if( desiredFormat.equals(VolumeFormat.BLOCK) ) {
            options = VolumeCreateOptions.getInstance(testVolumeProductId, size, namePrefix + ( System.currentTimeMillis() % 1000 ), "Dasein Cloud Integration Tests Volume Tests", 0);
        }
        else {
            // network (NFS) volumes need a VLAN when one is available
            NetworkResources network = DaseinTestManager.getNetworkResources();
            String testVlanId = null;
            if( network != null ) {
                testVlanId = network.getTestVLANId(DaseinTestManager.STATELESS, false, preferredDataCenterId);
            }
            if( testVlanId != null ) {
                options = VolumeCreateOptions.getNetworkInstance(testVolumeProductId, testVlanId, size, namePrefix + ( System.currentTimeMillis() % 10000 ), "Dasein Cloud Integration Tests Volume Tests", 0);
            }
            else {
                options = VolumeCreateOptions.getInstance(testVolumeProductId, size, namePrefix + ( System.currentTimeMillis() % 1000 ), "Dasein Cloud Integration Tests Volume Tests", 0);
            }
        }
    }
    else {
        // no product required: use the cloud's minimum volume size
        if( desiredFormat.equals(VolumeFormat.BLOCK) ) {
            options = VolumeCreateOptions.getInstance(support.getCapabilities().getMinimumVolumeSize(), namePrefix + ( System.currentTimeMillis() % 10000 ), "Dasein Test Integration tests volume");
        }
        else {
            NetworkResources network = DaseinTestManager.getNetworkResources();
            String testVlanId = null;
            if( network != null ) {
                testVlanId = network.getTestVLANId(DaseinTestManager.STATELESS, false, preferredDataCenterId);
            }
            if( testVlanId != null ) {
                options = VolumeCreateOptions.getNetworkInstance(testVlanId, support.getCapabilities().getMinimumVolumeSize(), namePrefix + ( System.currentTimeMillis() % 10000 ), "Dasein Cloud Integration Tests Volume Tests");
            }
            else {
                options = VolumeCreateOptions.getInstance(support.getCapabilities().getMinimumVolumeSize(), namePrefix + ( System.currentTimeMillis() % 1000 ), "Dasein Cloud Integration Tests Volume Tests");
            }
        }
    }
    if( preferredDataCenterId == null ) {
        preferredDataCenterId = getTestDataCenterId(false);
    }
    if( preferredDataCenterId != null ) {
        options.inDataCenter(preferredDataCenterId);
    }
    options.withMetaData("dsntestcase", "true");
    if (support.getCapabilities().requiresVMOnCreate().equals(Requirement.REQUIRED)) {
        // some clouds only allow volume creation against an existing VM
        String testVmId = getTestVmId(DaseinTestManager.STATEFUL, VmState.STOPPED, true, testDataCenterId);
        options.withVirtualMachineId(testVmId);
    }
    String id = options.build(provider);
    Volume volume = support.getVolume(id);
    // remember the data center of the first volume as the default test data center
    if( volume != null && testDataCenterId == null ) {
        testDataCenterId = volume.getProviderDataCenterId();
    }
    synchronized ( testVolumes ) {
        // uniquify the label if it is already in use
        while( testVolumes.containsKey(label) ) {
            label = label + random.nextInt(9);
        }
        testVolumes.put(label, id);
    }
    return id;
}
/**
 * Attempts to transition a virtual machine into the desired state, waiting out transitional
 * states (up to 20 minutes each phase) and recursing through RUNNING when the target state
 * (PAUSED/STOPPED/SUSPENDED) can only be reached from RUNNING.
 *
 * @param support the virtual machine support used to query and change state
 * @param vm      the virtual machine to transition (re-fetched during polling)
 * @param state   the desired state; TERMINATED is never actively pursued
 * @return {@code true} if the VM reached the desired state within the timeout
 */
private boolean setState( @Nonnull VirtualMachineSupport support, @Nonnull VirtualMachine vm, @Nonnull VmState state ) {
    VmState currentState = vm.getCurrentState();
    if( state.equals(currentState) ) {
        return true;
    }
    if( state.equals(VmState.TERMINATED) ) {
        // never terminate here; termination is handled by teardown
        return false;
    }
    String id = vm.getProviderVirtualMachineId();
    // phase 1: wait for any in-flight transition to settle before acting
    long timeout = System.currentTimeMillis() + ( CalendarWrapper.MINUTE * 20L );
    while( timeout > System.currentTimeMillis() ) {
        if( !currentState.equals(VmState.PENDING) && !currentState.equals(VmState.PAUSING) && !currentState.equals(VmState.REBOOTING) && !currentState.equals(VmState.STOPPING) && !currentState.equals(VmState.SUSPENDING) ) {
            break;
        }
        try {
            Thread.sleep(15000L);
        } catch( InterruptedException ignore ) {
            // deliberate: polling continues despite interruption
        }
        try {
            VirtualMachine v = support.getVirtualMachine(id);
            if( v == null ) {
                // VM disappeared: success only if termination was the goal (it never is here)
                return state.equals(VmState.TERMINATED);
            }
            vm = v;
            currentState = vm.getCurrentState();
        } catch( Throwable ignore ) {
            // ignore
        }
    }
    // phase 2: issue the state-change operation, recursing through RUNNING where required
    try {
        if( state.equals(VmState.RUNNING) ) {
            if( currentState.equals(VmState.PAUSED) ) {
                support.unpause(id);
            }
            else if( currentState.equals(VmState.STOPPED) ) {
                support.start(id);
            }
            else if( currentState.equals(VmState.SUSPENDED) ) {
                support.resume(id);
            }
        }
        else if( state.equals(VmState.PAUSED) ) {
            if( currentState.equals(VmState.RUNNING) || setState(support, vm, VmState.RUNNING) ) {
                support.pause(id);
            }
            else {
                return false;
            }
        }
        else if( state.equals(VmState.STOPPED) ) {
            if( currentState.equals(VmState.RUNNING) || setState(support, vm, VmState.RUNNING) ) {
                support.stop(id, true);
            }
            else {
                return false;
            }
        }
        else if( state.equals(VmState.SUSPENDED) ) {
            if( currentState.equals(VmState.RUNNING) || setState(support, vm, VmState.RUNNING) ) {
                support.suspend(id);
            }
            else {
                return false;
            }
        }
    } catch( Throwable ignore ) {
        return false;
    }
    // phase 3: wait for the VM to reach the desired state
    timeout = System.currentTimeMillis() + ( CalendarWrapper.MINUTE * 20L );
    while( timeout > System.currentTimeMillis() ) {
        if( state.equals(currentState) ) {
            return true;
        }
        try {
            Thread.sleep(15000L);
        } catch( InterruptedException ignore ) {
            // deliberate: polling continues despite interruption
        }
        try {
            VirtualMachine v = support.getVirtualMachine(id);
            if( v == null ) {
                return state.equals(VmState.TERMINATED);
            }
            vm = v;
            currentState = vm.getCurrentState();
        } catch( Throwable ignore ) {
            // ignore
        }
    }
    return false;
}
}
|
src/main/java/org/dasein/cloud/test/compute/ComputeResources.java
|
/**
* Copyright (C) 2009-2014 Dell, Inc.
* See annotations for authorship information
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud.test.compute;
import java.util.*;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.apache.log4j.Logger;
import org.dasein.cloud.CloudException;
import org.dasein.cloud.CloudProvider;
import org.dasein.cloud.InternalException;
import org.dasein.cloud.Requirement;
import org.dasein.cloud.compute.*;
import org.dasein.cloud.dc.DataCenter;
import org.dasein.cloud.network.NetworkServices;
import org.dasein.cloud.network.Subnet;
import org.dasein.cloud.network.SubnetCreateOptions;
import org.dasein.cloud.network.VLAN;
import org.dasein.cloud.network.VLANSupport;
import org.dasein.cloud.test.DaseinTestManager;
import org.dasein.cloud.test.identity.IdentityResources;
import org.dasein.cloud.test.network.NetworkResources;
import org.dasein.util.CalendarWrapper;
import org.dasein.util.uom.storage.Gigabyte;
import org.dasein.util.uom.storage.Storage;
/**
* Handles the shared compute resources for executing various tests.
* <p>Created by George Reese: 2/17/13 8:35 PM</p>
*
* @author George Reese
* @version 2013.04
* @version 2014.08 limited architectures to those supported in the cloud
* @since 2013.02
*/
public class ComputeResources {
    static private final Logger logger = Logger.getLogger(ComputeResources.class);

    // shared RNG used to uniquify labels and generate throwaway credentials
    static private final Random random = new Random();

    private CloudProvider provider;

    // label -> provider resource ID for resources provisioned or discovered during the test run
    private final Map<String, String> testMachineImages = new HashMap<String, String>();
    private final Map<String, String> testSnapshots = new HashMap<String, String>();
    private final Map<String, String> testVMs = new HashMap<String, String>();
    private final Map<String, String> testVolumes = new HashMap<String, String>();

    // defaults discovered by init() and reused across tests
    private String testDataCenterId;
    private Platform testImagePlatform;
    private String testVMProductId;
    private String testVolumeProductId;

    /**
     * Constructs a compute resource manager bound to the given cloud provider connection.
     *
     * @param provider the provider connection used for provisioning and discovery
     */
    public ComputeResources( @Nonnull CloudProvider provider ) {
        this.provider = provider;
    }
/**
 * Logs a summary of all stateful compute resources provisioned during the run (stateless
 * entries are dropped first, since they were discovered rather than created) and returns
 * the total number of provisioned resources.
 *
 * @return the count of provisioned machine images, snapshots, VMs, and volumes
 */
public int report() {
    boolean printedHeader = false;
    int total = 0;

    testMachineImages.remove(DaseinTestManager.STATELESS);
    if( !testMachineImages.isEmpty() ) {
        if( !printedHeader ) {
            logger.info("Provisioned Compute Resources:");
            printedHeader = true;
        }
        total += testMachineImages.size();
        DaseinTestManager.out(logger, null, "---> Machine Images", testMachineImages.size() + " " + testMachineImages);
    }

    testSnapshots.remove(DaseinTestManager.STATELESS);
    if( !testSnapshots.isEmpty() ) {
        if( !printedHeader ) {
            logger.info("Provisioned Compute Resources:");
            printedHeader = true;
        }
        total += testSnapshots.size();
        DaseinTestManager.out(logger, null, "---> Snapshots", testSnapshots.size() + " " + testSnapshots);
    }

    testVMs.remove(DaseinTestManager.STATELESS);
    if( !testVMs.isEmpty() ) {
        if( !printedHeader ) {
            logger.info("Provisioned Compute Resources:");
            printedHeader = true;
        }
        total += testVMs.size();
        DaseinTestManager.out(logger, null, "---> Virtual Machines", testVMs.size() + " " + testVMs);
    }

    testVolumes.remove(DaseinTestManager.STATELESS);
    if( !testVolumes.isEmpty() ) {
        if( !printedHeader ) {
            logger.info("Provisioned Compute Resources:");
            printedHeader = true;
        }
        total += testVolumes.size();
        DaseinTestManager.out(logger, null, "---> Volumes", testVolumes.size() + " " + testVolumes);
    }
    return total;
}
/**
 * Tears down every stateful resource provisioned during the run: terminates VMs, removes
 * images and snapshots, detaches then removes volumes. Stateless entries (discovered, not
 * created) are left alone. Individual failures are logged and do not stop the cleanup.
 *
 * @return the number of resources successfully de-provisioned (or already gone)
 */
public int close() {
    ComputeServices computeServices = provider.getComputeServices();
    int count = 0;

    if( computeServices != null ) {
        VirtualMachineSupport vmSupport = computeServices.getVirtualMachineSupport();

        if( vmSupport != null ) {
            for( Map.Entry<String, String> entry : testVMs.entrySet() ) {
                if( !entry.getKey().equals(DaseinTestManager.STATELESS) ) {
                    try {
                        VirtualMachine vm = vmSupport.getVirtualMachine(entry.getValue());

                        if( vm != null ) {
                            vmSupport.terminate(entry.getValue());
                            count++;
                        }
                        else {
                            // already gone: still counts as cleaned up
                            count++;
                        }
                    } catch( Throwable t ) {
                        logger.warn("Failed to de-provision test VM " + entry.getValue() + ": " + t.getMessage());
                    }
                }
            }
        }
        MachineImageSupport imageSupport = computeServices.getImageSupport();

        if( imageSupport != null ) {
            for( Map.Entry<String, String> entry : testMachineImages.entrySet() ) {
                if( !entry.getKey().equals(DaseinTestManager.STATELESS) ) {
                    try {
                        MachineImage img = imageSupport.getImage(entry.getValue());

                        if( img != null ) {
                            imageSupport.remove(entry.getValue());
                            count++;
                        }
                        else {
                            count++;
                        }
                    } catch( Throwable t ) {
                        logger.warn("Failed to de-provision test image " + entry.getValue() + ": " + t.getMessage());
                    }
                }
            }
        }
        SnapshotSupport snapshotSupport = computeServices.getSnapshotSupport();

        if( snapshotSupport != null ) {
            for( Map.Entry<String, String> entry : testSnapshots.entrySet() ) {
                if( !entry.getKey().equals(DaseinTestManager.STATELESS) ) {
                    try {
                        Snapshot snapshot = snapshotSupport.getSnapshot(entry.getValue());

                        if( snapshot != null ) {
                            snapshotSupport.remove(entry.getValue());
                            count++;
                        }
                        else {
                            count++;
                        }
                    } catch( Throwable t ) {
                        logger.warn("Failed to de-provision test snapshot " + entry.getValue() + " post-test: " + t.getMessage());
                    }
                }
            }
        }
        VolumeSupport volumeSupport = computeServices.getVolumeSupport();

        if( volumeSupport != null ) {
            // first pass: force-detach every volume (best-effort)
            for( Map.Entry<String, String> entry : testVolumes.entrySet() ) {
                if( !entry.getKey().equals(DaseinTestManager.STATELESS) ) {
                    try {
                        Volume volume = volumeSupport.getVolume(entry.getValue());

                        if( volume != null ) {
                            volumeSupport.detach(entry.getValue(), true);
                        }
                    } catch( Throwable ignore ) {
                        // IGNORE
                    }
                }
            }
            // give detachments time to complete before removal
            try {
                Thread.sleep(60000L);
            } catch( InterruptedException ignore ) {
                // deliberate: proceed with removal even if interrupted
            }
            // second pass: remove the volumes
            for( Map.Entry<String, String> entry : testVolumes.entrySet() ) {
                if( !entry.getKey().equals(DaseinTestManager.STATELESS) ) {
                    try {
                        Volume volume = volumeSupport.getVolume(entry.getValue());

                        if( volume != null ) {
                            volumeSupport.remove(entry.getValue());
                            count++;
                        }
                        else {
                            count++;
                        }
                    } catch( Throwable t ) {
                        logger.warn("Failed to de-provision test volume " + entry.getValue() + ": " + t.getMessage());
                    }
                }
            }
        }
    }
    return count;
}
/**
 * Finds an existing snapshot suitable for stateless tests, preferring one in the
 * AVAILABLE state but falling back to the first snapshot listed. A found ID is
 * cached under the STATELESS label in {@code testSnapshots} before being returned.
 *
 * @return the provider snapshot ID of a usable snapshot, or {@code null} if none exists
 *         or the lookup fails
 */
private @Nullable String findStatelessSnapshot() {
    ComputeServices compute = provider.getComputeServices();
    if( compute == null ) {
        return null;
    }
    SnapshotSupport snapshots = compute.getSnapshotSupport();
    try {
        if( snapshots == null || !snapshots.isSubscribed() ) {
            return null;
        }
        Snapshot candidate = null;
        for( Snapshot s : snapshots.listSnapshots() ) {
            // an AVAILABLE snapshot wins outright; otherwise remember the first one seen
            if( s.getCurrentState().equals(SnapshotState.AVAILABLE) ) {
                candidate = s;
                break;
            }
            if( candidate == null ) {
                candidate = s;
            }
        }
        if( candidate != null ) {
            String snapshotId = candidate.getProviderSnapshotId();
            if( snapshotId != null ) {
                testSnapshots.put(DaseinTestManager.STATELESS, snapshotId);
            }
            return snapshotId;
        }
    } catch( Throwable ignore ) {
        // best effort: any lookup failure is treated as "no snapshot found"
    }
    return null;
}
/**
 * Identifies a data center ID usable for tests. A previously discovered value is
 * reused. In stateless mode, the region's data centers are scanned for an
 * active+available one; in stateful mode, a VM is provisioned and whatever data
 * center it lands in is cached and returned.
 *
 * @param stateless when true, only read data center metadata; when false, a test VM
 *                  may be provisioned as a side effect
 * @return a data center ID, or {@code null} if none could be determined
 */
public @Nullable String getTestDataCenterId( boolean stateless ) {
    // reuse a previously discovered data center if we have one
    if( testDataCenterId != null ) {
        return testDataCenterId;
    }
    if( stateless ) {
        try {
            DataCenter defaultDC = null;
            //noinspection ConstantConditions
            for( DataCenter dc : provider.getDataCenterServices().listDataCenters(provider.getContext().getRegionId()) ) {
                if( defaultDC == null ) {
                    defaultDC = dc;
                }
                // prefer the first data center that is both active and available
                if( dc.isActive() && dc.isAvailable() ) {
                    return dc.getProviderDataCenterId();
                }
            }
            // none active/available: fall back to the first one listed
            if( defaultDC != null ) {
                return defaultDC.getProviderDataCenterId();
            }
        } catch( Throwable ignore ) {
            // ignore
        }
    }
    else {
        // stateful: provision a VM and use whatever data center the cloud placed it in
        ComputeServices services = provider.getComputeServices();
        if( services != null ) {
            VirtualMachineSupport support = services.getVirtualMachineSupport();
            if( support != null ) {
                try {
                    String id = provisionVM(support, DaseinTestManager.STATEFUL, "Dasein Stateless VM", "dsnstfvm", null);
                    VirtualMachine vm = support.getVirtualMachine(id);
                    if( vm != null ) {
                        // cache for subsequent calls
                        testDataCenterId = vm.getProviderDataCenterId();
                        return testDataCenterId;
                    }
                } catch( Throwable ignore ) {
                    // ignore me
                }
            }
        }
    }
    return null;
}
/**
 * Returns the ID of a test machine image tracked under {@code label}. For the
 * STATELESS label, any cached image not marked removed is returned. Otherwise a
 * new image may be provisioned when {@code provisionIfNull} is set.
 *
 * @param label the label under which the image is tracked
 * @param provisionIfNull whether to provision an image when none is cached
 * @return the machine image ID, or {@code null} if none could be found or created
 */
public @Nullable String getTestImageId( @Nonnull String label, boolean provisionIfNull ) {
    String imageId = testMachineImages.get(label);
    if( imageId != null ) {
        return imageId;
    }
    if( label.equals(DaseinTestManager.STATELESS) ) {
        // any cached image that has not been marked removed satisfies a stateless request
        for( Map.Entry<String, String> entry : testMachineImages.entrySet() ) {
            if( entry.getKey().equals(DaseinTestManager.REMOVED) ) {
                continue;
            }
            String candidate = entry.getValue();
            if( candidate != null ) {
                return candidate;
            }
        }
        return null;
    }
    if( !provisionIfNull ) {
        return null;
    }
    ComputeServices services = provider.getComputeServices();
    if( services == null ) {
        return null;
    }
    MachineImageSupport support = services.getImageSupport();
    if( support == null ) {
        return null;
    }
    try {
        return provisionImage(support, label, "dsnimg", null);
    } catch( Throwable ignore ) {
        return null;
    }
}
/**
 * Returns the ID of a test snapshot tracked under {@code label}. For the STATELESS
 * label, a cached snapshot is preferred and discovery in the cloud is attempted as
 * a fallback. Otherwise a snapshot may be provisioned when {@code provisionIfNull}
 * is set.
 *
 * @param label the label under which the snapshot is tracked
 * @param provisionIfNull whether to provision a snapshot when none is cached
 * @return the snapshot ID, or {@code null} if none could be found or created
 */
public @Nullable String getTestSnapshotId( @Nonnull String label, boolean provisionIfNull ) {
    if( DaseinTestManager.STATELESS.equals(label) ) {
        // reuse any cached snapshot that has not been marked removed
        for( Map.Entry<String, String> entry : testSnapshots.entrySet() ) {
            if( !entry.getKey().startsWith(DaseinTestManager.REMOVED) ) {
                String cached = entry.getValue();
                if( cached != null ) {
                    return cached;
                }
            }
        }
        // nothing cached: try to discover one in the cloud
        return findStatelessSnapshot();
    }
    String snapshotId = testSnapshots.get(label);
    if( snapshotId != null ) {
        return snapshotId;
    }
    if( provisionIfNull ) {
        ComputeServices services = provider.getComputeServices();
        if( services != null ) {
            SnapshotSupport support = services.getSnapshotSupport();
            if( support != null ) {
                try {
                    return provisionSnapshot(support, label, "dsnsnap" + ( System.currentTimeMillis() % 10000 ), null);
                } catch( Throwable ignore ) {
                    // fall through to null
                }
            }
        }
    }
    return null;
}
/**
 * Convenience overload of {@link #getTestVmId(String, String, VmState, boolean, String)}
 * using the default host-name prefix {@code "dsnvm"}.
 */
public @Nullable String getTestVmId( @Nonnull String label, @Nullable VmState desiredState, boolean provisionIfNull, @Nullable String preferredDataCenterId ) {
    String defaultHostPrefix = "dsnvm";
    return getTestVmId(label, defaultHostPrefix, desiredState, provisionIfNull, preferredDataCenterId);
}
/**
 * Returns the ID of a test VM tracked under {@code label}, optionally provisioning
 * one and optionally transitioning it into a desired state. For the STATELESS label,
 * any tracked, non-terminated VM is returned without provisioning.
 *
 * @param label the label under which the VM is tracked
 * @param vmName host-name prefix used if a VM must be provisioned
 * @param desiredState the state the VM should be put into before returning, if any
 * @param provisionIfNull whether to provision a VM when none is cached or it is terminated
 * @param preferredDataCenterId the data center to provision into, if any is preferred
 * @return the VM ID, or {@code null} if none could be found or created
 */
public @Nullable String getTestVmId( @Nonnull String label, @Nonnull String vmName, @Nullable VmState desiredState, boolean provisionIfNull, @Nullable String preferredDataCenterId ) {
    if( label.equals(DaseinTestManager.STATELESS) ) {
        // any tracked, non-terminated VM may serve a stateless request
        for( Map.Entry<String, String> entry : testVMs.entrySet() ) {
            if( !entry.getKey().startsWith(DaseinTestManager.REMOVED) ) {
                String id = entry.getValue();
                if( id != null ) {
                    try {
                        @SuppressWarnings("ConstantConditions") VirtualMachine vm = provider.getComputeServices().getVirtualMachineSupport().getVirtualMachine(id);
                        if( vm != null && !VmState.TERMINATED.equals(vm.getCurrentState()) ) {
                            return id;
                        }
                    } catch( Throwable ignore ) {
                        // ignore
                    }
                }
            }
        }
        return null;
    }
    String id = testVMs.get(label);
    if( id == null && !provisionIfNull ) {
        return null;
    }
    ComputeServices services = provider.getComputeServices();
    if( services != null ) {
        VirtualMachineSupport support = services.getVirtualMachineSupport();
        if( support != null ) {
            try {
                VirtualMachine vm = ( id == null ? null : support.getVirtualMachine(id) );
                // (re)provision when the tracked VM is missing or already terminated
                if( ( vm == null || VmState.TERMINATED.equals(vm.getCurrentState()) ) && provisionIfNull ) {
                    id = provisionVM(support, label, "testvm-" + label, vmName, preferredDataCenterId);
                    vm = support.getVirtualMachine(id);
                }
                // transition the VM into the requested state before handing it out
                if( vm != null && desiredState != null ) {
                    setState(support, vm, desiredState);
                }
                return id;
            } catch( Throwable t ) {
                try {
                    // only warn when the account is actually subscribed to VM support
                    if( support.isSubscribed() ) {
                        logger.warn("Unable to provision test virtual machine under label " + label + ": " + t.getMessage());
                    }
                } catch( Throwable ignore ) {
                    // ignore
                }
            }
        }
    }
    return null;
}
/**
 * Returns the ID of a test VM attached to a VLAN, optionally provisioning one in the
 * requested VLAN (or any VLAN when {@code vlanId} is null). For the STATELESS label,
 * only already-tracked, non-terminated, VLAN-attached VMs are considered.
 *
 * @param label the label under which the VM is tracked
 * @param desiredState the state the VM should be put into before returning, if any
 * @param vlanId the VLAN the VM must belong to, or {@code null} for any VLAN
 * @param provisionIfNull whether to provision a VM when no matching one is cached
 * @param preferredDataCenterId the data center to provision into, if any is preferred
 * @return the VM ID, or {@code null} if none could be found or created
 */
public @Nullable String getTestVLANVmId(@Nonnull String label, @Nullable VmState desiredState, @Nullable String vlanId, boolean provisionIfNull, @Nullable String preferredDataCenterId) {
    if( label.equals(DaseinTestManager.STATELESS) ) {
        // look for any cached, non-terminated VM already attached to the requested VLAN
        for( Map.Entry<String,String> entry : testVMs.entrySet() ) {
            if( !entry.getKey().startsWith(DaseinTestManager.REMOVED) ) {
                String id = entry.getValue();
                if( id != null ) {
                    try {
                        @SuppressWarnings("ConstantConditions") VirtualMachine vm = provider.getComputeServices().getVirtualMachineSupport().getVirtualMachine(id);
                        if( vm != null && !VmState.TERMINATED.equals(vm.getCurrentState()) && vm.getProviderVlanId() != null ) {
                            // with no specific VLAN requested, any VLAN-attached VM will do
                            if( vlanId == null || vm.getProviderVlanId().equalsIgnoreCase(vlanId) ) {
                                return id;
                            }
                        }
                    }
                    catch( Throwable ignore ) {
                        // ignore
                    }
                }
            }
        }
        return null;
    }
    String id = testVMs.get(label);
    if( id == null && !provisionIfNull ) {
        return null;
    }
    ComputeServices services = provider.getComputeServices();
    if( services != null ) {
        VirtualMachineSupport support = services.getVirtualMachineSupport();
        if( support != null ) {
            try {
                VirtualMachine vm = (id == null ? null : support.getVirtualMachine(id));
                // (re)provision when the tracked VM is missing, terminated, or in the wrong VLAN
                if( (vm == null || VmState.TERMINATED.equals(vm.getCurrentState()) || vm.getProviderVlanId() == null || !vm.getProviderVlanId().equalsIgnoreCase(vlanId)) && provisionIfNull ) {
                    String testImageId = getTestImageId(DaseinTestManager.STATELESS, false);
                    if( testImageId == null ) {
                        throw new CloudException("No test image exists for provisioning a virtual machine");
                    }
                    long now = System.currentTimeMillis();
                    String name = "dasein-test-" + label + " " + now;
                    String host = "dsnvm" + (now%10000);
                    VMLaunchOptions vmOpts = VMLaunchOptions.getInstance(testVMProductId, testImageId, name, host, "Test VM for stateful integration tests for Dasein Cloud").withExtendedAnalytics();
                    NetworkResources network = DaseinTestManager.getNetworkResources();
                    if( vlanId != null ) {
                        // place the VM into a subnet of the requested VLAN, creating one if needed
                        NetworkServices ns = provider.getNetworkServices();
                        VLANSupport vs = ns.getVlanSupport();
                        VLAN v = vs.getVlan(vlanId);
                        Iterable<Subnet> subnets = vs.listSubnets(vlanId);
                        if( subnets.iterator().hasNext() ) {
                            Subnet sub = subnets.iterator().next();
                            vmOpts.inSubnet( null, v.getProviderDataCenterId(), sub.getProviderVlanId(), sub.getProviderSubnetId());
                        } else {
                            Subnet sub = vs.createSubnet(SubnetCreateOptions.getInstance(vlanId, "192.168.50.0/24", "dsnsub", "dasein test create vm for vlan"));
                            vmOpts.inSubnet( null, v.getProviderDataCenterId(), sub.getProviderVlanId(), sub.getProviderSubnetId());
                        }
                    } else {
                        // no specific VLAN: find or create a test VLAN + subnet to place the VM in
                        if( network != null ) {
                            String networkId = network.getTestVLANId(DaseinTestManager.STATEFUL, true, preferredDataCenterId);
                            if( networkId == null ) {
                                networkId = network.getTestVLANId(DaseinTestManager.STATELESS, false, preferredDataCenterId);
                            }
                            // wait for network to be ready
                            try {
                                Thread.sleep(10000L);
                            }
                            catch( InterruptedException ignore ) {
                            }
                            if( networkId != null ) {
                                String subnetId = network.getTestSubnetId(DaseinTestManager.STATEFUL, true, networkId, preferredDataCenterId);
                                if( subnetId == null ) {
                                    subnetId = network.getTestSubnetId(DaseinTestManager.STATELESS, true, networkId, preferredDataCenterId);
                                }
                                if( subnetId != null ) {
                                    // wait for subnet to be ready
                                    try { Thread.sleep(10000L); }
                                    catch( InterruptedException ignore ) { }
                                    @SuppressWarnings("ConstantConditions") Subnet subnet = provider.getNetworkServices().getVlanSupport().getSubnet(subnetId);
                                    if( subnet != null ) {
                                        String dcId = subnet.getProviderDataCenterId();
                                        if( dcId == null ) {
                                            // fall back to the last active+available data center in the region
                                            for( DataCenter dc : provider.getDataCenterServices().listDataCenters(provider.getContext().getRegionId()) ) {
                                                if( (dc.isActive() && dc.isAvailable()) || dcId == null ) {
                                                    dcId = dc.getProviderDataCenterId();
                                                }
                                            }
                                        }
                                        vmOpts.inSubnet(null, dcId, vlanId, subnetId);
                                    }
                                }
                            }
                        }
                    }
                    id = provisionVM(support, label, vmOpts, preferredDataCenterId);
                    vm = support.getVirtualMachine(id);
                }
                if( vm != null && desiredState != null ) {
                    setState(support, vm, desiredState);
                }
                // vm may legitimately be null here (e.g. the post-provision lookup failed),
                // so guard before dereferencing; previously vm.getProviderVlanId() could NPE,
                // which was swallowed by the catch below and misreported as a provision failure
                if( vlanId == null && id != null ) {
                    return id;
                }
                else if( vlanId != null && id != null && vm != null && vlanId.equalsIgnoreCase(vm.getProviderVlanId()) ) {
                    return id;
                }
                else {
                    return null;
                }
            }
            catch( Throwable t ) {
                try {
                    if( support.isSubscribed() ) {
                        logger.warn("Unable to provision test virtual machine under label " + label + ": " + t.getMessage());
                    }
                }
                catch( Throwable ignore ) {
                    // ignore
                }
            }
        }
    }
    return null;
}
/**
 * @return the cached VM product ID selected during {@code init()} for provisioning
 *         test VMs, or {@code null} if none was discovered
 */
public @Nullable String getTestVMProductId() {
    return testVMProductId;
}
/**
 * Returns the ID of a test volume tracked under {@code label}. For the STATELESS
 * label, any cached volume not marked removed is returned. Otherwise a new volume
 * may be provisioned when {@code provisionIfNull} is set.
 *
 * @param label the label under which the volume is tracked
 * @param provisionIfNull whether to provision a volume when none is cached
 * @param desiredFormat the volume format to provision, if any is preferred
 * @param preferredDataCenterId the data center to provision into, if any is preferred
 * @return the volume ID, or {@code null} if none could be found or created
 */
public @Nullable String getTestVolumeId( @Nonnull String label, boolean provisionIfNull, @Nullable VolumeFormat desiredFormat, @Nullable String preferredDataCenterId ) {
    if( DaseinTestManager.STATELESS.equals(label) ) {
        // any cached volume that has not been marked removed satisfies a stateless request
        for( Map.Entry<String, String> entry : testVolumes.entrySet() ) {
            if( entry.getKey().equals(DaseinTestManager.REMOVED) ) {
                continue;
            }
            String cached = entry.getValue();
            if( cached != null ) {
                return cached;
            }
        }
        return null;
    }
    String volumeId = testVolumes.get(label);
    if( volumeId != null ) {
        return volumeId;
    }
    if( !provisionIfNull ) {
        return null;
    }
    ComputeServices services = provider.getComputeServices();
    if( services == null ) {
        return null;
    }
    VolumeSupport support = services.getVolumeSupport();
    if( support == null ) {
        return null;
    }
    try {
        return provisionVolume(support, label, "dsnvol" + ( System.currentTimeMillis() % 10000 ), desiredFormat, preferredDataCenterId);
    } catch( Throwable ignore ) {
        return null;
    }
}
/**
 * @return the cached volume product ID selected during {@code init()} for
 *         provisioning test volumes, or {@code null} if none was discovered
 */
public @Nullable String getTestVolumeProductId() {
    return testVolumeProductId;
}
/**
 * Discovers and caches stateless test resources from the target cloud: a default
 * VM product per architecture, a default machine image (and its platform), a
 * default volume product, plus any pre-existing running VM and usable volume in
 * the configured data center. All failures are swallowed; tests that need a
 * resource that could not be discovered will provision or skip on their own.
 */
public void init() {
    ComputeServices computeServices = provider.getComputeServices();
    // initialise available architectures
    Iterable<Architecture> architectures = Collections.emptyList();
    if( computeServices != null && computeServices.getVirtualMachineSupport() != null ) {
        try {
            architectures = computeServices.getVirtualMachineSupport().getCapabilities().listSupportedArchitectures();
        } catch( InternalException e ) {
        } catch( CloudException e ) {
        }
    }
    String dataCenterId = System.getProperty("test.dataCenter");
    if( computeServices != null ) {
        // pick a default (small, CURRENT) VM product for each supported architecture
        Map<Architecture, VirtualMachineProduct> productMap = new HashMap<Architecture, VirtualMachineProduct>();
        VirtualMachineSupport vmSupport = computeServices.getVirtualMachineSupport();
        if( vmSupport != null ) {
            try {
                for( Architecture architecture : architectures ) {
                    VirtualMachineProduct defaultProduct = null;
                    try {
                        VirtualMachineProductFilterOptions options = VirtualMachineProductFilterOptions.getInstance().withDatacenterId(dataCenterId);
                        for( VirtualMachineProduct product : vmSupport.listProducts(options, architecture) ) {
                            if( !product.getStatus().equals(VirtualMachineProduct.Status.CURRENT) ) {
                                continue;
                            }
                            // heuristic: prefer a modest product (roughly 1-2 GB RAM, fewer CPUs)
                            if( defaultProduct == null ) {
                                defaultProduct = product;
                            }
                            else if( defaultProduct.getRamSize().intValue() > product.getRamSize().intValue() ) {
                                if( product.getRamSize().intValue() > 1000 ) {
                                    defaultProduct = product;
                                }
                            }
                            else {
                                if( defaultProduct.getRamSize().intValue() < 1024 && product.getRamSize().intValue() < 2200 ) {
                                    defaultProduct = product;
                                }
                                else if( defaultProduct.getCpuCount() > product.getCpuCount() ) {
                                    if( ( defaultProduct.getRamSize().intValue() * 2 ) > product.getRamSize().intValue() ) {
                                        defaultProduct = product;
                                    }
                                }
                            }
                        }
                    } catch( Throwable ignore ) {
                        // ignore
                    }
                    productMap.put(architecture, defaultProduct);
                }
            } catch( Throwable ignore ) {
                // ignore
            }
        }
        // find a default machine image matching one of the chosen products
        MachineImageSupport imageSupport = computeServices.getImageSupport();
        if( imageSupport != null ) {
            boolean volumeBased = false;
            try {
                for( MachineImageType type : imageSupport.getCapabilities().listSupportedImageTypes() ) {
                    if( type.equals(MachineImageType.VOLUME) ) {
                        volumeBased = true;
                        break;
                    }
                }
            } catch( Throwable ignore ) {
                // ignore
            }
            for( Architecture architecture : architectures ) {
                VirtualMachineProduct currentProduct = productMap.get(architecture);
                if( currentProduct != null ) {
                    // Let WINDOWS come first for a greater chance of StatelessVMTests#getVMPassword to work
                    for( Platform platform : new Platform[]{Platform.UBUNTU, Platform.WINDOWS, Platform.COREOS, Platform.CENT_OS, Platform.RHEL} ) {
                        ImageFilterOptions options = ImageFilterOptions.getInstance(ImageClass.MACHINE).withArchitecture(architecture).onPlatform(platform);
                        try {
                            // first look among images visible in the account
                            for( MachineImage image : imageSupport.listImages(options) ) {
                                if( MachineImageState.ACTIVE.equals(image.getCurrentState()) && "".equals(image.getSoftware()) ) {
                                    testVMProductId = currentProduct.getProviderProductId();
                                    testMachineImages.put(DaseinTestManager.STATELESS, image.getProviderMachineImageId());
                                    testImagePlatform = image.getPlatform();
                                    if( !volumeBased || image.getType().equals(MachineImageType.VOLUME) ) {
                                        break;
                                    }
                                }
                            }
                        } catch( Throwable ignore ) {
                            // ignore
                        }
                        if( testVMProductId != null ) {
                            break;
                        }
                        // nothing in the account: search public images with the same filter
                        options = ImageFilterOptions.getInstance(ImageClass.MACHINE).withArchitecture(architecture).onPlatform(platform);
                        try {
                            for( MachineImage image : imageSupport.searchPublicImages(options) ) {
                                if( MachineImageState.ACTIVE.equals(image.getCurrentState()) && "".equals(image.getSoftware()) ) {
                                    testVMProductId = currentProduct.getProviderProductId();
                                    testMachineImages.put(DaseinTestManager.STATELESS, image.getProviderMachineImageId());
                                    testImagePlatform = image.getPlatform();
                                    if( !volumeBased || image.getType().equals(MachineImageType.VOLUME) ) {
                                        break;
                                    }
                                }
                            }
                        } catch( Throwable ignore ) {
                            // ignore
                        }
                    }
                    if( testVMProductId != null ) {
                        break;
                    }
                }
            }
        }
        // pick a default volume product (smallest reasonable size or cheapest per GB)
        VolumeSupport volumeSupport = computeServices.getVolumeSupport();
        if( volumeSupport != null ) {
            try {
                VolumeProduct defaultProduct = null;
                for( VolumeProduct product : volumeSupport.listVolumeProducts() ) {
                    if( defaultProduct == null ) {
                        defaultProduct = product;
                    }
                    else {
                        if( volumeSupport.getCapabilities().isVolumeSizeDeterminedByProduct() ) {
                            if( product.getVolumeSize().intValue() < defaultProduct.getVolumeSize().intValue() && product.getVolumeSize().intValue() >= 20 ) {
                                defaultProduct = product;
                            }
                        }
                        else {
                            if( product.getMonthlyGigabyteCost() > 0.00 ) {
                                if( product.getMonthlyGigabyteCost() < defaultProduct.getMonthlyGigabyteCost() ) {
                                    defaultProduct = product;
                                }
                            }
                        }
                    }
                }
                if( defaultProduct != null ) {
                    testVolumeProductId = defaultProduct.getProviderProductId();
                }
            } catch( Throwable ignore ) {
                // ignore me
            }
        }
        // cache a running VM in the configured data center for stateless tests
        if( vmSupport != null ) {
            try {
                for( VirtualMachine vm : vmSupport.listVirtualMachines() ) {
                    if (( vm.getProviderDataCenterId().equals(dataCenterId)) && ( VmState.RUNNING.equals(vm.getCurrentState()) )) { // no guarantee of being in the same datacenter
                        testVMs.put(DaseinTestManager.STATELESS, vm.getProviderVirtualMachineId());
                        break;
                    }
                }
            } catch( Throwable ignore ) {
                // ignore
            }
        }
        // cache an existing volume, preferring one that is AVAILABLE and attached
        if( volumeSupport != null ) {
            try {
                Volume defaultVolume = null;
                for( Volume volume : volumeSupport.listVolumes() ) {
                    if (( volume.getProviderDataCenterId().equals(dataCenterId)) && ( VolumeState.AVAILABLE.equals(volume.getCurrentState()) || defaultVolume == null )) {
                        if( defaultVolume == null || volume.isAttached() ) {
                            defaultVolume = volume;
                        }
                        if( VolumeState.AVAILABLE.equals(defaultVolume.getCurrentState()) && defaultVolume.isAttached() ) {
                            break;
                        }
                    }
                }
                if( defaultVolume != null ) {
                    testVolumes.put(DaseinTestManager.STATELESS, defaultVolume.getProviderVolumeId());
                }
            } catch( Throwable ignore ) {
                // ignore
            }
        }
    }
}
/**
 * Provisions a machine image from a virtual machine (capturing it directly, or
 * bundling as a fallback) and tracks the new image ID under {@code label} so it
 * is cleaned up when the suite ends.
 *
 * @param support the machine image support implementation to provision through
 * @param label the label under which the new image is tracked
 * @param namePrefix a prefix for the new image's name
 * @param vmId the VM to image; when null, a running stateful test VM is used or created
 * @return the ID of the newly created image
 * @throws CloudException no VM support, no usable VM, or no provisioning mechanism exists
 * @throws InternalException an error occurred inside Dasein Cloud
 */
public @Nonnull String provisionImage( @Nonnull MachineImageSupport support, @Nonnull String label, @Nonnull String namePrefix, @Nullable String vmId ) throws CloudException, InternalException {
    VirtualMachineSupport vmSupport = null;
    ComputeServices services = provider.getComputeServices();
    if( services != null ) {
        vmSupport = services.getVirtualMachineSupport();
    }
    if( vmSupport == null ) {
        throw new CloudException("Unable to provision a machine image because Dasein Cloud is showing no VM support");
    }
    if( vmId == null ) {
        // no source VM given: grab (or create) a running stateful test VM
        vmId = getTestVmId(DaseinTestManager.STATEFUL, VmState.RUNNING, true, null);
        if( vmId == null ) {
            throw new CloudException("Could not identify a VM for imaging");
        }
    }
    VirtualMachine vm = vmSupport.getVirtualMachine(vmId);
    if( vm == null ) {
        throw new CloudException("Could not identify a VM for imaging");
    }
    String imageId = vm.getProviderMachineImageId();
    MachineImage image = support.getImage(imageId);
    if( image == null || support.getCapabilities().supportsImageCapture(image.getType()) ) {
        // direct image capture from the running VM
        String id = ImageCreateOptions.getInstance(vm, namePrefix + ( System.currentTimeMillis() % 10000 ), "Test machine image with label " + label).build(provider);
        synchronized ( testMachineImages ) {
            // uniquify the label so an existing tracked image is not overwritten
            while( testMachineImages.containsKey(label) ) {
                label = label + random.nextInt(9);
            }
            testMachineImages.put(label, id);
        }
        return id;
    }
    else if( !support.getCapabilities().identifyLocalBundlingRequirement().equals(Requirement.REQUIRED) ) {
        // capture unsupported: fall back to bundling in the first supported format
        Iterator<MachineImageFormat> formats = support.getCapabilities().listSupportedFormatsForBundling().iterator();
        MachineImageFormat format = ( formats.hasNext() ? formats.next() : null );
        if( format != null ) {
            String id = support.bundleVirtualMachine(vmId, format, "dsnimg" + ( System.currentTimeMillis() % 100000 ), "dsnimg");
            synchronized ( testMachineImages ) {
                while( testMachineImages.containsKey(label) ) {
                    label = label + random.nextInt(9);
                }
                testMachineImages.put(label, id);
            }
            return id;
        }
    }
    throw new CloudException("No mechanism exists for provisioning images from a virtual machine");
}
/**
 * Creates a snapshot from a volume (provisioning a volume first when none is given),
 * waiting for the volume to settle and attaching it to a VM when the cloud requires
 * attachment. The new snapshot ID is tracked under {@code label} for cleanup.
 *
 * @param support the snapshot support implementation (used only for capability checks)
 * @param label the label under which the new snapshot is tracked
 * @param namePrefix a prefix for the snapshot's name
 * @param volumeId the source volume; when null, a stateful test volume is provisioned
 * @return the ID of the newly created snapshot
 * @throws CloudException no source volume could be obtained or snapshot creation failed
 * @throws InternalException an error occurred inside Dasein Cloud
 */
public @Nonnull String provisionSnapshot( @SuppressWarnings("UnusedParameters") @Nonnull SnapshotSupport support, @Nonnull String label, @Nonnull String namePrefix, @Nullable String volumeId ) throws CloudException, InternalException {
    SnapshotCreateOptions options;
    if( volumeId == null ) {
        // no source volume given: provision a fresh stateful test volume
        volumeId = getTestVolumeId(DaseinTestManager.STATEFUL + ( System.currentTimeMillis() % 1000 ), true, null, null);
        if( volumeId == null ) {
            throw new CloudException("No volume from which to create a snapshot");
        }
    }
    @SuppressWarnings("ConstantConditions") VolumeSupport vs = provider.getComputeServices().getVolumeSupport();
    if( vs != null ) {
        Volume volume = vs.getVolume(volumeId);
        if( volume != null ) {
            // wait up to 20 minutes for the volume to become AVAILABLE (or disappear)
            long timeout = System.currentTimeMillis() + ( CalendarWrapper.MINUTE * 20L );
            while( timeout > System.currentTimeMillis() ) {
                try {
                    Thread.sleep(15000L);
                } catch( InterruptedException ignore ) {
                }
                try {
                    volume = vs.getVolume(volumeId);
                } catch( Throwable ignore ) {
                }
                if( volume == null || volume.getCurrentState().equals(VolumeState.AVAILABLE) || volume.getCurrentState().equals(VolumeState.DELETED) ) {
                    break;
                }
            }
        }
        // some clouds require the volume to be attached to a VM before snapshotting
        if( volume != null && volume.getProviderVirtualMachineId() == null && support.getCapabilities().identifyAttachmentRequirement().equals(Requirement.REQUIRED) ) {
            String vmId = getTestVmId(DaseinTestManager.STATEFUL, VmState.RUNNING, true, volume.getProviderDataCenterId());
            if( vmId != null ) {
                @SuppressWarnings("ConstantConditions") VirtualMachine vm = provider.getComputeServices().getVirtualMachineSupport().getVirtualMachine(vmId);
                if( vm != null ) {
                    // try each candidate device ID until one attaches successfully
                    for( String deviceId : vs.getCapabilities().listPossibleDeviceIds(vm.getPlatform()) ) {
                        try {
                            vs.attach(volumeId, vmId, deviceId);
                            break;
                        } catch( Throwable ignore ) {
                            // ignore
                        }
                    }
                }
            }
        }
    }
    options = SnapshotCreateOptions.getInstanceForCreate(volumeId, namePrefix + ( System.currentTimeMillis() % 10000 ), "Dasein Snapshot Test " + label);
    String id = options.build(provider);
    if( id == null ) {
        throw new CloudException("Unable to create a snapshot");
    }
    synchronized ( testSnapshots ) {
        // uniquify the label so an existing tracked snapshot is not overwritten
        while( testSnapshots.containsKey(label) ) {
            label = label + random.nextInt(9);
        }
        testSnapshots.put(label, id);
    }
    return id;
}
/**
 * Launches {@code count} virtual machines from the given options, filling in any
 * settings the cloud declares as REQUIRED (bootstrap credentials, VLAN/subnet,
 * static IP, root volume product). Each resulting VM ID is tracked under
 * {@code label} for teardown at the end of the suite.
 *
 * @param support the VM support implementation to provision through
 * @param label the label under which the new VMs are tracked
 * @param options the launch options describing the VMs
 * @param preferredDataCenter the data center to provision into, if any is preferred
 * @param count the number of VMs to launch
 * @return the IDs of the launched VMs
 * @throws CloudException an error occurred in the cloud provider
 * @throws InternalException an error occurred inside Dasein Cloud
 */
public @Nonnull Iterable<String> provisionManyVMs( @Nonnull VirtualMachineSupport support, @Nonnull String label, @Nonnull VMLaunchOptions options, @Nullable String preferredDataCenter, int count ) throws CloudException, InternalException {
    if( preferredDataCenter != null ) {
        options.inDataCenter(preferredDataCenter);
    }
    // satisfy a REQUIRED bootstrap password with a random throwaway credential
    if( options.getBootstrapUser() == null && Requirement.REQUIRED.equals(support.getCapabilities().identifyPasswordRequirement(testImagePlatform)) ) {
        options.withBootstrapUser("dasein", "x" + random.nextInt(100000) + System.currentTimeMillis());
    }
    if( options.getBootstrapKey() == null && Requirement.REQUIRED.equals(support.getCapabilities().identifyShellKeyRequirement(testImagePlatform)) ) {
        IdentityResources identity = DaseinTestManager.getIdentityResources();
        if( identity != null ) {
            String keypairId = identity.getTestKeypairId(DaseinTestManager.STATEFUL, true);
            if( keypairId != null ) {
                // use the correctly spelled API (consistent with provisionVM) instead of the
                // misspelled withBoostrapKey variant
                options.withBootstrapKey(keypairId);
            }
        }
    }
    if( options.getVlanId() == null && Requirement.REQUIRED.equals(support.getCapabilities().identifyVlanRequirement()) ) {
        NetworkResources network = DaseinTestManager.getNetworkResources();
        if( network != null ) {
            String networkId = network.getTestVLANId(DaseinTestManager.STATEFUL, true, preferredDataCenter);
            if( networkId == null ) {
                networkId = network.getTestVLANId(DaseinTestManager.STATELESS, false, preferredDataCenter);
            }
            String subnetId = network.getTestSubnetId(DaseinTestManager.STATEFUL, true, networkId, preferredDataCenter);
            try {
                if( networkId != null || subnetId != null ) {
                    if( subnetId != null ) {
                        @SuppressWarnings("ConstantConditions") Subnet subnet = provider.getNetworkServices().getVlanSupport().getSubnet(subnetId);
                        if( subnet != null ) {
                            String dcId = subnet.getProviderDataCenterId();
                            if( dcId == null ) {
                                // fall back to the last active+available data center in the region
                                for( DataCenter dc : provider.getDataCenterServices().listDataCenters(provider.getContext().getRegionId()) ) {
                                    if( ( dc.isActive() && dc.isAvailable() ) || dcId == null ) {
                                        dcId = dc.getProviderDataCenterId();
                                    }
                                }
                            }
                            options.inSubnet(null, dcId, networkId, subnetId);
                        }
                    }
                    else {
                        @SuppressWarnings("ConstantConditions") VLAN vlan = provider.getNetworkServices().getVlanSupport().getVlan(networkId);
                        if( vlan != null ) {
                            String dcId = vlan.getProviderDataCenterId();
                            if( dcId == null ) {
                                for( DataCenter dc : provider.getDataCenterServices().listDataCenters(provider.getContext().getRegionId()) ) {
                                    if( ( dc.isActive() && dc.isAvailable() ) || dcId == null ) {
                                        dcId = dc.getProviderDataCenterId();
                                        if( dcId.equals(preferredDataCenter) ) {
                                            // stop at the preferred data center; otherwise keep the last one found
                                            break;
                                        }
                                    }
                                }
                            }
                            options.inVlan(null, dcId, networkId);
                        }
                    }
                }
            } catch( NullPointerException ignore ) {
                // ignore the fiasco
            }
        }
    }
    if( options.getStaticIpIds().length < 1 && Requirement.REQUIRED.equals(support.getCapabilities().identifyStaticIPRequirement()) ) {
        NetworkResources network = DaseinTestManager.getNetworkResources();
        if( network != null ) {
            String ipId;
            if( options.getVlanId() != null ) {
                ipId = network.getTestStaticIpId(label, true, null, true, options.getVlanId());
            }
            else {
                ipId = network.getTestStaticIpId(label, true, null, false, null);
            }
            if( ipId != null ) {
                options.withStaticIps(ipId);
            }
        }
    }
    if( options.getRootVolumeProductId() == null && Requirement.REQUIRED.equals(support.getCapabilities().identifyRootVolumeRequirement()) && testVolumeProductId != null ) {
        options.withRootVolumeProduct(testVolumeProductId);
    }
    // marker so de-provisioning can recognize test artifacts
    options.withMetaData("dsntestcase", "true");
    Iterable<String> ids = options.buildMany(provider, count);
    for( String id : ids ) {
        synchronized ( testVMs ) {
            // uniquify the label so no tracked VM gets overwritten
            while( testVMs.containsKey(label) ) {
                label = label + random.nextInt(9);
            }
            testVMs.put(label, id);
        }
    }
    return ids;
}
/**
 * Launches a single virtual machine from the given options, filling in any settings
 * the cloud declares as REQUIRED (bootstrap credentials, VLAN/subnet, static IP,
 * root volume product). The new VM ID is tracked under {@code label} for teardown
 * at the end of the suite.
 *
 * @param support the VM support implementation to provision through
 * @param label the label under which the new VM is tracked
 * @param options the launch options describing the VM
 * @param preferredDataCenter the data center to provision into, if any is preferred
 * @return the ID of the launched VM
 * @throws CloudException an error occurred in the cloud provider
 * @throws InternalException an error occurred inside Dasein Cloud
 */
public @Nonnull String provisionVM( @Nonnull VirtualMachineSupport support, @Nonnull String label, @Nonnull VMLaunchOptions options, @Nullable String preferredDataCenter ) throws CloudException, InternalException {
    if( preferredDataCenter != null ) {
        options.inDataCenter(preferredDataCenter);
    }
    // satisfy a REQUIRED bootstrap password with a random throwaway credential
    if( options.getBootstrapUser() == null && Requirement.REQUIRED.equals(support.getCapabilities().identifyPasswordRequirement(testImagePlatform)) ) {
        options.withBootstrapUser("dasein", "x" + random.nextInt(100000) + System.currentTimeMillis());
    }
    if( options.getBootstrapKey() == null && Requirement.REQUIRED.equals(support.getCapabilities().identifyShellKeyRequirement(testImagePlatform)) ) {
        IdentityResources identity = DaseinTestManager.getIdentityResources();
        if( identity != null ) {
            String keypairId = identity.getTestKeypairId(DaseinTestManager.STATEFUL, true);
            if( keypairId != null ) {
                options.withBootstrapKey(keypairId);
            }
        }
    }
    if( options.getVlanId() == null && Requirement.REQUIRED.equals(support.getCapabilities().identifyVlanRequirement()) ) {
        NetworkResources network = DaseinTestManager.getNetworkResources();
        if( network != null ) {
            String networkId = network.getTestVLANId(label, true, preferredDataCenter);
            if( networkId == null ) {
                networkId = network.getTestVLANId(DaseinTestManager.STATELESS, false, preferredDataCenter);
            }
            String subnetId = network.getTestSubnetId(DaseinTestManager.STATEFUL, true, networkId, preferredDataCenter);
            try {
                if( networkId != null || subnetId != null ) {
                    if( subnetId != null ) {
                        @SuppressWarnings("ConstantConditions") Subnet subnet = provider.getNetworkServices().getVlanSupport().getSubnet(subnetId);
                        if( subnet != null ) {
                            String dcId = subnet.getProviderDataCenterId();
                            if( dcId == null ) {
                                // fall back to the last active+available data center in the region
                                for( DataCenter dc : provider.getDataCenterServices().listDataCenters(provider.getContext().getRegionId()) ) {
                                    if( ( dc.isActive() && dc.isAvailable() ) || dcId == null ) {
                                        dcId = dc.getProviderDataCenterId();
                                    }
                                }
                            }
                            options.inSubnet(null, dcId, networkId, subnetId);
                        }
                    }
                    else {
                        @SuppressWarnings("ConstantConditions") VLAN vlan = provider.getNetworkServices().getVlanSupport().getVlan(networkId);
                        if( vlan != null ) {
                            String dcId = vlan.getProviderDataCenterId();
                            if( dcId == null ) {
                                if( preferredDataCenter != null ) // If we have a preferredDataCenter, lets run with it!
                                    dcId = preferredDataCenter;
                                else
                                    // otherwise walk the data centers and keep the last active+available one found
                                    for( DataCenter dc : provider.getDataCenterServices().listDataCenters(provider.getContext().getRegionId()) ) {
                                        if( (dc.isActive() && dc.isAvailable()) || dcId == null ) {
                                            dcId = dc.getProviderDataCenterId();
                                        }
                                    }
                            }
                            options.inVlan(null, dcId, networkId);
                        }
                    }
                }
            } catch( NullPointerException ignore ) {
                // tolerate clouds with incomplete network metadata
            }
        }
    }
    if( options.getStaticIpIds().length < 1 && Requirement.REQUIRED.equals(support.getCapabilities().identifyStaticIPRequirement()) ) {
        NetworkResources network = DaseinTestManager.getNetworkResources();
        if( network != null ) {
            String ipId;
            if( options.getVlanId() != null ) {
                ipId = network.getTestStaticIpId(label, true, null, true, options.getVlanId());
            }
            else {
                ipId = network.getTestStaticIpId(label, true, null, false, null);
            }
            if( ipId != null ) {
                options.withStaticIps(ipId);
            }
        }
    }
    if( options.getRootVolumeProductId() == null && Requirement.REQUIRED.equals(support.getCapabilities().identifyRootVolumeRequirement()) && testVolumeProductId != null ) {
        options.withRootVolumeProduct(testVolumeProductId);
    }
    // marker so de-provisioning can recognize test artifacts
    options.withMetaData("dsntestcase", "true");
    String id = options.build(provider);
    synchronized ( testVMs ) {
        // uniquify the label so no tracked VM gets overwritten
        while( testVMs.containsKey(label) ) {
            label = label + random.nextInt(9);
        }
        testVMs.put(label, id);
    }
    return id;
}
/**
 * Provisions a virtual machine and returns the ID of the new virtual machine. This method tracks the newly provisioned
 * virtual machine and will tear it down at the end of the test suite.
 *
 * @param support the virtual machine support object used to provision the VM
 * @param label the label to store the VM under for re-use
 * @param namePrefix a prefix for the friendly name of the VM
 * @param hostPrefix a prefix for the host name of the VM
 * @param preferredDataCenter the data center, if any is preferred, in which the VM should be provisioned
 * @return the ID for the new VM
 * @throws CloudException an error occurred with the cloud provider in provisioning the VM
 * @throws InternalException an error occurred within Dasein Cloud provisioning the VM
 */
public @Nonnull String provisionVM( @Nonnull VirtualMachineSupport support, @Nonnull String label, @Nonnull String namePrefix, @Nonnull String hostPrefix, @Nullable String preferredDataCenter ) throws CloudException, InternalException {
    String testImageId = getTestImageId(DaseinTestManager.STATELESS, false);
    if( testImageId == null ) {
        throw new CloudException("No test image exists for provisioning a virtual machine");
    }
    // timestamp keeps names/hosts unique across runs
    long now = System.currentTimeMillis();
    String name = namePrefix + "-" + now;
    String host = hostPrefix + ( now % 10000 );
    // deliberately include null, empty, and extra tags to exercise provider metadata handling
    Map<String, Object> metadata = new HashMap<String, Object>();
    metadata.put("dsnNullTag", null);
    metadata.put("dsnEmptyTag", "");
    metadata.put("dsnExtraTag", "extra");
    return provisionVM(support, label, VMLaunchOptions.getInstance(testVMProductId, testImageId, name, host, "Test VM for stateful integration tests for Dasein Cloud").withExtendedAnalytics().withMetaData(metadata).withUserData("#!/bin/bash\necho \"dasein\""), preferredDataCenter);
}
/**
 * Provisions {@code count} virtual machines from the stateless test image, building
 * launch options from the given name/host prefixes and delegating to the
 * options-based {@code provisionManyVMs} variant.
 *
 * @param support the VM support implementation to provision through
 * @param label the label under which the new VMs are tracked
 * @param namePrefix a prefix for the friendly names of the VMs
 * @param hostPrefix a prefix for the host names of the VMs
 * @param preferredDataCenter the data center to provision into, if any is preferred
 * @param count the number of VMs to launch
 * @return the IDs of the launched VMs
 * @throws CloudException no test image exists or the cloud provider failed
 * @throws InternalException an error occurred inside Dasein Cloud
 */
public @Nonnull Iterable<String> provisionManyVMs( @Nonnull VirtualMachineSupport support, @Nonnull String label, @Nonnull String namePrefix, @Nonnull String hostPrefix, @Nullable String preferredDataCenter, int count ) throws CloudException, InternalException {
    String imageId = getTestImageId(DaseinTestManager.STATELESS, false);
    if( imageId == null ) {
        throw new CloudException("No test image exists for provisioning a virtual machine");
    }
    // timestamp keeps names/hosts unique across runs
    long timestamp = System.currentTimeMillis();
    String friendlyName = namePrefix + " " + timestamp;
    String hostName = hostPrefix + ( timestamp % 10000 );
    VMLaunchOptions launchOptions = VMLaunchOptions.getInstance(testVMProductId, imageId, friendlyName, hostName, "Test VM for stateful integration tests for Dasein Cloud").withExtendedAnalytics();
    return provisionManyVMs(support, label, launchOptions, preferredDataCenter, count);
}
public @Nonnull String provisionVolume( @Nonnull VolumeSupport support, @Nonnull String label, @Nonnull String namePrefix, @Nullable VolumeFormat desiredFormat, @Nullable String preferredDataCenterId ) throws CloudException, InternalException {
VolumeCreateOptions options;
if( desiredFormat == null ) {
for( VolumeFormat fmt : support.getCapabilities().listSupportedFormats() ) {
if( fmt.equals(VolumeFormat.BLOCK) ) {
desiredFormat = VolumeFormat.BLOCK;
break;
}
}
if( desiredFormat == null ) {
desiredFormat = VolumeFormat.NFS;
}
}
if( support.getCapabilities().getVolumeProductRequirement().equals(Requirement.REQUIRED) && testVolumeProductId != null ) {
Storage<Gigabyte> size;
if( support.getCapabilities().isVolumeSizeDeterminedByProduct() ) {
VolumeProduct prd = null;
for( VolumeProduct product : support.listVolumeProducts() ) {
if( product.getProviderProductId().equals(testVolumeProductId) ) {
prd = product;
break;
}
}
if( prd != null ) {
size = prd.getVolumeSize();
if( size == null ) {
size = support.getCapabilities().getMinimumVolumeSize();
}
}
else {
size = support.getCapabilities().getMinimumVolumeSize();
}
}
else {
size = support.getCapabilities().getMinimumVolumeSize();
}
if( desiredFormat.equals(VolumeFormat.BLOCK) ) {
options = VolumeCreateOptions.getInstance(testVolumeProductId, size, namePrefix + ( System.currentTimeMillis() % 1000 ), "Dasein Cloud Integration Tests Volume Tests", 0);
}
else {
NetworkResources network = DaseinTestManager.getNetworkResources();
String testVlanId = null;
if( network != null ) {
testVlanId = network.getTestVLANId(DaseinTestManager.STATELESS, false, preferredDataCenterId);
}
if( testVlanId != null ) {
options = VolumeCreateOptions.getNetworkInstance(testVolumeProductId, testVlanId, size, namePrefix + ( System.currentTimeMillis() % 10000 ), "Dasein Cloud Integration Tests Volume Tests", 0);
}
else {
options = VolumeCreateOptions.getInstance(testVolumeProductId, size, namePrefix + ( System.currentTimeMillis() % 1000 ), "Dasein Cloud Integration Tests Volume Tests", 0);
}
}
}
else {
if( desiredFormat.equals(VolumeFormat.BLOCK) ) {
options = VolumeCreateOptions.getInstance(support.getCapabilities().getMinimumVolumeSize(), namePrefix + ( System.currentTimeMillis() % 10000 ), "Dasein Test Integration tests volume");
}
else {
NetworkResources network = DaseinTestManager.getNetworkResources();
String testVlanId = null;
if( network != null ) {
testVlanId = network.getTestVLANId(DaseinTestManager.STATELESS, false, preferredDataCenterId);
}
if( testVlanId != null ) {
options = VolumeCreateOptions.getNetworkInstance(testVlanId, support.getCapabilities().getMinimumVolumeSize(), namePrefix + ( System.currentTimeMillis() % 10000 ), "Dasein Cloud Integration Tests Volume Tests");
}
else {
options = VolumeCreateOptions.getInstance(support.getCapabilities().getMinimumVolumeSize(), namePrefix + ( System.currentTimeMillis() % 1000 ), "Dasein Cloud Integration Tests Volume Tests");
}
}
}
if( preferredDataCenterId == null ) {
preferredDataCenterId = getTestDataCenterId(false);
}
if( preferredDataCenterId != null ) {
options.inDataCenter(preferredDataCenterId);
}
options.withMetaData("dsntestcase", "true");
if (support.getCapabilities().requiresVMOnCreate().equals(Requirement.REQUIRED)) {
String testVmId = getTestVmId(DaseinTestManager.STATEFUL, VmState.STOPPED, true, testDataCenterId);
options.withVirtualMachineId(testVmId);
}
String id = options.build(provider);
Volume volume = support.getVolume(id);
if( volume != null && testDataCenterId == null ) {
testDataCenterId = volume.getProviderDataCenterId();
}
synchronized ( testVolumes ) {
while( testVolumes.containsKey(label) ) {
label = label + random.nextInt(9);
}
testVolumes.put(label, id);
}
return id;
}
private boolean setState( @Nonnull VirtualMachineSupport support, @Nonnull VirtualMachine vm, @Nonnull VmState state ) {
VmState currentState = vm.getCurrentState();
if( state.equals(currentState) ) {
return true;
}
if( state.equals(VmState.TERMINATED) ) {
return false;
}
String id = vm.getProviderVirtualMachineId();
long timeout = System.currentTimeMillis() + ( CalendarWrapper.MINUTE * 20L );
while( timeout > System.currentTimeMillis() ) {
if( !currentState.equals(VmState.PENDING) && !currentState.equals(VmState.PAUSING) && !currentState.equals(VmState.REBOOTING) && !currentState.equals(VmState.STOPPING) && !currentState.equals(VmState.SUSPENDING) ) {
break;
}
try {
Thread.sleep(15000L);
} catch( InterruptedException ignore ) {
}
try {
VirtualMachine v = support.getVirtualMachine(id);
if( v == null ) {
return state.equals(VmState.TERMINATED);
}
vm = v;
currentState = vm.getCurrentState();
} catch( Throwable ignore ) {
// ignore
}
}
try {
if( state.equals(VmState.RUNNING) ) {
if( currentState.equals(VmState.PAUSED) ) {
support.unpause(id);
}
else if( currentState.equals(VmState.STOPPED) ) {
support.start(id);
}
else if( currentState.equals(VmState.SUSPENDED) ) {
support.resume(id);
}
}
else if( state.equals(VmState.PAUSED) ) {
if( currentState.equals(VmState.RUNNING) || setState(support, vm, VmState.RUNNING) ) {
support.pause(id);
}
else {
return false;
}
}
else if( state.equals(VmState.STOPPED) ) {
if( currentState.equals(VmState.RUNNING) || setState(support, vm, VmState.RUNNING) ) {
support.stop(id, true);
}
else {
return false;
}
}
else if( state.equals(VmState.SUSPENDED) ) {
if( currentState.equals(VmState.RUNNING) || setState(support, vm, VmState.RUNNING) ) {
support.suspend(id);
}
else {
return false;
}
}
} catch( Throwable ignore ) {
return false;
}
timeout = System.currentTimeMillis() + ( CalendarWrapper.MINUTE * 20L );
while( timeout > System.currentTimeMillis() ) {
if( state.equals(currentState) ) {
return true;
}
try {
Thread.sleep(15000L);
} catch( InterruptedException ignore ) {
}
try {
VirtualMachine v = support.getVirtualMachine(id);
if( v == null ) {
return state.equals(VmState.TERMINATED);
}
vm = v;
currentState = vm.getCurrentState();
} catch( Throwable ignore ) {
// ignore
}
}
return false;
}
}
|
Fix for unterminated VMs
|
src/main/java/org/dasein/cloud/test/compute/ComputeResources.java
|
Fix for unterminated VMs
|
|
Java
|
apache-2.0
|
31d91dd85f9fc05750be1b5700bae9386661af0c
| 0
|
tomasdavidorg/optaplanner,netinept/Court-Scheduler,baldimir/optaplanner,glamperi/optaplanner,snurkabill/optaplanner,snurkabill/optaplanner,baldimir/optaplanner,eshen1991/optaplanner,tomasdavidorg/optaplanner,gsheldon/optaplanner,netinept/Court-Scheduler,oskopek/optaplanner,codeaudit/optaplanner,codeaudit/optaplanner,tkobayas/optaplanner,gsheldon/optaplanner,snurkabill/optaplanner,tkobayas/optaplanner,baldimir/optaplanner,DieterDePaepe/optaplanner,oskopek/optaplanner,eshen1991/optaplanner,bernardator/optaplanner,droolsjbpm/optaplanner,eshen1991/optaplanner,kunallimaye/optaplanner,DieterDePaepe/optaplanner,kunallimaye/optaplanner,kunallimaye/optaplanner,droolsjbpm/optaplanner,bernardator/optaplanner,bernardator/optaplanner,gsheldon/optaplanner,tomasdavidorg/optaplanner,glamperi/optaplanner,netinept/Court-Scheduler,droolsjbpm/optaplanner,oskopek/optaplanner,droolsjbpm/optaplanner,baldimir/optaplanner,tkobayas/optaplanner,oskopek/optaplanner,gsheldon/optaplanner,glamperi/optaplanner,tkobayas/optaplanner,codeaudit/optaplanner
|
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.examples.examination.solver.solution.initializer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.builder.CompareToBuilder;
import org.optaplanner.core.impl.phase.custom.CustomSolverPhaseCommand;
import org.optaplanner.core.api.score.Score;
import org.optaplanner.core.api.score.buildin.hardsoft.HardSoftScore;
import org.optaplanner.core.impl.score.director.ScoreDirector;
import org.optaplanner.examples.common.domain.PersistableIdComparator;
import org.optaplanner.examples.examination.domain.Exam;
import org.optaplanner.examples.examination.domain.Examination;
import org.optaplanner.examples.examination.domain.Period;
import org.optaplanner.examples.examination.domain.PeriodPenalty;
import org.optaplanner.examples.examination.domain.PeriodPenaltyType;
import org.optaplanner.examples.examination.domain.Room;
import org.optaplanner.examples.examination.domain.Topic;
import org.optaplanner.examples.examination.domain.solver.ExamBefore;
import org.optaplanner.examples.examination.domain.solver.ExamCoincidence;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ExaminationSolutionInitializer implements CustomSolverPhaseCommand {
protected final transient Logger logger = LoggerFactory.getLogger(getClass());
public void changeWorkingSolution(ScoreDirector scoreDirector) {
Examination examination = (Examination) scoreDirector.getWorkingSolution();
initializeExamList(scoreDirector, examination);
}
private void initializeExamList(ScoreDirector scoreDirector, Examination examination) {
List<Period> periodList = examination.getPeriodList();
List<Room> roomList = examination.getRoomList();
// TODO the planning entity list from the solution should be used and might already contain initialized entities
List<Exam> examList = new ArrayList<Exam>(examination.getTopicList().size()); // TODO this can be returned from createExamAssigningScoreList
List<ExamInitializationWeight> examInitialWeightList = createExamAssigningScoreList(examination);
for (ExamInitializationWeight examInitialWeight : examInitialWeightList) {
Score unscheduledScore = scoreDirector.calculateScore();
Exam leader = examInitialWeight.getExam();
List<ExamToHandle> examToHandleList = new ArrayList<ExamToHandle>(5);
if (leader.getExamCoincidence() == null) {
examToHandleList.add(new ExamToHandle(leader));
} else {
for (Exam coincidenceExam : leader.getExamCoincidence().getCoincidenceExamSet()) {
examToHandleList.add(new ExamToHandle(coincidenceExam));
}
}
List<PeriodScoring> periodScoringList = new ArrayList<PeriodScoring>(periodList.size());
for (Period period : periodList) {
for (ExamToHandle examToHandle : examToHandleList) {
Exam exam = examToHandle.getExam();
if (!examToHandle.isAdded()) {
scoreDirector.beforeEntityAdded(exam);
exam.setPeriod(period);
scoreDirector.afterEntityAdded(exam);
examToHandle.setAdded(true);
} else {
scoreDirector.beforeVariableChanged(exam, "period");
exam.setPeriod(period);
scoreDirector.afterVariableChanged(exam, "period");
}
}
Score score = scoreDirector.calculateScore();
periodScoringList.add(new PeriodScoring(period, score));
}
Collections.sort(periodScoringList);
scheduleLeader(periodScoringList, roomList, scoreDirector, unscheduledScore, examToHandleList, leader);
examList.add(leader);
// Schedule the non leaders
for (ExamToHandle examToHandle : examToHandleList) {
Exam exam = examToHandle.getExam();
// Leader already has a room
if (!exam.isCoincidenceLeader()) {
scheduleNonLeader(roomList, scoreDirector, exam);
examList.add(exam);
}
}
}
Collections.sort(examList, new PersistableIdComparator());
examination.setExamList(examList);
}
private void scheduleLeader(List<PeriodScoring> periodScoringList, List<Room> roomList,
ScoreDirector scoreDirector, Score unscheduledScore,
List<ExamToHandle> examToHandleList, Exam leader) {
boolean perfectMatch = false;
Score bestScore = HardSoftScore.valueOf(Integer.MIN_VALUE, Integer.MIN_VALUE);
Period bestPeriod = null;
Room bestRoom = null;
for (PeriodScoring periodScoring : periodScoringList) {
if (bestScore.compareTo(periodScoring.getScore()) >= 0) {
// No need to check the rest
break;
}
for (ExamToHandle examToHandle : examToHandleList) {
Exam exam = examToHandle.getExam();
scoreDirector.beforeVariableChanged(exam, "period");
exam.setPeriod(periodScoring.getPeriod());
scoreDirector.afterVariableChanged(exam, "period");
}
for (Room room : roomList) {
scoreDirector.beforeVariableChanged(leader, "room");
leader.setRoom(room);
scoreDirector.afterVariableChanged(leader, "room");
Score score = scoreDirector.calculateScore();
if (score.compareTo(unscheduledScore) < 0) {
if (score.compareTo(bestScore) > 0) {
bestScore = score;
bestPeriod = periodScoring.getPeriod();
bestRoom = room;
}
} else if (score.equals(unscheduledScore)) {
perfectMatch = true;
break;
} else {
throw new IllegalStateException("The score (" + score
+ ") cannot be higher than unscheduledScore (" + unscheduledScore + ").");
}
}
if (perfectMatch) {
break;
}
}
if (!perfectMatch) {
if (bestPeriod == null || bestRoom == null) {
throw new IllegalStateException("The bestPeriod (" + bestPeriod + ") or the bestRoom ("
+ bestRoom + ") cannot be null.");
}
scoreDirector.beforeVariableChanged(leader, "room");
leader.setRoom(bestRoom);
scoreDirector.afterVariableChanged(leader, "room");
for (ExamToHandle examToHandle : examToHandleList) {
Exam exam = examToHandle.getExam();
scoreDirector.beforeVariableChanged(exam, "period");
exam.setPeriod(bestPeriod);
scoreDirector.afterVariableChanged(exam, "period");
}
}
logger.debug(" Exam ({}) initialized.", leader);
}
private void scheduleNonLeader(List<Room> roomList, ScoreDirector scoreDirector, Exam exam) {
if (exam.getRoom() != null) {
throw new IllegalStateException("Exam (" + exam + ") already has a room.");
}
Score unscheduledScore = scoreDirector.calculateScore();
boolean perfectMatch = false;
Score bestScore = HardSoftScore.valueOf(Integer.MIN_VALUE, Integer.MIN_VALUE);
Room bestRoom = null;
for (Room room : roomList) {
scoreDirector.beforeVariableChanged(exam, "room");
exam.setRoom(room);
scoreDirector.afterVariableChanged(exam, "room");
Score score = scoreDirector.calculateScore();
if (score.compareTo(unscheduledScore) < 0) {
if (score.compareTo(bestScore) > 0) {
bestScore = score;
bestRoom = room;
}
} else if (score.equals(unscheduledScore)) {
perfectMatch = true;
break;
} else {
throw new IllegalStateException("The score (" + score
+ ") cannot be higher than unscheduledScore (" + unscheduledScore + ").");
}
}
if (!perfectMatch) {
if (bestRoom == null) {
throw new IllegalStateException("The bestRoom ("
+ bestRoom + ") cannot be null.");
}
scoreDirector.beforeVariableChanged(exam, "room");
exam.setRoom(bestRoom);
scoreDirector.afterVariableChanged(exam, "room");
}
logger.debug(" Exam ({}) initialized.", exam);
}
public static class ExamToHandle {
private Exam exam;
private boolean added = false;
public ExamToHandle(Exam exam) {
this.exam = exam;
}
public Exam getExam() {
return exam;
}
public boolean isAdded() {
return added;
}
public void setAdded(boolean added) {
this.added = added;
}
}
/**
* Create and order the exams in the order which we 'll assign them into periods and rooms.
* @param examination not null
* @return not null
*/
private List<ExamInitializationWeight> createExamAssigningScoreList(Examination examination) {
List<Exam> examList = createExamList(examination);
List<ExamInitializationWeight> examInitialWeightList = new ArrayList<ExamInitializationWeight>(examList.size());
for (Exam exam : examList) {
if (exam.isCoincidenceLeader()) {
examInitialWeightList.add(new ExamInitializationWeight(exam));
}
}
Collections.sort(examInitialWeightList);
return examInitialWeightList;
}
public List<Exam> createExamList(Examination examination) {
List<Topic> topicList = examination.getTopicList();
List<Exam> examList = new ArrayList<Exam>(topicList.size());
Map<Topic, Exam> topicToExamMap = new HashMap<Topic, Exam>(topicList.size());
for (Topic topic : topicList) {
Exam exam = new Exam();
exam.setId(topic.getId());
exam.setTopic(topic);
examList.add(exam);
topicToExamMap.put(topic, exam);
}
for (PeriodPenalty periodPenalty : examination.getPeriodPenaltyList()) {
if (periodPenalty.getPeriodPenaltyType() == PeriodPenaltyType.EXAM_COINCIDENCE) {
Exam leftExam = topicToExamMap.get(periodPenalty.getLeftSideTopic());
Exam rightExam = topicToExamMap.get(periodPenalty.getRightSideTopic());
Set<Exam> newCoincidenceExamSet = new LinkedHashSet<Exam>(4);
ExamCoincidence leftExamCoincidence = leftExam.getExamCoincidence();
if (leftExamCoincidence != null) {
newCoincidenceExamSet.addAll(leftExamCoincidence.getCoincidenceExamSet());
} else {
newCoincidenceExamSet.add(leftExam);
}
ExamCoincidence rightExamCoincidence = rightExam.getExamCoincidence();
if (rightExamCoincidence != null) {
newCoincidenceExamSet.addAll(rightExamCoincidence.getCoincidenceExamSet());
} else {
newCoincidenceExamSet.add(rightExam);
}
ExamCoincidence newExamCoincidence = new ExamCoincidence(newCoincidenceExamSet);
for (Exam exam : newCoincidenceExamSet) {
exam.setExamCoincidence(newExamCoincidence);
}
} else if (periodPenalty.getPeriodPenaltyType() == PeriodPenaltyType.AFTER) {
Exam afterExam = topicToExamMap.get(periodPenalty.getLeftSideTopic());
Exam beforeExam = topicToExamMap.get(periodPenalty.getRightSideTopic());
ExamBefore examBefore = beforeExam.getExamBefore();
if (examBefore == null) {
examBefore = new ExamBefore(new LinkedHashSet<Exam>(2));
beforeExam.setExamBefore(examBefore);
}
examBefore.getAfterExamSet().add(afterExam);
}
}
return examList;
}
private static class ExamInitializationWeight implements Comparable<ExamInitializationWeight> {
private Exam exam;
private int totalStudentSize;
private int maximumDuration;
private ExamInitializationWeight(Exam exam) {
this.exam = exam;
totalStudentSize = calculateTotalStudentSize(exam);
maximumDuration = calculateMaximumDuration(exam);
}
private int calculateTotalStudentSize(Exam innerExam) {
int innerTotalStudentSize = 0;
if (innerExam.getExamCoincidence() == null) {
innerTotalStudentSize = innerExam.getTopicStudentSize();
} else {
for (Exam coincidenceExam : innerExam.getExamCoincidence().getCoincidenceExamSet()) {
innerTotalStudentSize += coincidenceExam.getTopicStudentSize();
}
}
if (innerExam.getExamBefore() != null) {
for (Exam afterExam : innerExam.getExamBefore().getAfterExamSet()) {
innerTotalStudentSize += calculateTotalStudentSize(afterExam); // recursive
}
}
return innerTotalStudentSize;
}
private int calculateMaximumDuration(Exam innerExam) {
int innerMaximumDuration = innerExam.getTopic().getDuration();
if (innerExam.getExamCoincidence() != null) {
for (Exam coincidenceExam : innerExam.getExamCoincidence().getCoincidenceExamSet()) {
innerMaximumDuration = Math.max(innerMaximumDuration, coincidenceExam.getTopicStudentSize());
}
}
if (innerExam.getExamBefore() != null) {
for (Exam afterExam : innerExam.getExamBefore().getAfterExamSet()) {
innerMaximumDuration = Math.max(innerMaximumDuration, calculateMaximumDuration(afterExam)); // recursive
}
}
return innerMaximumDuration;
}
public Exam getExam() {
return exam;
}
public int compareTo(ExamInitializationWeight other) {
// TODO calculate a assigningScore based on the properties of a topic and sort on that assigningScore
return new CompareToBuilder()
.append(other.totalStudentSize, totalStudentSize) // Descending
.append(other.maximumDuration, maximumDuration) // Descending
.append(exam.getId(), other.exam.getId()) // Ascending
.toComparison();
}
}
private static class PeriodScoring implements Comparable<PeriodScoring> {
private Period period;
private Score score;
private PeriodScoring(Period period, Score score) {
this.period = period;
this.score = score;
}
public Period getPeriod() {
return period;
}
public Score getScore() {
return score;
}
public int compareTo(PeriodScoring other) {
return -new CompareToBuilder().append(score, other.score).toComparison();
}
}
}
|
optaplanner-examples/src/main/java/org/optaplanner/examples/examination/solver/solution/initializer/ExaminationSolutionInitializer.java
|
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.examples.examination.solver.solution.initializer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.builder.CompareToBuilder;
import org.optaplanner.core.impl.phase.custom.CustomSolverPhaseCommand;
import org.optaplanner.core.api.score.Score;
import org.optaplanner.core.api.score.buildin.hardsoft.HardSoftScore;
import org.optaplanner.core.impl.score.director.ScoreDirector;
import org.optaplanner.examples.common.domain.PersistableIdComparator;
import org.optaplanner.examples.examination.domain.Exam;
import org.optaplanner.examples.examination.domain.Examination;
import org.optaplanner.examples.examination.domain.Period;
import org.optaplanner.examples.examination.domain.PeriodPenalty;
import org.optaplanner.examples.examination.domain.PeriodPenaltyType;
import org.optaplanner.examples.examination.domain.Room;
import org.optaplanner.examples.examination.domain.Topic;
import org.optaplanner.examples.examination.domain.solver.ExamBefore;
import org.optaplanner.examples.examination.domain.solver.ExamCoincidence;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ExaminationSolutionInitializer implements CustomSolverPhaseCommand {
protected final transient Logger logger = LoggerFactory.getLogger(getClass());
public void changeWorkingSolution(ScoreDirector scoreDirector) {
Examination examination = (Examination) scoreDirector.getWorkingSolution();
initializeExamList(scoreDirector, examination);
}
private void initializeExamList(ScoreDirector scoreDirector, Examination examination) {
List<Period> periodList = examination.getPeriodList();
List<Room> roomList = examination.getRoomList();
// TODO the planning entity list from the solution should be used and might already contain initialized entities
List<Exam> examList = new ArrayList<Exam>(examination.getTopicList().size()); // TODO this can be returned from createExamAssigningScoreList
List<ExamInitializationWeight> examInitialWeightList = createExamAssigningScoreList(examination);
for (ExamInitializationWeight examInitialWeight : examInitialWeightList) {
Score unscheduledScore = scoreDirector.calculateScore();
Exam leader = examInitialWeight.getExam();
List<ExamToHandle> examToHandleList = new ArrayList<ExamToHandle>(5);
if (leader.getExamCoincidence() == null) {
examToHandleList.add(new ExamToHandle(leader));
} else {
for (Exam coincidenceExam : leader.getExamCoincidence().getCoincidenceExamSet()) {
examToHandleList.add(new ExamToHandle(coincidenceExam));
}
}
List<PeriodScoring> periodScoringList = new ArrayList<PeriodScoring>(periodList.size());
for (Period period : periodList) {
for (ExamToHandle examToHandle : examToHandleList) {
Exam exam = examToHandle.getExam();
if (!examToHandle.isAdded()) {
scoreDirector.beforeEntityAdded(exam);
exam.setPeriod(period);
scoreDirector.afterEntityAdded(exam);
examToHandle.setAdded(true);
} else {
scoreDirector.beforeVariableChanged(exam, "period");
exam.setPeriod(period);
scoreDirector.afterVariableChanged(exam, "period");
}
}
Score score = scoreDirector.calculateScore();
periodScoringList.add(new PeriodScoring(period, score));
}
Collections.sort(periodScoringList);
scheduleLeader(periodScoringList, roomList, scoreDirector, unscheduledScore, examToHandleList, leader);
examList.add(leader);
// Schedule the non leaders
for (ExamToHandle examToHandle : examToHandleList) {
Exam exam = examToHandle.getExam();
// Leader already has a room
if (!exam.isCoincidenceLeader()) {
scheduleNonLeader(roomList, scoreDirector, exam);
examList.add(exam);
}
}
}
Collections.sort(examList, new PersistableIdComparator());
examination.setExamList(examList);
}
private void scheduleLeader(List<PeriodScoring> periodScoringList, List<Room> roomList,
ScoreDirector scoreDirector, Score unscheduledScore,
List<ExamToHandle> examToHandleList, Exam leader) {
boolean perfectMatch = false;
Score bestScore = HardSoftScore.valueOf(Integer.MIN_VALUE, Integer.MIN_VALUE);
Period bestPeriod = null;
Room bestRoom = null;
for (PeriodScoring periodScoring : periodScoringList) {
if (bestScore.compareTo(periodScoring.getScore()) >= 0) {
// No need to check the rest
break;
}
for (ExamToHandle examToHandle : examToHandleList) {
Exam exam = examToHandle.getExam();
scoreDirector.beforeVariableChanged(exam, "period");
exam.setPeriod(periodScoring.getPeriod());
scoreDirector.afterVariableChanged(exam, "period");
}
for (Room room : roomList) {
scoreDirector.beforeVariableChanged(leader, "room");
leader.setRoom(room);
scoreDirector.afterVariableChanged(leader, "room");
Score score = scoreDirector.calculateScore();
if (score.compareTo(unscheduledScore) < 0) {
if (score.compareTo(bestScore) > 0) {
bestScore = score;
bestPeriod = periodScoring.getPeriod();
bestRoom = room;
}
} else if (score.equals(unscheduledScore)) {
perfectMatch = true;
break;
} else {
throw new IllegalStateException("The score (" + score
+ ") cannot be higher than unscheduledScore (" + unscheduledScore + ").");
}
}
if (perfectMatch) {
break;
}
}
if (!perfectMatch) {
if (bestPeriod == null || bestRoom == null) {
throw new IllegalStateException("The bestPeriod (" + bestPeriod + ") or the bestRoom ("
+ bestRoom + ") cannot be null.");
}
scoreDirector.beforeVariableChanged(leader, "room");
leader.setRoom(bestRoom);
scoreDirector.afterVariableChanged(leader, "room");
for (ExamToHandle examToHandle : examToHandleList) {
Exam exam = examToHandle.getExam();
scoreDirector.beforeVariableChanged(exam, "period");
exam.setPeriod(bestPeriod);
scoreDirector.afterVariableChanged(exam, "period");
}
}
logger.trace(" Exam ({}) initialized.", leader);
}
private void scheduleNonLeader(List<Room> roomList, ScoreDirector scoreDirector, Exam exam) {
if (exam.getRoom() != null) {
throw new IllegalStateException("Exam (" + exam + ") already has a room.");
}
Score unscheduledScore = scoreDirector.calculateScore();
boolean perfectMatch = false;
Score bestScore = HardSoftScore.valueOf(Integer.MIN_VALUE, Integer.MIN_VALUE);
Room bestRoom = null;
for (Room room : roomList) {
scoreDirector.beforeVariableChanged(exam, "room");
exam.setRoom(room);
scoreDirector.afterVariableChanged(exam, "room");
Score score = scoreDirector.calculateScore();
if (score.compareTo(unscheduledScore) < 0) {
if (score.compareTo(bestScore) > 0) {
bestScore = score;
bestRoom = room;
}
} else if (score.equals(unscheduledScore)) {
perfectMatch = true;
break;
} else {
throw new IllegalStateException("The score (" + score
+ ") cannot be higher than unscheduledScore (" + unscheduledScore + ").");
}
}
if (!perfectMatch) {
if (bestRoom == null) {
throw new IllegalStateException("The bestRoom ("
+ bestRoom + ") cannot be null.");
}
scoreDirector.beforeVariableChanged(exam, "room");
exam.setRoom(bestRoom);
scoreDirector.afterVariableChanged(exam, "room");
}
logger.trace(" Exam ({}) initialized.", exam);
}
public static class ExamToHandle {
private Exam exam;
private boolean added = false;
public ExamToHandle(Exam exam) {
this.exam = exam;
}
public Exam getExam() {
return exam;
}
public boolean isAdded() {
return added;
}
public void setAdded(boolean added) {
this.added = added;
}
}
/**
* Create and order the exams in the order which we 'll assign them into periods and rooms.
* @param examination not null
* @return not null
*/
private List<ExamInitializationWeight> createExamAssigningScoreList(Examination examination) {
List<Exam> examList = createExamList(examination);
List<ExamInitializationWeight> examInitialWeightList = new ArrayList<ExamInitializationWeight>(examList.size());
for (Exam exam : examList) {
if (exam.isCoincidenceLeader()) {
examInitialWeightList.add(new ExamInitializationWeight(exam));
}
}
Collections.sort(examInitialWeightList);
return examInitialWeightList;
}
public List<Exam> createExamList(Examination examination) {
List<Topic> topicList = examination.getTopicList();
List<Exam> examList = new ArrayList<Exam>(topicList.size());
Map<Topic, Exam> topicToExamMap = new HashMap<Topic, Exam>(topicList.size());
for (Topic topic : topicList) {
Exam exam = new Exam();
exam.setId(topic.getId());
exam.setTopic(topic);
examList.add(exam);
topicToExamMap.put(topic, exam);
}
for (PeriodPenalty periodPenalty : examination.getPeriodPenaltyList()) {
if (periodPenalty.getPeriodPenaltyType() == PeriodPenaltyType.EXAM_COINCIDENCE) {
Exam leftExam = topicToExamMap.get(periodPenalty.getLeftSideTopic());
Exam rightExam = topicToExamMap.get(periodPenalty.getRightSideTopic());
Set<Exam> newCoincidenceExamSet = new LinkedHashSet<Exam>(4);
ExamCoincidence leftExamCoincidence = leftExam.getExamCoincidence();
if (leftExamCoincidence != null) {
newCoincidenceExamSet.addAll(leftExamCoincidence.getCoincidenceExamSet());
} else {
newCoincidenceExamSet.add(leftExam);
}
ExamCoincidence rightExamCoincidence = rightExam.getExamCoincidence();
if (rightExamCoincidence != null) {
newCoincidenceExamSet.addAll(rightExamCoincidence.getCoincidenceExamSet());
} else {
newCoincidenceExamSet.add(rightExam);
}
ExamCoincidence newExamCoincidence = new ExamCoincidence(newCoincidenceExamSet);
for (Exam exam : newCoincidenceExamSet) {
exam.setExamCoincidence(newExamCoincidence);
}
} else if (periodPenalty.getPeriodPenaltyType() == PeriodPenaltyType.AFTER) {
Exam afterExam = topicToExamMap.get(periodPenalty.getLeftSideTopic());
Exam beforeExam = topicToExamMap.get(periodPenalty.getRightSideTopic());
ExamBefore examBefore = beforeExam.getExamBefore();
if (examBefore == null) {
examBefore = new ExamBefore(new LinkedHashSet<Exam>(2));
beforeExam.setExamBefore(examBefore);
}
examBefore.getAfterExamSet().add(afterExam);
}
}
return examList;
}
private static class ExamInitializationWeight implements Comparable<ExamInitializationWeight> {
private Exam exam;
private int totalStudentSize;
private int maximumDuration;
private ExamInitializationWeight(Exam exam) {
this.exam = exam;
totalStudentSize = calculateTotalStudentSize(exam);
maximumDuration = calculateMaximumDuration(exam);
}
private int calculateTotalStudentSize(Exam innerExam) {
int innerTotalStudentSize = 0;
if (innerExam.getExamCoincidence() == null) {
innerTotalStudentSize = innerExam.getTopicStudentSize();
} else {
for (Exam coincidenceExam : innerExam.getExamCoincidence().getCoincidenceExamSet()) {
innerTotalStudentSize += coincidenceExam.getTopicStudentSize();
}
}
if (innerExam.getExamBefore() != null) {
for (Exam afterExam : innerExam.getExamBefore().getAfterExamSet()) {
innerTotalStudentSize += calculateTotalStudentSize(afterExam); // recursive
}
}
return innerTotalStudentSize;
}
private int calculateMaximumDuration(Exam innerExam) {
int innerMaximumDuration = innerExam.getTopic().getDuration();
if (innerExam.getExamCoincidence() != null) {
for (Exam coincidenceExam : innerExam.getExamCoincidence().getCoincidenceExamSet()) {
innerMaximumDuration = Math.max(innerMaximumDuration, coincidenceExam.getTopicStudentSize());
}
}
if (innerExam.getExamBefore() != null) {
for (Exam afterExam : innerExam.getExamBefore().getAfterExamSet()) {
innerMaximumDuration = Math.max(innerMaximumDuration, calculateMaximumDuration(afterExam)); // recursive
}
}
return innerMaximumDuration;
}
public Exam getExam() {
return exam;
}
public int compareTo(ExamInitializationWeight other) {
// TODO calculate a assigningScore based on the properties of a topic and sort on that assigningScore
return new CompareToBuilder()
.append(other.totalStudentSize, totalStudentSize) // Descending
.append(other.maximumDuration, maximumDuration) // Descending
.append(exam.getId(), other.exam.getId()) // Ascending
.toComparison();
}
}
    /** Pairs a {@link Period} with the {@link Score} of tentatively assigning an exam to it. */
    private static class PeriodScoring implements Comparable<PeriodScoring> {

        private Period period;
        private Score score;

        private PeriodScoring(Period period, Score score) {
            this.period = period;
            this.score = score;
        }

        public Period getPeriod() {
            return period;
        }

        public Score getScore() {
            return score;
        }

        // Negated so that the best (highest) score sorts first.
        public int compareTo(PeriodScoring other) {
            return -new CompareToBuilder().append(score, other.score).toComparison();
        }

    }
}
|
exam: initialized statements are logged in debug
|
optaplanner-examples/src/main/java/org/optaplanner/examples/examination/solver/solution/initializer/ExaminationSolutionInitializer.java
|
exam: initialized statements are logged in debug
|
|
Java
|
apache-2.0
|
091bd1d3757751a29c962e2c0b7f4f8720e06a6a
| 0
|
googleapis/java-spanner,googleapis/java-spanner,looker-open-source/java-spanner,googleapis/java-spanner,looker-open-source/java-spanner,looker-open-source/java-spanner
|
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.spanner.connection;
import static com.google.cloud.spanner.SpannerApiFutures.get;
import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.cloud.Timestamp;
import com.google.cloud.spanner.AsyncResultSet;
import com.google.cloud.spanner.CommitResponse;
import com.google.cloud.spanner.DatabaseClient;
import com.google.cloud.spanner.Dialect;
import com.google.cloud.spanner.ErrorCode;
import com.google.cloud.spanner.Mutation;
import com.google.cloud.spanner.Options;
import com.google.cloud.spanner.Options.QueryOption;
import com.google.cloud.spanner.Options.RpcPriority;
import com.google.cloud.spanner.Options.UpdateOption;
import com.google.cloud.spanner.ReadContext.QueryAnalyzeMode;
import com.google.cloud.spanner.ResultSet;
import com.google.cloud.spanner.ResultSets;
import com.google.cloud.spanner.Spanner;
import com.google.cloud.spanner.SpannerException;
import com.google.cloud.spanner.SpannerExceptionFactory;
import com.google.cloud.spanner.Statement;
import com.google.cloud.spanner.TimestampBound;
import com.google.cloud.spanner.TimestampBound.Mode;
import com.google.cloud.spanner.connection.AbstractStatementParser.ParsedStatement;
import com.google.cloud.spanner.connection.AbstractStatementParser.StatementType;
import com.google.cloud.spanner.connection.StatementExecutor.StatementTimeout;
import com.google.cloud.spanner.connection.UnitOfWork.UnitOfWorkState;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.spanner.v1.ExecuteSqlRequest.QueryOptions;
import com.google.spanner.v1.ResultSetStats;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Stack;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.threeten.bp.Instant;
/** Implementation for {@link Connection}, the generic Spanner connection API (not JDBC). */
class ConnectionImpl implements Connection {
private static final String CLOSED_ERROR_MSG = "This connection is closed";
private static final String ONLY_ALLOWED_IN_AUTOCOMMIT =
"This method may only be called while in autocommit mode";
private static final String NOT_ALLOWED_IN_AUTOCOMMIT =
"This method may not be called while in autocommit mode";
/**
* Exception that is used to register the stacktrace of the code that opened a {@link Connection}.
* This exception is logged if the application closes without first closing the connection.
*/
  static class LeakedConnectionException extends RuntimeException {
    private static final long serialVersionUID = 7119433786832158700L;

    private LeakedConnectionException() {
      // Records the creation time in the message; the stack trace of this exception
      // captures where the connection was opened.
      super("Connection was opened at " + Instant.now());
    }
  }
private volatile LeakedConnectionException leakedException = new LeakedConnectionException();
private final SpannerPool spannerPool;
private AbstractStatementParser statementParser;
/**
* The {@link ConnectionStatementExecutor} is responsible for translating parsed {@link
* ClientSideStatement}s into actual method calls on this {@link ConnectionImpl}. I.e. the {@link
* ClientSideStatement} 'SET AUTOCOMMIT ON' will be translated into the method call {@link
* ConnectionImpl#setAutocommit(boolean)} with value <code>true</code>.
*/
private final ConnectionStatementExecutor connectionStatementExecutor =
new ConnectionStatementExecutorImpl(this);
/** Simple thread factory that is used for fire-and-forget rollbacks. */
static final class DaemonThreadFactory implements ThreadFactory {
@Override
public Thread newThread(Runnable r) {
Thread t = new Thread(r);
t.setName("connection-rollback-executor");
t.setDaemon(true);
return t;
}
}
/**
* Statements are executed using a separate thread in order to be able to cancel these. Statements
* are automatically cancelled if the configured {@link ConnectionImpl#statementTimeout} is
* exceeded. In autocommit mode, the connection will try to rollback the effects of an update
* statement, but this is not guaranteed to actually succeed.
*/
private final StatementExecutor statementExecutor;
/**
* The {@link ConnectionOptions} that were used to create this {@link ConnectionImpl}. This is
* retained as it is used for getting a {@link Spanner} object and removing this connection from
* the {@link SpannerPool}.
*/
private final ConnectionOptions options;
/** The supported batch modes. */
enum BatchMode {
NONE,
DDL,
DML
}
/**
* This query option is used internally to indicate that a query is executed by the library itself
* to fetch metadata. These queries are specifically allowed to be executed even when a DDL batch
* is active.
*/
  static final class InternalMetadataQuery implements QueryOption {
    static final InternalMetadataQuery INSTANCE = new InternalMetadataQuery();

    // Singleton: use INSTANCE.
    private InternalMetadataQuery() {}
  }
/** The combination of all transaction modes and batch modes. */
  enum UnitOfWorkType {
    READ_ONLY_TRANSACTION {
      @Override
      TransactionMode getTransactionMode() {
        return TransactionMode.READ_ONLY_TRANSACTION;
      }
    },
    READ_WRITE_TRANSACTION {
      @Override
      TransactionMode getTransactionMode() {
        return TransactionMode.READ_WRITE_TRANSACTION;
      }
    },
    DML_BATCH {
      // A DML batch is executed as part of a read/write transaction.
      @Override
      TransactionMode getTransactionMode() {
        return TransactionMode.READ_WRITE_TRANSACTION;
      }
    },
    DDL_BATCH {
      // DDL batches are not executed inside a transaction.
      @Override
      TransactionMode getTransactionMode() {
        return null;
      }
    };

    abstract TransactionMode getTransactionMode();

    /** Maps a {@link TransactionMode} to the corresponding unit-of-work type. */
    static UnitOfWorkType of(TransactionMode transactionMode) {
      switch (transactionMode) {
        case READ_ONLY_TRANSACTION:
          return UnitOfWorkType.READ_ONLY_TRANSACTION;
        case READ_WRITE_TRANSACTION:
          return UnitOfWorkType.READ_WRITE_TRANSACTION;
        default:
          throw SpannerExceptionFactory.newSpannerException(
              ErrorCode.INVALID_ARGUMENT, "Unknown transaction mode: " + transactionMode);
      }
    }
  }
private StatementExecutor.StatementTimeout statementTimeout =
new StatementExecutor.StatementTimeout();
private boolean closed = false;
private final Spanner spanner;
private DdlClient ddlClient;
private DatabaseClient dbClient;
private boolean autocommit;
private boolean readOnly;
private boolean returnCommitStats;
private UnitOfWork currentUnitOfWork = null;
/**
* This field is only used in autocommit mode to indicate that the user has explicitly started a
* transaction.
*/
private boolean inTransaction = false;
/**
* This field is used to indicate that a transaction begin has been indicated. This is done by
* calling beginTransaction or by setting a transaction property while not in autocommit mode.
*/
private boolean transactionBeginMarked = false;
private BatchMode batchMode;
private UnitOfWorkType unitOfWorkType;
private final Stack<UnitOfWork> transactionStack = new Stack<>();
private boolean retryAbortsInternally;
private final List<TransactionRetryListener> transactionRetryListeners = new ArrayList<>();
private AutocommitDmlMode autocommitDmlMode = AutocommitDmlMode.TRANSACTIONAL;
private TimestampBound readOnlyStaleness = TimestampBound.strong();
private QueryOptions queryOptions = QueryOptions.getDefaultInstance();
private RpcPriority rpcPriority = null;
private String transactionTag;
private String statementTag;
/** Create a connection and register it in the SpannerPool. */
  ConnectionImpl(ConnectionOptions options) {
    Preconditions.checkNotNull(options);
    this.statementExecutor = new StatementExecutor(options.getStatementExecutionInterceptors());
    this.spannerPool = SpannerPool.INSTANCE;
    this.options = options;
    // Registers this connection with the pool; it is tracked until removeConnection is called.
    this.spanner = spannerPool.getSpanner(options, this);
    if (options.isAutoConfigEmulator()) {
      // Automatically create the instance and database on the emulator if needed.
      EmulatorUtil.maybeCreateInstanceAndDatabase(spanner, options.getDatabaseId());
    }
    this.dbClient = spanner.getDatabaseClient(options.getDatabaseId());
    // Seed the connection state from the supplied options.
    this.retryAbortsInternally = options.isRetryAbortsInternally();
    this.readOnly = options.isReadOnly();
    this.autocommit = options.isAutocommit();
    this.queryOptions = this.queryOptions.toBuilder().mergeFrom(options.getQueryOptions()).build();
    this.rpcPriority = options.getRPCPriority();
    this.returnCommitStats = options.isReturnCommitStats();
    this.ddlClient = createDdlClient();
    setDefaultTransactionOptions();
  }
/** Constructor only for test purposes. */
  @VisibleForTesting
  ConnectionImpl(
      ConnectionOptions options,
      SpannerPool spannerPool,
      DdlClient ddlClient,
      DatabaseClient dbClient) {
    Preconditions.checkNotNull(options);
    Preconditions.checkNotNull(spannerPool);
    Preconditions.checkNotNull(ddlClient);
    Preconditions.checkNotNull(dbClient);
    // No statement interceptors in tests.
    this.statementExecutor = new StatementExecutor(Collections.emptyList());
    this.spannerPool = spannerPool;
    this.options = options;
    this.spanner = spannerPool.getSpanner(options, this);
    this.ddlClient = ddlClient;
    this.dbClient = dbClient;
    // Use the public setters here (unlike the production constructor) so their
    // precondition checks also run in tests.
    setReadOnly(options.isReadOnly());
    setAutocommit(options.isAutocommit());
    setReturnCommitStats(options.isReturnCommitStats());
    setDefaultTransactionOptions();
  }
  /** Returns the {@link Spanner} instance backing this connection (test hook). */
  @VisibleForTesting
  Spanner getSpanner() {
    return this.spanner;
  }
  /** Builds a {@link DdlClient} for the instance/database this connection points to. */
  private DdlClient createDdlClient() {
    return DdlClient.newBuilder()
        .setDatabaseAdminClient(spanner.getDatabaseAdminClient())
        .setInstanceId(options.getInstanceId())
        .setDatabaseName(options.getDatabaseName())
        .build();
  }
private AbstractStatementParser getStatementParser() {
if (this.statementParser == null) {
this.statementParser = AbstractStatementParser.getInstance(dbClient.getDialect());
}
return this.statementParser;
}
@Override
public void close() {
try {
closeAsync().get(10L, TimeUnit.SECONDS);
} catch (SpannerException | InterruptedException | ExecutionException | TimeoutException e) {
// ignore and continue to close the connection.
} finally {
statementExecutor.shutdownNow();
}
}
  /** Closes this connection asynchronously, aborting batches and rolling back open transactions. */
  public ApiFuture<Void> closeAsync() {
    synchronized (this) {
      if (!isClosed()) {
        List<ApiFuture<Void>> futures = new ArrayList<>();
        if (isBatchActive()) {
          abortBatch();
        }
        if (isTransactionStarted()) {
          try {
            futures.add(rollbackAsync());
          } catch (Exception exception) {
            // ignore and continue to close the connection.
          }
        }
        // Try to wait for the current statement to finish (if any) before we actually close the
        // connection.
        this.closed = true;
        // Add a no-op statement to the executor. Once this has been executed, we know that all
        // preceding statements have also been executed, as the executor is single-threaded and
        // executes all statements in order of submitting. The Executor#submit method can throw a
        // RejectedExecutionException if the executor is no longer in state where it accepts new
        // tasks.
        try {
          futures.add(statementExecutor.submit(() -> null));
        } catch (RejectedExecutionException ignored) {
          // ignore and continue to close the connection.
        }
        statementExecutor.shutdown();
        // Clearing the leaked exception marks this connection as properly closed.
        leakedException = null;
        spannerPool.removeConnection(options, this);
        return ApiFutures.transform(
            ApiFutures.allAsList(futures), ignored -> null, MoreExecutors.directExecutor());
      }
    }
    return ApiFutures.immediateFuture(null);
  }
  /** Get the current unit-of-work type of this connection. */
  UnitOfWorkType getUnitOfWorkType() {
    return unitOfWorkType;
  }

  /** Get the current batch mode of this connection. */
  BatchMode getBatchMode() {
    return batchMode;
  }

  /** @return <code>true</code> if this connection is in a batch. */
  boolean isInBatch() {
    return batchMode != BatchMode.NONE;
  }

  /** Get the call stack from when the {@link Connection} was opened. */
  LeakedConnectionException getLeakedException() {
    return leakedException;
  }

  /** Returns the SQL dialect of the underlying database. */
  @Override
  public Dialect getDialect() {
    return dbClient.getDialect();
  }

  /** Returns the {@link DatabaseClient} that is used by this connection. */
  @Override
  public DatabaseClient getDatabaseClient() {
    return dbClient;
  }

  @Override
  public boolean isClosed() {
    return closed;
  }
  @Override
  public void setAutocommit(boolean autocommit) {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(!isBatchActive(), "Cannot set autocommit while in a batch");
    ConnectionPreconditions.checkState(
        !isTransactionStarted(), "Cannot set autocommit while a transaction is active");
    ConnectionPreconditions.checkState(
        !(isAutocommit() && isInTransaction()),
        "Cannot set autocommit while in a temporary transaction");
    ConnectionPreconditions.checkState(
        !transactionBeginMarked, "Cannot set autocommit when a transaction has begun");
    this.autocommit = autocommit;
    clearLastTransactionAndSetDefaultTransactionOptions();
    // Reset the readOnlyStaleness value if it is no longer compatible with the new autocommit
    // value.
    if (!autocommit
        && (readOnlyStaleness.getMode() == Mode.MAX_STALENESS
            || readOnlyStaleness.getMode() == Mode.MIN_READ_TIMESTAMP)) {
      readOnlyStaleness = TimestampBound.strong();
    }
  }

  @Override
  public boolean isAutocommit() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return internalIsAutocommit();
  }

  // Internal accessor that skips the closed-check, for use on close/cleanup paths.
  private boolean internalIsAutocommit() {
    return this.autocommit;
  }

  @Override
  public void setReadOnly(boolean readOnly) {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(!isBatchActive(), "Cannot set read-only while in a batch");
    ConnectionPreconditions.checkState(
        !isTransactionStarted(), "Cannot set read-only while a transaction is active");
    ConnectionPreconditions.checkState(
        !(isAutocommit() && isInTransaction()),
        "Cannot set read-only while in a temporary transaction");
    ConnectionPreconditions.checkState(
        !transactionBeginMarked, "Cannot set read-only when a transaction has begun");
    this.readOnly = readOnly;
    clearLastTransactionAndSetDefaultTransactionOptions();
  }

  @Override
  public boolean isReadOnly() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return this.readOnly;
  }

  // Discards any previous transaction and resets the transaction options to the defaults.
  private void clearLastTransactionAndSetDefaultTransactionOptions() {
    setDefaultTransactionOptions();
    this.currentUnitOfWork = null;
  }
  @Override
  public void setAutocommitDmlMode(AutocommitDmlMode mode) {
    Preconditions.checkNotNull(mode);
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(
        !isBatchActive(), "Cannot set autocommit DML mode while in a batch");
    ConnectionPreconditions.checkState(
        !isInTransaction() && isAutocommit(),
        "Cannot set autocommit DML mode while not in autocommit mode or while a transaction is active");
    ConnectionPreconditions.checkState(
        !isReadOnly(), "Cannot set autocommit DML mode for a read-only connection");
    this.autocommitDmlMode = mode;
  }

  @Override
  public AutocommitDmlMode getAutocommitDmlMode() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(
        !isBatchActive(), "Cannot get autocommit DML mode while in a batch");
    return this.autocommitDmlMode;
  }

  @Override
  public void setReadOnlyStaleness(TimestampBound staleness) {
    Preconditions.checkNotNull(staleness);
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(!isBatchActive(), "Cannot set read-only while in a batch");
    ConnectionPreconditions.checkState(
        !isTransactionStarted(),
        "Cannot set read-only staleness when a transaction has been started");
    if (staleness.getMode() == Mode.MAX_STALENESS
        || staleness.getMode() == Mode.MIN_READ_TIMESTAMP) {
      // These values are only allowed in autocommit mode.
      ConnectionPreconditions.checkState(
          isAutocommit() && !inTransaction,
          "MAX_STALENESS and MIN_READ_TIMESTAMP are only allowed in autocommit mode");
    }
    this.readOnlyStaleness = staleness;
  }

  @Override
  public TimestampBound getReadOnlyStaleness() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(!isBatchActive(), "Cannot get read-only while in a batch");
    return this.readOnlyStaleness;
  }
  @Override
  public void setOptimizerVersion(String optimizerVersion) {
    Preconditions.checkNotNull(optimizerVersion);
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    // QueryOptions is immutable: rebuild it with the new optimizer version.
    this.queryOptions = queryOptions.toBuilder().setOptimizerVersion(optimizerVersion).build();
  }

  @Override
  public String getOptimizerVersion() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return this.queryOptions.getOptimizerVersion();
  }

  @Override
  public void setOptimizerStatisticsPackage(String optimizerStatisticsPackage) {
    Preconditions.checkNotNull(optimizerStatisticsPackage);
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    this.queryOptions =
        queryOptions.toBuilder().setOptimizerStatisticsPackage(optimizerStatisticsPackage).build();
  }

  @Override
  public String getOptimizerStatisticsPackage() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return this.queryOptions.getOptimizerStatisticsPackage();
  }

  @Override
  public void setRPCPriority(RpcPriority rpcPriority) {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    this.rpcPriority = rpcPriority;
  }

  @Override
  public RpcPriority getRPCPriority() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return this.rpcPriority;
  }
  @Override
  public void setStatementTimeout(long timeout, TimeUnit unit) {
    Preconditions.checkArgument(timeout > 0L, "Zero or negative timeout values are not allowed");
    Preconditions.checkArgument(
        StatementTimeout.isValidTimeoutUnit(unit),
        "Time unit must be one of NANOSECONDS, MICROSECONDS, MILLISECONDS or SECONDS");
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    this.statementTimeout.setTimeoutValue(timeout, unit);
  }

  @Override
  public void clearStatementTimeout() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    this.statementTimeout.clearTimeoutValue();
  }

  @Override
  public long getStatementTimeout(TimeUnit unit) {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    Preconditions.checkArgument(
        StatementTimeout.isValidTimeoutUnit(unit),
        "Time unit must be one of NANOSECONDS, MICROSECONDS, MILLISECONDS or SECONDS");
    return this.statementTimeout.getTimeoutValue(unit);
  }

  @Override
  public boolean hasStatementTimeout() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return this.statementTimeout.hasTimeout();
  }
  /** Cancels the currently running statement on this connection (no-op if none is running). */
  @Override
  public void cancel() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    if (this.currentUnitOfWork != null) {
      currentUnitOfWork.cancel();
    }
  }
  @Override
  public TransactionMode getTransactionMode() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(!isDdlBatchActive(), "This connection is in a DDL batch");
    ConnectionPreconditions.checkState(isInTransaction(), "This connection has no transaction");
    return unitOfWorkType.getTransactionMode();
  }

  @Override
  public void setTransactionMode(TransactionMode transactionMode) {
    Preconditions.checkNotNull(transactionMode);
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(
        !isBatchActive(), "Cannot set transaction mode while in a batch");
    ConnectionPreconditions.checkState(isInTransaction(), "This connection has no transaction");
    ConnectionPreconditions.checkState(
        !isTransactionStarted(),
        "The transaction mode cannot be set after the transaction has started");
    ConnectionPreconditions.checkState(
        !isReadOnly() || transactionMode == TransactionMode.READ_ONLY_TRANSACTION,
        "The transaction mode can only be READ_ONLY when the connection is in read_only mode");
    // Setting a transaction mode implicitly marks the start of a new transaction.
    this.transactionBeginMarked = true;
    this.unitOfWorkType = UnitOfWorkType.of(transactionMode);
  }
  @Override
  public String getTransactionTag() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(!isDdlBatchActive(), "This connection is in a DDL batch");
    return transactionTag;
  }

  @Override
  public void setTransactionTag(String tag) {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(
        !isBatchActive(), "Cannot set transaction tag while in a batch");
    ConnectionPreconditions.checkState(isInTransaction(), "This connection has no transaction");
    ConnectionPreconditions.checkState(
        !isTransactionStarted(),
        "The transaction tag cannot be set after the transaction has started");
    ConnectionPreconditions.checkState(
        getTransactionMode() == TransactionMode.READ_WRITE_TRANSACTION,
        "Transaction tag can only be set for a read/write transaction");
    // Setting a transaction tag implicitly marks the start of a new transaction.
    this.transactionBeginMarked = true;
    this.transactionTag = tag;
  }

  @Override
  public String getStatementTag() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(
        !isBatchActive(), "Statement tags are not allowed inside a batch");
    return statementTag;
  }

  @Override
  public void setStatementTag(String tag) {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(
        !isBatchActive(), "Statement tags are not allowed inside a batch");
    this.statementTag = tag;
  }
/**
* Throws an {@link SpannerException} with code {@link ErrorCode#FAILED_PRECONDITION} if the
* current state of this connection does not allow changing the setting for retryAbortsInternally.
*/
  private void checkSetRetryAbortsInternallyAvailable() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(isInTransaction(), "This connection has no transaction");
    ConnectionPreconditions.checkState(
        getTransactionMode() == TransactionMode.READ_WRITE_TRANSACTION,
        "RetryAbortsInternally is only available for read-write transactions");
    ConnectionPreconditions.checkState(
        !isTransactionStarted(),
        "RetryAbortsInternally cannot be set after the transaction has started");
  }

  @Override
  public boolean isRetryAbortsInternally() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return retryAbortsInternally;
  }

  @Override
  public void setRetryAbortsInternally(boolean retryAbortsInternally) {
    // All state validation happens in the check method above.
    checkSetRetryAbortsInternallyAvailable();
    this.retryAbortsInternally = retryAbortsInternally;
  }
  @Override
  public void addTransactionRetryListener(TransactionRetryListener listener) {
    // NOTE(review): unlike most methods on this class, the listener methods do not check
    // isClosed() — confirm whether that is intentional.
    Preconditions.checkNotNull(listener);
    transactionRetryListeners.add(listener);
  }

  @Override
  public boolean removeTransactionRetryListener(TransactionRetryListener listener) {
    Preconditions.checkNotNull(listener);
    return transactionRetryListeners.remove(listener);
  }

  @Override
  public Iterator<TransactionRetryListener> getTransactionRetryListeners() {
    // Read-only view so callers cannot modify the internal listener list.
    return Collections.unmodifiableList(transactionRetryListeners).iterator();
  }
  @Override
  public boolean isInTransaction() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return internalIsInTransaction();
  }

  /** Returns true if this connection currently is in a transaction (and not a batch). */
  private boolean internalIsInTransaction() {
    return !isDdlBatchActive() && (!internalIsAutocommit() || inTransaction);
  }

  @Override
  public boolean isTransactionStarted() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return internalIsTransactionStarted();
  }

  // A transaction counts as 'started' once the underlying unit of work has begun executing.
  private boolean internalIsTransactionStarted() {
    if (internalIsAutocommit() && !inTransaction) {
      return false;
    }
    return internalIsInTransaction()
        && this.currentUnitOfWork != null
        && this.currentUnitOfWork.getState() == UnitOfWorkState.STARTED;
  }
  @Override
  public Timestamp getReadTimestamp() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(
        this.currentUnitOfWork != null, "There is no transaction on this connection");
    return this.currentUnitOfWork.getReadTimestamp();
  }

  // Like getReadTimestamp(), but returns null instead of throwing when there is no transaction.
  Timestamp getReadTimestampOrNull() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return this.currentUnitOfWork == null ? null : this.currentUnitOfWork.getReadTimestampOrNull();
  }

  @Override
  public Timestamp getCommitTimestamp() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(
        this.currentUnitOfWork != null, "There is no transaction on this connection");
    return this.currentUnitOfWork.getCommitTimestamp();
  }

  // Like getCommitTimestamp(), but returns null instead of throwing when not available.
  Timestamp getCommitTimestampOrNull() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return this.currentUnitOfWork == null
        ? null
        : this.currentUnitOfWork.getCommitTimestampOrNull();
  }

  @Override
  public CommitResponse getCommitResponse() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(
        this.currentUnitOfWork != null, "There is no transaction on this connection");
    return this.currentUnitOfWork.getCommitResponse();
  }

  // Like getCommitResponse(), but returns null instead of throwing when not available.
  CommitResponse getCommitResponseOrNull() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return this.currentUnitOfWork == null ? null : this.currentUnitOfWork.getCommitResponseOrNull();
  }

  @Override
  public void setReturnCommitStats(boolean returnCommitStats) {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    this.returnCommitStats = returnCommitStats;
  }

  @Override
  public boolean isReturnCommitStats() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return this.returnCommitStats;
  }
/** Resets this connection to its default transaction options. */
  private void setDefaultTransactionOptions() {
    if (transactionStack.isEmpty()) {
      unitOfWorkType =
          isReadOnly()
              ? UnitOfWorkType.READ_ONLY_TRANSACTION
              : UnitOfWorkType.READ_WRITE_TRANSACTION;
      batchMode = BatchMode.NONE;
      transactionTag = null;
    } else {
      // Inside a nested unit of work: restore the options of the enclosing unit of work.
      popUnitOfWorkFromTransactionStack();
    }
  }
  @Override
  public void beginTransaction() {
    get(beginTransactionAsync());
  }

  @Override
  public ApiFuture<Void> beginTransactionAsync() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(
        !isBatchActive(), "This connection has an active batch and cannot begin a transaction");
    ConnectionPreconditions.checkState(
        !isTransactionStarted(),
        "Beginning a new transaction is not allowed when a transaction is already running");
    ConnectionPreconditions.checkState(!transactionBeginMarked, "A transaction has already begun");
    transactionBeginMarked = true;
    clearLastTransactionAndSetDefaultTransactionOptions();
    if (isAutocommit()) {
      // In autocommit mode an explicit BEGIN starts a temporary transaction.
      inTransaction = true;
    }
    return ApiFutures.immediateFuture(null);
  }
/** Internal interface for ending a transaction (commit/rollback). */
  private interface EndTransactionMethod {
    ApiFuture<Void> endAsync(UnitOfWork t);
  }

  private static final class Commit implements EndTransactionMethod {
    @Override
    public ApiFuture<Void> endAsync(UnitOfWork t) {
      return t.commitAsync();
    }
  }

  private final Commit commit = new Commit();

  @Override
  public void commit() {
    get(commitAsync());
  }

  public ApiFuture<Void> commitAsync() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return endCurrentTransactionAsync(commit);
  }

  private static final class Rollback implements EndTransactionMethod {
    @Override
    public ApiFuture<Void> endAsync(UnitOfWork t) {
      return t.rollbackAsync();
    }
  }

  private final Rollback rollback = new Rollback();

  @Override
  public void rollback() {
    get(rollbackAsync());
  }

  public ApiFuture<Void> rollbackAsync() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return endCurrentTransactionAsync(rollback);
  }

  // Shared implementation for commit and rollback: ends the current transaction (if it actually
  // started) and always resets the connection's transaction state in the finally block.
  private ApiFuture<Void> endCurrentTransactionAsync(EndTransactionMethod endTransactionMethod) {
    ConnectionPreconditions.checkState(!isBatchActive(), "This connection has an active batch");
    ConnectionPreconditions.checkState(isInTransaction(), "This connection has no transaction");
    ConnectionPreconditions.checkState(
        statementTag == null, "Statement tags are not supported for COMMIT or ROLLBACK");
    ApiFuture<Void> res;
    try {
      if (isTransactionStarted()) {
        res = endTransactionMethod.endAsync(getCurrentUnitOfWorkOrStartNewUnitOfWork());
      } else {
        // Nothing was executed in the transaction: just clear it.
        this.currentUnitOfWork = null;
        res = ApiFutures.immediateFuture(null);
      }
    } finally {
      transactionBeginMarked = false;
      if (isAutocommit()) {
        inTransaction = false;
      }
      setDefaultTransactionOptions();
    }
    return res;
  }
  /** Parses and executes any supported statement type (client-side, query, DML or DDL). */
  @Override
  public StatementResult execute(Statement statement) {
    Preconditions.checkNotNull(statement);
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ParsedStatement parsedStatement = getStatementParser().parse(statement, this.queryOptions);
    switch (parsedStatement.getType()) {
      case CLIENT_SIDE:
        return parsedStatement
            .getClientSideStatement()
            .execute(connectionStatementExecutor, parsedStatement.getSqlWithoutComments());
      case QUERY:
        return StatementResultImpl.of(internalExecuteQuery(parsedStatement, AnalyzeMode.NONE));
      case UPDATE:
        return StatementResultImpl.of(get(internalExecuteUpdateAsync(parsedStatement)));
      case DDL:
        get(executeDdlAsync(parsedStatement));
        return StatementResultImpl.noResult();
      case UNKNOWN:
      default:
        // Fall through to the exception below.
    }
    throw SpannerExceptionFactory.newSpannerException(
        ErrorCode.INVALID_ARGUMENT,
        "Unknown statement: " + parsedStatement.getSqlWithoutComments());
  }
  /** Asynchronous variant of {@link #execute(Statement)}. */
  @Override
  public AsyncStatementResult executeAsync(Statement statement) {
    Preconditions.checkNotNull(statement);
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ParsedStatement parsedStatement = getStatementParser().parse(statement, this.queryOptions);
    switch (parsedStatement.getType()) {
      case CLIENT_SIDE:
        return AsyncStatementResultImpl.of(
            parsedStatement
                .getClientSideStatement()
                .execute(connectionStatementExecutor, parsedStatement.getSqlWithoutComments()),
            spanner.getAsyncExecutorProvider());
      case QUERY:
        return AsyncStatementResultImpl.of(
            internalExecuteQueryAsync(parsedStatement, AnalyzeMode.NONE));
      case UPDATE:
        return AsyncStatementResultImpl.of(internalExecuteUpdateAsync(parsedStatement));
      case DDL:
        return AsyncStatementResultImpl.noResult(executeDdlAsync(parsedStatement));
      case UNKNOWN:
      default:
        // Fall through to the exception below.
    }
    throw SpannerExceptionFactory.newSpannerException(
        ErrorCode.INVALID_ARGUMENT,
        "Unknown statement: " + parsedStatement.getSqlWithoutComments());
  }
  @Override
  public ResultSet executeQuery(Statement query, QueryOption... options) {
    return parseAndExecuteQuery(query, AnalyzeMode.NONE, options);
  }

  @Override
  public AsyncResultSet executeQueryAsync(Statement query, QueryOption... options) {
    return parseAndExecuteQueryAsync(query, AnalyzeMode.NONE, options);
  }

  /** Executes the query in analyze mode, returning query plan/profile information. */
  @Override
  public ResultSet analyzeQuery(Statement query, QueryAnalyzeMode queryMode) {
    Preconditions.checkNotNull(queryMode);
    return parseAndExecuteQuery(query, AnalyzeMode.of(queryMode));
  }
  /**
   * Parses the given statement as a query and executes it. Throws a {@link SpannerException} if the
   * statement is not a query.
   *
   * @param query the statement to parse and execute
   * @param analyzeMode whether to execute normally ({@code NONE}) or in analyze (PLAN/PROFILE) mode
   * @param options additional query options to apply
   */
  private ResultSet parseAndExecuteQuery(
      Statement query, AnalyzeMode analyzeMode, QueryOption... options) {
    Preconditions.checkNotNull(query);
    Preconditions.checkNotNull(analyzeMode);
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ParsedStatement parsedStatement = getStatementParser().parse(query, this.queryOptions);
    if (parsedStatement.isQuery()) {
      switch (parsedStatement.getType()) {
        case CLIENT_SIDE:
          // Client-side statements (e.g. SHOW VARIABLE ...) can also produce a result set.
          return parsedStatement
              .getClientSideStatement()
              .execute(connectionStatementExecutor, parsedStatement.getSqlWithoutComments())
              .getResultSet();
        case QUERY:
          return internalExecuteQuery(parsedStatement, analyzeMode, options);
        case UPDATE:
        case DDL:
        case UNKNOWN:
        default:
      }
    }
    // Anything that is not a query falls through to this rejection.
    throw SpannerExceptionFactory.newSpannerException(
        ErrorCode.INVALID_ARGUMENT,
        "Statement is not a query: " + parsedStatement.getSqlWithoutComments());
  }
private AsyncResultSet parseAndExecuteQueryAsync(
Statement query, AnalyzeMode analyzeMode, QueryOption... options) {
Preconditions.checkNotNull(query);
ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
ParsedStatement parsedStatement = getStatementParser().parse(query, this.queryOptions);
if (parsedStatement.isQuery()) {
switch (parsedStatement.getType()) {
case CLIENT_SIDE:
return ResultSets.toAsyncResultSet(
parsedStatement
.getClientSideStatement()
.execute(connectionStatementExecutor, parsedStatement.getSqlWithoutComments())
.getResultSet(),
spanner.getAsyncExecutorProvider(),
options);
case QUERY:
return internalExecuteQueryAsync(parsedStatement, analyzeMode, options);
case UPDATE:
case DDL:
case UNKNOWN:
default:
}
}
throw SpannerExceptionFactory.newSpannerException(
ErrorCode.INVALID_ARGUMENT,
"Statement is not a query: " + parsedStatement.getSqlWithoutComments());
}
  /**
   * Synchronously executes a DML statement and returns its update count. Throws a {@link
   * SpannerException} if the statement is not an update.
   */
  @Override
  public long executeUpdate(Statement update) {
    Preconditions.checkNotNull(update);
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ParsedStatement parsedStatement = getStatementParser().parse(update);
    if (parsedStatement.isUpdate()) {
      switch (parsedStatement.getType()) {
        case UPDATE:
          // Blocks on the async execution via SpannerApiFutures.get.
          return get(internalExecuteUpdateAsync(parsedStatement));
        case CLIENT_SIDE:
        case QUERY:
        case DDL:
        case UNKNOWN:
        default:
      }
    }
    throw SpannerExceptionFactory.newSpannerException(
        ErrorCode.INVALID_ARGUMENT,
        "Statement is not an update statement: " + parsedStatement.getSqlWithoutComments());
  }

  /** Asynchronous variant of {@link #executeUpdate(Statement)}. */
  @Override
  public ApiFuture<Long> executeUpdateAsync(Statement update) {
    Preconditions.checkNotNull(update);
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ParsedStatement parsedStatement = getStatementParser().parse(update);
    if (parsedStatement.isUpdate()) {
      switch (parsedStatement.getType()) {
        case UPDATE:
          return internalExecuteUpdateAsync(parsedStatement);
        case CLIENT_SIDE:
        case QUERY:
        case DDL:
        case UNKNOWN:
        default:
      }
    }
    throw SpannerExceptionFactory.newSpannerException(
        ErrorCode.INVALID_ARGUMENT,
        "Statement is not an update statement: " + parsedStatement.getSqlWithoutComments());
  }
@Override
public ResultSetStats analyzeUpdate(Statement update, QueryAnalyzeMode analyzeMode) {
Preconditions.checkNotNull(update);
ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
ParsedStatement parsedStatement = getStatementParser().parse(update);
if (parsedStatement.isUpdate()) {
switch (parsedStatement.getType()) {
case UPDATE:
return get(internalAnalyzeUpdateAsync(parsedStatement, AnalyzeMode.of(analyzeMode)));
case CLIENT_SIDE:
case QUERY:
case DDL:
case UNKNOWN:
default:
}
}
throw SpannerExceptionFactory.newSpannerException(
ErrorCode.INVALID_ARGUMENT,
"Statement is not an update statement: " + parsedStatement.getSqlWithoutComments());
}
@Override
public long[] executeBatchUpdate(Iterable<Statement> updates) {
Preconditions.checkNotNull(updates);
ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
// Check that there are only DML statements in the input.
List<ParsedStatement> parsedStatements = new LinkedList<>();
for (Statement update : updates) {
ParsedStatement parsedStatement = getStatementParser().parse(update);
switch (parsedStatement.getType()) {
case UPDATE:
parsedStatements.add(parsedStatement);
break;
case CLIENT_SIDE:
case QUERY:
case DDL:
case UNKNOWN:
default:
throw SpannerExceptionFactory.newSpannerException(
ErrorCode.INVALID_ARGUMENT,
"The batch update list contains a statement that is not an update statement: "
+ parsedStatement.getSqlWithoutComments());
}
}
return get(internalExecuteBatchUpdateAsync(parsedStatements));
}
@Override
public ApiFuture<long[]> executeBatchUpdateAsync(Iterable<Statement> updates) {
Preconditions.checkNotNull(updates);
ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
// Check that there are only DML statements in the input.
List<ParsedStatement> parsedStatements = new LinkedList<>();
for (Statement update : updates) {
ParsedStatement parsedStatement = getStatementParser().parse(update);
switch (parsedStatement.getType()) {
case UPDATE:
parsedStatements.add(parsedStatement);
break;
case CLIENT_SIDE:
case QUERY:
case DDL:
case UNKNOWN:
default:
throw SpannerExceptionFactory.newSpannerException(
ErrorCode.INVALID_ARGUMENT,
"The batch update list contains a statement that is not an update statement: "
+ parsedStatement.getSqlWithoutComments());
}
}
return internalExecuteBatchUpdateAsync(parsedStatements);
}
private QueryOption[] mergeQueryStatementTag(QueryOption... options) {
if (this.statementTag != null) {
// Shortcut for the most common scenario.
if (options == null || options.length == 0) {
options = new QueryOption[] {Options.tag(statementTag)};
} else {
options = Arrays.copyOf(options, options.length + 1);
options[options.length - 1] = Options.tag(statementTag);
}
this.statementTag = null;
}
return options;
}
private QueryOption[] mergeQueryRequestOptions(QueryOption... options) {
if (this.rpcPriority != null) {
// Shortcut for the most common scenario.
if (options == null || options.length == 0) {
options = new QueryOption[] {Options.priority(this.rpcPriority)};
} else {
options = Arrays.copyOf(options, options.length + 1);
options[options.length - 1] = Options.priority(this.rpcPriority);
}
}
return options;
}
private UpdateOption[] mergeUpdateStatementTag(UpdateOption... options) {
if (this.statementTag != null) {
// Shortcut for the most common scenario.
if (options == null || options.length == 0) {
options = new UpdateOption[] {Options.tag(statementTag)};
} else {
options = Arrays.copyOf(options, options.length + 1);
options[options.length - 1] = Options.tag(statementTag);
}
this.statementTag = null;
}
return options;
}
private UpdateOption[] mergeUpdateRequestOptions(UpdateOption... options) {
if (this.rpcPriority != null) {
// Shortcut for the most common scenario.
if (options == null || options.length == 0) {
options = new UpdateOption[] {Options.priority(this.rpcPriority)};
} else {
options = Arrays.copyOf(options, options.length + 1);
options[options.length - 1] = Options.priority(this.rpcPriority);
}
}
return options;
}
  /**
   * Executes a parsed query (or analyzed DML statement) on the current unit of work, blocking
   * until the result is available. Note that a DML statement is accepted here as well, but only in
   * analyze mode (see the precondition).
   */
  private ResultSet internalExecuteQuery(
      final ParsedStatement statement,
      final AnalyzeMode analyzeMode,
      final QueryOption... options) {
    Preconditions.checkArgument(
        statement.getType() == StatementType.QUERY
            || (statement.getType() == StatementType.UPDATE && analyzeMode != AnalyzeMode.NONE),
        "Statement must either be a query or a DML mode with analyzeMode!=NONE");
    UnitOfWork transaction = getCurrentUnitOfWorkOrStartNewUnitOfWork();
    return get(
        transaction.executeQueryAsync(
            statement, analyzeMode, mergeQueryRequestOptions(mergeQueryStatementTag(options))));
  }

  /**
   * Asynchronous variant of {@link #internalExecuteQuery}. Only accepts plain queries (no analyzed
   * DML).
   */
  private AsyncResultSet internalExecuteQueryAsync(
      final ParsedStatement statement,
      final AnalyzeMode analyzeMode,
      final QueryOption... options) {
    Preconditions.checkArgument(
        statement.getType() == StatementType.QUERY, "Statement must be a query");
    UnitOfWork transaction = getCurrentUnitOfWorkOrStartNewUnitOfWork();
    // The merged (tag + priority) options are passed to the query execution; the original,
    // unmerged options are passed to the AsyncResultSet wrapper.
    return ResultSets.toAsyncResultSet(
        transaction.executeQueryAsync(
            statement, analyzeMode, mergeQueryRequestOptions(mergeQueryStatementTag(options))),
        spanner.getAsyncExecutorProvider(),
        options);
  }
  /**
   * Executes a parsed DML statement on the current unit of work, merging the pending statement tag
   * and the connection RPC priority into the update options.
   */
  private ApiFuture<Long> internalExecuteUpdateAsync(
      final ParsedStatement update, UpdateOption... options) {
    Preconditions.checkArgument(
        update.getType() == StatementType.UPDATE, "Statement must be an update");
    UnitOfWork transaction = getCurrentUnitOfWorkOrStartNewUnitOfWork();
    return transaction.executeUpdateAsync(
        update, mergeUpdateRequestOptions(mergeUpdateStatementTag(options)));
  }

  /** Analyzes a parsed DML statement on the current unit of work in the given analyze mode. */
  private ApiFuture<ResultSetStats> internalAnalyzeUpdateAsync(
      final ParsedStatement update, AnalyzeMode analyzeMode, UpdateOption... options) {
    Preconditions.checkArgument(
        update.getType() == StatementType.UPDATE, "Statement must be an update");
    UnitOfWork transaction = getCurrentUnitOfWorkOrStartNewUnitOfWork();
    return transaction.analyzeUpdateAsync(
        update, analyzeMode, mergeUpdateRequestOptions(mergeUpdateStatementTag(options)));
  }

  /** Executes a list of already-validated DML statements as a batch on the current unit of work. */
  private ApiFuture<long[]> internalExecuteBatchUpdateAsync(
      List<ParsedStatement> updates, UpdateOption... options) {
    UnitOfWork transaction = getCurrentUnitOfWorkOrStartNewUnitOfWork();
    return transaction.executeBatchUpdateAsync(
        updates, mergeUpdateRequestOptions(mergeUpdateStatementTag(options)));
  }
  /**
   * Returns the current {@link UnitOfWork} of this connection, or creates a new one based on the
   * current transaction settings of the connection and returns that.
   */
  @VisibleForTesting
  UnitOfWork getCurrentUnitOfWorkOrStartNewUnitOfWork() {
    // A new unit of work is needed both on first use and after the previous one is no longer
    // active (e.g. it finished or was aborted).
    if (this.currentUnitOfWork == null || !this.currentUnitOfWork.isActive()) {
      this.currentUnitOfWork = createNewUnitOfWork();
    }
    return this.currentUnitOfWork;
  }
  /**
   * Creates a new {@link UnitOfWork} that matches the current connection state: a single-use
   * transaction in plain autocommit mode, or a read-only/read-write transaction or DML/DDL batch
   * based on the current {@link UnitOfWorkType}.
   *
   * @throws SpannerException with code FAILED_PRECONDITION when the current state does not allow
   *     starting a new transaction
   */
  @VisibleForTesting
  UnitOfWork createNewUnitOfWork() {
    // Plain autocommit (no explicit transaction, no batch) uses a single-use transaction.
    if (isAutocommit() && !isInTransaction() && !isInBatch()) {
      return SingleUseTransaction.newBuilder()
          .setDdlClient(ddlClient)
          .setDatabaseClient(dbClient)
          .setReadOnly(isReadOnly())
          .setReadOnlyStaleness(readOnlyStaleness)
          .setAutocommitDmlMode(autocommitDmlMode)
          .setReturnCommitStats(returnCommitStats)
          .setStatementTimeout(statementTimeout)
          .withStatementExecutor(statementExecutor)
          .build();
    } else {
      switch (getUnitOfWorkType()) {
        case READ_ONLY_TRANSACTION:
          return ReadOnlyTransaction.newBuilder()
              .setDatabaseClient(dbClient)
              .setReadOnlyStaleness(readOnlyStaleness)
              .setStatementTimeout(statementTimeout)
              .withStatementExecutor(statementExecutor)
              .setTransactionTag(transactionTag)
              .setRpcPriority(rpcPriority)
              .build();
        case READ_WRITE_TRANSACTION:
          return ReadWriteTransaction.newBuilder()
              .setDatabaseClient(dbClient)
              .setRetryAbortsInternally(retryAbortsInternally)
              .setReturnCommitStats(returnCommitStats)
              .setTransactionRetryListeners(transactionRetryListeners)
              .setStatementTimeout(statementTimeout)
              .withStatementExecutor(statementExecutor)
              .setTransactionTag(transactionTag)
              .setRpcPriority(rpcPriority)
              .build();
        case DML_BATCH:
          // A DML batch can run inside the current transaction. It should therefore only
          // temporarily replace the current transaction.
          pushCurrentUnitOfWorkToTransactionStack();
          return DmlBatch.newBuilder()
              .setTransaction(currentUnitOfWork)
              .setStatementTimeout(statementTimeout)
              .withStatementExecutor(statementExecutor)
              .setStatementTag(statementTag)
              .setRpcPriority(rpcPriority)
              .build();
        case DDL_BATCH:
          return DdlBatch.newBuilder()
              .setDdlClient(ddlClient)
              .setDatabaseClient(dbClient)
              .setStatementTimeout(statementTimeout)
              .withStatementExecutor(statementExecutor)
              .build();
        default:
      }
    }
    // Reached when no unit-of-work type matched the current state.
    throw SpannerExceptionFactory.newSpannerException(
        ErrorCode.FAILED_PRECONDITION,
        "This connection does not have an active transaction and the state of this connection does not allow any new transactions to be started");
  }
  /** Pushes the current unit of work to the stack of nested transactions. */
  private void pushCurrentUnitOfWorkToTransactionStack() {
    Preconditions.checkState(currentUnitOfWork != null, "There is no current transaction");
    transactionStack.push(currentUnitOfWork);
  }

  /** Set the {@link UnitOfWork} of this connection back to the previous {@link UnitOfWork}. */
  private void popUnitOfWorkFromTransactionStack() {
    Preconditions.checkState(
        !transactionStack.isEmpty(), "There is no unit of work in the transaction stack");
    this.currentUnitOfWork = transactionStack.pop();
  }

  /** Executes a parsed DDL statement on the current (or a newly started) unit of work. */
  private ApiFuture<Void> executeDdlAsync(ParsedStatement ddl) {
    return getCurrentUnitOfWorkOrStartNewUnitOfWork().executeDdlAsync(ddl);
  }
  /** Writes a single mutation directly to the database (autocommit only); blocks until done. */
  @Override
  public void write(Mutation mutation) {
    get(writeAsync(Collections.singleton(Preconditions.checkNotNull(mutation))));
  }

  /** Asynchronous variant of {@link #write(Mutation)}. */
  @Override
  public ApiFuture<Void> writeAsync(Mutation mutation) {
    return writeAsync(Collections.singleton(Preconditions.checkNotNull(mutation)));
  }

  /** Writes a group of mutations directly to the database (autocommit only); blocks until done. */
  @Override
  public void write(Iterable<Mutation> mutations) {
    get(writeAsync(Preconditions.checkNotNull(mutations)));
  }

  /**
   * Asynchronously writes a group of mutations directly to the database. Only allowed in
   * autocommit mode; use {@link #bufferedWrite(Iterable)} inside a transaction.
   */
  @Override
  public ApiFuture<Void> writeAsync(Iterable<Mutation> mutations) {
    Preconditions.checkNotNull(mutations);
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(isAutocommit(), ONLY_ALLOWED_IN_AUTOCOMMIT);
    return getCurrentUnitOfWorkOrStartNewUnitOfWork().writeAsync(mutations);
  }
@Override
public void bufferedWrite(Mutation mutation) {
bufferedWrite(Preconditions.checkNotNull(Collections.singleton(mutation)));
}
  /**
   * Buffers a group of mutations in the current transaction; they are sent with the commit.
   * Not allowed in autocommit mode.
   */
  @Override
  public void bufferedWrite(Iterable<Mutation> mutations) {
    Preconditions.checkNotNull(mutations);
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(!isAutocommit(), NOT_ALLOWED_IN_AUTOCOMMIT);
    get(getCurrentUnitOfWorkOrStartNewUnitOfWork().writeAsync(mutations));
  }
  /**
   * Starts a DDL batch on this connection. DDL batches are incompatible with read-only mode,
   * active transactions and other batches, as enforced by the preconditions below.
   */
  @Override
  public void startBatchDdl() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(
        !isBatchActive(), "Cannot start a DDL batch when a batch is already active");
    ConnectionPreconditions.checkState(
        !isReadOnly(), "Cannot start a DDL batch when the connection is in read-only mode");
    ConnectionPreconditions.checkState(
        !isTransactionStarted(), "Cannot start a DDL batch while a transaction is active");
    ConnectionPreconditions.checkState(
        !(isAutocommit() && isInTransaction()),
        "Cannot start a DDL batch while in a temporary transaction");
    ConnectionPreconditions.checkState(
        !transactionBeginMarked, "Cannot start a DDL batch when a transaction has begun");
    this.batchMode = BatchMode.DDL;
    this.unitOfWorkType = UnitOfWorkType.DDL_BATCH;
    this.currentUnitOfWork = createNewUnitOfWork();
  }

  /**
   * Starts a DML batch on this connection. Unlike a DDL batch, a DML batch may run inside an
   * existing read-write transaction, which it temporarily replaces as the current unit of work.
   */
  @Override
  public void startBatchDml() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(
        !isBatchActive(), "Cannot start a DML batch when a batch is already active");
    ConnectionPreconditions.checkState(
        !isReadOnly(), "Cannot start a DML batch when the connection is in read-only mode");
    ConnectionPreconditions.checkState(
        !(isInTransaction() && getTransactionMode() == TransactionMode.READ_ONLY_TRANSACTION),
        "Cannot start a DML batch when a read-only transaction is in progress");
    // Make sure that there is a current unit of work that the batch can use.
    getCurrentUnitOfWorkOrStartNewUnitOfWork();
    // Then create the DML batch.
    this.batchMode = BatchMode.DML;
    this.unitOfWorkType = UnitOfWorkType.DML_BATCH;
    this.currentUnitOfWork = createNewUnitOfWork();
  }
  /** Runs the active batch synchronously and returns the update counts (empty for DDL batches). */
  @Override
  public long[] runBatch() {
    return get(runBatchAsync());
  }

  /**
   * Runs the active batch asynchronously. The batch mode and transaction options are reset even
   * if submitting the batch fails (see the finally block).
   */
  @Override
  public ApiFuture<long[]> runBatchAsync() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(isBatchActive(), "This connection has no active batch");
    try {
      if (this.currentUnitOfWork != null) {
        return this.currentUnitOfWork.runBatchAsync();
      }
      // No statements were buffered in the batch: nothing to run.
      return ApiFutures.immediateFuture(new long[0]);
    } finally {
      this.batchMode = BatchMode.NONE;
      setDefaultTransactionOptions();
    }
  }

  /**
   * Aborts the active batch, discarding its buffered statements, and resets the batch mode and
   * transaction options.
   */
  @Override
  public void abortBatch() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(isBatchActive(), "This connection has no active batch");
    try {
      if (this.currentUnitOfWork != null) {
        this.currentUnitOfWork.abortBatch();
      }
    } finally {
      this.batchMode = BatchMode.NONE;
      setDefaultTransactionOptions();
    }
  }
  /** Returns true when either a DDL or a DML batch is active on this connection. */
  private boolean isBatchActive() {
    return isDdlBatchActive() || isDmlBatchActive();
  }

  /** Returns true when a DDL batch is active. Fails when the connection is closed. */
  @Override
  public boolean isDdlBatchActive() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return this.batchMode == BatchMode.DDL;
  }

  /** Returns true when a DML batch is active. Fails when the connection is closed. */
  @Override
  public boolean isDmlBatchActive() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return this.batchMode == BatchMode.DML;
  }
}
|
google-cloud-spanner/src/main/java/com/google/cloud/spanner/connection/ConnectionImpl.java
|
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.spanner.connection;
import static com.google.cloud.spanner.SpannerApiFutures.get;
import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.cloud.Timestamp;
import com.google.cloud.spanner.AsyncResultSet;
import com.google.cloud.spanner.CommitResponse;
import com.google.cloud.spanner.DatabaseClient;
import com.google.cloud.spanner.Dialect;
import com.google.cloud.spanner.ErrorCode;
import com.google.cloud.spanner.Mutation;
import com.google.cloud.spanner.Options;
import com.google.cloud.spanner.Options.QueryOption;
import com.google.cloud.spanner.Options.RpcPriority;
import com.google.cloud.spanner.Options.UpdateOption;
import com.google.cloud.spanner.ReadContext.QueryAnalyzeMode;
import com.google.cloud.spanner.ResultSet;
import com.google.cloud.spanner.ResultSets;
import com.google.cloud.spanner.Spanner;
import com.google.cloud.spanner.SpannerException;
import com.google.cloud.spanner.SpannerExceptionFactory;
import com.google.cloud.spanner.Statement;
import com.google.cloud.spanner.TimestampBound;
import com.google.cloud.spanner.TimestampBound.Mode;
import com.google.cloud.spanner.connection.AbstractStatementParser.ParsedStatement;
import com.google.cloud.spanner.connection.AbstractStatementParser.StatementType;
import com.google.cloud.spanner.connection.StatementExecutor.StatementTimeout;
import com.google.cloud.spanner.connection.UnitOfWork.UnitOfWorkState;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.spanner.v1.ExecuteSqlRequest.QueryOptions;
import com.google.spanner.v1.ResultSetStats;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Stack;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.threeten.bp.Instant;
/** Implementation for {@link Connection}, the generic Spanner connection API (not JDBC). */
class ConnectionImpl implements Connection {
  private static final String CLOSED_ERROR_MSG = "This connection is closed";
  private static final String ONLY_ALLOWED_IN_AUTOCOMMIT =
      "This method may only be called while in autocommit mode";
  private static final String NOT_ALLOWED_IN_AUTOCOMMIT =
      "This method may not be called while in autocommit mode";
  /**
   * Exception that is used to register the stacktrace of the code that opened a {@link Connection}.
   * This exception is logged if the application closes without first closing the connection.
   */
  static class LeakedConnectionException extends RuntimeException {
    private static final long serialVersionUID = 7119433786832158700L;
    private LeakedConnectionException() {
      super("Connection was opened at " + Instant.now());
    }
  }
  // Captures the opening stacktrace; cleared (set to null) when the connection is closed cleanly.
  private volatile LeakedConnectionException leakedException = new LeakedConnectionException();
  // Pool from which the Spanner instance for this connection is obtained and to which this
  // connection registers/unregisters itself.
  private final SpannerPool spannerPool;
  // Lazily initialized in getStatementParser() based on the database dialect.
  private AbstractStatementParser statementParser;
  /**
   * The {@link ConnectionStatementExecutor} is responsible for translating parsed {@link
   * ClientSideStatement}s into actual method calls on this {@link ConnectionImpl}. I.e. the {@link
   * ClientSideStatement} 'SET AUTOCOMMIT ON' will be translated into the method call {@link
   * ConnectionImpl#setAutocommit(boolean)} with value <code>true</code>.
   */
  private final ConnectionStatementExecutor connectionStatementExecutor =
      new ConnectionStatementExecutorImpl(this);
  /** Simple thread factory that is used for fire-and-forget rollbacks. */
  static final class DaemonThreadFactory implements ThreadFactory {
    @Override
    public Thread newThread(Runnable r) {
      Thread t = new Thread(r);
      t.setName("connection-rollback-executor");
      // Daemon so a pending rollback never prevents JVM shutdown.
      t.setDaemon(true);
      return t;
    }
  }
  /**
   * Statements are executed using a separate thread in order to be able to cancel these. Statements
   * are automatically cancelled if the configured {@link ConnectionImpl#statementTimeout} is
   * exceeded. In autocommit mode, the connection will try to rollback the effects of an update
   * statement, but this is not guaranteed to actually succeed.
   */
  private final StatementExecutor statementExecutor;
  /**
   * The {@link ConnectionOptions} that were used to create this {@link ConnectionImpl}. This is
   * retained as it is used for getting a {@link Spanner} object and removing this connection from
   * the {@link SpannerPool}.
   */
  private final ConnectionOptions options;
  /** The supported batch modes. */
  enum BatchMode {
    // No batch is active.
    NONE,
    // A DDL batch is active.
    DDL,
    // A DML batch is active.
    DML
  }
  /**
   * This query option is used internally to indicate that a query is executed by the library itself
   * to fetch metadata. These queries are specifically allowed to be executed even when a DDL batch
   * is active.
   */
  static final class InternalMetadataQuery implements QueryOption {
    static final InternalMetadataQuery INSTANCE = new InternalMetadataQuery();
    // Marker option: no state, singleton only.
    private InternalMetadataQuery() {}
  }
  /** The combination of all transaction modes and batch modes. */
  enum UnitOfWorkType {
    READ_ONLY_TRANSACTION {
      @Override
      TransactionMode getTransactionMode() {
        return TransactionMode.READ_ONLY_TRANSACTION;
      }
    },
    READ_WRITE_TRANSACTION {
      @Override
      TransactionMode getTransactionMode() {
        return TransactionMode.READ_WRITE_TRANSACTION;
      }
    },
    // A DML batch runs in the context of a read-write transaction.
    DML_BATCH {
      @Override
      TransactionMode getTransactionMode() {
        return TransactionMode.READ_WRITE_TRANSACTION;
      }
    },
    // DDL batches have no associated transaction mode.
    DDL_BATCH {
      @Override
      TransactionMode getTransactionMode() {
        return null;
      }
    };
    /** The {@link TransactionMode} corresponding to this unit-of-work type (null for DDL_BATCH). */
    abstract TransactionMode getTransactionMode();
    /** Maps a user-visible {@link TransactionMode} to the internal unit-of-work type. */
    static UnitOfWorkType of(TransactionMode transactionMode) {
      switch (transactionMode) {
        case READ_ONLY_TRANSACTION:
          return UnitOfWorkType.READ_ONLY_TRANSACTION;
        case READ_WRITE_TRANSACTION:
          return UnitOfWorkType.READ_WRITE_TRANSACTION;
        default:
          throw SpannerExceptionFactory.newSpannerException(
              ErrorCode.INVALID_ARGUMENT, "Unknown transaction mode: " + transactionMode);
      }
    }
  }
  // Timeout that applies to each statement executed on this connection.
  private StatementExecutor.StatementTimeout statementTimeout =
      new StatementExecutor.StatementTimeout();
  private boolean closed = false;
  private final Spanner spanner;
  private DdlClient ddlClient;
  private DatabaseClient dbClient;
  private boolean autocommit;
  private boolean readOnly;
  private boolean returnCommitStats;
  // The unit of work that statements are currently executed on; (re)created lazily.
  private UnitOfWork currentUnitOfWork = null;
  /**
   * This field is only used in autocommit mode to indicate that the user has explicitly started a
   * transaction.
   */
  private boolean inTransaction = false;
  /**
   * This field is used to indicate that a transaction begin has been indicated. This is done by
   * calling beginTransaction or by setting a transaction property while not in autocommit mode.
   */
  private boolean transactionBeginMarked = false;
  private BatchMode batchMode;
  private UnitOfWorkType unitOfWorkType;
  // Stack of suspended units of work; used when a DML batch temporarily replaces a transaction.
  private final Stack<UnitOfWork> transactionStack = new Stack<>();
  private boolean retryAbortsInternally;
  private final List<TransactionRetryListener> transactionRetryListeners = new ArrayList<>();
  private AutocommitDmlMode autocommitDmlMode = AutocommitDmlMode.TRANSACTIONAL;
  private TimestampBound readOnlyStaleness = TimestampBound.strong();
  private QueryOptions queryOptions = QueryOptions.getDefaultInstance();
  private RpcPriority rpcPriority = null;
  // Tag applied to the next transaction.
  private String transactionTag;
  // Tag applied to the next statement only; consumed by the merge*StatementTag helpers.
  private String statementTag;
  /** Create a connection and register it in the SpannerPool. */
  ConnectionImpl(ConnectionOptions options) {
    Preconditions.checkNotNull(options);
    this.statementExecutor = new StatementExecutor(options.getStatementExecutionInterceptors());
    this.spannerPool = SpannerPool.INSTANCE;
    this.options = options;
    this.spanner = spannerPool.getSpanner(options, this);
    // When pointed at the emulator, instance and database may need to be created first.
    if (options.isAutoConfigEmulator()) {
      EmulatorUtil.maybeCreateInstanceAndDatabase(spanner, options.getDatabaseId());
    }
    this.dbClient = spanner.getDatabaseClient(options.getDatabaseId());
    this.retryAbortsInternally = options.isRetryAbortsInternally();
    this.readOnly = options.isReadOnly();
    this.autocommit = options.isAutocommit();
    this.queryOptions = this.queryOptions.toBuilder().mergeFrom(options.getQueryOptions()).build();
    this.rpcPriority = options.getRPCPriority();
    this.returnCommitStats = options.isReturnCommitStats();
    this.ddlClient = createDdlClient();
    setDefaultTransactionOptions();
  }

  /** Constructor only for test purposes. */
  @VisibleForTesting
  ConnectionImpl(
      ConnectionOptions options,
      SpannerPool spannerPool,
      DdlClient ddlClient,
      DatabaseClient dbClient) {
    Preconditions.checkNotNull(options);
    Preconditions.checkNotNull(spannerPool);
    Preconditions.checkNotNull(ddlClient);
    Preconditions.checkNotNull(dbClient);
    this.statementExecutor = new StatementExecutor(Collections.emptyList());
    this.spannerPool = spannerPool;
    this.options = options;
    this.spanner = spannerPool.getSpanner(options, this);
    this.ddlClient = ddlClient;
    this.dbClient = dbClient;
    // Unlike the production constructor, the test constructor goes through the public setters.
    setReadOnly(options.isReadOnly());
    setAutocommit(options.isAutocommit());
    setReturnCommitStats(options.isReturnCommitStats());
    setDefaultTransactionOptions();
  }
  /** Returns the {@link Spanner} instance backing this connection (test hook). */
  @VisibleForTesting
  Spanner getSpanner() {
    return this.spanner;
  }

  /** Builds the {@link DdlClient} for this connection from the connection options. */
  private DdlClient createDdlClient() {
    return DdlClient.newBuilder()
        .setDatabaseAdminClient(spanner.getDatabaseAdminClient())
        .setInstanceId(options.getInstanceId())
        .setDatabaseName(options.getDatabaseName())
        .build();
  }

  /**
   * Returns the statement parser for the database dialect, creating it on first use.
   * NOTE(review): the lazy initialization here is not synchronized — presumably safe because
   * AbstractStatementParser.getInstance returns a shared instance per dialect; confirm.
   */
  private AbstractStatementParser getStatementParser() {
    if (this.statementParser == null) {
      this.statementParser = AbstractStatementParser.getInstance(dbClient.getDialect());
    }
    return this.statementParser;
  }
  /**
   * Closes the connection, waiting at most 10 seconds for in-flight work before forcing the
   * statement executor to shut down.
   */
  @Override
  public void close() {
    try {
      closeAsync().get(10L, TimeUnit.SECONDS);
    } catch (SpannerException | InterruptedException | ExecutionException | TimeoutException e) {
      // ignore and continue to close the connection.
    } finally {
      statementExecutor.shutdownNow();
    }
  }

  /**
   * Closes the connection asynchronously: aborts any active batch, rolls back any started
   * transaction, then drains the statement executor and removes the connection from the pool.
   * Returns a future that completes when all of this has finished.
   */
  public ApiFuture<Void> closeAsync() {
    if (!isClosed()) {
      List<ApiFuture<Void>> futures = new ArrayList<>();
      if (isBatchActive()) {
        abortBatch();
      }
      if (isTransactionStarted()) {
        try {
          futures.add(rollbackAsync());
        } catch (Exception exception) {
          // ignore and continue to close the connection.
        }
      }
      // Try to wait for the current statement to finish (if any) before we actually close the
      // connection.
      this.closed = true;
      // Add a no-op statement to the executor. Once this has been executed, we know that all
      // preceding statements have also been executed, as the executor is single-threaded and
      // executes all statements in order of submitting.
      futures.add(statementExecutor.submit(() -> null));
      statementExecutor.shutdown();
      // Closed cleanly: clear the leak-detection exception so nothing is logged at GC time.
      leakedException = null;
      spannerPool.removeConnection(options, this);
      return ApiFutures.transform(
          ApiFutures.allAsList(futures), ignored -> null, MoreExecutors.directExecutor());
    }
    return ApiFutures.immediateFuture(null);
  }
  /** Get the current unit-of-work type of this connection. */
  UnitOfWorkType getUnitOfWorkType() {
    return unitOfWorkType;
  }

  /** Get the current batch mode of this connection. */
  BatchMode getBatchMode() {
    return batchMode;
  }

  /** @return <code>true</code> if this connection is in a batch. */
  boolean isInBatch() {
    return batchMode != BatchMode.NONE;
  }

  /** Get the call stack from when the {@link Connection} was opened. */
  LeakedConnectionException getLeakedException() {
    return leakedException;
  }

  /** Returns the SQL dialect of the underlying database. */
  @Override
  public Dialect getDialect() {
    return dbClient.getDialect();
  }

  /** Returns the {@link DatabaseClient} backing this connection. */
  @Override
  public DatabaseClient getDatabaseClient() {
    return dbClient;
  }

  /** Returns true once {@link #closeAsync()} has marked this connection closed. */
  @Override
  public boolean isClosed() {
    return closed;
  }
  /**
   * Switches autocommit mode on or off. Not allowed while a batch, an active transaction, a
   * temporary transaction or a marked transaction begin exists.
   */
  @Override
  public void setAutocommit(boolean autocommit) {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(!isBatchActive(), "Cannot set autocommit while in a batch");
    ConnectionPreconditions.checkState(
        !isTransactionStarted(), "Cannot set autocommit while a transaction is active");
    ConnectionPreconditions.checkState(
        !(isAutocommit() && isInTransaction()),
        "Cannot set autocommit while in a temporary transaction");
    ConnectionPreconditions.checkState(
        !transactionBeginMarked, "Cannot set autocommit when a transaction has begun");
    this.autocommit = autocommit;
    clearLastTransactionAndSetDefaultTransactionOptions();
    // Reset the readOnlyStaleness value if it is no longer compatible with the new autocommit
    // value.
    if (!autocommit
        && (readOnlyStaleness.getMode() == Mode.MAX_STALENESS
            || readOnlyStaleness.getMode() == Mode.MIN_READ_TIMESTAMP)) {
      readOnlyStaleness = TimestampBound.strong();
    }
  }

  /** Returns whether this connection is in autocommit mode; fails when closed. */
  @Override
  public boolean isAutocommit() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return internalIsAutocommit();
  }

  /** Autocommit check without the closed-state precondition, for internal use. */
  private boolean internalIsAutocommit() {
    return this.autocommit;
  }
  /**
   * Switches read-only mode on or off. Not allowed while a batch, an active transaction, a
   * temporary transaction or a marked transaction begin exists.
   */
  @Override
  public void setReadOnly(boolean readOnly) {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(!isBatchActive(), "Cannot set read-only while in a batch");
    ConnectionPreconditions.checkState(
        !isTransactionStarted(), "Cannot set read-only while a transaction is active");
    ConnectionPreconditions.checkState(
        !(isAutocommit() && isInTransaction()),
        "Cannot set read-only while in a temporary transaction");
    ConnectionPreconditions.checkState(
        !transactionBeginMarked, "Cannot set read-only when a transaction has begun");
    this.readOnly = readOnly;
    clearLastTransactionAndSetDefaultTransactionOptions();
  }

  /** Returns whether this connection is in read-only mode; fails when closed. */
  @Override
  public boolean isReadOnly() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    return this.readOnly;
  }

  /** Resets the transaction options to their defaults and discards the current unit of work. */
  private void clearLastTransactionAndSetDefaultTransactionOptions() {
    setDefaultTransactionOptions();
    this.currentUnitOfWork = null;
  }
  /**
   * Sets the DML mode used in autocommit (TRANSACTIONAL or PARTITIONED). Only allowed in
   * autocommit mode, outside a batch, and not on a read-only connection.
   */
  @Override
  public void setAutocommitDmlMode(AutocommitDmlMode mode) {
    Preconditions.checkNotNull(mode);
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(
        !isBatchActive(), "Cannot set autocommit DML mode while in a batch");
    ConnectionPreconditions.checkState(
        !isInTransaction() && isAutocommit(),
        "Cannot set autocommit DML mode while not in autocommit mode or while a transaction is active");
    ConnectionPreconditions.checkState(
        !isReadOnly(), "Cannot set autocommit DML mode for a read-only connection");
    this.autocommitDmlMode = mode;
  }

  /** Returns the DML mode used in autocommit; not allowed inside a batch. */
  @Override
  public AutocommitDmlMode getAutocommitDmlMode() {
    ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
    ConnectionPreconditions.checkState(
        !isBatchActive(), "Cannot get autocommit DML mode while in a batch");
    return this.autocommitDmlMode;
  }
@Override
public void setReadOnlyStaleness(TimestampBound staleness) {
  Preconditions.checkNotNull(staleness);
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  // Fixed error message: this precondition guards the staleness setting, not the
  // read-only flag (the old text "Cannot set read-only while in a batch" was a
  // copy-paste from setReadOnly and was misleading to users).
  ConnectionPreconditions.checkState(
      !isBatchActive(), "Cannot set read-only staleness while in a batch");
  ConnectionPreconditions.checkState(
      !isTransactionStarted(),
      "Cannot set read-only staleness when a transaction has been started");
  if (staleness.getMode() == Mode.MAX_STALENESS
      || staleness.getMode() == Mode.MIN_READ_TIMESTAMP) {
    // These bound modes are only valid for single-use reads, i.e. autocommit mode
    // without a temporary transaction.
    ConnectionPreconditions.checkState(
        isAutocommit() && !inTransaction,
        "MAX_STALENESS and MIN_READ_TIMESTAMP are only allowed in autocommit mode");
  }
  this.readOnlyStaleness = staleness;
}
@Override
public TimestampBound getReadOnlyStaleness() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  // Fixed error message: this getter returns the staleness setting, not the read-only flag.
  ConnectionPreconditions.checkState(
      !isBatchActive(), "Cannot get read-only staleness while in a batch");
  return this.readOnlyStaleness;
}
@Override
public void setOptimizerVersion(String optimizerVersion) {
  Preconditions.checkNotNull(optimizerVersion);
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  // QueryOptions is immutable; rebuild it with the new optimizer version.
  this.queryOptions = queryOptions.toBuilder().setOptimizerVersion(optimizerVersion).build();
}
@Override
public String getOptimizerVersion() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  return this.queryOptions.getOptimizerVersion();
}
@Override
public void setOptimizerStatisticsPackage(String optimizerStatisticsPackage) {
  Preconditions.checkNotNull(optimizerStatisticsPackage);
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  // QueryOptions is immutable; rebuild it with the new statistics package.
  this.queryOptions =
      queryOptions.toBuilder().setOptimizerStatisticsPackage(optimizerStatisticsPackage).build();
}
@Override
public String getOptimizerStatisticsPackage() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  return this.queryOptions.getOptimizerStatisticsPackage();
}
@Override
public void setRPCPriority(RpcPriority rpcPriority) {
  // Note: null is accepted here (it clears the priority); downstream merge helpers
  // check for null before applying it to request options.
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  this.rpcPriority = rpcPriority;
}
@Override
public RpcPriority getRPCPriority() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  return this.rpcPriority;
}
@Override
public void setStatementTimeout(long timeout, TimeUnit unit) {
  Preconditions.checkArgument(timeout > 0L, "Zero or negative timeout values are not allowed");
  Preconditions.checkArgument(
      StatementTimeout.isValidTimeoutUnit(unit),
      "Time unit must be one of NANOSECONDS, MICROSECONDS, MILLISECONDS or SECONDS");
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  this.statementTimeout.setTimeoutValue(timeout, unit);
}
@Override
public void clearStatementTimeout() {
  // Removes any previously configured timeout; statements then run without a deadline
  // imposed by this connection.
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  this.statementTimeout.clearTimeoutValue();
}
@Override
public long getStatementTimeout(TimeUnit unit) {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  Preconditions.checkArgument(
      StatementTimeout.isValidTimeoutUnit(unit),
      "Time unit must be one of NANOSECONDS, MICROSECONDS, MILLISECONDS or SECONDS");
  return this.statementTimeout.getTimeoutValue(unit);
}
@Override
public boolean hasStatementTimeout() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  return this.statementTimeout.hasTimeout();
}
@Override
public void cancel() {
  // Best-effort cancellation: only forwarded if there is a current unit of work.
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  if (this.currentUnitOfWork != null) {
    currentUnitOfWork.cancel();
  }
}
@Override
public TransactionMode getTransactionMode() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ConnectionPreconditions.checkState(!isDdlBatchActive(), "This connection is in a DDL batch");
  ConnectionPreconditions.checkState(isInTransaction(), "This connection has no transaction");
  return unitOfWorkType.getTransactionMode();
}
@Override
public void setTransactionMode(TransactionMode transactionMode) {
  Preconditions.checkNotNull(transactionMode);
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ConnectionPreconditions.checkState(
      !isBatchActive(), "Cannot set transaction mode while in a batch");
  ConnectionPreconditions.checkState(isInTransaction(), "This connection has no transaction");
  // The mode can only be chosen before the first statement of the transaction runs.
  ConnectionPreconditions.checkState(
      !isTransactionStarted(),
      "The transaction mode cannot be set after the transaction has started");
  ConnectionPreconditions.checkState(
      !isReadOnly() || transactionMode == TransactionMode.READ_ONLY_TRANSACTION,
      "The transaction mode can only be READ_ONLY when the connection is in read_only mode");
  // Setting an explicit mode marks the transaction as 'begun' for subsequent checks.
  this.transactionBeginMarked = true;
  this.unitOfWorkType = UnitOfWorkType.of(transactionMode);
}
@Override
public String getTransactionTag() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ConnectionPreconditions.checkState(!isDdlBatchActive(), "This connection is in a DDL batch");
  return transactionTag;
}
@Override
public void setTransactionTag(String tag) {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ConnectionPreconditions.checkState(
      !isBatchActive(), "Cannot set transaction tag while in a batch");
  ConnectionPreconditions.checkState(isInTransaction(), "This connection has no transaction");
  ConnectionPreconditions.checkState(
      !isTransactionStarted(),
      "The transaction tag cannot be set after the transaction has started");
  // Tags only apply to read/write transactions.
  ConnectionPreconditions.checkState(
      getTransactionMode() == TransactionMode.READ_WRITE_TRANSACTION,
      "Transaction tag can only be set for a read/write transaction");
  this.transactionBeginMarked = true;
  this.transactionTag = tag;
}
@Override
public String getStatementTag() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ConnectionPreconditions.checkState(
      !isBatchActive(), "Statement tags are not allowed inside a batch");
  return statementTag;
}
@Override
public void setStatementTag(String tag) {
  // The tag applies to the next statement only; it is consumed by the merge helpers.
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ConnectionPreconditions.checkState(
      !isBatchActive(), "Statement tags are not allowed inside a batch");
  this.statementTag = tag;
}
/**
 * Throws an {@link SpannerException} with code {@link ErrorCode#FAILED_PRECONDITION} if the
 * current state of this connection does not allow changing the setting for retryAbortsInternally.
 */
private void checkSetRetryAbortsInternallyAvailable() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ConnectionPreconditions.checkState(isInTransaction(), "This connection has no transaction");
  ConnectionPreconditions.checkState(
      getTransactionMode() == TransactionMode.READ_WRITE_TRANSACTION,
      "RetryAbortsInternally is only available for read-write transactions");
  ConnectionPreconditions.checkState(
      !isTransactionStarted(),
      "RetryAbortsInternally cannot be set after the transaction has started");
}
@Override
public boolean isRetryAbortsInternally() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  return retryAbortsInternally;
}
@Override
public void setRetryAbortsInternally(boolean retryAbortsInternally) {
  // All state validation lives in the shared precondition helper above.
  checkSetRetryAbortsInternallyAvailable();
  this.retryAbortsInternally = retryAbortsInternally;
}
@Override
public void addTransactionRetryListener(TransactionRetryListener listener) {
  Preconditions.checkNotNull(listener);
  transactionRetryListeners.add(listener);
}
@Override
public boolean removeTransactionRetryListener(TransactionRetryListener listener) {
  Preconditions.checkNotNull(listener);
  return transactionRetryListeners.remove(listener);
}
@Override
public Iterator<TransactionRetryListener> getTransactionRetryListeners() {
  // Expose a read-only view so callers cannot mutate the listener list via the iterator.
  return Collections.unmodifiableList(transactionRetryListeners).iterator();
}
@Override
public boolean isInTransaction() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  return internalIsInTransaction();
}
/** Returns true if this connection currently is in a transaction (and not a batch). */
private boolean internalIsInTransaction() {
  // Transactional mode: either not autocommit, or autocommit with an explicit
  // temporary transaction (inTransaction). A DDL batch is never transactional.
  return !isDdlBatchActive() && (!internalIsAutocommit() || inTransaction);
}
@Override
public boolean isTransactionStarted() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  return internalIsTransactionStarted();
}
// A transaction is 'started' once its unit of work has actually executed something
// (state == STARTED); merely marking BEGIN does not count.
private boolean internalIsTransactionStarted() {
  if (internalIsAutocommit() && !inTransaction) {
    return false;
  }
  return internalIsInTransaction()
      && this.currentUnitOfWork != null
      && this.currentUnitOfWork.getState() == UnitOfWorkState.STARTED;
}
@Override
public Timestamp getReadTimestamp() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ConnectionPreconditions.checkState(
      this.currentUnitOfWork != null, "There is no transaction on this connection");
  return this.currentUnitOfWork.getReadTimestamp();
}
// Lenient variant: returns null instead of throwing when there is no transaction.
Timestamp getReadTimestampOrNull() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  return this.currentUnitOfWork == null ? null : this.currentUnitOfWork.getReadTimestampOrNull();
}
@Override
public Timestamp getCommitTimestamp() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ConnectionPreconditions.checkState(
      this.currentUnitOfWork != null, "There is no transaction on this connection");
  return this.currentUnitOfWork.getCommitTimestamp();
}
// Lenient variant: returns null instead of throwing when there is no transaction.
Timestamp getCommitTimestampOrNull() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  return this.currentUnitOfWork == null
      ? null
      : this.currentUnitOfWork.getCommitTimestampOrNull();
}
@Override
public CommitResponse getCommitResponse() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ConnectionPreconditions.checkState(
      this.currentUnitOfWork != null, "There is no transaction on this connection");
  return this.currentUnitOfWork.getCommitResponse();
}
// Lenient variant: returns null instead of throwing when there is no transaction.
CommitResponse getCommitResponseOrNull() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  return this.currentUnitOfWork == null ? null : this.currentUnitOfWork.getCommitResponseOrNull();
}
@Override
public void setReturnCommitStats(boolean returnCommitStats) {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  this.returnCommitStats = returnCommitStats;
}
@Override
public boolean isReturnCommitStats() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  return this.returnCommitStats;
}
/** Resets this connection to its default transaction options. */
private void setDefaultTransactionOptions() {
  if (transactionStack.isEmpty()) {
    // No nested (batch) transaction: fall back to the defaults derived from read-only mode.
    unitOfWorkType =
        isReadOnly()
            ? UnitOfWorkType.READ_ONLY_TRANSACTION
            : UnitOfWorkType.READ_WRITE_TRANSACTION;
    batchMode = BatchMode.NONE;
    transactionTag = null;
  } else {
    // A DML batch temporarily replaced the transaction; restore the previous one.
    popUnitOfWorkFromTransactionStack();
  }
}
@Override
public void beginTransaction() {
  get(beginTransactionAsync());
}
@Override
public ApiFuture<Void> beginTransactionAsync() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ConnectionPreconditions.checkState(
      !isBatchActive(), "This connection has an active batch and cannot begin a transaction");
  ConnectionPreconditions.checkState(
      !isTransactionStarted(),
      "Beginning a new transaction is not allowed when a transaction is already running");
  ConnectionPreconditions.checkState(!transactionBeginMarked, "A transaction has already begun");
  transactionBeginMarked = true;
  clearLastTransactionAndSetDefaultTransactionOptions();
  if (isAutocommit()) {
    // BEGIN in autocommit mode starts a temporary transaction.
    inTransaction = true;
  }
  // BEGIN is lazy: the actual Spanner transaction starts with the first statement.
  return ApiFutures.immediateFuture(null);
}
/** Internal interface for ending a transaction (commit/rollback). */
private interface EndTransactionMethod {
  ApiFuture<Void> endAsync(UnitOfWork t);
}
// Strategy object that ends a unit of work by committing it.
private static final class Commit implements EndTransactionMethod {
  @Override
  public ApiFuture<Void> endAsync(UnitOfWork t) {
    return t.commitAsync();
  }
}
private final Commit commit = new Commit();
@Override
public void commit() {
  get(commitAsync());
}
public ApiFuture<Void> commitAsync() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  return endCurrentTransactionAsync(commit);
}
// Strategy object that ends a unit of work by rolling it back.
private static final class Rollback implements EndTransactionMethod {
  @Override
  public ApiFuture<Void> endAsync(UnitOfWork t) {
    return t.rollbackAsync();
  }
}
private final Rollback rollback = new Rollback();
@Override
public void rollback() {
  get(rollbackAsync());
}
public ApiFuture<Void> rollbackAsync() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  return endCurrentTransactionAsync(rollback);
}
// Shared commit/rollback path. NOTE: the finally block intentionally resets the
// connection state even when ending the transaction throws, so the connection is
// usable afterwards; statement order here is significant.
private ApiFuture<Void> endCurrentTransactionAsync(EndTransactionMethod endTransactionMethod) {
  ConnectionPreconditions.checkState(!isBatchActive(), "This connection has an active batch");
  ConnectionPreconditions.checkState(isInTransaction(), "This connection has no transaction");
  ConnectionPreconditions.checkState(
      statementTag == null, "Statement tags are not supported for COMMIT or ROLLBACK");
  ApiFuture<Void> res;
  try {
    if (isTransactionStarted()) {
      res = endTransactionMethod.endAsync(getCurrentUnitOfWorkOrStartNewUnitOfWork());
    } else {
      // Nothing was executed: committing/rolling back an empty transaction is a no-op.
      this.currentUnitOfWork = null;
      res = ApiFutures.immediateFuture(null);
    }
  } finally {
    transactionBeginMarked = false;
    if (isAutocommit()) {
      inTransaction = false;
    }
    setDefaultTransactionOptions();
  }
  return res;
}
@Override
public StatementResult execute(Statement statement) {
  Preconditions.checkNotNull(statement);
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  // Dispatch on the parsed statement type; each branch returns, so falling through
  // to the trailing throw means the statement type was not recognized.
  ParsedStatement parsedStatement = getStatementParser().parse(statement, this.queryOptions);
  switch (parsedStatement.getType()) {
    case CLIENT_SIDE:
      return parsedStatement
          .getClientSideStatement()
          .execute(connectionStatementExecutor, parsedStatement.getSqlWithoutComments());
    case QUERY:
      return StatementResultImpl.of(internalExecuteQuery(parsedStatement, AnalyzeMode.NONE));
    case UPDATE:
      return StatementResultImpl.of(get(internalExecuteUpdateAsync(parsedStatement)));
    case DDL:
      get(executeDdlAsync(parsedStatement));
      return StatementResultImpl.noResult();
    case UNKNOWN:
    default:
  }
  throw SpannerExceptionFactory.newSpannerException(
      ErrorCode.INVALID_ARGUMENT,
      "Unknown statement: " + parsedStatement.getSqlWithoutComments());
}
@Override
public AsyncStatementResult executeAsync(Statement statement) {
  Preconditions.checkNotNull(statement);
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  // Async twin of execute(Statement); client-side statements still execute synchronously
  // and are wrapped into an async result.
  ParsedStatement parsedStatement = getStatementParser().parse(statement, this.queryOptions);
  switch (parsedStatement.getType()) {
    case CLIENT_SIDE:
      return AsyncStatementResultImpl.of(
          parsedStatement
              .getClientSideStatement()
              .execute(connectionStatementExecutor, parsedStatement.getSqlWithoutComments()),
          spanner.getAsyncExecutorProvider());
    case QUERY:
      return AsyncStatementResultImpl.of(
          internalExecuteQueryAsync(parsedStatement, AnalyzeMode.NONE));
    case UPDATE:
      return AsyncStatementResultImpl.of(internalExecuteUpdateAsync(parsedStatement));
    case DDL:
      return AsyncStatementResultImpl.noResult(executeDdlAsync(parsedStatement));
    case UNKNOWN:
    default:
  }
  throw SpannerExceptionFactory.newSpannerException(
      ErrorCode.INVALID_ARGUMENT,
      "Unknown statement: " + parsedStatement.getSqlWithoutComments());
}
@Override
public ResultSet executeQuery(Statement query, QueryOption... options) {
  return parseAndExecuteQuery(query, AnalyzeMode.NONE, options);
}
@Override
public AsyncResultSet executeQueryAsync(Statement query, QueryOption... options) {
  return parseAndExecuteQueryAsync(query, AnalyzeMode.NONE, options);
}
@Override
public ResultSet analyzeQuery(Statement query, QueryAnalyzeMode queryMode) {
  Preconditions.checkNotNull(queryMode);
  // Same execution path as executeQuery, but with an explicit analyze mode (PLAN/PROFILE).
  return parseAndExecuteQuery(query, AnalyzeMode.of(queryMode));
}
/**
 * Parses the given statement as a query and executes it. Throws a {@link SpannerException} if the
 * statement is not a query.
 */
private ResultSet parseAndExecuteQuery(
    Statement query, AnalyzeMode analyzeMode, QueryOption... options) {
  Preconditions.checkNotNull(query);
  Preconditions.checkNotNull(analyzeMode);
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ParsedStatement parsedStatement = getStatementParser().parse(query, this.queryOptions);
  if (parsedStatement.isQuery()) {
    switch (parsedStatement.getType()) {
      case CLIENT_SIDE:
        // Some client-side statements (e.g. SHOW) produce a result set.
        return parsedStatement
            .getClientSideStatement()
            .execute(connectionStatementExecutor, parsedStatement.getSqlWithoutComments())
            .getResultSet();
      case QUERY:
        return internalExecuteQuery(parsedStatement, analyzeMode, options);
      case UPDATE:
      case DDL:
      case UNKNOWN:
      default:
    }
  }
  // Reached when the statement is not a query (or has an unsupported type).
  throw SpannerExceptionFactory.newSpannerException(
      ErrorCode.INVALID_ARGUMENT,
      "Statement is not a query: " + parsedStatement.getSqlWithoutComments());
}
/**
 * Async twin of {@link #parseAndExecuteQuery}: parses the given statement as a query and
 * executes it, throwing a {@link SpannerException} if it is not a query.
 */
private AsyncResultSet parseAndExecuteQueryAsync(
    Statement query, AnalyzeMode analyzeMode, QueryOption... options) {
  Preconditions.checkNotNull(query);
  // Consistency fix: the synchronous variant null-checks analyzeMode; this method
  // relied on a later NPE instead. Both callers always pass a non-null mode, so this
  // only turns a latent NPE into an explicit precondition.
  Preconditions.checkNotNull(analyzeMode);
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ParsedStatement parsedStatement = getStatementParser().parse(query, this.queryOptions);
  if (parsedStatement.isQuery()) {
    switch (parsedStatement.getType()) {
      case CLIENT_SIDE:
        // Client-side statements execute synchronously; wrap the result in an async view.
        return ResultSets.toAsyncResultSet(
            parsedStatement
                .getClientSideStatement()
                .execute(connectionStatementExecutor, parsedStatement.getSqlWithoutComments())
                .getResultSet(),
            spanner.getAsyncExecutorProvider(),
            options);
      case QUERY:
        return internalExecuteQueryAsync(parsedStatement, analyzeMode, options);
      case UPDATE:
      case DDL:
      case UNKNOWN:
      default:
    }
  }
  throw SpannerExceptionFactory.newSpannerException(
      ErrorCode.INVALID_ARGUMENT,
      "Statement is not a query: " + parsedStatement.getSqlWithoutComments());
}
@Override
public long executeUpdate(Statement update) {
  Preconditions.checkNotNull(update);
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ParsedStatement parsedStatement = getStatementParser().parse(update);
  if (parsedStatement.isUpdate()) {
    switch (parsedStatement.getType()) {
      case UPDATE:
        // Blocks until the update count is available.
        return get(internalExecuteUpdateAsync(parsedStatement));
      case CLIENT_SIDE:
      case QUERY:
      case DDL:
      case UNKNOWN:
      default:
    }
  }
  throw SpannerExceptionFactory.newSpannerException(
      ErrorCode.INVALID_ARGUMENT,
      "Statement is not an update statement: " + parsedStatement.getSqlWithoutComments());
}
@Override
public ApiFuture<Long> executeUpdateAsync(Statement update) {
  Preconditions.checkNotNull(update);
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ParsedStatement parsedStatement = getStatementParser().parse(update);
  if (parsedStatement.isUpdate()) {
    switch (parsedStatement.getType()) {
      case UPDATE:
        return internalExecuteUpdateAsync(parsedStatement);
      case CLIENT_SIDE:
      case QUERY:
      case DDL:
      case UNKNOWN:
      default:
    }
  }
  throw SpannerExceptionFactory.newSpannerException(
      ErrorCode.INVALID_ARGUMENT,
      "Statement is not an update statement: " + parsedStatement.getSqlWithoutComments());
}
@Override
public ResultSetStats analyzeUpdate(Statement update, QueryAnalyzeMode analyzeMode) {
  Preconditions.checkNotNull(update);
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ParsedStatement parsedStatement = getStatementParser().parse(update);
  if (parsedStatement.isUpdate()) {
    switch (parsedStatement.getType()) {
      case UPDATE:
        // Runs the DML in analyze mode and returns only the statistics.
        return get(internalAnalyzeUpdateAsync(parsedStatement, AnalyzeMode.of(analyzeMode)));
      case CLIENT_SIDE:
      case QUERY:
      case DDL:
      case UNKNOWN:
      default:
    }
  }
  throw SpannerExceptionFactory.newSpannerException(
      ErrorCode.INVALID_ARGUMENT,
      "Statement is not an update statement: " + parsedStatement.getSqlWithoutComments());
}
@Override
public long[] executeBatchUpdate(Iterable<Statement> updates) {
  Preconditions.checkNotNull(updates);
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  // Check that there are only DML statements in the input.
  // Validation happens before anything executes, so a mixed batch fails atomically.
  List<ParsedStatement> parsedStatements = new LinkedList<>();
  for (Statement update : updates) {
    ParsedStatement parsedStatement = getStatementParser().parse(update);
    switch (parsedStatement.getType()) {
      case UPDATE:
        parsedStatements.add(parsedStatement);
        break;
      case CLIENT_SIDE:
      case QUERY:
      case DDL:
      case UNKNOWN:
      default:
        throw SpannerExceptionFactory.newSpannerException(
            ErrorCode.INVALID_ARGUMENT,
            "The batch update list contains a statement that is not an update statement: "
                + parsedStatement.getSqlWithoutComments());
    }
  }
  return get(internalExecuteBatchUpdateAsync(parsedStatements));
}
@Override
public ApiFuture<long[]> executeBatchUpdateAsync(Iterable<Statement> updates) {
  Preconditions.checkNotNull(updates);
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  // Check that there are only DML statements in the input.
  List<ParsedStatement> parsedStatements = new LinkedList<>();
  for (Statement update : updates) {
    ParsedStatement parsedStatement = getStatementParser().parse(update);
    switch (parsedStatement.getType()) {
      case UPDATE:
        parsedStatements.add(parsedStatement);
        break;
      case CLIENT_SIDE:
      case QUERY:
      case DDL:
      case UNKNOWN:
      default:
        throw SpannerExceptionFactory.newSpannerException(
            ErrorCode.INVALID_ARGUMENT,
            "The batch update list contains a statement that is not an update statement: "
                + parsedStatement.getSqlWithoutComments());
    }
  }
  return internalExecuteBatchUpdateAsync(parsedStatements);
}
/**
 * Appends {@code option} to {@code options}, returning a new array of the same runtime
 * component type. Factored out of the four merge helpers below, which previously each
 * duplicated this copy-and-append logic.
 */
private static <T> T[] appendOption(T[] options, T option) {
  T[] merged = Arrays.copyOf(options, options.length + 1);
  merged[options.length] = option;
  return merged;
}
/**
 * Merges the one-shot statement tag (if any) into the given query options and clears it,
 * as the tag applies to the next statement only.
 */
private QueryOption[] mergeQueryStatementTag(QueryOption... options) {
  if (this.statementTag != null) {
    // Shortcut for the most common scenario.
    if (options == null || options.length == 0) {
      options = new QueryOption[] {Options.tag(statementTag)};
    } else {
      options = appendOption(options, Options.tag(statementTag));
    }
    this.statementTag = null;
  }
  return options;
}
/** Merges the connection-level RPC priority (if any) into the given query options. */
private QueryOption[] mergeQueryRequestOptions(QueryOption... options) {
  if (this.rpcPriority != null) {
    // Shortcut for the most common scenario.
    if (options == null || options.length == 0) {
      options = new QueryOption[] {Options.priority(this.rpcPriority)};
    } else {
      options = appendOption(options, Options.priority(this.rpcPriority));
    }
  }
  return options;
}
/**
 * Merges the one-shot statement tag (if any) into the given update options and clears it,
 * as the tag applies to the next statement only.
 */
private UpdateOption[] mergeUpdateStatementTag(UpdateOption... options) {
  if (this.statementTag != null) {
    // Shortcut for the most common scenario.
    if (options == null || options.length == 0) {
      options = new UpdateOption[] {Options.tag(statementTag)};
    } else {
      options = appendOption(options, Options.tag(statementTag));
    }
    this.statementTag = null;
  }
  return options;
}
/** Merges the connection-level RPC priority (if any) into the given update options. */
private UpdateOption[] mergeUpdateRequestOptions(UpdateOption... options) {
  if (this.rpcPriority != null) {
    // Shortcut for the most common scenario.
    if (options == null || options.length == 0) {
      options = new UpdateOption[] {Options.priority(this.rpcPriority)};
    } else {
      options = appendOption(options, Options.priority(this.rpcPriority));
    }
  }
  return options;
}
// Executes a query (or analyzed DML) on the current/new unit of work and blocks for the result.
private ResultSet internalExecuteQuery(
    final ParsedStatement statement,
    final AnalyzeMode analyzeMode,
    final QueryOption... options) {
  Preconditions.checkArgument(
      statement.getType() == StatementType.QUERY
          || (statement.getType() == StatementType.UPDATE && analyzeMode != AnalyzeMode.NONE),
      "Statement must either be a query or a DML mode with analyzeMode!=NONE");
  UnitOfWork transaction = getCurrentUnitOfWorkOrStartNewUnitOfWork();
  return get(
      transaction.executeQueryAsync(
          statement, analyzeMode, mergeQueryRequestOptions(mergeQueryStatementTag(options))));
}
// Async query execution; unlike the sync variant this never accepts DML statements.
private AsyncResultSet internalExecuteQueryAsync(
    final ParsedStatement statement,
    final AnalyzeMode analyzeMode,
    final QueryOption... options) {
  Preconditions.checkArgument(
      statement.getType() == StatementType.QUERY, "Statement must be a query");
  UnitOfWork transaction = getCurrentUnitOfWorkOrStartNewUnitOfWork();
  return ResultSets.toAsyncResultSet(
      transaction.executeQueryAsync(
          statement, analyzeMode, mergeQueryRequestOptions(mergeQueryStatementTag(options))),
      spanner.getAsyncExecutorProvider(),
      options);
}
// Executes a single DML statement on the current/new unit of work.
private ApiFuture<Long> internalExecuteUpdateAsync(
    final ParsedStatement update, UpdateOption... options) {
  Preconditions.checkArgument(
      update.getType() == StatementType.UPDATE, "Statement must be an update");
  UnitOfWork transaction = getCurrentUnitOfWorkOrStartNewUnitOfWork();
  return transaction.executeUpdateAsync(
      update, mergeUpdateRequestOptions(mergeUpdateStatementTag(options)));
}
// Analyzes (plans/profiles) a single DML statement without reporting an update count.
private ApiFuture<ResultSetStats> internalAnalyzeUpdateAsync(
    final ParsedStatement update, AnalyzeMode analyzeMode, UpdateOption... options) {
  Preconditions.checkArgument(
      update.getType() == StatementType.UPDATE, "Statement must be an update");
  UnitOfWork transaction = getCurrentUnitOfWorkOrStartNewUnitOfWork();
  return transaction.analyzeUpdateAsync(
      update, analyzeMode, mergeUpdateRequestOptions(mergeUpdateStatementTag(options)));
}
// Executes a pre-validated list of DML statements as one batch.
private ApiFuture<long[]> internalExecuteBatchUpdateAsync(
    List<ParsedStatement> updates, UpdateOption... options) {
  UnitOfWork transaction = getCurrentUnitOfWorkOrStartNewUnitOfWork();
  return transaction.executeBatchUpdateAsync(
      updates, mergeUpdateRequestOptions(mergeUpdateStatementTag(options)));
}
/**
 * Returns the current {@link UnitOfWork} of this connection, or creates a new one based on the
 * current transaction settings of the connection and returns that.
 */
@VisibleForTesting
UnitOfWork getCurrentUnitOfWorkOrStartNewUnitOfWork() {
  // A finished (committed/rolled back/aborted) unit of work is no longer active and
  // must be replaced before the next statement.
  if (this.currentUnitOfWork == null || !this.currentUnitOfWork.isActive()) {
    this.currentUnitOfWork = createNewUnitOfWork();
  }
  return this.currentUnitOfWork;
}
/**
 * Creates the unit of work that matches the connection's current state:
 * single-use (autocommit), read-only transaction, read/write transaction, DML batch
 * (nested inside the current transaction) or DDL batch.
 */
@VisibleForTesting
UnitOfWork createNewUnitOfWork() {
  if (isAutocommit() && !isInTransaction() && !isInBatch()) {
    // Plain autocommit: every statement runs in its own single-use transaction.
    return SingleUseTransaction.newBuilder()
        .setDdlClient(ddlClient)
        .setDatabaseClient(dbClient)
        .setReadOnly(isReadOnly())
        .setReadOnlyStaleness(readOnlyStaleness)
        .setAutocommitDmlMode(autocommitDmlMode)
        .setReturnCommitStats(returnCommitStats)
        .setStatementTimeout(statementTimeout)
        .withStatementExecutor(statementExecutor)
        .build();
  } else {
    switch (getUnitOfWorkType()) {
      case READ_ONLY_TRANSACTION:
        return ReadOnlyTransaction.newBuilder()
            .setDatabaseClient(dbClient)
            .setReadOnlyStaleness(readOnlyStaleness)
            .setStatementTimeout(statementTimeout)
            .withStatementExecutor(statementExecutor)
            .setTransactionTag(transactionTag)
            .setRpcPriority(rpcPriority)
            .build();
      case READ_WRITE_TRANSACTION:
        return ReadWriteTransaction.newBuilder()
            .setDatabaseClient(dbClient)
            .setRetryAbortsInternally(retryAbortsInternally)
            .setReturnCommitStats(returnCommitStats)
            .setTransactionRetryListeners(transactionRetryListeners)
            .setStatementTimeout(statementTimeout)
            .withStatementExecutor(statementExecutor)
            .setTransactionTag(transactionTag)
            .setRpcPriority(rpcPriority)
            .build();
      case DML_BATCH:
        // A DML batch can run inside the current transaction. It should therefore only
        // temporarily replace the current transaction.
        pushCurrentUnitOfWorkToTransactionStack();
        return DmlBatch.newBuilder()
            .setTransaction(currentUnitOfWork)
            .setStatementTimeout(statementTimeout)
            .withStatementExecutor(statementExecutor)
            .setStatementTag(statementTag)
            .setRpcPriority(rpcPriority)
            .build();
      case DDL_BATCH:
        return DdlBatch.newBuilder()
            .setDdlClient(ddlClient)
            .setDatabaseClient(dbClient)
            .setStatementTimeout(statementTimeout)
            .withStatementExecutor(statementExecutor)
            .build();
      default:
    }
  }
  // No unit-of-work type matched the current connection state.
  throw SpannerExceptionFactory.newSpannerException(
      ErrorCode.FAILED_PRECONDITION,
      "This connection does not have an active transaction and the state of this connection does not allow any new transactions to be started");
}
/** Pushes the current unit of work to the stack of nested transactions. */
private void pushCurrentUnitOfWorkToTransactionStack() {
  Preconditions.checkState(currentUnitOfWork != null, "There is no current transaction");
  transactionStack.push(currentUnitOfWork);
}
/** Set the {@link UnitOfWork} of this connection back to the previous {@link UnitOfWork}. */
private void popUnitOfWorkFromTransactionStack() {
  Preconditions.checkState(
      !transactionStack.isEmpty(), "There is no unit of work in the transaction stack");
  this.currentUnitOfWork = transactionStack.pop();
}
// Runs a DDL statement on the current (or a newly created) unit of work.
private ApiFuture<Void> executeDdlAsync(ParsedStatement ddl) {
  return getCurrentUnitOfWorkOrStartNewUnitOfWork().executeDdlAsync(ddl);
}
@Override
public void write(Mutation mutation) {
  get(writeAsync(Collections.singleton(Preconditions.checkNotNull(mutation))));
}
@Override
public ApiFuture<Void> writeAsync(Mutation mutation) {
  return writeAsync(Collections.singleton(Preconditions.checkNotNull(mutation)));
}
@Override
public void write(Iterable<Mutation> mutations) {
  get(writeAsync(Preconditions.checkNotNull(mutations)));
}
@Override
public ApiFuture<Void> writeAsync(Iterable<Mutation> mutations) {
  Preconditions.checkNotNull(mutations);
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  // Direct writes bypass buffering and are only valid in autocommit mode.
  ConnectionPreconditions.checkState(isAutocommit(), ONLY_ALLOWED_IN_AUTOCOMMIT);
  return getCurrentUnitOfWorkOrStartNewUnitOfWork().writeAsync(mutations);
}
@Override
public void bufferedWrite(Mutation mutation) {
  // Bug fix: the null check must apply to the mutation itself, not to the singleton set
  // wrapping it. Collections.singleton(mutation) is never null (and accepts a null
  // element), so the old checkNotNull(Collections.singleton(mutation)) could never fire
  // and a null mutation slipped through. This now matches write(Mutation).
  bufferedWrite(Collections.singleton(Preconditions.checkNotNull(mutation)));
}
@Override
public void bufferedWrite(Iterable<Mutation> mutations) {
  Preconditions.checkNotNull(mutations);
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  // Buffered writes are applied at commit time and therefore require a transaction,
  // i.e. they are not allowed in autocommit mode.
  ConnectionPreconditions.checkState(!isAutocommit(), NOT_ALLOWED_IN_AUTOCOMMIT);
  get(getCurrentUnitOfWorkOrStartNewUnitOfWork().writeAsync(mutations));
}
@Override
public void startBatchDdl() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ConnectionPreconditions.checkState(
      !isBatchActive(), "Cannot start a DDL batch when a batch is already active");
  ConnectionPreconditions.checkState(
      !isReadOnly(), "Cannot start a DDL batch when the connection is in read-only mode");
  ConnectionPreconditions.checkState(
      !isTransactionStarted(), "Cannot start a DDL batch while a transaction is active");
  ConnectionPreconditions.checkState(
      !(isAutocommit() && isInTransaction()),
      "Cannot start a DDL batch while in a temporary transaction");
  ConnectionPreconditions.checkState(
      !transactionBeginMarked, "Cannot start a DDL batch when a transaction has begun");
  // A DDL batch replaces the current unit of work entirely (no nesting).
  this.batchMode = BatchMode.DDL;
  this.unitOfWorkType = UnitOfWorkType.DDL_BATCH;
  this.currentUnitOfWork = createNewUnitOfWork();
}
@Override
public void startBatchDml() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ConnectionPreconditions.checkState(
      !isBatchActive(), "Cannot start a DML batch when a batch is already active");
  ConnectionPreconditions.checkState(
      !isReadOnly(), "Cannot start a DML batch when the connection is in read-only mode");
  ConnectionPreconditions.checkState(
      !(isInTransaction() && getTransactionMode() == TransactionMode.READ_ONLY_TRANSACTION),
      "Cannot start a DML batch when a read-only transaction is in progress");
  // Make sure that there is a current unit of work that the batch can use.
  getCurrentUnitOfWorkOrStartNewUnitOfWork();
  // Then create the DML batch.
  // createNewUnitOfWork() pushes the existing transaction onto the stack so the batch
  // only temporarily replaces it (see case DML_BATCH there).
  this.batchMode = BatchMode.DML;
  this.unitOfWorkType = UnitOfWorkType.DML_BATCH;
  this.currentUnitOfWork = createNewUnitOfWork();
}
@Override
public long[] runBatch() {
  return get(runBatchAsync());
}
@Override
public ApiFuture<long[]> runBatchAsync() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ConnectionPreconditions.checkState(isBatchActive(), "This connection has no active batch");
  try {
    if (this.currentUnitOfWork != null) {
      return this.currentUnitOfWork.runBatchAsync();
    }
    // An 'active' batch without a unit of work has nothing to execute.
    return ApiFutures.immediateFuture(new long[0]);
  } finally {
    // Always leave batch mode, even if submitting the batch failed.
    this.batchMode = BatchMode.NONE;
    setDefaultTransactionOptions();
  }
}
@Override
public void abortBatch() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  ConnectionPreconditions.checkState(isBatchActive(), "This connection has no active batch");
  try {
    if (this.currentUnitOfWork != null) {
      this.currentUnitOfWork.abortBatch();
    }
  } finally {
    // Always leave batch mode, even if aborting threw.
    this.batchMode = BatchMode.NONE;
    setDefaultTransactionOptions();
  }
}
private boolean isBatchActive() {
  return isDdlBatchActive() || isDmlBatchActive();
}
@Override
public boolean isDdlBatchActive() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  return this.batchMode == BatchMode.DDL;
}
@Override
public boolean isDmlBatchActive() {
  ConnectionPreconditions.checkState(!isClosed(), CLOSED_ERROR_MSG);
  return this.batchMode == BatchMode.DML;
}
}
|
fix: gracefully ignore RejectedExecutionException during Connection#close() (#1887)
|
google-cloud-spanner/src/main/java/com/google/cloud/spanner/connection/ConnectionImpl.java
|
fix: gracefully ignore RejectedExecutionException during Connection#close() (#1887)
|
|
Java
|
apache-2.0
|
191c4a7c65105600ba19c10442b4c23bbaa79911
| 0
|
katre/bazel,ButterflyNetwork/bazel,katre/bazel,bazelbuild/bazel,bazelbuild/bazel,ButterflyNetwork/bazel,cushon/bazel,cushon/bazel,ButterflyNetwork/bazel,bazelbuild/bazel,bazelbuild/bazel,bazelbuild/bazel,katre/bazel,ButterflyNetwork/bazel,bazelbuild/bazel,ButterflyNetwork/bazel,cushon/bazel,ButterflyNetwork/bazel,cushon/bazel,katre/bazel,cushon/bazel,katre/bazel,cushon/bazel,katre/bazel
|
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.buildeventservice;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Stopwatch;
import com.google.common.base.Strings;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.flogger.GoogleLogger;
import com.google.common.util.concurrent.ForwardingListenableFuture.SimpleForwardingListenableFuture;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.common.util.concurrent.Uninterruptibles;
import com.google.devtools.build.lib.analysis.test.TestConfiguration.TestOptions;
import com.google.devtools.build.lib.authandtls.AuthAndTLSOptions;
import com.google.devtools.build.lib.bugreport.BugReport;
import com.google.devtools.build.lib.buildeventservice.BuildEventServiceOptions.BesUploadMode;
import com.google.devtools.build.lib.buildeventservice.client.BuildEventServiceClient;
import com.google.devtools.build.lib.buildeventstream.AnnounceBuildEventTransportsEvent;
import com.google.devtools.build.lib.buildeventstream.BuildEventArtifactUploader;
import com.google.devtools.build.lib.buildeventstream.BuildEventProtocolOptions;
import com.google.devtools.build.lib.buildeventstream.BuildEventStreamProtos.Aborted.AbortReason;
import com.google.devtools.build.lib.buildeventstream.BuildEventTransport;
import com.google.devtools.build.lib.buildeventstream.BuildEventTransportClosedEvent;
import com.google.devtools.build.lib.buildeventstream.LocalFilesArtifactUploader;
import com.google.devtools.build.lib.buildeventstream.transports.BinaryFormatFileTransport;
import com.google.devtools.build.lib.buildeventstream.transports.BuildEventStreamOptions;
import com.google.devtools.build.lib.buildeventstream.transports.JsonFormatFileTransport;
import com.google.devtools.build.lib.buildeventstream.transports.TextFormatFileTransport;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.events.Reporter;
import com.google.devtools.build.lib.network.ConnectivityStatus;
import com.google.devtools.build.lib.network.ConnectivityStatus.Status;
import com.google.devtools.build.lib.network.ConnectivityStatusProvider;
import com.google.devtools.build.lib.profiler.AutoProfiler;
import com.google.devtools.build.lib.profiler.GoogleAutoProfilerUtils;
import com.google.devtools.build.lib.runtime.BlazeModule;
import com.google.devtools.build.lib.runtime.BuildEventArtifactUploaderFactory;
import com.google.devtools.build.lib.runtime.BuildEventStreamer;
import com.google.devtools.build.lib.runtime.CommandEnvironment;
import com.google.devtools.build.lib.runtime.CommonCommandOptions;
import com.google.devtools.build.lib.runtime.CountingArtifactGroupNamer;
import com.google.devtools.build.lib.runtime.SynchronizedOutputStream;
import com.google.devtools.build.lib.runtime.TargetSummaryPublisher;
import com.google.devtools.build.lib.server.FailureDetails.BuildProgress;
import com.google.devtools.build.lib.server.FailureDetails.FailureDetail;
import com.google.devtools.build.lib.util.AbruptExitException;
import com.google.devtools.build.lib.util.DetailedExitCode;
import com.google.devtools.build.lib.util.ExitCode;
import com.google.devtools.build.lib.util.io.OutErr;
import com.google.devtools.common.options.OptionsBase;
import com.google.devtools.common.options.OptionsParsingException;
import com.google.devtools.common.options.OptionsParsingResult;
import com.google.protobuf.util.Timestamps;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.time.Duration;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import javax.annotation.Nullable;
/**
* Module responsible for the Build Event Transport (BEP) and Build Event Service (BES)
* functionality.
*/
public abstract class BuildEventServiceModule<OptionsT extends BuildEventServiceOptions>
extends BlazeModule {
private static final GoogleLogger logger = GoogleLogger.forEnclosingClass();
/**
* TargetComplete BEP events scale with the value of --runs_per_tests, thus setting a very large
* value for can result in BEP events that are too big for BES to handle.
*/
@VisibleForTesting static final int RUNS_PER_TEST_LIMIT = 100000;
private BuildEventProtocolOptions bepOptions;
private AuthAndTLSOptions authTlsOptions;
private BuildEventStreamOptions besStreamOptions;
private boolean isRunsPerTestOverTheLimit;
private BuildEventArtifactUploaderFactory uploaderFactoryToCleanup;
/**
* Holds the close futures for the upload of each transport with timeouts attached to them using
* {@link #constructCloseFuturesMapWithTimeouts(ImmutableMap)} obtained from {@link
* BuildEventTransport#getTimeout()}.
*/
private ImmutableMap<BuildEventTransport, ListenableFuture<Void>> closeFuturesWithTimeoutsMap =
ImmutableMap.of();
/**
* Holds the half-close futures for the upload of each transport with timeouts attached to them
* using {@link #constructCloseFuturesMapWithTimeouts(ImmutableMap)} obtained from {@link
* BuildEventTransport#getTimeout()}.
*
* <p>The completion of the half-close indicates that the client has sent all of the data to the
* server and is just waiting for acknowledgement. The client must still keep the data buffered
* locally in case acknowledgement fails.
*/
private ImmutableMap<BuildEventTransport, ListenableFuture<Void>>
halfCloseFuturesWithTimeoutsMap = ImmutableMap.of();
private BesUploadMode previousUploadMode = BesUploadMode.WAIT_FOR_UPLOAD_COMPLETE;
// TODO(lpino): Use Optional instead of @Nullable for the members below.
@Nullable private OutErr outErr;
@Nullable private ImmutableSet<BuildEventTransport> bepTransports;
@Nullable private String buildRequestId;
@Nullable private String invocationId;
@Nullable private Reporter reporter;
@Nullable private BuildEventStreamer streamer;
@Nullable private ConnectivityStatusProvider connectivityProvider;
private static final String CONNECTIVITY_CACHE_KEY = "BES";
protected OptionsT besOptions;
protected void reportCommandLineError(EventHandler commandLineReporter, Exception exception) {
// Don't hide unchecked exceptions as part of the error reporting.
Throwables.throwIfUnchecked(exception);
commandLineReporter.handle(Event.error(exception.getMessage()));
}
  /** Maximum duration Bazel waits for the previous invocation to finish before cancelling it. */
  protected Duration getMaxWaitForPreviousInvocation() {
    // Protected so subclasses can tune how long lingering uploads may delay a new command.
    return Duration.ofSeconds(5);
  }
  /** Report errors in the command line and possibly fail the build. */
  private void reportError(
      EventHandler commandLineReporter,
      ModuleEnvironment moduleEnvironment,
      String msg,
      Exception exception,
      BuildProgress.Code besCode) {
    // Don't hide unchecked exceptions as part of the error reporting.
    Throwables.throwIfUnchecked(exception);
    logger.atSevere().withCause(exception).log("%s", msg);
    reportCommandLineError(commandLineReporter, exception);
    // Ask the module environment to abort the build with a detailed BES failure code.
    moduleEnvironment.exit(createAbruptExitException(exception, msg, besCode));
  }
  /** Options classes this module contributes to every command. */
  @Override
  public Iterable<Class<? extends OptionsBase>> getCommonCommandOptions() {
    return ImmutableList.of(
        optionsClass(),
        AuthAndTLSOptions.class,
        BuildEventStreamOptions.class,
        BuildEventProtocolOptions.class);
  }
  /** Resets the maps tracking the state of closing/half-closing BES transports. */
  private void resetPendingUploads() {
    closeFuturesWithTimeoutsMap = ImmutableMap.of();
    halfCloseFuturesWithTimeoutsMap = ImmutableMap.of();
  }
  /**
   * Cancels and interrupts any in-flight threads closing BES transports, then resets the maps
   * tracking in-flight close operations.
   */
  private void cancelAndResetPendingUploads() {
    closeFuturesWithTimeoutsMap
        .values()
        .forEach(closeFuture -> closeFuture.cancel(/*mayInterruptIfRunning=*/ true));
    resetPendingUploads();
  }
private static boolean isTimeoutException(ExecutionException e) {
return e.getCause() instanceof TimeoutException;
}
  /**
   * Blocks for up to {@link #getMaxWaitForPreviousInvocation} while the previous invocation's BEP
   * uploads finish, then cancels or resets the pending-upload maps depending on the outcome.
   */
  private void waitForPreviousInvocation(boolean isShutdown) {
    if (closeFuturesWithTimeoutsMap.isEmpty()) {
      return;
    }
    ConnectivityStatus status = connectivityProvider.getStatus(CONNECTIVITY_CACHE_KEY);
    if (status.status != ConnectivityStatus.Status.OK) {
      reporter.handle(
          Event.info(
              String.format(
                  "The Build Event Protocol encountered a connectivity problem: %s. Cancelling"
                      + " previous background uploads",
                  status)));
      cancelAndResetPendingUploads();
      return;
    }
    ImmutableMap<BuildEventTransport, ListenableFuture<Void>> waitingFutureMap = null;
    boolean cancelCloseFutures = true;
    switch (previousUploadMode) {
      case FULLY_ASYNC:
        // For a fully async upload the half-close (all events sent) is sufficient; on shutdown
        // we must wait for the full close instead.
        waitingFutureMap =
            isShutdown ? closeFuturesWithTimeoutsMap : halfCloseFuturesWithTimeoutsMap;
        cancelCloseFutures = false;
        break;
      case WAIT_FOR_UPLOAD_COMPLETE:
      case NOWAIT_FOR_UPLOAD_COMPLETE:
        waitingFutureMap = closeFuturesWithTimeoutsMap;
        cancelCloseFutures = true;
        break;
    }
    Stopwatch stopwatch = Stopwatch.createStarted();
    try {
      // TODO(b/234994611): It would be better to report before we wait, but the current
      // infrastructure does not support that. At least we can report it afterwards.
      Uninterruptibles.getUninterruptibly(
          Futures.allAsList(waitingFutureMap.values()),
          getMaxWaitForPreviousInvocation().toMillis(),
          TimeUnit.MILLISECONDS);
      long waitedMillis = stopwatch.elapsed().toMillis();
      // Only tell the user about waits long enough to be noticeable.
      if (waitedMillis > 100) {
        reporter.handle(
            Event.info(
                String.format(
                    "Waited for the background upload of the Build Event Protocol for "
                        + "%d.%03d seconds.",
                    waitedMillis / 1000, waitedMillis % 1000)));
      }
    } catch (TimeoutException exception) {
      long waitedMillis = stopwatch.elapsed().toMillis();
      String msg =
          String.format(
              "The background upload of the Build Event Protocol for the previous invocation "
                  + "failed to complete in %d.%03d seconds. "
                  + "Cancelling and starting a new invocation...",
              waitedMillis / 1000, waitedMillis % 1000);
      reporter.handle(Event.warn(msg));
      logger.atWarning().withCause(exception).log("%s", msg);
      cancelCloseFutures = true;
    } catch (ExecutionException e) {
      String msg;
      // Futures.withTimeout wraps the TimeoutException in an ExecutionException when the future
      // times out.
      if (isTimeoutException(e)) {
        msg =
            "The background upload of the Build Event Protocol for the previous invocation "
                + "failed due to a network timeout. Ignoring the failure and starting a new "
                + "invocation...";
      } else {
        msg =
            String.format(
                "The background upload of the Build Event Protocol for the previous invocation "
                    + "failed with the following exception: '%s'. "
                    + "Ignoring the failure and starting a new invocation...",
                e.getMessage());
      }
      reporter.handle(Event.warn(msg));
      logger.atWarning().withCause(e).log("%s", msg);
      cancelCloseFutures = true;
    } finally {
      if (cancelCloseFutures) {
        cancelAndResetPendingUploads();
      } else {
        resetPendingUploads();
      }
    }
  }
  /**
   * Sets up per-invocation BEP/BES state: waits for the previous invocation's uploads, creates
   * the configured transports, and registers the {@link BuildEventStreamer} on the event bus.
   */
  @Override
  public void beforeCommand(CommandEnvironment cmdEnv) {
    this.invocationId = cmdEnv.getCommandId().toString();
    this.buildRequestId = cmdEnv.getBuildRequestId();
    this.reporter = cmdEnv.getReporter();
    this.connectivityProvider =
        Preconditions.checkNotNull(
            cmdEnv.getRuntime().getBlazeModule(ConnectivityStatusProvider.class),
            "No ConnectivityStatusProvider found in modules list");
    OptionsParsingResult parsingResult = cmdEnv.getOptions();
    this.besOptions = Preconditions.checkNotNull(parsingResult.getOptions(optionsClass()));
    this.bepOptions =
        Preconditions.checkNotNull(parsingResult.getOptions(BuildEventProtocolOptions.class));
    this.authTlsOptions =
        Preconditions.checkNotNull(parsingResult.getOptions(AuthAndTLSOptions.class));
    this.besStreamOptions =
        Preconditions.checkNotNull(parsingResult.getOptions(BuildEventStreamOptions.class));
    // --runs_per_test beyond RUNS_PER_TEST_LIMIT produces events too big for BES; see
    // createBesTransport, which refuses to create the transport in that case.
    this.isRunsPerTestOverTheLimit =
        parsingResult.getOptions(TestOptions.class) != null
            && parsingResult.getOptions(TestOptions.class).runsPerTest.stream()
                .anyMatch(
                    (perLabelOptions) ->
                        Integer.parseInt(Iterables.getOnlyElement(perLabelOptions.getOptions()))
                            > RUNS_PER_TEST_LIMIT);
    ConnectivityStatus status = connectivityProvider.getStatus(CONNECTIVITY_CACHE_KEY);
    // Fall back to local-only artifact uploads when connectivity is degraded.
    String buildEventUploadStrategy =
        status.status.equals(ConnectivityStatus.Status.OK)
            ? this.bepOptions.buildEventUploadStrategy
            : "local";
    CountingArtifactGroupNamer artifactGroupNamer = new CountingArtifactGroupNamer();
    // We need to wait for the previous invocation before we check the list of allowed commands to
    // allow completing previous runs using BES, for example:
    //   bazel build (..run with async BES..)
    //   bazel info <-- Doesn't run with BES unless we wait before checking {@code allowedCommands}.
    boolean commandIsShutdown = "shutdown".equals(cmdEnv.getCommandName());
    waitForPreviousInvocation(commandIsShutdown);
    if (commandIsShutdown && uploaderFactoryToCleanup != null) {
      uploaderFactoryToCleanup.shutdown();
    }
    if (!allowedCommands(besOptions).contains(cmdEnv.getCommandName())) {
      // Exit early if the running command isn't supported.
      return;
    }
    BuildEventArtifactUploaderFactory uploaderFactory =
        cmdEnv
            .getRuntime()
            .getBuildEventArtifactUploaderFactoryMap()
            .select(buildEventUploadStrategy);
    ThrowingBuildEventArtifactUploaderSupplier uploaderSupplier =
        new ThrowingBuildEventArtifactUploaderSupplier(() -> uploaderFactory.create(cmdEnv));
    this.uploaderFactoryToCleanup = uploaderFactory;
    try {
      bepTransports = createBepTransports(cmdEnv, uploaderSupplier, artifactGroupNamer);
    } catch (IOException e) {
      cmdEnv
          .getBlazeModuleEnvironment()
          .exit(
              createAbruptExitException(
                  e,
                  "Could not create BEP transports.",
                  BuildProgress.Code.BES_INITIALIZATION_ERROR));
      return;
    }
    if (bepTransports.isEmpty()) {
      // Exit early if there are no transports to stream to.
      return;
    }
    if (bepOptions.publishTargetSummary) {
      cmdEnv.getEventBus().register(new TargetSummaryPublisher(cmdEnv.getEventBus()));
    }
    streamer =
        new BuildEventStreamer.Builder()
            .buildEventTransports(bepTransports)
            .besStreamOptions(besStreamOptions)
            .publishTargetSummaries(bepOptions.publishTargetSummary)
            .artifactGroupNamer(artifactGroupNamer)
            .oomMessage(parsingResult.getOptions(CommonCommandOptions.class).oomMessage)
            .build();
    cmdEnv.getEventBus().register(streamer);
    registerOutAndErrOutputStreams();
    // This event should probably be posted in a more general place (e.g. {@link BuildTool};
    // however, so far the BES module is the only module that requires extra work after the build
    // so we post it here until it's needed for other modules.
    reporter.post(new AnnounceBuildEventTransportsEvent(bepTransports));
  }
  /**
   * Wires stdout/stderr through {@link SynchronizedOutputStream}s so the streamer can read the
   * buffered console output via {@code readAndReset}.
   */
  private void registerOutAndErrOutputStreams() {
    int bufferSize = besOptions.besOuterrBufferSize;
    int chunkSize = besOptions.besOuterrChunkSize;
    SynchronizedOutputStream out = new SynchronizedOutputStream(bufferSize, chunkSize);
    SynchronizedOutputStream err = new SynchronizedOutputStream(bufferSize, chunkSize);
    this.outErr = OutErr.create(out, err);
    streamer.registerOutErrProvider(
        new BuildEventStreamer.OutErrProvider() {
          @Override
          public Iterable<String> getOut() {
            return out.readAndReset();
          }

          @Override
          public Iterable<String> getErr() {
            return err.readAndReset();
          }
        });
    err.registerStreamer(streamer);
    out.registerStreamer(streamer);
  }
@Override
public OutErr getOutputListener() {
return outErr;
}
  /** Aborts the streamer with {@code reason} and blocks until every transport has closed. */
  private void forceShutdownBuildEventStreamer(AbortReason reason) {
    streamer.closeOnAbort(reason);
    closeFuturesWithTimeoutsMap =
        constructCloseFuturesMapWithTimeouts(streamer.getCloseFuturesMap());
    try {
      logger.atInfo().log("Closing pending build event transports");
      Uninterruptibles.getUninterruptibly(Futures.allAsList(closeFuturesWithTimeoutsMap.values()));
    } catch (ExecutionException e) {
      logger.atSevere().withCause(e).log("Failed to close a build event transport");
    } finally {
      // Regardless of the outcome, no uploads must survive this forced shutdown.
      cancelAndResetPendingUploads();
    }
  }
  /** Forcibly closes the streamer and uploader when the server is going down due to a crash. */
  @Override
  public void blazeShutdownOnCrash(DetailedExitCode exitCode) {
    if (streamer != null) {
      logger.atWarning().log("Attempting to close BES streamer on crash");
      forceShutdownBuildEventStreamer(
          // Distinguish OOM crashes so the abort reason reflects the actual failure.
          exitCode.getExitCode().equals(ExitCode.OOM_ERROR)
              ? AbortReason.OUT_OF_MEMORY
              : AbortReason.INTERNAL);
      uploaderFactoryToCleanup.shutdown();
    }
  }
  /** Waits briefly for pending BEP uploads during normal server shutdown, then cleans up. */
  @Override
  public void blazeShutdown() {
    if (closeFuturesWithTimeoutsMap.isEmpty()) {
      return;
    }
    try {
      Uninterruptibles.getUninterruptibly(
          Futures.allAsList(closeFuturesWithTimeoutsMap.values()),
          getMaxWaitForPreviousInvocation().getSeconds(),
          TimeUnit.SECONDS);
    } catch (TimeoutException | ExecutionException exception) {
      logger.atWarning().withCause(exception).log(
          "Encountered Exception when closing BEP transports in Blaze's shutting down sequence");
    } finally {
      cancelAndResetPendingUploads();
      if (uploaderFactoryToCleanup != null) {
        uploaderFactoryToCleanup.shutdown();
      }
    }
  }
private void waitForBuildEventTransportsToClose(
Map<BuildEventTransport, ListenableFuture<Void>> transportFutures,
boolean besUploadModeIsSynchronous)
throws AbruptExitException {
final ScheduledExecutorService executor =
Executors.newSingleThreadScheduledExecutor(
new ThreadFactoryBuilder().setNameFormat("bes-notify-ui-%d").build());
ScheduledFuture<?> waitMessageFuture = null;
try {
// Notify the UI handler when a transport finished closing.
transportFutures.forEach(
(bepTransport, closeFuture) ->
closeFuture.addListener(
() -> {
reporter.post(new BuildEventTransportClosedEvent(bepTransport));
},
executor));
try (AutoProfiler p =
GoogleAutoProfilerUtils.logged(
"waiting for BES close for invocation " + this.invocationId)) {
Uninterruptibles.getUninterruptibly(Futures.allAsList(transportFutures.values()));
}
} catch (ExecutionException e) {
// Futures.withTimeout wraps the TimeoutException in an ExecutionException when the future
// times out.
if (isTimeoutException(e)) {
throw createAbruptExitException(
e,
"The Build Event Protocol upload timed out.",
BuildProgress.Code.BES_UPLOAD_TIMEOUT_ERROR);
}
Throwables.throwIfInstanceOf(e.getCause(), AbruptExitException.class);
throw new RuntimeException(
String.format(
"Unexpected Exception '%s' when closing BEP transports, this is a bug.",
e.getCause().getMessage()),
e);
} finally {
if (besUploadModeIsSynchronous) {
cancelAndResetPendingUploads();
}
if (waitMessageFuture != null) {
waitMessageFuture.cancel(/* mayInterruptIfRunning= */ true);
}
executor.shutdown();
}
}
  /**
   * Returns a copy of {@code bepTransportToCloseFuturesMap} in which each close future is capped
   * by its transport's timeout; transports with a zero or negative timeout keep their original
   * future unchanged.
   */
  private static ImmutableMap<BuildEventTransport, ListenableFuture<Void>>
      constructCloseFuturesMapWithTimeouts(
          ImmutableMap<BuildEventTransport, ListenableFuture<Void>> bepTransportToCloseFuturesMap) {
    ImmutableMap.Builder<BuildEventTransport, ListenableFuture<Void>> builder =
        ImmutableMap.builder();
    bepTransportToCloseFuturesMap.forEach(
        (bepTransport, closeFuture) -> {
          final ListenableFuture<Void> closeFutureWithTimeout;
          if (bepTransport.getTimeout().isZero() || bepTransport.getTimeout().isNegative()) {
            closeFutureWithTimeout = closeFuture;
          } else {
            final ScheduledExecutorService timeoutExecutor =
                Executors.newSingleThreadScheduledExecutor(
                    new ThreadFactoryBuilder()
                        .setNameFormat("bes-close-" + bepTransport.name() + "-%d")
                        .build());
            // Make sure to avoid propagating the cancellation to the enclosing future since
            // we handle cancellation ourselves in this class.
            // Futures.withTimeout may cancel the enclosing future when the timeout is
            // reached.
            final ListenableFuture<Void> enclosingFuture =
                Futures.nonCancellationPropagating(closeFuture);
            ListenableFuture<Void> timeoutFuture =
                Futures.withTimeout(
                    enclosingFuture,
                    bepTransport.getTimeout().toMillis(),
                    TimeUnit.MILLISECONDS,
                    timeoutExecutor);
            // The single-thread timeout executor is only needed until the future completes.
            timeoutFuture.addListener(timeoutExecutor::shutdown, MoreExecutors.directExecutor());
            // Cancellation is not propagated to the `closeFuture` for the reasons above. But in
            // order to cancel the returned future by our explicit mechanism elsewhere in this
            // class, we need to delegate the `cancel` to `closeFuture` so that cancellation
            // from Futures.withTimeout is ignored and cancellation from our mechanism is properly
            // handled.
            closeFutureWithTimeout =
                new SimpleForwardingListenableFuture<>(timeoutFuture) {
                  @Override
                  public boolean cancel(boolean mayInterruptIfRunning) {
                    return closeFuture.cancel(mayInterruptIfRunning);
                  }
                };
          }
          builder.put(bepTransport, closeFutureWithTimeout);
        });
    return builder.buildOrThrow();
  }
  /**
   * Closes the BEP transports: blocks on those that must finish before the command ends and
   * immediately notifies the UI for transports left uploading in the background.
   */
  private void closeBepTransports() throws AbruptExitException {
    previousUploadMode = besOptions.besUploadMode;
    closeFuturesWithTimeoutsMap =
        constructCloseFuturesMapWithTimeouts(streamer.getCloseFuturesMap());
    halfCloseFuturesWithTimeoutsMap =
        constructCloseFuturesMapWithTimeouts(streamer.getHalfClosedMap());
    boolean besUploadModeIsSynchronous =
        besOptions.besUploadMode == BesUploadMode.WAIT_FOR_UPLOAD_COMPLETE;
    Map<BuildEventTransport, ListenableFuture<Void>> blockingTransportFutures = new HashMap<>();
    for (Map.Entry<BuildEventTransport, ListenableFuture<Void>> entry :
        closeFuturesWithTimeoutsMap.entrySet()) {
      BuildEventTransport bepTransport = entry.getKey();
      if (!bepTransport.mayBeSlow() || besUploadModeIsSynchronous) {
        blockingTransportFutures.put(bepTransport, entry.getValue());
      } else {
        // When running asynchronously notify the UI immediately since we won't wait for the
        // uploads to close.
        reporter.post(new BuildEventTransportClosedEvent(bepTransport));
      }
    }
    if (!blockingTransportFutures.isEmpty()) {
      waitForBuildEventTransportsToClose(blockingTransportFutures, besUploadModeIsSynchronous);
    }
  }
  /** Closes per-command BEP state and reports where the build results were streamed. */
  @Override
  public void afterCommand() throws AbruptExitException {
    if (streamer != null) {
      if (!streamer.isClosed()) {
        // This should not occur, but close with an internal error if a {@link BuildEventStreamer}
        // bug manifests as an unclosed streamer.
        logger.atWarning().log("Attempting to close BES streamer after command");
        reporter.handle(Event.warn("BES was not properly closed"));
        forceShutdownBuildEventStreamer(AbortReason.INTERNAL);
      }
      closeBepTransports();
      if (!Strings.isNullOrEmpty(besOptions.besBackend)) {
        constructAndMaybeReportInvocationIdUrl();
      } else if (!bepTransports.isEmpty()) {
        reporter.handle(Event.info("Build Event Protocol files produced successfully."));
      }
    }
    // besStreamOptions can be null if we are crashing. Don't crash here too.
    if (besStreamOptions != null && !besStreamOptions.keepBackendConnections) {
      clearBesClient();
    } else if (besStreamOptions == null) {
      BugReport.sendBugReport(new NullPointerException("besStreamOptions null: in a crash?"));
    }
  }
@Override
public void commandComplete() {
this.outErr = null;
this.bepTransports = null;
this.invocationId = null;
this.buildRequestId = null;
this.reporter = null;
this.streamer = null;
}
private void constructAndMaybeReportInvocationIdUrl() {
if (!getInvocationIdPrefix().isEmpty()) {
reporter.handle(
Event.info("Streaming build results to: " + getInvocationIdPrefix() + invocationId));
}
}
private void constructAndMaybeReportBuildRequestIdUrl() {
if (!getBuildRequestIdPrefix().isEmpty()) {
reporter.handle(
Event.info(
"See "
+ getBuildRequestIdPrefix()
+ buildRequestId
+ " for more information about your request."));
}
}
  /** Logs the BES backend together with the build request and invocation IDs. */
  private void logIds() {
    logger.atInfo().log(
        "Streaming Build Event Protocol to '%s' with build_request_id: '%s'"
            + " and invocation_id: '%s'",
        besOptions.besBackend, buildRequestId, invocationId);
  }
  /**
   * Creates the BES transport, or returns null (after reporting the reason where applicable) when
   * no backend is configured, --runs_per_test is over the limit, connectivity is degraded, or the
   * client cannot be created.
   */
  @Nullable
  private BuildEventServiceTransport createBesTransport(
      CommandEnvironment cmdEnv,
      ThrowingBuildEventArtifactUploaderSupplier uploaderSupplier,
      CountingArtifactGroupNamer artifactGroupNamer)
      throws IOException {
    if (Strings.isNullOrEmpty(besOptions.besBackend)) {
      clearBesClient();
      return null;
    }
    if (isRunsPerTestOverTheLimit) {
      String msg =
          String.format(
              "The value of --runs_per_test is bigger than %d and it will produce build events "
                  + "that are too big for the Build Event Service to handle.",
              RUNS_PER_TEST_LIMIT);
      reportError(
          reporter,
          cmdEnv.getBlazeModuleEnvironment(),
          msg,
          new OptionsParsingException(msg),
          BuildProgress.Code.BES_RUNS_PER_TEST_LIMIT_UNSUPPORTED);
      return null;
    }
    logIds();
    ConnectivityStatus status = connectivityProvider.getStatus(CONNECTIVITY_CACHE_KEY);
    if (status.status != Status.OK) {
      clearBesClient();
      String message =
          String.format(
              "Build Event Service uploads disabled due to a connectivity problem: %s", status);
      reporter.handle(Event.warn(message));
      logger.atWarning().log("%s", message);
      return null;
    }
    final BuildEventServiceClient besClient;
    try {
      besClient = getBesClient(besOptions, authTlsOptions);
    } catch (IOException | OptionsParsingException e) {
      reportError(
          reporter,
          cmdEnv.getBlazeModuleEnvironment(),
          e.getMessage(),
          e,
          BuildProgress.Code.BES_INITIALIZATION_ERROR);
      return null;
    }
    BuildEventServiceProtoUtil besProtoUtil =
        new BuildEventServiceProtoUtil.Builder()
            .buildRequestId(buildRequestId)
            .invocationId(invocationId)
            .projectId(besOptions.instanceName)
            .commandName(cmdEnv.getCommandName())
            .keywords(getBesKeywords(besOptions, cmdEnv.getRuntime().getStartupOptionsProvider()))
            .build();
    return new BuildEventServiceTransport.Builder()
        .localFileUploader(uploaderSupplier.get())
        .besClient(besClient)
        .besOptions(besOptions)
        .besProtoUtil(besProtoUtil)
        .artifactGroupNamer(artifactGroupNamer)
        .bepOptions(bepOptions)
        .clock(cmdEnv.getRuntime().getClock())
        .eventBus(cmdEnv.getEventBus())
        .commandStartTime(Timestamps.fromMillis(cmdEnv.getCommandStartTime()))
        .build();
  }
  /**
   * Creates all configured BEP transports: the optional text/binary/json file transports plus the
   * BES transport when a backend is configured.
   *
   * <p>NOTE(review): the three file-transport branches below are near-identical; consider
   * factoring them into a shared helper.
   */
  private ImmutableSet<BuildEventTransport> createBepTransports(
      CommandEnvironment cmdEnv,
      ThrowingBuildEventArtifactUploaderSupplier uploaderSupplier,
      CountingArtifactGroupNamer artifactGroupNamer)
      throws IOException {
    ImmutableSet.Builder<BuildEventTransport> bepTransportsBuilder = new ImmutableSet.Builder<>();
    if (!Strings.isNullOrEmpty(besStreamOptions.buildEventTextFile)) {
      try {
        BufferedOutputStream bepTextOutputStream =
            new BufferedOutputStream(
                Files.newOutputStream(Paths.get(besStreamOptions.buildEventTextFile)));
        BuildEventArtifactUploader localFileUploader =
            besStreamOptions.buildEventTextFilePathConversion
                ? uploaderSupplier.get()
                : new LocalFilesArtifactUploader();
        bepTransportsBuilder.add(
            new TextFormatFileTransport(
                bepTextOutputStream, bepOptions, localFileUploader, artifactGroupNamer));
      } catch (IOException exception) {
        // TODO(b/125216340): Consider making this a warning instead of an error once the
        // associated bug has been resolved.
        reportError(
            reporter,
            cmdEnv.getBlazeModuleEnvironment(),
            "Unable to write to '"
                + besStreamOptions.buildEventTextFile
                + "'. Omitting --build_event_text_file.",
            exception,
            BuildProgress.Code.BES_LOCAL_WRITE_ERROR);
      }
    }
    if (!Strings.isNullOrEmpty(besStreamOptions.buildEventBinaryFile)) {
      try {
        BufferedOutputStream bepBinaryOutputStream =
            new BufferedOutputStream(
                Files.newOutputStream(Paths.get(besStreamOptions.buildEventBinaryFile)));
        BuildEventArtifactUploader localFileUploader =
            besStreamOptions.buildEventBinaryFilePathConversion
                ? uploaderSupplier.get()
                : new LocalFilesArtifactUploader();
        bepTransportsBuilder.add(
            new BinaryFormatFileTransport(
                bepBinaryOutputStream, bepOptions, localFileUploader, artifactGroupNamer));
      } catch (IOException exception) {
        // TODO(b/125216340): Consider making this a warning instead of an error once the
        // associated bug has been resolved.
        reportError(
            reporter,
            cmdEnv.getBlazeModuleEnvironment(),
            "Unable to write to '"
                + besStreamOptions.buildEventBinaryFile
                + "'. Omitting --build_event_binary_file.",
            exception,
            BuildProgress.Code.BES_LOCAL_WRITE_ERROR);
      }
    }
    if (!Strings.isNullOrEmpty(besStreamOptions.buildEventJsonFile)) {
      try {
        BufferedOutputStream bepJsonOutputStream =
            new BufferedOutputStream(
                Files.newOutputStream(Paths.get(besStreamOptions.buildEventJsonFile)));
        BuildEventArtifactUploader localFileUploader =
            besStreamOptions.buildEventJsonFilePathConversion
                ? uploaderSupplier.get()
                : new LocalFilesArtifactUploader();
        bepTransportsBuilder.add(
            new JsonFormatFileTransport(
                bepJsonOutputStream, bepOptions, localFileUploader, artifactGroupNamer));
      } catch (IOException exception) {
        // TODO(b/125216340): Consider making this a warning instead of an error once the
        // associated bug has been resolved.
        reportError(
            reporter,
            cmdEnv.getBlazeModuleEnvironment(),
            "Unable to write to '"
                + besStreamOptions.buildEventJsonFile
                + "'. Omitting --build_event_json_file.",
            exception,
            BuildProgress.Code.BES_LOCAL_WRITE_ERROR);
      }
    }
    BuildEventServiceTransport besTransport =
        createBesTransport(cmdEnv, uploaderSupplier, artifactGroupNamer);
    if (besTransport != null) {
      constructAndMaybeReportInvocationIdUrl();
      constructAndMaybeReportBuildRequestIdUrl();
      bepTransportsBuilder.add(besTransport);
    }
    return bepTransportsBuilder.build();
  }
  /** Wraps {@code e} in an {@link AbruptExitException} carrying a BES {@link FailureDetail}. */
  private static AbruptExitException createAbruptExitException(
      Exception e, String message, BuildProgress.Code besCode) {
    return new AbruptExitException(
        DetailedExitCode.of(
            FailureDetail.newBuilder()
                .setMessage(message + " " + e.getMessage())
                .setBuildProgress(BuildProgress.newBuilder().setCode(besCode).build())
                .build()),
        e);
  }
  /** The concrete {@link BuildEventServiceOptions} subclass handled by this module. */
  protected abstract Class<OptionsT> optionsClass();

  /** Creates the client used to talk to the Build Event Service backend. */
  protected abstract BuildEventServiceClient getBesClient(
      OptionsT besOptions, AuthAndTLSOptions authAndTLSOptions)
      throws IOException, OptionsParsingException;

  // Called when BES is disabled or backend connections should not be kept; presumably drops any
  // cached client state — confirm against subclass implementations.
  protected abstract void clearBesClient();

  /** The set of command names for which BEP/BES streaming is enabled. */
  protected abstract Set<String> allowedCommands(OptionsT besOptions);

  /** Maps each configured keyword to the "user_keyword=..." form sent to the backend. */
  protected Set<String> getBesKeywords(
      OptionsT besOptions, @Nullable OptionsParsingResult startupOptionsProvider) {
    return besOptions.besKeywords.stream()
        .map(keyword -> "user_keyword=" + keyword)
        .collect(ImmutableSet.toImmutableSet());
  }

  /** A prefix used when printing the invocation ID in the command line */
  protected abstract String getInvocationIdPrefix();

  /** A prefix used when printing the build request ID in the command line */
  protected abstract String getBuildRequestIdPrefix();
  // TODO(b/115961387): This method shouldn't exist. It only does because some tests are relying on
  // the transport creation logic of this module directly.
  /** The transports created for the current command; null outside a command. */
  @VisibleForTesting
  ImmutableSet<BuildEventTransport> getBepTransports() {
    return bepTransports;
  }
  /**
   * Memoizing supplier for the {@link BuildEventArtifactUploader}: the first call to {@link #get}
   * creates and caches the uploader (or caches the {@link IOException} it threw, rethrowing it on
   * every later call); subsequent successful calls retain and return the cached instance.
   */
  private static class ThrowingBuildEventArtifactUploaderSupplier {
    private final Callable<BuildEventArtifactUploader> callable;
    // Uploader cached by the first successful call, if any.
    @Nullable private BuildEventArtifactUploader memoizedValue;
    // Failure cached by the first call, if it threw an IOException.
    @Nullable private IOException exception;

    ThrowingBuildEventArtifactUploaderSupplier(Callable<BuildEventArtifactUploader> callable) {
      this.callable = callable;
    }

    BuildEventArtifactUploader get() throws IOException {
      boolean needsInitialization = memoizedValue == null;
      if (needsInitialization && exception == null) {
        try {
          memoizedValue = callable.call();
        } catch (IOException e) {
          exception = e;
        } catch (Exception e) {
          // Unchecked exceptions propagate as-is; other checked ones are unexpected here.
          Throwables.throwIfUnchecked(e);
          throw new IllegalStateException(e);
        }
      }
      if (memoizedValue != null) {
        if (!needsInitialization) {
          // Each additional caller shares the uploader, so take an extra reference.
          memoizedValue.retain();
        }
        return memoizedValue;
      }
      throw exception;
    }
  }
}
|
src/main/java/com/google/devtools/build/lib/buildeventservice/BuildEventServiceModule.java
|
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.buildeventservice;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Stopwatch;
import com.google.common.base.Strings;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.flogger.GoogleLogger;
import com.google.common.util.concurrent.ForwardingListenableFuture.SimpleForwardingListenableFuture;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.common.util.concurrent.Uninterruptibles;
import com.google.devtools.build.lib.analysis.test.TestConfiguration.TestOptions;
import com.google.devtools.build.lib.authandtls.AuthAndTLSOptions;
import com.google.devtools.build.lib.bugreport.BugReport;
import com.google.devtools.build.lib.buildeventservice.BuildEventServiceOptions.BesUploadMode;
import com.google.devtools.build.lib.buildeventservice.client.BuildEventServiceClient;
import com.google.devtools.build.lib.buildeventstream.AnnounceBuildEventTransportsEvent;
import com.google.devtools.build.lib.buildeventstream.BuildEventArtifactUploader;
import com.google.devtools.build.lib.buildeventstream.BuildEventProtocolOptions;
import com.google.devtools.build.lib.buildeventstream.BuildEventStreamProtos.Aborted.AbortReason;
import com.google.devtools.build.lib.buildeventstream.BuildEventTransport;
import com.google.devtools.build.lib.buildeventstream.BuildEventTransportClosedEvent;
import com.google.devtools.build.lib.buildeventstream.LocalFilesArtifactUploader;
import com.google.devtools.build.lib.buildeventstream.transports.BinaryFormatFileTransport;
import com.google.devtools.build.lib.buildeventstream.transports.BuildEventStreamOptions;
import com.google.devtools.build.lib.buildeventstream.transports.JsonFormatFileTransport;
import com.google.devtools.build.lib.buildeventstream.transports.TextFormatFileTransport;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.events.Reporter;
import com.google.devtools.build.lib.network.ConnectivityStatus;
import com.google.devtools.build.lib.network.ConnectivityStatus.Status;
import com.google.devtools.build.lib.network.ConnectivityStatusProvider;
import com.google.devtools.build.lib.profiler.AutoProfiler;
import com.google.devtools.build.lib.profiler.GoogleAutoProfilerUtils;
import com.google.devtools.build.lib.runtime.BlazeModule;
import com.google.devtools.build.lib.runtime.BuildEventArtifactUploaderFactory;
import com.google.devtools.build.lib.runtime.BuildEventStreamer;
import com.google.devtools.build.lib.runtime.CommandEnvironment;
import com.google.devtools.build.lib.runtime.CommonCommandOptions;
import com.google.devtools.build.lib.runtime.CountingArtifactGroupNamer;
import com.google.devtools.build.lib.runtime.SynchronizedOutputStream;
import com.google.devtools.build.lib.runtime.TargetSummaryPublisher;
import com.google.devtools.build.lib.server.FailureDetails.BuildProgress;
import com.google.devtools.build.lib.server.FailureDetails.FailureDetail;
import com.google.devtools.build.lib.util.AbruptExitException;
import com.google.devtools.build.lib.util.DetailedExitCode;
import com.google.devtools.build.lib.util.ExitCode;
import com.google.devtools.build.lib.util.io.OutErr;
import com.google.devtools.common.options.OptionsBase;
import com.google.devtools.common.options.OptionsParsingException;
import com.google.devtools.common.options.OptionsParsingResult;
import com.google.protobuf.util.Timestamps;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.time.Duration;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import javax.annotation.Nullable;
/**
* Module responsible for the Build Event Transport (BEP) and Build Event Service (BES)
* functionality.
*/
public abstract class BuildEventServiceModule<OptionsT extends BuildEventServiceOptions>
extends BlazeModule {
private static final GoogleLogger logger = GoogleLogger.forEnclosingClass();
/**
* TargetComplete BEP events scale with the value of --runs_per_tests, thus setting a very large
* value for can result in BEP events that are too big for BES to handle.
*/
@VisibleForTesting static final int RUNS_PER_TEST_LIMIT = 100000;
private BuildEventProtocolOptions bepOptions;
private AuthAndTLSOptions authTlsOptions;
private BuildEventStreamOptions besStreamOptions;
private boolean isRunsPerTestOverTheLimit;
private BuildEventArtifactUploaderFactory uploaderFactoryToCleanup;
/**
* Holds the close futures for the upload of each transport with timeouts attached to them using
* {@link #constructCloseFuturesMapWithTimeouts(ImmutableMap)} obtained from {@link
* BuildEventTransport#getTimeout()}.
*/
private ImmutableMap<BuildEventTransport, ListenableFuture<Void>> closeFuturesWithTimeoutsMap =
ImmutableMap.of();
/**
* Holds the half-close futures for the upload of each transport with timeouts attached to them
* using {@link #constructCloseFuturesMapWithTimeouts(ImmutableMap)} obtained from {@link
* BuildEventTransport#getTimeout()}.
*
* <p>The completion of the half-close indicates that the client has sent all of the data to the
* server and is just waiting for acknowledgement. The client must still keep the data buffered
* locally in case acknowledgement fails.
*/
private ImmutableMap<BuildEventTransport, ListenableFuture<Void>>
halfCloseFuturesWithTimeoutsMap = ImmutableMap.of();
private BesUploadMode previousUploadMode = BesUploadMode.WAIT_FOR_UPLOAD_COMPLETE;
// TODO(lpino): Use Optional instead of @Nullable for the members below.
@Nullable private OutErr outErr;
@Nullable private ImmutableSet<BuildEventTransport> bepTransports;
@Nullable private String buildRequestId;
@Nullable private String invocationId;
@Nullable private Reporter reporter;
@Nullable private BuildEventStreamer streamer;
@Nullable private ConnectivityStatusProvider connectivityProvider;
private static final String CONNECTIVITY_CACHE_KEY = "BES";
protected OptionsT besOptions;
protected void reportCommandLineError(EventHandler commandLineReporter, Exception exception) {
// Don't hide unchecked exceptions as part of the error reporting.
Throwables.throwIfUnchecked(exception);
commandLineReporter.handle(Event.error(exception.getMessage()));
}
/** Maximum duration Bazel waits for the previous invocation to finish before cancelling it. */
protected Duration getMaxWaitForPreviousInvocation() {
return Duration.ofSeconds(5);
}
/** Report errors in the command line and possibly fail the build. */
private void reportError(
EventHandler commandLineReporter,
ModuleEnvironment moduleEnvironment,
String msg,
Exception exception,
BuildProgress.Code besCode) {
// Don't hide unchecked exceptions as part of the error reporting.
Throwables.throwIfUnchecked(exception);
logger.atSevere().withCause(exception).log("%s", msg);
reportCommandLineError(commandLineReporter, exception);
moduleEnvironment.exit(createAbruptExitException(exception, msg, besCode));
}
@Override
public Iterable<Class<? extends OptionsBase>> getCommonCommandOptions() {
return ImmutableList.of(
optionsClass(),
AuthAndTLSOptions.class,
BuildEventStreamOptions.class,
BuildEventProtocolOptions.class);
}
// Resets the maps tracking the state of closing/half-closing BES transports.
private void resetPendingUploads() {
closeFuturesWithTimeoutsMap = ImmutableMap.of();
halfCloseFuturesWithTimeoutsMap = ImmutableMap.of();
}
// Cancels and interrupts any in-flight threads closing BES transports, then resets the maps
// tracking in-flight close operations.
private void cancelAndResetPendingUploads() {
closeFuturesWithTimeoutsMap
.values()
.forEach(closeFuture -> closeFuture.cancel(/*mayInterruptIfRunning=*/ true));
resetPendingUploads();
}
private static boolean isTimeoutException(ExecutionException e) {
return e.getCause() instanceof TimeoutException;
}
private void waitForPreviousInvocation(boolean isShutdown) {
if (closeFuturesWithTimeoutsMap.isEmpty()) {
return;
}
ConnectivityStatus status = connectivityProvider.getStatus(CONNECTIVITY_CACHE_KEY);
if (status.status != ConnectivityStatus.Status.OK) {
reporter.handle(
Event.info(
String.format(
"The Build Event Protocol encountered a connectivity problem: %s. Cancelling"
+ " previous background uploads",
status)));
cancelAndResetPendingUploads();
return;
}
ImmutableMap<BuildEventTransport, ListenableFuture<Void>> waitingFutureMap = null;
boolean cancelCloseFutures = true;
switch (previousUploadMode) {
case FULLY_ASYNC:
waitingFutureMap =
isShutdown ? closeFuturesWithTimeoutsMap : halfCloseFuturesWithTimeoutsMap;
cancelCloseFutures = false;
break;
case WAIT_FOR_UPLOAD_COMPLETE:
case NOWAIT_FOR_UPLOAD_COMPLETE:
waitingFutureMap = closeFuturesWithTimeoutsMap;
cancelCloseFutures = true;
break;
}
Stopwatch stopwatch = Stopwatch.createStarted();
try {
// TODO(b/234994611): It would be better to report before we wait, but the current
// infrastructure does not support that. At least we can report it afterwards.
Uninterruptibles.getUninterruptibly(
Futures.allAsList(waitingFutureMap.values()),
getMaxWaitForPreviousInvocation().toMillis(),
TimeUnit.MILLISECONDS);
long waitedMillis = stopwatch.elapsed().toMillis();
if (waitedMillis > 100) {
reporter.handle(
Event.info(
String.format(
"Waited for the background upload of the Build Event Protocol for "
+ "%d.%03d seconds.",
waitedMillis / 1000, waitedMillis % 1000)));
}
} catch (TimeoutException exception) {
long waitedMillis = stopwatch.elapsed().toMillis();
String msg =
String.format(
"The background upload of the Build Event Protocol for the previous invocation "
+ "failed to complete in %d.%03d seconds. "
+ "Cancelling and starting a new invocation...",
waitedMillis / 1000, waitedMillis % 1000);
reporter.handle(Event.warn(msg));
logger.atWarning().withCause(exception).log("%s", msg);
cancelCloseFutures = true;
} catch (ExecutionException e) {
String msg;
// Futures.withTimeout wraps the TimeoutException in an ExecutionException when the future
// times out.
if (isTimeoutException(e)) {
msg =
"The background upload of the Build Event Protocol for the previous invocation "
+ "failed due to a network timeout. Ignoring the failure and starting a new "
+ "invocation...";
} else {
msg =
String.format(
"The background upload of the Build Event Protocol for the previous invocation "
+ "failed with the following exception: '%s'. "
+ "Ignoring the failure and starting a new invocation...",
e.getMessage());
}
reporter.handle(Event.warn(msg));
logger.atWarning().withCause(e).log("%s", msg);
cancelCloseFutures = true;
} finally {
if (cancelCloseFutures) {
cancelAndResetPendingUploads();
} else {
resetPendingUploads();
}
}
}
@Override
public void beforeCommand(CommandEnvironment cmdEnv) {
this.invocationId = cmdEnv.getCommandId().toString();
this.buildRequestId = cmdEnv.getBuildRequestId();
this.reporter = cmdEnv.getReporter();
this.connectivityProvider =
Preconditions.checkNotNull(
cmdEnv.getRuntime().getBlazeModule(ConnectivityStatusProvider.class),
"No ConnectivityStatusProvider found in modules list");
OptionsParsingResult parsingResult = cmdEnv.getOptions();
this.besOptions = Preconditions.checkNotNull(parsingResult.getOptions(optionsClass()));
this.bepOptions =
Preconditions.checkNotNull(parsingResult.getOptions(BuildEventProtocolOptions.class));
this.authTlsOptions =
Preconditions.checkNotNull(parsingResult.getOptions(AuthAndTLSOptions.class));
this.besStreamOptions =
Preconditions.checkNotNull(parsingResult.getOptions(BuildEventStreamOptions.class));
this.isRunsPerTestOverTheLimit =
parsingResult.getOptions(TestOptions.class) != null
&& parsingResult.getOptions(TestOptions.class).runsPerTest.stream()
.anyMatch(
(perLabelOptions) ->
Integer.parseInt(Iterables.getOnlyElement(perLabelOptions.getOptions()))
> RUNS_PER_TEST_LIMIT);
ConnectivityStatus status = connectivityProvider.getStatus(CONNECTIVITY_CACHE_KEY);
String buildEventUploadStrategy =
status.status.equals(ConnectivityStatus.Status.OK)
? this.bepOptions.buildEventUploadStrategy
: "local";
CountingArtifactGroupNamer artifactGroupNamer = new CountingArtifactGroupNamer();
// We need to wait for the previous invocation before we check the list of allowed commands to
// allow completing previous runs using BES, for example:
// bazel build (..run with async BES..)
// bazel info <-- Doesn't run with BES unless we wait before checking {@code allowedCommands}.
boolean commandIsShutdown = "shutdown".equals(cmdEnv.getCommandName());
waitForPreviousInvocation(commandIsShutdown);
if (commandIsShutdown && uploaderFactoryToCleanup != null) {
uploaderFactoryToCleanup.shutdown();
}
if (!allowedCommands(besOptions).contains(cmdEnv.getCommandName())) {
// Exit early if the running command isn't supported.
return;
}
BuildEventArtifactUploaderFactory uploaderFactory =
cmdEnv
.getRuntime()
.getBuildEventArtifactUploaderFactoryMap()
.select(buildEventUploadStrategy);
ThrowingBuildEventArtifactUploaderSupplier uploaderSupplier =
new ThrowingBuildEventArtifactUploaderSupplier(() -> uploaderFactory.create(cmdEnv));
this.uploaderFactoryToCleanup = uploaderFactory;
try {
bepTransports = createBepTransports(cmdEnv, uploaderSupplier, artifactGroupNamer);
} catch (IOException e) {
cmdEnv
.getBlazeModuleEnvironment()
.exit(
createAbruptExitException(
e,
"Could not create BEP transports.",
BuildProgress.Code.BES_INITIALIZATION_ERROR));
return;
}
if (bepTransports.isEmpty()) {
// Exit early if there are no transports to stream to.
return;
}
if (bepOptions.publishTargetSummary) {
cmdEnv.getEventBus().register(new TargetSummaryPublisher(cmdEnv.getEventBus()));
}
streamer =
new BuildEventStreamer.Builder()
.buildEventTransports(bepTransports)
.besStreamOptions(besStreamOptions)
.publishTargetSummaries(bepOptions.publishTargetSummary)
.artifactGroupNamer(artifactGroupNamer)
.oomMessage(parsingResult.getOptions(CommonCommandOptions.class).oomMessage)
.build();
cmdEnv.getEventBus().register(streamer);
registerOutAndErrOutputStreams();
// This event should probably be posted in a more general place (e.g. {@link BuildTool};
// however, so far the BES module is the only module that requires extra work after the build
// so we post it here until it's needed for other modules.
reporter.post(new AnnounceBuildEventTransportsEvent(bepTransports));
}
private void registerOutAndErrOutputStreams() {
int bufferSize = besOptions.besOuterrBufferSize;
int chunkSize = besOptions.besOuterrChunkSize;
SynchronizedOutputStream out = new SynchronizedOutputStream(bufferSize, chunkSize);
SynchronizedOutputStream err = new SynchronizedOutputStream(bufferSize, chunkSize);
this.outErr = OutErr.create(out, err);
streamer.registerOutErrProvider(
new BuildEventStreamer.OutErrProvider() {
@Override
public Iterable<String> getOut() {
return out.readAndReset();
}
@Override
public Iterable<String> getErr() {
return err.readAndReset();
}
});
err.registerStreamer(streamer);
out.registerStreamer(streamer);
}
@Override
public OutErr getOutputListener() {
return outErr;
}
private void forceShutdownBuildEventStreamer(AbortReason reason) {
streamer.closeOnAbort(reason);
closeFuturesWithTimeoutsMap =
constructCloseFuturesMapWithTimeouts(streamer.getCloseFuturesMap());
try {
logger.atInfo().log("Closing pending build event transports");
Uninterruptibles.getUninterruptibly(Futures.allAsList(closeFuturesWithTimeoutsMap.values()));
} catch (ExecutionException e) {
logger.atSevere().withCause(e).log("Failed to close a build event transport");
} finally {
cancelAndResetPendingUploads();
}
}
@Override
public void blazeShutdownOnCrash(DetailedExitCode exitCode) {
if (streamer != null) {
logger.atWarning().log("Attempting to close BES streamer on crash");
forceShutdownBuildEventStreamer(
exitCode.getExitCode().equals(ExitCode.OOM_ERROR)
? AbortReason.OUT_OF_MEMORY
: AbortReason.INTERNAL);
uploaderFactoryToCleanup.shutdown();
}
}
@Override
public void blazeShutdown() {
if (closeFuturesWithTimeoutsMap.isEmpty()) {
return;
}
try {
Uninterruptibles.getUninterruptibly(
Futures.allAsList(closeFuturesWithTimeoutsMap.values()),
getMaxWaitForPreviousInvocation().getSeconds(),
TimeUnit.SECONDS);
} catch (TimeoutException | ExecutionException exception) {
logger.atWarning().withCause(exception).log(
"Encountered Exception when closing BEP transports in Blaze's shutting down sequence");
} finally {
cancelAndResetPendingUploads();
if (uploaderFactoryToCleanup != null) {
uploaderFactoryToCleanup.shutdown();
}
}
}
private void waitForBuildEventTransportsToClose(
Map<BuildEventTransport, ListenableFuture<Void>> transportFutures,
boolean besUploadModeIsSynchronous)
throws AbruptExitException {
final ScheduledExecutorService executor =
Executors.newSingleThreadScheduledExecutor(
new ThreadFactoryBuilder().setNameFormat("bes-notify-ui-%d").build());
ScheduledFuture<?> waitMessageFuture = null;
try {
// Notify the UI handler when a transport finished closing.
transportFutures.forEach(
(bepTransport, closeFuture) ->
closeFuture.addListener(
() -> {
reporter.post(new BuildEventTransportClosedEvent(bepTransport));
},
executor));
try (AutoProfiler p = GoogleAutoProfilerUtils.logged("waiting for BES close")) {
Uninterruptibles.getUninterruptibly(Futures.allAsList(transportFutures.values()));
}
} catch (ExecutionException e) {
// Futures.withTimeout wraps the TimeoutException in an ExecutionException when the future
// times out.
if (isTimeoutException(e)) {
throw createAbruptExitException(
e,
"The Build Event Protocol upload timed out.",
BuildProgress.Code.BES_UPLOAD_TIMEOUT_ERROR);
}
Throwables.throwIfInstanceOf(e.getCause(), AbruptExitException.class);
throw new RuntimeException(
String.format(
"Unexpected Exception '%s' when closing BEP transports, this is a bug.",
e.getCause().getMessage()),
e);
} finally {
if (besUploadModeIsSynchronous) {
cancelAndResetPendingUploads();
}
if (waitMessageFuture != null) {
waitMessageFuture.cancel(/* mayInterruptIfRunning= */ true);
}
executor.shutdown();
}
}
private static ImmutableMap<BuildEventTransport, ListenableFuture<Void>>
constructCloseFuturesMapWithTimeouts(
ImmutableMap<BuildEventTransport, ListenableFuture<Void>> bepTransportToCloseFuturesMap) {
ImmutableMap.Builder<BuildEventTransport, ListenableFuture<Void>> builder =
ImmutableMap.builder();
bepTransportToCloseFuturesMap.forEach(
(bepTransport, closeFuture) -> {
final ListenableFuture<Void> closeFutureWithTimeout;
if (bepTransport.getTimeout().isZero() || bepTransport.getTimeout().isNegative()) {
closeFutureWithTimeout = closeFuture;
} else {
final ScheduledExecutorService timeoutExecutor =
Executors.newSingleThreadScheduledExecutor(
new ThreadFactoryBuilder()
.setNameFormat("bes-close-" + bepTransport.name() + "-%d")
.build());
// Make sure to avoid propagating the cancellation to the enclosing future since
// we handle cancellation ourselves in this class.
// Futures.withTimeout may cancel the enclosing future when the timeout is
// reached.
final ListenableFuture<Void> enclosingFuture =
Futures.nonCancellationPropagating(closeFuture);
ListenableFuture<Void> timeoutFuture =
Futures.withTimeout(
enclosingFuture,
bepTransport.getTimeout().toMillis(),
TimeUnit.MILLISECONDS,
timeoutExecutor);
timeoutFuture.addListener(timeoutExecutor::shutdown, MoreExecutors.directExecutor());
// Cancellation is not propagated to the `closeFuture` for the reasons above. But in
// order to cancel the returned future by our explicit mechanism elsewhere in this
// class, we need to delegate the `cancel` to `closeFuture` so that cancellation
// from Futures.withTimeout is ignored and cancellation from our mechanism is properly
// handled.
closeFutureWithTimeout =
new SimpleForwardingListenableFuture<>(timeoutFuture) {
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
return closeFuture.cancel(mayInterruptIfRunning);
}
};
}
builder.put(bepTransport, closeFutureWithTimeout);
});
return builder.buildOrThrow();
}
private void closeBepTransports() throws AbruptExitException {
previousUploadMode = besOptions.besUploadMode;
closeFuturesWithTimeoutsMap =
constructCloseFuturesMapWithTimeouts(streamer.getCloseFuturesMap());
halfCloseFuturesWithTimeoutsMap =
constructCloseFuturesMapWithTimeouts(streamer.getHalfClosedMap());
boolean besUploadModeIsSynchronous =
besOptions.besUploadMode == BesUploadMode.WAIT_FOR_UPLOAD_COMPLETE;
Map<BuildEventTransport, ListenableFuture<Void>> blockingTransportFutures = new HashMap<>();
for (Map.Entry<BuildEventTransport, ListenableFuture<Void>> entry :
closeFuturesWithTimeoutsMap.entrySet()) {
BuildEventTransport bepTransport = entry.getKey();
if (!bepTransport.mayBeSlow() || besUploadModeIsSynchronous) {
blockingTransportFutures.put(bepTransport, entry.getValue());
} else {
// When running asynchronously notify the UI immediately since we won't wait for the
// uploads to close.
reporter.post(new BuildEventTransportClosedEvent(bepTransport));
}
}
if (!blockingTransportFutures.isEmpty()) {
waitForBuildEventTransportsToClose(blockingTransportFutures, besUploadModeIsSynchronous);
}
}
@Override
public void afterCommand() throws AbruptExitException {
if (streamer != null) {
if (!streamer.isClosed()) {
// This should not occur, but close with an internal error if a {@link BuildEventStreamer}
// bug manifests as an unclosed streamer.
logger.atWarning().log("Attempting to close BES streamer after command");
reporter.handle(Event.warn("BES was not properly closed"));
forceShutdownBuildEventStreamer(AbortReason.INTERNAL);
}
closeBepTransports();
if (!Strings.isNullOrEmpty(besOptions.besBackend)) {
constructAndMaybeReportInvocationIdUrl();
} else if (!bepTransports.isEmpty()) {
reporter.handle(Event.info("Build Event Protocol files produced successfully."));
}
}
// besStreamOptions can be null if we are crashing. Don't crash here too.
if (besStreamOptions != null && !besStreamOptions.keepBackendConnections) {
clearBesClient();
} else if (besStreamOptions == null) {
BugReport.sendBugReport(new NullPointerException("besStreamOptions null: in a crash?"));
}
}
@Override
public void commandComplete() {
this.outErr = null;
this.bepTransports = null;
this.invocationId = null;
this.buildRequestId = null;
this.reporter = null;
this.streamer = null;
}
private void constructAndMaybeReportInvocationIdUrl() {
if (!getInvocationIdPrefix().isEmpty()) {
reporter.handle(
Event.info("Streaming build results to: " + getInvocationIdPrefix() + invocationId));
}
}
private void constructAndMaybeReportBuildRequestIdUrl() {
if (!getBuildRequestIdPrefix().isEmpty()) {
reporter.handle(
Event.info(
"See "
+ getBuildRequestIdPrefix()
+ buildRequestId
+ " for more information about your request."));
}
}
private void logIds() {
logger.atInfo().log(
"Streaming Build Event Protocol to '%s' with build_request_id: '%s'"
+ " and invocation_id: '%s'",
besOptions.besBackend, buildRequestId, invocationId);
}
@Nullable
private BuildEventServiceTransport createBesTransport(
CommandEnvironment cmdEnv,
ThrowingBuildEventArtifactUploaderSupplier uploaderSupplier,
CountingArtifactGroupNamer artifactGroupNamer)
throws IOException {
if (Strings.isNullOrEmpty(besOptions.besBackend)) {
clearBesClient();
return null;
}
if (isRunsPerTestOverTheLimit) {
String msg =
String.format(
"The value of --runs_per_test is bigger than %d and it will produce build events "
+ "that are too big for the Build Event Service to handle.",
RUNS_PER_TEST_LIMIT);
reportError(
reporter,
cmdEnv.getBlazeModuleEnvironment(),
msg,
new OptionsParsingException(msg),
BuildProgress.Code.BES_RUNS_PER_TEST_LIMIT_UNSUPPORTED);
return null;
}
logIds();
ConnectivityStatus status = connectivityProvider.getStatus(CONNECTIVITY_CACHE_KEY);
if (status.status != Status.OK) {
clearBesClient();
String message =
String.format(
"Build Event Service uploads disabled due to a connectivity problem: %s", status);
reporter.handle(Event.warn(message));
logger.atWarning().log("%s", message);
return null;
}
final BuildEventServiceClient besClient;
try {
besClient = getBesClient(besOptions, authTlsOptions);
} catch (IOException | OptionsParsingException e) {
reportError(
reporter,
cmdEnv.getBlazeModuleEnvironment(),
e.getMessage(),
e,
BuildProgress.Code.BES_INITIALIZATION_ERROR);
return null;
}
BuildEventServiceProtoUtil besProtoUtil =
new BuildEventServiceProtoUtil.Builder()
.buildRequestId(buildRequestId)
.invocationId(invocationId)
.projectId(besOptions.instanceName)
.commandName(cmdEnv.getCommandName())
.keywords(getBesKeywords(besOptions, cmdEnv.getRuntime().getStartupOptionsProvider()))
.build();
return new BuildEventServiceTransport.Builder()
.localFileUploader(uploaderSupplier.get())
.besClient(besClient)
.besOptions(besOptions)
.besProtoUtil(besProtoUtil)
.artifactGroupNamer(artifactGroupNamer)
.bepOptions(bepOptions)
.clock(cmdEnv.getRuntime().getClock())
.eventBus(cmdEnv.getEventBus())
.commandStartTime(Timestamps.fromMillis(cmdEnv.getCommandStartTime()))
.build();
}
private ImmutableSet<BuildEventTransport> createBepTransports(
CommandEnvironment cmdEnv,
ThrowingBuildEventArtifactUploaderSupplier uploaderSupplier,
CountingArtifactGroupNamer artifactGroupNamer)
throws IOException {
ImmutableSet.Builder<BuildEventTransport> bepTransportsBuilder = new ImmutableSet.Builder<>();
if (!Strings.isNullOrEmpty(besStreamOptions.buildEventTextFile)) {
try {
BufferedOutputStream bepTextOutputStream =
new BufferedOutputStream(
Files.newOutputStream(Paths.get(besStreamOptions.buildEventTextFile)));
BuildEventArtifactUploader localFileUploader =
besStreamOptions.buildEventTextFilePathConversion
? uploaderSupplier.get()
: new LocalFilesArtifactUploader();
bepTransportsBuilder.add(
new TextFormatFileTransport(
bepTextOutputStream, bepOptions, localFileUploader, artifactGroupNamer));
} catch (IOException exception) {
// TODO(b/125216340): Consider making this a warning instead of an error once the
// associated bug has been resolved.
reportError(
reporter,
cmdEnv.getBlazeModuleEnvironment(),
"Unable to write to '"
+ besStreamOptions.buildEventTextFile
+ "'. Omitting --build_event_text_file.",
exception,
BuildProgress.Code.BES_LOCAL_WRITE_ERROR);
}
}
if (!Strings.isNullOrEmpty(besStreamOptions.buildEventBinaryFile)) {
try {
BufferedOutputStream bepBinaryOutputStream =
new BufferedOutputStream(
Files.newOutputStream(Paths.get(besStreamOptions.buildEventBinaryFile)));
BuildEventArtifactUploader localFileUploader =
besStreamOptions.buildEventBinaryFilePathConversion
? uploaderSupplier.get()
: new LocalFilesArtifactUploader();
bepTransportsBuilder.add(
new BinaryFormatFileTransport(
bepBinaryOutputStream, bepOptions, localFileUploader, artifactGroupNamer));
} catch (IOException exception) {
// TODO(b/125216340): Consider making this a warning instead of an error once the
// associated bug has been resolved.
reportError(
reporter,
cmdEnv.getBlazeModuleEnvironment(),
"Unable to write to '"
+ besStreamOptions.buildEventBinaryFile
+ "'. Omitting --build_event_binary_file.",
exception,
BuildProgress.Code.BES_LOCAL_WRITE_ERROR);
}
}
if (!Strings.isNullOrEmpty(besStreamOptions.buildEventJsonFile)) {
try {
BufferedOutputStream bepJsonOutputStream =
new BufferedOutputStream(
Files.newOutputStream(Paths.get(besStreamOptions.buildEventJsonFile)));
BuildEventArtifactUploader localFileUploader =
besStreamOptions.buildEventJsonFilePathConversion
? uploaderSupplier.get()
: new LocalFilesArtifactUploader();
bepTransportsBuilder.add(
new JsonFormatFileTransport(
bepJsonOutputStream, bepOptions, localFileUploader, artifactGroupNamer));
} catch (IOException exception) {
// TODO(b/125216340): Consider making this a warning instead of an error once the
// associated bug has been resolved.
reportError(
reporter,
cmdEnv.getBlazeModuleEnvironment(),
"Unable to write to '"
+ besStreamOptions.buildEventJsonFile
+ "'. Omitting --build_event_json_file.",
exception,
BuildProgress.Code.BES_LOCAL_WRITE_ERROR);
}
}
BuildEventServiceTransport besTransport =
createBesTransport(cmdEnv, uploaderSupplier, artifactGroupNamer);
if (besTransport != null) {
constructAndMaybeReportInvocationIdUrl();
constructAndMaybeReportBuildRequestIdUrl();
bepTransportsBuilder.add(besTransport);
}
return bepTransportsBuilder.build();
}
private static AbruptExitException createAbruptExitException(
Exception e, String message, BuildProgress.Code besCode) {
return new AbruptExitException(
DetailedExitCode.of(
FailureDetail.newBuilder()
.setMessage(message + " " + e.getMessage())
.setBuildProgress(BuildProgress.newBuilder().setCode(besCode).build())
.build()),
e);
}
protected abstract Class<OptionsT> optionsClass();
protected abstract BuildEventServiceClient getBesClient(
OptionsT besOptions, AuthAndTLSOptions authAndTLSOptions)
throws IOException, OptionsParsingException;
protected abstract void clearBesClient();
protected abstract Set<String> allowedCommands(OptionsT besOptions);
protected Set<String> getBesKeywords(
OptionsT besOptions, @Nullable OptionsParsingResult startupOptionsProvider) {
return besOptions.besKeywords.stream()
.map(keyword -> "user_keyword=" + keyword)
.collect(ImmutableSet.toImmutableSet());
}
/** A prefix used when printing the invocation ID in the command line */
protected abstract String getInvocationIdPrefix();
/** A prefix used when printing the build request ID in the command line */
protected abstract String getBuildRequestIdPrefix();
// TODO(b/115961387): This method shouldn't exist. It only does because some tests are relying on
// the transport creation logic of this module directly.
@VisibleForTesting
ImmutableSet<BuildEventTransport> getBepTransports() {
return bepTransports;
}
private static class ThrowingBuildEventArtifactUploaderSupplier {
private final Callable<BuildEventArtifactUploader> callable;
@Nullable private BuildEventArtifactUploader memoizedValue;
@Nullable private IOException exception;
ThrowingBuildEventArtifactUploaderSupplier(Callable<BuildEventArtifactUploader> callable) {
this.callable = callable;
}
BuildEventArtifactUploader get() throws IOException {
boolean needsInitialization = memoizedValue == null;
if (needsInitialization && exception == null) {
try {
memoizedValue = callable.call();
} catch (IOException e) {
exception = e;
} catch (Exception e) {
Throwables.throwIfUnchecked(e);
throw new IllegalStateException(e);
}
}
if (memoizedValue != null) {
if (!needsInitialization) {
memoizedValue.retain();
}
return memoizedValue;
}
throw exception;
}
}
}
|
When in sync mode, log the invocation BES upload is waiting on to finish
Makes it easier to attribute where time was spent when grepping logs.
PiperOrigin-RevId: 425676197
|
src/main/java/com/google/devtools/build/lib/buildeventservice/BuildEventServiceModule.java
|
When in sync mode, log the invocation BES upload is waiting on to finish
|
|
Java
|
apache-2.0
|
bd872c6269c20c5fc8ae046c08dde13dbf9b35ff
| 0
|
dbeaver/dbeaver,Sargul/dbeaver,serge-rider/dbeaver,serge-rider/dbeaver,serge-rider/dbeaver,Sargul/dbeaver,Sargul/dbeaver,dbeaver/dbeaver,dbeaver/dbeaver,Sargul/dbeaver,serge-rider/dbeaver,dbeaver/dbeaver,Sargul/dbeaver
|
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2017 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ui.dialogs;
import org.eclipse.jface.dialogs.ControlEnableState;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.jface.dialogs.IDialogPage;
import org.eclipse.jface.dialogs.TitleAreaDialog;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.jface.operation.ModalContext;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.wizard.IWizard;
import org.eclipse.jface.wizard.IWizardContainer;
import org.eclipse.jface.wizard.IWizardPage;
import org.eclipse.jface.wizard.ProgressMonitorPart;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.SashForm;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.*;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.IWorkbenchWizard;
import org.jkiss.dbeaver.ui.ICompositeDialogPage;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.utils.ArrayUtils;
import java.lang.reflect.InvocationTargetException;
/**
* MultiPageWizardDialog
*/
public class MultiPageWizardDialog extends TitleAreaDialog implements IWizardContainer {
private IWizard wizard;
private Composite pageArea;
private Tree pagesTree;
private IDialogPage prevPage;
private ProgressMonitorPart monitorPart;
private SashForm wizardSash;
private volatile int runningOperations = 0;
public MultiPageWizardDialog(IWorkbenchWindow window, IWizard wizard)
{
this(window, wizard, null);
}
public MultiPageWizardDialog(IWorkbenchWindow window, IWizard wizard, IStructuredSelection selection)
{
super(window.getShell());
this.wizard = wizard;
this.wizard.setContainer(this);
// Initialize wizard
if (wizard instanceof IWorkbenchWizard) {
if (selection == null) {
if (window.getSelectionService().getSelection() instanceof IStructuredSelection) {
selection = (IStructuredSelection)window.getSelectionService().getSelection();
}
}
((IWorkbenchWizard)wizard).init(window.getWorkbench(), selection);
}
}
public IWizard getWizard()
{
return wizard;
}
@Override
protected boolean isResizable()
{
return true;
}
@Override
protected int getShellStyle()
{
return SWT.TITLE | SWT.MAX | SWT.RESIZE | SWT.APPLICATION_MODAL;
}
@Override
protected Control createContents(Composite parent)
{
Control contents = super.createContents(parent);
updateButtons();
return contents;
}
@Override
protected Control createDialogArea(Composite parent)
{
Composite composite = (Composite)super.createDialogArea(parent);
wizard.addPages();
wizardSash = new SashForm(composite, SWT.HORIZONTAL);
wizardSash.setLayoutData(new GridData(GridData.FILL_BOTH));
pagesTree = new Tree(wizardSash, SWT.SINGLE);
pagesTree.setLayoutData(new GridData(GridData.FILL_BOTH));
Composite pageContainer = UIUtils.createPlaceholder(wizardSash, 2);
// Vertical separator
new Label(pageContainer, SWT.SEPARATOR | SWT.VERTICAL)
.setLayoutData(new GridData(SWT.LEFT, SWT.FILL, false, true));
pageArea = UIUtils.createPlaceholder(pageContainer, 1);
GridData gd = new GridData(GridData.FILL_BOTH);
pageArea.setLayoutData(gd);
pageArea.setLayout(new GridLayout(1, true));
wizardSash.setWeights(new int[]{300, 700});
Point maxSize = new Point(0, 0);
IWizardPage[] pages = wizard.getPages();
for (IWizardPage page : pages) {
addPage(null, page, maxSize);
}
// gd = (GridData) pageArea.getLayoutData();
// gd.widthHint = 500;
// gd.heightHint = 400;
pagesTree.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e)
{
changePage();
}
});
// Select first page
pagesTree.select(pagesTree.getItem(0));
changePage();
// Set title and image from first page
IDialogPage firstPage = (IDialogPage) pagesTree.getItem(0).getData();
setTitle(firstPage.getTitle());
setTitleImage(firstPage.getImage());
setMessage(firstPage.getMessage());
// Horizontal separator
new Label(composite, SWT.HORIZONTAL | SWT.SEPARATOR)
.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
// Progress monitor
monitorPart = new ProgressMonitorPart(composite, null, true) {
@Override
public void setCanceled(boolean b) {
super.setCanceled(b);
if (b) {
cancelCurrentOperation();
}
}
};
gd = new GridData(GridData.FILL_HORIZONTAL);
gd.grabExcessHorizontalSpace = true;
gd.horizontalIndent = 20;
gd.verticalIndent = 0;
monitorPart.setLayoutData(gd);
monitorPart.setVisible(false);
return composite;
}
protected void cancelCurrentOperation() {
}
private TreeItem addPage(TreeItem parentItem, IDialogPage page, Point maxSize)
{
TreeItem item = parentItem == null ?
new TreeItem(pagesTree, SWT.NONE) :
new TreeItem(parentItem, SWT.NONE);
item.setText(page.getTitle());
item.setData(page);
// Ad sub pages
if (page instanceof ICompositeDialogPage) {
IDialogPage[] subPages = ((ICompositeDialogPage) page).getSubPages();
if (!ArrayUtils.isEmpty(subPages)) {
for (IDialogPage subPage : subPages) {
addPage(item, subPage, maxSize);
}
item.setExpanded(true);
}
}
return item;
}
private void changePage()
{
pageArea.setRedraw(false);
try {
TreeItem[] selection = pagesTree.getSelection();
if (selection.length != 1) {
return;
}
TreeItem newItem = selection[0];
if (prevPage == newItem.getData()) {
return;
}
GridData gd;
if (prevPage != null) {
gd = (GridData) prevPage.getControl().getLayoutData();
gd.exclude = true;
prevPage.setVisible(false);
if (prevPage instanceof ActiveWizardPage) {
((ActiveWizardPage) prevPage).deactivatePage();
}
}
boolean pageCreated = false;
IDialogPage page = (IDialogPage) newItem.getData();
Control pageControl = page.getControl();
if (pageControl == null) {
// Create page contents
page.createControl(pageArea);
pageControl = page.getControl();
//Point pageSize = pageControl.computeSize(SWT.DEFAULT, SWT.DEFAULT);
//if (pageSize.x > maxSize.x) maxSize.x = pageSize.x;
//if (pageSize.y > maxSize.y) maxSize.y = pageSize.y;
gd = (GridData) pageControl.getLayoutData();
if (gd == null) {
gd = new GridData(GridData.FILL_BOTH);
pageControl.setLayoutData(gd);
}
gd.exclude = false;
pageCreated = true;
}
gd = (GridData) pageControl.getLayoutData();
gd.exclude = false;
page.setVisible(true);
if (page instanceof ActiveWizardPage) {
((ActiveWizardPage) page).activatePage();
}
setTitle(page.getTitle());
setMessage(page.getDescription());
prevPage = page;
pageArea.layout();
if (pageCreated) {
UIUtils.resizeShell(getWizard().getContainer().getShell());
}
} finally {
pageArea.setRedraw(true);
}
}
@Override
protected void buttonPressed(int buttonId)
{
if (buttonId == IDialogConstants.CANCEL_ID) {
getWizard().performCancel();
} else if (buttonId == IDialogConstants.OK_ID) {
if (!getWizard().performFinish()) {
return;
}
}
super.buttonPressed(buttonId);
}
@Override
public IWizardPage getCurrentPage()
{
TreeItem[] selection = pagesTree.getSelection();
if (ArrayUtils.isEmpty(selection)) {
return null;
}
IDialogPage page = (IDialogPage)selection[0].getData();
return page instanceof IWizardPage ? (IWizardPage) page : null;
}
@Override
public void showPage(IWizardPage page)
{
for (TreeItem item : pagesTree.getItems()) {
if (item.getData() == page) {
pagesTree.setSelection(item);
changePage();
break;
}
for (TreeItem child : item.getItems()) {
if (child.getData() == page) {
pagesTree.setSelection(child);
changePage();
return;
}
}
}
}
@Override
public void updateButtons()
{
boolean complete = true;
for (TreeItem item : pagesTree.getItems()) {
if (item.getData() instanceof IWizardPage) {
IWizardPage page = (IWizardPage) item.getData();
if (page.getControl() != null && !page.isPageComplete()) {
complete = false;
break;
}
}
}
Button button = getButton(IDialogConstants.OK_ID);
if (button != null && !button.isDisposed()) {
button.setEnabled(complete);
}
}
@Override
public void updateMessage()
{
}
@Override
public void updateTitleBar()
{
}
@Override
public void updateWindowTitle()
{
}
public boolean close() {
if (runningOperations > 0) {
return false;
}
return super.close();
}
@Override
public void run(boolean fork, boolean cancelable, IRunnableWithProgress runnable) throws InvocationTargetException, InterruptedException
{
// Code copied from WizardDialog
if (monitorPart != null) {
monitorPart.setVisible(true);
monitorPart.layout();
monitorPart.attachToCancelComponent(null);
}
ControlEnableState pageEnableState = ControlEnableState.disable(wizardSash);
ControlEnableState buttonsEnableState = ControlEnableState.disable(getButtonBar());
try {
runningOperations++;
ModalContext.run(runnable, true, monitorPart, getShell().getDisplay());
} finally {
runningOperations--;
buttonsEnableState.restore();
pageEnableState.restore();
if (monitorPart != null) {
monitorPart.done();
monitorPart.setVisible(false);
}
}
}
}
|
plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/dialogs/MultiPageWizardDialog.java
|
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2017 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ui.dialogs;
import org.eclipse.jface.dialogs.ControlEnableState;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.jface.dialogs.IDialogPage;
import org.eclipse.jface.dialogs.TitleAreaDialog;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.jface.operation.ModalContext;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.wizard.IWizard;
import org.eclipse.jface.wizard.IWizardContainer;
import org.eclipse.jface.wizard.IWizardPage;
import org.eclipse.jface.wizard.ProgressMonitorPart;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.SashForm;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.*;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.IWorkbenchWizard;
import org.jkiss.dbeaver.ui.ICompositeDialogPage;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.utils.ArrayUtils;
import java.lang.reflect.InvocationTargetException;
/**
* MultiPageWizardDialog
*/
public class MultiPageWizardDialog extends TitleAreaDialog implements IWizardContainer {

    private IWizard wizard;
    private Composite pageArea;
    private Tree pagesTree;
    // Page that was visible before the current tree selection; hidden/deactivated on page change.
    private IDialogPage prevPage;
    private ProgressMonitorPart monitorPart;
    private SashForm wizardSash;
    // Number of in-flight modal operations; close() is refused while any are running.
    private volatile int runningOperations = 0;

    public MultiPageWizardDialog(IWorkbenchWindow window, IWizard wizard)
    {
        this(window, wizard, null);
    }

    public MultiPageWizardDialog(IWorkbenchWindow window, IWizard wizard, IStructuredSelection selection)
    {
        super(window.getShell());
        this.wizard = wizard;
        this.wizard.setContainer(this);
        // Initialize wizard; for workbench wizards fall back to the window's current selection.
        if (wizard instanceof IWorkbenchWizard) {
            if (selection == null) {
                if (window.getSelectionService().getSelection() instanceof IStructuredSelection) {
                    selection = (IStructuredSelection)window.getSelectionService().getSelection();
                }
            }
            ((IWorkbenchWizard)wizard).init(window.getWorkbench(), selection);
        }
    }

    public IWizard getWizard()
    {
        return wizard;
    }

    @Override
    protected boolean isResizable()
    {
        return true;
    }

    @Override
    protected int getShellStyle()
    {
        return SWT.TITLE | SWT.MAX | SWT.RESIZE | SWT.APPLICATION_MODAL;
    }

    @Override
    protected Control createContents(Composite parent)
    {
        Control contents = super.createContents(parent);
        // OK button enablement depends on page completeness, known only after creation.
        updateButtons();
        return contents;
    }

    @Override
    protected Control createDialogArea(Composite parent)
    {
        Composite composite = (Composite)super.createDialogArea(parent);
        wizard.addPages();
        // Left: tree of all wizard pages. Right: the selected page's contents.
        wizardSash = new SashForm(composite, SWT.HORIZONTAL);
        wizardSash.setLayoutData(new GridData(GridData.FILL_BOTH));
        pagesTree = new Tree(wizardSash, SWT.SINGLE);
        pagesTree.setLayoutData(new GridData(GridData.FILL_BOTH));
        Composite pageContainer = UIUtils.createPlaceholder(wizardSash, 2);
        // Vertical separator
        new Label(pageContainer, SWT.SEPARATOR | SWT.VERTICAL)
            .setLayoutData(new GridData(SWT.LEFT, SWT.FILL, false, true));
        pageArea = UIUtils.createPlaceholder(pageContainer, 1);
        GridData gd = new GridData(GridData.FILL_BOTH);
        pageArea.setLayoutData(gd);
        pageArea.setLayout(new GridLayout(1, true));
        wizardSash.setWeights(new int[]{300, 700});
        Point maxSize = new Point(0, 0);
        IWizardPage[] pages = wizard.getPages();
        for (IWizardPage page : pages) {
            addPage(null, page, maxSize);
        }
        // NOTE(review): fixed width/height hints constrain the page area regardless of
        // page contents - later revisions drop these in favor of natural sizing.
        gd = (GridData) pageArea.getLayoutData();
        gd.widthHint = 500;
        gd.heightHint = 400;
        pagesTree.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e)
            {
                changePage();
            }
        });
        // Select first page
        pagesTree.select(pagesTree.getItem(0));
        changePage();
        // Set title and image from first page
        IDialogPage firstPage = (IDialogPage) pagesTree.getItem(0).getData();
        setTitle(firstPage.getTitle());
        setTitleImage(firstPage.getImage());
        setMessage(firstPage.getMessage());
        // Horizontal separator
        new Label(composite, SWT.HORIZONTAL | SWT.SEPARATOR)
            .setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
        // Progress monitor; cancelling delegates to cancelCurrentOperation() for subclasses.
        monitorPart = new ProgressMonitorPart(composite, null, true) {
            @Override
            public void setCanceled(boolean b) {
                super.setCanceled(b);
                if (b) {
                    cancelCurrentOperation();
                }
            }
        };
        gd = new GridData(GridData.FILL_HORIZONTAL);
        gd.grabExcessHorizontalSpace = true;
        gd.horizontalIndent = 20;
        gd.verticalIndent = 0;
        monitorPart.setLayoutData(gd);
        monitorPart.setVisible(false);
        return composite;
    }

    // Hook for subclasses to cancel the operation running in run(). Default: no-op.
    protected void cancelCurrentOperation() {
    }

    // Adds a page (and recursively its sub-pages for ICompositeDialogPage) to the tree.
    private TreeItem addPage(TreeItem parentItem, IDialogPage page, Point maxSize)
    {
        TreeItem item = parentItem == null ?
            new TreeItem(pagesTree, SWT.NONE) :
            new TreeItem(parentItem, SWT.NONE);
        item.setText(page.getTitle());
        item.setData(page);
        // Add sub pages
        if (page instanceof ICompositeDialogPage) {
            IDialogPage[] subPages = ((ICompositeDialogPage) page).getSubPages();
            if (!ArrayUtils.isEmpty(subPages)) {
                for (IDialogPage subPage : subPages) {
                    addPage(item, subPage, maxSize);
                }
                item.setExpanded(true);
            }
        }
        return item;
    }

    // Shows the page selected in the tree: hides/deactivates the previous page,
    // lazily creates the new page's controls on first visit, and activates it.
    private void changePage()
    {
        pageArea.setRedraw(false);
        try {
            TreeItem[] selection = pagesTree.getSelection();
            if (selection.length != 1) {
                return;
            }
            TreeItem newItem = selection[0];
            if (prevPage == newItem.getData()) {
                // Re-selection of the current page - nothing to do.
                return;
            }
            GridData gd;
            if (prevPage != null) {
                gd = (GridData) prevPage.getControl().getLayoutData();
                gd.exclude = true;
                prevPage.setVisible(false);
                if (prevPage instanceof ActiveWizardPage) {
                    ((ActiveWizardPage) prevPage).deactivatePage();
                }
            }
            boolean pageCreated = false;
            IDialogPage page = (IDialogPage) newItem.getData();
            Control pageControl = page.getControl();
            if (pageControl == null) {
                // Create page contents lazily on first visit.
                page.createControl(pageArea);
                pageControl = page.getControl();
                gd = (GridData) pageControl.getLayoutData();
                if (gd == null) {
                    gd = new GridData(GridData.FILL_BOTH);
                    pageControl.setLayoutData(gd);
                }
                gd.exclude = false;
                pageCreated = true;
            }
            gd = (GridData) pageControl.getLayoutData();
            gd.exclude = false;
            page.setVisible(true);
            if (page instanceof ActiveWizardPage) {
                ((ActiveWizardPage) page).activatePage();
            }
            setTitle(page.getTitle());
            setMessage(page.getDescription());
            prevPage = page;
            pageArea.layout();
            if (pageCreated) {
                // Newly created controls may not fit - let the shell grow.
                UIUtils.resizeShell(getWizard().getContainer().getShell());
            }
        } finally {
            pageArea.setRedraw(true);
        }
    }

    @Override
    protected void buttonPressed(int buttonId)
    {
        if (buttonId == IDialogConstants.CANCEL_ID) {
            getWizard().performCancel();
        } else if (buttonId == IDialogConstants.OK_ID) {
            // Keep the dialog open when the wizard vetoes finishing.
            if (!getWizard().performFinish()) {
                return;
            }
        }
        super.buttonPressed(buttonId);
    }

    @Override
    public IWizardPage getCurrentPage()
    {
        TreeItem[] selection = pagesTree.getSelection();
        if (ArrayUtils.isEmpty(selection)) {
            return null;
        }
        IDialogPage page = (IDialogPage)selection[0].getData();
        return page instanceof IWizardPage ? (IWizardPage) page : null;
    }

    @Override
    public void showPage(IWizardPage page)
    {
        // Search top-level items and their direct children (tree is at most two levels deep).
        for (TreeItem item : pagesTree.getItems()) {
            if (item.getData() == page) {
                pagesTree.setSelection(item);
                changePage();
                break;
            }
            for (TreeItem child : item.getItems()) {
                if (child.getData() == page) {
                    pagesTree.setSelection(child);
                    changePage();
                    return;
                }
            }
        }
    }

    @Override
    public void updateButtons()
    {
        // OK is enabled only when every created wizard page reports completeness;
        // pages whose controls were never created are treated as complete.
        boolean complete = true;
        for (TreeItem item : pagesTree.getItems()) {
            if (item.getData() instanceof IWizardPage) {
                IWizardPage page = (IWizardPage) item.getData();
                if (page.getControl() != null && !page.isPageComplete()) {
                    complete = false;
                    break;
                }
            }
        }
        Button button = getButton(IDialogConstants.OK_ID);
        if (button != null && !button.isDisposed()) {
            button.setEnabled(complete);
        }
    }

    @Override
    public void updateMessage()
    {
    }

    @Override
    public void updateTitleBar()
    {
    }

    @Override
    public void updateWindowTitle()
    {
    }

    // Refuses to close while a modal operation launched via run() is still active.
    public boolean close() {
        if (runningOperations > 0) {
            return false;
        }
        return super.close();
    }

    @Override
    public void run(boolean fork, boolean cancelable, IRunnableWithProgress runnable) throws InvocationTargetException, InterruptedException
    {
        // Code copied from WizardDialog
        if (monitorPart != null) {
            monitorPart.setVisible(true);
            monitorPart.layout();
            monitorPart.attachToCancelComponent(null);
        }
        // Disable the whole UI while the operation runs; restored in the finally block.
        ControlEnableState pageEnableState = ControlEnableState.disable(wizardSash);
        ControlEnableState buttonsEnableState = ControlEnableState.disable(getButtonBar());
        try {
            runningOperations++;
            ModalContext.run(runnable, true, monitorPart, getShell().getDisplay());
        } finally {
            runningOperations--;
            buttonsEnableState.restore();
            pageEnableState.restore();
            if (monitorPart != null) {
                monitorPart.done();
                monitorPart.setVisible(false);
            }
        }
    }
}
|
#3013 Edit connection dialog layout fix
Former-commit-id: 57d4e07858ee48653389c2674dc48949dcefb634
|
plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/dialogs/MultiPageWizardDialog.java
|
#3013 Edit connection dialog layout fix
|
|
Java
|
apache-2.0
|
5f336770309dda6e52489fe13caf0b4968b0018f
| 0
|
openengsb/openengsb,openengsb/openengsb,openengsb/openengsb,openengsb/openengsb,openengsb/openengsb,openengsb/openengsb
|
/**
* Licensed to the Austrian Association for Software Tool Integration (AASTI)
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. The AASTI licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openengsb.ui.admin.organizeImportsPage;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeModel;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.markup.html.form.AjaxButton;
import org.apache.wicket.authorization.strategies.role.annotations.AuthorizeInstantiation;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.panel.FeedbackPanel;
import org.apache.wicket.markup.html.tree.BaseTree;
import org.apache.wicket.markup.html.tree.LinkTree;
import org.apache.wicket.model.CompoundPropertyModel;
import org.apache.wicket.model.StringResourceModel;
import org.apache.wicket.spring.injection.annot.SpringBean;
import org.openengsb.core.api.workflow.RuleBaseException;
import org.openengsb.core.api.workflow.RuleManager;
import org.openengsb.ui.admin.basePage.BasePage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@AuthorizeInstantiation("ROLE_USER")
public class OrganizeImportsPage extends BasePage {

    private static final Logger LOGGER = LoggerFactory.getLogger(OrganizeImportsPage.class);

    @SpringBean
    private RuleManager ruleManager;

    // Backing property for the "importName" text field (CompoundPropertyModel binding).
    private String importName = "";
    private TextField<String> importField;
    private AjaxButton submitButton;
    private AjaxButton deleteButton;
    private FeedbackPanel feedbackPanel;

    @SuppressWarnings("serial")
    public OrganizeImportsPage() {
        TreeModel treeModel = createTreeModel();
        // Clicking a leaf copies the import name into the edit field.
        final LinkTree tree = new LinkTree("tree", treeModel)
        {
            @Override
            protected void onNodeLinkClicked(Object node, BaseTree tree, AjaxRequestTarget target) {
                DefaultMutableTreeNode mnode = (DefaultMutableTreeNode) node;
                if (!mnode.isLeaf()) {
                    return;
                }
                String imp = (String) mnode.getUserObject();
                importName = imp;
                info("");
                target.addComponent(importField);
                target.addComponent(feedbackPanel);
            }
        };
        tree.getTreeState().expandAll();
        add(tree);
        Form<Object> form = new Form<Object>("editForm", new CompoundPropertyModel<Object>(this));
        // Adds the entered import to the rule base; on compilation failure the import is
        // rolled back and the error shown in the feedback panel.
        submitButton = new AjaxButton("submitButton", form) {
            @Override
            protected void onSubmit(AjaxRequestTarget target, Form<?> form) {
                if (importName == null || importName.equals("")) {
                    String message = new StringResourceModel("emptyError", this, null).getString();
                    error(message);
                    target.addComponent(feedbackPanel);
                    return;
                }
                try {
                    ruleManager.addImport(importName);
                    String message = new StringResourceModel("insertedImport", this, null).getString();
                    LOGGER.info("successfully inserted import " + importName);
                    info(importName + " " + message);
                } catch (RuleBaseException e) {
                    // Roll back the partial add so the rule base stays consistent.
                    ruleManager.removeImport(importName);
                    LOGGER.debug("error while saving import " + importName, e);
                    String message = new StringResourceModel("savingError", this, null).getString();
                    error(importName + " " + message + "\n" + e.getLocalizedMessage());
                }
                tree.setModelObject(createTreeModel());
                importName = "";
                target.addComponent(importField);
                target.addComponent(tree);
                target.addComponent(feedbackPanel);
            }
        };
        submitButton.setOutputMarkupId(true);
        form.add(submitButton);
        // Removes the entered import; if rules still depend on it, the import is re-added
        // and a "deletingError" reported, otherwise a "notExistingError" is shown.
        deleteButton = new AjaxButton("deleteButton", form) {
            @Override
            protected void onSubmit(AjaxRequestTarget target, Form<?> form) {
                try {
                    ruleManager.removeImport(importName);
                    String message = new StringResourceModel("deletedImport", this, null).getString();
                    info(importName + " " + message);
                    LOGGER.info("successfully deleted import " + importName);
                } catch (RuleBaseException e) {
                    LOGGER.debug("error while deleting import " + importName, e);
                    if (e.getMessage().startsWith("Rule Compilation error")) {
                        // Deletion broke rule compilation - restore the import.
                        ruleManager.addImport(importName);
                        String message = new StringResourceModel("deletingError", this, null).getString();
                        error(importName + " " + message + "\n" + e.getLocalizedMessage());
                    } else {
                        String message = new StringResourceModel("notExistingError", this, null).getString();
                        error(importName + " " + message);
                    }
                    target.addComponent(feedbackPanel);
                    return;
                }
                tree.setModelObject(createTreeModel());
                importName = "";
                target.addComponent(feedbackPanel);
                target.addComponent(importField);
                target.addComponent(tree);
            }
        };
        deleteButton.setOutputMarkupId(true);
        form.add(deleteButton);
        importField = new TextField<String>("importName");
        importField.setOutputMarkupId(true);
        form.add(importField);
        add(form);
        feedbackPanel = new FeedbackPanel("feedback");
        feedbackPanel.setOutputMarkupId(true);
        add(feedbackPanel);
    }

    /**
     * Builds the tree model shown on the page: a single "Imports" root with one
     * leaf per known import, sorted alphabetically.
     */
    private TreeModel createTreeModel() {
        DefaultMutableTreeNode rootNode = new DefaultMutableTreeNode("Imports");
        // Copy before sorting - the collection returned by the rule manager may be unmodifiable.
        List<String> imports = new ArrayList<String>(ruleManager.listImports());
        Collections.sort(imports);
        for (String imp : imports) {
            rootNode.add(new DefaultMutableTreeNode(imp));
        }
        return new DefaultTreeModel(rootNode);
    }

    public String getImportName() {
        return importName;
    }

    public void setImportName(String importName) {
        this.importName = importName;
    }
}
|
ui/admin/src/main/java/org/openengsb/ui/admin/organizeImportsPage/OrganizeImportsPage.java
|
/**
* Licensed to the Austrian Association for Software Tool Integration (AASTI)
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. The AASTI licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openengsb.ui.admin.organizeImportsPage;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeModel;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.markup.html.form.AjaxButton;
import org.apache.wicket.authorization.strategies.role.annotations.AuthorizeInstantiation;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.panel.FeedbackPanel;
import org.apache.wicket.markup.html.tree.BaseTree;
import org.apache.wicket.markup.html.tree.LinkTree;
import org.apache.wicket.model.CompoundPropertyModel;
import org.apache.wicket.model.StringResourceModel;
import org.apache.wicket.spring.injection.annot.SpringBean;
import org.openengsb.core.api.workflow.RuleBaseException;
import org.openengsb.core.api.workflow.RuleManager;
import org.openengsb.ui.admin.basePage.BasePage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@AuthorizeInstantiation("ROLE_USER")
public class OrganizeImportsPage extends BasePage {

    private static final Logger LOGGER = LoggerFactory.getLogger(OrganizeImportsPage.class);

    @SpringBean
    private RuleManager ruleManager;

    // Backing property for the "importName" text field (CompoundPropertyModel binding).
    private String importName = "";
    private TextField<String> importField;
    private AjaxButton submitButton;
    private AjaxButton deleteButton;
    private FeedbackPanel feedbackPanel;

    @SuppressWarnings("serial")
    public OrganizeImportsPage() {
        TreeModel treeModel = createTreeModel();
        // Clicking a leaf copies the import name into the edit field.
        final LinkTree tree = new LinkTree("tree", treeModel)
        {
            @Override
            protected void onNodeLinkClicked(Object node, BaseTree tree, AjaxRequestTarget target) {
                DefaultMutableTreeNode mnode = (DefaultMutableTreeNode) node;
                if (!mnode.isLeaf()) {
                    return;
                }
                String imp = (String) mnode.getUserObject();
                importName = imp;
                info("");
                target.addComponent(importField);
                target.addComponent(feedbackPanel);
            }
        };
        tree.getTreeState().expandAll();
        add(tree);
        Form<Object> form = new Form<Object>("editForm", new CompoundPropertyModel<Object>(this));
        // Adds the entered import to the rule base; on failure the import is rolled back.
        submitButton = new AjaxButton("submitButton", form) {
            @Override
            protected void onSubmit(AjaxRequestTarget target, Form<?> form) {
                if (importName == null || importName.equals("")) {
                    String message = new StringResourceModel("emptyError", this, null).getString();
                    error(message);
                    target.addComponent(feedbackPanel);
                    return;
                }
                try {
                    ruleManager.addImport(importName);
                    String message = new StringResourceModel("insertedImport", this, null).getString();
                    LOGGER.info("successfully inserted import " + importName);
                    info(importName + " " + message);
                } catch (RuleBaseException e) {
                    // Roll back the partial add so the rule base stays consistent.
                    ruleManager.removeImport(importName);
                    LOGGER.debug("error while saving import " + importName, e);
                    String message = new StringResourceModel("savingError", this, null).getString();
                    error(importName + " " + message + "\n" + e.getLocalizedMessage());
                }
                tree.setModelObject(createTreeModel());
                importName = "";
                target.addComponent(importField);
                target.addComponent(tree);
                target.addComponent(feedbackPanel);
            }
        };
        submitButton.setOutputMarkupId(true);
        form.add(submitButton);
        // Removes the entered import; if rules still depend on it, it is re-added and an
        // error reported, otherwise a "notExistingError" is shown.
        deleteButton = new AjaxButton("deleteButton", form) {
            @Override
            protected void onSubmit(AjaxRequestTarget target, Form<?> form) {
                try {
                    ruleManager.removeImport(importName);
                    String message = new StringResourceModel("deletedImport", this, null).getString();
                    info(importName + " " + message);
                    LOGGER.info("successfully deleted import " + importName);
                } catch (RuleBaseException e) {
                    LOGGER.debug("error while deleting import " + importName, e);
                    if (e.getMessage().startsWith("Rule Compilation error")) {
                        // Deletion broke rule compilation - restore the import.
                        ruleManager.addImport(importName);
                        String message = new StringResourceModel("deletingError", this, null).getString();
                        error(importName + " " + message + "\n" + e.getLocalizedMessage());
                    } else {
                        String message = new StringResourceModel("notExistingError", this, null).getString();
                        error(importName + " " + message);
                    }
                    target.addComponent(feedbackPanel);
                    return;
                }
                tree.setModelObject(createTreeModel());
                importName = "";
                target.addComponent(feedbackPanel);
                target.addComponent(importField);
                target.addComponent(tree);
            }
        };
        deleteButton.setOutputMarkupId(true);
        form.add(deleteButton);
        importField = new TextField<String>("importName");
        importField.setOutputMarkupId(true);
        form.add(importField);
        add(form);
        feedbackPanel = new FeedbackPanel("feedback");
        feedbackPanel.setOutputMarkupId(true);
        add(feedbackPanel);
    }

    // Builds the tree model: an "Imports" root with one leaf per known import.
    // NOTE(review): imports appear in whatever order listImports() returns them;
    // later revisions sort them alphabetically.
    private TreeModel createTreeModel() {
        TreeModel model = null;
        DefaultMutableTreeNode rootNode = new DefaultMutableTreeNode("Imports");
        for (String imp : ruleManager.listImports()) {
            DefaultMutableTreeNode child = new DefaultMutableTreeNode(imp);
            rootNode.add(child);
        }
        model = new DefaultTreeModel(rootNode);
        return model;
    }

    public String getImportName() {
        return importName;
    }

    public void setImportName(String importName) {
        this.importName = importName;
    }
}
|
[OPENENGSB-1323] sorted imports
|
ui/admin/src/main/java/org/openengsb/ui/admin/organizeImportsPage/OrganizeImportsPage.java
|
[OPENENGSB-1323] sorted imports
|
|
Java
|
apache-2.0
|
d9d34ec8d32f51cacce533dc508e448a301a1e7a
| 0
|
weijiangzhu/mina,dongjiaqiang/mina,yangzhongj/mina,universsky/mina,Vicky01200059/mina,mway08/mina,apache/mina,yangzhongj/mina,weijiangzhu/mina,jeffmaury/mina,apache/mina,universsky/mina,dongjiaqiang/mina,mway08/mina,Vicky01200059/mina
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.mina.core.polling;
import java.io.IOException;
import java.net.PortUnreachableException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.mina.core.buffer.IoBuffer;
import org.apache.mina.core.file.FileRegion;
import org.apache.mina.core.filterchain.IoFilterChain;
import org.apache.mina.core.filterchain.IoFilterChainBuilder;
import org.apache.mina.core.future.DefaultIoFuture;
import org.apache.mina.core.service.AbstractIoService;
import org.apache.mina.core.service.IoProcessor;
import org.apache.mina.core.service.IoServiceListenerSupport;
import org.apache.mina.core.session.AbstractIoSession;
import org.apache.mina.core.session.IoSession;
import org.apache.mina.core.session.IoSessionConfig;
import org.apache.mina.core.session.SessionState;
import org.apache.mina.core.write.WriteRequest;
import org.apache.mina.core.write.WriteRequestQueue;
import org.apache.mina.core.write.WriteToClosedSessionException;
import org.apache.mina.transport.socket.AbstractDatagramSessionConfig;
import org.apache.mina.util.ExceptionMonitor;
import org.apache.mina.util.NamePreservingRunnable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An abstract implementation of {@link IoProcessor} which helps transport
* developers to write an {@link IoProcessor} easily. This class is in charge of
* active polling a set of {@link IoSession} and trigger events when some I/O
* operation is possible.
*
* @author <a href="http://mina.apache.org">Apache MINA Project</a>
*/
public abstract class AbstractPollingIoProcessor<T extends AbstractIoSession>
implements IoProcessor<T> {
/** A logger for this class */
private final static Logger LOG = LoggerFactory.getLogger(IoProcessor.class);
/**
* The maximum loop count for a write operation until
* {@link #write(AbstractIoSession, IoBuffer, int)} returns non-zero value.
* It is similar to what a spin lock is for in concurrency programming. It
* improves memory utilization and write throughput significantly.
*/
private static final int WRITE_SPIN_COUNT = 256;
/**
* A timeout used for the select, as we need to get out to deal with idle
* sessions
*/
private static final long SELECT_TIMEOUT = 1000L;
/** A map containing the last Thread ID for each class */
private static final Map<Class<?>, AtomicInteger> threadIds = new ConcurrentHashMap<Class<?>, AtomicInteger>();
/** A lock used to protect the processor creation */
private final Object lock = new Object();
/** This IoProcessor instance name */
private final String threadName;
/** The executor to use when we need to start the inner Processor */
private final Executor executor;
/** A Session queue containing the newly created sessions */
private final Queue<T> newSessions = new ConcurrentLinkedQueue<T>();
/** A queue used to store the sessions to be removed */
private final Queue<T> removingSessions = new ConcurrentLinkedQueue<T>();
/** A queue used to store the sessions to be flushed */
private final Queue<T> flushingSessions = new ConcurrentLinkedQueue<T>();
/**
* A queue used to store the sessions which have a trafficControl to be
* updated
*/
private final Queue<T> trafficControllingSessions = new ConcurrentLinkedQueue<T>();
/** The processor thread : it handles the incoming messages */
private Processor processor;
private long lastIdleCheckTime;
private final Object disposalLock = new Object();
private volatile boolean disposing;
private volatile boolean disposed;
private final DefaultIoFuture disposalFuture = new DefaultIoFuture(null);
protected AtomicBoolean wakeupCalled = new AtomicBoolean(false);
/**
 * Create an {@link AbstractPollingIoProcessor} with the given
 * {@link Executor} for handling I/Os events.
 *
 * @param executor
 *            the {@link Executor} for handling I/O events
 * @throws NullPointerException
 *             if {@code executor} is null
 */
protected AbstractPollingIoProcessor(Executor executor) {
    if (executor == null) {
        throw new NullPointerException("executor");
    }

    // The thread name is computed once per instance, from a per-class counter.
    this.threadName = nextThreadName();
    this.executor = executor;
}
/**
 * Computes a unique name for the processor thread of this instance. A
 * per-class counter is kept in the static {@code threadIds} map, so each
 * concrete subclass gets its own numbering, starting at 1.
 *
 * @return the simple class name followed by '-' and the next ID for that
 *         class
 */
private String nextThreadName() {
    Class<?> processorClass = getClass();
    int id;

    // Guard the lookup-then-update on the shared map: without the lock two
    // threads could both see a missing counter and claim the same ID.
    synchronized (threadIds) {
        AtomicInteger counter = threadIds.get(processorClass);

        if (counter == null) {
            // First instance of this class: start numbering at 1 and
            // remember the counter for subsequent instances.
            id = 1;
            threadIds.put(processorClass, new AtomicInteger(id));
        } else {
            // Subsequent instance: just bump the existing counter.
            id = counter.incrementAndGet();
        }
    }

    return processorClass.getSimpleName() + '-' + id;
}
/**
 * {@inheritDoc}
 */
public final boolean isDisposing() {
    // volatile read: set under disposalLock in dispose()
    return disposing;
}

/**
 * {@inheritDoc}
 */
public final boolean isDisposed() {
    // volatile read: set at the end of dispose(), after disposalFuture completes
    return disposed;
}
/**
 * {@inheritDoc}
 */
public final void dispose() {
    if (disposed) {
        // Already fully disposed; nothing to do.
        return;
    }

    synchronized (disposalLock) {
        if (!disposing) {
            disposing = true;
            // Wake up (or start) the processor thread so it can observe the
            // disposing flag, schedule the removal of all sessions and
            // eventually call dispose0().
            startupProcessor();
        }
    }

    // Block until the processor thread signals that cleanup is complete.
    disposalFuture.awaitUninterruptibly();
    disposed = true;
}
/**
 * Dispose the resources used by this {@link IoProcessor} for polling the
 * client connections. Called once, from the processor thread, when the
 * service is being disposed.
 *
 * @throws Exception
 *             if some low level IO error occurs
 */
protected abstract void dispose0() throws Exception;

/**
 * Poll those sessions for the given timeout.
 *
 * @param timeout
 *            milliseconds before the call times out if no event appears
 * @return The number of sessions ready for read or for write
 * @throws Exception
 *             if some low level IO error occurs
 */
protected abstract int select(long timeout) throws Exception;

/**
 * Poll those sessions forever (no timeout).
 *
 * @return The number of sessions ready for read or for write
 * @throws Exception
 *             if some low level IO error occurs
 */
protected abstract int select() throws Exception;
/**
 * Tells whether the list of {@link IoSession} polled by this
 * {@link IoProcessor} is empty.
 *
 * @return true if no session is currently managed by this
 *         {@link IoProcessor}
 */
protected abstract boolean isSelectorEmpty();

/**
 * Interrupt the {@link AbstractPollingIoProcessor#select(long)} call.
 */
protected abstract void wakeup();

/**
 * Get an {@link Iterator} for the list of {@link IoSession} polled by this
 * {@link IoProcessor}.
 *
 * @return {@link Iterator} of {@link IoSession}
 */
protected abstract Iterator<T> allSessions();

/**
 * Get an {@link Iterator} for the list of {@link IoSession} found selected
 * by the last call of {@link AbstractPollingIoProcessor#select(long)}.
 *
 * @return {@link Iterator} of {@link IoSession} ready for I/O operations
 */
protected abstract Iterator<T> selectedSessions();
/**
 * Get the state of a session (preparing, open, closed).
 *
 * @param session
 *            the {@link IoSession} to inspect
 * @return the state of the session
 */
protected abstract SessionState getState(T session);

/**
 * Is the session ready for writing.
 *
 * @param session
 *            the session queried
 * @return true if ready, false if not ready
 */
protected abstract boolean isWritable(T session);

/**
 * Is the session ready for reading.
 *
 * @param session
 *            the session queried
 * @return true if ready, false if not ready
 */
protected abstract boolean isReadable(T session);

/**
 * Register a session for writing.
 *
 * @param session
 *            the session registered
 * @param isInterested
 *            true for registering, false for removing
 */
protected abstract void setInterestedInWrite(T session, boolean isInterested)
        throws Exception;

/**
 * Register a session for reading.
 *
 * @param session
 *            the session registered
 * @param isInterested
 *            true for registering, false for removing
 */
protected abstract void setInterestedInRead(T session, boolean isInterested)
        throws Exception;

/**
 * Is this session registered for reading.
 *
 * @param session
 *            the session queried
 * @return true if registered for reading
 */
protected abstract boolean isInterestedInRead(T session);

/**
 * Is this session registered for writing.
 *
 * @param session
 *            the session queried
 * @return true if registered for writing
 */
protected abstract boolean isInterestedInWrite(T session);
/**
 * Initialize the polling of a session. Add it to the polling process.
 *
 * @param session the {@link IoSession} to add to the polling
 * @throws Exception any exception thrown by the underlying system calls
 */
protected abstract void init(T session) throws Exception;

/**
 * Destroy the underlying client socket handle.
 *
 * @param session
 *            the {@link IoSession}
 * @throws Exception
 *             any exception thrown by the underlying system calls
 */
protected abstract void destroy(T session) throws Exception;

/**
 * Reads a sequence of bytes from a {@link IoSession} into the given
 * {@link IoBuffer}. Is called when the session was found ready for reading.
 *
 * @param session
 *            the session to read
 * @param buf
 *            the buffer to fill
 * @return the number of bytes read
 * @throws Exception
 *             any exception thrown by the underlying system calls
 */
protected abstract int read(T session, IoBuffer buf) throws Exception;

/**
 * Write a sequence of bytes to a {@link IoSession}, meant to be called when
 * a session was found ready for writing.
 *
 * @param session
 *            the session to write
 * @param buf
 *            the buffer to write
 * @param length
 *            the number of bytes to write; can be superior to the number of
 *            bytes remaining in the buffer
 * @return the number of bytes written
 * @throws Exception
 *             any exception thrown by the underlying system calls
 */
protected abstract int write(T session, IoBuffer buf, int length)
        throws Exception;

/**
 * Write a part of a file to a {@link IoSession}; if the underlying API
 * isn't supporting system calls like sendfile(), you can throw a
 * {@link UnsupportedOperationException} so the file will be sent using the
 * usual {@link #write(AbstractIoSession, IoBuffer, int)} call.
 *
 * @param session
 *            the session to write
 * @param region
 *            the file region to write
 * @param length
 *            the length of the portion to send
 * @return the number of written bytes
 * @throws Exception
 *             any exception thrown by the underlying system calls
 */
protected abstract int transferFile(T session, FileRegion region, int length)
        throws Exception;
/**
 * {@inheritDoc}
 */
public final void add(T session) {
    if (isDisposing()) {
        // No new session may be registered once disposal has started.
        throw new IllegalStateException("Already disposed.");
    }

    // Adds the session to the newSession queue and starts the worker
    newSessions.add(session);
    startupProcessor();
}
/**
 * {@inheritDoc}
 */
public final void remove(T session) {
    scheduleRemove(session);
    // Make sure the processor thread is running so the removal is processed.
    startupProcessor();
}

// Queues the session for removal; the processor thread performs the actual
// destruction in removeSessions().
private void scheduleRemove(T session) {
    removingSessions.add(session);
}
/**
 * {@inheritDoc}
 */
public final void flush(T session) {
    // setScheduledForFlush(true) returns true only if the session was not
    // already queued, avoiding duplicate entries in flushingSessions.
    if (session.setScheduledForFlush(true)) {
        flushingSessions.add(session);
        // Interrupt select() so the pending writes are handled promptly.
        wakeup();
    }
}

// Same as flush(T) but without waking up the selector: used from within the
// processor thread itself, where a wakeup is unnecessary.
private void scheduleFlush(T session) {
    if (session.setScheduledForFlush(true)) {
        // add the session to the queue
        flushingSessions.add(session);
    }
}

/**
 * {@inheritDoc}
 */
public final void updateTrafficMask(T session) {
    trafficControllingSessions.add(session);
    // Interrupt select() so the new traffic mask is applied promptly.
    wakeup();
}
/**
 * Starts the inner Processor, asking the executor to pick a thread in its
 * pool. The Runnable will be renamed to this instance's thread name.
 */
private void startupProcessor() {
    // 'lock' guards the lazy creation of the single Processor instance; the
    // processor field is reset to null by the Processor itself when it exits.
    synchronized (lock) {
        if (processor == null) {
            processor = new Processor();
            executor.execute(new NamePreservingRunnable(processor, threadName));
        }
    }

    // Just stop the select() and start it again, so that the processor
    // can be activated immediately.
    wakeup();
}
/**
 * In the case we are using the java select() method, this method is used to
 * trash the buggy selector and create a new one, registering all the
 * sockets on it.
 *
 * @throws IOException
 *             If we got an exception
 */
abstract protected void registerNewSelector() throws IOException;

/**
 * Check that the select() has not exited immediately just because of a
 * broken connection. In this case, this is a standard case, and we just
 * have to loop.
 *
 * @return true if a connection has been brutally closed.
 * @throws IOException
 *             If we got an exception
 */
abstract protected boolean isBrokenConnection() throws IOException;
/**
 * Drains the {@code newSessions} queue, registering each queued session
 * with the selector via {@link #addNow}.
 *
 * @return the number of sessions that were effectively created
 */
private int handleNewSessions() {
    int registered = 0;
    T session;

    // Drain the queue completely; poll() returns null once it is empty.
    while ((session = newSessions.poll()) != null) {
        if (addNow(session)) {
            // A new session has been created
            registered++;
        }
    }

    return registered;
}
/**
 * Process a new session :
 * - initialize it
 * - create its chain
 * - fire the CREATED listeners if any
 *
 * @param session The session to create
 * @return true if the session has been registered
 */
private boolean addNow(T session) {
    boolean registered = false;

    try {
        init(session);
        registered = true;

        // Build the filter chain of this session.
        IoFilterChainBuilder chainBuilder = session.getService().getFilterChainBuilder();
        chainBuilder.buildFilterChain(session.getFilterChain());

        // DefaultIoFilterChain.CONNECT_FUTURE is cleared inside here
        // in AbstractIoFilterChain.fireSessionOpened().
        // Propagate the SESSION_CREATED event up to the chain
        IoServiceListenerSupport listeners = ((AbstractIoService) session.getService()).getListeners();
        listeners.fireSessionCreated(session);
    } catch (Throwable e) {
        ExceptionMonitor.getInstance().exceptionCaught(e);

        try {
            // Registration failed part-way: release whatever low-level
            // resources init() may have acquired.
            destroy(session);
        } catch (Exception e1) {
            ExceptionMonitor.getInstance().exceptionCaught(e1);
        } finally {
            registered = false;
        }
    }

    return registered;
}
/**
 * Drains the {@code removingSessions} queue, destroying each session
 * according to its current state.
 *
 * @return the number of sessions effectively removed
 */
private int removeSessions() {
    int removedSessions = 0;

    for (T session = removingSessions.poll(); session != null; session = removingSessions.poll()) {
        SessionState state = getState(session);

        // Now deal with the removal accordingly to the session's state
        switch (state) {
        case OPENED:
            // Try to remove this session
            if (removeNow(session)) {
                removedSessions++;
            }
            break;

        case CLOSING:
            // Skip if channel is already closed
            break;

        case OPENING:
            // Remove session from the newSessions queue and
            // remove it
            newSessions.remove(session);

            if (removeNow(session)) {
                removedSessions++;
            }
            break;

        default:
            throw new IllegalStateException(String.valueOf(state));
        }
    }

    return removedSessions;
}
/**
 * Destroys a session immediately: fails its pending write requests, closes
 * the underlying handle and notifies the service listeners.
 *
 * @param session the session to destroy
 * @return true if the handle was destroyed without error
 */
private boolean removeNow(T session) {
    clearWriteRequestQueue(session);

    try {
        destroy(session);
        return true;
    } catch (Exception e) {
        IoFilterChain filterChain = session.getFilterChain();
        filterChain.fireExceptionCaught(e);
    } finally {
        // Clear again: destroy() may have failed after more requests were
        // queued, and the DESTROYED event must always be fired.
        clearWriteRequestQueue(session);
        ((AbstractIoService) session.getService()).getListeners()
                .fireSessionDestroyed(session);
    }

    return false;
}
/**
 * Empties the session's write request queue: the first unwritten buffer is
 * rewound (via reset(), to the mark set before writing started), and every
 * remaining request is failed with a {@link WriteToClosedSessionException}.
 *
 * @param session the session whose pending writes must be cleared
 */
private void clearWriteRequestQueue(T session) {
    WriteRequestQueue writeRequestQueue = session.getWriteRequestQueue();
    WriteRequest req;

    List<WriteRequest> failedRequests = new ArrayList<WriteRequest>();

    if ((req = writeRequestQueue.poll(session)) != null) {
        Object message = req.getMessage();

        if (message instanceof IoBuffer) {
            IoBuffer buf = (IoBuffer) message;

            // The first unwritten empty buffer must be
            // forwarded to the filter chain.
            if (buf.hasRemaining()) {
                buf.reset();
                failedRequests.add(req);
            } else {
                // Fully written buffer: report it as sent, not failed.
                IoFilterChain filterChain = session.getFilterChain();
                filterChain.fireMessageSent(req);
            }
        } else {
            failedRequests.add(req);
        }

        // Discard others.
        while ((req = writeRequestQueue.poll(session)) != null) {
            failedRequests.add(req);
        }
    }

    // Create an exception and notify.
    if (!failedRequests.isEmpty()) {
        WriteToClosedSessionException cause = new WriteToClosedSessionException(
                failedRequests);

        for (WriteRequest r : failedRequests) {
            // Undo the scheduled-bytes accounting before failing the future.
            session.decreaseScheduledBytesAndMessages(r);
            r.getFuture().setException(cause);
        }

        IoFilterChain filterChain = session.getFilterChain();
        filterChain.fireExceptionCaught(cause);
    }
}
/**
 * Handles every session found ready by the last select() call, removing
 * each one from the selected set once it has been processed.
 *
 * @throws Exception if the processing of a session fails
 */
private void process() throws Exception {
    Iterator<T> it = selectedSessions();

    while (it.hasNext()) {
        T session = it.next();
        process(session);
        // Consume the selection so the session is not processed twice.
        it.remove();
    }
}
/**
 * Deal with session ready for the read or write operations, or both.
 */
private void process(T session) {
    // Process Reads
    if (isReadable(session) && !session.isReadSuspended()) {
        read(session);
    }

    // Process writes: only queue the session if it is not already
    // scheduled for flush (setScheduledForFlush returns false otherwise).
    if (isWritable(session) && !session.isWriteSuspended() && session.setScheduledForFlush(true)) {
        // add the session to the queue
        flushingSessions.add(session);
    }
}
/**
 * Reads whatever the session has ready into a freshly allocated buffer,
 * forwards it up the filter chain, and adapts the read buffer size to the
 * observed traffic. A negative read result (peer closed) schedules the
 * session for removal.
 *
 * @param session the session found ready for reading
 */
private void read(T session) {
    IoSessionConfig config = session.getConfig();
    IoBuffer buf = IoBuffer.allocate(config.getReadBufferSize());

    final boolean hasFragmentation = session.getTransportMetadata()
            .hasFragmentation();

    try {
        int readBytes = 0;
        int ret;

        try {
            if (hasFragmentation) {
                // Stream transport: keep reading until nothing is left
                // or the buffer is full.
                while ((ret = read(session, buf)) > 0) {
                    readBytes += ret;

                    if (!buf.hasRemaining()) {
                        break;
                    }
                }
            } else {
                // Message transport: a single read gets the whole message.
                ret = read(session, buf);

                if (ret > 0) {
                    readBytes = ret;
                }
            }
        } finally {
            buf.flip();
        }

        if (readBytes > 0) {
            IoFilterChain filterChain = session.getFilterChain();
            filterChain.fireMessageReceived(buf);
            buf = null;

            if (hasFragmentation) {
                // Shrink the buffer when less than half was used, grow it
                // when it was completely filled.
                if (readBytes << 1 < config.getReadBufferSize()) {
                    session.decreaseReadBufferSize();
                } else if (readBytes == config.getReadBufferSize()) {
                    session.increaseReadBufferSize();
                }
            }
        }

        if (ret < 0) {
            // The remote peer closed the connection.
            scheduleRemove(session);
        }
    } catch (Throwable e) {
        if (e instanceof IOException) {
            // Keep the session open on PortUnreachableException for datagram
            // configs that ask for it; remove it in every other I/O failure.
            if (!(e instanceof PortUnreachableException)
                    || !AbstractDatagramSessionConfig.class.isAssignableFrom(config.getClass())
                    || ((AbstractDatagramSessionConfig) config).isCloseOnPortUnreachable()) {
                scheduleRemove(session);
            }
        }

        IoFilterChain filterChain = session.getFilterChain();
        filterChain.fireExceptionCaught(e);
    }
}
/**
 * Fires idle events on all managed sessions, at most once per
 * SELECT_TIMEOUT interval.
 *
 * @param currentTime the current time in milliseconds
 * @throws Exception if the idleness notification fails
 */
private void notifyIdleSessions(long currentTime) throws Exception {
    // process idle sessions
    if (currentTime - lastIdleCheckTime >= SELECT_TIMEOUT) {
        lastIdleCheckTime = currentTime;
        AbstractIoSession.notifyIdleness(allSessions(), currentTime);
    }
}
/**
 * Write all the pending messages
 */
private void flush(long currentTime) {
    if (flushingSessions.isEmpty()) {
        return;
    }

    do {
        T session = flushingSessions.poll(); // the same one with firstSession

        if (session == null) {
            // Just in case ... It should not happen.
            break;
        }

        // Unmark it first: a concurrent flush(T) may re-queue it from here on.
        session.setScheduledForFlush(false);
        SessionState state = getState(session);

        switch (state) {
        case OPENED:
            try {
                boolean flushedAll = flushNow(session, currentTime);

                // More requests may have been queued while we were flushing;
                // re-schedule the session if it still has pending writes.
                if (flushedAll
                        && !session.getWriteRequestQueue().isEmpty(session)
                        && !session.isScheduledForFlush()) {
                    scheduleFlush(session);
                }
            } catch (Exception e) {
                scheduleRemove(session);
                IoFilterChain filterChain = session.getFilterChain();
                filterChain.fireExceptionCaught(e);
            }
            break;

        case CLOSING:
            // Skip if the channel is already closed.
            break;

        case OPENING:
            // Retry later if session is not yet fully initialized.
            // (In case that Session.write() is called before addSession()
            // is processed)
            scheduleFlush(session);
            return;

        default:
            throw new IllegalStateException(String.valueOf(state));
        }
    } while (!flushingSessions.isEmpty());
}
/**
 * Writes as much of the session's pending requests as the kernel and the
 * fairness budget allow, toggling OP_WRITE interest as needed.
 *
 * @param session the session to flush
 * @param currentTime the current time, for the written-bytes statistics
 * @return true if everything currently writable was flushed; false if the
 *         session had to be re-registered for write or re-scheduled
 */
private boolean flushNow(T session, long currentTime) {
    if (!session.isConnected()) {
        scheduleRemove(session);
        return false;
    }

    final boolean hasFragmentation = session.getTransportMetadata()
            .hasFragmentation();

    final WriteRequestQueue writeRequestQueue = session
            .getWriteRequestQueue();

    // Set limitation for the number of written bytes for read-write
    // fairness. I used maxReadBufferSize * 3 / 2, which yields best
    // performance in my experience while not breaking fairness much.
    final int maxWrittenBytes = session.getConfig().getMaxReadBufferSize()
            + (session.getConfig().getMaxReadBufferSize() >>> 1);
    int writtenBytes = 0;
    WriteRequest req = null;

    try {
        // Clear OP_WRITE
        setInterestedInWrite(session, false);

        do {
            // Check for pending writes.
            req = session.getCurrentWriteRequest();

            if (req == null) {
                req = writeRequestQueue.poll(session);

                if (req == null) {
                    break;
                }

                session.setCurrentWriteRequest(req);
            }

            int localWrittenBytes = 0;
            Object message = req.getMessage();

            if (message instanceof IoBuffer) {
                localWrittenBytes = writeBuffer(session, req,
                        hasFragmentation, maxWrittenBytes - writtenBytes,
                        currentTime);

                if (localWrittenBytes > 0
                        && ((IoBuffer) message).hasRemaining()) {
                    // the buffer isn't empty, we re-interest it in writing
                    writtenBytes += localWrittenBytes;
                    setInterestedInWrite(session, true);
                    return false;
                }
            } else if (message instanceof FileRegion) {
                localWrittenBytes = writeFile(session, req,
                        hasFragmentation, maxWrittenBytes - writtenBytes,
                        currentTime);

                // Fix for Java bug on Linux
                // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=5103988
                // If there's still data to be written in the FileRegion,
                // return 0 indicating that we need
                // to pause until writing may resume.
                if (localWrittenBytes > 0
                        && ((FileRegion) message).getRemainingBytes() > 0) {
                    writtenBytes += localWrittenBytes;
                    setInterestedInWrite(session, true);
                    return false;
                }
            } else {
                throw new IllegalStateException(
                        "Don't know how to handle message of type '"
                                + message.getClass().getName()
                                + "'. Are you missing a protocol encoder?");
            }

            if (localWrittenBytes == 0) {
                // Kernel buffer is full.
                setInterestedInWrite(session, true);
                return false;
            }

            writtenBytes += localWrittenBytes;

            if (writtenBytes >= maxWrittenBytes) {
                // Wrote too much
                scheduleFlush(session);
                return false;
            }
        } while (writtenBytes < maxWrittenBytes);
    } catch (Exception e) {
        if (req != null) {
            req.getFuture().setException(e);
        }

        IoFilterChain filterChain = session.getFilterChain();
        filterChain.fireExceptionCaught(e);
        return false;
    }

    return true;
}
/**
 * Writes one IoBuffer-backed request, spinning up to WRITE_SPIN_COUNT times
 * on a zero-byte write. Fires messageSent when the buffer is fully written
 * (or, for non-fragmenting transports, as soon as anything was written).
 *
 * @param session the session to write to
 * @param req the request whose message is an {@link IoBuffer}
 * @param hasFragmentation whether the transport fragments messages
 * @param maxLength the remaining fairness budget, in bytes
 * @param currentTime the current time, for the written-bytes statistics
 * @return the number of bytes written by this call
 */
private int writeBuffer(T session, WriteRequest req,
        boolean hasFragmentation, int maxLength, long currentTime)
        throws Exception {
    IoBuffer buf = (IoBuffer) req.getMessage();
    int localWrittenBytes = 0;

    if (buf.hasRemaining()) {
        int length;

        if (hasFragmentation) {
            // Respect the fairness budget on stream transports.
            length = Math.min(buf.remaining(), maxLength);
        } else {
            length = buf.remaining();
        }

        // Retry a bounded number of times when the kernel accepts 0 bytes;
        // cheaper than re-registering OP_WRITE for a transient condition.
        for (int i = WRITE_SPIN_COUNT; i > 0; i--) {
            localWrittenBytes = write(session, buf, length);

            if (localWrittenBytes != 0) {
                break;
            }
        }
    }

    session.increaseWrittenBytes(localWrittenBytes, currentTime);

    if (!buf.hasRemaining() || !hasFragmentation && localWrittenBytes != 0) {
        // Buffer has been sent, clear the current request.
        int pos = buf.position();
        // reset() rewinds to the mark so the filter chain sees the full
        // message; the position is restored right after the notification.
        buf.reset();

        fireMessageSent(session, req);

        // And set it back to its position
        buf.position(pos);
    }

    return localWrittenBytes;
}
/**
 * Writes one FileRegion-backed request via {@link #transferFile}, updating
 * the region's position. Fires messageSent once the region is exhausted
 * (or, for non-fragmenting transports, as soon as anything was written).
 *
 * @param session the session to write to
 * @param req the request whose message is a {@link FileRegion}
 * @param hasFragmentation whether the transport fragments messages
 * @param maxLength the remaining fairness budget, in bytes
 * @param currentTime the current time, for the written-bytes statistics
 * @return the number of bytes written by this call
 */
private int writeFile(T session, WriteRequest req,
        boolean hasFragmentation, int maxLength, long currentTime)
        throws Exception {
    int localWrittenBytes;
    FileRegion region = (FileRegion) req.getMessage();

    if (region.getRemainingBytes() > 0) {
        int length;

        if (hasFragmentation) {
            length = (int) Math.min(region.getRemainingBytes(), maxLength);
        } else {
            // Clamp to int range: transferFile takes an int length.
            length = (int) Math.min(Integer.MAX_VALUE, region
                    .getRemainingBytes());
        }

        localWrittenBytes = transferFile(session, region, length);
        region.update(localWrittenBytes);
    } else {
        localWrittenBytes = 0;
    }

    session.increaseWrittenBytes(localWrittenBytes, currentTime);

    if (region.getRemainingBytes() <= 0 || !hasFragmentation
            && localWrittenBytes != 0) {
        fireMessageSent(session, req);
    }

    return localWrittenBytes;
}
/**
 * Marks the given request as fully written: detaches it from the session
 * and fires the messageSent event up the filter chain.
 *
 * @param session the session the request belongs to
 * @param req the write request that has been completely sent
 */
private void fireMessageSent(T session, WriteRequest req) {
    session.setCurrentWriteRequest(null);
    session.getFilterChain().fireMessageSent(req);
}
/**
 * Update the trafficControl for all the session.
 */
private void updateTrafficMask() {
    // Snapshot the queue size first: OPENING sessions are re-queued below,
    // and without the bound this loop could spin on them forever.
    int queueSize = trafficControllingSessions.size();

    while (queueSize > 0) {
        T session = trafficControllingSessions.poll();

        if (session == null) {
            // We are done with this queue.
            return;
        }

        SessionState state = getState(session);

        switch (state) {
        case OPENED:
            updateTrafficControl(session);
            break;

        case CLOSING:
            break;

        case OPENING:
            // Retry later if session is not yet fully initialized.
            // (In case that Session.suspend??() or session.resume??() is
            // called before addSession() is processed)
            // We just put back the session at the end of the queue.
            trafficControllingSessions.add(session);
            break;

        default:
            throw new IllegalStateException(String.valueOf(state));
        }

        // As we have handled one session, decrement the number of
        // remaining sessions. The OPENING session will be processed
        // with the next select(), as the queue size has been decreased, even
        // if the session has been pushed at the end of the queue
        queueSize--;
    }
}
/**
 * {@inheritDoc}
 */
public void updateTrafficControl(T session) {
    // Register/deregister read interest according to the suspend flag;
    // failures are reported through the filter chain, not thrown.
    try {
        setInterestedInRead(session, !session.isReadSuspended());
    } catch (Exception e) {
        IoFilterChain filterChain = session.getFilterChain();
        filterChain.fireExceptionCaught(e);
    }

    // Write interest additionally requires pending requests in the queue.
    try {
        setInterestedInWrite(session, !session.getWriteRequestQueue()
                .isEmpty(session)
                && !session.isWriteSuspended());
    } catch (Exception e) {
        IoFilterChain filterChain = session.getFilterChain();
        filterChain.fireExceptionCaught(e);
    }
}
/**
 * The main loop. This is the place in charge to poll the Selector, and to
 * process the active sessions. On each iteration it:
 * - selects with a timeout (so idle sessions can be managed)
 * - works around broken connections / the epoll spin bug
 * - registers newly created sessions and updates traffic masks
 * - processes ready sessions, flushes pending writes, removes sessions
 * - fires idle events
 * and exits (clearing the {@code processor} field) once no session is left.
 */
private class Processor implements Runnable {
    public void run() {
        int nSessions = 0;
        lastIdleCheckTime = System.currentTimeMillis();

        for (;;) {
            try {
                // This select has a timeout so that we can manage
                // idle session when we get out of the select every
                // second. (note : this is a hack to avoid creating
                // a dedicated thread).
                long t0 = System.currentTimeMillis();
                int selected = select(SELECT_TIMEOUT);
                long t1 = System.currentTimeMillis();
                long delta = (t1 - t0);

                // selected == 0 with a near-instant return and no explicit
                // wakeup() means select() exited abnormally.
                if ((selected == 0) && !wakeupCalled.get() && (delta < 100)) {
                    // Last chance : the select() may have been
                    // interrupted because we have had an closed channel.
                    if (isBrokenConnection()) {
                        LOG.warn("Broken connection");

                        // we can reselect immediately
                        // set back the flag to false
                        wakeupCalled.getAndSet(false);

                        continue;
                    } else {
                        LOG.warn("Create a new selector. Selected is 0, delta = "
                                + (t1 - t0));
                        // Ok, we are hit by the nasty epoll
                        // spinning.
                        // Basically, there is a race condition
                        // which causes a closing file descriptor not to be
                        // considered as available as a selected channel, but
                        // it stopped the select. The next time we will
                        // call select(), it will exit immediately for the same
                        // reason, and do so forever, consuming 100%
                        // CPU.
                        // We have to destroy the selector, and
                        // register all the socket on a new one.
                        registerNewSelector();
                    }

                    // Set back the flag to false
                    wakeupCalled.getAndSet(false);

                    // and continue the loop
                    continue;
                }

                // Manage newly created session first
                nSessions += handleNewSessions();

                updateTrafficMask();

                // Now, if we have had some incoming or outgoing events,
                // deal with them
                if (selected > 0) {
                    //LOG.debug("Processing ..."); // This log hurts one of the MDCFilter test...
                    process();
                }

                // Write the pending requests
                long currentTime = System.currentTimeMillis();
                flush(currentTime);

                // And manage removed sessions
                nSessions -= removeSessions();

                // Last, not least, send Idle events to the idle sessions
                notifyIdleSessions(currentTime);

                // Get a chance to exit the infinite loop if there are no
                // more sessions on this Processor
                if (nSessions == 0) {
                    synchronized (lock) {
                        if (newSessions.isEmpty() && isSelectorEmpty()) {
                            // No work left: let startupProcessor() create a
                            // fresh Processor when sessions arrive again.
                            processor = null;
                            break;
                        }
                    }
                }

                // Disconnect all sessions immediately if disposal has been
                // requested so that we exit this loop eventually.
                if (isDisposing()) {
                    for (Iterator<T> i = allSessions(); i.hasNext();) {
                        scheduleRemove(i.next());
                    }

                    wakeup();
                }
            } catch (Throwable t) {
                ExceptionMonitor.getInstance().exceptionCaught(t);

                try {
                    // Back off briefly so a persistent failure does not
                    // turn this loop into a busy spin.
                    Thread.sleep(1000);
                } catch (InterruptedException e1) {
                    ExceptionMonitor.getInstance().exceptionCaught(e1);
                }
            }
        }

        try {
            synchronized (disposalLock) {
                if (isDisposing()) {
                    dispose0();
                }
            }
        } catch (Throwable t) {
            ExceptionMonitor.getInstance().exceptionCaught(t);
        } finally {
            // Unblock the thread waiting in dispose().
            disposalFuture.setValue(true);
        }
    }
}
}
|
core/src/main/java/org/apache/mina/core/polling/AbstractPollingIoProcessor.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.mina.core.polling;
import java.io.IOException;
import java.net.PortUnreachableException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.mina.core.buffer.IoBuffer;
import org.apache.mina.core.file.FileRegion;
import org.apache.mina.core.filterchain.IoFilterChain;
import org.apache.mina.core.filterchain.IoFilterChainBuilder;
import org.apache.mina.core.future.DefaultIoFuture;
import org.apache.mina.core.service.AbstractIoService;
import org.apache.mina.core.service.IoProcessor;
import org.apache.mina.core.service.IoServiceListenerSupport;
import org.apache.mina.core.session.AbstractIoSession;
import org.apache.mina.core.session.IoSession;
import org.apache.mina.core.session.IoSessionConfig;
import org.apache.mina.core.session.SessionState;
import org.apache.mina.core.write.WriteRequest;
import org.apache.mina.core.write.WriteRequestQueue;
import org.apache.mina.core.write.WriteToClosedSessionException;
import org.apache.mina.transport.socket.AbstractDatagramSessionConfig;
import org.apache.mina.util.ExceptionMonitor;
import org.apache.mina.util.NamePreservingRunnable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An abstract implementation of {@link IoProcessor} which helps transport
* developers to write an {@link IoProcessor} easily. This class is in charge of
* active polling a set of {@link IoSession} and trigger events when some I/O
* operation is possible.
*
* @author <a href="http://mina.apache.org">Apache MINA Project</a>
*/
public abstract class AbstractPollingIoProcessor<T extends AbstractIoSession>
implements IoProcessor<T> {
/**
 * A logger for the processor. NOTE(review): it is registered under the
 * {@link IoProcessor} interface name, not the concrete class name — presumably
 * so all processor implementations log under one category; confirm before changing.
 */
private final static Logger LOG = LoggerFactory.getLogger(IoProcessor.class);
/**
 * The maximum loop count for a write operation until
 * {@link #write(AbstractIoSession, IoBuffer, int)} returns non-zero value.
 * It is similar to what a spin lock is for in concurrency programming. It
 * improves memory utilization and write throughput significantly.
 */
private static final int WRITE_SPIN_COUNT = 256;
/**
 * A timeout used for the select, as we need to get out to deal with idle
 * sessions
 */
private static final long SELECT_TIMEOUT = 1000L;
/** A map containing the last Thread ID for each class, used by {@link #nextThreadName()} */
private static final Map<Class<?>, AtomicInteger> threadIds = new ConcurrentHashMap<Class<?>, AtomicInteger>();
/** A lock used to protect the processor creation (see {@link #startupProcessor()}) */
private final Object lock = new Object();
/** This IoProcessor instance name, computed once from the class name and a counter */
private final String threadName;
/** The executor to use when we need to start the inner Processor */
private final Executor executor;
/** A Session queue containing the newly created sessions, drained by the processor thread */
private final Queue<T> newSessions = new ConcurrentLinkedQueue<T>();
/** A queue used to store the sessions to be removed */
private final Queue<T> removingSessions = new ConcurrentLinkedQueue<T>();
/** A queue used to store the sessions to be flushed */
private final Queue<T> flushingSessions = new ConcurrentLinkedQueue<T>();
/**
 * A queue used to store the sessions which have a trafficControl to be
 * updated
 */
private final Queue<T> trafficControllingSessions = new ConcurrentLinkedQueue<T>();
/** The processor thread : it handles the incoming messages. Guarded by {@link #lock}. */
private Processor processor;
// Timestamp of the last idle-session sweep; only touched by the processor thread
private long lastIdleCheckTime;
/** Protects the one-shot transition of the {@code disposing} flag */
private final Object disposalLock = new Object();
private volatile boolean disposing;
private volatile boolean disposed;
/** Completed by the processor thread once dispose0() has run */
private final DefaultIoFuture disposalFuture = new DefaultIoFuture(null);
// Set when wakeup() interrupts a select(), so a zero-select is not mistaken
// for the epoll spinning bug
protected AtomicBoolean wakeupCalled = new AtomicBoolean(false);
/**
 * Creates an {@link AbstractPollingIoProcessor} which will use the given
 * {@link Executor} to run its inner I/O processing loop.
 *
 * @param executor
 *            the {@link Executor} for handling I/O events; must not be
 *            <code>null</code>
 * @throws NullPointerException if {@code executor} is <code>null</code>
 */
protected AbstractPollingIoProcessor(Executor executor) {
    if (executor == null) {
        throw new NullPointerException("executor");
    }
    // Compute the per-class thread name once; it is reused each time the
    // inner Processor is (re)started.
    threadName = nextThreadName();
    this.executor = executor;
}
/**
 * Computes a unique thread name for this processor instance. A per-class
 * counter is kept in {@link #threadIds}, so each concrete processor class
 * gets its own numbering, starting at 1.
 *
 * @return a name of the form {@code SimpleClassName-N}
 */
private String nextThreadName() {
    final Class<?> cls = getClass();
    int id;
    // The get/put pair must be atomic : two threads creating processors of
    // the same class concurrently could otherwise both see a null counter
    // and one would overwrite the other's.
    synchronized (threadIds) {
        AtomicInteger counter = threadIds.get(cls);
        if (counter == null) {
            // First processor of this concrete class : start numbering at 1
            // and remember the counter for the next instances.
            id = 1;
            threadIds.put(cls, new AtomicInteger(id));
        } else {
            // Just bump the existing counter.
            id = counter.incrementAndGet();
        }
    }
    return cls.getSimpleName() + '-' + id;
}
/**
 * {@inheritDoc}
 */
// Plain read of the volatile flag set under disposalLock in dispose()
public final boolean isDisposing() {
    return disposing;
}
/**
 * {@inheritDoc}
 */
// Becomes true only after disposalFuture completes, i.e. dispose0() has run
public final boolean isDisposed() {
    return disposed;
}
/**
 * {@inheritDoc}
 *
 * Flips the {@code disposing} flag exactly once (under {@code disposalLock})
 * and makes sure the processor thread is running : that thread is the one
 * which actually releases the resources (via {@link #dispose0()}) and then
 * completes {@code disposalFuture}. This method blocks until that happens.
 */
public final void dispose() {
    if (disposed) {
        return;
    }
    synchronized (disposalLock) {
        if (!disposing) {
            disposing = true;
            // The processor thread notices isDisposing() and shuts down
            startupProcessor();
        }
    }
    // Wait until the processor thread has run dispose0()
    disposalFuture.awaitUninterruptibly();
    disposed = true;
}
/**
 * Dispose the resources used by this {@link IoProcessor} for polling the
 * client connections.
 *
 * @throws Exception
 *             if some low level IO error occurs
 */
protected abstract void dispose0() throws Exception;
/**
 * poll those sessions for the given timeout
 *
 * @param timeout
 *            milliseconds before the call timeout if no event appear
 * @return The number of session ready for read or for write
 * @throws Exception
 *             if some low level IO error occurs
 */
protected abstract int select(long timeout) throws Exception;
/**
 * poll those sessions forever
 *
 * @return The number of session ready for read or for write
 * @throws Exception
 *             if some low level IO error occurs
 */
protected abstract int select() throws Exception;
/**
 * Say if the list of {@link IoSession} polled by this {@link IoProcessor}
 * is empty
 *
 * @return true if no session is managed by this {@link IoProcessor}
 */
protected abstract boolean isSelectorEmpty();
/**
 * Interrupt the {@link AbstractPollingIoProcessor#select(long)} call.
 */
protected abstract void wakeup();
/**
 * Get an {@link Iterator} for the list of {@link IoSession} polled by this
 * {@link IoProcessor}
 *
 * @return {@link Iterator} of {@link IoSession}
 */
protected abstract Iterator<T> allSessions();
/**
 * Get an {@link Iterator} for the list of {@link IoSession} found selected
 * by the last call of {@link AbstractPollingIoProcessor#select(long)}.
 *
 * @return {@link Iterator} of {@link IoSession} ready for I/O operations
 */
protected abstract Iterator<T> selectedSessions();
/**
 * Get the state of a session (preparing, open, closed)
 *
 * @param session
 *            the {@link IoSession} to inspect
 * @return the state of the session
 */
protected abstract SessionState getState(T session);
/**
 * Is the session ready for writing
 *
 * @param session
 *            the session queried
 * @return true is ready, false if not ready
 */
protected abstract boolean isWritable(T session);
/**
 * Is the session ready for reading
 *
 * @param session
 *            the session queried
 * @return true is ready, false if not ready
 */
protected abstract boolean isReadable(T session);
/**
 * register a session for writing
 *
 * @param session
 *            the session registered
 * @param isInterested
 *            true for registering, false for removing
 * @throws Exception
 *             if the registration fails at the low level
 */
protected abstract void setInterestedInWrite(T session, boolean isInterested)
        throws Exception;
/**
 * register a session for reading
 *
 * @param session
 *            the session registered
 * @param isInterested
 *            true for registering, false for removing
 * @throws Exception
 *             if the registration fails at the low level
 */
protected abstract void setInterestedInRead(T session, boolean isInterested)
        throws Exception;
/**
 * is this session registered for reading
 *
 * @param session
 *            the session queried
 * @return true is registered for reading
 */
protected abstract boolean isInterestedInRead(T session);
/**
 * is this session registered for writing
 *
 * @param session
 *            the session queried
 * @return true is registered for writing
 */
protected abstract boolean isInterestedInWrite(T session);
/**
 * Initialize the polling of a session. Add it to the polling process.
 *
 * @param session the {@link IoSession} to add to the polling
 * @throws Exception any exception thrown by the underlying system calls
 */
protected abstract void init(T session) throws Exception;
/**
 * Destroy the underlying client socket handle
 *
 * @param session
 *            the {@link IoSession}
 * @throws Exception
 *             any exception thrown by the underlying system calls
 */
protected abstract void destroy(T session) throws Exception;
/**
 * Reads a sequence of bytes from a {@link IoSession} into the given
 * {@link IoBuffer}. Is called when the session was found ready for reading.
 *
 * @param session
 *            the session to read
 * @param buf
 *            the buffer to fill
 * @return the number of bytes read, or a negative value on end-of-stream
 * @throws Exception
 *             any exception thrown by the underlying system calls
 */
protected abstract int read(T session, IoBuffer buf) throws Exception;
/**
 * Write a sequence of bytes to a {@link IoSession}, means to be called when
 * a session was found ready for writing.
 *
 * @param session
 *            the session to write
 * @param buf
 *            the buffer to write
 * @param length
 *            the number of bytes to write; can be superior to the number of
 *            bytes remaining in the buffer
 * @return the number of bytes written (0 when the kernel buffer is full)
 * @throws Exception
 *             any exception thrown by the underlying system calls
 */
protected abstract int write(T session, IoBuffer buf, int length)
        throws Exception;
/**
 * Write a part of a file to a {@link IoSession}, if the underlying API
 * isn't supporting system calls like sendfile(), you can throw a
 * {@link UnsupportedOperationException} so the file will be send using
 * usual {@link #write(AbstractIoSession, IoBuffer, int)} call.
 *
 * @param session
 *            the session to write
 * @param region
 *            the file region to write
 * @param length
 *            the length of the portion to send
 * @return the number of written bytes
 * @throws Exception
 *             any exception thrown by the underlying system calls
 */
protected abstract int transferFile(T session, FileRegion region, int length)
        throws Exception;
/**
 * {@inheritDoc}
 *
 * Queues the session for registration; the processor thread picks it up
 * on its next loop iteration (see {@link #handleNewSessions()}).
 *
 * @throws IllegalStateException if this processor is being disposed
 */
public final void add(T session) {
    if (isDisposing()) {
        throw new IllegalStateException("Already disposed.");
    }
    // Adds the session to the newSession queue and starts the worker
    newSessions.add(session);
    startupProcessor();
}
/**
 * {@inheritDoc}
 *
 * The actual destruction happens on the processor thread (see
 * {@link #removeSessions()}); this only enqueues the session and makes
 * sure the processor is running.
 */
public final void remove(T session) {
    scheduleRemove(session);
    startupProcessor();
}
// Enqueue a session for removal by the processor thread
private void scheduleRemove(T session) {
    removingSessions.add(session);
}
/**
 * {@inheritDoc}
 *
 * The setScheduledForFlush(true) call is a compare-and-set guard : it only
 * returns true for the first caller, so a session is enqueued (and the
 * selector woken) at most once until the processor clears the flag.
 */
public final void flush(T session) {
    if (session.setScheduledForFlush(true)) {
        flushingSessions.add(session);
        wakeup();
    }
}
// Same CAS guard as flush(), but without waking the selector : used from
// within the processor loop itself, where a wakeup is pointless.
private void scheduleFlush(T session) {
    if (session.setScheduledForFlush(true)) {
        // add the session to the queue
        flushingSessions.add(session);
    }
}
/**
 * {@inheritDoc}
 *
 * The interest-ops change is applied by the processor thread (see
 * {@link #updateTrafficMask()}), hence the queue + wakeup.
 */
public final void updateTrafficMask(T session) {
    trafficControllingSessions.add(session);
    wakeup();
}
/**
 * Starts the inner Processor if it is not already running, asking the
 * executor to pick a thread in its pool. The Runnable is wrapped so the
 * borrowed thread temporarily takes this processor's name.
 */
private void startupProcessor() {
    synchronized (lock) {
        if (processor == null) {
            Processor newProcessor = new Processor();
            processor = newProcessor;
            executor.execute(new NamePreservingRunnable(newProcessor, threadName));
        }
    }
    // Interrupt any pending select() so the (possibly fresh) processor
    // notices the newly queued work immediately.
    wakeup();
}
/**
 * In the case we are using the java select() method, this method is used to
 * trash the buggy selector and create a new one, registering all the
 * sockets on it.
 *
 * @throws IOException
 *             If we got an exception
 */
abstract protected void registerNewSelector() throws IOException;
/**
 * Check that the select() has not exited immediately just because of a
 * broken connection. In this case, this is a standard case, and we just
 * have to loop.
 *
 * @return true if a connection has been brutally closed.
 * @throws IOException
 *             If we got an exception
 */
abstract protected boolean isBrokenConnection() throws IOException;
/**
 * Drains the new-sessions queue, registering each session with the
 * selector, and returns the number of sessions effectively created.
 *
 * @return the number of newly registered sessions
 */
private int handleNewSessions() {
    int created = 0;
    T session;
    while ((session = newSessions.poll()) != null) {
        // addNow() returns false when the low-level registration failed
        if (addNow(session)) {
            created++;
        }
    }
    return created;
}
/**
 * Process a new session :
 * - initialize it (register it with the selector)
 * - build its filter chain
 * - fire the SESSION_CREATED event up the chain
 * On any failure the session is destroyed again and false is returned.
 *
 * @param session The session to create
 * @return true if the session has been registered
 */
private boolean addNow(T session) {
    boolean registered = false;
    try {
        init(session);
        registered = true;
        // Build the filter chain of this session.
        IoFilterChainBuilder chainBuilder = session.getService().getFilterChainBuilder();
        chainBuilder.buildFilterChain(session.getFilterChain());
        // DefaultIoFilterChain.CONNECT_FUTURE is cleared inside here
        // in AbstractIoFilterChain.fireSessionOpened().
        // Propagate the SESSION_CREATED event up to the chain
        IoServiceListenerSupport listeners = ((AbstractIoService) session.getService()).getListeners();
        listeners.fireSessionCreated(session);
    } catch (Throwable e) {
        // Report the failure and undo the registration : the session must
        // not stay half-initialized on the selector.
        ExceptionMonitor.getInstance().exceptionCaught(e);
        try {
            destroy(session);
        } catch (Exception e1) {
            ExceptionMonitor.getInstance().exceptionCaught(e1);
        } finally {
            registered = false;
        }
    }
    return registered;
}
/**
 * Drains the removal queue and destroys each session according to its
 * current state.
 *
 * @return the number of sessions actually destroyed
 */
private int removeSessions() {
    int removed = 0;
    T session;
    while ((session = removingSessions.poll()) != null) {
        SessionState state = getState(session);
        switch (state) {
        case OPENED:
            // Fully registered : tear it down
            if (removeNow(session)) {
                removed++;
            }
            break;
        case CLOSING:
            // Channel already closed : nothing to do
            break;
        case OPENING:
            // The session is still sitting in the newSessions queue : pull
            // it out first so it never gets registered, then destroy it.
            newSessions.remove(session);
            if (removeNow(session)) {
                removed++;
            }
            break;
        default:
            throw new IllegalStateException(String.valueOf(state));
        }
    }
    return removed;
}
/**
 * Destroys a session immediately, failing its pending write requests and
 * firing SESSION_DESTROYED. Returns true only when the low-level destroy
 * succeeded.
 *
 * NOTE(review): clearWriteRequestQueue is called both before destroy() and
 * again in the finally block — presumably to also fail any request written
 * concurrently while the channel was being closed; confirm before simplifying.
 */
private boolean removeNow(T session) {
    clearWriteRequestQueue(session);
    try {
        destroy(session);
        return true;
    } catch (Exception e) {
        IoFilterChain filterChain = session.getFilterChain();
        filterChain.fireExceptionCaught(e);
    } finally {
        clearWriteRequestQueue(session);
        ((AbstractIoService) session.getService()).getListeners()
                .fireSessionDestroyed(session);
    }
    return false;
}
/**
 * Empties a closing session's write queue. The first partially-written
 * buffer is rewound to its mark and failed together with all remaining
 * requests (via a single WriteToClosedSessionException); a first request
 * whose buffer was already fully written is instead reported as sent.
 */
private void clearWriteRequestQueue(T session) {
    WriteRequestQueue writeRequestQueue = session.getWriteRequestQueue();
    WriteRequest req;
    List<WriteRequest> failedRequests = new ArrayList<WriteRequest>();
    if ((req = writeRequestQueue.poll(session)) != null) {
        Object message = req.getMessage();
        if (message instanceof IoBuffer) {
            IoBuffer buf = (IoBuffer)message;
            // The first unwritten empty buffer must be
            // forwarded to the filter chain.
            if (buf.hasRemaining()) {
                // Partially written : rewind to the mark before failing it
                buf.reset();
                failedRequests.add(req);
            } else {
                // Fully written : this one actually made it out
                IoFilterChain filterChain = session.getFilterChain();
                filterChain.fireMessageSent(req);
            }
        } else {
            failedRequests.add(req);
        }
        // Discard others.
        while ((req = writeRequestQueue.poll(session)) != null) {
            failedRequests.add(req);
        }
    }
    // Create an exception and notify.
    if (!failedRequests.isEmpty()) {
        WriteToClosedSessionException cause = new WriteToClosedSessionException(
                failedRequests);
        for (WriteRequest r : failedRequests) {
            // Keep the scheduled-bytes/messages counters consistent
            session.decreaseScheduledBytesAndMessages(r);
            r.getFuture().setException(cause);
        }
        IoFilterChain filterChain = session.getFilterChain();
        filterChain.fireExceptionCaught(cause);
    }
}
/**
 * Handles every session selected by the last select() call, removing each
 * one from the selected set once it has been processed.
 */
private void process() throws Exception {
    Iterator<T> it = selectedSessions();
    while (it.hasNext()) {
        T session = it.next();
        process(session);
        it.remove();
    }
}
/**
 * Deal with a session ready for the read or write operations, or both.
 * Reads are performed inline; writes are deferred by scheduling the session
 * on the flushing queue.
 */
private void process(T session) {
    // Process reads first : fire MESSAGE_RECEIVED up the filter chain
    if (isReadable(session) && !session.isReadSuspended()) {
        read(session);
    }
    // Then queue the session for flushing. scheduleFlush() performs the
    // same setScheduledForFlush(true) compare-and-set that was previously
    // inlined here, so the session is enqueued at most once — this just
    // removes the duplicated logic.
    if (isWritable(session) && !session.isWriteSuspended()) {
        scheduleFlush(session);
    }
}
/**
 * Reads from a session found ready for reading, fires MESSAGE_RECEIVED if
 * anything was read, adapts the read buffer size for fragmenting transports,
 * and schedules the session for removal on end-of-stream or fatal I/O error.
 */
private void read(T session) {
    IoSessionConfig config = session.getConfig();
    IoBuffer buf = IoBuffer.allocate(config.getReadBufferSize());
    final boolean hasFragmentation = session.getTransportMetadata()
            .hasFragmentation();
    try {
        int readBytes = 0;
        int ret;
        try {
            if (hasFragmentation) {
                // Stream-like transport : keep reading until the buffer is
                // full or the channel has nothing more to give
                while ((ret = read(session, buf)) > 0) {
                    readBytes += ret;
                    if (!buf.hasRemaining()) {
                        break;
                    }
                }
            } else {
                // Datagram-like transport : one read is one message
                ret = read(session, buf);
                if (ret > 0) {
                    readBytes = ret;
                }
            }
        } finally {
            buf.flip();
        }
        if (readBytes > 0) {
            IoFilterChain filterChain = session.getFilterChain();
            filterChain.fireMessageReceived(buf);
            buf = null;
            if (hasFragmentation) {
                // Adapt the next buffer size : shrink if we used less than
                // half, grow if we filled it completely
                if (readBytes << 1 < config.getReadBufferSize()) {
                    session.decreaseReadBufferSize();
                } else if (readBytes == config.getReadBufferSize()) {
                    session.increaseReadBufferSize();
                }
            }
        }
        if (ret < 0) {
            // Negative read means end-of-stream : close the session
            scheduleRemove(session);
        }
    } catch (Throwable e) {
        if (e instanceof IOException) {
            // For datagram sessions a PortUnreachableException only closes
            // the session when configured to do so; everything else does
            if (!(e instanceof PortUnreachableException)
                    || !AbstractDatagramSessionConfig.class.isAssignableFrom(config.getClass())
                    || ((AbstractDatagramSessionConfig) config).isCloseOnPortUnreachable()) {
                scheduleRemove(session);
            }
        }
        IoFilterChain filterChain = session.getFilterChain();
        filterChain.fireExceptionCaught(e);
    }
}
/**
 * Fires idle events on every managed session, at most once per
 * SELECT_TIMEOUT period.
 *
 * @param currentTime the current time in milliseconds
 */
private void notifyIdleSessions(long currentTime) throws Exception {
    long elapsed = currentTime - lastIdleCheckTime;
    if (elapsed >= SELECT_TIMEOUT) {
        lastIdleCheckTime = currentTime;
        AbstractIoSession.notifyIdleness(allSessions(), currentTime);
    }
}
/**
 * Write all the pending messages : drains the flushing queue and calls
 * flushNow() on each OPENED session. A session whose queue is still not
 * empty afterwards is rescheduled; an OPENING session is put back for a
 * later pass.
 */
private void flush(long currentTime) {
    if (flushingSessions.isEmpty()) {
        return;
    }
    do {
        T session = flushingSessions.poll(); // the same one with firstSession
        if (session == null) {
            // Just in case ... It should not happen.
            break;
        }
        // Clear the CAS flag so flush()/scheduleFlush() can enqueue it again
        session.setScheduledForFlush(false);
        SessionState state = getState(session);
        switch (state) {
        case OPENED:
            try {
                boolean flushedAll = flushNow(session, currentTime);
                // More data may have been queued while flushing : reschedule
                if (flushedAll
                        && !session.getWriteRequestQueue().isEmpty(session)
                        && !session.isScheduledForFlush()) {
                    scheduleFlush(session);
                }
            } catch (Exception e) {
                scheduleRemove(session);
                IoFilterChain filterChain = session.getFilterChain();
                filterChain.fireExceptionCaught(e);
            }
            break;
        case CLOSING:
            // Skip if the channel is already closed.
            break;
        case OPENING:
            // Retry later if session is not yet fully initialized.
            // (In case that Session.write() is called before addSession()
            // is processed)
            scheduleFlush(session);
            return;
        default:
            throw new IllegalStateException(String.valueOf(state));
        }
    } while (!flushingSessions.isEmpty());
}
/**
 * Writes as much of the session's queue as read/write fairness allows.
 *
 * @return true when the queue was drained (or the write budget was spent
 *         cleanly); false when writing must pause — kernel buffer full,
 *         partial write, budget exceeded, or an error occurred — in which
 *         case OP_WRITE interest or a re-schedule has already been set up.
 */
private boolean flushNow(T session, long currentTime) {
    if (!session.isConnected()) {
        scheduleRemove(session);
        return false;
    }
    final boolean hasFragmentation = session.getTransportMetadata()
            .hasFragmentation();
    final WriteRequestQueue writeRequestQueue = session
            .getWriteRequestQueue();
    // Set limitation for the number of written bytes for read-write
    // fairness. I used maxReadBufferSize * 3 / 2, which yields best
    // performance in my experience while not breaking fairness much.
    final int maxWrittenBytes = session.getConfig().getMaxReadBufferSize()
            + (session.getConfig().getMaxReadBufferSize() >>> 1);
    int writtenBytes = 0;
    WriteRequest req = null;
    try {
        // Clear OP_WRITE
        setInterestedInWrite(session, false);
        do {
            // Check for pending writes : resume the in-flight request first
            req = session.getCurrentWriteRequest();
            if (req == null) {
                req = writeRequestQueue.poll(session);
                if (req == null) {
                    break;
                }
                session.setCurrentWriteRequest(req);
            }
            int localWrittenBytes = 0;
            Object message = req.getMessage();
            if (message instanceof IoBuffer) {
                localWrittenBytes = writeBuffer(session, req,
                        hasFragmentation, maxWrittenBytes - writtenBytes,
                        currentTime);
                if (localWrittenBytes > 0
                        && ((IoBuffer) message).hasRemaining()) {
                    // the buffer isn't empty, we re-interest it in writing
                    writtenBytes += localWrittenBytes;
                    setInterestedInWrite(session, true);
                    return false;
                }
            } else if (message instanceof FileRegion) {
                localWrittenBytes = writeFile(session, req,
                        hasFragmentation, maxWrittenBytes - writtenBytes,
                        currentTime);
                // Fix for Java bug on Linux
                // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=5103988
                // If there's still data to be written in the FileRegion,
                // return 0 indicating that we need
                // to pause until writing may resume.
                if (localWrittenBytes > 0
                        && ((FileRegion) message).getRemainingBytes() > 0) {
                    writtenBytes += localWrittenBytes;
                    setInterestedInWrite(session, true);
                    return false;
                }
            } else {
                throw new IllegalStateException(
                        "Don't know how to handle message of type '"
                                + message.getClass().getName()
                                + "'. Are you missing a protocol encoder?");
            }
            if (localWrittenBytes == 0) {
                // Kernel buffer is full.
                setInterestedInWrite(session, true);
                return false;
            }
            writtenBytes += localWrittenBytes;
            if (writtenBytes >= maxWrittenBytes) {
                // Wrote too much : yield to reads, come back later
                scheduleFlush(session);
                return false;
            }
        } while (writtenBytes < maxWrittenBytes);
    } catch (Exception e) {
        if (req != null) {
            req.getFuture().setException(e);
        }
        IoFilterChain filterChain = session.getFilterChain();
        filterChain.fireExceptionCaught(e);
        return false;
    }
    return true;
}
/**
 * Writes (part of) an IoBuffer-backed request to the session, spinning up
 * to WRITE_SPIN_COUNT times while the kernel accepts nothing.
 *
 * Fix: the buffer is repositioned AFTER firing the MESSAGE_SENT event. The
 * event handlers must see the buffer rewound to its mark (the start of the
 * message), but the position reached by the write must be restored
 * afterwards so the caller still observes how much was written.
 *
 * @param session the session to write to
 * @param req the write request whose message is an {@link IoBuffer}
 * @param hasFragmentation true for stream transports, where a partial
 *        write is acceptable
 * @param maxLength the remaining write budget, in bytes
 * @param currentTime the current time, for throughput statistics
 * @return the number of bytes actually written (0 if the kernel buffer is full)
 * @throws Exception any exception thrown by the underlying write
 */
private int writeBuffer(T session, WriteRequest req,
        boolean hasFragmentation, int maxLength, long currentTime)
        throws Exception {
    IoBuffer buf = (IoBuffer) req.getMessage();
    int localWrittenBytes = 0;
    if (buf.hasRemaining()) {
        int length;
        if (hasFragmentation) {
            length = Math.min(buf.remaining(), maxLength);
        } else {
            length = buf.remaining();
        }
        // Spin a bounded number of times when write() returns 0
        for (int i = WRITE_SPIN_COUNT; i > 0; i--) {
            localWrittenBytes = write(session, buf, length);
            if (localWrittenBytes != 0) {
                break;
            }
        }
    }
    session.increaseWrittenBytes(localWrittenBytes, currentTime);
    if (!buf.hasRemaining() || !hasFragmentation && localWrittenBytes != 0) {
        // Buffer has been sent, clear the current request.
        // Rewind to the mark for the MESSAGE_SENT event, then reposition
        // the buffer after having fired the event.
        int pos = buf.position();
        buf.reset();
        fireMessageSent(session, req);
        // And set it back to its position
        buf.position(pos);
    }
    return localWrittenBytes;
}
/**
 * Transfers (part of) a FileRegion-backed request to the session, honoring
 * the remaining write budget on fragmenting transports, and fires
 * MESSAGE_SENT once the region is exhausted.
 *
 * @return the number of bytes actually transferred
 */
private int writeFile(T session, WriteRequest req,
        boolean hasFragmentation, int maxLength, long currentTime)
        throws Exception {
    int localWrittenBytes;
    FileRegion region = (FileRegion) req.getMessage();
    if (region.getRemainingBytes() > 0) {
        int length;
        if (hasFragmentation) {
            length = (int) Math.min(region.getRemainingBytes(), maxLength);
        } else {
            // Clamp the long byte count to the int API limit
            length = (int) Math.min(Integer.MAX_VALUE, region
                    .getRemainingBytes());
        }
        localWrittenBytes = transferFile(session, region, length);
        // Advance the region's internal position by what was sent
        region.update(localWrittenBytes);
    } else {
        localWrittenBytes = 0;
    }
    session.increaseWrittenBytes(localWrittenBytes, currentTime);
    if (region.getRemainingBytes() <= 0 || !hasFragmentation
            && localWrittenBytes != 0) {
        fireMessageSent(session, req);
    }
    return localWrittenBytes;
}
/**
 * Marks a write request as completed : detaches it from the session and
 * fires the MESSAGE_SENT event up the filter chain.
 */
private void fireMessageSent(T session, WriteRequest req) {
    session.setCurrentWriteRequest(null);
    session.getFilterChain().fireMessageSent(req);
}
/**
 * Update the trafficControl for all the queued sessions. The queue size is
 * snapshotted up front so that OPENING sessions, which are pushed back to
 * the end of the queue, are retried on the NEXT select pass rather than
 * spinning forever in this one.
 */
private void updateTrafficMask() {
    int queueSize = trafficControllingSessions.size();
    while (queueSize > 0) {
        T session = trafficControllingSessions.poll();
        if (session == null) {
            // We are done with this queue.
            return;
        }
        SessionState state = getState(session);
        switch (state) {
        case OPENED:
            updateTrafficControl(session);
            break;
        case CLOSING:
            break;
        case OPENING:
            // Retry later if session is not yet fully initialized.
            // (In case that Session.suspend??() or session.resume??() is
            // called before addSession() is processed)
            // We just put back the session at the end of the queue.
            trafficControllingSessions.add(session);
            break;
        default:
            throw new IllegalStateException(String.valueOf(state));
        }
        // As we have handled one session, decrement the number of
        // remaining sessions. The OPENING session will be processed
        // with the next select(), as the queue size has been decreased, even
        // if the session has been pushed at the end of the queue
        queueSize--;
    }
}
/**
 * {@inheritDoc}
 *
 * Re-arms the read and write interest ops of a single session according to
 * its suspend flags; each failure is reported to the session's filter
 * chain independently, so one failing op does not prevent the other.
 */
public void updateTrafficControl(T session) {
    // Read interest simply mirrors the suspend flag
    try {
        setInterestedInRead(session, !session.isReadSuspended());
    } catch (Exception e) {
        session.getFilterChain().fireExceptionCaught(e);
    }
    // Write interest is only useful when writes are not suspended AND
    // there is actually something left to write
    try {
        boolean interested = !session.getWriteRequestQueue().isEmpty(session)
                && !session.isWriteSuspended();
        setInterestedInWrite(session, interested);
    } catch (Exception e) {
        session.getFilterChain().fireExceptionCaught(e);
    }
}
/**
 * The main loop. This is the place in charge to poll the Selector, and to
 * process the active sessions. Each iteration, in order :
 * - selects with a timeout (so idle sessions can be managed)
 * - registers the newly created sessions
 * - applies pending traffic-control updates
 * - processes the read/write-ready sessions
 * - flushes the pending writes
 * - destroys the sessions scheduled for removal
 * - fires idle events
 * The loop exits (and clears the {@code processor} field) when no session
 * is left, and runs dispose0() on the way out when disposal was requested.
 */
private class Processor implements Runnable {
    public void run() {
        int nSessions = 0;
        lastIdleCheckTime = System.currentTimeMillis();
        for (;;) {
            try {
                // This select has a timeout so that we can manage
                // idle session when we get out of the select every
                // second. (note : this is a hack to avoid creating
                // a dedicated thread).
                long t0 = System.currentTimeMillis();
                int selected = select(SELECT_TIMEOUT);
                long t1 = System.currentTimeMillis();
                long delta = (t1 - t0);
                // A select() that returned instantly with nothing selected
                // and no wakeup() call is abnormal : broken connection or
                // the JDK epoll-spinning bug.
                if ((selected == 0) && !wakeupCalled.get() && (delta < 100)) {
                    // Last chance : the select() may have been
                    // interrupted because we have had an closed channel.
                    if (isBrokenConnection()) {
                        LOG.warn("Broken connection");
                        // we can reselect immediately
                        // set back the flag to false
                        wakeupCalled.getAndSet(false);
                        continue;
                    } else {
                        LOG.warn("Create a new selector. Selected is 0, delta = "
                                + (t1 - t0));
                        // Ok, we are hit by the nasty epoll
                        // spinning.
                        // Basically, there is a race condition
                        // which causes a closing file descriptor not to be
                        // considered as available as a selected channel, but
                        // it stopped the select. The next time we will
                        // call select(), it will exit immediately for the same
                        // reason, and do so forever, consuming 100%
                        // CPU.
                        // We have to destroy the selector, and
                        // register all the socket on a new one.
                        registerNewSelector();
                    }
                    // Set back the flag to false
                    wakeupCalled.getAndSet(false);
                    // and continue the loop
                    continue;
                }
                // Manage newly created session first
                nSessions += handleNewSessions();
                updateTrafficMask();
                // Now, if we have had some incoming or outgoing events,
                // deal with them
                if (selected > 0) {
                    //LOG.debug("Processing ..."); // This log hurts one of the MDCFilter test...
                    process();
                }
                // Write the pending requests
                long currentTime = System.currentTimeMillis();
                flush(currentTime);
                // And manage removed sessions
                nSessions -= removeSessions();
                // Last, not least, send Idle events to the idle sessions
                notifyIdleSessions(currentTime);
                // Get a chance to exit the infinite loop if there are no
                // more sessions on this Processor
                if (nSessions == 0) {
                    // Clearing 'processor' under the lock lets
                    // startupProcessor() safely start a fresh one later
                    synchronized (lock) {
                        if (newSessions.isEmpty() && isSelectorEmpty()) {
                            processor = null;
                            break;
                        }
                    }
                }
                // Disconnect all sessions immediately if disposal has been
                // requested so that we exit this loop eventually.
                if (isDisposing()) {
                    for (Iterator<T> i = allSessions(); i.hasNext();) {
                        scheduleRemove(i.next());
                    }
                    wakeup();
                }
            } catch (Throwable t) {
                // Never let an exception kill the processor thread; back
                // off briefly so a persistent failure does not spin
                ExceptionMonitor.getInstance().exceptionCaught(t);
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e1) {
                    ExceptionMonitor.getInstance().exceptionCaught(e1);
                }
            }
        }
        try {
            synchronized (disposalLock) {
                if (isDisposing()) {
                    dispose0();
                }
            }
        } catch (Throwable t) {
            ExceptionMonitor.getInstance().exceptionCaught(t);
        } finally {
            // Unblock any thread waiting in dispose()
            disposalFuture.setValue(true);
        }
    }
}
}
|
Reposition the buffer after having called the MessageSent event
git-svn-id: b7022df5c975f24f6cce374a8cf09e1bba3b7a2e@910779 13f79535-47bb-0310-9956-ffa450edef68
|
core/src/main/java/org/apache/mina/core/polling/AbstractPollingIoProcessor.java
|
Reposition the buffer after having called the MessageSent event
|
|
Java
|
apache-2.0
|
b0358c77d88a924a331481de196a5a7452f2c6cd
| 0
|
anchela/jackrabbit-oak,mreutegg/jackrabbit-oak,apache/jackrabbit-oak,amit-jain/jackrabbit-oak,trekawek/jackrabbit-oak,anchela/jackrabbit-oak,mreutegg/jackrabbit-oak,trekawek/jackrabbit-oak,apache/jackrabbit-oak,mreutegg/jackrabbit-oak,trekawek/jackrabbit-oak,apache/jackrabbit-oak,anchela/jackrabbit-oak,trekawek/jackrabbit-oak,amit-jain/jackrabbit-oak,amit-jain/jackrabbit-oak,apache/jackrabbit-oak,trekawek/jackrabbit-oak,anchela/jackrabbit-oak,amit-jain/jackrabbit-oak,mreutegg/jackrabbit-oak,amit-jain/jackrabbit-oak,mreutegg/jackrabbit-oak,anchela/jackrabbit-oak,apache/jackrabbit-oak
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.document.rdb;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.apache.jackrabbit.oak.plugins.document.UpdateUtils.checkConditions;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.locks.Lock;
import java.util.zip.Deflater;
import java.util.zip.GZIPOutputStream;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.sql.DataSource;
import org.apache.jackrabbit.oak.cache.CacheStats;
import org.apache.jackrabbit.oak.cache.CacheValue;
import org.apache.jackrabbit.oak.plugins.document.Collection;
import org.apache.jackrabbit.oak.plugins.document.Document;
import org.apache.jackrabbit.oak.plugins.document.DocumentMK;
import org.apache.jackrabbit.oak.plugins.document.DocumentStore;
import org.apache.jackrabbit.oak.plugins.document.DocumentStoreException;
import org.apache.jackrabbit.oak.plugins.document.NodeDocument;
import org.apache.jackrabbit.oak.plugins.document.Revision;
import org.apache.jackrabbit.oak.plugins.document.StableRevisionComparator;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Condition;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Operation;
import org.apache.jackrabbit.oak.plugins.document.UpdateUtils;
import org.apache.jackrabbit.oak.plugins.document.cache.CacheInvalidationStats;
import org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore;
import org.apache.jackrabbit.oak.plugins.document.util.StringValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Objects;
import com.google.common.cache.Cache;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.Striped;
/**
* Implementation of {@link DocumentStore} for relational databases.
*
* <h3>Supported Databases</h3>
* <p>
* The code is supposed to be sufficiently generic to run with a variety of
* database implementations. However, the tables are created when required to
* simplify testing, and <em>that</em> code specifically supports these
* databases:
* <ul>
* <li>h2</li>
* <li>IBM DB2</li>
* <li>Postgres</li>
* <li>MariaDB (MySQL) (experimental)</li>
* <li>Oracle (experimental)</li>
* </ul>
*
* <h3>Table Layout</h3>
* <p>
* Data for each of the DocumentStore's {@link Collection}s is stored in its own
* database table (with a name matching the collection).
* <p>
* The tables essentially implement key/value storage, where the key usually is
* derived from an Oak path, and the value is a serialization of a
* {@link Document} (or a part of one). Additional fields are used for queries,
* debugging, and concurrency control:
* <table style="text-align: left;">
* <thead>
* <tr>
* <th>Column</th>
* <th>Type</th>
* <th>Description</th>
* </tr>
* </thead> <tbody>
* <tr>
* <th>ID</th>
* <td>varchar(512) not null primary key</td>
* <td>the document's key (for databases that can not handle 512 character
* primary keys, such as MySQL, varbinary is possible as well; note that this
* currently needs to be hardcoded)</td>
* </tr>
* <tr>
* <th>MODIFIED</th>
* <td>bigint</td>
* <td>low-resolution timestamp
* </tr>
* <tr>
* <th>HASBINARY</th>
* <td>smallint</td>
* <td>flag indicating whether the document has binary properties
* </tr>
* <tr>
* <th>DELETEDONCE</th>
* <td>smallint</td>
* <td>flag indicating whether the document has been deleted once
* </tr>
* <tr>
* <th>MODCOUNT</th>
* <td>bigint</td>
* <td>modification counter, used for avoiding overlapping updates</td>
* </tr>
* <tr>
* <th>DSIZE</th>
* <td>bigint</td>
* <td>the approximate size of the document's JSON serialization (for debugging
* purposes)</td>
* </tr>
* <tr>
* <th>DATA</th>
* <td>varchar(16384)</td>
* <td>the document's JSON serialization (only used for small document sizes, in
* which case BDATA (below) is not set), or a sequence of JSON serialized update
* operations to be applied against the last full serialization</td>
* </tr>
* <tr>
* <th>BDATA</th>
* <td>blob</td>
* <td>the document's JSON serialization (usually GZIPped, only used for "large"
* documents)</td>
* </tr>
* </tbody>
* </table>
* <p>
* The names of database tables can be prefixed; the purpose is mainly for
* testing, as tables can also be dropped automatically when the store is
* disposed (this only happens for those tables that have been created on
* demand)
* <p>
* <em>Note that the database needs to be created/configured to support all Unicode
* characters in text fields, and to collate by Unicode code point (in DB2: "collate using identity",
* in Postgres: "C").
* THIS IS NOT THE DEFAULT!</em>
* <p>
 * <em>For MySQL, the database parameter "max_allowed_packet" needs to be increased to support ~16M blobs.</em>
*
* <h3>Caching</h3>
* <p>
* The cache borrows heavily from the {@link MongoDocumentStore} implementation;
* however it does not support the off-heap mechanism yet.
*
* <h3>Queries</h3>
* <p>
* The implementation currently supports only three indexed properties:
* "_bin", "deletedOnce", and "_modified". Attempts to use a different indexed property will
* cause a {@link DocumentStoreException}.
*/
public class RDBDocumentStore implements DocumentStore {
/**
 * Creates a {@linkplain RDBDocumentStore} instance using the provided
 * {@link DataSource}, {@link DocumentMK.Builder}, and {@link RDBOptions}.
 *
 * @throws DocumentStoreException wrapping any failure during initialization
 *             (connection setup, version check, table creation)
 */
public RDBDocumentStore(DataSource ds, DocumentMK.Builder builder, RDBOptions options) {
    try {
        initialize(ds, builder, options);
    } catch (Exception ex) {
        throw new DocumentStoreException("initializing RDB document store", ex);
    }
}
/**
 * Creates a {@linkplain RDBDocumentStore} instance using the provided
 * {@link DataSource}, {@link DocumentMK.Builder}, and default
 * {@link RDBOptions}.
 */
public RDBDocumentStore(DataSource ds, DocumentMK.Builder builder) {
    this(ds, builder, new RDBOptions());
}
/**
 * Reads a document, accepting an arbitrarily old cached copy (delegates to
 * {@link #find(Collection, String, int)} with {@link Integer#MAX_VALUE}).
 */
@Override
public <T extends Document> T find(Collection<T> collection, String id) {
    return find(collection, id, Integer.MAX_VALUE);
}
/**
 * Reads a document, consulting the cache subject to {@code maxCacheAge}
 * (milliseconds; 0 forces a fresh read).
 */
@Override
public <T extends Document> T find(final Collection<T> collection, final String id, int maxCacheAge) {
    return readDocumentCached(collection, id, maxCacheAge);
}
/**
 * Queries documents in the key range without an indexed-property condition
 * (delegates to the six-argument variant with a {@code null} property).
 */
@Nonnull
@Override
public <T extends Document> List<T> query(Collection<T> collection, String fromKey, String toKey, int limit) {
    return query(collection, fromKey, toKey, null, 0, limit);
}
/**
 * Queries documents in the key range, optionally constrained by an indexed
 * property (see the class comment for the supported properties).
 */
@Nonnull
@Override
public <T extends Document> List<T> query(Collection<T> collection, String fromKey, String toKey, String indexedProperty,
        long startValue, int limit) {
    return internalQuery(collection, fromKey, toKey, indexedProperty, startValue, limit);
}
/**
 * Removes a single document: deletes the database row first, then evicts
 * the cache entry.
 */
@Override
public <T extends Document> void remove(Collection<T> collection, String id) {
    delete(collection, id);
    invalidateCache(collection, id, true);
}
/**
 * Removes the given documents: deletes the database rows first, then evicts
 * the cache entries.
 * <p>
 * The delete is performed <em>before</em> cache invalidation (previously the
 * order was reversed), matching {@link #remove(Collection, String)} and
 * {@link #remove(Collection, Map)} and closing the window in which a
 * concurrent read could re-populate the cache with a document that is about
 * to be deleted.
 */
@Override
public <T extends Document> void remove(Collection<T> collection, List<String> ids) {
    delete(collection, ids);
    for (String id : ids) {
        invalidateCache(collection, id, true);
    }
}
/**
 * Conditionally removes documents: deletes matching database rows, then
 * evicts the cache entries for every candidate id (whether or not its row
 * was actually deleted).
 *
 * @return the number of rows deleted
 */
@Override
public <T extends Document> int remove(Collection<T> collection,
        Map<String, Map<Key, Condition>> toRemove) {
    int num = delete(collection, toRemove);
    for (String id : toRemove.keySet()) {
        invalidateCache(collection, id, true);
    }
    return num;
}
/** Creates new documents from the given update operations; see {@code internalCreate}. */
@Override
public <T extends Document> boolean create(Collection<T> collection, List<UpdateOp> updateOps) {
    return internalCreate(collection, updateOps);
}
/** Applies one update operation to all given keys; see {@code internalUpdate}. */
@Override
public <T extends Document> void update(Collection<T> collection, List<String> keys, UpdateOp updateOp) {
    internalUpdate(collection, keys, updateOp);
}
/** Creates or updates a document (create allowed, conditions not checked). */
@Override
public <T extends Document> T createOrUpdate(Collection<T> collection, UpdateOp update) {
    return internalCreateOrUpdate(collection, update, true, false);
}
/** Updates an existing document only (no create), honoring the update's conditions. */
@Override
public <T extends Document> T findAndUpdate(Collection<T> collection, UpdateOp update) {
    return internalCreateOrUpdate(collection, update, false, true);
}
/**
 * Marks every cached node document as stale (last-check time 0) so the next
 * read revalidates against the database. Always returns {@code null}: no
 * invalidation statistics are collected by this implementation.
 */
@Override
public CacheInvalidationStats invalidateCache() {
    for (NodeDocument nd : nodesCache.asMap().values()) {
        nd.markUpToDate(0);
    }
    return null;
}
/** Marks the cached entry for {@code id} stale (does not evict it). */
@Override
public <T extends Document> void invalidateCache(Collection<T> collection, String id) {
    invalidateCache(collection, id, false);
}
/**
 * Invalidates a cache entry; only the NODES collection is cached, other
 * collections are a no-op.
 *
 * @param remove {@code true} to evict the entry, {@code false} to merely mark it stale
 */
private <T extends Document> void invalidateCache(Collection<T> collection, String id, boolean remove) {
    if (collection == Collection.NODES) {
        invalidateNodesCache(id, remove);
    }
}
/**
 * Invalidates the nodes-cache entry for {@code id} while holding the
 * per-key lock: either evicts the entry ({@code remove == true}) or marks
 * it stale so the next read revalidates against the database.
 */
private void invalidateNodesCache(String id, boolean remove) {
    Lock lock = getAndLock(id);
    StringValue cacheKey = new StringValue(id);
    try {
        if (!remove) {
            // keep the entry but force revalidation on next access
            NodeDocument cached = nodesCache.getIfPresent(cacheKey);
            if (cached != null) {
                cached.markUpToDate(0);
            }
        } else {
            nodesCache.invalidate(cacheKey);
        }
    } finally {
        lock.unlock();
    }
}
// used for diagnostics
private String droppedTables = "";
/** @return space-separated names of the tables dropped by {@code dispose()} (diagnostics) */
public String getDroppedTables() {
    return this.droppedTables;
}
// table names
private static Map<Object, String> TABLEMAP;
private static List<String> TABLENAMES;
static {
    // fixed mapping from collection to (unprefixed) table name
    Map<Object, String> tmp = new HashMap<Object, String>();
    tmp.put(Collection.CLUSTER_NODES, "CLUSTERNODES");
    tmp.put(Collection.JOURNAL, "JOURNAL");
    tmp.put(Collection.NODES, "NODES");
    tmp.put(Collection.SETTINGS, "SETTINGS");
    TABLEMAP = Collections.unmodifiableMap(tmp);
    List<String> tl = new ArrayList<String>(TABLEMAP.values());
    Collections.sort(tl);
    TABLENAMES = Collections.unmodifiableList(tl);
}
/** @return the (unprefixed) table names used by this store, sorted alphabetically */
public static List<String> getTableNames() {
    return TABLENAMES;
}
/**
 * Disposes the store: drops the tables that were created by this instance
 * (when {@link RDBOptions#isDropTablesOnClose()} was set), recording their
 * names for {@link #getDroppedTables()}, and closes the connection handler.
 * Drop failures are logged at DEBUG level and otherwise ignored (best
 * effort).
 */
@Override
public void dispose() {
    if (!this.tablesToBeDropped.isEmpty()) {
        // accumulate names of successfully dropped tables (StringBuilder
        // instead of repeated String concatenation in the loop)
        StringBuilder dropped = new StringBuilder();
        LOG.debug("attempting to drop: " + this.tablesToBeDropped);
        for (String tname : this.tablesToBeDropped) {
            Connection con = null;
            try {
                con = this.ch.getRWConnection();
                Statement stmt = null;
                try {
                    stmt = con.createStatement();
                    stmt.execute("drop table " + tname);
                    stmt.close();
                    con.commit();
                    dropped.append(tname).append(' ');
                } catch (SQLException ex) {
                    LOG.debug("attempting to drop: " + tname, ex);
                } finally {
                    this.ch.closeStatement(stmt);
                }
            } catch (SQLException ex) {
                LOG.debug("attempting to drop: " + tname, ex);
            } finally {
                this.ch.closeConnection(con);
            }
        }
        this.droppedTables = dropped.toString().trim();
    }
    try {
        this.ch.close();
    } catch (IOException ex) {
        LOG.error("closing connection handler", ex);
    }
}
/**
 * Returns the cached copy of a document without touching the database.
 * Only the NODES collection is cached; for any other collection (or on a
 * cache miss) {@code null} is returned.
 */
@Override
public <T extends Document> T getIfCached(Collection<T> collection, String id) {
    if (collection == Collection.NODES) {
        return castAsT(nodesCache.getIfPresent(new StringValue(id)));
    }
    return null;
}
/** @return statistics of the node-document cache */
@Override
public CacheStats getCacheStats() {
    return this.cacheStats;
}
/** @return store metadata ("type", "db", "version"), populated during initialization */
@Override
public Map<String, String> getMetadata() {
    return metadata;
}
// implementation
/** SQL syntax variants for limiting the number of rows returned by a query. */
enum FETCHFIRSTSYNTAX { FETCHFIRST, LIMIT, TOP};
/**
 * Logs an informational message when the database version reported by the
 * metadata is older than the expected minimum ({@code xmaj}.{@code xmin});
 * never fails.
 */
private static void versionCheck(DatabaseMetaData md, int xmaj, int xmin, String description) throws SQLException {
    int actualMajor = md.getDatabaseMajorVersion();
    int actualMinor = md.getDatabaseMinorVersion();
    boolean belowMinimum = actualMajor < xmaj || (actualMajor == xmaj && actualMinor < xmin);
    if (belowMinimum) {
        LOG.info("Unsupported " + description + " version: " + actualMajor + "." + actualMinor + ", expected at least " + xmaj + "." + xmin);
    }
}
/**
 * Defines variation in the capabilities of different RDBs.
 * <p>
 * Each constant may override the default SQL dialect methods below
 * (version check, table creation DDL, LIMIT/FETCH FIRST syntax, string
 * concatenation, and collation diagnostics).
 */
protected enum DB {
    // fallback used when the database product is not recognized
    DEFAULT("default") {
    },
    H2("H2") {
        @Override
        public void checkVersion(DatabaseMetaData md) throws SQLException {
            versionCheck(md, 1, 4, description);
        }
    },
    DERBY("Apache Derby") {
        @Override
        public void checkVersion(DatabaseMetaData md) throws SQLException {
            versionCheck(md, 10, 11, description);
        }
        // NOTE(review): overrides DB.allowsCaseInSelect() but lacks @Override
        public boolean allowsCaseInSelect() {
            return false;
        }
    },
    POSTGRES("PostgreSQL") {
        @Override
        public void checkVersion(DatabaseMetaData md) throws SQLException {
            versionCheck(md, 9, 3, description);
        }
        @Override
        public String getTableCreationStatement(String tableName) {
            // uses "bytea" instead of the generic "blob" column type
            return ("create table " + tableName + " (ID varchar(512) not null primary key, MODIFIED bigint, HASBINARY smallint, DELETEDONCE smallint, MODCOUNT bigint, CMODCOUNT bigint, DSIZE bigint, DATA varchar(16384), BDATA bytea)");
        }
        @Override
        public String getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) {
            // reports the database's encoding and collation (see class
            // comment: Postgres should collate with "C")
            Connection con = null;
            PreparedStatement stmt = null;
            ResultSet rs = null;
            Map<String, String> result = new HashMap<String, String>();
            try {
                con = ch.getROConnection();
                String cat = con.getCatalog();
                stmt = con.prepareStatement("SELECT pg_encoding_to_char(encoding), datcollate FROM pg_database WHERE datname=?");
                stmt.setString(1, cat);
                rs = stmt.executeQuery();
                while (rs.next()) {
                    result.put("pg_encoding_to_char(encoding)", rs.getString(1));
                    result.put("datcollate", rs.getString(2));
                }
                stmt.close();
                con.commit();
            } catch (SQLException ex) {
                LOG.debug("while getting diagnostics", ex);
            } finally {
                ch.closeResultSet(rs);
                ch.closeStatement(stmt);
                ch.closeConnection(con);
            }
            return result.toString();
        }
    },
    DB2("DB2") {
        @Override
        public void checkVersion(DatabaseMetaData md) throws SQLException {
            versionCheck(md, 10, 1, description);
        }
        @Override
        public String getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) {
            Connection con = null;
            PreparedStatement stmt = null;
            ResultSet rs = null;
            Map<String, String> result = new HashMap<String, String>();
            try {
                con = ch.getROConnection();
                // we can't look up by schema as con.getSchema is JDK 1.7
                stmt = con.prepareStatement("SELECT CODEPAGE, COLLATIONSCHEMA, COLLATIONNAME, TABSCHEMA FROM SYSCAT.COLUMNS WHERE COLNAME=? and COLNO=0 AND UPPER(TABNAME)=UPPER(?)");
                stmt.setString(1, "ID");
                stmt.setString(2, tableName);
                rs = stmt.executeQuery();
                while (rs.next() && result.size() < 20) {
                    // thus including the schema name here
                    String schema = rs.getString("TABSCHEMA").trim();
                    result.put(schema + ".CODEPAGE", rs.getString("CODEPAGE").trim());
                    result.put(schema + ".COLLATIONSCHEMA", rs.getString("COLLATIONSCHEMA").trim());
                    result.put(schema + ".COLLATIONNAME", rs.getString("COLLATIONNAME").trim());
                }
                stmt.close();
                con.commit();
            } catch (SQLException ex) {
                LOG.debug("while getting diagnostics", ex);
            } finally {
                ch.closeResultSet(rs);
                ch.closeStatement(stmt);
                ch.closeConnection(con);
            }
            return result.toString();
        }
    },
    ORACLE("Oracle") {
        @Override
        public void checkVersion(DatabaseMetaData md) throws SQLException {
            versionCheck(md, 12, 1, description);
        }
        @Override
        public String getInitializationStatement() {
            // see https://issues.apache.org/jira/browse/OAK-1914
            // for some reason, the default for NLS_SORT is incorrect
            return ("ALTER SESSION SET NLS_SORT='BINARY'");
        }
        @Override
        public String getTableCreationStatement(String tableName) {
            // see https://issues.apache.org/jira/browse/OAK-1914
            return ("create table " + tableName + " (ID varchar(512) not null primary key, MODIFIED number, HASBINARY number, DELETEDONCE number, MODCOUNT number, CMODCOUNT number, DSIZE number, DATA varchar(4000), BDATA blob)");
        }
        @Override
        public String getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) {
            Connection con = null;
            Statement stmt = null;
            ResultSet rs = null;
            Map<String, String> result = new HashMap<String, String>();
            try {
                con = ch.getROConnection();
                stmt = con.createStatement();
                rs = stmt.executeQuery("SELECT PARAMETER, VALUE from NLS_DATABASE_PARAMETERS WHERE PARAMETER IN ('NLS_COMP', 'NLS_CHARACTERSET')");
                while (rs.next()) {
                    result.put(rs.getString(1), rs.getString(2));
                }
                stmt.close();
                con.commit();
            } catch (SQLException ex) {
                LOG.debug("while getting diagnostics", ex);
            } finally {
                ch.closeResultSet(rs);
                ch.closeStatement(stmt);
                ch.closeConnection(con);
            }
            return result.toString();
        }
    },
    MYSQL("MySQL") {
        @Override
        public void checkVersion(DatabaseMetaData md) throws SQLException {
            versionCheck(md, 5, 5, description);
        }
        @Override
        public boolean isPrimaryColumnByteEncoded() {
            // TODO: we should dynamically detect this
            return true;
        }
        @Override
        public String getTableCreationStatement(String tableName) {
            // see https://issues.apache.org/jira/browse/OAK-1913
            return ("create table " + tableName + " (ID varbinary(512) not null primary key, MODIFIED bigint, HASBINARY smallint, DELETEDONCE smallint, MODCOUNT bigint, CMODCOUNT bigint, DSIZE bigint, DATA varchar(16000), BDATA longblob)");
        }
        @Override
        public FETCHFIRSTSYNTAX getFetchFirstSyntax() {
            return FETCHFIRSTSYNTAX.LIMIT;
        }
        @Override
        public String getConcatQueryString(int dataOctetLimit, int dataLength) {
            return "CONCAT(DATA, ?)";
        }
        @Override
        public String getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) {
            Connection con = null;
            PreparedStatement stmt = null;
            ResultSet rs = null;
            Map<String, String> result = new HashMap<String, String>();
            try {
                con = ch.getROConnection();
                stmt = con.prepareStatement("SHOW TABLE STATUS LIKE ?");
                stmt.setString(1, tableName);
                rs = stmt.executeQuery();
                while (rs.next()) {
                    result.put("collation", rs.getString("Collation"));
                }
                stmt.close();
                con.commit();
            } catch (SQLException ex) {
                LOG.debug("while getting diagnostics", ex);
            } finally {
                ch.closeResultSet(rs);
                ch.closeStatement(stmt);
                ch.closeConnection(con);
            }
            return result.toString();
        }
    },
    MSSQL("Microsoft SQL Server") {
        @Override
        public void checkVersion(DatabaseMetaData md) throws SQLException {
            versionCheck(md, 11, 0, description);
        }
        @Override
        public boolean isPrimaryColumnByteEncoded() {
            // TODO: we should dynamically detect this
            return true;
        }
        @Override
        public String getTableCreationStatement(String tableName) {
            // see https://issues.apache.org/jira/browse/OAK-2395
            return ("create table " + tableName + " (ID varbinary(512) not null primary key, MODIFIED bigint, HASBINARY smallint, DELETEDONCE smallint, MODCOUNT bigint, CMODCOUNT bigint, DSIZE bigint, DATA nvarchar(4000), BDATA varbinary(max))");
        }
        @Override
        public FETCHFIRSTSYNTAX getFetchFirstSyntax() {
            return FETCHFIRSTSYNTAX.TOP;
        }
        @Override
        public String getConcatQueryString(int dataOctetLimit, int dataLength) {
            /*
             * To avoid truncation when concatenating force an error when
             * limit is above the octet limit
             */
            return "CASE WHEN LEN(DATA) <= " + (dataOctetLimit - dataLength) + " THEN (DATA + CAST(? AS nvarchar("
                    + dataOctetLimit + "))) ELSE (DATA + CAST(DATA AS nvarchar(max))) END";
        }
        @Override
        public String getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) {
            Connection con = null;
            PreparedStatement stmt = null;
            ResultSet rs = null;
            Map<String, String> result = new HashMap<String, String>();
            try {
                con = ch.getROConnection();
                String cat = con.getCatalog();
                stmt = con.prepareStatement("SELECT collation_name FROM sys.databases WHERE name=?");
                stmt.setString(1, cat);
                rs = stmt.executeQuery();
                while (rs.next()) {
                    result.put("collation_name", rs.getString(1));
                }
                stmt.close();
                con.commit();
            } catch (SQLException ex) {
                LOG.debug("while getting diagnostics", ex);
            } finally {
                ch.closeResultSet(rs);
                ch.closeStatement(stmt);
                ch.closeConnection(con);
            }
            return result.toString();
        }
    };
    /**
     * Check the database brand and version
     */
    public void checkVersion(DatabaseMetaData md) throws SQLException {
        LOG.info("Unknown database type: " + md.getDatabaseProductName());
    }
    /**
     * If the primary column is encoded in bytes.
     * Default false
     * @return boolean
     */
    public boolean isPrimaryColumnByteEncoded() {
        return false;
    }
    /**
     * Allows case in select. Default true.
     */
    public boolean allowsCaseInSelect() {
        return true;
    }
    /**
     * Query syntax for "FETCH FIRST"
     */
    public FETCHFIRSTSYNTAX getFetchFirstSyntax() {
        return FETCHFIRSTSYNTAX.FETCHFIRST;
    }
    /**
     * Returns the CONCAT function or its equivalent function or sub-query.
     * Note that the function MUST NOT cause a truncated value to be
     * written!
     *
     * @param dataOctetLimit
     *            expected capacity of data column
     * @param dataLength
     *            length of string to be inserted
     *
     * @return the concat query string
     */
    public String getConcatQueryString(int dataOctetLimit, int dataLength) {
        return "DATA || CAST(? AS varchar(" + dataOctetLimit + "))";
    }
    /**
     * Query for any required initialization of the DB.
     *
     * @return the DB initialization SQL string
     */
    public @Nonnull String getInitializationStatement() {
        return "";
    }
    /**
     * Table creation statement string
     *
     * @param tableName
     * @return the table creation string
     */
    public String getTableCreationStatement(String tableName) {
        return "create table "
                + tableName
                + " (ID varchar(512) not null primary key, MODIFIED bigint, HASBINARY smallint, DELETEDONCE smallint, MODCOUNT bigint, CMODCOUNT bigint, DSIZE bigint, DATA varchar(16384), BDATA blob("
                + 1024 * 1024 * 1024 + "))";
    }
    /** Dialect-specific collation/encoding diagnostics; empty by default. */
    public String getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) {
        return "";
    }
    // human-readable product name; non-final because getValue() rewrites
    // it for DEFAULT (see below)
    protected String description;
    private DB(String description) {
        this.description = description;
    }
    @Override
    public String toString() {
        return this.description;
    }
    /**
     * Maps a JDBC database product name to the matching constant, falling
     * back to {@link #DEFAULT}.
     * NOTE(review): mutates the static DEFAULT constant's description on
     * fallback — not thread-safe and affects all later callers; confirm
     * this is intentional.
     */
    @Nonnull
    public static DB getValue(String desc) {
        for (DB db : DB.values()) {
            if (db.description.equals(desc)) {
                return db;
            } else if (db == DB2 && desc.startsWith("DB2/")) {
                return db;
            }
        }
        LOG.error("DB type " + desc + " unknown, trying default settings");
        DEFAULT.description = desc + " - using default settings";
        return DEFAULT;
    }
}
private static final String MODIFIED = "_modified";
private static final String MODCOUNT = "_modCount";
/**
 * Optional counter for changes to "_collisions" map ({@link NodeDocument#COLLISIONS}).
 */
private static final String COLLISIONSMODCOUNT = "_collisionsModCount";
private static final String ID = "_id";
private static final Logger LOG = LoggerFactory.getLogger(RDBDocumentStore.class);
private final Comparator<Revision> comparator = StableRevisionComparator.REVERSE;
// creation call stack, captured only when DEBUG logging is enabled
// (reported by finalize() when the store was never disposed)
private Exception callStack;
private RDBConnectionHandler ch;
// from options
private Set<String> tablesToBeDropped = new HashSet<String>();
// table names
private String tnNodes, tnClusterNodes, tnSettings, tnJournal;
// ratio between Java characters and UTF-8 encoding
// a) single characters will fit into 3 bytes
// b) a surrogate pair (two Java characters) will fit into 4 bytes
// thus...
private static final int CHAR2OCTETRATIO = 3;
// capacity of DATA column (default; measured at startup from the actual table)
private int dataLimitInOctets = 16384;
// number of retries for updates
private static final int RETRIES = 10;
// see OAK-2044
protected static final boolean USECMODCOUNT = true;
private static final Key MODIFIEDKEY = new Key(MODIFIED, null);
// DB-specific information
private DB db;
private Map<String, String> metadata;
// set of supported indexed properties
private static final Set<String> INDEXEDPROPERTIES = new HashSet<String>(Arrays.asList(new String[] { MODIFIED,
        NodeDocument.HAS_BINARY_FLAG, NodeDocument.DELETED_ONCE }));
// set of properties not serialized to JSON
private static final Set<String> COLUMNPROPERTIES = new HashSet<String>(Arrays.asList(new String[] { ID,
        NodeDocument.HAS_BINARY_FLAG, NodeDocument.DELETED_ONCE, COLLISIONSMODCOUNT, MODIFIED, MODCOUNT }));
private final RDBDocumentSerializer SR = new RDBDocumentSerializer(this, COLUMNPROPERTIES);
/**
 * One-time startup: resolves (possibly prefixed) table names, builds the
 * cache, inspects the JDBC metadata to pick the {@link DB} dialect, runs
 * any dialect initialization statement, and creates missing tables.
 */
private void initialize(DataSource ds, DocumentMK.Builder builder, RDBOptions options) throws Exception {
    this.tnNodes = RDBJDBCTools.createTableName(options.getTablePrefix(), TABLEMAP.get(Collection.NODES));
    this.tnClusterNodes = RDBJDBCTools.createTableName(options.getTablePrefix(), TABLEMAP.get(Collection.CLUSTER_NODES));
    this.tnSettings = RDBJDBCTools.createTableName(options.getTablePrefix(), TABLEMAP.get(Collection.SETTINGS));
    this.tnJournal = RDBJDBCTools.createTableName(options.getTablePrefix(), TABLEMAP.get(Collection.JOURNAL));
    this.ch = new RDBConnectionHandler(ds);
    // capture the creation call stack only when it might be reported later
    this.callStack = LOG.isDebugEnabled() ? new Exception("call stack of RDBDocumentStore creation") : null;
    this.nodesCache = builder.buildDocumentCache(this);
    this.cacheStats = new CacheStats(nodesCache, "Document-Documents", builder.getWeigher(), builder.getDocumentCacheSize());
    Connection con = this.ch.getRWConnection();
    // warn when the isolation level differs from the expected READ_COMMITTED
    int isolation = con.getTransactionIsolation();
    String isolationDiags = RDBJDBCTools.isolationLevelToString(isolation);
    if (isolation != Connection.TRANSACTION_READ_COMMITTED) {
        LOG.info("Detected transaction isolation level " + isolationDiags + " is "
                + (isolation < Connection.TRANSACTION_READ_COMMITTED ? "lower" : "higher") + " than expected "
                + RDBJDBCTools.isolationLevelToString(Connection.TRANSACTION_READ_COMMITTED)
                + " - check datasource configuration");
    }
    DatabaseMetaData md = con.getMetaData();
    String dbDesc = String.format("%s %s (%d.%d)", md.getDatabaseProductName(), md.getDatabaseProductVersion(),
            md.getDatabaseMajorVersion(), md.getDatabaseMinorVersion());
    String driverDesc = String.format("%s %s (%d.%d)", md.getDriverName(), md.getDriverVersion(), md.getDriverMajorVersion(),
            md.getDriverMinorVersion());
    String dbUrl = md.getURL();
    // select the dialect based on the reported product name
    this.db = DB.getValue(md.getDatabaseProductName());
    this.metadata = ImmutableMap.<String,String>builder()
            .put("type", "rdb")
            .put("db", md.getDatabaseProductName())
            .put("version", md.getDatabaseProductVersion())
            .build();
    db.checkVersion(md);
    // dialect-specific session setup (e.g. Oracle NLS_SORT)
    if (! "".equals(db.getInitializationStatement())) {
        Statement stmt = null;
        try {
            stmt = con.createStatement();
            stmt.execute(db.getInitializationStatement());
            stmt.close();
            con.commit();
        }
        finally {
            this.ch.closeStatement(stmt);
        }
    }
    List<String> tablesCreated = new ArrayList<String>();
    List<String> tablesPresent = new ArrayList<String>();
    try {
        createTableFor(con, Collection.CLUSTER_NODES, tablesCreated, tablesPresent);
        createTableFor(con, Collection.NODES, tablesCreated, tablesPresent);
        createTableFor(con, Collection.SETTINGS, tablesCreated, tablesPresent);
        createTableFor(con, Collection.JOURNAL, tablesCreated, tablesPresent);
    } finally {
        con.commit();
        con.close();
    }
    if (options.isDropTablesOnClose()) {
        tablesToBeDropped.addAll(tablesCreated);
    }
    String diag = db.getAdditionalDiagnostics(this.ch, this.tnNodes);
    LOG.info("RDBDocumentStore instantiated for database " + dbDesc + ", using driver: " + driverDesc + ", connecting to: "
            + dbUrl + (diag.isEmpty() ? "" : (", properties: " + diag)) + ", transaction isolation level: " + isolationDiags
            + ", detected size of DATA column: " + this.dataLimitInOctets);
    if (!tablesPresent.isEmpty()) {
        LOG.info("Tables present upon startup: " + tablesPresent);
    }
    if (!tablesCreated.isEmpty()) {
        LOG.info("Tables created upon startup: " + tablesCreated
                + (options.isDropTablesOnClose() ? " (will be dropped on exit)" : ""));
    }
}
/**
 * Ensures the table for {@code col} exists: probes it with a SELECT; if the
 * probe fails (assumed: table missing), creates the table using the
 * dialect's DDL. For the NODES table, the size of the DATA column is
 * discovered from the result-set metadata and remembered in
 * {@code dataLimitInOctets}.
 *
 * @param tablesCreated receives the table name if it was created here
 * @param tablesPresent receives the table name if it already existed
 * @throws SQLException if table creation fails
 */
private void createTableFor(Connection con, Collection<? extends Document> col, List<String> tablesCreated, List<String> tablesPresent) throws SQLException {
    String dbname = this.db.toString();
    if (con.getMetaData().getURL() != null) {
        dbname += " (" + con.getMetaData().getURL() + ")";
    }
    String tableName = getTable(col);
    PreparedStatement checkStatement = null;
    ResultSet checkResultSet = null;
    Statement creatStatement = null;
    try {
        checkStatement = con.prepareStatement("select DATA from " + tableName + " where ID = ?");
        checkStatement.setString(1, "0:/");
        checkResultSet = checkStatement.executeQuery();
        if (col.equals(Collection.NODES)) {
            // try to discover size of DATA column
            ResultSetMetaData met = checkResultSet.getMetaData();
            this.dataLimitInOctets = met.getPrecision(1);
        }
        tablesPresent.add(tableName);
    } catch (SQLException ex) {
        // table does not appear to exist
        con.rollback();
        try {
            creatStatement = con.createStatement();
            creatStatement.execute(this.db.getTableCreationStatement(tableName));
            creatStatement.close();
            con.commit();
            tablesCreated.add(tableName);
            if (col.equals(Collection.NODES)) {
                // re-probe the freshly created table to discover the size of
                // the DATA column; close the statement/result set in a
                // finally block (they were previously leaked)
                PreparedStatement pstmt = null;
                ResultSet rs = null;
                try {
                    pstmt = con.prepareStatement("select DATA from " + tableName + " where ID = ?");
                    pstmt.setString(1, "0:/");
                    rs = pstmt.executeQuery();
                    ResultSetMetaData met = rs.getMetaData();
                    this.dataLimitInOctets = met.getPrecision(1);
                } finally {
                    this.ch.closeResultSet(rs);
                    this.ch.closeStatement(pstmt);
                }
            }
        }
        catch (SQLException ex2) {
            LOG.error("Failed to create table " + tableName + " in " + dbname, ex2);
            throw ex2;
        }
    }
    finally {
        this.ch.closeResultSet(checkResultSet);
        this.ch.closeStatement(checkStatement);
        this.ch.closeStatement(creatStatement);
    }
}
/**
 * Logs a diagnostic (including the creation call stack captured when DEBUG
 * logging was enabled at construction) if this store is garbage-collected
 * without {@link #dispose()} having been called.
 */
@Override
protected void finalize() {
    if (!this.ch.isClosed() && this.callStack != null) {
        LOG.debug("finalizing RDBDocumentStore that was not disposed", this.callStack);
    }
}
/**
 * Reads a document, consulting the nodes cache when the collection is
 * NODES. Cache usage policy (per-key locked):
 * <ul>
 * <li>{@code maxCacheAge == 0}: cache entry is evicted first, forcing a DB read;</li>
 * <li>{@code maxCacheAge == Integer.MAX_VALUE}: any cached copy is accepted;</li>
 * <li>otherwise: a cached copy is accepted only when its last check against
 * the DB is younger than {@code maxCacheAge} ms, else it is revalidated.</li>
 * </ul>
 */
private <T extends Document> T readDocumentCached(final Collection<T> collection, final String id, int maxCacheAge) {
    if (collection != Collection.NODES) {
        // only NODES documents are cached
        return readDocumentUncached(collection, id, null);
    } else {
        CacheValue cacheKey = new StringValue(id);
        NodeDocument doc = null;
        if (maxCacheAge > 0) {
            // first try without lock
            doc = nodesCache.getIfPresent(cacheKey);
            if (doc != null) {
                long lastCheckTime = doc.getLastCheckTime();
                if (lastCheckTime != 0) {
                    if (maxCacheAge == Integer.MAX_VALUE || System.currentTimeMillis() - lastCheckTime < maxCacheAge) {
                        return castAsT(unwrap(doc));
                    }
                }
            }
        }
        try {
            Lock lock = getAndLock(id);
            try {
                // caller really wants the cache to be cleared
                if (maxCacheAge == 0) {
                    invalidateNodesCache(id, true);
                    doc = null;
                }
                final NodeDocument cachedDoc = doc;
                // load-through: read from DB, passing the cached copy so the
                // read can be answered by modcount comparison
                doc = nodesCache.get(cacheKey, new Callable<NodeDocument>() {
                    @Override
                    public NodeDocument call() throws Exception {
                        NodeDocument doc = (NodeDocument) readDocumentUncached(collection, id, cachedDoc);
                        if (doc != null) {
                            doc.seal();
                        }
                        return wrap(doc);
                    }
                });
                // inspect the doc whether it can be used
                long lastCheckTime = doc.getLastCheckTime();
                if (lastCheckTime != 0 && (maxCacheAge == 0 || maxCacheAge == Integer.MAX_VALUE)) {
                    // we either just cleared the cache or the caller does
                    // not care;
                } else if (lastCheckTime != 0 && (System.currentTimeMillis() - lastCheckTime < maxCacheAge)) {
                    // is new enough
                } else {
                    // need to at least revalidate
                    NodeDocument ndoc = (NodeDocument) readDocumentUncached(collection, id, cachedDoc);
                    if (ndoc != null) {
                        ndoc.seal();
                    }
                    doc = wrap(ndoc);
                    nodesCache.put(cacheKey, doc);
                }
            } finally {
                lock.unlock();
            }
            return castAsT(unwrap(doc));
        } catch (ExecutionException e) {
            throw new IllegalStateException("Failed to load document with " + id, e);
        }
    }
}
/**
 * Inserts the documents described by {@code updates} in chunks of
 * {@code CHUNKSIZE}, incrementing the modcount (and the collisions
 * modcount where applicable) before applying each change set. Documents of
 * successfully inserted chunks are added to the cache.
 * <p>
 * Note: the former {@code @CheckForNull} annotation was removed — it is
 * meaningless on a method returning primitive {@code boolean}.
 *
 * @return {@code true} iff every chunk was inserted successfully
 */
private <T extends Document> boolean internalCreate(Collection<T> collection, List<UpdateOp> updates) {
    try {
        boolean success = true;
        // try up to CHUNKSIZE ops in one transaction
        for (List<UpdateOp> chunks : Lists.partition(updates, CHUNKSIZE)) {
            List<T> docs = new ArrayList<T>();
            for (UpdateOp update : chunks) {
                T doc = collection.newDocument(this);
                update.increment(MODCOUNT, 1);
                if (hasChangesToCollisions(update)) {
                    update.increment(COLLISIONSMODCOUNT, 1);
                }
                UpdateUtils.applyChanges(doc, update, comparator);
                // sanity check: the op must produce a document with its own id
                if (!update.getId().equals(doc.getId())) {
                    throw new DocumentStoreException("ID mismatch - UpdateOp: " + update.getId() + ", ID property: "
                            + doc.getId());
                }
                docs.add(doc);
            }
            boolean done = insertDocuments(collection, docs);
            if (done) {
                for (T doc : docs) {
                    addToCache(collection, doc);
                }
            }
            else {
                success = false;
            }
        }
        return success;
    } catch (DocumentStoreException ex) {
        // insert failures are reported to the caller as "false"
        return false;
    }
}
/**
 * Creates or updates a single document.
 * <p>
 * If no (cached) previous version exists and {@code allowCreate} is set,
 * an insert is attempted; an insert failure is treated as a lost race and
 * retried as an update against a freshly read document. Otherwise an
 * update with up to {@code RETRIES} attempts is performed.
 *
 * @return the previous version of the document, or {@code null} when
 *         conditions were not met (or creation was disallowed)
 */
@CheckForNull
private <T extends Document> T internalCreateOrUpdate(Collection<T> collection, UpdateOp update, boolean allowCreate,
        boolean checkConditions) {
    T oldDoc = readDocumentCached(collection, update.getId(), Integer.MAX_VALUE);
    if (oldDoc == null) {
        if (!allowCreate) {
            return null;
        } else if (!update.isNew()) {
            throw new DocumentStoreException("Document does not exist: " + update.getId());
        }
        T doc = collection.newDocument(this);
        if (checkConditions && !checkConditions(doc, update.getConditions())) {
            return null;
        }
        update.increment(MODCOUNT, 1);
        if (hasChangesToCollisions(update)) {
            update.increment(COLLISIONSMODCOUNT, 1);
        }
        UpdateUtils.applyChanges(doc, update, comparator);
        try {
            insertDocuments(collection, Collections.singletonList(doc));
            addToCache(collection, doc);
            // oldDoc is null here: contract is "previous version or null"
            return oldDoc;
        } catch (DocumentStoreException ex) {
            // may have failed due to a race condition; try update instead
            // this is an edge case, so it's ok to bypass the cache
            // (avoiding a race condition where the DB is already updated
            // but the cache is not)
            oldDoc = readDocumentUncached(collection, update.getId(), null);
            if (oldDoc == null) {
                // something else went wrong
                LOG.error("insert failed, but document " + update.getId() + " is not present, aborting", ex);
                throw (ex);
            }
            return internalUpdate(collection, update, oldDoc, checkConditions, RETRIES);
        }
    } else {
        T result = internalUpdate(collection, update, oldDoc, checkConditions, RETRIES);
        if (allowCreate && result == null) {
            // TODO OAK-2655 need to implement some kind of retry
            LOG.error("update of " + update.getId() + " failed, race condition?");
            throw new DocumentStoreException("update of " + update.getId() + " failed, race condition?");
        }
        return result;
    }
}
/**
 * Applies {@code update} to {@code oldDoc} and writes the result, using the
 * document's modcount for optimistic concurrency control; on a modcount
 * conflict the document is re-read and the update retried up to
 * {@code maxRetries} times (under the per-key lock).
 *
 * @return previous version of document or <code>null</code>
 */
@CheckForNull
private <T extends Document> T internalUpdate(Collection<T> collection, UpdateOp update, T oldDoc, boolean checkConditions,
        int maxRetries) {
    T doc = applyChanges(collection, oldDoc, update, checkConditions);
    if (doc == null) {
        // conditions not met
        return null;
    } else {
        Lock l = getAndLock(update.getId());
        try {
            boolean success = false;
            int retries = maxRetries;
            while (!success && retries > 0) {
                long lastmodcount = modcountOf(oldDoc);
                // conditional update: succeeds only when the row still has
                // the expected modcount
                success = updateDocument(collection, doc, update, lastmodcount);
                if (!success) {
                    retries -= 1;
                    oldDoc = readDocumentCached(collection, update.getId(), Integer.MAX_VALUE);
                    if (oldDoc != null) {
                        long newmodcount = modcountOf(oldDoc);
                        if (lastmodcount == newmodcount) {
                            // cached copy did not change so it probably was
                            // updated by a different instance, get a fresh one
                            oldDoc = readDocumentUncached(collection, update.getId(), null);
                        }
                    }
                    if (oldDoc == null) {
                        // document was there but is now gone
                        LOG.debug("failed to apply update because document is gone in the meantime: " + update.getId(), new Exception("call stack"));
                        return null;
                    }
                    doc = applyChanges(collection, oldDoc, update, checkConditions);
                    if (doc == null) {
                        return null;
                    }
                } else {
                    if (collection == Collection.NODES) {
                        applyToCache((NodeDocument) oldDoc, (NodeDocument) doc);
                    }
                }
            }
            if (!success) {
                throw new DocumentStoreException("failed update of " + doc.getId() + " (race?) after " + maxRetries
                        + " retries");
            }
            return oldDoc;
        } finally {
            l.unlock();
        }
    }
}
/**
 * Produces a sealed deep copy of {@code oldDoc} with {@code update}
 * applied. The modcount (and, when collisions are touched, the collisions
 * modcount) is incremented on the {@link UpdateOp} itself as a side
 * effect.
 *
 * @return the new sealed document, or {@code null} when
 *         {@code checkConditions} is set and the conditions are not met
 */
@CheckForNull
private <T extends Document> T applyChanges(Collection<T> collection, T oldDoc, UpdateOp update, boolean checkConditions) {
    T workingCopy = collection.newDocument(this);
    oldDoc.deepCopy(workingCopy);
    if (checkConditions && !checkConditions(workingCopy, update.getConditions())) {
        return null;
    }
    if (hasChangesToCollisions(update)) {
        update.increment(COLLISIONSMODCOUNT, 1);
    }
    update.increment(MODCOUNT, 1);
    UpdateUtils.applyChanges(workingCopy, update, comparator);
    workingCopy.seal();
    return workingCopy;
}
/**
 * Applies one {@link UpdateOp} to many documents. When the op can be
 * expressed as an appended change (and does not depend on previous state),
 * a batched SQL append is attempted per chunk of {@code CHUNKSIZE} ids and
 * the cache is patched accordingly; otherwise (or when the batch fails)
 * each id falls back to an individual
 * {@link #internalCreateOrUpdate(Collection, UpdateOp, boolean, boolean)}.
 * <p>
 * Note: the former {@code @CheckForNull} annotation was removed — it is
 * meaningless on a {@code void} method.
 */
private <T extends Document> void internalUpdate(Collection<T> collection, List<String> ids, UpdateOp update) {
    if (isAppendableUpdate(update) && !requiresPreviousState(update)) {
        Operation modOperation = update.getChanges().get(MODIFIEDKEY);
        long modified = getModifiedFromOperation(modOperation);
        boolean modifiedIsConditional = modOperation == null || modOperation.type != UpdateOp.Operation.Type.SET;
        String appendData = SR.asString(update);
        for (List<String> chunkedIds : Lists.partition(ids, CHUNKSIZE)) {
            // remember what we already have in the cache
            Map<String, NodeDocument> cachedDocs = Collections.emptyMap();
            if (collection == Collection.NODES) {
                cachedDocs = new HashMap<String, NodeDocument>();
                for (String key : chunkedIds) {
                    cachedDocs.put(key, nodesCache.getIfPresent(new StringValue(key)));
                }
            }
            Connection connection = null;
            String tableName = getTable(collection);
            boolean success = false;
            try {
                connection = this.ch.getRWConnection();
                success = dbBatchedAppendingUpdate(connection, tableName, chunkedIds, modified, modifiedIsConditional,
                        appendData);
                connection.commit();
            } catch (SQLException ex) {
                success = false;
                this.ch.rollbackConnection(connection);
            } finally {
                this.ch.closeConnection(connection);
            }
            if (success) {
                for (Entry<String, NodeDocument> entry : cachedDocs.entrySet()) {
                    T oldDoc = castAsT(entry.getValue());
                    if (oldDoc == null) {
                        // make sure concurrently loaded document is
                        // invalidated
                        nodesCache.invalidate(new StringValue(entry.getKey()));
                    } else {
                        T newDoc = applyChanges(collection, oldDoc, update, true);
                        if (newDoc != null) {
                            applyToCache((NodeDocument) oldDoc, (NodeDocument) newDoc);
                        }
                    }
                }
            } else {
                // batch failed: fall back to per-document updates
                for (String id : chunkedIds) {
                    UpdateOp up = update.copy();
                    up = up.shallowCopy(id);
                    internalCreateOrUpdate(collection, up, false, true);
                }
            }
        }
    } else {
        // op not appendable: per-document updates
        for (String id : ids) {
            UpdateOp up = update.copy();
            up = up.shallowCopy(id);
            internalCreateOrUpdate(collection, up, false, true);
        }
    }
}
/**
 * Runs a range query on the database (read-only connection), validating
 * the indexed property against {@code INDEXEDPROPERTIES}, and feeds every
 * row through the cache ({@code runThroughCache}).
 *
 * @throws DocumentStoreException on unsupported indexed properties or SQL errors
 */
private <T extends Document> List<T> internalQuery(Collection<T> collection, String fromKey, String toKey,
        String indexedProperty, long startValue, int limit) {
    Connection connection = null;
    String tableName = getTable(collection);
    List<T> result = Collections.emptyList();
    if (indexedProperty != null && (!INDEXEDPROPERTIES.contains(indexedProperty))) {
        String message = "indexed property " + indexedProperty + " not supported, query was '>= '" + startValue
                + "'; supported properties are " + INDEXEDPROPERTIES;
        LOG.info(message);
        throw new DocumentStoreException(message);
    }
    try {
        long now = System.currentTimeMillis();
        connection = this.ch.getROConnection();
        List<RDBRow> dbresult = dbQuery(connection, tableName, fromKey, toKey, indexedProperty, startValue, limit);
        connection.commit();
        int size = dbresult.size();
        result = new ArrayList<T>(size);
        for (int i = 0; i < size; i++) {
            RDBRow row = dbresult.set(i, null); // free RDBRow ASAP
            T doc = runThroughCache(collection, row, now);
            result.add(doc);
        }
    } catch (Exception ex) {
        LOG.error("SQL exception on query", ex);
        throw new DocumentStoreException(ex);
    } finally {
        this.ch.closeConnection(connection);
    }
    return result;
}
private <T extends Document> String getTable(Collection<T> collection) {
if (collection == Collection.CLUSTER_NODES) {
return this.tnClusterNodes;
} else if (collection == Collection.NODES) {
return this.tnNodes;
} else if (collection == Collection.SETTINGS) {
return this.tnSettings;
} else if (collection == Collection.JOURNAL) {
return this.tnJournal;
} else {
throw new IllegalArgumentException("Unknown collection: " + collection.toString());
}
}
    /**
     * Reads a document directly from the database, bypassing the cache. When a
     * cached copy is supplied, its mod count is passed to {@code dbRead} so
     * the database can skip sending unchanged row data; in that case the
     * cached document is marked up-to-date and returned instead of a new one.
     *
     * @param collection collection to read from (determines the table)
     * @param id document ID
     * @param cachedDoc the currently cached version, or <code>null</code>
     * @return the document, or <code>null</code> when no such row exists
     */
    @CheckForNull
    private <T extends Document> T readDocumentUncached(Collection<T> collection, String id, NodeDocument cachedDoc) {
        Connection connection = null;
        String tableName = getTable(collection);
        try {
            // -1 signals "no cached version available" to dbRead
            long lastmodcount = -1;
            if (cachedDoc != null) {
                lastmodcount = modcountOf(cachedDoc);
            }
            connection = this.ch.getROConnection();
            RDBRow row = dbRead(connection, tableName, id, lastmodcount);
            connection.commit();
            if (row == null) {
                return null;
            } else {
                if (lastmodcount == row.getModcount()) {
                    // we can re-use the cached document
                    cachedDoc.markUpToDate(System.currentTimeMillis());
                    return castAsT(cachedDoc);
                } else {
                    return SR.fromRow(collection, row);
                }
            }
        } catch (Exception ex) {
            throw new DocumentStoreException(ex);
        } finally {
            this.ch.closeConnection(connection);
        }
    }
private <T extends Document> void delete(Collection<T> collection, String id) {
Connection connection = null;
String tableName = getTable(collection);
try {
connection = this.ch.getRWConnection();
dbDelete(connection, tableName, Collections.singletonList(id));
connection.commit();
} catch (Exception ex) {
throw new DocumentStoreException(ex);
} finally {
this.ch.closeConnection(connection);
}
}
private <T extends Document> int delete(Collection<T> collection, List<String> ids) {
int numDeleted = 0;
for (List<String> sublist : Lists.partition(ids, 64)) {
Connection connection = null;
String tableName = getTable(collection);
try {
connection = this.ch.getRWConnection();
numDeleted += dbDelete(connection, tableName, sublist);
connection.commit();
} catch (Exception ex) {
throw new DocumentStoreException(ex);
} finally {
this.ch.closeConnection(connection);
}
}
return numDeleted;
}
    /**
     * Removes documents matching the given per-ID conditions, batching up to
     * 64 entries per SQL statement.
     *
     * @param toRemove map of document ID to the conditions it must satisfy
     * @return the total number of rows actually deleted
     */
    private <T extends Document> int delete(Collection<T> collection,
            Map<String, Map<Key, Condition>> toRemove) {
        int numDeleted = 0;
        String tableName = getTable(collection);
        Map<String, Map<Key, Condition>> subMap = Maps.newHashMap();
        Iterator<Entry<String, Map<Key, Condition>>> it = toRemove.entrySet().iterator();
        while (it.hasNext()) {
            Entry<String, Map<Key, Condition>> entry = it.next();
            subMap.put(entry.getKey(), entry.getValue());
            // flush a batch when it is full, or when this was the last entry
            if (subMap.size() == 64 || !it.hasNext()) {
                Connection connection = null;
                try {
                    connection = this.ch.getRWConnection();
                    numDeleted += dbDelete(connection, tableName, subMap);
                    connection.commit();
                } catch (Exception ex) {
                    throw DocumentStoreException.convert(ex);
                } finally {
                    this.ch.closeConnection(connection);
                }
                subMap.clear();
            }
        }
        return numDeleted;
    }
    /**
     * Persists an update to an existing document. First tries a cheap
     * "appending" update (storing the serialized {@link UpdateOp} in the DATA
     * column); every 16th change, or when the append would overflow the
     * column, falls back to rewriting the full document serialization.
     * <p>
     * Updates are conditional on <code>oldmodcount</code> when supplied,
     * implementing optimistic concurrency control.
     *
     * @param oldmodcount expected current mod count, or <code>null</code> for
     *            an unconditional update
     * @return <code>true</code> when exactly one row was updated
     */
    private <T extends Document> boolean updateDocument(@Nonnull Collection<T> collection, @Nonnull T document,
            @Nonnull UpdateOp update, Long oldmodcount) {
        Connection connection = null;
        String tableName = getTable(collection);
        try {
            connection = this.ch.getRWConnection();
            Operation modOperation = update.getChanges().get(MODIFIEDKEY);
            long modified = getModifiedFromOperation(modOperation);
            // when _modified is not SET outright, it is only raised (max semantics)
            boolean modifiedIsConditional = modOperation == null || modOperation.type != UpdateOp.Operation.Type.SET;
            Number flagB = (Number) document.get(NodeDocument.HAS_BINARY_FLAG);
            Boolean hasBinary = flagB != null && flagB.intValue() == NodeDocument.HAS_BINARY_VAL;
            Boolean flagD = (Boolean) document.get(NodeDocument.DELETED_ONCE);
            Boolean deletedOnce = flagD != null && flagD.booleanValue();
            Long modcount = (Long) document.get(MODCOUNT);
            Long cmodcount = (Long) document.get(COLLISIONSMODCOUNT);
            boolean success = false;
            // every 16th update is a full rewrite
            if (isAppendableUpdate(update) && modcount % 16 != 0) {
                String appendData = SR.asString(update);
                // only append when the addition still fits into the DATA column
                if (appendData.length() < this.dataLimitInOctets / CHAR2OCTETRATIO) {
                    try {
                        success = dbAppendingUpdate(connection, tableName, document.getId(), modified, modifiedIsConditional, hasBinary, deletedOnce,
                                modcount, cmodcount, oldmodcount, appendData);
                        connection.commit();
                    } catch (SQLException ex) {
                        // swallow string-overflow errors and fall through to the
                        // full rewrite below; rethrow anything else
                        continueIfStringOverflow(ex);
                        this.ch.rollbackConnection(connection);
                        success = false;
                    }
                }
            }
            if (!success) {
                String data = SR.asString(document);
                success = dbUpdate(connection, tableName, document.getId(), modified, hasBinary, deletedOnce, modcount, cmodcount,
                        oldmodcount, data);
                connection.commit();
            }
            return success;
        } catch (SQLException ex) {
            this.ch.rollbackConnection(connection);
            throw new DocumentStoreException(ex);
        } finally {
            this.ch.closeConnection(connection);
        }
    }
private static void continueIfStringOverflow(SQLException ex) throws SQLException {
String state = ex.getSQLState();
if ("22001".equals(state) /* everybody */|| ("72000".equals(state) && 1489 == ex.getErrorCode()) /* Oracle */) {
// ok
} else {
throw (ex);
}
}
/*
* currently we use append for all updates, but this might change in the
* future
*/
private static boolean isAppendableUpdate(UpdateOp update) {
return true;
}
/*
* check whether this update operation requires knowledge about the previous
* state
*/
private static boolean requiresPreviousState(UpdateOp update) {
return !update.getConditions().isEmpty();
}
private static long getModifiedFromOperation(Operation op) {
return op == null ? 0L : Long.parseLong(op.value.toString());
}
private <T extends Document> boolean insertDocuments(Collection<T> collection, List<T> documents) {
Connection connection = null;
String tableName = getTable(collection);
List<String> ids = new ArrayList<String>();
try {
connection = this.ch.getRWConnection();
boolean result = dbInsert(connection, tableName, documents);
connection.commit();
return result;
} catch (SQLException ex) {
LOG.debug("insert of " + ids + " failed", ex);
this.ch.rollbackConnection(connection);
throw new DocumentStoreException(ex);
} finally {
this.ch.closeConnection(connection);
}
}
    // configuration
    // Whether to skip GZIP compression of large document serializations
    // (system property ...RDBDocumentStore.NOGZIP, default: false)
    private static final boolean NOGZIP = Boolean
            .getBoolean("org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.NOGZIP");
    // Number of documents to insert at once for batch create
    // (system property ...RDBDocumentStore.CHUNKSIZE, default: 64)
    private static final int CHUNKSIZE = Integer.getInteger(
            "org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.CHUNKSIZE", 64);
    // Number of query hits above which a diagnostic warning is generated
    // (system property ...RDBDocumentStore.QUERYHITSLIMIT, default: 4096; 0 disables)
    private static final int QUERYHITSLIMIT = Integer.getInteger(
            "org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.QUERYHITSLIMIT", 4096);
    // Number of elapsed ms in a query above which a diagnostic warning is generated
    // (system property ...RDBDocumentStore.QUERYTIMELIMIT, default: 10000; 0 disables)
    private static final int QUERYTIMELIMIT = Integer.getInteger(
            "org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.QUERYTIMELIMIT", 10000);
private static byte[] asBytes(String data) {
byte[] bytes;
try {
bytes = data.getBytes("UTF-8");
} catch (UnsupportedEncodingException ex) {
LOG.error("UTF-8 not supported??", ex);
throw new DocumentStoreException(ex);
}
if (NOGZIP) {
return bytes;
} else {
try {
ByteArrayOutputStream bos = new ByteArrayOutputStream(data.length());
GZIPOutputStream gos = new GZIPOutputStream(bos) {
{
// TODO: make this configurable
this.def.setLevel(Deflater.BEST_SPEED);
}
};
gos.write(bytes);
gos.close();
return bos.toByteArray();
} catch (IOException ex) {
LOG.error("Error while gzipping contents", ex);
throw new DocumentStoreException(ex);
}
}
}
private void setIdInStatement(PreparedStatement stmt, int idx, String id) throws SQLException {
if (db.isPrimaryColumnByteEncoded()) {
try {
stmt.setBytes(idx, id.getBytes("UTF-8"));
} catch (UnsupportedEncodingException ex) {
LOG.error("UTF-8 not supported??", ex);
throw new DocumentStoreException(ex);
}
} else {
stmt.setString(idx, id);
}
}
private String getIdFromRS(ResultSet rs, int idx) throws SQLException {
String id;
if (db.isPrimaryColumnByteEncoded()) {
try {
id = new String(rs.getBytes(idx), "UTF-8");
} catch (UnsupportedEncodingException ex) {
LOG.error("UTF-8 not supported??", ex);
throw new DocumentStoreException(ex);
}
} else {
id = rs.getString(idx);
}
return id;
}
    /**
     * Reads a single row by ID. When the caller already holds a version with
     * mod count <code>lastmodcount</code> and the database supports CASE in
     * SELECT, the statement returns NULL for the (potentially large) DATA and
     * BDATA columns if the row is unchanged, saving transfer volume.
     *
     * @param lastmodcount mod count of the caller's cached copy, or -1
     * @return the row, or <code>null</code> when it does not exist (also for
     *         DB2's over-length-key SQLException, which is handled gracefully)
     */
    @CheckForNull
    private RDBRow dbRead(Connection connection, String tableName, String id, long lastmodcount) throws SQLException {
        PreparedStatement stmt;
        boolean useCaseStatement = lastmodcount != -1 && this.db.allowsCaseInSelect();
        if (useCaseStatement) {
            // the case statement causes the actual row data not to be
            // sent in case we already have it
            stmt = connection
                    .prepareStatement("select MODIFIED, MODCOUNT, CMODCOUNT, HASBINARY, DELETEDONCE, case MODCOUNT when ? then null else DATA end as DATA, "
                            + "case MODCOUNT when ? then null else BDATA end as BDATA from " + tableName + " where ID = ?");
        } else {
            // either we don't have a previous version of the document
            // or the database does not support CASE in SELECT
            stmt = connection.prepareStatement("select MODIFIED, MODCOUNT, CMODCOUNT, HASBINARY, DELETEDONCE, DATA, BDATA from "
                    + tableName + " where ID = ?");
        }
        try {
            int si = 1;
            if (useCaseStatement) {
                stmt.setLong(si++, lastmodcount);
                stmt.setLong(si++, lastmodcount);
            }
            setIdInStatement(stmt, si, id);
            ResultSet rs = stmt.executeQuery();
            if (rs.next()) {
                long modified = rs.getLong(1);
                long modcount = rs.getLong(2);
                long cmodcount = rs.getLong(3);
                long hasBinary = rs.getLong(4);
                long deletedOnce = rs.getLong(5);
                String data = rs.getString(6);
                byte[] bdata = rs.getBytes(7);
                return new RDBRow(id, hasBinary == 1, deletedOnce == 1, modified, modcount, cmodcount, data, bdata);
            } else {
                return null;
            }
        } catch (SQLException ex) {
            LOG.error("attempting to read " + id + " (id length is " + id.length() + ")", ex);
            // DB2 throws an SQLException for invalid keys; handle this more
            // gracefully
            if ("22001".equals(ex.getSQLState())) {
                this.ch.rollbackConnection(connection);
                return null;
            } else {
                throw (ex);
            }
        } finally {
            stmt.close();
        }
    }
    /**
     * Runs a range query over (minId, maxId), optionally filtered by one of
     * the supported indexed properties, ordered by ID and limited using the
     * database-specific row-limiting syntax. Sanity-checks that returned IDs
     * are within range (detects broken DB collations) and logs diagnostics
     * for excessive hit counts or long-running queries.
     */
    private List<RDBRow> dbQuery(Connection connection, String tableName, String minId, String maxId, String indexedProperty,
            long startValue, int limit) throws SQLException {
        long start = System.currentTimeMillis();
        String t = "select ";
        // SQL Server style limiting goes right after SELECT
        if (limit != Integer.MAX_VALUE && this.db.getFetchFirstSyntax() == FETCHFIRSTSYNTAX.TOP) {
            t += "TOP " + limit + " ";
        }
        t += "ID, MODIFIED, MODCOUNT, CMODCOUNT, HASBINARY, DELETEDONCE, DATA, BDATA from " + tableName
                + " where ID > ? and ID < ?";
        if (indexedProperty != null) {
            if (MODIFIED.equals(indexedProperty)) {
                t += " and MODIFIED >= ?";
            } else if (NodeDocument.HAS_BINARY_FLAG.equals(indexedProperty)) {
                if (startValue != NodeDocument.HAS_BINARY_VAL) {
                    throw new DocumentStoreException("unsupported value for property " + NodeDocument.HAS_BINARY_FLAG);
                }
                t += " and HASBINARY = 1";
            } else if (NodeDocument.DELETED_ONCE.equals(indexedProperty)) {
                if (startValue != 1) {
                    throw new DocumentStoreException("unsupported value for property " + NodeDocument.DELETED_ONCE);
                }
                t += " and DELETEDONCE = 1";
            } else {
                throw new DocumentStoreException("unsupported indexed property: " + indexedProperty);
            }
        }
        t += " order by ID";
        // LIMIT / FETCH FIRST style limiting goes at the end of the statement
        if (limit != Integer.MAX_VALUE) {
            switch (this.db.getFetchFirstSyntax()) {
                case LIMIT:
                    t += " LIMIT " + limit;
                    break;
                case FETCHFIRST:
                    t += " FETCH FIRST " + limit + " ROWS ONLY";
                    break;
                default:
                    break;
            }
        }
        PreparedStatement stmt = connection.prepareStatement(t);
        List<RDBRow> result = new ArrayList<RDBRow>();
        try {
            int si = 1;
            setIdInStatement(stmt, si++, minId);
            setIdInStatement(stmt, si++, maxId);
            // only MODIFIED takes a bound parameter; the other filters are inlined above
            if (MODIFIED.equals(indexedProperty)) {
                stmt.setLong(si++, startValue);
            }
            if (limit != Integer.MAX_VALUE) {
                stmt.setFetchSize(limit);
            }
            ResultSet rs = stmt.executeQuery();
            while (rs.next() && result.size() < limit) {
                String id = getIdFromRS(rs, 1);
                if (id.compareTo(minId) < 0 || id.compareTo(maxId) > 0) {
                    throw new DocumentStoreException("unexpected query result: '" + minId + "' < '" + id + "' < '" + maxId
                            + "' - broken DB collation?");
                }
                long modified = rs.getLong(2);
                long modcount = rs.getLong(3);
                long cmodcount = rs.getLong(4);
                long hasBinary = rs.getLong(5);
                long deletedOnce = rs.getLong(6);
                String data = rs.getString(7);
                byte[] bdata = rs.getBytes(8);
                result.add(new RDBRow(id, hasBinary == 1, deletedOnce == 1, modified, modcount, cmodcount, data, bdata));
            }
        } finally {
            stmt.close();
        }
        long elapsed = System.currentTimeMillis() - start;
        if (QUERYHITSLIMIT != 0 && result.size() > QUERYHITSLIMIT) {
            String message = String.format("Potentially excessive query with %d hits (limited to %d, configured QUERYHITSLIMIT %d), elapsed time %dms, params minid '%s' maxid '%s' indexedProperty %s startValue %d limit %d. Check calling method.",
                    result.size(), limit, QUERYHITSLIMIT, elapsed, minId, maxId, indexedProperty, startValue, limit);
            LOG.info(message, new Exception("call stack"));
        }
        else if (QUERYTIMELIMIT != 0 && elapsed > QUERYTIMELIMIT) {
            String message = String.format("Long running query with %d hits (limited to %d), elapsed time %dms (configured QUERYTIMELIMIT %d), params minid '%s' maxid '%s' indexedProperty %s startValue %d limit %d. Check calling method.",
                    result.size(), limit, elapsed, QUERYTIMELIMIT, minId, maxId, indexedProperty, startValue, limit);
            LOG.info(message, new Exception("call stack"));
        }
        return result;
    }
    /**
     * Rewrites a row with the full document serialization. When the data fits
     * the DATA column it is stored there and BDATA is cleared; otherwise DATA
     * gets the "\"blob\"" marker and the (possibly compressed) serialization
     * goes into BDATA. Conditional on <code>oldmodcount</code> when supplied.
     *
     * @return <code>true</code> when exactly one row was updated
     */
    private boolean dbUpdate(Connection connection, String tableName, String id, Long modified, Boolean hasBinary,
            Boolean deletedOnce, Long modcount, Long cmodcount, Long oldmodcount, String data) throws SQLException {
        String t = "update "
                + tableName
                + " set MODIFIED = ?, HASBINARY = ?, DELETEDONCE = ?, MODCOUNT = ?, CMODCOUNT = ?, DSIZE = ?, DATA = ?, BDATA = ? where ID = ?";
        if (oldmodcount != null) {
            t += " and MODCOUNT = ?";
        }
        PreparedStatement stmt = connection.prepareStatement(t);
        try {
            int si = 1;
            stmt.setObject(si++, modified, Types.BIGINT);
            stmt.setObject(si++, hasBinary ? 1 : 0, Types.SMALLINT);
            stmt.setObject(si++, deletedOnce ? 1 : 0, Types.SMALLINT);
            stmt.setObject(si++, modcount, Types.BIGINT);
            stmt.setObject(si++, cmodcount == null ? Long.valueOf(0) : cmodcount, Types.BIGINT);
            stmt.setObject(si++, data.length(), Types.BIGINT);
            if (data.length() < this.dataLimitInOctets / CHAR2OCTETRATIO) {
                // inline storage in the DATA column; clear BDATA
                stmt.setString(si++, data);
                stmt.setBinaryStream(si++, null, 0);
            } else {
                // overflow storage in the BDATA blob column
                stmt.setString(si++, "\"blob\"");
                byte[] bytes = asBytes(data);
                stmt.setBytes(si++, bytes);
            }
            setIdInStatement(stmt, si++, id);
            if (oldmodcount != null) {
                stmt.setObject(si++, oldmodcount, Types.BIGINT);
            }
            int result = stmt.executeUpdate();
            if (result != 1) {
                LOG.debug("DB update failed for " + tableName + "/" + id + " with oldmodcount=" + oldmodcount);
            }
            return result == 1;
        } finally {
            stmt.close();
        }
    }
    /**
     * Performs an "appending" update: the serialized update operation is
     * concatenated (comma-separated) onto the DATA column instead of
     * rewriting the whole document. When <code>setModifiedConditionally</code>
     * is set, MODIFIED is only raised, never lowered. Conditional on
     * <code>oldmodcount</code> when supplied.
     *
     * @return <code>true</code> when exactly one row was updated
     */
    private boolean dbAppendingUpdate(Connection connection, String tableName, String id, Long modified,
            boolean setModifiedConditionally, Boolean hasBinary, Boolean deletedOnce, Long modcount, Long cmodcount,
            Long oldmodcount, String appendData) throws SQLException {
        StringBuilder t = new StringBuilder();
        t.append("update " + tableName + " set ");
        t.append(setModifiedConditionally ? "MODIFIED = case when ? > MODIFIED then ? else MODIFIED end, " : "MODIFIED = ?, ");
        t.append("HASBINARY = ?, DELETEDONCE = ?, MODCOUNT = ?, CMODCOUNT = ?, DSIZE = DSIZE + ?, ");
        t.append("DATA = " + this.db.getConcatQueryString(this.dataLimitInOctets, appendData.length()) + " ");
        t.append("where ID = ?");
        if (oldmodcount != null) {
            t.append(" and MODCOUNT = ?");
        }
        PreparedStatement stmt = connection.prepareStatement(t.toString());
        try {
            int si = 1;
            stmt.setObject(si++, modified, Types.BIGINT);
            if (setModifiedConditionally) {
                // the CASE expression references the parameter twice
                stmt.setObject(si++, modified, Types.BIGINT);
            }
            stmt.setObject(si++, hasBinary ? 1 : 0, Types.SMALLINT);
            stmt.setObject(si++, deletedOnce ? 1 : 0, Types.SMALLINT);
            stmt.setObject(si++, modcount, Types.BIGINT);
            stmt.setObject(si++, cmodcount == null ? Long.valueOf(0) : cmodcount, Types.BIGINT);
            // DSIZE grows by the appended data plus the separating comma
            stmt.setObject(si++, 1 + appendData.length(), Types.BIGINT);
            stmt.setString(si++, "," + appendData);
            setIdInStatement(stmt, si++, id);
            if (oldmodcount != null) {
                stmt.setObject(si++, oldmodcount, Types.BIGINT);
            }
            int result = stmt.executeUpdate();
            if (result != 1) {
                LOG.debug("DB append update failed for " + tableName + "/" + id + " with oldmodcount=" + oldmodcount);
            }
            return result == 1;
        } finally {
            stmt.close();
        }
    }
    /**
     * Applies the same appending update to multiple rows at once (ID IN (...)
     * clause), incrementing MODCOUNT and DSIZE in SQL rather than binding the
     * values. When <code>setModifiedConditionally</code> is set, MODIFIED is
     * only raised, never lowered.
     *
     * @return <code>true</code> when every targeted row was updated
     */
    private boolean dbBatchedAppendingUpdate(Connection connection, String tableName, List<String> ids, Long modified,
            boolean setModifiedConditionally,
            String appendData) throws SQLException {
        StringBuilder t = new StringBuilder();
        t.append("update " + tableName + " set ");
        t.append(setModifiedConditionally ? "MODIFIED = case when ? > MODIFIED then ? else MODIFIED end, " : "MODIFIED = ?, ");
        t.append("MODCOUNT = MODCOUNT + 1, DSIZE = DSIZE + ?, ");
        t.append("DATA = " + this.db.getConcatQueryString(this.dataLimitInOctets, appendData.length()) + " ");
        t.append("where ID in (");
        for (int i = 0; i < ids.size(); i++) {
            if (i != 0) {
                t.append(',');
            }
            t.append('?');
        }
        t.append(")");
        PreparedStatement stmt = connection.prepareStatement(t.toString());
        try {
            int si = 1;
            stmt.setObject(si++, modified, Types.BIGINT);
            if (setModifiedConditionally) {
                // the CASE expression references the parameter twice
                stmt.setObject(si++, modified, Types.BIGINT);
            }
            // DSIZE grows by the appended data plus the separating comma
            stmt.setObject(si++, 1 + appendData.length(), Types.BIGINT);
            stmt.setString(si++, "," + appendData);
            for (String id : ids) {
                setIdInStatement(stmt, si++, id);
            }
            int result = stmt.executeUpdate();
            if (result != ids.size()) {
                LOG.debug("DB update failed: only " + result + " of " + ids.size() + " updated. Table: " + tableName + ", IDs:"
                        + ids);
            }
            return result == ids.size();
        } finally {
            stmt.close();
        }
    }
    /**
     * Inserts the given documents as a single JDBC batch. Documents too large
     * for the DATA column are stored in BDATA with the "\"blob\"" marker in
     * DATA (mirroring {@code dbUpdate}).
     *
     * @return <code>true</code> when every batch entry reported success
     */
    private <T extends Document> boolean dbInsert(Connection connection, String tableName, List<T> documents) throws SQLException {
        PreparedStatement stmt = connection.prepareStatement("insert into " + tableName +
                "(ID, MODIFIED, HASBINARY, DELETEDONCE, MODCOUNT, CMODCOUNT, DSIZE, DATA, BDATA) " +
                "values (?, ?, ?, ?, ?, ?, ?, ?, ?)");
        try {
            for (T document : documents) {
                String data = SR.asString(document);
                String id = document.getId();
                Number hasBinary = (Number) document.get(NodeDocument.HAS_BINARY_FLAG);
                Boolean deletedOnce = (Boolean) document.get(NodeDocument.DELETED_ONCE);
                Long cmodcount = (Long) document.get(COLLISIONSMODCOUNT);
                int si = 1;
                setIdInStatement(stmt, si++, id);
                stmt.setObject(si++, document.get(MODIFIED), Types.BIGINT);
                stmt.setObject(si++, (hasBinary != null && hasBinary.intValue() == NodeDocument.HAS_BINARY_VAL) ? 1 : 0, Types.SMALLINT);
                stmt.setObject(si++, (deletedOnce != null && deletedOnce) ? 1 : 0, Types.SMALLINT);
                stmt.setObject(si++, document.get(MODCOUNT), Types.BIGINT);
                stmt.setObject(si++, cmodcount == null ? Long.valueOf(0) : cmodcount, Types.BIGINT);
                stmt.setObject(si++, data.length(), Types.BIGINT);
                if (data.length() < this.dataLimitInOctets / CHAR2OCTETRATIO) {
                    // inline storage in the DATA column; clear BDATA
                    stmt.setString(si++, data);
                    stmt.setBinaryStream(si++, null, 0);
                } else {
                    // overflow storage in the BDATA blob column
                    stmt.setString(si++, "\"blob\"");
                    byte[] bytes = asBytes(data);
                    stmt.setBytes(si++, bytes);
                }
                stmt.addBatch();
            }
            int[] results = stmt.executeBatch();
            boolean success = true;
            for (int i = 0; i < documents.size(); i++) {
                int result = results[i];
                if (result != 1 && result != Statement.SUCCESS_NO_INFO) {
                    LOG.error("DB insert failed for {}: {}", tableName, documents.get(i).getId());
                    success = false;
                }
            }
            return success;
        } finally {
            stmt.close();
        }
    }
private int dbDelete(Connection connection, String tableName, List<String> ids) throws SQLException {
PreparedStatement stmt;
int cnt = ids.size();
if (cnt == 1) {
stmt = connection.prepareStatement("delete from " + tableName + " where ID=?");
} else {
StringBuilder inClause = new StringBuilder();
for (int i = 0; i < cnt; i++) {
inClause.append('?');
if (i != cnt - 1) {
inClause.append(',');
}
}
stmt = connection.prepareStatement("delete from " + tableName + " where ID in (" + inClause.toString() + ")");
}
try {
for (int i = 0; i < cnt; i++) {
setIdInStatement(stmt, i + 1, ids.get(i));
}
int result = stmt.executeUpdate();
if (result != cnt) {
LOG.debug("DB delete failed for " + tableName + "/" + ids);
}
return result;
} finally {
stmt.close();
}
}
    /**
     * Deletes rows matching per-ID conditions in a single statement. Only
     * conditions on MODIFIED are supported (EQUALS with a Long value, or
     * EXISTS); anything else raises a {@link DocumentStoreException}. The
     * parameter binding loop below mirrors the clause-building loop, so the
     * two iteration orders must stay in sync.
     *
     * @return the number of rows actually deleted
     */
    private int dbDelete(Connection connection, String tableName,
                         Map<String, Map<Key, Condition>> toDelete)
            throws SQLException, DocumentStoreException {
        String or = "";
        StringBuilder whereClause = new StringBuilder();
        for (Entry<String, Map<Key, Condition>> entry : toDelete.entrySet()) {
            whereClause.append(or);
            or = " or ";
            whereClause.append("ID=?");
            for (Entry<Key, Condition> c : entry.getValue().entrySet()) {
                if (!c.getKey().getName().equals(MODIFIED)) {
                    throw new DocumentStoreException(
                            "Unsupported condition: " + c);
                }
                whereClause.append(" and MODIFIED");
                if (c.getValue().type == Condition.Type.EQUALS
                        && c.getValue().value instanceof Long) {
                    whereClause.append("=?");
                } else if (c.getValue().type == Condition.Type.EXISTS) {
                    whereClause.append(" is not null");
                } else {
                    throw new DocumentStoreException(
                            "Unsupported condition: " + c);
                }
            }
        }
        PreparedStatement stmt= connection.prepareStatement(
                "delete from " + tableName + " where " + whereClause);
        try {
            int i = 1;
            // bind parameters in the same order the clause was built
            for (Entry<String, Map<Key, Condition>> entry : toDelete.entrySet()) {
                setIdInStatement(stmt, i++, entry.getKey());
                for (Entry<Key, Condition> c : entry.getValue().entrySet()) {
                    if (c.getValue().type == Condition.Type.EQUALS) {
                        stmt.setLong(i++, (Long) c.getValue().value);
                    }
                }
            }
            return stmt.executeUpdate();
        } finally {
            stmt.close();
        }
    }
    /**
     * Read/write modes are not supported by the RDB store; this call is
     * intentionally a no-op.
     */
    @Override
    public void setReadWriteMode(String readWriteMode) {
        // ignored
    }
@SuppressWarnings("unchecked")
private static <T extends Document> T castAsT(NodeDocument doc) {
return (T) doc;
}
    // Memory Cache
    // cache of NODES-collection documents, keyed by document ID (StringValue)
    private Cache<CacheValue, NodeDocument> nodesCache;
    // statistics for nodesCache
    private CacheStats cacheStats;
    // striped locks guarding per-document cache updates, keyed by document ID
    private final Striped<Lock> locks = Striped.lock(64);
private Lock getAndLock(String key) {
Lock l = locks.get(key);
l.lock();
return l;
}
@CheckForNull
private static NodeDocument unwrap(@Nonnull NodeDocument doc) {
return doc == NodeDocument.NULL ? null : doc;
}
@Nonnull
private static NodeDocument wrap(@CheckForNull NodeDocument doc) {
return doc == null ? NodeDocument.NULL : doc;
}
@Nonnull
private static String idOf(@Nonnull Document doc) {
String id = doc.getId();
if (id == null) {
throw new IllegalArgumentException("non-null ID expected");
}
return id;
}
private static long modcountOf(@Nonnull Document doc) {
Number n = doc.getModCount();
return n != null ? n.longValue() : -1;
}
    /**
     * Adds a document to the {@link #nodesCache} iff there is no document in
     * the cache with the document key. This method does not acquire a lock from
     * {@link #locks}! The caller must ensure a lock is held for the given
     * document.
     *
     * @param doc
     *            the document to add to the cache; must not be the NULL
     *            sentinel, and is sealed (made immutable) before caching.
     * @return either the given <code>doc</code> or the document already present
     *         in the cache.
     */
    @Nonnull
    private NodeDocument addToCache(@Nonnull final NodeDocument doc) {
        if (doc == NodeDocument.NULL) {
            throw new IllegalArgumentException("doc must not be NULL document");
        }
        doc.seal();
        // make sure we only cache the document if it wasn't
        // changed and cached by some other thread in the
        // meantime. That is, use get() with a Callable,
        // which is only used when the document isn't there
        try {
            CacheValue key = new StringValue(idOf(doc));
            for (;;) {
                NodeDocument cached = nodesCache.get(key, new Callable<NodeDocument>() {
                    @Override
                    public NodeDocument call() {
                        return doc;
                    }
                });
                if (cached != NodeDocument.NULL) {
                    return cached;
                } else {
                    // a NULL sentinel was cached concurrently; drop it and retry
                    nodesCache.invalidate(key);
                }
            }
        } catch (ExecutionException e) {
            // will never happen because call() just returns
            // the already available doc
            throw new IllegalStateException(e);
        }
    }
@Nonnull
private void applyToCache(@Nonnull final NodeDocument oldDoc, @Nonnull final NodeDocument newDoc) {
NodeDocument cached = addToCache(newDoc);
if (cached == newDoc) {
// successful
return;
} else if (oldDoc == null) {
// this is an insert and some other thread was quicker
// loading it into the cache -> return now
return;
} else {
CacheValue key = new StringValue(idOf(newDoc));
// this is an update (oldDoc != null)
if (Objects.equal(cached.getModCount(), oldDoc.getModCount())) {
nodesCache.put(key, newDoc);
} else {
// the cache entry was modified by some other thread in
// the meantime. the updated cache entry may or may not
// include this update. we cannot just apply our update
// on top of the cached entry.
// therefore we must invalidate the cache entry
nodesCache.invalidate(key);
}
}
}
private <T extends Document> void addToCache(Collection<T> collection, T doc) {
if (collection == Collection.NODES) {
Lock lock = getAndLock(idOf(doc));
try {
addToCache((NodeDocument) doc);
} finally {
lock.unlock();
}
}
}
    /**
     * Converts a query result row into a document, reconciling it with the
     * nodes cache: a newer cached copy wins; otherwise the fresh document
     * replaces the stale cache entry. Uses an optimistic check first and a
     * second, lock-protected check before writing to the cache.
     *
     * @param now timestamp used to mark cached documents up-to-date
     */
    private <T extends Document> T runThroughCache(Collection<T> collection, RDBRow row, long now) {
        if (collection != Collection.NODES) {
            // not in the cache anyway
            return SR.fromRow(collection, row);
        }
        String id = row.getId();
        CacheValue cacheKey = new StringValue(id);
        NodeDocument inCache = nodesCache.getIfPresent(cacheKey);
        Number modCount = row.getModcount();
        // do not overwrite document in cache if the
        // existing one in the cache is newer
        if (inCache != null && inCache != NodeDocument.NULL) {
            // check mod count
            Number cachedModCount = inCache.getModCount();
            if (cachedModCount == null) {
                throw new IllegalStateException("Missing " + Document.MOD_COUNT);
            }
            if (modCount.longValue() <= cachedModCount.longValue()) {
                // we can use the cached document
                inCache.markUpToDate(now);
                return castAsT(inCache);
            }
        }
        NodeDocument fresh = (NodeDocument) SR.fromRow(collection, row);
        fresh.seal();
        // re-check under the per-ID lock before replacing the cache entry
        Lock lock = getAndLock(id);
        try {
            inCache = nodesCache.getIfPresent(cacheKey);
            if (inCache != null && inCache != NodeDocument.NULL) {
                // check mod count
                Number cachedModCount = inCache.getModCount();
                if (cachedModCount == null) {
                    throw new IllegalStateException("Missing " + Document.MOD_COUNT);
                }
                if (modCount.longValue() > cachedModCount.longValue()) {
                    nodesCache.put(cacheKey, fresh);
                } else {
                    fresh = inCache;
                }
            } else {
                nodesCache.put(cacheKey, fresh);
            }
        } finally {
            lock.unlock();
        }
        return castAsT(fresh);
    }
private boolean hasChangesToCollisions(UpdateOp update) {
if (! USECMODCOUNT) return false;
for (Entry<Key, Operation> e : checkNotNull(update).getChanges().entrySet()) {
Key k = e.getKey();
Operation op = e.getValue();
if (op.type == Operation.Type.SET_MAP_ENTRY) {
if (NodeDocument.COLLISIONS.equals(k.getName())) {
return true;
}
}
}
return false;
}
}
|
oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.document.rdb;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.apache.jackrabbit.oak.plugins.document.UpdateUtils.checkConditions;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.locks.Lock;
import java.util.zip.Deflater;
import java.util.zip.GZIPOutputStream;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.sql.DataSource;
import org.apache.jackrabbit.oak.cache.CacheStats;
import org.apache.jackrabbit.oak.cache.CacheValue;
import org.apache.jackrabbit.oak.plugins.document.Collection;
import org.apache.jackrabbit.oak.plugins.document.Document;
import org.apache.jackrabbit.oak.plugins.document.DocumentMK;
import org.apache.jackrabbit.oak.plugins.document.DocumentStore;
import org.apache.jackrabbit.oak.plugins.document.DocumentStoreException;
import org.apache.jackrabbit.oak.plugins.document.NodeDocument;
import org.apache.jackrabbit.oak.plugins.document.Revision;
import org.apache.jackrabbit.oak.plugins.document.StableRevisionComparator;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Condition;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Operation;
import org.apache.jackrabbit.oak.plugins.document.UpdateUtils;
import org.apache.jackrabbit.oak.plugins.document.cache.CacheInvalidationStats;
import org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore;
import org.apache.jackrabbit.oak.plugins.document.util.StringValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Objects;
import com.google.common.cache.Cache;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.Striped;
/**
* Implementation of {@link DocumentStore} for relational databases.
*
* <h3>Supported Databases</h3>
* <p>
* The code is supposed to be sufficiently generic to run with a variety of
* database implementations. However, the tables are created when required to
* simplify testing, and <em>that</em> code specifically supports these
* databases:
* <ul>
* <li>h2</li>
* <li>IBM DB2</li>
* <li>Postgres</li>
* <li>MariaDB (MySQL) (experimental)</li>
* <li>Oracle (experimental)</li>
* </ul>
*
* <h3>Table Layout</h3>
* <p>
* Data for each of the DocumentStore's {@link Collection}s is stored in its own
* database table (with a name matching the collection).
* <p>
* The tables essentially implement key/value storage, where the key usually is
* derived from an Oak path, and the value is a serialization of a
* {@link Document} (or a part of one). Additional fields are used for queries,
* debugging, and concurrency control:
* <table style="text-align: left;">
* <thead>
* <tr>
* <th>Column</th>
* <th>Type</th>
* <th>Description</th>
* </tr>
* </thead> <tbody>
* <tr>
* <th>ID</th>
* <td>varchar(512) not null primary key</td>
* <td>the document's key (for databases that can not handle 512 character
* primary keys, such as MySQL, varbinary is possible as well; note that this
* currently needs to be hardcoded)</td>
* </tr>
* <tr>
* <th>MODIFIED</th>
* <td>bigint</td>
* <td>low-resolution timestamp
* </tr>
* <tr>
* <th>HASBINARY</th>
* <td>smallint</td>
* <td>flag indicating whether the document has binary properties
* </tr>
* <tr>
* <th>DELETEDONCE</th>
* <td>smallint</td>
* <td>flag indicating whether the document has been deleted once
* </tr>
* <tr>
* <th>MODCOUNT</th>
* <td>bigint</td>
* <td>modification counter, used for avoiding overlapping updates</td>
* </tr>
* <tr>
* <th>DSIZE</th>
* <td>bigint</td>
* <td>the approximate size of the document's JSON serialization (for debugging
* purposes)</td>
* </tr>
* <tr>
* <th>DATA</th>
* <td>varchar(16384)</td>
* <td>the document's JSON serialization (only used for small document sizes, in
* which case BDATA (below) is not set), or a sequence of JSON serialized update
* operations to be applied against the last full serialization</td>
* </tr>
* <tr>
* <th>BDATA</th>
* <td>blob</td>
* <td>the document's JSON serialization (usually GZIPped, only used for "large"
* documents)</td>
* </tr>
* </tbody>
* </table>
* <p>
* The names of database tables can be prefixed; the purpose is mainly for
* testing, as tables can also be dropped automatically when the store is
* disposed (this only happens for those tables that have been created on
* demand)
* <p>
* <em>Note that the database needs to be created/configured to support all Unicode
* characters in text fields, and to collate by Unicode code point (in DB2: "collate using identity",
* in Postgres: "C").
* THIS IS NOT THE DEFAULT!</em>
* <p>
* <em>For MySQL, the database parameter "max_allowed_packet" needs to be increased to support ~16 blobs.</em>
*
* <h3>Caching</h3>
* <p>
* The cache borrows heavily from the {@link MongoDocumentStore} implementation;
* however it does not support the off-heap mechanism yet.
*
* <h3>Queries</h3>
* <p>
* The implementation currently supports only three indexed properties:
* "_bin", "deletedOnce", and "_modified". Attempts to use a different indexed property will
* cause a {@link DocumentStoreException}.
*/
public class RDBDocumentStore implements DocumentStore {
/**
* Creates a {@linkplain RDBDocumentStore} instance using the provided
* {@link DataSource}, {@link DocumentMK.Builder}, and {@link RDBOptions}.
*/
public RDBDocumentStore(DataSource ds, DocumentMK.Builder builder, RDBOptions options) {
try {
initialize(ds, builder, options);
} catch (Exception ex) {
throw new DocumentStoreException("initializing RDB document store", ex);
}
}
/**
* Creates a {@linkplain RDBDocumentStore} instance using the provided
* {@link DataSource}, {@link DocumentMK.Builder}, and default
* {@link RDBOptions}.
*/
public RDBDocumentStore(DataSource ds, DocumentMK.Builder builder) {
this(ds, builder, new RDBOptions());
}
@Override
public <T extends Document> T find(Collection<T> collection, String id) {
return find(collection, id, Integer.MAX_VALUE);
}
@Override
public <T extends Document> T find(final Collection<T> collection, final String id, int maxCacheAge) {
return readDocumentCached(collection, id, maxCacheAge);
}
@Nonnull
@Override
public <T extends Document> List<T> query(Collection<T> collection, String fromKey, String toKey, int limit) {
return query(collection, fromKey, toKey, null, 0, limit);
}
@Nonnull
@Override
public <T extends Document> List<T> query(Collection<T> collection, String fromKey, String toKey, String indexedProperty,
long startValue, int limit) {
return internalQuery(collection, fromKey, toKey, indexedProperty, startValue, limit);
}
@Override
public <T extends Document> void remove(Collection<T> collection, String id) {
delete(collection, id);
invalidateCache(collection, id, true);
}
@Override
public <T extends Document> void remove(Collection<T> collection, List<String> ids) {
for (String id : ids) {
invalidateCache(collection, id, true);
}
delete(collection, ids);
}
@Override
public <T extends Document> int remove(Collection<T> collection,
Map<String, Map<Key, Condition>> toRemove) {
int num = delete(collection, toRemove);
for (String id : toRemove.keySet()) {
invalidateCache(collection, id, true);
}
return num;
}
@Override
public <T extends Document> boolean create(Collection<T> collection, List<UpdateOp> updateOps) {
return internalCreate(collection, updateOps);
}
@Override
public <T extends Document> void update(Collection<T> collection, List<String> keys, UpdateOp updateOp) {
internalUpdate(collection, keys, updateOp);
}
@Override
public <T extends Document> T createOrUpdate(Collection<T> collection, UpdateOp update) {
return internalCreateOrUpdate(collection, update, true, false);
}
@Override
public <T extends Document> T findAndUpdate(Collection<T> collection, UpdateOp update) {
return internalCreateOrUpdate(collection, update, false, true);
}
@Override
public CacheInvalidationStats invalidateCache() {
for (NodeDocument nd : nodesCache.asMap().values()) {
nd.markUpToDate(0);
}
return null;
}
@Override
public <T extends Document> void invalidateCache(Collection<T> collection, String id) {
invalidateCache(collection, id, false);
}
private <T extends Document> void invalidateCache(Collection<T> collection, String id, boolean remove) {
if (collection == Collection.NODES) {
invalidateNodesCache(id, remove);
}
}
private void invalidateNodesCache(String id, boolean remove) {
StringValue key = new StringValue(id);
Lock lock = getAndLock(id);
try {
if (remove) {
nodesCache.invalidate(key);
} else {
NodeDocument entry = nodesCache.getIfPresent(key);
if (entry != null) {
entry.markUpToDate(0);
}
}
} finally {
lock.unlock();
}
}
    // used for diagnostics
    // space-separated names of the tables successfully dropped in dispose()
    private String droppedTables = "";
    /**
     * Returns the names of the tables that were dropped when this store was
     * disposed (space-separated; empty if none). For diagnostics/tests.
     */
    public String getDroppedTables() {
        return this.droppedTables;
    }
    // table names
    // maps each Collection constant to its (un-prefixed) default table name
    private static Map<Object, String> TABLEMAP;
    // sorted, unmodifiable list of all default table names
    private static List<String> TABLENAMES;
    static {
        Map<Object, String> tmp = new HashMap<Object, String>();
        tmp.put(Collection.CLUSTER_NODES, "CLUSTERNODES");
        tmp.put(Collection.JOURNAL, "JOURNAL");
        tmp.put(Collection.NODES, "NODES");
        tmp.put(Collection.SETTINGS, "SETTINGS");
        TABLEMAP = Collections.unmodifiableMap(tmp);
        List<String> tl = new ArrayList<String>(TABLEMAP.values());
        Collections.sort(tl);
        TABLENAMES = Collections.unmodifiableList(tl);
    }
    /** Returns the sorted, unmodifiable list of default table names. */
    public static List<String> getTableNames() {
        return TABLENAMES;
    }
@Override
public void dispose() {
if (!this.tablesToBeDropped.isEmpty()) {
String dropped = "";
LOG.debug("attempting to drop: " + this.tablesToBeDropped);
for (String tname : this.tablesToBeDropped) {
Connection con = null;
try {
con = this.ch.getRWConnection();
Statement stmt = null;
try {
stmt = con.createStatement();
stmt.execute("drop table " + tname);
stmt.close();
con.commit();
dropped += tname + " ";
} catch (SQLException ex) {
LOG.debug("attempting to drop: " + tname, ex);
} finally {
this.ch.closeStatement(stmt);
}
} catch (SQLException ex) {
LOG.debug("attempting to drop: " + tname, ex);
} finally {
this.ch.closeConnection(con);
}
}
this.droppedTables = dropped.trim();
}
try {
this.ch.close();
} catch (IOException ex) {
LOG.error("closing connection handler", ex);
}
}
@Override
public <T extends Document> T getIfCached(Collection<T> collection, String id) {
if (collection != Collection.NODES) {
return null;
} else {
NodeDocument doc = nodesCache.getIfPresent(new StringValue(id));
return castAsT(doc);
}
}
@Override
public CacheStats getCacheStats() {
return this.cacheStats;
}
@Override
public Map<String, String> getMetadata() {
return metadata;
}
// implementation
enum FETCHFIRSTSYNTAX { FETCHFIRST, LIMIT, TOP};
private static void versionCheck(DatabaseMetaData md, int xmaj, int xmin, String description) throws SQLException {
int maj = md.getDatabaseMajorVersion();
int min = md.getDatabaseMinorVersion();
if (maj < xmaj || (maj == xmaj && min < xmin)) {
LOG.info("Unsupported " + description + " version: " + maj + "." + min + ", expected at least " + xmaj + "." + xmin);
}
}
/**
* Defines variation in the capabilities of different RDBs.
*/
protected enum DB {
DEFAULT("default") {
},
H2("H2") {
@Override
public void checkVersion(DatabaseMetaData md) throws SQLException {
versionCheck(md, 1, 4, description);
}
},
DERBY("Apache Derby") {
@Override
public void checkVersion(DatabaseMetaData md) throws SQLException {
versionCheck(md, 10, 11, description);
}
public boolean allowsCaseInSelect() {
return false;
}
},
POSTGRES("PostgreSQL") {
@Override
public void checkVersion(DatabaseMetaData md) throws SQLException {
versionCheck(md, 9, 3, description);
}
@Override
public String getTableCreationStatement(String tableName) {
return ("create table " + tableName + " (ID varchar(512) not null primary key, MODIFIED bigint, HASBINARY smallint, DELETEDONCE smallint, MODCOUNT bigint, CMODCOUNT bigint, DSIZE bigint, DATA varchar(16384), BDATA bytea)");
}
@Override
public String getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) {
Connection con = null;
PreparedStatement stmt = null;
ResultSet rs = null;
Map<String, String> result = new HashMap<String, String>();
try {
con = ch.getROConnection();
String cat = con.getCatalog();
stmt = con.prepareStatement("SELECT pg_encoding_to_char(encoding), datcollate FROM pg_database WHERE datname=?");
stmt.setString(1, cat);
rs = stmt.executeQuery();
while (rs.next()) {
result.put("pg_encoding_to_char(encoding)", rs.getString(1));
result.put("datcollate", rs.getString(2));
}
stmt.close();
con.commit();
} catch (SQLException ex) {
LOG.debug("while getting diagnostics", ex);
} finally {
ch.closeResultSet(rs);
ch.closeStatement(stmt);
ch.closeConnection(con);
}
return result.toString();
}
},
DB2("DB2") {
@Override
public void checkVersion(DatabaseMetaData md) throws SQLException {
versionCheck(md, 10, 1, description);
}
@Override
public String getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) {
Connection con = null;
PreparedStatement stmt = null;
ResultSet rs = null;
Map<String, String> result = new HashMap<String, String>();
try {
con = ch.getROConnection();
// we can't look up by schema as con.getSchema is JDK 1.7
stmt = con.prepareStatement("SELECT CODEPAGE, COLLATIONSCHEMA, COLLATIONNAME, TABSCHEMA FROM SYSCAT.COLUMNS WHERE COLNAME=? and COLNO=0 AND UPPER(TABNAME)=UPPER(?)");
stmt.setString(1, "ID");
stmt.setString(2, tableName);
rs = stmt.executeQuery();
while (rs.next() && result.size() < 20) {
// thus including the schema name here
String schema = rs.getString("TABSCHEMA").trim();
result.put(schema + ".CODEPAGE", rs.getString("CODEPAGE").trim());
result.put(schema + ".COLLATIONSCHEMA", rs.getString("COLLATIONSCHEMA").trim());
result.put(schema + ".COLLATIONNAME", rs.getString("COLLATIONNAME").trim());
}
stmt.close();
con.commit();
} catch (SQLException ex) {
LOG.debug("while getting diagnostics", ex);
} finally {
ch.closeResultSet(rs);
ch.closeStatement(stmt);
ch.closeConnection(con);
}
return result.toString();
}
},
ORACLE("Oracle") {
@Override
public void checkVersion(DatabaseMetaData md) throws SQLException {
versionCheck(md, 12, 1, description);
}
@Override
public String getInitializationStatement() {
// see https://issues.apache.org/jira/browse/OAK-1914
// for some reason, the default for NLS_SORT is incorrect
return ("ALTER SESSION SET NLS_SORT='BINARY'");
}
@Override
public String getTableCreationStatement(String tableName) {
// see https://issues.apache.org/jira/browse/OAK-1914
return ("create table " + tableName + " (ID varchar(512) not null primary key, MODIFIED number, HASBINARY number, DELETEDONCE number, MODCOUNT number, CMODCOUNT number, DSIZE number, DATA varchar(4000), BDATA blob)");
}
@Override
public String getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) {
Connection con = null;
Statement stmt = null;
ResultSet rs = null;
Map<String, String> result = new HashMap<String, String>();
try {
con = ch.getROConnection();
stmt = con.createStatement();
rs = stmt.executeQuery("SELECT PARAMETER, VALUE from NLS_DATABASE_PARAMETERS WHERE PARAMETER IN ('NLS_COMP', 'NLS_CHARACTERSET')");
while (rs.next()) {
result.put(rs.getString(1), rs.getString(2));
}
stmt.close();
con.commit();
} catch (SQLException ex) {
LOG.debug("while getting diagnostics", ex);
} finally {
ch.closeResultSet(rs);
ch.closeStatement(stmt);
ch.closeConnection(con);
}
return result.toString();
}
},
MYSQL("MySQL") {
@Override
public void checkVersion(DatabaseMetaData md) throws SQLException {
versionCheck(md, 5, 5, description);
}
@Override
public boolean isPrimaryColumnByteEncoded() {
// TODO: we should dynamically detect this
return true;
}
@Override
public String getTableCreationStatement(String tableName) {
// see https://issues.apache.org/jira/browse/OAK-1913
return ("create table " + tableName + " (ID varbinary(512) not null primary key, MODIFIED bigint, HASBINARY smallint, DELETEDONCE smallint, MODCOUNT bigint, CMODCOUNT bigint, DSIZE bigint, DATA varchar(16000), BDATA longblob)");
}
@Override
public FETCHFIRSTSYNTAX getFetchFirstSyntax() {
return FETCHFIRSTSYNTAX.LIMIT;
}
@Override
public String getConcatQueryString(int dataOctetLimit, int dataLength) {
return "CONCAT(DATA, ?)";
}
@Override
public String getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) {
Connection con = null;
PreparedStatement stmt = null;
ResultSet rs = null;
Map<String, String> result = new HashMap<String, String>();
try {
con = ch.getROConnection();
stmt = con.prepareStatement("SHOW TABLE STATUS LIKE ?");
stmt.setString(1, tableName);
rs = stmt.executeQuery();
while (rs.next()) {
result.put("collation", rs.getString("Collation"));
}
stmt.close();
con.commit();
} catch (SQLException ex) {
LOG.debug("while getting diagnostics", ex);
} finally {
ch.closeResultSet(rs);
ch.closeStatement(stmt);
ch.closeConnection(con);
}
return result.toString();
}
},
MSSQL("Microsoft SQL Server") {
@Override
public void checkVersion(DatabaseMetaData md) throws SQLException {
versionCheck(md, 11, 0, description);
}
@Override
public boolean isPrimaryColumnByteEncoded() {
// TODO: we should dynamically detect this
return true;
}
@Override
public String getTableCreationStatement(String tableName) {
// see https://issues.apache.org/jira/browse/OAK-2395
return ("create table " + tableName + " (ID varbinary(512) not null primary key, MODIFIED bigint, HASBINARY smallint, DELETEDONCE smallint, MODCOUNT bigint, CMODCOUNT bigint, DSIZE bigint, DATA nvarchar(4000), BDATA varbinary(max))");
}
@Override
public FETCHFIRSTSYNTAX getFetchFirstSyntax() {
return FETCHFIRSTSYNTAX.TOP;
}
@Override
public String getConcatQueryString(int dataOctetLimit, int dataLength) {
/*
* To avoid truncation when concatenating force an error when
* limit is above the octet limit
*/
return "CASE WHEN LEN(DATA) <= " + (dataOctetLimit - dataLength) + " THEN (DATA + CAST(? AS nvarchar("
+ dataOctetLimit + "))) ELSE (DATA + CAST(DATA AS nvarchar(max))) END";
}
@Override
public String getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) {
Connection con = null;
PreparedStatement stmt = null;
ResultSet rs = null;
Map<String, String> result = new HashMap<String, String>();
try {
con = ch.getROConnection();
String cat = con.getCatalog();
stmt = con.prepareStatement("SELECT collation_name FROM sys.databases WHERE name=?");
stmt.setString(1, cat);
rs = stmt.executeQuery();
while (rs.next()) {
result.put("collation_name", rs.getString(1));
}
stmt.close();
con.commit();
} catch (SQLException ex) {
LOG.debug("while getting diagnostics", ex);
} finally {
ch.closeResultSet(rs);
ch.closeStatement(stmt);
ch.closeConnection(con);
}
return result.toString();
}
};
/**
* Check the database brand and version
*/
public void checkVersion(DatabaseMetaData md) throws SQLException {
LOG.info("Unknown database type: " + md.getDatabaseProductName());
}
/**
* If the primary column is encoded in bytes.
* Default false
* @return boolean
*/
public boolean isPrimaryColumnByteEncoded() {
return false;
}
/**
* Allows case in select. Default true.
*/
public boolean allowsCaseInSelect() {
return true;
}
/**
* Query syntax for "FETCH FIRST"
*/
public FETCHFIRSTSYNTAX getFetchFirstSyntax() {
return FETCHFIRSTSYNTAX.FETCHFIRST;
}
/**
* Returns the CONCAT function or its equivalent function or sub-query.
* Note that the function MUST NOT cause a truncated value to be
* written!
*
* @param dataOctetLimit
* expected capacity of data column
* @param dataLength
* length of string to be inserted
*
* @return the concat query string
*/
public String getConcatQueryString(int dataOctetLimit, int dataLength) {
return "DATA || CAST(? AS varchar(" + dataOctetLimit + "))";
}
/**
* Query for any required initialization of the DB.
*
* @return the DB initialization SQL string
*/
public @Nonnull String getInitializationStatement() {
return "";
}
/**
* Table creation statement string
*
* @param tableName
* @return the table creation string
*/
public String getTableCreationStatement(String tableName) {
return "create table "
+ tableName
+ " (ID varchar(512) not null primary key, MODIFIED bigint, HASBINARY smallint, DELETEDONCE smallint, MODCOUNT bigint, CMODCOUNT bigint, DSIZE bigint, DATA varchar(16384), BDATA blob("
+ 1024 * 1024 * 1024 + "))";
}
public String getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) {
return "";
}
protected String description;
private DB(String description) {
this.description = description;
}
@Override
public String toString() {
return this.description;
}
@Nonnull
public static DB getValue(String desc) {
for (DB db : DB.values()) {
if (db.description.equals(desc)) {
return db;
} else if (db == DB2 && desc.startsWith("DB2/")) {
return db;
}
}
LOG.error("DB type " + desc + " unknown, trying default settings");
DEFAULT.description = desc + " - using default settings";
return DEFAULT;
}
}
    // name of the low-resolution last-modification property
    private static final String MODIFIED = "_modified";
    // name of the modification counter used for optimistic locking
    private static final String MODCOUNT = "_modCount";
    /**
     * Optional counter for changes to "_collisions" map ({@link NodeDocument#COLLISIONS}).
     */
    private static final String COLLISIONSMODCOUNT = "_collisionsModCount";
    // name of the document id property
    private static final String ID = "_id";
    private static final Logger LOG = LoggerFactory.getLogger(RDBDocumentStore.class);
    // revision comparator used when applying UpdateOps
    private final Comparator<Revision> comparator = StableRevisionComparator.REVERSE;
    // creation call stack, recorded only when debug logging is enabled (see finalize())
    private Exception callStack;
    // manages JDBC connections to the backing database
    private RDBConnectionHandler ch;
    // from options
    private Set<String> tablesToBeDropped = new HashSet<String>();
    // table names
    private String tnNodes, tnClusterNodes, tnSettings, tnJournal;
    // ratio between Java characters and UTF-8 encoding
    // a) single characters will fit into 3 bytes
    // b) a surrogate pair (two Java characters) will fit into 4 bytes
    // thus...
    private static final int CHAR2OCTETRATIO = 3;
    // capacity of DATA column
    private int dataLimitInOctets = 16384;
    // number of retries for updates
    private static final int RETRIES = 10;
    // see OAK-2044
    protected static final boolean USECMODCOUNT = true;
    // key of the "_modified" property, used for appendable updates
    private static final Key MODIFIEDKEY = new Key(MODIFIED, null);
    // DB-specific information
    private DB db;
    // store metadata exposed via getMetadata()
    private Map<String, String> metadata;
    // set of supported indexed properties
    private static final Set<String> INDEXEDPROPERTIES = new HashSet<String>(Arrays.asList(new String[] { MODIFIED,
            NodeDocument.HAS_BINARY_FLAG, NodeDocument.DELETED_ONCE }));
    // set of properties not serialized to JSON
    private static final Set<String> COLUMNPROPERTIES = new HashSet<String>(Arrays.asList(new String[] { ID,
            NodeDocument.HAS_BINARY_FLAG, NodeDocument.DELETED_ONCE, COLLISIONSMODCOUNT, MODIFIED, MODCOUNT }));
    // serializer translating between Documents and table rows
    private final RDBDocumentSerializer SR = new RDBDocumentSerializer(this, COLUMNPROPERTIES);
    /**
     * Initializes the store: resolves table names, sets up the connection
     * handler and cache, detects the database brand/version, runs any
     * DB-specific initialization statement, and creates missing tables.
     * Order matters: the DB type must be detected before tables are created,
     * since DDL differs per brand.
     */
    private void initialize(DataSource ds, DocumentMK.Builder builder, RDBOptions options) throws Exception {
        this.tnNodes = RDBJDBCTools.createTableName(options.getTablePrefix(), TABLEMAP.get(Collection.NODES));
        this.tnClusterNodes = RDBJDBCTools.createTableName(options.getTablePrefix(), TABLEMAP.get(Collection.CLUSTER_NODES));
        this.tnSettings = RDBJDBCTools.createTableName(options.getTablePrefix(), TABLEMAP.get(Collection.SETTINGS));
        this.tnJournal = RDBJDBCTools.createTableName(options.getTablePrefix(), TABLEMAP.get(Collection.JOURNAL));
        this.ch = new RDBConnectionHandler(ds);
        // only record the creation call stack when it would actually be logged
        this.callStack = LOG.isDebugEnabled() ? new Exception("call stack of RDBDocumentStore creation") : null;
        this.nodesCache = builder.buildDocumentCache(this);
        this.cacheStats = new CacheStats(nodesCache, "Document-Documents", builder.getWeigher(), builder.getDocumentCacheSize());
        Connection con = this.ch.getRWConnection();
        int isolation = con.getTransactionIsolation();
        String isolationDiags = RDBJDBCTools.isolationLevelToString(isolation);
        // READ_COMMITTED is expected; anything else is only reported, not rejected
        if (isolation != Connection.TRANSACTION_READ_COMMITTED) {
            LOG.info("Detected transaction isolation level " + isolationDiags + " is "
                    + (isolation < Connection.TRANSACTION_READ_COMMITTED ? "lower" : "higher") + " than expected "
                    + RDBJDBCTools.isolationLevelToString(Connection.TRANSACTION_READ_COMMITTED)
                    + " - check datasource configuration");
        }
        DatabaseMetaData md = con.getMetaData();
        String dbDesc = String.format("%s %s (%d.%d)", md.getDatabaseProductName(), md.getDatabaseProductVersion(),
                md.getDatabaseMajorVersion(), md.getDatabaseMinorVersion());
        String driverDesc = String.format("%s %s (%d.%d)", md.getDriverName(), md.getDriverVersion(), md.getDriverMajorVersion(),
                md.getDriverMinorVersion());
        String dbUrl = md.getURL();
        this.db = DB.getValue(md.getDatabaseProductName());
        this.metadata = ImmutableMap.<String,String>builder()
                .put("type", "rdb")
                .put("db", md.getDatabaseProductName())
                .put("version", md.getDatabaseProductVersion())
                .build();
        db.checkVersion(md);
        // run DB-specific session initialization, if any (e.g. Oracle NLS_SORT)
        if (! "".equals(db.getInitializationStatement())) {
            Statement stmt = null;
            try {
                stmt = con.createStatement();
                stmt.execute(db.getInitializationStatement());
                stmt.close();
                con.commit();
            }
            finally {
                this.ch.closeStatement(stmt);
            }
        }
        List<String> tablesCreated = new ArrayList<String>();
        List<String> tablesPresent = new ArrayList<String>();
        try {
            createTableFor(con, Collection.CLUSTER_NODES, tablesCreated, tablesPresent);
            createTableFor(con, Collection.NODES, tablesCreated, tablesPresent);
            createTableFor(con, Collection.SETTINGS, tablesCreated, tablesPresent);
            createTableFor(con, Collection.JOURNAL, tablesCreated, tablesPresent);
        } finally {
            con.commit();
            con.close();
        }
        // only tables created by this instance may be dropped on close
        if (options.isDropTablesOnClose()) {
            tablesToBeDropped.addAll(tablesCreated);
        }
        String diag = db.getAdditionalDiagnostics(this.ch, this.tnNodes);
        LOG.info("RDBDocumentStore instantiated for database " + dbDesc + ", using driver: " + driverDesc + ", connecting to: "
                + dbUrl + (diag.isEmpty() ? "" : (", properties: " + diag)) + ", transaction isolation level: " + isolationDiags);
        if (!tablesPresent.isEmpty()) {
            LOG.info("Tables present upon startup: " + tablesPresent);
        }
        if (!tablesCreated.isEmpty()) {
            LOG.info("Tables created upon startup: " + tablesCreated
                    + (options.isDropTablesOnClose() ? " (will be dropped on exit)" : ""));
        }
    }
private void createTableFor(Connection con, Collection<? extends Document> col, List<String> tablesCreated, List<String> tablesPresent) throws SQLException {
String dbname = this.db.toString();
if (con.getMetaData().getURL() != null) {
dbname += " (" + con.getMetaData().getURL() + ")";
}
String tableName = getTable(col);
PreparedStatement checkStatement = null;
ResultSet checkResultSet = null;
Statement creatStatement = null;
try {
checkStatement = con.prepareStatement("select DATA from " + tableName + " where ID = ?");
checkStatement.setString(1, "0:/");
checkResultSet = checkStatement.executeQuery();
if (col.equals(Collection.NODES)) {
// try to discover size of DATA column
ResultSetMetaData met = checkResultSet.getMetaData();
this.dataLimitInOctets = met.getPrecision(1);
}
tablesPresent.add(tableName);
} catch (SQLException ex) {
// table does not appear to exist
con.rollback();
try {
creatStatement = con.createStatement();
creatStatement.execute(this.db.getTableCreationStatement(tableName));
creatStatement.close();
con.commit();
tablesCreated.add(tableName);
if (col.equals(Collection.NODES)) {
PreparedStatement pstmt = con.prepareStatement("select DATA from " + tableName + " where ID = ?");
pstmt.setString(1, "0:/");
ResultSet rs = pstmt.executeQuery();
ResultSetMetaData met = rs.getMetaData();
this.dataLimitInOctets = met.getPrecision(1);
}
}
catch (SQLException ex2) {
LOG.error("Failed to create table " + tableName + " in " + dbname, ex2);
throw ex2;
}
}
finally {
this.ch.closeResultSet(checkResultSet);
this.ch.closeStatement(checkStatement);
this.ch.closeStatement(creatStatement);
}
}
    /**
     * Emits a debug warning when a store instance is garbage collected
     * without {@link #dispose()} having been called. Only active when debug
     * logging was enabled at construction time (callStack != null).
     */
    @Override
    protected void finalize() {
        if (!this.ch.isClosed() && this.callStack != null) {
            LOG.debug("finalizing RDBDocumentStore that was not disposed", this.callStack);
        }
    }
    /**
     * Reads a document, using the nodes cache where allowed by
     * {@code maxCacheAge} (in ms): 0 forces a fresh read, MAX_VALUE accepts
     * any cached copy, other values accept copies checked within that window.
     * Non-NODES collections bypass the cache entirely. Cache population and
     * revalidation happen under the per-id lock.
     */
    private <T extends Document> T readDocumentCached(final Collection<T> collection, final String id, int maxCacheAge) {
        if (collection != Collection.NODES) {
            return readDocumentUncached(collection, id, null);
        } else {
            CacheValue cacheKey = new StringValue(id);
            NodeDocument doc = null;
            if (maxCacheAge > 0) {
                // first try without lock
                doc = nodesCache.getIfPresent(cacheKey);
                if (doc != null) {
                    long lastCheckTime = doc.getLastCheckTime();
                    if (lastCheckTime != 0) {
                        if (maxCacheAge == Integer.MAX_VALUE || System.currentTimeMillis() - lastCheckTime < maxCacheAge) {
                            // cached copy is fresh enough; no lock needed
                            return castAsT(unwrap(doc));
                        }
                    }
                }
            }
            try {
                Lock lock = getAndLock(id);
                try {
                    // caller really wants the cache to be cleared
                    if (maxCacheAge == 0) {
                        invalidateNodesCache(id, true);
                        doc = null;
                    }
                    final NodeDocument cachedDoc = doc;
                    // load-through: cachedDoc (if any) lets the uncached read
                    // skip deserialization when the modcount is unchanged
                    doc = nodesCache.get(cacheKey, new Callable<NodeDocument>() {
                        @Override
                        public NodeDocument call() throws Exception {
                            NodeDocument doc = (NodeDocument) readDocumentUncached(collection, id, cachedDoc);
                            if (doc != null) {
                                doc.seal();
                            }
                            return wrap(doc);
                        }
                    });
                    // inspect the doc whether it can be used
                    long lastCheckTime = doc.getLastCheckTime();
                    if (lastCheckTime != 0 && (maxCacheAge == 0 || maxCacheAge == Integer.MAX_VALUE)) {
                        // we either just cleared the cache or the caller does
                        // not care;
                    } else if (lastCheckTime != 0 && (System.currentTimeMillis() - lastCheckTime < maxCacheAge)) {
                        // is new enough
                    } else {
                        // need to at least revalidate
                        NodeDocument ndoc = (NodeDocument) readDocumentUncached(collection, id, cachedDoc);
                        if (ndoc != null) {
                            ndoc.seal();
                        }
                        doc = wrap(ndoc);
                        nodesCache.put(cacheKey, doc);
                    }
                } finally {
                    lock.unlock();
                }
                return castAsT(unwrap(doc));
            } catch (ExecutionException e) {
                // thrown by the cache loader when readDocumentUncached fails
                throw new IllegalStateException("Failed to load document with " + id, e);
            }
        }
    }
@CheckForNull
private <T extends Document> boolean internalCreate(Collection<T> collection, List<UpdateOp> updates) {
try {
boolean success = true;
// try up to CHUNKSIZE ops in one transaction
for (List<UpdateOp> chunks : Lists.partition(updates, CHUNKSIZE)) {
List<T> docs = new ArrayList<T>();
for (UpdateOp update : chunks) {
T doc = collection.newDocument(this);
update.increment(MODCOUNT, 1);
if (hasChangesToCollisions(update)) {
update.increment(COLLISIONSMODCOUNT, 1);
}
UpdateUtils.applyChanges(doc, update, comparator);
if (!update.getId().equals(doc.getId())) {
throw new DocumentStoreException("ID mismatch - UpdateOp: " + update.getId() + ", ID property: "
+ doc.getId());
}
docs.add(doc);
}
boolean done = insertDocuments(collection, docs);
if (done) {
for (T doc : docs) {
addToCache(collection, doc);
}
}
else {
success = false;
}
}
return success;
} catch (DocumentStoreException ex) {
return false;
}
}
    /**
     * Creates or updates a single document. When no prior version exists and
     * {@code allowCreate} is set, an insert is attempted; a failed insert is
     * treated as a lost race and retried as an update against the then-current
     * row. Returns the previous version of the document, or {@code null}.
     */
    @CheckForNull
    private <T extends Document> T internalCreateOrUpdate(Collection<T> collection, UpdateOp update, boolean allowCreate,
            boolean checkConditions) {
        T oldDoc = readDocumentCached(collection, update.getId(), Integer.MAX_VALUE);
        if (oldDoc == null) {
            if (!allowCreate) {
                return null;
            } else if (!update.isNew()) {
                throw new DocumentStoreException("Document does not exist: " + update.getId());
            }
            T doc = collection.newDocument(this);
            if (checkConditions && !checkConditions(doc, update.getConditions())) {
                return null;
            }
            update.increment(MODCOUNT, 1);
            if (hasChangesToCollisions(update)) {
                update.increment(COLLISIONSMODCOUNT, 1);
            }
            UpdateUtils.applyChanges(doc, update, comparator);
            try {
                insertDocuments(collection, Collections.singletonList(doc));
                addToCache(collection, doc);
                // previous version is null for a fresh insert
                return oldDoc;
            } catch (DocumentStoreException ex) {
                // may have failed due to a race condition; try update instead
                // this is an edge case, so it's ok to bypass the cache
                // (avoiding a race condition where the DB is already updated
                // but the cache is not)
                oldDoc = readDocumentUncached(collection, update.getId(), null);
                if (oldDoc == null) {
                    // something else went wrong
                    LOG.error("insert failed, but document " + update.getId() + " is not present, aborting", ex);
                    throw (ex);
                }
                return internalUpdate(collection, update, oldDoc, checkConditions, RETRIES);
            }
        } else {
            T result = internalUpdate(collection, update, oldDoc, checkConditions, RETRIES);
            if (allowCreate && result == null) {
                // TODO OAK-2655 need to implement some kind of retry
                LOG.error("update of " + update.getId() + " failed, race condition?");
                throw new DocumentStoreException("update of " + update.getId() + " failed, race condition?");
            }
            return result;
        }
    }
    /**
     * Applies an {@link UpdateOp} to an existing document with optimistic
     * locking: the DB update succeeds only when the row's MODCOUNT still
     * matches the base document's. On a miss, the base is re-read (cached
     * first, then uncached if the modcount looks stale) and the attempt is
     * retried up to {@code maxRetries} times under the per-id lock.
     *
     * @return previous version of document or <code>null</code>
     */
    @CheckForNull
    private <T extends Document> T internalUpdate(Collection<T> collection, UpdateOp update, T oldDoc, boolean checkConditions,
            int maxRetries) {
        T doc = applyChanges(collection, oldDoc, update, checkConditions);
        if (doc == null) {
            // conditions not met
            return null;
        } else {
            Lock l = getAndLock(update.getId());
            try {
                boolean success = false;
                int retries = maxRetries;
                while (!success && retries > 0) {
                    long lastmodcount = modcountOf(oldDoc);
                    // conditional update: only succeeds if MODCOUNT is unchanged
                    success = updateDocument(collection, doc, update, lastmodcount);
                    if (!success) {
                        retries -= 1;
                        oldDoc = readDocumentCached(collection, update.getId(), Integer.MAX_VALUE);
                        if (oldDoc != null) {
                            long newmodcount = modcountOf(oldDoc);
                            if (lastmodcount == newmodcount) {
                                // cached copy did not change so it probably was
                                // updated by a different instance, get a fresh one
                                oldDoc = readDocumentUncached(collection, update.getId(), null);
                            }
                        }
                        if (oldDoc == null) {
                            // document was there but is now gone
                            LOG.debug("failed to apply update because document is gone in the meantime: " + update.getId(), new Exception("call stack"));
                            return null;
                        }
                        // re-apply the changes against the refreshed base
                        doc = applyChanges(collection, oldDoc, update, checkConditions);
                        if (doc == null) {
                            return null;
                        }
                    } else {
                        if (collection == Collection.NODES) {
                            applyToCache((NodeDocument) oldDoc, (NodeDocument) doc);
                        }
                    }
                }
                if (!success) {
                    throw new DocumentStoreException("failed update of " + doc.getId() + " (race?) after " + maxRetries
                            + " retries");
                }
                return oldDoc;
            } finally {
                l.unlock();
            }
        }
    }
    /**
     * Produces a new, sealed document that is a deep copy of {@code oldDoc}
     * with the update applied. Also mutates {@code update} by incrementing
     * MODCOUNT (and COLLISIONSMODCOUNT when collisions changed). Returns
     * {@code null} when {@code checkConditions} is set and the update's
     * conditions are not met.
     */
    @CheckForNull
    private <T extends Document> T applyChanges(Collection<T> collection, T oldDoc, UpdateOp update, boolean checkConditions) {
        T doc = collection.newDocument(this);
        oldDoc.deepCopy(doc);
        if (checkConditions && !checkConditions(doc, update.getConditions())) {
            return null;
        }
        if (hasChangesToCollisions(update)) {
            update.increment(COLLISIONSMODCOUNT, 1);
        }
        update.increment(MODCOUNT, 1);
        UpdateUtils.applyChanges(doc, update, comparator);
        // seal so the cached copy cannot be mutated by callers
        doc.seal();
        return doc;
    }
@CheckForNull
private <T extends Document> void internalUpdate(Collection<T> collection, List<String> ids, UpdateOp update) {
if (isAppendableUpdate(update) && !requiresPreviousState(update)) {
Operation modOperation = update.getChanges().get(MODIFIEDKEY);
long modified = getModifiedFromOperation(modOperation);
boolean modifiedIsConditional = modOperation == null || modOperation.type != UpdateOp.Operation.Type.SET;
String appendData = SR.asString(update);
for (List<String> chunkedIds : Lists.partition(ids, CHUNKSIZE)) {
// remember what we already have in the cache
Map<String, NodeDocument> cachedDocs = Collections.emptyMap();
if (collection == Collection.NODES) {
cachedDocs = new HashMap<String, NodeDocument>();
for (String key : chunkedIds) {
cachedDocs.put(key, nodesCache.getIfPresent(new StringValue(key)));
}
}
Connection connection = null;
String tableName = getTable(collection);
boolean success = false;
try {
connection = this.ch.getRWConnection();
success = dbBatchedAppendingUpdate(connection, tableName, chunkedIds, modified, modifiedIsConditional,
appendData);
connection.commit();
} catch (SQLException ex) {
success = false;
this.ch.rollbackConnection(connection);
} finally {
this.ch.closeConnection(connection);
}
if (success) {
for (Entry<String, NodeDocument> entry : cachedDocs.entrySet()) {
T oldDoc = castAsT(entry.getValue());
if (oldDoc == null) {
// make sure concurrently loaded document is
// invalidated
nodesCache.invalidate(new StringValue(entry.getKey()));
} else {
T newDoc = applyChanges(collection, oldDoc, update, true);
if (newDoc != null) {
applyToCache((NodeDocument) oldDoc, (NodeDocument) newDoc);
}
}
}
} else {
for (String id : chunkedIds) {
UpdateOp up = update.copy();
up = up.shallowCopy(id);
internalCreateOrUpdate(collection, up, false, true);
}
}
}
} else {
for (String id : ids) {
UpdateOp up = update.copy();
up = up.shallowCopy(id);
internalCreateOrUpdate(collection, up, false, true);
}
}
}
    /**
     * Runs a range query against the backing table, optionally restricted by
     * one of the supported indexed properties ({@code INDEXEDPROPERTIES});
     * any other property is rejected with a {@link DocumentStoreException}.
     * Each result row is reconciled with the nodes cache via
     * {@code runThroughCache}.
     */
    private <T extends Document> List<T> internalQuery(Collection<T> collection, String fromKey, String toKey,
            String indexedProperty, long startValue, int limit) {
        Connection connection = null;
        String tableName = getTable(collection);
        List<T> result = Collections.emptyList();
        if (indexedProperty != null && (!INDEXEDPROPERTIES.contains(indexedProperty))) {
            String message = "indexed property " + indexedProperty + " not supported, query was '>= '" + startValue
                    + "'; supported properties are " + INDEXEDPROPERTIES;
            LOG.info(message);
            throw new DocumentStoreException(message);
        }
        try {
            long now = System.currentTimeMillis();
            connection = this.ch.getROConnection();
            List<RDBRow> dbresult = dbQuery(connection, tableName, fromKey, toKey, indexedProperty, startValue, limit);
            connection.commit();
            int size = dbresult.size();
            result = new ArrayList<T>(size);
            for (int i = 0; i < size; i++) {
                RDBRow row = dbresult.set(i, null); // free RDBRow ASAP
                T doc = runThroughCache(collection, row, now);
                result.add(doc);
            }
        } catch (Exception ex) {
            LOG.error("SQL exception on query", ex);
            throw new DocumentStoreException(ex);
        } finally {
            this.ch.closeConnection(connection);
        }
        return result;
    }
/**
 * Resolves the configured table name for one of the four known
 * collections; any other collection is a programming error.
 */
private <T extends Document> String getTable(Collection<T> collection) {
    if (collection == Collection.CLUSTER_NODES) {
        return this.tnClusterNodes;
    }
    if (collection == Collection.NODES) {
        return this.tnNodes;
    }
    if (collection == Collection.SETTINGS) {
        return this.tnSettings;
    }
    if (collection == Collection.JOURNAL) {
        return this.tnJournal;
    }
    throw new IllegalArgumentException("Unknown collection: " + collection.toString());
}
/**
 * Reads a document straight from the database, bypassing the cache.
 * When a cached copy is supplied, its modcount is sent along so the DB
 * can skip transferring unchanged row data (see {@code dbRead}).
 *
 * @param cachedDoc a previously cached version of the document, or null
 * @return the document, the refreshed cached copy, or null if absent
 */
@CheckForNull
private <T extends Document> T readDocumentUncached(Collection<T> collection, String id, NodeDocument cachedDoc) {
    Connection connection = null;
    String tableName = getTable(collection);
    try {
        long lastmodcount = -1;
        if (cachedDoc != null) {
            lastmodcount = modcountOf(cachedDoc);
        }
        connection = this.ch.getROConnection();
        RDBRow row = dbRead(connection, tableName, id, lastmodcount);
        connection.commit();
        if (row == null) {
            return null;
        } else {
            if (lastmodcount == row.getModcount()) {
                // we can re-use the cached document
                // NOTE(review): if cachedDoc is null (lastmodcount == -1)
                // and the row's modcount were also -1, this would NPE —
                // presumably stored modcounts are never negative; confirm.
                cachedDoc.markUpToDate(System.currentTimeMillis());
                return castAsT(cachedDoc);
            } else {
                return SR.fromRow(collection, row);
            }
        }
    } catch (Exception ex) {
        throw new DocumentStoreException(ex);
    } finally {
        this.ch.closeConnection(connection);
    }
}
/**
 * Removes a single document by ID on a read-write connection.
 * Any failure is wrapped in a DocumentStoreException.
 */
private <T extends Document> void delete(Collection<T> collection, String id) {
    String table = getTable(collection);
    Connection con = null;
    try {
        con = this.ch.getRWConnection();
        dbDelete(con, table, Collections.singletonList(id));
        con.commit();
    } catch (Exception ex) {
        throw new DocumentStoreException(ex);
    } finally {
        this.ch.closeConnection(con);
    }
}
/**
 * Removes the given documents, batching at most 64 IDs per SQL statement.
 *
 * @return the total number of rows actually deleted by the database
 */
private <T extends Document> int delete(Collection<T> collection, List<String> ids) {
    int numDeleted = 0;
    // table name does not change per batch; compute it once (was
    // recomputed inside the loop)
    String tableName = getTable(collection);
    for (List<String> sublist : Lists.partition(ids, 64)) {
        Connection connection = null;
        try {
            connection = this.ch.getRWConnection();
            numDeleted += dbDelete(connection, tableName, sublist);
            connection.commit();
        } catch (Exception ex) {
            throw new DocumentStoreException(ex);
        } finally {
            this.ch.closeConnection(connection);
        }
    }
    return numDeleted;
}
/**
 * Conditionally deletes documents (ID mapped to its delete conditions),
 * batching at most 64 IDs per SQL statement.
 *
 * @return the total number of rows actually deleted
 */
private <T extends Document> int delete(Collection<T> collection,
        Map<String, Map<Key, Condition>> toRemove) {
    int numDeleted = 0;
    String tableName = getTable(collection);
    Map<String, Map<Key, Condition>> subMap = Maps.newHashMap();
    Iterator<Entry<String, Map<Key, Condition>>> it = toRemove.entrySet().iterator();
    while (it.hasNext()) {
        Entry<String, Map<Key, Condition>> entry = it.next();
        subMap.put(entry.getKey(), entry.getValue());
        // flush a batch once it reaches 64 entries, or when the input
        // is exhausted
        if (subMap.size() == 64 || !it.hasNext()) {
            Connection connection = null;
            try {
                connection = this.ch.getRWConnection();
                numDeleted += dbDelete(connection, tableName, subMap);
                connection.commit();
            } catch (Exception ex) {
                throw DocumentStoreException.convert(ex);
            } finally {
                this.ch.closeConnection(connection);
            }
            subMap.clear();
        }
    }
    return numDeleted;
}
/**
 * Persists an update for an existing document. Prefers a cheap "append"
 * of the serialized change to the DATA column; every 16th change (and
 * any append that overflows the column) falls back to a full row rewrite.
 *
 * @param oldmodcount if non-null, the update only succeeds when the row's
 *        MODCOUNT still equals this value (optimistic concurrency)
 * @return true if exactly one row was updated
 */
private <T extends Document> boolean updateDocument(@Nonnull Collection<T> collection, @Nonnull T document,
        @Nonnull UpdateOp update, Long oldmodcount) {
    Connection connection = null;
    String tableName = getTable(collection);
    try {
        connection = this.ch.getRWConnection();
        Operation modOperation = update.getChanges().get(MODIFIEDKEY);
        long modified = getModifiedFromOperation(modOperation);
        // when _modified is not SET unconditionally, the SQL keeps the
        // greater of old and new value instead of plainly assigning
        boolean modifiedIsConditional = modOperation == null || modOperation.type != UpdateOp.Operation.Type.SET;
        Number flagB = (Number) document.get(NodeDocument.HAS_BINARY_FLAG);
        Boolean hasBinary = flagB != null && flagB.intValue() == NodeDocument.HAS_BINARY_VAL;
        Boolean flagD = (Boolean) document.get(NodeDocument.DELETED_ONCE);
        Boolean deletedOnce = flagD != null && flagD.booleanValue();
        Long modcount = (Long) document.get(MODCOUNT);
        Long cmodcount = (Long) document.get(COLLISIONSMODCOUNT);
        boolean success = false;
        // every 16th update is a full rewrite
        if (isAppendableUpdate(update) && modcount % 16 != 0) {
            String appendData = SR.asString(update);
            if (appendData.length() < this.dataLimitInOctets / CHAR2OCTETRATIO) {
                try {
                    success = dbAppendingUpdate(connection, tableName, document.getId(), modified, modifiedIsConditional, hasBinary, deletedOnce,
                            modcount, cmodcount, oldmodcount, appendData);
                    connection.commit();
                } catch (SQLException ex) {
                    // a string-overflow is expected occasionally and is
                    // handled by falling through to the full rewrite;
                    // any other SQLException propagates
                    continueIfStringOverflow(ex);
                    this.ch.rollbackConnection(connection);
                    success = false;
                }
            }
        }
        if (!success) {
            // full rewrite of the row from the in-memory document
            String data = SR.asString(document);
            success = dbUpdate(connection, tableName, document.getId(), modified, hasBinary, deletedOnce, modcount, cmodcount,
                    oldmodcount, data);
            connection.commit();
        }
        return success;
    } catch (SQLException ex) {
        this.ch.rollbackConnection(connection);
        throw new DocumentStoreException(ex);
    } finally {
        this.ch.closeConnection(connection);
    }
}
/**
 * Returns silently when the exception indicates a "value too long" error
 * (standard SQL state 22001, or Oracle's ORA-01489 reported under state
 * 72000); rethrows anything else.
 */
private static void continueIfStringOverflow(SQLException ex) throws SQLException {
    String state = ex.getSQLState();
    boolean stringOverflow = "22001".equals(state) /* everybody */
            || ("72000".equals(state) && 1489 == ex.getErrorCode()) /* Oracle */;
    if (!stringOverflow) {
        throw ex;
    }
}
/*
 * Whether the given update may be persisted by appending its serialized
 * form to the existing DATA column. Currently we use append for all
 * updates, but this might change in the future (hence the parameter).
 */
private static boolean isAppendableUpdate(UpdateOp update) {
    return true;
}
/*
 * Whether this update operation requires knowledge about the previous
 * document state: true exactly when the update carries conditions.
 */
private static boolean requiresPreviousState(UpdateOp update) {
    return !update.getConditions().isEmpty();
}
/**
 * Extracts the numeric _modified value from the operation; a missing
 * operation counts as 0.
 */
private static long getModifiedFromOperation(Operation op) {
    if (op == null) {
        return 0L;
    }
    return Long.parseLong(op.value.toString());
}
/**
 * Inserts the given documents in one batch.
 *
 * @return true if every insert affected exactly one row
 */
private <T extends Document> boolean insertDocuments(Collection<T> collection, List<T> documents) {
    Connection connection = null;
    String tableName = getTable(collection);
    try {
        connection = this.ch.getRWConnection();
        boolean result = dbInsert(connection, tableName, documents);
        connection.commit();
        return result;
    } catch (SQLException ex) {
        // previously an always-empty list was logged here; collect the
        // actual IDs so the diagnostic is useful
        List<String> ids = new ArrayList<String>(documents.size());
        for (T document : documents) {
            ids.add(document.getId());
        }
        LOG.debug("insert of " + ids + " failed", ex);
        this.ch.rollbackConnection(connection);
        throw new DocumentStoreException(ex);
    } finally {
        this.ch.closeConnection(connection);
    }
}
// configuration
// All of these can be overridden via a JVM system property of the
// fully-qualified name shown below.
// Whether to skip GZIP compression of overflow data written to BDATA
private static final boolean NOGZIP = Boolean
        .getBoolean("org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.NOGZIP");
// Number of documents to insert at once for batch create
private static final int CHUNKSIZE = Integer.getInteger(
        "org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.CHUNKSIZE", 64);
// Number of query hits above which a diagnostic warning is generated
private static final int QUERYHITSLIMIT = Integer.getInteger(
        "org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.QUERYHITSLIMIT", 4096);
// Number of elapsed ms in a query above which a diagnostic warning is generated
private static final int QUERYTIMELIMIT = Integer.getInteger(
        "org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.QUERYTIMELIMIT", 10000);
/**
 * Serializes a string for the BDATA column: UTF-8 encode, then gzip at
 * BEST_SPEED unless compression is disabled via the NOGZIP property.
 *
 * @throws DocumentStoreException on (unexpected) encoding or I/O failure
 */
private static byte[] asBytes(String data) {
    byte[] bytes;
    try {
        bytes = data.getBytes("UTF-8");
    } catch (UnsupportedEncodingException ex) {
        // UTF-8 is mandated by the Java spec; this cannot happen in practice
        LOG.error("UTF-8 not supported??", ex);
        throw new DocumentStoreException(ex);
    }
    if (NOGZIP) {
        return bytes;
    } else {
        try {
            ByteArrayOutputStream bos = new ByteArrayOutputStream(data.length());
            GZIPOutputStream gos = new GZIPOutputStream(bos) {
                {
                    // TODO: make this configurable
                    this.def.setLevel(Deflater.BEST_SPEED);
                }
            };
            // close in finally so the deflater's native resources are
            // released even when write() throws (was leaked before)
            try {
                gos.write(bytes);
            } finally {
                gos.close();
            }
            return bos.toByteArray();
        } catch (IOException ex) {
            LOG.error("Error while gzipping contents", ex);
            throw new DocumentStoreException(ex);
        }
    }
}
/**
 * Binds a document ID to the statement, honoring whether the database
 * profile stores the primary key column as bytes or as a string.
 */
private void setIdInStatement(PreparedStatement stmt, int idx, String id) throws SQLException {
    if (!db.isPrimaryColumnByteEncoded()) {
        stmt.setString(idx, id);
        return;
    }
    try {
        stmt.setBytes(idx, id.getBytes("UTF-8"));
    } catch (UnsupportedEncodingException ex) {
        LOG.error("UTF-8 not supported??", ex);
        throw new DocumentStoreException(ex);
    }
}
/**
 * Reads a document ID from the result set, decoding it from bytes when
 * the database profile stores the primary key column byte-encoded.
 */
private String getIdFromRS(ResultSet rs, int idx) throws SQLException {
    if (!db.isPrimaryColumnByteEncoded()) {
        return rs.getString(idx);
    }
    try {
        return new String(rs.getBytes(idx), "UTF-8");
    } catch (UnsupportedEncodingException ex) {
        LOG.error("UTF-8 not supported??", ex);
        throw new DocumentStoreException(ex);
    }
}
/**
 * Reads one row by ID. When the caller already holds a cached copy
 * (lastmodcount != -1) and the DB supports CASE in SELECT, the statement
 * NULLs out DATA/BDATA server-side when MODCOUNT is unchanged, saving
 * the transfer of row data we already have.
 *
 * @return the row, or null when absent (or when the ID is invalid for DB2)
 */
@CheckForNull
private RDBRow dbRead(Connection connection, String tableName, String id, long lastmodcount) throws SQLException {
    PreparedStatement stmt;
    boolean useCaseStatement = lastmodcount != -1 && this.db.allowsCaseInSelect();
    if (useCaseStatement) {
        // the case statement causes the actual row data not to be
        // sent in case we already have it
        stmt = connection
                .prepareStatement("select MODIFIED, MODCOUNT, CMODCOUNT, HASBINARY, DELETEDONCE, case MODCOUNT when ? then null else DATA end as DATA, "
                        + "case MODCOUNT when ? then null else BDATA end as BDATA from " + tableName + " where ID = ?");
    } else {
        // either we don't have a previous version of the document
        // or the database does not support CASE in SELECT
        stmt = connection.prepareStatement("select MODIFIED, MODCOUNT, CMODCOUNT, HASBINARY, DELETEDONCE, DATA, BDATA from "
                + tableName + " where ID = ?");
    }
    try {
        int si = 1;
        if (useCaseStatement) {
            stmt.setLong(si++, lastmodcount);
            stmt.setLong(si++, lastmodcount);
        }
        setIdInStatement(stmt, si, id);
        ResultSet rs = stmt.executeQuery();
        if (rs.next()) {
            long modified = rs.getLong(1);
            long modcount = rs.getLong(2);
            long cmodcount = rs.getLong(3);
            long hasBinary = rs.getLong(4);
            long deletedOnce = rs.getLong(5);
            String data = rs.getString(6);
            byte[] bdata = rs.getBytes(7);
            return new RDBRow(id, hasBinary == 1, deletedOnce == 1, modified, modcount, cmodcount, data, bdata);
        } else {
            return null;
        }
    } catch (SQLException ex) {
        LOG.error("attempting to read " + id + " (id length is " + id.length() + ")", ex);
        // DB2 throws an SQLException for invalid keys; handle this more
        // gracefully
        if ("22001".equals(ex.getSQLState())) {
            this.ch.rollbackConnection(connection);
            return null;
        } else {
            throw (ex);
        }
    } finally {
        // closing the statement also releases its ResultSet
        stmt.close();
    }
}
/**
 * Executes a range query (minId, maxId exclusive) with an optional
 * indexed-property condition, assembling vendor-specific row-limit
 * syntax (TOP / LIMIT / FETCH FIRST). Logs a diagnostic with call stack
 * when the result count or elapsed time exceeds the configured limits.
 *
 * @return matching rows in ID order, at most {@code limit} entries
 */
private List<RDBRow> dbQuery(Connection connection, String tableName, String minId, String maxId, String indexedProperty,
        long startValue, int limit) throws SQLException {
    long start = System.currentTimeMillis();
    String t = "select ";
    if (limit != Integer.MAX_VALUE && this.db.getFetchFirstSyntax() == FETCHFIRSTSYNTAX.TOP) {
        t += "TOP " + limit + " ";
    }
    t += "ID, MODIFIED, MODCOUNT, CMODCOUNT, HASBINARY, DELETEDONCE, DATA, BDATA from " + tableName
            + " where ID > ? and ID < ?";
    if (indexedProperty != null) {
        if (MODIFIED.equals(indexedProperty)) {
            t += " and MODIFIED >= ?";
        } else if (NodeDocument.HAS_BINARY_FLAG.equals(indexedProperty)) {
            // HASBINARY only supports the "has binary" value
            if (startValue != NodeDocument.HAS_BINARY_VAL) {
                throw new DocumentStoreException("unsupported value for property " + NodeDocument.HAS_BINARY_FLAG);
            }
            t += " and HASBINARY = 1";
        } else if (NodeDocument.DELETED_ONCE.equals(indexedProperty)) {
            // DELETEDONCE only supports the "true" value
            if (startValue != 1) {
                throw new DocumentStoreException("unsupported value for property " + NodeDocument.DELETED_ONCE);
            }
            t += " and DELETEDONCE = 1";
        } else {
            throw new DocumentStoreException("unsupported indexed property: " + indexedProperty);
        }
    }
    t += " order by ID";
    if (limit != Integer.MAX_VALUE) {
        switch (this.db.getFetchFirstSyntax()) {
        case LIMIT:
            t += " LIMIT " + limit;
            break;
        case FETCHFIRST:
            t += " FETCH FIRST " + limit + " ROWS ONLY";
            break;
        default:
            // TOP was already emitted before the column list
            break;
        }
    }
    PreparedStatement stmt = connection.prepareStatement(t);
    List<RDBRow> result = new ArrayList<RDBRow>();
    try {
        int si = 1;
        setIdInStatement(stmt, si++, minId);
        setIdInStatement(stmt, si++, maxId);
        if (MODIFIED.equals(indexedProperty)) {
            stmt.setLong(si++, startValue);
        }
        if (limit != Integer.MAX_VALUE) {
            stmt.setFetchSize(limit);
        }
        ResultSet rs = stmt.executeQuery();
        while (rs.next() && result.size() < limit) {
            String id = getIdFromRS(rs, 1);
            // sanity check: a row outside the requested range indicates a
            // database collation that does not match our ID ordering
            if (id.compareTo(minId) < 0 || id.compareTo(maxId) > 0) {
                throw new DocumentStoreException("unexpected query result: '" + minId + "' < '" + id + "' < '" + maxId
                        + "' - broken DB collation?");
            }
            long modified = rs.getLong(2);
            long modcount = rs.getLong(3);
            long cmodcount = rs.getLong(4);
            long hasBinary = rs.getLong(5);
            long deletedOnce = rs.getLong(6);
            String data = rs.getString(7);
            byte[] bdata = rs.getBytes(8);
            result.add(new RDBRow(id, hasBinary == 1, deletedOnce == 1, modified, modcount, cmodcount, data, bdata));
        }
    } finally {
        stmt.close();
    }
    long elapsed = System.currentTimeMillis() - start;
    if (QUERYHITSLIMIT != 0 && result.size() > QUERYHITSLIMIT) {
        String message = String.format("Potentially excessive query with %d hits (limited to %d, configured QUERYHITSLIMIT %d), elapsed time %dms, params minid '%s' maxid '%s' indexedProperty %s startValue %d limit %d. Check calling method.",
                result.size(), limit, QUERYHITSLIMIT, elapsed, minId, maxId, indexedProperty, startValue, limit);
        LOG.info(message, new Exception("call stack"));
    }
    else if (QUERYTIMELIMIT != 0 && elapsed > QUERYTIMELIMIT) {
        String message = String.format("Long running query with %d hits (limited to %d), elapsed time %dms (configured QUERYTIMELIMIT %d), params minid '%s' maxid '%s' indexedProperty %s startValue %d limit %d. Check calling method.",
                result.size(), limit, elapsed, QUERYTIMELIMIT, minId, maxId, indexedProperty, startValue, limit);
        LOG.info(message, new Exception("call stack"));
    }
    return result;
}
/**
 * Full rewrite of a document row. Small payloads go into the character
 * DATA column; payloads at or above the size limit are stored gzipped in
 * BDATA with DATA set to the "\"blob\"" marker.
 *
 * @param oldmodcount optional expected MODCOUNT for optimistic locking
 * @return true if exactly one row was updated
 */
private boolean dbUpdate(Connection connection, String tableName, String id, Long modified, Boolean hasBinary,
        Boolean deletedOnce, Long modcount, Long cmodcount, Long oldmodcount, String data) throws SQLException {
    String t = "update "
            + tableName
            + " set MODIFIED = ?, HASBINARY = ?, DELETEDONCE = ?, MODCOUNT = ?, CMODCOUNT = ?, DSIZE = ?, DATA = ?, BDATA = ? where ID = ?";
    if (oldmodcount != null) {
        t += " and MODCOUNT = ?";
    }
    PreparedStatement stmt = connection.prepareStatement(t);
    try {
        int si = 1;
        stmt.setObject(si++, modified, Types.BIGINT);
        stmt.setObject(si++, hasBinary ? 1 : 0, Types.SMALLINT);
        stmt.setObject(si++, deletedOnce ? 1 : 0, Types.SMALLINT);
        stmt.setObject(si++, modcount, Types.BIGINT);
        stmt.setObject(si++, cmodcount == null ? Long.valueOf(0) : cmodcount, Types.BIGINT);
        stmt.setObject(si++, data.length(), Types.BIGINT);
        if (data.length() < this.dataLimitInOctets / CHAR2OCTETRATIO) {
            // fits in the character column; clear any previous blob
            stmt.setString(si++, data);
            stmt.setBinaryStream(si++, null, 0);
        } else {
            // too large for DATA: store marker + gzipped payload in BDATA
            stmt.setString(si++, "\"blob\"");
            byte[] bytes = asBytes(data);
            stmt.setBytes(si++, bytes);
        }
        setIdInStatement(stmt, si++, id);
        if (oldmodcount != null) {
            stmt.setObject(si++, oldmodcount, Types.BIGINT);
        }
        int result = stmt.executeUpdate();
        if (result != 1) {
            LOG.debug("DB update failed for " + tableName + "/" + id + " with oldmodcount=" + oldmodcount);
        }
        return result == 1;
    } finally {
        stmt.close();
    }
}
/**
 * Incremental update: appends "," + appendData to the DATA column (via a
 * vendor-specific concat expression) instead of rewriting the whole row.
 *
 * @param setModifiedConditionally when true, MODIFIED is only raised,
 *        never lowered (CASE WHEN ? &gt; MODIFIED ...)
 * @param oldmodcount optional expected MODCOUNT for optimistic locking
 * @return true if exactly one row was updated
 */
private boolean dbAppendingUpdate(Connection connection, String tableName, String id, Long modified,
        boolean setModifiedConditionally, Boolean hasBinary, Boolean deletedOnce, Long modcount, Long cmodcount,
        Long oldmodcount, String appendData) throws SQLException {
    StringBuilder t = new StringBuilder();
    t.append("update " + tableName + " set ");
    t.append(setModifiedConditionally ? "MODIFIED = case when ? > MODIFIED then ? else MODIFIED end, " : "MODIFIED = ?, ");
    t.append("HASBINARY = ?, DELETEDONCE = ?, MODCOUNT = ?, CMODCOUNT = ?, DSIZE = DSIZE + ?, ");
    t.append("DATA = " + this.db.getConcatQueryString(this.dataLimitInOctets, appendData.length()) + " ");
    t.append("where ID = ?");
    if (oldmodcount != null) {
        t.append(" and MODCOUNT = ?");
    }
    PreparedStatement stmt = connection.prepareStatement(t.toString());
    try {
        int si = 1;
        stmt.setObject(si++, modified, Types.BIGINT);
        if (setModifiedConditionally) {
            // the conditional form references the placeholder twice
            stmt.setObject(si++, modified, Types.BIGINT);
        }
        stmt.setObject(si++, hasBinary ? 1 : 0, Types.SMALLINT);
        stmt.setObject(si++, deletedOnce ? 1 : 0, Types.SMALLINT);
        stmt.setObject(si++, modcount, Types.BIGINT);
        stmt.setObject(si++, cmodcount == null ? Long.valueOf(0) : cmodcount, Types.BIGINT);
        // +1 accounts for the "," separator prepended below
        stmt.setObject(si++, 1 + appendData.length(), Types.BIGINT);
        stmt.setString(si++, "," + appendData);
        setIdInStatement(stmt, si++, id);
        if (oldmodcount != null) {
            stmt.setObject(si++, oldmodcount, Types.BIGINT);
        }
        int result = stmt.executeUpdate();
        if (result != 1) {
            LOG.debug("DB append update failed for " + tableName + "/" + id + " with oldmodcount=" + oldmodcount);
        }
        return result == 1;
    } finally {
        stmt.close();
    }
}
/**
 * Appends the same serialized change to several documents in one SQL
 * statement ("... where ID in (?,?,...)"), bumping MODCOUNT by one.
 *
 * @param setModifiedConditionally when true, MODIFIED is only raised,
 *        never lowered
 * @return true if exactly {@code ids.size()} rows were updated
 */
private boolean dbBatchedAppendingUpdate(Connection connection, String tableName, List<String> ids, Long modified,
        boolean setModifiedConditionally,
        String appendData) throws SQLException {
    StringBuilder t = new StringBuilder();
    t.append("update " + tableName + " set ");
    t.append(setModifiedConditionally ? "MODIFIED = case when ? > MODIFIED then ? else MODIFIED end, " : "MODIFIED = ?, ");
    t.append("MODCOUNT = MODCOUNT + 1, DSIZE = DSIZE + ?, ");
    t.append("DATA = " + this.db.getConcatQueryString(this.dataLimitInOctets, appendData.length()) + " ");
    t.append("where ID in (");
    for (int i = 0; i < ids.size(); i++) {
        if (i != 0) {
            t.append(',');
        }
        t.append('?');
    }
    t.append(")");
    PreparedStatement stmt = connection.prepareStatement(t.toString());
    try {
        int si = 1;
        stmt.setObject(si++, modified, Types.BIGINT);
        if (setModifiedConditionally) {
            // the conditional form references the placeholder twice
            stmt.setObject(si++, modified, Types.BIGINT);
        }
        // +1 accounts for the "," separator prepended below
        stmt.setObject(si++, 1 + appendData.length(), Types.BIGINT);
        stmt.setString(si++, "," + appendData);
        for (String id : ids) {
            setIdInStatement(stmt, si++, id);
        }
        int result = stmt.executeUpdate();
        if (result != ids.size()) {
            LOG.debug("DB update failed: only " + result + " of " + ids.size() + " updated. Table: " + tableName + ", IDs:"
                    + ids);
        }
        return result == ids.size();
    } finally {
        stmt.close();
    }
}
/**
 * Batch-inserts the given documents. Small payloads go into DATA; larger
 * ones are stored gzipped in BDATA with DATA set to the "\"blob\"" marker.
 *
 * @return true if every row of the batch was inserted successfully
 */
private <T extends Document> boolean dbInsert(Connection connection, String tableName, List<T> documents) throws SQLException {
    PreparedStatement stmt = connection.prepareStatement("insert into " + tableName +
            "(ID, MODIFIED, HASBINARY, DELETEDONCE, MODCOUNT, CMODCOUNT, DSIZE, DATA, BDATA) " +
            "values (?, ?, ?, ?, ?, ?, ?, ?, ?)");
    try {
        for (T document : documents) {
            String data = SR.asString(document);
            String id = document.getId();
            Number hasBinary = (Number) document.get(NodeDocument.HAS_BINARY_FLAG);
            Boolean deletedOnce = (Boolean) document.get(NodeDocument.DELETED_ONCE);
            Long cmodcount = (Long) document.get(COLLISIONSMODCOUNT);
            int si = 1;
            setIdInStatement(stmt, si++, id);
            stmt.setObject(si++, document.get(MODIFIED), Types.BIGINT);
            stmt.setObject(si++, (hasBinary != null && hasBinary.intValue() == NodeDocument.HAS_BINARY_VAL) ? 1 : 0, Types.SMALLINT);
            stmt.setObject(si++, (deletedOnce != null && deletedOnce) ? 1 : 0, Types.SMALLINT);
            stmt.setObject(si++, document.get(MODCOUNT), Types.BIGINT);
            stmt.setObject(si++, cmodcount == null ? Long.valueOf(0) : cmodcount, Types.BIGINT);
            stmt.setObject(si++, data.length(), Types.BIGINT);
            if (data.length() < this.dataLimitInOctets / CHAR2OCTETRATIO) {
                // fits in the character column; no blob needed
                stmt.setString(si++, data);
                stmt.setBinaryStream(si++, null, 0);
            } else {
                // too large for DATA: store marker + gzipped payload in BDATA
                stmt.setString(si++, "\"blob\"");
                byte[] bytes = asBytes(data);
                stmt.setBytes(si++, bytes);
            }
            stmt.addBatch();
        }
        int[] results = stmt.executeBatch();
        boolean success = true;
        for (int i = 0; i < documents.size(); i++) {
            int result = results[i];
            // SUCCESS_NO_INFO means the driver could not report a count
            if (result != 1 && result != Statement.SUCCESS_NO_INFO) {
                LOG.error("DB insert failed for {}: {}", tableName, documents.get(i).getId());
                success = false;
            }
        }
        return success;
    } finally {
        stmt.close();
    }
}
/**
 * Deletes the given IDs with one statement, using "ID=?" for a single ID
 * and an IN-clause otherwise.
 *
 * @return the row count reported by the database
 */
private int dbDelete(Connection connection, String tableName, List<String> ids) throws SQLException {
    int cnt = ids.size();
    PreparedStatement stmt;
    if (cnt == 1) {
        stmt = connection.prepareStatement("delete from " + tableName + " where ID=?");
    } else {
        StringBuilder placeholders = new StringBuilder();
        for (int i = 0; i < cnt; i++) {
            if (i > 0) {
                placeholders.append(',');
            }
            placeholders.append('?');
        }
        stmt = connection.prepareStatement("delete from " + tableName + " where ID in (" + placeholders.toString() + ")");
    }
    try {
        int slot = 1;
        for (String id : ids) {
            setIdInStatement(stmt, slot++, id);
        }
        int result = stmt.executeUpdate();
        if (result != cnt) {
            LOG.debug("DB delete failed for " + tableName + "/" + ids);
        }
        return result;
    } finally {
        stmt.close();
    }
}
/**
 * Conditionally deletes documents. Only conditions on MODIFIED are
 * supported: EQUALS with a Long value ("MODIFIED=?") and EXISTS
 * ("MODIFIED is not null"); anything else is rejected.
 *
 * @param toDelete map of document ID to its delete conditions
 * @return the row count reported by the database
 */
private int dbDelete(Connection connection, String tableName,
        Map<String, Map<Key, Condition>> toDelete)
        throws SQLException, DocumentStoreException {
    String or = "";
    StringBuilder whereClause = new StringBuilder();
    // first pass: build "ID=? [and MODIFIED...]" clauses joined by "or"
    for (Entry<String, Map<Key, Condition>> entry : toDelete.entrySet()) {
        whereClause.append(or);
        or = " or ";
        whereClause.append("ID=?");
        for (Entry<Key, Condition> c : entry.getValue().entrySet()) {
            if (!c.getKey().getName().equals(MODIFIED)) {
                throw new DocumentStoreException(
                        "Unsupported condition: " + c);
            }
            whereClause.append(" and MODIFIED");
            if (c.getValue().type == Condition.Type.EQUALS
                    && c.getValue().value instanceof Long) {
                whereClause.append("=?");
            } else if (c.getValue().type == Condition.Type.EXISTS) {
                whereClause.append(" is not null");
            } else {
                throw new DocumentStoreException(
                        "Unsupported condition: " + c);
            }
        }
    }
    PreparedStatement stmt= connection.prepareStatement(
            "delete from " + tableName + " where " + whereClause);
    try {
        // second pass: bind parameters in the same iteration order as
        // the placeholders were generated above
        int i = 1;
        for (Entry<String, Map<Key, Condition>> entry : toDelete.entrySet()) {
            setIdInStatement(stmt, i++, entry.getKey());
            for (Entry<Key, Condition> c : entry.getValue().entrySet()) {
                if (c.getValue().type == Condition.Type.EQUALS) {
                    stmt.setLong(i++, (Long) c.getValue().value);
                }
            }
        }
        return stmt.executeUpdate();
    } finally {
        stmt.close();
    }
}
@Override
public void setReadWriteMode(String readWriteMode) {
    // Intentionally a no-op: this store does not support switching
    // read/write modes at runtime.
    // ignored
}
// Unchecked downcast helper; callers guarantee that the document is of
// the requested type (erased at runtime anyway).
@SuppressWarnings("unchecked")
private static <T extends Document> T castAsT(NodeDocument doc) {
    return (T) doc;
}
// Memory Cache
// cache of NodeDocuments keyed by document ID; may hold the
// NodeDocument.NULL sentinel for known-absent documents
private Cache<CacheValue, NodeDocument> nodesCache;
private CacheStats cacheStats;
// striped locks guarding per-document cache updates (see getAndLock)
private final Striped<Lock> locks = Striped.lock(64);
/**
 * Acquires and returns the striped lock for the given document key; the
 * caller is responsible for unlocking it.
 */
private Lock getAndLock(String key) {
    final Lock lock = locks.get(key);
    lock.lock();
    return lock;
}
/**
 * Translates the NULL sentinel back into a real null reference.
 */
@CheckForNull
private static NodeDocument unwrap(@Nonnull NodeDocument doc) {
    if (doc == NodeDocument.NULL) {
        return null;
    }
    return doc;
}
/**
 * Replaces null with the NULL sentinel so the value can live in the cache.
 */
@Nonnull
private static NodeDocument wrap(@CheckForNull NodeDocument doc) {
    if (doc == null) {
        return NodeDocument.NULL;
    }
    return doc;
}
/**
 * Fetches the document's ID, insisting that it is present.
 */
@Nonnull
private static String idOf(@Nonnull Document doc) {
    String id = doc.getId();
    if (id != null) {
        return id;
    }
    throw new IllegalArgumentException("non-null ID expected");
}
/**
 * Returns the document's modcount, or -1 when none is available.
 */
private static long modcountOf(@Nonnull Document doc) {
    Number modCount = doc.getModCount();
    if (modCount == null) {
        return -1;
    }
    return modCount.longValue();
}
/**
 * Adds a document to the {@link #nodesCache} iff there is no document in
 * the cache with the document key. This method does not acquire a lock from
 * {@link #locks}! The caller must ensure a lock is held for the given
 * document.
 *
 * @param doc
 *            the document to add to the cache.
 * @return either the given <code>doc</code> or the document already present
 *         in the cache.
 */
@Nonnull
private NodeDocument addToCache(@Nonnull final NodeDocument doc) {
    if (doc == NodeDocument.NULL) {
        throw new IllegalArgumentException("doc must not be NULL document");
    }
    doc.seal();
    // make sure we only cache the document if it wasn't
    // changed and cached by some other thread in the
    // meantime. That is, use get() with a Callable,
    // which is only used when the document isn't there
    try {
        CacheValue key = new StringValue(idOf(doc));
        // loop: if a NULL sentinel is found it is invalidated and the
        // get() retried until a real document is installed/returned
        for (;;) {
            NodeDocument cached = nodesCache.get(key, new Callable<NodeDocument>() {
                @Override
                public NodeDocument call() {
                    return doc;
                }
            });
            if (cached != NodeDocument.NULL) {
                return cached;
            } else {
                nodesCache.invalidate(key);
            }
        }
    } catch (ExecutionException e) {
        // will never happen because call() just returns
        // the already available doc
        throw new IllegalStateException(e);
    }
}
/**
 * Reflects a successful update in the {@link #nodesCache}: installs
 * {@code newDoc}, and when a different document is already cached,
 * either overwrites it (if its modcount still matches {@code oldDoc})
 * or invalidates the entry (a concurrent change happened).
 *
 * @param oldDoc the previous version, or null for an insert
 * @param newDoc the updated document to cache
 */
// was annotated @Nonnull (meaningless on a void method); oldDoc was
// @Nonnull although the body explicitly handles the null/insert case
private void applyToCache(@CheckForNull final NodeDocument oldDoc, @Nonnull final NodeDocument newDoc) {
    NodeDocument cached = addToCache(newDoc);
    if (cached == newDoc) {
        // successful
        return;
    } else if (oldDoc == null) {
        // this is an insert and some other thread was quicker
        // loading it into the cache -> return now
        return;
    } else {
        CacheValue key = new StringValue(idOf(newDoc));
        // this is an update (oldDoc != null)
        if (Objects.equal(cached.getModCount(), oldDoc.getModCount())) {
            nodesCache.put(key, newDoc);
        } else {
            // the cache entry was modified by some other thread in
            // the meantime. the updated cache entry may or may not
            // include this update. we cannot just apply our update
            // on top of the cached entry.
            // therefore we must invalidate the cache entry
            nodesCache.invalidate(key);
        }
    }
}
/**
 * Caches the document if it belongs to the NODES collection; all other
 * collections are not cached, so this is a no-op for them.
 */
private <T extends Document> void addToCache(Collection<T> collection, T doc) {
    if (collection != Collection.NODES) {
        return;
    }
    Lock lock = getAndLock(idOf(doc));
    try {
        addToCache((NodeDocument) doc);
    } finally {
        lock.unlock();
    }
}
/**
 * Converts a database row into a document, consulting and updating the
 * node cache: returns the cached copy when it is at least as new as the
 * row, otherwise installs the fresh document (re-checking under the
 * per-document lock so a newer concurrent entry is never overwritten).
 */
private <T extends Document> T runThroughCache(Collection<T> collection, RDBRow row, long now) {
    if (collection != Collection.NODES) {
        // not in the cache anyway
        return SR.fromRow(collection, row);
    }
    String id = row.getId();
    CacheValue cacheKey = new StringValue(id);
    // optimistic check without the lock first
    NodeDocument inCache = nodesCache.getIfPresent(cacheKey);
    Number modCount = row.getModcount();
    // do not overwrite document in cache if the
    // existing one in the cache is newer
    if (inCache != null && inCache != NodeDocument.NULL) {
        // check mod count
        Number cachedModCount = inCache.getModCount();
        if (cachedModCount == null) {
            throw new IllegalStateException("Missing " + Document.MOD_COUNT);
        }
        if (modCount.longValue() <= cachedModCount.longValue()) {
            // we can use the cached document
            inCache.markUpToDate(now);
            return castAsT(inCache);
        }
    }
    NodeDocument fresh = (NodeDocument) SR.fromRow(collection, row);
    fresh.seal();
    // re-check under the lock: another thread may have refreshed the
    // cache entry since the optimistic check above
    Lock lock = getAndLock(id);
    try {
        inCache = nodesCache.getIfPresent(cacheKey);
        if (inCache != null && inCache != NodeDocument.NULL) {
            // check mod count
            Number cachedModCount = inCache.getModCount();
            if (cachedModCount == null) {
                throw new IllegalStateException("Missing " + Document.MOD_COUNT);
            }
            if (modCount.longValue() > cachedModCount.longValue()) {
                nodesCache.put(cacheKey, fresh);
            } else {
                fresh = inCache;
            }
        } else {
            nodesCache.put(cacheKey, fresh);
        }
    } finally {
        lock.unlock();
    }
    return castAsT(fresh);
}
/**
 * Whether the update touches the _collisions map — only relevant when
 * collision modcounts (CMODCOUNT) are enabled.
 */
private boolean hasChangesToCollisions(UpdateOp update) {
    if (!USECMODCOUNT) {
        return false;
    }
    for (Entry<Key, Operation> change : checkNotNull(update).getChanges().entrySet()) {
        Operation op = change.getValue();
        if (op.type == Operation.Type.SET_MAP_ENTRY
                && NodeDocument.COLLISIONS.equals(change.getKey().getName())) {
            return true;
        }
    }
    return false;
}
}
|
OAK-1266 - RDBDocumentStore: log detected size of DATA column
git-svn-id: 67138be12999c61558c3dd34328380c8e4523e73@1686253 13f79535-47bb-0310-9956-ffa450edef68
|
oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java
|
OAK-1266 - RDBDocumentStore: log detected size of DATA column
|
|
Java
|
apache-2.0
|
4f64e196b47514935633b6dacab54fd9d6d1d0bb
| 0
|
asedunov/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,izonder/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,orekyuu/intellij-community,adedayo/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,fnouama/intellij-community,signed/intellij-community,suncycheng/intellij-community,orekyuu/intellij-community,kool79/intellij-community,youdonghai/intellij-community,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,apixandru/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,mglukhikh/intellij-community,fnouama/intellij-community,amith01994/intellij-community,izonder/intellij-community,tmpgit/intellij-community,muntasirsyed/intellij-community,blademainer/intellij-community,gnuhub/intellij-community,ivan-fedorov/intellij-community,gnuhub/intellij-community,ryano144/intellij-community,muntasirsyed/intellij-community,mglukhikh/intellij-community,tmpgit/intellij-community,fengbaicanhe/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,MER-GROUP/intellij-community,jagguli/intellij-community,caot/intellij-community,fnouama/intellij-community,TangHao1987/intellij-community,petteyg/intellij-community,jagguli/intellij-community,ryano144/intellij-community,kool79/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,FHannes/intellij-community,petteyg/intellij-community,vvv1559/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,gnuhub/intellij-community,supersven/intellij-community,clumsy/intellij-community,semonte/intellij-community,ivan-fedorov/intellij-community,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,slisson/intellij-community,kool79/intellij-community,Distrotech/intellij-community,adedayo/intellij-community,petteyg/intellij-community,pwoodworth/intellij-community,amith01994/intellij-community,TangHao1987/intellij-community,vladmm/intellij-community
,orekyuu/intellij-community,muntasirsyed/intellij-community,idea4bsd/idea4bsd,salguarnieri/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,hurricup/intellij-community,ibinti/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,nicolargo/intellij-community,holmes/intellij-community,petteyg/intellij-community,gnuhub/intellij-community,blademainer/intellij-community,xfournet/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,allotria/intellij-community,caot/intellij-community,supersven/intellij-community,youdonghai/intellij-community,allotria/intellij-community,wreckJ/intellij-community,ryano144/intellij-community,muntasirsyed/intellij-community,samthor/intellij-community,holmes/intellij-community,hurricup/intellij-community,caot/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,akosyakov/intellij-community,tmpgit/intellij-community,tmpgit/intellij-community,semonte/intellij-community,supersven/intellij-community,Lekanich/intellij-community,diorcety/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,ibinti/intellij-community,allotria/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,ivan-fedorov/intellij-community,robovm/robovm-studio,ol-loginov/intellij-community,kdwink/intellij-community,jagguli/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,clumsy/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,pwoodworth/intellij-community,amith01994/intellij-community,alphafoobar/intellij-community,muntasirsyed/intellij-community,tmpgit/intellij-community,vvv1559/intellij-community,clumsy/intellij-community,robovm/robovm-studio,izonder/intellij-community,TangHao1987/intellij-community,petteyg/intellij-community,kool79/intellij-community,robovm/robovm-studio,ryano144/intellij-community,fiterma
y/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,lucafavatella/intellij-community,fnouama/intellij-community,fnouama/intellij-community,ol-loginov/intellij-community,ftomassetti/intellij-community,fengbaicanhe/intellij-community,FHannes/intellij-community,ol-loginov/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,ivan-fedorov/intellij-community,izonder/intellij-community,retomerz/intellij-community,slisson/intellij-community,petteyg/intellij-community,suncycheng/intellij-community,vladmm/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,alphafoobar/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,suncycheng/intellij-community,ahb0327/intellij-community,pwoodworth/intellij-community,retomerz/intellij-community,ryano144/intellij-community,pwoodworth/intellij-community,akosyakov/intellij-community,robovm/robovm-studio,lucafavatella/intellij-community,samthor/intellij-community,supersven/intellij-community,vladmm/intellij-community,akosyakov/intellij-community,michaelgallacher/intellij-community,slisson/intellij-community,youdonghai/intellij-community,adedayo/intellij-community,jagguli/intellij-community,retomerz/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,hurricup/intellij-community,kdwink/intellij-community,da1z/intellij-community,suncycheng/intellij-community,izonder/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,wreckJ/intellij-community,samthor/intellij-community,vladmm/intellij-community,ahb0327/intellij-community,dslomov/intellij-community,fengbaicanhe/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,wreckJ/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,holmes/intellij-community,retomerz/intellij-community,ftom
assetti/intellij-community,caot/intellij-community,ryano144/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,kool79/intellij-community,TangHao1987/intellij-community,fengbaicanhe/intellij-community,MER-GROUP/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,SerCeMan/intellij-community,allotria/intellij-community,amith01994/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,ahb0327/intellij-community,mglukhikh/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,semonte/intellij-community,semonte/intellij-community,dslomov/intellij-community,SerCeMan/intellij-community,petteyg/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,fengbaicanhe/intellij-community,ibinti/intellij-community,asedunov/intellij-community,amith01994/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,dslomov/intellij-community,slisson/intellij-community,fitermay/intellij-community,gnuhub/intellij-community,izonder/intellij-community,petteyg/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,gnuhub/intellij-community,michaelgallacher/intellij-community,vladmm/intellij-community,MER-GROUP/intellij-community,kdwink/intellij-community,dslomov/intellij-community,caot/intellij-community,Lekanich/intellij-community,fengbaicanhe/intellij-community,vvv1559/intellij-community,samthor/intellij-community,slisson/intellij-community,amith01994/intellij-community,alphafoobar/intellij-community,fitermay/intellij-community,caot/intellij-community,signed/intellij-community,allotria/intellij-community,fengbaicanhe/intellij-community,orekyuu/intellij-community,nicolargo/intellij-community,jagguli/intellij-community,mglukhikh/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,ahb0327/intellij-community,gnuhub/inte
llij-community,michaelgallacher/intellij-community,ol-loginov/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,blademainer/intellij-community,robovm/robovm-studio,Distrotech/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,slisson/intellij-community,alphafoobar/intellij-community,ol-loginov/intellij-community,kool79/intellij-community,wreckJ/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,diorcety/intellij-community,vladmm/intellij-community,izonder/intellij-community,dslomov/intellij-community,kool79/intellij-community,nicolargo/intellij-community,adedayo/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,fitermay/intellij-community,xfournet/intellij-community,vladmm/intellij-community,hurricup/intellij-community,muntasirsyed/intellij-community,vladmm/intellij-community,asedunov/intellij-community,ftomassetti/intellij-community,slisson/intellij-community,ahb0327/intellij-community,Lekanich/intellij-community,Lekanich/intellij-community,kdwink/intellij-community,akosyakov/intellij-community,samthor/intellij-community,pwoodworth/intellij-community,da1z/intellij-community,adedayo/intellij-community,clumsy/intellij-community,akosyakov/intellij-community,signed/intellij-community,izonder/intellij-community,holmes/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,diorcety/intellij-community,da1z/intellij-community,MER-GROUP/intellij-community,allotria/intellij-community,blademainer/intellij-community,nicolargo/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,vladmm/intellij-community,xfournet/intellij-community,dslomov/intellij-community,semonte/intellij-community,xfournet/intellij-community,michaelgal
lacher/intellij-community,FHannes/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,izonder/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,signed/intellij-community,adedayo/intellij-community,suncycheng/intellij-community,clumsy/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,nicolargo/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,MichaelNedzelsky/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,ahb0327/intellij-community,vvv1559/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,nicolargo/intellij-community,alphafoobar/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,mglukhikh/intellij-community,kool79/intellij-community,idea4bsd/idea4bsd,dslomov/intellij-community,xfournet/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,nicolargo/intellij-community,da1z/intellij-community,Distrotech/intellij-community,da1z/intellij-community,tmpgit/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,fnouama/intellij-community,ol-loginov/intellij-community,wreckJ/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,wreckJ/intellij-community,kool79/intellij-community,asedunov/intellij-community,supersven/intellij-community,slisson/intellij-community,samthor/intellij-community,hurricup/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,samthor/intellij-community,youdonghai/intellij-community,kdwink/intellij-community,caot/intellij-community,lucafavatella/intellij-communit
y,clumsy/intellij-community,caot/intellij-community,ivan-fedorov/intellij-community,lucafavatella/intellij-community,signed/intellij-community,tmpgit/intellij-community,ryano144/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,wreckJ/intellij-community,ibinti/intellij-community,ol-loginov/intellij-community,blademainer/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,asedunov/intellij-community,wreckJ/intellij-community,apixandru/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,akosyakov/intellij-community,caot/intellij-community,FHannes/intellij-community,asedunov/intellij-community,signed/intellij-community,orekyuu/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,supersven/intellij-community,FHannes/intellij-community,samthor/intellij-community,dslomov/intellij-community,petteyg/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,dslomov/intellij-community,fitermay/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,petteyg/intellij-community,kdwink/intellij-community,fnouama/intellij-community,holmes/intellij-community,dslomov/intellij-community,allotria/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,kdwink/intellij-community,amith01994/intellij-community,da1z/intellij-community,ftomassetti/intellij-community,allotria/intellij-community,diorcety/intellij-community,holmes/intellij-community,fengbaicanhe/intellij-community,blademainer/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,lucafavatella/intellij-community,salguarnieri/intellij-community,amith01994/intellij-community,muntasirsyed/intellij-community,vladmm/intellij-community,signed/intellij-community,semonte/intellij-community,
tmpgit/intellij-community,MER-GROUP/intellij-community,TangHao1987/intellij-community,ryano144/intellij-community,robovm/robovm-studio,robovm/robovm-studio,clumsy/intellij-community,mglukhikh/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,akosyakov/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,diorcety/intellij-community,samthor/intellij-community,da1z/intellij-community,FHannes/intellij-community,fitermay/intellij-community,ahb0327/intellij-community,Lekanich/intellij-community,retomerz/intellij-community,ftomassetti/intellij-community,tmpgit/intellij-community,petteyg/intellij-community,vladmm/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,SerCeMan/intellij-community,mglukhikh/intellij-community,ivan-fedorov/intellij-community,salguarnieri/intellij-community,ol-loginov/intellij-community,supersven/intellij-community,hurricup/intellij-community,samthor/intellij-community,fengbaicanhe/intellij-community,FHannes/intellij-community,Lekanich/intellij-community,asedunov/intellij-community,adedayo/intellij-community,allotria/intellij-community,orekyuu/intellij-community,xfournet/intellij-community,gnuhub/intellij-community,izonder/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,kdwink/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,diorcety/intellij-community,vvv1559/intellij-community,supersven/intellij-community,jagguli/intellij-community,ftomassetti/intellij-community,ahb0327/intellij-community,retomerz/intellij-community,muntasirsyed/intellij-community,diorcety/intellij-community,wreckJ/intellij-community,xfournet/intellij-community,kool79/intellij-community,samthor/intellij-community,nicolargo/intellij-community,idea4bsd/idea4bsd,amith01994/intellij-community,fitermay/intellij-community,pwoodworth/intellij-community,
lucafavatella/intellij-community,dslomov/intellij-community,semonte/intellij-community,xfournet/intellij-community,TangHao1987/intellij-community,orekyuu/intellij-community,vladmm/intellij-community,robovm/robovm-studio,caot/intellij-community,apixandru/intellij-community,fnouama/intellij-community,adedayo/intellij-community,adedayo/intellij-community,diorcety/intellij-community,michaelgallacher/intellij-community,Distrotech/intellij-community,youdonghai/intellij-community,izonder/intellij-community,Distrotech/intellij-community,samthor/intellij-community,alphafoobar/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,da1z/intellij-community,ftomassetti/intellij-community,michaelgallacher/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,Distrotech/intellij-community,adedayo/intellij-community,FHannes/intellij-community,ahb0327/intellij-community,hurricup/intellij-community,ryano144/intellij-community,nicolargo/intellij-community,muntasirsyed/intellij-community,fnouama/intellij-community,ryano144/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,ahb0327/intellij-community,da1z/intellij-community,FHannes/intellij-community,SerCeMan/intellij-community,amith01994/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,orekyuu/intellij-community,ThiagoGarciaAlves/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,youdonghai/intellij-community,slisson/intellij-community,apixandru/intellij-community,FHannes/intellij-community,apixandru/intellij-community,Lekanich/intellij-community,signed/intellij-community,kdwink/intellij-community,fnouama/intellij-community,orekyuu/intellij-community,vvv1559/intellij-community,holmes/intellij-community,MichaelNedzelsky/intellij-community,retomerz/intellij-community,robovm/rob
ovm-studio,suncycheng/intellij-community,amith01994/intellij-community,TangHao1987/intellij-community,allotria/intellij-community,blademainer/intellij-community,supersven/intellij-community,ThiagoGarciaAlves/intellij-community,akosyakov/intellij-community,Distrotech/intellij-community,asedunov/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,adedayo/intellij-community,holmes/intellij-community,youdonghai/intellij-community,ol-loginov/intellij-community,jagguli/intellij-community,semonte/intellij-community,fnouama/intellij-community,muntasirsyed/intellij-community,youdonghai/intellij-community,ftomassetti/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,da1z/intellij-community,hurricup/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,MER-GROUP/intellij-community,suncycheng/intellij-community,holmes/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,wreckJ/intellij-community,allotria/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,mglukhikh/intellij-community,Distrotech/intellij-community,Lekanich/intellij-community,fitermay/intellij-community,ibinti/intellij-community,tmpgit/intellij-community,salguarnieri/intellij-community,blademainer/intellij-community,signed/intellij-community,slisson/intellij-community,da1z/intellij-community,clumsy/intellij-community,SerCeMan/intellij-community,holmes/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,ivan-fedorov/intellij-community,alphafoobar/intellij-community,jagguli/intellij-community,alphafoobar/intellij-community,MichaelNedzelsky/intellij-community,holmes/intellij-community,jagguli/intellij-community,ol-loginov/intellij-community,semonte/intellij-community,Lekanich/intellij-community,fitermay/intellij-community,FHannes/intellij-community,Distrotech/intellij-community,dslomov/intellij-community,slisson/intellij-community,blade
mainer/intellij-community,Lekanich/intellij-community,TangHao1987/intellij-community,caot/intellij-community,youdonghai/intellij-community,retomerz/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,fitermay/intellij-community,Lekanich/intellij-community,Distrotech/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,ahb0327/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,MER-GROUP/intellij-community,asedunov/intellij-community,alphafoobar/intellij-community,slisson/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,MichaelNedzelsky/intellij-community,alphafoobar/intellij-community,MichaelNedzelsky/intellij-community,signed/intellij-community,ivan-fedorov/intellij-community,ahb0327/intellij-community,ibinti/intellij-community,jagguli/intellij-community,ibinti/intellij-community,pwoodworth/intellij-community,akosyakov/intellij-community,supersven/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,diorcety/intellij-community,wreckJ/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community
|
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.debugger.engine;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.actions.DebuggerActions;
import com.intellij.debugger.engine.evaluation.EvaluationContext;
import com.intellij.debugger.engine.evaluation.EvaluationContextImpl;
import com.intellij.debugger.engine.events.DebuggerCommandImpl;
import com.intellij.debugger.impl.DebuggerContextUtil;
import com.intellij.debugger.impl.DebuggerSession;
import com.intellij.debugger.impl.DebuggerStateManager;
import com.intellij.debugger.impl.DebuggerUtilsEx;
import com.intellij.debugger.settings.DebuggerSettings;
import com.intellij.debugger.ui.DebuggerContentInfo;
import com.intellij.debugger.ui.breakpoints.Breakpoint;
import com.intellij.debugger.ui.impl.ThreadsPanel;
import com.intellij.debugger.ui.impl.watch.DebuggerTreeNodeImpl;
import com.intellij.debugger.ui.impl.watch.MessageDescriptor;
import com.intellij.debugger.ui.impl.watch.NodeDescriptorImpl;
import com.intellij.debugger.ui.impl.watch.NodeManagerImpl;
import com.intellij.debugger.ui.tree.NodeDescriptor;
import com.intellij.debugger.ui.tree.render.DescriptorLabelListener;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.execution.ui.ExecutionConsole;
import com.intellij.execution.ui.RunnerLayoutUi;
import com.intellij.execution.ui.layout.PlaceInGrid;
import com.intellij.icons.AllIcons;
import com.intellij.idea.ActionsBundle;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.ui.content.Content;
import com.intellij.ui.content.ContentManagerAdapter;
import com.intellij.ui.content.ContentManagerEvent;
import com.intellij.xdebugger.*;
import com.intellij.xdebugger.breakpoints.XBreakpoint;
import com.intellij.xdebugger.breakpoints.XBreakpointHandler;
import com.intellij.xdebugger.evaluation.XDebuggerEditorsProvider;
import com.intellij.xdebugger.frame.XStackFrame;
import com.intellij.xdebugger.frame.XValueMarkerProvider;
import com.intellij.xdebugger.impl.XDebuggerUtilImpl;
import com.intellij.xdebugger.impl.actions.XDebuggerActions;
import com.intellij.xdebugger.ui.XDebugTabLayouter;
import com.sun.jdi.event.Event;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.java.debugger.JavaDebuggerEditorsProvider;
import java.util.ArrayList;
import java.util.List;
/**
* @author egor
*/
/**
 * Adapter that plugs the legacy Java debugger backend ({@link DebuggerSession} /
 * {@link DebugProcessImpl}) into the XDebugger framework. It forwards
 * stepping/resume/pause commands to the Java session, converts backend "paused"
 * events into XDebugger breakpoint/position notifications, registers the Java
 * breakpoint handlers, and contributes the Threads tab plus the debugger
 * settings actions to the session UI.
 *
 * @author egor
 */
public class JavaDebugProcess extends XDebugProcess {
  private final DebuggerSession myJavaSession;
  private final JavaDebuggerEditorsProvider myEditorsProvider;
  private final XBreakpointHandler<?>[] myBreakpointHandlers;
  private final NodeManagerImpl myNodeManager;

  public JavaDebugProcess(@NotNull final XDebugSession session, final DebuggerSession javaSession) {
    super(session);
    myJavaSession = javaSession;
    myEditorsProvider = new JavaDebuggerEditorsProvider();
    DebugProcessImpl process = javaSession.getProcess();

    // Built-in handlers first, then any contributed through the extension point.
    List<XBreakpointHandler> handlers = new ArrayList<XBreakpointHandler>();
    handlers.add(new JavaBreakpointHandler.JavaLineBreakpointHandler(process));
    handlers.add(new JavaBreakpointHandler.JavaExceptionBreakpointHandler(process));
    handlers.add(new JavaBreakpointHandler.JavaFieldBreakpointHandler(process));
    handlers.add(new JavaBreakpointHandler.JavaMethodBreakpointHandler(process));
    handlers.add(new JavaBreakpointHandler.JavaWildcardBreakpointHandler(process));
    for (JavaBreakpointHandlerFactory factory : Extensions.getExtensions(JavaBreakpointHandlerFactory.EP_NAME)) {
      handlers.add(factory.createHandler(process));
    }
    myBreakpointHandlers = handlers.toArray(new XBreakpointHandler[handlers.size()]);

    process.addDebugProcessListener(new DebugProcessAdapter() {
      @Override
      public void paused(final SuspendContext suspendContext) {
        SuspendContextImpl context = (SuspendContextImpl)suspendContext;
        context.initExecutionStacks();
        // If the stop was caused by a breakpoint, report it as such so the UI can
        // highlight the breakpoint; otherwise it is a plain position reached
        // (e.g. a step finished).
        List<Pair<Breakpoint, Event>> descriptors = DebuggerUtilsEx.getEventDescriptors(context);
        if (!descriptors.isEmpty()) {
          Breakpoint breakpoint = descriptors.get(0).getFirst();
          XBreakpoint xBreakpoint = breakpoint.getXBreakpoint();
          if (xBreakpoint != null) {
            getSession().breakpointReached(xBreakpoint, null, context);
            return;
          }
        }
        getSession().positionReached(context);
      }
    });

    myNodeManager = new NodeManagerImpl(session.getProject(), null) {
      @Override
      public DebuggerTreeNodeImpl createNode(final NodeDescriptor descriptor, EvaluationContext evaluationContext) {
        ((NodeDescriptorImpl)descriptor).setContext((EvaluationContextImpl)evaluationContext);
        final DebuggerTreeNodeImpl node = new DebuggerTreeNodeImpl(null, descriptor);
        ((NodeDescriptorImpl)descriptor).updateRepresentation((EvaluationContextImpl)evaluationContext, DescriptorLabelListener.DUMMY_LISTENER);
        return node;
      }

      @Override
      public DebuggerTreeNodeImpl createMessageNode(MessageDescriptor descriptor) {
        return new DebuggerTreeNodeImpl(null, descriptor);
      }
    };

    session.addSessionListener(new XDebugSessionAdapter() {
      @Override
      public void beforeSessionResume() {
        // Snapshot the descriptor history on the debugger manager thread before the
        // VM resumes, so node representations can be restored after the next stop.
        myJavaSession.getProcess().getManagerThread().schedule(new DebuggerCommandImpl() {
          @Override
          protected void action() throws Exception {
            myNodeManager.setHistoryByContext(getDebuggerStateManager().getContext());
          }

          @Override
          public Priority getPriority() {
            return Priority.NORMAL;
          }
        });
      }

      @Override
      public void stackFrameChanged() {
        // Keep the legacy debugger context in sync with the frame selected in the
        // XDebugger frames view.
        XStackFrame frame = session.getCurrentStackFrame();
        if (frame instanceof JavaStackFrame) {
          DebuggerContextUtil.setStackFrame(javaSession.getContextManager(), ((JavaStackFrame)frame).getStackFrameProxy());
        }
      }
    });
  }

  private DebuggerStateManager getDebuggerStateManager() {
    return myJavaSession.getContextManager();
  }

  public DebuggerSession getDebuggerSession() {
    return myJavaSession;
  }

  @NotNull
  @Override
  public XDebuggerEditorsProvider getEditorsProvider() {
    return myEditorsProvider;
  }

  @Override
  public void startStepOver() {
    myJavaSession.stepOver(false);
  }

  @Override
  public void startStepInto() {
    myJavaSession.stepInto(false, null);
  }

  @Override
  public void startStepOut() {
    myJavaSession.stepOut();
  }

  @Override
  public void stop() {
    myJavaSession.getProcess().dispose();
  }

  @Override
  public void startPausing() {
    myJavaSession.pause();
  }

  @Override
  public void resume() {
    myJavaSession.resume();
  }

  @Override
  public void runToPosition(@NotNull XSourcePosition position) {
    // getDocument() is @Nullable (binary/too-large files); silently ignore the
    // request in that case instead of throwing an NPE in runToCursor().
    Document document = FileDocumentManager.getInstance().getDocument(position.getFile());
    if (document != null) {
      myJavaSession.runToCursor(document, position.getLine(), false);
    }
  }

  @NotNull
  @Override
  public XBreakpointHandler<?>[] getBreakpointHandlers() {
    return myBreakpointHandlers;
  }

  @Override
  public boolean checkCanInitBreakpoints() {
    // Breakpoint initialization is driven by the legacy Java debugger itself,
    // not by the XDebugger framework.
    return false;
  }

  @Nullable
  @Override
  protected ProcessHandler doGetProcessHandler() {
    return myJavaSession.getProcess().getExecutionResult().getProcessHandler();
  }

  @NotNull
  @Override
  public ExecutionConsole createConsole() {
    return myJavaSession.getProcess().getExecutionResult().getExecutionConsole();
  }

  @NotNull
  @Override
  public XDebugTabLayouter createTabLayouter() {
    return new XDebugTabLayouter() {
      @Override
      public void registerAdditionalContent(@NotNull RunnerLayoutUi ui) {
        final ThreadsPanel panel = new ThreadsPanel(myJavaSession.getProject(), getDebuggerStateManager());
        final Content threadsContent = ui.createContent(
          DebuggerContentInfo.THREADS_CONTENT, panel, XDebuggerBundle.message("debugger.session.tab.threads.title"),
          AllIcons.Debugger.Threads, null);
        Disposer.register(threadsContent, panel);
        threadsContent.setCloseable(false);
        ui.addContent(threadsContent, 0, PlaceInGrid.left, true);
        // Only refresh the threads view while its tab is actually visible.
        ui.addListener(new ContentManagerAdapter() {
          @Override
          public void selectionChanged(ContentManagerEvent event) {
            if (event.getContent() == threadsContent) {
              if (threadsContent.isSelected()) {
                panel.setUpdateEnabled(true);
                if (panel.isRefreshNeeded()) {
                  panel.rebuildIfVisible(DebuggerSession.EVENT_CONTEXT);
                }
              }
              else {
                panel.setUpdateEnabled(false);
              }
            }
          }
        }, threadsContent);
      }
    };
  }

  @Override
  public void registerAdditionalActions(@NotNull DefaultActionGroup leftToolbar, @NotNull DefaultActionGroup topToolbar) {
    Constraints beforeRunner = new Constraints(Anchor.BEFORE, "Runner.Layout");
    leftToolbar.add(Separator.getInstance(), beforeRunner);
    leftToolbar.add(ActionManager.getInstance().getAction(DebuggerActions.EXPORT_THREADS), beforeRunner);
    leftToolbar.add(ActionManager.getInstance().getAction(DebuggerActions.DUMP_THREADS), beforeRunner);
    leftToolbar.add(Separator.getInstance(), beforeRunner);

    final DefaultActionGroup settings = new DefaultActionGroup("DebuggerSettings", true) {
      @Override
      public void update(AnActionEvent e) {
        e.getPresentation().setText(ActionsBundle.message("group.XDebugger.settings.text"));
        e.getPresentation().setIcon(AllIcons.General.SecondaryGroup);
      }

      @Override
      public boolean isDumbAware() {
        return true;
      }
    };
    settings.add(new WatchLastMethodReturnValueAction());
    settings.add(new AutoVarsSwitchAction());
    settings.add(new UnmuteOnStopAction());
    settings.addSeparator();
    addActionToGroup(settings, XDebuggerActions.AUTO_TOOLTIP);
    leftToolbar.add(settings, new Constraints(Anchor.AFTER, "Runner.Layout"));
  }

  /** Toggle between "auto variables" and "all variables" display modes in the variables view. */
  private static class AutoVarsSwitchAction extends ToggleAction {
    private volatile boolean myAutoModeEnabled;

    public AutoVarsSwitchAction() {
      super("", "", AllIcons.Debugger.AutoVariablesMode);
      myAutoModeEnabled = DebuggerSettings.getInstance().AUTO_VARIABLES_MODE;
    }

    @Override
    public void update(final AnActionEvent e) {
      super.update(e);
      final Presentation presentation = e.getPresentation();
      // The client property is a Boolean set by ToggleAction.update(); unbox
      // defensively so a missing property cannot trigger an NPE.
      final boolean autoModeEnabled = Boolean.TRUE.equals(presentation.getClientProperty(SELECTED_PROPERTY));
      presentation.setText(autoModeEnabled ? "All-Variables Mode" : "Auto-Variables Mode");
    }

    @Override
    public boolean isSelected(AnActionEvent e) {
      return myAutoModeEnabled;
    }

    @Override
    public void setSelected(AnActionEvent e, boolean enabled) {
      myAutoModeEnabled = enabled;
      DebuggerSettings.getInstance().AUTO_VARIABLES_MODE = enabled;
      XDebuggerUtilImpl.rebuildAllSessionsViews(e.getProject());
    }
  }

  /**
   * Toggle for watching the last method's return value. Disabled (with an
   * explanatory text) when the target VM cannot report method return values.
   */
  private static class WatchLastMethodReturnValueAction extends ToggleAction {
    private volatile boolean myWatchesReturnValues;
    private final String myTextEnable;
    private final String myTextUnavailable;
    private final String myTextDisable;

    public WatchLastMethodReturnValueAction() {
      super("", DebuggerBundle.message("action.watch.method.return.value.description"), null);
      myWatchesReturnValues = DebuggerSettings.getInstance().WATCH_RETURN_VALUES;
      myTextEnable = DebuggerBundle.message("action.watches.method.return.value.enable");
      myTextDisable = DebuggerBundle.message("action.watches.method.return.value.disable");
      myTextUnavailable = DebuggerBundle.message("action.watches.method.return.value.unavailable.reason");
    }

    @Override
    public void update(final AnActionEvent e) {
      super.update(e);
      final Presentation presentation = e.getPresentation();
      // Null-safe unboxing: the property is only set once ToggleAction.update() ran.
      final boolean watchValues = Boolean.TRUE.equals(presentation.getClientProperty(SELECTED_PROPERTY));
      DebugProcessImpl process = getCurrentDebugProcess(e.getProject());
      final String actionText = watchValues ? myTextDisable : myTextEnable;
      if (process == null || process.canGetMethodReturnValue()) {
        presentation.setEnabled(true);
        presentation.setText(actionText);
      }
      else {
        presentation.setEnabled(false);
        presentation.setText(myTextUnavailable);
      }
    }

    @Override
    public boolean isSelected(AnActionEvent e) {
      return myWatchesReturnValues;
    }

    @Override
    public void setSelected(AnActionEvent e, boolean watch) {
      myWatchesReturnValues = watch;
      DebuggerSettings.getInstance().WATCH_RETURN_VALUES = watch;
      DebugProcessImpl process = getCurrentDebugProcess(e.getProject());
      if (process != null) {
        process.setWatchMethodReturnValuesEnabled(watch);
      }
    }
  }

  /**
   * Returns the legacy debug process behind the project's current XDebugger
   * session, or {@code null} if there is no session or it is not a Java one.
   */
  @Nullable
  private static DebugProcessImpl getCurrentDebugProcess(@Nullable Project project) {
    if (project != null) {
      XDebugSession session = XDebuggerManager.getInstance(project).getCurrentSession();
      if (session != null) {
        XDebugProcess process = session.getDebugProcess();
        if (process instanceof JavaDebugProcess) {
          return ((JavaDebugProcess)process).getDebuggerSession().getProcess();
        }
      }
    }
    return null;
  }

  /** Adds the action with the given id to the group, ignoring unknown ids. */
  private static void addActionToGroup(final DefaultActionGroup group, final String actionId) {
    AnAction action = ActionManager.getInstance().getAction(actionId);
    if (action != null) group.add(action);
  }

  public NodeManagerImpl getNodeManager() {
    return myNodeManager;
  }

  @Nullable
  @Override
  public XValueMarkerProvider<?, ?> createValueMarkerProvider() {
    return new JavaValueMarker();
  }
}
|
java/debugger/impl/src/com/intellij/debugger/engine/JavaDebugProcess.java
|
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.debugger.engine;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.actions.DebuggerActions;
import com.intellij.debugger.engine.evaluation.EvaluationContext;
import com.intellij.debugger.engine.evaluation.EvaluationContextImpl;
import com.intellij.debugger.engine.events.DebuggerCommandImpl;
import com.intellij.debugger.impl.DebuggerContextUtil;
import com.intellij.debugger.impl.DebuggerSession;
import com.intellij.debugger.impl.DebuggerStateManager;
import com.intellij.debugger.impl.DebuggerUtilsEx;
import com.intellij.debugger.settings.DebuggerSettings;
import com.intellij.debugger.ui.DebuggerContentInfo;
import com.intellij.debugger.ui.breakpoints.Breakpoint;
import com.intellij.debugger.ui.impl.ThreadsPanel;
import com.intellij.debugger.ui.impl.watch.DebuggerTreeNodeImpl;
import com.intellij.debugger.ui.impl.watch.MessageDescriptor;
import com.intellij.debugger.ui.impl.watch.NodeDescriptorImpl;
import com.intellij.debugger.ui.impl.watch.NodeManagerImpl;
import com.intellij.debugger.ui.tree.NodeDescriptor;
import com.intellij.debugger.ui.tree.render.DescriptorLabelListener;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.execution.ui.ExecutionConsole;
import com.intellij.execution.ui.RunnerLayoutUi;
import com.intellij.execution.ui.layout.PlaceInGrid;
import com.intellij.icons.AllIcons;
import com.intellij.idea.ActionsBundle;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.ui.content.Content;
import com.intellij.ui.content.ContentManagerAdapter;
import com.intellij.ui.content.ContentManagerEvent;
import com.intellij.xdebugger.*;
import com.intellij.xdebugger.breakpoints.XBreakpoint;
import com.intellij.xdebugger.breakpoints.XBreakpointHandler;
import com.intellij.xdebugger.evaluation.XDebuggerEditorsProvider;
import com.intellij.xdebugger.frame.XStackFrame;
import com.intellij.xdebugger.frame.XValueMarkerProvider;
import com.intellij.xdebugger.impl.XDebuggerUtilImpl;
import com.intellij.xdebugger.impl.actions.XDebuggerActions;
import com.intellij.xdebugger.ui.XDebugTabLayouter;
import com.sun.jdi.event.Event;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.java.debugger.JavaDebuggerEditorsProvider;
import java.util.ArrayList;
import java.util.List;
/**
* @author egor
*/
/**
 * Bridges the legacy Java debugger backend ({@link DebuggerSession}/{@link DebugProcessImpl})
 * into the XDebugger framework: forwards stepping/pause/resume commands, translates
 * breakpoint hits and VM pauses into {@link XDebugSession} events, and contributes the
 * Threads tab plus the debugger-settings toolbar group.
 */
public class JavaDebugProcess extends XDebugProcess {
  private final DebuggerSession myJavaSession;
  private final JavaDebuggerEditorsProvider myEditorsProvider;
  private final XBreakpointHandler<?>[] myBreakpointHandlers;
  private final NodeManagerImpl myNodeManager;

  public JavaDebugProcess(@NotNull final XDebugSession session, final DebuggerSession javaSession) {
    super(session);
    myJavaSession = javaSession;
    myEditorsProvider = new JavaDebuggerEditorsProvider();
    DebugProcessImpl process = javaSession.getProcess();

    // Built-in breakpoint kinds plus any contributed through the extension point
    List<XBreakpointHandler> handlers = new ArrayList<XBreakpointHandler>();
    handlers.add(new JavaBreakpointHandler.JavaLineBreakpointHandler(process));
    handlers.add(new JavaBreakpointHandler.JavaExceptionBreakpointHandler(process));
    handlers.add(new JavaBreakpointHandler.JavaFieldBreakpointHandler(process));
    handlers.add(new JavaBreakpointHandler.JavaMethodBreakpointHandler(process));
    handlers.add(new JavaBreakpointHandler.JavaWildcardBreakpointHandler(process));
    for (JavaBreakpointHandlerFactory factory : Extensions.getExtensions(JavaBreakpointHandlerFactory.EP_NAME)) {
      handlers.add(factory.createHandler(process));
    }
    myBreakpointHandlers = handlers.toArray(new XBreakpointHandler[handlers.size()]);

    process.addDebugProcessListener(new DebugProcessAdapter() {
      @Override
      public void paused(final SuspendContext suspendContext) {
        SuspendContextImpl context = (SuspendContextImpl)suspendContext;
        context.initExecutionStacks();
        // If the pause was caused by a breakpoint, report it to the session as a breakpoint hit;
        // otherwise just report the new position
        List<Pair<Breakpoint, Event>> descriptors = DebuggerUtilsEx.getEventDescriptors(context);
        if (!descriptors.isEmpty()) {
          Breakpoint breakpoint = descriptors.get(0).getFirst();
          XBreakpoint xBreakpoint = breakpoint.getXBreakpoint();
          if (xBreakpoint != null) {
            getSession().breakpointReached(xBreakpoint, null, context);
            return;
          }
        }
        getSession().positionReached(context);
      }
    });

    myNodeManager = new NodeManagerImpl(session.getProject(), null) {
      @Override
      public DebuggerTreeNodeImpl createNode(final NodeDescriptor descriptor, EvaluationContext evaluationContext) {
        ((NodeDescriptorImpl)descriptor).setContext((EvaluationContextImpl)evaluationContext);
        final DebuggerTreeNodeImpl node = new DebuggerTreeNodeImpl(null, descriptor);
        ((NodeDescriptorImpl)descriptor).updateRepresentation((EvaluationContextImpl)evaluationContext, DescriptorLabelListener.DUMMY_LISTENER);
        return node;
      }

      @Override
      public DebuggerTreeNodeImpl createMessageNode(MessageDescriptor descriptor) {
        return new DebuggerTreeNodeImpl(null, descriptor);
      }
    };

    session.addSessionListener(new XDebugSessionAdapter() {
      @Override
      public void beforeSessionResume() {
        // Snapshot descriptor history on the debugger manager thread before the VM resumes
        myJavaSession.getProcess().getManagerThread().schedule(new DebuggerCommandImpl() {
          @Override
          protected void action() throws Exception {
            myNodeManager.setHistoryByContext(getDebuggerStateManager().getContext());
          }

          @Override
          public Priority getPriority() {
            return Priority.NORMAL;
          }
        });
      }

      @Override
      public void stackFrameChanged() {
        // Keep the legacy debugger context in sync with the frame selected in the XDebugger UI
        XStackFrame frame = session.getCurrentStackFrame();
        if (frame instanceof JavaStackFrame) {
          DebuggerContextUtil.setStackFrame(javaSession.getContextManager(), ((JavaStackFrame)frame).getStackFrameProxy());
        }
      }
    });
  }

  private DebuggerStateManager getDebuggerStateManager() {
    return myJavaSession.getContextManager();
  }

  public DebuggerSession getDebuggerSession() {
    return myJavaSession;
  }

  @NotNull
  @Override
  public XDebuggerEditorsProvider getEditorsProvider() {
    return myEditorsProvider;
  }

  @Override
  public void startStepOver() {
    myJavaSession.stepOver(false);
  }

  @Override
  public void startStepInto() {
    myJavaSession.stepInto(false, null);
  }

  @Override
  public void startStepOut() {
    myJavaSession.stepOut();
  }

  @Override
  public void stop() {
    myJavaSession.getProcess().dispose();
  }

  @Override
  public void startPausing() {
    myJavaSession.pause();
  }

  @Override
  public void resume() {
    myJavaSession.resume();
  }

  @Override
  public void runToPosition(@NotNull XSourcePosition position) {
    Document document = FileDocumentManager.getInstance().getDocument(position.getFile());
    // Document may be null for files with no text content; previously this would NPE
    if (document != null) {
      myJavaSession.runToCursor(document, position.getLine(), false);
    }
  }

  @NotNull
  @Override
  public XBreakpointHandler<?>[] getBreakpointHandlers() {
    return myBreakpointHandlers;
  }

  @Override
  public boolean checkCanInitBreakpoints() {
    // Breakpoint initialization is driven by the legacy debugger machinery, not XDebugger
    return false;
  }

  @Nullable
  @Override
  protected ProcessHandler doGetProcessHandler() {
    return myJavaSession.getProcess().getExecutionResult().getProcessHandler();
  }

  @NotNull
  @Override
  public ExecutionConsole createConsole() {
    return myJavaSession.getProcess().getExecutionResult().getExecutionConsole();
  }

  @NotNull
  @Override
  public XDebugTabLayouter createTabLayouter() {
    return new XDebugTabLayouter() {
      @Override
      public void registerAdditionalContent(@NotNull RunnerLayoutUi ui) {
        final ThreadsPanel panel = new ThreadsPanel(myJavaSession.getProject(), getDebuggerStateManager());
        final Content threadsContent = ui.createContent(
          DebuggerContentInfo.THREADS_CONTENT, panel, XDebuggerBundle.message("debugger.session.tab.threads.title"),
          AllIcons.Debugger.Threads, null);
        Disposer.register(threadsContent, panel);
        threadsContent.setCloseable(false);
        ui.addContent(threadsContent, 0, PlaceInGrid.left, true);
        // Only refresh the threads view while its tab is actually selected
        ui.addListener(new ContentManagerAdapter() {
          @Override
          public void selectionChanged(ContentManagerEvent event) {
            if (event.getContent() == threadsContent) {
              if (threadsContent.isSelected()) {
                panel.setUpdateEnabled(true);
                if (panel.isRefreshNeeded()) {
                  panel.rebuildIfVisible(DebuggerSession.EVENT_CONTEXT);
                }
              }
              else {
                panel.setUpdateEnabled(false);
              }
            }
          }
        }, threadsContent);
      }
    };
  }

  @Override
  public void registerAdditionalActions(@NotNull DefaultActionGroup leftToolbar, @NotNull DefaultActionGroup topToolbar) {
    Constraints beforeRunner = new Constraints(Anchor.BEFORE, "Runner.Layout");
    leftToolbar.add(Separator.getInstance(), beforeRunner);
    leftToolbar.add(ActionManager.getInstance().getAction(DebuggerActions.EXPORT_THREADS), beforeRunner);
    leftToolbar.add(ActionManager.getInstance().getAction(DebuggerActions.DUMP_THREADS), beforeRunner);
    leftToolbar.add(Separator.getInstance(), beforeRunner);

    final DefaultActionGroup settings = new DefaultActionGroup("DebuggerSettings", true) {
      @Override
      public void update(AnActionEvent e) {
        e.getPresentation().setText(ActionsBundle.message("group.XDebugger.settings.text"));
        e.getPresentation().setIcon(AllIcons.General.SecondaryGroup);
      }

      @Override
      public boolean isDumbAware() {
        return true;
      }
    };
    settings.add(new WatchLastMethodReturnValueAction());
    settings.add(new AutoVarsSwitchAction());
    settings.add(new UnmuteOnStopAction());
    settings.addSeparator();
    addActionToGroup(settings, XDebuggerActions.AUTO_TOOLTIP);
    leftToolbar.add(settings, new Constraints(Anchor.AFTER, "Runner.Layout"));
  }

  /**
   * Toggles between "auto variables" and "all variables" display modes.
   */
  private static class AutoVarsSwitchAction extends ToggleAction {
    private volatile boolean myAutoModeEnabled;

    public AutoVarsSwitchAction() {
      super("", "", AllIcons.Debugger.AutoVariablesMode);
      myAutoModeEnabled = DebuggerSettings.getInstance().AUTO_VARIABLES_MODE;
    }

    @Override
    public void update(final AnActionEvent e) {
      super.update(e);
      // Derive the label from this action's own state rather than the transient
      // SELECTED_PROPERTY client property, which may be absent (NPE on unboxing) or stale
      e.getPresentation().setText(myAutoModeEnabled ? "All-Variables Mode" : "Auto-Variables Mode");
    }

    @Override
    public boolean isSelected(AnActionEvent e) {
      return myAutoModeEnabled;
    }

    @Override
    public void setSelected(AnActionEvent e, boolean enabled) {
      myAutoModeEnabled = enabled;
      DebuggerSettings.getInstance().AUTO_VARIABLES_MODE = enabled;
      XDebuggerUtilImpl.rebuildAllSessionsViews(e.getProject());
    }
  }

  /**
   * Toggles watching of the last executed method's return value.
   */
  private class WatchLastMethodReturnValueAction extends ToggleAction {
    private volatile boolean myWatchesReturnValues;
    private final String myTextEnable;
    private final String myTextUnavailable;
    private final String myMyTextDisable;

    public WatchLastMethodReturnValueAction() {
      super("", DebuggerBundle.message("action.watch.method.return.value.description"), null);
      myWatchesReturnValues = DebuggerSettings.getInstance().WATCH_RETURN_VALUES;
      myTextEnable = DebuggerBundle.message("action.watches.method.return.value.enable");
      myMyTextDisable = DebuggerBundle.message("action.watches.method.return.value.disable");
      myTextUnavailable = DebuggerBundle.message("action.watches.method.return.value.unavailable.reason");
    }

    @Override
    public void update(final AnActionEvent e) {
      super.update(e);
      final Presentation presentation = e.getPresentation();
      // IDEA-125037: base the label on this action's persistent state instead of the
      // SELECTED_PROPERTY client property, which is reset when the toolbar is rebuilt
      // (e.g. on process stop) and caused the label to flip
      final String actionText = myWatchesReturnValues ? myMyTextDisable : myTextEnable;
      final DebugProcessImpl process = myJavaSession.getProcess();
      if (process != null && process.canGetMethodReturnValue()) {
        presentation.setEnabled(true);
        presentation.setText(actionText);
      }
      else {
        presentation.setEnabled(false);
        presentation.setText(process == null ? actionText : myTextUnavailable);
      }
    }

    @Override
    public boolean isSelected(AnActionEvent e) {
      return myWatchesReturnValues;
    }

    @Override
    public void setSelected(AnActionEvent e, boolean watch) {
      myWatchesReturnValues = watch;
      DebuggerSettings.getInstance().WATCH_RETURN_VALUES = watch;
      final DebugProcessImpl process = myJavaSession.getProcess();
      if (process != null) {
        process.setWatchMethodReturnValuesEnabled(watch);
      }
    }
  }

  private static void addActionToGroup(final DefaultActionGroup group, final String actionId) {
    AnAction action = ActionManager.getInstance().getAction(actionId);
    if (action != null) group.add(action);
  }

  public NodeManagerImpl getNodeManager() {
    return myNodeManager;
  }

  @Nullable
  @Override
  public XValueMarkerProvider<?, ?> createValueMarkerProvider() {
    return new JavaValueMarker();
  }
}
|
java-xdebugger: IDEA-125037 Debugger: "Watch method return values" setting label is changed on stop
|
java/debugger/impl/src/com/intellij/debugger/engine/JavaDebugProcess.java
|
java-xdebugger: IDEA-125037 Debugger: "Watch method return values" setting label is changed on stop
|
|
Java
|
apache-2.0
|
80055035290894f5b9eb43059b8ceca7555cd8a2
| 0
|
permazen/permazen,archiecobbs/jsimpledb,permazen/permazen,archiecobbs/jsimpledb,archiecobbs/jsimpledb,permazen/permazen
|
/*
* Copyright (C) 2015 Archie L. Cobbs. All rights reserved.
*/
package org.jsimpledb.kv.test;
import java.io.Closeable;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.Random;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.jsimpledb.kv.KVDatabase;
import org.jsimpledb.kv.KVPair;
import org.jsimpledb.kv.KVStore;
import org.jsimpledb.kv.KVTransaction;
import org.jsimpledb.kv.KeyRange;
import org.jsimpledb.kv.KeyRanges;
import org.jsimpledb.kv.RetryTransactionException;
import org.jsimpledb.kv.StaleTransactionException;
import org.jsimpledb.kv.TransactionTimeoutException;
import org.jsimpledb.util.ByteUtil;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
public abstract class KVDatabaseTest extends KVTestSupport {
    // Thread pool shared by all tests; created in setup(), shut down in teardown()
    protected ExecutorService executor;

    // Run-wide totals reported by teardown()
    private final AtomicInteger numTransactionAttempts = new AtomicInteger();
    private final AtomicInteger numTransactionRetries = new AtomicInteger();

    // Maps normalized retry-exception message -> occurrence count; guarded by "this"
    private TreeMap<String, AtomicInteger> retryReasons = new TreeMap<>();
@BeforeClass(dependsOnGroups = "configure")
public void setup() throws Exception {
this.executor = Executors.newFixedThreadPool(33);
for (KVDatabase[] kvdb : this.getDBs()) {
if (kvdb.length > 0)
kvdb[0].start();
}
this.numTransactionAttempts.set(0);
this.numTransactionRetries.set(0);
}
@AfterClass
public void teardown() throws Exception {
this.executor.shutdown();
for (KVDatabase[] kvdb : this.getDBs()) {
if (kvdb.length > 0)
kvdb[0].stop();
}
final double retryRate = (double)this.numTransactionRetries.get() / (double)this.numTransactionAttempts.get();
this.log.info("%n%n****************%n");
this.log.info(String.format("Retry rate: %.2f%% (%d / %d)",
retryRate * 100.0, this.numTransactionRetries.get(), this.numTransactionAttempts.get()));
this.log.info("Retry reasons:");
for (Map.Entry<String, AtomicInteger> entry : this.retryReasons.entrySet())
this.log.info(String.format("%10d %s", entry.getValue().get(), entry.getKey()));
this.log.info("%n%n****************%n");
}
@DataProvider(name = "kvdbs")
protected KVDatabase[][] getDBs() {
final KVDatabase kvdb = this.getKVDatabase();
return kvdb != null ? new KVDatabase[][] { { kvdb } } : new KVDatabase[0][];
}
    /**
     * Get the database implementation under test.
     *
     * @return database to test, or null to skip these tests
     */
    protected abstract KVDatabase getKVDatabase();
    /**
     * Basic sanity checks: clear the database, verify emptiness, run several sequential
     * read/write transactions verifying visibility of committed data, then confirm a
     * committed transaction rejects further access with {@link StaleTransactionException}.
     *
     * @param store underlying store
     * @throws Exception if an error occurs
     */
    @Test(dataProvider = "kvdbs")
    public void testSimpleStuff(KVDatabase store) throws Exception {

        // Debug
        this.log.info("starting testSimpleStuff() on " + store);

        // Clear database
        this.log.info("testSimpleStuff() on " + store + ": clearing database");
        this.tryNtimes(store, new Transactional<Void>() {
            @Override
            public Void transact(KVTransaction tx) {
                tx.removeRange(null, null);
                return null;
            }
        });
        this.log.info("testSimpleStuff() on " + store + ": done clearing database");

        // Verify database is empty
        this.log.info("testSimpleStuff() on " + store + ": verifying database is empty");
        this.tryNtimes(store, new Transactional<Void>() {
            @Override
            public Void transact(KVTransaction tx) {
                KVPair p = tx.getAtLeast(null);
                Assert.assertNull(p);
                p = tx.getAtMost(null);
                Assert.assertNull(p);
                Iterator<KVPair> it = tx.getRange(null, null, false);
                Assert.assertFalse(it.hasNext());
                return null;
            }
        });
        this.log.info("testSimpleStuff() on " + store + ": done verifying database is empty");

        // tx 1
        this.log.info("testSimpleStuff() on " + store + ": starting tx1");
        this.tryNtimes(store, new Transactional<Void>() {
            @Override
            public Void transact(KVTransaction tx) {
                final byte[] x = tx.get(b("01"));
                if (x != null)
                    Assert.assertEquals(tx.get(b("01")), b("02"));      // transaction was retried even though it succeeded
                tx.put(b("01"), b("02"));
                Assert.assertEquals(tx.get(b("01")), b("02"));
                return null;
            }
        });
        this.log.info("testSimpleStuff() on " + store + ": committed tx1");

        // tx 2 - must see the value committed by tx 1 (or its own retried write)
        this.log.info("testSimpleStuff() on " + store + ": starting tx2");
        this.tryNtimes(store, new Transactional<Void>() {
            @Override
            public Void transact(KVTransaction tx) {
                final byte[] x = tx.get(b("01"));
                Assert.assertNotNull(x);
                Assert.assertTrue(Arrays.equals(x, b("02")) || Arrays.equals(x, b("03")));
                tx.put(b("01"), b("03"));
                Assert.assertEquals(tx.get(b("01")), b("03"));
                return null;
            }
        });
        this.log.info("testSimpleStuff() on " + store + ": committed tx2");

        // tx 3 - must see the value committed by tx 2
        this.log.info("testSimpleStuff() on " + store + ": starting tx3");
        this.tryNtimes(store, new Transactional<Void>() {
            @Override
            public Void transact(KVTransaction tx) {
                final byte[] x = tx.get(b("01"));
                Assert.assertEquals(x, b("03"));
                tx.put(b("10"), b("01"));
                return null;
            }
        });
        this.log.info("testSimpleStuff() on " + store + ": committed tx3");

        // Check stale access: the returned tx has already been committed by tryNtimes()
        this.log.info("testSimpleStuff() on " + store + ": checking stale access");
        final KVTransaction tx = this.tryNtimes(store, new Transactional<KVTransaction>() {
            @Override
            public KVTransaction transact(KVTransaction tx) {
                return tx;
            }
        });
        try {
            tx.get(b("01"));
            assert false;
        } catch (StaleTransactionException e) {
            // expected
        }
        this.log.info("finished testSimpleStuff() on " + store);
    }
    /**
     * Verify key watches: for each of several kinds of mutation touching key {@code 0123},
     * register a watch, apply the mutation, and require the watch to fire within one second.
     * Passes vacuously if the store does not support key watches.
     *
     * @param store underlying store
     * @throws Exception if an error occurs
     */
    @Test(dataProvider = "kvdbs")
    public void testKeyWatch(KVDatabase store) throws Exception {

        // Debug
        this.log.info("starting testKeyWatch() on " + store);

        // Clear database
        this.log.info("testKeyWatch() on " + store + ": clearing database");
        this.tryNtimes(store, new Transactional<Void>() {
            @Override
            public Void transact(KVTransaction tx) {
                tx.removeRange(null, null);
                return null;
            }
        });
        this.log.info("testKeyWatch() on " + store + ": done clearing database");

        // Set up the modifications we want to test: put, overwrite, counter put/adjust,
        // range remove, empty-value put, single remove
        final ArrayList<Transactional<Void>> mods = new ArrayList<>();
        mods.add(new Transactional<Void>() {
            @Override
            public Void transact(KVTransaction tx) {
                tx.put(b("0123"), b("4567"));
                return null;
            }
        });
        mods.add(new Transactional<Void>() {
            @Override
            public Void transact(KVTransaction tx) {
                tx.put(b("0123"), b("89ab"));
                return null;
            }
        });
        mods.add(new Transactional<Void>() {
            @Override
            public Void transact(KVTransaction tx) {
                tx.put(b("0123"), tx.encodeCounter(1234));
                return null;
            }
        });
        mods.add(new Transactional<Void>() {
            @Override
            public Void transact(KVTransaction tx) {
                tx.adjustCounter(b("0123"), 99);
                return null;
            }
        });
        mods.add(new Transactional<Void>() {
            @Override
            public Void transact(KVTransaction tx) {
                tx.removeRange(b("01"), b("02"));
                return null;
            }
        });
        mods.add(new Transactional<Void>() {
            @Override
            public Void transact(KVTransaction tx) {
                tx.put(b("0123"), b(""));
                return null;
            }
        });
        mods.add(new Transactional<Void>() {
            @Override
            public Void transact(KVTransaction tx) {
                tx.remove(b("0123"));
                return null;
            }
        });

        // Set watches, perform modifications, and test notifications
        for (Transactional<Void> mod : mods) {

            // Set watch; watchKey() throwing UnsupportedOperationException means no watch support
            this.log.info("testKeyWatch() on " + store + ": creating key watch for " + mod);
            final Future<Void> watch = this.tryNtimes(store, new Transactional<Future<Void>>() {
                @Override
                public Future<Void> transact(KVTransaction tx) {
                    try {
                        return tx.watchKey(b("0123"));
                    } catch (UnsupportedOperationException e) {
                        return null;
                    }
                }
            });
            if (watch == null) {
                this.log.info("testKeyWatch() on " + store + ": key watches not supported, bailing out");
                return;
            }
            this.log.info("testKeyWatch() on " + store + ": created key watch: " + watch);

            // Perform modification
            this.log.info("testKeyWatch() on " + store + ": testing " + mod);
            this.tryNtimes(store, mod);

            // Get notification; times out (failing the test) after one second
            this.log.info("testKeyWatch() on " + store + ": waiting for notification");
            final long start = System.nanoTime();
            watch.get(1, TimeUnit.SECONDS);
            this.log.info("testKeyWatch() on " + store + ": got notification in " + ((System.nanoTime() - start) / 1000000) + "ms");
        }

        // Done
        this.log.info("finished testKeyWatch() on " + store);
    }
    /**
     * Start two transactions that read then write the same key with different values, and
     * verify that at least one fails with {@link RetryTransactionException} (either at write
     * or at commit time) and that the surviving transaction's value wins.
     *
     * @param store underlying store
     * @throws Exception if an error occurs
     */
    @Test(dataProvider = "kvdbs")
    public void testConflictingTransactions(KVDatabase store) throws Exception {

        // Clear database
        this.log.info("starting testConflictingTransactions() on " + store);
        this.tryNtimes(store, new Transactional<Void>() {
            @Override
            public Void transact(KVTransaction tx) {
                tx.removeRange(null, null);
                return null;
            }
        });

        // Both read the same key (establishing a read/write conflict)
        final KVTransaction[] txs = new KVTransaction[] { store.createTransaction(), store.createTransaction() };
        this.log.info("tx[0] is " + txs[0]);
        this.log.info("tx[1] is " + txs[1]);
        this.executor.submit(new Reader(txs[0], b("10"))).get();
        this.executor.submit(new Reader(txs[1], b("10"))).get();

        // Both write to the same key but with different values
        final String[] fails = new String[] { "uninitialized status", "uninitialized status" };
        Future<?>[] futures = new Future<?>[] {
            this.executor.submit(new Writer(txs[0], b("10"), b("01"))),
            this.executor.submit(new Writer(txs[1], b("10"), b("02")))
        };

        // See what happened - we might have gotten a conflict at write time
        for (int i = 0; i < 2; i++) {
            try {
                futures[i].get();
                this.log.info(txs[i] + " #" + (i + 1) + " succeeded on write");
                fails[i] = null;
            } catch (Exception e) {
                // NOTE(review): assumes the ExecutionException cause chain contains only
                // Exceptions (not Errors); the cast below would CCE otherwise - confirm
                while (e instanceof ExecutionException)
                    e = (Exception)e.getCause();
                if (!(e instanceof RetryTransactionException))
                    throw new AssertionError("wrong exception type: " + e, e);
                final RetryTransactionException retry = (RetryTransactionException)e;
                Assert.assertSame(retry.getTransaction(), txs[i]);
                this.log.info(txs[i] + " #" + (i + 1) + " failed on write");
                if (this.log.isTraceEnabled())
                    this.log.trace(txs[i] + " #" + (i + 1) + " write failure exception trace:", e);
                fails[i] = "" + e;
            }
        }

        // Show contents of surviving transactions; note exception(s) could occur here also
        for (int i = 0; i < 2; i++) {
            if (fails[i] == null) {
                final Exception e = this.showKV(txs[i], "tx[" + i + "] of " + store + " after write");
                if (e != null)
                    fails[i] = "" + e;
            }
        }

        // If both succeeded, then we should get a conflict on commit instead
        for (int i = 0; i < 2; i++) {
            if (fails[i] == null)
                futures[i] = this.executor.submit(new Committer(txs[i]));
        }
        for (int i = 0; i < 2; i++) {
            if (fails[i] == null) {
                try {
                    futures[i].get();
                    this.log.info(txs[i] + " #" + (i + 1) + " succeeded on commit");
                    fails[i] = null;
                } catch (AssertionError e) {
                    throw e;
                } catch (Throwable e) {
                    while (e instanceof ExecutionException)
                        e = e.getCause();
                    assert e instanceof RetryTransactionException : "wrong exception type: " + e;
                    final RetryTransactionException retry = (RetryTransactionException)e;
                    Assert.assertSame(retry.getTransaction(), txs[i]);
                    this.log.info(txs[i] + " #" + (i + 1) + " failed on commit");
                    if (this.log.isTraceEnabled())
                        this.log.trace(txs[i] + " #" + (i + 1) + " commit failure exception trace:", e);
                    fails[i] = "" + e;
                }
            }
        }

        // Exactly one should have failed and one should have succeeded (for most databases)
        if (!this.allowBothTransactionsToFail()) {
            assert fails[0] == null || fails[1] == null : "both transactions failed:"
              + "\n  fails[0]: " + fails[0] + "\n  fails[1]: " + fails[1];
        }
        assert fails[0] != null || fails[1] != null : "both transactions succeeded";
        this.log.info("exactly one transaction failed:\n  fails[0]: " + fails[0] + "\n  fails[1]: " + fails[1]);

        // Verify the resulting change is consistent with the tx that succeeded
        final byte[] expected = fails[0] == null ? b("01") : fails[1] == null ? b("02") : null;
        if (expected != null) {
            final KVTransaction tx2 = store.createTransaction();
            this.showKV(tx2, "TX2 of " + store);
            byte[] x = this.executor.submit(new Reader(tx2, b("10"))).get();
            Assert.assertEquals(x, expected);
            tx2.rollback();
        }
        this.log.info("finished testConflictingTransactions() on " + store);
    }
    /**
     * Whether {@link #testConflictingTransactions testConflictingTransactions()} should
     * tolerate both conflicting transactions failing (some stores retry both sides).
     *
     * @return true to allow both transactions to fail; default false
     */
    protected boolean allowBothTransactionsToFail() {
        return false;
    }
    /**
     * Run multiple concurrent transactions with overlapping read ranges but disjoint
     * write ranges; all should eventually commit, retrying individually as needed.
     *
     * @param store underlying store
     * @throws Exception if an error occurs
     */
    @Test(dataProvider = "kvdbs")
    public void testNonconflictingTransactions(KVDatabase store) throws Exception {

        // Clear database
        this.log.info("starting testNonconflictingTransactions() on " + store);
        this.tryNtimes(store, new Transactional<Void>() {
            @Override
            public Void transact(KVTransaction tx) {
                tx.removeRange(null, null);
                return null;
            }
        });

        // Multiple concurrent transactions with overlapping read ranges and non-intersecting write ranges
        int done = 0;
        KVTransaction[] txs = new KVTransaction[10];
        for (int i = 0; i < txs.length; i++)
            txs[i] = store.createTransaction();
        while (true) {
            boolean finished = true;
            for (int i = 0; i < txs.length; i++) {
                if (txs[i] == null)
                    continue;
                finished = false;
                // Each slot reads key {i} (overlapping range) and writes key {i+128} (disjoint)
                Future<?> rf = this.executor.submit(new Reader(txs[i], new byte[] { (byte)i }, true));
                Future<?> wf = this.executor.submit(new Writer(txs[i], new byte[] { (byte)(i + 128) }, b("02")));
                for (Future<?> f : new Future<?>[] { rf, wf }) {
                    try {
                        f.get();
                    } catch (ExecutionException e) {
                        // On retry, replace this slot's transaction and try again next pass
                        if (e.getCause() instanceof RetryTransactionException) {
                            txs[i] = store.createTransaction();
                            break;
                        }
                        throw e;
                    }
                }
            }
            if (finished)
                break;
            for (int i = 0; i < txs.length; i++) {
                if (txs[i] == null)
                    continue;
                try {
                    this.numTransactionAttempts.incrementAndGet();
                    txs[i].commit();
                } catch (RetryTransactionException e) {
                    this.updateRetryStats(e);
                    txs[i] = store.createTransaction();
                    continue;
                }
                txs[i] = null;      // null marks this slot as successfully committed
            }
        }
        this.log.info("finished testNonconflictingTransactions() on " + store);
    }
/**
* This test runs transactions in parallel and verifies there is no "leakage" between them.
* Database must be configured for linearizable isolation.
*
* @param store underlying store
* @throws Exception if an error occurs
*/
@Test(dataProvider = "kvdbs")
public void testParallelTransactions(KVDatabase store) throws Exception {
this.testParallelTransactions(new KVDatabase[] { store });
}
    /**
     * Run batches of concurrent {@link RandomTask}s against the given stores and fail if
     * any task detects an inconsistency or throws.
     *
     * @param stores underlying stores; any that implement {@link Closeable} are closed afterward
     * @throws Exception if an error occurs or any task fails
     */
    public void testParallelTransactions(KVDatabase[] stores) throws Exception {
        this.log.info("starting testParallelTransactions() on " + Arrays.asList(stores));
        for (int count = 0; count < 25; count++) {
            this.log.info("starting testParallelTransactions() iteration " + count);
            final RandomTask[] tasks = new RandomTask[25];
            for (int i = 0; i < tasks.length; i++) {
                // Each task hits a randomly chosen store with its own random seed
                tasks[i] = new RandomTask(i, stores[this.random.nextInt(stores.length)], this.random.nextLong());
                tasks[i].start();
            }
            for (int i = 0; i < tasks.length; i++)
                tasks[i].join();
            for (int i = 0; i < tasks.length; i++) {
                final Throwable fail = tasks[i].getFail();
                if (fail != null)
                    throw new Exception("task #" + i + " failed: >>>" + this.show(fail).trim() + "<<<");
            }
            this.log.info("finished testParallelTransactions() iteration " + count);
        }
        this.log.info("finished testParallelTransactions() on " + Arrays.asList(stores));
        for (KVDatabase store : stores) {
            if (store instanceof Closeable)
                ((Closeable)store).close();
        }
    }
    /**
     * This test runs transactions sequentially and verifies that each transaction sees
     * the changes that were committed in the previous transaction.
     *
     * @param store underlying store
     * @throws Exception if an error occurs
     */
    @Test(dataProvider = "kvdbs")
    public void testSequentialTransactions(KVDatabase store) throws Exception {
        this.log.info("starting testSequentialTransactions() on " + store);

        // Clear database
        this.tryNtimes(store, new Transactional<Void>() {
            @Override
            public Void transact(KVTransaction tx) {
                tx.removeRange(null, null);
                return null;
            }
        });

        // Keep an in-memory record of what is in the committed database
        final TreeMap<byte[], byte[]> committedData = new TreeMap<byte[], byte[]>(ByteUtil.COMPARATOR);

        // Run transactions; each RandomTask cross-checks the database against committedData
        for (int i = 0; i < 50; i++) {
            final RandomTask task = new RandomTask(i, store, committedData, this.random.nextLong());
            task.run();
            final Throwable fail = task.getFail();
            if (fail != null)
                throw new Exception("task #" + i + " failed: >>>" + this.show(fail).trim() + "<<<");
        }
        this.log.info("finished testSequentialTransactions() on " + store);
    }
protected <V> V tryNtimes(KVDatabase kvdb, Transactional<V> transactional) {
RetryTransactionException retry = null;
for (int count = 0; count < this.getNumTries(); count++) {
final KVTransaction tx = kvdb.createTransaction();
try {
final V result = transactional.transact(tx);
this.numTransactionAttempts.incrementAndGet();
tx.commit();
return result;
} catch (RetryTransactionException e) {
this.updateRetryStats(e);
KVDatabaseTest.this.log.debug("attempt #" + (count + 1) + " yeilded " + e);
retry = e;
}
try {
Thread.sleep(100 + count * 200);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
throw retry;
}
    /**
     * Get the maximum number of attempts {@link #tryNtimes tryNtimes()} should make.
     *
     * @return maximum transaction attempts; default 3
     */
    protected int getNumTries() {
        return 3;
    }
    /**
     * Callback interface: a unit of work to execute within a {@link KVTransaction}.
     *
     * @param <V> result type returned by the work
     */
    protected interface Transactional<V> {
        V transact(KVTransaction kvt);
    }
protected void updateRetryStats(RetryTransactionException e) {
this.numTransactionRetries.incrementAndGet();
String message = e.getMessage();
if (message != null)
message = this.mapRetryExceptionMessage(message);
synchronized (this) {
AtomicInteger counter = this.retryReasons.get(message);
if (counter == null) {
counter = new AtomicInteger();
this.retryReasons.put(message, counter);
}
counter.incrementAndGet();
}
}
protected String mapRetryExceptionMessage(String message) {
return message.replaceAll("[0-9]+", "NNN");
}
// RandomTask
public class RandomTask extends Thread {
private final int id;
private final KVDatabase store;
private final Random random;
private final TreeMap<byte[], byte[]> committedData; // tracks actual committed data, if known
private final NavigableMap<String, String> committedDataView;
private final ArrayList<String> log = new ArrayList<>(1000);
private Throwable fail;
public RandomTask(int id, KVDatabase store, long seed) {
this(id, store, null, seed);
}
public RandomTask(int id, KVDatabase store, TreeMap<byte[], byte[]> committedData, long seed) {
super("Random[" + id + "]");
this.id = id;
this.store = store;
this.committedData = committedData;
this.committedDataView = stringView(this.committedData);
this.random = new Random(seed);
this.log("seed = " + seed);
}
@Override
public void run() {
KVDatabaseTest.this.log.debug("*** " + this + " STARTING");
try {
this.test();
this.log("succeeded");
} catch (Throwable t) {
final StringWriter buf = new StringWriter();
t.printStackTrace(new PrintWriter(buf, true));
this.log("failed: " + t + "\n" + buf.toString());
this.fail = t;
} finally {
KVDatabaseTest.this.log.debug("*** " + this + " FINISHED");
this.dumpLog(this.fail != null);
}
}
public Throwable getFail() {
return this.fail;
}
/**
 * Perform a random sequence of gets, puts, removes, range removes, and counter adjustments
 * in a fresh transaction, verifying after each step that the transaction's contents agree
 * with what we know must (and must not) be present; then randomly commit or roll back and,
 * when the committed database contents are tracked, verify them afterward.
 *
 * <p>Fix: the "get returned wrong value" assertion message contained a duplicated
 * {@code "\n emptys="} fragment, producing a garbled diagnostic.
 *
 * @throws Exception if an error occurs or a consistency check fails
 */
@SuppressWarnings("unchecked")
private void test() throws Exception {

    // Keep track of key/value pairs that we know should exist in the transaction
    final TreeMap<byte[], byte[]> knownValues = new TreeMap<>(ByteUtil.COMPARATOR);
    final NavigableMap<String, String> knownValuesView = stringView(knownValues);
    final TreeSet<byte[]> putValues = new TreeSet<>(ByteUtil.COMPARATOR);
    final NavigableSet<String> putValuesView = stringView(putValues);

    // Keep track of known empty ranges
    final KeyRanges knownEmpty = new KeyRanges();

    // Create transaction
    final KVTransaction tx = this.store.createTransaction();
    KVDatabaseTest.this.log.debug("*** CREATED TX " + tx);

    // Load actual committed database contents (if known) into "known values" tracker
    if (this.committedData != null)
        knownValues.putAll(this.committedData);

    // Save a copy of committed data
    final TreeMap<byte[], byte[]> previousCommittedData = this.committedData != null ?
      (TreeMap<byte[], byte[]>)this.committedData.clone() : null;
    //final NavigableMap<String, String> previousCommittedDataView = stringView(previousCommittedData);

    // Verify committed data is accurate before starting
    if (this.committedData != null)
        Assert.assertEquals(stringView(this.readDatabase(tx)), knownValuesView);

    // Note: if this.committedData != null, then knownValues will exactly track the transaction, otherwise,
    // knownValues only contains values we know are in there; nothing is known about uncontained values.

    // Make a bunch of random changes
    Boolean committed = null;
    try {
        final int limit = this.r(1000);
        for (int j = 0; j < limit; j++) {
            byte[] key;
            byte[] val;
            byte[] min;
            byte[] max;
            KVPair pair;
            int option = this.r(62);
            boolean knownValuesChanged = false;
            if (option < 10) {                                      // get
                key = this.rb(1, false);
                val = tx.get(key);
                this.log("get: " + s(key) + " -> " + s(val));
                if (val == null) {
                    assert !knownValues.containsKey(key) :
                      this + ": get(" + s(key) + ") returned null but"
                      + "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
                      + "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
                    knownEmpty.add(new KeyRange(key));
                    knownValuesChanged = true;
                } else if (knownValues.containsKey(key)) {
                    assert s(knownValues.get(key)).equals(s(val)) :
                      this + ": get(" + s(key) + ") returned " + s(val) + " but"
                      + "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
                      + "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
                } else {
                    knownValues.put(key, val);
                    knownValuesChanged = true;
                }
                assert val == null || !knownEmpty.contains(key) :
                  this + ": get(" + s(key) + ") returned " + s(val) + " but"
                  + "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
                  + "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
            } else if (option < 20) {                               // put
                key = this.rb(1, false);
                val = this.rb(2, true);
                this.log("put: " + s(key) + " -> " + s(val));
                tx.put(key, val);
                knownValues.put(key, val);
                putValues.add(key);
                knownEmpty.remove(new KeyRange(key));
                knownValuesChanged = true;
            } else if (option < 30) {                               // getAtLeast
                min = this.rb(1, true);
                pair = tx.getAtLeast(min);
                this.log("getAtLeast: " + s(min) + " -> " + s(pair));
                if (pair == null) {
                    assert knownValues.tailMap(min).isEmpty() :
                      this + ": getAtLeast(" + s(min) + ") returned " + null + " but"
                      + "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
                      + "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
                } else if (knownValues.containsKey(pair.getKey()))
                    assert s(knownValues.get(pair.getKey())).equals(s(pair.getValue())) :
                      this + ": getAtLeast(" + s(min) + ") returned " + pair + " but"
                      + "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
                      + "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
                else
                    knownValues.put(pair.getKey(), pair.getValue());
                assert pair == null || !knownEmpty.contains(pair.getKey()) :
                  this + ": getAtLeast(" + s(min) + ") returned " + pair + " but"
                  + "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
                  + "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
                // Everything from min up to (but not including) the returned key is now known-empty
                knownEmpty.add(new KeyRange(min, pair != null ? pair.getKey() : null));
                knownValuesChanged = true;
            } else if (option < 40) {                               // getAtMost
                max = this.rb(1, true);
                pair = tx.getAtMost(max);
                this.log("getAtMost: " + s(max) + " -> " + s(pair));
                if (pair == null) {
                    assert knownValues.headMap(max).isEmpty() :
                      this + ": getAtMost(" + s(max) + ") returned " + null + " but"
                      + "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
                      + "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
                } else if (knownValues.containsKey(pair.getKey()))
                    assert s(knownValues.get(pair.getKey())).equals(s(pair.getValue())) :
                      this + ": getAtMost(" + s(max) + ") returned " + pair + " but"
                      + "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
                      + "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
                else
                    knownValues.put(pair.getKey(), pair.getValue());
                assert pair == null || !knownEmpty.contains(pair.getKey()) :
                  this + ": getAtMost(" + s(max) + ") returned " + pair + " but"
                  + "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
                  + "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
                // Everything after the returned key up to (and including) max is now known-empty
                knownEmpty.add(new KeyRange(pair != null ? ByteUtil.getNextKey(pair.getKey()) : ByteUtil.EMPTY, max));
                knownValuesChanged = true;
            } else if (option < 50) {                               // remove
                key = this.rb(1, false);
                if (this.r(5) == 0 && (pair = tx.getAtLeast(this.rb(1, false))) != null)
                    key = pair.getKey();
                this.log("remove: " + s(key));
                tx.remove(key);
                knownValues.remove(key);
                putValues.remove(key);
                knownEmpty.add(new KeyRange(key));
                knownValuesChanged = true;
            } else if (option < 52) {                               // removeRange
                min = this.rb2(2, 20);
                do {
                    max = this.rb2(2, 30);
                } while (max != null && min != null && ByteUtil.COMPARATOR.compare(min, max) > 0);
                this.log("removeRange: " + s(min) + " to " + s(max));
                tx.removeRange(min, max);
                if (min == null && max == null) {
                    knownValues.clear();
                    putValues.clear();
                } else if (min == null) {
                    knownValues.headMap(max).clear();
                    putValues.headSet(max).clear();
                } else if (max == null) {
                    knownValues.tailMap(min).clear();
                    putValues.tailSet(min).clear();
                } else {
                    knownValues.subMap(min, max).clear();
                    putValues.subSet(min, max).clear();
                }
                knownEmpty.add(new KeyRange(min != null ? min : ByteUtil.EMPTY, max));
                knownValuesChanged = true;
            } else if (option < 60) {                               // adjustCounter
                key = this.rb(1, false);
                key[0] = (byte)(key[0] & 0x0f);
                val = tx.get(key);
                long counter = -1;
                if (val != null) {
                    try {
                        counter = tx.decodeCounter(val);
                        this.log("adj: found valid value " + s(val) + " (" + counter + ") at key " + s(key));
                    } catch (IllegalArgumentException e) {
                        this.log("adj: found bogus value " + s(val) + " at key " + s(key));
                        val = null;
                    }
                }
                if (val == null) {
                    counter = this.random.nextLong();
                    final byte[] encodedCounter = tx.encodeCounter(counter);
                    tx.put(key, encodedCounter);
                    putValues.add(key);
                    this.log("adj: initialize " + s(key) + " to " + s(encodedCounter));
                }
                final long adj = this.random.nextInt(1 << this.random.nextInt(24)) - 1024;
                final byte[] encodedCounter = tx.encodeCounter(counter + adj);
                this.log("adj: " + s(key) + " by " + adj + " -> should now be " + s(encodedCounter));
                tx.adjustCounter(key, adj);
                knownValues.put(key, encodedCounter);
                knownEmpty.remove(new KeyRange(key));
                knownValuesChanged = true;
            } else {                                                // sleep
                final int millis = this.r(50);
                this.log("sleep " + millis + "ms");
                try {
                    Thread.sleep(millis);
                } catch (InterruptedException e) {
                    // ignore
                }
            }
            if (knownValuesChanged) {
                this.log("new values:"
                  + "\n knowns=" + knownValuesView + "\n puts=" + putValuesView + "\n emptys=" + knownEmpty);
            }

            // Verify everything we know to be there is there
            for (Map.Entry<byte[], byte[]> entry : knownValues.entrySet()) {
                final byte[] knownKey = entry.getKey();
                final byte[] expected = entry.getValue();
                final byte[] actual = tx.get(knownKey);
                assert actual != null && ByteUtil.compare(actual, expected) == 0 :
                  this + ": tx has " + s(actual) + " for key " + s(knownKey) + " but"
                  + "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
                  + "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
            }

            // Verify everything we know not to be there is not there
            final Iterator<KVPair> iter = tx.getRange(null, null, false);
            while (iter.hasNext()) {
                pair = iter.next();
                assert !knownEmpty.contains(pair.getKey()) :
                  this + ": tx contains " + pair + " but"
                  + "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
                  + "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
            }
            if (iter instanceof Closeable)
                ((Closeable)iter).close();
        }

        // Maybe commit
        final boolean rollback = this.r(5) == 3;
        KVDatabaseTest.this.log.debug("*** " + (rollback ? "ROLLING BACK" : "COMMITTING") + " TX " + tx);
        this.log("about to " + (rollback ? "rollback" : "commit") + ":"
          + "\n knowns=" + knownValuesView + "\n puts=" + putValuesView + "\n emptys=" + knownEmpty
          + "\n committed: " + committedDataView);
        if (rollback) {
            tx.rollback();
            committed = false;
            this.log("rolled-back");
        } else {
            try {
                KVDatabaseTest.this.numTransactionAttempts.incrementAndGet();
                tx.commit();
            } catch (RetryTransactionException e) {
                KVDatabaseTest.this.updateRetryStats(e);
                throw e;
            }
            committed = true;
            KVDatabaseTest.this.log.debug("*** COMMITTED TX " + tx);
            this.log("committed");
        }
    } catch (TransactionTimeoutException e) {
        KVDatabaseTest.this.log.debug("*** TX " + tx + " THREW " + e);
        this.log("got " + e);
        committed = false;
    } catch (RetryTransactionException e) {          // might have committed, might not have, we don't know for sure
        KVDatabaseTest.this.log.debug("*** TX " + tx + " THREW " + e);
        this.log("got " + e);
    }

    // Doing this should always be allowed and shouldn't affect anything
    tx.rollback();

    // Verify committed database contents are now equal to what's expected
    if (this.committedData != null) {

        // Read actual content
        final TreeMap<byte[], byte[]> actual = new TreeMap<byte[], byte[]>(ByteUtil.COMPARATOR);
        final NavigableMap<String, String> actualView = stringView(actual);
        actual.putAll(this.readDatabase());

        // Update what we think is in the database and then compare to actual content
        if (Boolean.TRUE.equals(committed)) {

            // Verify
            this.log("tx was definitely committed");
            assert actualView.equals(knownValuesView) :
              this + "\n*** ACTUAL:\n" + actualView + "\n*** EXPECTED:\n" + knownValuesView + "\n";
        } else if (Boolean.FALSE.equals(committed)) {

            // Verify
            this.log("tx was definitely rolled back");
            assert actualView.equals(committedDataView) :
              this + "\n*** ACTUAL:\n" + actualView + "\n*** EXPECTED:\n" + committedDataView + "\n";
        } else {

            // We don't know whether transaction got committed or not .. check both possibilities
            final boolean matchCommit = actualView.equals(knownValuesView);
            final boolean matchRollback = actualView.equals(committedDataView);
            this.log("tx was either committed (" + matchCommit + ") or rolled back (" + matchRollback + ")");

            // Verify one or the other
            assert matchCommit || matchRollback :
              this + "\n*** ACTUAL:\n" + actualView
              + "\n*** COMMIT:\n" + knownValuesView
              + "\n*** ROLLBACK:\n" + committedDataView + "\n";
            committed = matchCommit;
        }

        // Update model of database
        if (committed) {
            this.committedData.clear();
            this.committedData.putAll(knownValues);
        }
    }
}
/**
 * Read the entire committed contents of the database, retrying as needed.
 *
 * @return snapshot of all committed key/value pairs, sorted by key
 */
private TreeMap<byte[], byte[]> readDatabase() {
    final Transactional<TreeMap<byte[], byte[]>> reader = new Transactional<TreeMap<byte[], byte[]>>() {
        @Override
        public TreeMap<byte[], byte[]> transact(KVTransaction tx) {
            return RandomTask.this.readDatabase(tx);
        }
    };
    return KVDatabaseTest.this.tryNtimes(this.store, reader);
}
/**
 * Snapshot the entire contents of the given store into a sorted map.
 *
 * @param tx store to read
 * @return all key/value pairs in {@code tx}, sorted by key
 */
private TreeMap<byte[], byte[]> readDatabase(KVStore tx) {
    final TreeMap<byte[], byte[]> contents = new TreeMap<byte[], byte[]>(ByteUtil.COMPARATOR);
    final Iterator<KVPair> iterator = tx.getRange(null, null, false);
    while (iterator.hasNext()) {
        final KVPair kv = iterator.next();
        contents.put(kv.getKey(), kv.getValue());
    }
    // Best-effort close; some iterators hold resources
    if (iterator instanceof AutoCloseable) {
        try {
            ((AutoCloseable)iterator).close();
        } catch (Exception e) {
            // ignore
        }
    }
    return contents;
}
/**
 * Render the complete contents of the given store as a "{key=value, ...}" string.
 *
 * <p>Fix: the separator was only emitted when {@code buf.length() > 8}, which silently
 * omits the ", " after a very short first entry; track first-entry state explicitly instead.
 *
 * @param kv store to dump
 * @return string rendering of all key/value pairs in {@code kv}
 */
private String toString(KVStore kv) {
    final StringBuilder buf = new StringBuilder();
    buf.append('{');
    final Iterator<KVPair> i = kv.getRange(null, null, false);
    boolean first = true;
    while (i.hasNext()) {
        final KVPair pair = i.next();
        if (!first)
            buf.append(", ");
        first = false;
        buf.append(ByteUtil.toString(pair.getKey())).append('=').append(ByteUtil.toString(pair.getValue()));
    }
    // Best-effort close; some iterators hold resources
    if (i instanceof AutoCloseable) {
        try {
            ((AutoCloseable)i).close();
        } catch (Exception e) {
            // ignore
        }
    }
    buf.append('}');
    return buf.toString();
}
/**
 * Append an entry to this task's in-memory event log (later emitted by {@code dumpLog}).
 *
 * @param s log entry
 */
private void log(String s) {
    this.log.add(s);
}
/**
 * Emit this task's accumulated event log to the test logger.
 *
 * @param force true to log even when trace logging is disabled (used when the task failed)
 */
private void dumpLog(boolean force) {
    if (force || KVDatabaseTest.this.log.isTraceEnabled()) {
        // Serialize dumps from concurrent tasks so their logs don't interleave
        synchronized (KVDatabaseTest.this) {
            final StringBuilder dump = new StringBuilder(this.log.size() * 40);
            for (String entry : this.log)
                dump.append(entry).append('\n');
            KVDatabaseTest.this.log.debug("*** BEGIN " + this + " LOG ***\n\n{}\n*** END " + this + " LOG ***", dump);
        }
    }
}
/**
 * Get a uniformly random int in {@code [0, max)}.
 *
 * @param max exclusive upper bound
 * @return random value
 */
private int r(int max) {
    return this.random.nextInt(max);
}
/**
 * Generate a random byte array of length 1 to {@code len}.
 *
 * @param len maximum length
 * @param allowFF whether the first byte may be 0xff
 * @return random, non-empty byte array
 */
private byte[] rb(int len, boolean allowFF) {
    final byte[] bytes = new byte[this.r(len) + 1];
    this.random.nextBytes(bytes);
    if (!allowFF && bytes[0] == (byte)0xff)
        bytes[0] = (byte)this.random.nextInt(0xff);     // re-draw first byte from [0, 0xff)
    return bytes;
}
/**
 * Generate a random byte array, or null with probability 1/{@code nullchance}.
 *
 * @param len maximum length
 * @param nullchance inverse probability of returning null
 * @return random byte array or null
 */
private byte[] rb2(int len, int nullchance) {
    return this.r(nullchance) != 0 ? this.rb(len, true) : null;
}
/**
 * Render this task as e.g. "Random[7]".
 */
@Override
public String toString() {
    return String.format("Random[%d]", this.id);
}
}
// Reader
/**
 * Task which reads a single key, or the first pair at/after a key, from a transaction.
 */
public class Reader implements Callable<byte[]> {

    final KVTransaction tx;
    final byte[] key;
    final boolean range;

    /**
     * Constructor.
     *
     * @param tx transaction to read from
     * @param key key to read, or minimum key when {@code range}
     * @param range true to read the first pair at or after {@code key}
     */
    public Reader(KVTransaction tx, byte[] key, boolean range) {
        this.tx = tx;
        this.key = key;
        this.range = range;
    }

    /**
     * Convenience constructor for a plain (non-range) read.
     *
     * @param tx transaction to read from
     * @param key key to read
     */
    public Reader(KVTransaction tx, byte[] key) {
        this(tx, key, false);
    }

    @Override
    public byte[] call() {
        if (!this.range) {
            if (KVDatabaseTest.this.log.isTraceEnabled())
                KVDatabaseTest.this.log.trace("reading " + s(this.key) + " in " + this.tx);
            final byte[] result = this.tx.get(this.key);
            KVDatabaseTest.this.log.info("finished reading " + s(this.key) + " -> " + s(result) + " in " + this.tx);
            return result;
        }
        if (KVDatabaseTest.this.log.isTraceEnabled())
            KVDatabaseTest.this.log.trace("reading at least " + s(this.key) + " in " + this.tx);
        final KVPair kvPair = this.tx.getAtLeast(this.key);
        KVDatabaseTest.this.log.info("finished reading at least " + s(this.key) + " -> " + kvPair + " in " + this.tx);
        return kvPair != null ? kvPair.getValue() : null;
    }
}
// Writer
/**
 * Task which writes one key/value pair into a transaction, logging and rethrowing any failure.
 */
public class Writer implements Runnable {

    final KVTransaction tx;
    final byte[] key;
    final byte[] value;

    /**
     * Constructor.
     *
     * @param tx transaction to write into
     * @param key key to write
     * @param value value to write
     */
    public Writer(KVTransaction tx, byte[] key, byte[] value) {
        this.tx = tx;
        this.key = key;
        this.value = value;
    }

    @Override
    public void run() {
        try {
            KVDatabaseTest.this.log.info("putting " + s(this.key) + " -> " + s(this.value) + " in " + this.tx);
            this.tx.put(this.key, this.value);
        } catch (RuntimeException e) {
            // Log write-time conflicts at INFO so they are visible in test output, then rethrow
            KVDatabaseTest.this.log.info("exception putting " + s(this.key) + " -> " + s(this.value)
              + " in " + this.tx + ": " + e);
            if (KVDatabaseTest.this.log.isTraceEnabled()) {
                KVDatabaseTest.this.log.trace(this.tx + " put " + s(this.key) + " -> " + s(this.value)
                  + " failure exception trace:", e);
            }
            throw e;
        }
    }
}
// Committer
/**
 * Task which commits a transaction, updating retry statistics and logging and
 * rethrowing any failure.
 */
public class Committer implements Runnable {

    final KVTransaction tx;

    /**
     * Constructor.
     *
     * @param tx transaction to commit
     */
    public Committer(KVTransaction tx) {
        this.tx = tx;
    }

    @Override
    public void run() {
        try {
            KVDatabaseTest.this.log.info("committing " + this.tx);
            KVDatabaseTest.this.numTransactionAttempts.incrementAndGet();
            this.tx.commit();
        } catch (RuntimeException e) {
            // Count retries toward the overall statistics before propagating
            if (e instanceof RetryTransactionException)
                KVDatabaseTest.this.updateRetryStats((RetryTransactionException)e);
            KVDatabaseTest.this.log.info("exception committing " + this.tx + ": " + e);
            if (KVDatabaseTest.this.log.isTraceEnabled())
                KVDatabaseTest.this.log.trace(this.tx + " commit failure exception trace:", e);
            throw e;
        }
    }
}
}
|
jsimpledb-kv-test/src/main/java/org/jsimpledb/kv/test/KVDatabaseTest.java
|
/*
* Copyright (C) 2015 Archie L. Cobbs. All rights reserved.
*/
package org.jsimpledb.kv.test;
import java.io.Closeable;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.Random;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.jsimpledb.kv.KVDatabase;
import org.jsimpledb.kv.KVPair;
import org.jsimpledb.kv.KVStore;
import org.jsimpledb.kv.KVTransaction;
import org.jsimpledb.kv.KeyRange;
import org.jsimpledb.kv.KeyRanges;
import org.jsimpledb.kv.RetryTransactionException;
import org.jsimpledb.kv.StaleTransactionException;
import org.jsimpledb.kv.TransactionTimeoutException;
import org.jsimpledb.util.ByteUtil;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
public abstract class KVDatabaseTest extends KVTestSupport {
// Shared thread pool used to run Reader/Writer/Committer tasks concurrently
protected ExecutorService executor;

// Statistics: total commit attempts and how many failed with a retry (reported in teardown())
private final AtomicInteger numTransactionAttempts = new AtomicInteger();
private final AtomicInteger numTransactionRetries = new AtomicInteger();
/**
 * Start all databases under test, create the shared thread pool, and reset retry statistics.
 *
 * @throws Exception if startup fails
 */
@BeforeClass(dependsOnGroups = "configure")
public void setup() throws Exception {
    this.numTransactionAttempts.set(0);
    this.numTransactionRetries.set(0);
    this.executor = Executors.newFixedThreadPool(33);
    for (KVDatabase[] kvdb : this.getDBs()) {
        if (kvdb.length > 0)
            kvdb[0].start();
    }
}
/**
 * Stop all databases, shut down the thread pool, and report the overall transaction retry rate.
 *
 * <p>Fix: guard against division by zero (which previously reported NaN) when no
 * transactions were attempted.
 *
 * @throws Exception if shutdown fails
 */
@AfterClass
public void teardown() throws Exception {
    this.executor.shutdown();
    for (KVDatabase[] kvdb : this.getDBs()) {
        if (kvdb.length > 0)
            kvdb[0].stop();
    }
    final int attempts = this.numTransactionAttempts.get();
    final int retries = this.numTransactionRetries.get();
    final double retryRate = attempts != 0 ? (double)retries / (double)attempts : 0.0;
    this.log.info(String.format("%n%n****************%nRetry rate: %.2f%% (%d / %d)%n****************%n",
      retryRate * 100.0, retries, attempts));
}
/**
 * Provide the databases to test; by default, the single database from {@link #getKVDatabase}.
 *
 * @return array of one-element database arrays, empty if none configured
 */
@DataProvider(name = "kvdbs")
protected KVDatabase[][] getDBs() {
    final KVDatabase kvdb = this.getKVDatabase();
    if (kvdb == null)
        return new KVDatabase[0][];
    return new KVDatabase[][] { { kvdb } };
}
protected abstract KVDatabase getKVDatabase();
/**
 * Smoke test: clear the database, verify emptiness, perform three sequential
 * read/write transactions, and verify a committed transaction becomes stale.
 *
 * @param store database under test
 * @throws Exception if an error occurs
 */
@Test(dataProvider = "kvdbs")
public void testSimpleStuff(KVDatabase store) throws Exception {

    // Debug
    this.log.info("starting testSimpleStuff() on " + store);

    // Clear database
    this.log.info("testSimpleStuff() on " + store + ": clearing database");
    this.tryNtimes(store, new Transactional<Void>() {
        @Override
        public Void transact(KVTransaction tx) {
            tx.removeRange(null, null);
            return null;
        }
    });
    this.log.info("testSimpleStuff() on " + store + ": done clearing database");

    // Verify database is empty
    this.log.info("testSimpleStuff() on " + store + ": verifying database is empty");
    this.tryNtimes(store, new Transactional<Void>() {
        @Override
        public Void transact(KVTransaction tx) {
            KVPair p = tx.getAtLeast(null);
            Assert.assertNull(p);
            p = tx.getAtMost(null);
            Assert.assertNull(p);
            Iterator<KVPair> it = tx.getRange(null, null, false);
            Assert.assertFalse(it.hasNext());
            return null;
        }
    });
    this.log.info("testSimpleStuff() on " + store + ": done verifying database is empty");

    // tx 1
    this.log.info("testSimpleStuff() on " + store + ": starting tx1");
    this.tryNtimes(store, new Transactional<Void>() {
        @Override
        public Void transact(KVTransaction tx) {
            final byte[] x = tx.get(b("01"));
            if (x != null)
                Assert.assertEquals(tx.get(b("01")), b("02"));      // transaction was retried even though it succeeded
            tx.put(b("01"), b("02"));
            Assert.assertEquals(tx.get(b("01")), b("02"));
            return null;
        }
    });
    this.log.info("testSimpleStuff() on " + store + ": committed tx1");

    // tx 2
    this.log.info("testSimpleStuff() on " + store + ": starting tx2");
    this.tryNtimes(store, new Transactional<Void>() {
        @Override
        public Void transact(KVTransaction tx) {
            final byte[] x = tx.get(b("01"));
            Assert.assertNotNull(x);
            // Value may be "03" if a previous attempt of this transaction already committed
            Assert.assertTrue(Arrays.equals(x, b("02")) || Arrays.equals(x, b("03")));
            tx.put(b("01"), b("03"));
            Assert.assertEquals(tx.get(b("01")), b("03"));
            return null;
        }
    });
    this.log.info("testSimpleStuff() on " + store + ": committed tx2");

    // tx 3
    this.log.info("testSimpleStuff() on " + store + ": starting tx3");
    this.tryNtimes(store, new Transactional<Void>() {
        @Override
        public Void transact(KVTransaction tx) {
            final byte[] x = tx.get(b("01"));
            Assert.assertEquals(x, b("03"));
            tx.put(b("10"), b("01"));
            return null;
        }
    });
    this.log.info("testSimpleStuff() on " + store + ": committed tx3");

    // Check stale access
    this.log.info("testSimpleStuff() on " + store + ": checking stale access");
    final KVTransaction tx = this.tryNtimes(store, new Transactional<KVTransaction>() {
        @Override
        public KVTransaction transact(KVTransaction tx) {
            return tx;      // leak the (now committed) transaction so we can access it afterward
        }
    });
    try {
        tx.get(b("01"));
        assert false;
    } catch (StaleTransactionException e) {
        // expected
    }
    this.log.info("finished testSimpleStuff() on " + store);
}
/**
 * Verify that key watches fire within one second for each type of modification
 * (put, overwrite, counter encode/adjust, range remove, empty value, remove);
 * bails out if the database does not support key watches.
 *
 * @param store database under test
 * @throws Exception if an error occurs
 */
@Test(dataProvider = "kvdbs")
public void testKeyWatch(KVDatabase store) throws Exception {

    // Debug
    this.log.info("starting testKeyWatch() on " + store);

    // Clear database
    this.log.info("testKeyWatch() on " + store + ": clearing database");
    this.tryNtimes(store, new Transactional<Void>() {
        @Override
        public Void transact(KVTransaction tx) {
            tx.removeRange(null, null);
            return null;
        }
    });
    this.log.info("testKeyWatch() on " + store + ": done clearing database");

    // Set up the modifications we want to test
    final ArrayList<Transactional<Void>> mods = new ArrayList<>();
    mods.add(new Transactional<Void>() {
        @Override
        public Void transact(KVTransaction tx) {
            tx.put(b("0123"), b("4567"));
            return null;
        }
    });
    mods.add(new Transactional<Void>() {
        @Override
        public Void transact(KVTransaction tx) {
            tx.put(b("0123"), b("89ab"));
            return null;
        }
    });
    mods.add(new Transactional<Void>() {
        @Override
        public Void transact(KVTransaction tx) {
            tx.put(b("0123"), tx.encodeCounter(1234));
            return null;
        }
    });
    mods.add(new Transactional<Void>() {
        @Override
        public Void transact(KVTransaction tx) {
            tx.adjustCounter(b("0123"), 99);
            return null;
        }
    });
    mods.add(new Transactional<Void>() {
        @Override
        public Void transact(KVTransaction tx) {
            tx.removeRange(b("01"), b("02"));
            return null;
        }
    });
    mods.add(new Transactional<Void>() {
        @Override
        public Void transact(KVTransaction tx) {
            tx.put(b("0123"), b(""));
            return null;
        }
    });
    mods.add(new Transactional<Void>() {
        @Override
        public Void transact(KVTransaction tx) {
            tx.remove(b("0123"));
            return null;
        }
    });

    // Set watches, perform modifications, and test notifications
    for (Transactional<Void> mod : mods) {

        // Set watch
        this.log.info("testKeyWatch() on " + store + ": creating key watch for " + mod);
        final Future<Void> watch = this.tryNtimes(store, new Transactional<Future<Void>>() {
            @Override
            public Future<Void> transact(KVTransaction tx) {
                try {
                    return tx.watchKey(b("0123"));
                } catch (UnsupportedOperationException e) {
                    return null;    // signals that key watches are unsupported
                }
            }
        });
        if (watch == null) {
            this.log.info("testKeyWatch() on " + store + ": key watches not supported, bailing out");
            return;
        }
        this.log.info("testKeyWatch() on " + store + ": created key watch: " + watch);

        // Perform modification
        this.log.info("testKeyWatch() on " + store + ": testing " + mod);
        this.tryNtimes(store, mod);

        // Get notification
        this.log.info("testKeyWatch() on " + store + ": waiting for notification");
        final long start = System.nanoTime();
        watch.get(1, TimeUnit.SECONDS);
        this.log.info("testKeyWatch() on " + store + ": got notification in " + ((System.nanoTime() - start) / 1000000) + "ms");
    }

    // Done
    this.log.info("finished testKeyWatch() on " + store);
}
/**
 * Start two transactions that both read then write the same key with different values,
 * and verify that (in general) exactly one fails — either at write time or at commit
 * time — and that the surviving value is the one written by the successful transaction.
 *
 * @param store database under test
 * @throws Exception if an error occurs
 */
@Test(dataProvider = "kvdbs")
public void testConflictingTransactions(KVDatabase store) throws Exception {

    // Clear database
    this.log.info("starting testConflictingTransactions() on " + store);
    this.tryNtimes(store, new Transactional<Void>() {
        @Override
        public Void transact(KVTransaction tx) {
            tx.removeRange(null, null);
            return null;
        }
    });

    // Both read the same key
    final KVTransaction[] txs = new KVTransaction[] { store.createTransaction(), store.createTransaction() };
    this.log.info("tx[0] is " + txs[0]);
    this.log.info("tx[1] is " + txs[1]);
    this.executor.submit(new Reader(txs[0], b("10"))).get();
    this.executor.submit(new Reader(txs[1], b("10"))).get();

    // Both write to the same key but with different values
    final String[] fails = new String[] { "uninitialized status", "uninitialized status" };
    Future<?>[] futures = new Future<?>[] {
      this.executor.submit(new Writer(txs[0], b("10"), b("01"))),
      this.executor.submit(new Writer(txs[1], b("10"), b("02")))
    };

    // See what happened - we might have gotten a conflict at write time
    for (int i = 0; i < 2; i++) {
        try {
            futures[i].get();
            this.log.info(txs[i] + " #" + (i + 1) + " succeeded on write");
            fails[i] = null;
        } catch (Exception e) {
            // Unwrap the executor's ExecutionException to get the real cause
            while (e instanceof ExecutionException)
                e = (Exception)e.getCause();
            if (!(e instanceof RetryTransactionException))
                throw new AssertionError("wrong exception type: " + e, e);
            final RetryTransactionException retry = (RetryTransactionException)e;
            Assert.assertSame(retry.getTransaction(), txs[i]);
            this.log.info(txs[i] + " #" + (i + 1) + " failed on write");
            if (this.log.isTraceEnabled())
                this.log.trace(txs[i] + " #" + (i + 1) + " write failure exception trace:", e);
            fails[i] = "" + e;
        }
    }

    // Show contents of surviving transactions; note exception(s) could occur here also
    for (int i = 0; i < 2; i++) {
        if (fails[i] == null) {
            final Exception e = this.showKV(txs[i], "tx[" + i + "] of " + store + " after write");
            if (e != null)
                fails[i] = "" + e;
        }
    }

    // If both succeeded, then we should get a conflict on commit instead
    for (int i = 0; i < 2; i++) {
        if (fails[i] == null)
            futures[i] = this.executor.submit(new Committer(txs[i]));
    }
    for (int i = 0; i < 2; i++) {
        if (fails[i] == null) {
            try {
                futures[i].get();
                this.log.info(txs[i] + " #" + (i + 1) + " succeeded on commit");
                fails[i] = null;
            } catch (AssertionError e) {
                throw e;
            } catch (Throwable e) {
                while (e instanceof ExecutionException)
                    e = e.getCause();
                assert e instanceof RetryTransactionException : "wrong exception type: " + e;
                final RetryTransactionException retry = (RetryTransactionException)e;
                Assert.assertSame(retry.getTransaction(), txs[i]);
                this.log.info(txs[i] + " #" + (i + 1) + " failed on commit");
                if (this.log.isTraceEnabled())
                    this.log.trace(txs[i] + " #" + (i + 1) + " commit failure exception trace:", e);
                fails[i] = "" + e;
            }
        }
    }

    // Exactly one should have failed and one should have succeeded (for most databases)
    if (!this.allowBothTransactionsToFail()) {
        assert fails[0] == null || fails[1] == null : "both transactions failed:"
          + "\n  fails[0]: " + fails[0] + "\n  fails[1]: " + fails[1];
    }
    assert fails[0] != null || fails[1] != null : "both transactions succeeded";
    this.log.info("exactly one transaction failed:\n  fails[0]: " + fails[0] + "\n  fails[1]: " + fails[1]);

    // Verify the resulting change is consistent with the tx that succeeded
    final byte[] expected = fails[0] == null ? b("01") : fails[1] == null ? b("02") : null;
    if (expected != null) {
        final KVTransaction tx2 = store.createTransaction();
        this.showKV(tx2, "TX2 of " + store);
        byte[] x = this.executor.submit(new Reader(tx2, b("10"))).get();
        Assert.assertEquals(x, expected);
        tx2.rollback();
    }
    this.log.info("finished testConflictingTransactions() on " + store);
}
/**
 * Determine whether it is acceptable for both transactions in
 * {@code testConflictingTransactions} to fail (some databases may abort both).
 *
 * @return true if both conflicting transactions may fail; default false
 */
protected boolean allowBothTransactionsToFail() {
    return false;
}
/**
 * Run several concurrent transactions whose read ranges overlap but whose write
 * ranges are disjoint, retrying (with a fresh transaction) on any retry exception,
 * until all of them eventually commit.
 *
 * @param store database under test
 * @throws Exception if an error occurs
 */
@Test(dataProvider = "kvdbs")
public void testNonconflictingTransactions(KVDatabase store) throws Exception {

    // Clear database
    this.log.info("starting testNonconflictingTransactions() on " + store);
    this.tryNtimes(store, new Transactional<Void>() {
        @Override
        public Void transact(KVTransaction tx) {
            tx.removeRange(null, null);
            return null;
        }
    });

    // Multiple concurrent transactions with overlapping read ranges and non-intersecting write ranges
    int done = 0;
    KVTransaction[] txs = new KVTransaction[10];
    for (int i = 0; i < txs.length; i++)
        txs[i] = store.createTransaction();
    while (true) {
        boolean finished = true;
        for (int i = 0; i < txs.length; i++) {
            if (txs[i] == null)
                continue;
            finished = false;
            // Each tx reads near key i and writes to a key disjoint from all other writers
            Future<?> rf = this.executor.submit(new Reader(txs[i], new byte[] { (byte)i }, true));
            Future<?> wf = this.executor.submit(new Writer(txs[i], new byte[] { (byte)(i + 128) }, b("02")));
            for (Future<?> f : new Future<?>[] { rf, wf }) {
                try {
                    f.get();
                } catch (ExecutionException e) {
                    // On a retry exception, replace this slot with a fresh transaction and start over
                    if (e.getCause() instanceof RetryTransactionException) {
                        txs[i] = store.createTransaction();
                        break;
                    }
                    throw e;
                }
            }
        }
        if (finished)
            break;
        for (int i = 0; i < txs.length; i++) {
            if (txs[i] == null)
                continue;
            try {
                this.numTransactionAttempts.incrementAndGet();
                txs[i].commit();
            } catch (RetryTransactionException e) {
                this.numTransactionRetries.incrementAndGet();
                txs[i] = store.createTransaction();
                continue;
            }
            txs[i] = null;      // null slot marks a successfully committed transaction
        }
    }
    this.log.info("finished testNonconflictingTransactions() on " + store);
}
/**
* This test runs transactions in parallel and verifies there is no "leakage" between them.
* Database must be configured for linearizable isolation.
*
* @param store underlying store
* @throws Exception if an error occurs
*/
@Test(dataProvider = "kvdbs")
public void testParallelTransactions(KVDatabase store) throws Exception {
    // Delegate to the multi-store variant with a single store
    this.testParallelTransactions(new KVDatabase[] { store });
}
/**
 * Run 25 iterations of 25 concurrent {@code RandomTask}s spread randomly across the given
 * stores, failing on the first task error; closes any {@code Closeable} stores when done.
 *
 * @param stores underlying stores
 * @throws Exception if an error occurs or any task fails
 */
public void testParallelTransactions(KVDatabase[] stores) throws Exception {
    this.log.info("starting testParallelTransactions() on " + Arrays.asList(stores));
    for (int count = 0; count < 25; count++) {
        this.log.info("starting testParallelTransactions() iteration " + count);
        final RandomTask[] tasks = new RandomTask[25];
        for (int i = 0; i < tasks.length; i++) {
            tasks[i] = new RandomTask(i, stores[this.random.nextInt(stores.length)], this.random.nextLong());
            tasks[i].start();
        }
        for (int i = 0; i < tasks.length; i++)
            tasks[i].join();
        for (int i = 0; i < tasks.length; i++) {
            final Throwable fail = tasks[i].getFail();
            if (fail != null)
                throw new Exception("task #" + i + " failed: >>>" + this.show(fail).trim() + "<<<");
        }
        this.log.info("finished testParallelTransactions() iteration " + count);
    }
    this.log.info("finished testParallelTransactions() on " + Arrays.asList(stores));
    for (KVDatabase store : stores) {
        if (store instanceof Closeable)
            ((Closeable)store).close();
    }
}
/**
* This test runs transactions sequentially and verifies that each transaction sees
* the changes that were committed in the previous transaction.
*
* @param store underlying store
* @throws Exception if an error occurs
*/
@Test(dataProvider = "kvdbs")
public void testSequentialTransactions(KVDatabase store) throws Exception {
    this.log.info("starting testSequentialTransactions() on " + store);

    // Clear database
    this.tryNtimes(store, new Transactional<Void>() {
        @Override
        public Void transact(KVTransaction tx) {
            tx.removeRange(null, null);
            return null;
        }
    });

    // Keep an in-memory record of what is in the committed database
    final TreeMap<byte[], byte[]> committedData = new TreeMap<byte[], byte[]>(ByteUtil.COMPARATOR);

    // Run transactions sequentially; each task verifies it sees the previous task's committed state
    for (int i = 0; i < 50; i++) {
        final RandomTask task = new RandomTask(i, store, committedData, this.random.nextLong());
        task.run();
        final Throwable fail = task.getFail();
        if (fail != null)
            throw new Exception("task #" + i + " failed: >>>" + this.show(fail).trim() + "<<<");
    }
    this.log.info("finished testSequentialTransactions() on " + store);
}
/**
 * Execute the given action in a fresh transaction, retrying up to {@link #getNumTries}
 * times on {@code RetryTransactionException} with linear backoff between attempts.
 *
 * <p>Fixes: typo "yeilded" in the debug log message; no longer sleeps pointlessly after
 * the final failed attempt; uses unqualified {@code this} for consistency.
 *
 * @param kvdb database to transact against
 * @param transactional action to perform
 * @param <V> action result type
 * @return result of the first successful attempt
 * @throws RetryTransactionException if every attempt fails with a retry exception
 */
protected <V> V tryNtimes(KVDatabase kvdb, Transactional<V> transactional) {
    RetryTransactionException retry = null;
    final int numTries = this.getNumTries();
    for (int count = 0; count < numTries; count++) {
        final KVTransaction tx = kvdb.createTransaction();
        try {
            final V result = transactional.transact(tx);
            this.numTransactionAttempts.incrementAndGet();
            tx.commit();
            return result;
        } catch (RetryTransactionException e) {
            this.numTransactionRetries.incrementAndGet();
            this.log.debug("attempt #" + (count + 1) + " yielded " + e);
            retry = e;
        }
        if (count + 1 < numTries) {                             // no point sleeping after the last attempt
            try {
                Thread.sleep(100 + count * 200);                // linear backoff
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();             // preserve interrupt status
            }
        }
    }
    throw retry;
}
/**
 * Get the maximum number of attempts made by {@link #tryNtimes}.
 *
 * @return number of attempts; default 3
 */
protected int getNumTries() {
    return 3;
}
/**
 * Callback interface for code to execute within a (possibly retried) transaction.
 *
 * @param <V> action result type
 */
protected interface Transactional<V> {
    V transact(KVTransaction kvt);
}
// RandomTask
public class RandomTask extends Thread {
private final int id;                                           // unique task id, used in thread name and logs
private final KVDatabase store;                                 // database this task operates on
private final Random random;                                    // seeded PRNG so failures are reproducible
private final TreeMap<byte[], byte[]> committedData;            // tracks actual committed data, if known
private final NavigableMap<String, String> committedDataView;   // string view of committedData for readable diagnostics
private final ArrayList<String> log = new ArrayList<>(1000);    // per-task event log, dumped on failure
private Throwable fail;                                         // set if this task terminates with an error
/**
 * Constructor for a task with no knowledge of the database's committed contents.
 *
 * @param id task id
 * @param store database to operate on
 * @param seed random seed
 */
public RandomTask(int id, KVDatabase store, long seed) {
    this(id, store, null, seed);
}
/**
 * Primary constructor.
 *
 * @param id task id
 * @param store database to operate on
 * @param committedData in-memory mirror of the committed database contents, or null if unknown
 * @param seed random seed, logged so failing runs can be replayed
 */
public RandomTask(int id, KVDatabase store, TreeMap<byte[], byte[]> committedData, long seed) {
    super("Random[" + id + "]");
    this.id = id;
    this.store = store;
    this.committedData = committedData;
    this.committedDataView = stringView(this.committedData);
    this.random = new Random(seed);
    this.log("seed = " + seed);
}
/**
 * Run the random test, recording any thrown error in the failure field and dumping
 * this task's event log (always on failure, otherwise only at trace level).
 */
@Override
public void run() {
    KVDatabaseTest.this.log.debug("*** " + this + " STARTING");
    try {
        this.test();
        this.log("succeeded");
    } catch (Throwable t) {
        // Capture the stack trace into the per-task log for post-mortem dumps
        final StringWriter buf = new StringWriter();
        t.printStackTrace(new PrintWriter(buf, true));
        this.log("failed: " + t + "\n" + buf.toString());
        this.fail = t;
    } finally {
        KVDatabaseTest.this.log.debug("*** " + this + " FINISHED");
        this.dumpLog(this.fail != null);
    }
}
/**
 * Get the error which terminated this task, if any.
 *
 * @return the failure cause, or null if the task completed successfully
 */
public Throwable getFail() {
    return fail;
}
@SuppressWarnings("unchecked")
private void test() throws Exception {
// Keep track of key/value pairs that we know should exist in the transaction
final TreeMap<byte[], byte[]> knownValues = new TreeMap<>(ByteUtil.COMPARATOR);
final NavigableMap<String, String> knownValuesView = stringView(knownValues);
final TreeSet<byte[]> putValues = new TreeSet<>(ByteUtil.COMPARATOR);
final NavigableSet<String> putValuesView = stringView(putValues);
// Keep track of known empty ranges
final KeyRanges knownEmpty = new KeyRanges();
// Create transaction
final KVTransaction tx = this.store.createTransaction();
KVDatabaseTest.this.log.debug("*** CREATED TX " + tx);
// Load actual committed database contents (if known) into "known values" tracker
if (this.committedData != null)
knownValues.putAll(this.committedData);
// Save a copy of committed data
final TreeMap<byte[], byte[]> previousCommittedData = this.committedData != null ?
(TreeMap<byte[], byte[]>)this.committedData.clone() : null;
//final NavigableMap<String, String> previousCommittedDataView = stringView(previousCommittedData);
// Verify committed data is accurate before starting
if (this.committedData != null)
Assert.assertEquals(stringView(this.readDatabase(tx)), knownValuesView);
// Note: if this.committedData != null, then knownValues will exactly track the transaction, otherwise,
// knownValues only contains values we know are in there; nothing is known about uncontained values.
// Make a bunch of random changes
Boolean committed = null;
try {
final int limit = this.r(1000);
for (int j = 0; j < limit; j++) {
byte[] key;
byte[] val;
byte[] min;
byte[] max;
KVPair pair;
int option = this.r(62);
boolean knownValuesChanged = false;
if (option < 10) { // get
key = this.rb(1, false);
val = tx.get(key);
this.log("get: " + s(key) + " -> " + s(val));
if (val == null) {
assert !knownValues.containsKey(key) :
this + ": get(" + s(key) + ") returned null but"
+ "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
+ "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
knownEmpty.add(new KeyRange(key));
knownValuesChanged = true;
} else if (knownValues.containsKey(key)) {
assert s(knownValues.get(key)).equals(s(val)) :
this + ": get(" + s(key) + ") returned " + s(val) + " but"
+ "\n knowns=" + knownValuesView + "\n puts=" + putValuesView + "\n emptys="
+ "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
} else {
knownValues.put(key, val);
knownValuesChanged = true;
}
assert val == null || !knownEmpty.contains(key) :
this + ": get(" + s(key) + ") returned " + s(val) + " but"
+ "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
+ "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
} else if (option < 20) { // put
key = this.rb(1, false);
val = this.rb(2, true);
this.log("put: " + s(key) + " -> " + s(val));
tx.put(key, val);
knownValues.put(key, val);
putValues.add(key);
knownEmpty.remove(new KeyRange(key));
knownValuesChanged = true;
} else if (option < 30) { // getAtLeast
min = this.rb(1, true);
pair = tx.getAtLeast(min);
this.log("getAtLeast: " + s(min) + " -> " + s(pair));
if (pair == null) {
assert knownValues.tailMap(min).isEmpty() :
this + ": getAtLeast(" + s(min) + ") returned " + null + " but"
+ "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
+ "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
} else if (knownValues.containsKey(pair.getKey()))
assert s(knownValues.get(pair.getKey())).equals(s(pair.getValue())) :
this + ": getAtLeast(" + s(min) + ") returned " + pair + " but"
+ "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
+ "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
else
knownValues.put(pair.getKey(), pair.getValue());
assert pair == null || !knownEmpty.contains(pair.getKey()) :
this + ": getAtLeast(" + s(min) + ") returned " + pair + " but"
+ "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
+ "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
knownEmpty.add(new KeyRange(min, pair != null ? pair.getKey() : null));
knownValuesChanged = true;
} else if (option < 40) { // getAtMost
max = this.rb(1, true);
pair = tx.getAtMost(max);
this.log("getAtMost: " + s(max) + " -> " + s(pair));
if (pair == null) {
assert knownValues.headMap(max).isEmpty() :
this + ": getAtMost(" + s(max) + ") returned " + null + " but"
+ "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
+ "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
} else if (knownValues.containsKey(pair.getKey()))
assert s(knownValues.get(pair.getKey())).equals(s(pair.getValue())) :
this + ": getAtMost(" + s(max) + ") returned " + pair + " but"
+ "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
+ "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
else
knownValues.put(pair.getKey(), pair.getValue());
assert pair == null || !knownEmpty.contains(pair.getKey()) :
this + ": getAtMost(" + s(max) + ") returned " + pair + " but"
+ "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
+ "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
knownEmpty.add(new KeyRange(pair != null ? ByteUtil.getNextKey(pair.getKey()) : ByteUtil.EMPTY, max));
knownValuesChanged = true;
} else if (option < 50) { // remove
key = this.rb(1, false);
if (this.r(5) == 0 && (pair = tx.getAtLeast(this.rb(1, false))) != null)
key = pair.getKey();
this.log("remove: " + s(key));
tx.remove(key);
knownValues.remove(key);
putValues.remove(key);
knownEmpty.add(new KeyRange(key));
knownValuesChanged = true;
} else if (option < 52) { // removeRange
min = this.rb2(2, 20);
do {
max = this.rb2(2, 30);
} while (max != null && min != null && ByteUtil.COMPARATOR.compare(min, max) > 0);
this.log("removeRange: " + s(min) + " to " + s(max));
tx.removeRange(min, max);
if (min == null && max == null) {
knownValues.clear();
putValues.clear();
} else if (min == null) {
knownValues.headMap(max).clear();
putValues.headSet(max).clear();
} else if (max == null) {
knownValues.tailMap(min).clear();
putValues.tailSet(min).clear();
} else {
knownValues.subMap(min, max).clear();
putValues.subSet(min, max).clear();
}
knownEmpty.add(new KeyRange(min != null ? min : ByteUtil.EMPTY, max));
knownValuesChanged = true;
} else if (option < 60) { // adjustCounter
key = this.rb(1, false);
key[0] = (byte)(key[0] & 0x0f);
val = tx.get(key);
long counter = -1;
if (val != null) {
try {
counter = tx.decodeCounter(val);
this.log("adj: found valid value " + s(val) + " (" + counter + ") at key " + s(key));
} catch (IllegalArgumentException e) {
this.log("adj: found bogus value " + s(val) + " at key " + s(key));
val = null;
}
}
if (val == null) {
counter = this.random.nextLong();
final byte[] encodedCounter = tx.encodeCounter(counter);
tx.put(key, encodedCounter);
putValues.add(key);
this.log("adj: initialize " + s(key) + " to " + s(encodedCounter));
}
final long adj = this.random.nextInt(1 << this.random.nextInt(24)) - 1024;
final byte[] encodedCounter = tx.encodeCounter(counter + adj);
this.log("adj: " + s(key) + " by " + adj + " -> should now be " + s(encodedCounter));
tx.adjustCounter(key, adj);
knownValues.put(key, encodedCounter);
knownEmpty.remove(new KeyRange(key));
knownValuesChanged = true;
} else { // sleep
final int millis = this.r(50);
this.log("sleep " + millis + "ms");
try {
Thread.sleep(millis);
} catch (InterruptedException e) {
// ignore
}
}
if (knownValuesChanged) {
this.log("new values:"
+ "\n knowns=" + knownValuesView + "\n puts=" + putValuesView + "\n emptys=" + knownEmpty);
}
// Verify everything we know to be there is there
for (Map.Entry<byte[], byte[]> entry : knownValues.entrySet()) {
final byte[] knownKey = entry.getKey();
final byte[] expected = entry.getValue();
final byte[] actual = tx.get(knownKey);
assert actual != null && ByteUtil.compare(actual, expected) == 0 :
this + ": tx has " + s(actual) + " for key " + s(knownKey) + " but"
+ "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
+ "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
}
// Verify everything we know to no be there is not there
final Iterator<KVPair> iter = tx.getRange(null, null, false);
while (iter.hasNext()) {
pair = iter.next();
assert !knownEmpty.contains(pair.getKey()) :
this + ": tx contains " + pair + " but"
+ "\n knowns=" + knownValuesView + "\n puts=" + putValuesView
+ "\n emptys=" + knownEmpty + "\n tx=" + this.toString(tx);
}
if (iter instanceof Closeable)
((Closeable)iter).close();
}
// Maybe commit
final boolean rollback = this.r(5) == 3;
KVDatabaseTest.this.log.debug("*** " + (rollback ? "ROLLING BACK" : "COMMITTING") + " TX " + tx);
this.log("about to " + (rollback ? "rollback" : "commit") + ":"
+ "\n knowns=" + knownValuesView + "\n puts=" + putValuesView + "\n emptys=" + knownEmpty
+ "\n committed: " + committedDataView);
if (rollback) {
tx.rollback();
committed = false;
this.log("rolled-back");
} else {
try {
KVDatabaseTest.this.numTransactionAttempts.incrementAndGet();
tx.commit();
} catch (RetryTransactionException e) {
KVDatabaseTest.this.numTransactionRetries.incrementAndGet();
throw e;
}
committed = true;
KVDatabaseTest.this.log.debug("*** COMMITTED TX " + tx);
this.log("committed");
}
} catch (TransactionTimeoutException e) {
KVDatabaseTest.this.log.debug("*** TX " + tx + " THREW " + e);
this.log("got " + e);
committed = false;
} catch (RetryTransactionException e) { // might have committed, might not have, we don't know for sure
KVDatabaseTest.this.log.debug("*** TX " + tx + " THREW " + e);
this.log("got " + e);
}
// Doing this should always be allowed and shouldn't affect anything
tx.rollback();
// Verify committed database contents are now equal to what's expected
if (this.committedData != null) {
// Read actual content
final TreeMap<byte[], byte[]> actual = new TreeMap<byte[], byte[]>(ByteUtil.COMPARATOR);
final NavigableMap<String, String> actualView = stringView(actual);
actual.putAll(this.readDatabase());
// Update what we think is in the database and then compare to actual content
if (Boolean.TRUE.equals(committed)) {
// Verify
this.log("tx was definitely committed");
assert actualView.equals(knownValuesView) :
this + "\n*** ACTUAL:\n" + actualView + "\n*** EXPECTED:\n" + knownValuesView + "\n";
} else if (Boolean.FALSE.equals(committed)) {
// Verify
this.log("tx was definitely rolled back");
assert actualView.equals(committedDataView) :
this + "\n*** ACTUAL:\n" + actualView + "\n*** EXPECTED:\n" + committedDataView + "\n";
} else {
// We don't know whether transaction got committed or not .. check both possibilities
final boolean matchCommit = actualView.equals(knownValuesView);
final boolean matchRollback = actualView.equals(committedDataView);
this.log("tx was either committed (" + matchCommit + ") or rolled back (" + matchRollback + ")");
// Verify one or the other
assert matchCommit || matchRollback :
this + "\n*** ACTUAL:\n" + actualView
+ "\n*** COMMIT:\n" + knownValuesView
+ "\n*** ROLLBACK:\n" + committedDataView + "\n";
committed = matchCommit;
}
// Update model of database
if (committed) {
this.committedData.clear();
this.committedData.putAll(knownValues);
}
}
}
private TreeMap<byte[], byte[]> readDatabase() {
return KVDatabaseTest.this.tryNtimes(this.store, new Transactional<TreeMap<byte[], byte[]>>() {
@Override
public TreeMap<byte[], byte[]> transact(KVTransaction tx) {
return RandomTask.this.readDatabase(tx);
}
});
}
private TreeMap<byte[], byte[]> readDatabase(KVStore tx) {
final TreeMap<byte[], byte[]> values = new TreeMap<byte[], byte[]>(ByteUtil.COMPARATOR);
final Iterator<KVPair> i = tx.getRange(null, null, false);
while (i.hasNext()) {
final KVPair pair = i.next();
values.put(pair.getKey(), pair.getValue());
}
if (i instanceof AutoCloseable) {
try {
((AutoCloseable)i).close();
} catch (Exception e) {
// ignore
}
}
return values;
}
private String toString(KVStore kv) {
final StringBuilder buf = new StringBuilder();
buf.append('{');
final Iterator<KVPair> i = kv.getRange(null, null, false);
while (i.hasNext()) {
final KVPair pair = i.next();
if (buf.length() > 8)
buf.append(", ");
buf.append(ByteUtil.toString(pair.getKey())).append('=').append(ByteUtil.toString(pair.getValue()));
}
if (i instanceof AutoCloseable) {
try {
((AutoCloseable)i).close();
} catch (Exception e) {
// ignore
}
}
buf.append('}');
return buf.toString();
}
private void log(String s) {
this.log.add(s);
}
private void dumpLog(boolean force) {
if (!force && !KVDatabaseTest.this.log.isTraceEnabled())
return;
synchronized (KVDatabaseTest.this) {
final StringBuilder buf = new StringBuilder(this.log.size() * 40);
for (String s : this.log)
buf.append(s).append('\n');
KVDatabaseTest.this.log.debug("*** BEGIN " + this + " LOG ***\n\n{}\n*** END " + this + " LOG ***", buf);
}
}
private int r(int max) {
return this.random.nextInt(max);
}
private byte[] rb(int len, boolean allowFF) {
final byte[] b = new byte[this.r(len) + 1];
this.random.nextBytes(b);
if (!allowFF && b[0] == (byte)0xff)
b[0] = (byte)random.nextInt(0xff);
return b;
}
private byte[] rb2(int len, int nullchance) {
if (this.r(nullchance) == 0)
return null;
return this.rb(len, true);
}
@Override
public String toString() {
return "Random[" + this.id + "]";
}
}
// Reader

/**
 * A {@link Callable} that performs a single read in a given transaction: either an exact-key
 * {@code get()} or, when {@code range} is true, a {@code getAtLeast()} lookup.
 */
public class Reader implements Callable<byte[]> {

    final KVTransaction tx;
    final byte[] key;
    final boolean range;

    public Reader(KVTransaction tx, byte[] key, boolean range) {
        this.tx = tx;
        this.key = key;
        this.range = range;
    }

    public Reader(KVTransaction tx, byte[] key) {
        this(tx, key, false);
    }

    @Override
    public byte[] call() {
        return this.range ? this.readAtLeast() : this.readExact();
    }

    // Find the smallest key >= this.key and return its value (or null if none)
    private byte[] readAtLeast() {
        if (KVDatabaseTest.this.log.isTraceEnabled())
            KVDatabaseTest.this.log.trace("reading at least " + s(this.key) + " in " + this.tx);
        final KVPair pair = this.tx.getAtLeast(this.key);
        KVDatabaseTest.this.log.info("finished reading at least " + s(this.key) + " -> " + pair + " in " + this.tx);
        return pair != null ? pair.getValue() : null;
    }

    // Read the value stored under exactly this.key (or null if none)
    private byte[] readExact() {
        if (KVDatabaseTest.this.log.isTraceEnabled())
            KVDatabaseTest.this.log.trace("reading " + s(this.key) + " in " + this.tx);
        final byte[] value = this.tx.get(this.key);
        KVDatabaseTest.this.log.info("finished reading " + s(this.key) + " -> " + s(value) + " in " + this.tx);
        return value;
    }
}
// Writer

/**
 * A {@link Runnable} that performs a single {@code put()} of a fixed key/value pair
 * into a given transaction, logging (and rethrowing) any resulting exception.
 */
public class Writer implements Runnable {

    final KVTransaction tx;
    final byte[] key;
    final byte[] value;

    public Writer(KVTransaction tx, byte[] key, byte[] value) {
        this.tx = tx;
        this.key = key;
        this.value = value;
    }

    @Override
    public void run() {
        try {
            KVDatabaseTest.this.log.info("putting " + s(this.key) + " -> " + s(this.value) + " in " + this.tx);
            this.tx.put(this.key, this.value);
        } catch (RuntimeException e) {
            this.reportFailure(e);
            throw e;
        }
    }

    // Log the failed put, including the stack trace when trace logging is enabled
    private void reportFailure(RuntimeException e) {
        KVDatabaseTest.this.log.info("exception putting " + s(this.key) + " -> " + s(this.value)
          + " in " + this.tx + ": " + e);
        if (KVDatabaseTest.this.log.isTraceEnabled()) {
            KVDatabaseTest.this.log.trace(this.tx + " put " + s(this.key) + " -> " + s(this.value)
              + " failure exception trace:", e);
        }
    }
}
// Committer

/**
 * A {@link Runnable} that commits a given transaction, updating the test's
 * attempt/retry counters and logging (and rethrowing) any resulting exception.
 */
public class Committer implements Runnable {

    final KVTransaction tx;

    public Committer(KVTransaction tx) {
        this.tx = tx;
    }

    @Override
    public void run() {
        try {
            KVDatabaseTest.this.log.info("committing " + this.tx);
            KVDatabaseTest.this.numTransactionAttempts.incrementAndGet();
            this.tx.commit();
        } catch (RuntimeException e) {
            this.reportFailure(e);
            throw e;
        }
    }

    // Count retries, log the failed commit, and include the stack trace when trace logging is enabled
    private void reportFailure(RuntimeException e) {
        if (e instanceof RetryTransactionException)
            KVDatabaseTest.this.numTransactionRetries.incrementAndGet();
        KVDatabaseTest.this.log.info("exception committing " + this.tx + ": " + e);
        if (KVDatabaseTest.this.log.isTraceEnabled())
            KVDatabaseTest.this.log.trace(this.tx + " commit failure exception trace:", e);
    }
}
}
|
Report retry exception types in key/value database unit test.
|
jsimpledb-kv-test/src/main/java/org/jsimpledb/kv/test/KVDatabaseTest.java
|
Report retry exception types in key/value database unit test.
|
|
Java
|
apache-2.0
|
ed72a6289fd1927959ca57d4d8a01e405e572cdc
| 0
|
marklogic/java-client-api,marklogic/java-client-api,marklogic/java-client-api,marklogic/java-client-api,marklogic/java-client-api
|
/*
* Copyright 2012-2015 MarkLogic Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.marklogic.client.io;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import javax.xml.namespace.QName;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.DOMException;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.ls.DOMImplementationLS;
import org.w3c.dom.ls.LSException;
import org.w3c.dom.ls.LSInput;
import org.w3c.dom.ls.LSOutput;
import org.w3c.dom.ls.LSParser;
import org.w3c.dom.ls.LSResourceResolver;
import com.marklogic.client.MarkLogicIOException;
import com.marklogic.client.MarkLogicInternalException;
import com.marklogic.client.io.marker.BufferableHandle;
import com.marklogic.client.io.marker.ContentHandle;
import com.marklogic.client.io.marker.ContentHandleFactory;
import com.marklogic.client.io.marker.StructureReadHandle;
import com.marklogic.client.io.marker.StructureWriteHandle;
import com.marklogic.client.io.marker.XMLReadHandle;
import com.marklogic.client.io.marker.XMLWriteHandle;
/**
 * A DOM Handle represents XML content as a DOM document for reading or writing.
 *
 * <p>Reads parse the server response into a W3C {@link Document} via the DOM
 * Load and Save (LS) API; writes serialize the document back out as UTF-8.
 * The handle also offers convenience methods for evaluating XPath expressions
 * against the retrieved document.</p>
 */
public class DOMHandle
    extends BaseHandle<InputStream, OutputStreamSender>
    implements OutputStreamSender, BufferableHandle, ContentHandle<Document>,
        XMLReadHandle, XMLWriteHandle,
        StructureReadHandle, StructureWriteHandle
{
    static final private Logger logger = LoggerFactory.getLogger(DOMHandle.class);

    private LSResourceResolver resolver;            // optional resolver for external references during parsing
    private Document content;                       // the DOM document held by this handle
    private DocumentBuilderFactory factory;         // lazily created; see getFactory()
    private XPath xpathProcessor;                   // lazily created; see getXPathProcessor()

    /**
     * Creates a factory to create a DOMHandle instance for a DOM document.
     * @return the factory
     */
    static public ContentHandleFactory newFactory() {
        return new ContentHandleFactory() {
            @Override
            public Class<?>[] getHandledClasses() {
                return new Class<?>[]{ Document.class };
            }
            @Override
            public boolean isHandled(Class<?> type) {
                return Document.class.isAssignableFrom(type);
            }
            @Override
            public <C> ContentHandle<C> newHandle(Class<C> type) {
                @SuppressWarnings("unchecked")
                ContentHandle<C> handle = isHandled(type) ?
                    (ContentHandle<C>) new DOMHandle() : null;
                return handle;
            }
        };
    }

    /**
     * Zero-argument constructor.
     */
    public DOMHandle() {
        super();
        super.setFormat(Format.XML);
        setResendable(true);
    }
    /**
     * Initializes the handle with a DOM document for the content.
     * @param content a DOM document
     */
    public DOMHandle(Document content) {
        this();
        set(content);
    }

    /**
     * Returns the resolver for resolving references while parsing the document.
     * @return the resolver
     */
    public LSResourceResolver getResolver() {
        return resolver;
    }
    /**
     * Specifies the resolver for resolving references while parsing the document.
     * @param resolver the reference resolver
     */
    public void setResolver(LSResourceResolver resolver) {
        this.resolver = resolver;
    }

    /**
     * Returns the DOM document for the content.
     * @return the DOM document
     */
    @Override
    public Document get() {
        return content;
    }
    /**
     * Assigns a DOM document as the content.
     * @param content a DOM document
     */
    @Override
    public void set(Document content) {
        this.content = content;
    }
    /**
     * Assigns a DOM document as the content and returns the handle
     * as a fluent convenience.
     * @param content a DOM document
     * @return this handle
     */
    public DOMHandle with(Document content) {
        set(content);
        return this;
    }

    /**
     * Restricts the format to XML.
     */
    @Override
    public void setFormat(Format format) {
        if (format != Format.XML)
            throw new IllegalArgumentException("DOMHandle supports the XML format only");
    }
    /**
     * Specifies the mime type of the content and returns the handle
     * as a fluent convenience.
     * @param mimetype the mime type of the content
     * @return this handle
     */
    public DOMHandle withMimetype(String mimetype) {
        setMimetype(mimetype);
        return this;
    }

    /**
     * Populates the handle by parsing the buffered bytes as an XML document;
     * a null or empty buffer clears the content.
     * @param buffer serialized XML bytes, or null/empty for no content
     */
    @Override
    public void fromBuffer(byte[] buffer) {
        if (buffer == null || buffer.length == 0)
            content = null;
        else
            receiveContent(new ByteArrayInputStream(buffer));
    }
    /**
     * Serializes the current document to a byte array, or returns null
     * when the handle holds no content.
     * @return the serialized XML bytes, or null
     */
    @Override
    public byte[] toBuffer() {
        try {
            if (content == null)
                return null;

            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            write(buffer);

            return buffer.toByteArray();
        } catch (IOException e) {
            throw new MarkLogicIOException(e);
        }
    }
    /**
     * Returns the DOM document as an XML string.
     */
    @Override
    public String toString() {
        try {
            byte[] buffer = toBuffer();
            // null-safe: a handle with no content stringifies to null rather than throwing
            if ( buffer == null ) return null;
            else return new String(buffer,"UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new MarkLogicIOException(e);
        }
    }

    /**
     * Returns the factory for building DOM documents, creating a default
     * (namespace-aware, non-validating) factory on first use.
     * @return the document factory
     * @throws ParserConfigurationException if the default factory cannot be created
     */
    public DocumentBuilderFactory getFactory() throws ParserConfigurationException {
        if (factory == null)
            factory = makeDocumentBuilderFactory();
        return factory;
    }
    /**
     * Specifies the factory for building DOM documents.
     * @param factory the document factory
     */
    public void setFactory(DocumentBuilderFactory factory) {
        this.factory = factory;
    }
    // Builds the default factory: namespace-aware and non-validating
    protected DocumentBuilderFactory makeDocumentBuilderFactory() throws ParserConfigurationException {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setNamespaceAware(true);
        factory.setValidating(false);
        // TODO: XInclude
        return factory;
    }

    /**
     * Get the processor used to evaluate XPath expressions.
     * You might get the XPath processor to configure it.  For instance,
     * you can configure the XPath processor to declare namespace
     * bindings or to set a function or variable resolver.
     * @see com.marklogic.client.util.EditableNamespaceContext
     * @return    the XPath expression processor
     */
    public XPath getXPathProcessor() {
        if (xpathProcessor == null)
            xpathProcessor = makeXPathProcessorFactory().newXPath();
        return xpathProcessor;
    }
    /**
     * Specifies the processor used to evaluate XPath expressions against
     * the document.
     * @param xpathProcessor    the XPath expression processor
     */
    public void setXPathProcessor(XPath xpathProcessor) {
        this.xpathProcessor = xpathProcessor;
    }
    // Factory hook for the XPath processor; override to supply a custom XPathFactory
    protected XPathFactory makeXPathProcessorFactory() {
        return XPathFactory.newInstance();
    }

    /**
     * Evaluate a string XPath expression against the retrieved document.
     * An XPath expression can return a Node or subinterface such as
     * Element or Text, a NodeList, or a Boolean, Number, or String value.
     * @param xpathExpression    the XPath expression as a string
     * @param as    the type of the value
     * @param <T>    the type of the value
     * @return    the value produced by the XPath expression
     * @throws XPathExpressionException if the expression cannot be evaluated
     */
    public <T> T evaluateXPath(String xpathExpression, Class<T> as)
    throws XPathExpressionException {
        return evaluateXPath(xpathExpression, get(), as);
    }
    /**
     * Evaluate a string XPath expression relative to a node such as a node
     * returned by a previous XPath expression.
     * An XPath expression can return a Node or subinterface such as
     * Element or Text, a NodeList, or a Boolean, Number, or String value.
     * @param xpathExpression    the XPath expression as a string
     * @param context    the node for evaluating the expression
     * @param as    the type of the value
     * @param <T>    the type of the value
     * @return    the value produced by the XPath expression
     * @throws XPathExpressionException if the expression cannot be evaluated
     */
    public <T> T evaluateXPath(String xpathExpression, Node context, Class<T> as)
    throws XPathExpressionException {
        checkContext(context);
        return castAs(
                getXPathProcessor().evaluate(xpathExpression, context, returnXPathConstant(as)),
                as
                );
    }
    /**
     * Compile an XPath string expression for efficient evaluation later.
     * @param xpathExpression    the XPath expression as a string
     * @return    the compiled XPath expression
     * @throws XPathExpressionException if the expression cannot be compiled
     */
    public XPathExpression compileXPath(String xpathExpression)
    throws XPathExpressionException {
        return getXPathProcessor().compile(xpathExpression);
    }
    /**
     * Evaluate a compiled XPath expression against the retrieved document.
     * An XPath expression can return a Node or subinterface such as
     * Element or Text, a NodeList, or a Boolean, Number, or String value.
     * @param xpathExpression    an XPath expression compiled previously
     * @param as    the type of the value
     * @param <T>    the type of the value
     * @return    the value produced by the XPath expression
     * @throws XPathExpressionException if the expression cannot be evaluated
     */
    public <T> T evaluateXPath(XPathExpression xpathExpression, Class<T> as)
    throws XPathExpressionException {
        return evaluateXPath(xpathExpression, get(), as);
    }
    /**
     * Evaluate a compiled XPath expression relative to a node such as a node
     * returned by a previous XPath expression.
     * An XPath expression can return a Node or subinterface such as
     * Element or Text, a NodeList, or a Boolean, Number, or String value.
     * @param xpathExpression    an XPath expression compiled previously
     * @param context    the node for evaluating the expression
     * @param as    the type of the value
     * @param <T>    the type of the value
     * @return    the value produced by the XPath expression
     * @throws XPathExpressionException if the expression cannot be evaluated
     */
    public <T> T evaluateXPath(XPathExpression xpathExpression, Node context, Class<T> as)
    throws XPathExpressionException {
        checkContext(context);
        return castAs(
                xpathExpression.evaluate(context, returnXPathConstant(as)),
                as
                );
    }
    // Guard: XPath evaluation requires a non-null context node
    protected void checkContext(Node context) {
        if (context == null) {
            throw new IllegalStateException("Cannot process empty context");
        }
    }
    // Maps the requested Java result type to the corresponding XPathConstants QName
    protected QName returnXPathConstant(Class<?> as) {
        if (as == null) {
            throw new IllegalArgumentException("cannot execute XPath as null");
        } else if (Node.class.isAssignableFrom(as)) {
            return XPathConstants.NODE;
        } else if (NodeList.class.isAssignableFrom(as)) {
            return XPathConstants.NODESET;
        } else if (String.class.isAssignableFrom(as)) {
            return XPathConstants.STRING;
        } else if (Number.class.isAssignableFrom(as)) {
            return XPathConstants.NUMBER;
        } else if (Boolean.class.isAssignableFrom(as)) {
            return XPathConstants.BOOLEAN;
        }
        throw new IllegalArgumentException("cannot execute XPath as "+as.getName());
    }
    // Casts an XPath result to the requested type, rejecting incompatible results
    protected <T> T castAs(Object result, Class<?> as) {
        if (result == null) {
            return null;
        }
        if (!as.isAssignableFrom(result.getClass())) {
            throw new IllegalArgumentException("cannot cast "+result.getClass().getName()+" to "+as.getName());
        }
        @SuppressWarnings("unchecked")
        T typedResult = (T) result;
        return typedResult;
    }

    // Content is received from the server as a raw InputStream
    @Override
    protected Class<InputStream> receiveAs() {
        return InputStream.class;
    }
    /**
     * Parses the incoming stream into the handle's DOM document using the
     * DOM LS parser (UTF-8), applying the configured resource resolver if any.
     * The input stream is always closed, even on failure.
     * @param content the stream to parse; null clears the content
     */
    @Override
    protected void receiveContent(InputStream content) {
        if (content == null) {
            this.content = null;
            return;
        }

        try {
            if (logger.isInfoEnabled())
                logger.info("Parsing DOM document from input stream");

            DocumentBuilderFactory factory = getFactory();
            if (factory == null) {
                throw new MarkLogicInternalException("Failed to make DOM document builder factory");
            }

            DOMImplementationLS domImpl = (DOMImplementationLS) factory.newDocumentBuilder().getDOMImplementation();

            LSParser parser = domImpl.createLSParser(DOMImplementationLS.MODE_SYNCHRONOUS, null);
            if (resolver != null) {
                parser.getDomConfig().setParameter("resource-resolver", resolver);
            }

            LSInput domInput = domImpl.createLSInput();
            domInput.setEncoding("UTF-8");
            domInput.setByteStream(content);

            // note: the parameter shadows the field; the field is assigned the parse result here
            this.content = parser.parse(domInput);
        } catch (ParserConfigurationException e) {
            logger.error("Failed to parse DOM document from input stream",e);
            throw new MarkLogicInternalException(e);
        } finally {
            try {
                content.close();
            } catch (IOException e) {
                // ignore
            }
        }
    }
    // The handle itself serializes the document when sent; requires content to be set
    @Override
    protected OutputStreamSender sendContent() {
        if (content == null) {
            throw new IllegalStateException("No document to write");
        }

        return this;
    }
    /**
     * Serializes the held DOM document to the output stream as UTF-8
     * using the DOM LS serializer.
     * @param out the stream to write to
     * @throws IOException if writing fails
     */
    @Override
    public void write(OutputStream out) throws IOException {
        try {
            if (logger.isInfoEnabled())
                logger.info("Serializing DOM document to output stream");

            DocumentBuilderFactory factory = getFactory();
            if (factory == null) {
                throw new MarkLogicInternalException("Failed to make DOM document builder factory");
            }

            DOMImplementationLS domImpl = (DOMImplementationLS) factory.newDocumentBuilder().getDOMImplementation();
            LSOutput domOutput = domImpl.createLSOutput();
            domOutput.setEncoding("UTF-8");
            domOutput.setByteStream(out);
            domImpl.createLSSerializer().write(content, domOutput);
        } catch (DOMException e) {
            logger.error("Failed to serialize DOM document to output stream",e);
            throw new MarkLogicInternalException(e);
        } catch (LSException e) {
            logger.error("Failed to serialize DOM document to output stream",e);
            throw new MarkLogicInternalException(e);
        } catch (ParserConfigurationException e) {
            logger.error("Failed to serialize DOM document to output stream",e);
            throw new MarkLogicInternalException(e);
        }
    }
}
|
src/main/java/com/marklogic/client/io/DOMHandle.java
|
/*
* Copyright 2012-2015 MarkLogic Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.marklogic.client.io;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import javax.xml.namespace.QName;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.DOMException;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.ls.DOMImplementationLS;
import org.w3c.dom.ls.LSException;
import org.w3c.dom.ls.LSInput;
import org.w3c.dom.ls.LSOutput;
import org.w3c.dom.ls.LSParser;
import org.w3c.dom.ls.LSResourceResolver;
import com.marklogic.client.MarkLogicIOException;
import com.marklogic.client.MarkLogicInternalException;
import com.marklogic.client.io.marker.BufferableHandle;
import com.marklogic.client.io.marker.ContentHandle;
import com.marklogic.client.io.marker.ContentHandleFactory;
import com.marklogic.client.io.marker.StructureReadHandle;
import com.marklogic.client.io.marker.StructureWriteHandle;
import com.marklogic.client.io.marker.XMLReadHandle;
import com.marklogic.client.io.marker.XMLWriteHandle;
/**
 * A DOM Handle represents XML content as a DOM document for reading or writing.
 */
public class DOMHandle
    extends BaseHandle<InputStream, OutputStreamSender>
    implements OutputStreamSender, BufferableHandle, ContentHandle<Document>,
        XMLReadHandle, XMLWriteHandle,
        StructureReadHandle, StructureWriteHandle
{
    static final private Logger logger = LoggerFactory.getLogger(DOMHandle.class);

    // Optional resolver for external references encountered while parsing.
    private LSResourceResolver resolver;
    // The DOM document held by this handle; null until set or received.
    private Document content;
    // Lazily-created factory for DOM document builders.
    private DocumentBuilderFactory factory;
    // Lazily-created XPath processor for evaluate/compile convenience methods.
    private XPath xpathProcessor;

    /**
     * Creates a factory to create a DOMHandle instance for a DOM document.
     * @return the factory
     */
    static public ContentHandleFactory newFactory() {
        return new ContentHandleFactory() {
            @Override
            public Class<?>[] getHandledClasses() {
                return new Class<?>[]{ Document.class };
            }
            @Override
            public boolean isHandled(Class<?> type) {
                return Document.class.isAssignableFrom(type);
            }
            @Override
            public <C> ContentHandle<C> newHandle(Class<C> type) {
                @SuppressWarnings("unchecked")
                ContentHandle<C> handle = isHandled(type) ?
                    (ContentHandle<C>) new DOMHandle() : null;
                return handle;
            }
        };
    }

    /**
     * Zero-argument constructor.
     */
    public DOMHandle() {
        super();
        super.setFormat(Format.XML);
        setResendable(true);
    }
    /**
     * Initializes the handle with a DOM document for the content.
     * @param content a DOM document
     */
    public DOMHandle(Document content) {
        this();
        set(content);
    }

    /**
     * Returns the resolver for resolving references while parsing the document.
     * @return the resolver
     */
    public LSResourceResolver getResolver() {
        return resolver;
    }
    /**
     * Specifies the resolver for resolving references while parsing the document.
     * @param resolver the reference resolver
     */
    public void setResolver(LSResourceResolver resolver) {
        this.resolver = resolver;
    }

    /**
     * Returns the DOM document for the content.
     * @return the DOM document, or null if no content has been assigned or read
     */
    @Override
    public Document get() {
        return content;
    }
    /**
     * Assigns a DOM document as the content.
     * @param content a DOM document
     */
    @Override
    public void set(Document content) {
        this.content = content;
    }
    /**
     * Assigns a DOM document as the content and returns the handle
     * as a fluent convenience.
     * @param content a DOM document
     * @return this handle
     */
    public DOMHandle with(Document content) {
        set(content);
        return this;
    }

    /**
     * Restricts the format to XML.
     * @throws IllegalArgumentException if any format other than XML is supplied
     */
    @Override
    public void setFormat(Format format) {
        if (format != Format.XML)
            throw new IllegalArgumentException("DOMHandle supports the XML format only");
    }
    /**
     * Specifies the mime type of the content and returns the handle
     * as a fluent convenience.
     * @param mimetype the mime type of the content
     * @return this handle
     */
    public DOMHandle withMimetype(String mimetype) {
        setMimetype(mimetype);
        return this;
    }

    @Override
    public void fromBuffer(byte[] buffer) {
        if (buffer == null || buffer.length == 0)
            content = null;
        else
            receiveContent(new ByteArrayInputStream(buffer));
    }
    @Override
    public byte[] toBuffer() {
        try {
            // an empty handle serializes to a null buffer, not an exception
            if (content == null)
                return null;

            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            write(buffer);

            return buffer.toByteArray();
        } catch (IOException e) {
            throw new MarkLogicIOException(e);
        }
    }
    /**
     * Returns the DOM document as an XML string, or null if the handle
     * has no content.
     */
    @Override
    public String toString() {
        try {
            // toBuffer() returns null for an empty handle; guard against it
            // instead of letting new String(null, ...) throw a NullPointerException
            byte[] buffer = toBuffer();
            return (buffer == null) ? null : new String(buffer, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new MarkLogicIOException(e);
        }
    }

    /**
     * Returns the factory for building DOM documents.
     * @return the document factory
     */
    public DocumentBuilderFactory getFactory() throws ParserConfigurationException {
        if (factory == null)
            factory = makeDocumentBuilderFactory();
        return factory;
    }
    /**
     * Specifies the factory for building DOM documents.
     * @param factory the document factory
     */
    public void setFactory(DocumentBuilderFactory factory) {
        this.factory = factory;
    }
    protected DocumentBuilderFactory makeDocumentBuilderFactory() throws ParserConfigurationException {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setNamespaceAware(true);
        factory.setValidating(false);
        // TODO: XInclude
        return factory;
    }

    /**
     * Get the processor used to evaluate XPath expressions.
     * You might get the XPath processor to configure it. For instance,
     * you can configure the XPath processor to declare namespace
     * bindings or to set a function or variable resolver.
     * @see com.marklogic.client.util.EditableNamespaceContext
     * @return the XPath expression processor
     */
    public XPath getXPathProcessor() {
        if (xpathProcessor == null)
            xpathProcessor = makeXPathProcessorFactory().newXPath();
        return xpathProcessor;
    }
    /**
     * Specifies the processor used to evaluate XPath expressions against
     * the document.
     * @param xpathProcessor the XPath expression processor
     */
    public void setXPathProcessor(XPath xpathProcessor) {
        this.xpathProcessor = xpathProcessor;
    }
    protected XPathFactory makeXPathProcessorFactory() {
        return XPathFactory.newInstance();
    }

    /**
     * Evaluate a string XPath expression against the retrieved document.
     * An XPath expression can return a Node or subinterface such as
     * Element or Text, a NodeList, or a Boolean, Number, or String value.
     * @param xpathExpression the XPath expression as a string
     * @param as the type of the value
     * @return the value produced by the XPath expression
     */
    public <T> T evaluateXPath(String xpathExpression, Class<T> as)
        throws XPathExpressionException {
        return evaluateXPath(xpathExpression, get(), as);
    }
    /**
     * Evaluate a string XPath expression relative to a node such as a node
     * returned by a previous XPath expression.
     * An XPath expression can return a Node or subinterface such as
     * Element or Text, a NodeList, or a Boolean, Number, or String value.
     * @param xpathExpression the XPath expression as a string
     * @param context the node for evaluating the expression
     * @param as the type of the value
     * @return the value produced by the XPath expression
     */
    public <T> T evaluateXPath(String xpathExpression, Node context, Class<T> as)
        throws XPathExpressionException {
        checkContext(context);
        return castAs(
            getXPathProcessor().evaluate(xpathExpression, context, returnXPathConstant(as)),
            as
        );
    }
    /**
     * Compile an XPath string expression for efficient evaluation later.
     * @param xpathExpression the XPath expression as a string
     * @return the compiled XPath expression
     */
    public XPathExpression compileXPath(String xpathExpression)
        throws XPathExpressionException {
        return getXPathProcessor().compile(xpathExpression);
    }
    /**
     * Evaluate a compiled XPath expression against the retrieved document.
     * An XPath expression can return a Node or subinterface such as
     * Element or Text, a NodeList, or a Boolean, Number, or String value.
     * @param xpathExpression an XPath expression compiled previously
     * @param as the type of the value
     * @return the value produced by the XPath expression
     */
    public <T> T evaluateXPath(XPathExpression xpathExpression, Class<T> as)
        throws XPathExpressionException {
        return evaluateXPath(xpathExpression, get(), as);
    }
    /**
     * Evaluate a compiled XPath expression relative to a node such as a node
     * returned by a previous XPath expression.
     * An XPath expression can return a Node or subinterface such as
     * Element or Text, a NodeList, or a Boolean, Number, or String value.
     * @param xpathExpression an XPath expression compiled previously
     * @param context the node for evaluating the expression
     * @param as the type of the value
     * @return the value produced by the XPath expression
     */
    public <T> T evaluateXPath(XPathExpression xpathExpression, Node context, Class<T> as)
        throws XPathExpressionException {
        checkContext(context);
        return castAs(
            xpathExpression.evaluate(context, returnXPathConstant(as)),
            as
        );
    }

    // Guards the XPath convenience methods against an empty handle.
    protected void checkContext(Node context) {
        if (context == null) {
            throw new IllegalStateException("Cannot process empty context");
        }
    }
    // Maps the requested Java result type onto the XPathConstants QName
    // that the javax.xml.xpath API requires for typed evaluation.
    protected QName returnXPathConstant(Class<?> as) {
        if (as == null) {
            throw new IllegalArgumentException("cannot execute XPath as null");
        } else if (Node.class.isAssignableFrom(as)) {
            return XPathConstants.NODE;
        } else if (NodeList.class.isAssignableFrom(as)) {
            return XPathConstants.NODESET;
        } else if (String.class.isAssignableFrom(as)) {
            return XPathConstants.STRING;
        } else if (Number.class.isAssignableFrom(as)) {
            return XPathConstants.NUMBER;
        } else if (Boolean.class.isAssignableFrom(as)) {
            return XPathConstants.BOOLEAN;
        }
        throw new IllegalArgumentException("cannot execute XPath as "+as.getName());
    }
    // Checked downcast of an XPath evaluation result to the caller's type.
    protected <T> T castAs(Object result, Class<?> as) {
        if (result == null) {
            return null;
        }
        if (!as.isAssignableFrom(result.getClass())) {
            throw new IllegalArgumentException("cannot cast "+result.getClass().getName()+" to "+as.getName());
        }
        @SuppressWarnings("unchecked")
        T typedResult = (T) result;
        return typedResult;
    }

    @Override
    protected Class<InputStream> receiveAs() {
        return InputStream.class;
    }
    @Override
    protected void receiveContent(InputStream content) {
        if (content == null) {
            this.content = null;
            return;
        }

        try {
            if (logger.isInfoEnabled())
                logger.info("Parsing DOM document from input stream");

            DocumentBuilderFactory factory = getFactory();
            if (factory == null) {
                throw new MarkLogicInternalException("Failed to make DOM document builder factory");
            }

            DOMImplementationLS domImpl = (DOMImplementationLS) factory.newDocumentBuilder().getDOMImplementation();

            LSParser parser = domImpl.createLSParser(DOMImplementationLS.MODE_SYNCHRONOUS, null);
            if (resolver != null) {
                parser.getDomConfig().setParameter("resource-resolver", resolver);
            }

            LSInput domInput = domImpl.createLSInput();
            domInput.setEncoding("UTF-8");
            domInput.setByteStream(content);

            this.content = parser.parse(domInput);
        } catch (ParserConfigurationException e) {
            logger.error("Failed to parse DOM document from input stream",e);
            throw new MarkLogicInternalException(e);
        } finally {
            // always release the source stream, even when parsing fails
            try {
                content.close();
            } catch (IOException e) {
                //ignore
            }
        }
    }
    @Override
    protected OutputStreamSender sendContent() {
        if (content == null) {
            throw new IllegalStateException("No document to write");
        }
        return this;
    }
    @Override
    public void write(OutputStream out) throws IOException {
        try {
            if (logger.isInfoEnabled())
                logger.info("Serializing DOM document to output stream");

            DocumentBuilderFactory factory = getFactory();
            if (factory == null) {
                throw new MarkLogicInternalException("Failed to make DOM document builder factory");
            }

            DOMImplementationLS domImpl = (DOMImplementationLS) factory.newDocumentBuilder().getDOMImplementation();
            LSOutput domOutput = domImpl.createLSOutput();
            domOutput.setEncoding("UTF-8");
            domOutput.setByteStream(out);
            domImpl.createLSSerializer().write(content, domOutput);
        } catch (DOMException e) {
            logger.error("Failed to serialize DOM document to output stream",e);
            throw new MarkLogicInternalException(e);
        } catch (LSException e) {
            logger.error("Failed to serialize DOM document to output stream",e);
            throw new MarkLogicInternalException(e);
        } catch (ParserConfigurationException e) {
            logger.error("Failed to serialize DOM document to output stream",e);
            throw new MarkLogicInternalException(e);
        }
    }
}
|
avoid NPE
|
src/main/java/com/marklogic/client/io/DOMHandle.java
|
avoid NPE
|
|
Java
|
apache-2.0
|
f56e7c4fd43cc90430452f68badc2785a4d7ff54
| 0
|
shopizer-ecommerce/shopizer
|
package com.salesmanager.shop.store.facade.product;
import java.util.stream.Collectors;
import org.jsoup.helper.Validate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
import com.salesmanager.core.business.exception.ServiceException;
import com.salesmanager.core.business.services.catalog.product.type.ProductTypeService;
import com.salesmanager.core.model.catalog.product.type.ProductType;
import com.salesmanager.core.model.merchant.MerchantStore;
import com.salesmanager.core.model.reference.language.Language;
import com.salesmanager.shop.mapper.catalog.PersistableProductTypeMapper;
import com.salesmanager.shop.mapper.catalog.ReadableProductTypeMapper;
import com.salesmanager.shop.model.catalog.product.type.PersistableProductType;
import com.salesmanager.shop.model.catalog.product.type.ReadableProductType;
import com.salesmanager.shop.model.catalog.product.type.ReadableProductTypeList;
import com.salesmanager.shop.store.api.exception.OperationNotAllowedException;
import com.salesmanager.shop.store.api.exception.ResourceNotFoundException;
import com.salesmanager.shop.store.api.exception.ServiceRuntimeException;
import com.salesmanager.shop.store.controller.product.facade.ProductTypeFacade;
@Service("productTypeFacade")
public class ProductTypeFacadeImpl implements ProductTypeFacade {

    @Autowired
    private ProductTypeService productTypeService;

    @Autowired
    private ReadableProductTypeMapper readableProductTypeMapper;

    @Autowired
    private PersistableProductTypeMapper persistableProductTypeMapper;

    /**
     * Returns one page of the product types defined for a merchant store.
     * @param store the merchant store (must not be null)
     * @param language the language for localized fields
     * @param count page size
     * @param page zero-based page index
     * @return the page of readable product types (empty list when none found)
     */
    @Override
    public ReadableProductTypeList getByMerchant(MerchantStore store, Language language, int count, int page) {
        Validate.notNull(store, "MerchantStore cannot be null");
        ReadableProductTypeList returnList = new ReadableProductTypeList();
        try {
            Page<ProductType> types = productTypeService.getByMerchant(store, language, page, count);
            if (types != null) {
                returnList.setList(types.getContent().stream()
                        .map(t -> readableProductTypeMapper.convert(t, store, language))
                        .collect(Collectors.toList()));
                returnList.setTotalPages(types.getTotalPages());
                returnList.setRecordsTotal(types.getTotalElements());
                returnList.setRecordsFiltered(types.getSize());
            }
            return returnList;
        } catch (Exception e) {
            throw new ServiceRuntimeException(
                    "An exception occured while getting product types for merchant[ " + store.getCode() + "]", e);
        }
    }

    /**
     * Returns a single product type by id.
     * @throws ResourceNotFoundException when the id does not exist for the store
     */
    @Override
    public ReadableProductType get(MerchantStore store, Long id, Language language) {
        Validate.notNull(store, "MerchantStore cannot be null");
        Validate.notNull(id, "ProductType code cannot be empty");
        try {
            ProductType type = productTypeService.getById(id, store, language);
            ReadableProductType readableType = readableProductTypeMapper.convert(type, store, language);
            if (readableType == null) {
                throw new ResourceNotFoundException("Product type [" + id + "] not found for store [" + store.getCode() + "]");
            }
            return readableType;
        } catch (ResourceNotFoundException e) {
            // rethrow as-is: wrapping it in ServiceRuntimeException would hide
            // the not-found semantics from the REST exception mapping
            throw e;
        } catch (Exception e) {
            throw new ServiceRuntimeException(
                    "An exception occured while getting product type [" + id + "] not found for store [" + store.getCode() + "]", e);
        }
    }

    /**
     * Creates a new product type for the store.
     * @return the generated id of the persisted product type
     * @throws OperationNotAllowedException when the code already exists for the store
     */
    @Override
    public Long save(PersistableProductType type, MerchantStore store, Language language) {
        Validate.notNull(type, "ProductType cannot be null");
        Validate.notNull(store, "MerchantStore cannot be null");
        Validate.notNull(type.getCode(), "ProductType code cannot be empty");
        try {
            if (this.exists(type.getCode(), store, language)) {
                throw new OperationNotAllowedException(
                        "Product type [" + type.getCode() + "] already exist for store [" + store.getCode() + "]");
            }
            ProductType model = persistableProductTypeMapper.convert(type, store, language);
            model.setMerchantStore(store);
            // use the entity returned by the persistence layer: the generated
            // id is guaranteed to be populated on the returned instance
            ProductType saved = productTypeService.saveOrUpdate(model);
            return saved.getId();
        } catch (OperationNotAllowedException e) {
            // rethrow as-is so the caller sees the duplicate-code condition
            throw e;
        } catch (Exception e) {
            throw new ServiceRuntimeException(
                    "An exception occured while saving product type", e);
        }
    }

    /**
     * Updates an existing product type. The id and code of the stored entity
     * are preserved; only the remaining fields are taken from the payload.
     * @throws ResourceNotFoundException when the id does not exist for the store
     */
    @Override
    public void update(PersistableProductType type, Long id, MerchantStore store, Language language) {
        Validate.notNull(type, "ProductType cannot be null");
        Validate.notNull(store, "MerchantStore cannot be null");
        Validate.notNull(id, "id cannot be empty");
        try {
            ProductType t = productTypeService.getById(id, store, language);
            if (t == null) {
                throw new ResourceNotFoundException(
                        "Product type [" + type.getCode() + "] does not exist for store [" + store.getCode() + "]");
            }
            // id and code are immutable from the API's point of view
            type.setId(t.getId());
            type.setCode(t.getCode());
            ProductType model = persistableProductTypeMapper.convert(type, store, language);
            productTypeService.saveOrUpdate(model);
        } catch (ResourceNotFoundException e) {
            throw e;
        } catch (Exception e) {
            throw new ServiceRuntimeException(
                    "An exception occured while saving product type", e);
        }
    }

    /**
     * Deletes a product type by id.
     * @throws ResourceNotFoundException when the id does not exist for the store
     */
    @Override
    public void delete(Long id, MerchantStore store, Language language) {
        Validate.notNull(store, "MerchantStore cannot be null");
        Validate.notNull(id, "id cannot be empty");
        try {
            ProductType t = productTypeService.getById(id, store, language);
            if (t == null) {
                throw new ResourceNotFoundException(
                        "Product type [" + id + "] does not exist for store [" + store.getCode() + "]");
            }
            productTypeService.delete(t);
        } catch (ResourceNotFoundException e) {
            throw e;
        } catch (Exception e) {
            throw new ServiceRuntimeException(
                    "An exception occured while saving product type", e);
        }
    }

    /**
     * Indicates whether a product type with the given code exists for the store.
     */
    @Override
    public boolean exists(String code, MerchantStore store, Language language) {
        ProductType t;
        try {
            t = productTypeService.getByCode(code, store, language);
        } catch (ServiceException e) {
            throw new RuntimeException("An exception occured while getting product type [" + code + "] for merchant store [" + store.getCode() + "]", e);
        }
        return t != null;
    }
}
|
sm-shop/src/main/java/com/salesmanager/shop/store/facade/product/ProductTypeFacadeImpl.java
|
package com.salesmanager.shop.store.facade.product;
import java.util.stream.Collectors;
import org.jsoup.helper.Validate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.stereotype.Service;
import com.salesmanager.core.business.exception.ServiceException;
import com.salesmanager.core.business.services.catalog.product.type.ProductTypeService;
import com.salesmanager.core.model.catalog.product.type.ProductType;
import com.salesmanager.core.model.merchant.MerchantStore;
import com.salesmanager.core.model.reference.language.Language;
import com.salesmanager.shop.mapper.catalog.PersistableProductTypeMapper;
import com.salesmanager.shop.mapper.catalog.ReadableProductTypeMapper;
import com.salesmanager.shop.model.catalog.product.type.PersistableProductType;
import com.salesmanager.shop.model.catalog.product.type.ReadableProductType;
import com.salesmanager.shop.model.catalog.product.type.ReadableProductTypeList;
import com.salesmanager.shop.store.api.exception.OperationNotAllowedException;
import com.salesmanager.shop.store.api.exception.ResourceNotFoundException;
import com.salesmanager.shop.store.api.exception.ServiceRuntimeException;
import com.salesmanager.shop.store.controller.product.facade.ProductTypeFacade;
@Service("productTypeFacade")
public class ProductTypeFacadeImpl implements ProductTypeFacade {

    @Autowired
    private ProductTypeService productTypeService;

    @Autowired
    private ReadableProductTypeMapper readableProductTypeMapper;

    @Autowired
    private PersistableProductTypeMapper persistableProductTypeMapper;

    /**
     * Returns one page of the product types defined for a merchant store.
     * @param store the merchant store (must not be null)
     * @param language the language for localized fields
     * @param count page size
     * @param page zero-based page index
     * @return the page of readable product types (empty list when none found)
     */
    @Override
    public ReadableProductTypeList getByMerchant(MerchantStore store, Language language, int count, int page) {
        Validate.notNull(store, "MerchantStore cannot be null");
        ReadableProductTypeList returnList = new ReadableProductTypeList();
        try {
            Page<ProductType> types = productTypeService.getByMerchant(store, language, page, count);
            if (types != null) {
                returnList.setList(types.getContent().stream()
                        .map(t -> readableProductTypeMapper.convert(t, store, language))
                        .collect(Collectors.toList()));
                returnList.setTotalPages(types.getTotalPages());
                returnList.setRecordsTotal(types.getTotalElements());
                returnList.setRecordsFiltered(types.getSize());
            }
            return returnList;
        } catch (Exception e) {
            throw new ServiceRuntimeException(
                    "An exception occured while getting product types for merchant[ " + store.getCode() + "]", e);
        }
    }

    /**
     * Returns a single product type by id.
     * @throws ResourceNotFoundException when the id does not exist for the store
     */
    @Override
    public ReadableProductType get(MerchantStore store, Long id, Language language) {
        Validate.notNull(store, "MerchantStore cannot be null");
        Validate.notNull(id, "ProductType code cannot be empty");
        try {
            ProductType type = productTypeService.getById(id, store, language);
            ReadableProductType readableType = readableProductTypeMapper.convert(type, store, language);
            if (readableType == null) {
                throw new ResourceNotFoundException("Product type [" + id + "] not found for store [" + store.getCode() + "]");
            }
            return readableType;
        } catch (ResourceNotFoundException e) {
            // rethrow as-is: wrapping it in ServiceRuntimeException would hide
            // the not-found semantics from the REST exception mapping
            throw e;
        } catch (Exception e) {
            throw new ServiceRuntimeException(
                    "An exception occured while getting product type [" + id + "] not found for store [" + store.getCode() + "]", e);
        }
    }

    /**
     * Creates a new product type for the store.
     * @return the generated id of the persisted product type
     * @throws OperationNotAllowedException when the code already exists for the store
     */
    @Override
    public Long save(PersistableProductType type, MerchantStore store, Language language) {
        Validate.notNull(type, "ProductType cannot be null");
        Validate.notNull(store, "MerchantStore cannot be null");
        Validate.notNull(type.getCode(), "ProductType code cannot be empty");
        try {
            if (this.exists(type.getCode(), store, language)) {
                throw new OperationNotAllowedException(
                        "Product type [" + type.getCode() + "] already exist for store [" + store.getCode() + "]");
            }
            ProductType model = persistableProductTypeMapper.convert(type, store, language);
            model.setMerchantStore(store);
            // BUG FIX: read the id from the entity returned by saveOrUpdate.
            // The generated id is guaranteed only on the returned instance,
            // not necessarily on the detached argument that was passed in.
            ProductType saved = productTypeService.saveOrUpdate(model);
            return saved.getId();
        } catch (OperationNotAllowedException e) {
            // rethrow as-is so the caller sees the duplicate-code condition
            throw e;
        } catch (Exception e) {
            throw new ServiceRuntimeException(
                    "An exception occured while saving product type", e);
        }
    }

    /**
     * Updates an existing product type. The id and code of the stored entity
     * are preserved; only the remaining fields are taken from the payload.
     * @throws ResourceNotFoundException when the id does not exist for the store
     */
    @Override
    public void update(PersistableProductType type, Long id, MerchantStore store, Language language) {
        Validate.notNull(type, "ProductType cannot be null");
        Validate.notNull(store, "MerchantStore cannot be null");
        Validate.notNull(id, "id cannot be empty");
        try {
            ProductType t = productTypeService.getById(id, store, language);
            if (t == null) {
                throw new ResourceNotFoundException(
                        "Product type [" + type.getCode() + "] does not exist for store [" + store.getCode() + "]");
            }
            // id and code are immutable from the API's point of view
            type.setId(t.getId());
            type.setCode(t.getCode());
            ProductType model = persistableProductTypeMapper.convert(type, store, language);
            productTypeService.saveOrUpdate(model);
        } catch (ResourceNotFoundException e) {
            throw e;
        } catch (Exception e) {
            throw new ServiceRuntimeException(
                    "An exception occured while saving product type", e);
        }
    }

    /**
     * Deletes a product type by id.
     * @throws ResourceNotFoundException when the id does not exist for the store
     */
    @Override
    public void delete(Long id, MerchantStore store, Language language) {
        Validate.notNull(store, "MerchantStore cannot be null");
        Validate.notNull(id, "id cannot be empty");
        try {
            ProductType t = productTypeService.getById(id, store, language);
            if (t == null) {
                throw new ResourceNotFoundException(
                        "Product type [" + id + "] does not exist for store [" + store.getCode() + "]");
            }
            productTypeService.delete(t);
        } catch (ResourceNotFoundException e) {
            throw e;
        } catch (Exception e) {
            throw new ServiceRuntimeException(
                    "An exception occured while saving product type", e);
        }
    }

    /**
     * Indicates whether a product type with the given code exists for the store.
     */
    @Override
    public boolean exists(String code, MerchantStore store, Language language) {
        ProductType t;
        try {
            t = productTypeService.getByCode(code, store, language);
        } catch (ServiceException e) {
            throw new RuntimeException("An exception occured while getting product type [" + code + "] for merchant store [" + store.getCode() + "]", e);
        }
        return t != null;
    }
}
|
Fetch and return saved Entity id
|
sm-shop/src/main/java/com/salesmanager/shop/store/facade/product/ProductTypeFacadeImpl.java
|
Fetch and return saved Entity id
|
|
Java
|
apache-2.0
|
6355eaaf72ef406f84e88247a94110b0f332ad23
| 0
|
etnetera/jmeter,ubikloadpack/jmeter,ra0077/jmeter,d0k1/jmeter,ra0077/jmeter,max3163/jmeter,d0k1/jmeter,etnetera/jmeter,vherilier/jmeter,etnetera/jmeter,vherilier/jmeter,d0k1/jmeter,ubikloadpack/jmeter,etnetera/jmeter,vherilier/jmeter,max3163/jmeter,ra0077/jmeter,vherilier/jmeter,etnetera/jmeter,d0k1/jmeter,ubikloadpack/jmeter,ra0077/jmeter,max3163/jmeter,max3163/jmeter,ubikloadpack/jmeter
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.report.core;
import java.io.BufferedReader;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import org.apache.jmeter.samplers.SampleSaveConfiguration;
import org.apache.jmeter.save.CSVSaveService;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.jorphan.util.JOrphanUtils;
import org.apache.log.Logger;
/**
 * Reader class for reading JMeter CSV result files.
 * <p>
 * Handles {@link SampleMetadata} reading (either from the file's header line
 * or from the default {@code SampleSaveConfiguration}) and sample extraction.
 * </p>
 *
 * @since 2.14
 */
public class CsvSampleReader implements Closeable{
private static final Logger LOG = LoggingManager.getLoggerForClass();
// Character buffer size for the underlying BufferedReader.
private static final int BUF_SIZE = 10000;
// JMeter result files are read as ISO8859-1.
private static final String CHARSET = "ISO8859-1";
private static final char DEFAULT_SEPARATOR =
JMeterUtils.getPropDefault("jmeter.save.saveservice.default_delimiter", ",").charAt(0); //$NON-NLS-1$ //$NON-NLS-2$
private File file;
private BufferedReader reader;
private char separator;
// Row index reported in Sample instances and error messages.
// NOTE(review): this field is initialized to 0 but never incremented in this
// class, so every Sample and error message carries row 0 — confirm whether
// callers rely on the row index before depending on it.
private long row;
private SampleMetadata metadata;
private int columnCount;
// One-sample read-ahead so that peek() and hasNext() work without
// disturbing the stream position seen by readSample().
private Sample lastSampleRead;
/**
 * Instantiates a new csv sample reader.
 *
 * @param inputFile
 * the input file (must not be {@code null})
 * @param separator
 * the separator
 * @param useSaveSampleCfg
 * indicates whether the reader uses jmeter
 * SampleSaveConfiguration to define metadata
 */
public CsvSampleReader(File inputFile, char separator, boolean useSaveSampleCfg) {
this(inputFile, null, separator, useSaveSampleCfg);
}
/**
 * Instantiates a new csv sample reader.
 *
 * @param inputFile
 * the input file (must not be {@code null})
 * @param metadata
 * the metadata
 */
public CsvSampleReader(File inputFile, SampleMetadata metadata) {
this(inputFile, metadata, DEFAULT_SEPARATOR, false);
}
// Common constructor. Order matters: the reader must be opened before
// readMetadata(), which consumes the file's first line when no metadata
// is supplied, and the first sample is pre-read last to prime peek().
private CsvSampleReader(File inputFile, SampleMetadata metadata,
char separator, boolean useSaveSampleCfg) {
if (!(inputFile.isFile() && inputFile.canRead())) {
throw new IllegalArgumentException(inputFile.getAbsolutePath()
+ "does not exist or is not readable");
}
this.file = inputFile;
try {
this.reader = new BufferedReader(new InputStreamReader(
new FileInputStream(file), CHARSET), BUF_SIZE);
} catch (FileNotFoundException | UnsupportedEncodingException ex) {
throw new SampleException("Could not create file reader !", ex);
}
if (metadata == null) {
this.metadata = readMetadata(separator, useSaveSampleCfg);
} else {
this.metadata = metadata;
}
this.columnCount = this.metadata.getColumnCount();
this.separator = this.metadata.getSeparator();
this.row = 0;
this.lastSampleRead = nextSample();
}
// Builds the sample metadata, either from the file's header line or —
// when the save-sample configuration is in use and no header is present —
// from the static default SampleSaveConfiguration.
private SampleMetadata readMetadata(char separator, boolean useSaveSampleCfg) {
try {
SampleMetadata result;
// Read first line
String line = reader.readLine();
if(line == null) {
throw new IllegalArgumentException("File is empty");
}
// When we can use sample save config and there is no header in csv
// file
if (useSaveSampleCfg
&& CSVSaveService.getSampleSaveConfiguration(line,
file.getAbsolutePath()) == null) {
// Build metadata from default save config
LOG.warn("File '"+file.getAbsolutePath()+"' does not contain the field names header, "
+ "ensure the jmeter.save.saveservice.* properties are the same as when the CSV file was created or the file may be read incorrectly");
System.err.println("File '"+file.getAbsolutePath()+"' does not contain the field names header, "
+ "ensure the jmeter.save.saveservice.* properties are the same as when the CSV file was created or the file may be read incorrectly");
result = new SampleMetadata(
SampleSaveConfiguration.staticConfig());
} else {
// Build metadata from headers
result = new SampleMetaDataParser(separator).parse(line);
}
return result;
} catch (Exception e) {
throw new SampleException("Could not read metadata !", e);
}
}
/**
 * Gets the metadata.
 *
 * @return the metadata
 */
public SampleMetadata getMetadata() {
return metadata;
}
// Reads the next CSV record and wraps it in a Sample; returns null at
// end of file. A record with fewer columns than the metadata declares is
// treated as a configuration mismatch and rejected.
private Sample nextSample() {
String[] data;
try {
data = CSVSaveService.csvReadFile(reader, separator);
Sample sample = null;
if (data.length > 0) {
if (data.length < columnCount) {
throw new SampleException("Mismatch between expected number of columns:"+columnCount+" and columns in CSV file:"+data.length+
", check your jmeter.save.saveservice.* configuration");
}
sample = new Sample(row, metadata, data);
}
return sample;
} catch (IOException e) {
throw new SampleException("Could not read sample <" + row + ">", e);
}
}
/**
 * Gets next sample from the file and advances the read position.
 *
 * @return the sample, or {@code null} when the end of file is reached
 */
public Sample readSample() {
Sample out = lastSampleRead;
lastSampleRead = nextSample();
return out;
}
/**
 * Gets next sample from file but keep the reading file position.
 *
 * @return the sample, or {@code null} when the end of file is reached
 */
public Sample peek() {
return lastSampleRead;
}
/**
 * Indicates whether the file contains more samples
 *
 * @return true, if the file contains more samples
 */
public boolean hasNext() {
return lastSampleRead != null;
}
/**
 * Close the reader.
 */
@Override
public void close() {
JOrphanUtils.closeQuietly(reader);
}
}
|
src/core/org/apache/jmeter/report/core/CsvSampleReader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.report.core;
import java.io.BufferedReader;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import org.apache.jmeter.samplers.SampleSaveConfiguration;
import org.apache.jmeter.save.CSVSaveService;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.jorphan.util.JOrphanUtils;
import org.apache.log.Logger;
/**
* Reader class for reading CSV files.<reader>
* <p>
* Handles {@link SampleMetadata} reading and sample extraction.
* </p>
*
* @since 2.14
*/
public class CsvSampleReader implements Closeable{
private static final Logger LOG = LoggingManager.getLoggerForClass();
private static final int BUF_SIZE = 10000;
private static final String CHARSET = "ISO8859-1";
private static final char DEFAULT_SEPARATOR =
JMeterUtils.getPropDefault("jmeter.save.saveservice.default_delimiter", ",").charAt(0); //$NON-NLS-1$ //$NON-NLS-2$
private File file;
private BufferedReader reader;
private char separator;
private long row;
private SampleMetadata metadata;
private int columnCount;
private Sample lastSampleRead;
/**
* Instantiates a new csv sample reader.
*
* @param inputFile
* the input file (must not be {@code null})
* @param separator
* the separator
* @param useSaveSampleCfg
* indicates whether the reader uses jmeter
* SampleSaveConfiguration to define metadata
*/
public CsvSampleReader(File inputFile, char separator, boolean useSaveSampleCfg) {
this(inputFile, null, separator, useSaveSampleCfg);
}
/**
* Instantiates a new csv sample reader.
*
* @param inputFile
* the input file (must not be {@code null})
* @param metadata
* the metadata
*/
public CsvSampleReader(File inputFile, SampleMetadata metadata) {
this(inputFile, metadata, DEFAULT_SEPARATOR, false);
}
private CsvSampleReader(File inputFile, SampleMetadata metadata,
char separator, boolean useSaveSampleCfg) {
if (!(inputFile.isFile() && inputFile.canRead())) {
throw new IllegalArgumentException(inputFile.getAbsolutePath()
+ "does not exist or is not readable");
}
this.file = inputFile;
try {
this.reader = new BufferedReader(new InputStreamReader(
new FileInputStream(file), CHARSET), BUF_SIZE);
} catch (FileNotFoundException | UnsupportedEncodingException ex) {
throw new SampleException("Could not create file reader !", ex);
}
if (metadata == null) {
this.metadata = readMetadata(separator, useSaveSampleCfg);
} else {
this.metadata = metadata;
}
this.columnCount = this.metadata.getColumnCount();
this.separator = this.metadata.getSeparator();
this.row = 0;
this.lastSampleRead = nextSample();
}
/**
 * Builds the sample metadata from the first line of the file: either by
 * parsing it as a header row, or — when sample-save-config usage is
 * allowed and the line is recognized as data rather than a header — by
 * falling back to the static jmeter SampleSaveConfiguration.
 *
 * @param separator the column separator used to parse a header line
 * @param useSaveSampleCfg whether falling back to the jmeter
 *        SampleSaveConfiguration is permitted
 * @return the resolved metadata, never {@code null}
 * @throws SampleException if the first line cannot be read or parsed
 */
private SampleMetadata readMetadata(char separator, boolean useSaveSampleCfg) {
    try {
        // The first line is either a header row or (headerless file) a data row.
        final String firstLine = reader.readLine();
        if (firstLine == null) {
            throw new IllegalArgumentException("File is empty");
        }
        if (useSaveSampleCfg
                && CSVSaveService.getSampleSaveConfiguration(firstLine,
                        file.getAbsolutePath()) == null) {
            // No field-names header: warn on both the log and stderr, then
            // fall back to the default save configuration.
            final String message = "File '" + file.getAbsolutePath()
                    + "' does not contain the field names header, "
                    + "ensure the jmeter.save.saveservice.* properties are the same as when the CSV file was created or the file may be read incorrectly";
            LOG.warn(message);
            System.err.println(message);
            return new SampleMetadata(SampleSaveConfiguration.staticConfig());
        }
        // Header present: derive the metadata from it.
        return new SampleMetaDataParser(separator).parse(firstLine);
    } catch (Exception e) {
        // Includes the IllegalArgumentException above, wrapped like any
        // other failure — same contract as before.
        throw new SampleException("Could not read metadata !", e);
    }
}
/**
 * Gets the metadata describing the columns of this file.
 *
 * @return the metadata, never {@code null}
 */
public SampleMetadata getMetadata() {
    return metadata;
}
/**
 * Reads the next record from the CSV file and turns it into a sample,
 * padding short records with empty strings up to the expected column
 * count. Returns {@code null} at end of file.
 *
 * NOTE(review): {@code row} is never incremented anywhere in this class,
 * so every sample and every error message carries the same row index —
 * confirm this is intended.
 *
 * @return the next sample, or {@code null} when the file is exhausted
 * @throws SampleException on any I/O failure
 */
private Sample nextSample() {
    try {
        String[] fields = CSVSaveService.csvReadFile(reader, separator);
        if (fields.length == 0) {
            // End of file reached.
            return null;
        }
        // TODO is it correct to use a filler ?
        if (fields.length < columnCount) {
            // Widen the record to columnCount, filling with empty strings.
            final String[] padded = new String[columnCount];
            System.arraycopy(fields, 0, padded, 0, fields.length);
            for (int col = fields.length; col < columnCount; col++) {
                padded[col] = "";
            }
            fields = padded;
        }
        return new Sample(row, metadata, fields);
    } catch (IOException e) {
        throw new SampleException("Could not read sample <" + row + ">", e);
    }
}
/**
 * Gets the next sample from the file and advances the read-ahead buffer.
 *
 * @return the sample, or {@code null} when the file is exhausted
 */
public Sample readSample() {
    final Sample current = lastSampleRead;
    lastSampleRead = nextSample();
    return current;
}
/**
 * Gets the next sample from the file but keeps the reading file position,
 * i.e. a subsequent {@link #readSample()} returns the same sample.
 *
 * @return the buffered sample, or {@code null} when the file is exhausted
 */
public Sample peek() {
    return lastSampleRead;
}
/**
 * Indicates whether the file contains more samples.
 *
 * @return true, if the file contains more samples
 */
public boolean hasNext() {
    return lastSampleRead != null;
}
/**
 * Closes the underlying reader, swallowing any close-time exception.
 */
@Override
public void close() {
    JOrphanUtils.closeQuietly(reader);
}
}
|
Bug 58987 - Report/Dashboard: Improve error reporting
Throw exception if mismatch between expected number of columns and columns in csv file
Bugzilla Id: 58987
git-svn-id: 5ccfe34f605a6c2f9041ff2965ab60012c62539a@1730206 13f79535-47bb-0310-9956-ffa450edef68
|
src/core/org/apache/jmeter/report/core/CsvSampleReader.java
|
Bug 58987 - Report/Dashboard: Improve error reporting Throw exception if mismatch between expected number of columns and columns in csv file Bugzilla Id: 58987
|
|
Java
|
bsd-3-clause
|
215df06f2ad8106f7f25ed15d38668053adf4d70
| 0
|
edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon
|
/*
* $Id: MathematicalSciencesPublishersArticleIteratorFactory.java,v 1.4 2014-03-07 00:02:50 etenbrink Exp $
*/
/*
Copyright (c) 2000-2014 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.plugin.mathematicalsciencespublishers;
import java.util.Arrays;
import java.util.Iterator;
import java.util.regex.Pattern;
import org.lockss.daemon.*;
import org.lockss.extractor.ArticleMetadataExtractor;
import org.lockss.extractor.ArticleMetadataExtractorFactory;
import org.lockss.extractor.BaseArticleMetadataExtractor;
import org.lockss.extractor.MetadataTarget;
import org.lockss.plugin.*;
import org.lockss.util.Logger;
/**
 * Article iterator factory for Mathematical Sciences Publishers (MSP)
 * journals. Iterates over abstract pages matching PATTERN_TEMPLATE and
 * derives the other aspects (screen/printer PDFs) of each article via the
 * replacement strings below.
 */
public class MathematicalSciencesPublishersArticleIteratorFactory
    implements ArticleIteratorFactory,
               ArticleMetadataExtractorFactory {

  protected static Logger log =
      Logger.getLogger(MathematicalSciencesPublishersArticleIteratorFactory.class);

  // params from tdb file corresponding to AU
  // (year is formatted with %d here, so the AU parameter is an integer)
  protected static final String ROOT_TEMPLATE =
      "\"%s%s/%d/\", base_url, journal_id, year";
  protected static final String PATTERN_TEMPLATE =
      "\"^%s%s/%d/[0-9-]+/p.+[.]xhtml\", base_url, journal_id, year";

  // various aspects of an article
  // http://msp.org/involve/2013/6-1/p01.xhtml
  // http://msp.org/camcos/2012/7-2/p01-s.pdf
  // http://msp.org/camcos/2012/7-2/camcos-v7-n2-p01-p.pdf
  // http://www.msp.warwick.ac.uk/gt/2006/10/gt-2006-10-025p.pdf
  // http://msp.org/ant/2011/5-2/pC1.xhtml
  // http://msp.org/ant/2011/5-2/ant-v5-n2-pC1-s.pdf

  // Groups: $1=journal_id, $2=year, $3/$4/$5=volume[-issue], $6=page id
  // (the dot before "xhtml" is escaped as a character class).
  protected static final Pattern ABSTRACT_PATTERN = Pattern.compile(
      "([^/]+)/([0-9]+)/([0-9]+)(-?)([0-9]*)/p([c0-9]+)[.]xhtml$",
      Pattern.CASE_INSENSITIVE);

  // how to change from one form (aspect) of article to another
  protected static final String ABSTRACT_REPLACEMENT = "$1/$2/$3$4$5/p$6.xhtml";
  protected static final String SPDF_REPLACEMENT = "$1/$2/$3$4$5/$1-v$3$4n$5-p$6-s.pdf";
  protected static final String PPDF_REPLACEMENT = "$1/$2/$3$4$5/$1-v$3$4n$5-p$6-p.pdf";
  protected static final String ALT_SPDF_REPLACEMENT = "$1/$2/$3$4$5/$1-$2-$3-$6s.pdf";
  protected static final String ALT_PPDF_REPLACEMENT = "$1/$2/$3$4$5/$1-$2-$3-$6p.pdf";

  // MSP publisher, article content may look like this but you do not know
  // how many of the aspects will exist for a particular journal
  //
  //  msp.org/<journal_id>/<year>/<voliss>/p<page>.xhtml (abstract)
  //  msp.org/<journal_id>/<year>/<voliss>/<page_id>s.pdf (screen pdf)
  //  msp.org/<journal_id>/<year>/<voliss>/<page_id>p.pdf (printer pdf)
  //  msp.org/<journal_id>/<year>/<voliss>/b<page>xhtml (references)
  //  msp.org/<journal_id>/<year>/<voliss>/f<page>xhtml (forward citations)
  //

  /**
   * Builds the iterator over article aspects rooted at the AU's
   * journal/year subtree.
   */
  @Override
  public Iterator<ArticleFiles> createArticleIterator(ArchivalUnit au, MetadataTarget target)
      throws PluginException {
    SubTreeArticleIteratorBuilder builder = new SubTreeArticleIteratorBuilder(au);

    builder.setSpec(target,
        ROOT_TEMPLATE, PATTERN_TEMPLATE, Pattern.CASE_INSENSITIVE);

    // set up Abstract to be an aspect that will trigger an ArticleFiles
    // NOTE - for the moment this also means an abstract could be considered a
    // FULL_TEXT_CU until this is deprecated
    // though the ordered list for role full text will mean if any of the others
    // are there, they will become the FTCU
    builder.addAspect(
        ABSTRACT_PATTERN, ABSTRACT_REPLACEMENT,
        ArticleFiles.ROLE_ABSTRACT, ArticleFiles.ROLE_ARTICLE_METADATA);

    // set up PDF to be an aspect that will trigger an ArticleFiles
    builder.addAspect(
        Arrays.asList(SPDF_REPLACEMENT, PPDF_REPLACEMENT,
            ALT_SPDF_REPLACEMENT, ALT_PPDF_REPLACEMENT),
        ArticleFiles.ROLE_FULL_TEXT_PDF);

    // The order in which we want to define full_text_cu.
    // First one that exists will get the job
    // In this case, there are two jobs, one for counting articles (abstract is
    // good) and the other for metadata (PDF is correct)
    builder.setFullTextFromRoles(
        ArticleFiles.ROLE_FULL_TEXT_PDF,
        ArticleFiles.ROLE_ABSTRACT);

    return builder.getSubTreeArticleIterator();
  }

  /**
   * Extracts article metadata from the CU holding the
   * ROLE_ARTICLE_METADATA aspect (the abstract page).
   */
  @Override
  public ArticleMetadataExtractor createArticleMetadataExtractor(MetadataTarget target)
      throws PluginException {
    return new BaseArticleMetadataExtractor(ArticleFiles.ROLE_ARTICLE_METADATA);
  }
}
|
plugins/src/org/lockss/plugin/mathematicalsciencespublishers/MathematicalSciencesPublishersArticleIteratorFactory.java
|
/*
* $Id: MathematicalSciencesPublishersArticleIteratorFactory.java,v 1.3 2013-12-20 05:27:28 etenbrink Exp $
*/
/*
Copyright (c) 2000-2013 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.plugin.mathematicalsciencespublishers;
import java.util.Arrays;
import java.util.Iterator;
import java.util.regex.Pattern;
import org.lockss.daemon.*;
import org.lockss.extractor.ArticleMetadataExtractor;
import org.lockss.extractor.ArticleMetadataExtractorFactory;
import org.lockss.extractor.BaseArticleMetadataExtractor;
import org.lockss.extractor.MetadataTarget;
import org.lockss.plugin.*;
import org.lockss.util.Logger;
/**
 * Article iterator factory for Mathematical Sciences Publishers (MSP)
 * journals. Iterates over abstract pages matching PATTERN_TEMPLATE and
 * derives the other aspects (screen/printer PDFs) of each article via the
 * replacement strings below.
 */
public class MathematicalSciencesPublishersArticleIteratorFactory
    implements ArticleIteratorFactory,
               ArticleMetadataExtractorFactory {

  protected static Logger log =
      Logger.getLogger(MathematicalSciencesPublishersArticleIteratorFactory.class);

  // params from tdb file corresponding to AU
  protected static final String ROOT_TEMPLATE =
      "\"%s%s/%s/\", base_url, journal_id, year";
  protected static final String PATTERN_TEMPLATE =
      "\"^%s%s/%s/[0-9-]+/p.+[.]xhtml\", base_url, journal_id, year";

  // various aspects of an article
  // http://msp.org/involve/2013/6-1/p01.xhtml
  // http://msp.org/camcos/2012/7-2/p01-s.pdf
  // http://msp.org/camcos/2012/7-2/camcos-v7-n2-p01-p.pdf
  // http://www.msp.warwick.ac.uk/gt/2006/10/gt-2006-10-025p.pdf
  // http://msp.org/ant/2011/5-2/pC1.xhtml
  // http://msp.org/ant/2011/5-2/ant-v5-n2-pC1-s.pdf

  // Groups: $1=journal_id, $2=year, $3/$4/$5=volume[-issue], $6=page id.
  // Fixed: the dot before "xhtml" was unescaped ( ".xhtml" ) and therefore
  // matched ANY character; it is now the literal-dot class "[.]",
  // consistent with PATTERN_TEMPLATE above.
  protected static final Pattern ABSTRACT_PATTERN = Pattern.compile(
      "([^/]+)/([0-9]+)/([0-9]+)(-?)([0-9]*)/p([c0-9]+)[.]xhtml$",
      Pattern.CASE_INSENSITIVE);

  // how to change from one form (aspect) of article to another
  protected static final String ABSTRACT_REPLACEMENT = "$1/$2/$3$4$5/p$6.xhtml";
  protected static final String SPDF_REPLACEMENT = "$1/$2/$3$4$5/$1-v$3$4n$5-p$6-s.pdf";
  protected static final String PPDF_REPLACEMENT = "$1/$2/$3$4$5/$1-v$3$4n$5-p$6-p.pdf";
  protected static final String ALT_SPDF_REPLACEMENT = "$1/$2/$3$4$5/$1-$2-$3-$6s.pdf";
  protected static final String ALT_PPDF_REPLACEMENT = "$1/$2/$3$4$5/$1-$2-$3-$6p.pdf";

  // MSP publisher, article content may look like this but you do not know
  // how many of the aspects will exist for a particular journal
  //
  //  msp.org/<journal_id>/<year>/<voliss>/p<page>.xhtml (abstract)
  //  msp.org/<journal_id>/<year>/<voliss>/<page_id>s.pdf (screen pdf)
  //  msp.org/<journal_id>/<year>/<voliss>/<page_id>p.pdf (printer pdf)
  //  msp.org/<journal_id>/<year>/<voliss>/b<page>xhtml (references)
  //  msp.org/<journal_id>/<year>/<voliss>/f<page>xhtml (forward citations)
  //

  /**
   * Builds the iterator over article aspects rooted at the AU's
   * journal/year subtree.
   */
  @Override
  public Iterator<ArticleFiles> createArticleIterator(ArchivalUnit au, MetadataTarget target)
      throws PluginException {
    SubTreeArticleIteratorBuilder builder = new SubTreeArticleIteratorBuilder(au);

    builder.setSpec(target,
        ROOT_TEMPLATE, PATTERN_TEMPLATE, Pattern.CASE_INSENSITIVE);

    // set up Abstract to be an aspect that will trigger an ArticleFiles
    // NOTE - for the moment this also means an abstract could be considered a
    // FULL_TEXT_CU until this is deprecated
    // though the ordered list for role full text will mean if any of the others
    // are there, they will become the FTCU
    builder.addAspect(
        ABSTRACT_PATTERN, ABSTRACT_REPLACEMENT,
        ArticleFiles.ROLE_ABSTRACT, ArticleFiles.ROLE_ARTICLE_METADATA);

    // set up PDF to be an aspect that will trigger an ArticleFiles
    builder.addAspect(
        Arrays.asList(SPDF_REPLACEMENT, PPDF_REPLACEMENT,
            ALT_SPDF_REPLACEMENT, ALT_PPDF_REPLACEMENT),
        ArticleFiles.ROLE_FULL_TEXT_PDF);

    // The order in which we want to define full_text_cu.
    // First one that exists will get the job
    // In this case, there are two jobs, one for counting articles (abstract is
    // good) and the other for metadata (PDF is correct)
    builder.setFullTextFromRoles(
        ArticleFiles.ROLE_FULL_TEXT_PDF,
        ArticleFiles.ROLE_ABSTRACT);

    return builder.getSubTreeArticleIterator();
  }

  /**
   * Extracts article metadata from the CU holding the
   * ROLE_ARTICLE_METADATA aspect (the abstract page).
   */
  @Override
  public ArticleMetadataExtractor createArticleMetadataExtractor(MetadataTarget target)
      throws PluginException {
    return new BaseArticleMetadataExtractor(ArticleFiles.ROLE_ARTICLE_METADATA);
  }
}
|
Replace year parameter (string with int) and use char class
git-svn-id: 293778eaa97c8c94097d610b1bd5133a8f478f36@33090 4f837ed2-42f5-46e7-a7a5-fa17313484d4
|
plugins/src/org/lockss/plugin/mathematicalsciencespublishers/MathematicalSciencesPublishersArticleIteratorFactory.java
|
Replace year parameter (string with int) and use char class
|
|
Java
|
bsd-3-clause
|
7f725e69bd198ede812a02983a3ab8597687dacc
| 0
|
credentials/credentials_api
|
/*
* Copyright (c) 2015, the IRMA Team
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the IRMA project nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.irmacard.credentials.info;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
/**
 * Central, in-memory registry of credential, issuer and verification
 * descriptions. Obtain it through {@link #getInstance()}; before first use
 * either {@link #setCoreLocation} or {@link #setTreeWalker} should be
 * called so the store can locate its configuration files (when neither is
 * set, the store is created empty).
 *
 * TODO: Change print statements to proper Logging statements
 */
public class DescriptionStore {
    /** Location of the configuration files; may be null when a TreeWalker is set directly. */
    static URI CORE_LOCATION;
    /** Walker used to crawl the configuration tree (e.g. Android assets). */
    static TreeWalkerI treeWalker;
    /** Lazily created singleton instance. */
    static DescriptionStore ds;

    HashMap<Integer,CredentialDescription> credentialDescriptions = new HashMap<Integer, CredentialDescription>();
    HashMap<String,IssuerDescription> issuerDescriptions = new HashMap<String, IssuerDescription>();
    HashMap<Integer,VerificationDescription> verificationDescriptions = new HashMap<Integer, VerificationDescription>();

    /**
     * Define the CoreLocation. This has to be set before using the
     * DescriptionStore or define a TreeWalker instead.
     * @param coreLocation Location of configuration files.
     */
    public static void setCoreLocation(URI coreLocation) {
        CORE_LOCATION = coreLocation;
    }

    /**
     * Define the TreeWalker. This allows crawling more difficult storage systems,
     * like Android's. This has to be set before using the DescriptionStore or define
     * a coreLocation instead.
     * @param treeWalker the walker used to parse the configuration
     */
    public static void setTreeWalker(TreeWalkerI treeWalker) {
        DescriptionStore.treeWalker = treeWalker;
    }

    /**
     * Get DescriptionStore instance, creating it on first use.
     *
     * @return The DescriptionStore instance
     * @throws InfoException if parsing the configuration fails
     */
    public static DescriptionStore getInstance() throws InfoException {
        if(ds == null) {
            ds = new DescriptionStore();
        }
        return ds;
    }

    /**
     * Parses the configuration, preferring an explicit core location over a
     * pre-set tree walker; with neither, the store starts out empty.
     */
    private DescriptionStore() throws InfoException {
        if(CORE_LOCATION != null) {
            treeWalker = new TreeWalker(CORE_LOCATION);
        }
        if (treeWalker != null) {
            treeWalker.parseConfiguration(this);
        }
    }

    /**
     * Looks up a credential description by its numeric id.
     *
     * @return the description, or {@code null} if unknown
     */
    public CredentialDescription getCredentialDescription(short id) {
        // Integer.valueOf instead of the deprecated new Integer(...) ctor.
        return credentialDescriptions.get(Integer.valueOf(id));
    }

    /**
     * Looks up a credential description by issuer and credential id.
     *
     * @return the description, or {@code null} if not found
     */
    public CredentialDescription getCredentialDescriptionByName(String issuer,
            String credID) {
        for (CredentialDescription cd : credentialDescriptions.values()) {
            if (cd.getIssuerID().equals(issuer)
                    && cd.getCredentialID().equals(credID)) {
                return cd;
            }
        }

        // TODO: error handling? Exception?
        return null;
    }

    /**
     * Looks up a verification description by verifier and verification id.
     *
     * @return the description, or {@code null} if not found
     */
    public VerificationDescription getVerificationDescriptionByName(
            String verifier, String verificationID) {
        for (VerificationDescription vd : verificationDescriptions.values()) {
            if (vd.getVerifierID().equals(verifier)
                    && vd.getVerificationID().equals(verificationID)) {
                return vd;
            }
        }

        // TODO: error handling? Exception?
        return null;
    }

    /**
     * Registers a credential description.
     *
     * @throws InfoException if another credential already uses the same id
     */
    public void addCredentialDescription(CredentialDescription cd)
            throws InfoException {
        Integer id = Integer.valueOf(cd.getId());
        if (credentialDescriptions.containsKey(id)) {
            CredentialDescription other = credentialDescriptions.get(id);
            throw new InfoException("Cannot add credential " + cd.getName()
                    + ". Credential " + other.getCredentialID() + " of issuer "
                    + other.getIssuerID() + " has the same id (" + id + ").");
        }
        credentialDescriptions.put(id, cd);
    }

    /** Looks up an issuer description by issuer id, or {@code null}. */
    public IssuerDescription getIssuerDescription(String name) {
        return issuerDescriptions.get(name);
    }

    /**
     * Registers an issuer description.
     *
     * @throws InfoException if an issuer with the same id already exists
     */
    public void addIssuerDescription(IssuerDescription id) throws InfoException {
        if (issuerDescriptions.containsKey(id.getID())) {
            throw new InfoException("Cannot add issuer " + id.getName()
                    + ". An issuer with the id " + id.getID()
                    + " already exists.");
        }
        issuerDescriptions.put(id.getID(), id);
    }

    /** Adds or replaces an issuer description. */
    public void updateIssuerDescription(IssuerDescription id) {
        // HashMap.put replaces any existing mapping, so no prior remove needed.
        issuerDescriptions.put(id.getID(), id);
    }

    /**
     * Registers a verification description.
     *
     * @throws InfoException if another verification already uses the same id
     */
    public void addVerificationDescription(VerificationDescription vd)
            throws InfoException {
        Integer id = Integer.valueOf(vd.getID());
        if (verificationDescriptions.containsKey(id)) {
            VerificationDescription other = verificationDescriptions.get(id);
            throw new InfoException("Cannot add verification "
                    + vd.getVerificationID() + " of "
                    + vd.getVerifierID() + ". Verification "
                    + other.getVerificationID() + " of "
                    + other.getVerifierID() + " shares the same id ("
                    + id + ").");
        }
        verificationDescriptions.put(id, vd);
    }

    /** Adds or replaces a verification description. */
    public void updateVerificationDescription(VerificationDescription vd)
            throws InfoException {
        // HashMap.put replaces any existing mapping, so no prior remove needed.
        verificationDescriptions.put(Integer.valueOf(vd.getID()), vd);
    }

    /** All known issuer descriptions (live view of the internal map). */
    public Collection<IssuerDescription> getIssuerDescriptions() {
        return issuerDescriptions.values();
    }

    /** All verification descriptions registered for the given verifier id. */
    public Collection<VerificationDescription> getVerificationDescriptionsForVerifier(String verifierID) {
        ArrayList<VerificationDescription> result = new ArrayList<VerificationDescription>();
        for (VerificationDescription vd : verificationDescriptions.values()) {
            if (vd.getVerifierID().equals(verifierID)) {
                result.add(vd);
            }
        }
        return result;
    }

    /** All verification descriptions registered for the given verifier. */
    public Collection<VerificationDescription> getVerificationDescriptionsForVerifier(IssuerDescription verifier) {
        ArrayList<VerificationDescription> result = new ArrayList<VerificationDescription>();
        for (VerificationDescription vd : verificationDescriptions.values()) {
            if (vd.getVerifierID().equals(verifier.getID())) {
                result.add(vd);
            }
        }
        return result;
    }
}
|
src/main/java/org/irmacard/credentials/info/DescriptionStore.java
|
/*
* Copyright (c) 2015, the IRMA Team
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the IRMA project nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.irmacard.credentials.info;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
/**
 * Central, in-memory registry of credential, issuer and verification
 * descriptions. Obtain it through {@link #getInstance()}; before first use
 * either {@link #setCoreLocation} or {@link #setTreeWalker} must be called
 * so the store can locate its configuration files.
 *
 * TODO: Change print statements to proper Logging statements
 */
public class DescriptionStore {
	// Location of the configuration files; may be null when a TreeWalker is set directly.
	static URI CORE_LOCATION;

	// Walker used to crawl the configuration tree (e.g. Android assets).
	static TreeWalkerI treeWalker;

	// Lazily created singleton instance.
	static DescriptionStore ds;

	HashMap<Integer,CredentialDescription> credentialDescriptions = new HashMap<Integer, CredentialDescription>();
	HashMap<String,IssuerDescription> issuerDescriptions = new HashMap<String, IssuerDescription>();
	HashMap<Integer,VerificationDescription> verificationDescriptions = new HashMap<Integer, VerificationDescription>();

	/**
	 * Define the CoreLocation. This has to be set before using the
	 * DescriptionStore or define a TreeWalker instead.
	 * @param coreLocation Location of configuration files.
	 */
	public static void setCoreLocation(URI coreLocation) {
		CORE_LOCATION = coreLocation;
	}

	/**
	 * Define the TreeWalker. This allows crawling more difficult storage systems,
	 * like Android's. This has to be set before using the DescriptionStore or define
	 * a coreLocation instead.
	 * @param treeWalker the walker used to parse the configuration
	 */
	public static void setTreeWalker(TreeWalkerI treeWalker) {
		DescriptionStore.treeWalker = treeWalker;
	}

	/**
	 * Get DescriptionStore instance, creating it on first use.
	 *
	 * @return The DescriptionStore instance
	 * @throws InfoException if neither CoreLocation nor a TreeWalker has
	 *         been set, or if parsing the configuration fails
	 */
	public static DescriptionStore getInstance() throws InfoException {
		if(CORE_LOCATION == null && treeWalker == null) {
			// TODO: Improve exception type
			throw new InfoException(
					"Please set CoreLocation before using the DescriptionStore");
		}

		if(ds == null) {
			ds = new DescriptionStore();
		}

		return ds;
	}

	// Parses the configuration; an explicit core location takes precedence
	// over a pre-set tree walker. getInstance() guarantees that at least one
	// of the two is non-null, so treeWalker is non-null here.
	private DescriptionStore() throws InfoException {
		if(CORE_LOCATION != null) {
			treeWalker = new TreeWalker(CORE_LOCATION);
		}

		treeWalker.parseConfiguration(this);
	}

	/**
	 * Looks up a credential description by its numeric id.
	 *
	 * @return the description, or {@code null} if unknown
	 */
	public CredentialDescription getCredentialDescription(short id) {
		// NOTE(review): new Integer(...) is a deprecated boxing constructor;
		// Integer.valueOf(...) would be preferred (also below).
		return credentialDescriptions.get(new Integer(id));
	}

	/**
	 * Looks up a credential description by issuer and credential id.
	 *
	 * @return the description, or {@code null} if not found
	 */
	public CredentialDescription getCredentialDescriptionByName(String issuer,
			String credID) {
		for (CredentialDescription cd : credentialDescriptions.values()) {
			if (cd.getIssuerID().equals(issuer)
					&& cd.getCredentialID().equals(credID)) {
				return cd;
			}
		}

		// TODO: error handling? Exception?
		return null;
	}

	/**
	 * Looks up a verification description by verifier and verification id.
	 *
	 * @return the description, or {@code null} if not found
	 */
	public VerificationDescription getVerificationDescriptionByName(
			String verifier, String verificationID) {
		for (VerificationDescription vd : verificationDescriptions.values()) {
			if (vd.getVerifierID().equals(verifier)
					&& vd.getVerificationID().equals(verificationID)) {
				return vd;
			}
		}

		// TODO: error handling? Exception?
		return null;
	}

	/**
	 * Registers a credential description.
	 *
	 * @throws InfoException if another credential already uses the same id
	 */
	public void addCredentialDescription(CredentialDescription cd)
			throws InfoException {
		Integer id = new Integer(cd.getId());
		if (credentialDescriptions.containsKey(id)) {
			CredentialDescription other = credentialDescriptions.get(id);
			throw new InfoException("Cannot add credential " + cd.getName()
					+ ". Credential " + other.getCredentialID() + " of issuer "
					+ other.getIssuerID() + " has the same id (" + id + ").");
		}
		credentialDescriptions.put(id, cd);
	}

	/** Looks up an issuer description by issuer id, or {@code null}. */
	public IssuerDescription getIssuerDescription(String name) {
		return issuerDescriptions.get(name);
	}

	/**
	 * Registers an issuer description.
	 *
	 * @throws InfoException if an issuer with the same id already exists
	 */
	public void addIssuerDescription(IssuerDescription id) throws InfoException {
		if (issuerDescriptions.containsKey(id.getID())) {
			throw new InfoException("Cannot add issuer " + id.getName()
					+ ". An issuer with the id " + id.getID()
					+ " already exists.");
		}
		issuerDescriptions.put(id.getID(), id);
	}

	/** Adds or replaces an issuer description. */
	public void updateIssuerDescription(IssuerDescription id) {
		// The remove is redundant (put replaces), but kept as written.
		if (issuerDescriptions.containsKey(id.getID())) {
			issuerDescriptions.remove(id.getID());
		}
		issuerDescriptions.put(id.getID(), id);
	}

	/**
	 * Registers a verification description.
	 *
	 * @throws InfoException if another verification already uses the same id
	 */
	public void addVerificationDescription(VerificationDescription vd)
			throws InfoException {
		Integer id = new Integer(vd.getID());
		if (verificationDescriptions.containsKey(id)) {
			VerificationDescription other = verificationDescriptions.get(id);
			throw new InfoException("Cannot add verification "
					+ vd.getVerificationID() + " of "
					+ vd.getVerifierID() + ". Verification "
					+ other.getVerificationID() + " of "
					+ other.getVerifierID() + " shares the same id ("
					+ id + ").");
		}
		verificationDescriptions.put(new Integer(vd.getID()), vd);
	}

	/** Adds or replaces a verification description. */
	public void updateVerificationDescription(VerificationDescription vd)
			throws InfoException {
		Integer id = new Integer(vd.getID());
		// The remove is redundant (put replaces), but kept as written.
		if (verificationDescriptions.containsKey(id)) {
			verificationDescriptions.remove(id);
		}
		verificationDescriptions.put(new Integer(vd.getID()), vd);
	}

	/** All known issuer descriptions (live view of the internal map). */
	public Collection<IssuerDescription> getIssuerDescriptions() {
		return issuerDescriptions.values();
	}

	/** All verification descriptions registered for the given verifier id. */
	public Collection<VerificationDescription> getVerificationDescriptionsForVerifier(String verifierID) {
		ArrayList<VerificationDescription> result = new ArrayList<VerificationDescription>();
		for (VerificationDescription vd : verificationDescriptions.values()) {
			if (vd.getVerifierID().equals(verifierID)) {
				result.add(vd);
			}
		}
		return result;
	}

	/** All verification descriptions registered for the given verifier. */
	public Collection<VerificationDescription> getVerificationDescriptionsForVerifier(IssuerDescription verifier) {
		ArrayList<VerificationDescription> result = new ArrayList<VerificationDescription>();
		for (VerificationDescription vd : verificationDescriptions.values()) {
			if (vd.getVerifierID().equals(verifier.getID())) {
				result.add(vd);
			}
		}
		return result;
	}
}
|
Allow DescriptionStore without filesystem backing
|
src/main/java/org/irmacard/credentials/info/DescriptionStore.java
|
Allow DescriptionStore without filesystem backing
|
|
Java
|
bsd-3-clause
|
9c93055e3545b2e4bdbc8c2978ae5d575a19525c
| 0
|
fabricebouye/gw2-web-api-mapping,fabricebouye/gw2-web-api-mapping
|
/*
* Copyright (C) 2015-2016 Fabrice Bouyé
* All rights reserved.
*
* This software may be modified and distributed under the terms
* of the BSD license. See the LICENSE file for details.
*/
package api.web.gw2.mapping.v2.characters;
import api.web.gw2.mapping.core.DurationValue;
import api.web.gw2.mapping.core.IdValue;
import api.web.gw2.mapping.core.LevelValue;
import api.web.gw2.mapping.core.ListValue;
import api.web.gw2.mapping.core.MapValue;
import api.web.gw2.mapping.core.OptionalValue;
import api.web.gw2.mapping.core.QuantityValue;
import api.web.gw2.mapping.core.SetValue;
import api.web.gw2.mapping.v2.APIv2;
import api.web.gw2.mapping.v2.characters.inventory.InventoryBag;
import api.web.gw2.mapping.v2.characters.equipment.Equipment;
import java.time.Duration;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
/**
 * Defines a character.
 * @author Fabrice Bouyé
 */
@APIv2(endpoint = "v2/characters", requiresAuthentication = true, scope = "characters") // NOI18N.
public interface Character {

    /**
     * Gets the name of this character.
     * @return A {@code String}, never {@code null}.
     */
    String getName();

    /**
     * Gets the race of this character.
     * @return A {@code CharacterRace} instance, never {@code null}.
     */
    CharacterRace getRace();

    /**
     * Gets the profession of this character.
     * @return A {@code CharacterProfession} instance, never {@code null}.
     */
    CharacterProfession getProfession();

    /**
     * Gets the gender of this character.
     * @return A {@code CharacterGender} instance, never {@code null}.
     */
    CharacterGender getGender();

    /**
     * Gets the level of this character.
     * @return An {@code int} &ge; 1 and &le; 80.
     */
    @LevelValue
    int getLevel();

    /**
     * Gets the id of the guild of this character.
     * @return An {@code Optional<String>} instance, never {@code null}.
     */
    @IdValue
    @OptionalValue
    Optional<String> getGuild();

    /**
     * Gets the creation date of this character.
     * @return A {@code ZonedDateTime} instance, never {@code null}.
     */
    ZonedDateTime getCreated();

    /**
     * Gets the age of this character (the amount of seconds the character was played).
     * @return A {@code Duration} instance, never {@code null}.
     */
    @DurationValue
    Duration getAge();

    /**
     * Gets the number of deaths of this character.
     * @return An {@code int}.
     */
    @QuantityValue
    int getDeaths();

    /**
     * Gets the equipment of this character.
     * @return An {@code Optional<List<Equipment>>} instance, never {@code null}:
     * <br>If present, the list is non-modifiable and may be empty.
     * @see api.web.gw2.mapping.v2.tokeninfo.TokenInfoPermission#BUILDS
     * @see api.web.gw2.mapping.v2.tokeninfo.TokenInfoPermission#INVENTORIES
     */
    @OptionalValue
    @ListValue
    Optional<List<Equipment>> getEquipment();

    /**
     * Gets the inventory bags of this character.
     * @return An {@code Optional<List<Bag>>} instance, never {@code null}:
     * <br>If present, the list is non-modifiable and may be empty.
     * @see api.web.gw2.mapping.v2.tokeninfo.TokenInfoPermission#INVENTORIES
     */
    @OptionalValue
    @ListValue
    Optional<List<InventoryBag>> getBags();

    /**
     * Gets the crafting disciplines of this character.
     * @return An {@code Optional<Set<CharacterCrafting>>} instance, never {@code null}:
     * <br>If present, the set is non-modifiable and may be empty.
     */
    @OptionalValue
    @SetValue
    Optional<Set<CharacterCrafting>> getCrafting();

    /**
     * Gets the specializations of this character, grouped by game type.
     * @return An {@code Optional<Map<CharacterGameType, Set<CharacterSpecialization>>>} instance, never {@code null}:
     * <br>If present, the map is non-modifiable and may be empty.
     * @see api.web.gw2.mapping.v2.tokeninfo.TokenInfoPermission#BUILDS
     */
    @OptionalValue
    @MapValue
    @SetValue
    Optional<Map<CharacterGameType, Set<CharacterSpecialization>>> getSpecializations();

    /**
     * Gets the ids of the recipes known to this character.
     * @return An {@code Optional<Set<Integer>>} instance, never {@code null}:
     * <br>If present, the set is non-modifiable and may be empty.
     * @see api.web.gw2.mapping.v2.tokeninfo.TokenInfoPermission#INVENTORIES
     */
    @OptionalValue
    @SetValue
    @IdValue
    Optional<Set<Integer>> getRecipes();
}
|
src/api/web/gw2/mapping/v2/characters/Character.java
|
/*
* Copyright (C) 2015-2016 Fabrice Bouyé
* All rights reserved.
*
* This software may be modified and distributed under the terms
* of the BSD license. See the LICENSE file for details.
*/
package api.web.gw2.mapping.v2.characters;
import api.web.gw2.mapping.core.DurationValue;
import api.web.gw2.mapping.core.IdValue;
import api.web.gw2.mapping.core.LevelValue;
import api.web.gw2.mapping.core.ListValue;
import api.web.gw2.mapping.core.MapValue;
import api.web.gw2.mapping.core.OptionalValue;
import api.web.gw2.mapping.core.QuantityValue;
import api.web.gw2.mapping.core.SetValue;
import api.web.gw2.mapping.v2.APIv2;
import api.web.gw2.mapping.v2.characters.inventory.InventoryBag;
import api.web.gw2.mapping.v2.characters.equipment.Equipment;
import java.time.Duration;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
/**
 * Defines a character.
 * @author Fabrice Bouyé
 */
@APIv2(endpoint = "v2/characters", requiresAuthentication = true, scope = "characters") // NOI18N.
public interface Character {
    /**
     * Gets the name of this character.
     * @return A {@code String}, never {@code null}.
     */
    String getName();
    /**
     * Gets the race of this character.
     * @return A {@code CharacterRace} instance, never {@code null}.
     */
    CharacterRace getRace();
    /**
     * Gets the profession of this character.
     * @return A {@code CharacterProfession} instance, never {@code null}.
     */
    CharacterProfession getProfession();
    /**
     * Gets the gender of this character.
     * @return A {@code CharacterGender} instance, never {@code null}.
     */
    CharacterGender getGender();
    /**
     * Gets the level of this character.
     * @return An {@code int} &ge; 1 and &le; 80.
     */
    @LevelValue
    int getLevel();
    /**
     * Gets the id of the guild of this character.
     * @return An {@code Optional<String>} instance, never {@code null}.
     */
    @IdValue
    @OptionalValue
    Optional<String> getGuild();
    /**
     * Gets the creation date of this character.
     * @return A {@code ZonedDateTime} instance, never {@code null}.
     */
    ZonedDateTime getCreated();
    /**
     * Gets the age of this character (the number of seconds the character was played).
     * @return A {@code Duration} instance, never {@code null}.
     */
    @DurationValue
    Duration getAge();
    /**
     * Gets the number of deaths of this character.
     * @return An {@code int}.
     */
    @QuantityValue
    int getDeaths();
    /**
     * Gets the equipment of this character.
     * @return An {@code Optional<List<Equipment>>} instance, never {@code null}:
     * <br>If present, the list is non-modifiable and may be empty.
     * @see api.web.gw2.mapping.v2.tokeninfo.TokenInfoPermission#BUILDS
     * @see api.web.gw2.mapping.v2.tokeninfo.TokenInfoPermission#INVENTORIES
     */
    @OptionalValue
    @ListValue
    Optional<List<Equipment>> getEquipment();
    /**
     * Gets the inventory bags of this character.
     * @return An {@code Optional<List<InventoryBag>>} instance, never {@code null}:
     * <br>If present, the list is non-modifiable and may be empty.
     * @see api.web.gw2.mapping.v2.tokeninfo.TokenInfoPermission#INVENTORIES
     */
    @OptionalValue
    @ListValue
    Optional<List<InventoryBag>> getBags();
    /**
     * Gets the crafting disciplines of this character.
     * @return An {@code Optional<Set<CharacterCrafting>>} instance, never {@code null}:
     * <br>If present, the set is non-modifiable and may be empty.
     */
    @OptionalValue
    @SetValue
    Optional<Set<CharacterCrafting>> getCrafting();
    /**
     * Gets the specializations of this character, grouped by game type.
     * @return An {@code Optional<Map<CharacterGameType, Set<CharacterSpecialization>>>} instance, never {@code null}:
     * <br>If present, the map is non-modifiable and may be empty.
     * @see api.web.gw2.mapping.v2.tokeninfo.TokenInfoPermission#BUILDS
     */
    @OptionalValue
    @MapValue
    @SetValue
    // NOTE(review): method name uses the British spelling "Specialisations";
    // consider aligning with the US spelling used in the rest of the API.
    Optional<Map<CharacterGameType, Set<CharacterSpecialization>>> getSpecialisations();
    /**
     * Gets the ids of the recipes known to this character.
     * @return An {@code Optional<Set<Integer>>} instance, never {@code null}:
     * <br>If present, the set is non-modifiable and may be empty.
     * @see api.web.gw2.mapping.v2.tokeninfo.TokenInfoPermission#INVENTORIES
     */
    @OptionalValue
    @SetValue
    @IdValue
    Optional<Set<Integer>> getRecipes();
}
|
Fixed method name spelling.
|
src/api/web/gw2/mapping/v2/characters/Character.java
|
Fixed method name spelling.
|
|
Java
|
mit
|
b3f65dacf8828d4794242634db0273c8eb07e381
| 0
|
douggie/XChange
|
package org.knowm.xchange.livecoin;
import static org.knowm.xchange.currency.Currency.getInstance;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import org.knowm.xchange.currency.Currency;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.Order;
import org.knowm.xchange.dto.Order.OrderType;
import org.knowm.xchange.dto.account.Balance;
import org.knowm.xchange.dto.account.FundingRecord;
import org.knowm.xchange.dto.account.Wallet;
import org.knowm.xchange.dto.marketdata.OrderBook;
import org.knowm.xchange.dto.marketdata.Ticker;
import org.knowm.xchange.dto.marketdata.Trade;
import org.knowm.xchange.dto.marketdata.Trades;
import org.knowm.xchange.dto.marketdata.Trades.TradeSortType;
import org.knowm.xchange.dto.meta.CurrencyMetaData;
import org.knowm.xchange.dto.meta.CurrencyPairMetaData;
import org.knowm.xchange.dto.meta.ExchangeMetaData;
import org.knowm.xchange.dto.trade.LimitOrder;
import org.knowm.xchange.dto.trade.UserTrade;
import org.knowm.xchange.livecoin.dto.marketdata.LivecoinOrderBook;
import org.knowm.xchange.livecoin.dto.marketdata.LivecoinRestriction;
import org.knowm.xchange.livecoin.dto.marketdata.LivecoinTicker;
import org.knowm.xchange.livecoin.dto.marketdata.LivecoinTrade;
import org.knowm.xchange.livecoin.service.LivecoinAsksBidsData;
import org.knowm.xchange.utils.DateUtils;
/**
 * Static adapter methods that translate raw Livecoin DTOs and generic response
 * {@code Map}s into XChange domain objects (currency pairs, order books,
 * tickers, trades, orders, funding records, wallets and exchange metadata).
 */
public class LivecoinAdapters {
  // NOTE(review): SimpleDateFormat is not thread-safe when shared statically,
  // and this formatter appears unused inside this class — confirm before removing.
  private static final SimpleDateFormat dateFormat =
      new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS");
  static {
    // Timestamps are interpreted as UTC.
    dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
  }
  /** Utility class: prevent instantiation. */
  private LivecoinAdapters() {}
  /**
   * Converts a Livecoin restriction entry whose pair is encoded as
   * {@code "BASE/COUNTER"} into a {@link CurrencyPair}.
   */
  public static CurrencyPair adaptCurrencyPair(LivecoinRestriction product) {
    String[] data = product.getCurrencyPair().split("\\/");
    return new CurrencyPair(data[0], data[1]);
  }
  /**
   * Converts a raw Livecoin order book into an XChange {@link OrderBook}.
   * The resulting book carries no timestamp ({@code null}).
   */
  public static OrderBook adaptOrderBook(LivecoinOrderBook book, CurrencyPair currencyPair) {
    List<LimitOrder> asks = toLimitOrderList(book.getAsks(), OrderType.ASK, currencyPair);
    List<LimitOrder> bids = toLimitOrderList(book.getBids(), OrderType.BID, currencyPair);
    return new OrderBook(null, asks, bids);
  }
  /** Maps raw ask/bid levels to limit orders; placeholder id "0", no timestamp. */
  private static List<LimitOrder> toLimitOrderList(
      LivecoinAsksBidsData[] levels, OrderType orderType, CurrencyPair currencyPair) {
    List<LimitOrder> allLevels = new ArrayList<>(levels.length);
    for (LivecoinAsksBidsData ask : levels) {
      allLevels.add(
          new LimitOrder(orderType, ask.getQuantity(), currencyPair, "0", null, ask.getRate()));
    }
    return allLevels;
  }
  /**
   * Re-keys a map of order books from {@code "BASE/COUNTER"} strings to
   * {@link CurrencyPair} instances.
   */
  public static Map<CurrencyPair, LivecoinOrderBook> adaptToCurrencyPairKeysMap(
      Map<String, LivecoinOrderBook> orderBooksRaw) {
    Set<Map.Entry<String, LivecoinOrderBook>> entries = orderBooksRaw.entrySet();
    Map<CurrencyPair, LivecoinOrderBook> converted = new HashMap<>(entries.size());
    for (Map.Entry<String, LivecoinOrderBook> entry : entries) {
      String[] currencyPairSplit = entry.getKey().split("/");
      CurrencyPair currencyPair = new CurrencyPair(currencyPairSplit[0], currencyPairSplit[1]);
      converted.put(currencyPair, entry.getValue());
    }
    return converted;
  }
  /**
   * Merges Livecoin trading restrictions into the exchange metadata, preserving
   * existing fee/maximum data and registering any new currencies.
   */
  public static ExchangeMetaData adaptToExchangeMetaData(
      ExchangeMetaData exchangeMetaData, List<LivecoinRestriction> products) {
    Map<CurrencyPair, CurrencyPairMetaData> currencyPairs = exchangeMetaData.getCurrencyPairs();
    Map<Currency, CurrencyMetaData> currencies = exchangeMetaData.getCurrencies();
    for (LivecoinRestriction product : products) {
      BigDecimal minSize =
          product.getMinLimitQuantity() == null ? BigDecimal.ZERO : product.getMinLimitQuantity();
      // NOTE(review): ROUND_UNNECESSARY throws ArithmeticException if the value
      // actually needs rounding at this scale; the constant is also deprecated
      // in newer JDKs in favour of RoundingMode.UNNECESSARY — confirm intent.
      minSize = minSize.setScale(product.getPriceScale(), BigDecimal.ROUND_UNNECESSARY);
      CurrencyPair pair = adaptCurrencyPair(product);
      CurrencyPairMetaData staticMetaData = exchangeMetaData.getCurrencyPairs().get(pair);
      // Fall back to 8 decimal places when no static metadata is known.
      int priceScale = staticMetaData == null ? 8 : staticMetaData.getPriceScale();
      if (currencyPairs.containsKey(pair)) {
        CurrencyPairMetaData existing = currencyPairs.get(pair);
        currencyPairs.put(
            pair,
            new CurrencyPairMetaData(
                existing.getTradingFee(), minSize, existing.getMaximumAmount(), priceScale));
      } else {
        currencyPairs.put(pair, new CurrencyPairMetaData(null, minSize, null, priceScale));
      }
      // Register both legs of the pair even without currency metadata.
      if (!currencies.containsKey(pair.base)) currencies.put(pair.base, null);
      if (!currencies.containsKey(pair.counter)) currencies.put(pair.counter, null);
    }
    return new ExchangeMetaData(currencyPairs, currencies, null, null, true);
  }
  /**
   * Converts raw public trades into a {@link Trades} container sorted by id.
   * NOTE(review): assumes {@code nativeTrades} is non-empty —
   * {@code nativeTrades[0]} throws on an empty array; verify callers.
   */
  public static Trades adaptTrades(LivecoinTrade[] nativeTrades, CurrencyPair currencyPair) {
    List<Trade> trades = new ArrayList<>(nativeTrades.length);
    for (LivecoinTrade trade : nativeTrades) {
      // Livecoin reports the taker side; a "SELL" fill is shown as a BID here.
      OrderType type = trade.getType().equals("SELL") ? OrderType.BID : OrderType.ASK;
      Trade t =
          new Trade(
              type,
              trade.getQuantity(),
              currencyPair,
              trade.getPrice(),
              parseDate(trade.getTime()),
              String.valueOf(trade.getId()));
      trades.add(t);
    }
    return new Trades(trades, nativeTrades[0].getId(), TradeSortType.SortByID);
  }
  /** Converts an epoch timestamp in seconds to a {@link Date} (millis). */
  private static Date parseDate(Long rawDateLong) {
    return new Date(rawDateLong * 1000);
  }
  /** Maps a raw Livecoin ticker onto the XChange {@link Ticker} builder. */
  public static Ticker adaptTicker(LivecoinTicker ticker, CurrencyPair currencyPair) {
    BigDecimal last = ticker.getLast();
    BigDecimal bid = ticker.getBestBid();
    BigDecimal ask = ticker.getBestAsk();
    BigDecimal high = ticker.getHigh();
    BigDecimal low = ticker.getLow();
    BigDecimal volume = ticker.getVolume();
    return new Ticker.Builder()
        .currencyPair(currencyPair)
        .last(last)
        .bid(bid)
        .ask(ask)
        .high(high)
        .low(low)
        .volume(volume)
        .build();
  }
  /**
   * Builds a {@link LimitOrder} from a raw open-order response map.
   * Reads keys: "type", "currencyPair", "quantity", "remainingQuantity",
   * "id", "issueTime", "price".
   * @throws IllegalStateException for unrecognised order types.
   */
  public static LimitOrder adaptOpenOrder(Map map) {
    String typeName = map.get("type").toString();
    OrderType type;
    switch (typeName) {
      case "MARKET_SELL":
        type = OrderType.ASK;
        break;
      case "LIMIT_SELL":
        type = OrderType.ASK;
        break;
      case "LIMIT_BUY":
        type = OrderType.BID;
        break;
      case "MARKET_BUY":
        type = OrderType.BID;
        break;
      default:
        throw new IllegalStateException("Don't understand " + map);
    }
    String ccyPair = map.get("currencyPair").toString();
    String[] pair = ccyPair.split("/");
    Currency ccyA = getInstance(pair[0]);
    Currency ccyB = getInstance(pair[1]);
    BigDecimal startingQuantity = new BigDecimal(map.get("quantity").toString());
    BigDecimal remainingQuantity = new BigDecimal(map.get("remainingQuantity").toString());
    // Partially filled when some (but not all) of the quantity remains.
    Order.OrderStatus status =
        remainingQuantity.compareTo(startingQuantity) < 0
            ? Order.OrderStatus.PARTIALLY_FILLED
            : Order.OrderStatus.PENDING_NEW;
    return new LimitOrder(
        type,
        remainingQuantity,
        new CurrencyPair(ccyA, ccyB),
        map.get("id").toString(),
        DateUtils.fromUnixTime(Double.valueOf(map.get("issueTime").toString()).longValue()),
        new BigDecimal(map.get("price").toString()),
        null,
        null,
        null,
        status);
  }
  /**
   * Builds a {@link UserTrade} from a raw trade-history map. The unit price is
   * derived as variableAmount / amount, rounded half-up at the larger of the
   * two operand scales.
   * Reads keys: "type", "fixedCurrency", "variableCurrency", "amount",
   * "variableAmount", "date", "id", "externalKey", "fee", "taxCurrency".
   */
  public static UserTrade adaptUserTrade(Map map) {
    OrderType type = OrderType.BID;
    if (map.get("type").toString().equals("SELL")) type = OrderType.ASK;
    Currency ccyA = Currency.getInstance(map.get("fixedCurrency").toString());
    Currency ccyB = Currency.getInstance(map.get("variableCurrency").toString());
    BigDecimal amountA = new BigDecimal(map.get("amount").toString());
    BigDecimal amountB = new BigDecimal(map.get("variableAmount").toString());
    int scale = Math.max(amountA.scale(), amountB.scale());
    BigDecimal price = amountB.divide(amountA, scale, RoundingMode.HALF_UP);
    String id = map.get("id").toString();
    return new UserTrade(
        type,
        amountA,
        new CurrencyPair(ccyA, ccyB),
        price,
        DateUtils.fromMillisUtc(Long.valueOf(map.get("date").toString())),
        id,
        map.get("externalKey").toString(),
        new BigDecimal(map.get("fee").toString()),
        getInstance(map.get("taxCurrency").toString()));
  }
  /**
   * Builds a {@link FundingRecord} (deposit or withdrawal, always COMPLETE)
   * from a raw funding-history map.
   */
  public static FundingRecord adaptFundingRecord(Map map) {
    FundingRecord.Type type = FundingRecord.Type.WITHDRAWAL;
    if (map.get("type").toString().equals("DEPOSIT")) type = FundingRecord.Type.DEPOSIT;
    return new FundingRecord(
        map.get("externalKey").toString(),
        DateUtils.fromMillisUtc(Long.valueOf(map.get("date").toString())),
        getInstance(map.get("fixedCurrency").toString()),
        new BigDecimal(map.get("amount").toString()),
        map.get("id").toString(),
        null,
        type,
        FundingRecord.Status.COMPLETE,
        null,
        new BigDecimal(map.get("fee").toString()),
        null);
  }
  /**
   * Groups raw balance entries by currency and builds one {@link Wallet} per
   * currency. Each entry contributes one balance component keyed by "type".
   */
  public static List<Wallet> adaptWallets(List<Map> data) {
    Map<Currency, WalletBuilder> wallets = new HashMap<>();
    for (Map balance : data) {
      String type = balance.get("type").toString();
      String ccy = balance.get("currency").toString();
      String value = balance.get("value").toString();
      Currency curr = getInstance(ccy);
      WalletBuilder builder = wallets.get(curr);
      if (builder == null) {
        builder = new WalletBuilder(curr);
      }
      builder.add(type, value);
      wallets.put(curr, builder);
    }
    List<Wallet> res = new ArrayList<>();
    for (WalletBuilder builder : wallets.values()) {
      res.add(builder.build());
    }
    return res;
  }
  /** Accumulates per-type balance components for a single currency. */
  static class WalletBuilder {
    private Currency currency;
    // Maps Livecoin balance type ("total", "available", "trade", ...) to amount.
    private Map<String, BigDecimal> map = new HashMap<>();
    WalletBuilder(Currency currency) {
      this.currency = currency;
    }
    /**
     * Builds the wallet; missing components yield null balance fields.
     * NOTE(review): assumes "total"/"available"/"trade" keys — confirm against
     * the Livecoin balance endpoint.
     */
    public Wallet build() {
      return new Wallet(
          currency.getCurrencyCode(),
          new Balance(
              currency,
              map.get("total"),
              map.get("available"),
              map.get("trade"),
              BigDecimal.ZERO,
              BigDecimal.ZERO,
              BigDecimal.ZERO,
              BigDecimal.ZERO));
    }
    /** Records one balance component. */
    public void add(String type, String value) {
      map.put(type, new BigDecimal(value));
    }
  }
}
|
xchange-livecoin/src/main/java/org/knowm/xchange/livecoin/LivecoinAdapters.java
|
package org.knowm.xchange.livecoin;
import static org.knowm.xchange.currency.Currency.getInstance;
import java.math.BigDecimal;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import org.knowm.xchange.currency.Currency;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.Order;
import org.knowm.xchange.dto.Order.OrderType;
import org.knowm.xchange.dto.account.Balance;
import org.knowm.xchange.dto.account.FundingRecord;
import org.knowm.xchange.dto.account.Wallet;
import org.knowm.xchange.dto.marketdata.OrderBook;
import org.knowm.xchange.dto.marketdata.Ticker;
import org.knowm.xchange.dto.marketdata.Trade;
import org.knowm.xchange.dto.marketdata.Trades;
import org.knowm.xchange.dto.marketdata.Trades.TradeSortType;
import org.knowm.xchange.dto.meta.CurrencyMetaData;
import org.knowm.xchange.dto.meta.CurrencyPairMetaData;
import org.knowm.xchange.dto.meta.ExchangeMetaData;
import org.knowm.xchange.dto.trade.LimitOrder;
import org.knowm.xchange.dto.trade.UserTrade;
import org.knowm.xchange.livecoin.dto.marketdata.LivecoinOrderBook;
import org.knowm.xchange.livecoin.dto.marketdata.LivecoinRestriction;
import org.knowm.xchange.livecoin.dto.marketdata.LivecoinTicker;
import org.knowm.xchange.livecoin.dto.marketdata.LivecoinTrade;
import org.knowm.xchange.livecoin.service.LivecoinAsksBidsData;
import org.knowm.xchange.utils.DateUtils;
/**
 * Static adapter methods translating raw Livecoin DTOs and generic response
 * {@code Map}s into XChange domain objects.
 */
public class LivecoinAdapters {
  // NOTE(review): SimpleDateFormat is not thread-safe when shared statically,
  // and this formatter appears unused inside this class — confirm before removing.
  private static final SimpleDateFormat dateFormat =
      new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS");
  static {
    // Timestamps are interpreted as UTC.
    dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
  }
  /** Utility class: prevent instantiation. */
  private LivecoinAdapters() {}
  /** Converts a "BASE/COUNTER" restriction entry into a {@link CurrencyPair}. */
  public static CurrencyPair adaptCurrencyPair(LivecoinRestriction product) {
    String[] data = product.getCurrencyPair().split("\\/");
    return new CurrencyPair(data[0], data[1]);
  }
  /** Converts a raw order book into an XChange {@link OrderBook} (no timestamp). */
  public static OrderBook adaptOrderBook(LivecoinOrderBook book, CurrencyPair currencyPair) {
    List<LimitOrder> asks = toLimitOrderList(book.getAsks(), OrderType.ASK, currencyPair);
    List<LimitOrder> bids = toLimitOrderList(book.getBids(), OrderType.BID, currencyPair);
    return new OrderBook(null, asks, bids);
  }
  /** Maps raw ask/bid levels to limit orders; placeholder id "0", no timestamp. */
  private static List<LimitOrder> toLimitOrderList(
      LivecoinAsksBidsData[] levels, OrderType orderType, CurrencyPair currencyPair) {
    List<LimitOrder> allLevels = new ArrayList<>(levels.length);
    for (LivecoinAsksBidsData ask : levels) {
      allLevels.add(
          new LimitOrder(orderType, ask.getQuantity(), currencyPair, "0", null, ask.getRate()));
    }
    return allLevels;
  }
  /** Re-keys order books from "BASE/COUNTER" strings to {@link CurrencyPair}s. */
  public static Map<CurrencyPair, LivecoinOrderBook> adaptToCurrencyPairKeysMap(
      Map<String, LivecoinOrderBook> orderBooksRaw) {
    Set<Map.Entry<String, LivecoinOrderBook>> entries = orderBooksRaw.entrySet();
    Map<CurrencyPair, LivecoinOrderBook> converted = new HashMap<>(entries.size());
    for (Map.Entry<String, LivecoinOrderBook> entry : entries) {
      String[] currencyPairSplit = entry.getKey().split("/");
      CurrencyPair currencyPair = new CurrencyPair(currencyPairSplit[0], currencyPairSplit[1]);
      converted.put(currencyPair, entry.getValue());
    }
    return converted;
  }
  /**
   * Merges Livecoin trading restrictions into the exchange metadata, preserving
   * existing fee/maximum data and registering any new currencies.
   */
  public static ExchangeMetaData adaptToExchangeMetaData(
      ExchangeMetaData exchangeMetaData, List<LivecoinRestriction> products) {
    Map<CurrencyPair, CurrencyPairMetaData> currencyPairs = exchangeMetaData.getCurrencyPairs();
    Map<Currency, CurrencyMetaData> currencies = exchangeMetaData.getCurrencies();
    for (LivecoinRestriction product : products) {
      BigDecimal minSize =
          product.getMinLimitQuantity() == null ? BigDecimal.ZERO : product.getMinLimitQuantity();
      // NOTE(review): ROUND_UNNECESSARY throws ArithmeticException if rounding
      // would actually occur at this scale — confirm intent.
      minSize = minSize.setScale(product.getPriceScale(), BigDecimal.ROUND_UNNECESSARY);
      CurrencyPair pair = adaptCurrencyPair(product);
      CurrencyPairMetaData staticMetaData = exchangeMetaData.getCurrencyPairs().get(pair);
      // Fall back to 8 decimal places when no static metadata is known.
      int priceScale = staticMetaData == null ? 8 : staticMetaData.getPriceScale();
      if (currencyPairs.containsKey(pair)) {
        CurrencyPairMetaData existing = currencyPairs.get(pair);
        currencyPairs.put(
            pair,
            new CurrencyPairMetaData(
                existing.getTradingFee(), minSize, existing.getMaximumAmount(), priceScale));
      } else {
        currencyPairs.put(pair, new CurrencyPairMetaData(null, minSize, null, priceScale));
      }
      if (!currencies.containsKey(pair.base)) currencies.put(pair.base, null);
      if (!currencies.containsKey(pair.counter)) currencies.put(pair.counter, null);
    }
    return new ExchangeMetaData(currencyPairs, currencies, null, null, true);
  }
  /**
   * Converts raw public trades into a {@link Trades} container sorted by id.
   * NOTE(review): {@code nativeTrades[0]} throws on an empty array — verify callers.
   */
  public static Trades adaptTrades(LivecoinTrade[] nativeTrades, CurrencyPair currencyPair) {
    List<Trade> trades = new ArrayList<>(nativeTrades.length);
    for (LivecoinTrade trade : nativeTrades) {
      // Livecoin reports the taker side; a "SELL" fill is shown as a BID here.
      OrderType type = trade.getType().equals("SELL") ? OrderType.BID : OrderType.ASK;
      Trade t =
          new Trade(
              type,
              trade.getQuantity(),
              currencyPair,
              trade.getPrice(),
              parseDate(trade.getTime()),
              String.valueOf(trade.getId()));
      trades.add(t);
    }
    return new Trades(trades, nativeTrades[0].getId(), TradeSortType.SortByID);
  }
  /** Converts an epoch timestamp in seconds to a {@link Date} (millis). */
  private static Date parseDate(Long rawDateLong) {
    return new Date(rawDateLong * 1000);
  }
  /** Maps a raw Livecoin ticker onto the XChange {@link Ticker} builder. */
  public static Ticker adaptTicker(LivecoinTicker ticker, CurrencyPair currencyPair) {
    BigDecimal last = ticker.getLast();
    BigDecimal bid = ticker.getBestBid();
    BigDecimal ask = ticker.getBestAsk();
    BigDecimal high = ticker.getHigh();
    BigDecimal low = ticker.getLow();
    BigDecimal volume = ticker.getVolume();
    return new Ticker.Builder()
        .currencyPair(currencyPair)
        .last(last)
        .bid(bid)
        .ask(ask)
        .high(high)
        .low(low)
        .volume(volume)
        .build();
  }
  /**
   * Builds a {@link LimitOrder} from a raw open-order response map.
   * @throws IllegalStateException for unrecognised order types.
   */
  public static LimitOrder adaptOpenOrder(Map map) {
    String typeName = map.get("type").toString();
    OrderType type;
    switch (typeName) {
      case "MARKET_SELL":
        type = OrderType.ASK;
        break;
      case "LIMIT_SELL":
        type = OrderType.ASK;
        break;
      case "LIMIT_BUY":
        type = OrderType.BID;
        break;
      case "MARKET_BUY":
        type = OrderType.BID;
        break;
      default:
        throw new IllegalStateException("Don't understand " + map);
    }
    String ccyPair = map.get("currencyPair").toString();
    String[] pair = ccyPair.split("/");
    Currency ccyA = getInstance(pair[0]);
    Currency ccyB = getInstance(pair[1]);
    BigDecimal startingQuantity = new BigDecimal(map.get("quantity").toString());
    BigDecimal remainingQuantity = new BigDecimal(map.get("remainingQuantity").toString());
    // Partially filled when some (but not all) of the quantity remains.
    Order.OrderStatus status =
        remainingQuantity.compareTo(startingQuantity) < 0
            ? Order.OrderStatus.PARTIALLY_FILLED
            : Order.OrderStatus.PENDING_NEW;
    return new LimitOrder(
        type,
        remainingQuantity,
        new CurrencyPair(ccyA, ccyB),
        map.get("id").toString(),
        DateUtils.fromUnixTime(Double.valueOf(map.get("issueTime").toString()).longValue()),
        new BigDecimal(map.get("price").toString()),
        null,
        null,
        null,
        status);
  }
  /**
   * Builds a {@link UserTrade} from a raw trade-history map.
   * NOTE(review): the total "variableAmount" is passed where UserTrade expects
   * a unit price — later revisions compute variableAmount / amount; verify.
   */
  public static UserTrade adaptUserTrade(Map map) {
    OrderType type = OrderType.BID;
    if (map.get("type").toString().equals("SELL")) type = OrderType.ASK;
    Currency ccyA = Currency.getInstance(map.get("fixedCurrency").toString());
    Currency ccyB = Currency.getInstance(map.get("variableCurrency").toString());
    return new UserTrade(
        type,
        new BigDecimal(map.get("amount").toString()),
        new CurrencyPair(ccyA, ccyB),
        new BigDecimal(map.get("variableAmount").toString()),
        DateUtils.fromMillisUtc(Long.valueOf(map.get("date").toString())),
        map.get("id").toString(),
        map.get("externalKey").toString(),
        new BigDecimal(map.get("fee").toString()),
        getInstance(map.get("taxCurrency").toString()));
  }
  /** Builds a deposit/withdrawal {@link FundingRecord} (always COMPLETE). */
  public static FundingRecord adaptFundingRecord(Map map) {
    FundingRecord.Type type = FundingRecord.Type.WITHDRAWAL;
    if (map.get("type").toString().equals("DEPOSIT")) type = FundingRecord.Type.DEPOSIT;
    return new FundingRecord(
        map.get("externalKey").toString(),
        DateUtils.fromMillisUtc(Long.valueOf(map.get("date").toString())),
        getInstance(map.get("fixedCurrency").toString()),
        new BigDecimal(map.get("amount").toString()),
        map.get("id").toString(),
        null,
        type,
        FundingRecord.Status.COMPLETE,
        null,
        new BigDecimal(map.get("fee").toString()),
        null);
  }
  /** Groups raw balance entries by currency and builds one wallet each. */
  public static List<Wallet> adaptWallets(List<Map> data) {
    Map<Currency, WalletBuilder> wallets = new HashMap<>();
    for (Map balance : data) {
      String type = balance.get("type").toString();
      String ccy = balance.get("currency").toString();
      String value = balance.get("value").toString();
      Currency curr = getInstance(ccy);
      WalletBuilder builder = wallets.get(curr);
      if (builder == null) {
        builder = new WalletBuilder(curr);
      }
      builder.add(type, value);
      wallets.put(curr, builder);
    }
    List<Wallet> res = new ArrayList<>();
    for (WalletBuilder builder : wallets.values()) {
      res.add(builder.build());
    }
    return res;
  }
  /** Accumulates per-type balance components for a single currency. */
  static class WalletBuilder {
    private Currency currency;
    // Maps Livecoin balance type ("total", "available", "trade", ...) to amount.
    private Map<String, BigDecimal> map = new HashMap<>();
    WalletBuilder(Currency currency) {
      this.currency = currency;
    }
    /** Builds the wallet; missing components yield null balance fields. */
    public Wallet build() {
      return new Wallet(
          currency.getCurrencyCode(),
          new Balance(
              currency,
              map.get("total"),
              map.get("available"),
              map.get("trade"),
              BigDecimal.ZERO,
              BigDecimal.ZERO,
              BigDecimal.ZERO,
              BigDecimal.ZERO));
    }
    /** Records one balance component. */
    public void add(String type, String value) {
      map.put(type, new BigDecimal(value));
    }
  }
}
|
[livecoin] fixed price on UserTrade
|
xchange-livecoin/src/main/java/org/knowm/xchange/livecoin/LivecoinAdapters.java
|
[livecoin] fixed price on UserTrade
|
|
Java
|
mit
|
1d59975e873b0f73740094c7ecb2a26a26b361e1
| 0
|
InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service
|
package com.worth.ifs;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.flywaydb.core.Flyway;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
/**
 * This is the base class for testing REST services with full integration with a running "data" layer server on
 * port "23456" and its underlying database.
 *
 * Tests can rollback their changes to the database by using the @Rollback annotation.
 *
 */
@RunWith(SpringJUnit4ClassRunner.class)
@Transactional
public abstract class BaseRestServiceIntegrationTest<RestServiceType> extends BaseWebIntegrationTest {
    private static final Log LOG = LogFactory.getLog(BaseRestServiceIntegrationTest.class);
    // The REST service under test, injected via the abstract setter below.
    protected RestServiceType service;
    @Autowired
    protected abstract void setRestService(RestServiceType service);
    // Flyway connection settings, resolved from the Spring environment.
    @Value("${flyway.url}")
    public String databaseUrl;
    @Value("${flyway.user}")
    public String databaseUser;
    @Value("${flyway.password}")
    public String databasePassword;
    // Comma-separated Flyway migration locations (hard-coded default).
    public String locations = "db/migration,db/integration";
    /** Drops all objects and re-applies the migrations found at the given locations. */
    private void cleanAndMigrateDatabaseWithPatches(String[] patchLocations){
        LOG.info("cleanAndMigrateDatabaseWithPatches");
        Flyway f = new Flyway();
        f.setDataSource(databaseUrl, databaseUser, databasePassword);
        f.setLocations(patchLocations);
        f.clean();
        f.migrate();
    }
    /**
     * Need to do a db reset, because spring can't do a @rollback on rest calls...
     * Runs once after this test bean is constructed and again before it is destroyed.
     */
    @PostConstruct @PreDestroy
    public void recreateDatabase(){
        cleanAndMigrateDatabaseWithPatches(locations.split("\\s*,\\s*"));
    }
}
|
ifs-data-service/src/test/java/com/worth/ifs/BaseRestServiceIntegrationTest.java
|
package com.worth.ifs;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.flywaydb.core.Flyway;
import org.junit.Before;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
/**
 * This is the base class for testing REST services with full integration with a running "data" layer server on
 * port "23456" and its underlying database.
 *
 * Tests can rollback their changes to the database by using the @Rollback annotation.
 *
 */
@RunWith(SpringJUnit4ClassRunner.class)
public abstract class BaseRestServiceIntegrationTest<RestServiceType> extends BaseWebIntegrationTest {
    private static final Log LOG = LogFactory.getLog(BaseRestServiceIntegrationTest.class);
    // The REST service under test, injected via the abstract setter below.
    protected RestServiceType service;
    @Autowired
    protected abstract void setRestService(RestServiceType service);
    // Flyway connection settings, resolved from the Spring environment.
    @Value("${flyway.url}")
    public String databaseUrl;
    @Value("${flyway.user}")
    public String databaseUser;
    @Value("${flyway.password}")
    public String databasePassword;
    // Comma-separated Flyway migration locations from configuration.
    @Value("${flyway.locations}")
    public String locations;
    /** Drops all objects and re-applies the migrations found at the given locations. */
    private void cleanAndMigrateDatabaseWithPatches(String[] patchLocations){
        LOG.info("cleanAndMigrateDatabaseWithPatches");
        Flyway f = new Flyway();
        f.setDataSource(databaseUrl, databaseUser, databasePassword);
        f.setLocations(patchLocations);
        f.clean();
        f.migrate();
    }
    /**
     * Need to do a db reset, because spring can't do a @rollback on rest calls...
     * Runs before each test method.
     */
    @Before
    public void recreateDatabase(){
        cleanAndMigrateDatabaseWithPatches(locations.split("\\s*,\\s*"));
    }
}
|
INFUND-1889 changed rest integration unit test
|
ifs-data-service/src/test/java/com/worth/ifs/BaseRestServiceIntegrationTest.java
|
INFUND-1889 changed rest integration unit test
|
|
Java
|
mit
|
20d65142c89cae30ac592cc54626b2ad0d26453b
| 0
|
raphaelm/opacclient,opacapp/opacclient,johan12345/opacclient,raphaelm/opacclient,opacapp/opacclient,ruediger-w/opacclient,ruediger-w/opacclient,opacapp/opacclient,opacapp/opacclient,johan12345/opacclient,johan12345/opacclient,ruediger-w/opacclient,johan12345/opacclient,johan12345/opacclient,ruediger-w/opacclient,ruediger-w/opacclient,opacapp/opacclient,raphaelm/opacclient
|
package de.geeksfactory.opacclient.apis;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.message.BasicNameValuePair;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.json.JSONException;
import org.json.JSONObject;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import de.geeksfactory.opacclient.apis.OpacApi.MultiStepResult.Status;
import de.geeksfactory.opacclient.i18n.StringProvider;
import de.geeksfactory.opacclient.networking.HttpClientFactory;
import de.geeksfactory.opacclient.networking.HttpUtils;
import de.geeksfactory.opacclient.networking.NotReachableException;
import de.geeksfactory.opacclient.networking.SSLSecurityException;
import de.geeksfactory.opacclient.objects.Account;
import de.geeksfactory.opacclient.objects.AccountData;
import de.geeksfactory.opacclient.objects.Copy;
import de.geeksfactory.opacclient.objects.Detail;
import de.geeksfactory.opacclient.objects.DetailedItem;
import de.geeksfactory.opacclient.objects.Filter;
import de.geeksfactory.opacclient.objects.Filter.Option;
import de.geeksfactory.opacclient.objects.LentItem;
import de.geeksfactory.opacclient.objects.Library;
import de.geeksfactory.opacclient.objects.ReservedItem;
import de.geeksfactory.opacclient.objects.SearchRequestResult;
import de.geeksfactory.opacclient.objects.SearchResult;
import de.geeksfactory.opacclient.objects.SearchResult.MediaType;
import de.geeksfactory.opacclient.searchfields.DropdownSearchField;
import de.geeksfactory.opacclient.searchfields.SearchField;
import de.geeksfactory.opacclient.searchfields.SearchQuery;
import de.geeksfactory.opacclient.searchfields.TextSearchField;
public class Adis extends ApacheBaseApi implements OpacApi {
protected static HashMap<String, MediaType> types = new HashMap<>();
protected static HashSet<String> ignoredFieldNames = new HashSet<>();
static {
types.put("Buch", MediaType.BOOK);
types.put("Band", MediaType.BOOK);
types.put("DVD-ROM", MediaType.CD_SOFTWARE);
types.put("CD-ROM", MediaType.CD_SOFTWARE);
types.put("Medienkombination", MediaType.PACKAGE);
types.put("DVD-Video", MediaType.DVD);
types.put("DVD", MediaType.DVD);
types.put("Noten", MediaType.SCORE_MUSIC);
types.put("Konsolenspiel", MediaType.GAME_CONSOLE);
types.put("Spielkonsole", MediaType.GAME_CONSOLE);
types.put("CD", MediaType.CD);
types.put("Zeitschrift", MediaType.MAGAZINE);
types.put("Zeitschriftenheft", MediaType.MAGAZINE);
types.put("Zeitung", MediaType.NEWSPAPER);
types.put("Beitrag E-Book", MediaType.EBOOK);
types.put("Elektronische Ressource", MediaType.EBOOK);
types.put("E-Book", MediaType.EBOOK);
types.put("Karte", MediaType.MAP);
types.put("E-Ressource", MediaType.EBOOK);
types.put("Munzinger", MediaType.EBOOK);
types.put("E-Audio", MediaType.EAUDIO);
types.put("Blu-Ray", MediaType.BLURAY);
// TODO: The following fields from Berlin make no sense and don't work
// when they are displayed alone.
// We can only include them if we automatically deselect the "Verbund"
// checkbox
// when one of these dropdowns has a value other than "".
ignoredFieldNames.add("oder Bezirk");
ignoredFieldNames.add("oder Bibliothek");
}
protected String opac_url = "";
protected JSONObject data;
protected Library library;
protected int s_requestCount = 0;
protected String s_service;
protected String s_sid;
protected List<String> s_exts;
protected String s_alink;
protected List<NameValuePair> s_pageform;
protected int s_lastpage;
protected Document s_reusedoc;
protected String s_nextbutton = "$Toolbar_5";
protected String s_previousbutton = "$Toolbar_4";
public static Map<String, List<String>> getQueryParams(String url) {
try {
Map<String, List<String>> params = new HashMap<>();
String[] urlParts = url.split("\\?");
if (urlParts.length > 1) {
String query = urlParts[1];
for (String param : query.split("&")) {
String[] pair = param.split("=");
String key = URLDecoder.decode(pair[0], "UTF-8");
String value = "";
if (pair.length > 1) {
value = URLDecoder.decode(pair[1], "UTF-8");
}
List<String> values = params.get(key);
if (values == null) {
values = new ArrayList<>();
params.put(key, values);
}
values.add(value);
}
}
return params;
} catch (UnsupportedEncodingException ex) {
throw new AssertionError(ex);
}
}
/**
 * Performs a GET request against the aDIS OPAC and parses the response.
 * Appends the session's requestCount parameter if the URL does not already
 * carry one, and updates {@code s_requestCount} from links in the response.
 *
 * @param url target URL
 * @return the parsed response document, with its base URI set to {@code url}
 * @throws IOException on network errors; SSL trust problems are mapped to
 *                     SSLSecurityException, timeouts/aborts to
 *                     NotReachableException
 */
public Document htmlGet(String url) throws IOException {
    // aDIS requires the requestCount parameter to keep session state in sync.
    if (!url.contains("requestCount") && s_requestCount >= 0) {
        url = url + (url.contains("?") ? "&" : "?") + "requestCount="
                + s_requestCount;
    }
    HttpGet httpget = new HttpGet(cleanUrl(url));
    HttpResponse response;
    try {
        response = http_client.execute(httpget);
    } catch (javax.net.ssl.SSLPeerUnverifiedException e) {
        throw new SSLSecurityException(e.getMessage());
    } catch (javax.net.ssl.SSLException e) {
        // Can be "Not trusted server certificate" or an
        // aborted/interrupted handshake/connection.
        if (e.getMessage().contains("timed out")
                || e.getMessage().contains("reset by")) {
            e.printStackTrace();
            throw new NotReachableException(e.getMessage());
        } else {
            throw new SSLSecurityException(e.getMessage());
        }
    } catch (InterruptedIOException e) {
        e.printStackTrace();
        throw new NotReachableException(e.getMessage());
    } catch (IOException e) {
        if (e.getMessage() != null && e.getMessage().contains("Request aborted")) {
            e.printStackTrace();
            throw new NotReachableException(e.getMessage());
        } else {
            throw e;
        }
    }
    if (response.getStatusLine().getStatusCode() >= 400) {
        throw new NotReachableException(response.getStatusLine().getReasonPhrase());
    }
    String html = convertStreamToString(response.getEntity().getContent(),
            getDefaultEncoding());
    HttpUtils.consume(response.getEntity());
    Document doc = Jsoup.parse(html);
    // Keep the server-side request counter in sync with the session.
    // BUG FIX: the unanchored pattern must be used with find(), not matches()
    // — matches() requires the WHOLE href to equal "requestCount=N" and thus
    // practically never updated the counter (compare htmlPost, which uses an
    // ".*...*"-anchored pattern with matches()).
    Pattern patRequestCount = Pattern.compile("requestCount=([0-9]+)");
    for (Element a : doc.select("a")) {
        Matcher objid_matcher = patRequestCount.matcher(a.attr("href"));
        if (objid_matcher.find()) {
            s_requestCount = Integer.parseInt(objid_matcher.group(1));
        }
    }
    doc.setBaseUri(url);
    return doc;
}
/**
 * Performs a POST request against the aDIS OPAC and parses the response.
 * Adds the session's requestCount form field if the caller did not supply
 * one, and updates s_requestCount from links found in the response.
 *
 * @param url  target URL (in practice a ";jsessionid=" URL)
 * @param data form fields to submit; may be modified (requestCount appended)
 * @return the parsed response document, with its base URI set to url
 * @throws IOException on network errors; SSL trust problems are mapped to
 *                     SSLSecurityException, timeouts/aborts to
 *                     NotReachableException
 */
public Document htmlPost(String url, List<NameValuePair> data)
throws IOException {
HttpPost httppost = new HttpPost(cleanUrl(url));
// Only append requestCount if the form does not already contain one.
boolean rcf = false;
for (NameValuePair nv : data) {
if (nv.getName().equals("requestCount")) {
rcf = true;
break;
}
}
if (!rcf) {
data.add(new BasicNameValuePair("requestCount", s_requestCount + ""));
}
httppost.setEntity(new UrlEncodedFormEntity(data, getDefaultEncoding()));
HttpResponse response;
try {
response = http_client.execute(httppost);
} catch (javax.net.ssl.SSLPeerUnverifiedException e) {
throw new SSLSecurityException(e.getMessage());
} catch (javax.net.ssl.SSLException e) {
// Can be "Not trusted server certificate" or can be a
// aborted/interrupted handshake/connection
if (e.getMessage().contains("timed out")
|| e.getMessage().contains("reset by")) {
e.printStackTrace();
throw new NotReachableException(e.getMessage());
} else {
throw new SSLSecurityException(e.getMessage());
}
} catch (InterruptedIOException e) {
e.printStackTrace();
throw new NotReachableException(e.getMessage());
} catch (IOException e) {
if (e.getMessage() != null && e.getMessage().contains("Request aborted")) {
e.printStackTrace();
throw new NotReachableException(e.getMessage());
} else {
throw e;
}
}
if (response.getStatusLine().getStatusCode() >= 400) {
throw new NotReachableException(response.getStatusLine().getReasonPhrase());
}
String html = convertStreamToString(response.getEntity().getContent(),
getDefaultEncoding());
HttpUtils.consume(response.getEntity());
Document doc = Jsoup.parse(html);
// Keep the server-side request counter in sync with the session.
Pattern patRequestCount = Pattern
.compile(".*requestCount=([0-9]+)[^0-9].*");
for (Element a : doc.select("a")) {
Matcher objid_matcher = patRequestCount.matcher(a.attr("href"));
if (objid_matcher.matches()) {
s_requestCount = Integer.parseInt(objid_matcher.group(1));
}
}
doc.setBaseUri(url);
return doc;
}
/**
 * Opens the OPAC start page and extracts the session parameters needed for
 * all further requests: the "service" id, the "sp" parameters of the
 * advanced search link, and the jsessionid.
 */
@Override
public void start() throws IOException {
try {
// -1 suppresses appending requestCount to the very first request.
s_requestCount = -1;
Document doc = htmlGet(opac_url + "?"
+ data.getString("startparams"));
Pattern padSid = Pattern
.compile(".*;jsessionid=([0-9A-Fa-f]+)[^0-9A-Fa-f].*");
for (Element navitem : doc
.select("#unav li a, #hnav li a, .tree_ul li a, .search-adv")) {
// Düsseldorf uses a custom layout where the navbar is .tree_ul
// in Stuttgart, the navbar is #hnav and advanced search is linked outside the
// navbar as .search-adv-repeat
if (navitem.attr("href").contains("service=")) {
s_service = getQueryParams(navitem.attr("href")).get(
"service").get(0);
}
if (navitem.text().contains("Erweiterte Suche")) {
s_exts = getQueryParams(navitem.attr("href")).get("sp");
}
Matcher objid_matcher = padSid.matcher(navitem.attr("href"));
if (objid_matcher.matches()) {
s_sid = objid_matcher.group(1);
}
}
// Fall back to the default advanced-search parameter set.
if (s_exts == null) {
s_exts = Collections.singletonList("SS6");
}
} catch (JSONException e) {
throw new RuntimeException(e);
}
super.start();
}
// aDIS pages are served as UTF-8.
@Override
protected String getDefaultEncoding() {
return "UTF-8";
}
/**
 * Performs a search by filling in the advanced search form and submitting
 * it. Supports up to 4 free-text criteria (a limit of the aDIS form) plus
 * any number of dropdown criteria.
 *
 * @param queries the search criteria entered by the user
 * @return the first page of results
 * @throws OpacErrorException if no criteria were given, too many free-text
 *                            criteria were given, or the OPAC reports an error
 */
@Override
public SearchRequestResult search(List<SearchQuery> queries)
throws IOException, OpacErrorException {
start();
// TODO: There are also libraries with a different search form,
// s_exts=SS2 instead of s_exts=SS6
// e.g. munich. Treat them differently!
Document doc = htmlGet(opac_url + ";jsessionid=" + s_sid + "?service="
+ s_service + getSpParams());
int dropdownTextCount = 0;
int totalCount = 0;
List<NameValuePair> nvpairs = new ArrayList<>();
for (SearchQuery query : queries) {
if (!query.getValue().equals("")) {
totalCount++;
// Dropdown fields are set directly on the select element.
if (query.getSearchField() instanceof DropdownSearchField) {
doc.select("select#" + query.getKey())
.val(query.getValue());
continue;
}
// Non-selectable text fields have a fixed input of their own.
if (query.getSearchField() instanceof TextSearchField &&
query.getSearchField().getData() != null &&
!query.getSearchField().getData().optBoolean("selectable", true) &&
doc.select("#" + query.getKey()).size() > 0) {
doc.select("#" + query.getKey())
.val(query.getValue());
continue;
}
// Selectable text criteria use the SUCH01_n/FELD01_n field pairs.
dropdownTextCount++;
if (s_exts.get(0).equals("SS2")
|| (query.getSearchField().getData() != null && !query
.getSearchField().getData()
.optBoolean("selectable", true))) {
doc.select("input#" + query.getKey()).val(query.getValue());
} else {
if (doc.select("select#SUCH01_1").size() == 0 &&
doc.select("input[fld=FELD01_" + dropdownTextCount + "]").size() > 0) {
// Hack needed for Nürnberg
doc.select("input[fld=FELD01_" + dropdownTextCount + "]").first()
.previousElementSibling().val(query.getKey());
doc.select("input[fld=FELD01_" + dropdownTextCount + "]")
.val(query.getValue());
} else {
doc.select("select#SUCH01_" + dropdownTextCount).val(query.getKey());
doc.select("input#FELD01_" + dropdownTextCount).val(query.getValue());
}
}
// The aDIS form only offers 4 free-text criteria slots.
if (dropdownTextCount > 4) {
throw new OpacErrorException(stringProvider.getQuantityString(
StringProvider.LIMITED_NUM_OF_CRITERIA, 4, 4));
}
}
}
// Serialize the (now filled-in) form, skipping buttons and unnamed fields.
for (Element input : doc.select("input, select")) {
if (!"image".equals(input.attr("type"))
&& !"submit".equals(input.attr("type"))
&& !"".equals(input.attr("name"))) {
nvpairs.add(new BasicNameValuePair(input.attr("name"), input
.attr("value")));
}
}
// $Toolbar_0 is the "submit search" toolbar button (image input).
nvpairs.add(new BasicNameValuePair("$Toolbar_0.x", "1"));
nvpairs.add(new BasicNameValuePair("$Toolbar_0.y", "1"));
if (totalCount == 0) {
throw new OpacErrorException(
stringProvider.getString(StringProvider.NO_CRITERIA_INPUT));
}
Document docresults = htmlPost(opac_url + ";jsessionid=" + s_sid,
nvpairs);
return parse_search_wrapped(docresults, 1);
}
// Convenience overload: build the "&sp=..." query string without overriding
// any of the sp parameters.
private String getSpParams() {
return getSpParams(null);
}
/**
 * Builds the "&sp=..." query string from the session's sp parameters.
 *
 * @param overrideSecond if non-null, replaces the second sp parameter; if
 *                       there is only a single sp parameter, it replaces
 *                       that one entirely
 * @return the concatenated "&sp=..." fragments
 */
private String getSpParams(String overrideSecond) {
    // Special case: a lone sp parameter is replaced wholesale by the override.
    if (overrideSecond != null && s_exts.size() == 1) {
        return "&sp=" + overrideSecond;
    }
    StringBuilder result = new StringBuilder();
    for (int idx = 0; idx < s_exts.size(); idx++) {
        String value = (idx == 1 && overrideSecond != null)
                ? overrideSecond
                : s_exts.get(idx);
        result.append("&sp=").append(value);
    }
    return result.toString();
}
// Internal signal thrown by parse_search() when the OPAC jumped directly to
// a single hit's detail page instead of showing a result list.
// NOTE(review): as a non-static inner class this carries a hidden reference
// to the enclosing instance; it could likely be static — confirm callers
// before changing.
public class SingleResultFound extends Exception {
}
/**
 * Parses a result list, transparently handling the case where the OPAC
 * jumped directly to a single result: it then navigates back to the result
 * list and parses that instead.
 *
 * @param doc  the page to parse
 * @param page the page number the document represents
 */
protected SearchRequestResult parse_search_wrapped(Document doc, int page) throws IOException, OpacErrorException {
try {
return parse_search(doc, page);
} catch (SingleResultFound e) {
// Navigate back to the result list ("Trefferliste").
List<NameValuePair> nvpairs = new ArrayList<>();
for (Element input : doc.select("input, select")) {
if (!"image".equals(input.attr("type"))
&& !"submit".equals(input.attr("type"))
&& !"".equals(input.attr("name"))) {
nvpairs.add(new BasicNameValuePair(input.attr("name"), input
.attr("value")));
}
}
String name = "$Toolbar_0";
if (doc.select("[id^=Toolbar_][title*=Trefferliste]").size() > 0) {
// In Stuttgart, "Trefferliste" is Nr. 5, in Zurich its Nr. 1. Ofen, 0 ("back") works as well.
name = doc.select("[id^=Toolbar_][title*=Trefferliste]").first().attr("name");
}
nvpairs.add(new BasicNameValuePair(name + ".x", "1"));
nvpairs.add(new BasicNameValuePair(name + ".y", "1"));
doc = htmlPost(opac_url + ";jsessionid=" + s_sid, nvpairs);
try {
return parse_search(doc, page);
} catch (SingleResultFound e1) {
// Still no result list — give up.
throw new NotReachableException();
}
}
}
/**
 * Parses an aDIS result list page into a SearchRequestResult and remembers
 * the page form and paging button names for later navigation.
 *
 * @param doc  the result list document
 * @param page the page number this document represents
 * @return the parsed result list (total count is -1 if it cannot be found)
 * @throws OpacErrorException if the page shows an error message or no hits
 * @throws SingleResultFound  if the OPAC jumped directly to a single hit's
 *                            detail view instead of showing a list
 */
private SearchRequestResult parse_search(Document doc, int page)
        throws OpacErrorException, SingleResultFound {
    if (doc.select(".message h1").size() > 0
            && doc.select("#right #R06").size() == 0) {
        throw new OpacErrorException(doc.select(".message h1").text());
    }
    if (doc.select("#OPACLI").text().contains("nicht gefunden")) {
        throw new OpacErrorException(
                stringProvider.getString(StringProvider.NO_RESULTS));
    }
    int total_result_count = -1;
    List<SearchResult> results = new ArrayList<>();
    // Total hit count, e.g. "Treffer: 1 - 10 von 123".
    if (doc.select("#R06").size() > 0) {
        Pattern patNum = Pattern
                .compile(".*Treffer: .* von ([0-9]+)[^0-9]*");
        Matcher matcher = patNum.matcher(doc.select("#R06").text()
                .trim());
        if (matcher.matches()) {
            total_result_count = Integer.parseInt(matcher.group(1));
        } else if (doc.select("#R06").text().trim().endsWith("Treffer: 1")) {
            total_result_count = 1;
        }
    }
    // A single hit redirects straight to the detail view.
    if (doc.select("#R03").size() == 1
            && doc.select("#R03").text().trim()
                  .endsWith("Treffer: 1")) {
        throw new SingleResultFound();
    }
    Pattern patId = Pattern
            .compile("javascript:.*htmlOnLink\\('([0-9A-Za-z]+)'\\)");
    int nr = 1;
    // Two different result list layouts exist; pick selectors accordingly.
    String selector_row, selector_link, selector_img, selector_num, selector_text;
    if (doc.select("table.rTable_table tbody").size() > 0) {
        selector_row = "table.rTable_table tbody tr";
        selector_link = ".rTable_td_text a";
        selector_text = ".rList_name";
        selector_img = ".rTable_td_img img, .rTable_td_text img";
        selector_num = "tr td:first-child";
    } else {
        // New version, e.g. Berlin
        selector_row = ".rList li.rList_li_even, .rList li.rList_li_odd";
        selector_link = ".rList_titel a";
        selector_text = ".rList_name";
        selector_img = ".rlist_icon img, .rList_titel img, .rList_medium .icon, .rList_availability .icon, .rList_img img";
        selector_num = ".rList_num";
    }
    for (Element tr : doc.select(selector_row)) {
        SearchResult res = new SearchResult();
        Element innerele = tr.select(selector_link).first();
        innerele.select("img").remove();
        String descr = innerele.html();
        for (Element n : tr.select(selector_text)) {
            String t = n.text().replace("\u00a0", " ").trim();
            if (t.length() > 0) {
                descr += "<br />" + t.trim();
            }
        }
        res.setInnerhtml(descr);
        try {
            res.setNr(Integer.parseInt(tr.select(selector_num).text().trim()));
        } catch (NumberFormatException e) {
            res.setNr(nr);
        }
        Matcher matcher = patId.matcher(tr.select(selector_link).first().attr("href"));
        if (matcher.matches()) {
            res.setId(matcher.group(1));
        }
        // Media type and availability are encoded in the row's icons.
        for (Element img : tr.select(selector_img)) {
            String ttext = img.attr("title");
            String src = img.attr("abs:src");
            if (types.containsKey(ttext)) {
                res.setType(types.get(ttext));
            } else if (ttext.contains("+")
                    && types.containsKey(ttext.split("\\+")[0].trim())) {
                res.setType(types.get(ttext.split("\\+")[0].trim()));
            } else if (ttext.matches(".*ist verf.+gbar") ||
                    ttext.contains("is available") ||
                    img.attr("href").contains("verfu_ja")) {
                res.setStatus(SearchResult.Status.GREEN);
            } else if (ttext.matches(".*nicht verf.+gbar") ||
                    ttext.contains("not available") ||
                    img.attr("href").contains("verfu_nein")) {
                res.setStatus(SearchResult.Status.RED);
            }
        }
        results.add(res);
        nr++;
    }
    updatePageform(doc);
    s_lastpage = page;
    // Remember the paging buttons' names; they differ between installations.
    String nextButton =
            doc.select("input[title=nächster], input[title=Vorwärts blättern]").attr("name");
    // BUG FIX: the previous-page selector erroneously matched the NEXT
    // button ("nächster"); it must match "vorheriger" so s_previousbutton
    // does not get set to the next button's name.
    String previousButton =
            doc.select("input[title=vorheriger], input[title=Rückwärts blättern]").attr("name");
    if (!nextButton.equals("")) s_nextbutton = nextButton;
    if (!previousButton.equals("")) s_previousbutton = previousButton;
    return new SearchRequestResult(results, total_result_count, page);
}
/**
 * Stores the library configuration and reads the mandatory "baseurl"
 * setting; a missing baseurl is a configuration error and fails fast.
 */
@Override
public void init(Library library, HttpClientFactory httpClientFactory) {
super.init(library, httpClientFactory);
this.library = library;
this.data = library.getData();
try {
this.opac_url = data.getString("baseurl");
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
// Result filtering is not supported by this API implementation.
@Override
public SearchRequestResult filterResults(Filter filter, Option option)
throws IOException {
throw new UnsupportedOperationException();
}
/**
 * Navigates to the given result page by repeatedly pressing the
 * next/previous toolbar button, one page per request, starting from the
 * last parsed page (s_lastpage).
 *
 * @param page the 1-based page number to reach
 * @return the parsed result list of the requested page
 */
@Override
public SearchRequestResult searchGetPage(int page) throws IOException,
OpacErrorException {
SearchRequestResult res = null;
while (page != s_lastpage) {
List<NameValuePair> nvpairs = s_pageform;
// Remove any previously added toolbar button fields before adding ours.
int i = 0;
List<Integer> indexes = new ArrayList<>();
for (NameValuePair np : nvpairs) {
if (np.getName().contains("$Toolbar_")) {
indexes.add(i);
}
i++;
}
for (int j = indexes.size() - 1; j >= 0; j--) {
nvpairs.remove((int) indexes.get(j));
}
int p;
if (page > s_lastpage) {
nvpairs.add(new BasicNameValuePair(s_nextbutton + ".x", "1"));
nvpairs.add(new BasicNameValuePair(s_nextbutton + ".y", "1"));
p = s_lastpage + 1;
} else {
nvpairs.add(new BasicNameValuePair(s_previousbutton + ".x", "1"));
nvpairs.add(new BasicNameValuePair(s_previousbutton + ".y", "1"));
p = s_lastpage - 1;
}
Document docresults = htmlPost(opac_url + ";jsessionid=" + s_sid,
nvpairs);
// parse_search_wrapped also updates s_pageform and s_lastpage.
res = parse_search_wrapped(docresults, p);
}
return res;
}
/**
 * Opens the detail page of a result. The id is either a full URL
 * ("Zitierlink"), an internal aDIS id to be selected on the current result
 * list page, or null to re-use a previously stored document (s_reusedoc).
 * After parsing, navigates back to the result list to keep the session's
 * page state consistent.
 *
 * @param id         result id, URL, or null
 * @param homebranch unused by this implementation
 */
@Override
public DetailedItem getResultById(String id, String homebranch)
throws IOException, OpacErrorException {
Document doc;
List<NameValuePair> nvpairs;
if (id == null && s_reusedoc != null) {
doc = s_reusedoc;
} else if (id.startsWith("http")) {
return parseResult(id, htmlGet(id));
} else {
// Select the title on the result list via the "selected" field.
nvpairs = s_pageform;
int i = 0;
List<Integer> indexes = new ArrayList<>();
for (NameValuePair np : nvpairs) {
if (np.getName().contains("$Toolbar_")
|| np.getName().contains("selected")) {
indexes.add(i);
}
i++;
}
for (int j = indexes.size() - 1; j >= 0; j--) {
nvpairs.remove((int) indexes.get(j));
}
nvpairs.add(new BasicNameValuePair("selected", "ZTEXT " + id));
doc = htmlPost(opac_url + ";jsessionid=" + s_sid, nvpairs);
List<NameValuePair> form = new ArrayList<>();
for (Element input : doc.select("input, select")) {
if (!"image".equals(input.attr("type"))
&& !"submit".equals(input.attr("type"))
&& !"checkbox".equals(input.attr("type"))
&& !"".equals(input.attr("name"))
&& !"selected".equals(input.attr("name"))) {
form.add(new BasicNameValuePair(input.attr("name"), input
.attr("value")));
}
}
form.add(new BasicNameValuePair("selected", "ZTEXT " + id));
doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
// Yep, two times.
}
// Reset: navigate back so the session's list state stays usable.
updatePageform(doc);
nvpairs = s_pageform;
nvpairs.add(new BasicNameValuePair("$Toolbar_1.x", "1"));
nvpairs.add(new BasicNameValuePair("$Toolbar_1.y", "1"));
parse_search_wrapped(htmlPost(opac_url + ";jsessionid=" + s_sid, nvpairs), 1);
nvpairs = s_pageform;
nvpairs.add(new BasicNameValuePair("$Toolbar_3.x", "1"));
nvpairs.add(new BasicNameValuePair("$Toolbar_3.y", "1"));
parse_search_wrapped(htmlPost(opac_url + ";jsessionid=" + s_sid, nvpairs), 1);
return parseResult(id, doc);
}
/**
 * Parses a detail page into a DetailedItem: cover, detail rows, title,
 * reservation availability, and the copies table with per-column mapping.
 *
 * @param id  the result id (used as reservation info), may be null
 * @param doc the detail page document
 */
DetailedItem parseResult(String id, Document doc)
throws IOException, OpacErrorException {
List<NameValuePair> nvpairs;
DetailedItem res = new DetailedItem();
if (doc.select("#R001 img").size() == 1) {
String cover_url = doc.select("#R001 img").first().absUrl("src");
if (!cover_url.endsWith("erne.gif")) {
// If there is no cover, the first image usually is the "n Stars" rating badge
res.setCover(cover_url);
}
}
// Key/value detail rows; link-like values are replaced by their href.
for (Element tr : doc.select("#R06 .aDISListe table tbody tr")) {
if (tr.children().size() < 2) {
continue;
}
String title = tr.child(0).text().trim();
String value = tr.child(1).text().trim();
if (value.contains("hier klicken") || value.startsWith("zur ") ||
title.contains("URL")) {
res.addDetail(new Detail(title, tr.child(1).select("a").first().absUrl("href")));
} else {
res.addDetail(new Detail(title, value));
}
// The first "Titel" row provides the item's display title.
if (title.contains("Titel") && res.getTitle() == null) {
res.setTitle(value.split("[:/;]")[0].trim());
}
}
// Fall back to series/periodical details if no title row was found.
if (res.getTitle() == null) {
for (Detail d : res.getDetails()) {
if (d.getDesc().contains("Gesamtwerk")
|| d.getDesc().contains("Zeitschrift")) {
res.setTitle(d.getContent());
break;
}
}
}
// A reservation button on the page means the item can be reserved.
if (doc.select(
"input[value*=Reservieren], input[value*=Vormerken], " +
"input[value*=Einzelbestellung]")
.size() > 0 && id != null) {
res.setReservable(true);
res.setReservation_info(id);
}
DateTimeFormatter fmt = DateTimeFormat.forPattern("dd.MM.yyyy").withLocale(Locale.GERMAN);
// Copies table: map column headers to copy fields, then read each row.
if (doc.select("#R08 table.rTable_table, #R09 table.rTable_table").size() > 0) {
Element table = doc.select("#R08 table.rTable_table, #R09 table.rTable_table").first();
Map<Integer, String> colmap = new HashMap<>();
int i = 0;
for (Element th : table.select("thead tr th")) {
String head = th.text().trim();
if (head.contains("Bibliothek") || head.contains("Library")) {
colmap.put(i, "branch");
} else if (head.contains("Standort") || head.contains("Location")) {
colmap.put(i, "location");
} else if (head.contains("Signatur") || head.contains("Call number")) {
colmap.put(i, "signature");
} else if (head.contains("URL")) {
colmap.put(i, "url");
} else if (head.contains("Status") || head.contains("Hinweis")
|| head.contains("Leihfrist") || head.matches(".*Verf.+gbarkeit.*")) {
colmap.put(i, "status");
}
i++;
}
for (Element tr : table.select("tbody tr")) {
Copy copy = new Copy();
for (Entry<Integer, String> entry : colmap.entrySet()) {
if (entry.getValue().equals("status")) {
String status = tr.child(entry.getKey()).text().trim();
String currentStatus =
copy.getStatus() != null ? copy.getStatus() + " - " : "";
// Status cells like "ausgeliehen - am: 01.02.2020" carry
// the return date after the colon.
if (status.contains(" am: ")) {
copy.setStatus(currentStatus + status.split("-")[0]);
try {
copy.setReturnDate(fmt.parseLocalDate(status.split(": ")[1]));
} catch (IllegalArgumentException e) {
e.printStackTrace();
}
} else {
copy.setStatus(currentStatus + status);
}
} else {
copy.set(entry.getValue(), tr.child(entry.getKey()).text().trim());
}
}
res.addCopy(copy);
}
}
if (doc.select("a:contains(Zitierlink)").size() > 0) {
res.setId(doc.select("a:contains(Zitierlink)").attr("href"));
} else {
res.setId(""); // null would be overridden by the UI, because there _is_
// an id,< we just can not use it.
}
return res;
}
/**
 * Positional result access is only possible when a result document was
 * stored for re-use (single-hit searches); otherwise it is unsupported.
 */
@Override
public DetailedItem getResult(int position) throws IOException,
OpacErrorException {
if (s_reusedoc != null) {
return getResultById(null, null);
}
throw new UnsupportedOperationException();
}
/**
 * Multi-step reservation flow. Selects the title from the stored page form,
 * submits the reservation form, logs in, and then depending on the page
 * either asks the user for confirmation, asks for a pickup-branch or
 * notification selection, or submits the final reservation. On error or
 * pending selection/confirmation it cancels out of the dialog, and always
 * navigates back to the result list at the end to keep session state sane.
 *
 * @param item       the item to reserve (reservation_info must be set)
 * @param account    the account to reserve with
 * @param useraction MultiStepResult action constant from the previous step
 * @param selection  the user's selection from the previous step, or null
 */
@Override
public ReservationResult reservation(DetailedItem item, Account account,
int useraction, String selection) throws IOException {
Document doc;
List<NameValuePair> nvpairs;
ReservationResult res = null;
if (selection != null && selection.equals("")) {
selection = null;
}
// Without a stored page form we cannot select the title.
if (s_pageform == null) {
return new ReservationResult(Status.ERROR);
}
// Load details
nvpairs = s_pageform;
int i = 0;
List<Integer> indexes = new ArrayList<>();
for (NameValuePair np : nvpairs) {
if (np.getName().contains("$Toolbar_")
|| np.getName().contains("selected")) {
indexes.add(i);
}
i++;
}
for (int j = indexes.size() - 1; j >= 0; j--) {
nvpairs.remove((int) indexes.get(j));
}
nvpairs.add(new BasicNameValuePair("selected", "ZTEXT "
+ item.getReservation_info()));
htmlPost(opac_url + ";jsessionid=" + s_sid, nvpairs);
doc = htmlPost(opac_url + ";jsessionid=" + s_sid, nvpairs); // Yep, two
// times.
// Submit the detail form via its reservation button.
List<NameValuePair> form = new ArrayList<>();
for (Element input : doc.select("input, select")) {
if (!"image".equals(input.attr("type"))
&& (!"submit".equals(input.attr("type"))
|| input.val().contains("Reservieren")
|| input.val().contains("Einzelbestellung")
|| input.val().contains("Vormerken"))
&& !"checkbox".equals(input.attr("type"))
&& !"".equals(input.attr("name"))) {
form.add(new BasicNameValuePair(input.attr("name"), input
.attr("value")));
}
}
doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
if (doc.select(".message h1").size() > 0) {
// The OPAC showed an error message; report it and acknowledge.
String msg = doc.select(".message h1").text().trim();
res = new ReservationResult(MultiStepResult.Status.ERROR, msg);
form = new ArrayList<>();
for (Element input : doc.select("input")) {
if (!"image".equals(input.attr("type"))
&& !"checkbox".equals(input.attr("type"))
&& !"".equals(input.attr("name"))) {
form.add(new BasicNameValuePair(input.attr("name"), input
.attr("value")));
}
}
doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
} else {
try {
doc = handleLoginForm(doc, account);
} catch (OpacErrorException e1) {
return new ReservationResult(MultiStepResult.Status.ERROR,
e1.getMessage());
}
if (useraction == 0 && selection == null) {
// First round trip: ask the user to confirm the reservation.
res = new ReservationResult(
MultiStepResult.Status.CONFIRMATION_NEEDED);
List<String[]> details = new ArrayList<>();
details.add(new String[]{doc.select("#F23").text()});
res.setDetails(details);
} else if (doc.select("#AUSGAB_1").size() > 0 && (selection == null || "confirmed".equals(selection))) {
// Pickup branch selection required (#AUSGAB_1 dropdown).
List<Map<String, String>> sel = new ArrayList<>();
for (Element opt : doc.select("#AUSGAB_1 option")) {
if (opt.text().trim().length() > 0) {
Map<String, String> selopt = new HashMap<>();
selopt.put("key", opt.val());
selopt.put("value", opt.text());
sel.add(selopt);
}
}
res = new ReservationResult(
MultiStepResult.Status.SELECTION_NEEDED, doc.select(
"#AUSGAB_1").first().parent().select("span").text());
res.setSelection(sel);
} else if (doc.select("#FSET01 select[name=select$0]").size() > 0 &&
(selection == null || !selection.contains("_SEP_"))) {
// Munich: "Benachrichtigung mit E-Mail"
List<Map<String, String>> sel = new ArrayList<>();
for (Element opt : doc.select("select[name=select$0] option")) {
if (opt.text().trim().length() > 0) {
Map<String, String> selopt = new HashMap<>();
selopt.put("value", opt.text());
// "_SEP_" joins this selection with an earlier one.
if (selection != null) {
selopt.put("key", opt.val() + "_SEP_" + selection);
} else {
selopt.put("key", opt.val());
}
sel.add(selopt);
}
}
res = new ReservationResult(
MultiStepResult.Status.SELECTION_NEEDED, doc.select(
"#FSET01 select[name=select$0]").first().parent().select("span").text());
res.setSelection(sel);
} else if (selection != null || doc.select("#AUSGAB_1").size() == 0) {
// All inputs gathered: fill the selections into the form.
if (doc.select("#AUSGAB_1").size() > 0 && selection != null) {
if (selection.contains("_SEP_")) {
doc.select("#AUSGAB_1").attr("value", selection.split("_SEP_")[1]);
} else {
doc.select("#AUSGAB_1").attr("value", selection);
}
}
if (doc.select("#FSET01 select[name=select$0]").size() > 0 && selection != null) {
if (selection.contains("_SEP_")) {
doc.select("#FSET01 select[name=select$0]")
.attr("value", selection.split("_SEP_")[0]);
} else {
doc.select("#FSET01 select[name=select$0]").attr("value", selection);
}
}
if (doc.select("#BENJN_1").size() > 0) {
// Notification not requested because some libraries notify by snail mail
// and take a fee for it (Example: Stuttgart_Uni)
doc.select("#BENJN_1").attr("value", "Nein");
}
if (doc.select(".message h1").size() > 0) {
// Error or confirmation message before final submit.
String msg = doc.select(".message h1").text().trim();
form = new ArrayList<>();
for (Element input : doc.select("input")) {
if (!"image".equals(input.attr("type"))
&& !"checkbox".equals(input.attr("type"))
&& !"".equals(input.attr("name"))) {
form.add(new BasicNameValuePair(input.attr("name"),
input.attr("value")));
}
}
doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
if (!msg.contains("Reservation ist erfolgt")) {
res = new ReservationResult(
MultiStepResult.Status.ERROR, msg);
} else {
res = new ReservationResult(MultiStepResult.Status.OK,
msg);
}
} else {
// Submit the final reservation form.
form = new ArrayList<>();
for (Element input : doc.select("input, select")) {
if (!"image".equals(input.attr("type"))
&& !"submit".equals(input.attr("type"))
&& !"checkbox".equals(input.attr("type"))
&& !"".equals(input.attr("name"))) {
form.add(new BasicNameValuePair(input.attr("name"),
input.attr("value")));
}
}
form.add(new BasicNameValuePair("textButton",
"Reservation abschicken"));
res = new ReservationResult(MultiStepResult.Status.OK);
doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
if (doc.select("input[name=textButton]").attr("value")
.contains("kostenpflichtig bestellen")) {
// Munich
form = new ArrayList<>();
for (Element input : doc.select("input, select")) {
if (!"image".equals(input.attr("type"))
&& !"submit".equals(input.attr("type"))
&& !"checkbox".equals(input.attr("type"))
&& !"".equals(input.attr("name"))) {
form.add(new BasicNameValuePair(input.attr("name"),
input.attr("value")));
}
}
form.add(new BasicNameValuePair("textButton",
doc.select("input[name=textButton]").first().attr("value")));
doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
}
if (doc.select(".message h1").size() > 0) {
// Acknowledge the result message and map it to OK/ERROR.
String msg = doc.select(".message h1").text().trim();
form = new ArrayList<>();
for (Element input : doc.select("input")) {
if (!"image".equals(input.attr("type"))
&& !"checkbox".equals(input.attr("type"))
&& !"".equals(input.attr("name"))) {
form.add(new BasicNameValuePair(input
.attr("name"), input.attr("value")));
}
}
doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
if (!msg.contains("Reservation ist erfolgt")) {
res = new ReservationResult(
MultiStepResult.Status.ERROR, msg);
} else {
res = new ReservationResult(
MultiStepResult.Status.OK, msg);
}
} else if (doc.select("#R01").text()
.contains("Informationen zu Ihrer Reservation")) {
String msg = doc.select("#OPACLI").text().trim();
form = new ArrayList<>();
for (Element input : doc.select("input")) {
if (!"image".equals(input.attr("type"))
&& !"checkbox".equals(input.attr("type"))
&& !"".equals(input.attr("name"))) {
form.add(new BasicNameValuePair(input
.attr("name"), input.attr("value")));
}
}
doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
if (!msg.contains("Reservation ist erfolgt")) {
res = new ReservationResult(
MultiStepResult.Status.ERROR, msg);
} else {
res = new ReservationResult(
MultiStepResult.Status.OK, msg);
}
}
}
}
}
// Unfinished flows (pending selection/confirmation or no result yet)
// must be cancelled so the session does not stay inside the dialog.
if (res == null
|| res.getStatus() == MultiStepResult.Status.SELECTION_NEEDED
|| res.getStatus() == MultiStepResult.Status.CONFIRMATION_NEEDED) {
form = new ArrayList<>();
for (Element input : doc.select("input, select")) {
if (!"image".equals(input.attr("type"))
&& !"submit".equals(input.attr("type"))
&& !"checkbox".equals(input.attr("type"))
&& !"".equals(input.attr("name"))) {
form.add(new BasicNameValuePair(input.attr("name"), input
.attr("value")));
}
}
Element button = doc.select("input[value=Abbrechen], input[value=Zurück]").first();
form.add(new BasicNameValuePair(button.attr("name"), button.attr("value")));
doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
}
// Reset: navigate back to the result list to restore session state.
updatePageform(doc);
try {
nvpairs = s_pageform;
nvpairs.add(new BasicNameValuePair("$Toolbar_1.x", "1"));
nvpairs.add(new BasicNameValuePair("$Toolbar_1.y", "1"));
parse_search_wrapped(htmlPost(opac_url + ";jsessionid=" + s_sid, nvpairs),
1);
nvpairs = s_pageform;
nvpairs.add(new BasicNameValuePair("$Toolbar_3.x", "1"));
nvpairs.add(new BasicNameValuePair("$Toolbar_3.y", "1"));
parse_search_wrapped(htmlPost(opac_url + ";jsessionid=" + s_sid, nvpairs),
1);
} catch (OpacErrorException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return res;
}
/**
 * Snapshots the current page's form state into {@code s_pageform} so that
 * later requests can re-submit it. Image/submit buttons, checkboxes and
 * unnamed fields are excluded.
 *
 * @param doc the page whose input and select elements are captured
 */
void updatePageform(Document doc) {
    List<NameValuePair> form = new ArrayList<>();
    for (Element field : doc.select("input, select")) {
        String type = field.attr("type");
        String name = field.attr("name");
        boolean skip = "image".equals(type)
                || "submit".equals(type)
                || "checkbox".equals(type)
                || name.isEmpty();
        if (!skip) {
            form.add(new BasicNameValuePair(name, field.attr("value")));
        }
    }
    s_pageform = form;
}
/**
 * Prolongs a single lent item. Logs in, opens the lent-items page (sp=SZA
 * link), refuses early if the item's row is marked non-renewable, otherwise
 * checks the item's checkbox and submits the renew button. Finally presses
 * the "back" toolbar button to leave the account area.
 *
 * @param media item identifier of the form "checkboxName|..." (only the
 *              part before '|' is used)
 */
@Override
public ProlongResult prolong(String media, Account account, int useraction,
String selection) throws IOException {
String alink = null;
Document doc;
start();
doc = htmlGet(opac_url + ";jsessionid=" + s_sid + "?service="
+ s_service + getSpParams("SBK"));
try {
doc = handleLoginForm(doc, account);
} catch (OpacErrorException e) {
return new ProlongResult(Status.ERROR, e.getMessage());
}
// Find the link to the lent-items list (sp=SZA).
for (Element tr : doc.select(".rTable_div tr")) {
if (tr.select("a").size() == 1) {
if (tr.select("a").first().absUrl("href")
.contains("sp=SZA")) {
alink = tr.select("a").first().absUrl("href");
}
}
}
if (alink == null) {
return new ProlongResult(Status.ERROR);
}
doc = htmlGet(alink);
List<NameValuePair> form = new ArrayList<>();
for (Element input : doc.select("input, select")) {
if (!"image".equals(input.attr("type"))
&& !"submit".equals(input.attr("type"))
&& !"checkbox".equals(input.attr("type"))
&& !"".equals(input.attr("name"))) {
form.add(new BasicNameValuePair(input.attr("name"), input
.attr("value")));
}
}
// Bail out early if this item's row says renewal is not possible.
for (Element tr : doc.select(".rTable_div tr")) {
if (tr.select("input").attr("name").equals(media.split("\\|")[0])) {
boolean disabled = tr.select("input").hasAttr("disabled");
try {
disabled = (
disabled
|| tr.child(4).text().matches(".*nicht verl.+ngerbar.*")
|| tr.child(4).text().matches(".*Verl.+ngerung nicht m.+glich.*")
);
} catch (Exception e) {
// Row may not have a 5th column; fall back to the checkbox state.
}
if (disabled) {
// Leave the page via "back" before reporting the error.
form.add(new BasicNameValuePair("$Toolbar_0.x", "1"));
form.add(new BasicNameValuePair("$Toolbar_0.y", "1"));
htmlPost(opac_url + ";jsessionid=" + s_sid, form);
return new ProlongResult(Status.ERROR, tr.child(4).text().trim());
}
}
}
form.add(new BasicNameValuePair(media.split("\\|")[0], "on"));
// Stuttgart: textButton$0, others: textButton$1
String buttonName = doc.select("input[value=Markierte Titel verlängern]").attr("name");
form.add(new BasicNameValuePair(!"".equals(buttonName) ? buttonName : "textButton$1",
"Markierte Titel verlängern"));
doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
// Press "back" to leave the account area again.
form = new ArrayList<>();
for (Element input : doc.select("input, select")) {
if (!"image".equals(input.attr("type"))
&& !"submit".equals(input.attr("type"))
&& !"checkbox".equals(input.attr("type"))
&& !"".equals(input.attr("name"))) {
form.add(new BasicNameValuePair(input.attr("name"), input
.attr("value")));
}
}
form.add(new BasicNameValuePair("$Toolbar_0.x", "1"));
form.add(new BasicNameValuePair("$Toolbar_0.y", "1"));
htmlPost(opac_url + ";jsessionid=" + s_sid, form);
return new ProlongResult(Status.OK);
}
/**
 * Prolongs all renewable lent items. Logs in, opens the lent-items page
 * (sp=SZA link), checks every enabled checkbox, submits the renew button,
 * and parses the resulting table into per-item result lines. Finishes by
 * pressing "back" to leave the account area.
 */
@Override
public ProlongAllResult prolongAll(Account account, int useraction,
String selection) throws IOException {
String alink = null;
Document doc;
start();
doc = htmlGet(opac_url + ";jsessionid=" + s_sid + "?service="
+ s_service + getSpParams("SBK"));
try {
doc = handleLoginForm(doc, account);
} catch (OpacErrorException e) {
return new ProlongAllResult(Status.ERROR, e.getMessage());
}
// Find the link to the lent-items list (sp=SZA).
for (Element tr : doc.select(".rTable_div tr")) {
if (tr.select("a").size() == 1) {
if (tr.select("a").first().absUrl("href")
.contains("sp=SZA")) {
alink = tr.select("a").first().absUrl("href");
}
}
}
if (alink == null) {
return new ProlongAllResult(Status.ERROR);
}
doc = htmlGet(alink);
List<NameValuePair> form = new ArrayList<>();
for (Element input : doc.select("input, select")) {
if (!"image".equals(input.attr("type"))
&& !"submit".equals(input.attr("type"))
&& !"checkbox".equals(input.attr("type"))
&& !"".equals(input.attr("name"))) {
form.add(new BasicNameValuePair(input.attr("name"), input
.attr("value")));
}
// Tick every enabled checkbox to renew all renewable items.
if ("checkbox".equals(input.attr("type"))
&& !input.hasAttr("disabled")) {
form.add(new BasicNameValuePair(input.attr("name"), "on"));
}
}
// Stuttgart: textButton$0, others: textButton$1
String buttonName = doc.select("input[value=Markierte Titel verlängern]").attr("name");
form.add(new BasicNameValuePair(!"".equals(buttonName) ? buttonName : "textButton$1",
"Markierte Titel verlängern"));
doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
// Parse the per-item outcome table: title, new return date, message.
List<Map<String, String>> result = new ArrayList<>();
for (Element tr : doc.select(".rTable_div tbody tr")) {
Map<String, String> line = new HashMap<>();
line.put(ProlongAllResult.KEY_LINE_TITLE,
tr.child(3).text().split("[:/;]")[0].trim());
line.put(ProlongAllResult.KEY_LINE_NEW_RETURNDATE, tr.child(1)
.text());
line.put(ProlongAllResult.KEY_LINE_MESSAGE, tr.child(4).text());
result.add(line);
}
// Press "back" to leave the account area again.
form = new ArrayList<>();
for (Element input : doc.select("input, select")) {
if (!"image".equals(input.attr("type"))
&& !"submit".equals(input.attr("type"))
&& !"checkbox".equals(input.attr("type"))
&& !"".equals(input.attr("name"))) {
form.add(new BasicNameValuePair(input.attr("name"), input
.attr("value")));
}
}
form.add(new BasicNameValuePair("$Toolbar_0.x", "1"));
form.add(new BasicNameValuePair("$Toolbar_0.y", "1"));
htmlPost(opac_url + ";jsessionid=" + s_sid, form);
return new ProlongAllResult(Status.OK, result);
}
/**
 * Cancels a reservation/order.
 *
 * @param media cancel data in the form "checkboxName|listUrl", as produced by
 *              {@code parseReservationList()}
 * @return OK on success, ERROR if the matching reservation list could not be
 *         found or the login failed
 */
@Override
public CancelResult cancel(String media, Account account, int useraction,
        String selection) throws IOException, OpacErrorException {
    String rlink = null;
    Document doc;
    // Fallback link derived from the stored URL; "requestCount" is renamed so
    // that htmlGet() appends a fresh request counter instead of the stale one.
    rlink = media.split("\\|")[1].replace("requestCount=", "fooo=");
    start();
    // Open the account page (sp=SBK) and log in if necessary.
    doc = htmlGet(opac_url + ";jsessionid=" + s_sid + "?service="
            + s_service + getSpParams("SBK"));
    try {
        doc = handleLoginForm(doc, account);
    } catch (OpacErrorException e) {
        return new CancelResult(Status.ERROR, e.getMessage());
    }
    // Look for a fresh link to the reservation list the item belongs to; the
    // "SP" query parameter of the stored URL identifies the list type.
    for (Element tr : doc.select(".rTable_div tr")) {
        String url = media.split("\\|")[1].toUpperCase(Locale.GERMAN);
        String sp = "SZM";
        if (url.contains("SP=")) {
            Map<String, String> qp = getQueryParamsFirst(url);
            if (qp.containsKey("SP")) {
                sp = qp.get("SP");
            }
        }
        if (tr.select("a").size() == 1) {
            if ((tr.text().contains("Reservationen") || tr.text().contains("Vormerkung") ||
                    tr.text().contains("Bestellung"))
                    && !tr.child(0).text().trim().equals("")
                    && tr.select("a").first().attr("href")
                            .toUpperCase(Locale.GERMAN)
                            .contains("SP=" + sp)) {
                rlink = tr.select("a").first().absUrl("href");
            }
        }
    }
    if (rlink == null) {
        return new CancelResult(Status.ERROR);
    }
    doc = htmlGet(rlink);
    // Collect all hidden/regular form fields so the POST replays the page
    // state, then tick the checkbox of the item to cancel.
    List<NameValuePair> form = new ArrayList<>();
    for (Element input : doc.select("input, select")) {
        if (!"image".equals(input.attr("type"))
                && !"submit".equals(input.attr("type"))
                && !"checkbox".equals(input.attr("type"))
                && !"".equals(input.attr("name"))) {
            form.add(new BasicNameValuePair(input.attr("name"), input
                    .attr("value")));
        }
    }
    form.add(new BasicNameValuePair(media.split("\\|")[0], "on"));
    // Stuttgart: textButton, others: textButton$0
    String buttonName = doc.select("input[value=Markierte Titel löschen]").attr("name");
    form.add(new BasicNameValuePair(!"".equals(buttonName) ? buttonName : "textButton$0",
            "Markierte Titel löschen"));
    doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
    // Navigate back to the account overview via the toolbar "back" button so
    // the server-side session state stays consistent.
    form = new ArrayList<>();
    for (Element input : doc.select("input, select")) {
        if (!"image".equals(input.attr("type"))
                && !"submit".equals(input.attr("type"))
                && !"checkbox".equals(input.attr("type"))
                && !"".equals(input.attr("name"))) {
            form.add(new BasicNameValuePair(input.attr("name"), input
                    .attr("value")));
        }
    }
    form.add(new BasicNameValuePair("$Toolbar_0.x", "1"));
    form.add(new BasicNameValuePair("$Toolbar_0.y", "1"));
    htmlPost(opac_url + ";jsessionid=" + s_sid, form);
    return new CancelResult(Status.OK);
}
/**
 * Loads the account overview (fees, card validity), the list of current
 * loans and all reservation-type lists for the given account.
 */
@Override
public AccountData account(Account account) throws IOException,
        JSONException, OpacErrorException {
    start();
    // Open the account page (sp=SBK) and log in if a login form is shown.
    Document doc = htmlGet(opac_url + ";jsessionid=" + s_sid + "?service="
            + s_service + getSpParams("SBK"));
    doc = handleLoginForm(doc, account);
    // VOEBB (Berlin) lists title and author in one field; everywhere else
    // they are split at ":" / ";" separators.
    boolean split_title_author = true;
    if (doc.head().html().contains("VOEBB")) {
        split_title_author = false;
    }
    AccountData adata = new AccountData(account.getId());
    // Fees and card expiry date from the account overview table.
    for (Element tr : doc.select(".aDISListe tr")) {
        if (tr.child(0).text().matches(".*F.+llige Geb.+hren.*")) {
            adata.setPendingFees(tr.child(1).text().trim());
        }
        if (tr.child(0).text().matches(".*Ausweis g.+ltig bis.*")) {
            adata.setValidUntil(tr.child(1).text().trim());
        }
    }
    DateTimeFormatter fmt = DateTimeFormat.forPattern("dd.MM.yyyy").withLocale(Locale.GERMAN);
    // Loans ("Ausleihen"): find the link to the list of current loans
    // (sp=SZA) and the advertised number of loans.
    String alink = null;
    int anum = 0;
    List<LentItem> lent = new ArrayList<>();
    for (Element tr : doc.select(".rTable_div tr")) {
        if (tr.select("a").size() == 1) {
            if (tr.select("a").first().absUrl("href").contains("sp=SZA")) {
                alink = tr.select("a").first().absUrl("href");
                anum = Integer.parseInt(tr.child(0).text().trim());
            }
        }
    }
    if (alink != null) {
        Document adoc = htmlGet(alink);
        s_alink = alink;
        List<NameValuePair> form = new ArrayList<>();
        String prolongTest = null;
        for (Element input : adoc.select("input, select")) {
            if (!"image".equals(input.attr("type"))
                    && !"submit".equals(input.attr("type"))
                    && !"".equals(input.attr("name"))) {
                // Value-less checkboxes submit "on" when checked.
                if (input.attr("type").equals("checkbox")
                        && !input.hasAttr("value")) {
                    input.val("on");
                }
                form.add(new BasicNameValuePair(input.attr("name"), input
                        .attr("value")));
            } else if (input.val().matches(".+verl.+ngerbar.+")) {
                // Submit button that checks which marked items are renewable.
                prolongTest = input.attr("name");
            }
        }
        if (prolongTest != null) {
            // Submit the "renewable?" check so the list shows per-item
            // renewability hints; keep the old page if an error comes back.
            form.add(new BasicNameValuePair(prolongTest,
                    "Markierte Titel verlängerbar?"));
            Document adoc_new = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
            if (adoc_new.select(".message h1").size() == 0) {
                adoc = adoc_new;
            }
        }
        parseMediaList(adoc, alink, lent, split_title_author);
        assert (lent.size() == anum);
        // Navigate back to the account overview.
        form = new ArrayList<>();
        boolean cancelButton = false;
        for (Element input : adoc.select("input, select")) {
            if (!"image".equals(input.attr("type"))
                    && !"submit".equals(input.attr("type"))
                    && !"checkbox".equals(input.attr("type"))
                    && !"".equals(input.attr("name"))) {
                form.add(new BasicNameValuePair(input.attr("name"), input
                        .attr("value")));
            }
            if ("submit".equals(input.attr("type")) &&
                    "Abbrechen".equals(input.attr("value")) && !cancelButton) {
                // Stuttgart: Cancel button instead of toolbar back button
                form.add(new BasicNameValuePair(input.attr("name"), input.attr("value")));
                cancelButton = true;
            }
        }
        if (!cancelButton) {
            form.add(new BasicNameValuePair("$Toolbar_0.x", "1"));
            form.add(new BasicNameValuePair("$Toolbar_0.y", "1"));
        }
        doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
    } else {
        assert (anum == 0);
    }
    adata.setLent(lent);
    // Collect {label, URL} pairs for all reservation-type lists (holds,
    // interlibrary loans, stack orders, ...) plus their item counts.
    List<String[]> rlinks = new ArrayList<>();
    int rnum = 0;
    List<ReservedItem> res = new ArrayList<>();
    for (Element tr : doc.select(".rTable_div tr")) {
        if (tr.select("a").size() == 1) {
            if ((tr.text().contains("Reservationen")
                    || tr.text().contains("Vormerkung")
                    || tr.text().contains("Fernleihbestellung")
                    || tr.text().contains("Bereitstellung")
                    || tr.text().contains("Bestellw")
                    || tr.text().contains("Magazin"))
                    && !tr.child(0).text().trim().equals("")) {
                rlinks.add(new String[]{
                        tr.select("a").text(),
                        tr.select("a").first().absUrl("href"),
                });
                rnum += Integer.parseInt(tr.child(0).text().trim());
            }
        }
    }
    for (String[] rlink : rlinks) {
        Document rdoc = htmlGet(rlink[1]);
        boolean error =
                parseReservationList(rdoc, rlink, split_title_author, res, fmt, stringProvider);
        if (error) {
            // Maybe we should send a bug report here, but using ACRA breaks
            // the unit tests
            adata.setWarning("Beim Abrufen der Reservationen ist ein Problem aufgetreten");
        }
        // Navigate back to the account overview after each list.
        List<NameValuePair> form = new ArrayList<>();
        for (Element input : rdoc.select("input, select")) {
            if (!"image".equals(input.attr("type"))
                    && !"submit".equals(input.attr("type"))
                    && !"checkbox".equals(input.attr("type"))
                    && !"".equals(input.attr("name"))) {
                form.add(new BasicNameValuePair(input.attr("name"), input
                        .attr("value")));
            }
        }
        form.add(new BasicNameValuePair("$Toolbar_0.x", "1"));
        form.add(new BasicNameValuePair("$Toolbar_0.y", "1"));
        htmlPost(opac_url + ";jsessionid=" + s_sid, form);
    }
    assert (res.size() == rnum);
    adata.setReservations(res);
    return adata;
}
/**
 * Parses one reservation list page into {@link ReservedItem}s.
 *
 * @param doc                the reservation list page
 * @param rlink              pair of {list label, list URL} as collected in
 *                           {@code account()}
 * @param split_title_author whether the title cell contains "Title / Author"
 *                           style text that should be split
 * @param res                output list the parsed items are appended to
 * @param fmt                formatter for "dd.MM.yyyy" dates
 * @param stringProvider     used to localize branch annotations
 * @return true if at least one row could not be parsed (too few columns)
 */
static boolean parseReservationList(Document doc, String[] rlink, boolean split_title_author,
        List<ReservedItem> res, DateTimeFormatter fmt, StringProvider stringProvider) {
    boolean error = false;
    // The page wording tells us what kind of list this is.
    boolean interlib = doc.html().contains("Ihre Fernleih-Bestellung");
    boolean stacks = doc.html().contains("aus dem Magazin");
    boolean provision = doc.html().contains("Ihre Bereitstellung");
    // Default column positions; overridden below using the header row text.
    Map<String, Integer> colmap = new HashMap<>();
    colmap.put("title", 2);
    colmap.put("branch", 1);
    colmap.put("expirationdate", 0);
    int i = 0;
    for (Element th : doc.select(".rTable_div thead tr th")) {
        if (th.text().contains("Bis")) {
            colmap.put("expirationdate", i);
        }
        if (th.text().contains("Ausgabeort")) {
            colmap.put("branch", i);
        }
        if (th.text().contains("Titel")) {
            colmap.put("title", i);
        }
        if (th.text().contains("Hinweis")) {
            colmap.put("status", i);
        }
        i++;
    }
    for (Element tr : doc.select(".rTable_div tbody tr")) {
        if (tr.children().size() >= colmap.size()) {
            ReservedItem item = new ReservedItem();
            // <br> separates title from author/signature; normalize to ";".
            String text = tr.child(colmap.get("title")).html();
            text = Jsoup.parse(text.replaceAll("(?i)<br[^>]*>", ";")).text();
            if (split_title_author) {
                String[] split = text.split("[:/;\n]");
                item.setTitle(split[0].replaceFirst("([^:;\n]+)[:;\n](.*)$", "$1").trim());
                if (split.length > 1) {
                    item.setAuthor(
                            split[1].replaceFirst("([^:;\n]+)[:;\n](.*)$", "$1").trim());
                }
            } else {
                item.setTitle(text);
            }
            String branch = tr.child(colmap.get("branch")).text().trim();
            // Annotate the branch with the list type where appropriate.
            if (interlib) {
                branch = stringProvider
                        .getFormattedString(StringProvider.INTERLIB_BRANCH, branch);
            } else if (stacks) {
                branch = stringProvider
                        .getFormattedString(StringProvider.STACKS_BRANCH, branch);
            } else if (provision) {
                branch = stringProvider
                        .getFormattedString(StringProvider.PROVISION_BRANCH, branch);
            }
            item.setBranch(branch);
            if (colmap.containsKey("status")) {
                String status = tr.child(colmap.get("status")).text().trim();
                if (!"".equals(status)) item.setStatus(status);
            }
            if (rlink[0].contains("Abholbereit") || rlink[0].contains("Bereitstellung")) {
                // Orders that are ready for pickup
                item.setStatus("bereit");
                if (tr.child(0).text().trim().length() >= 10) {
                    item.setExpirationDate(fmt.parseLocalDate(
                            tr.child(colmap.get("expirationdate")).text().trim()
                                    .substring(0, 10)));
                }
            } else {
                // Not yet ready for pickup: only some list types (SZM/SZW/SZB)
                // support cancellation via checkbox.
                if (tr.select("input[type=checkbox]").size() > 0
                        && (rlink[1].toUpperCase(Locale.GERMAN).contains(
                        "SP=SZM") || rlink[1].toUpperCase(
                        Locale.GERMAN).contains("SP=SZW") || rlink[1].toUpperCase(
                        Locale.GERMAN).contains("SP=SZB"))) {
                    item.setCancelData(
                            tr.select("input[type=checkbox]").attr("name") + "|" +
                                    rlink[1]);
                }
            }
            res.add(item);
        } else {
            // This is a strange bug where sometimes there is only three
            // columns
            error = true;
        }
    }
    return error;
}
/**
 * Parses the list of lent items on the loans page into {@link LentItem}s.
 *
 * @param adoc               the loans list page
 * @param alink              URL of the loans list, stored as prolong data
 * @param lent               output list the parsed items are appended to
 * @param split_title_author whether to split "Title : Author" style cells
 */
static void parseMediaList(Document adoc, String alink, List<LentItem> lent,
        boolean split_title_author) {
    DateTimeFormatter fmt = DateTimeFormat.forPattern("dd.MM.yyyy").withLocale(Locale.GERMAN);
    for (Element tr : adoc.select(".rTable_div tbody tr")) {
        LentItem item = new LentItem();
        // <br> separates title, signature and number; normalize to "#".
        String text = Jsoup.parse(tr.child(3).html().replaceAll("(?i)<br[^>]*>", "#"))
                .text();
        if (text.contains(" / ")) {
            // Format "Titel / Autor #Sig#Nr", e.g. a regular loan in Berlin
            String[] split = text.split("[/#\n]");
            String title = split[0];
            //Is always the last one...
            String id = split[split.length - 1];
            item.setId(id);
            if (split_title_author) {
                title = title.replaceFirst("([^:;\n]+)[:;\n](.*)$", "$1");
            }
            item.setTitle(title.trim());
            if (split.length > 1) {
                item.setAuthor(split[1].replaceFirst("([^:;\n]+)[:;\n](.*)$", "$1").trim());
            }
        } else {
            // Format "Autor: Titel - Verlag - ISBN:... #Nummer", e.g. an
            // interlibrary loan in Berlin
            String[] split = text.split("#");
            String[] aut_tit = split[0].split(": ");
            item.setAuthor(aut_tit[0].replaceFirst("([^:;\n]+)[:;\n](.*)$", "$1").trim());
            if (aut_tit.length > 1) {
                item.setTitle(
                        aut_tit[1].replaceFirst("([^:;\n]+)[:;\n](.*)$", "$1").trim());
            }
            //Is always the last one...
            String id = split[split.length - 1];
            item.setId(id);
        }
        String date = tr.child(1).text().trim();
        if (date.contains("-")) {
            // Nürnberg: "29.03.2016 - 26.04.2016"
            // for beginning and end date in one field
            date = date.split("-")[1].trim();
        }
        try {
            item.setDeadline(fmt.parseLocalDate(date));
        } catch (IllegalArgumentException e) {
            // Unparseable date: leave the deadline unset rather than fail.
            e.printStackTrace();
        }
        item.setHomeBranch(tr.child(2).text().trim());
        if (tr.select("input[type=checkbox]").hasAttr("disabled")) {
            item.setRenewable(false);
        } else {
            try {
                item.setRenewable(
                        !tr.child(4).text().matches(".*nicht verl.+ngerbar.*")
                                && !tr.child(4).text().matches(".*Verl.+ngerung nicht m.+glich.*")
                );
            } catch (Exception e) {
                // Best effort: some installations have no hint column, so
                // child(4) may not exist — renewability stays at its default.
            }
            item.setProlongData(
                    tr.select("input[type=checkbox]").attr("name") + "|" + alink);
        }
        lent.add(item);
    }
}
/**
 * Fills in and submits the login form if the given page contains one.
 *
 * @param doc     the page that may contain a login form
 * @param account credentials to use
 * @return the page shown after logging in, or {@code doc} unchanged if no
 *         login form was present
 * @throws OpacErrorException if the OPAC reports a login error message
 */
protected Document handleLoginForm(Document doc, Account account)
        throws IOException, OpacErrorException {
    if (doc.select("#LPASSW_1").size() == 0) {
        // No password field -> not a login page, nothing to do.
        return doc;
    }
    doc.select("#LPASSW_1").val(account.getPassword());
    List<NameValuePair> form = new ArrayList<>();
    for (Element input : doc.select("input, select")) {
        if (!"image".equals(input.attr("type"))
                && !"checkbox".equals(input.attr("type"))
                && !"submit".equals(input.attr("type"))
                && !"".equals(input.attr("name"))) {
            // Different installations use different ids for the user
            // number field.
            if (input.attr("id").equals("L#AUSW_1")
                    || input.attr("fld").equals("L#AUSW_1")
                    || input.attr("id").equals("IDENT_1")
                    || input.attr("id").equals("LMATNR_1")) {
                input.attr("value", account.getName());
            }
            form.add(new BasicNameValuePair(input.attr("name"), input
                    .attr("value")));
        }
    }
    Element inputSend = doc.select("input[type=submit]").first();
    form.add(new BasicNameValuePair(inputSend.attr("name"), inputSend
            .attr("value")));
    doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
    if (doc.select(".message h1, .alert").size() > 0) {
        // A message page follows the login; submit its form to continue.
        // Only "you are logged in" style messages are not errors.
        String msg = doc.select(".message h1, .alert").text().trim();
        form = new ArrayList<>();
        for (Element input : doc.select("input")) {
            if (!"image".equals(input.attr("type"))
                    && !"checkbox".equals(input.attr("type"))
                    && !"".equals(input.attr("name"))) {
                form.add(new BasicNameValuePair(input.attr("name"), input
                        .attr("value")));
            }
        }
        doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
        if (!msg.contains("Sie sind angemeldet") && !msg.contains("jetzt angemeldet")) {
            throw new OpacErrorException(msg);
        }
        return doc;
    } else {
        return doc;
    }
}
/**
 * Scrapes the advanced search page and builds the list of supported search
 * fields (text fields and dropdowns).
 */
@Override
public List<SearchField> parseSearchFields() throws IOException,
        JSONException {
    start();
    Document doc = htmlGet(opac_url + ";jsessionid=" + s_sid + "?service="
            + s_service + getSpParams());
    List<SearchField> fields = new ArrayList<>();
    // dropdown to select which field you want to search in
    Elements searchoptions = doc.select("#SUCH01_1 option");
    if (searchoptions.size() == 0 && doc.select("input[fld=FELD01_1]").size() > 0) {
        // Hack is needed in Nuernberg
        searchoptions = doc.select("input[fld=FELD01_1]").first().previousElementSibling()
                .select("option");
    }
    Set<String> fieldIds = new HashSet<>();
    for (Element opt : searchoptions) {
        // Skip duplicate options so they do not show up more than once
        // (happens at Stadtbücherei Stuttgart)
        if (fieldIds.contains(opt.attr("value"))) continue;
        TextSearchField field = new TextSearchField();
        field.setId(opt.attr("value"));
        field.setDisplayName(opt.text());
        field.setHint("");
        fields.add(field);
        fieldIds.add(field.getId());
    }
    // Save data so that the search() function knows that this
    // is not a selectable search field
    JSONObject selectableData = new JSONObject();
    selectableData.put("selectable", false);
    for (Element row : doc.select("div[id~=F\\d+], .search-adv-source")) {
        if (row.select("input[type=text]").size() == 1
                && row.select("input, select").first().tagName()
                .equals("input")) {
            // A single text search field
            Element input = row.select("input[type=text]").first();
            TextSearchField field = new TextSearchField();
            field.setId(input.attr("id"));
            field.setDisplayName(row.select("label").first().text());
            field.setHint("");
            field.setData(selectableData);
            fields.add(field);
        } else if (row.select("select").size() == 1
                && row.select("input[type=text]").size() == 0) {
            // Things like language, media type, etc.
            Element select = row.select("select").first();
            DropdownSearchField field = new DropdownSearchField();
            field.setId(select.id());
            field.setDisplayName(row.select("label").first().text());
            for (Element opt : select.select("option")) {
                field.addDropdownValue(opt.attr("value"), opt.text());
            }
            fields.add(field);
        } else if (row.select("select").size() == 0
                && row.select("input[type=text]").size() == 3
                && row.select("label").size() == 3) {
            // Three text inputs.
            // Year single/from/to or things like Band-/Heft-/Satznummer
            String name1 = row.select("label").get(0).text();
            String name2 = row.select("label").get(1).text();
            String name3 = row.select("label").get(2).text();
            Element input1 = row.select("input[type=text]").get(0);
            Element input2 = row.select("input[type=text]").get(1);
            Element input3 = row.select("input[type=text]").get(2);
            if (name2.contains("von") && name3.contains("bis")) {
                // "von"/"bis" labels -> a from/to range (e.g. year range)
                TextSearchField field1 = new TextSearchField();
                field1.setId(input1.id());
                field1.setDisplayName(name1);
                field1.setHint("");
                field1.setData(selectableData);
                fields.add(field1);
                TextSearchField field2 = new TextSearchField();
                field2.setId(input2.id());
                field2.setDisplayName(name2.replace("von", "").trim());
                field2.setHint("von");
                field2.setData(selectableData);
                fields.add(field2);
                TextSearchField field3 = new TextSearchField();
                field3.setId(input3.id());
                field3.setDisplayName(name3.replace("bis", "").trim());
                field3.setHint("bis");
                field3.setHalfWidth(true);
                field3.setData(selectableData);
                fields.add(field3);
            } else {
                // Three independent text fields
                TextSearchField field1 = new TextSearchField();
                field1.setId(input1.id());
                field1.setDisplayName(name1);
                field1.setHint("");
                field1.setData(selectableData);
                fields.add(field1);
                TextSearchField field2 = new TextSearchField();
                field2.setId(input2.id());
                field2.setDisplayName(name2);
                field2.setHint("");
                field2.setData(selectableData);
                fields.add(field2);
                TextSearchField field3 = new TextSearchField();
                field3.setId(input3.id());
                field3.setDisplayName(name3);
                field3.setHint("");
                field3.setData(selectableData);
                fields.add(field3);
            }
        }
    }
    // Drop fields that are known not to work on their own (see
    // ignoredFieldNames).
    for (Iterator<SearchField> iterator = fields.iterator(); iterator
            .hasNext(); ) {
        SearchField field = iterator.next();
        if (ignoredFieldNames.contains(field.getDisplayName())) {
            iterator.remove();
        }
    }
    return fields;
}
/**
 * Returns a URL suitable for sharing the given record, or {@code null} if
 * none is available. Adis record identifiers are sometimes absolute URLs;
 * only those can be shared directly.
 */
@Override
public String getShareUrl(String id, String title) {
    return id.startsWith("http") ? id : null;
}
/**
 * Bitmask of the optional features this connector implements.
 */
@Override
public int getSupportFlags() {
    int flags = SUPPORT_FLAG_ACCOUNT_PROLONG_ALL;
    flags |= SUPPORT_FLAG_ENDLESS_SCROLLING;
    flags |= SUPPORT_FLAG_WARN_RESERVATION_FEES;
    return flags;
}
/**
 * Verifies the account credentials by opening the account page and
 * submitting the login form; {@code handleLoginForm} throws an
 * {@link OpacErrorException} if the login fails.
 */
@Override
public void checkAccountData(Account account) throws IOException,
        JSONException, OpacErrorException {
    start();
    Document doc = htmlGet(opac_url + ";jsessionid=" + s_sid + "?service="
            + s_service + getSpParams("SBK"));
    handleLoginForm(doc, account);
}
/**
 * Language switching is not supported by this implementation; this call is
 * a no-op.
 */
@Override
public void setLanguage(String language) {
    // TODO Auto-generated method stub
}
/**
 * Returns {@code null}, signalling that language switching is not
 * supported by this implementation.
 */
@Override
public Set<String> getSupportedLanguages() throws IOException {
    // TODO Auto-generated method stub
    return null;
}
}
|
opacclient/libopac/src/main/java/de/geeksfactory/opacclient/apis/Adis.java
|
package de.geeksfactory.opacclient.apis;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.message.BasicNameValuePair;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.json.JSONException;
import org.json.JSONObject;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import de.geeksfactory.opacclient.apis.OpacApi.MultiStepResult.Status;
import de.geeksfactory.opacclient.i18n.StringProvider;
import de.geeksfactory.opacclient.networking.HttpClientFactory;
import de.geeksfactory.opacclient.networking.HttpUtils;
import de.geeksfactory.opacclient.networking.NotReachableException;
import de.geeksfactory.opacclient.networking.SSLSecurityException;
import de.geeksfactory.opacclient.objects.Account;
import de.geeksfactory.opacclient.objects.AccountData;
import de.geeksfactory.opacclient.objects.Copy;
import de.geeksfactory.opacclient.objects.Detail;
import de.geeksfactory.opacclient.objects.DetailedItem;
import de.geeksfactory.opacclient.objects.Filter;
import de.geeksfactory.opacclient.objects.Filter.Option;
import de.geeksfactory.opacclient.objects.LentItem;
import de.geeksfactory.opacclient.objects.Library;
import de.geeksfactory.opacclient.objects.ReservedItem;
import de.geeksfactory.opacclient.objects.SearchRequestResult;
import de.geeksfactory.opacclient.objects.SearchResult;
import de.geeksfactory.opacclient.objects.SearchResult.MediaType;
import de.geeksfactory.opacclient.searchfields.DropdownSearchField;
import de.geeksfactory.opacclient.searchfields.SearchField;
import de.geeksfactory.opacclient.searchfields.SearchQuery;
import de.geeksfactory.opacclient.searchfields.TextSearchField;
public class Adis extends ApacheBaseApi implements OpacApi {
// Maps media type labels used on Adis pages to our MediaType values.
protected static HashMap<String, MediaType> types = new HashMap<>();
// Display names of search fields that are removed in parseSearchFields().
protected static HashSet<String> ignoredFieldNames = new HashSet<>();

static {
    types.put("Buch", MediaType.BOOK);
    types.put("Band", MediaType.BOOK);
    types.put("DVD-ROM", MediaType.CD_SOFTWARE);
    types.put("CD-ROM", MediaType.CD_SOFTWARE);
    types.put("Medienkombination", MediaType.PACKAGE);
    types.put("DVD-Video", MediaType.DVD);
    types.put("DVD", MediaType.DVD);
    types.put("Noten", MediaType.SCORE_MUSIC);
    types.put("Konsolenspiel", MediaType.GAME_CONSOLE);
    types.put("Spielkonsole", MediaType.GAME_CONSOLE);
    types.put("CD", MediaType.CD);
    types.put("Zeitschrift", MediaType.MAGAZINE);
    types.put("Zeitschriftenheft", MediaType.MAGAZINE);
    types.put("Zeitung", MediaType.NEWSPAPER);
    types.put("Beitrag E-Book", MediaType.EBOOK);
    types.put("Elektronische Ressource", MediaType.EBOOK);
    types.put("E-Book", MediaType.EBOOK);
    types.put("Karte", MediaType.MAP);
    types.put("E-Ressource", MediaType.EBOOK);
    types.put("Munzinger", MediaType.EBOOK);
    types.put("E-Audio", MediaType.EAUDIO);
    types.put("Blu-Ray", MediaType.BLURAY);
    // TODO: The following fields from Berlin make no sense and don't work
    // when they are displayed alone.
    // We can only include them if we automatically deselect the "Verbund"
    // checkbox
    // when one of these dropdowns has a value other than "".
    ignoredFieldNames.add("oder Bezirk");
    ignoredFieldNames.add("oder Bibliothek");
}

protected String opac_url = ""; // base URL of the OPAC installation
protected JSONObject data; // per-library configuration
protected Library library;
// Server-side request counter, mirrored into every request (see htmlGet/htmlPost).
protected int s_requestCount = 0;
protected String s_service; // "service" query parameter from the start page
protected String s_sid; // jsessionid of the current session
protected List<String> s_exts; // "sp" parameters of the advanced search link
protected String s_alink; // URL of the lent-items list, set in account()
// NOTE(review): the following three appear to cache search pagination state;
// they are used outside this excerpt — confirm against the rest of the file.
protected List<NameValuePair> s_pageform;
protected int s_lastpage;
protected Document s_reusedoc;
protected String s_nextbutton = "$Toolbar_5"; // toolbar button for "next page"
protected String s_previousbutton = "$Toolbar_4"; // toolbar button for "previous page"
/**
 * Parses the query string of a URL into a multimap of parameter names to
 * their (possibly multiple) values.
 *
 * @param url full URL, possibly without a query string
 * @return map from decoded parameter name to list of decoded values; empty
 *         map if the URL has no query string
 */
public static Map<String, List<String>> getQueryParams(String url) {
    try {
        Map<String, List<String>> params = new HashMap<>();
        String[] urlParts = url.split("\\?");
        if (urlParts.length > 1) {
            String query = urlParts[1];
            for (String param : query.split("&")) {
                // Split into at most two parts so that values containing
                // '=' (e.g. base64-like tokens) are not truncated.
                String[] pair = param.split("=", 2);
                String key = URLDecoder.decode(pair[0], "UTF-8");
                String value = "";
                if (pair.length > 1) {
                    value = URLDecoder.decode(pair[1], "UTF-8");
                }
                List<String> values = params.get(key);
                if (values == null) {
                    values = new ArrayList<>();
                    params.put(key, values);
                }
                values.add(value);
            }
        }
        return params;
    } catch (UnsupportedEncodingException ex) {
        // UTF-8 is guaranteed to be supported by every JVM.
        throw new AssertionError(ex);
    }
}
/**
 * Performs a GET request and parses the response as HTML.
 *
 * Appends the current session "requestCount" parameter if the URL does not
 * already contain one, and scrapes the new request counter from links in
 * the response so subsequent requests stay in sync with the server-side
 * session state.
 *
 * @param url the URL to fetch
 * @return the parsed document, with its base URI set to {@code url}
 * @throws IOException on network errors; SSL and timeout problems are
 *         mapped to {@code SSLSecurityException} / {@code NotReachableException}
 */
public Document htmlGet(String url) throws
        IOException {

    if (!url.contains("requestCount") && s_requestCount >= 0) {
        url = url + (url.contains("?") ? "&" : "?") + "requestCount="
                + s_requestCount;
    }

    HttpGet httpget = new HttpGet(cleanUrl(url));
    HttpResponse response;

    try {
        response = http_client.execute(httpget);
    } catch (javax.net.ssl.SSLPeerUnverifiedException e) {
        throw new SSLSecurityException(e.getMessage());
    } catch (javax.net.ssl.SSLException e) {
        // Can be "Not trusted server certificate" or can be a
        // aborted/interrupted handshake/connection
        if (e.getMessage().contains("timed out")
                || e.getMessage().contains("reset by")) {
            e.printStackTrace();
            throw new NotReachableException(e.getMessage());
        } else {
            throw new SSLSecurityException(e.getMessage());
        }
    } catch (InterruptedIOException e) {
        e.printStackTrace();
        throw new NotReachableException(e.getMessage());
    } catch (IOException e) {
        if (e.getMessage() != null && e.getMessage().contains("Request aborted")) {
            e.printStackTrace();
            throw new NotReachableException(e.getMessage());
        } else {
            throw e;
        }
    }

    if (response.getStatusLine().getStatusCode() >= 400) {
        throw new NotReachableException(response.getStatusLine().getReasonPhrase());
    }
    String html = convertStreamToString(response.getEntity().getContent(),
            getDefaultEncoding());
    HttpUtils.consume(response.getEntity());
    Document doc = Jsoup.parse(html);
    Pattern patRequestCount = Pattern.compile("requestCount=([0-9]+)");
    for (Element a : doc.select("a")) {
        Matcher objid_matcher = patRequestCount.matcher(a.attr("href"));
        // Use find() instead of matches(): real hrefs contain more than just
        // the requestCount parameter, so a whole-string match would almost
        // never succeed. htmlPost() achieves the same with a ".*...[^0-9].*"
        // wildcard pattern.
        if (objid_matcher.find()) {
            s_requestCount = Integer.parseInt(objid_matcher.group(1));
        }
    }
    doc.setBaseUri(url);
    return doc;
}
/**
 * Performs a POST request with the given form data and parses the response
 * as HTML.
 *
 * Ensures the session "requestCount" parameter is part of the form (adding
 * it if the caller did not) and scrapes the updated counter from links in
 * the response.
 *
 * @param url  the URL to post to
 * @param data form fields; may be extended with a "requestCount" pair
 * @return the parsed document, with its base URI set to {@code url}
 */
public Document htmlPost(String url, List<NameValuePair> data)
        throws IOException {
    HttpPost httppost = new HttpPost(cleanUrl(url));
    // Add the current requestCount unless the caller already supplied one.
    boolean rcf = false;
    for (NameValuePair nv : data) {
        if (nv.getName().equals("requestCount")) {
            rcf = true;
            break;
        }
    }
    if (!rcf) {
        data.add(new BasicNameValuePair("requestCount", s_requestCount + ""));
    }
    httppost.setEntity(new UrlEncodedFormEntity(data, getDefaultEncoding()));
    HttpResponse response;
    try {
        response = http_client.execute(httppost);
    } catch (javax.net.ssl.SSLPeerUnverifiedException e) {
        throw new SSLSecurityException(e.getMessage());
    } catch (javax.net.ssl.SSLException e) {
        // Can be "Not trusted server certificate" or can be a
        // aborted/interrupted handshake/connection
        if (e.getMessage().contains("timed out")
                || e.getMessage().contains("reset by")) {
            e.printStackTrace();
            throw new NotReachableException(e.getMessage());
        } else {
            throw new SSLSecurityException(e.getMessage());
        }
    } catch (InterruptedIOException e) {
        e.printStackTrace();
        throw new NotReachableException(e.getMessage());
    } catch (IOException e) {
        if (e.getMessage() != null && e.getMessage().contains("Request aborted")) {
            e.printStackTrace();
            throw new NotReachableException(e.getMessage());
        } else {
            throw e;
        }
    }
    if (response.getStatusLine().getStatusCode() >= 400) {
        throw new NotReachableException(response.getStatusLine().getReasonPhrase());
    }
    String html = convertStreamToString(response.getEntity().getContent(),
            getDefaultEncoding());
    HttpUtils.consume(response.getEntity());
    Document doc = Jsoup.parse(html);
    // Keep our request counter in sync with the server-side session.
    Pattern patRequestCount = Pattern
            .compile(".*requestCount=([0-9]+)[^0-9].*");
    for (Element a : doc.select("a")) {
        Matcher objid_matcher = patRequestCount.matcher(a.attr("href"));
        if (objid_matcher.matches()) {
            s_requestCount = Integer.parseInt(objid_matcher.group(1));
        }
    }
    doc.setBaseUri(url);
    return doc;
}
/**
 * Initializes a session: fetches the start page and extracts the service
 * id, advanced-search "sp" parameters and the session id from its
 * navigation links.
 */
@Override
public void start() throws IOException {
    try {
        // -1 suppresses appending a requestCount to the very first request.
        s_requestCount = -1;
        Document doc = htmlGet(opac_url + "?"
                + data.getString("startparams"));
        Pattern padSid = Pattern
                .compile(".*;jsessionid=([0-9A-Fa-f]+)[^0-9A-Fa-f].*");
        for (Element navitem : doc
                .select("#unav li a, #hnav li a, .tree_ul li a, .search-adv")) {
            // Düsseldorf uses a custom layout where the navbar is .tree_ul
            // in Stuttgart, the navbar is #hnav and advanced search is linked outside the
            // navbar as .search-adv-repeat
            if (navitem.attr("href").contains("service=")) {
                s_service = getQueryParams(navitem.attr("href")).get(
                        "service").get(0);
            }
            if (navitem.text().contains("Erweiterte Suche")) {
                s_exts = getQueryParams(navitem.attr("href")).get("sp");
            }
            Matcher objid_matcher = padSid.matcher(navitem.attr("href"));
            if (objid_matcher.matches()) {
                s_sid = objid_matcher.group(1);
            }
        }
        if (s_exts == null) {
            // Fall back to the default advanced search parameter.
            s_exts = Collections.singletonList("SS6");
        }
    } catch (JSONException e) {
        throw new RuntimeException(e);
    }
    super.start();
}
/**
 * Adis pages are always handled as UTF-8.
 */
@Override
protected String getDefaultEncoding() {
    return "UTF-8";
}
/**
 * Fills in the advanced search form with the given queries, submits it and
 * parses the first result page. At most four free-text criteria are
 * supported by the OPAC.
 */
@Override
public SearchRequestResult search(List<SearchQuery> queries)
        throws IOException, OpacErrorException {
    start();
    // TODO: There are also libraries with a different search form,
    // s_exts=SS2 instead of s_exts=SS6
    // e.g. munich. Treat them differently!
    Document doc = htmlGet(opac_url + ";jsessionid=" + s_sid + "?service="
            + s_service + getSpParams());
    int dropdownTextCount = 0;
    int totalCount = 0;
    List<NameValuePair> nvpairs = new ArrayList<>();
    for (SearchQuery query : queries) {
        if (!query.getValue().equals("")) {
            totalCount++;
            if (query.getSearchField() instanceof DropdownSearchField) {
                // Dropdown fields are set directly on their <select>.
                doc.select("select#" + query.getKey())
                        .val(query.getValue());
                continue;
            }
            if (query.getSearchField() instanceof TextSearchField &&
                    query.getSearchField().getData() != null &&
                    !query.getSearchField().getData().optBoolean("selectable", true) &&
                    doc.select("#" + query.getKey()).size() > 0) {
                // Fixed (non-selectable) text fields have their own input.
                doc.select("#" + query.getKey())
                        .val(query.getValue());
                continue;
            }
            // Selectable criteria go into the numbered SUCH01_n/FELD01_n
            // dropdown + text input pairs.
            dropdownTextCount++;
            if (s_exts.get(0).equals("SS2")
                    || (query.getSearchField().getData() != null && !query
                    .getSearchField().getData()
                    .optBoolean("selectable", true))) {
                doc.select("input#" + query.getKey()).val(query.getValue());
            } else {
                if (doc.select("select#SUCH01_1").size() == 0 &&
                        doc.select("input[fld=FELD01_" + dropdownTextCount + "]").size() > 0) {
                    // Hack needed for Nürnberg
                    doc.select("input[fld=FELD01_" + dropdownTextCount + "]").first()
                            .previousElementSibling().val(query.getKey());
                    doc.select("input[fld=FELD01_" + dropdownTextCount + "]")
                            .val(query.getValue());
                } else {
                    doc.select("select#SUCH01_" + dropdownTextCount).val(query.getKey());
                    doc.select("input#FELD01_" + dropdownTextCount).val(query.getValue());
                }
            }
            if (dropdownTextCount > 4) {
                throw new OpacErrorException(stringProvider.getQuantityString(
                        StringProvider.LIMITED_NUM_OF_CRITERIA, 4, 4));
            }
        }
    }
    // Serialize the whole form and submit it via the toolbar button.
    for (Element input : doc.select("input, select")) {
        if (!"image".equals(input.attr("type"))
                && !"submit".equals(input.attr("type"))
                && !"".equals(input.attr("name"))) {
            nvpairs.add(new BasicNameValuePair(input.attr("name"), input
                    .attr("value")));
        }
    }
    nvpairs.add(new BasicNameValuePair("$Toolbar_0.x", "1"));
    nvpairs.add(new BasicNameValuePair("$Toolbar_0.y", "1"));
    if (totalCount == 0) {
        throw new OpacErrorException(
                stringProvider.getString(StringProvider.NO_CRITERIA_INPUT));
    }
    Document docresults = htmlPost(opac_url + ";jsessionid=" + s_sid,
            nvpairs);
    return parse_search_wrapped(docresults, 1);
}
// Convenience overload: builds the "sp" query parameters without override.
private String getSpParams() {
    return getSpParams(null);
}
/**
 * Builds the "&sp=..." query parameter sequence from the stored s_exts
 * values. When {@code overrideSecond} is given, it replaces the second
 * value (or is used alone if only a single value is stored).
 */
private String getSpParams(String overrideSecond) {
    if (overrideSecond != null && s_exts.size() == 1) {
        return "&sp=" + overrideSecond;
    }
    StringBuilder params = new StringBuilder();
    for (int idx = 0; idx < s_exts.size(); idx++) {
        String value = (idx == 1 && overrideSecond != null)
                ? overrideSecond : s_exts.get(idx);
        params.append("&sp=").append(value);
    }
    return params.toString();
}
/**
 * Signals that the OPAC skipped the result list and jumped directly to a
 * single hit, so the caller must navigate back to the list first.
 */
public class SingleResultFound extends Exception {
}
/**
 * Calls {@code parse_search()} and recovers from the OPAC jumping directly
 * to a single result's detail page by navigating back to the result list
 * and parsing again.
 */
protected SearchRequestResult parse_search_wrapped(Document doc, int page) throws IOException, OpacErrorException {
    try {
        return parse_search(doc, page);
    } catch (SingleResultFound e) {
        // Back to the result list
        List<NameValuePair> nvpairs = new ArrayList<>();
        for (Element input : doc.select("input, select")) {
            if (!"image".equals(input.attr("type"))
                    && !"submit".equals(input.attr("type"))
                    && !"".equals(input.attr("name"))) {
                nvpairs.add(new BasicNameValuePair(input.attr("name"), input
                        .attr("value")));
            }
        }
        String name = "$Toolbar_0";
        if (doc.select("[id^=Toolbar_][title*=Trefferliste]").size() > 0) {
            // In Stuttgart, "Trefferliste" is Nr. 5, in Zurich its Nr. 1. Often, 0 ("back") works as well.
            name = doc.select("[id^=Toolbar_][title*=Trefferliste]").first().attr("name");
        }
        nvpairs.add(new BasicNameValuePair(name + ".x", "1"));
        nvpairs.add(new BasicNameValuePair(name + ".y", "1"));
        doc = htmlPost(opac_url + ";jsessionid=" + s_sid, nvpairs);
        try {
            return parse_search(doc, page);
        } catch (SingleResultFound e1) {
            // Going back did not produce a result list either; give up.
            throw new NotReachableException();
        }
    }
}
/**
 * Parses one result-list page into a {@link SearchRequestResult}.
 *
 * Side effects: refreshes {@code s_pageform}, records {@code s_lastpage} and remembers
 * the names of the next/previous paging buttons in {@code s_nextbutton} /
 * {@code s_previousbutton} for {@code searchGetPage}.
 *
 * @param doc  result-list page
 * @param page 1-based page number this document represents
 * @throws OpacErrorException if the OPAC rendered an error page or reported no hits
 * @throws SingleResultFound  if the OPAC jumped straight to a single hit's detail page
 *                            (handled by {@code parse_search_wrapped})
 */
private SearchRequestResult parse_search(Document doc, int page)
        throws OpacErrorException, SingleResultFound {
    // An error heading without the result frame (#R06) means the OPAC rejected the query.
    if (doc.select(".message h1").size() > 0
            && doc.select("#right #R06").size() == 0) {
        throw new OpacErrorException(doc.select(".message h1").text());
    }
    if (doc.select("#OPACLI").text().contains("nicht gefunden")) {
        // "nicht gefunden" = not found
        throw new OpacErrorException(
                stringProvider.getString(StringProvider.NO_RESULTS));
    }
    int total_result_count = -1;
    List<SearchResult> results = new ArrayList<>();
    if (doc.select("#R06").size() > 0) {
        // "Treffer: x-y von N" — extract the total hit count N.
        Pattern patNum = Pattern
                .compile(".*Treffer: .* von ([0-9]+)[^0-9]*");
        Matcher matcher = patNum.matcher(doc.select("#R06").text()
                .trim());
        if (matcher.matches()) {
            total_result_count = Integer.parseInt(matcher.group(1));
        } else if (doc.select("#R06").text().trim().endsWith("Treffer: 1")) {
            total_result_count = 1;
        }
    }
    // Exactly one hit: the OPAC shows the detail page instead of a list.
    if (doc.select("#R03").size() == 1
            && doc.select("#R03").text().trim()
                  .endsWith("Treffer: 1")) {
        throw new SingleResultFound();
    }
    Pattern patId = Pattern
            .compile("javascript:.*htmlOnLink\\('([0-9A-Za-z]+)'\\)");
    int nr = 1;
    // Two layout generations exist; pick selectors depending on which one we got.
    String selector_row, selector_link, selector_img, selector_num, selector_text;
    if (doc.select("table.rTable_table tbody").size() > 0) {
        selector_row = "table.rTable_table tbody tr";
        selector_link = ".rTable_td_text a";
        selector_text = ".rList_name";
        selector_img = ".rTable_td_img img, .rTable_td_text img";
        selector_num = "tr td:first-child";
    } else {
        // New version, e.g. Berlin
        selector_row = ".rList li.rList_li_even, .rList li.rList_li_odd";
        selector_link = ".rList_titel a";
        selector_text = ".rList_name";
        selector_img = ".rlist_icon img, .rList_titel img, .rList_medium .icon, .rList_availability .icon, .rList_img img";
        selector_num = ".rList_num";
    }
    for (Element tr : doc.select(selector_row)) {
        SearchResult res = new SearchResult();
        Element innerele = tr.select(selector_link).first();
        innerele.select("img").remove();
        String descr = innerele.html();
        for (Element n : tr.select(selector_text)) {
            String t = n.text().replace("\u00a0", " ").trim();
            if (t.length() > 0) {
                descr += "<br />" + t.trim();
            }
        }
        res.setInnerhtml(descr);
        try {
            res.setNr(Integer.parseInt(tr.select(selector_num).text().trim()));
        } catch (NumberFormatException e) {
            // Some layouts omit the number column; fall back to our own counter.
            res.setNr(nr);
        }
        Matcher matcher = patId.matcher(tr.select(selector_link).first().attr("href"));
        if (matcher.matches()) {
            res.setId(matcher.group(1));
        }
        // Icons encode media type and availability; titles are German or English.
        for (Element img : tr.select(selector_img)) {
            String ttext = img.attr("title");
            String src = img.attr("abs:src");
            if (types.containsKey(ttext)) {
                res.setType(types.get(ttext));
            } else if (ttext.contains("+")
                    && types.containsKey(ttext.split("\\+")[0].trim())) {
                res.setType(types.get(ttext.split("\\+")[0].trim()));
            } else if (ttext.matches(".*ist verf.+gbar") ||
                    ttext.contains("is available") ||
                    img.attr("href").contains("verfu_ja")) {
                res.setStatus(SearchResult.Status.GREEN);
            } else if (ttext.matches(".*nicht verf.+gbar") ||
                    ttext.contains("not available") ||
                    img.attr("href").contains("verfu_nein")) {
                res.setStatus(SearchResult.Status.RED);
            }
        }
        results.add(res);
        nr++;
    }
    updatePageform(doc);
    s_lastpage = page;
    // Remember the paging buttons for searchGetPage. BUG FIX: the previous-button
    // selector erroneously matched "nächster" (= next) as well; since attr() returns
    // the first match, s_previousbutton was set to the NEXT button's name. It must
    // match "vorheriger" (= previous) instead.
    String nextButton =
            doc.select("input[title=nächster], input[title=Vorwärts blättern]").attr("name");
    String previousButton =
            doc.select("input[title=vorheriger], input[title=Rückwärts blättern]").attr("name");
    if (!nextButton.equals("")) s_nextbutton = nextButton;
    if (!previousButton.equals("")) s_previousbutton = previousButton;
    return new SearchRequestResult(results, total_result_count, page);
}
/**
 * Initializes the connector with the selected library's configuration.
 * Reads the mandatory {@code baseurl} entry from the library's data JSON and fails
 * fast with a RuntimeException if it is missing.
 */
@Override
public void init(Library library, HttpClientFactory httpClientFactory) {
    super.init(library, httpClientFactory);
    this.library = library;
    this.data = library.getData();
    try {
        opac_url = this.data.getString("baseurl");
    } catch (JSONException configError) {
        // Without a base URL the connector cannot work at all.
        throw new RuntimeException(configError);
    }
}
/**
 * Result filtering is not available for Adis catalogues.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public SearchRequestResult filterResults(Filter filter, Option option)
        throws IOException {
    throw new UnsupportedOperationException();
}
/**
 * Navigates the server-side result list to the requested page by repeatedly pressing
 * the next/previous paging button (whose names were captured in {@code parse_search}).
 * The Adis OPAC keeps paging state on the server, so we can only move one page at a
 * time relative to {@code s_lastpage}.
 *
 * @param page the 1-based target page number
 * @return the parsed target page; {@code null} if {@code page == s_lastpage} already
 *         (no request is made in that case — NOTE(review): callers apparently never
 *         ask for the current page; confirm)
 */
@Override
public SearchRequestResult searchGetPage(int page) throws IOException,
        OpacErrorException {
    SearchRequestResult res = null;
    while (page != s_lastpage) {
        List<NameValuePair> nvpairs = s_pageform;
        // Strip any stale toolbar button coordinates from the saved form state
        // before adding the paging button we actually want to press.
        int i = 0;
        List<Integer> indexes = new ArrayList<>();
        for (NameValuePair np : nvpairs) {
            if (np.getName().contains("$Toolbar_")) {
                indexes.add(i);
            }
            i++;
        }
        // Remove back-to-front so the recorded indexes stay valid.
        for (int j = indexes.size() - 1; j >= 0; j--) {
            nvpairs.remove((int) indexes.get(j));
        }
        int p;
        if (page > s_lastpage) {
            nvpairs.add(new BasicNameValuePair(s_nextbutton + ".x", "1"));
            nvpairs.add(new BasicNameValuePair(s_nextbutton + ".y", "1"));
            p = s_lastpage + 1;
        } else {
            nvpairs.add(new BasicNameValuePair(s_previousbutton + ".x", "1"));
            nvpairs.add(new BasicNameValuePair(s_previousbutton + ".y", "1"));
            p = s_lastpage - 1;
        }
        Document docresults = htmlPost(opac_url + ";jsessionid=" + s_sid,
                nvpairs);
        // parse_search_wrapped updates s_lastpage, advancing the loop.
        res = parse_search_wrapped(docresults, p);
    }
    return res;
}
/**
 * Opens the detail page for a result and parses it.
 *
 * The Adis OPAC is fully stateful: we replay the saved result-list form with a
 * {@code selected=ZTEXT <id>} field (twice — the first POST only switches server
 * state), parse the detail page, and afterwards navigate the server session back
 * via the toolbar buttons so subsequent list operations still work.
 *
 * @param id         the record id from the result list; may be {@code null} when a
 *                   previously fetched detail page is cached in {@code s_reusedoc}
 * @param homebranch unused by this implementation
 */
@Override
public DetailedItem getResultById(String id, String homebranch)
        throws IOException, OpacErrorException {
    Document doc;
    List<NameValuePair> nvpairs;
    if (id == null && s_reusedoc != null) {
        doc = s_reusedoc;
    } else {
        nvpairs = s_pageform;
        // Drop stale toolbar coordinates and any previous "selected" field.
        int i = 0;
        List<Integer> indexes = new ArrayList<>();
        for (NameValuePair np : nvpairs) {
            if (np.getName().contains("$Toolbar_")
                    || np.getName().contains("selected")) {
                indexes.add(i);
            }
            i++;
        }
        for (int j = indexes.size() - 1; j >= 0; j--) {
            nvpairs.remove((int) indexes.get(j));
        }
        nvpairs.add(new BasicNameValuePair("selected", "ZTEXT " + id));
        doc = htmlPost(opac_url + ";jsessionid=" + s_sid, nvpairs);
        // Rebuild the form from the response and select the same record again.
        List<NameValuePair> form = new ArrayList<>();
        for (Element input : doc.select("input, select")) {
            if (!"image".equals(input.attr("type"))
                    && !"submit".equals(input.attr("type"))
                    && !"checkbox".equals(input.attr("type"))
                    && !"".equals(input.attr("name"))
                    && !"selected".equals(input.attr("name"))) {
                form.add(new BasicNameValuePair(input.attr("name"), input
                        .attr("value")));
            }
        }
        form.add(new BasicNameValuePair("selected", "ZTEXT " + id));
        doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
        // Yep, two times.
    }
    // Reset: press Toolbar_1 then Toolbar_3 to walk the server session back to the
    // result list (button meanings apparently vary per installation — TODO confirm).
    updatePageform(doc);
    nvpairs = s_pageform;
    nvpairs.add(new BasicNameValuePair("$Toolbar_1.x", "1"));
    nvpairs.add(new BasicNameValuePair("$Toolbar_1.y", "1"));
    parse_search_wrapped(htmlPost(opac_url + ";jsessionid=" + s_sid, nvpairs), 1);
    nvpairs = s_pageform;
    nvpairs.add(new BasicNameValuePair("$Toolbar_3.x", "1"));
    nvpairs.add(new BasicNameValuePair("$Toolbar_3.y", "1"));
    parse_search_wrapped(htmlPost(opac_url + ";jsessionid=" + s_sid, nvpairs), 1);
    return parseResult(id, doc);
}
/**
 * Parses a record's detail page into a {@link DetailedItem}: cover, key/value detail
 * rows, reservation availability and (if present) the per-branch copy table.
 *
 * @param id  the record id, used as reservation info; may be {@code null}
 * @param doc the detail page
 */
DetailedItem parseResult(String id, Document doc)
        throws IOException, OpacErrorException {
    List<NameValuePair> nvpairs;
    DetailedItem res = new DetailedItem();
    if (doc.select("#R001 img").size() == 1) {
        String cover_url = doc.select("#R001 img").first().absUrl("src");
        if (!cover_url.endsWith("erne.gif")) {
            // If there is no cover, the first image usually is the "n Stars" rating badge
            res.setCover(cover_url);
        }
    }
    // Detail table: first column = label, second = value (sometimes a link).
    for (Element tr : doc.select("#R06 .aDISListe table tbody tr")) {
        if (tr.children().size() < 2) {
            continue;
        }
        String title = tr.child(0).text().trim();
        String value = tr.child(1).text().trim();
        if (value.contains("hier klicken") || value.startsWith("zur ") ||
                title.contains("URL")) {
            // "hier klicken" = click here; store the link target instead of the text.
            res.addDetail(new Detail(title, tr.child(1).select("a").first().absUrl("href")));
        } else {
            res.addDetail(new Detail(title, value));
        }
        if (title.contains("Titel") && res.getTitle() == null) {
            res.setTitle(value.split("[:/;]")[0].trim());
        }
    }
    if (res.getTitle() == null) {
        // Fall back to series/journal fields when no "Titel" row was found.
        for (Detail d : res.getDetails()) {
            if (d.getDesc().contains("Gesamtwerk")
                    || d.getDesc().contains("Zeitschrift")) {
                res.setTitle(d.getContent());
                break;
            }
        }
    }
    if (doc.select(
            "input[value*=Reservieren], input[value*=Vormerken], " +
                    "input[value*=Einzelbestellung]")
           .size() > 0 && id != null) {
        res.setReservable(true);
        res.setReservation_info(id);
    }
    DateTimeFormatter fmt = DateTimeFormat.forPattern("dd.MM.yyyy").withLocale(Locale.GERMAN);
    // Copy table (#R08 or #R09): map column headers to copy fields, then read rows.
    if (doc.select("#R08 table.rTable_table, #R09 table.rTable_table").size() > 0) {
        Element table = doc.select("#R08 table.rTable_table, #R09 table.rTable_table").first();
        Map<Integer, String> colmap = new HashMap<>();
        int i = 0;
        for (Element th : table.select("thead tr th")) {
            String head = th.text().trim();
            if (head.contains("Bibliothek") || head.contains("Library")) {
                colmap.put(i, "branch");
            } else if (head.contains("Standort") || head.contains("Location")) {
                colmap.put(i, "location");
            } else if (head.contains("Signatur") || head.contains("Call number")) {
                colmap.put(i, "signature");
            } else if (head.contains("URL")) {
                colmap.put(i, "url");
            } else if (head.contains("Status") || head.contains("Hinweis")
                    || head.contains("Leihfrist") || head.matches(".*Verf.+gbarkeit.*")) {
                colmap.put(i, "status");
            }
            i++;
        }
        for (Element tr : table.select("tbody tr")) {
            Copy copy = new Copy();
            for (Entry<Integer, String> entry : colmap.entrySet()) {
                if (entry.getValue().equals("status")) {
                    String status = tr.child(entry.getKey()).text().trim();
                    // Several header columns may map to "status"; concatenate them.
                    String currentStatus =
                            copy.getStatus() != null ? copy.getStatus() + " - " : "";
                    if (status.contains(" am: ")) {
                        // "ausgeliehen am: dd.MM.yyyy" — split off the return date.
                        copy.setStatus(currentStatus + status.split("-")[0]);
                        try {
                            copy.setReturnDate(fmt.parseLocalDate(status.split(": ")[1]));
                        } catch (IllegalArgumentException e) {
                            e.printStackTrace();
                        }
                    } else {
                        copy.setStatus(currentStatus + status);
                    }
                } else {
                    copy.set(entry.getValue(), tr.child(entry.getKey()).text().trim());
                }
            }
            res.addCopy(copy);
        }
    }
    res.setId(""); // null would be overridden by the UI, because there _is_
    // an id; we just can not use it.
    return res;
}
/**
 * Positional result access is only supported when a previously fetched detail page
 * is cached in {@code s_reusedoc}; in that case it is re-parsed via
 * {@code getResultById(null, null)}.
 *
 * @throws UnsupportedOperationException if no cached detail page is available
 */
@Override
public DetailedItem getResult(int position) throws IOException,
        OpacErrorException {
    if (s_reusedoc == null) {
        throw new UnsupportedOperationException();
    }
    return getResultById(null, null);
}
/**
 * Performs the multi-step reservation dialogue with the stateful Adis OPAC.
 *
 * Flow: re-select the item from the saved result-list form, press the
 * Reservieren/Vormerken/Einzelbestellung button, log in, then — depending on the
 * installation — ask the user for confirmation, a pick-up branch ({@code #AUSGAB_1})
 * and/or a notification method ({@code select$0}, Munich), submit, and finally walk
 * the server session back to the result list. The exact statement order mirrors the
 * server-side dialogue and must not be reordered.
 *
 * @param useraction MultiStepResult action constant from the previous step (0 on first call)
 * @param selection  the user's selection from a previous SELECTION_NEEDED step; the
 *                   two selection kinds are combined as {@code "<notify>_SEP_<branch>"}
 */
@Override
public ReservationResult reservation(DetailedItem item, Account account,
        int useraction, String selection) throws IOException {
    Document doc;
    List<NameValuePair> nvpairs;
    ReservationResult res = null;
    if (selection != null && selection.equals("")) {
        selection = null;
    }
    if (s_pageform == null) {
        // No saved list state — we cannot replay the selection form.
        return new ReservationResult(Status.ERROR);
    }
    // Load details
    nvpairs = s_pageform;
    int i = 0;
    List<Integer> indexes = new ArrayList<>();
    for (NameValuePair np : nvpairs) {
        if (np.getName().contains("$Toolbar_")
                || np.getName().contains("selected")) {
            indexes.add(i);
        }
        i++;
    }
    // Remove back-to-front so recorded indexes stay valid.
    for (int j = indexes.size() - 1; j >= 0; j--) {
        nvpairs.remove((int) indexes.get(j));
    }
    nvpairs.add(new BasicNameValuePair("selected", "ZTEXT "
            + item.getReservation_info()));
    htmlPost(opac_url + ";jsessionid=" + s_sid, nvpairs);
    doc = htmlPost(opac_url + ";jsessionid=" + s_sid, nvpairs); // Yep, two
    // times.
    // Press the reservation button by including it in the replayed form.
    List<NameValuePair> form = new ArrayList<>();
    for (Element input : doc.select("input, select")) {
        if (!"image".equals(input.attr("type"))
                && (!"submit".equals(input.attr("type"))
                || input.val().contains("Reservieren")
                || input.val().contains("Einzelbestellung")
                || input.val().contains("Vormerken"))
                && !"checkbox".equals(input.attr("type"))
                && !"".equals(input.attr("name"))) {
            form.add(new BasicNameValuePair(input.attr("name"), input
                    .attr("value")));
        }
    }
    doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
    if (doc.select(".message h1").size() > 0) {
        // Immediate error page — report it and acknowledge to unblock the session.
        String msg = doc.select(".message h1").text().trim();
        res = new ReservationResult(MultiStepResult.Status.ERROR, msg);
        form = new ArrayList<>();
        for (Element input : doc.select("input")) {
            if (!"image".equals(input.attr("type"))
                    && !"checkbox".equals(input.attr("type"))
                    && !"".equals(input.attr("name"))) {
                form.add(new BasicNameValuePair(input.attr("name"), input
                        .attr("value")));
            }
        }
        doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
    } else {
        try {
            doc = handleLoginForm(doc, account);
        } catch (OpacErrorException e1) {
            return new ReservationResult(MultiStepResult.Status.ERROR,
                    e1.getMessage());
        }
        if (useraction == 0 && selection == null) {
            // First call: show the confirmation text (#F23) to the user.
            res = new ReservationResult(
                    MultiStepResult.Status.CONFIRMATION_NEEDED);
            List<String[]> details = new ArrayList<>();
            details.add(new String[]{doc.select("#F23").text()});
            res.setDetails(details);
        } else if (doc.select("#AUSGAB_1").size() > 0 && (selection == null || "confirmed".equals(selection))) {
            // Pick-up branch selection required.
            List<Map<String, String>> sel = new ArrayList<>();
            for (Element opt : doc.select("#AUSGAB_1 option")) {
                if (opt.text().trim().length() > 0) {
                    Map<String, String> selopt = new HashMap<>();
                    selopt.put("key", opt.val());
                    selopt.put("value", opt.text());
                    sel.add(selopt);
                }
            }
            res = new ReservationResult(
                    MultiStepResult.Status.SELECTION_NEEDED, doc.select(
                    "#AUSGAB_1").first().parent().select("span").text());
            res.setSelection(sel);
        } else if (doc.select("#FSET01 select[name=select$0]").size() > 0 &&
                (selection == null || !selection.contains("_SEP_"))) {
            // Munich: "Benachrichtigung mit E-Mail"
            List<Map<String, String>> sel = new ArrayList<>();
            for (Element opt : doc.select("select[name=select$0] option")) {
                if (opt.text().trim().length() > 0) {
                    Map<String, String> selopt = new HashMap<>();
                    selopt.put("value", opt.text());
                    if (selection != null) {
                        // Carry the already-made branch choice along.
                        selopt.put("key", opt.val() + "_SEP_" + selection);
                    } else {
                        selopt.put("key", opt.val());
                    }
                    sel.add(selopt);
                }
            }
            res = new ReservationResult(
                    MultiStepResult.Status.SELECTION_NEEDED, doc.select(
                    "#FSET01 select[name=select$0]").first().parent().select("span").text());
            res.setSelection(sel);
        } else if (selection != null || doc.select("#AUSGAB_1").size() == 0) {
            // All choices made (or none required): fill the form and submit.
            if (doc.select("#AUSGAB_1").size() > 0 && selection != null) {
                if (selection.contains("_SEP_")) {
                    doc.select("#AUSGAB_1").attr("value", selection.split("_SEP_")[1]);
                } else {
                    doc.select("#AUSGAB_1").attr("value", selection);
                }
            }
            if (doc.select("#FSET01 select[name=select$0]").size() > 0 && selection != null) {
                if (selection.contains("_SEP_")) {
                    doc.select("#FSET01 select[name=select$0]")
                       .attr("value", selection.split("_SEP_")[0]);
                } else {
                    doc.select("#FSET01 select[name=select$0]").attr("value", selection);
                }
            }
            if (doc.select("#BENJN_1").size() > 0) {
                // Notification not requested because some libraries notify by snail mail
                // and take a fee for it (Example: Stuttgart_Uni)
                doc.select("#BENJN_1").attr("value", "Nein");
            }
            if (doc.select(".message h1").size() > 0) {
                String msg = doc.select(".message h1").text().trim();
                form = new ArrayList<>();
                for (Element input : doc.select("input")) {
                    if (!"image".equals(input.attr("type"))
                            && !"checkbox".equals(input.attr("type"))
                            && !"".equals(input.attr("name"))) {
                        form.add(new BasicNameValuePair(input.attr("name"),
                                input.attr("value")));
                    }
                }
                doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
                if (!msg.contains("Reservation ist erfolgt")) {
                    res = new ReservationResult(
                            MultiStepResult.Status.ERROR, msg);
                } else {
                    res = new ReservationResult(MultiStepResult.Status.OK,
                            msg);
                }
            } else {
                form = new ArrayList<>();
                for (Element input : doc.select("input, select")) {
                    if (!"image".equals(input.attr("type"))
                            && !"submit".equals(input.attr("type"))
                            && !"checkbox".equals(input.attr("type"))
                            && !"".equals(input.attr("name"))) {
                        form.add(new BasicNameValuePair(input.attr("name"),
                                input.attr("value")));
                    }
                }
                form.add(new BasicNameValuePair("textButton",
                        "Reservation abschicken"));
                res = new ReservationResult(MultiStepResult.Status.OK);
                doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
                if (doc.select("input[name=textButton]").attr("value")
                       .contains("kostenpflichtig bestellen")) {
                    // Munich
                    form = new ArrayList<>();
                    for (Element input : doc.select("input, select")) {
                        if (!"image".equals(input.attr("type"))
                                && !"submit".equals(input.attr("type"))
                                && !"checkbox".equals(input.attr("type"))
                                && !"".equals(input.attr("name"))) {
                            form.add(new BasicNameValuePair(input.attr("name"),
                                    input.attr("value")));
                        }
                    }
                    form.add(new BasicNameValuePair("textButton",
                            doc.select("input[name=textButton]").first().attr("value")));
                    doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
                }
                if (doc.select(".message h1").size() > 0) {
                    // Post-submit message page — success or failure text.
                    String msg = doc.select(".message h1").text().trim();
                    form = new ArrayList<>();
                    for (Element input : doc.select("input")) {
                        if (!"image".equals(input.attr("type"))
                                && !"checkbox".equals(input.attr("type"))
                                && !"".equals(input.attr("name"))) {
                            form.add(new BasicNameValuePair(input
                                    .attr("name"), input.attr("value")));
                        }
                    }
                    doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
                    if (!msg.contains("Reservation ist erfolgt")) {
                        res = new ReservationResult(
                                MultiStepResult.Status.ERROR, msg);
                    } else {
                        res = new ReservationResult(
                                MultiStepResult.Status.OK, msg);
                    }
                } else if (doc.select("#R01").text()
                              .contains("Informationen zu Ihrer Reservation")) {
                    String msg = doc.select("#OPACLI").text().trim();
                    form = new ArrayList<>();
                    for (Element input : doc.select("input")) {
                        if (!"image".equals(input.attr("type"))
                                && !"checkbox".equals(input.attr("type"))
                                && !"".equals(input.attr("name"))) {
                            form.add(new BasicNameValuePair(input
                                    .attr("name"), input.attr("value")));
                        }
                    }
                    doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
                    if (!msg.contains("Reservation ist erfolgt")) {
                        res = new ReservationResult(
                                MultiStepResult.Status.ERROR, msg);
                    } else {
                        res = new ReservationResult(
                                MultiStepResult.Status.OK, msg);
                    }
                }
            }
        }
    }
    if (res == null
            || res.getStatus() == MultiStepResult.Status.SELECTION_NEEDED
            || res.getStatus() == MultiStepResult.Status.CONFIRMATION_NEEDED) {
        // Dialogue not finished — press Abbrechen/Zurück so the server session
        // does not stay stuck in the reservation dialogue.
        form = new ArrayList<>();
        for (Element input : doc.select("input, select")) {
            if (!"image".equals(input.attr("type"))
                    && !"submit".equals(input.attr("type"))
                    && !"checkbox".equals(input.attr("type"))
                    && !"".equals(input.attr("name"))) {
                form.add(new BasicNameValuePair(input.attr("name"), input
                        .attr("value")));
            }
        }
        Element button = doc.select("input[value=Abbrechen], input[value=Zurück]").first();
        form.add(new BasicNameValuePair(button.attr("name"), button.attr("value")));
        doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
    }
    // Reset: walk the server session back to the result list via the toolbar.
    updatePageform(doc);
    try {
        nvpairs = s_pageform;
        nvpairs.add(new BasicNameValuePair("$Toolbar_1.x", "1"));
        nvpairs.add(new BasicNameValuePair("$Toolbar_1.y", "1"));
        parse_search_wrapped(htmlPost(opac_url + ";jsessionid=" + s_sid, nvpairs),
                1);
        nvpairs = s_pageform;
        nvpairs.add(new BasicNameValuePair("$Toolbar_3.x", "1"));
        nvpairs.add(new BasicNameValuePair("$Toolbar_3.y", "1"));
        parse_search_wrapped(htmlPost(opac_url + ";jsessionid=" + s_sid, nvpairs),
                1);
    } catch (OpacErrorException e) {
        // Navigation back to the list failed; the reservation result itself is
        // unaffected, so only log.
        e.printStackTrace();
    }
    return res;
}
/**
 * Snapshots the current page's form state into {@code s_pageform} so later requests
 * can replay it. Image/submit/checkbox controls and unnamed fields are skipped —
 * buttons and checkboxes are added explicitly by the callers that "press" them.
 */
void updatePageform(Document doc) {
    List<NameValuePair> form = new ArrayList<>();
    for (Element field : doc.select("input, select")) {
        String type = field.attr("type");
        String name = field.attr("name");
        boolean skip = "image".equals(type) || "submit".equals(type)
                || "checkbox".equals(type) || "".equals(name);
        if (!skip) {
            form.add(new BasicNameValuePair(name, field.attr("value")));
        }
    }
    s_pageform = form;
}
/**
 * Prolongs (renews) a single lent item.
 *
 * Logs into the account area, follows the lent-media link ({@code sp=SZA}), optionally
 * aborts early if the row is marked non-renewable, then submits the renewal form with
 * only this item's checkbox ticked and finally presses the toolbar back button to
 * release the server-side dialogue.
 *
 * @param media "checkboxName|..." as produced by {@code parseMediaList} — only the
 *              part before the first '|' is used here
 */
@Override
public ProlongResult prolong(String media, Account account, int useraction,
        String selection) throws IOException {
    String alink = null;
    Document doc;
    start();
    doc = htmlGet(opac_url + ";jsessionid=" + s_sid + "?service="
            + s_service + getSpParams("SBK"));
    try {
        doc = handleLoginForm(doc, account);
    } catch (OpacErrorException e) {
        return new ProlongResult(Status.ERROR, e.getMessage());
    }
    // Find the "lent media" link (sp=SZA) in the account overview.
    for (Element tr : doc.select(".rTable_div tr")) {
        if (tr.select("a").size() == 1) {
            if (tr.select("a").first().absUrl("href")
                  .contains("sp=SZA")) {
                alink = tr.select("a").first().absUrl("href");
            }
        }
    }
    if (alink == null) {
        return new ProlongResult(Status.ERROR);
    }
    doc = htmlGet(alink);
    List<NameValuePair> form = new ArrayList<>();
    for (Element input : doc.select("input, select")) {
        if (!"image".equals(input.attr("type"))
                && !"submit".equals(input.attr("type"))
                && !"checkbox".equals(input.attr("type"))
                && !"".equals(input.attr("name"))) {
            form.add(new BasicNameValuePair(input.attr("name"), input
                    .attr("value")));
        }
    }
    // If the item's row says it cannot be renewed, back out with that row's text.
    for (Element tr : doc.select(".rTable_div tr")) {
        if (tr.select("input").attr("name").equals(media.split("\\|")[0])) {
            boolean disabled = tr.select("input").hasAttr("disabled");
            try {
                disabled = (
                        disabled
                                || tr.child(4).text().matches(".*nicht verl.+ngerbar.*")
                                || tr.child(4).text().matches(".*Verl.+ngerung nicht m.+glich.*")
                );
            } catch (Exception e) {
                // Row may have fewer columns; keep the checkbox-based result then.
            }
            if (disabled) {
                // Press the toolbar back button so the session state is left clean.
                form.add(new BasicNameValuePair("$Toolbar_0.x", "1"));
                form.add(new BasicNameValuePair("$Toolbar_0.y", "1"));
                htmlPost(opac_url + ";jsessionid=" + s_sid, form);
                return new ProlongResult(Status.ERROR, tr.child(4).text().trim());
            }
        }
    }
    form.add(new BasicNameValuePair(media.split("\\|")[0], "on"));
    // Stuttgart: textButton$0, others: textButton$1
    String buttonName = doc.select("input[value=Markierte Titel verlängern]").attr("name");
    form.add(new BasicNameValuePair(!"".equals(buttonName) ? buttonName : "textButton$1",
            "Markierte Titel verlängern"));
    doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
    // Acknowledge the result page and navigate back.
    form = new ArrayList<>();
    for (Element input : doc.select("input, select")) {
        if (!"image".equals(input.attr("type"))
                && !"submit".equals(input.attr("type"))
                && !"checkbox".equals(input.attr("type"))
                && !"".equals(input.attr("name"))) {
            form.add(new BasicNameValuePair(input.attr("name"), input
                    .attr("value")));
        }
    }
    form.add(new BasicNameValuePair("$Toolbar_0.x", "1"));
    form.add(new BasicNameValuePair("$Toolbar_0.y", "1"));
    htmlPost(opac_url + ";jsessionid=" + s_sid, form);
    return new ProlongResult(Status.OK);
}
/**
 * Renews all renewable lent items at once: logs in, opens the lent-media page
 * ({@code sp=SZA}), ticks every non-disabled checkbox, submits the renewal form and
 * collects one result line (title / new return date / message) per table row.
 */
@Override
public ProlongAllResult prolongAll(Account account, int useraction,
        String selection) throws IOException {
    String alink = null;
    Document doc;
    start();
    doc = htmlGet(opac_url + ";jsessionid=" + s_sid + "?service="
            + s_service + getSpParams("SBK"));
    try {
        doc = handleLoginForm(doc, account);
    } catch (OpacErrorException e) {
        return new ProlongAllResult(Status.ERROR, e.getMessage());
    }
    // Find the "lent media" link (sp=SZA) in the account overview.
    for (Element tr : doc.select(".rTable_div tr")) {
        if (tr.select("a").size() == 1) {
            if (tr.select("a").first().absUrl("href")
                  .contains("sp=SZA")) {
                alink = tr.select("a").first().absUrl("href");
            }
        }
    }
    if (alink == null) {
        return new ProlongAllResult(Status.ERROR);
    }
    doc = htmlGet(alink);
    List<NameValuePair> form = new ArrayList<>();
    for (Element input : doc.select("input, select")) {
        if (!"image".equals(input.attr("type"))
                && !"submit".equals(input.attr("type"))
                && !"checkbox".equals(input.attr("type"))
                && !"".equals(input.attr("name"))) {
            form.add(new BasicNameValuePair(input.attr("name"), input
                    .attr("value")));
        }
        // Tick every enabled checkbox = select all renewable items.
        if ("checkbox".equals(input.attr("type"))
                && !input.hasAttr("disabled")) {
            form.add(new BasicNameValuePair(input.attr("name"), "on"));
        }
    }
    // Stuttgart: textButton$0, others: textButton$1
    String buttonName = doc.select("input[value=Markierte Titel verlängern]").attr("name");
    form.add(new BasicNameValuePair(!"".equals(buttonName) ? buttonName : "textButton$1",
            "Markierte Titel verlängern"));
    doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
    // Column layout assumed: 1 = new return date, 3 = title, 4 = message — TODO confirm.
    List<Map<String, String>> result = new ArrayList<>();
    for (Element tr : doc.select(".rTable_div tbody tr")) {
        Map<String, String> line = new HashMap<>();
        line.put(ProlongAllResult.KEY_LINE_TITLE,
                tr.child(3).text().split("[:/;]")[0].trim());
        line.put(ProlongAllResult.KEY_LINE_NEW_RETURNDATE, tr.child(1)
                .text());
        line.put(ProlongAllResult.KEY_LINE_MESSAGE, tr.child(4).text());
        result.add(line);
    }
    // Acknowledge the result page and navigate back via the toolbar.
    form = new ArrayList<>();
    for (Element input : doc.select("input, select")) {
        if (!"image".equals(input.attr("type"))
                && !"submit".equals(input.attr("type"))
                && !"checkbox".equals(input.attr("type"))
                && !"".equals(input.attr("name"))) {
            form.add(new BasicNameValuePair(input.attr("name"), input
                    .attr("value")));
        }
    }
    form.add(new BasicNameValuePair("$Toolbar_0.x", "1"));
    form.add(new BasicNameValuePair("$Toolbar_0.y", "1"));
    htmlPost(opac_url + ";jsessionid=" + s_sid, form);
    return new ProlongAllResult(Status.OK, result);
}
/**
 * Cancels a reservation/hold.
 *
 * Logs in, locates the reservation-list link in the account overview whose {@code sp}
 * parameter matches the one encoded in {@code media}, opens it, ticks the item's
 * checkbox, presses "Markierte Titel löschen" (delete marked titles) and navigates back.
 *
 * BUG FIX: {@code rlink} was pre-assigned from {@code media} (with a bogus
 * {@code replace("requestCount=", "fooo=")}), which made the {@code rlink == null}
 * error check dead and could cause a GET on a non-link string when no matching
 * account link exists. It now starts as {@code null} so the check is meaningful.
 *
 * @param media "checkboxName|reservationListUrl" as produced by
 *              {@code parseReservationList}
 */
@Override
public CancelResult cancel(String media, Account account, int useraction,
        String selection) throws IOException, OpacErrorException {
    String rlink = null;
    Document doc;
    start();
    doc = htmlGet(opac_url + ";jsessionid=" + s_sid + "?service="
            + s_service + getSpParams("SBK"));
    try {
        doc = handleLoginForm(doc, account);
    } catch (OpacErrorException e) {
        return new CancelResult(Status.ERROR, e.getMessage());
    }
    for (Element tr : doc.select(".rTable_div tr")) {
        // Match the list link whose SP parameter equals the one stored in media
        // (defaulting to SZM when media carries none).
        String url = media.split("\\|")[1].toUpperCase(Locale.GERMAN);
        String sp = "SZM";
        if (url.contains("SP=")) {
            Map<String, String> qp = getQueryParamsFirst(url);
            if (qp.containsKey("SP")) {
                sp = qp.get("SP");
            }
        }
        if (tr.select("a").size() == 1) {
            if ((tr.text().contains("Reservationen") || tr.text().contains("Vormerkung") ||
                    tr.text().contains("Bestellung"))
                    && !tr.child(0).text().trim().equals("")
                    && tr.select("a").first().attr("href")
                         .toUpperCase(Locale.GERMAN)
                         .contains("SP=" + sp)) {
                rlink = tr.select("a").first().absUrl("href");
            }
        }
    }
    if (rlink == null) {
        return new CancelResult(Status.ERROR);
    }
    doc = htmlGet(rlink);
    List<NameValuePair> form = new ArrayList<>();
    for (Element input : doc.select("input, select")) {
        if (!"image".equals(input.attr("type"))
                && !"submit".equals(input.attr("type"))
                && !"checkbox".equals(input.attr("type"))
                && !"".equals(input.attr("name"))) {
            form.add(new BasicNameValuePair(input.attr("name"), input
                    .attr("value")));
        }
    }
    form.add(new BasicNameValuePair(media.split("\\|")[0], "on"));
    // Stuttgart: textButton, others: textButton$0
    String buttonName = doc.select("input[value=Markierte Titel löschen]").attr("name");
    form.add(new BasicNameValuePair(!"".equals(buttonName) ? buttonName : "textButton$0",
            "Markierte Titel löschen"));
    doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
    // Acknowledge the confirmation page and go back via the toolbar.
    form = new ArrayList<>();
    for (Element input : doc.select("input, select")) {
        if (!"image".equals(input.attr("type"))
                && !"submit".equals(input.attr("type"))
                && !"checkbox".equals(input.attr("type"))
                && !"".equals(input.attr("name"))) {
            form.add(new BasicNameValuePair(input.attr("name"), input
                    .attr("value")));
        }
    }
    form.add(new BasicNameValuePair("$Toolbar_0.x", "1"));
    form.add(new BasicNameValuePair("$Toolbar_0.y", "1"));
    htmlPost(opac_url + ";jsessionid=" + s_sid, form);
    return new CancelResult(Status.OK);
}
/**
 * Loads the complete account overview: fees, card validity, lent items and
 * reservations/holds of all kinds. Lent items come from the {@code sp=SZA} link,
 * reservations from the various reservation-type links; after each sub-page the
 * toolbar back button (or a Cancel button in Stuttgart) is pressed to keep the
 * stateful server session navigable.
 */
@Override
public AccountData account(Account account) throws IOException,
        JSONException, OpacErrorException {
    start();
    Document doc = htmlGet(opac_url + ";jsessionid=" + s_sid + "?service="
            + s_service + getSpParams("SBK"));
    doc = handleLoginForm(doc, account);
    // VOEBB (Berlin) lists titles without a separate author part.
    boolean split_title_author = true;
    if (doc.head().html().contains("VOEBB")) {
        split_title_author = false;
    }
    AccountData adata = new AccountData(account.getId());
    for (Element tr : doc.select(".aDISListe tr")) {
        // "Fällige Gebühren" = fees due; "Ausweis gültig bis" = card valid until.
        if (tr.child(0).text().matches(".*F.+llige Geb.+hren.*")) {
            adata.setPendingFees(tr.child(1).text().trim());
        }
        if (tr.child(0).text().matches(".*Ausweis g.+ltig bis.*")) {
            adata.setValidUntil(tr.child(1).text().trim());
        }
    }
    DateTimeFormatter fmt = DateTimeFormat.forPattern("dd.MM.yyyy").withLocale(Locale.GERMAN);
    // Lent items ("Ausleihen")
    String alink = null;
    int anum = 0;
    List<LentItem> lent = new ArrayList<>();
    for (Element tr : doc.select(".rTable_div tr")) {
        if (tr.select("a").size() == 1) {
            if (tr.select("a").first().absUrl("href").contains("sp=SZA")) {
                alink = tr.select("a").first().absUrl("href");
                anum = Integer.parseInt(tr.child(0).text().trim());
            }
        }
    }
    if (alink != null) {
        Document adoc = htmlGet(alink);
        s_alink = alink;
        List<NameValuePair> form = new ArrayList<>();
        String prolongTest = null;
        for (Element input : adoc.select("input, select")) {
            if (!"image".equals(input.attr("type"))
                    && !"submit".equals(input.attr("type"))
                    && !"".equals(input.attr("name"))) {
                if (input.attr("type").equals("checkbox")
                        && !input.hasAttr("value")) {
                    input.val("on");
                }
                form.add(new BasicNameValuePair(input.attr("name"), input
                        .attr("value")));
            } else if (input.val().matches(".+verl.+ngerbar.+")) {
                // "verlängerbar?" button: lets the OPAC annotate renewability.
                prolongTest = input.attr("name");
            }
        }
        if (prolongTest != null) {
            form.add(new BasicNameValuePair(prolongTest,
                    "Markierte Titel verlängerbar?"));
            Document adoc_new = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
            if (adoc_new.select(".message h1").size() == 0) {
                adoc = adoc_new;
            }
        }
        parseMediaList(adoc, alink, lent, split_title_author);
        assert (lent.size() == anum);
        // Leave the lent-media page: Cancel button (Stuttgart) or toolbar back.
        form = new ArrayList<>();
        boolean cancelButton = false;
        for (Element input : adoc.select("input, select")) {
            if (!"image".equals(input.attr("type"))
                    && !"submit".equals(input.attr("type"))
                    && !"checkbox".equals(input.attr("type"))
                    && !"".equals(input.attr("name"))) {
                form.add(new BasicNameValuePair(input.attr("name"), input
                        .attr("value")));
            }
            if ("submit".equals(input.attr("type")) &&
                    "Abbrechen".equals(input.attr("value")) && !cancelButton) {
                // Stuttgart: Cancel button instead of toolbar back button
                form.add(new BasicNameValuePair(input.attr("name"), input.attr("value")));
                cancelButton = true;
            }
        }
        if (!cancelButton) {
            form.add(new BasicNameValuePair("$Toolbar_0.x", "1"));
            form.add(new BasicNameValuePair("$Toolbar_0.y", "1"));
        }
        doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
    } else {
        assert (anum == 0);
    }
    adata.setLent(lent);
    // Reservations of all kinds (holds, ILL orders, stack orders, ...).
    List<String[]> rlinks = new ArrayList<>();
    int rnum = 0;
    List<ReservedItem> res = new ArrayList<>();
    for (Element tr : doc.select(".rTable_div tr")) {
        if (tr.select("a").size() == 1) {
            if ((tr.text().contains("Reservationen")
                    || tr.text().contains("Vormerkung")
                    || tr.text().contains("Fernleihbestellung")
                    || tr.text().contains("Bereitstellung")
                    || tr.text().contains("Bestellw")
                    || tr.text().contains("Magazin"))
                    && !tr.child(0).text().trim().equals("")) {
                rlinks.add(new String[]{
                        tr.select("a").text(),
                        tr.select("a").first().absUrl("href"),
                });
                rnum += Integer.parseInt(tr.child(0).text().trim());
            }
        }
    }
    for (String[] rlink : rlinks) {
        Document rdoc = htmlGet(rlink[1]);
        boolean error =
                parseReservationList(rdoc, rlink, split_title_author, res, fmt, stringProvider);
        if (error) {
            // Maybe we should send a bug report here, but using ACRA breaks
            // the unit tests
            adata.setWarning("Beim Abrufen der Reservationen ist ein Problem aufgetreten");
        }
        // Navigate back from the reservation sub-page.
        List<NameValuePair> form = new ArrayList<>();
        for (Element input : rdoc.select("input, select")) {
            if (!"image".equals(input.attr("type"))
                    && !"submit".equals(input.attr("type"))
                    && !"checkbox".equals(input.attr("type"))
                    && !"".equals(input.attr("name"))) {
                form.add(new BasicNameValuePair(input.attr("name"), input
                        .attr("value")));
            }
        }
        form.add(new BasicNameValuePair("$Toolbar_0.x", "1"));
        form.add(new BasicNameValuePair("$Toolbar_0.y", "1"));
        htmlPost(opac_url + ";jsessionid=" + s_sid, form);
    }
    assert (res.size() == rnum);
    adata.setReservations(res);
    return adata;
}
/**
 * Parses one reservation sub-page (holds, ILL orders, stack orders, provisions)
 * into {@code res}.
 *
 * @param doc                the reservation list page
 * @param rlink              pair of {link text, absolute URL} of the link that led here;
 *                           the text decides "ready for pickup" handling, the URL's SP
 *                           parameter decides cancelability
 * @param split_title_author whether the title cell contains "Title : Author" to split
 * @param res                output list; parsed items are appended
 * @param fmt                dd.MM.yyyy formatter for expiration dates
 * @return {@code true} if at least one row had fewer columns than expected (a known
 *         server-side glitch), so the caller can attach a warning
 */
static boolean parseReservationList(Document doc, String[] rlink, boolean split_title_author,
        List<ReservedItem> res, DateTimeFormatter fmt, StringProvider stringProvider) {
    boolean error = false;
    // Page-level markers for special reservation kinds (used to prefix the branch).
    boolean interlib = doc.html().contains("Ihre Fernleih-Bestellung");
    boolean stacks = doc.html().contains("aus dem Magazin");
    boolean provision = doc.html().contains("Ihre Bereitstellung");
    // Default column positions, overridden by matching header texts below.
    Map<String, Integer> colmap = new HashMap<>();
    colmap.put("title", 2);
    colmap.put("branch", 1);
    colmap.put("expirationdate", 0);
    int i = 0;
    for (Element th : doc.select(".rTable_div thead tr th")) {
        if (th.text().contains("Bis")) {
            colmap.put("expirationdate", i);
        }
        if (th.text().contains("Ausgabeort")) {
            colmap.put("branch", i);
        }
        if (th.text().contains("Titel")) {
            colmap.put("title", i);
        }
        if (th.text().contains("Hinweis")) {
            colmap.put("status", i);
        }
        i++;
    }
    for (Element tr : doc.select(".rTable_div tbody tr")) {
        if (tr.children().size() >= colmap.size()) {
            ReservedItem item = new ReservedItem();
            // Title cell may contain <br>-separated parts; join with ';'.
            String text = tr.child(colmap.get("title")).html();
            text = Jsoup.parse(text.replaceAll("(?i)<br[^>]*>", ";")).text();
            if (split_title_author) {
                String[] split = text.split("[:/;\n]");
                item.setTitle(split[0].replaceFirst("([^:;\n]+)[:;\n](.*)$", "$1").trim());
                if (split.length > 1) {
                    item.setAuthor(
                            split[1].replaceFirst("([^:;\n]+)[:;\n](.*)$", "$1").trim());
                }
            } else {
                item.setTitle(text);
            }
            String branch = tr.child(colmap.get("branch")).text().trim();
            if (interlib) {
                branch = stringProvider
                        .getFormattedString(StringProvider.INTERLIB_BRANCH, branch);
            } else if (stacks) {
                branch = stringProvider
                        .getFormattedString(StringProvider.STACKS_BRANCH, branch);
            } else if (provision) {
                branch = stringProvider
                        .getFormattedString(StringProvider.PROVISION_BRANCH, branch);
            }
            item.setBranch(branch);
            if (colmap.containsKey("status")) {
                String status = tr.child(colmap.get("status")).text().trim();
                if (!"".equals(status)) item.setStatus(status);
            }
            if (rlink[0].contains("Abholbereit") || rlink[0].contains("Bereitstellung")) {
                // Orders ready for pickup ("Abholbereite Bestellungen").
                item.setStatus("bereit");
                if (tr.child(0).text().trim().length() >= 10) {
                    item.setExpirationDate(fmt.parseLocalDate(
                            tr.child(colmap.get("expirationdate")).text().trim()
                              .substring(0, 10)));
                }
            } else {
                // Not yet ready: cancelable only for certain SP list types.
                if (tr.select("input[type=checkbox]").size() > 0
                        && (rlink[1].toUpperCase(Locale.GERMAN).contains(
                        "SP=SZM") || rlink[1].toUpperCase(
                        Locale.GERMAN).contains("SP=SZW") || rlink[1].toUpperCase(
                        Locale.GERMAN).contains("SP=SZB"))) {
                    item.setCancelData(
                            tr.select("input[type=checkbox]").attr("name") + "|" +
                                    rlink[1]);
                }
            }
            res.add(item);
        } else {
            // This is a strange bug where sometimes there is only three
            // columns
            error = true;
        }
    }
    return error;
}
/**
 * Parses the list of lent media from the account page and appends items to {@code lent}.
 *
 * Two row formats are handled: "Title / Author #Sig#No" (regular loans) and
 * "Author: Title - Publisher - ISBN:... #No" (e.g. interlibrary loans in Berlin).
 * In both cases the media ID is the last "#"-separated token.
 *
 * @param adoc               account page containing a ".rTable_div" loans table
 * @param alink              URL of the loans list; stored in each item's prolong data
 * @param lent               output list; parsed items are appended
 * @param split_title_author whether to strip author information from the title
 */
static void parseMediaList(Document adoc, String alink, List<LentItem> lent,
        boolean split_title_author) {
    DateTimeFormatter fmt = DateTimeFormat.forPattern("dd.MM.yyyy").withLocale(Locale.GERMAN);
    for (Element tr : adoc.select(".rTable_div tbody tr")) {
        LentItem item = new LentItem();
        // <br> tags separate the parts; turn them into "#" before splitting.
        String text = Jsoup.parse(tr.child(3).html().replaceAll("(?i)<br[^>]*>", "#"))
                           .text();
        if (text.contains(" / ")) {
            // Format "Title / Author #Sig#No", e.g. regular loans in Berlin
            String[] split = text.split("[/#\n]");
            String title = split[0];
            //Is always the last one...
            String id = split[split.length - 1];
            item.setId(id);
            if (split_title_author) {
                title = title.replaceFirst("([^:;\n]+)[:;\n](.*)$", "$1");
            }
            item.setTitle(title.trim());
            if (split.length > 1) {
                item.setAuthor(split[1].replaceFirst("([^:;\n]+)[:;\n](.*)$", "$1").trim());
            }
        } else {
            // Format "Author: Title - Publisher - ISBN:... #Number", e.g. interlibrary loans in Berlin
            String[] split = text.split("#");
            String[] aut_tit = split[0].split(": ");
            item.setAuthor(aut_tit[0].replaceFirst("([^:;\n]+)[:;\n](.*)$", "$1").trim());
            if (aut_tit.length > 1) {
                item.setTitle(
                        aut_tit[1].replaceFirst("([^:;\n]+)[:;\n](.*)$", "$1").trim());
            }
            //Is always the last one...
            String id = split[split.length - 1];
            item.setId(id);
        }
        String date = tr.child(1).text().trim();
        if (date.contains("-")) {
            // Nuernberg uses "29.03.2016 - 26.04.2016"
            // for beginning and end date in one field; keep the end date.
            date = date.split("-")[1].trim();
        }
        try {
            item.setDeadline(fmt.parseLocalDate(date));
        } catch (IllegalArgumentException e) {
            // unparseable date: leave the deadline unset
            e.printStackTrace();
        }
        item.setHomeBranch(tr.child(2).text().trim());
        if (tr.select("input[type=checkbox]").hasAttr("disabled")) {
            item.setRenewable(false);
        } else {
            try {
                // The remarks column may explicitly say renewal is impossible.
                item.setRenewable(
                        !tr.child(4).text().matches(".*nicht verl.+ngerbar.*")
                        && !tr.child(4).text().matches(".*Verl.+ngerung nicht m.+glich.*")
                );
            } catch (Exception e) {
                // column missing: keep the item's default renewable state
            }
            item.setProlongData(
                    tr.select("input[type=checkbox]").attr("name") + "|" + alink);
        }
        lent.add(item);
    }
}
/**
 * Fills in and submits the OPAC login form contained in {@code doc}, if any.
 *
 * If the page has no password field (#LPASSW_1) the session is assumed to be
 * logged in already and the document is returned unchanged. Otherwise the form
 * is posted with the account's credentials; if the response shows a message
 * page (".message h1" / ".alert") it is acknowledged with a second POST, and
 * the message is thrown as an {@link OpacErrorException} unless it indicates
 * a successful login.
 *
 * @param doc     page possibly containing the login form
 * @param account credentials to fill in
 * @return the page reached after the login sequence
 * @throws OpacErrorException if the OPAC reports a login error message
 */
protected Document handleLoginForm(Document doc, Account account)
        throws IOException, OpacErrorException {
    if (doc.select("#LPASSW_1").size() == 0) {
        return doc;
    }
    doc.select("#LPASSW_1").val(account.getPassword());
    List<NameValuePair> form = new ArrayList<>();
    for (Element input : doc.select("input, select")) {
        if (!"image".equals(input.attr("type"))
                && !"checkbox".equals(input.attr("type"))
                && !"submit".equals(input.attr("type"))
                && !"".equals(input.attr("name"))) {
            // Different installations use different IDs for the user-number field.
            if (input.attr("id").equals("L#AUSW_1")
                    || input.attr("fld").equals("L#AUSW_1")
                    || input.attr("id").equals("IDENT_1")
                    || input.attr("id").equals("LMATNR_1")) {
                input.attr("value", account.getName());
            }
            form.add(new BasicNameValuePair(input.attr("name"), input
                    .attr("value")));
        }
    }
    Element inputSend = doc.select("input[type=submit]").first();
    form.add(new BasicNameValuePair(inputSend.attr("name"), inputSend
            .attr("value")));
    doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
    if (doc.select(".message h1, .alert").size() > 0) {
        // Confirmation/notice page: acknowledge it with a second POST, then
        // decide from the message text whether the login actually succeeded.
        String msg = doc.select(".message h1, .alert").text().trim();
        form = new ArrayList<>();
        for (Element input : doc.select("input")) {
            if (!"image".equals(input.attr("type"))
                    && !"checkbox".equals(input.attr("type"))
                    && !"".equals(input.attr("name"))) {
                form.add(new BasicNameValuePair(input.attr("name"), input
                        .attr("value")));
            }
        }
        doc = htmlPost(opac_url + ";jsessionid=" + s_sid, form);
        if (!msg.contains("Sie sind angemeldet") && !msg.contains("jetzt angemeldet")) {
            throw new OpacErrorException(msg);
        }
        return doc;
    } else {
        return doc;
    }
}
/**
 * Scrapes the advanced search page and builds the list of search fields.
 *
 * Fields come from two places: the "search in" dropdown (#SUCH01_1, yielding one
 * free-text field per option) and the advanced-search rows (single text inputs,
 * dropdowns such as media type, and rows of three text inputs like
 * "year from/to"). Fields whose display name appears in {@code ignoredFieldNames}
 * are removed before returning.
 *
 * @return the detected search fields
 */
@Override
public List<SearchField> parseSearchFields() throws IOException,
        JSONException {
    start();
    Document doc = htmlGet(opac_url + ";jsessionid=" + s_sid + "?service="
            + s_service + getSpParams());
    List<SearchField> fields = new ArrayList<>();
    // dropdown to select which field you want to search in
    Elements searchoptions = doc.select("#SUCH01_1 option");
    if (searchoptions.size() == 0 && doc.select("input[fld=FELD01_1]").size() > 0) {
        // Hack is needed in Nuernberg
        searchoptions = doc.select("input[fld=FELD01_1]").first().previousElementSibling()
                           .select("option");
    }
    Set<String> fieldIds = new HashSet<>();
    for (Element opt : searchoptions) {
        // Skip duplicate options so they do not show up more than once
        // (happens at Stadtbücherei Stuttgart)
        if (fieldIds.contains(opt.attr("value"))) continue;
        TextSearchField field = new TextSearchField();
        field.setId(opt.attr("value"));
        field.setDisplayName(opt.text());
        field.setHint("");
        fields.add(field);
        fieldIds.add(field.getId());
    }
    // Save data so that the search() function knows that this
    // is not a selectable search field
    JSONObject selectableData = new JSONObject();
    selectableData.put("selectable", false);
    for (Element row : doc.select("div[id~=F\\d+], .search-adv-source")) {
        if (row.select("input[type=text]").size() == 1
                && row.select("input, select").first().tagName()
                      .equals("input")) {
            // A single text search field
            Element input = row.select("input[type=text]").first();
            TextSearchField field = new TextSearchField();
            field.setId(input.attr("id"));
            field.setDisplayName(row.select("label").first().text());
            field.setHint("");
            field.setData(selectableData);
            fields.add(field);
        } else if (row.select("select").size() == 1
                && row.select("input[type=text]").size() == 0) {
            // Things like language, media type, etc.
            Element select = row.select("select").first();
            DropdownSearchField field = new DropdownSearchField();
            field.setId(select.id());
            field.setDisplayName(row.select("label").first().text());
            for (Element opt : select.select("option")) {
                field.addDropdownValue(opt.attr("value"), opt.text());
            }
            fields.add(field);
        } else if (row.select("select").size() == 0
                && row.select("input[type=text]").size() == 3
                && row.select("label").size() == 3) {
            // Three text inputs.
            // Year single/from/to or things like Band-/Heft-/Satznummer
            String name1 = row.select("label").get(0).text();
            String name2 = row.select("label").get(1).text();
            String name3 = row.select("label").get(2).text();
            Element input1 = row.select("input[type=text]").get(0);
            Element input2 = row.select("input[type=text]").get(1);
            Element input3 = row.select("input[type=text]").get(2);
            if (name2.contains("von") && name3.contains("bis")) {
                // "von"/"bis" labels mark a from/to range pair.
                TextSearchField field1 = new TextSearchField();
                field1.setId(input1.id());
                field1.setDisplayName(name1);
                field1.setHint("");
                field1.setData(selectableData);
                fields.add(field1);
                TextSearchField field2 = new TextSearchField();
                field2.setId(input2.id());
                field2.setDisplayName(name2.replace("von", "").trim());
                field2.setHint("von");
                field2.setData(selectableData);
                fields.add(field2);
                TextSearchField field3 = new TextSearchField();
                field3.setId(input3.id());
                field3.setDisplayName(name3.replace("bis", "").trim());
                field3.setHint("bis");
                field3.setHalfWidth(true);
                field3.setData(selectableData);
                fields.add(field3);
            } else {
                // Three independent fields.
                TextSearchField field1 = new TextSearchField();
                field1.setId(input1.id());
                field1.setDisplayName(name1);
                field1.setHint("");
                field1.setData(selectableData);
                fields.add(field1);
                TextSearchField field2 = new TextSearchField();
                field2.setId(input2.id());
                field2.setDisplayName(name2);
                field2.setHint("");
                field2.setData(selectableData);
                fields.add(field2);
                TextSearchField field3 = new TextSearchField();
                field3.setId(input3.id());
                field3.setDisplayName(name3);
                field3.setHint("");
                field3.setData(selectableData);
                fields.add(field3);
            }
        }
    }
    // Drop fields the configuration says should be hidden.
    for (Iterator<SearchField> iterator = fields.iterator(); iterator
            .hasNext(); ) {
        SearchField field = iterator.next();
        if (ignoredFieldNames.contains(field.getDisplayName())) {
            iterator.remove();
        }
    }
    return fields;
}
/**
 * Share links are not supported by this implementation; always returns {@code null}.
 */
@Override
public String getShareUrl(String id, String title) {
    // TODO Auto-generated method stub
    return null;
}
/**
 * Capability flags for this API: prolonging all lent items at once, endless
 * scrolling of search results, and warning about reservation fees.
 */
@Override
public int getSupportFlags() {
    return SUPPORT_FLAG_ACCOUNT_PROLONG_ALL
            | SUPPORT_FLAG_ENDLESS_SCROLLING | SUPPORT_FLAG_WARN_RESERVATION_FEES;
}
/**
 * Verifies account credentials by loading the account service page ("SBK")
 * and submitting the login form; an invalid login surfaces as an
 * {@link OpacErrorException} from {@code handleLoginForm}.
 */
@Override
public void checkAccountData(Account account) throws IOException,
        JSONException, OpacErrorException {
    start();
    Document doc = htmlGet(opac_url + ";jsessionid=" + s_sid + "?service="
            + s_service + getSpParams("SBK"));
    handleLoginForm(doc, account);
}
/**
 * Language switching is not supported by this implementation; this is a no-op.
 */
@Override
public void setLanguage(String language) {
    // TODO Auto-generated method stub
}
/**
 * Language switching is not supported; always returns {@code null}.
 */
@Override
public Set<String> getSupportedLanguages() throws IOException {
    // TODO Auto-generated method stub
    return null;
}
}
|
Adis: implement getResultById and getShareUrl if "Zitierlink" available
(e.g. Stuttgart)
|
opacclient/libopac/src/main/java/de/geeksfactory/opacclient/apis/Adis.java
|
Adis: implement getResultById and getShareUrl if "Zitierlink" available (e.g. Stuttgart)
|
|
Java
|
mit
|
47522be43af0ef9fb2aa41de765c4c538febd1c9
| 0
|
DemigodsRPG/Demigods3
|
package com.censoredsoftware.Demigods.Episodes.Demo;
import com.censoredsoftware.Demigods.Engine.Demigods;
import com.censoredsoftware.Demigods.Engine.Object.Deity.Deity;
import com.censoredsoftware.Demigods.Engine.Object.Structure.Structure;
import com.censoredsoftware.Demigods.Engine.Object.Task.TaskSet;
import com.censoredsoftware.Demigods.Episodes.Demo.Deity.Insignian.OmegaX17;
import com.censoredsoftware.Demigods.Episodes.Demo.Structure.Altar;
import com.censoredsoftware.Demigods.Episodes.Demo.Structure.Obelisk;
import com.censoredsoftware.Demigods.Episodes.Demo.Structure.Shrine;
import com.censoredsoftware.Demigods.Episodes.Demo.Task.Tutorial;
/**
 * Registry of the content (deities, task sets and structures) that makes up
 * the demo episode. Commented-out enum entries are currently disabled.
 */
public class EpisodeDemo
{
	/** Deities available in this episode. */
	public static enum Deities implements Demigods.ListedDeity
	{
		// GODS
		// ZEUS(new Zeus()),
		// POSEIDON(new Poseidon()),

		// TITANS
		// OCEANUS(new Oceanus()),
		// PROMETHEUS(new Prometheus()),

		// DONATORS
		// DISCO(new DrD1sco()),
		OMEGA(new OmegaX17());

		// Deity implementation wrapped by this constant
		private Deity deity;

		private Deities(Deity deity)
		{
			this.deity = deity;
		}

		@Override
		public Deity getDeity()
		{
			return deity;
		}
	}

	/** Task sets (quest lines) available in this episode. */
	public static enum Tasks implements Demigods.ListedTaskSet
	{
		/**
		 * The Tutorial TaskSet.
		 */
		Tutorial(new Tutorial());

		// TaskSet implementation wrapped by this constant
		private TaskSet taskSet;

		private Tasks(TaskSet tasks)
		{
			this.taskSet = tasks;
		}

		@Override
		public TaskSet getTaskSet()
		{
			return taskSet;
		}
	}

	/** Buildable structures available in this episode. */
	public static enum Structures implements Demigods.ListedStructure
	{
		ALTAR(new Altar()), SHRINE(new Shrine()), OBELISK(new Obelisk());

		// Structure implementation wrapped by this constant
		private Structure structure;

		private Structures(Structure structure)
		{
			this.structure = structure;
		}

		@Override
		public Structure getStructure()
		{
			return structure;
		}
	}
}
|
src/main/java/com/censoredsoftware/Demigods/Episodes/Demo/EpisodeDemo.java
|
package com.censoredsoftware.Demigods.Episodes.Demo;
import com.censoredsoftware.Demigods.Engine.Demigods;
import com.censoredsoftware.Demigods.Engine.Object.Deity.Deity;
import com.censoredsoftware.Demigods.Engine.Object.Structure.Structure;
import com.censoredsoftware.Demigods.Engine.Object.Task.TaskSet;
import com.censoredsoftware.Demigods.Episodes.Demo.Deity.Insignian.DrD1sco;
import com.censoredsoftware.Demigods.Episodes.Demo.Structure.Altar;
import com.censoredsoftware.Demigods.Episodes.Demo.Structure.Obelisk;
import com.censoredsoftware.Demigods.Episodes.Demo.Structure.Shrine;
import com.censoredsoftware.Demigods.Episodes.Demo.Task.Tutorial;
/**
 * Registry of the content (deities, task sets and structures) that makes up
 * the demo episode. Commented-out enum entries are currently disabled.
 */
public class EpisodeDemo
{
	/** Deities available in this episode. */
	public static enum Deities implements Demigods.ListedDeity
	{
		// GODS
		// ZEUS(new Zeus()),
		// POSEIDON(new Poseidon()),

		// TITANS
		// OCEANUS(new Oceanus()),
		// PROMETHEUS(new Prometheus()),

		// DONATORS
		DISCO(new DrD1sco()),
		// OMEGA(new OmegaX17())
		;

		// Deity implementation wrapped by this constant
		private Deity deity;

		private Deities(Deity deity)
		{
			this.deity = deity;
		}

		@Override
		public Deity getDeity()
		{
			return deity;
		}
	}

	/** Task sets (quest lines) available in this episode. */
	public static enum Tasks implements Demigods.ListedTaskSet
	{
		/**
		 * The Tutorial TaskSet.
		 */
		Tutorial(new Tutorial());

		// TaskSet implementation wrapped by this constant
		private TaskSet taskSet;

		private Tasks(TaskSet tasks)
		{
			this.taskSet = tasks;
		}

		@Override
		public TaskSet getTaskSet()
		{
			return taskSet;
		}
	}

	/** Buildable structures available in this episode. */
	public static enum Structures implements Demigods.ListedStructure
	{
		ALTAR(new Altar()), SHRINE(new Shrine()), OBELISK(new Obelisk());

		// Structure implementation wrapped by this constant
		private Structure structure;

		private Structures(Structure structure)
		{
			this.structure = structure;
		}

		@Override
		public Structure getStructure()
		{
			return structure;
		}
	}
}
|
Pinpoint lag.
|
src/main/java/com/censoredsoftware/Demigods/Episodes/Demo/EpisodeDemo.java
|
Pinpoint lag.
|
|
Java
|
mpl-2.0
|
6642a02d15ee4ccf2cb05020a3e044134db455e5
| 0
|
joansmith/RoyalCommands,RoyalDev/RoyalCommands
|
package org.royaldev.royalcommands;
/*
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
This plugin was written by jkcclemens <jkc.clemens@gmail.com>.
If forked and not credited, alert him.
*/
// import org.yaml.snakeyaml <- SnakeYAML start
import net.milkbowl.vault.chat.Chat;
import net.milkbowl.vault.economy.Economy;
import net.milkbowl.vault.permission.Permission;
import org.bukkit.Bukkit;
import org.bukkit.command.CommandSender;
import org.bukkit.command.ConsoleCommandSender;
import org.bukkit.entity.Player;
import org.bukkit.plugin.Plugin;
import org.bukkit.plugin.PluginManager;
import org.bukkit.plugin.RegisteredServiceProvider;
import org.bukkit.plugin.java.JavaPlugin;
import org.kitteh.vanish.VanishPlugin;
import org.royaldev.royalcommands.listeners.RoyalCommandsBlockListener;
import org.royaldev.royalcommands.listeners.RoyalCommandsEntityListener;
import org.royaldev.royalcommands.listeners.RoyalCommandsPlayerListener;
import org.royaldev.royalcommands.listeners.SignListener;
import org.royaldev.royalcommands.rcommands.*;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import javax.xml.parsers.DocumentBuilderFactory;
import java.io.*;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
public class RoyalCommands extends JavaPlugin {
// Whitelist configuration (whitelist.yml); null if the file could not be loaded.
public ConfigManager whl;
// Vault service providers, resolved by the setup*() methods below.
public static Permission permission = null;
public static Economy economy = null;
public static Chat chat = null;
// Current plugin version (from plugin.yml) and the newest version found online.
public String version = null;
public String newVersion = null;
// Values read from config.yml by reloadConfigVals().
public List<String> muteCmds = new ArrayList<String>();
public List<String> blockedItems = new ArrayList<String>();
public List<String> motd = new ArrayList<String>();
public List<String> commandCooldowns = new ArrayList<String>();
public List<String> whitelist = new ArrayList<String>();
public Boolean showcommands = null;
public Boolean disablegetip = null;
public Boolean useWelcome = null;
public Boolean buildPerm = null;
public Boolean backDeath = null;
public Boolean motdLogin = null;
public Boolean dropExtras = null;
public Boolean kitPerms = null;
public Boolean explodeFire = null;
public Boolean sendToSpawn = null;
public Boolean stsBack = null;
public Boolean stsNew = null;
public static Boolean otherHelp = null;
public Boolean customHelp = null;
public Boolean useVNP = null;
public Boolean cooldownAliases = null;
public Boolean useWhitelist = null;
public Boolean smoothTime = null;
public String banMessage = null;
public String kickMessage = null;
public String defaultWarn = null;
public String welcomeMessage = null;
public String noBuildMessage = null;
public String bcastFormat = null;
public String whoFormat = null;
public static Integer defaultStack = null;
public Integer warnBan = null;
public Integer spawnmobLimit = null;
public Integer helpAmount = null;
public Double maxNear = null;
public Double defaultNear = null;
public Double gTeleCd = null;
public Float explodePower = null;
public Float maxExplodePower = null;

public RoyalCommands() {
    // Initializes the final per-player configuration manager (declared below).
    pconfm = new PConfManager(this);
}

// Commands declared in plugin.yml and the plugins installed on the server,
// both captured in onEnable().
public static Map<String, Map<String, Object>> commands = null;
public static Plugin[] plugins = null;
// Metrics reporter; null if it could not be started.
public Metrics m = null;
// Permissions with Vault
/**
 * Resolves the Vault permission provider and caches it in {@link #permission}.
 *
 * @return {@code true} if a permission provider is available
 */
public Boolean setupPermissions() {
    RegisteredServiceProvider<Permission> rsp =
            getServer().getServicesManager().getRegistration(Permission.class);
    if (rsp != null) {
        permission = rsp.getProvider();
    }
    return permission != null;
}
/**
 * Resolves the Vault chat provider and caches it in {@link #chat}.
 *
 * @return {@code true} if a chat provider is available
 */
public Boolean setupChat() {
    RegisteredServiceProvider<Chat> rsp =
            getServer().getServicesManager().getRegistration(Chat.class);
    if (rsp != null) {
        chat = rsp.getProvider();
    }
    return chat != null;
}
/**
 * Resolves the Vault economy provider and caches it in {@link #economy}.
 *
 * @return {@code true} if an economy provider is available
 */
private Boolean setupEconomy() {
    RegisteredServiceProvider<Economy> rsp =
            getServer().getServicesManager().getRegistration(Economy.class);
    if (rsp != null) {
        economy = rsp.getProvider();
    }
    return economy != null;
}
// Event listeners, registered with Bukkit in onEnable().
private final RoyalCommandsPlayerListener playerListener = new RoyalCommandsPlayerListener(this);
private final RoyalCommandsBlockListener blockListener = new RoyalCommandsBlockListener(this);
private final RoyalCommandsEntityListener entityListener = new RoyalCommandsEntityListener(this);
private final SignListener signListener = new SignListener(this);
// Per-player configuration manager; assigned in the constructor above.
public final PConfManager pconfm;
public Logger log = Logger.getLogger("Minecraft");
// VanishNoPacket plugin handle; resolved in onEnable() and lazily in isVanished().
VanishPlugin vp = null;
/**
 * Checks whether a player is hidden via the VanishNoPacket plugin.
 *
 * The plugin reference is resolved lazily; if VanishNoPacket is not installed
 * or vanish support is disabled in the config this returns {@code false}.
 *
 * @param p player to check
 * @return {@code true} if the player is currently vanished
 */
public boolean isVanished(Player p) {
    if (!useVNP) return false;
    if (vp == null) {
        vp = (VanishPlugin) Bukkit.getServer().getPluginManager().getPlugin("VanishNoPacket");
        // Fix: the first call after a failed/unset reference used to return
        // false unconditionally, even when the plugin resolved successfully.
        if (vp == null) return false;
    }
    return vp.getManager().isVanished(p.getName());
}
/**
 * Re-reads all cached values from config.yml (and the whitelist file, if
 * loaded) into the plugin's fields, then rebuilds the help pages.
 */
public void reloadConfigVals() {
    if (whl != null) whl.load();
    // Feature toggles
    showcommands = getConfig().getBoolean("view_commands");
    disablegetip = getConfig().getBoolean("disable_getip");
    useWelcome = getConfig().getBoolean("enable_welcome_message");
    buildPerm = getConfig().getBoolean("use_build_perm");
    backDeath = getConfig().getBoolean("back_on_death");
    motdLogin = getConfig().getBoolean("motd_on_login");
    dropExtras = getConfig().getBoolean("drop_extras");
    kitPerms = getConfig().getBoolean("use_exclusive_kit_perms");
    explodeFire = getConfig().getBoolean("explode_fire");
    sendToSpawn = getConfig().getBoolean("send_to_spawn");
    stsBack = getConfig().getBoolean("sts_back");
    stsNew = getConfig().getBoolean("send_to_spawn_new");
    otherHelp = getConfig().getBoolean("other_plugins_in_help");
    customHelp = getConfig().getBoolean("use_custom_help");
    useVNP = getConfig().getBoolean("use_vanish");
    cooldownAliases = getConfig().getBoolean("cooldowns_match_aliases");
    useWhitelist = getConfig().getBoolean("use_whitelist");
    smoothTime = getConfig().getBoolean("use_smooth_time");
    // Messages (color codes are translated via RUtils.colorize)
    banMessage = RUtils.colorize(getConfig().getString("default_ban_message"));
    noBuildMessage = RUtils.colorize(getConfig().getString("no_build_message"));
    kickMessage = RUtils.colorize(getConfig().getString("default_kick_message"));
    defaultWarn = RUtils.colorize(getConfig().getString("default_warn_message"));
    welcomeMessage = RUtils.colorize(getConfig().getString("welcome_message"));
    bcastFormat = RUtils.colorize(getConfig().getString("bcast_format"));
    whoFormat = getConfig().getString("who_format");
    // Numeric limits
    defaultStack = getConfig().getInt("default_stack_size");
    warnBan = getConfig().getInt("max_warns_before_ban");
    spawnmobLimit = getConfig().getInt("spawnmob_limit");
    helpAmount = getConfig().getInt("help_lines");
    maxNear = getConfig().getDouble("max_near_radius");
    defaultNear = getConfig().getDouble("default_near_radius");
    gTeleCd = getConfig().getDouble("global_teleport_cooldown");
    explodePower = (float) getConfig().getDouble("explode_power");
    maxExplodePower = (float) getConfig().getDouble("max_explode_power");
    // Lists
    muteCmds = getConfig().getStringList("mute_blocked_commands");
    blockedItems = getConfig().getStringList("blocked_spawn_items");
    motd = getConfig().getStringList("motd");
    commandCooldowns = getConfig().getStringList("command_cooldowns");
    if (whl != null) whitelist = whl.getStringList("whitelist");
    Help.reloadHelp();
}
/**
 * Creates the plugin's data folder contents on first run: default config,
 * userdata directory, whitelist.yml, rules.txt, help.txt and warps.yml.
 *
 * Fix/refactor: the three identical "create file with default content"
 * stanzas (whitelist/rules/help) are collapsed into one helper, and writers
 * are now closed in {@code finally} (the old code leaked the writer when a
 * write failed).
 */
public void loadConfiguration() {
    // This until I get a custom YAML wrapper using SnakeYAML going.
    if (!new File(getDataFolder() + File.separator + "config.yml").exists()) saveDefaultConfig();
    File file = new File(this.getDataFolder() + File.separator + "userdata" + File.separator);
    if (!file.exists()) {
        try {
            if (new File(this.getDataFolder() + File.separator + "userdata").mkdir()) {
                log.info("[RoyalCommands] Created userdata directory.");
            }
        } catch (Exception e) {
            log.severe("[RoyalCommands] Failed to make userdata directory!");
            log.severe(e.getMessage());
        }
    }
    writeDefaultFile("whitelist.yml", new String[]{
            "whitelist:",
            "- jkcclemens",
            "- other_guy"});
    writeDefaultFile("rules.txt", new String[]{
            "###",
            "&2Page 1:",
            "  1. Be kind",
            "  2. Be courteous",
            "  3. Be respectful",
            "###",
            "&2Page 2:",
            "  4. Be cool"});
    writeDefaultFile("help.txt", new String[]{
            "###",
            "&2Page 1:",
            "  1. Do some awesome things",
            "  2. You must meow to join",
            "  3. The admins didn't change this",
            "###",
            "&2Page 2:",
            "  4. Tell them to"});
    // warps.yml uses different log messages, so it is kept separate.
    File warps = new File(this.getDataFolder() + File.separator + "warps.yml");
    if (!warps.exists()) {
        try {
            if (new File(this.getDataFolder() + File.separator + "warps.yml").createNewFile()) {
                try {
                    BufferedWriter out = new BufferedWriter(
                            new FileWriter(this.getDataFolder() + File.separator + "warps.yml"));
                    try {
                        out.write("warps:");
                    } finally {
                        out.close();
                    }
                } catch (Exception e) {
                    log.severe("[RoyalCommands] Could not write to warps file.");
                }
                log.info("[RoyalCommands] Created warps file.");
            }
        } catch (Exception e) {
            log.severe("[RoyalCommands] Failed to make warps file!");
            log.severe(e.getMessage());
        }
    }
}

/**
 * Creates {@code name} in the data folder, writing one line per entry of
 * {@code lines}, if the file does not exist yet. Write errors on the default
 * content are ignored (as before); creation errors are logged.
 */
private void writeDefaultFile(String name, String[] lines) {
    File f = new File(this.getDataFolder() + File.separator + name);
    if (f.exists()) return;
    try {
        if (!f.createNewFile()) {
            log.severe("[RoyalCommands] Could not create " + name + "!");
            return;
        }
        BufferedWriter out = null;
        try {
            out = new BufferedWriter(new FileWriter(f.getAbsolutePath()));
            for (String line : lines) {
                out.write(line + "\n");
            }
        } catch (IOException e) {
            // ignore — a missing default file is not fatal
        } finally {
            if (out != null) {
                try {
                    out.close();
                } catch (IOException ignored) {
                }
            }
        }
    } catch (Exception e) {
        log.severe("[RoyalCommands] Could not create " + name + "!");
        e.printStackTrace();
    }
}
// getFinalArg taken from EssentialsCommand.java - Essentials by
// EssentialsTeam
/**
 * Joins {@code args[start..]} into one space-separated string.
 *
 * @param args  full argument array
 * @param start index of the first argument to include
 * @return the joined string; empty if {@code start} is past the end
 */
public String getFinalArg(final String[] args, final int start) {
    final StringBuilder joined = new StringBuilder();
    String separator = "";
    for (int i = start; i < args.length; i++) {
        joined.append(separator).append(args[i]);
        separator = " ";
    }
    return joined.toString();
}
// updateCheck() from MilkBowl's Vault
/**
 * Checks dev.bukkit.org's file feed for the title of the newest release.
 *
 * Fix: the feed is remote, untrusted XML, so doctype declarations are now
 * disallowed to prevent XXE; the magic node-type constant {@code 1} is
 * replaced with {@link Node#ELEMENT_NODE}.
 *
 * @param currentVersion version string to fall back to if the check fails
 * @return the newest file's title, or {@code currentVersion} on any error
 * @throws Exception kept for interface compatibility; errors are swallowed
 */
public String updateCheck(String currentVersion) throws Exception {
    String pluginUrlString = "http://dev.bukkit.org/server-mods/royalcommands/files.rss";
    try {
        URL url = new URL(pluginUrlString);
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        try {
            // Disallow DTDs entirely — blocks XML External Entity attacks.
            dbf.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        } catch (Exception ignored) {
            // parser does not support the feature; continue with defaults
        }
        Document doc = dbf.newDocumentBuilder().parse(url.openConnection().getInputStream());
        doc.getDocumentElement().normalize();
        NodeList nodes = doc.getElementsByTagName("item");
        Node firstNode = nodes.item(0);
        if (firstNode.getNodeType() == Node.ELEMENT_NODE) {
            Element firstElement = (Element) firstNode;
            NodeList firstElementTagName = firstElement.getElementsByTagName("title");
            Element firstNameElement = (Element) firstElementTagName.item(0);
            NodeList firstNodes = firstNameElement.getChildNodes();
            return firstNodes.item(0).getNodeValue();
        }
    } catch (Exception ignored) {
        // network/parse failure: silently fall back to the current version
    }
    return currentVersion;
}
/**
 * Checks whether a player has the given permission node or the blanket
 * "rcmds.admin" node. The console is always authorized.
 *
 * NOTE(review): this re-resolves the Vault provider on every call via
 * setupPermissions(); consider caching the result.
 */
public boolean isAuthorized(final Player player, final String node) {
    return player instanceof ConsoleCommandSender || this.setupPermissions() && (RoyalCommands.permission.has(player, "rcmds.admin") || RoyalCommands.permission.has(player, node));
}
/**
 * Checks whether a command sender has the given permission node or the blanket
 * "rcmds.admin" node. The console is always authorized.
 *
 * Fix: the previous version cast the sender to {@code Player} for the admin
 * check, which threw {@code ClassCastException} for non-player senders such
 * as command blocks; Vault's {@code CommandSender} overload of
 * {@code Permission.has} is used instead.
 *
 * @param player sender to check (may be any CommandSender)
 * @param node   permission node to test
 * @return {@code true} if authorized
 */
public boolean isAuthorized(final CommandSender player, final String node) {
    return player instanceof ConsoleCommandSender || this.setupPermissions() && (RoyalCommands.permission.has(player, "rcmds.admin") || RoyalCommands.permission.has(player, node));
}
/**
 * Plugin entry point: loads configuration, hooks Vault and Metrics, schedules
 * the async update check, registers event listeners and binds an executor to
 * every command declared in plugin.yml.
 *
 * Refactor: the 170-line body is decomposed into private helpers with
 * single responsibilities; behavior and ordering are unchanged.
 */
public void onEnable() {
    commands = getDescription().getCommands();
    plugins = getServer().getPluginManager().getPlugins();
    loadConfiguration();
    reloadConfigVals();
    try {
        whl = new ConfigManager(getDataFolder().getAbsolutePath() + File.separator + "whitelist.yml");
    } catch (FileNotFoundException e) {
        log.warning("[RoyalCommands] Could not find whitelist.yml!");
        whl = null;
    }
    setupEconomy();
    setupChat();
    try {
        m = new Metrics(this);
        m.start();
    } catch (Exception ignore) {
        log.warning("[RoyalCommands] Could not start Metrics!");
    }
    version = getDescription().getVersion();
    scheduleUpdateCheck();
    vp = (VanishPlugin) Bukkit.getServer().getPluginManager().getPlugin("VanishNoPacket");
    registerListeners();
    registerCommandExecutors();
    log.info("[RoyalCommands] RoyalCommands v" + version + " initiated.");
}

/** Schedules the periodic async update check (borrowed from MilkBowl's Vault). */
private void scheduleUpdateCheck() {
    this.getServer().getScheduler().scheduleAsyncRepeatingTask(this, new Runnable() {
        @Override
        public void run() {
            try {
                newVersion = updateCheck(version);
                String oldVersion = version;
                // Compare versions numerically by stripping all non-digit characters.
                Integer nVI = Integer.valueOf(newVersion.replaceAll("\\D+", ""));
                Integer oVI = Integer.valueOf(version.replaceAll("\\D+", ""));
                if (nVI > oVI || (oldVersion.contains("pre") && nVI.equals(oVI))) {
                    log.warning(newVersion + " is out! You are running v" + oldVersion);
                    log.warning("Update RoyalCommands at: http://dev.bukkit.org/server-mods/royalcommands");
                }
            } catch (Exception ignored) {
                // ignore exceptions (network errors, unparsable versions)
            }
        }
    }, 0, 36000); // 36000 ticks = every 30 minutes
}

/** Registers the plugin's event listeners with Bukkit. */
private void registerListeners() {
    PluginManager pm = this.getServer().getPluginManager();
    pm.registerEvents(playerListener, this);
    pm.registerEvents(entityListener, this);
    pm.registerEvents(blockListener, this);
    pm.registerEvents(signListener, this);
}

/** Binds an executor to every command declared in plugin.yml. */
private void registerCommandExecutors() {
    getCommand("level").setExecutor(new Level(this));
    getCommand("setlevel").setExecutor(new Setlevel(this));
    getCommand("sci").setExecutor(new Sci(this));
    getCommand("speak").setExecutor(new Speak(this));
    getCommand("facepalm").setExecutor(new Facepalm(this));
    getCommand("slap").setExecutor(new Slap(this));
    getCommand("harm").setExecutor(new Harm(this));
    getCommand("starve").setExecutor(new Starve(this));
    getCommand("banned").setExecutor(new Banned(this));
    getCommand("setarmor").setExecutor(new Setarmor(this));
    getCommand("getip").setExecutor(new GetIP(this));
    getCommand("compareip").setExecutor(new CmdCompareIP(this));
    getCommand("ragequit").setExecutor(new RageQuit(this));
    getCommand("quit").setExecutor(new Quit(this));
    getCommand("rank").setExecutor(new Rank(this));
    getCommand("freeze").setExecutor(new Freeze(this));
    getCommand("fakeop").setExecutor(new Fakeop(this));
    getCommand("vtp").setExecutor(new Vtp(this));
    getCommand("vtphere").setExecutor(new Vtphere(this));
    getCommand("megastrike").setExecutor(new MegaStrike(this));
    getCommand("pext").setExecutor(new Pext(this));
    getCommand("item").setExecutor(new CmdItem(this));
    getCommand("clearinventory").setExecutor(new ClearInventory(this));
    getCommand("weather").setExecutor(new Weather(this));
    getCommand("fixchunk").setExecutor(new FixChunk(this));
    getCommand("give").setExecutor(new CmdGive(this));
    getCommand("message").setExecutor(new Message(this));
    getCommand("reply").setExecutor(new Reply(this));
    getCommand("gamemode").setExecutor(new Gamemode(this));
    getCommand("mute").setExecutor(new Mute(this));
    getCommand("ban").setExecutor(new Ban(this));
    getCommand("kick").setExecutor(new Kick(this));
    getCommand("time").setExecutor(new Time(this));
    getCommand("home").setExecutor(new Home(this));
    getCommand("sethome").setExecutor(new SetHome(this));
    getCommand("delhome").setExecutor(new DelHome(this));
    getCommand("listhome").setExecutor(new ListHome(this));
    getCommand("strike").setExecutor(new Strike(this));
    getCommand("jump").setExecutor(new Jump(this));
    getCommand("warn").setExecutor(new Warn(this));
    getCommand("clearwarns").setExecutor(new ClearWarns(this));
    getCommand("warp").setExecutor(new Warp(this));
    getCommand("setwarp").setExecutor(new SetWarp(this));
    getCommand("delwarp").setExecutor(new DelWarp(this));
    getCommand("repair").setExecutor(new Repair(this));
    getCommand("unban").setExecutor(new Unban(this));
    getCommand("heal").setExecutor(new Heal(this));
    getCommand("feed").setExecutor(new Feed(this));
    getCommand("god").setExecutor(new God(this));
    getCommand("banreason").setExecutor(new Banreason(this));
    getCommand("setspawn").setExecutor(new SetSpawn(this));
    getCommand("spawn").setExecutor(new Spawn(this));
    getCommand("banip").setExecutor(new BanIP(this));
    getCommand("unbanip").setExecutor(new UnbanIP(this));
    getCommand("list").setExecutor(new CmdList(this));
    getCommand("back").setExecutor(new Back(this));
    getCommand("teleport").setExecutor(new Teleport(this));
    getCommand("teleporthere").setExecutor(new TeleportHere(this));
    getCommand("teleportrequest").setExecutor(new TeleportRequest(this));
    getCommand("tpaccept").setExecutor(new TpAccept(this));
    getCommand("tpdeny").setExecutor(new TpDeny(this));
    getCommand("listwarns").setExecutor(new ListWarns(this));
    getCommand("more").setExecutor(new More(this));
    getCommand("teleportrequesthere").setExecutor(new TeleportRequestHere(this));
    getCommand("spy").setExecutor(new Spy(this));
    getCommand("spawnmob").setExecutor(new SpawnMob(this));
    getCommand("afk").setExecutor(new Afk(this));
    getCommand("assign").setExecutor(new Assign(this));
    getCommand("onehitkill").setExecutor(new OneHitKill(this));
    getCommand("burn").setExecutor(new Burn(this));
    getCommand("kickall").setExecutor(new KickAll(this));
    getCommand("world").setExecutor(new CmdWorld(this));
    getCommand("jail").setExecutor(new Jail(this));
    getCommand("setjail").setExecutor(new SetJail(this));
    getCommand("less").setExecutor(new Less(this));
    getCommand("spawner").setExecutor(new Spawner(this));
    getCommand("tp2p").setExecutor(new Tp2p(this));
    getCommand("motd").setExecutor(new Motd(this));
    getCommand("deljail").setExecutor(new DelJail(this));
    getCommand("force").setExecutor(new Force(this));
    getCommand("ping").setExecutor(new CmdPing(this));
    getCommand("invsee").setExecutor(new CmdInvsee(this));
    getCommand("realname").setExecutor(new CmdRealName(this));
    getCommand("nick").setExecutor(new Nick(this));
    getCommand("ingot2block").setExecutor(new Ingot2Block(this));
    getCommand("near").setExecutor(new CmdNear(this));
    getCommand("kill").setExecutor(new CmdKill(this));
    getCommand("suicide").setExecutor(new Suicide(this));
    getCommand("killall").setExecutor(new KillAll(this));
    getCommand("muteall").setExecutor(new MuteAll(this));
    getCommand("kit").setExecutor(new CmdKit(this));
    getCommand("rules").setExecutor(new CmdRules(this));
    getCommand("broadcast").setExecutor(new CmdBroadcast(this));
    getCommand("hug").setExecutor(new CmdHug(this));
    getCommand("explode").setExecutor(new CmdExplode(this));
    getCommand("ride").setExecutor(new CmdRide(this));
    getCommand("whobanned").setExecutor(new CmdWhoBanned(this));
    getCommand("tppos").setExecutor(new CmdTppos(this));
    getCommand("ignore").setExecutor(new CmdIgnore(this));
    getCommand("help").setExecutor(new CmdHelp(this));
    getCommand("coords").setExecutor(new CmdCoords(this));
    getCommand("tpall").setExecutor(new CmdTpAll(this));
    getCommand("tpaall").setExecutor(new CmdTpaAll(this));
    getCommand("vip").setExecutor(new CmdVip(this));
    getCommand("dump").setExecutor(new CmdDump(this));
    getCommand("seen").setExecutor(new CmdSeen(this));
    getCommand("tempban").setExecutor(new CmdTempban(this));
    getCommand("tptoggle").setExecutor(new CmdTpToggle(this));
    getCommand("kits").setExecutor(new CmdKits(this));
    getCommand("lag").setExecutor(new CmdLag(this));
    getCommand("mem").setExecutor(new CmdMem(this));
    getCommand("entities").setExecutor(new CmdEntities(this));
    getCommand("invmod").setExecutor(new CmdInvmod(this));
    getCommand("workbench").setExecutor(new CmdWorkbench(this));
    getCommand("enchantingtable").setExecutor(new CmdEnchantingTable(this));
    getCommand("trade").setExecutor(new CmdTrade(this));
    getCommand("furnace").setExecutor(new CmdFurnace(this));
    getCommand("enchant").setExecutor(new CmdEnchant(this));
    getCommand("whitelist").setExecutor(new CmdWhitelist(this));
    getCommand("fireball").setExecutor(new CmdFireball(this));
    getCommand("fly").setExecutor(new CmdFly(this));
    getCommand("playertime").setExecutor(new CmdPlayerTime(this));
    getCommand("compass").setExecutor(new CmdCompass(this));
    getCommand("helmet").setExecutor(new CmdHelmet(this));
    getCommand("worldmanager").setExecutor(new CmdWorldManager(this));
    getCommand("biome").setExecutor(new CmdBiome(this));
    getCommand("rcmds").setExecutor(new Rcmds(this));
}
/** Plugin shutdown hook: stops all scheduled tasks and logs the shutdown. */
public void onDisable() {
    // Cancel any repeating/delayed tasks this plugin registered with the Bukkit scheduler.
    getServer().getScheduler().cancelTasks(this);
    final String shutdownMessage = "[RoyalCommands] RoyalCommands v" + version + " disabled.";
    log.info(shutdownMessage);
}
}
|
src/org/royaldev/royalcommands/RoyalCommands.java
|
package org.royaldev.royalcommands;
/*
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
This plugin was written by jkcclemens <jkc.clemens@gmail.com>.
If forked and not credited, alert him.
*/
// import org.yaml.snakeyaml <- SnakeYAML start
import net.milkbowl.vault.chat.Chat;
import net.milkbowl.vault.economy.Economy;
import net.milkbowl.vault.permission.Permission;
import org.bukkit.Bukkit;
import org.bukkit.command.CommandSender;
import org.bukkit.command.ConsoleCommandSender;
import org.bukkit.entity.Player;
import org.bukkit.plugin.Plugin;
import org.bukkit.plugin.PluginManager;
import org.bukkit.plugin.RegisteredServiceProvider;
import org.bukkit.plugin.java.JavaPlugin;
import org.kitteh.vanish.VanishPlugin;
import org.royaldev.royalcommands.listeners.RoyalCommandsBlockListener;
import org.royaldev.royalcommands.listeners.RoyalCommandsEntityListener;
import org.royaldev.royalcommands.listeners.RoyalCommandsPlayerListener;
import org.royaldev.royalcommands.listeners.SignListener;
import org.royaldev.royalcommands.rcommands.*;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import javax.xml.parsers.DocumentBuilderFactory;
import java.io.*;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
/**
 * Main plugin class for RoyalCommands.
 * <p>
 * Wires up configuration files, Vault service hooks (permissions, economy,
 * chat), event listeners, the update checker and every command executor.
 */
public class RoyalCommands extends JavaPlugin {
    /** Wrapper around whitelist.yml; {@code null} if the file could not be found. */
    public ConfigManager whl;
    // Vault service hooks — null until the matching setup*() method succeeds.
    public static Permission permission = null;
    public static Economy economy = null;
    public static Chat chat = null;
    /** Version string from plugin.yml; populated in {@link #onEnable()}. */
    public String version = null;
    /** Latest released version reported by the update checker, if any. */
    public String newVersion = null;
    // Values below are (re)read from config.yml by reloadConfigVals().
    public List<String> muteCmds = new ArrayList<String>();
    public List<String> blockedItems = new ArrayList<String>();
    public List<String> motd = new ArrayList<String>();
    public List<String> commandCooldowns = new ArrayList<String>();
    public List<String> whitelist = new ArrayList<String>();
    public Boolean showcommands = null;
    public Boolean disablegetip = null;
    public Boolean useWelcome = null;
    public Boolean buildPerm = null;
    public Boolean backDeath = null;
    public Boolean motdLogin = null;
    public Boolean dropExtras = null;
    public Boolean kitPerms = null;
    public Boolean explodeFire = null;
    public Boolean sendToSpawn = null;
    public Boolean stsBack = null;
    public Boolean stsNew = null;
    public static Boolean otherHelp = null;
    public Boolean customHelp = null;
    public Boolean useVNP = null;
    public Boolean cooldownAliases = null;
    public Boolean useWhitelist = null;
    public Boolean smoothTime = null;
    public String banMessage = null;
    public String kickMessage = null;
    public String defaultWarn = null;
    public String welcomeMessage = null;
    public String noBuildMessage = null;
    public String bcastFormat = null;
    public String whoFormat = null;
    public static Integer defaultStack = null;
    public Integer warnBan = null;
    public Integer spawnmobLimit = null;
    public Integer helpAmount = null;
    public Double maxNear = null;
    public Double defaultNear = null;
    public Double gTeleCd = null;
    public Float explodePower = null;
    public Float maxExplodePower = null;

    public RoyalCommands() {
        // pconfm is final, so it must be assigned in the constructor (before onEnable runs).
        pconfm = new PConfManager(this);
    }

    /** Command metadata from plugin.yml; populated in onEnable(). */
    public static Map<String, Map<String, Object>> commands = null;
    /** Snapshot of the plugins installed at enable time. */
    public static Plugin[] plugins = null;
    public Metrics m = null;

    // Permissions with Vault
    /** Looks up Vault's Permission provider. @return true if a provider was found */
    public Boolean setupPermissions() {
        RegisteredServiceProvider<Permission> permissionProvider = getServer().getServicesManager().getRegistration(net.milkbowl.vault.permission.Permission.class);
        if (permissionProvider != null) permission = permissionProvider.getProvider();
        return (permission != null);
    }

    /** Looks up Vault's Chat provider. @return true if a provider was found */
    public Boolean setupChat() {
        RegisteredServiceProvider<Chat> chatProvider = getServer().getServicesManager().getRegistration(net.milkbowl.vault.chat.Chat.class);
        if (chatProvider != null) chat = chatProvider.getProvider();
        return (chat != null);
    }

    /** Looks up Vault's Economy provider. @return true if a provider was found */
    private Boolean setupEconomy() {
        RegisteredServiceProvider<Economy> economyProvider = getServer().getServicesManager().getRegistration(net.milkbowl.vault.economy.Economy.class);
        if (economyProvider != null) economy = economyProvider.getProvider();
        return (economy != null);
    }

    private final RoyalCommandsPlayerListener playerListener = new RoyalCommandsPlayerListener(this);
    private final RoyalCommandsBlockListener blockListener = new RoyalCommandsBlockListener(this);
    private final RoyalCommandsEntityListener entityListener = new RoyalCommandsEntityListener(this);
    private final SignListener signListener = new SignListener(this);
    public final PConfManager pconfm;
    public Logger log = Logger.getLogger("Minecraft");
    VanishPlugin vp = null;

    /**
     * Checks whether a player is hidden via VanishNoPacket.
     *
     * @param p player to check
     * @return true only if vanish support is enabled, the VanishNoPacket plugin
     *         is available, and it reports the player as vanished
     */
    public boolean isVanished(Player p) {
        if (!useVNP) return false;
        if (vp == null) {
            // Lazily resolve the plugin; report "not vanished" on this first miss.
            vp = (VanishPlugin) Bukkit.getServer().getPluginManager().getPlugin("VanishNoPacket");
            return false;
        } else return vp.getManager().isVanished(p.getName());
    }

    /**
     * (Re)reads every cached configuration value from config.yml and, when
     * available, the whitelist from whitelist.yml, then rebuilds help pages.
     */
    public void reloadConfigVals() {
        if (whl != null) whl.load();
        showcommands = getConfig().getBoolean("view_commands");
        disablegetip = getConfig().getBoolean("disable_getip");
        useWelcome = getConfig().getBoolean("enable_welcome_message");
        buildPerm = getConfig().getBoolean("use_build_perm");
        backDeath = getConfig().getBoolean("back_on_death");
        motdLogin = getConfig().getBoolean("motd_on_login");
        dropExtras = getConfig().getBoolean("drop_extras");
        kitPerms = getConfig().getBoolean("use_exclusive_kit_perms");
        explodeFire = getConfig().getBoolean("explode_fire");
        sendToSpawn = getConfig().getBoolean("send_to_spawn");
        stsBack = getConfig().getBoolean("sts_back");
        stsNew = getConfig().getBoolean("send_to_spawn_new");
        otherHelp = getConfig().getBoolean("other_plugins_in_help");
        customHelp = getConfig().getBoolean("use_custom_help");
        useVNP = getConfig().getBoolean("use_vanish");
        cooldownAliases = getConfig().getBoolean("cooldowns_match_aliases");
        useWhitelist = getConfig().getBoolean("use_whitelist");
        smoothTime = getConfig().getBoolean("use_smooth_time");
        banMessage = RUtils.colorize(getConfig().getString("default_ban_message"));
        noBuildMessage = RUtils.colorize(getConfig().getString("no_build_message"));
        kickMessage = RUtils.colorize(getConfig().getString("default_kick_message"));
        defaultWarn = RUtils.colorize(getConfig().getString("default_warn_message"));
        welcomeMessage = RUtils.colorize(getConfig().getString("welcome_message"));
        bcastFormat = RUtils.colorize(getConfig().getString("bcast_format"));
        whoFormat = getConfig().getString("who_format");
        defaultStack = getConfig().getInt("default_stack_size");
        warnBan = getConfig().getInt("max_warns_before_ban");
        spawnmobLimit = getConfig().getInt("spawnmob_limit");
        helpAmount = getConfig().getInt("help_lines");
        maxNear = getConfig().getDouble("max_near_radius");
        defaultNear = getConfig().getDouble("default_near_radius");
        gTeleCd = getConfig().getDouble("global_teleport_cooldown");
        explodePower = (float) getConfig().getDouble("explode_power");
        maxExplodePower = (float) getConfig().getDouble("max_explode_power");
        muteCmds = getConfig().getStringList("mute_blocked_commands");
        blockedItems = getConfig().getStringList("blocked_spawn_items");
        motd = getConfig().getStringList("motd");
        commandCooldowns = getConfig().getStringList("command_cooldowns");
        if (whl != null) whitelist = whl.getStringList("whitelist");
        Help.reloadHelp();
    }

    /**
     * Creates the data folder and every default data file (config.yml,
     * whitelist.yml, rules.txt, help.txt, warps.yml) if they do not exist yet.
     * Failures are logged, not thrown.
     */
    public void loadConfiguration() {
        // This until I get a custom YAML wrapper using SnakeYAML going.
        if (!new File(getDataFolder() + File.separator + "config.yml").exists()) saveDefaultConfig();
        File file = new File(this.getDataFolder() + File.separator + "userdata" + File.separator);
        boolean exists = file.exists();
        if (!exists) {
            try {
                boolean success = new File(this.getDataFolder() + File.separator + "userdata").mkdir();
                if (success) {
                    log.info("[RoyalCommands] Created userdata directory.");
                }
            } catch (Exception e) {
                log.severe("[RoyalCommands] Failed to make userdata directory!");
                log.severe(e.getMessage());
            }
        }
        // NOTE: this local File intentionally shadows the List<String> whitelist field.
        File whitelist = new File(getDataFolder() + File.separator + "whitelist.yml");
        if (!whitelist.exists()) {
            try {
                boolean success = whitelist.createNewFile();
                if (!success) {
                    log.severe("[RoyalCommands] Could not create whitelist.yml!");
                } else {
                    try {
                        BufferedWriter out = new BufferedWriter(new FileWriter(whitelist.getAbsolutePath()));
                        out.write("whitelist:\n");
                        out.write("- jkcclemens\n");
                        out.write("- other_guy\n");
                        out.close();
                    } catch (IOException e) {
                        //ignore
                    }
                }
            } catch (Exception e) {
                log.severe("[RoyalCommands] Could not create whitelist.yml!");
                e.printStackTrace();
            }
        }
        File rules = new File(this.getDataFolder() + File.separator + "rules.txt");
        if (!rules.exists()) {
            try {
                boolean success = new File(this.getDataFolder() + File.separator + "rules.txt").createNewFile();
                if (!success) {
                    log.severe("[RoyalCommands] Could not create rules.txt!");
                } else {
                    try {
                        BufferedWriter out = new BufferedWriter(new FileWriter(this.getDataFolder() + File.separator + "rules.txt"));
                        out.write("###\n");
                        out.write("&2Page 1:\n");
                        out.write(" 1. Be kind\n");
                        out.write(" 2. Be courteous\n");
                        out.write(" 3. Be respectful\n");
                        out.write("###\n");
                        out.write("&2Page 2:\n");
                        out.write(" 4. Be cool\n");
                        out.close();
                    } catch (IOException e) {
                        //ignore
                    }
                }
            } catch (Exception e) {
                log.severe("[RoyalCommands] Could not create rules.txt!");
                e.printStackTrace();
            }
        }
        File help = new File(this.getDataFolder() + File.separator + "help.txt");
        if (!help.exists()) {
            try {
                boolean success = new File(this.getDataFolder() + File.separator + "help.txt").createNewFile();
                if (!success) {
                    log.severe("[RoyalCommands] Could not create help.txt!");
                } else {
                    try {
                        BufferedWriter out = new BufferedWriter(new FileWriter(this.getDataFolder() + File.separator + "help.txt"));
                        out.write("###\n");
                        out.write("&2Page 1:\n");
                        out.write(" 1. Do some awesome things\n");
                        out.write(" 2. You must meow to join\n");
                        out.write(" 3. The admins didn't change this\n");
                        out.write("###\n");
                        out.write("&2Page 2:\n");
                        out.write(" 4. Tell them to\n");
                        out.close();
                    } catch (IOException e) {
                        //ignore
                    }
                }
            } catch (Exception e) {
                log.severe("[RoyalCommands] Could not create help.txt!");
                e.printStackTrace();
            }
        }
        File warps = new File(this.getDataFolder() + File.separator + "warps.yml");
        if (!warps.exists()) {
            try {
                boolean success = new File(this.getDataFolder() + File.separator + "warps.yml").createNewFile();
                if (success) {
                    try {
                        FileWriter fstream = new FileWriter(this.getDataFolder() + File.separator + "warps.yml");
                        BufferedWriter out = new BufferedWriter(fstream);
                        out.write("warps:");
                        out.close();
                    } catch (Exception e) {
                        log.severe("[RoyalCommands] Could not write to warps file.");
                    }
                    log.info("[RoyalCommands] Created warps file.");
                }
            } catch (Exception e) {
                log.severe("[RoyalCommands] Failed to make warps file!");
                log.severe(e.getMessage());
            }
        }
    }

    // getFinalArg taken from EssentialsCommand.java - Essentials by
    // EssentialsTeam
    /**
     * Joins {@code args[start..]} with single spaces.
     *
     * @param args  full argument array
     * @param start index of the first argument to include
     * @return the space-joined tail of the array (empty string if start is past the end)
     */
    public String getFinalArg(final String[] args, final int start) {
        final StringBuilder bldr = new StringBuilder();
        for (int i = start; i < args.length; i++) {
            if (i != start) bldr.append(" ");
            bldr.append(args[i]);
        }
        return bldr.toString();
    }

    // updateCheck() from MilkBowl's Vault
    /**
     * Reads the BukkitDev file RSS feed and returns the title of the newest
     * file entry, or {@code currentVersion} when anything goes wrong.
     */
    public String updateCheck(String currentVersion) throws Exception {
        String pluginUrlString = "http://dev.bukkit.org/server-mods/royalcommands/files.rss";
        try {
            URL url = new URL(pluginUrlString);
            Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(url.openConnection().getInputStream());
            doc.getDocumentElement().normalize();
            NodeList nodes = doc.getElementsByTagName("item");
            Node firstNode = nodes.item(0);
            // Use the named constant instead of the magic number 1.
            if (firstNode.getNodeType() == Node.ELEMENT_NODE) {
                Element firstElement = (Element) firstNode;
                NodeList firstElementTagName = firstElement.getElementsByTagName("title");
                Element firstNameElement = (Element) firstElementTagName.item(0);
                NodeList firstNodes = firstNameElement.getChildNodes();
                return firstNodes.item(0).getNodeValue();
            }
        } catch (Exception ignored) {
        }
        return currentVersion;
    }

    /**
     * @return true if the player is the console, has rcmds.admin, or has the given node
     */
    public boolean isAuthorized(final Player player, final String node) {
        return player instanceof ConsoleCommandSender || this.setupPermissions() && (RoyalCommands.permission.has(player, "rcmds.admin") || RoyalCommands.permission.has(player, node));
    }

    /**
     * @return true if the sender is the console, has rcmds.admin, or has the given node
     */
    public boolean isAuthorized(final CommandSender player, final String node) {
        // Vault's Permission#has accepts any CommandSender, so no Player cast is
        // needed; the previous (Player) cast threw ClassCastException for
        // non-player, non-console senders such as command blocks.
        return player instanceof ConsoleCommandSender || this.setupPermissions() && (RoyalCommands.permission.has(player, "rcmds.admin") || RoyalCommands.permission.has(player, node));
    }

    public void onEnable() {
        commands = getDescription().getCommands();
        loadConfiguration();
        // Construct the whitelist manager BEFORE reading config values:
        // reloadConfigVals() pulls the whitelist out of whl, so building whl
        // afterwards left the in-memory whitelist empty until a manual reload.
        try {
            whl = new ConfigManager(getDataFolder().getAbsolutePath() + File.separator + "whitelist.yml");
        } catch (FileNotFoundException e) {
            log.warning("[RoyalCommands] Could not find whitelist.yml!");
            whl = null;
        }
        reloadConfigVals();
        setupEconomy();
        setupChat();
        try {
            m = new Metrics(this);
            m.start();
        } catch (Exception ignore) {
            log.warning("[RoyalCommands] Could not start Metrics!");
        }
        version = getDescription().getVersion();
        plugins = getServer().getPluginManager().getPlugins();
        // yet again, borrowed from MilkBowl
        // Async update check every 36000 ticks (~30 minutes).
        this.getServer().getScheduler().scheduleAsyncRepeatingTask(this, new Runnable() {
            @Override
            public void run() {
                try {
                    newVersion = updateCheck(version);
                    String oldVersion = version;
                    // Compare versions numerically by stripping every non-digit.
                    Integer nVI = Integer.valueOf(newVersion.replaceAll("\\D+", ""));
                    Integer oVI = Integer.valueOf(version.replaceAll("\\D+", ""));
                    if (nVI > oVI || (oldVersion.contains("pre") && nVI.equals(oVI))) {
                        log.warning(newVersion + " is out! You are running v" + oldVersion);
                        log.warning("Update RoyalCommands at: http://dev.bukkit.org/server-mods/royalcommands");
                    }
                } catch (Exception ignored) {
                    // ignore exceptions
                }
            }
        }, 0, 36000);
        vp = (VanishPlugin) Bukkit.getServer().getPluginManager().getPlugin("VanishNoPacket");
        PluginManager pm = this.getServer().getPluginManager();
        pm.registerEvents(playerListener, this);
        pm.registerEvents(entityListener, this);
        pm.registerEvents(blockListener, this);
        pm.registerEvents(signListener, this);
        // Register every command executor declared in plugin.yml.
        getCommand("level").setExecutor(new Level(this));
        getCommand("setlevel").setExecutor(new Setlevel(this));
        getCommand("sci").setExecutor(new Sci(this));
        getCommand("speak").setExecutor(new Speak(this));
        getCommand("facepalm").setExecutor(new Facepalm(this));
        getCommand("slap").setExecutor(new Slap(this));
        getCommand("harm").setExecutor(new Harm(this));
        getCommand("starve").setExecutor(new Starve(this));
        getCommand("banned").setExecutor(new Banned(this));
        getCommand("setarmor").setExecutor(new Setarmor(this));
        getCommand("getip").setExecutor(new GetIP(this));
        getCommand("compareip").setExecutor(new CmdCompareIP(this));
        getCommand("ragequit").setExecutor(new RageQuit(this));
        getCommand("quit").setExecutor(new Quit(this));
        getCommand("rank").setExecutor(new Rank(this));
        getCommand("freeze").setExecutor(new Freeze(this));
        getCommand("fakeop").setExecutor(new Fakeop(this));
        getCommand("vtp").setExecutor(new Vtp(this));
        getCommand("vtphere").setExecutor(new Vtphere(this));
        getCommand("megastrike").setExecutor(new MegaStrike(this));
        getCommand("pext").setExecutor(new Pext(this));
        getCommand("item").setExecutor(new CmdItem(this));
        getCommand("clearinventory").setExecutor(new ClearInventory(this));
        getCommand("weather").setExecutor(new Weather(this));
        getCommand("fixchunk").setExecutor(new FixChunk(this));
        getCommand("give").setExecutor(new CmdGive(this));
        getCommand("message").setExecutor(new Message(this));
        getCommand("reply").setExecutor(new Reply(this));
        getCommand("gamemode").setExecutor(new Gamemode(this));
        getCommand("mute").setExecutor(new Mute(this));
        getCommand("ban").setExecutor(new Ban(this));
        getCommand("kick").setExecutor(new Kick(this));
        getCommand("time").setExecutor(new Time(this));
        getCommand("home").setExecutor(new Home(this));
        getCommand("sethome").setExecutor(new SetHome(this));
        getCommand("delhome").setExecutor(new DelHome(this));
        getCommand("listhome").setExecutor(new ListHome(this));
        getCommand("strike").setExecutor(new Strike(this));
        getCommand("jump").setExecutor(new Jump(this));
        getCommand("warn").setExecutor(new Warn(this));
        getCommand("clearwarns").setExecutor(new ClearWarns(this));
        getCommand("warp").setExecutor(new Warp(this));
        getCommand("setwarp").setExecutor(new SetWarp(this));
        getCommand("delwarp").setExecutor(new DelWarp(this));
        getCommand("repair").setExecutor(new Repair(this));
        getCommand("unban").setExecutor(new Unban(this));
        getCommand("heal").setExecutor(new Heal(this));
        getCommand("feed").setExecutor(new Feed(this));
        getCommand("god").setExecutor(new God(this));
        getCommand("banreason").setExecutor(new Banreason(this));
        getCommand("setspawn").setExecutor(new SetSpawn(this));
        getCommand("spawn").setExecutor(new Spawn(this));
        getCommand("banip").setExecutor(new BanIP(this));
        getCommand("unbanip").setExecutor(new UnbanIP(this));
        getCommand("list").setExecutor(new CmdList(this));
        getCommand("back").setExecutor(new Back(this));
        getCommand("teleport").setExecutor(new Teleport(this));
        getCommand("teleporthere").setExecutor(new TeleportHere(this));
        getCommand("teleportrequest").setExecutor(new TeleportRequest(this));
        getCommand("tpaccept").setExecutor(new TpAccept(this));
        getCommand("tpdeny").setExecutor(new TpDeny(this));
        getCommand("listwarns").setExecutor(new ListWarns(this));
        getCommand("more").setExecutor(new More(this));
        getCommand("teleportrequesthere").setExecutor(new TeleportRequestHere(this));
        getCommand("spy").setExecutor(new Spy(this));
        getCommand("spawnmob").setExecutor(new SpawnMob(this));
        getCommand("afk").setExecutor(new Afk(this));
        getCommand("assign").setExecutor(new Assign(this));
        getCommand("onehitkill").setExecutor(new OneHitKill(this));
        getCommand("burn").setExecutor(new Burn(this));
        getCommand("kickall").setExecutor(new KickAll(this));
        getCommand("world").setExecutor(new CmdWorld(this));
        getCommand("jail").setExecutor(new Jail(this));
        getCommand("setjail").setExecutor(new SetJail(this));
        getCommand("less").setExecutor(new Less(this));
        getCommand("spawner").setExecutor(new Spawner(this));
        getCommand("tp2p").setExecutor(new Tp2p(this));
        getCommand("motd").setExecutor(new Motd(this));
        getCommand("deljail").setExecutor(new DelJail(this));
        getCommand("force").setExecutor(new Force(this));
        getCommand("ping").setExecutor(new CmdPing(this));
        getCommand("invsee").setExecutor(new CmdInvsee(this));
        getCommand("realname").setExecutor(new CmdRealName(this));
        getCommand("nick").setExecutor(new Nick(this));
        getCommand("ingot2block").setExecutor(new Ingot2Block(this));
        getCommand("near").setExecutor(new CmdNear(this));
        getCommand("kill").setExecutor(new CmdKill(this));
        getCommand("suicide").setExecutor(new Suicide(this));
        getCommand("killall").setExecutor(new KillAll(this));
        getCommand("muteall").setExecutor(new MuteAll(this));
        getCommand("kit").setExecutor(new CmdKit(this));
        getCommand("rules").setExecutor(new CmdRules(this));
        getCommand("broadcast").setExecutor(new CmdBroadcast(this));
        getCommand("hug").setExecutor(new CmdHug(this));
        getCommand("explode").setExecutor(new CmdExplode(this));
        getCommand("ride").setExecutor(new CmdRide(this));
        getCommand("whobanned").setExecutor(new CmdWhoBanned(this));
        getCommand("tppos").setExecutor(new CmdTppos(this));
        getCommand("ignore").setExecutor(new CmdIgnore(this));
        getCommand("help").setExecutor(new CmdHelp(this));
        getCommand("coords").setExecutor(new CmdCoords(this));
        getCommand("tpall").setExecutor(new CmdTpAll(this));
        getCommand("tpaall").setExecutor(new CmdTpaAll(this));
        getCommand("vip").setExecutor(new CmdVip(this));
        getCommand("dump").setExecutor(new CmdDump(this));
        getCommand("seen").setExecutor(new CmdSeen(this));
        getCommand("tempban").setExecutor(new CmdTempban(this));
        getCommand("tptoggle").setExecutor(new CmdTpToggle(this));
        getCommand("kits").setExecutor(new CmdKits(this));
        getCommand("lag").setExecutor(new CmdLag(this));
        getCommand("mem").setExecutor(new CmdMem(this));
        getCommand("entities").setExecutor(new CmdEntities(this));
        getCommand("invmod").setExecutor(new CmdInvmod(this));
        getCommand("workbench").setExecutor(new CmdWorkbench(this));
        getCommand("enchantingtable").setExecutor(new CmdEnchantingTable(this));
        getCommand("trade").setExecutor(new CmdTrade(this));
        getCommand("furnace").setExecutor(new CmdFurnace(this));
        getCommand("enchant").setExecutor(new CmdEnchant(this));
        getCommand("whitelist").setExecutor(new CmdWhitelist(this));
        getCommand("fireball").setExecutor(new CmdFireball(this));
        getCommand("fly").setExecutor(new CmdFly(this));
        getCommand("playertime").setExecutor(new CmdPlayerTime(this));
        getCommand("compass").setExecutor(new CmdCompass(this));
        getCommand("helmet").setExecutor(new CmdHelmet(this));
        getCommand("worldmanager").setExecutor(new CmdWorldManager(this));
        getCommand("biome").setExecutor(new CmdBiome(this));
        getCommand("rcmds").setExecutor(new Rcmds(this));
        log.info("[RoyalCommands] RoyalCommands v" + version + " initiated.");
    }

    public void onDisable() {
        getServer().getScheduler().cancelTasks(this);
        log.info("[RoyalCommands] RoyalCommands v" + version + " disabled.");
    }
}
|
Fixed: NPE on start
|
src/org/royaldev/royalcommands/RoyalCommands.java
|
Fixed: NPE on start
|
|
Java
|
lgpl-2.1
|
1f1a64c6f44eee94905d82c3dbe7afa3b6a7abc4
| 0
|
xwiki/xwiki-enterprise,xwiki/xwiki-enterprise
|
/*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.xwiki.test.ui;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.xwiki.administration.test.po.AdministrationPage;
import org.xwiki.administration.test.po.LocalizationAdministrationSectionPage;
import org.xwiki.model.reference.LocalDocumentReference;
import org.xwiki.rest.model.jaxb.Page;
import org.xwiki.test.ui.browser.IgnoreBrowser;
import org.xwiki.test.ui.po.ViewPage;
import org.xwiki.test.ui.po.editor.WikiEditPage;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* Verify the ability to change the language.
*
* @version $Id$
* @since 2.4RC1
*/
public class LanguageTest extends AbstractTest
{
@Rule
public AdminAuthenticationRule adminAuthenticationRule = new AdminAuthenticationRule(getUtil());
/**
* Make sure we set back the language to monolingual and english for other tests that come thereafter
*/
@AfterClass
public static void afterClass()
{
reset();
}
/**
* Ensure the default language is English and that the wiki is in monolingual mode
*/
@Before
public void before() throws Exception
{
reset();
}
private static void reset()
{
// Reset default language configuration
setLanguageSettings(false, "en");
// Reset current language
getDriver().manage().deleteCookieNamed("language");
}
@Test
@IgnoreBrowser(value = "internet.*", version = "8\\.*", reason = "See http://jira.xwiki.org/browse/XE-1146")
public void testChangeLanguageInMonolingualModeUsingTheAdministrationPreference()
{
WikiEditPage edit = WikiEditPage.gotoPage("Test", "LanguageTest");
edit.setContent("{{velocity}}context = ($xcontext.language), doc = ($doc.language), "
+ "default = ($doc.defaultLanguage), tdoc = ($tdoc.language), "
+ "tdocdefault = ($tdoc.defaultLanguage){{/velocity}}");
ViewPage vp = edit.clickSaveAndView();
// Current language must be "en"
Assert.assertEquals("Invalid content", vp.getContent(),
"context = (en), doc = (), default = (en), tdoc = (), tdocdefault = (en)");
// Change default language to "fr"
AdministrationPage adminPage = AdministrationPage.gotoPage();
LocalizationAdministrationSectionPage sectionPage = adminPage.clickLocalizationSection();
sectionPage.setDefaultLanguage("fr");
sectionPage.clickSave();
// Now language must be "fr"
vp = getUtil().gotoPage("Test", "LanguageTest");
Assert.assertTrue("Page not in French!", isPageInFrench());
Assert.assertEquals("Invalid content", vp.getContent(),
"context = (fr), doc = (), default = (en), tdoc = (), tdocdefault = (en)");
}
@Test
@IgnoreBrowser(value = "internet.*", version = "8\\.*", reason = "See http://jira.xwiki.org/browse/XE-1146")
public void testPassingLanguageInRequestHasNoEffectInMonoligualMode()
{
getUtil().gotoPage("Main", "WebHome", "view", "language=fr");
Assert.assertTrue("Page not in English!", isPageInEnglish());
}
@Test
@IgnoreBrowser(value = "internet.*", version = "8\\.*", reason = "See http://jira.xwiki.org/browse/XE-1146")
public void testChangeLanguageInMultilingualModeUsingTheLanguageRequestParameter()
{
setLanguageSettings(true, "en");
getUtil().gotoPage("Main", "WebHome", "view", "language=fr");
Assert.assertTrue("Page not in French!", isPageInFrench());
}
@Test
@IgnoreBrowser(value = "internet.*", version = "8\\.*", reason = "See http://jira.xwiki.org/browse/XE-1146")
public void testHeaderCorrectLanguage()
{
setLanguageSettings(true, "en");
getUtil().gotoPage("Main", "Test", "view");
checkLanguageTagsArePresent("en");
getUtil().gotoPage("Main", "Test", "view", "language=fr");
checkLanguageTagsArePresent("fr");
}
@Test
public void testTranslateDocument() throws Exception
{
LocalDocumentReference referenceDEFAULT = new LocalDocumentReference("LanguageTest", "Page");
LocalDocumentReference referenceFR = new LocalDocumentReference(referenceDEFAULT, Locale.FRENCH);
// Cleanup
getUtil().rest().delete(referenceFR);
getUtil().rest().delete(referenceDEFAULT);
// Set 2 locales
setLanguageSettings(true, "en", Arrays.asList("en", "fr"));
// Create default version
ViewPage viewPage = getUtil().createPage("LanguageTest", "Page", "en content", "en title");
// Edit the page
WikiEditPage editPage = viewPage.editWiki();
// Make sure current translation is the right one
assertTrue(getDriver().hasElement(By.xpath("//strong[text()='You are editing the original page (en).']")));
assertEquals(Arrays.asList(Locale.FRENCH), editPage.getNotExistingLocales());
assertEquals(Arrays.asList(), editPage.getExistingLocales());
// Translated to French
editPage = editPage.clickTranslate("fr");
editPage.setTitle("titre fr");
editPage.setContent("contenu fr");
viewPage = editPage.clickSaveAndView();
// Make sure both have the right content
Page page = getUtil().rest().get(referenceFR);
assertEquals("titre fr", page.getTitle());
assertEquals("contenu fr", page.getContent());
page = getUtil().rest().get(referenceDEFAULT);
assertEquals("en title", page.getTitle());
assertEquals("en content", page.getContent());
// Make sure two locales are listed for this page in the UI
assertEquals(new HashSet<>(Arrays.asList(Locale.ENGLISH, Locale.FRENCH)), new HashSet<>(viewPage.getLocales()));
// Switch to en
viewPage.clickLocale(Locale.ENGLISH);
// Verify edit mode informations
editPage = viewPage.editWiki();
assertEquals(Arrays.asList(), editPage.getNotExistingLocales());
assertEquals(Arrays.asList(Locale.FRENCH), editPage.getExistingLocales());
}
/**
* Assert that the given <code>language</code> is present in various attributes and tags on the page
*
* @param language the language to use, should be a valid language, e.g. "en"
*/
private void checkLanguageTagsArePresent(String language)
{
WebElement html = getDriver().findElement(By.tagName("html"));
Assert.assertEquals(language, html.getAttribute("lang"));
Assert.assertEquals(language, html.getAttribute("xml:lang"));
ViewPage vp = new ViewPage();
Assert.assertEquals("locale=" + language, vp.getHTMLMetaDataValue("gwt:property"));
// For retro-compatibility only
Assert.assertEquals(language, vp.getHTMLMetaDataValue("language"));
String content = getDriver().getPageSource();
Assert.assertTrue(content.contains("language=" + language));
}
/**
* Check if the currently displayed page is in English, by looking at the "Log-Out" link
*/
private boolean isPageInEnglish()
{
return getDriver().findElement(By.className("xdocLastModification")).getText().toLowerCase()
.contains("last modified by");
}
/**
* Check if the currently displayed page is in French, by looking at the "Log-Out" link
*/
private boolean isPageInFrench()
{
return getDriver().findElement(By.className("xdocLastModification")).getText().toLowerCase()
.contains("modifié par");
}
private static void setLanguageSettings(boolean isMultiLingual, String defaultLanguage)
{
setLanguageSettings(isMultiLingual, defaultLanguage, null);
}
private static void setLanguageSettings(boolean isMultiLingual, String defaultLanguage,
List<String> supportedLanguages)
{
AdministrationPage adminPage = AdministrationPage.gotoPage();
LocalizationAdministrationSectionPage sectionPage = adminPage.clickLocalizationSection();
sectionPage.setMultiLingual(isMultiLingual);
if (defaultLanguage != null) {
sectionPage.setDefaultLanguage(defaultLanguage);
}
if (supportedLanguages != null) {
sectionPage.setSupportedLanguages(supportedLanguages);
}
sectionPage.clickSave();
}
}
|
xwiki-enterprise-test/xwiki-enterprise-test-ui/src/test/it/org/xwiki/test/ui/LanguageTest.java
|
/*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.xwiki.test.ui;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.xwiki.administration.test.po.AdministrationPage;
import org.xwiki.administration.test.po.LocalizationAdministrationSectionPage;
import org.xwiki.model.reference.LocalDocumentReference;
import org.xwiki.rest.model.jaxb.Page;
import org.xwiki.test.ui.browser.IgnoreBrowser;
import org.xwiki.test.ui.po.ViewPage;
import org.xwiki.test.ui.po.editor.WikiEditPage;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* Verify the ability to change the language.
*
* @version $Id$
* @since 2.4RC1
*/
public class LanguageTest extends AbstractTest
{
@Rule
public AdminAuthenticationRule adminAuthenticationRule = new AdminAuthenticationRule(getUtil());
/**
* Make sure we set back the language to monolingual and english for other tests that come thereafter
*/
@AfterClass
public static void afterClass()
{
setLanguageSettings(false, "en");
}
/**
* Ensure the default language is English and that the wiki is in monolingual mode
*/
@Before
public void before() throws Exception
{
setLanguageSettings(false, "en");
}
@Test
@IgnoreBrowser(value = "internet.*", version = "8\\.*", reason = "See http://jira.xwiki.org/browse/XE-1146")
public void testChangeLanguageInMonolingualModeUsingTheAdministrationPreference()
{
WikiEditPage edit = WikiEditPage.gotoPage("Test", "LanguageTest");
edit.setContent("{{velocity}}context = ($xcontext.language), doc = ($doc.language), "
+ "default = ($doc.defaultLanguage), tdoc = ($tdoc.language), "
+ "tdocdefault = ($tdoc.defaultLanguage){{/velocity}}");
ViewPage vp = edit.clickSaveAndView();
// Current language must be "en"
Assert.assertEquals("Invalid content", vp.getContent(),
"context = (en), doc = (), default = (en), tdoc = (), tdocdefault = (en)");
// Change default language to "fr"
AdministrationPage adminPage = AdministrationPage.gotoPage();
LocalizationAdministrationSectionPage sectionPage = adminPage.clickLocalizationSection();
sectionPage.setDefaultLanguage("fr");
sectionPage.clickSave();
// Now language must be "fr"
vp = getUtil().gotoPage("Test", "LanguageTest");
Assert.assertTrue("Page not in French!", isPageInFrench());
Assert.assertEquals("Invalid content", vp.getContent(),
"context = (fr), doc = (), default = (en), tdoc = (), tdocdefault = (en)");
}
@Test
@IgnoreBrowser(value = "internet.*", version = "8\\.*", reason = "See http://jira.xwiki.org/browse/XE-1146")
public void testPassingLanguageInRequestHasNoEffectInMonoligualMode()
{
getUtil().gotoPage("Main", "WebHome", "view", "language=fr");
Assert.assertTrue("Page not in English!", isPageInEnglish());
}
@Test
@IgnoreBrowser(value = "internet.*", version = "8\\.*", reason = "See http://jira.xwiki.org/browse/XE-1146")
public void testChangeLanguageInMultilingualModeUsingTheLanguageRequestParameter()
{
setLanguageSettings(true, "en");
getUtil().gotoPage("Main", "WebHome", "view", "language=fr");
Assert.assertTrue("Page not in French!", isPageInFrench());
}
@Test
@IgnoreBrowser(value = "internet.*", version = "8\\.*", reason = "See http://jira.xwiki.org/browse/XE-1146")
public void testHeaderCorrectLanguage()
{
setLanguageSettings(true, "en");
// if we don't use language=default, the value stored in cookies is used, which might be wrong
getUtil().gotoPage("Main", "Test", "view", "language=default");
checkLanguageTagsArePresent("en");
getUtil().gotoPage("Main", "Test", "view", "language=fr");
checkLanguageTagsArePresent("fr");
}
@Test
public void testTranslateDocument() throws Exception
{
LocalDocumentReference referenceDEFAULT = new LocalDocumentReference("LanguageTest", "Page");
LocalDocumentReference referenceFR = new LocalDocumentReference(referenceDEFAULT, Locale.FRENCH);
// Cleanup
getUtil().rest().delete(referenceFR);
getUtil().rest().delete(referenceDEFAULT);
// Set 2 locales
setLanguageSettings(true, "en", Arrays.asList("en", "fr"));
// Create default version
ViewPage viewPage = getUtil().createPage("LanguageTest", "Page", "en content", "en title");
// Edit the page
WikiEditPage editPage = viewPage.editWiki();
// Make sure current translation is the right one
assertTrue(getDriver().hasElement(By.xpath("//strong[text()='You are editing the original page (en).']")));
assertEquals(Arrays.asList(Locale.FRENCH), editPage.getNotExistingLocales());
assertEquals(Arrays.asList(), editPage.getExistingLocales());
// Translated to French
editPage = editPage.clickTranslate("fr");
editPage.setTitle("titre fr");
editPage.setContent("contenu fr");
viewPage = editPage.clickSaveAndView();
// Make sure both have the right content
Page page = getUtil().rest().get(referenceFR);
assertEquals("titre fr", page.getTitle());
assertEquals("contenu fr", page.getContent());
page = getUtil().rest().get(referenceDEFAULT);
assertEquals("en title", page.getTitle());
assertEquals("en content", page.getContent());
// Make sure two locales are listed for this page in the UI
assertEquals(new HashSet<>(Arrays.asList(Locale.ENGLISH, Locale.FRENCH)), new HashSet<>(viewPage.getLocales()));
// Switch to en
viewPage.clickLocale(Locale.ENGLISH);
// Verify edit mode informations
editPage = viewPage.editWiki();
assertEquals(Arrays.asList(), editPage.getNotExistingLocales());
assertEquals(Arrays.asList(Locale.FRENCH), editPage.getExistingLocales());
}
/**
* Assert that the given <code>language</code> is present in various attributes and tags on the page
*
* @param language the language to use, should be a valid language, e.g. "en"
*/
private void checkLanguageTagsArePresent(String language)
{
WebElement html = getDriver().findElement(By.tagName("html"));
Assert.assertEquals(language, html.getAttribute("lang"));
Assert.assertEquals(language, html.getAttribute("xml:lang"));
ViewPage vp = new ViewPage();
Assert.assertEquals("locale=" + language, vp.getHTMLMetaDataValue("gwt:property"));
// For retro-compatibility only
Assert.assertEquals(language, vp.getHTMLMetaDataValue("language"));
String content = getDriver().getPageSource();
Assert.assertTrue(content.contains("language=" + language));
}
/**
* Check if the currently displayed page is in English, by looking at the "Log-Out" link
*/
private boolean isPageInEnglish()
{
return getDriver().findElement(By.className("xdocLastModification")).getText().toLowerCase()
.contains("last modified by");
}
/**
* Check if the currently displayed page is in French, by looking at the "Log-Out" link
*/
private boolean isPageInFrench()
{
return getDriver().findElement(By.className("xdocLastModification")).getText().toLowerCase()
.contains("modifié par");
}
private static void setLanguageSettings(boolean isMultiLingual, String defaultLanguage)
{
setLanguageSettings(isMultiLingual, defaultLanguage, null);
}
private static void setLanguageSettings(boolean isMultiLingual, String defaultLanguage,
List<String> supportedLanguages)
{
AdministrationPage adminPage = AdministrationPage.gotoPage();
LocalizationAdministrationSectionPage sectionPage = adminPage.clickLocalizationSection();
sectionPage.setMultiLingual(isMultiLingual);
if (defaultLanguage != null) {
sectionPage.setDefaultLanguage(defaultLanguage);
}
if (supportedLanguages != null) {
sectionPage.setSupportedLanguages(supportedLanguages);
}
sectionPage.clickSave();
}
}
|
[misc] Fix test
|
xwiki-enterprise-test/xwiki-enterprise-test-ui/src/test/it/org/xwiki/test/ui/LanguageTest.java
|
[misc] Fix test
|
|
Java
|
apache-2.0
|
f4653acb60c94f3a49ab9d0cdf577dc5bb240581
| 0
|
pspaude/uPortal,ASU-Capstone/uPortal,phillips1021/uPortal,doodelicious/uPortal,joansmith/uPortal,EdiaEducationTechnology/uPortal,Jasig/uPortal,joansmith/uPortal,Jasig/uPortal-start,EsupPortail/esup-uportal,EsupPortail/esup-uportal,phillips1021/uPortal,jameswennmacher/uPortal,groybal/uPortal,jhelmer-unicon/uPortal,joansmith/uPortal,mgillian/uPortal,apetro/uPortal,timlevett/uPortal,Mines-Albi/esup-uportal,ChristianMurphy/uPortal,phillips1021/uPortal,jl1955/uPortal5,joansmith/uPortal,EdiaEducationTechnology/uPortal,groybal/uPortal,groybal/uPortal,ASU-Capstone/uPortal-Forked,kole9273/uPortal,ChristianMurphy/uPortal,andrewstuart/uPortal,vertein/uPortal,jameswennmacher/uPortal,stalele/uPortal,Jasig/SSP-Platform,drewwills/uPortal,GIP-RECIA/esco-portail,pspaude/uPortal,groybal/uPortal,andrewstuart/uPortal,drewwills/uPortal,vertein/uPortal,apetro/uPortal,cousquer/uPortal,ASU-Capstone/uPortal-Forked,mgillian/uPortal,Jasig/SSP-Platform,ASU-Capstone/uPortal,vertein/uPortal,chasegawa/uPortal,GIP-RECIA/esco-portail,cousquer/uPortal,phillips1021/uPortal,jl1955/uPortal5,Mines-Albi/esup-uportal,doodelicious/uPortal,doodelicious/uPortal,phillips1021/uPortal,jhelmer-unicon/uPortal,andrewstuart/uPortal,chasegawa/uPortal,chasegawa/uPortal,MichaelVose2/uPortal,pspaude/uPortal,MichaelVose2/uPortal,vertein/uPortal,MichaelVose2/uPortal,doodelicious/uPortal,jameswennmacher/uPortal,EsupPortail/esup-uportal,bjagg/uPortal,ASU-Capstone/uPortal-Forked,andrewstuart/uPortal,kole9273/uPortal,Mines-Albi/esup-uportal,GIP-RECIA/esup-uportal,stalele/uPortal,mgillian/uPortal,EdiaEducationTechnology/uPortal,Mines-Albi/esup-uportal,stalele/uPortal,jhelmer-unicon/uPortal,vbonamy/esup-uportal,Jasig/uPortal-start,apetro/uPortal,jl1955/uPortal5,jl1955/uPortal5,GIP-RECIA/esco-portail,GIP-RECIA/esup-uportal,andrewstuart/uPortal,vbonamy/esup-uportal,timlevett/uPortal,drewwills/uPortal,EdiaEducationTechnology/uPortal,apetro/uPortal,chasegawa/uPortal,MichaelVose2/uPortal,ASU-Capstone/uPortal-Forked,ASU-Capstone/uPo
rtal,kole9273/uPortal,pspaude/uPortal,chasegawa/uPortal,apetro/uPortal,jonathanmtran/uPortal,jonathanmtran/uPortal,Jasig/SSP-Platform,jameswennmacher/uPortal,stalele/uPortal,GIP-RECIA/esup-uportal,EsupPortail/esup-uportal,drewwills/uPortal,Jasig/uPortal,joansmith/uPortal,vbonamy/esup-uportal,vbonamy/esup-uportal,MichaelVose2/uPortal,stalele/uPortal,jameswennmacher/uPortal,kole9273/uPortal,jonathanmtran/uPortal,timlevett/uPortal,timlevett/uPortal,kole9273/uPortal,ASU-Capstone/uPortal,groybal/uPortal,EsupPortail/esup-uportal,Jasig/SSP-Platform,GIP-RECIA/esup-uportal,jhelmer-unicon/uPortal,jl1955/uPortal5,vbonamy/esup-uportal,cousquer/uPortal,GIP-RECIA/esup-uportal,ChristianMurphy/uPortal,Mines-Albi/esup-uportal,jhelmer-unicon/uPortal,ASU-Capstone/uPortal-Forked,doodelicious/uPortal,bjagg/uPortal,Jasig/uPortal,bjagg/uPortal,Jasig/SSP-Platform,ASU-Capstone/uPortal
|
/**
* Copyright 2001 The JA-SIG Collaborative. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. Redistributions of any form whatsoever must retain the following
* acknowledgment:
* "This product includes software developed by the JA-SIG Collaborative
* (http://www.jasig.org/)."
*
* THIS SOFTWARE IS PROVIDED BY THE JA-SIG COLLABORATIVE "AS IS" AND ANY
* EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE JA-SIG COLLABORATIVE OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*
*
* formatted with JxBeauty (c) johann.langhofer@nextra.at
*/
package org.jasig.portal.security.provider;
import org.jasig.portal.security.PermissionManager;
import org.jasig.portal.security.Permission;
import org.jasig.portal.RdbmServices;
import org.jasig.portal.services.LogService;
import org.jasig.portal.AuthorizationException;
import java.sql.Connection;
import java.sql.Statement;
import java.sql.ResultSet;
import java.util.ArrayList;
/**
 * Reference {@link PermissionManager} implementation backed by the UP_PERMISSIONS table.
 * Null principal/activity/target/type values are mapped to the wildcard "*".
 *
 * @author Bernie Durfee (bdurfee@interactivebusiness.com)
 */
public class ReferencePermissionManager extends PermissionManager {

    /**
     * This constructor ensures that the PermissionManager will be created with an owner specified.
     * @param owner identifier of the permission owner
     */
    public ReferencePermissionManager (String owner) {
        // Make sure to call the constructor of the PermissionManager class
        super(owner);
    }

    /**
     * Add a new Permission to the system. Any null component of the permission is stored as
     * the wildcard "*"; EFFECTIVE and EXPIRES are always stored as NULL.
     * @param newPermission the permission to persist
     */
    public void setPermission (Permission newPermission) {
        Connection connection = RdbmServices.getConnection();
        java.sql.PreparedStatement statement = null;
        try {
            // Bind variables instead of string concatenation: values containing quote characters
            // can no longer break the statement (or inject SQL).
            statement = connection.prepareStatement(
                "INSERT INTO UP_PERMISSIONS (OWNER, PRINCIPAL, ACTIVITY, TARGET, PERMISSION_TYPE, EFFECTIVE, EXPIRES) "
                + "VALUES (?, ?, ?, ?, ?, null, null)");
            statement.setString(1, m_owner);
            statement.setString(2, newPermission.getPrincipal() == null ? "*" : newPermission.getPrincipal());
            statement.setString(3, newPermission.getActivity() == null ? "*" : newPermission.getActivity());
            statement.setString(4, newPermission.getTarget() == null ? "*" : newPermission.getTarget());
            statement.setString(5, newPermission.getType() == null ? "*" : newPermission.getType());
            statement.executeUpdate();
        } catch (Exception e) {
            // NOTE(review): preserved from the original -- callers do not expect this method to
            // throw, so failures are only reported on stderr.
            e.printStackTrace();
        } finally {
            if (statement != null) {
                try {
                    statement.close();
                } catch (Exception ignored) {
                    // Best-effort close; the connection is released below regardless.
                }
            }
            RdbmServices.releaseConnection(connection);
        }
    }

    /**
     * Add a new set of Permission objects to the system.
     * @param newPermissions the permissions to persist, one insert each
     */
    public void setPermissions (Permission[] newPermissions) {
        for (int i = 0; i < newPermissions.length; i++) {
            setPermission(newPermissions[i]);
        }
    }

    /**
     * Retrieve an array of Permission objects based on the given parameters. Any null parameters
     * will be ignored. So to retrieve a set of Permission objects for a given principal you would call
     * this method like pm.getPermissions('principal name', null, null, null)
     * @param principal principal to match, or null for the wildcard "*"
     * @param activity activity to match, or null for the wildcard "*"
     * @param target target to match, or null for the wildcard "*"
     * @param type permission type to match, or null for the wildcard "*"
     * @return the matching permissions, possibly an empty array
     * @exception AuthorizationException if the lookup fails
     */
    public Permission[] getPermissions (String principal, String activity, String target, String type) throws AuthorizationException {
        // Null parameters match the wildcard rows.
        if (principal == null) {
            principal = "*";
        }
        if (activity == null) {
            activity = "*";
        }
        if (target == null) {
            target = "*";
        }
        if (type == null) {
            type = "*";
        }
        Connection connection = RdbmServices.getConnection();
        java.sql.PreparedStatement statement = null;
        ResultSet rs = null;
        try {
            // Prepared statement with bind variables (the original concatenated the caller-supplied
            // values straight into the SQL text). The upper-casing of all match values is preserved.
            String query = "SELECT * FROM UP_PERMISSIONS WHERE OWNER = ? AND PRINCIPAL = ? "
                + "AND ACTIVITY = ? AND TARGET = ? AND PERMISSION_TYPE = ?";
            LogService.log(LogService.DEBUG, query);
            statement = connection.prepareStatement(query);
            statement.setString(1, m_owner.toUpperCase());
            statement.setString(2, principal.toUpperCase());
            statement.setString(3, activity.toUpperCase());
            statement.setString(4, target.toUpperCase());
            statement.setString(5, type.toUpperCase());
            rs = statement.executeQuery();
            ArrayList permissions = new ArrayList();
            while (rs.next()) {
                Permission permission = new ReferencePermission(m_owner);
                permission.setPrincipal(rs.getString("PRINCIPAL"));
                permission.setActivity(rs.getString("ACTIVITY"));
                permission.setTarget(rs.getString("TARGET"));
                permission.setType(rs.getString("PERMISSION_TYPE"));
                permission.setEffective(rs.getDate("EFFECTIVE"));
                permission.setExpires(rs.getDate("EXPIRES"));
                permissions.add(permission);
            }
            // Return the array of permissions
            return ((Permission[])permissions.toArray(new Permission[0]));
        } catch (Exception e) {
            throw new AuthorizationException(e.getMessage());
        } finally {
            // Close JDBC resources before handing the connection back to the pool.
            if (rs != null) {
                try {
                    rs.close();
                } catch (Exception ignored) {
                    // Best-effort close.
                }
            }
            if (statement != null) {
                try {
                    statement.close();
                } catch (Exception ignored) {
                    // Best-effort close.
                }
            }
            RdbmServices.releaseConnection(connection);
        }
    }
}
|
source/org/jasig/portal/security/provider/ReferencePermissionManager.java
|
/**
* Copyright 2001 The JA-SIG Collaborative. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. Redistributions of any form whatsoever must retain the following
* acknowledgment:
* "This product includes software developed by the JA-SIG Collaborative
* (http://www.jasig.org/)."
*
* THIS SOFTWARE IS PROVIDED BY THE JA-SIG COLLABORATIVE "AS IS" AND ANY
* EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE JA-SIG COLLABORATIVE OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*
*
* formatted with JxBeauty (c) johann.langhofer@nextra.at
*/
package org.jasig.portal.security.provider;
import org.jasig.portal.security.PermissionManager;
import org.jasig.portal.security.Permission;
import org.jasig.portal.RdbmServices;
import org.jasig.portal.services.LogService;
import org.jasig.portal.AuthorizationException;
import java.sql.Connection;
import java.sql.Statement;
import java.sql.ResultSet;
import java.util.ArrayList;
/**
 * Reference {@link PermissionManager} implementation backed by the UP_PERMISSIONS table.
 * Null principal/activity/target/type values are mapped to the wildcard "*".
 *
 * @author Bernie Durfee (bdurfee@interactivebusiness.com)
 */
public class ReferencePermissionManager extends PermissionManager {

    /**
     * This constructor ensures that the PermissionManager will be created with an owner specified.
     * @param owner identifier of the permission owner
     */
    public ReferencePermissionManager (String owner) {
        // Make sure to call the constructor of the PermissionManager class
        super(owner);
    }

    /**
     * Add a new Permission to the system. Any null component is stored as the wildcard "*".
     * NOTE(review): the SQL is still built by string concatenation, so values containing quote
     * characters will break the statement -- consider a PreparedStatement.
     * @param newPermission the permission to persist
     */
    public void setPermission (Permission newPermission) {
        Connection connection = RdbmServices.getConnection();
        Statement statement = null;
        try {
            StringBuffer updateStatement = new StringBuffer(1000);
            updateStatement.append("INSERT INTO UP_PERMISSIONS (OWNER, PRINCIPAL, ACTIVITY, TARGET, PERMISSION_TYPE, EFFECTIVE, EXPIRES) VALUES (");
            updateStatement.append("'" + m_owner + "',");
            if (newPermission.getPrincipal() != null) {
                updateStatement.append("'" + newPermission.getPrincipal() + "',");
            }
            else {
                updateStatement.append("'*',");
            }
            if (newPermission.getActivity() != null) {
                updateStatement.append("'" + newPermission.getActivity() + "',");
            }
            else {
                updateStatement.append("'*',");
            }
            if (newPermission.getTarget() != null) {
                updateStatement.append("'" + newPermission.getTarget() + "',");
            }
            else {
                updateStatement.append("'*',");
            }
            if (newPermission.getType() != null) {
                updateStatement.append("'" + newPermission.getType() + "',");
            }
            else {
                updateStatement.append("'*',");
            }
            updateStatement.append("null, null");
            updateStatement.append(")");
            statement = connection.createStatement();
            statement.executeUpdate(updateStatement.toString());
        } catch (Exception e) {
            // NOTE(review): preserved from the original -- callers do not expect this method to
            // throw, so failures are only reported on stderr.
            e.printStackTrace();
        } finally {
            if (statement != null) {
                try {
                    statement.close();
                } catch (Exception ignored) {
                    // Best-effort close; the connection is released below regardless.
                }
            }
            RdbmServices.releaseConnection(connection);
        }
    }

    /**
     * Add a new set of Permission objects to the system.
     * @param newPermissions the permissions to persist, one insert each
     */
    public void setPermissions (Permission[] newPermissions) {
        for (int i = 0; i < newPermissions.length; i++) {
            setPermission(newPermissions[i]);
        }
    }

    /**
     * Retrieve an array of Permission objects based on the given parameters. Any null parameters
     * will be ignored. So to retrieve a set of Permission objects for a given principal you would call
     * this method like pm.getPermissions('principal name', null, null, null)
     * @param principal principal to match, or null for the wildcard "*"
     * @param activity activity to match, or null for the wildcard "*"
     * @param target target to match, or null for the wildcard "*"
     * @param type permission type to match, or null for the wildcard "*"
     * @return the matching permissions, possibly an empty array
     * @exception AuthorizationException if the lookup fails
     */
    public Permission[] getPermissions (String principal, String activity, String target, String type) throws AuthorizationException {
        if (principal == null) {
            principal = "*";
        }
        if (activity == null) {
            activity = "*";
        }
        if (target == null) {
            target = "*";
        }
        if (type == null) {
            type = "*";
        }
        Connection connection = RdbmServices.getConnection();
        Statement statement = null;
        ResultSet rs = null;
        try {
            StringBuffer queryString = new StringBuffer(255);
            queryString.append("SELECT * FROM UP_PERMISSIONS WHERE OWNER = '" + m_owner.toUpperCase() + "'");
            queryString.append(" AND PRINCIPAL = '" + principal.toUpperCase() + "'");
            queryString.append(" AND ACTIVITY = '" + activity.toUpperCase() + "'");
            queryString.append(" AND TARGET = '" + target.toUpperCase() + "'");
            queryString.append(" AND PERMISSION_TYPE = '" + type.toUpperCase() + "'");
            statement = connection.createStatement();
            LogService.log(LogService.DEBUG, queryString.toString());
            rs = statement.executeQuery(queryString.toString());
            ArrayList permissions = new ArrayList();
            while (rs.next()) {
                Permission permission = new ReferencePermission(m_owner);
                permission.setPrincipal(rs.getString("PRINCIPAL"));
                permission.setActivity(rs.getString("ACTIVITY"));
                permission.setTarget(rs.getString("TARGET"));
                permission.setType(rs.getString("PERMISSION_TYPE"));
                permission.setEffective(rs.getDate("EFFECTIVE"));
                permission.setExpires(rs.getDate("EXPIRES"));
                permissions.add(permission);
            }
            // Return the array of permissions
            return ((Permission[])permissions.toArray(new Permission[0]));
        } catch (Exception e) {
            throw new AuthorizationException(e.getMessage());
        } finally {
            // Fix: the original had no finally here, so the pooled connection (and the JDBC
            // resources) were never released when this method returned or threw.
            if (rs != null) {
                try {
                    rs.close();
                } catch (Exception ignored) {
                    // Best-effort close.
                }
            }
            if (statement != null) {
                try {
                    statement.close();
                } catch (Exception ignored) {
                    // Best-effort close.
                }
            }
            RdbmServices.releaseConnection(connection);
        }
    }
}
|
fixed unreleased connection
git-svn-id: 477788cc2a8229a747c5b8073e47c1d0f6ec0604@4887 f5dbab47-78f9-eb45-b975-e544023573eb
|
source/org/jasig/portal/security/provider/ReferencePermissionManager.java
|
fixed unreleased connection
|
|
Java
|
apache-2.0
|
4495b5b92f30337aa8ecefaca78b4a6a2852493e
| 0
|
jsbaidwan/QuakeReport
|
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.quakereport;
import android.content.Intent;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ListView;
import java.util.ArrayList;
import java.util.List;
public class EarthquakeActivity extends AppCompatActivity {
/** URL for the earthquake data from USGS dataset */
private static final String UGSG_REQUEST_URL =
"https://earthquake.usgs.gov/fdsnws/event/1/query?" +
"format=geojson&eventtype=earthquake&orderby=time&minmag=6&limit=10";
/** Adapter for the list of earthquakes */
private EarthquakeAdapter mAdapter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.earthquake_activity);
// Find a reference to the {@link ListView} in the layout
ListView earthquakeListView = (ListView) findViewById(R.id.list);
// Create a new adapter that takes an empty list of earthquake as input
mAdapter = new EarthquakeAdapter(this, new ArrayList<Earthquake>());
// Set the adapter on the {@link ListView}
// so the list can be populated in the user interface
earthquakeListView.setAdapter(mAdapter);
earthquakeListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long l) {
// Find the current earthquake that is clicked on
Earthquake currentEarthquake = mAdapter.getItem(position);
// Convert the String URL into URI object (to pass into the Intent constructor)
Uri earthquakeUri = Uri.parse(currentEarthquake.getUrl());
// Create a new intent to view the earthquake URI
Intent websiteIntent = new Intent(Intent.ACTION_VIEW, earthquakeUri);
// Send the intent to launch a new activity
startActivity(websiteIntent);
}
});
}
/**
* {@link AsyncTask} to perform the network request on the background thread and then update
* the UI with the list of earthquake in the response.
*
* AsyncTask has three generic parameters: the input type, a type used for progress updates,
* and an output type. Our task will take a String URL, and return an Earthquake. We don't
* do progress update, so the second parameter is void.
*
* We'll only override two of the method of AsyncTask: donInBackground() and onPostExecute().
* The doInBackground() method runs on a background thread, so it can run long-running code
* (like network activity), without interfering with responsiveness of the app.
* Then the onPostExecute() is passed the result of doInBackground() method, but run on the
* UI thread, so it can used the produced data to update the UI
*/
private class EarthquakeAsyncTask extends AsyncTask<String, Void, List<Earthquake>> {
}
|
app/src/main/java/com/example/android/quakereport/EarthquakeActivity.java
|
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.quakereport;
import android.content.Intent;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ListView;
import java.util.ArrayList;
import java.util.List;
public class EarthquakeActivity extends AppCompatActivity {

    /** URL for the earthquake data from USGS dataset */
    private static final String UGSG_REQUEST_URL =
            "https://earthquake.usgs.gov/fdsnws/event/1/query?" +
            "format=geojson&eventtype=earthquake&orderby=time&minmag=6&limit=10";

    /** Adapter for the list of earthquakes */
    private EarthquakeAdapter mAdapter;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.earthquake_activity);

        // Find a reference to the {@link ListView} in the layout
        ListView earthquakeListView = (ListView) findViewById(R.id.list);

        // Create a new adapter that takes an empty list of earthquake as input
        mAdapter = new EarthquakeAdapter(this, new ArrayList<Earthquake>());

        // Set the adapter on the {@link ListView}
        // so the list can be populated in the user interface
        earthquakeListView.setAdapter(mAdapter);

        earthquakeListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                // Find the current earthquake that is clicked on.
                // Fix: the field is mAdapter; "adapter" was an undefined symbol.
                Earthquake currentEarthquake = mAdapter.getItem(position);

                // Convert the String URL into URI object (to pass into the Intent constructor)
                Uri earthquakeUri = Uri.parse(currentEarthquake.getUrl());

                // Create a new intent to view the earthquake URI
                Intent websiteIntent = new Intent(Intent.ACTION_VIEW, earthquakeUri);

                // Send the intent to launch a new activity
                startActivity(websiteIntent);
            }
        });
    }

    /**
     * {@link AsyncTask} that performs the network request on a background thread and then
     * updates the UI with the list of earthquakes in the response.
     */
    private class EarthquakeAsyncTask extends AsyncTask<String, Void, List<Earthquake>> {

        @Override
        protected List<Earthquake> doInBackground(String... urls) {
            // Fix: a method declared to return List<Earthquake> cannot use a bare "return;".
            if (urls.length < 1 || urls[0] == null) {
                return null;
            }
            // TODO(review): perform the HTTP request and JSON parsing for urls[0] here; the
            // fetching/parsing helper is not visible in this file, so return an empty result.
            return new ArrayList<Earthquake>();
        }

        // Fix: the UI-thread hook that receives doInBackground()'s result is onPostExecute;
        // onPreExecute takes no arguments (and runs before the background work), so the original
        // "onPreExecute(List<Earthquake>)" was not a valid override.
        @Override
        protected void onPostExecute(List<Earthquake> data) {
            mAdapter.clear();
            if (data != null && !data.isEmpty()) {
                mAdapter.addAll(data);
            }
        }
    }
}
|
implementing AsyncTask
|
app/src/main/java/com/example/android/quakereport/EarthquakeActivity.java
|
implementing AsyncTask
|
|
Java
|
apache-2.0
|
8de50f4c2a2047d8600b5b91ea5553f7638bf440
| 0
|
Nastel/tnt4j-stream-gc
|
/*
* Copyright 2014-2015 JKOOL, LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tnt4j.stream.java.gc;
import java.io.IOException;
import java.lang.instrument.Instrumentation;
import java.lang.management.GarbageCollectorMXBean;
import java.util.List;
import javax.management.NotificationEmitter;
import javax.management.NotificationListener;
import com.nastel.jkool.tnt4j.TrackingLogger;
import com.nastel.jkool.tnt4j.utils.Utils;
/**
 * Installs a JMX notification listener on every garbage collector MXBean and records the
 * resulting GC events through a TNT4J {@link TrackingLogger}.
 */
public class GCTracker {

    /** Source name used when the caller does not supply one. */
    private static final String DEFAULT_SOURCE_NAME = "org.tnt4j.stream.java.gc";

    /** Tracking logger instance where all GC tracking messages are recorded. */
    private static TrackingLogger logger;

    /**
     * Create and open the shared tracking logger for the given source name.
     * A failure to open the logger is fatal and surfaces as a RuntimeException.
     */
    protected static void createTracker(String sourceName) {
        try {
            logger = TrackingLogger.getInstance(sourceName);
            logger.open();
        } catch (IOException ioe) {
            throw new RuntimeException(ioe);
        }
    }

    /**
     * @return the shared tracking logger, or null if no tracker has been installed yet
     */
    public static TrackingLogger getTracker() {
        return logger;
    }

    /** Install the tracker under the default source name. */
    public static void installTracker() {
        installTracker(DEFAULT_SOURCE_NAME);
    }

    /**
     * Install the tracker under the given source name and subscribe a GC notification
     * listener to every garbage collector MXBean. Idempotent: a second call is a no-op.
     */
    public static void installTracker(String sourceName) {
        if (logger != null) {
            return;
        }
        createTracker(sourceName);
        List<GarbageCollectorMXBean> collectors =
                java.lang.management.ManagementFactory.getGarbageCollectorMXBeans();
        for (GarbageCollectorMXBean collector : collectors) {
            NotificationListener gcListener = new GCNotificationListener(logger);
            ((NotificationEmitter) collector).addNotificationListener(gcListener, null, null);
        }
    }

    /**
     * Entry point to be loaded as -javaagent:jarpath[=source-name]
     * Example: -javaagent:tnt4j-stream-gc.jar
     *
     * @param options parameters if any
     * @param inst instrumentation handle
     */
    public static void premain(String options, Instrumentation inst) throws IOException {
        if (!Utils.isEmpty(options)) {
            GCTracker.installTracker(options);
        } else {
            GCTracker.installTracker();
        }
        System.out.println("GCTracker: gc.tracker=" + GCTracker.getTracker().getSource());
    }
}
|
src/org/tnt4j/stream/java/gc/GCTracker.java
|
/*
* Copyright 2014-2015 JKOOL, LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tnt4j.stream.java.gc;
import java.io.IOException;
import java.lang.instrument.Instrumentation;
import java.lang.management.GarbageCollectorMXBean;
import java.util.List;
import javax.management.NotificationEmitter;
import javax.management.NotificationListener;
import com.nastel.jkool.tnt4j.TrackingLogger;
/**
 * Installs a JMX notification listener on every garbage collector MXBean and records the
 * resulting GC events through a TNT4J {@link TrackingLogger}.
 */
public class GCTracker {

    /** Source name used when the caller does not supply one (previously hard-coded). */
    private static final String DEFAULT_SOURCE_NAME = "org.tnt4j.stream.java.gc";

    /*
     * Tracking logger instance where all GC tracking messages are recorded.
     */
    private static TrackingLogger logger;

    /** Create and open the shared tracking logger under the default source name. */
    protected static void createTracker() {
        createTracker(DEFAULT_SOURCE_NAME);
    }

    /**
     * Create and open the shared tracking logger for the given source name.
     * A failure to open the logger is fatal and surfaces as a RuntimeException.
     *
     * @param sourceName TNT4J source name to log under
     */
    protected static void createTracker(String sourceName) {
        try {
            logger = TrackingLogger.getInstance(sourceName);
            logger.open();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * @return the shared tracking logger, or null if no tracker has been installed yet
     */
    public static TrackingLogger getTracker() {
        return logger;
    }

    /** Install the tracker under the default source name. */
    public static void installTracker() {
        installTracker(DEFAULT_SOURCE_NAME);
    }

    /**
     * Install the tracker under the given source name and subscribe a GC notification
     * listener to every garbage collector MXBean. Idempotent: a second call is a no-op.
     *
     * @param sourceName TNT4J source name to log under
     */
    public static void installTracker(String sourceName) {
        if (logger == null) {
            createTracker(sourceName);
            List<GarbageCollectorMXBean> gcbeans = java.lang.management.ManagementFactory.getGarbageCollectorMXBeans();
            for (GarbageCollectorMXBean gcbean : gcbeans) {
                NotificationEmitter emitter = (NotificationEmitter) gcbean;
                NotificationListener listener = new GCNotificationListener(logger);
                emitter.addNotificationListener(listener, null, null);
            }
        }
    }

    /**
     * Entry point to be loaded as -javaagent:jarpath[=source-name]
     * Example: -javaagent:tnt4j-stream-gc.jar
     *
     * Generalization: the agent options string, previously ignored, now overrides the
     * default source name so several agents can stream to distinct sources.
     *
     * @param options optional source name to log under
     * @param inst instrumentation handle
     */
    public static void premain(String options, Instrumentation inst) throws IOException {
        if (options == null || options.trim().length() == 0) {
            GCTracker.installTracker();
        } else {
            GCTracker.installTracker(options);
        }
        System.out.println("GCTracker: gc.tracker=" + GCTracker.getTracker().getSource());
    }
}
|
ability to override source name for -javaagent
ability to override source name for -javaagent
|
src/org/tnt4j/stream/java/gc/GCTracker.java
|
ability to override source name for -javaagent
|
|
Java
|
apache-2.0
|
86a34ab77953b9d77e0cb2c4575f9240b73590a1
| 0
|
kalaspuffar/pdfbox,apache/pdfbox,kalaspuffar/pdfbox,apache/pdfbox
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.pdmodel.common;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.pdfbox.cos.COSArray;
import org.apache.pdfbox.cos.COSBase;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.cos.COSString;
/**
* This class represents a node in a name tree.
*
* @author Ben Litchfield
*
* @param <T> The type of the values in this name tree.
*/
public abstract class PDNameTreeNode<T extends COSObjectable> implements COSObjectable
{
private static final Log LOG = LogFactory.getLog(PDNameTreeNode.class);
private final COSDictionary node;
private PDNameTreeNode<T> parent;
/**
* Constructor.
*/
protected PDNameTreeNode()
{
node = new COSDictionary();
}
/**
* Constructor.
*
* @param dict The dictionary that holds the name information.
*/
protected PDNameTreeNode( COSDictionary dict )
{
node = dict;
}
/**
* Convert this standard java object to a COS object.
*
* @return The cos object that matches this Java object.
*/
@Override
public COSDictionary getCOSObject()
{
return node;
}
/**
* Returns the parent node.
*
* @return parent node
*/
public PDNameTreeNode<T> getParent()
{
return parent;
}
/**
* Sets the parent to the given node.
*
* @param parentNode the node to be set as parent
*/
public void setParent(PDNameTreeNode<T> parentNode)
{
parent = parentNode;
calculateLimits();
}
/**
* Determines if this is a root node or not.
*
* @return true if this is a root node
*/
public boolean isRootNode()
{
return parent == null;
}
/**
* Return the children of this node. This list will contain PDNameTreeNode objects.
*
* @return The list of children or null if there are no children.
*/
public List<PDNameTreeNode<T>> getKids()
{
List<PDNameTreeNode<T>> retval = null;
COSArray kids = (COSArray)node.getDictionaryObject( COSName.KIDS );
if( kids != null )
{
List<PDNameTreeNode<T>> pdObjects = new ArrayList<>();
for( int i=0; i<kids.size(); i++ )
{
pdObjects.add( createChildNode( (COSDictionary)kids.getObject(i) ) );
}
retval = new COSArrayList<>(pdObjects, kids);
}
return retval;
}
/**
* Set the children of this named tree.
*
* @param kids The children of this named tree.
*/
public void setKids( List<? extends PDNameTreeNode<T>> kids )
{
if (kids != null && kids.size() > 0)
{
for (PDNameTreeNode<T> kidsNode : kids)
{
kidsNode.setParent(this);
}
node.setItem( COSName.KIDS, COSArrayList.converterToCOSArray( kids ) );
// root nodes with kids don't have Names
if (isRootNode())
{
node.setItem(COSName.NAMES, null);
}
}
else
{
// remove kids
node.setItem(COSName.KIDS, null);
// remove Limits
node.setItem(COSName.LIMITS, null);
}
calculateLimits();
}
private void calculateLimits()
{
if (isRootNode())
{
node.setItem(COSName.LIMITS, null);
}
else
{
List<PDNameTreeNode<T>> kids = getKids();
if (kids != null && kids.size() > 0)
{
PDNameTreeNode<T> firstKid = kids.get(0);
PDNameTreeNode<T> lastKid = kids.get(kids.size() - 1);
String lowerLimit = firstKid.getLowerLimit();
setLowerLimit(lowerLimit);
String upperLimit = lastKid.getUpperLimit();
setUpperLimit(upperLimit);
}
else
{
try
{
Map<String, T> names = getNames();
if (names != null && names.size() > 0)
{
Set<String> strings = names.keySet();
String[] keys = strings.toArray(new String[strings.size()]);
String lowerLimit = keys[0];
setLowerLimit(lowerLimit);
String upperLimit = keys[keys.length-1];
setUpperLimit(upperLimit);
}
else
{
node.setItem(COSName.LIMITS, null);
}
}
catch (IOException exception)
{
node.setItem(COSName.LIMITS, null);
LOG.error("Error while calculating the Limits of a PageNameTreeNode:", exception);
}
}
}
}
/**
* The name to retrieve.
*
* @param name The name in the tree.
* @return The value of the name in the tree.
* @throws IOException If an there is a problem creating the destinations.
*/
public T getValue( String name ) throws IOException
{
T retval = null;
Map<String, T> names = getNames();
if( names != null )
{
retval = names.get( name );
}
else
{
List<PDNameTreeNode<T>> kids = getKids();
if (kids != null)
{
for( int i=0; i<kids.size() && retval == null; i++ )
{
PDNameTreeNode<T> childNode = kids.get( i );
String upperLimit = childNode.getUpperLimit();
String lowerLimit = childNode.getLowerLimit();
if (upperLimit == null || lowerLimit == null ||
upperLimit.compareTo(lowerLimit) < 0 ||
(lowerLimit.compareTo(name) <= 0 && upperLimit.compareTo(name) >= 0))
{
retval = childNode.getValue( name );
}
}
}
else
{
LOG.warn("NameTreeNode does not have \"names\" nor \"kids\" objects.");
}
}
return retval;
}
/**
* This will return a map of names on this level. The key will be a string,
* and the value will depend on where this class is being used.
*
* @return ordered map of COS objects or <code>null</code> if the dictionary
* contains no 'Names' entry on this level.
*
* @throws IOException If there is an error while creating the sub types.
* @see #getKids()
*/
public Map<String, T> getNames() throws IOException
{
COSArray namesArray = (COSArray)node.getDictionaryObject( COSName.NAMES );
if( namesArray != null )
{
Map<String, T> names = new LinkedHashMap<>();
for( int i=0; i<namesArray.size(); i+=2 )
{
COSString key = (COSString)namesArray.getObject(i);
COSBase cosValue = namesArray.getObject( i+1 );
names.put( key.getString(), convertCOSToPD(cosValue) );
}
return Collections.unmodifiableMap(names);
}
else
{
return null;
}
}
/**
* Method to convert the COS value in the name tree to the PD Model object. The
* default implementation will simply return the given COSBase object.
* Subclasses should do something specific.
*
* @param base The COS object to convert.
* @return The converted PD Model object.
* @throws IOException If there is an error during creation.
*/
protected abstract T convertCOSToPD( COSBase base ) throws IOException;
/**
* Create a child node object.
*
* @param dic The dictionary for the child node object to refer to.
* @return The new child node object.
*/
protected abstract PDNameTreeNode<T> createChildNode( COSDictionary dic );
/**
* Set the names of for this node. The keys should be java.lang.String and the
* values must be a COSObjectable. This method will set the appropriate upper and lower
* limits based on the keys in the map.
*
* @param names map of names to objects, or <code>null</code>
*/
public void setNames( Map<String, T> names )
{
if( names == null )
{
node.setItem( COSName.NAMES, (COSObjectable)null );
node.setItem( COSName.LIMITS, (COSObjectable)null);
}
else
{
COSArray array = new COSArray();
List<String> keys = new ArrayList<>(names.keySet());
Collections.sort(keys);
for (String key : keys)
{
array.add(new COSString(key));
array.add(names.get(key));
}
node.setItem(COSName.NAMES, array);
calculateLimits();
}
}
/**
* Get the highest value for a key in the name map.
*
* @return The highest value for a key in the map.
*/
public String getUpperLimit()
{
String retval = null;
COSArray arr = (COSArray)node.getDictionaryObject( COSName.LIMITS );
if( arr != null )
{
retval = arr.getString( 1 );
}
return retval;
}
/**
* Set the highest value for the key in the map.
*
* @param upper The new highest value for a key in the map.
*/
private void setUpperLimit( String upper )
{
COSArray arr = (COSArray)node.getDictionaryObject( COSName.LIMITS );
if( arr == null )
{
arr = new COSArray();
arr.add( null );
arr.add( null );
node.setItem(COSName.LIMITS, arr);
}
arr.setString( 1, upper );
}
/**
* Get the lowest value for a key in the name map.
*
* @return The lowest value for a key in the map.
*/
public String getLowerLimit()
{
String retval = null;
COSArray arr = (COSArray)node.getDictionaryObject( COSName.LIMITS );
if( arr != null )
{
retval = arr.getString( 0 );
}
return retval;
}
/**
* Set the lowest value for the key in the map.
*
* @param lower The new lowest value for a key in the map.
*/
private void setLowerLimit( String lower )
{
COSArray arr = (COSArray)node.getDictionaryObject( COSName.LIMITS );
if( arr == null )
{
arr = new COSArray();
arr.add( null );
arr.add( null );
node.setItem(COSName.LIMITS, arr);
}
arr.setString( 0, lower );
}
}
|
pdfbox/src/main/java/org/apache/pdfbox/pdmodel/common/PDNameTreeNode.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.pdmodel.common;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.pdfbox.cos.COSArray;
import org.apache.pdfbox.cos.COSBase;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.cos.COSString;
/**
* This class represents a node in a name tree.
*
* @author Ben Litchfield
*
* @param <T> The type of the values in this name tree.
*/
public abstract class PDNameTreeNode<T extends COSObjectable> implements COSObjectable
{
private static final Log LOG = LogFactory.getLog(PDNameTreeNode.class);
private final COSDictionary node;
private PDNameTreeNode<T> parent;
/**
* Constructor.
*/
protected PDNameTreeNode()
{
node = new COSDictionary();
}
/**
* Constructor.
*
* @param dict The dictionary that holds the name information.
*/
protected PDNameTreeNode( COSDictionary dict )
{
node = dict;
}
/**
* Convert this standard java object to a COS object.
*
* @return The cos object that matches this Java object.
*/
@Override
public COSDictionary getCOSObject()
{
return node;
}
/**
* Returns the parent node.
*
* @return parent node
*/
public PDNameTreeNode<T> getParent()
{
return parent;
}
/**
* Sets the parent to the given node.
*
* @param parentNode the node to be set as parent
*/
public void setParent(PDNameTreeNode<T> parentNode)
{
parent = parentNode;
calculateLimits();
}
/**
* Determines if this is a root node or not.
*
* @return true if this is a root node
*/
public boolean isRootNode()
{
return parent == null;
}
/**
* Return the children of this node. This list will contain PDNameTreeNode objects.
*
* @return The list of children or null if there are no children.
*/
public List<PDNameTreeNode<T>> getKids()
{
List<PDNameTreeNode<T>> retval = null;
COSArray kids = (COSArray)node.getDictionaryObject( COSName.KIDS );
if( kids != null )
{
List<PDNameTreeNode<T>> pdObjects = new ArrayList<>();
for( int i=0; i<kids.size(); i++ )
{
pdObjects.add( createChildNode( (COSDictionary)kids.getObject(i) ) );
}
retval = new COSArrayList<>(pdObjects, kids);
}
return retval;
}
/**
* Set the children of this named tree.
*
* @param kids The children of this named tree.
*/
public void setKids( List<? extends PDNameTreeNode<T>> kids )
{
if (kids != null && kids.size() > 0)
{
for (PDNameTreeNode<T> kidsNode : kids)
{
kidsNode.setParent(this);
}
node.setItem( COSName.KIDS, COSArrayList.converterToCOSArray( kids ) );
// root nodes with kids don't have Names
if (isRootNode())
{
node.setItem(COSName.NAMES, null);
}
}
else
{
// remove kids
node.setItem(COSName.KIDS, null);
// remove Limits
node.setItem(COSName.LIMITS, null);
}
calculateLimits();
}
private void calculateLimits()
{
if (isRootNode())
{
node.setItem(COSName.LIMITS, null);
}
else
{
List<PDNameTreeNode<T>> kids = getKids();
if (kids != null && kids.size() > 0)
{
PDNameTreeNode<T> firstKid = kids.get(0);
PDNameTreeNode<T> lastKid = kids.get(kids.size() - 1);
String lowerLimit = firstKid.getLowerLimit();
setLowerLimit(lowerLimit);
String upperLimit = lastKid.getUpperLimit();
setUpperLimit(upperLimit);
}
else
{
try
{
Map<String, T> names = getNames();
if (names != null && names.size() > 0)
{
Set<String> strings = names.keySet();
String[] keys = strings.toArray(new String[strings.size()]);
String lowerLimit = keys[0];
setLowerLimit(lowerLimit);
String upperLimit = keys[keys.length-1];
setUpperLimit(upperLimit);
}
else
{
node.setItem(COSName.LIMITS, null);
}
}
catch (IOException exception)
{
node.setItem(COSName.LIMITS, null);
LOG.error("Error while calculating the Limits of a PageNameTreeNode:", exception);
}
}
}
}
/**
* The name to retrieve.
*
* @param name The name in the tree.
* @return The value of the name in the tree.
* @throws IOException If an there is a problem creating the destinations.
*/
public T getValue( String name ) throws IOException
{
T retval = null;
Map<String, T> names = getNames();
if( names != null )
{
retval = names.get( name );
}
else
{
List<PDNameTreeNode<T>> kids = getKids();
if (kids != null)
{
for( int i=0; i<kids.size() && retval == null; i++ )
{
PDNameTreeNode<T> childNode = kids.get( i );
String upperLimit = childNode.getUpperLimit();
String lowerLimit = childNode.getLowerLimit();
if (upperLimit == null || lowerLimit == null ||
upperLimit.compareTo(lowerLimit) < 0 ||
(lowerLimit.compareTo(name) <= 0 && upperLimit.compareTo(name) >= 0))
{
retval = childNode.getValue( name );
}
}
}
else
{
LOG.warn("NameTreeNode does not have \"names\" nor \"kids\" objects.");
}
}
return retval;
}
/**
* This will return a map of names. The key will be a string, and the
* value will depend on where this class is being used.
*
* @return ordered map of cos objects or <code>null</code> if dictionary
* contains no 'Names' entry
* @throws IOException If there is an error while creating the sub types.
*/
public Map<String, T> getNames() throws IOException
{
COSArray namesArray = (COSArray)node.getDictionaryObject( COSName.NAMES );
if( namesArray != null )
{
Map<String, T> names = new LinkedHashMap<>();
for( int i=0; i<namesArray.size(); i+=2 )
{
COSString key = (COSString)namesArray.getObject(i);
COSBase cosValue = namesArray.getObject( i+1 );
names.put( key.getString(), convertCOSToPD(cosValue) );
}
return Collections.unmodifiableMap(names);
}
else
{
return null;
}
}
/**
* Method to convert the COS value in the name tree to the PD Model object. The
* default implementation will simply return the given COSBase object.
* Subclasses should do something specific.
*
* @param base The COS object to convert.
* @return The converted PD Model object.
* @throws IOException If there is an error during creation.
*/
protected abstract T convertCOSToPD( COSBase base ) throws IOException;
/**
* Create a child node object.
*
* @param dic The dictionary for the child node object to refer to.
* @return The new child node object.
*/
protected abstract PDNameTreeNode<T> createChildNode( COSDictionary dic );
/**
* Set the names of for this node. The keys should be java.lang.String and the
* values must be a COSObjectable. This method will set the appropriate upper and lower
* limits based on the keys in the map.
*
* @param names map of names to objects, or <code>null</code>
*/
public void setNames( Map<String, T> names )
{
if( names == null )
{
node.setItem( COSName.NAMES, (COSObjectable)null );
node.setItem( COSName.LIMITS, (COSObjectable)null);
}
else
{
COSArray array = new COSArray();
List<String> keys = new ArrayList<>(names.keySet());
Collections.sort(keys);
for (String key : keys)
{
array.add(new COSString(key));
array.add(names.get(key));
}
node.setItem(COSName.NAMES, array);
calculateLimits();
}
}
/**
* Get the highest value for a key in the name map.
*
* @return The highest value for a key in the map.
*/
public String getUpperLimit()
{
String retval = null;
COSArray arr = (COSArray)node.getDictionaryObject( COSName.LIMITS );
if( arr != null )
{
retval = arr.getString( 1 );
}
return retval;
}
/**
* Set the highest value for the key in the map.
*
* @param upper The new highest value for a key in the map.
*/
private void setUpperLimit( String upper )
{
COSArray arr = (COSArray)node.getDictionaryObject( COSName.LIMITS );
if( arr == null )
{
arr = new COSArray();
arr.add( null );
arr.add( null );
node.setItem(COSName.LIMITS, arr);
}
arr.setString( 1, upper );
}
/**
* Get the lowest value for a key in the name map.
*
* @return The lowest value for a key in the map.
*/
public String getLowerLimit()
{
String retval = null;
COSArray arr = (COSArray)node.getDictionaryObject( COSName.LIMITS );
if( arr != null )
{
retval = arr.getString( 0 );
}
return retval;
}
/**
* Set the lowest value for the key in the map.
*
* @param lower The new lowest value for a key in the map.
*/
private void setLowerLimit( String lower )
{
COSArray arr = (COSArray)node.getDictionaryObject( COSName.LIMITS );
if( arr == null )
{
arr = new COSArray();
arr.add( null );
arr.add( null );
node.setItem(COSName.LIMITS, arr);
}
arr.setString( 0, lower );
}
}
|
PDFBOX-4071: improve javadoc
git-svn-id: c3ad59981690829a43dc34c293c4e2cd04bcd994@1840252 13f79535-47bb-0310-9956-ffa450edef68
|
pdfbox/src/main/java/org/apache/pdfbox/pdmodel/common/PDNameTreeNode.java
|
PDFBOX-4071: improve javadoc
|
|
Java
|
apache-2.0
|
a7ab9cb83d2c47d980b0b9f6b702e01bd1cf4139
| 0
|
ollie314/kafka,noslowerdna/kafka,Esquive/kafka,mihbor/kafka,themarkypantz/kafka,noslowerdna/kafka,sslavic/kafka,TiVo/kafka,KevinLiLu/kafka,Ishiihara/kafka,lindong28/kafka,TiVo/kafka,sebadiaz/kafka,lindong28/kafka,mihbor/kafka,rhauch/kafka,rhauch/kafka,ErikKringen/kafka,richhaase/kafka,sslavic/kafka,guozhangwang/kafka,KevinLiLu/kafka,TiVo/kafka,rhauch/kafka,sslavic/kafka,sebadiaz/kafka,ErikKringen/kafka,MyPureCloud/kafka,MyPureCloud/kafka,lindong28/kafka,guozhangwang/kafka,airbnb/kafka,KevinLiLu/kafka,gf53520/kafka,ollie314/kafka,apache/kafka,gf53520/kafka,geeag/kafka,MyPureCloud/kafka,noslowerdna/kafka,TiVo/kafka,Ishiihara/kafka,airbnb/kafka,apache/kafka,ollie314/kafka,Chasego/kafka,gf53520/kafka,Ishiihara/kafka,gf53520/kafka,ErikKringen/kafka,Chasego/kafka,ErikKringen/kafka,sebadiaz/kafka,airbnb/kafka,eribeiro/kafka,ollie314/kafka,sslavic/kafka,mihbor/kafka,geeag/kafka,airbnb/kafka,mihbor/kafka,Esquive/kafka,richhaase/kafka,MyPureCloud/kafka,Ishiihara/kafka,themarkypantz/kafka,KevinLiLu/kafka,apache/kafka,richhaase/kafka,rhauch/kafka,eribeiro/kafka,Esquive/kafka,geeag/kafka,apache/kafka,guozhangwang/kafka,Chasego/kafka,ijuma/kafka,ijuma/kafka,eribeiro/kafka,guozhangwang/kafka,Chasego/kafka,noslowerdna/kafka,lindong28/kafka,ijuma/kafka,sebadiaz/kafka,Esquive/kafka,geeag/kafka,themarkypantz/kafka,eribeiro/kafka,themarkypantz/kafka,ijuma/kafka,richhaase/kafka
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.kafka.streams.integration;
import kafka.utils.MockTime;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster;
import org.apache.kafka.streams.integration.utils.IntegrationTestUtils;
import org.apache.kafka.streams.kstream.KGroupedStream;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KStreamBuilder;
import org.apache.kafka.streams.kstream.TimeWindows;
import org.apache.kafka.streams.kstream.ValueMapper;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
import org.apache.kafka.streams.state.ReadOnlyWindowStore;
import org.apache.kafka.streams.state.StreamsMetadata;
import org.apache.kafka.streams.state.WindowStoreIterator;
import org.apache.kafka.streams.errors.InvalidStateStoreException;
import org.apache.kafka.test.MockKeyValueMapper;
import org.apache.kafka.test.TestCondition;
import org.apache.kafka.test.TestUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class QueryableStateIntegrationTest {
private static final int NUM_BROKERS = 2;
@ClassRule
public static final EmbeddedKafkaCluster CLUSTER = new EmbeddedKafkaCluster(NUM_BROKERS);
private final MockTime mockTime = CLUSTER.time;
private static final String STREAM_ONE = "stream-one";
private static final String STREAM_TWO = "stream-two";
private static final String STREAM_CONCURRENT = "stream-concurrent";
private static final String OUTPUT_TOPIC = "output";
private static final String OUTPUT_TOPIC_CONCURRENT = "output-concurrent";
private static final String STREAM_THREE = "stream-three";
private static final int NUM_PARTITIONS = NUM_BROKERS;
private static final int NUM_REPLICAS = NUM_BROKERS;
// sufficiently large window size such that everything falls into 1 window
private static final long WINDOW_SIZE = TimeUnit.MILLISECONDS.convert(2, TimeUnit.DAYS);
private static final String OUTPUT_TOPIC_THREE = "output-three";
private Properties streamsConfiguration;
private List<String> inputValues;
private Set<String> inputValuesKeys;
private KafkaStreams kafkaStreams;
private Comparator<KeyValue<String, String>> stringComparator;
private Comparator<KeyValue<String, Long>> stringLongComparator;
@BeforeClass
public static void createTopics() {
CLUSTER.createTopic(STREAM_ONE);
CLUSTER.createTopic(STREAM_CONCURRENT);
CLUSTER.createTopic(STREAM_TWO, NUM_PARTITIONS, NUM_REPLICAS);
CLUSTER.createTopic(STREAM_THREE, 4, 1);
CLUSTER.createTopic(OUTPUT_TOPIC);
CLUSTER.createTopic(OUTPUT_TOPIC_CONCURRENT);
CLUSTER.createTopic(OUTPUT_TOPIC_THREE);
}
@Before
public void before() throws IOException {
streamsConfiguration = new Properties();
final String applicationId = "queryable-state";
streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
streamsConfiguration
.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
streamsConfiguration.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, CLUSTER.zKConnectString());
streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory("qs-test").getPath());
streamsConfiguration.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
streamsConfiguration.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
stringComparator = new Comparator<KeyValue<String, String>>() {
@Override
public int compare(final KeyValue<String, String> o1,
final KeyValue<String, String> o2) {
return o1.key.compareTo(o2.key);
}
};
stringLongComparator = new Comparator<KeyValue<String, Long>>() {
@Override
public int compare(final KeyValue<String, Long> o1,
final KeyValue<String, Long> o2) {
return o1.key.compareTo(o2.key);
}
};
inputValues = Arrays.asList(
"hello world",
"all streams lead to kafka",
"streams",
"kafka streams",
"the cat in the hat",
"green eggs and ham",
"that sam i am",
"up the creek without a paddle",
"run forest run",
"a tank full of gas",
"eat sleep rave repeat",
"one jolly sailor",
"king of the world");
inputValuesKeys = new HashSet<>();
for (final String sentence : inputValues) {
final String[] words = sentence.split("\\W+");
for (final String word : words) {
inputValuesKeys.add(word);
}
}
}
@After
public void shutdown() throws IOException {
if (kafkaStreams != null) {
kafkaStreams.close();
}
IntegrationTestUtils.purgeLocalStreamsState(streamsConfiguration);
}
/**
* Creates a typical word count topology
*
* @param inputTopic
* @param outputTopic
* @param streamsConfiguration config
* @return
*/
private KafkaStreams createCountStream(final String inputTopic, final String outputTopic, final Properties streamsConfiguration) {
final KStreamBuilder builder = new KStreamBuilder();
final Serde<String> stringSerde = Serdes.String();
final KStream<String, String> textLines = builder.stream(stringSerde, stringSerde, inputTopic);
final KGroupedStream<String, String> groupedByWord = textLines
.flatMapValues(new ValueMapper<String, Iterable<String>>() {
@Override
public Iterable<String> apply(final String value) {
return Arrays.asList(value.toLowerCase(Locale.getDefault()).split("\\W+"));
}
})
.groupBy(MockKeyValueMapper.<String, String>SelectValueMapper());
// Create a State Store for the all time word count
groupedByWord.count("word-count-store-" + inputTopic).to(Serdes.String(), Serdes.Long(), outputTopic);
// Create a Windowed State Store that contains the word count for every 1 minute
groupedByWord.count(TimeWindows.of(WINDOW_SIZE), "windowed-word-count-store-" + inputTopic);
return new KafkaStreams(builder, streamsConfiguration);
}
private class StreamRunnable implements Runnable {
private final KafkaStreams myStream;
private boolean closed = false;
StreamRunnable(final String inputTopic, final String outputTopic, final int queryPort) {
final Properties props = (Properties) streamsConfiguration.clone();
props.put(StreamsConfig.APPLICATION_SERVER_CONFIG, "localhost:" + queryPort);
myStream = createCountStream(inputTopic, outputTopic, props);
}
@Override
public void run() {
myStream.start();
}
public void close() {
if (!closed) {
myStream.close();
closed = true;
}
}
public boolean isClosed() {
return closed;
}
public final KafkaStreams getStream() {
return myStream;
}
}
private void verifyAllKVKeys(final StreamRunnable[] streamRunnables, final KafkaStreams streams,
final Set<String> keys, final String storeName) throws Exception {
for (final String key : keys) {
TestUtils.waitForCondition(new TestCondition() {
@Override
public boolean conditionMet() {
final StreamsMetadata metadata = streams.metadataForKey(storeName, key, new StringSerializer());
if (metadata == null) {
return false;
}
final int index = metadata.hostInfo().port();
final KafkaStreams streamsWithKey = streamRunnables[index].getStream();
final ReadOnlyKeyValueStore<String, Long> store;
try {
store = streamsWithKey.store(storeName, QueryableStoreTypes.<String, Long>keyValueStore());
} catch (final IllegalStateException e) {
// Kafka Streams instance may have closed but rebalance hasn't happened
return false;
} catch (final InvalidStateStoreException e) {
// rebalance
return false;
}
return store != null && store.get(key) != null;
}
}, 30000, "waiting for metadata, store and value to be non null");
}
}
private void verifyAllWindowedKeys(final StreamRunnable[] streamRunnables, final KafkaStreams streams,
final Set<String> keys, final String storeName,
final Long from, final Long to) throws Exception {
for (final String key : keys) {
TestUtils.waitForCondition(new TestCondition() {
@Override
public boolean conditionMet() {
final StreamsMetadata metadata = streams.metadataForKey(storeName, key, new StringSerializer());
if (metadata == null) {
return false;
}
final int index = metadata.hostInfo().port();
final KafkaStreams streamsWithKey = streamRunnables[index].getStream();
final ReadOnlyWindowStore<String, Long> store;
try {
store = streamsWithKey.store(storeName, QueryableStoreTypes.<String, Long>windowStore());
} catch (final IllegalStateException e) {
// Kafka Streams instance may have closed but rebalance hasn't happened
return false;
} catch (InvalidStateStoreException e) {
// rebalance
return false;
}
return store != null && store.fetch(key, from, to) != null;
}
}, 30000, "waiting for metadata, store and value to be non null");
}
}
    /**
     * Verifies state remains queryable across rebalances: starts one stream instance per
     * partition plus a concurrent producer, checks every key through every instance, then
     * closes all but one instance and re-checks every key through the survivor (which must
     * route queries correctly after the rebalance).
     */
    @Test
    public void queryOnRebalance() throws Exception {
        final int numThreads = NUM_PARTITIONS;
        final StreamRunnable[] streamRunnables = new StreamRunnable[numThreads];
        final Thread[] streamThreads = new Thread[numThreads];
        final int numIterations = 500000;
        // create concurrent producer
        final ProducerRunnable producerRunnable = new ProducerRunnable(STREAM_TWO, inputValues, numIterations);
        final Thread producerThread = new Thread(producerRunnable);
        // create three stream threads
        for (int i = 0; i < numThreads; i++) {
            streamRunnables[i] = new StreamRunnable(STREAM_TWO, OUTPUT_TOPIC_THREE, i);
            streamThreads[i] = new Thread(streamRunnables[i]);
            streamThreads[i].start();
        }
        producerThread.start();
        try {
            waitUntilAtLeastNumRecordProcessed(OUTPUT_TOPIC_THREE, 1);
            // every instance must be able to locate and query every key
            for (int i = 0; i < numThreads; i++) {
                verifyAllKVKeys(streamRunnables, streamRunnables[i].getStream(), inputValuesKeys,
                    "word-count-store-" + STREAM_TWO);
                verifyAllWindowedKeys(streamRunnables, streamRunnables[i].getStream(), inputValuesKeys,
                    "windowed-word-count-store-" + STREAM_TWO, 0L, WINDOW_SIZE);
            }
            // kill N-1 threads
            for (int i = 1; i < numThreads; i++) {
                streamRunnables[i].close();
                streamThreads[i].interrupt();
                streamThreads[i].join();
            }
            // query from the remaining thread
            verifyAllKVKeys(streamRunnables, streamRunnables[0].getStream(), inputValuesKeys,
                "word-count-store-" + STREAM_TWO);
            verifyAllWindowedKeys(streamRunnables, streamRunnables[0].getStream(), inputValuesKeys,
                "windowed-word-count-store-" + STREAM_TWO, 0L, WINDOW_SIZE);
        } finally {
            // always shut down any still-running stream instances and the producer thread
            for (int i = 0; i < numThreads; i++) {
                if (!streamRunnables[i].isClosed()) {
                    streamRunnables[i].close();
                    streamThreads[i].interrupt();
                    streamThreads[i].join();
                }
            }
            producerRunnable.shutdown();
            producerThread.interrupt();
            producerThread.join();
        }
    }
    /**
     * Queries the key-value and window stores continuously while a producer writes
     * concurrently, asserting counts only ever grow (monotonicity under concurrent access).
     */
    @Test
    public void concurrentAccesses() throws Exception {
        final int numIterations = 500000;
        final ProducerRunnable producerRunnable = new ProducerRunnable(STREAM_CONCURRENT, inputValues, numIterations);
        final Thread producerThread = new Thread(producerRunnable);
        kafkaStreams = createCountStream(STREAM_CONCURRENT, OUTPUT_TOPIC_CONCURRENT, streamsConfiguration);
        kafkaStreams.start();
        producerThread.start();
        try {
            waitUntilAtLeastNumRecordProcessed(OUTPUT_TOPIC_CONCURRENT, 1);
            final ReadOnlyKeyValueStore<String, Long>
                keyValueStore = kafkaStreams.store("word-count-store-" + STREAM_CONCURRENT, QueryableStoreTypes.<String, Long>keyValueStore());
            final ReadOnlyWindowStore<String, Long> windowStore =
                kafkaStreams.store("windowed-word-count-store-" + STREAM_CONCURRENT, QueryableStoreTypes.<String, Long>windowStore());
            // expected* maps accumulate the highest count seen so far per key
            final Map<String, Long> expectedWindowState = new HashMap<>();
            final Map<String, Long> expectedCount = new HashMap<>();
            while (producerRunnable.getCurrIteration() < numIterations) {
                // while producing: missing keys are tolerated (failIfKeyNotFound = false)
                verifyGreaterOrEqual(inputValuesKeys.toArray(new String[inputValuesKeys.size()]), expectedWindowState,
                    expectedCount, windowStore, keyValueStore, false);
            }
            // finally check if all keys are there
            verifyGreaterOrEqual(inputValuesKeys.toArray(new String[inputValuesKeys.size()]), expectedWindowState,
                expectedCount, windowStore, keyValueStore, true);
        } finally {
            producerRunnable.shutdown();
            producerThread.interrupt();
            producerThread.join();
        }
    }
    /**
     * End-to-end happy path: produce one record per key, count them (windowed and
     * non-windowed), then verify per-key lookup, range scan, and full scan results.
     */
    @Test
    public void shouldBeAbleToQueryState() throws Exception {
        final KStreamBuilder builder = new KStreamBuilder();
        final String[] keys = {"hello", "goodbye", "welcome", "go", "kafka"};
        final Set<KeyValue<String, String>> batch1 = new TreeSet<>(stringComparator);
        batch1.addAll(Arrays.asList(
            new KeyValue<>(keys[0], "hello"),
            new KeyValue<>(keys[1], "goodbye"),
            new KeyValue<>(keys[2], "welcome"),
            new KeyValue<>(keys[3], "go"),
            new KeyValue<>(keys[4], "kafka")));
        // each key appears exactly once, so every expected count is 1
        final Set<KeyValue<String, Long>> expectedCount = new TreeSet<>(stringLongComparator);
        for (final String key : keys) {
            expectedCount.add(new KeyValue<>(key, 1L));
        }
        IntegrationTestUtils.produceKeyValuesSynchronously(
            STREAM_ONE,
            batch1,
            TestUtils.producerConfig(
                CLUSTER.bootstrapServers(),
                StringSerializer.class,
                StringSerializer.class,
                new Properties()),
            mockTime);
        final KStream<String, String> s1 = builder.stream(STREAM_ONE);
        // Non Windowed
        s1.groupByKey().count("my-count").to(Serdes.String(), Serdes.Long(), OUTPUT_TOPIC);
        // Windowed (WINDOW_SIZE is large enough that all records land in one window)
        s1.groupByKey().count(TimeWindows.of(WINDOW_SIZE), "windowed-count");
        kafkaStreams = new KafkaStreams(builder, streamsConfiguration);
        kafkaStreams.start();
        waitUntilAtLeastNumRecordProcessed(OUTPUT_TOPIC, 1);
        final ReadOnlyKeyValueStore<String, Long>
            myCount = kafkaStreams.store("my-count", QueryableStoreTypes.<String, Long>keyValueStore());
        final ReadOnlyWindowStore<String, Long> windowStore =
            kafkaStreams.store("windowed-count", QueryableStoreTypes.<String, Long>windowStore());
        verifyCanGetByKey(keys,
            expectedCount,
            expectedCount,
            windowStore,
            myCount);
        verifyRangeAndAll(expectedCount, myCount);
    }
@Test
public void shouldNotMakeStoreAvailableUntilAllStoresAvailable() throws Exception {
final KStreamBuilder builder = new KStreamBuilder();
final KStream<String, String> stream = builder.stream(STREAM_THREE);
final String storeName = "count-by-key";
stream.groupByKey().count(storeName);
kafkaStreams = new KafkaStreams(builder, streamsConfiguration);
kafkaStreams.start();
final KeyValue<String, String> hello = KeyValue.pair("hello", "hello");
IntegrationTestUtils.produceKeyValuesSynchronously(
STREAM_THREE,
Arrays.asList(hello, hello, hello, hello, hello, hello, hello, hello),
TestUtils.producerConfig(
CLUSTER.bootstrapServers(),
StringSerializer.class,
StringSerializer.class,
new Properties()),
mockTime);
final int maxWaitMs = 30000;
TestUtils.waitForCondition(new TestCondition() {
@Override
public boolean conditionMet() {
try {
kafkaStreams.store(storeName, QueryableStoreTypes.<String, Long>keyValueStore());
return true;
} catch (InvalidStateStoreException ise) {
return false;
}
}
}, maxWaitMs, "waiting for store " + storeName);
final ReadOnlyKeyValueStore<String, Long> store = kafkaStreams.store(storeName, QueryableStoreTypes.<String, Long>keyValueStore());
TestUtils.waitForCondition(new TestCondition() {
@Override
public boolean conditionMet() {
return new Long(8).equals(store.get("hello"));
}
}, maxWaitMs, "wait for count to be 8");
// close stream
kafkaStreams.close();
// start again
kafkaStreams = new KafkaStreams(builder, streamsConfiguration);
kafkaStreams.start();
// make sure we never get any value other than 8 for hello
TestUtils.waitForCondition(new TestCondition() {
@Override
public boolean conditionMet() {
try {
assertEquals(Long.valueOf(8L), kafkaStreams.store(storeName, QueryableStoreTypes.<String, Long>keyValueStore()).get("hello"));
return true;
} catch (InvalidStateStoreException ise) {
return false;
}
}
}, maxWaitMs, "waiting for store " + storeName);
}
private void verifyRangeAndAll(final Set<KeyValue<String, Long>> expectedCount,
final ReadOnlyKeyValueStore<String, Long> myCount) {
final Set<KeyValue<String, Long>> countRangeResults = new TreeSet<>(stringLongComparator);
final Set<KeyValue<String, Long>> countAllResults = new TreeSet<>(stringLongComparator);
final Set<KeyValue<String, Long>>
expectedRangeResults =
new TreeSet<>(stringLongComparator);
expectedRangeResults.addAll(Arrays.asList(
new KeyValue<>("hello", 1L),
new KeyValue<>("go", 1L),
new KeyValue<>("goodbye", 1L),
new KeyValue<>("kafka", 1L)
));
try (final KeyValueIterator<String, Long> range = myCount.range("go", "kafka")) {
while (range.hasNext()) {
countRangeResults.add(range.next());
}
}
try (final KeyValueIterator<String, Long> all = myCount.all()) {
while (all.hasNext()) {
countAllResults.add(all.next());
}
}
assertThat(countRangeResults, equalTo(expectedRangeResults));
assertThat(countAllResults, equalTo(expectedCount));
}
    /**
     * Polls both stores (up to 30 seconds) until all 5 keys are visible, then asserts
     * the collected windowed and non-windowed counts match the expected sets.
     */
    private void verifyCanGetByKey(final String[] keys,
                                   final Set<KeyValue<String, Long>> expectedWindowState,
                                   final Set<KeyValue<String, Long>> expectedCount,
                                   final ReadOnlyWindowStore<String, Long> windowStore,
                                   final ReadOnlyKeyValueStore<String, Long> myCount)
        throws InterruptedException {
        final Set<KeyValue<String, Long>> windowState = new TreeSet<>(stringLongComparator);
        final Set<KeyValue<String, Long>> countState = new TreeSet<>(stringLongComparator);
        final long timeout = System.currentTimeMillis() + 30000;
        // keep polling until both stores have produced a value for every key or we time out
        while (windowState.size() < 5 &&
            countState.size() < 5 &&
            System.currentTimeMillis() < timeout) {
            Thread.sleep(10);
            for (final String key : keys) {
                windowState.addAll(fetch(windowStore, key));
                final Long value = myCount.get(key);
                if (value != null) {
                    countState.add(new KeyValue<>(key, value));
                }
            }
        }
        assertThat(windowState, equalTo(expectedWindowState));
        assertThat(countState, equalTo(expectedCount));
    }
    /**
     * Verify that the new count is greater than or equal to the previous count.
     * Note: this method changes the values in expectedWindowState and expectedCount
     *
     * @param keys All the keys we ever expect to find
     * @param expectedWindowedCount Expected windowed count
     * @param expectedCount Expected count
     * @param windowStore Window Store
     * @param keyValueStore Key-value store
     * @param failIfKeyNotFound if true, tests fails if an expected key is not found in store. If false,
     *                          the method merely inserts the new found key into the list of
     *                          expected keys.
     * @throws InterruptedException
     */
    private void verifyGreaterOrEqual(final String[] keys,
                                      final Map<String, Long> expectedWindowedCount,
                                      final Map<String, Long> expectedCount,
                                      final ReadOnlyWindowStore<String, Long> windowStore,
                                      final ReadOnlyKeyValueStore<String, Long> keyValueStore,
                                      final boolean failIfKeyNotFound)
        throws InterruptedException {
        // snapshot the current state of both stores for all keys
        final Map<String, Long> windowState = new HashMap<>();
        final Map<String, Long> countState = new HashMap<>();
        for (final String key : keys) {
            final Map<String, Long> map = fetchMap(windowStore, key);
            if (map.equals(Collections.<String, Long>emptyMap()) && failIfKeyNotFound) {
                fail("Key not found " + key);
            }
            windowState.putAll(map);
            final Long value = keyValueStore.get(key);
            if (value != null) {
                countState.put(key, value);
            } else if (failIfKeyNotFound) {
                fail("Key not found " + key);
            }
        }
        // counts must be monotonically non-decreasing relative to the previous snapshot
        for (final Map.Entry<String, Long> actualWindowStateEntry : windowState.entrySet()) {
            if (expectedWindowedCount.containsKey(actualWindowStateEntry.getKey())) {
                final Long expectedValue = expectedWindowedCount.get(actualWindowStateEntry.getKey());
                assertTrue(actualWindowStateEntry.getValue() >= expectedValue);
            }
            // return this for next round of comparisons
            expectedWindowedCount.put(actualWindowStateEntry.getKey(), actualWindowStateEntry.getValue());
        }
        for (final Map.Entry<String, Long> actualCountStateEntry : countState.entrySet()) {
            if (expectedCount.containsKey(actualCountStateEntry.getKey())) {
                final Long expectedValue = expectedCount.get(actualCountStateEntry.getKey());
                assertTrue(actualCountStateEntry.getValue() >= expectedValue);
            }
            // return this for next round of comparisons
            expectedCount.put(actualCountStateEntry.getKey(), actualCountStateEntry.getValue());
        }
    }
private void waitUntilAtLeastNumRecordProcessed(final String topic, final int numRecs) throws InterruptedException {
final Properties config = new Properties();
config.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
config.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "queryable-state-consumer");
config.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
config.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
StringDeserializer.class.getName());
config.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
LongDeserializer.class.getName());
IntegrationTestUtils.waitUntilMinValuesRecordsReceived(
config,
topic,
numRecs,
60 * 1000);
}
private Set<KeyValue<String, Long>> fetch(final ReadOnlyWindowStore<String, Long> store,
final String key) {
final WindowStoreIterator<Long> fetch = store.fetch(key, 0, System.currentTimeMillis());
if (fetch.hasNext()) {
final KeyValue<Long, Long> next = fetch.next();
return Collections.singleton(KeyValue.pair(key, next.value));
}
return Collections.emptySet();
}
private Map<String, Long> fetchMap(final ReadOnlyWindowStore<String, Long> store,
final String key) {
final WindowStoreIterator<Long> fetch = store.fetch(key, 0, System.currentTimeMillis());
if (fetch.hasNext()) {
final KeyValue<Long, Long> next = fetch.next();
return Collections.singletonMap(key, next.value);
}
return Collections.emptyMap();
}
/**
* A class that periodically produces records in a separate thread
*/
private class ProducerRunnable implements Runnable {
private final String topic;
private final List<String> inputValues;
private final int numIterations;
private int currIteration = 0;
boolean shutdown = false;
ProducerRunnable(final String topic, final List<String> inputValues, final int numIterations) {
this.topic = topic;
this.inputValues = inputValues;
this.numIterations = numIterations;
}
private synchronized void incrementInteration() {
currIteration++;
}
public synchronized int getCurrIteration() {
return currIteration;
}
public synchronized void shutdown() {
shutdown = true;
}
@Override
public void run() {
final Properties producerConfig = new Properties();
producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
producerConfig.put(ProducerConfig.ACKS_CONFIG, "all");
producerConfig.put(ProducerConfig.RETRIES_CONFIG, 0);
producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
final KafkaProducer<String, String>
producer =
new KafkaProducer<>(producerConfig, new StringSerializer(), new StringSerializer());
while (getCurrIteration() < numIterations && !shutdown) {
for (int i = 0; i < inputValues.size(); i++) {
producer.send(new ProducerRecord<String, String>(topic, inputValues.get(i)));
}
incrementInteration();
}
}
}
}
|
streams/src/test/java/org/apache/kafka/streams/integration/QueryableStateIntegrationTest.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.kafka.streams.integration;
import kafka.utils.MockTime;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster;
import org.apache.kafka.streams.integration.utils.IntegrationTestUtils;
import org.apache.kafka.streams.kstream.KGroupedStream;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KStreamBuilder;
import org.apache.kafka.streams.kstream.TimeWindows;
import org.apache.kafka.streams.kstream.ValueMapper;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;
import org.apache.kafka.streams.state.ReadOnlyWindowStore;
import org.apache.kafka.streams.state.StreamsMetadata;
import org.apache.kafka.streams.state.WindowStoreIterator;
import org.apache.kafka.streams.errors.InvalidStateStoreException;
import org.apache.kafka.test.MockKeyValueMapper;
import org.apache.kafka.test.TestCondition;
import org.apache.kafka.test.TestUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class QueryableStateIntegrationTest {
private static final int NUM_BROKERS = 2;
@ClassRule
public static final EmbeddedKafkaCluster CLUSTER = new EmbeddedKafkaCluster(NUM_BROKERS);
private final MockTime mockTime = CLUSTER.time;
private static final String STREAM_ONE = "stream-one";
private static final String STREAM_TWO = "stream-two";
private static final String STREAM_CONCURRENT = "stream-concurrent";
private static final String OUTPUT_TOPIC = "output";
private static final String OUTPUT_TOPIC_CONCURRENT = "output-concurrent";
private static final String STREAM_THREE = "stream-three";
private static final int NUM_PARTITIONS = NUM_BROKERS;
private static final int NUM_REPLICAS = NUM_BROKERS;
// sufficiently large window size such that everything falls into 1 window
private static final long WINDOW_SIZE = TimeUnit.MILLISECONDS.convert(2, TimeUnit.DAYS);
private static final String OUTPUT_TOPIC_THREE = "output-three";
private Properties streamsConfiguration;
private List<String> inputValues;
private Set<String> inputValuesKeys;
private KafkaStreams kafkaStreams;
private Comparator<KeyValue<String, String>> stringComparator;
private Comparator<KeyValue<String, Long>> stringLongComparator;
@BeforeClass
public static void createTopics() {
CLUSTER.createTopic(STREAM_ONE);
CLUSTER.createTopic(STREAM_CONCURRENT);
CLUSTER.createTopic(STREAM_TWO, NUM_PARTITIONS, NUM_REPLICAS);
CLUSTER.createTopic(STREAM_THREE, 4, 1);
CLUSTER.createTopic(OUTPUT_TOPIC);
CLUSTER.createTopic(OUTPUT_TOPIC_CONCURRENT);
CLUSTER.createTopic(OUTPUT_TOPIC_THREE);
}
    /**
     * Per-test setup: builds the streams configuration (string serdes, fresh temp state
     * dir), comparators for assertion sets, the input sentences, and the set of all
     * distinct words expected as keys.
     */
    @Before
    public void before() throws IOException {
        streamsConfiguration = new Properties();
        final String applicationId = "queryable-state";
        streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
        streamsConfiguration
            .put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
        streamsConfiguration.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, CLUSTER.zKConnectString());
        streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        // a fresh temp dir per test keeps local state isolated between tests
        streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory("qs-test").getPath());
        streamsConfiguration.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        streamsConfiguration.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        // order KeyValue pairs by key only, so TreeSet-based assertions compare on keys
        stringComparator = new Comparator<KeyValue<String, String>>() {
            @Override
            public int compare(final KeyValue<String, String> o1,
                               final KeyValue<String, String> o2) {
                return o1.key.compareTo(o2.key);
            }
        };
        stringLongComparator = new Comparator<KeyValue<String, Long>>() {
            @Override
            public int compare(final KeyValue<String, Long> o1,
                               final KeyValue<String, Long> o2) {
                return o1.key.compareTo(o2.key);
            }
        };
        inputValues = Arrays.asList(
            "hello world",
            "all streams lead to kafka",
            "streams",
            "kafka streams",
            "the cat in the hat",
            "green eggs and ham",
            "that sam i am",
            "up the creek without a paddle",
            "run forest run",
            "a tank full of gas",
            "eat sleep rave repeat",
            "one jolly sailor",
            "king of the world");
        // collect every distinct word; these become the keys of the word-count stores
        inputValuesKeys = new HashSet<>();
        for (final String sentence : inputValues) {
            final String[] words = sentence.split("\\W+");
            for (final String word : words) {
                inputValuesKeys.add(word);
            }
        }
    }
@After
public void shutdown() throws IOException {
if (kafkaStreams != null) {
kafkaStreams.close();
}
IntegrationTestUtils.purgeLocalStreamsState(streamsConfiguration);
}
    /**
     * Creates a typical word count topology: splits each line into lowercase words,
     * groups by word, and materializes both an all-time count store and a windowed
     * count store (store names are suffixed with the input topic).
     *
     * @param inputTopic topic to read sentences from
     * @param outputTopic topic the all-time counts are written to
     * @param streamsConfiguration config
     * @return an unstarted {@link KafkaStreams} instance for the topology
     */
    private KafkaStreams createCountStream(final String inputTopic, final String outputTopic, final Properties streamsConfiguration) {
        final KStreamBuilder builder = new KStreamBuilder();
        final Serde<String> stringSerde = Serdes.String();
        final KStream<String, String> textLines = builder.stream(stringSerde, stringSerde, inputTopic);
        final KGroupedStream<String, String> groupedByWord = textLines
            .flatMapValues(new ValueMapper<String, Iterable<String>>() {
                @Override
                public Iterable<String> apply(final String value) {
                    return Arrays.asList(value.toLowerCase(Locale.getDefault()).split("\\W+"));
                }
            })
            .groupBy(MockKeyValueMapper.<String, String>SelectValueMapper());
        // Create a State Store for the all time word count
        groupedByWord.count("word-count-store-" + inputTopic).to(Serdes.String(), Serdes.Long(), outputTopic);
        // Create a Windowed State Store that contains the word count for every 1 minute
        groupedByWord.count(TimeWindows.of(WINDOW_SIZE), "windowed-word-count-store-" + inputTopic);
        return new KafkaStreams(builder, streamsConfiguration);
    }
    /**
     * Wraps a count-stream {@link KafkaStreams} instance in a Runnable so tests can run
     * several instances on separate threads. The queryPort is advertised via
     * APPLICATION_SERVER_CONFIG, letting tests map store metadata back to the runnable
     * (port == array index).
     */
    private class StreamRunnable implements Runnable {
        private final KafkaStreams myStream;
        // guarded by the test thread: close() and isClosed() are only called from there
        private boolean closed = false;

        StreamRunnable(final String inputTopic, final String outputTopic, final int queryPort) {
            final Properties props = (Properties) streamsConfiguration.clone();
            props.put(StreamsConfig.APPLICATION_SERVER_CONFIG, "localhost:" + queryPort);
            myStream = createCountStream(inputTopic, outputTopic, props);
        }

        @Override
        public void run() {
            myStream.start();
        }

        public void close() {
            if (!closed) {
                myStream.close();
                closed = true;
            }
        }

        public boolean isClosed() {
            return closed;
        }

        public final KafkaStreams getStream() {
            return myStream;
        }
    }
private void verifyAllKVKeys(final StreamRunnable[] streamRunnables, final KafkaStreams streams,
final Set<String> keys, final String storeName) throws Exception {
for (final String key : keys) {
TestUtils.waitForCondition(new TestCondition() {
@Override
public boolean conditionMet() {
final StreamsMetadata metadata = streams.metadataForKey(storeName, key, new StringSerializer());
if (metadata == null) {
return false;
}
final int index = metadata.hostInfo().port();
final KafkaStreams streamsWithKey = streamRunnables[index].getStream();
final ReadOnlyKeyValueStore<String, Long> store;
try {
store = streamsWithKey.store(storeName, QueryableStoreTypes.<String, Long>keyValueStore());
} catch (final IllegalStateException e) {
// Kafka Streams instance may have closed but rebalance hasn't happened
return false;
}
return store != null && store.get(key) != null;
}
}, 30000, "waiting for metadata, store and value to be non null");
}
}
private void verifyAllWindowedKeys(final StreamRunnable[] streamRunnables, final KafkaStreams streams,
final Set<String> keys, final String storeName,
final Long from, final Long to) throws Exception {
for (final String key : keys) {
TestUtils.waitForCondition(new TestCondition() {
@Override
public boolean conditionMet() {
final StreamsMetadata metadata = streams.metadataForKey(storeName, key, new StringSerializer());
if (metadata == null) {
return false;
}
final int index = metadata.hostInfo().port();
final KafkaStreams streamsWithKey = streamRunnables[index].getStream();
final ReadOnlyWindowStore<String, Long> store;
try {
store = streamsWithKey.store(storeName, QueryableStoreTypes.<String, Long>windowStore());
} catch (final IllegalStateException e) {
// Kafka Streams instance may have closed but rebalance hasn't happened
return false;
}
return store != null && store.fetch(key, from, to) != null;
}
}, 30000, "waiting for metadata, store and value to be non null");
}
}
    /**
     * Verifies state remains queryable across rebalances: starts one stream instance per
     * partition plus a concurrent producer, checks every key through every instance, then
     * closes all but one instance and re-checks every key through the survivor (which must
     * route queries correctly after the rebalance).
     */
    @Test
    public void queryOnRebalance() throws Exception {
        final int numThreads = NUM_PARTITIONS;
        final StreamRunnable[] streamRunnables = new StreamRunnable[numThreads];
        final Thread[] streamThreads = new Thread[numThreads];
        final int numIterations = 500000;
        // create concurrent producer
        final ProducerRunnable producerRunnable = new ProducerRunnable(STREAM_TWO, inputValues, numIterations);
        final Thread producerThread = new Thread(producerRunnable);
        // create three stream threads
        for (int i = 0; i < numThreads; i++) {
            streamRunnables[i] = new StreamRunnable(STREAM_TWO, OUTPUT_TOPIC_THREE, i);
            streamThreads[i] = new Thread(streamRunnables[i]);
            streamThreads[i].start();
        }
        producerThread.start();
        try {
            waitUntilAtLeastNumRecordProcessed(OUTPUT_TOPIC_THREE, 1);
            // every instance must be able to locate and query every key
            for (int i = 0; i < numThreads; i++) {
                verifyAllKVKeys(streamRunnables, streamRunnables[i].getStream(), inputValuesKeys,
                    "word-count-store-" + STREAM_TWO);
                verifyAllWindowedKeys(streamRunnables, streamRunnables[i].getStream(), inputValuesKeys,
                    "windowed-word-count-store-" + STREAM_TWO, 0L, WINDOW_SIZE);
            }
            // kill N-1 threads
            for (int i = 1; i < numThreads; i++) {
                streamRunnables[i].close();
                streamThreads[i].interrupt();
                streamThreads[i].join();
            }
            // query from the remaining thread
            verifyAllKVKeys(streamRunnables, streamRunnables[0].getStream(), inputValuesKeys,
                "word-count-store-" + STREAM_TWO);
            verifyAllWindowedKeys(streamRunnables, streamRunnables[0].getStream(), inputValuesKeys,
                "windowed-word-count-store-" + STREAM_TWO, 0L, WINDOW_SIZE);
        } finally {
            // always shut down any still-running stream instances and the producer thread
            for (int i = 0; i < numThreads; i++) {
                if (!streamRunnables[i].isClosed()) {
                    streamRunnables[i].close();
                    streamThreads[i].interrupt();
                    streamThreads[i].join();
                }
            }
            producerRunnable.shutdown();
            producerThread.interrupt();
            producerThread.join();
        }
    }
    /**
     * Queries the key-value and window stores continuously while a producer writes
     * concurrently, asserting counts only ever grow (monotonicity under concurrent access).
     */
    @Test
    public void concurrentAccesses() throws Exception {
        final int numIterations = 500000;
        final ProducerRunnable producerRunnable = new ProducerRunnable(STREAM_CONCURRENT, inputValues, numIterations);
        final Thread producerThread = new Thread(producerRunnable);
        kafkaStreams = createCountStream(STREAM_CONCURRENT, OUTPUT_TOPIC_CONCURRENT, streamsConfiguration);
        kafkaStreams.start();
        producerThread.start();
        try {
            waitUntilAtLeastNumRecordProcessed(OUTPUT_TOPIC_CONCURRENT, 1);
            final ReadOnlyKeyValueStore<String, Long>
                keyValueStore = kafkaStreams.store("word-count-store-" + STREAM_CONCURRENT, QueryableStoreTypes.<String, Long>keyValueStore());
            final ReadOnlyWindowStore<String, Long> windowStore =
                kafkaStreams.store("windowed-word-count-store-" + STREAM_CONCURRENT, QueryableStoreTypes.<String, Long>windowStore());
            // expected* maps accumulate the highest count seen so far per key
            final Map<String, Long> expectedWindowState = new HashMap<>();
            final Map<String, Long> expectedCount = new HashMap<>();
            while (producerRunnable.getCurrIteration() < numIterations) {
                // while producing: missing keys are tolerated (failIfKeyNotFound = false)
                verifyGreaterOrEqual(inputValuesKeys.toArray(new String[inputValuesKeys.size()]), expectedWindowState,
                    expectedCount, windowStore, keyValueStore, false);
            }
            // finally check if all keys are there
            verifyGreaterOrEqual(inputValuesKeys.toArray(new String[inputValuesKeys.size()]), expectedWindowState,
                expectedCount, windowStore, keyValueStore, true);
        } finally {
            producerRunnable.shutdown();
            producerThread.interrupt();
            producerThread.join();
        }
    }
    /**
     * End-to-end happy path: produce one record per key, count them (windowed and
     * non-windowed), then verify per-key lookup, range scan, and full scan results.
     */
    @Test
    public void shouldBeAbleToQueryState() throws Exception {
        final KStreamBuilder builder = new KStreamBuilder();
        final String[] keys = {"hello", "goodbye", "welcome", "go", "kafka"};
        final Set<KeyValue<String, String>> batch1 = new TreeSet<>(stringComparator);
        batch1.addAll(Arrays.asList(
            new KeyValue<>(keys[0], "hello"),
            new KeyValue<>(keys[1], "goodbye"),
            new KeyValue<>(keys[2], "welcome"),
            new KeyValue<>(keys[3], "go"),
            new KeyValue<>(keys[4], "kafka")));
        // each key appears exactly once, so every expected count is 1
        final Set<KeyValue<String, Long>> expectedCount = new TreeSet<>(stringLongComparator);
        for (final String key : keys) {
            expectedCount.add(new KeyValue<>(key, 1L));
        }
        IntegrationTestUtils.produceKeyValuesSynchronously(
            STREAM_ONE,
            batch1,
            TestUtils.producerConfig(
                CLUSTER.bootstrapServers(),
                StringSerializer.class,
                StringSerializer.class,
                new Properties()),
            mockTime);
        final KStream<String, String> s1 = builder.stream(STREAM_ONE);
        // Non Windowed
        s1.groupByKey().count("my-count").to(Serdes.String(), Serdes.Long(), OUTPUT_TOPIC);
        // Windowed (WINDOW_SIZE is large enough that all records land in one window)
        s1.groupByKey().count(TimeWindows.of(WINDOW_SIZE), "windowed-count");
        kafkaStreams = new KafkaStreams(builder, streamsConfiguration);
        kafkaStreams.start();
        waitUntilAtLeastNumRecordProcessed(OUTPUT_TOPIC, 1);
        final ReadOnlyKeyValueStore<String, Long>
            myCount = kafkaStreams.store("my-count", QueryableStoreTypes.<String, Long>keyValueStore());
        final ReadOnlyWindowStore<String, Long> windowStore =
            kafkaStreams.store("windowed-count", QueryableStoreTypes.<String, Long>windowStore());
        verifyCanGetByKey(keys,
            expectedCount,
            expectedCount,
            windowStore,
            myCount);
        verifyRangeAndAll(expectedCount, myCount);
    }
@Test
public void shouldNotMakeStoreAvailableUntilAllStoresAvailable() throws Exception {
final KStreamBuilder builder = new KStreamBuilder();
final KStream<String, String> stream = builder.stream(STREAM_THREE);
final String storeName = "count-by-key";
stream.groupByKey().count(storeName);
kafkaStreams = new KafkaStreams(builder, streamsConfiguration);
kafkaStreams.start();
final KeyValue<String, String> hello = KeyValue.pair("hello", "hello");
IntegrationTestUtils.produceKeyValuesSynchronously(
STREAM_THREE,
Arrays.asList(hello, hello, hello, hello, hello, hello, hello, hello),
TestUtils.producerConfig(
CLUSTER.bootstrapServers(),
StringSerializer.class,
StringSerializer.class,
new Properties()),
mockTime);
final int maxWaitMs = 30000;
TestUtils.waitForCondition(new TestCondition() {
@Override
public boolean conditionMet() {
try {
kafkaStreams.store(storeName, QueryableStoreTypes.<String, Long>keyValueStore());
return true;
} catch (InvalidStateStoreException ise) {
return false;
}
}
}, maxWaitMs, "waiting for store " + storeName);
final ReadOnlyKeyValueStore<String, Long> store = kafkaStreams.store(storeName, QueryableStoreTypes.<String, Long>keyValueStore());
TestUtils.waitForCondition(new TestCondition() {
@Override
public boolean conditionMet() {
return new Long(8).equals(store.get("hello"));
}
}, maxWaitMs, "wait for count to be 8");
// close stream
kafkaStreams.close();
// start again
kafkaStreams = new KafkaStreams(builder, streamsConfiguration);
kafkaStreams.start();
// make sure we never get any value other than 8 for hello
TestUtils.waitForCondition(new TestCondition() {
@Override
public boolean conditionMet() {
try {
assertEquals(Long.valueOf(8L), kafkaStreams.store(storeName, QueryableStoreTypes.<String, Long>keyValueStore()).get("hello"));
return true;
} catch (InvalidStateStoreException ise) {
return false;
}
}
}, maxWaitMs, "waiting for store " + storeName);
}
private void verifyRangeAndAll(final Set<KeyValue<String, Long>> expectedCount,
final ReadOnlyKeyValueStore<String, Long> myCount) {
final Set<KeyValue<String, Long>> countRangeResults = new TreeSet<>(stringLongComparator);
final Set<KeyValue<String, Long>> countAllResults = new TreeSet<>(stringLongComparator);
final Set<KeyValue<String, Long>>
expectedRangeResults =
new TreeSet<>(stringLongComparator);
expectedRangeResults.addAll(Arrays.asList(
new KeyValue<>("hello", 1L),
new KeyValue<>("go", 1L),
new KeyValue<>("goodbye", 1L),
new KeyValue<>("kafka", 1L)
));
try (final KeyValueIterator<String, Long> range = myCount.range("go", "kafka")) {
while (range.hasNext()) {
countRangeResults.add(range.next());
}
}
try (final KeyValueIterator<String, Long> all = myCount.all()) {
while (all.hasNext()) {
countAllResults.add(all.next());
}
}
assertThat(countRangeResults, equalTo(expectedRangeResults));
assertThat(countAllResults, equalTo(expectedCount));
}
/**
 * Polls the window store and key-value store (for up to 30 seconds) until at
 * least 5 entries are observed for each, then asserts the observed state
 * matches the expected state.
 */
private void verifyCanGetByKey(final String[] keys,
                               final Set<KeyValue<String, Long>> expectedWindowState,
                               final Set<KeyValue<String, Long>> expectedCount,
                               final ReadOnlyWindowStore<String, Long> windowStore,
                               final ReadOnlyKeyValueStore<String, Long> myCount)
    throws InterruptedException {
    final Set<KeyValue<String, Long>> observedWindowState = new TreeSet<>(stringLongComparator);
    final Set<KeyValue<String, Long>> observedCountState = new TreeSet<>(stringLongComparator);
    final long deadline = System.currentTimeMillis() + 30000;

    while (observedWindowState.size() < 5
           && observedCountState.size() < 5
           && System.currentTimeMillis() < deadline) {
        Thread.sleep(10);
        for (final String key : keys) {
            observedWindowState.addAll(fetch(windowStore, key));
            final Long count = myCount.get(key);
            if (count != null) {
                observedCountState.add(new KeyValue<>(key, count));
            }
        }
    }

    assertThat(observedWindowState, equalTo(expectedWindowState));
    assertThat(observedCountState, equalTo(expectedCount));
}
/**
 * Verify that the new count is greater than or equal to the previous count.
 * Note: this method changes the values in expectedWindowedCount and expectedCount.
 *
 * @param keys All the keys we ever expect to find
 * @param expectedWindowedCount Expected windowed count
 * @param expectedCount Expected count
 * @param windowStore Window Store
 * @param keyValueStore Key-value store
 * @param failIfKeyNotFound if true, tests fails if an expected key is not found in store. If false,
 *                          the method merely inserts the new found key into the list of
 *                          expected keys.
 * @throws InterruptedException
 */
private void verifyGreaterOrEqual(final String[] keys,
                                  final Map<String, Long> expectedWindowedCount,
                                  final Map<String, Long> expectedCount,
                                  final ReadOnlyWindowStore<String, Long> windowStore,
                                  final ReadOnlyKeyValueStore<String, Long> keyValueStore,
                                  final boolean failIfKeyNotFound)
    throws InterruptedException {
    final Map<String, Long> observedWindowed = new HashMap<>();
    final Map<String, Long> observedCounts = new HashMap<>();

    // Collect the current state from both stores.
    for (final String key : keys) {
        final Map<String, Long> windowed = fetchMap(windowStore, key);
        if (failIfKeyNotFound && windowed.isEmpty()) {
            fail("Key not found " + key);
        }
        observedWindowed.putAll(windowed);

        final Long count = keyValueStore.get(key);
        if (count != null) {
            observedCounts.put(key, count);
        } else if (failIfKeyNotFound) {
            fail("Key not found " + key);
        }
    }

    // Each observed value must be >= the previously seen value, then becomes
    // the new baseline for the next round of comparisons.
    for (final Map.Entry<String, Long> observed : observedWindowed.entrySet()) {
        if (expectedWindowedCount.containsKey(observed.getKey())) {
            final Long previous = expectedWindowedCount.get(observed.getKey());
            assertTrue(observed.getValue() >= previous);
        }
        expectedWindowedCount.put(observed.getKey(), observed.getValue());
    }
    for (final Map.Entry<String, Long> observed : observedCounts.entrySet()) {
        if (expectedCount.containsKey(observed.getKey())) {
            final Long previous = expectedCount.get(observed.getKey());
            assertTrue(observed.getValue() >= previous);
        }
        expectedCount.put(observed.getKey(), observed.getValue());
    }
}
/**
 * Blocks (for up to 60 seconds) until at least {@code numRecs} records have been
 * received from {@code topic} by a fresh consumer.
 */
private void waitUntilAtLeastNumRecordProcessed(final String topic, final int numRecs) throws InterruptedException {
    final Properties consumerConfig = new Properties();
    consumerConfig.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    consumerConfig.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "queryable-state-consumer");
    consumerConfig.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    consumerConfig.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    consumerConfig.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName());

    final long maxWaitMillis = 60 * 1000;
    IntegrationTestUtils.waitUntilMinValuesRecordsReceived(consumerConfig, topic, numRecs, maxWaitMillis);
}
/**
 * Fetches the first windowed value for {@code key} from time 0 up to now.
 *
 * @param store the window store to query
 * @param key   the key to fetch
 * @return a singleton set with the key and its first windowed value, or an
 *         empty set if no window contains the key
 */
private Set<KeyValue<String, Long>> fetch(final ReadOnlyWindowStore<String, Long> store,
                                          final String key) {
    // Close the iterator to release underlying store resources; the original
    // leaked it (the rest of this test already uses try-with-resources for
    // store iterators).
    try (final WindowStoreIterator<Long> fetch = store.fetch(key, 0, System.currentTimeMillis())) {
        if (fetch.hasNext()) {
            final KeyValue<Long, Long> next = fetch.next();
            return Collections.singleton(KeyValue.pair(key, next.value));
        }
        return Collections.emptySet();
    }
}
/**
 * Fetches the first windowed value for {@code key} from time 0 up to now.
 *
 * @param store the window store to query
 * @param key   the key to fetch
 * @return a singleton map of the key to its first windowed value, or an
 *         empty map if no window contains the key
 */
private Map<String, Long> fetchMap(final ReadOnlyWindowStore<String, Long> store,
                                   final String key) {
    // Close the iterator to release underlying store resources; the original
    // leaked it (the rest of this test already uses try-with-resources for
    // store iterators).
    try (final WindowStoreIterator<Long> fetch = store.fetch(key, 0, System.currentTimeMillis())) {
        if (fetch.hasNext()) {
            final KeyValue<Long, Long> next = fetch.next();
            return Collections.singletonMap(key, next.value);
        }
        return Collections.emptyMap();
    }
}
/**
 * A class that periodically produces records in a separate thread.
 * Runs until either {@code numIterations} full passes over {@code inputValues}
 * have been produced or {@link #shutdown()} is called.
 */
private class ProducerRunnable implements Runnable {

    private final String topic;
    private final List<String> inputValues;
    private final int numIterations;
    private int currIteration = 0;
    // volatile: written by the test thread via shutdown() and read by the
    // producer thread in run(); the original read it without any
    // synchronization, so the update was not guaranteed to be visible.
    volatile boolean shutdown = false;

    ProducerRunnable(final String topic, final List<String> inputValues, final int numIterations) {
        this.topic = topic;
        this.inputValues = inputValues;
        this.numIterations = numIterations;
    }

    /** Records that one full pass over {@code inputValues} has completed. */
    private synchronized void incrementIteration() {
        currIteration++;
    }

    /** @return the number of completed passes over {@code inputValues} */
    public synchronized int getCurrIteration() {
        return currIteration;
    }

    /** Signals the producing loop to stop before its next pass. */
    public synchronized void shutdown() {
        shutdown = true;
    }

    @Override
    public void run() {
        final Properties producerConfig = new Properties();
        producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
        producerConfig.put(ProducerConfig.ACKS_CONFIG, "all");
        producerConfig.put(ProducerConfig.RETRIES_CONFIG, 0);
        // Serializers are passed explicitly to the constructor, so the
        // (previously duplicated) serializer config entries are dropped.
        // try-with-resources closes the producer; the original leaked it.
        try (final KafkaProducer<String, String> producer =
                 new KafkaProducer<>(producerConfig, new StringSerializer(), new StringSerializer())) {
            while (getCurrIteration() < numIterations && !shutdown) {
                for (final String value : inputValues) {
                    producer.send(new ProducerRecord<String, String>(topic, value));
                }
                incrementIteration();
            }
        }
    }
}
}
|
MINOR: catch InvalidStateStoreException in QueryableStateIntegrationTest
A couple of the tests may transiently fail in QueryableStateIntegrationTest as they are not catching InvalidStateStoreException. This exception is expected during rebalance.
Author: Damian Guy <damian.guy@gmail.com>
Reviewers: Eno Thereska, Guozhang Wang
Closes #1840 from dguy/minor-fix
|
streams/src/test/java/org/apache/kafka/streams/integration/QueryableStateIntegrationTest.java
|
MINOR: catch InvalidStateStoreException in QueryableStateIntegrationTest
|
|
Java
|
apache-2.0
|
245aaa9a1c53dcacc52c9b2d3b4e517fbd1318aa
| 0
|
gbif/dwc-api
|
/*
* Copyright 2021 Global Biodiversity Information Facility (GBIF)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gbif.dwc.terms;
import java.lang.annotation.Annotation;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * Utility methods for {@link Term}.
 */
public class Terms {

  /** Matches explicit NULL markers such as {@code \N} or {@code NULL}. */
  private static final Pattern NULL_PATTERN = Pattern.compile("^\\s*(\\\\N|\\\\?NULL)\\s*$");

  /** All terms annotated with {@link Vocabulary}, computed once at class load. */
  private static final List<Term> VOCABULARY_BACKED_TERMS = findVocabularyBackedTerms();

  /**
   * Static utility class; never instantiated.
   */
  private Terms() {
  }

  /**
   * Tries various terms in given order until it finds a non empty value.
   *
   * @param record map of term to raw value
   * @param terms  candidate terms, tried in the given order
   * @return the first non-blank, trimmed value, or null if none is found
   */
  public static String getValueOfFirst(Map<Term, String> record, Term... terms) {
    for (Term t : terms) {
      if (record.containsKey(t)) {
        String val = clean(record.get(t));
        if (val != null) {
          return val;
        }
      }
    }
    return null;
  }

  /**
   * Trims the value and converts blank values or NULL markers to null.
   */
  private static String clean(String str) {
    if (isTermValueBlank(str)) {
      return null;
    }
    str = str.trim();
    if (str.isEmpty()) {
      return null;
    }
    return str;
  }

  /**
   * Check if the value provided should be considered "blank" in the context of a {@link Term}
   * value, i.e. null, empty, or an explicit NULL marker such as {@code \N} or {@code NULL}.
   */
  public static boolean isTermValueBlank(String termValue) {
    return termValue == null || termValue.isEmpty() || NULL_PATTERN.matcher(termValue).find();
  }

  /**
   * Returns all the {@link Term} that are annotated with {@link Vocabulary}.
   */
  public static List<Term> getVocabularyBackedTerms() {
    return VOCABULARY_BACKED_TERMS;
  }

  private static List<Term> findVocabularyBackedTerms() {
    // Bug fix: GbifTerm was listed twice in the original, producing duplicate
    // entries in VOCABULARY_BACKED_TERMS; each term enum is now listed once.
    return Stream.of(getTerms(AcefTerm.class),
                     getTerms(AcTerm.class),
                     getTerms(DcTerm.class),
                     getTerms(DwcaTerm.class),
                     getTerms(DwcTerm.class),
                     getTerms(GadmTerm.class),
                     getTerms(GbifInternalTerm.class),
                     getTerms(GbifTerm.class),
                     getTerms(IucnTerm.class),
                     getTerms(XmpRightsTerm.class),
                     getTerms(XmpTerm.class))
        .flatMap(Arrays::stream)
        .filter(Terms::isVocabulary)
        .collect(Collectors.toList());
  }

  /** Returns all enum constants of the given term enum class. */
  private static <T extends Term> Term[] getTerms(Class<T> term) {
    return term.getEnumConstants();
  }

  /**
   * @return true if the term is an enum constant annotated with {@link Vocabulary}
   */
  public static boolean isVocabulary(Term term) {
    return term instanceof Enum && hasTermAnnotation(term, Vocabulary.class);
  }

  /** Checks the enum constant's field for the given annotation via reflection. */
  private static boolean hasTermAnnotation(Term term, Class<? extends Annotation> annotation) {
    try {
      return term.getClass().getField(((Enum<?>) term).name()).isAnnotationPresent(annotation);
    } catch (NoSuchFieldException ex) {
      throw new IllegalArgumentException(ex);
    }
  }
}
|
src/main/java/org/gbif/dwc/terms/Terms.java
|
/*
* Copyright 2021 Global Biodiversity Information Facility (GBIF)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gbif.dwc.terms;
import java.util.Map;
import java.util.regex.Pattern;
/**
 * Utility methods for {@link Term}.
 */
public class Terms {

  /** Matches explicit NULL markers such as {@code \N} or {@code NULL}. */
  private static final Pattern NULL_PATTERN = Pattern.compile("^\\s*(\\\\N|\\\\?NULL)\\s*$");

  /**
   * Static utility class; never instantiated.
   */
  private Terms() {
  }

  /**
   * Tries various terms in given order until it finds a non empty value.
   *
   * @param record map of term to raw value
   * @param terms  candidate terms, tried in the given order
   * @return the first non-blank, trimmed value, or null if none is found
   */
  public static String getValueOfFirst(Map<Term, String> record, Term... terms) {
    for (Term term : terms) {
      if (!record.containsKey(term)) {
        continue;
      }
      String value = clean(record.get(term));
      if (value != null) {
        return value;
      }
    }
    return null;
  }

  /**
   * Trims the value and converts blank values or NULL markers to null.
   */
  private static String clean(String str) {
    if (isTermValueBlank(str)) {
      return null;
    }
    final String trimmed = str.trim();
    return trimmed.isEmpty() ? null : trimmed;
  }

  /**
   * Check if the value provided should be considered "blank" in the context of a {@link Term}
   * value, i.e. null, empty, or an explicit NULL marker such as {@code \N} or {@code NULL}.
   *
   * @param termValue the raw value to inspect, may be null
   * @return true if the value is null, empty, or a NULL marker
   */
  public static boolean isTermValueBlank(String termValue) {
    return termValue == null || termValue.isEmpty() || NULL_PATTERN.matcher(termValue).find();
  }
}
|
Utility methods for Vocabulary terms
|
src/main/java/org/gbif/dwc/terms/Terms.java
|
Utility methods for Vocabulary terms
|
|
Java
|
apache-2.0
|
ac4198aaa06e2d008a0baebf083c6c7b4f444329
| 0
|
mintern/gson
|
/*
* Copyright (C) 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.gson.internal.bind;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;
final class Reflection {

  /** Static utility holder; never instantiated. */
  private Reflection() {
  }

  /**
   * Finds a compatible runtime type if it is more specific: when the declared
   * type carries no useful information (Object, a type variable, or a raw
   * class), the value's actual runtime class is used instead.
   *
   * @param type  the declared type
   * @param value the value whose runtime class may refine the type; may be null
   * @return the runtime class of {@code value} when it is more specific,
   *         otherwise the declared type unchanged
   */
  public static Type getRuntimeTypeIfMoreSpecific(Type type, Object value) {
    if (value != null
        && (type == Object.class || type instanceof TypeVariable || type instanceof Class<?>)) {
      // getClass() already returns a Class (a Type); the original's explicit
      // cast was redundant.
      type = value.getClass();
    }
    return type;
  }

  // TODO: this should use Joel's unsafe constructor stuff
  /**
   * Invokes the given no-arg constructor, making it accessible first if
   * necessary (this allows calling private constructors).
   *
   * @throws RuntimeException wrapping instantiation or invocation failures
   */
  public static <T> T newInstance(Constructor<T> constructor) {
    if (!constructor.isAccessible()) {
      constructor.setAccessible(true);
    }
    try {
      Object[] args = null;
      return constructor.newInstance(args);
    } catch (InstantiationException e) {
      // TODO: JsonParseException ?
      throw new RuntimeException(e);
    } catch (InvocationTargetException e) {
      // TODO: don't wrap if cause is unchecked!
      // TODO: JsonParseException ?
      throw new RuntimeException(e.getTargetException());
    } catch (IllegalAccessException e) {
      throw new AssertionError(e);
    }
  }
}
|
src/main/java/com/google/gson/internal/bind/Reflection.java
|
/*
* Copyright (C) 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.gson.internal.bind;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;
final class Reflection {

  /**
   * Finds a compatible runtime type if it is more specific.
   */
  public static Type getRuntimeTypeIfMoreSpecific(Type type, Object value) {
    if (value == null) {
      return type;
    }
    final boolean declaredTypeIsVague =
        type == Object.class || type instanceof TypeVariable || type instanceof Class<?>;
    return declaredTypeIsVague ? (Class<?>) value.getClass() : type;
  }

  // TODO: this should use Joel's unsafe constructor stuff
  public static <T> T newInstance(Constructor<T> constructor) {
    try {
      return constructor.newInstance((Object[]) null);
    } catch (InstantiationException e) {
      // TODO: JsonParseException ?
      throw new RuntimeException(e);
    } catch (InvocationTargetException e) {
      // TODO: don't wrap if cause is unchecked!
      // TODO: JsonParseException ?
      throw new RuntimeException(e.getTargetException());
    } catch (IllegalAccessException e) {
      throw new AssertionError(e);
    }
  }
}
|
Support calling private constructors
git-svn-id: 7b8be7b2f8bf58e8147c910303b95fa2b8d9948f@916 2534bb62-2c4b-0410-85e8-b5006b95c4ae
|
src/main/java/com/google/gson/internal/bind/Reflection.java
|
Support calling private constructors
|
|
Java
|
apache-2.0
|
ee7bca12ec48df9ab764943cb33dbd57e03125b9
| 0
|
ic3fox/jawr,ic3fox/jawr,ic3fox/jawr,ic3fox/jawr
|
/**
* Copyright 2007-2016 Jordi Hernández Sellés, Ibrahim Chaehoi
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package net.jawr.web.resource.bundle.handler;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Reader;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.Writer;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.zip.GZIPOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.jawr.web.DebugMode;
import net.jawr.web.JawrConstant;
import net.jawr.web.config.JawrConfig;
import net.jawr.web.context.ThreadLocalJawrContext;
import net.jawr.web.exception.BundlingProcessException;
import net.jawr.web.exception.ResourceNotFoundException;
import net.jawr.web.resource.BinaryResourcesHandler;
import net.jawr.web.resource.bundle.CompositeResourceBundle;
import net.jawr.web.resource.bundle.IOUtils;
import net.jawr.web.resource.bundle.JoinableResourceBundle;
import net.jawr.web.resource.bundle.JoinableResourceBundleContent;
import net.jawr.web.resource.bundle.JoinableResourceBundlePropertySerializer;
import net.jawr.web.resource.bundle.factory.global.postprocessor.GlobalPostProcessingContext;
import net.jawr.web.resource.bundle.factory.global.preprocessor.GlobalPreprocessingContext;
import net.jawr.web.resource.bundle.factory.util.ClassLoaderResourceUtils;
import net.jawr.web.resource.bundle.factory.util.PathNormalizer;
import net.jawr.web.resource.bundle.global.processor.GlobalProcessor;
import net.jawr.web.resource.bundle.hashcode.BundleHashcodeGenerator;
import net.jawr.web.resource.bundle.iterator.BundlePath;
import net.jawr.web.resource.bundle.iterator.ConditionalCommentCallbackHandler;
import net.jawr.web.resource.bundle.iterator.DebugModePathsIteratorImpl;
import net.jawr.web.resource.bundle.iterator.IECssDebugPathsIteratorImpl;
import net.jawr.web.resource.bundle.iterator.PathsIteratorImpl;
import net.jawr.web.resource.bundle.iterator.ResourceBundlePathsIterator;
import net.jawr.web.resource.bundle.postprocess.AbstractChainedResourceBundlePostProcessor;
import net.jawr.web.resource.bundle.postprocess.BundleProcessingStatus;
import net.jawr.web.resource.bundle.postprocess.ResourceBundlePostProcessor;
import net.jawr.web.resource.bundle.sorting.GlobalResourceBundleComparator;
import net.jawr.web.resource.bundle.variant.VariantSet;
import net.jawr.web.resource.bundle.variant.VariantUtils;
import net.jawr.web.resource.handler.bundle.ResourceBundleHandler;
import net.jawr.web.resource.handler.reader.ResourceReaderHandler;
import net.jawr.web.resource.watcher.ResourceWatcher;
import net.jawr.web.util.StopWatch;
import net.jawr.web.util.StringUtils;
import net.jawr.web.util.bom.UnicodeBOMReader;
/**
 * Default implementation of ResourceBundlesHandler
 *
 * @author Jordi Hernández Sellés
 * @author Ibrahim Chaehoi
 */
public class ResourceBundlesHandlerImpl implements ResourceBundlesHandler {
/** The logger */
private static final Logger LOGGER = LoggerFactory.getLogger(ResourceBundlesHandler.class);
/**
* The bundles that this handler manages.
*/
private List<JoinableResourceBundle> bundles;
/**
* Global bundles, to include in every page
*/
private List<JoinableResourceBundle> globalBundles;
/**
* Bundles to include upon request
*/
private List<JoinableResourceBundle> contextBundles;
/**
* The bundles that will be processed once when the server will be up and
* running.
*/
private List<String> liveProcessBundles = new ArrayList<String>();
/** The resource handler */
private ResourceReaderHandler resourceHandler;
/** The resource handler */
private ResourceBundleHandler resourceBundleHandler;
/** The Jawr config */
private JawrConfig config;
/** The post processor */
private ResourceBundlePostProcessor postProcessor;
/** The unitary post processor */
private ResourceBundlePostProcessor unitaryPostProcessor;
/** The post processor for composite bundle */
private ResourceBundlePostProcessor compositePostProcessor;
/** The unitary post processor for composite bundle */
private ResourceBundlePostProcessor unitaryCompositePostProcessor;
/** The resourceTypeBundle global preprocessor */
private GlobalProcessor<GlobalPreprocessingContext> resourceTypePreprocessor;
/** The resourceTypeBundle global postprocessor */
private GlobalProcessor<GlobalPostProcessingContext> resourceTypePostprocessor;
/** The client side handler generator */
private ClientSideHandlerGenerator clientSideHandlerGenerator;
/** The bundle hashcode generator */
private BundleHashcodeGenerator bundleHashcodeGenerator;
/** The bundle mapping */
private Properties bundleMapping;
/** The resource watcher */
private ResourceWatcher watcher;
/** The flag indicating if we need to search for variant in post process */
private boolean needToSearchForVariantInPostProcess;
/**
 * Build a ResourceBundlesHandler with no post processors or global processors.
 *
 * @param bundles
 *            List The JoinableResourceBundles to use for this handler.
 * @param resourceHandler
 *            The file system access handler.
 * @param resourceBundleHandler
 *            The handler used to read and store generated bundles.
 * @param config
 *            Configuration for this handler.
 */
public ResourceBundlesHandlerImpl(List<JoinableResourceBundle> bundles, ResourceReaderHandler resourceHandler,
ResourceBundleHandler resourceBundleHandler, JawrConfig config) {
// Delegate to the full constructor with every processor set to null.
this(bundles, resourceHandler, resourceBundleHandler, config, null, null, null, null, null, null);
}
/**
 * Build a ResourceBundlesHandler which will use the specified
 * postprocessors and global processors.
 *
 * @param bundles
 *            List The JoinableResourceBundles to use for this handler.
 * @param resourceHandler
 *            The file system access handler.
 * @param resourceBundleHandler
 *            The handler used to read and store generated bundles.
 * @param config
 *            Configuration for this handler.
 * @param postProcessor
 *            The bundle post processor.
 * @param unitaryPostProcessor
 *            The unitary (per resource) post processor.
 * @param compositePostProcessor
 *            The post processor for composite bundles.
 * @param unitaryCompositePostProcessor
 *            The unitary post processor for composite bundles.
 * @param resourceTypePreprocessor
 *            The resourceTypeBundle global preprocessor.
 * @param resourceTypePostprocessor
 *            The resourceTypeBundle global postprocessor.
 */
public ResourceBundlesHandlerImpl(List<JoinableResourceBundle> bundles, ResourceReaderHandler resourceHandler,
ResourceBundleHandler resourceBundleHandler, JawrConfig config, ResourceBundlePostProcessor postProcessor,
ResourceBundlePostProcessor unitaryPostProcessor, ResourceBundlePostProcessor compositePostProcessor,
ResourceBundlePostProcessor unitaryCompositePostProcessor,
GlobalProcessor<GlobalPreprocessingContext> resourceTypePreprocessor,
GlobalProcessor<GlobalPostProcessingContext> resourceTypePostprocessor) {
super();
this.resourceHandler = resourceHandler;
this.resourceBundleHandler = resourceBundleHandler;
this.config = config;
this.bundleHashcodeGenerator = config.getBundleHashcodeGenerator();
this.postProcessor = postProcessor;
this.unitaryPostProcessor = unitaryPostProcessor;
this.compositePostProcessor = compositePostProcessor;
this.unitaryCompositePostProcessor = unitaryCompositePostProcessor;
this.resourceTypePreprocessor = resourceTypePreprocessor;
this.resourceTypePostprocessor = resourceTypePostprocessor;
// Copy-on-write list so readers never observe a partially updated bundle list.
this.bundles = new CopyOnWriteArrayList<JoinableResourceBundle>();
this.bundles.addAll(bundles);
splitBundlesByType(bundles);
// The client-side handler generator class is configurable; instantiate it reflectively.
this.clientSideHandlerGenerator = (ClientSideHandlerGenerator) ClassLoaderResourceUtils
.buildObjectInstance(config.getClientSideHandlerGeneratorClass());
this.clientSideHandlerGenerator.init(config, globalBundles, contextBundles);
this.needToSearchForVariantInPostProcess = isSearchingForVariantInPostProcessNeeded();
}
/**
 * Checks if it is needed to search for variant in post process.
 *
 * @return true if any configured bundle post processor is a variant post
 *         processor
 */
private boolean isSearchingForVariantInPostProcessNeeded() {
    // Bug fix: the original array listed unitaryCompositePostProcessor twice
    // and never checked unitaryPostProcessor. Each of the four processors is
    // now checked exactly once, with an early return on the first match.
    ResourceBundlePostProcessor[] postprocessors = new ResourceBundlePostProcessor[] { postProcessor,
            unitaryPostProcessor, compositePostProcessor, unitaryCompositePostProcessor };
    for (ResourceBundlePostProcessor processor : postprocessors) {
        if (processor != null
                && ((AbstractChainedResourceBundlePostProcessor) processor).isVariantPostProcessor()) {
            return true;
        }
    }
    return false;
}
/*
 * (non-Javadoc)
 *
 * @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#
 * getResourceType ()
 */
public String getResourceType() {
// Delegated to the bundle handler, which owns the resource type.
return resourceBundleHandler.getResourceType();
}
/*
 * (non-Javadoc)
 *
 * @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#
 * getContextBundles ()
 */
public List<JoinableResourceBundle> getContextBundles() {
// Bundles included upon request (non-global); populated by splitBundlesByType.
return contextBundles;
}
/*
 * (non-Javadoc)
 *
 * @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#
 * getGlobalBundles ()
 */
public List<JoinableResourceBundle> getGlobalBundles() {
// Bundles included in every page; sorted and populated by splitBundlesByType.
return globalBundles;
}
/**
 * Splits the bundles in two lists: one for global bundles (sorted with
 * {@link GlobalResourceBundleComparator}) and one for the remaining
 * (context) bundles. Both results are stored in copy-on-write lists.
 */
private void splitBundlesByType(List<JoinableResourceBundle> bundles) {
    // Temporary lists (CopyOnWriteArrayList does not support sort())
    List<JoinableResourceBundle> sortableGlobals = new ArrayList<JoinableResourceBundle>();
    List<JoinableResourceBundle> others = new ArrayList<JoinableResourceBundle>();
    for (JoinableResourceBundle bundle : bundles) {
        if (bundle.getInclusionPattern().isGlobal()) {
            sortableGlobals.add(bundle);
        } else {
            others.add(bundle);
        }
    }
    // Sort the global bundles
    Collections.sort(sortableGlobals, new GlobalResourceBundleComparator());
    globalBundles = new CopyOnWriteArrayList<JoinableResourceBundle>();
    globalBundles.addAll(sortableGlobals);
    contextBundles = new CopyOnWriteArrayList<JoinableResourceBundle>();
    contextBundles.addAll(others);
}
/*
 * (non-Javadoc)
 *
 * @seenet.jawr.web.resource.bundle.handler.ResourceBundlesHandler#
 * isGlobalResourceBundle(java.lang.String)
 */
public boolean isGlobalResourceBundle(String resourceBundleId) {
    // Return as soon as a match is found; the original kept scanning the
    // whole list after setting its flag.
    for (JoinableResourceBundle bundle : globalBundles) {
        if (bundle.getId().equals(resourceBundleId)) {
            return true;
        }
    }
    return false;
}
/*
 * (non-Javadoc)
 *
 * @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#
 * getGlobalResourceBundlePaths
 * (net.jawr.web.resource.bundle.iterator.ConditionalCommentCallbackHandler,
 * java.lang.String)
 */
public ResourceBundlePathsIterator getGlobalResourceBundlePaths(DebugMode debugMode,
ConditionalCommentCallbackHandler commentCallbackHandler, Map<String, String> variants) {
// Iterate over all global bundles with the caller-supplied debug mode.
return getBundleIterator(debugMode, globalBundles, commentCallbackHandler, variants);
}
/*
 * (non-Javadoc)
 *
 * @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#
 * getGlobalResourceBundlePaths
 * (net.jawr.web.resource.bundle.iterator.ConditionalCommentCallbackHandler,
 * java.lang.String)
 */
public ResourceBundlePathsIterator getGlobalResourceBundlePaths(
ConditionalCommentCallbackHandler commentCallbackHandler, Map<String, String> variants) {
// Same as the DebugMode overload, using the debug mode from the current config.
return getBundleIterator(getDebugMode(), globalBundles, commentCallbackHandler, variants);
}
/*
 * (non-Javadoc)
 *
 * @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#
 * getGlobalResourceBundlePaths
 * (net.jawr.web.resource.bundle.iterator.ConditionalCommentCallbackHandler,
 * java.lang.String)
 */
public ResourceBundlePathsIterator getGlobalResourceBundlePaths(String bundleId,
        ConditionalCommentCallbackHandler commentCallbackHandler, Map<String, String> variants) {
    // Narrow the iteration to the single global bundle matching bundleId (if any).
    List<JoinableResourceBundle> matching = new ArrayList<JoinableResourceBundle>();
    for (JoinableResourceBundle candidate : globalBundles) {
        if (candidate.getId().equals(bundleId)) {
            matching.add(candidate);
            break;
        }
    }
    return getBundleIterator(getDebugMode(), matching, commentCallbackHandler, variants);
}
/*
 * (non-Javadoc)
 *
 * @see
 * net.jawr.web.resource.bundle.ResourceCollector#getBundlePaths(java.lang
 * .String)
 */
public ResourceBundlePathsIterator getBundlePaths(String bundleId,
ConditionalCommentCallbackHandler commentCallbackHandler, Map<String, String> variants) {
// Delegate to the DebugMode overload, using the debug mode from the current config.
return getBundlePaths(getDebugMode(), bundleId, commentCallbackHandler, variants);
}
/*
 * (non-Javadoc)
 *
 * @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#
 * getBundlePaths (boolean, java.lang.String,
 * net.jawr.web.resource.bundle.iterator.ConditionalCommentCallbackHandler,
 * java.lang.String)
 */
public ResourceBundlePathsIterator getBundlePaths(DebugMode debugMode, String bundleId,
        ConditionalCommentCallbackHandler commentCallbackHandler, Map<String, String> variants) {
    List<JoinableResourceBundle> matching = new ArrayList<JoinableResourceBundle>();
    // Global bundles are handled elsewhere; only look up context bundles here.
    if (!isGlobalResourceBundle(bundleId)) {
        for (JoinableResourceBundle candidate : contextBundles) {
            if (candidate.getId().equals(bundleId)) {
                matching.add(candidate);
                break;
            }
        }
    }
    return getBundleIterator(debugMode, matching, commentCallbackHandler, variants);
}
/**
 * Returns the bundle paths iterator matching the given debug mode.
 *
 * @param debugMode
 *            the current debug mode
 * @param bundles
 *            the bundles to iterate over
 * @param commentCallbackHandler
 *            the comment callback handler
 * @param variants
 *            the variant map
 * @return the bundle iterator
 */
private ResourceBundlePathsIterator getBundleIterator(DebugMode debugMode, List<JoinableResourceBundle> bundles,
        ConditionalCommentCallbackHandler commentCallbackHandler, Map<String, String> variants) {
    if (debugMode.equals(DebugMode.DEBUG)) {
        return new DebugModePathsIteratorImpl(bundles, commentCallbackHandler, variants);
    }
    if (debugMode.equals(DebugMode.FORCE_NON_DEBUG_IN_IE)) {
        return new IECssDebugPathsIteratorImpl(bundles, commentCallbackHandler, variants);
    }
    return new PathsIteratorImpl(bundles, commentCallbackHandler, variants);
}
/*
 * (non-Javadoc)
 *
 * @see
 * net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#writeBundleTo
 * (java.lang.String, java.io.Writer)
 */
@Override
public void writeBundleTo(String bundlePath, Writer writer) throws ResourceNotFoundException {
Reader rd = null;
try {
// If debug mode is on, resources are retrieved one by one.
if (config.isDebugModeOn()) {
rd = resourceHandler.getResource(null, bundlePath);
} else {
// Prefixes are used only in production mode
String path = PathNormalizer.removeVariantPrefixFromPath(bundlePath);
rd = resourceBundleHandler.getResourceBundleReader(path);
// Bundles registered for live processing get their placeholders
// substituted on every read (see processInLive).
if (liveProcessBundles.contains(path)) {
rd = processInLive(rd);
}
}
IOUtils.copy(rd, writer);
writer.flush();
} catch (IOException e) {
throw new BundlingProcessException("Unexpected IOException writing bundle[" + bundlePath + "]", e);
} finally {
// Always release the reader, even when copying fails.
IOUtils.close(rd);
}
}
/**
 * Process the bundle content in live: replaces every occurrence of the Jawr
 * bundle path placeholder with the URL of the current request (taken from
 * the thread-local Jawr context).
 *
 * @param reader
 *            the reader on the bundle content
 * @return the processed bundle content
 * @throws IOException
 *             if an IOException occurred
 */
private StringReader processInLive(Reader reader) throws IOException {
String requestURL = ThreadLocalJawrContext.getRequestURL();
StringWriter swriter = new StringWriter();
IOUtils.copy(reader, swriter, true);
String updatedContent = swriter.getBuffer().toString();
// Only substitute when a request URL is available for this thread.
if (requestURL != null) {
updatedContent = updatedContent.replaceAll(JawrConstant.JAWR_BUNDLE_PATH_PLACEHOLDER_PATTERN, requestURL);
}
return new StringReader(updatedContent);
}
/*
 * (non-Javadoc)
 *
 * @see
 * net.jawr.web.resource.bundle.ResourceBundlesHandler#streamBundleTo(java
 * .lang.String, java.io.OutputStream)
 */
public void streamBundleTo(String bundlePath, OutputStream out) throws ResourceNotFoundException {
// Remove prefix, which are used only in production mode
String path = PathNormalizer.removeVariantPrefixFromPath(bundlePath);
ReadableByteChannel data = null;
try {
if (liveProcessBundles.contains(path)) {
// Live-processed bundles: re-read as text, substitute placeholders,
// then gzip the result in memory before streaming it out.
Reader rd = null;
try {
rd = resourceBundleHandler.getResourceBundleReader(path);
StringReader strRd = processInLive(rd);
StringWriter strWriter = new StringWriter();
IOUtils.copy(strRd, strWriter);
ByteArrayOutputStream bos = new ByteArrayOutputStream();
GZIPOutputStream gzOut = new GZIPOutputStream(bos);
byte[] byteData = strWriter.getBuffer().toString().getBytes(config.getResourceCharset().name());
gzOut.write(byteData, 0, byteData.length);
// close() is required here to flush the trailing gzip footer into bos.
gzOut.close();
ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
data = Channels.newChannel(bis);
} finally {
IOUtils.close(rd);
}
} else {
// Normal case: stream the stored bundle channel directly.
data = resourceBundleHandler.getResourceBundleChannel(path);
}
WritableByteChannel outChannel = Channels.newChannel(out);
IOUtils.copy(data, outChannel);
} catch (IOException e) {
throw new BundlingProcessException("Unexpected IOException writing bundle [" + path + "]", e);
} finally {
// Always release the source channel, even when copying fails.
IOUtils.close(data);
}
}
/*
* (non-Javadoc)
*
* @see net.jawr.web.resource.bundle.ResourceCollector#getConfig()
*/
/** Returns the Jawr configuration used by this handler. */
public JawrConfig getConfig() {
    return this.config;
}
/**
* Returns the current debug mode
*
* @return the current debug mode
*/
/**
 * Returns the current debug mode
 *
 * @return the current debug mode
 */
private DebugMode getDebugMode() {
    if (config.isDebugModeOn()) {
        return DebugMode.DEBUG;
    }
    return DebugMode.NO_DEBUG;
}
/*
* (non-Javadoc)
*
* @see net.jawr.web.resource.bundle.ResourceCollector#initAllBundles()
*/
/**
 * Initializes all the bundles managed by this handler.
 * <p>
 * When a bundle mapping file exists and smart bundling is enabled, only the
 * bundles detected as dirty are rebuilt; otherwise every bundle is
 * processed.
 */
public void initAllBundles() {
    if (config.getUseBundleMapping()) {
        bundleMapping = resourceBundleHandler.getJawrBundleMapping();
    }
    boolean mappingFileExists = resourceBundleHandler.isExistingMappingFile();
    boolean processBundleFlag = !config.getUseBundleMapping() || !mappingFileExists;
    StopWatch stopWatch = ThreadLocalJawrContext.getStopWatch();
    List<JoinableResourceBundle> bundleToProcess = this.bundles;
    boolean forceStoreJawrBundleMapping = false;
    if (!processBundleFlag) {
        // The stored hashcode may be absent or corrupted. The original code
        // called Integer.parseInt() unconditionally, which threw a
        // NumberFormatException (or NPE) in that case; fall back to
        // rebuilding all bundles instead.
        String storedHashcodeValue = resourceBundleHandler.getJawrBundleMapping()
                .getProperty(JawrConstant.JAWR_CONFIG_HASHCODE);
        int jawrConfigHashcode = config.getConfigProperties().hashCode();
        boolean configHashcodeChanged = true;
        if (storedHashcodeValue != null) {
            try {
                configHashcodeChanged = Integer.parseInt(storedHashcodeValue) != jawrConfigHashcode;
            } catch (NumberFormatException e) {
                LOGGER.warn("Invalid Jawr config hashcode found in the bundle mapping : '" + storedHashcodeValue
                        + "'. All bundles will be processed.");
            }
        }
        boolean rebuildAllBundles = !config.getUseSmartBundling() || configHashcodeChanged;
        if (!rebuildAllBundles) {
            bundleToProcess = getBundlesToRebuild();
            if (!bundleToProcess.isEmpty() && LOGGER.isDebugEnabled()) {
                StringBuilder msg = new StringBuilder(
                        "Jawr has detect changes on the following bundles, which will be updated :\n");
                for (JoinableResourceBundle b : bundleToProcess) {
                    msg.append(b.getName() + "\n");
                }
                LOGGER.debug(msg.toString());
            }
        } else {
            if (LOGGER.isDebugEnabled() && configHashcodeChanged) {
                LOGGER.debug("Jawr config has changed since last bundling process. All bundles will be processed.");
            }
        }
        forceStoreJawrBundleMapping = !bundleToProcess.isEmpty();
    }
    // Execute processing
    build(bundleToProcess, forceStoreJawrBundleMapping, stopWatch);
}
/**
 * Executes the global preprocessing
 *
 * @param bundlesToBuild
 *            the list of bundles to rebuild; bundles newly detected as
 *            dirty during preprocessing are appended to it
 * @param processBundleFlag
 *            the flag indicating if the bundles needs to be processed
 * @param stopWatch
 *            the stopWatch
 */
private void executeGlobalPreprocessing(List<JoinableResourceBundle> bundlesToBuild, boolean processBundleFlag,
        StopWatch stopWatch) {
    if (resourceTypePreprocessor != null) {
        if (stopWatch != null) {
            stopWatch.start("Global preprocessing");
        }
        GlobalPreprocessingContext ctx = new GlobalPreprocessingContext(config, resourceHandler, processBundleFlag);
        // Preprocess the full bundle list (the 'bundles' field)
        resourceTypePreprocessor.processBundles(ctx, bundles);
        // Update the list of bundle to rebuild if new bundles have been
        // detected as dirty in the global preprocessing phase.
        // Renamed the local from 'bundles' to 'dirtyBundles' — it shadowed
        // the field of the same name used just above, which was error-prone.
        List<JoinableResourceBundle> dirtyBundles = getBundlesToRebuild();
        for (JoinableResourceBundle b : dirtyBundles) {
            if (!bundlesToBuild.contains(b)) {
                bundlesToBuild.add(b);
            }
        }
        if (stopWatch != null) {
            stopWatch.stop();
        }
    }
}
/**
 * Rebuilds the bundles which have been detected as modified ("dirty").
 * <p>
 * This is only effective when the smart bundling feature is enabled;
 * otherwise a warning is logged.
 */
public synchronized void rebuildModifiedBundles() {
    StopWatch stopWatch = ThreadLocalJawrContext.getStopWatch();
    if (config.getUseSmartBundling()) {
        List<JoinableResourceBundle> bundlesToRebuild = getBundlesToRebuild();
        for (JoinableResourceBundle bundle : bundlesToRebuild) {
            bundle.resetBundleMapping();
        }
        build(bundlesToRebuild, true, stopWatch);
        try {
            if (watcher != null) {
                // Refresh the watcher's path mappings for the rebuilt bundles
                watcher.initPathToResourceBundleMap(bundlesToRebuild);
            }
        } catch (IOException e) {
            throw new BundlingProcessException(e);
        }
    } else {
        // Fixed message: the feature must be turned ON (original said
        // "turn of"), since this branch is reached when it is disabled.
        LOGGER.warn("You should turn on the \"smart bundling\" feature to be able to rebuild modified bundles.");
    }
}
/**
* Returns the bundles which needs to be rebuild
*
* @return the bundles which needs to be rebuild
*/
/**
 * Returns the bundles which needs to be rebuild
 *
 * @return the bundles which needs to be rebuild (empty when smart bundling
 *         is disabled)
 */
private List<JoinableResourceBundle> getBundlesToRebuild() {
    List<JoinableResourceBundle> dirtyBundles = new ArrayList<>();
    if (config.getUseSmartBundling()) {
        // Scan global and context bundles alike for the dirty flag
        List<JoinableResourceBundle> candidates = new ArrayList<>(globalBundles);
        candidates.addAll(contextBundles);
        for (JoinableResourceBundle candidate : candidates) {
            if (candidate.isDirty()) {
                dirtyBundles.add(candidate);
            }
        }
    }
    return dirtyBundles;
}
/**
* Builds the bundles given in parameter
*
* @param bundlesToBuild
* the list of bundle to build
* @param forceWriteBundleMapping
* the flag indicating if the bundle mapping must be written in
* any case
* @param stopWatch
* the stop watch
*/
public void build(List<JoinableResourceBundle> bundlesToBuild, boolean forceWriteBundleMapping,
        StopWatch stopWatch) {
    boolean mappingFileExists = resourceBundleHandler.isExistingMappingFile();
    boolean processBundleFlag = !config.getUseBundleMapping() || !mappingFileExists;
    // Global preprocessing (may add newly-dirty bundles to bundlesToBuild)
    executeGlobalPreprocessing(bundlesToBuild, processBundleFlag, stopWatch);
    for (JoinableResourceBundle bundle : bundlesToBuild) {
        // Clears the linked resource mappings as they will be initialized
        // by the processing
        bundle.getLinkedFilePathMappings().clear();
        if (stopWatch != null) {
            stopWatch.start("Processing bundle '" + bundle.getName() + "'");
        }
        if (!ThreadLocalJawrContext.isBundleProcessingAtBuildTime() && null != bundle.getAlternateProductionURL()) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("No bundle generated for '" + bundle.getId()
                        + "' because a production URL is defined for this bundle.");
            }
        }
        // NOTE(review): the branch above only logs — the bundle is still
        // processed below even when an alternate production URL is defined.
        // Confirm whether a 'continue' was intended here.
        if (bundle instanceof CompositeResourceBundle) {
            joinAndStoreCompositeResourcebundle((CompositeResourceBundle) bundle);
        } else {
            joinAndStoreBundle(bundle);
        }
        // Persist the bundle's mapping entry when mapping is enabled and the
        // mapping file is new or the bundle changed
        if (config.getUseBundleMapping() && (!mappingFileExists || bundle.isDirty())) {
            JoinableResourceBundlePropertySerializer.serializeInProperties(bundle,
                    resourceBundleHandler.getResourceType(), bundleMapping);
        }
        bundle.setDirty(false);
        if (stopWatch != null) {
            stopWatch.stop();
        }
    }
    executeGlobalPostProcessing(processBundleFlag, stopWatch);
    storeJawrBundleMapping(resourceBundleHandler.isExistingMappingFile(), true);
}
/**
* Stores the Jawr bundle mapping
*
* @param mappingFileExists
* the flag indicating if the mapping file exists
* @param force
* force the storage of Jawr bundle mapping
*/
private void storeJawrBundleMapping(boolean mappingFileExists, boolean force) {
    if (config.getUseBundleMapping() && (!mappingFileExists || force)) {
        // Record the config hashcode so the next startup can detect whether
        // the configuration changed since this bundling run
        bundleMapping.setProperty(JawrConstant.JAWR_CONFIG_HASHCODE,
                Integer.toString(config.getConfigProperties().hashCode()));
        resourceBundleHandler.storeJawrBundleMapping(bundleMapping);
        if (resourceBundleHandler.getResourceType().equals(JawrConstant.CSS_TYPE)) {
            // Retrieve the image servlet mapping
            BinaryResourcesHandler binaryRsHandler = (BinaryResourcesHandler) config.getContext()
                    .getAttribute(JawrConstant.BINARY_CONTEXT_ATTRIBUTE);
            if (binaryRsHandler != null) {
                // Here we update the image mapping if we are using the
                // build time bundle processor
                JawrConfig binaryJawrConfig = binaryRsHandler.getConfig();
                // If we use the full image bundle mapping and the jawr
                // working directory is not located inside the web
                // application
                // We store the image bundle maping which now contains the
                // mapping for CSS images
                String jawrWorkingDirectory = binaryJawrConfig.getJawrWorkingDirectory();
                if (binaryJawrConfig.getUseBundleMapping() && (jawrWorkingDirectory == null
                        || !jawrWorkingDirectory.startsWith(JawrConstant.URL_SEPARATOR))) {
                    // Store the bundle mapping
                    Properties props = new Properties();
                    props.putAll(binaryRsHandler.getBinaryPathMap());
                    props.setProperty(JawrConstant.JAWR_CONFIG_HASHCODE,
                            Integer.toString(binaryJawrConfig.getConfigProperties().hashCode()));
                    binaryRsHandler.getRsBundleHandler().storeJawrBundleMapping(props);
                }
            }
        }
    }
}
/**
* Execute the global post processing
*
* @param processBundleFlag
* the flag indicating if the bundle should be processed
* @param stopWatch
* the stopWatch
*/
/**
 * Execute the global post processing
 *
 * @param processBundleFlag
 *            the flag indicating if the bundle should be processed
 * @param stopWatch
 *            the stopWatch
 */
private void executeGlobalPostProcessing(boolean processBundleFlag, StopWatch stopWatch) {
    // Nothing to do when no global postprocessor is configured
    if (resourceTypePostprocessor == null) {
        return;
    }
    if (stopWatch != null) {
        stopWatch.start("Global postprocessing");
    }
    GlobalPostProcessingContext ctx = new GlobalPostProcessingContext(config, this, resourceHandler,
            processBundleFlag);
    resourceTypePostprocessor.processBundles(ctx, this.bundles);
    if (stopWatch != null) {
        stopWatch.stop();
    }
}
/**
* Joins the members of a composite bundle in all its variants, storing in a
* separate file for each variant.
*
* @param composite
* the composite resource bundle
*/
private void joinAndStoreCompositeResourcebundle(CompositeResourceBundle composite) {
    BundleProcessingStatus status = new BundleProcessingStatus(BundleProcessingStatus.FILE_PROCESSING_TYPE,
            composite, resourceHandler, config);
    // Collect all variant names from child bundles
    Map<String, VariantSet> compositeBundleVariants = new HashMap<String, VariantSet>();
    for (Iterator<JoinableResourceBundle> it = composite.getChildBundles().iterator(); it.hasNext();) {
        JoinableResourceBundle childbundle = it.next();
        if (null != childbundle.getVariants())
            compositeBundleVariants = VariantUtils.concatVariants(compositeBundleVariants,
                    childbundle.getVariants());
    }
    composite.setVariants(compositeBundleVariants);
    if (needToSearchForVariantInPostProcess || hasVariantPostProcessor(composite)) {
        // First pass: run the postprocessors in "variant search" mode so
        // they can report additional variants they would generate
        status.setSearchingPostProcessorVariants(true);
        joinAndPostProcessBundle(composite, status);
        Map<String, VariantSet> postProcessVariants = status.getPostProcessVariants();
        if (!postProcessVariants.isEmpty()) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug(
                        "Post process variants found for bundle " + composite.getId() + ":" + postProcessVariants);
            }
            // Second pass: rebuild with the extended variant set
            Map<String, VariantSet> newVariants = VariantUtils.concatVariants(composite.getVariants(),
                    postProcessVariants);
            composite.setVariants(newVariants);
            status.setSearchingPostProcessorVariants(false);
            joinAndPostProcessBundle(composite, status);
        }
    } else {
        status.setSearchingPostProcessorVariants(false);
        joinAndPostProcessBundle(composite, status);
    }
}
/**
* Checks if the bundle has variant post processor
* @param bundle the bundle
* @return true if the bundle has variant post processor
*/
/**
 * Checks if the bundle has a variant post processor, either as bundle
 * postprocessor or as unitary postprocessor.
 *
 * @param bundle
 *            the bundle
 * @return true if the bundle has variant post processor
 */
private boolean hasVariantPostProcessor(JoinableResourceBundle bundle) {
    ResourceBundlePostProcessor processor = bundle.getBundlePostProcessor();
    if (processor != null
            && ((AbstractChainedResourceBundlePostProcessor) processor).isVariantPostProcessor()) {
        return true;
    }
    processor = bundle.getUnitaryPostProcessor();
    return processor != null
            && ((AbstractChainedResourceBundlePostProcessor) processor).isVariantPostProcessor();
}
/**
 * Joins and post processes the composite bundle, once per variant.
 *
 * @param composite
 *            the composite bundle
 * @param status
 *            the bundle processing status (carries the current variant)
 */
private void joinAndPostProcessBundle(CompositeResourceBundle composite, BundleProcessingStatus status) {
    List<Map<String, String>> allVariants = VariantUtils.getAllVariants(composite.getVariants());
    // Add the default bundle variant (the non variant one)
    allVariants.add(null);
    // Process all variants
    for (Map<String, String> variants : allVariants) {
        status.setBundleVariants(variants);
        JoinableResourceBundleContent store = new JoinableResourceBundleContent();
        // Join every child bundle (debug-only children are skipped),
        // running the unitary composite postprocessor on each.
        // Replaced the raw-iterator loops and the redundant
        // (JoinableResourceBundle) cast with enhanced-for loops.
        for (JoinableResourceBundle childbundle : composite.getChildBundles()) {
            if (!childbundle.getInclusionPattern().isIncludeOnlyOnDebug()) {
                JoinableResourceBundleContent childContent = joinAndPostprocessBundle(childbundle, variants,
                        status);
                // Do unitary postprocessing.
                status.setProcessingType(BundleProcessingStatus.FILE_PROCESSING_TYPE);
                StringBuffer content = executeUnitaryPostProcessing(composite, status, childContent.getContent(),
                        this.unitaryCompositePostProcessor);
                childContent.setContent(content);
                store.append(childContent);
            }
        }
        // Post process composite bundle as needed
        store = postProcessJoinedCompositeBundle(composite, store.getContent(), status);
        String variantKey = VariantUtils.getVariantKey(variants);
        String name = VariantUtils.getVariantBundleName(composite.getId(), variantKey, false);
        storeBundle(name, store);
        initBundleDataHashcode(composite, store, variantKey);
    }
}
/**
* Postprocess the composite bundle only if a composite bundle post
* processor is defined
*
* @param composite
* the composite bundle
* @param content
* the content
* @param status
* the status
* @return the content
*/
/**
 * Postprocesses the joined composite bundle content, using the bundle's own
 * postprocessor when defined, otherwise the default composite
 * postprocessor, otherwise leaving the content untouched.
 *
 * @param composite
 *            the composite bundle
 * @param content
 *            the joined content
 * @param status
 *            the bundle processing status
 * @return the postprocessed content, wrapped for storage
 */
private JoinableResourceBundleContent postProcessJoinedCompositeBundle(CompositeResourceBundle composite,
        StringBuffer content, BundleProcessingStatus status) {
    status.setProcessingType(BundleProcessingStatus.BUNDLE_PROCESSING_TYPE);
    ResourceBundlePostProcessor bundlePostProcessor = composite.getBundlePostProcessor();
    StringBuffer processedContent;
    if (bundlePostProcessor != null) {
        processedContent = bundlePostProcessor.postProcessBundle(status, content);
    } else if (this.compositePostProcessor != null) {
        processedContent = this.compositePostProcessor.postProcessBundle(status, content);
    } else {
        processedContent = content;
    }
    JoinableResourceBundleContent store = new JoinableResourceBundleContent();
    store.setContent(processedContent);
    return store;
}
/**
 * Initialize the bundle data hashcode and initialize the bundle mapping if
 * needed
 *
 * @param bundle
 *            the bundle
 * @param store
 *            the data to store
 * @param variant
 *            the variant key the hashcode is registered under (null for
 *            the default, non-variant bundle)
 */
private void initBundleDataHashcode(JoinableResourceBundle bundle, JoinableResourceBundleContent store,
        String variant) {
    String bundleHashcode = bundleHashcodeGenerator.generateHashCode(config, store.getContent().toString());
    bundle.setBundleDataHashCode(variant, bundleHashcode);
}
/*
* (non-Javadoc)
*
* @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#
* getTypeBundleHashcode(java.lang.String)
*/
/**
 * Returns the hashcode validity of the bundle addressed by the requested
 * path: UNKNOW_BUNDLE when the path does not resolve to a bundle, otherwise
 * VALID_HASHCODE or INVALID_HASHCODE depending on whether the hashcode and
 * bundle prefix embedded in the path match the bundle's current ones.
 *
 * @param requestedPath
 *            the requested path
 * @return the bundle hashcode type
 */
public BundleHashcodeType getBundleHashcodeType(String requestedPath) {
    BundleHashcodeType typeBundleHashcode = BundleHashcodeType.UNKNOW_BUNDLE;
    String[] pathInfos = PathNormalizer.extractBundleInfoFromPath(requestedPath);
    if (pathInfos != null) {
        String bundlePrefix = pathInfos[0];
        String bundleId = pathInfos[1];
        String variantKey = pathInfos[2];
        String hashcode = pathInfos[3];
        JoinableResourceBundle bundle = resolveBundleForPath(bundleId);
        if (bundle != null) {
            String bundleHashcode = bundle.getBundleDataHashCode(variantKey);
            // NOTE(review): '&&' binds tighter than '||', so the bundle
            // prefix comparison only applies to the 'hashcode != null'
            // branch — a null/null hashcode pair is accepted regardless of
            // prefix. Confirm this asymmetry is intended.
            if (hashcode == null && bundleHashcode == null || hashcode != null && hashcode.equals(bundleHashcode)
                    && ((bundlePrefix == null && bundle.getBundlePrefix() == null)
                            || (bundlePrefix != null && bundlePrefix.equals(bundle.getBundlePrefix())))) {
                typeBundleHashcode = BundleHashcodeType.VALID_HASHCODE;
            } else {
                typeBundleHashcode = BundleHashcodeType.INVALID_HASHCODE;
            }
        }
    }
    return typeBundleHashcode;
}
/**
 * Joins the members of a bundle and stores it.
 * <p>
 * When variant-aware postprocessors are present, a first "variant search"
 * pass is executed so that variants produced by postprocessors are
 * discovered before the final build pass.
 *
 * @param bundle
 *            the bundle
 */
private void joinAndStoreBundle(JoinableResourceBundle bundle) {
    BundleProcessingStatus status = new BundleProcessingStatus(BundleProcessingStatus.FILE_PROCESSING_TYPE, bundle,
            resourceHandler, config);
    JoinableResourceBundleContent store = null;
    // Process the bundle for searching variant
    if (needToSearchForVariantInPostProcess || hasVariantPostProcessor(bundle)) {
        status.setSearchingPostProcessorVariants(true);
        joinAndPostProcessBundle(bundle, status);
        // Process the bundles
        status.setSearchingPostProcessorVariants(false);
        Map<String, VariantSet> postProcessVariants = status.getPostProcessVariants();
        if (!postProcessVariants.isEmpty()) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug(
                        "Post process variants found for bundle " + bundle.getId() + ":" + postProcessVariants);
            }
            Map<String, VariantSet> newVariants = VariantUtils.concatVariants(bundle.getVariants(),
                    postProcessVariants);
            bundle.setVariants(newVariants);
            joinAndPostProcessBundle(bundle, status);
        }
    } else {
        status.setSearchingPostProcessorVariants(false);
        joinAndPostProcessBundle(bundle, status);
    }
    // Store the collected resources as a single file, both in text and
    // gzip formats. (This stores the default, non-variant content under
    // the bundle id, in addition to the per-variant files stored above.)
    store = joinAndPostprocessBundle(bundle, null, status);
    storeBundle(bundle.getId(), store);
    // Set the data hascode in the bundle, in case the prefix needs to
    // be generated
    initBundleDataHashcode(bundle, store, null);
}
/**
* Store the bundle
*
* @param bundleId
* the bundle Id to store
* @param store
* the bundle
*/
/**
 * Stores the bundle, remembering it as a "live processed" bundle when its
 * content contains the Jawr bundle path placeholder.
 *
 * @param bundleId
 *            the bundle Id to store
 * @param store
 *            the bundle content
 */
private void storeBundle(String bundleId, JoinableResourceBundleContent store) {
    String content = store.getContent().toString();
    if (bundleMustBeProcessedInLive(content)) {
        liveProcessBundles.add(bundleId);
    }
    resourceBundleHandler.storeBundle(bundleId, store);
}
/**
* Checks if the bundle must be processed in live
*
* @param the
* bundle content
* @return true if the bundle must be processed in live
*/
/**
 * Checks if the bundle must be processed in live, i.e. whether its content
 * contains the Jawr bundle path placeholder.
 *
 * @param content
 *            the bundle content
 * @return true if the bundle must be processed in live
 */
private boolean bundleMustBeProcessedInLive(String content) {
    // String.contains is the idiomatic form of indexOf(...) != -1
    return content.contains(JawrConstant.JAWR_BUNDLE_PATH_PLACEHOLDER);
}
/**
* Join and post process the bundle taking in account all its variants.
*
* @param bundle
* the bundle
* @param status
* the bundle processing status
*/
/**
 * Joins and post processes the bundle, once per variant (including the
 * default, non-variant one), storing each result under its variant name.
 *
 * @param bundle
 *            the bundle
 * @param status
 *            the bundle processing status
 */
private void joinAndPostProcessBundle(JoinableResourceBundle bundle, BundleProcessingStatus status) {
    List<Map<String, String>> allVariants = VariantUtils.getAllVariants(bundle.getVariants());
    // The null entry stands for the default (non variant) bundle
    allVariants.add(null);
    for (Map<String, String> variantMap : allVariants) {
        status.setBundleVariants(variantMap);
        String variantKey = VariantUtils.getVariantKey(variantMap);
        String bundleName = VariantUtils.getVariantBundleName(bundle.getId(), variantKey, false);
        JoinableResourceBundleContent content = joinAndPostprocessBundle(bundle, variantMap, status);
        storeBundle(bundleName, content);
        initBundleDataHashcode(bundle, content, variantKey);
    }
}
/**
 * Reads all the members of a bundle and executes all associated
 * postprocessors.
 *
 * @param bundle
 *            the bundle
 * @param variants
 *            the variant map (null for the default, non-variant bundle)
 * @param status
 *            the bundling processing status
 * @return the resource bundle content, where all postprocessors have been
 *         executed
 */
private JoinableResourceBundleContent joinAndPostprocessBundle(JoinableResourceBundle bundle,
        Map<String, String> variants, BundleProcessingStatus status) {
    JoinableResourceBundleContent bundleContent = new JoinableResourceBundleContent();
    StringBuffer bundleData = new StringBuffer();
    StringBuffer store = null;
    try {
        boolean firstPath = true;
        // Run through all the files belonging to the bundle
        Iterator<BundlePath> pathIterator = null;
        if (bundle.getInclusionPattern().isIncludeOnlyOnDebug()) {
            pathIterator = bundle.getItemDebugPathList(variants).iterator();
        } else {
            pathIterator = bundle.getItemPathList(variants).iterator();
        }
        while (pathIterator.hasNext()) {
            // File is first created in memory using a stringwriter.
            StringWriter writer = new StringWriter();
            BufferedWriter bwriter = new BufferedWriter(writer);
            // Removed the redundant (String) cast on getPath()
            String path = pathIterator.next().getPath();
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Adding file [" + path + "] to bundle " + bundle.getId());
            }
            // Get a reader on the resource, with appropiate encoding
            Reader rd = null;
            try {
                rd = resourceHandler.getResource(bundle, path, true);
            } catch (ResourceNotFoundException e) {
                // If a mapped file does not exist, a warning is issued and
                // process continues normally.
                LOGGER.warn("A mapped resource was not found: [" + path + "]. Please check your configuration");
                continue;
            }
            // Update the status.
            status.setLastPathAdded(path);
            // Keep the BOM of the first resource only; strip it from the
            // following ones so the joined content carries a single BOM.
            rd = new UnicodeBOMReader(rd, config.getResourceCharset());
            if (!firstPath && ((UnicodeBOMReader) rd).hasBOM()) {
                ((UnicodeBOMReader) rd).skipBOM();
            } else {
                firstPath = false;
            }
            // NOTE(review): assumes IOUtils.copy(rd, bwriter, true)
            // flushes/closes the buffered writer so writer.getBuffer()
            // sees all copied data — confirm against IOUtils.
            IOUtils.copy(rd, bwriter, true);
            // Add new line at the end if it doesn't exist
            StringBuffer buffer = writer.getBuffer();
            if (!buffer.toString().endsWith(StringUtils.STR_LINE_FEED)) {
                buffer.append(StringUtils.STR_LINE_FEED);
            }
            // Do unitary postprocessing.
            status.setProcessingType(BundleProcessingStatus.FILE_PROCESSING_TYPE);
            bundleData.append(executeUnitaryPostProcessing(bundle, status, buffer, this.unitaryPostProcessor));
        }
        // Post process bundle as needed
        store = executeBundlePostProcessing(bundle, status, bundleData);
    } catch (IOException e) {
        throw new BundlingProcessException(
                "Unexpected IOException generating collected file [" + bundle.getId() + "].", e);
    }
    bundleContent.setContent(store);
    return bundleContent;
}
/**
* Executes the unitary resource post processing
*
* @param bundle
* the bundle
* @param status
* the bundle processing status
* @param content
* the content to process
* @return the processed content
*/
/**
 * Executes the unitary resource post processing, preferring the bundle's
 * own unitary postprocessor, then the supplied default one; when neither is
 * defined the content is returned unchanged.
 *
 * @param bundle
 *            the bundle
 * @param status
 *            the bundle processing status
 * @param content
 *            the content to process
 * @param defaultPostProcessor
 *            the fallback postprocessor
 * @return the processed content
 */
private StringBuffer executeUnitaryPostProcessing(JoinableResourceBundle bundle, BundleProcessingStatus status,
        StringBuffer content, ResourceBundlePostProcessor defaultPostProcessor) {
    status.setProcessingType(BundleProcessingStatus.FILE_PROCESSING_TYPE);
    ResourceBundlePostProcessor unitaryProcessor = bundle.getUnitaryPostProcessor();
    if (unitaryProcessor != null) {
        StringBuffer result = new StringBuffer();
        result.append(unitaryProcessor.postProcessBundle(status, content));
        return result;
    }
    if (defaultPostProcessor != null) {
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("POSTPROCESSING UNIT:" + status.getLastPathAdded());
        }
        StringBuffer result = new StringBuffer();
        result.append(defaultPostProcessor.postProcessBundle(status, content));
        return result;
    }
    return content;
}
/**
* Execute the bundle post processing
*
* @param bundle
* the bundle
* @param status
* the status
* @param bundleData
* the bundle data
* @return the processed content
*/
/**
 * Executes the bundle-level post processing, preferring the bundle's own
 * postprocessor, then the handler default; when neither is defined the
 * content is returned unchanged.
 *
 * @param bundle
 *            the bundle
 * @param status
 *            the status
 * @param bundleData
 *            the bundle data
 * @return the processed content
 */
private StringBuffer executeBundlePostProcessing(JoinableResourceBundle bundle, BundleProcessingStatus status,
        StringBuffer bundleData) {
    status.setProcessingType(BundleProcessingStatus.BUNDLE_PROCESSING_TYPE);
    status.setLastPathAdded(bundle.getId());
    ResourceBundlePostProcessor processor = bundle.getBundlePostProcessor();
    if (processor == null) {
        processor = this.postProcessor;
    }
    if (processor != null) {
        return processor.postProcessBundle(status, bundleData);
    }
    return bundleData;
}
/*
* (non-Javadoc)
*
* @seenet.jawr.web.resource.bundle.handler.ResourceBundlesHandler#
* resolveBundleForPath(java.lang.String)
*/
/**
 * Resolves the bundle whose id matches the given path, or to which the
 * path belongs.
 *
 * @param path
 *            the path to resolve
 * @return the matching bundle, or null when none matches
 */
public JoinableResourceBundle resolveBundleForPath(String path) {
    for (JoinableResourceBundle candidate : bundles) {
        if (candidate.getId().equals(path) || candidate.belongsToBundle(path)) {
            return candidate;
        }
    }
    return null;
}
/*
* (non-Javadoc)
*
* @seenet.jawr.web.resource.bundle.handler.ResourceBundlesHandler#
* getClientSideHandler()
*/
/** Returns the client side handler generator. */
public ClientSideHandlerGenerator getClientSideHandler() {
    return clientSideHandlerGenerator;
}
/*
* (non-Javadoc)
*
* @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#
* getBundleTextDirPath()
*/
/** Returns the directory path where text bundles are stored. */
public String getBundleTextDirPath() {
    return resourceBundleHandler.getBundleTextDirPath();
}
/*
* (non-Javadoc)
*
* @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#
* getBundleZipDirPath()
*/
/** Returns the directory path where gzipped bundles are stored. */
@Override
public String getBundleZipDirPath() {
    return resourceBundleHandler.getBundleZipDirPath();
}
/*
* (non-Javadoc)
*
* @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#
* notifyModification(java.util.List)
*/
/**
 * Flags the given bundles as dirty so they get rebuilt on the next
 * bundling pass.
 *
 * @param bundles
 *            the modified bundles
 */
@Override
public void notifyModification(List<JoinableResourceBundle> bundles) {
    for (JoinableResourceBundle modified : bundles) {
        // Only log the transition to dirty once per bundle
        if (!modified.isDirty() && LOGGER.isDebugEnabled()) {
            LOGGER.debug("The bundle '" + modified.getId() + "' has been modified and needs to be rebuild.");
        }
        modified.setDirty(true);
    }
}
/*
* (non-Javadoc)
*
* @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#
* bundlesNeedToBeRebuild()
*/
/** Returns true when at least one bundle is dirty and needs a rebuild. */
@Override
public boolean bundlesNeedToBeRebuild() {
    List<JoinableResourceBundle> dirtyBundles = getBundlesToRebuild();
    return !dirtyBundles.isEmpty();
}
/*
* (non-Javadoc)
*
* @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#
* getDirtyBundleNames()
*/
/**
 * Returns the names of the bundles which need to be rebuilt.
 *
 * @return the dirty bundle names (empty when none)
 */
@Override
public List<String> getDirtyBundleNames() {
    List<JoinableResourceBundle> dirtyBundles = getBundlesToRebuild();
    List<String> names = new ArrayList<>(dirtyBundles.size());
    for (int i = 0; i < dirtyBundles.size(); i++) {
        names.add(dirtyBundles.get(i).getName());
    }
    return names;
}
/*
* (non-Javadoc)
*
* @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#
* setResourceWatcher(net.jawr.web.resource.watcher.ResourceWatcher)
*/
/**
 * Sets the resource watcher used to track bundle source modifications.
 *
 * @param watcher
 *            the resource watcher to set
 */
@Override
public void setResourceWatcher(ResourceWatcher watcher) {
    this.watcher = watcher;
}
}
|
jawr-core/src/main/java/net/jawr/web/resource/bundle/handler/ResourceBundlesHandlerImpl.java
|
/**
* Copyright 2007-2016 Jordi Hernández Sellés, Ibrahim Chaehoi
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package net.jawr.web.resource.bundle.handler;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Reader;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.Writer;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.zip.GZIPOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.jawr.web.DebugMode;
import net.jawr.web.JawrConstant;
import net.jawr.web.config.JawrConfig;
import net.jawr.web.context.ThreadLocalJawrContext;
import net.jawr.web.exception.BundlingProcessException;
import net.jawr.web.exception.ResourceNotFoundException;
import net.jawr.web.resource.BinaryResourcesHandler;
import net.jawr.web.resource.bundle.CompositeResourceBundle;
import net.jawr.web.resource.bundle.IOUtils;
import net.jawr.web.resource.bundle.JoinableResourceBundle;
import net.jawr.web.resource.bundle.JoinableResourceBundleContent;
import net.jawr.web.resource.bundle.JoinableResourceBundlePropertySerializer;
import net.jawr.web.resource.bundle.factory.global.postprocessor.GlobalPostProcessingContext;
import net.jawr.web.resource.bundle.factory.global.preprocessor.GlobalPreprocessingContext;
import net.jawr.web.resource.bundle.factory.util.ClassLoaderResourceUtils;
import net.jawr.web.resource.bundle.factory.util.PathNormalizer;
import net.jawr.web.resource.bundle.global.processor.GlobalProcessor;
import net.jawr.web.resource.bundle.hashcode.BundleHashcodeGenerator;
import net.jawr.web.resource.bundle.iterator.BundlePath;
import net.jawr.web.resource.bundle.iterator.ConditionalCommentCallbackHandler;
import net.jawr.web.resource.bundle.iterator.DebugModePathsIteratorImpl;
import net.jawr.web.resource.bundle.iterator.IECssDebugPathsIteratorImpl;
import net.jawr.web.resource.bundle.iterator.PathsIteratorImpl;
import net.jawr.web.resource.bundle.iterator.ResourceBundlePathsIterator;
import net.jawr.web.resource.bundle.postprocess.AbstractChainedResourceBundlePostProcessor;
import net.jawr.web.resource.bundle.postprocess.BundleProcessingStatus;
import net.jawr.web.resource.bundle.postprocess.ResourceBundlePostProcessor;
import net.jawr.web.resource.bundle.sorting.GlobalResourceBundleComparator;
import net.jawr.web.resource.bundle.variant.VariantSet;
import net.jawr.web.resource.bundle.variant.VariantUtils;
import net.jawr.web.resource.handler.bundle.ResourceBundleHandler;
import net.jawr.web.resource.handler.reader.ResourceReaderHandler;
import net.jawr.web.resource.watcher.ResourceWatcher;
import net.jawr.web.util.StopWatch;
import net.jawr.web.util.StringUtils;
import net.jawr.web.util.bom.UnicodeBOMReader;
/**
 * Default implementation of ResourceBundlesHandler
 *
 * @author Jordi Hernández Sellés
 * @author Ibrahim Chaehoi
 */
public class ResourceBundlesHandlerImpl implements ResourceBundlesHandler {
/** The logger */
private static final Logger LOGGER = LoggerFactory.getLogger(ResourceBundlesHandler.class);
/**
 * The bundles that this handler manages.
 */
private List<JoinableResourceBundle> bundles;
/**
 * Global bundles, to include in every page
 */
private List<JoinableResourceBundle> globalBundles;
/**
 * Bundles to include upon request
 */
private List<JoinableResourceBundle> contextBundles;
/**
 * The bundles that will be processed once when the server will be up and
 * running.
 */
private List<String> liveProcessBundles = new ArrayList<String>();
/** The resource reader handler */
private ResourceReaderHandler resourceHandler;
/** The resource bundle handler */
private ResourceBundleHandler resourceBundleHandler;
/** The Jawr config */
private JawrConfig config;
/** The post processor */
private ResourceBundlePostProcessor postProcessor;
/** The unitary post processor */
private ResourceBundlePostProcessor unitaryPostProcessor;
/** The post processor for composite bundle */
private ResourceBundlePostProcessor compositePostProcessor;
/** The unitary post processor for composite bundle */
private ResourceBundlePostProcessor unitaryCompositePostProcessor;
/** The resourceTypeBundle global preprocessor */
private GlobalProcessor<GlobalPreprocessingContext> resourceTypePreprocessor;
/** The resourceTypeBundle global postprocessor */
private GlobalProcessor<GlobalPostProcessingContext> resourceTypePostprocessor;
/** The client side handler generator */
private ClientSideHandlerGenerator clientSideHandlerGenerator;
/** The bundle hashcode generator */
private BundleHashcodeGenerator bundleHashcodeGenerator;
/** The bundle mapping */
private Properties bundleMapping;
/** The resource watcher */
private ResourceWatcher watcher;
/** The flag indicating if we need to search for variant in post process */
private boolean needToSearchForVariantInPostProcess;
/**
 * Build a ResourceBundlesHandler.
 *
 * @param bundles
 *            List The JoinableResourceBundles to use for this handler.
 * @param resourceHandler
 *            The file system access handler.
 * @param resourceBundleHandler
 *            The resource bundle handler.
 * @param config
 *            Configuration for this handler.
 */
public ResourceBundlesHandlerImpl(List<JoinableResourceBundle> bundles, ResourceReaderHandler resourceHandler,
        ResourceBundleHandler resourceBundleHandler, JawrConfig config) {
    this(bundles, resourceHandler, resourceBundleHandler, config, null, null, null, null, null, null);
}
/**
 * Build a ResourceBundlesHandler which will use the specified
 * postprocessors.
 *
 * @param bundles
 *            List The JoinableResourceBundles to use for this handler.
 * @param resourceHandler
 *            The file system access handler.
 * @param resourceBundleHandler
 *            The handler used to store and retrieve the generated bundles.
 * @param config
 *            Configuration for this handler.
 * @param postProcessor
 *            the bundle post processor
 * @param unitaryPostProcessor
 *            the unitary (per file) post processor
 * @param compositePostProcessor
 *            the post processor for composite bundles
 * @param unitaryCompositePostProcessor
 *            the unitary post processor for composite bundle members
 * @param resourceTypePreprocessor
 *            the global preprocessor for this resource type
 * @param resourceTypePostprocessor
 *            the global postprocessor for this resource type
 */
public ResourceBundlesHandlerImpl(List<JoinableResourceBundle> bundles, ResourceReaderHandler resourceHandler,
		ResourceBundleHandler resourceBundleHandler, JawrConfig config, ResourceBundlePostProcessor postProcessor,
		ResourceBundlePostProcessor unitaryPostProcessor, ResourceBundlePostProcessor compositePostProcessor,
		ResourceBundlePostProcessor unitaryCompositePostProcessor,
		GlobalProcessor<GlobalPreprocessingContext> resourceTypePreprocessor,
		GlobalProcessor<GlobalPostProcessingContext> resourceTypePostprocessor) {
	super();
	this.resourceHandler = resourceHandler;
	this.resourceBundleHandler = resourceBundleHandler;
	this.config = config;
	this.bundleHashcodeGenerator = config.getBundleHashcodeGenerator();
	this.postProcessor = postProcessor;
	this.unitaryPostProcessor = unitaryPostProcessor;
	this.compositePostProcessor = compositePostProcessor;
	this.unitaryCompositePostProcessor = unitaryCompositePostProcessor;
	this.resourceTypePreprocessor = resourceTypePreprocessor;
	this.resourceTypePostprocessor = resourceTypePostprocessor;
	// Thread-safe copy: the bundle list may be read concurrently at runtime
	this.bundles = new CopyOnWriteArrayList<JoinableResourceBundle>();
	this.bundles.addAll(bundles);
	splitBundlesByType(bundles);
	this.clientSideHandlerGenerator = (ClientSideHandlerGenerator) ClassLoaderResourceUtils
			.buildObjectInstance(config.getClientSideHandlerGeneratorClass());
	this.clientSideHandlerGenerator.init(config, globalBundles, contextBundles);
	this.needToSearchForVariantInPostProcess = isSearchingForVariantInPostProcessNeeded();
}
/**
 * Checks if it is needed to search for variant in post process.
 *
 * Fix: the original array listed {@code unitaryCompositePostProcessor}
 * twice and never checked {@code unitaryPostProcessor}, so a variant-aware
 * unitary post processor was silently ignored. The cast is now guarded by
 * an {@code instanceof} check to avoid a ClassCastException for custom
 * post processor implementations.
 *
 * @return true if it is needed to search for variant in post process
 */
private boolean isSearchingForVariantInPostProcessNeeded() {
	ResourceBundlePostProcessor[] postprocessors = new ResourceBundlePostProcessor[] { postProcessor,
			unitaryPostProcessor, compositePostProcessor, unitaryCompositePostProcessor };
	for (ResourceBundlePostProcessor resourceBundlePostProcessor : postprocessors) {
		if (resourceBundlePostProcessor instanceof AbstractChainedResourceBundlePostProcessor
				&& ((AbstractChainedResourceBundlePostProcessor) resourceBundlePostProcessor)
						.isVariantPostProcessor()) {
			return true;
		}
	}
	return false;
}
/**
 * Returns the resource type (e.g. js or css) handled by this bundles
 * handler, as reported by the underlying bundle handler.
 *
 * @return the resource type
 */
public String getResourceType() {
	return this.resourceBundleHandler.getResourceType();
}
/**
 * Returns the bundles which are included upon request (non global ones).
 *
 * @return the context bundles
 */
public List<JoinableResourceBundle> getContextBundles() {
	return this.contextBundles;
}
/**
 * Returns the global bundles, i.e. the ones included in every page.
 *
 * @return the global bundles
 */
public List<JoinableResourceBundle> getGlobalBundles() {
	return this.globalBundles;
}
/**
 * Splits the bundles in two lists, one for global bundles and one for the
 * remaining (context) bundles, and sorts the global ones.
 *
 * @param bundles
 *            the bundles to split
 */
private void splitBundlesByType(List<JoinableResourceBundle> bundles) {
	// Collect into plain ArrayLists first: CopyOnWriteArrayList does not
	// support in-place sorting.
	List<JoinableResourceBundle> tmpGlobal = new ArrayList<JoinableResourceBundle>();
	List<JoinableResourceBundle> tmpContext = new ArrayList<JoinableResourceBundle>();
	for (JoinableResourceBundle bundle : bundles) {
		if (bundle.getInclusionPattern().isGlobal()) {
			tmpGlobal.add(bundle);
		} else {
			tmpContext.add(bundle);
		}
	}
	// Global bundles must be ordered according to their inclusion order
	Collections.sort(tmpGlobal, new GlobalResourceBundleComparator());
	globalBundles = new CopyOnWriteArrayList<JoinableResourceBundle>();
	globalBundles.addAll(tmpGlobal);
	contextBundles = new CopyOnWriteArrayList<JoinableResourceBundle>();
	contextBundles.addAll(tmpContext);
}
/**
 * Checks whether the given bundle id refers to a global bundle.
 *
 * Fix: the original kept scanning the whole list even after a match was
 * found; we now return as soon as the bundle is located.
 *
 * @param resourceBundleId
 *            the bundle id to check
 * @return true if the id refers to a global bundle
 */
public boolean isGlobalResourceBundle(String resourceBundleId) {
	for (JoinableResourceBundle bundle : globalBundles) {
		if (bundle.getId().equals(resourceBundleId)) {
			return true;
		}
	}
	return false;
}
/**
 * Returns an iterator over the paths of all global bundles for the given
 * debug mode and variants.
 *
 * @param debugMode
 *            the debug mode to use
 * @param commentCallbackHandler
 *            the conditional comment callback handler
 * @param variants
 *            the variant map
 * @return the iterator over the global bundle paths
 */
public ResourceBundlePathsIterator getGlobalResourceBundlePaths(DebugMode debugMode,
		ConditionalCommentCallbackHandler commentCallbackHandler, Map<String, String> variants) {
	return getBundleIterator(debugMode, globalBundles, commentCallbackHandler, variants);
}
/**
 * Returns an iterator over the paths of all global bundles, using the
 * debug mode currently configured.
 *
 * @param commentCallbackHandler
 *            the conditional comment callback handler
 * @param variants
 *            the variant map
 * @return the iterator over the global bundle paths
 */
public ResourceBundlePathsIterator getGlobalResourceBundlePaths(
		ConditionalCommentCallbackHandler commentCallbackHandler, Map<String, String> variants) {
	return getBundleIterator(getDebugMode(), globalBundles, commentCallbackHandler, variants);
}
/**
 * Returns an iterator over the paths of the single global bundle with the
 * given id (the iterator is empty when no global bundle matches).
 *
 * @param bundleId
 *            the id of the requested global bundle
 * @param commentCallbackHandler
 *            the conditional comment callback handler
 * @param variants
 *            the variant map
 * @return the iterator over the matching global bundle paths
 */
public ResourceBundlePathsIterator getGlobalResourceBundlePaths(String bundleId,
		ConditionalCommentCallbackHandler commentCallbackHandler, Map<String, String> variants) {
	List<JoinableResourceBundle> bundles = new ArrayList<JoinableResourceBundle>();
	for (JoinableResourceBundle candidate : globalBundles) {
		if (candidate.getId().equals(bundleId)) {
			bundles.add(candidate);
			break;
		}
	}
	return getBundleIterator(getDebugMode(), bundles, commentCallbackHandler, variants);
}
/**
 * Returns an iterator over the paths of the bundle with the given id,
 * using the debug mode currently configured.
 *
 * @param bundleId
 *            the bundle id
 * @param commentCallbackHandler
 *            the conditional comment callback handler
 * @param variants
 *            the variant map
 * @return the iterator over the bundle paths
 */
public ResourceBundlePathsIterator getBundlePaths(String bundleId,
		ConditionalCommentCallbackHandler commentCallbackHandler, Map<String, String> variants) {
	return getBundlePaths(getDebugMode(), bundleId, commentCallbackHandler, variants);
}
/**
 * Returns an iterator over the paths of the bundle with the given id, for
 * the given debug mode. When the id refers to a global bundle, the
 * iterator is built over an empty list (global bundles are served through
 * the dedicated global-path methods).
 *
 * @param debugMode
 *            the debug mode to use
 * @param bundleId
 *            the bundle id
 * @param commentCallbackHandler
 *            the conditional comment callback handler
 * @param variants
 *            the variant map
 * @return the iterator over the bundle paths
 */
public ResourceBundlePathsIterator getBundlePaths(DebugMode debugMode, String bundleId,
		ConditionalCommentCallbackHandler commentCallbackHandler, Map<String, String> variants) {
	List<JoinableResourceBundle> bundles = new ArrayList<JoinableResourceBundle>();
	// Only look the requested bundle up among context bundles; global
	// bundles are excluded on purpose.
	if (!isGlobalResourceBundle(bundleId)) {
		for (JoinableResourceBundle candidate : contextBundles) {
			if (candidate.getId().equals(bundleId)) {
				bundles.add(candidate);
				break;
			}
		}
	}
	return getBundleIterator(debugMode, bundles, commentCallbackHandler, variants);
}
/**
 * Returns the bundle paths iterator matching the given debug mode.
 *
 * @param debugMode
 *            the flag indicating if we are in debug mode or not
 * @param bundles
 *            the bundles to iterate over
 * @param commentCallbackHandler
 *            the comment callback handler
 * @param variants
 *            the variant map
 * @return the bundle iterator
 */
private ResourceBundlePathsIterator getBundleIterator(DebugMode debugMode, List<JoinableResourceBundle> bundles,
		ConditionalCommentCallbackHandler commentCallbackHandler, Map<String, String> variants) {
	if (debugMode.equals(DebugMode.DEBUG)) {
		return new DebugModePathsIteratorImpl(bundles, commentCallbackHandler, variants);
	}
	if (debugMode.equals(DebugMode.FORCE_NON_DEBUG_IN_IE)) {
		return new IECssDebugPathsIteratorImpl(bundles, commentCallbackHandler, variants);
	}
	return new PathsIteratorImpl(bundles, commentCallbackHandler, variants);
}
/**
 * Writes the content of the bundle at the given path to the writer.
 * In debug mode the resource is read directly through the resource
 * handler; in production mode the pre-built bundle is read from the
 * bundle handler, with an additional live-processing pass for bundles
 * whose content contains the bundle path placeholder.
 *
 * @param bundlePath
 *            the requested bundle path
 * @param writer
 *            the writer to write the bundle content to
 * @throws ResourceNotFoundException
 *             if the bundle resource cannot be found
 */
@Override
public void writeBundleTo(String bundlePath, Writer writer) throws ResourceNotFoundException {
	Reader rd = null;
	try {
		// If debug mode is on, resources are retrieved one by one.
		if (config.isDebugModeOn()) {
			rd = resourceHandler.getResource(null, bundlePath);
		} else {
			// Prefixes are used only in production mode
			String path = PathNormalizer.removeVariantPrefixFromPath(bundlePath);
			rd = resourceBundleHandler.getResourceBundleReader(path);
			if (liveProcessBundles.contains(path)) {
				rd = processInLive(rd);
			}
		}
		IOUtils.copy(rd, writer);
		writer.flush();
	} catch (IOException e) {
		throw new BundlingProcessException("Unexpected IOException writing bundle[" + bundlePath + "]", e);
	} finally {
		// Always release the reader, even on failure
		IOUtils.close(rd);
	}
}
/**
 * Process the bundle content in live: replaces every occurrence of the
 * bundle path placeholder with the current request URL, when one is
 * available in the thread-local Jawr context.
 *
 * @param reader
 *            the reader on the raw bundle content
 * @return a reader on the processed bundle content
 * @throws IOException
 *             if an IOException occurred while reading the content
 */
private StringReader processInLive(Reader reader) throws IOException {
	StringWriter contentWriter = new StringWriter();
	IOUtils.copy(reader, contentWriter, true);
	String content = contentWriter.getBuffer().toString();
	String requestURL = ThreadLocalJawrContext.getRequestURL();
	if (requestURL != null) {
		content = content.replaceAll(JawrConstant.JAWR_BUNDLE_PATH_PLACEHOLDER_PATTERN, requestURL);
	}
	return new StringReader(content);
}
/**
 * Streams the (gzipped) bundle at the given path to the output stream.
 * Bundles flagged for live processing are re-read, processed and
 * re-compressed on the fly; all other bundles are streamed directly from
 * the stored gzip channel.
 *
 * @param bundlePath
 *            the requested bundle path
 * @param out
 *            the output stream to write to
 * @throws ResourceNotFoundException
 *             if the bundle resource cannot be found
 */
public void streamBundleTo(String bundlePath, OutputStream out) throws ResourceNotFoundException {
	// Remove prefix, which are used only in production mode
	String path = PathNormalizer.removeVariantPrefixFromPath(bundlePath);
	ReadableByteChannel data = null;
	try {
		if (liveProcessBundles.contains(path)) {
			Reader rd = null;
			try {
				// Re-process the text content, then gzip it in memory so
				// the caller still receives compressed data.
				rd = resourceBundleHandler.getResourceBundleReader(path);
				StringReader strRd = processInLive(rd);
				StringWriter strWriter = new StringWriter();
				IOUtils.copy(strRd, strWriter);
				ByteArrayOutputStream bos = new ByteArrayOutputStream();
				GZIPOutputStream gzOut = new GZIPOutputStream(bos);
				byte[] byteData = strWriter.getBuffer().toString().getBytes(config.getResourceCharset().name());
				gzOut.write(byteData, 0, byteData.length);
				gzOut.close();
				ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
				data = Channels.newChannel(bis);
			} finally {
				IOUtils.close(rd);
			}
		} else {
			data = resourceBundleHandler.getResourceBundleChannel(path);
		}
		WritableByteChannel outChannel = Channels.newChannel(out);
		IOUtils.copy(data, outChannel);
	} catch (IOException e) {
		throw new BundlingProcessException("Unexpected IOException writing bundle [" + path + "]", e);
	} finally {
		// Always release the source channel
		IOUtils.close(data);
	}
}
/**
 * Returns the Jawr configuration used by this handler.
 *
 * @return the Jawr configuration
 */
public JawrConfig getConfig() {
	return this.config;
}
/**
 * Returns the current debug mode, derived from the Jawr configuration.
 *
 * @return {@code DebugMode.DEBUG} when debug mode is on,
 *         {@code DebugMode.NO_DEBUG} otherwise
 */
private DebugMode getDebugMode() {
	if (config.isDebugModeOn()) {
		return DebugMode.DEBUG;
	}
	return DebugMode.NO_DEBUG;
}
/**
 * Initializes all bundles. Determines which bundles need to be
 * (re)processed — either all of them, or only the dirty ones when smart
 * bundling is enabled and the stored configuration hashcode matches the
 * current one — and launches the build.
 *
 * Fix: the stored {@code JAWR_CONFIG_HASHCODE} property may be absent or
 * unparsable (e.g. a mapping file written by an older version), in which
 * case the original code crashed with a NumberFormatException. We now fall
 * back to rebuilding all bundles instead.
 */
public void initAllBundles() {
	if (config.getUseBundleMapping()) {
		bundleMapping = resourceBundleHandler.getJawrBundleMapping();
	}
	boolean mappingFileExists = resourceBundleHandler.isExistingMappingFile();
	boolean processBundleFlag = !config.getUseBundleMapping() || !mappingFileExists;
	StopWatch stopWatch = ThreadLocalJawrContext.getStopWatch();
	List<JoinableResourceBundle> bundleToProcess = this.bundles;
	boolean forceStoreJawrBundleMapping = false;
	if (!processBundleFlag) {
		// Compare the stored config hashcode with the current one to detect
		// configuration changes since the last bundling process.
		String storedHashcode = resourceBundleHandler.getJawrBundleMapping()
				.getProperty(JawrConstant.JAWR_CONFIG_HASHCODE);
		int jawrConfigHashcode = config.getConfigProperties().hashCode();
		boolean configUnchanged = false;
		if (storedHashcode != null) {
			try {
				configUnchanged = Integer.parseInt(storedHashcode) == jawrConfigHashcode;
			} catch (NumberFormatException e) {
				LOGGER.warn("Invalid stored Jawr config hashcode [" + storedHashcode
						+ "]. All bundles will be processed.");
			}
		}
		boolean rebuildAllBundles = !config.getUseSmartBundling() || !configUnchanged;
		if (!rebuildAllBundles) {
			// Smart bundling: only rebuild bundles flagged as dirty
			bundleToProcess = getBundlesToRebuild();
			if (!bundleToProcess.isEmpty() && LOGGER.isDebugEnabled()) {
				StringBuilder msg = new StringBuilder(
						"Jawr has detect changes on the following bundles, which will be updated :\n");
				for (JoinableResourceBundle b : bundleToProcess) {
					msg.append(b.getName() + "\n");
				}
				LOGGER.debug(msg.toString());
			}
		} else {
			if (LOGGER.isDebugEnabled() && !configUnchanged) {
				LOGGER.debug("Jawr config has changed since last bundling process. All bundles will be processed.");
			}
		}
		forceStoreJawrBundleMapping = !bundleToProcess.isEmpty();
	}
	// Execute processing
	build(bundleToProcess, forceStoreJawrBundleMapping, stopWatch);
}
/**
 * Executes the global preprocessing, then adds to the build list any
 * bundle newly flagged as dirty by the preprocessing phase.
 *
 * Fix: the local variable holding the dirty bundles was named
 * {@code bundles}, shadowing the field of the same name that is passed to
 * the preprocessor two lines earlier; it is renamed to
 * {@code dirtyBundles} to remove the hazard.
 *
 * @param bundlesToBuild
 *            The list of bundles to rebuild (updated in place)
 * @param processBundleFlag
 *            the flag indicating if the bundles needs to be processed
 * @param stopWatch
 *            the stopWatch (may be null)
 */
private void executeGlobalPreprocessing(List<JoinableResourceBundle> bundlesToBuild, boolean processBundleFlag,
		StopWatch stopWatch) {
	if (resourceTypePreprocessor != null) {
		if (stopWatch != null) {
			stopWatch.start("Global preprocessing");
		}
		GlobalPreprocessingContext ctx = new GlobalPreprocessingContext(config, resourceHandler, processBundleFlag);
		resourceTypePreprocessor.processBundles(ctx, bundles);
		// Update the list of bundle to rebuild if new bundles have been
		// detected as dirty in the global preprocessing phase
		List<JoinableResourceBundle> dirtyBundles = getBundlesToRebuild();
		for (JoinableResourceBundle b : dirtyBundles) {
			if (!bundlesToBuild.contains(b)) {
				bundlesToBuild.add(b);
			}
		}
		if (stopWatch != null) {
			stopWatch.stop();
		}
	}
}
/**
 * Rebuilds the bundles which have been detected as modified (dirty), then
 * refreshes the resource watcher mapping for them. Only effective when
 * smart bundling is enabled.
 *
 * Fix: the warning message said "turn of" where "turn on" was meant
 * (the warning fires precisely when smart bundling is disabled); the
 * javadoc also documented a parameter this no-arg method does not have.
 */
public synchronized void rebuildModifiedBundles() {
	StopWatch stopWatch = ThreadLocalJawrContext.getStopWatch();
	if (config.getUseSmartBundling()) {
		List<JoinableResourceBundle> bundlesToRebuild = getBundlesToRebuild();
		for (JoinableResourceBundle bundle : bundlesToRebuild) {
			// Drop the cached mapping so it is recomputed during the build
			bundle.resetBundleMapping();
		}
		build(bundlesToRebuild, true, stopWatch);
		try {
			if (watcher != null) {
				watcher.initPathToResourceBundleMap(bundlesToRebuild);
			}
		} catch (IOException e) {
			throw new BundlingProcessException(e);
		}
	} else {
		LOGGER.warn("You should turn on \"smart bundling\" feature to be able to rebuild modified bundles.");
	}
}
/**
 * Returns the bundles which need to be rebuilt, i.e. the dirty bundles
 * among both global and context bundles. The list is empty when smart
 * bundling is disabled.
 *
 * @return the bundles which need to be rebuilt
 */
private List<JoinableResourceBundle> getBundlesToRebuild() {
	List<JoinableResourceBundle> bundlesToRebuild = new ArrayList<>();
	if (config.getUseSmartBundling()) {
		addDirtyBundles(globalBundles, bundlesToRebuild);
		addDirtyBundles(contextBundles, bundlesToRebuild);
	}
	return bundlesToRebuild;
}

/**
 * Adds to {@code target} every bundle of {@code candidates} flagged dirty.
 *
 * @param candidates
 *            the bundles to inspect
 * @param target
 *            the list receiving the dirty bundles
 */
private void addDirtyBundles(List<JoinableResourceBundle> candidates, List<JoinableResourceBundle> target) {
	for (JoinableResourceBundle bundle : candidates) {
		if (bundle.isDirty()) {
			target.add(bundle);
		}
	}
}
/**
 * Builds the bundles given in parameter: runs the global preprocessing,
 * joins and post-processes each bundle (storing the result), then runs the
 * global postprocessing and stores the bundle mapping.
 *
 * @param bundlesToBuild
 *            the list of bundle to build
 * @param forceWriteBundleMapping
 *            the flag indicating if the bundle mapping must be written in
 *            any case
 * @param stopWatch
 *            the stop watch (may be null)
 */
public void build(List<JoinableResourceBundle> bundlesToBuild, boolean forceWriteBundleMapping,
		StopWatch stopWatch) {
	boolean mappingFileExists = resourceBundleHandler.isExistingMappingFile();
	boolean processBundleFlag = !config.getUseBundleMapping() || !mappingFileExists;
	// Global preprocessing (may add newly-dirty bundles to bundlesToBuild)
	executeGlobalPreprocessing(bundlesToBuild, processBundleFlag, stopWatch);
	for (JoinableResourceBundle bundle : bundlesToBuild) {
		// Clears the linked resource mappings as they will be initialized
		// by the processing
		bundle.getLinkedFilePathMappings().clear();
		if (stopWatch != null) {
			stopWatch.start("Processing bundle '" + bundle.getName() + "'");
		}
		if (!ThreadLocalJawrContext.isBundleProcessingAtBuildTime() && null != bundle.getAlternateProductionURL()) {
			if (LOGGER.isDebugEnabled()) {
				LOGGER.debug("No bundle generated for '" + bundle.getId()
						+ "' because a production URL is defined for this bundle.");
			}
		}
		// NOTE(review): the bundle is still joined and stored below even
		// when an alternate production URL is defined — confirm intended.
		if (bundle instanceof CompositeResourceBundle) {
			joinAndStoreCompositeResourcebundle((CompositeResourceBundle) bundle);
		} else {
			joinAndStoreBundle(bundle);
		}
		if (config.getUseBundleMapping() && (!mappingFileExists || bundle.isDirty())) {
			// Persist the bundle definition in the mapping properties
			JoinableResourceBundlePropertySerializer.serializeInProperties(bundle,
					resourceBundleHandler.getResourceType(), bundleMapping);
		}
		bundle.setDirty(false);
		if (stopWatch != null) {
			stopWatch.stop();
		}
	}
	executeGlobalPostProcessing(processBundleFlag, stopWatch);
	storeJawrBundleMapping(resourceBundleHandler.isExistingMappingFile(), true);
}
/**
 * Stores the Jawr bundle mapping, tagging it with the current config
 * hashcode. For CSS bundles, the binary (image) bundle mapping is also
 * updated when the binary handler uses bundle mapping and its working
 * directory is outside the web application.
 *
 * @param mappingFileExists
 *            the flag indicating if the mapping file exists
 * @param force
 *            force the storage of Jawr bundle mapping
 */
private void storeJawrBundleMapping(boolean mappingFileExists, boolean force) {
	if (config.getUseBundleMapping() && (!mappingFileExists || force)) {
		// Tag the mapping with the config hashcode so config changes can
		// be detected on the next startup
		bundleMapping.setProperty(JawrConstant.JAWR_CONFIG_HASHCODE,
				Integer.toString(config.getConfigProperties().hashCode()));
		resourceBundleHandler.storeJawrBundleMapping(bundleMapping);
		if (resourceBundleHandler.getResourceType().equals(JawrConstant.CSS_TYPE)) {
			// Retrieve the image servlet mapping
			BinaryResourcesHandler binaryRsHandler = (BinaryResourcesHandler) config.getContext()
					.getAttribute(JawrConstant.BINARY_CONTEXT_ATTRIBUTE);
			if (binaryRsHandler != null) {
				// Here we update the image mapping if we are using the
				// build time bundle processor
				JawrConfig binaryJawrConfig = binaryRsHandler.getConfig();
				// If we use the full image bundle mapping and the jawr
				// working directory is not located inside the web
				// application
				// We store the image bundle maping which now contains the
				// mapping for CSS images
				String jawrWorkingDirectory = binaryJawrConfig.getJawrWorkingDirectory();
				if (binaryJawrConfig.getUseBundleMapping() && (jawrWorkingDirectory == null
						|| !jawrWorkingDirectory.startsWith(JawrConstant.URL_SEPARATOR))) {
					// Store the bundle mapping
					Properties props = new Properties();
					props.putAll(binaryRsHandler.getBinaryPathMap());
					props.setProperty(JawrConstant.JAWR_CONFIG_HASHCODE,
							Integer.toString(binaryJawrConfig.getConfigProperties().hashCode()));
					binaryRsHandler.getRsBundleHandler().storeJawrBundleMapping(props);
				}
			}
		}
	}
}
/**
 * Execute the global post processing over all managed bundles, when a
 * global postprocessor is configured.
 *
 * @param processBundleFlag
 *            the flag indicating if the bundle should be processed
 * @param stopWatch
 *            the stopWatch (may be null)
 */
private void executeGlobalPostProcessing(boolean processBundleFlag, StopWatch stopWatch) {
	if (resourceTypePostprocessor == null) {
		return;
	}
	if (stopWatch != null) {
		stopWatch.start("Global postprocessing");
	}
	GlobalPostProcessingContext ctx = new GlobalPostProcessingContext(config, this, resourceHandler,
			processBundleFlag);
	resourceTypePostprocessor.processBundles(ctx, this.bundles);
	if (stopWatch != null) {
		stopWatch.stop();
	}
}
/**
 * Joins the members of a composite bundle in all its variants, storing in a
 * separate file for each variant. When variant-aware post processors are
 * configured, a first pass collects the variants they contribute, the
 * variant set is extended accordingly, and the bundle is processed again.
 *
 * @param composite
 *            the composite resource bundle
 */
private void joinAndStoreCompositeResourcebundle(CompositeResourceBundle composite) {
	BundleProcessingStatus status = new BundleProcessingStatus(BundleProcessingStatus.FILE_PROCESSING_TYPE,
			composite, resourceHandler, config);
	// Collect all variant names from child bundles
	Map<String, VariantSet> compositeBundleVariants = new HashMap<String, VariantSet>();
	for (Iterator<JoinableResourceBundle> it = composite.getChildBundles().iterator(); it.hasNext();) {
		JoinableResourceBundle childbundle = it.next();
		if (null != childbundle.getVariants())
			compositeBundleVariants = VariantUtils.concatVariants(compositeBundleVariants,
					childbundle.getVariants());
	}
	composite.setVariants(compositeBundleVariants);
	if (needToSearchForVariantInPostProcess) {
		// First pass: run the post processors in "variant discovery" mode
		status.setSearchingPostProcessorVariants(true);
		joinAndPostProcessBundle(composite, status);
		Map<String, VariantSet> postProcessVariants = status.getPostProcessVariants();
		if (!postProcessVariants.isEmpty()) {
			if (LOGGER.isDebugEnabled()) {
				LOGGER.debug(
						"Post process variants found for bundle " + composite.getId() + ":" + postProcessVariants);
			}
			// Second pass: process again with the extended variant set
			Map<String, VariantSet> newVariants = VariantUtils.concatVariants(composite.getVariants(),
					postProcessVariants);
			composite.setVariants(newVariants);
			status.setSearchingPostProcessorVariants(false);
			joinAndPostProcessBundle(composite, status);
		}
	} else {
		status.setSearchingPostProcessorVariants(false);
		joinAndPostProcessBundle(composite, status);
	}
}
/**
 * Joins and post processes the composite bundle, producing and storing one
 * file per variant (plus the default, non-variant one).
 *
 * Fix: removed a redundant cast on the child bundle iterator and a stale
 * {@code @param compositeBundleVariants} javadoc entry for a parameter
 * that does not exist; iterator loops replaced by enhanced-for.
 *
 * @param composite
 *            the composite bundle
 * @param status
 *            the bundle processing status
 */
private void joinAndPostProcessBundle(CompositeResourceBundle composite, BundleProcessingStatus status) {
	List<Map<String, String>> allVariants = VariantUtils.getAllVariants(composite.getVariants());
	// Add the default bundle variant (the non variant one)
	allVariants.add(null);
	// Process all variants
	for (Map<String, String> variants : allVariants) {
		status.setBundleVariants(variants);
		JoinableResourceBundleContent store = new JoinableResourceBundleContent();
		for (JoinableResourceBundle childbundle : composite.getChildBundles()) {
			if (!childbundle.getInclusionPattern().isIncludeOnlyOnDebug()) {
				JoinableResourceBundleContent childContent = joinAndPostprocessBundle(childbundle, variants,
						status);
				// Do unitary postprocessing.
				status.setProcessingType(BundleProcessingStatus.FILE_PROCESSING_TYPE);
				StringBuffer content = executeUnitaryPostProcessing(composite, status, childContent.getContent(),
						this.unitaryCompositePostProcessor);
				childContent.setContent(content);
				store.append(childContent);
			}
		}
		// Post process composite bundle as needed
		store = postProcessJoinedCompositeBundle(composite, store.getContent(), status);
		String variantKey = VariantUtils.getVariantKey(variants);
		String name = VariantUtils.getVariantBundleName(composite.getId(), variantKey, false);
		storeBundle(name, store);
		initBundleDataHashcode(composite, store, variantKey);
	}
}
/**
 * Post-processes the joined content of a composite bundle. The bundle's
 * own post processor takes precedence; otherwise the handler-level
 * composite post processor is used; when neither is set the content is
 * returned unchanged.
 *
 * @param composite
 *            the composite bundle
 * @param content
 *            the joined content
 * @param status
 *            the bundle processing status
 * @return the resulting bundle content
 */
private JoinableResourceBundleContent postProcessJoinedCompositeBundle(CompositeResourceBundle composite,
		StringBuffer content, BundleProcessingStatus status) {
	status.setProcessingType(BundleProcessingStatus.BUNDLE_PROCESSING_TYPE);
	ResourceBundlePostProcessor bundlePostProcessor = composite.getBundlePostProcessor();
	StringBuffer processedContent;
	if (null != bundlePostProcessor) {
		processedContent = bundlePostProcessor.postProcessBundle(status, content);
	} else if (null != this.compositePostProcessor) {
		processedContent = this.compositePostProcessor.postProcessBundle(status, content);
	} else {
		processedContent = content;
	}
	JoinableResourceBundleContent store = new JoinableResourceBundleContent();
	store.setContent(processedContent);
	return store;
}
/**
 * Initialize the bundle data hashcode and initialize the bundle mapping if
 * needed.
 *
 * @param bundle
 *            the bundle
 * @param store
 *            the data to store
 * @param variant
 *            the variant key the hashcode is registered under (may be
 *            null for the default variant)
 */
private void initBundleDataHashcode(JoinableResourceBundle bundle, JoinableResourceBundleContent store,
		String variant) {
	String bundleHashcode = bundleHashcodeGenerator.generateHashCode(config, store.getContent().toString());
	bundle.setBundleDataHashCode(variant, bundleHashcode);
}
/**
 * Determines whether the hashcode embedded in the requested path matches
 * the one registered for the resolved bundle and variant.
 *
 * @param requestedPath
 *            the requested bundle path (with prefix/hashcode/variant)
 * @return {@code VALID_HASHCODE} on a match, {@code INVALID_HASHCODE} on a
 *         mismatch, {@code UNKNOW_BUNDLE} when no bundle resolves
 */
public BundleHashcodeType getBundleHashcodeType(String requestedPath) {
	BundleHashcodeType typeBundleHashcode = BundleHashcodeType.UNKNOW_BUNDLE;
	String[] pathInfos = PathNormalizer.extractBundleInfoFromPath(requestedPath);
	if (pathInfos != null) {
		String bundlePrefix = pathInfos[0];
		String bundleId = pathInfos[1];
		String variantKey = pathInfos[2];
		String hashcode = pathInfos[3];
		JoinableResourceBundle bundle = resolveBundleForPath(bundleId);
		if (bundle != null) {
			String bundleHashcode = bundle.getBundleDataHashCode(variantKey);
			// NOTE(review): due to &&/|| precedence, the bundlePrefix
			// comparison only applies to the "both hashcodes non-null"
			// branch; when both hashcodes are null the prefix is not
			// checked at all — confirm this is intended.
			if (hashcode == null && bundleHashcode == null || hashcode != null && hashcode.equals(bundleHashcode)
					&& ((bundlePrefix == null && bundle.getBundlePrefix() == null)
							|| (bundlePrefix != null && bundlePrefix.equals(bundle.getBundlePrefix())))) {
				typeBundleHashcode = BundleHashcodeType.VALID_HASHCODE;
			} else {
				typeBundleHashcode = BundleHashcodeType.INVALID_HASHCODE;
			}
		}
	}
	return typeBundleHashcode;
}
/**
 * Joins the members of a bundle and stores it. When variant-aware post
 * processors are configured, a first pass collects the variants they
 * contribute before the final processing pass.
 *
 * @param bundle
 *            the bundle to join and store
 */
private void joinAndStoreBundle(JoinableResourceBundle bundle) {
	BundleProcessingStatus status = new BundleProcessingStatus(BundleProcessingStatus.FILE_PROCESSING_TYPE, bundle,
			resourceHandler, config);
	JoinableResourceBundleContent store = null;
	// Process the bundle for searching variant
	if (needToSearchForVariantInPostProcess) {
		status.setSearchingPostProcessorVariants(true);
		joinAndPostProcessBundle(bundle, status);
		// Process the bundles
		status.setSearchingPostProcessorVariants(false);
		Map<String, VariantSet> postProcessVariants = status.getPostProcessVariants();
		if (!postProcessVariants.isEmpty()) {
			if (LOGGER.isDebugEnabled()) {
				LOGGER.debug(
						"Post process variants found for bundle " + bundle.getId() + ":" + postProcessVariants);
			}
			// Re-process with the variant set extended by the post
			// processors
			Map<String, VariantSet> newVariants = VariantUtils.concatVariants(bundle.getVariants(),
					postProcessVariants);
			bundle.setVariants(newVariants);
			joinAndPostProcessBundle(bundle, status);
		}
	} else {
		status.setSearchingPostProcessorVariants(false);
		joinAndPostProcessBundle(bundle, status);
	}
	// Store the collected resources as a single file, both in text and
	// gzip formats.
	store = joinAndPostprocessBundle(bundle, null, status);
	storeBundle(bundle.getId(), store);
	// Set the data hascode in the bundle, in case the prefix needs to
	// be generated
	initBundleDataHashcode(bundle, store, null);
}
/**
 * Stores the bundle content, first registering the bundle for live
 * processing when its content contains the bundle path placeholder.
 *
 * @param bundleId
 *            the bundle Id to store
 * @param store
 *            the bundle content
 */
private void storeBundle(String bundleId, JoinableResourceBundleContent store) {
	String content = store.getContent().toString();
	if (bundleMustBeProcessedInLive(content)) {
		liveProcessBundles.add(bundleId);
	}
	resourceBundleHandler.storeBundle(bundleId, store);
}
/**
 * Checks if the bundle must be processed in live, i.e. if its content
 * contains the bundle path placeholder.
 *
 * Fix: use the idiomatic {@code String.contains} instead of
 * {@code indexOf(...) != -1}.
 *
 * @param content
 *            the bundle content
 * @return true if the bundle must be processed in live
 */
private boolean bundleMustBeProcessedInLive(String content) {
	return content.contains(JawrConstant.JAWR_BUNDLE_PATH_PLACEHOLDER);
}
/**
 * Join and post process the bundle taking in account all its variants:
 * each variant (plus the default, non-variant one) is processed, stored
 * under its variant bundle name, and its data hashcode registered.
 *
 * @param bundle
 *            the bundle
 * @param status
 *            the bundle processing status
 */
private void joinAndPostProcessBundle(JoinableResourceBundle bundle, BundleProcessingStatus status) {
	List<Map<String, String>> allVariants = VariantUtils.getAllVariants(bundle.getVariants());
	// Add the default bundle variant (the non variant one)
	allVariants.add(null);
	for (Map<String, String> variantMap : allVariants) {
		status.setBundleVariants(variantMap);
		String variantKey = VariantUtils.getVariantKey(variantMap);
		String name = VariantUtils.getVariantBundleName(bundle.getId(), variantKey, false);
		JoinableResourceBundleContent store = joinAndPostprocessBundle(bundle, variantMap, status);
		storeBundle(name, store);
		initBundleDataHashcode(bundle, store, variantKey);
	}
}
/**
 * Reads all the members of a bundle, concatenates them and executes all
 * associated postprocessors (unitary per file, then bundle level).
 * Missing member resources are logged and skipped rather than aborting
 * the whole bundle.
 *
 * @param bundle
 *            the bundle
 * @param variants
 *            the variant map (may be null for the default variant)
 * @param status
 *            the bundling processing status
 * @return the resource bundle content, where all postprocessors have been
 *         executed
 */
private JoinableResourceBundleContent joinAndPostprocessBundle(JoinableResourceBundle bundle,
		Map<String, String> variants, BundleProcessingStatus status) {
	JoinableResourceBundleContent bundleContent = new JoinableResourceBundleContent();
	StringBuffer bundleData = new StringBuffer();
	StringBuffer store = null;
	try {
		boolean firstPath = true;
		// Run through all the files belonging to the bundle
		Iterator<BundlePath> pathIterator = null;
		if (bundle.getInclusionPattern().isIncludeOnlyOnDebug()) {
			pathIterator = bundle.getItemDebugPathList(variants).iterator();
		} else {
			pathIterator = bundle.getItemPathList(variants).iterator();
		}
		for (Iterator<BundlePath> it = pathIterator; it.hasNext();) {
			// File is first created in memory using a stringwriter.
			StringWriter writer = new StringWriter();
			BufferedWriter bwriter = new BufferedWriter(writer);
			String path = (String) it.next().getPath();
			if (LOGGER.isDebugEnabled())
				LOGGER.debug("Adding file [" + path + "] to bundle " + bundle.getId());
			// Get a reader on the resource, with appropiate encoding
			Reader rd = null;
			try {
				rd = resourceHandler.getResource(bundle, path, true);
			} catch (ResourceNotFoundException e) {
				// If a mapped file does not exist, a warning is issued and
				// process continues normally.
				LOGGER.warn("A mapped resource was not found: [" + path + "]. Please check your configuration");
				continue;
			}
			// Update the status.
			status.setLastPathAdded(path);
			// Keep the BOM of the first file only, so the joined bundle
			// does not contain BOMs in the middle of its content
			rd = new UnicodeBOMReader(rd, config.getResourceCharset());
			if (!firstPath && ((UnicodeBOMReader) rd).hasBOM()) {
				((UnicodeBOMReader) rd).skipBOM();
			} else {
				firstPath = false;
			}
			IOUtils.copy(rd, bwriter, true);
			// Add new line at the end if it doesn't exist
			StringBuffer buffer = writer.getBuffer();
			if (!buffer.toString().endsWith(StringUtils.STR_LINE_FEED)) {
				buffer.append(StringUtils.STR_LINE_FEED);
			}
			// Do unitary postprocessing.
			status.setProcessingType(BundleProcessingStatus.FILE_PROCESSING_TYPE);
			bundleData.append(executeUnitaryPostProcessing(bundle, status, buffer, this.unitaryPostProcessor));
		}
		// Post process bundle as needed
		store = executeBundlePostProcessing(bundle, status, bundleData);
	} catch (IOException e) {
		throw new BundlingProcessException(
				"Unexpected IOException generating collected file [" + bundle.getId() + "].", e);
	}
	bundleContent.setContent(store);
	return bundleContent;
}
/**
 * Executes the unitary resource post processing. The bundle's own unitary
 * post processor takes precedence; otherwise the supplied default one is
 * used; when neither applies the content is returned as-is.
 *
 * Fix: braces added around the nested debug-log statement (the original
 * had an unbraced if inside an else-if branch); the javadoc now documents
 * the {@code defaultPostProcessor} parameter.
 *
 * @param bundle
 *            the bundle
 * @param status
 *            the bundle processing status
 * @param content
 *            the content to process
 * @param defaultPostProcessor
 *            the post processor to apply when the bundle has no unitary
 *            post processor of its own
 * @return the processed content
 */
private StringBuffer executeUnitaryPostProcessing(JoinableResourceBundle bundle, BundleProcessingStatus status,
		StringBuffer content, ResourceBundlePostProcessor defaultPostProcessor) {
	StringBuffer bundleData = new StringBuffer();
	status.setProcessingType(BundleProcessingStatus.FILE_PROCESSING_TYPE);
	if (null != bundle.getUnitaryPostProcessor()) {
		StringBuffer resourceData = bundle.getUnitaryPostProcessor().postProcessBundle(status, content);
		bundleData.append(resourceData);
	} else if (null != defaultPostProcessor) {
		if (LOGGER.isDebugEnabled()) {
			LOGGER.debug("POSTPROCESSING UNIT:" + status.getLastPathAdded());
		}
		StringBuffer resourceData = defaultPostProcessor.postProcessBundle(status, content);
		bundleData.append(resourceData);
	} else {
		bundleData = content;
	}
	return bundleData;
}
/**
 * Runs bundle-level post processing on the joined bundle content.
 * The bundle's own post processor takes precedence over the handler-wide one;
 * with neither configured, the data is returned as-is.
 *
 * @param bundle
 *            the bundle
 * @param status
 *            the bundle processing status
 * @param bundleData
 *            the joined bundle content
 * @return the processed content
 */
private StringBuffer executeBundlePostProcessing(JoinableResourceBundle bundle, BundleProcessingStatus status,
		StringBuffer bundleData) {

	status.setProcessingType(BundleProcessingStatus.BUNDLE_PROCESSING_TYPE);
	status.setLastPathAdded(bundle.getId());

	ResourceBundlePostProcessor bundleProcessor = bundle.getBundlePostProcessor();
	if (bundleProcessor != null) {
		return bundleProcessor.postProcessBundle(status, bundleData);
	}
	if (this.postProcessor != null) {
		return this.postProcessor.postProcessBundle(status, bundleData);
	}
	return bundleData;
}
/**
 * Resolves the bundle which serves the given path: either the bundle whose id
 * is the path itself, or the first bundle the path belongs to.
 *
 * @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#resolveBundleForPath(java.lang.String)
 * @param path the requested path
 * @return the matching bundle, or null when no bundle matches
 */
public JoinableResourceBundle resolveBundleForPath(String path) {
	for (JoinableResourceBundle candidate : bundles) {
		if (candidate.getId().equals(path) || candidate.belongsToBundle(path)) {
			return candidate;
		}
	}
	return null;
}
/**
 * Returns the client side handler generator.
 *
 * @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#getClientSideHandler()
 * @return the client side handler generator
 */
public ClientSideHandlerGenerator getClientSideHandler() {
	return this.clientSideHandlerGenerator;
}
/**
 * Returns the directory path where bundle text content is stored,
 * delegating to the resource bundle handler.
 *
 * @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#getBundleTextDirPath()
 * @return the bundle text directory path
 */
public String getBundleTextDirPath() {
	return this.resourceBundleHandler.getBundleTextDirPath();
}
/**
 * Returns the directory path where gzipped bundle content is stored,
 * delegating to the resource bundle handler.
 *
 * @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#getBundleZipDirPath()
 * @return the bundle zip directory path
 */
@Override
public String getBundleZipDirPath() {
	return this.resourceBundleHandler.getBundleZipDirPath();
}
/**
 * Marks each of the given bundles as dirty so it will be rebuilt.
 *
 * @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#notifyModification(java.util.List)
 * @param bundles the modified bundles
 */
@Override
public void notifyModification(List<JoinableResourceBundle> bundles) {
	for (JoinableResourceBundle modified : bundles) {
		// Only log the transition from clean to dirty.
		boolean wasClean = !modified.isDirty();
		if (wasClean && LOGGER.isDebugEnabled()) {
			LOGGER.debug("The bundle '" + modified.getId() + "' has been modified and needs to be rebuild.");
		}
		modified.setDirty(true);
	}
}
/**
 * Checks whether any bundle has been flagged as dirty and needs rebuilding.
 *
 * @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#bundlesNeedToBeRebuild()
 * @return true when at least one bundle must be rebuilt
 */
@Override
public boolean bundlesNeedToBeRebuild() {
	List<JoinableResourceBundle> dirtyBundles = getBundlesToRebuild();
	return !dirtyBundles.isEmpty();
}
/**
 * Returns the names of all bundles currently flagged for rebuilding.
 *
 * @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#getDirtyBundleNames()
 * @return the list of dirty bundle names (possibly empty)
 */
@Override
public List<String> getDirtyBundleNames() {
	List<String> dirtyNames = new ArrayList<>();
	for (Iterator<JoinableResourceBundle> it = getBundlesToRebuild().iterator(); it.hasNext();) {
		dirtyNames.add(it.next().getName());
	}
	return dirtyNames;
}
/**
 * Sets the resource watcher.
 *
 * @see net.jawr.web.resource.bundle.handler.ResourceBundlesHandler#setResourceWatcher(net.jawr.web.resource.watcher.ResourceWatcher)
 * @param watcher the resource watcher to set
 */
@Override
public void setResourceWatcher(ResourceWatcher watcher) {
	this.watcher = watcher;
}
}
|
Fix issue in with search variant postprocessor
|
jawr-core/src/main/java/net/jawr/web/resource/bundle/handler/ResourceBundlesHandlerImpl.java
|
Fix issue in with search variant postprocessor
|
|
Java
|
apache-2.0
|
063ff3b0e0dc8b87b930687dca8244d3e4eabf35
| 0
|
adleritech/flexibee
|
package com.adleritech.flexibee.core.api;
import com.adleritech.flexibee.core.api.domain.WinstromRequest;
import com.adleritech.flexibee.core.api.domain.WinstromResponse;
import retrofit2.Call;
import retrofit2.Response;
import retrofit2.http.Body;
import retrofit2.http.PUT;
import retrofit2.http.Path;
import java.io.IOException;
/**
 * Thin client for the FlexiBee REST API, backed by a Retrofit service.
 * Instances are bound to a single company and a single set of credentials.
 */
public class FlexibeeClient {
    private static final String API_BASE_URL = "https://demo.flexibee.eu:5434";

    private final String company;
    private final Api client;

    /** Creates a client pointing at the default (demo) FlexiBee endpoint. */
    public FlexibeeClient(String username, String password, String company) {
        // Delegate to the full constructor with the built-in base URL.
        this(username, password, company, API_BASE_URL);
    }

    /** Creates a client pointing at a custom FlexiBee endpoint. */
    public FlexibeeClient(String username, String password, String company, String apiBaseUrl) {
        this.company = company;
        client = RetrofitClientFactory.createService(Api.class, apiBaseUrl, username, password);
    }

    /**
     * Issues an invoice for this client's company.
     *
     * @param winstromRequest the invoice request payload
     * @return the parsed response body (null when the HTTP call was not successful
     *         — NOTE(review): callers should confirm this is the intended contract)
     * @throws IOException on network failure
     */
    public WinstromResponse createInvoice(WinstromRequest winstromRequest) throws IOException {
        Response<WinstromResponse> httpResponse = client.issueInvoice(company, winstromRequest).execute();
        return httpResponse.body();
    }

    interface Api {
        @PUT("/c/{company}/faktura-vydana.xml")
        Call<WinstromResponse> issueInvoice(@Path("company") String company, @Body WinstromRequest request);
    }
}
|
flexibee-core/src/main/java/com/adleritech/flexibee/core/api/FlexibeeClient.java
|
package com.adleritech.flexibee.core.api;
import com.adleritech.flexibee.core.api.domain.WinstromRequest;
import com.adleritech.flexibee.core.api.domain.WinstromResponse;
import retrofit2.Call;
import retrofit2.Response;
import retrofit2.http.Body;
import retrofit2.http.PUT;
import retrofit2.http.Path;
import java.io.IOException;
/**
 * Thin client for the FlexiBee REST API, backed by a Retrofit service.
 * Instances are bound to a single company and a single set of credentials.
 */
public class FlexibeeClient {
    private static final String API_BASE_URL = "https://demo.flexibee.eu:5434";

    private final String company;
    private final Api client;

    /** Creates a client pointing at the demo FlexiBee endpoint. */
    public FlexibeeClient(String username, String password, String company) {
        this.company = company;
        this.client = RetrofitClientFactory.createService(Api.class, API_BASE_URL, username, password);
    }

    /**
     * Issues an invoice for this client's company.
     *
     * @param winstromRequest the invoice request payload
     * @return the parsed response body (null when the HTTP call was not successful
     *         — NOTE(review): callers should confirm this is the intended contract)
     * @throws IOException on network failure
     */
    public WinstromResponse createInvoice(WinstromRequest winstromRequest) throws IOException {
        // Execute synchronously and unwrap the body in one chain.
        return client.issueInvoice(company, winstromRequest).execute().body();
    }

    interface Api {
        @PUT("/c/{company}/faktura-vydana.xml")
        Call<WinstromResponse> issueInvoice(@Path("company") String company, @Body WinstromRequest request);
    }
}
|
Allow to change flexibee url
|
flexibee-core/src/main/java/com/adleritech/flexibee/core/api/FlexibeeClient.java
|
Allow to change flexibee url
|
|
Java
|
apache-2.0
|
a41f3baed3657153d189b7c6ede9d99d59b5b0d9
| 0
|
etnetera/jmeter,benbenw/jmeter,apache/jmeter,ham1/jmeter,benbenw/jmeter,apache/jmeter,ham1/jmeter,ham1/jmeter,ham1/jmeter,apache/jmeter,benbenw/jmeter,apache/jmeter,benbenw/jmeter,etnetera/jmeter,etnetera/jmeter,ham1/jmeter,etnetera/jmeter,etnetera/jmeter,apache/jmeter
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.assertions;
import java.io.Serializable;
import java.net.URL;
import java.util.ArrayList;
import org.apache.jmeter.samplers.SampleResult;
import org.apache.jmeter.testelement.AbstractScopedAssertion;
import org.apache.jmeter.testelement.property.CollectionProperty;
import org.apache.jmeter.testelement.property.IntegerProperty;
import org.apache.jmeter.testelement.property.JMeterProperty;
import org.apache.jmeter.testelement.property.NullProperty;
import org.apache.jmeter.testelement.property.PropertyIterator;
import org.apache.jmeter.testelement.property.StringProperty;
import org.apache.jmeter.util.Document;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;
import org.apache.oro.text.MalformedCachePatternException;
import org.apache.oro.text.regex.Pattern;
import org.apache.oro.text.regex.Perl5Compiler;
import org.apache.oro.text.regex.Perl5Matcher;
// @see org.apache.jmeter.assertions.ResponseAssertionTest for unit tests
/**
* Test element to handle Response Assertions, @see AssertionGui
*/
public class ResponseAssertion extends AbstractScopedAssertion implements Serializable, Assertion {
    private static final Logger log = LoggingManager.getLoggerForClass();

    private static final long serialVersionUID = 240L;

    /** Property key holding which response field is under test. */
    private static final String TEST_FIELD = "Assertion.test_field"; // $NON-NLS-1$

    // Values for TEST_FIELD
    // N.B. we cannot change the text value as it is in test plans
    private static final String SAMPLE_URL = "Assertion.sample_label"; // $NON-NLS-1$

    private static final String RESPONSE_DATA = "Assertion.response_data"; // $NON-NLS-1$

    private static final String RESPONSE_DATA_AS_DOCUMENT = "Assertion.response_data_as_document"; // $NON-NLS-1$

    private static final String RESPONSE_CODE = "Assertion.response_code"; // $NON-NLS-1$

    private static final String RESPONSE_MESSAGE = "Assertion.response_message"; // $NON-NLS-1$

    private static final String RESPONSE_HEADERS = "Assertion.response_headers"; // $NON-NLS-1$

    private static final String ASSUME_SUCCESS = "Assertion.assume_success"; // $NON-NLS-1$

    // N.B. the "Asserion" typo is historical and must be kept: the key is stored in test plans.
    private static final String TEST_STRINGS = "Asserion.test_strings"; // $NON-NLS-1$

    private static final String TEST_TYPE = "Assertion.test_type"; // $NON-NLS-1$

    /*
     * Mask values for TEST_TYPE TODO: remove either MATCH or CONTAINS - they
     * are mutually exclusive
     */
    private static final int MATCH = 1 << 0;

    private static final int CONTAINS = 1 << 1;

    /** Negation flag; may be combined with any of the other type bits. */
    private static final int NOT = 1 << 2;

    private static final int EQUALS = 1 << 3;

    private static final int SUBSTRING = 1 << 4;

    // Mask should contain all types (but not NOT)
    private static final int TYPE_MASK = CONTAINS | EQUALS | MATCH | SUBSTRING;

    /** Max characters shown per section of the "equals" diff display. */
    private static final int EQUALS_SECTION_DIFF_LEN
            = JMeterUtils.getPropDefault("assertion.equals_section_diff_len", 100);

    /** Signifies truncated text in diff display. */
    private static final String EQUALS_DIFF_TRUNC = "...";

    private static final String RECEIVED_STR = "****** received : ";

    private static final String COMPARISON_STR = "****** comparison: ";

    private static final String DIFF_DELTA_START
            = JMeterUtils.getPropDefault("assertion.equals_diff_delta_start", "[[[");

    private static final String DIFF_DELTA_END
            = JMeterUtils.getPropDefault("assertion.equals_diff_delta_end", "]]]");

    public ResponseAssertion() {
        setProperty(new CollectionProperty(TEST_STRINGS, new ArrayList<String>()));
    }

    @Override
    public void clear() {
        super.clear();
        setProperty(new CollectionProperty(TEST_STRINGS, new ArrayList<String>()));
    }

    private void setTestField(String testField) {
        setProperty(TEST_FIELD, testField);
    }

    public void setTestFieldURL(){
        setTestField(SAMPLE_URL);
    }

    public void setTestFieldResponseCode(){
        setTestField(RESPONSE_CODE);
    }

    public void setTestFieldResponseData(){
        setTestField(RESPONSE_DATA);
    }

    public void setTestFieldResponseDataAsDocument(){
        setTestField(RESPONSE_DATA_AS_DOCUMENT);
    }

    public void setTestFieldResponseMessage(){
        setTestField(RESPONSE_MESSAGE);
    }

    public void setTestFieldResponseHeaders(){
        setTestField(RESPONSE_HEADERS);
    }

    public boolean isTestFieldURL(){
        return SAMPLE_URL.equals(getTestField());
    }

    public boolean isTestFieldResponseCode(){
        return RESPONSE_CODE.equals(getTestField());
    }

    public boolean isTestFieldResponseData(){
        return RESPONSE_DATA.equals(getTestField());
    }

    public boolean isTestFieldResponseDataAsDocument() {
        return RESPONSE_DATA_AS_DOCUMENT.equals(getTestField());
    }

    public boolean isTestFieldResponseMessage(){
        return RESPONSE_MESSAGE.equals(getTestField());
    }

    public boolean isTestFieldResponseHeaders(){
        return RESPONSE_HEADERS.equals(getTestField());
    }

    private void setTestType(int testType) {
        setProperty(new IntegerProperty(TEST_TYPE, testType));
    }

    // Replaces the type bits while preserving the NOT flag.
    private void setTestTypeMasked(int testType) {
        int value = getTestType() & ~(TYPE_MASK) | testType;
        setProperty(new IntegerProperty(TEST_TYPE, value));
    }

    public void addTestString(String testString) {
        getTestStrings().addProperty(new StringProperty(String.valueOf(testString.hashCode()), testString));
    }

    public void clearTestStrings() {
        getTestStrings().clear();
    }

    @Override
    public AssertionResult getResult(SampleResult response) {
        AssertionResult result;

        // None of the other Assertions check the response status, so remove
        // this check
        // for the time being, at least...

        // if (!response.isSuccessful())
        // {
        // result = new AssertionResult();
        // result.setError(true);
        // byte [] ba = response.getResponseData();
        // result.setFailureMessage(
        // ba == null ? "Unknown Error (responseData is empty)" : new String(ba)
        // );
        // return result;
        // }

        result = evaluateResponse(response);
        return result;
    }

    /***************************************************************************
     * Returns the name of the response field under test.
     *
     * @return one of the TEST_FIELD property values (e.g. RESPONSE_DATA)
     **************************************************************************/
    public String getTestField() {
        return getPropertyAsString(TEST_FIELD);
    }

    /***************************************************************************
     * Returns the test type bit mask.
     *
     * @return the combined type bits; defaults to CONTAINS when unset
     **************************************************************************/
    public int getTestType() {
        JMeterProperty type = getProperty(TEST_TYPE);
        if (type instanceof NullProperty) {
            return CONTAINS;
        }
        return type.getIntValue();
    }

    /***************************************************************************
     * Returns the collection of patterns/strings to test against.
     *
     * @return the test strings as a CollectionProperty
     **************************************************************************/
    public CollectionProperty getTestStrings() {
        return (CollectionProperty) getProperty(TEST_STRINGS);
    }

    public boolean isEqualsType() {
        return (getTestType() & EQUALS) != 0;
    }

    public boolean isSubstringType() {
        return (getTestType() & SUBSTRING) != 0;
    }

    public boolean isContainsType() {
        return (getTestType() & CONTAINS) != 0;
    }

    public boolean isMatchType() {
        return (getTestType() & MATCH) != 0;
    }

    public boolean isNotType() {
        return (getTestType() & NOT) != 0;
    }

    public void setToContainsType() {
        setTestTypeMasked(CONTAINS);
    }

    public void setToMatchType() {
        setTestTypeMasked(MATCH);
    }

    public void setToEqualsType() {
        setTestTypeMasked(EQUALS);
    }

    public void setToSubstringType() {
        setTestTypeMasked(SUBSTRING);
    }

    public void setToNotType() {
        setTestType((getTestType() | NOT));
    }

    public void unsetNotType() {
        setTestType(getTestType() & ~NOT);
    }

    public boolean getAssumeSuccess() {
        return getPropertyAsBoolean(ASSUME_SUCCESS, false);
    }

    public void setAssumeSuccess(boolean b) {
        setProperty(ASSUME_SUCCESS, b);
    }

    /**
     * Make sure the response satisfies the specified assertion requirements.
     *
     * @param response
     *            an instance of SampleResult
     * @return an instance of AssertionResult
     */
    private AssertionResult evaluateResponse(SampleResult response) {
        AssertionResult result = new AssertionResult(getName());
        String toCheck = ""; // The string to check (Url or data)

        if (getAssumeSuccess()) {
            response.setSuccessful(true);// Allow testing of failure codes
        }

        // What are we testing against?
        if (isScopeVariable()){
            toCheck = getThreadContext().getVariables().get(getVariableName());
        } else if (isTestFieldResponseData()) {
            toCheck = response.getResponseDataAsString(); // (bug25052)
        } else if (isTestFieldResponseDataAsDocument()) {
            toCheck = Document.getTextFromDocument(response.getResponseData());
        } else if (isTestFieldResponseCode()) {
            toCheck = response.getResponseCode();
        } else if (isTestFieldResponseMessage()) {
            toCheck = response.getResponseMessage();
        } else if (isTestFieldResponseHeaders()) {
            toCheck = response.getResponseHeaders();
        } else { // Assume it is the URL
            toCheck = "";
            final URL url = response.getURL();
            if (url != null){
                toCheck = url.toString();
            }
        }

        result.setFailure(false);
        result.setError(false);

        // Resolve the test type flags once, outside the pattern loop.
        boolean notTest = (NOT & getTestType()) > 0;
        boolean contains = isContainsType(); // do it once outside loop
        boolean equals = isEqualsType();
        boolean substring = isSubstringType();
        boolean matches = isMatchType();

        boolean debugEnabled = log.isDebugEnabled();
        if (debugEnabled){
            log.debug("Type:" + (contains?"Contains":"Match") + (notTest? "(not)": ""));
        }

        if (toCheck.length() == 0) {
            if (notTest) { // Not should always succeed against an empty result
                return result;
            }
            if (debugEnabled){
                log.debug("Not checking empty response field in: "+response.getSampleLabel());
            }
            return result.setResultForNull();
        }

        boolean pass = true;
        try {
            // Get the Matcher for this thread
            Perl5Matcher localMatcher = JMeterUtils.getMatcher();
            PropertyIterator iter = getTestStrings().iterator();
            // Every test string must pass; stop at the first failure.
            while (iter.hasNext()) {
                String stringPattern = iter.next().getStringValue();
                Pattern pattern = null;
                if (contains || matches) {
                    pattern = JMeterUtils.getPatternCache().getPattern(stringPattern, Perl5Compiler.READ_ONLY_MASK);
                }
                boolean found;
                if (contains) {
                    found = localMatcher.contains(toCheck, pattern);
                } else if (equals) {
                    found = toCheck.equals(stringPattern);
                } else if (substring) {
                    found = toCheck.indexOf(stringPattern) != -1;
                } else {
                    found = localMatcher.matches(toCheck, pattern);
                }
                pass = notTest ? !found : found;
                if (!pass) {
                    if (debugEnabled){log.debug("Failed: "+stringPattern);}
                    result.setFailure(true);
                    result.setFailureMessage(getFailText(stringPattern,toCheck));
                    break;
                }
                if (debugEnabled){log.debug("Passed: "+stringPattern);}
            }
        } catch (MalformedCachePatternException e) {
            // Bad regex in the test string: report as an error, not a failure.
            result.setError(true);
            result.setFailure(false);
            result.setFailureMessage("Bad test configuration " + e);
        }
        return result;
    }

    /**
     * Generate the failure reason from the TestType
     *
     * @param stringPattern the pattern/string that did not pass
     * @param toCheck the actual value that was checked
     * @return the message for the assertion report
     */
    // TODO strings should be resources
    private String getFailText(String stringPattern, String toCheck) {

        StringBuilder sb = new StringBuilder(200);
        sb.append("Test failed: ");

        if (isScopeVariable()){
            sb.append("variable(").append(getVariableName()).append(')');
        } else if (isTestFieldResponseData()) {
            sb.append("text");
        } else if (isTestFieldResponseCode()) {
            sb.append("code");
        } else if (isTestFieldResponseMessage()) {
            sb.append("message");
        } else if (isTestFieldResponseHeaders()) {
            sb.append("headers");
        } else if (isTestFieldResponseDataAsDocument()) {
            sb.append("document");
        } else // Assume it is the URL
        {
            sb.append("URL");
        }

        // Describe the comparison; combined NOT|type values map to the "not" wording.
        switch (getTestType()) {
        case CONTAINS:
        case SUBSTRING:
            sb.append(" expected to contain ");
            break;
        case NOT | CONTAINS:
        case NOT | SUBSTRING:
            sb.append(" expected not to contain ");
            break;
        case MATCH:
            sb.append(" expected to match ");
            break;
        case NOT | MATCH:
            sb.append(" expected not to match ");
            break;
        case EQUALS:
            sb.append(" expected to equal ");
            break;
        case NOT | EQUALS:
            sb.append(" expected not to equal ");
            break;
        default:// should never happen...
            sb.append(" expected something using ");
        }

        sb.append("/");

        if (isEqualsType()){
            sb.append(equalsComparisonText(toCheck, stringPattern));
        } else {
            sb.append(stringPattern);
        }

        sb.append("/");

        return sb.toString();
    }

    /**
     * Truncates a diff section to EQUALS_SECTION_DIFF_LEN characters, keeping
     * either the leading (right=true) or trailing (right=false) part and
     * marking the cut with EQUALS_DIFF_TRUNC.
     */
    private static String trunc(final boolean right, final String str)
    {
        if (str.length() <= EQUALS_SECTION_DIFF_LEN) {
            return str;
        } else if (right) {
            return str.substring(0, EQUALS_SECTION_DIFF_LEN) + EQUALS_DIFF_TRUNC;
        } else {
            return EQUALS_DIFF_TRUNC + str.substring(str.length() - EQUALS_SECTION_DIFF_LEN, str.length());
        }
    }

    /**
     * Returns some helpful logging text to determine where equality between two strings
     * is broken, with one pointer working from the front of the strings and another working
     * backwards from the end.
     *
     * @param received String received from sampler.
     * @param comparison String specified for "equals" response assertion.
     * @return Two lines of text separated by newlines, and then forward and backward pointers
     *     denoting first position of difference.
     */
    private static StringBuilder equalsComparisonText(final String received, final String comparison)
    {
        int firstDiff;
        int lastRecDiff = -1;
        int lastCompDiff = -1;
        final int recLength = received.length();
        final int compLength = comparison.length();
        final int minLength = Math.min(recLength, compLength);
        final String startingEqSeq;
        String recDeltaSeq = "";
        String compDeltaSeq = "";
        String endingEqSeq = "";
        final StringBuilder text = new StringBuilder(Math.max(recLength, compLength) * 2);

        // Scan forward to the first differing character.
        for (firstDiff = 0; firstDiff < minLength; firstDiff++) {
            if (received.charAt(firstDiff) != comparison.charAt(firstDiff)){
                break;
            }
        }
        if (firstDiff == 0) {
            startingEqSeq = "";
        } else {
            startingEqSeq = trunc(false, received.substring(0, firstDiff));
        }

        // Scan backward to the last differing character in each string.
        lastRecDiff = recLength - 1;
        lastCompDiff = compLength - 1;

        while ((lastRecDiff > firstDiff) && (lastCompDiff > firstDiff)
                && received.charAt(lastRecDiff) == comparison.charAt(lastCompDiff))
        {
            lastRecDiff--;
            lastCompDiff--;
        }
        endingEqSeq = trunc(true, received.substring(lastRecDiff + 1, recLength));

        // Extract the differing middle section of each string.
        if (endingEqSeq.length() == 0)
        {
            recDeltaSeq = trunc(true, received.substring(firstDiff, recLength));
            compDeltaSeq = trunc(true, comparison.substring(firstDiff, compLength));
        }
        else
        {
            recDeltaSeq = trunc(true, received.substring(firstDiff, lastRecDiff + 1));
            compDeltaSeq = trunc(true, comparison.substring(firstDiff, lastCompDiff + 1));
        }

        // Pad the shorter delta so the two report lines align vertically.
        final StringBuilder pad = new StringBuilder(Math.abs(recDeltaSeq.length() - compDeltaSeq.length()));
        for (int i = 0; i < pad.capacity(); i++){
            pad.append(' ');
        }
        if (recDeltaSeq.length() > compDeltaSeq.length()){
            compDeltaSeq += pad.toString();
        } else {
            recDeltaSeq += pad.toString();
        }

        text.append("\n\n");
        text.append(RECEIVED_STR);
        text.append(startingEqSeq);
        text.append(DIFF_DELTA_START);
        text.append(recDeltaSeq);
        text.append(DIFF_DELTA_END);
        text.append(endingEqSeq);
        text.append("\n\n");
        text.append(COMPARISON_STR);
        text.append(startingEqSeq);
        text.append(DIFF_DELTA_START);
        text.append(compDeltaSeq);
        text.append(DIFF_DELTA_END);
        text.append(endingEqSeq);
        text.append("\n\n");
        return text;
    }
}
|
src/components/org/apache/jmeter/assertions/ResponseAssertion.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.assertions;
import java.io.Serializable;
import java.net.URL;
import java.util.ArrayList;
import org.apache.jmeter.samplers.SampleResult;
import org.apache.jmeter.testelement.AbstractScopedAssertion;
import org.apache.jmeter.testelement.property.CollectionProperty;
import org.apache.jmeter.testelement.property.IntegerProperty;
import org.apache.jmeter.testelement.property.JMeterProperty;
import org.apache.jmeter.testelement.property.NullProperty;
import org.apache.jmeter.testelement.property.PropertyIterator;
import org.apache.jmeter.testelement.property.StringProperty;
import org.apache.jmeter.util.Document;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;
import org.apache.oro.text.MalformedCachePatternException;
import org.apache.oro.text.regex.Pattern;
import org.apache.oro.text.regex.Perl5Compiler;
import org.apache.oro.text.regex.Perl5Matcher;
// @see org.apache.jmeter.assertions.ResponseAssertionTest for unit tests
/**
* Test element to handle Response Assertions, @see AssertionGui
*/
public class ResponseAssertion extends AbstractScopedAssertion implements Serializable, Assertion {
private static final Logger log = LoggingManager.getLoggerForClass();
private static final long serialVersionUID = 240L;
private static final String TEST_FIELD = "Assertion.test_field"; // $NON-NLS-1$
// Values for TEST_FIELD
// N.B. we cannot change the text value as it is in test plans
private static final String SAMPLE_URL = "Assertion.sample_label"; // $NON-NLS-1$
private static final String RESPONSE_DATA = "Assertion.response_data"; // $NON-NLS-1$
private static final String RESPONSE_DATA_AS_DOCUMENT = "Assertion.response_data_as_document"; // $NON-NLS-1$
private static final String RESPONSE_CODE = "Assertion.response_code"; // $NON-NLS-1$
private static final String RESPONSE_MESSAGE = "Assertion.response_message"; // $NON-NLS-1$
private static final String RESPONSE_HEADERS = "Assertion.response_headers"; // $NON-NLS-1$
private static final String ASSUME_SUCCESS = "Assertion.assume_success"; // $NON-NLS-1$
private static final String TEST_STRINGS = "Asserion.test_strings"; // $NON-NLS-1$
private static final String TEST_TYPE = "Assertion.test_type"; // $NON-NLS-1$
/*
* Mask values for TEST_TYPE TODO: remove either MATCH or CONTAINS - they
* are mutually exckusive
*/
private static final int MATCH = 1 << 0;
private static final int CONTAINS = 1 << 1;
private static final int NOT = 1 << 2;
private static final int EQUALS = 1 << 3;
private static final int SUBSTRING = 1 << 4;
// Mask should contain all types (but not NOT)
private static final int TYPE_MASK = CONTAINS | EQUALS | MATCH | SUBSTRING;
private static final int EQUALS_SECTION_DIFF_LEN
= JMeterUtils.getPropDefault("assertion.equals_section_diff_len", 100);
/** Signifies truncated text in diff display. */
private static final String EQUALS_DIFF_TRUNC = "...";
private static final String RECEIVED_STR = "****** received : ";
private static final String COMPARISON_STR = "****** comparison: ";
private static final String DIFF_DELTA_START
= JMeterUtils.getPropDefault("assertion.equals_diff_delta_start", "[[[");
private static final String DIFF_DELTA_END
= JMeterUtils.getPropDefault("assertion.equals_diff_delta_end", "]]]");
public ResponseAssertion() {
setProperty(new CollectionProperty(TEST_STRINGS, new ArrayList<String>()));
}
@Override
public void clear() {
super.clear();
setProperty(new CollectionProperty(TEST_STRINGS, new ArrayList<String>()));
}
private void setTestField(String testField) {
setProperty(TEST_FIELD, testField);
}
public void setTestFieldURL(){
setTestField(SAMPLE_URL);
}
public void setTestFieldResponseCode(){
setTestField(RESPONSE_CODE);
}
public void setTestFieldResponseData(){
setTestField(RESPONSE_DATA);
}
public void setTestFieldResponseDataAsDocument(){
setTestField(RESPONSE_DATA_AS_DOCUMENT);
}
public void setTestFieldResponseMessage(){
setTestField(RESPONSE_MESSAGE);
}
public void setTestFieldResponseHeaders(){
setTestField(RESPONSE_HEADERS);
}
public boolean isTestFieldURL(){
return SAMPLE_URL.equals(getTestField());
}
public boolean isTestFieldResponseCode(){
return RESPONSE_CODE.equals(getTestField());
}
public boolean isTestFieldResponseData(){
return RESPONSE_DATA.equals(getTestField());
}
public boolean isTestFieldResponseDataAsDocument() {
return RESPONSE_DATA_AS_DOCUMENT.equals(getTestField());
}
public boolean isTestFieldResponseMessage(){
return RESPONSE_MESSAGE.equals(getTestField());
}
public boolean isTestFieldResponseHeaders(){
return RESPONSE_HEADERS.equals(getTestField());
}
private void setTestType(int testType) {
setProperty(new IntegerProperty(TEST_TYPE, testType));
}
private void setTestTypeMasked(int testType) {
int value = getTestType() & ~(TYPE_MASK) | testType;
setProperty(new IntegerProperty(TEST_TYPE, value));
}
public void addTestString(String testString) {
getTestStrings().addProperty(new StringProperty(String.valueOf(testString.hashCode()), testString));
}
public void clearTestStrings() {
getTestStrings().clear();
}
@Override
public AssertionResult getResult(SampleResult response) {
AssertionResult result;
// None of the other Assertions check the response status, so remove
// this check
// for the time being, at least...
// if (!response.isSuccessful())
// {
// result = new AssertionResult();
// result.setError(true);
// byte [] ba = response.getResponseData();
// result.setFailureMessage(
// ba == null ? "Unknown Error (responseData is empty)" : new String(ba)
// );
// return result;
// }
result = evaluateResponse(response);
return result;
}
/***************************************************************************
* !ToDoo (Method description)
*
* @return !ToDo (Return description)
**************************************************************************/
public String getTestField() {
return getPropertyAsString(TEST_FIELD);
}
/***************************************************************************
* !ToDoo (Method description)
*
* @return !ToDo (Return description)
**************************************************************************/
public int getTestType() {
JMeterProperty type = getProperty(TEST_TYPE);
if (type instanceof NullProperty) {
return CONTAINS;
}
return type.getIntValue();
}
/***************************************************************************
* !ToDoo (Method description)
*
* @return !ToDo (Return description)
**************************************************************************/
public CollectionProperty getTestStrings() {
return (CollectionProperty) getProperty(TEST_STRINGS);
}
public boolean isEqualsType() {
return (getTestType() & EQUALS) != 0;
}
public boolean isSubstringType() {
return (getTestType() & SUBSTRING) != 0;
}
public boolean isContainsType() {
return (getTestType() & CONTAINS) != 0;
}
public boolean isMatchType() {
return (getTestType() & MATCH) != 0;
}
public boolean isNotType() {
return (getTestType() & NOT) != 0;
}
public void setToContainsType() {
setTestTypeMasked(CONTAINS);
}
public void setToMatchType() {
setTestTypeMasked(MATCH);
}
public void setToEqualsType() {
setTestTypeMasked(EQUALS);
}
public void setToSubstringType() {
setTestTypeMasked(SUBSTRING);
}
public void setToNotType() {
setTestType((getTestType() | NOT));
}
public void unsetNotType() {
setTestType(getTestType() & ~NOT);
}
public boolean getAssumeSuccess() {
return getPropertyAsBoolean(ASSUME_SUCCESS, false);
}
public void setAssumeSuccess(boolean b) {
setProperty(ASSUME_SUCCESS, b);
}
/**
 * Make sure the response satisfies the specified assertion requirements.
 *
 * @param response
 *            an instance of SampleResult
 * @return an instance of AssertionResult
 */
private AssertionResult evaluateResponse(SampleResult response) {
    boolean pass = true;
    boolean notTest = (NOT & getTestType()) > 0;
    AssertionResult result = new AssertionResult(getName());
    String toCheck = ""; // The string to check (Url or data)
    if (getAssumeSuccess()) {
        response.setSuccessful(true);// Allow testing of failure codes
    }
    // What are we testing against?
    if (isScopeVariable()){
        toCheck = getThreadContext().getVariables().get(getVariableName());
    } else if (isTestFieldResponseData()) {
        toCheck = response.getResponseDataAsString(); // (bug25052)
    } else if (isTestFieldResponseDataAsDocument()) {
        toCheck = Document.getTextFromDocument(response.getResponseData());
    } else if (isTestFieldResponseCode()) {
        toCheck = response.getResponseCode();
    } else if (isTestFieldResponseMessage()) {
        toCheck = response.getResponseMessage();
    } else if (isTestFieldResponseHeaders()) {
        toCheck = response.getResponseHeaders();
    } else { // Assume it is the URL
        toCheck = "";
        final URL url = response.getURL();
        if (url != null){
            toCheck = url.toString();
        }
    }
    result.setFailure(false);
    result.setError(false);
    if (toCheck.length() == 0) {
        // Nothing to check against: a negated test trivially passes,
        // otherwise report the standard "null result" failure.
        if (notTest) {
            return result;
        }
        return result.setResultForNull();
    }
    boolean contains = isContainsType(); // do it once outside loop
    boolean equals = isEqualsType();
    boolean substring = isSubstringType();
    boolean matches = isMatchType();
    boolean debugEnabled = log.isDebugEnabled();
    if (debugEnabled){
        // Fixed: previously only Contains/Match were reported, so Equals and
        // Substring assertions logged a misleading "Match" type.
        log.debug("Type:"
                + (contains ? "Contains" : equals ? "Equals" : substring ? "Substring" : "Match")
                + (notTest ? "(not)" : ""));
    }
    try {
        // Get the Matcher for this thread
        Perl5Matcher localMatcher = JMeterUtils.getMatcher();
        PropertyIterator iter = getTestStrings().iterator();
        while (iter.hasNext()) {
            String stringPattern = iter.next().getStringValue();
            Pattern pattern = null;
            // Only the regex-based comparison kinds need a compiled pattern.
            if (contains || matches) {
                pattern = JMeterUtils.getPatternCache().getPattern(stringPattern, Perl5Compiler.READ_ONLY_MASK);
            }
            boolean found;
            if (contains) {
                found = localMatcher.contains(toCheck, pattern);
            } else if (equals) {
                found = toCheck.equals(stringPattern);
            } else if (substring) {
                found = toCheck.indexOf(stringPattern) != -1;
            } else {
                found = localMatcher.matches(toCheck, pattern);
            }
            pass = notTest ? !found : found;
            // All patterns must pass; stop at the first failure.
            if (!pass) {
                if (debugEnabled){log.debug("Failed: "+stringPattern);}
                result.setFailure(true);
                result.setFailureMessage(getFailText(stringPattern,toCheck));
                break;
            }
            if (debugEnabled){log.debug("Passed: "+stringPattern);}
        }
    } catch (MalformedCachePatternException e) {
        // A bad regex is a configuration error, not an assertion failure.
        result.setError(true);
        result.setFailure(false);
        result.setFailureMessage("Bad test configuration " + e);
    }
    return result;
}
/**
 * Generate the failure reason from the TestType.
 *
 * @param stringPattern the pattern/string that failed to match
 * @param toCheck the actual value that was checked (only used for EQUALS diffs)
 * @return the message for the assertion report
 */
// TODO strings should be resources
private String getFailText(String stringPattern, String toCheck) {
    StringBuilder sb = new StringBuilder(200);
    sb.append("Test failed: ");
    // First name the field that was tested.
    if (isScopeVariable()){
        sb.append("variable(").append(getVariableName()).append(')');
    } else if (isTestFieldResponseData()) {
        sb.append("text");
    } else if (isTestFieldResponseCode()) {
        sb.append("code");
    } else if (isTestFieldResponseMessage()) {
        sb.append("message");
    } else if (isTestFieldResponseHeaders()) {
        sb.append("headers");
    } else if (isTestFieldResponseDataAsDocument()) {
        sb.append("document");
    } else // Assume it is the URL
    {
        sb.append("URL");
    }
    // Then describe the comparison; the cases cover every valid combination
    // of a comparison bit with the optional NOT flag.
    switch (getTestType()) {
    case CONTAINS:
    case SUBSTRING:
        sb.append(" expected to contain ");
        break;
    case NOT | CONTAINS:
    case NOT | SUBSTRING:
        sb.append(" expected not to contain ");
        break;
    case MATCH:
        sb.append(" expected to match ");
        break;
    case NOT | MATCH:
        sb.append(" expected not to match ");
        break;
    case EQUALS:
        sb.append(" expected to equal ");
        break;
    case NOT | EQUALS:
        sb.append(" expected not to equal ");
        break;
    default:// should never happen...
        sb.append(" expected something using ");
    }
    sb.append("/");
    // For equality failures include a positional diff of the two strings.
    if (isEqualsType()){
        sb.append(equalsComparisonText(toCheck, stringPattern));
    } else {
        sb.append(stringPattern);
    }
    sb.append("/");
    return sb.toString();
}
/**
 * Truncates a string to at most EQUALS_SECTION_DIFF_LEN characters, keeping
 * either the leading ({@code right == true}) or trailing portion and marking
 * the cut with EQUALS_DIFF_TRUNC.
 */
private static String trunc(final boolean right, final String str)
{
    final int limit = EQUALS_SECTION_DIFF_LEN;
    if (str.length() <= limit) {
        return str;
    }
    return right
            ? str.substring(0, limit) + EQUALS_DIFF_TRUNC
            : EQUALS_DIFF_TRUNC + str.substring(str.length() - limit);
}
/**
 * Returns some helpful logging text to determine where equality between two strings
 * is broken, with one pointer working from the front of the strings and another working
 * backwards from the end.
 *
 * @param received String received from sampler.
 * @param comparison String specified for "equals" response assertion.
 * @return Two lines of text separated by newlines, and then forward and backward pointers
 *         denoting first position of difference.
 */
private static StringBuilder equalsComparisonText(final String received, final String comparison)
{
    int firstDiff;
    final int recLength = received.length();
    final int compLength = comparison.length();
    final int minLength = Math.min(recLength, compLength);
    final String startingEqSeq;
    String recDeltaSeq;
    String compDeltaSeq;
    final String endingEqSeq;
    final StringBuilder text = new StringBuilder(Math.max(recLength, compLength) * 2);
    // Forward scan: index of the first differing character.
    for (firstDiff = 0; firstDiff < minLength; firstDiff++) {
        if (received.charAt(firstDiff) != comparison.charAt(firstDiff)){
            break;
        }
    }
    if (firstDiff == 0) {
        startingEqSeq = "";
    } else {
        startingEqSeq = trunc(false, received.substring(0, firstDiff));
    }
    int lastRecDiff = recLength - 1;
    int lastCompDiff = compLength - 1;
    // Backward scan (both strings in lock-step): last differing characters,
    // never crossing the forward pointer.
    while ((lastRecDiff > firstDiff) && (lastCompDiff > firstDiff)
            && received.charAt(lastRecDiff) == comparison.charAt(lastCompDiff))
    {
        lastRecDiff--;
        lastCompDiff--;
    }
    endingEqSeq = trunc(true, received.substring(lastRecDiff + 1, recLength));
    if (endingEqSeq.length() == 0)
    {
        recDeltaSeq = trunc(true, received.substring(firstDiff, recLength));
        compDeltaSeq = trunc(true, comparison.substring(firstDiff, compLength));
    }
    else
    {
        recDeltaSeq = trunc(true, received.substring(firstDiff, lastRecDiff + 1));
        compDeltaSeq = trunc(true, comparison.substring(firstDiff, lastCompDiff + 1));
    }
    // Pad the shorter delta so both render at the same width.
    // Fixed: the original looped while i < pad.capacity(), which only worked
    // because the initial capacity happened to equal the number of appended
    // characters; use the computed length as an explicit bound instead.
    final int padLength = Math.abs(recDeltaSeq.length() - compDeltaSeq.length());
    final StringBuilder pad = new StringBuilder(padLength);
    for (int i = 0; i < padLength; i++){
        pad.append(' ');
    }
    if (recDeltaSeq.length() > compDeltaSeq.length()){
        compDeltaSeq += pad.toString();
    } else {
        recDeltaSeq += pad.toString();
    }
    text.append("\n\n");
    text.append(RECEIVED_STR);
    text.append(startingEqSeq);
    text.append(DIFF_DELTA_START);
    text.append(recDeltaSeq);
    text.append(DIFF_DELTA_END);
    text.append(endingEqSeq);
    text.append("\n\n");
    text.append(COMPARISON_STR);
    text.append(startingEqSeq);
    text.append(DIFF_DELTA_START);
    text.append(compDeltaSeq);
    text.append(DIFF_DELTA_END);
    text.append(endingEqSeq);
    text.append("\n\n");
    return text;
}
}
|
Re-arrange order slightly to give better debug output
git-svn-id: https://svn.apache.org/repos/asf/jmeter/trunk@1443164 13f79535-47bb-0310-9956-ffa450edef68
Former-commit-id: 91e2b1d4ab0301b03a591a4b9f306b920b6e35bd
|
src/components/org/apache/jmeter/assertions/ResponseAssertion.java
|
Re-arrange order slightly to give better debug output
|
|
Java
|
apache-2.0
|
614f4871a4a4bae6cfb52952e19e2bcffe438d8c
| 0
|
AsuraTeam/dubbos,AsuraTeam/dubbos,AsuraTeam/dubbos
|
/*
* Copyright 1999-2011 Alibaba Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.dubbo.rpc.cluster.support;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.alibaba.dubbo.common.Constants;
import com.alibaba.dubbo.common.Version;
import com.alibaba.dubbo.common.logger.Logger;
import com.alibaba.dubbo.common.logger.LoggerFactory;
import com.alibaba.dubbo.common.utils.NetUtils;
import com.alibaba.dubbo.rpc.Invocation;
import com.alibaba.dubbo.rpc.Invoker;
import com.alibaba.dubbo.rpc.Result;
import com.alibaba.dubbo.rpc.RpcContext;
import com.alibaba.dubbo.rpc.RpcException;
import com.alibaba.dubbo.rpc.cluster.Directory;
import com.alibaba.dubbo.rpc.cluster.LoadBalance;
/**
 * Failover cluster invoker: when an invocation fails, retry on other providers.
 * Typically used for read operations; note that each retry adds latency.
 *
 * <a href="http://en.wikipedia.org/wiki/Failover">Failover</a>
 *
 * @author william.liangf
 */
public class FailoverClusterInvoker<T> extends AbstractClusterInvoker<T> {

    private static final Logger logger = LoggerFactory.getLogger(FailoverClusterInvoker.class);

    public FailoverClusterInvoker(Directory<T> directory) {
        super(directory);
    }

    /**
     * Invokes the method on a selected provider, retrying on other providers for
     * non-business failures, up to the configured per-method "retries" count.
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public Result doInvoke(Invocation invocation, List<Invoker<T>> invokers, LoadBalance loadbalance) throws RpcException {
        checkInvokers(invokers, invocation);
        // Total attempts = configured retries + 1 initial call; guard against bad config.
        int len = getUrl().getMethodParameter(invocation.getMethodName(), Constants.RETRIES_KEY, Constants.DEFAULT_RETRIES) + 1;
        if (len <= 0) {
            len = 1;
        }
        // retry loop.
        RpcException le = null; // last exception.
        List<Invoker<T>> invoked = new ArrayList<Invoker<T>>(invokers.size()); // invoked invokers.
        Set<String> providers = new HashSet<String>(len);
        for (int i = 0; i < len; i++) {
            // Already-invoked providers are excluded from selection where possible.
            Invoker<T> invoker = select(loadbalance, invocation, invokers, invoked);
            invoked.add(invoker);
            RpcContext.getContext().setInvokers((List) invoked);
            try {
                Result result = invoker.invoke(invocation);
                if (le != null && logger.isWarnEnabled()) {
                    // Warn that earlier attempts failed even though this one succeeded.
                    logger.warn("Although retry the method " + invocation.getMethodName()
                            + " in the service " + getInterface().getName()
                            + " was successful by the provider " + invoker.getUrl().getAddress()
                            + ", but there have been failed providers " + providers
                            + " (" + providers.size() + "/" + invokers.size()
                            + ") from the registry " + directory.getUrl().getAddress()
                            + " on the consumer " + NetUtils.getLocalHost()
                            + " using the dubbo version " + Version.getVersion() + ". Last error is: "
                            + le.getMessage(), le);
                }
                return result;
            } catch (RpcException e) {
                if (e.isBiz()) { // biz exception.
                    throw e;
                }
                le = e;
            } catch (Throwable e) {
                le = new RpcException(e.getMessage(), e);
            } finally {
                providers.add(invoker.getUrl().getAddress());
            }
        }
        // All attempts exhausted: report the last error with full context.
        throw new RpcException(le != null ? le.getCode() : 0, "Failed to invoke the method "
                + invocation.getMethodName() + " in the service " + getInterface().getName()
                + ". Tried " + len + " times of the providers " + providers
                + " (" + providers.size() + "/" + invokers.size()
                + ") from the registry " + directory.getUrl().getAddress()
                + " on the consumer " + NetUtils.getLocalHost() + " using the dubbo version "
                + Version.getVersion() + ". Last error is: "
                + (le != null ? le.getMessage() : ""), le != null && le.getCause() != null ? le.getCause() : le);
    }
}
|
dubbo-cluster/src/main/java/com/alibaba/dubbo/rpc/cluster/support/FailoverClusterInvoker.java
|
/*
* Copyright 1999-2011 Alibaba Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.dubbo.rpc.cluster.support;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.alibaba.dubbo.common.Constants;
import com.alibaba.dubbo.common.Version;
import com.alibaba.dubbo.common.logger.Logger;
import com.alibaba.dubbo.common.logger.LoggerFactory;
import com.alibaba.dubbo.common.utils.NetUtils;
import com.alibaba.dubbo.rpc.Invocation;
import com.alibaba.dubbo.rpc.Invoker;
import com.alibaba.dubbo.rpc.Result;
import com.alibaba.dubbo.rpc.RpcContext;
import com.alibaba.dubbo.rpc.RpcException;
import com.alibaba.dubbo.rpc.cluster.Directory;
import com.alibaba.dubbo.rpc.cluster.LoadBalance;
/**
* 失败转移,当出现失败,重试其它服务器,通常用于读操作,但重试会带来更长延迟。
*
* <a href="http://en.wikipedia.org/wiki/Failover">Failover</a>
*
* @author william.liangf
*/
public class FailoverClusterInvoker<T> extends AbstractClusterInvoker<T> {
private static final Logger logger = LoggerFactory.getLogger(FailoverClusterInvoker.class);
public FailoverClusterInvoker(Directory<T> directory) {
super(directory);
}
@SuppressWarnings({ "unchecked", "rawtypes" })
public Result doInvoke(Invocation invocation, List<Invoker<T>> invokers, LoadBalance loadbalance) throws RpcException {
checkInvokers(invokers, invocation);
int len = getUrl().getMethodParameter(invocation.getMethodName(), Constants.RETRIES_KEY, Constants.DEFAULT_RETRIES) + 1;
if (len <= 0) {
len = 1;
}
// retry loop.
RpcException le = null; // last exception.
List<Invoker<T>> invoked = new ArrayList<Invoker<T>>(invokers.size()); // invoked invokers.
Set<String> providers = new HashSet<String>(len);
for (int i = 0; i < len; i++) {
Invoker<T> invoker = select(loadbalance, invocation, invokers, invoked);
invoked.add(invoker);
RpcContext.getContext().setInvokers((List)invoked);
try {
Result result = invoker.invoke(invocation);
if (le != null && logger.isWarnEnabled()) {
logger.warn("Although retry the method " + invocation.getMethodName()
+ " in the service " + getInterface().getName()
+ " was successful by the provider " + invoker.getUrl().getAddress()
+ ", but there have been failed providers " + providers
+ " (" + providers.size() + "/" + invokers.size()
+ ") from the registry " + directory.getUrl().getAddress()
+ " on the consumer " + NetUtils.getLocalHost()
+ " using the dubbo version " + Version.getVersion() + ". Last error is: "
+ le.getMessage(), le);
}
return result;
} catch (RpcException e) {
if (e.isBiz()) { // biz exception.
throw e;
}
le = e;
} catch (Throwable e) {
le = new RpcException(e.getMessage(), e);
} finally {
providers.add(invoker.getUrl().getAddress());
}
}
throw new RpcException(le != null ? le.getCode() : 0, "Failed to invoke the method "
+ invocation.getMethodName() + " in the service " + getInterface().getName()
+ ". Tried " + len + " times of the providers " + providers
+ " (" + providers.size() + "/" + invokers.size()
+ ") from the registry " + directory.getUrl().getAddress()
+ " on the consumer " + NetUtils.getLocalHost() + " using the dubbo version "
+ Version.getVersion() + ". Last error is: "
+ (le != null ? le.getMessage() : ""), le.getCause() != null ? le.getCause() : le);
}
}
|
检查null
git-svn-id: 3d0e7b608a819e97e591a7b753bfd1a27aaeb5ee@1062 1a56cb94-b969-4eaa-88fa-be21384802f2
|
dubbo-cluster/src/main/java/com/alibaba/dubbo/rpc/cluster/support/FailoverClusterInvoker.java
|
检查null
|
|
Java
|
apache-2.0
|
de05fb9723834072157711ff1cc7f147a7df86eb
| 0
|
wro4j/wro4j,dacofr/wro4j,UAK-35/wro4j,UAK-35/wro4j,dacofr/wro4j,dacofr/wro4j,wro4j/wro4j,UAK-35/wro4j,UAK-35/wro4j,wro4j/wro4j,dacofr/wro4j
|
/**
* Copyright Alex Objelean
*/
package ro.isdc.wro.maven.plugin;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringWriter;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.maven.artifact.DependencyResolutionRequiredException;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.project.MavenProject;
import org.codehaus.classworlds.ClassRealm;
import org.sonatype.plexus.build.incremental.BuildContext;
import ro.isdc.wro.WroRuntimeException;
import ro.isdc.wro.config.Context;
import ro.isdc.wro.extensions.manager.standalone.ExtensionsStandaloneManagerFactory;
import ro.isdc.wro.manager.WroManager;
import ro.isdc.wro.manager.factory.WroManagerFactory;
import ro.isdc.wro.manager.factory.standalone.StandaloneContext;
import ro.isdc.wro.manager.factory.standalone.StandaloneContextAware;
import ro.isdc.wro.maven.plugin.support.ExtraConfigFileAware;
import ro.isdc.wro.model.WroModel;
import ro.isdc.wro.model.WroModelInspector;
import ro.isdc.wro.model.group.Group;
import ro.isdc.wro.model.group.processor.InjectorBuilder;
import ro.isdc.wro.model.resource.Resource;
import ro.isdc.wro.model.resource.ResourceType;
import ro.isdc.wro.model.resource.locator.factory.UriLocatorFactory;
import ro.isdc.wro.model.resource.processor.ResourcePreProcessor;
import ro.isdc.wro.model.resource.processor.decorator.ExceptionHandlingProcessorDecorator;
import ro.isdc.wro.model.resource.processor.impl.css.AbstractCssImportPreProcessor;
import ro.isdc.wro.model.resource.processor.impl.css.CssImportPreProcessor;
import ro.isdc.wro.model.resource.support.hash.HashStrategy;
import ro.isdc.wro.util.Function;
/**
 * Defines most common properties used by wro4j build-time solution infrastructure.
 *
 * @author Alex Objelean
 */
public abstract class AbstractWro4jMojo
    extends AbstractMojo {
  // NOTE: the @parameter/@component javadoc tags below are parsed by the
  // maven-plugin-plugin and must not be renamed or reworded casually.
  /**
   * File containing the groups definitions.
   *
   * @parameter default-value="${basedir}/src/main/webapp/WEB-INF/wro.xml" expression="${wroFile}"
   * @optional
   */
  private File wroFile;
  /**
   * The folder where web application context resides useful for locating resources relative to servletContext .
   *
   * @parameter default-value="${basedir}/src/main/webapp/" expression="${contextFolder}"
   */
  private File contextFolder;
  /**
   * @parameter default-value="true" expression="${minimize}"
   * @optional
   */
  private boolean minimize;
  /**
   * @parameter default-value="true" expression="${ignoreMissingResources}"
   * @optional
   */
  private boolean ignoreMissingResources;
  /**
   * Comma separated group names. This field is optional. If no value is provided, a file for each group will be
   * created.
   *
   * @parameter expression="${targetGroups}"
   * @optional
   */
  private String targetGroups;
  /**
   * @parameter default-value="${project}"
   */
  private MavenProject mavenProject;
  /**
   * @parameter expression="${wroManagerFactory}"
   * @optional
   */
  private String wroManagerFactory;
  /**
   * An instance of {@link StandaloneContextAware}.
   */
  // Lazily created by getManagerFactory().
  private WroManagerFactory managerFactory;
  /**
   * The path to configuration file.
   *
   * @parameter default-value="${basedir}/src/main/webapp/WEB-INF/wro.properties" expression="${extraConfigFile}"
   * @optional
   */
  private File extraConfigFile;
  /**
   * Responsible for identifying the resources changed during incremental build.
   * <p/>
   * Read more about it <a href="http://wiki.eclipse.org/M2E_compatible_maven_plugins#BuildContext">here</a>
   *
   * @component
   */
  // May be null when the build runs outside an m2e/incremental-aware environment.
  private BuildContext buildContext;
/**
 * {@inheritDoc}
 */
public final void execute()
    throws MojoExecutionException {
  validate();
  // Log the effective configuration before any processing happens.
  getLog().info("Executing the mojo: ");
  getLog().info("Wro4j Model path: " + wroFile.getPath());
  getLog().info("targetGroups: " + getTargetGroups());
  getLog().info("minimize: " + isMinimize());
  getLog().info("ignoreMissingResources: " + isIgnoreMissingResources());
  getLog().debug("wroManagerFactory: " + this.wroManagerFactory);
  getLog().debug("extraConfig: " + extraConfigFile);
  // Make project runtime dependencies visible to the plugin classloader.
  extendPluginClasspath();
  Context.set(Context.standaloneContext());
  try {
    onBeforeExecute();
    doExecute();
  } catch (final Exception e) {
    throw new MojoExecutionException("Exception occured while processing: " + e.getMessage(), e);
  } finally {
    // Always invoked, even when doExecute() fails.
    onAfterExecute();
  }
}
/**
 * Invoked before execution is performed. Default implementation does nothing;
 * subclasses may override to hook setup work.
 */
protected void onBeforeExecute() {
}

/**
 * Invoked right after execution completion. This method is invoked also if the execution failed with an exception.
 */
protected void onAfterExecute() {
}

/**
 * Creates a {@link StandaloneContext} by setting properties passed after mojo is initialized.
 *
 * @return a context carrying this mojo's configuration for standalone (build-time) processing.
 */
private StandaloneContext createStandaloneContext() {
  final StandaloneContext runContext = new StandaloneContext();
  runContext.setContextFolder(getContextFolder());
  runContext.setMinimize(isMinimize());
  runContext.setWroFile(getWroFile());
  runContext.setIgnoreMissingResources(isIgnoreMissingResources());
  return runContext;
}

/**
 * Perform actual plugin processing. Implemented by concrete mojos.
 */
protected abstract void doExecute()
    throws Exception;
/**
 * This method will ensure that you have a right and initialized instance of {@link StandaloneContextAware}. When
 * overriding this method, ensure that creating managerFactory performs injection during manager creation, otherwise
 * the manager won't be initialized properly.
 *
 * @return {@link WroManagerFactory} implementation.
 */
protected WroManagerFactory getManagerFactory() {
  // Lazily created and cached in the managerFactory field.
  if (managerFactory == null) {
    try {
      managerFactory = newWroManagerFactory();
    } catch (final MojoExecutionException e) {
      throw WroRuntimeException.wrap(e);
    }
    // initialize before process.
    if (managerFactory instanceof StandaloneContextAware) {
      ((StandaloneContextAware) managerFactory).initialize(createStandaloneContext());
    }
  }
  return managerFactory;
}

/**
 * Creates the {@link WroManagerFactory}: either the custom one configured via the
 * wroManagerFactory parameter, or the default {@link ExtensionsStandaloneManagerFactory}.
 *
 * @return a new, not-yet-initialized factory instance.
 */
protected WroManagerFactory newWroManagerFactory()
    throws MojoExecutionException {
  WroManagerFactory factory = null;
  if (wroManagerFactory != null) {
    factory = createCustomManagerFactory();
  } else {
    factory = new ExtensionsStandaloneManagerFactory();
  }
  getLog().info("wroManagerFactory class: " + factory.getClass().getName());
  // Factories that need extra configuration must be given the config file.
  if (factory instanceof ExtraConfigFileAware) {
    if (extraConfigFile == null) {
      throw new MojoExecutionException("The " + factory.getClass() + " requires a valid extraConfigFile!");
    }
    getLog().debug("Using extraConfigFile: " + extraConfigFile.getAbsolutePath());
    ((ExtraConfigFileAware) factory).setExtraConfigFile(extraConfigFile);
  }
  return factory;
}

/**
 * Creates an instance of Manager factory based on the value of the wroManagerFactory plugin parameter value.
 */
private WroManagerFactory createCustomManagerFactory()
    throws MojoExecutionException {
  WroManagerFactory managerFactory;
  try {
    // The class is loaded from the extended plugin classpath (see extendPluginClasspath).
    final Class<?> wroManagerFactoryClass = Thread.currentThread().getContextClassLoader().loadClass(
        wroManagerFactory.trim());
    managerFactory = (WroManagerFactory) wroManagerFactoryClass.newInstance();
  } catch (final Exception e) {
    getLog().error("Cannot instantiate wroManagerFactoryClass", e);
    throw new MojoExecutionException("Invalid wroManagerFactoryClass, called: " + wroManagerFactory, e);
  }
  return managerFactory;
}
/**
 * @return a list of groups which will be processed.
 */
protected final List<String> getTargetGroupsAsList()
    throws Exception {
  List<String> result = null;
  // Priority: incremental delta > explicit targetGroups > every group in the model.
  if (isIncrementalBuild()) {
    result = getIncrementalGroupNames();
  } else if (getTargetGroups() == null) {
    result = getAllModelGroupNames();
  } else {
    result = Arrays.asList(getTargetGroups().split(","));
  }
  // Remember the current fingerprints so the next incremental build can diff them.
  persistResourceFingerprints(result);
  getLog().info("The following groups will be processed: " + result);
  return result;
}

/**
 * Store digest for all resources contained inside the list of provided groups.
 */
private void persistResourceFingerprints(final List<String> groupNames) {
  // No build context (non-incremental environment) => nothing to persist.
  if (buildContext != null) {
    final WroModelInspector modelInspector = new WroModelInspector(getModel());
    for (final String groupName : groupNames) {
      final Group group = modelInspector.getGroupByName(groupName);
      if (group != null) {
        for (final Resource resource : group.getResources()) {
          persistResourceFingerprints(resource);
        }
      }
    }
  }
}
/**
 * Persists the fingerprint (content hash) of a single resource into the build
 * context and, for CSS resources, of every @import-ed resource as well.
 * I/O failures are logged at debug level and otherwise ignored (best effort).
 */
private void persistResourceFingerprints(final Resource resource) {
  final WroManager manager = getWroManager();
  final HashStrategy hashStrategy = manager.getHashStrategy();
  final UriLocatorFactory locatorFactory = manager.getUriLocatorFactory();
  try {
    final String fingerprint = hashStrategy.getHash(locatorFactory.locate(resource.getUri()));
    buildContext.setValue(resource.getUri(), fingerprint);
    getLog().debug("Persist fingerprint for resource '" + resource.getUri() + "' : " + fingerprint);
    if (resource.getType() == ResourceType.CSS) {
      // NOTE(review): no explicit charset - uses the platform default, as elsewhere
      // in this class; confirm whether resources are expected to be UTF-8.
      final Reader reader = new InputStreamReader(locatorFactory.locate(resource.getUri()));
      try {
        getLog().debug("Check @import directive from " + resource);
        // persist fingerprints in imported resources.
        persistFingerprintsForCssImports(resource, reader);
      } finally {
        // Fixed: the reader (and its underlying stream) was previously never closed.
        reader.close();
      }
    }
  } catch (final IOException e) {
    getLog().debug("could not check fingerprint of resource: " + resource);
  }
}
/**
 * Invokes the provided function for each detected css import.
 *
 * @param func
 *          a function (closure) invoked for each found import. It will be provided as argument the uri of imported
 *          css.
 */
private void forEachCssImportApply(final Function<String, Void> func, final Resource resource, final Reader reader)
    throws IOException {
  final ResourcePreProcessor cssImportProcessor = new AbstractCssImportPreProcessor() {
    @Override
    protected void onImportDetected(final String importedUri) {
      getLog().debug("Found @import " + importedUri);
      try {
        func.apply(importedUri);
      } catch (final Exception e) {
        getLog().error("Cannot apply a function on @import resource: " + importedUri + ". Ignoring it.", e);
      }
      // NOTE(review): this unconditionally persists the import's fingerprint; when
      // called from persistFingerprintsForCssImports the supplied func does the same,
      // so each import appears to be fingerprinted twice - confirm whether intended.
      persistResourceFingerprints(Resource.create(importedUri, ResourceType.CSS));
    }

    @Override
    protected String doTransform(final String cssContent, final List<Resource> foundImports)
        throws IOException {
      // no need to build the content, since we are interested in finding imported resources only
      return "";
    }

    @Override
    public String toString() {
      return CssImportPreProcessor.class.getSimpleName();
    }
  };
  // Decorate so a failing processor never breaks the scan.
  final ResourcePreProcessor processor = new ExceptionHandlingProcessorDecorator(cssImportProcessor) {
    @Override
    protected boolean isIgnoreFailingProcessor() {
      return true;
    }
  };
  InjectorBuilder.create(getManagerFactory()).build().inject(processor);
  processor.process(resource, reader, new StringWriter());
}

/**
 * Persists the fingerprint of every resource @import-ed by the given CSS resource.
 */
private void persistFingerprintsForCssImports(final Resource resource, final Reader reader)
    throws IOException {
  forEachCssImportApply(new Function<String, Void>() {
    public Void apply(final String importedUri)
        throws Exception {
      persistResourceFingerprints(Resource.create(importedUri, ResourceType.CSS));
      return null;
    }
  }, resource, reader);
}

/**
 * Sets {@code changeDetected} to true when any resource @import-ed by the given
 * CSS resource has changed; aborts the scan (via an exception swallowed by the
 * decorated processor) as soon as the first change is found.
 */
private void detectChangeForCssImports(final Resource resource, final Reader reader,
    final AtomicBoolean changeDetected)
    throws IOException {
  forEachCssImportApply(new Function<String, Void>() {
    public Void apply(final String importedUri)
        throws Exception {
      final boolean isImportChanged = isResourceChanged(Resource.create(importedUri, ResourceType.CSS));
      getLog().debug("\tisImportChanged: " + isImportChanged);
      if (isImportChanged) {
        changeDetected.set(true);
        // no need to continue
        throw new WroRuntimeException("Change detected. No need to continue processing");
      }
      return null;
    }
  }, resource, reader);
}

/**
 * @return a list of groups changed by incremental builds.
 */
private List<String> getIncrementalGroupNames()
    throws Exception {
  final List<String> changedGroupNames = new ArrayList<String>();
  for (final Group group : getModel().getGroups()) {
    for (final Resource resource : group.getResources()) {
      getLog().debug("checking delta for resource: " + resource);
      if (isResourceChanged(resource)) {
        getLog().debug("detected change for resource: " + resource + " and group: " + group.getName());
        changedGroupNames.add(group.getName());
        // no need to check rest of resources from this group
        break;
      }
    }
  }
  return changedGroupNames;
}
/**
 * @return true when the fingerprint of the given resource (or of any of its CSS
 *         imports) differs from the value persisted during the previous build.
 *         I/O failures are logged at debug level and reported as "not changed".
 */
private boolean isResourceChanged(final Resource resource) {
  final WroManager manager = getWroManager();
  final HashStrategy hashStrategy = manager.getHashStrategy();
  final UriLocatorFactory locatorFactory = manager.getUriLocatorFactory();
  // using AtomicBoolean because we need to mutate this variable inside an anonymous class.
  final AtomicBoolean changeDetected = new AtomicBoolean(false);
  try {
    final String fingerprint = hashStrategy.getHash(locatorFactory.locate(resource.getUri()));
    // NOTE(review): String.valueOf turns a missing (null) previous value into the
    // literal "null", which never equals a real hash - so a first build always
    // reports a change. Presumably intended; confirm before altering.
    final String previousFingerprint = buildContext != null ? String
        .valueOf(buildContext.getValue(resource.getUri())) : null;
    getLog().debug("fingerprint <current, prev>: <" + fingerprint + ", " + previousFingerprint + ">");
    changeDetected.set(fingerprint != null && !fingerprint.equals(previousFingerprint));
    if (!changeDetected.get() && resource.getType() == ResourceType.CSS) {
      final Reader reader = new InputStreamReader(locatorFactory.locate(resource.getUri()));
      try {
        getLog().debug("Check @import directive from " + resource);
        // detect changes in imported resources.
        detectChangeForCssImports(resource, reader, changeDetected);
      } finally {
        // Fixed: the reader (and its underlying stream) was previously never closed.
        reader.close();
      }
    }
    return changeDetected.get();
  } catch (final IOException e) {
    getLog().debug("failed to check for delta resource: " + resource);
  }
  return false;
}
/**
 * @return true if the build was triggered by an incremental change.
 */
protected final boolean isIncrementalBuild() {
  return buildContext != null && buildContext.isIncremental();
}

/**
 * @return the names of every group defined in the model.
 */
private List<String> getAllModelGroupNames() {
  return new WroModelInspector(getModel()).getGroupNames();
}

/**
 * @return a freshly created model from the configured model factory.
 */
private WroModel getModel() {
  return getWroManager().getModelFactory().create();
}

/**
 * @return the {@link WroManager}, wrapping any creation failure as an unchecked exception.
 */
private WroManager getWroManager() {
  try {
    return getManagerFactory().create();
  } catch (final Exception e) {
    throw WroRuntimeException.wrap(e);
  }
}
/**
 * Checks if all required fields are configured.
 *
 * @throws MojoExecutionException when a mandatory plugin parameter is missing.
 */
protected void validate()
    throws MojoExecutionException {
  if (wroFile == null) {
    // Fixed: the original reported "contextFolder was not set!" for a missing wroFile.
    throw new MojoExecutionException("wroFile was not set!");
  }
  if (contextFolder == null) {
    throw new MojoExecutionException("contextFolder was not set!");
  }
}
/**
 * Update the classpath: makes the project's runtime dependencies visible to the
 * plugin (e.g. for loading a custom wroManagerFactory class).
 */
protected final void extendPluginClasspath()
    throws MojoExecutionException {
  // this code is inspired from http://teleal.org/weblog/Extending%20the%20Maven%20plugin%20classpath.html
  final List<String> classpathElements = new ArrayList<String>();
  try {
    classpathElements.addAll(mavenProject.getRuntimeClasspathElements());
  } catch (final DependencyResolutionRequiredException e) {
    throw new MojoExecutionException("Could not get compile classpath elements", e);
  }
  // Replace the thread context classloader for the remainder of the execution.
  final ClassLoader classLoader = createClassLoader(classpathElements);
  Thread.currentThread().setContextClassLoader(classLoader);
}

/**
 * @return {@link ClassRealm} based on project dependencies.
 */
private ClassLoader createClassLoader(final List<String> classpathElements) {
  getLog().debug("Classpath elements:");
  final List<URL> urls = new ArrayList<URL>();
  try {
    for (final String element : classpathElements) {
      final File elementFile = new File(element);
      getLog().debug("Adding element to plugin classpath: " + elementFile.getPath());
      urls.add(elementFile.toURI().toURL());
    }
  } catch (final Exception e) {
    getLog().error("Error retreiving URL for artifact", e);
    throw new RuntimeException(e);
  }
  // Child-first URLs with the current context classloader as parent.
  return new URLClassLoader(urls.toArray(new URL[] {}), Thread.currentThread().getContextClassLoader());
}
/**
 * @param contextFolder
 *          the servletContextFolder to set
 * @VisibleForTesting
 */
void setContextFolder(final File contextFolder) {
  this.contextFolder = contextFolder;
}

/**
 * @param wroFile
 *          the wroFile to set
 * @VisibleForTesting
 */
void setWroFile(final File wroFile) {
  this.wroFile = wroFile;
}

/**
 * @return the wroFile
 * @VisibleForTesting
 */
File getWroFile() {
  return this.wroFile;
}

/**
 * @return the contextFolder
 * @VisibleForTesting
 */
File getContextFolder() {
  return this.contextFolder;
}

/**
 * @param minimize
 *          flag for minimization.
 * @VisibleForTesting
 */
void setMinimize(final boolean minimize) {
  this.minimize = minimize;
}

/**
 * @param ignoreMissingResources
 *          the ignoreMissingResources to set
 * @VisibleForTesting
 */
void setIgnoreMissingResources(final boolean ignoreMissingResources) {
  this.ignoreMissingResources = ignoreMissingResources;
}

/**
 * @return the minimize
 * @VisibleForTesting
 */
boolean isMinimize() {
  return this.minimize;
}

/**
 * @return the ignoreMissingResources
 * @VisibleForTesting
 */
boolean isIgnoreMissingResources() {
  return this.ignoreMissingResources;
}

/**
 * Used for testing.
 *
 * @param mavenProject
 *          the mavenProject to set
 */
void setMavenProject(final MavenProject mavenProject) {
  this.mavenProject = mavenProject;
}

/**
 * @return the targetGroups
 * @VisibleForTesting
 */
String getTargetGroups() {
  return this.targetGroups;
}

/**
 * @param targetGroups
 *          comma separated group names.
 * @VisibleForTesting
 */
void setTargetGroups(final String targetGroups) {
  this.targetGroups = targetGroups;
}

/**
 * @param wroManagerFactory
 *          fully qualified name of the {@link WroManagerFactory} class.
 * @VisibleForTesting
 */
void setWroManagerFactory(final String wroManagerFactory) {
  this.wroManagerFactory = wroManagerFactory;
}

/**
 * @param extraConfigFile
 *          the extraConfigFile to set
 * @VisibleForTesting
 */
void setExtraConfigFile(final File extraConfigFile) {
  this.extraConfigFile = extraConfigFile;
}

/**
 * @VisibleForTesting
 */
void setBuildContext(final BuildContext buildContext) {
  this.buildContext = buildContext;
}
}
|
wro4j-maven-plugin/src/main/java/ro/isdc/wro/maven/plugin/AbstractWro4jMojo.java
|
/**
* Copyright Alex Objelean
*/
package ro.isdc.wro.maven.plugin;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringWriter;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.maven.artifact.DependencyResolutionRequiredException;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.project.MavenProject;
import org.codehaus.classworlds.ClassRealm;
import org.sonatype.plexus.build.incremental.BuildContext;
import ro.isdc.wro.WroRuntimeException;
import ro.isdc.wro.config.Context;
import ro.isdc.wro.extensions.manager.standalone.ExtensionsStandaloneManagerFactory;
import ro.isdc.wro.manager.WroManager;
import ro.isdc.wro.manager.factory.WroManagerFactory;
import ro.isdc.wro.manager.factory.standalone.StandaloneContext;
import ro.isdc.wro.manager.factory.standalone.StandaloneContextAware;
import ro.isdc.wro.maven.plugin.support.ExtraConfigFileAware;
import ro.isdc.wro.model.WroModel;
import ro.isdc.wro.model.WroModelInspector;
import ro.isdc.wro.model.group.Group;
import ro.isdc.wro.model.group.processor.InjectorBuilder;
import ro.isdc.wro.model.resource.Resource;
import ro.isdc.wro.model.resource.ResourceType;
import ro.isdc.wro.model.resource.locator.factory.UriLocatorFactory;
import ro.isdc.wro.model.resource.processor.ResourcePreProcessor;
import ro.isdc.wro.model.resource.processor.decorator.ExceptionHandlingProcessorDecorator;
import ro.isdc.wro.model.resource.processor.impl.css.AbstractCssImportPreProcessor;
import ro.isdc.wro.model.resource.processor.impl.css.CssImportPreProcessor;
import ro.isdc.wro.model.resource.support.hash.HashStrategy;
import ro.isdc.wro.util.Function;
/**
* Defines most common properties used by wro4j build-time solution infrastructure.
*
* @author Alex Objelean
*/
public abstract class AbstractWro4jMojo
extends AbstractMojo {
/**
* File containing the groups definitions.
*
* @parameter default-value="${basedir}/src/main/webapp/WEB-INF/wro.xml" expression="${wroFile}"
* @optional
*/
private File wroFile;
/**
* The folder where web application context resides useful for locating resources relative to servletContext .
*
* @parameter default-value="${basedir}/src/main/webapp/" expression="${contextFolder}"
*/
private File contextFolder;
/**
* @parameter default-value="true" expression="${minimize}"
* @optional
*/
private boolean minimize;
/**
* @parameter default-value="true" expression="${ignoreMissingResources}"
* @optional
*/
private boolean ignoreMissingResources;
/**
* Comma separated group names. This field is optional. If no value is provided, a file for each group will be
* created.
*
* @parameter expression="${targetGroups}"
* @optional
*/
private String targetGroups;
/**
* @parameter default-value="${project}"
*/
private MavenProject mavenProject;
/**
* @parameter expression="${wroManagerFactory}"
* @optional
*/
private String wroManagerFactory;
/**
* An instance of {@link StandaloneContextAware}.
*/
private WroManagerFactory managerFactory;
/**
* The path to configuration file.
*
* @parameter default-value="${basedir}/src/main/webapp/WEB-INF/wro.properties" expression="${extraConfigFile}"
* @optional
*/
private File extraConfigFile;
/**
* Responsible for identifying the resources changed during incremental build.
* <p/>
* Read more about it <a href="http://wiki.eclipse.org/M2E_compatible_maven_plugins#BuildContext">here</a>
*
* @component
*/
private BuildContext buildContext;
/**
 * {@inheritDoc}
 * <p/>
 * Validates the mojo configuration, logs it, extends the plugin classpath with the
 * project's runtime dependencies and delegates the actual work to {@link #doExecute()}.
 * {@link #onAfterExecute()} is always invoked, even when processing fails.
 */
public final void execute()
    throws MojoExecutionException {
  validate();
  getLog().info("Executing the mojo: ");
  getLog().info("Wro4j Model path: " + wroFile.getPath());
  getLog().info("targetGroups: " + getTargetGroups());
  getLog().info("minimize: " + isMinimize());
  getLog().info("ignoreMissingResources: " + isIgnoreMissingResources());
  getLog().debug("wroManagerFactory: " + this.wroManagerFactory);
  getLog().debug("extraConfig: " + extraConfigFile);
  // make the project's runtime dependencies visible to the plugin
  extendPluginClasspath();
  Context.set(Context.standaloneContext());
  try {
    onBeforeExecute();
    doExecute();
  } catch (final Exception e) {
    // fixed typo in user-facing message: "occured" -> "occurred"
    throw new MojoExecutionException("Exception occurred while processing: " + e.getMessage(), e);
  } finally {
    // always invoked, even on failure
    onAfterExecute();
  }
}
/**
 * Invoked before execution is performed. The default implementation does nothing;
 * subclasses may override this hook.
 */
protected void onBeforeExecute() {
}
/**
 * Invoked right after execution completion. This method is invoked also if the execution failed
 * with an exception. The default implementation does nothing; subclasses may override this hook.
 */
protected void onAfterExecute() {
}
/**
 * Builds a {@link StandaloneContext} snapshot of this mojo's current configuration:
 * wro file, context folder, minimize flag and ignoreMissingResources flag.
 */
private StandaloneContext createStandaloneContext() {
  final StandaloneContext context = new StandaloneContext();
  context.setWroFile(getWroFile());
  context.setContextFolder(getContextFolder());
  context.setIgnoreMissingResources(isIgnoreMissingResources());
  context.setMinimize(isMinimize());
  return context;
}
/**
 * Perform actual plugin processing. Implementations run after validation, classpath
 * extension and {@link #onBeforeExecute()}.
 */
protected abstract void doExecute()
    throws Exception;
/**
 * This method will ensure that you have a right and initialized instance of {@link StandaloneContextAware}. When
 * overriding this method, ensure that creating managerFactory performs injection during manager creation, otherwise
 * the manager won't be initialized properly.
 *
 * @return {@link WroManagerFactory} implementation, lazily created on first call and cached in a field.
 */
protected WroManagerFactory getManagerFactory() {
  if (managerFactory == null) {
    try {
      managerFactory = newWroManagerFactory();
    } catch (final MojoExecutionException e) {
      // callers do not declare MojoExecutionException, so wrap into a runtime exception
      throw WroRuntimeException.wrap(e);
    }
    // initialize before process.
    if (managerFactory instanceof StandaloneContextAware) {
      ((StandaloneContextAware) managerFactory).initialize(createStandaloneContext());
    }
  }
  return managerFactory;
}
/**
 * Creates the {@link WroManagerFactory} to use: the custom class named by the
 * {@code wroManagerFactory} parameter when set, otherwise the default
 * {@link ExtensionsStandaloneManagerFactory}. When the factory implements
 * {@link ExtraConfigFileAware}, a valid {@code extraConfigFile} is required and passed to it.
 *
 * @throws MojoExecutionException when an {@link ExtraConfigFileAware} factory has no extraConfigFile configured.
 */
protected WroManagerFactory newWroManagerFactory()
    throws MojoExecutionException {
  WroManagerFactory factory = null;
  if (wroManagerFactory != null) {
    factory = createCustomManagerFactory();
  } else {
    factory = new ExtensionsStandaloneManagerFactory();
  }
  getLog().info("wroManagerFactory class: " + factory.getClass().getName());
  if (factory instanceof ExtraConfigFileAware) {
    if (extraConfigFile == null) {
      throw new MojoExecutionException("The " + factory.getClass() + " requires a valid extraConfigFile!");
    }
    getLog().debug("Using extraConfigFile: " + extraConfigFile.getAbsolutePath());
    ((ExtraConfigFileAware) factory).setExtraConfigFile(extraConfigFile);
  }
  return factory;
}
/**
 * Creates an instance of Manager factory based on the value of the wroManagerFactory plugin parameter value.
 * The class is loaded through the thread context class loader (previously extended with the
 * project's runtime classpath) and must have a public no-arg constructor.
 *
 * @throws MojoExecutionException when the class cannot be loaded, cast or instantiated.
 */
private WroManagerFactory createCustomManagerFactory()
    throws MojoExecutionException {
  WroManagerFactory managerFactory;
  try {
    final Class<?> wroManagerFactoryClass = Thread.currentThread().getContextClassLoader().loadClass(
        wroManagerFactory.trim());
    managerFactory = (WroManagerFactory) wroManagerFactoryClass.newInstance();
  } catch (final Exception e) {
    getLog().error("Cannot instantiate wroManagerFactoryClass", e);
    throw new MojoExecutionException("Invalid wroManagerFactoryClass, called: " + wroManagerFactory, e);
  }
  return managerFactory;
}
/**
 * @return a list of groups which will be processed: during an incremental build only the
 *         groups with changed resources; otherwise the configured targetGroups, or every
 *         group in the model when no targetGroups were configured. As a side effect the
 *         fingerprints of the selected groups' resources are persisted for future delta checks.
 */
protected final List<String> getTargetGroupsAsList()
    throws Exception {
  List<String> result = null;
  if (isIncrementalBuild()) {
    result = getIncrementalGroupNames();
  } else if (getTargetGroups() == null) {
    result = getAllModelGroupNames();
  } else {
    result = Arrays.asList(getTargetGroups().split(","));
  }
  persistResourceFingerprints(result);
  getLog().info("The following groups will be processed: " + result);
  return result;
}
/**
 * Store digest for all resources contained inside the list of provided groups.
 * Skipped entirely when no {@link BuildContext} is available (i.e. outside an
 * incremental-build-capable environment such as m2e).
 */
private void persistResourceFingerprints(final List<String> groupNames) {
  if (buildContext != null) {
    final WroModelInspector modelInspector = new WroModelInspector(getModel());
    for (final String groupName : groupNames) {
      final Group group = modelInspector.getGroupByName(groupName);
      // unknown group names are silently ignored
      if (group != null) {
        for (final Resource resource : group.getResources()) {
          persistResourceFingerprints(resource);
        }
      }
    }
  }
}
/**
 * Computes the fingerprint (hash) of a single resource and stores it in the
 * {@link BuildContext} so later incremental builds can detect changes. For CSS resources
 * the fingerprints of all {@code @import}-ed resources are persisted as well.
 * <p/>
 * An {@link IOException} is deliberately swallowed (only logged at debug level): a resource
 * that cannot be located must not fail the build here.
 */
private void persistResourceFingerprints(final Resource resource) {
  final WroManager manager = getWroManager();
  final HashStrategy hashStrategy = manager.getHashStrategy();
  final UriLocatorFactory locatorFactory = manager.getUriLocatorFactory();
  try {
    final String fingerprint = hashStrategy.getHash(locatorFactory.locate(resource.getUri()));
    buildContext.setValue(resource.getUri(), fingerprint);
    getLog().debug("Persist fingerprint for resource '" + resource.getUri() + "' : " + fingerprint);
    if (resource.getType() == ResourceType.CSS) {
      // NOTE: uses the platform default charset - presumably acceptable here; confirm if
      // resources may use a different encoding.
      final Reader reader = new InputStreamReader(locatorFactory.locate(resource.getUri()));
      try {
        getLog().debug("Check @import directive from " + resource);
        // persist fingerprints in imported resources.
        persistFingerprintsForCssImports(resource, reader);
      } finally {
        // fix: close the reader so the underlying stream is not leaked
        reader.close();
      }
    }
  } catch (final IOException e) {
    getLog().debug("could not check fingerprint of resource: " + resource);
  }
}
/**
 * Invokes the provided function for each detected css import.
 *
 * @param func callback applied to every imported uri found in the css content; any exception
 *          it throws is logged and ignored.
 * @param resource the css resource being inspected.
 * @param reader provides the content of the resource (not closed here - the caller owns it).
 */
private void forEachCssImportApply(final Function<String, Void> func, final Resource resource, final Reader reader)
    throws IOException {
  final ResourcePreProcessor cssImportProcessor = new AbstractCssImportPreProcessor() {
    @Override
    protected void onImportDetected(final String importedUri) {
      getLog().debug("Found @import " + importedUri);
      try {
        func.apply(importedUri);
      } catch (final Exception e) {
        getLog().error("Cannot apply a function on @import resource: " + importedUri + ". Ignoring it.", e);
      }
      // NOTE(review): this persists the fingerprint of every import for ALL callers, which
      // duplicates the work done by persistFingerprintsForCssImports' own callback - confirm
      // whether this is intended.
      persistResourceFingerprints(Resource.create(importedUri, ResourceType.CSS));
    }
    @Override
    protected String doTransform(final String cssContent, final List<Resource> foundImports)
        throws IOException {
      // no need to build the content, since we are interested in finding imported resources only
      return "";
    }
    @Override
    public String toString() {
      return CssImportPreProcessor.class.getSimpleName();
    }
  };
  // swallow processor failures: a broken import must not abort the whole inspection
  final ResourcePreProcessor processor = new ExceptionHandlingProcessorDecorator(cssImportProcessor) {
    @Override
    protected boolean isIgnoreFailingProcessor() {
      return true;
    }
  };
  InjectorBuilder.create(getManagerFactory()).build().inject(processor);
  processor.process(resource, reader, new StringWriter());
}
/**
 * Persists the fingerprint of every resource {@code @import}-ed by the given css resource.
 */
private void persistFingerprintsForCssImports(final Resource resource, final Reader reader) throws IOException {
  forEachCssImportApply(new Function<String, Void>() {
    public Void apply(final String importedUri)
        throws Exception {
      persistResourceFingerprints(Resource.create(importedUri, ResourceType.CSS));
      return null;
    }
  }, resource, reader);
}
/**
 * Sets {@code changeDetected} to true when any resource {@code @import}-ed by the given css
 * resource has changed since the previous build.
 * <p/>
 * NOTE(review): despite its name this method does not create or return a processor - it runs
 * the change detection itself. The {@link WroRuntimeException} thrown below merely
 * short-circuits further parsing once a change is found; it is caught and logged by the
 * callback handling inside forEachCssImportApply.
 */
private void createCssImportProcessor(final Resource resource, final Reader reader,
    final AtomicBoolean changeDetected)
    throws IOException {
  forEachCssImportApply(new Function<String, Void>() {
    public Void apply(final String importedUri)
        throws Exception {
      final boolean isImportChanged = isResourceChanged(Resource.create(importedUri, ResourceType.CSS));
      getLog().debug("\tisImportChanged: " + isImportChanged);
      if (isImportChanged) {
        changeDetected.set(true);
        // no need to continue
        throw new WroRuntimeException("Change detected. No need to continue processing");
      }
      return null;
    }
  }, resource, reader);
}
/**
 * @return a list of groups changed by incremental builds. A group is considered changed as
 *         soon as one of its resources changed; the rest of the group is then skipped.
 */
private List<String> getIncrementalGroupNames()
    throws Exception {
  final List<String> changedGroupNames = new ArrayList<String>();
  for (final Group group : getModel().getGroups()) {
    for (final Resource resource : group.getResources()) {
      getLog().debug("checking delta for resource: " + resource);
      if (isResourceChanged(resource)) {
        getLog().debug("detected change for resource: " + resource + " and group: " + group.getName());
        changedGroupNames.add(group.getName());
        // no need to check rest of resources from this group
        break;
      }
    }
  }
  return changedGroupNames;
}
/**
 * Detects whether the given resource changed since the previous build by comparing its
 * current hash with the fingerprint stored in the {@link BuildContext}. For unchanged CSS
 * resources the {@code @import}-ed resources are checked recursively.
 * <p/>
 * Any {@link IOException} is logged at debug level and treated as "not changed".
 */
private boolean isResourceChanged(final Resource resource) {
  final WroManager manager = getWroManager();
  final HashStrategy hashStrategy = manager.getHashStrategy();
  final UriLocatorFactory locatorFactory = manager.getUriLocatorFactory();
  // using AtomicBoolean because we need to mutate this variable inside an anonymous class.
  final AtomicBoolean changeDetected = new AtomicBoolean(false);
  try {
    final String fingerprint = hashStrategy.getHash(locatorFactory.locate(resource.getUri()));
    // NOTE: String.valueOf(null) yields the literal "null", which never matches a real hash,
    // so a missing previous fingerprint is reported as a change - presumably intended.
    final String previousFingerprint = buildContext != null ? String
        .valueOf(buildContext.getValue(resource.getUri())) : null;
    getLog().debug("fingerprint <current, prev>: <" + fingerprint + ", " + previousFingerprint + ">");
    changeDetected.set(fingerprint != null && !fingerprint.equals(previousFingerprint));
    if (!changeDetected.get() && resource.getType() == ResourceType.CSS) {
      final Reader reader = new InputStreamReader(locatorFactory.locate(resource.getUri()));
      try {
        getLog().debug("Check @import directive from " + resource);
        // detect changes in imported resources.
        createCssImportProcessor(resource, reader, changeDetected);
      } finally {
        // fix: close the reader so the underlying stream is not leaked
        reader.close();
      }
    }
    return changeDetected.get();
  } catch (final IOException e) {
    getLog().debug("failed to check for delta resource: " + resource);
  }
  return false;
}
/**
 * @return true if the build was triggered by an incremental change (i.e. a
 *         {@link BuildContext} is available and reports an incremental build).
 */
protected final boolean isIncrementalBuild() {
  if (buildContext == null) {
    return false;
  }
  return buildContext.isIncremental();
}
/**
 * @return the names of every group defined in the wro model.
 */
private List<String> getAllModelGroupNames() {
  final WroModelInspector inspector = new WroModelInspector(getModel());
  return inspector.getGroupNames();
}
/**
 * @return the {@link WroModel} freshly built by the manager's model factory.
 */
private WroModel getModel() {
  final WroManager manager = getWroManager();
  return manager.getModelFactory().create();
}
/**
 * @return the {@link WroManager} created by the configured factory; any checked exception
 *         raised during creation is wrapped into a {@link WroRuntimeException}.
 */
private WroManager getWroManager() {
  try {
    return getManagerFactory().create();
  } catch (final Exception e) {
    throw WroRuntimeException.wrap(e);
  }
}
/**
 * Checks if all required fields are configured.
 *
 * @throws MojoExecutionException when a mandatory mojo parameter (wroFile or contextFolder) is missing.
 */
protected void validate()
    throws MojoExecutionException {
  if (wroFile == null) {
    // fixed: the message wrongly referred to "contextFolder"
    throw new MojoExecutionException("wroFile was not set!");
  }
  if (contextFolder == null) {
    throw new MojoExecutionException("contextFolder was not set!");
  }
}
/**
 * Update the classpath: builds a class loader over the project's runtime classpath elements
 * and installs it as the current thread's context class loader, making project dependencies
 * visible to the plugin (e.g. for loading a custom wroManagerFactory class).
 */
protected final void extendPluginClasspath()
    throws MojoExecutionException {
  // this code is inspired from http://teleal.org/weblog/Extending%20the%20Maven%20plugin%20classpath.html
  final List<String> classpathElements = new ArrayList<String>();
  try {
    classpathElements.addAll(mavenProject.getRuntimeClasspathElements());
  } catch (final DependencyResolutionRequiredException e) {
    throw new MojoExecutionException("Could not get compile classpath elements", e);
  }
  final ClassLoader classLoader = createClassLoader(classpathElements);
  Thread.currentThread().setContextClassLoader(classLoader);
}
/**
 * Builds a {@link URLClassLoader} (child of the current thread's context class loader)
 * whose URLs are the provided classpath element paths.
 *
 * @param classpathElements file system paths to add to the plugin classpath.
 * @throws RuntimeException when an element cannot be converted to a URL.
 */
private ClassLoader createClassLoader(final List<String> classpathElements) {
  getLog().debug("Classpath elements:");
  final List<URL> urls = new ArrayList<URL>();
  try {
    for (final String element : classpathElements) {
      final File elementFile = new File(element);
      getLog().debug("Adding element to plugin classpath: " + elementFile.getPath());
      urls.add(elementFile.toURI().toURL());
    }
  } catch (final Exception e) {
    // fixed typo in log message: "retreiving" -> "retrieving"
    getLog().error("Error retrieving URL for artifact", e);
    throw new RuntimeException(e);
  }
  // new URL[0] is the idiomatic (and allocation-free on modern JVMs) toArray argument
  return new URLClassLoader(urls.toArray(new URL[0]), Thread.currentThread().getContextClassLoader());
}
/**
 * @param contextFolder
 *          the servletContextFolder to set. Package-private setter for tests.
 * @VisibleForTesting
 */
void setContextFolder(final File contextFolder) {
  this.contextFolder = contextFolder;
}
/**
 * @param wroFile
 *          the wroFile (group definitions file) to set. Package-private setter for tests.
 * @VisibleForTesting
 */
void setWroFile(final File wroFile) {
  this.wroFile = wroFile;
}
/**
 * @return the wroFile (group definitions file).
 * @VisibleForTesting
 */
File getWroFile() {
  return this.wroFile;
}
/**
 * @return the contextFolder (web application context root).
 * @VisibleForTesting
 */
File getContextFolder() {
  return this.contextFolder;
}
/**
 * @param minimize
 *          flag for minimization. Package-private setter for tests.
 * @VisibleForTesting
 */
void setMinimize(final boolean minimize) {
  this.minimize = minimize;
}
/**
 * @param ignoreMissingResources
 *          the ignoreMissingResources flag to set. Package-private setter for tests.
 * @VisibleForTesting
 */
void setIgnoreMissingResources(final boolean ignoreMissingResources) {
  this.ignoreMissingResources = ignoreMissingResources;
}
/**
 * @return the minimize flag.
 * @VisibleForTesting
 */
boolean isMinimize() {
  return this.minimize;
}
/**
 * @return the ignoreMissingResources flag.
 * @VisibleForTesting
 */
boolean isIgnoreMissingResources() {
  return this.ignoreMissingResources;
}
/**
 * Used for testing.
 *
 * @param mavenProject
 *          the mavenProject to set.
 */
void setMavenProject(final MavenProject mavenProject) {
  this.mavenProject = mavenProject;
}
/**
 * @return the targetGroups (comma separated group names, or null when all groups are processed).
 * @VisibleForTesting
 */
String getTargetGroups() {
  return this.targetGroups;
}
/**
 * @param targetGroups
 *          comma separated group names to process.
 * @VisibleForTesting
 */
void setTargetGroups(final String targetGroups) {
  this.targetGroups = targetGroups;
}
/**
 * @param wroManagerFactory
 *          fully qualified name of the {@link WroManagerFactory} class.
 * @VisibleForTesting
 */
void setWroManagerFactory(final String wroManagerFactory) {
  this.wroManagerFactory = wroManagerFactory;
}
/**
 * @param extraConfigFile
 *          the extraConfigFile to set. Package-private setter for tests.
 * @VisibleForTesting
 */
void setExtraConfigFile(final File extraConfigFile) {
  this.extraConfigFile = extraConfigFile;
}
/**
 * @param buildContext
 *          the incremental-build context to set.
 * @VisibleForTesting
 */
void setBuildContext(final BuildContext buildContext) {
  this.buildContext = buildContext;
}
}
|
rename a method and add javadoc
|
wro4j-maven-plugin/src/main/java/ro/isdc/wro/maven/plugin/AbstractWro4jMojo.java
|
rename a method and add javadoc
|
|
Java
|
apache-2.0
|
99b0f2325d3c95eab3a711a6408b88c5b69ca196
| 0
|
joansmith/supernode,bitsofproof/supernode
|
server/src/test/java/com/bitsofproof/supernode/test/LevelDBTest.java
|
package com.bitsofproof.supernode.test;
import static org.fusesource.leveldbjni.JniDBFactory.factory;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import org.iq80.leveldb.DB;
import org.iq80.leveldb.DBComparator;
import org.iq80.leveldb.DBException;
import org.iq80.leveldb.DBIterator;
import org.iq80.leveldb.Options;
import org.iq80.leveldb.ReadOptions;
import org.iq80.leveldb.WriteBatch;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Smoke test for the leveldbjni binding: opens a throw-away on-disk database and exercises
 * put/get/delete, write batches, iteration and snapshots.
 * <p>
 * NOTE(review): the test methods depend on each other's side effects (batchTest deletes the
 * "Denver" key written by putGetDeleteTest; iteratorTest expects exactly the entries written
 * by batchTest). JUnit does not guarantee test method execution order, which presumably
 * explains intermittent failures on CI - confirm before relying on this test.
 */
public class LevelDBTest
{
	private static final Logger log = LoggerFactory.getLogger (LevelDBTest.class);

	// shared database handle, opened once per class
	private static DB db;

	// directory (relative to the working dir) holding the temporary database files
	private static final String testdb = "levelDBExample";

	@BeforeClass
	public static void openDB () throws IOException
	{
		// orders keys by their String representation; NOTE(review): new String(byte[]) uses the
		// platform default charset - fine for the ASCII keys used here, confirm otherwise
		DBComparator comparator = new DBComparator ()
		{
			@Override
			public int compare (byte[] key1, byte[] key2)
			{
				return new String (key1).compareTo (new String (key2));
			}

			@Override
			public String name ()
			{
				return "simple";
			}

			@Override
			public byte[] findShortestSeparator (byte[] start, byte[] limit)
			{
				// no shortening attempted: returning the start key unchanged is always valid
				return start;
			}

			@Override
			public byte[] findShortSuccessor (byte[] key)
			{
				return key;
			}
		};
		// route leveldb's internal log messages to slf4j at trace level
		org.iq80.leveldb.Logger logger = new org.iq80.leveldb.Logger ()
		{
			@Override
			public void log (String message)
			{
				log.trace (message);
			}
		};
		Options options = new Options ();
		options.comparator (comparator);
		options.logger (logger);
		options.cacheSize (100 * 1048576); // 100MB cache
		options.createIfMissing (true);
		db = factory.open (new File (testdb), options);
	}

	@AfterClass
	public static void closeDB () throws IOException
	{
		// dump leveldb's internal statistics before tearing down
		String stats = db.getProperty ("leveldb.stats");
		System.out.println (stats);
		db.close ();
		Options options = new Options ();
		// remove the on-disk files so the next run starts from an empty database
		factory.destroy (new File (testdb), options);
	}

	@Test
	public void putGetDeleteTest () throws UnsupportedEncodingException, DBException
	{
		db.put ("Tampa".getBytes (), "rocks".getBytes ());
		assertTrue (new String (db.get ("Tampa".getBytes ()), "US-ASCII").equals ("rocks"));
		db.delete ("Tampa".getBytes ());
		// leaves "Denver" behind; batchTest relies on deleting it
		db.put ("Denver".getBytes (), "whatever".getBytes ());
	}

	@Test
	public void batchTest () throws UnsupportedEncodingException, DBException
	{
		// atomically delete "Denver" and insert two new entries
		WriteBatch batch = db.createWriteBatch ();
		batch.delete ("Denver".getBytes ());
		batch.put ("Tampa".getBytes (), "green".getBytes ());
		batch.put ("London".getBytes (), "red".getBytes ());
		db.write (batch);
		assertTrue (new String (db.get ("London".getBytes ()), "US-ASCII").equals ("red"));
	}

	@Test
	public void iteratorTest () throws UnsupportedEncodingException, IOException
	{
		DBIterator iterator = db.iterator ();
		doIterate (iterator);
	}

	/**
	 * Walks the whole database in comparator order and asserts it contains exactly the
	 * {London=red, Tampa=green} entries written by batchTest. Always closes the iterator.
	 */
	private void doIterate (DBIterator iterator) throws UnsupportedEncodingException, IOException
	{
		try
		{
			String[] keys = { "London", "Tampa" };
			String[] values = { "red", "green" };
			int i = 0;
			for ( iterator.seekToFirst (); iterator.hasNext (); iterator.next () )
			{
				String key = new String (iterator.peekNext ().getKey (), "US-ASCII");
				String value = new String (iterator.peekNext ().getValue (), "US-ASCII");
				assertTrue (keys[i].equals (key));
				assertTrue (values[i].equals (value));
				++i;
			}
		}
		finally
		{
			// Make sure you close the iterator to avoid resource leaks.
			iterator.close ();
		}
	}

	@Test
	public void snapshotTest () throws UnsupportedEncodingException, IOException
	{
		// iterating over a snapshot must observe the same stable view of the data
		ReadOptions ro = new ReadOptions ();
		ro.snapshot (db.getSnapshot ());
		try
		{
			doIterate (db.iterator (ro));
		}
		finally
		{
			// Make sure you close the snapshot to avoid resource leaks.
			ro.snapshot ().close ();
		}
	}
}
|
leveldb test randomly breaks on travis. Not testing.
|
server/src/test/java/com/bitsofproof/supernode/test/LevelDBTest.java
|
leveldb test randomly breaks on travis. Not testing.
|
||
Java
|
apache-2.0
|
22c68700407070fa45fe4728cb095c794bd0e645
| 0
|
EvilMcJerkface/atlasdb,EvilMcJerkface/atlasdb,palantir/atlasdb,palantir/atlasdb,EvilMcJerkface/atlasdb,palantir/atlasdb
|
/**
* Copyright 2015 Palantir Technologies
*
* Licensed under the BSD-3 License (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://opensource.org/licenses/BSD-3-Clause
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.atlasdb.transaction.impl;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentNavigableMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nullable;
import org.apache.commons.lang.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.base.Stopwatch;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.AbstractIterator;
import com.google.common.collect.Collections2;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.ImmutableSortedMap.Builder;
import com.google.common.collect.Iterables;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import com.palantir.atlasdb.AtlasDbConstants;
import com.palantir.atlasdb.AtlasDbPerformanceConstants;
import com.palantir.atlasdb.cleaner.Cleaner;
import com.palantir.atlasdb.cleaner.NoOpCleaner;
import com.palantir.atlasdb.encoding.PtBytes;
import com.palantir.atlasdb.keyvalue.api.Cell;
import com.palantir.atlasdb.keyvalue.api.ColumnSelection;
import com.palantir.atlasdb.keyvalue.api.KeyAlreadyExistsException;
import com.palantir.atlasdb.keyvalue.api.KeyValueService;
import com.palantir.atlasdb.keyvalue.api.RangeRequest;
import com.palantir.atlasdb.keyvalue.api.RangeRequests;
import com.palantir.atlasdb.keyvalue.api.RowResult;
import com.palantir.atlasdb.keyvalue.api.TableReference;
import com.palantir.atlasdb.keyvalue.api.Value;
import com.palantir.atlasdb.keyvalue.impl.Cells;
import com.palantir.atlasdb.keyvalue.impl.RowResults;
import com.palantir.atlasdb.protos.generated.TableMetadataPersistence.SweepStrategy;
import com.palantir.atlasdb.table.description.exceptions.AtlasDbConstraintException;
import com.palantir.atlasdb.transaction.api.AtlasDbConstraintCheckingMode;
import com.palantir.atlasdb.transaction.api.ConflictHandler;
import com.palantir.atlasdb.transaction.api.ConstraintCheckable;
import com.palantir.atlasdb.transaction.api.ConstraintCheckingTransaction;
import com.palantir.atlasdb.transaction.api.TransactionCommitFailedException;
import com.palantir.atlasdb.transaction.api.TransactionConflictException;
import com.palantir.atlasdb.transaction.api.TransactionConflictException.CellConflict;
import com.palantir.atlasdb.transaction.api.TransactionFailedException;
import com.palantir.atlasdb.transaction.api.TransactionFailedRetriableException;
import com.palantir.atlasdb.transaction.api.TransactionLockTimeoutException;
import com.palantir.atlasdb.transaction.api.TransactionReadSentinelBehavior;
import com.palantir.atlasdb.transaction.service.TransactionService;
import com.palantir.common.annotation.Output;
import com.palantir.common.base.AbortingVisitor;
import com.palantir.common.base.AbstractBatchingVisitable;
import com.palantir.common.base.BatchingVisitable;
import com.palantir.common.base.BatchingVisitableFromIterable;
import com.palantir.common.base.BatchingVisitables;
import com.palantir.common.base.ClosableIterator;
import com.palantir.common.base.ClosableIterators;
import com.palantir.common.base.ForwardingClosableIterator;
import com.palantir.common.base.Throwables;
import com.palantir.common.collect.IterableUtils;
import com.palantir.common.collect.IteratorUtils;
import com.palantir.common.collect.MapEntries;
import com.palantir.lock.AtlasCellLockDescriptor;
import com.palantir.lock.AtlasRowLockDescriptor;
import com.palantir.lock.LockClient;
import com.palantir.lock.LockDescriptor;
import com.palantir.lock.LockMode;
import com.palantir.lock.LockRefreshToken;
import com.palantir.lock.LockRequest;
import com.palantir.lock.RemoteLockService;
import com.palantir.timestamp.TimestampService;
import com.palantir.util.AssertUtils;
import com.palantir.util.DistributedCacheMgrCache;
import com.palantir.util.Pair;
import com.palantir.util.SoftCache;
import com.palantir.util.paging.TokenBackedBasicResultsPage;
/**
* This implements snapshot isolation for transactions.
* <p>
* This object is thread safe and you may do reads and writes from multiple threads.
* You may not continue reading or writing after {@link #commit()} or {@link #abort()}
* is called.
* <p>
* Things to keep in mind when dealing with snapshot transactions:
* 1. Transactions that do writes should be short lived.
* 1a. Read only transactions can be long lived (within reason).
* 2. Do not write too much data in one transaction (this relates back to #1)
* 3. A row should be able to fit in memory without any trouble. This includes
* all columns of the row. If you are thinking about making your row bigger than like 10MB, you
* should think about breaking these up into different rows and using range scans.
*/
public class SnapshotTransaction extends AbstractTransaction implements ConstraintCheckingTransaction {
private static final int BATCH_SIZE_GET_FIRST_PAGE = 1000;
private final static Logger log = LoggerFactory.getLogger(SnapshotTransaction.class);
private static final Logger perfLogger = LoggerFactory.getLogger("dualschema.perf");
private static final Logger constraintLogger = LoggerFactory.getLogger("dualschema.constraints");
/**
 * Lifecycle states of the transaction; the current state is held in the atomic
 * {@code state} field declared below.
 */
private enum State {
    // transaction created, commit not yet attempted
    UNCOMMITTED,
    // commit finished successfully
    COMMITTED,
    // commit currently in progress
    COMMITTING,
    // explicitly aborted by the caller
    ABORTED,
    /**
     * Commit has failed during commit.
     */
    FAILED
}
protected final TimestampService timestampService;
final KeyValueService keyValueService;
protected final RemoteLockService lockService;
final TransactionService defaultTransactionService;
private final Cleaner cleaner;
private final Supplier<Long> startTimestamp;
protected final long immutableTimestamp;
protected final ImmutableSet<LockRefreshToken> externalLocksTokens;
protected final long timeCreated = System.currentTimeMillis();
protected final ConcurrentMap<TableReference, ConcurrentNavigableMap<Cell, byte[]>> writesByTable = Maps.newConcurrentMap();
private final ConflictDetectionManager conflictDetectionManager;
private final DistributedCacheMgrCache<Long, Long> cachedCommitTimes = new SoftCache<Long, Long>();
private final AtomicLong byteCount = new AtomicLong();
private final AtlasDbConstraintCheckingMode constraintCheckingMode;
private final ConcurrentMap<TableReference, ConstraintCheckable> constraintsByTableName = Maps.newConcurrentMap();
private final AtomicReference<State> state = new AtomicReference<State>(State.UNCOMMITTED);
private final AtomicLong numWriters = new AtomicLong();
protected final SweepStrategyManager sweepStrategyManager;
protected final Long transactionReadTimeoutMillis;
private final TransactionReadSentinelBehavior readSentinelBehavior;
private volatile long commitTsForScrubbing = TransactionConstants.FAILED_COMMIT_TS;
protected final boolean allowHiddenTableAccess;
protected final Stopwatch transactionTimer = Stopwatch.createStarted();
/**
 * Main constructor: all arguments are stored into the corresponding fields;
 * {@code tokensValidForCommit} is defensively copied into an immutable set.
 *
 * @param keyValueService backing key value store.
 * @param lockService remote lock service used by this transaction.
 * @param timestampService timestamp source, stored for later use.
 * @param transactionService stored as the default transaction service.
 * @param cleaner stored cleaner instance.
 * @param startTimeStamp supplier of this transaction's start timestamp.
 * @param conflictDetectionManager stored conflict detection manager.
 * @param sweepStrategyManager stored sweep strategy manager.
 * @param immutableTimestamp If we find a row written before the immutableTimestamp we don't need to
 *        grab a read lock for it because we know that no writers exist.
 * @param tokensValidForCommit These tokens need to be valid with {@link #lockService} for this transaction
 *        to commit. If these locks have expired then the commit will fail.
 * @param constraintCheckingMode stored constraint checking mode.
 * @param transactionTimeoutMillis stored read timeout in milliseconds (may be null).
 * @param readSentinelBehavior stored read sentinel behavior.
 * @param allowHiddenTableAccess whether hidden table access is permitted.
 */
/* package */ SnapshotTransaction(KeyValueService keyValueService,
                       RemoteLockService lockService,
                       TimestampService timestampService,
                       TransactionService transactionService,
                       Cleaner cleaner,
                       Supplier<Long> startTimeStamp,
                       ConflictDetectionManager conflictDetectionManager,
                       SweepStrategyManager sweepStrategyManager,
                       long immutableTimestamp,
                       Iterable<LockRefreshToken> tokensValidForCommit,
                       AtlasDbConstraintCheckingMode constraintCheckingMode,
                       Long transactionTimeoutMillis,
                       TransactionReadSentinelBehavior readSentinelBehavior,
                       boolean allowHiddenTableAccess) {
    this.keyValueService = keyValueService;
    this.timestampService = timestampService;
    this.defaultTransactionService = transactionService;
    this.cleaner = cleaner;
    this.lockService = lockService;
    this.startTimestamp = startTimeStamp;
    this.conflictDetectionManager = conflictDetectionManager;
    this.sweepStrategyManager = sweepStrategyManager;
    this.immutableTimestamp = immutableTimestamp;
    // defensive immutable copy of the caller-supplied tokens
    this.externalLocksTokens = ImmutableSet.copyOf(tokensValidForCommit);
    this.constraintCheckingMode = constraintCheckingMode;
    this.transactionReadTimeoutMillis = transactionTimeoutMillis;
    this.readSentinelBehavior = readSentinelBehavior;
    this.allowHiddenTableAccess = allowHiddenTableAccess;
}
// TEST ONLY
// Simplified constructor for tests: fixed start timestamp, conflict detection from an explicit
// map, no external lock tokens, no read timeout, and no hidden-table access.
SnapshotTransaction(KeyValueService keyValueService,
                    RemoteLockService lockService,
                    TimestampService timestampService,
                    TransactionService transactionService,
                    Cleaner cleaner,
                    long startTimeStamp,
                    Map<TableReference, ConflictHandler> tablesToWriteWrite,
                    AtlasDbConstraintCheckingMode constraintCheckingMode,
                    TransactionReadSentinelBehavior readSentinelBehavior) {
    this.keyValueService = keyValueService;
    this.timestampService = timestampService;
    this.defaultTransactionService = transactionService;
    this.cleaner = cleaner;
    this.lockService = lockService;
    this.startTimestamp = Suppliers.ofInstance(startTimeStamp);
    this.conflictDetectionManager = ConflictDetectionManagers.fromMap(tablesToWriteWrite);
    this.sweepStrategyManager = SweepStrategyManagers.createDefault(keyValueService);
    // immutableTimestamp of 0 means no rows can be assumed immutable.
    this.immutableTimestamp = 0;
    this.externalLocksTokens = ImmutableSet.of();
    this.constraintCheckingMode = constraintCheckingMode;
    this.transactionReadTimeoutMillis = null;
    this.readSentinelBehavior = readSentinelBehavior;
    this.allowHiddenTableAccess = false;
}
/**
 * Used for read only transactions and subclasses that are read only and
 * bypass aspects of the transaction protocol.
 * <p>
 * Delegates to the full read-only constructor with hidden-table access disabled.
 */
protected SnapshotTransaction(KeyValueService keyValueService,
                              TransactionService transactionService,
                              RemoteLockService lockService,
                              long startTimeStamp,
                              AtlasDbConstraintCheckingMode constraintCheckingMode,
                              TransactionReadSentinelBehavior readSentinelBehavior) {
    this(keyValueService, transactionService, lockService, startTimeStamp, constraintCheckingMode, readSentinelBehavior, false);
}
// Read-only variant: no cleaner (NoOpCleaner), no timestamp service (a read-only transaction
// never needs a commit timestamp), no conflict detection, and the start timestamp doubles as
// the immutable timestamp since this transaction writes nothing.
protected SnapshotTransaction(KeyValueService keyValueService,
                              TransactionService transactionService,
                              RemoteLockService lockService,
                              long startTimeStamp,
                              AtlasDbConstraintCheckingMode constraintCheckingMode,
                              TransactionReadSentinelBehavior readSentinelBehavior,
                              boolean allowHiddenTableAccess) {
    this.keyValueService = keyValueService;
    this.defaultTransactionService = transactionService;
    this.cleaner = NoOpCleaner.INSTANCE;
    this.lockService = lockService;
    this.startTimestamp = Suppliers.ofInstance(startTimeStamp);
    this.conflictDetectionManager = ConflictDetectionManagers.withoutConflictDetection(keyValueService);
    this.sweepStrategyManager = SweepStrategyManagers.createDefault(keyValueService);
    this.timestampService = null;
    this.immutableTimestamp = startTimeStamp;
    this.externalLocksTokens = ImmutableSet.of();
    this.constraintCheckingMode = constraintCheckingMode;
    this.transactionReadTimeoutMillis = null;
    this.readSentinelBehavior = readSentinelBehavior;
    this.allowHiddenTableAccess = allowHiddenTableAccess;
}
/** Returns this transaction's start timestamp. */
@Override
public long getTimestamp() {
    return getStartTimestamp();
}
/** Commit timestamp recorded for scrubbing; FAILED_COMMIT_TS until one has been obtained. */
long getCommitTimestamp() {
    return commitTsForScrubbing;
}
/** Returns how this transaction reacts when a read encounters a sweep sentinel. */
@Override
public TransactionReadSentinelBehavior getReadSentinelBehavior() {
    return readSentinelBehavior;
}
/**
 * Returns the stopwatch started at construction, measuring the transaction's lifetime.
 * NOTE(review): the method name misspells "Transaction"; it is public API so renaming
 * would break callers — consider deprecating in favor of a correctly spelled accessor.
 */
public Stopwatch getTrasactionTimer() {
    return transactionTimer;
}
/**
 * Verifies a read is still permitted: the configured read timeout (if any) has not
 * elapsed, the table is visible to this transaction, and the transaction has not
 * reached a terminal state.
 *
 * @throws TransactionFailedRetriableException if the read timeout has elapsed
 */
protected void checkGetPreconditions(TableReference tableRef) {
    if (transactionReadTimeoutMillis != null) {
        long elapsedMillis = System.currentTimeMillis() - timeCreated;
        if (elapsedMillis > transactionReadTimeoutMillis) {
            throw new TransactionFailedRetriableException("Transaction timed out.");
        }
    }
    boolean tableVisible = allowHiddenTableAccess || !AtlasDbConstants.hiddenTables.contains(tableRef);
    Preconditions.checkArgument(tableVisible);
    State currentState = state.get();
    Preconditions.checkState(currentState == State.UNCOMMITTED || currentState == State.COMMITTING,
            "Transaction must be uncommitted.");
}
/**
 * Reads the given rows at this transaction's start timestamp, merging in any local
 * (uncommitted) writes and post-filtering raw results for snapshot isolation.
 * Deleted (empty) values are stripped from the returned rows.
 */
@Override
public SortedMap<byte[], RowResult<byte[]>> getRows(TableReference tableRef, Iterable<byte[]> rows,
                                                    ColumnSelection columnSelection) {
    Stopwatch watch = Stopwatch.createStarted();
    checkGetPreconditions(tableRef);
    if (Iterables.isEmpty(rows)) {
        return AbstractTransaction.EMPTY_SORTED_ROWS;
    }
    Map<Cell, byte[]> result = Maps.newHashMap();
    Map<Cell, Value> rawResults = Maps.newHashMap(
            keyValueService.getRows(tableRef, rows, columnSelection, getStartTimestamp()));
    // Local writes (including deletes as empty arrays) take precedence over stored values.
    SortedMap<Cell, byte[]> writes = writesByTable.get(tableRef);
    if (writes != null) {
        for (byte[] row : rows) {
            extractLocalWritesForRow(result, writes, row);
        }
    }
    // We don't need to do work postfiltering if we have a write locally.
    rawResults.keySet().removeAll(result.keySet());
    SortedMap<byte[], RowResult<byte[]>> results = filterRowResults(tableRef, rawResults, result);
    if (perfLogger.isDebugEnabled()) {
        perfLogger.debug("getRows({}, {} rows) found {} rows, took {} ms",
                tableRef, Iterables.size(rows), results.size(), watch.elapsed(TimeUnit.MILLISECONDS));
    }
    // For THOROUGH-swept tables we must confirm our locks are still held after reading.
    validateExternalAndCommitLocksIfNecessary(tableRef);
    return results;
}
/**
 * Reads rows (all columns) directly from the key value service at the start timestamp,
 * post-filtered for snapshot isolation but WITHOUT merging local uncommitted writes.
 */
@Override
public SortedMap<byte[], RowResult<byte[]>> getRowsIgnoringLocalWrites(TableReference tableRef, Iterable<byte[]> rows) {
    checkGetPreconditions(tableRef);
    if (Iterables.isEmpty(rows)) {
        return AbstractTransaction.EMPTY_SORTED_ROWS;
    }
    Map<Cell, Value> rawResults = Maps.newHashMap(keyValueService.getRows(tableRef,
            rows,
            ColumnSelection.all(),
            getStartTimestamp()));
    return filterRowResults(tableRef, rawResults, Maps.<Cell, byte[]>newHashMap());
}
/**
 * Post-filters the raw results into {@code result} for snapshot isolation, drops
 * delete markers (empty values), and groups the surviving cells by row.
 */
private SortedMap<byte[], RowResult<byte[]>> filterRowResults(TableReference tableRef,
                                                              Map<Cell, Value> rawResults,
                                                              Map<Cell, byte[]> result) {
    getWithPostfiltering(tableRef, rawResults, result, Value.GET_VALUE);
    return RowResults.viewOfSortedMap(
            Cells.breakCellsUpByRow(Maps.filterValues(result, Predicates.not(Value.IS_EMPTY))));
}
/**
 * This will add any local writes for this row to the result map.
 * <p>
 * If an empty value was written as a delete, this will also be included in the map.
 */
private void extractLocalWritesForRow(@Output Map<Cell, byte[]> result,
                                      SortedMap<Cell, byte[]> writes, byte[] row) {
    // Start from the smallest possible cell of the row and walk forward until
    // we leave the row.
    Cell firstPossibleCell = Cells.createSmallestCellForRow(row);
    for (Entry<Cell, byte[]> write : writes.tailMap(firstPossibleCell).entrySet()) {
        Cell cell = write.getKey();
        if (!Arrays.equals(row, cell.getRowName())) {
            return;
        }
        result.put(cell, write.getValue());
    }
}
/**
 * Reads the given cells at the start timestamp, with local uncommitted writes taking
 * precedence. Deleted values (empty arrays) are filtered out of the returned map.
 */
@Override
public Map<Cell, byte[]> get(TableReference tableRef, Set<Cell> cells) {
    Stopwatch watch = Stopwatch.createStarted();
    checkGetPreconditions(tableRef);
    if (Iterables.isEmpty(cells)) { return ImmutableMap.of(); }
    Map<Cell, byte[]> result = Maps.newHashMap();
    SortedMap<Cell, byte[]> writes = writesByTable.get(tableRef);
    if (writes != null) {
        for (Cell cell : cells) {
            if (writes.containsKey(cell)) {
                result.put(cell, writes.get(cell));
            }
        }
    }
    // We don't need to read any cells that were written locally.
    result.putAll(getFromKeyValueService(tableRef, Sets.difference(cells, result.keySet())));
    if (perfLogger.isDebugEnabled()) {
        perfLogger.debug("get({}, {} cells) found {} cells (some possibly deleted), took {} ms",
                tableRef, cells.size(), result.size(), watch.elapsed(TimeUnit.MILLISECONDS));
    }
    // For THOROUGH-swept tables we must confirm our locks are still held after reading.
    validateExternalAndCommitLocksIfNecessary(tableRef);
    return Maps.filterValues(result, Predicates.not(Value.IS_EMPTY));
}
/**
 * Reads the given cells straight from the key value service (post-filtered for
 * snapshot isolation) without consulting local uncommitted writes. Delete markers
 * are stripped from the returned map.
 */
@Override
public Map<Cell, byte[]> getIgnoringLocalWrites(TableReference tableRef, Set<Cell> cells) {
    checkGetPreconditions(tableRef);
    if (cells.isEmpty()) {
        return ImmutableMap.of();
    }
    Map<Cell, byte[]> fetched = getFromKeyValueService(tableRef, cells);
    return Maps.filterValues(fetched, Predicates.not(Value.IS_EMPTY));
}
/**
 * This will load the given keys from the underlying key value service and apply postfiltering
 * so we have snapshot isolation.  If the value in the key value service is the empty array
 * this will be included here and needs to be filtered out.
 *
 * @param tableRef table to read from
 * @param cells cells to load; each is read at this transaction's start timestamp
 * @return post-filtered cell values (delete markers included)
 */
private Map<Cell, byte[]> getFromKeyValueService(TableReference tableRef, Set<Cell> cells) {
    Map<Cell, byte[]> result = Maps.newHashMap();
    // Every cell is requested at the same (start) timestamp.
    Map<Cell, Long> toRead = Cells.constantValueMap(cells, getStartTimestamp());
    Map<Cell, Value> rawResults = keyValueService.get(tableRef, toRead);
    getWithPostfiltering(tableRef, rawResults, result, Value.GET_VALUE);
    return result;
}
/**
 * Determines where the next page of a range scan should begin: the token returned by
 * the first page if more results exist, otherwise the range's exclusive end (no next page).
 */
private static byte[] getNextStartRowName(RangeRequest range, TokenBackedBasicResultsPage<RowResult<Value>, byte[]> prePostFilter) {
    if (prePostFilter.moreResultsAvailable()) {
        return prePostFilter.getTokenForNextPage();
    }
    return range.getEndExclusive();
}
/**
 * Returns one lazy {@link BatchingVisitable} per requested range. Range requests are
 * partitioned so the first page of many ranges can be fetched from the key value
 * service in a single call; subsequent pages of each range fall through to
 * {@link #getRange(TableReference, RangeRequest)}.
 */
@Override
public Iterable<BatchingVisitable<RowResult<byte[]>>> getRanges(final TableReference tableRef,
                                                                Iterable<RangeRequest> rangeRequests) {
    checkGetPreconditions(tableRef);
    if (perfLogger.isDebugEnabled()) {
        perfLogger.debug("Passed {} ranges to getRanges({}, {})",
                Iterables.size(rangeRequests), tableRef, rangeRequests);
    }
    return FluentIterable.from(Iterables.partition(rangeRequests, BATCH_SIZE_GET_FIRST_PAGE))
            .transformAndConcat(new Function<List<RangeRequest>, List<BatchingVisitable<RowResult<byte[]>>>>() {
                @Override
                public List<BatchingVisitable<RowResult<byte[]>>> apply(List<RangeRequest> input) {
                    Stopwatch timer = Stopwatch.createStarted();
                    // One KVS round trip fetches the first page of every range in this partition.
                    Map<RangeRequest, TokenBackedBasicResultsPage<RowResult<Value>, byte[]>> firstPages = keyValueService.getFirstBatchForRanges(
                            tableRef,
                            input,
                            getStartTimestamp());
                    validateExternalAndCommitLocksIfNecessary(tableRef);
                    // Post-filter all first pages together for snapshot isolation.
                    final SortedMap<Cell, byte[]> postFiltered = postFilterPages(
                            tableRef,
                            firstPages.values());
                    List<BatchingVisitable<RowResult<byte[]>>> ret = Lists.newArrayListWithCapacity(input.size());
                    for (final RangeRequest rangeRequest : input) {
                        final TokenBackedBasicResultsPage<RowResult<Value>, byte[]> prePostFilter = firstPages.get(rangeRequest);
                        final byte[] nextStartRowName = getNextStartRowName(
                                rangeRequest,
                                prePostFilter);
                        // Merge post-filtered KVS results with local uncommitted writes for this range.
                        final List<Entry<Cell, byte[]>> mergeIterators = getPostfilteredWithLocalWrites(
                                tableRef,
                                postFiltered,
                                rangeRequest,
                                prePostFilter.getResults(),
                                nextStartRowName);
                        ret.add(new AbstractBatchingVisitable<RowResult<byte[]>>() {
                            @Override
                            protected <K extends Exception> void batchAcceptSizeHint(int batchSizeHint,
                                                                                     ConsistentVisitor<RowResult<byte[]>, K> v)
                                    throws K {
                                checkGetPreconditions(tableRef);
                                final Iterator<RowResult<byte[]>> rowResults = Cells.createRowView(mergeIterators);
                                while (rowResults.hasNext()) {
                                    if (!v.visit(ImmutableList.of(rowResults.next()))) {
                                        return;
                                    }
                                }
                                // Stop if the range is exhausted or the store reported no further pages.
                                if ((nextStartRowName.length == 0) || !prePostFilter.moreResultsAvailable()) {
                                    return;
                                }
                                // Continue the remainder of the range through the normal getRange path.
                                RangeRequest newRange = rangeRequest.getBuilder()
                                        .startRowInclusive(nextStartRowName)
                                        .build();
                                getRange(tableRef, newRange)
                                        .batchAccept(batchSizeHint, v);
                            }
                        });
                    }
                    log.info("Processed {} range requests for {} in {}ms",
                            input.size(), tableRef, timer.elapsed(TimeUnit.MILLISECONDS));
                    return ret;
                }
            });
}
/**
 * Re-validates our external and commit locks after a read of a THOROUGH-swept table;
 * a no-op for tables where sweep cannot remove values we might otherwise have seen.
 */
private void validateExternalAndCommitLocksIfNecessary(TableReference tableRef) {
    if (isValidationNecessary(tableRef)) {
        throwIfExternalAndCommitLocksNotValid(null);
    }
}
// Lock validation is only required for tables swept with the THOROUGH strategy.
private boolean isValidationNecessary(TableReference tableRef) {
    return sweepStrategyManager.get().get(tableRef) == SweepStrategy.THOROUGH;
}
/**
 * Restricts the jointly post-filtered cells to those that belong to this range's
 * first page, then merges in this transaction's local writes for the covered span.
 *
 * @param postFiltered all post-filtered cells from the whole partition of ranges
 * @param prePostFilter the raw first-page rows of this particular range
 * @param endRowExclusive where this range's first page ends
 */
private List<Entry<Cell, byte[]>> getPostfilteredWithLocalWrites(final TableReference tableRef,
                                                                 final SortedMap<Cell, byte[]> postFiltered,
                                                                 final RangeRequest rangeRequest,
                                                                 List<RowResult<Value>> prePostFilter,
                                                                 final byte[] endRowExclusive) {
    Map<Cell, Value> prePostFilterCells = Cells.convertRowResultsToCells(prePostFilter);
    // Keep only post-filtered cells that actually came from this range's page.
    Collection<Entry<Cell, byte[]>> postFilteredCells = Collections2.filter(
            postFiltered.entrySet(),
            Predicates.compose(
                    Predicates.in(prePostFilterCells.keySet()),
                    MapEntries.<Cell, byte[]> getKeyFunction()));
    Collection<Entry<Cell, byte[]>> localWritesInRange = getLocalWritesForRange(
            tableRef,
            rangeRequest.getStartInclusive(),
            endRowExclusive).entrySet();
    // Local writes win over stored values during the merge.
    return ImmutableList.copyOf(mergeInLocalWrites(
            postFilteredCells.iterator(),
            localWritesInRange.iterator(),
            rangeRequest.isReverse()));
}
/**
 * Returns a lazy visitable over one range. Each batchAccept pulls pages from the key
 * value service, post-filters them for snapshot isolation, and merges local writes.
 * Reverse ranges are not supported.
 */
@Override
public BatchingVisitable<RowResult<byte[]>> getRange(final TableReference tableRef,
                                                     final RangeRequest range) {
    checkGetPreconditions(tableRef);
    if (range.isEmptyRange()) {
        return BatchingVisitables.emptyBatchingVisitable();
    }
    return new AbstractBatchingVisitable<RowResult<byte[]>>() {
        @Override
        public <K extends Exception> void batchAcceptSizeHint(int userRequestedSize,
                                                              ConsistentVisitor<RowResult<byte[]>, K> v)
                throws K {
            // Re-checked at visit time because this visitable is lazy.
            Preconditions.checkState(state.get() == State.UNCOMMITTED,
                    "Transaction must be uncommitted.");
            if (range.getBatchHint() != null) {
                userRequestedSize = range.getBatchHint();
            }
            // Over-request slightly to compensate for rows removed by post-filtering.
            int preFilterBatchSize = getRequestHintToKvStore(userRequestedSize);
            Validate.isTrue(!range.isReverse(), "we currently do not support reverse ranges");
            getBatchingVisitableFromIterator(
                    tableRef,
                    range,
                    userRequestedSize,
                    v,
                    preFilterBatchSize);
        }
    };
}
/**
 * Drives the visitor over a merged stream of post-filtered KVS rows and local writes,
 * guaranteeing the post-filter iterator is closed even if the visitor throws.
 *
 * @return whatever {@code batchAccept} returns (false if the visitor aborted early)
 */
private <K extends Exception> boolean getBatchingVisitableFromIterator(final TableReference tableRef,
                                                                       RangeRequest range,
                                                                       int userRequestedSize,
                                                                       AbortingVisitor<List<RowResult<byte[]>>, K> v,
                                                                       int preFilterBatchSize) throws K {
    ClosableIterator<RowResult<byte[]>> postFilterIterator =
            postFilterIterator(tableRef, range, preFilterBatchSize, Value.GET_VALUE);
    try {
        Iterator<RowResult<byte[]>> localWritesInRange =
                Cells.createRowView(getLocalWritesForRange(tableRef, range.getStartInclusive(), range.getEndExclusive()).entrySet());
        Iterator<RowResult<byte[]>> mergeIterators =
                mergeInLocalWritesRows(postFilterIterator, localWritesInRange, range.isReverse());
        return BatchingVisitableFromIterable.create(mergeIterators).batchAccept(userRequestedSize, v);
    } finally {
        // Release any KVS resources backing the range scan.
        postFilterIterator.close();
    }
}
/**
 * Converts a user-facing batch size into the hint sent to the key value store,
 * padding by roughly 10% to compensate for rows that post-filtering may remove.
 * The result is capped at MAX_BATCH_SIZE (also guarding against int overflow).
 */
protected static int getRequestHintToKvStore(int userRequestedSize) {
    if (userRequestedSize == 1) {
        // Handle 1 specially because the underlying store could have an optimization for 1
        return 1;
    }
    //TODO: carrino: tune the param here based on how likely we are to post filter
    // rows out and have deleted rows
    int padded = userRequestedSize + (userRequestedSize + 9) / 10;
    // A negative value indicates integer overflow of the padded size.
    if (padded < 0 || padded > AtlasDbPerformanceConstants.MAX_BATCH_SIZE) {
        return AtlasDbPerformanceConstants.MAX_BATCH_SIZE;
    }
    return padded;
}
/**
 * Merges post-filtered rows with locally written rows, ordered by row name
 * (reversed for reverse ranges). When both sides contain the same row, the local
 * writes are merged into the stored row with local values winning per cell.
 * Deleted columns and rows left empty are filtered out of the result.
 */
private static Iterator<RowResult<byte[]>> mergeInLocalWritesRows(Iterator<RowResult<byte[]>> postFilterIterator,
                                                                  Iterator<RowResult<byte[]>> localWritesInRange,
                                                                  boolean isReverse) {
    Ordering<RowResult<byte[]>> ordering = RowResult.<byte[]>getOrderingByRowName();
    Iterator<RowResult<byte[]>> mergeIterators = IteratorUtils.mergeIterators(
            postFilterIterator, localWritesInRange,
            isReverse ? ordering.reverse() : ordering,
            new Function<Pair<RowResult<byte[]>, RowResult<byte[]>>, RowResult<byte[]>>() {
                @Override
                public RowResult<byte[]> apply(Pair<RowResult<byte[]>,RowResult<byte[]>> from) {
                    // prefer local writes
                    return RowResults.merge(from.lhSide, from.rhSide);
                }
            });
    return RowResults.filterDeletedColumnsAndEmptyRows(mergeIterators);
}
/**
 * Merges post-filtered cells with local writes at cell granularity, ordered by cell
 * (reversed for reverse ranges). On a conflict the local write (right-hand side)
 * replaces the stored value entirely. Delete markers (empty values) are dropped.
 */
private static Iterator<Entry<Cell, byte[]>> mergeInLocalWrites(Iterator<Entry<Cell, byte[]>> postFilterIterator,
                                                                Iterator<Entry<Cell, byte[]>> localWritesInRange,
                                                                boolean isReverse) {
    Ordering<Entry<Cell, byte[]>> ordering = Ordering.natural().onResultOf(MapEntries.<Cell, byte[]>getKeyFunction());
    Iterator<Entry<Cell, byte[]>> mergeIterators = IteratorUtils.mergeIterators(
            postFilterIterator, localWritesInRange,
            isReverse ? ordering.reverse() : ordering,
            new Function<Pair<Entry<Cell, byte[]>, Entry<Cell, byte[]>>, Entry<Cell, byte[]>>() {
                @Override
                public Map.Entry<Cell, byte[]> apply(Pair<Map.Entry<Cell, byte[]>, Map.Entry<Cell, byte[]>> from) {
                    // always override their value with written values
                    return from.rhSide;
                }
            });
    return Iterators.filter(mergeIterators,
            Predicates.compose(Predicates.not(Value.IS_EMPTY), MapEntries.<Cell, byte[]>getValueFunction()));
}
/**
 * Lazily scans a range, post-filtering each fetched batch for snapshot isolation and
 * transforming surviving values with {@code transformer}. Batch sizes adapt to the
 * observed deletion rate via {@link BatchSizeIncreasingRangeIterator}.
 */
protected <T> ClosableIterator<RowResult<T>> postFilterIterator(final TableReference tableRef,
                                                                RangeRequest range,
                                                                int preFilterBatchSize,
                                                                final Function<Value, T> transformer) {
    final BatchSizeIncreasingRangeIterator results = new BatchSizeIncreasingRangeIterator(tableRef, range, preFilterBatchSize);
    Iterator<Iterator<RowResult<T>>> batchedPostfiltered = new AbstractIterator<Iterator<RowResult<T>>>() {
        @Override
        protected Iterator<RowResult<T>> computeNext() {
            List<RowResult<Value>> batch = results.getBatch();
            if (batch.isEmpty()) {
                return endOfData();
            }
            SortedMap<Cell, T> postFilter = postFilterRows(tableRef, batch, transformer);
            // Feed back the survivor count so the next batch size can adapt.
            results.markNumRowsNotDeleted(Cells.getRows(postFilter.keySet()).size());
            return Cells.createRowView(postFilter.entrySet());
        }
    };
    final Iterator<RowResult<T>> rows = Iterators.concat(batchedPostfiltered);
    return new ForwardingClosableIterator<RowResult<T>>() {
        @Override
        protected ClosableIterator<RowResult<T>> delegate() {
            return ClosableIterators.wrap(rows);
        }
        @Override
        public void close() {
            // Close the underlying KVS range iterator, not just the wrapper.
            if (results != null) {
                results.close();
            }
        }
    };
}
/**
 * Pulls batches of raw rows for a range scan, growing (or shrinking) the batch size
 * based on how many of the previously returned rows survived post-filtering. If many
 * rows are being filtered out as deleted, larger batches are requested so the caller
 * still makes progress without extra round trips.
 */
private class BatchSizeIncreasingRangeIterator {
    final TableReference tableRef;
    final RangeRequest range;
    final int originalBatchSize;
    // Total raw rows handed to the caller so far.
    long numReturned = 0;
    // Of those, how many survived post-filtering (reported via markNumRowsNotDeleted).
    long numNotDeleted = 0;
    ClosableIterator<RowResult<Value>> results = null;
    int lastBatchSize;
    // Row name of the last raw row returned; used to resume after re-opening the scan.
    byte[] lastRow = null;
    public BatchSizeIncreasingRangeIterator(TableReference tableRef,
                                            RangeRequest range,
                                            int originalBatchSize) {
        Validate.isTrue(originalBatchSize > 0);
        this.tableRef = tableRef;
        this.range = range;
        this.originalBatchSize = originalBatchSize;
    }
    // Called by the post-filter with the number of rows from the last batch that
    // were NOT deleted; drives getBestBatchSize().
    public void markNumRowsNotDeleted(int rowsInBatch) {
        numNotDeleted += rowsInBatch;
        AssertUtils.assertAndLog(numNotDeleted <= numReturned, "NotDeleted is bigger than the number of rows we returned.");
    }
    // Scales the batch size by the observed returned/surviving ratio, capped at MAX_BATCH_SIZE.
    int getBestBatchSize() {
        if (numReturned == 0) {
            return originalBatchSize;
        }
        final long batchSize;
        if (numNotDeleted == 0) {
            // If everything we've seen has been deleted, we should be aggressive about getting more rows.
            batchSize = numReturned*4;
        } else {
            batchSize = (long)Math.ceil(originalBatchSize * (numReturned / (double)numNotDeleted));
        }
        return (int)Math.min(batchSize, AtlasDbPerformanceConstants.MAX_BATCH_SIZE);
    }
    // Opens the scan on first use, or re-opens it with a new batch hint when the
    // desired batch size has drifted far enough from the current one.
    private void updateResultsIfNeeded() {
        if (results == null) {
            results = keyValueService.getRange(tableRef, range.withBatchHint(originalBatchSize), getStartTimestamp());
            lastBatchSize = originalBatchSize;
            return;
        }
        Validate.isTrue(lastRow != null);
        // If the last row we got was the maximal row, then we are done.
        if (RangeRequests.isTerminalRow(range.isReverse(), lastRow)) {
            results = ClosableIterators.wrap(ImmutableList.<RowResult<Value>>of().iterator());
            return;
        }
        int bestBatchSize = getBestBatchSize();
        // Only close and throw away our old iterator if the batch size has changed by a factor of 2 or more.
        if (bestBatchSize >= lastBatchSize*2 || bestBatchSize <= lastBatchSize/2) {
            byte[] nextStartRow = RangeRequests.getNextStartRow(range.isReverse(), lastRow);
            if (Arrays.equals(nextStartRow, range.getEndExclusive())) {
                results = ClosableIterators.wrap(ImmutableList.<RowResult<Value>>of().iterator());
                return;
            }
            RangeRequest.Builder newRange = range.getBuilder();
            newRange.startRowInclusive(nextStartRow);
            newRange.batchHint(bestBatchSize);
            results.close();
            results = keyValueService.getRange(tableRef, newRange.build(), getStartTimestamp());
            lastBatchSize = bestBatchSize;
        }
    }
    // Returns the next raw batch (possibly empty at end of range) and records progress.
    public List<RowResult<Value>> getBatch() {
        updateResultsIfNeeded();
        Validate.isTrue(lastBatchSize > 0);
        ImmutableList<RowResult<Value>> list = ImmutableList.copyOf(Iterators.limit(results, lastBatchSize));
        numReturned += list.size();
        if (!list.isEmpty()) {
            lastRow = list.get(list.size()-1).getRowName();
        }
        return list;
    }
    public void close() {
        if (results != null) {
            results.close();
        }
    }
}
/**
 * Returns the per-table buffer of local writes, creating it on first use.
 * Safe under concurrent callers: putIfAbsent arbitrates racing creations and the
 * loser adopts the winner's map.
 */
private ConcurrentNavigableMap<Cell, byte[]> getLocalWrites(TableReference tableRef) {
    ConcurrentNavigableMap<Cell, byte[]> existing = writesByTable.get(tableRef);
    if (existing != null) {
        return existing;
    }
    ConcurrentNavigableMap<Cell, byte[]> fresh = new ConcurrentSkipListMap<Cell, byte[]>();
    ConcurrentNavigableMap<Cell, byte[]> raced = writesByTable.putIfAbsent(tableRef, fresh);
    return raced == null ? fresh : raced;
}
/**
 * This includes deleted writes as zero length byte arrays, be sure to strip them out.
 * <p>
 * Returns a live sub-view of the local write buffer restricted to
 * [startRow, endRow); an empty row bound means unbounded on that side.
 */
private SortedMap<Cell, byte[]> getLocalWritesForRange(TableReference tableRef, byte[] startRow, byte[] endRow) {
    SortedMap<Cell, byte[]> writes = getLocalWrites(tableRef);
    if (startRow.length != 0) {
        writes = writes.tailMap(Cells.createSmallestCellForRow(startRow));
    }
    if (endRow.length != 0) {
        // headMap on the smallest cell of endRow makes endRow itself exclusive.
        writes = writes.headMap(Cells.createSmallestCellForRow(endRow));
    }
    return writes;
}
/**
 * Flattens the rows of all first pages into one list and post-filters them together
 * for snapshot isolation.
 */
private SortedMap<Cell, byte[]> postFilterPages(TableReference tableRef,
                                                Iterable<TokenBackedBasicResultsPage<RowResult<Value>, byte[]>> rangeRows) {
    List<RowResult<Value>> allRows = Lists.newArrayList();
    for (TokenBackedBasicResultsPage<RowResult<Value>, byte[]> page : rangeRows) {
        Iterables.addAll(allRows, page.getResults());
    }
    return postFilterRows(tableRef, allRows, Value.GET_VALUE);
}
/**
 * Post-filters a batch of raw range rows for snapshot isolation, returning the
 * surviving cells transformed by {@code transformer}, sorted by cell.
 */
private <T> SortedMap<Cell, T> postFilterRows(TableReference tableRef,
                                              List<RowResult<Value>> rangeRows,
                                              Function<Value, T> transformer) {
    Preconditions.checkState(state.get() == State.UNCOMMITTED, "Transaction must be uncommitted.");
    if (rangeRows.isEmpty()) {
        return ImmutableSortedMap.of();
    }
    // Flatten row results to individual cells before post-filtering.
    Map<Cell, Value> rawResults = Maps.newHashMapWithExpectedSize(estimateSize(rangeRows));
    for (RowResult<Value> rowResult : rangeRows) {
        for (Map.Entry<byte[], Value> e : rowResult.getColumns().entrySet()) {
            rawResults.put(Cell.create(rowResult.getRowName(), e.getKey()), e.getValue());
        }
    }
    SortedMap<Cell, T> postFilter = Maps.newTreeMap();
    getWithPostfiltering(tableRef, rawResults, postFilter, transformer);
    return postFilter;
}
/** Total number of cells across the given rows; used to presize the raw-result map. */
private int estimateSize(List<RowResult<Value>> rangeRows) {
    int totalCells = 0;
    for (RowResult<Value> row : rangeRows) {
        totalCells += row.getColumns().size();
    }
    return totalCells;
}
/**
 * Post-filters raw values for snapshot isolation, writing surviving (transformed)
 * values into {@code results}. Loops because each round of filtering may require
 * re-reading some cells at earlier timestamps. Also logs when a single read pulls
 * back an unusually large number of bytes.
 */
private <T> void getWithPostfiltering(TableReference tableRef,
                                      Map<Cell, Value> rawResults,
                                      @Output Map<Cell, T> results,
                                      Function<Value, T> transformer) {
    long bytes = 0;
    for (Map.Entry<Cell, Value> e : rawResults.entrySet()) {
        bytes += e.getValue().getContents().length + Cells.getApproxSizeOfCell(e.getKey());
    }
    if (bytes > TransactionConstants.ERROR_LEVEL_FOR_QUEUED_BYTES && !AtlasDbConstants.TABLES_KNOWN_TO_BE_POORLY_DESIGNED.contains(tableRef)) {
        log.error("A single get had a lot of bytes: " + bytes + " for table " + tableRef.getQualifiedName() + ". "
                + "The number of results was " + rawResults.size() + ". "
                + "The first 10 results were " + Iterables.limit(rawResults.entrySet(), 10) + ". "
                + "This can potentially cause out-of-memory errors.",
                new RuntimeException("This exception and stack trace are provided for debugging purposes."));
    } else if (bytes > TransactionConstants.WARN_LEVEL_FOR_QUEUED_BYTES && log.isWarnEnabled()) {
        log.warn("A single get had quite a few bytes: " + bytes + " for table " + tableRef.getQualifiedName() + ". "
                + "The number of results was " + rawResults.size() + ". "
                + "The first 10 results were " + Iterables.limit(rawResults.entrySet(), 10) + ". ",
                new RuntimeException("This exception and stack trace are provided for debugging purposes."));
    }
    if (isTempTable(tableRef) || (AtlasDbConstants.SKIP_POSTFILTER_TABLES.contains(tableRef) && allowHiddenTableAccess)) {
        // If we are reading from a temp table, we can just bypass postfiltering
        // or skip postfiltering if reading the transaction or namespace table from atlasdb shell
        for (Map.Entry<Cell, Value> e : rawResults.entrySet()) {
            results.put(e.getKey(), transformer.apply(e.getValue()));
        }
        return;
    }
    // Each pass returns the cells re-read at earlier timestamps that still need filtering.
    while (!rawResults.isEmpty()) {
        rawResults = getWithPostfilteringInternal(tableRef, rawResults, results, transformer);
    }
}
/**
 * This will return all the keys that still need to be postfiltered.  It will output properly
 * postfiltered keys to the results output param.
 * <p>
 * A value survives post-filtering only if its writer committed before our start
 * timestamp. Values from uncommitted/failed transactions may be rolled back here;
 * values committed after our start are re-read at an earlier timestamp.
 */
private <T> Map<Cell, Value> getWithPostfilteringInternal(TableReference tableRef,
                                                          Map<Cell, Value> rawResults,
                                                          @Output Map<Cell, T> results,
                                                          Function<Value, T> transformer) {
    Set<Long> startTimestampsForValues = getStartTimestampsForValues(rawResults.values());
    Map<Long, Long> commitTimestamps = getCommitTimestamps(tableRef, startTimestampsForValues, true);
    Map<Cell, Long> keysToReload = Maps.newHashMapWithExpectedSize(0);
    Map<Cell, Long> keysToDelete = Maps.newHashMapWithExpectedSize(0);
    for (Map.Entry<Cell, Value> e : rawResults.entrySet()) {
        Cell key = e.getKey();
        Value value = e.getValue();
        if (value.getTimestamp() == Value.INVALID_VALUE_TIMESTAMP) {
            // This means that this transaction started too long ago. When we do garbage collection,
            // we clean up old values, and this transaction started at a timestamp before the garbage collection.
            switch (getReadSentinelBehavior()) {
                case IGNORE:
                    break;
                case THROW_EXCEPTION:
                    throw new TransactionFailedRetriableException("Tried to read a value that has been deleted. " +
                            " This can be caused by hard delete transactions using the type " +
                            TransactionType.AGGRESSIVE_HARD_DELETE +
                            ". It can also be caused by transactions taking too long, or" +
                            " its locks expired. Retrying it should work.");
                default:
                    throw new IllegalStateException("Invalid read sentinel behavior " + getReadSentinelBehavior());
            }
        } else {
            Long theirCommitTimestamp = commitTimestamps.get(value.getTimestamp());
            if (theirCommitTimestamp == null || theirCommitTimestamp == TransactionConstants.FAILED_COMMIT_TS) {
                keysToReload.put(key, value.getTimestamp());
                if (shouldDeleteAndRollback()) {
                    // This is from a failed transaction so we can roll it back and then reload it.
                    keysToDelete.put(key, value.getTimestamp());
                }
            } else if (theirCommitTimestamp > getStartTimestamp()) {
                // The value's commit timestamp is after our start timestamp.
                // This means the value is from a transaction which committed
                // after our transaction began. We need to try reading at an
                // earlier timestamp.
                keysToReload.put(key, value.getTimestamp());
            } else {
                // The value has a commit timestamp less than our start timestamp, and is visible and valid.
                if (value.getContents().length != 0) {
                    results.put(key, transformer.apply(value));
                }
            }
        }
    }
    if (!keysToDelete.isEmpty()) {
        // if we can't roll back the failed transactions, we should just try again
        if (!rollbackFailedTransactions(tableRef, keysToDelete, commitTimestamps, defaultTransactionService)) {
            return rawResults;
        }
    }
    if (!keysToReload.isEmpty()) {
        // Re-read at the failed/late writers' timestamps to find older visible values.
        Map<Cell, Value> nextRawResults = keyValueService.get(tableRef, keysToReload);
        return nextRawResults;
    } else {
        return ImmutableMap.of();
    }
}
/**
 * This is protected to allow for different post filter behavior.
 * <p>
 * Returns true if values from failed transactions encountered during post-filtering
 * should be rolled back; requires a lock service to do so safely.
 */
protected boolean shouldDeleteAndRollback() {
    Validate.notNull(lockService, "if we don't have a valid lock server we can't roll back transactions");
    return true;
}
/** Deletes cells by writing empty byte arrays, AtlasDB's delete-marker convention. */
@Override
final public void delete(TableReference tableRef, Set<Cell> cells) {
    put(tableRef, Cells.constantValueMap(cells, PtBytes.EMPTY_BYTE_ARRAY), Cell.INVALID_TTL, Cell.INVALID_TTL_TYPE);
}
/**
 * Buffers writes locally until commit. Rejects empty values because an empty byte
 * array is reserved as the delete marker (use {@link #delete} instead).
 */
@Override
public void put(TableReference tableRef, Map<Cell, byte[]> values) {
    ensureNoEmptyValues(values);
    put(tableRef, values, Cell.INVALID_TTL, Cell.INVALID_TTL_TYPE);
}
/**
 * Rejects zero-byte values: an empty array is AtlasDB's delete marker, so writing
 * one through put() would silently turn into a delete.
 */
private void ensureNoEmptyValues(Map<Cell, byte[]> values) {
    for (byte[] value : values.values()) {
        if (value.length == 0) {
            throw new IllegalArgumentException("AtlasDB does not currently support inserting empty (zero-byte) values.");
        }
    }
}
/**
 * Buffers writes (optionally with a TTL) into the local write map. Validates that
 * the table is writable, that conflict detection is configured for it, and that
 * the transaction is still uncommitted. Thread-safe with respect to commit via the
 * numWriters counter.
 */
private void put(TableReference tableRef, Map<Cell, byte[]> values, long ttlDuration, TimeUnit ttlUnit) {
    Preconditions.checkArgument(!AtlasDbConstants.hiddenTables.contains(tableRef));
    // todo (clockfort) also check if valid table for TTL
    if (ttlDuration != Cell.INVALID_TTL && ttlUnit != Cell.INVALID_TTL_TYPE) {
        values = createExpiringValues(values, ttlDuration, ttlUnit);
    }
    if (!validConflictDetection(tableRef)) {
        // The table may have been created after this manager's snapshot; refresh once.
        conflictDetectionManager.recompute();
        Preconditions.checkArgument(validConflictDetection(tableRef),
                "Not a valid table for this transaction. Make sure this table name has a namespace: " + tableRef);
    }
    Validate.isTrue(isTempTable(tableRef) || getAllTempTables().isEmpty(),
            "Temp tables may only be used by read only transactions.");
    if (values.isEmpty()) {
        return;
    }
    numWriters.incrementAndGet();
    try {
        // We need to check the status after incrementing writers to ensure that we fail if we are committing.
        Preconditions.checkState(state.get() == State.UNCOMMITTED, "Transaction must be uncommitted.");
        ConcurrentNavigableMap<Cell, byte[]> writes = getLocalWrites(tableRef);
        if (isTempTable(tableRef)) {
            putTempTableWrites(tableRef, values, writes);
        } else {
            putWritesAndLogIfTooLarge(values, writes);
        }
    } finally {
        numWriters.decrementAndGet();
    }
}
/**
 * Rewrites each cell key to carry the given TTL while keeping the values unchanged.
 */
private Map<Cell, byte[]> createExpiringValues(Map<Cell, byte[]> values,
                                               long ttlDuration,
                                               TimeUnit ttlUnit) {
    Map<Cell, byte[]> withTtl = Maps.newHashMapWithExpectedSize(values.size());
    for (Entry<Cell, byte[]> entry : values.entrySet()) {
        Cell original = entry.getKey();
        Cell expiring = Cell.create(original.getRowName(), original.getColumnName(), ttlDuration, ttlUnit);
        withTtl.put(expiring, entry.getValue());
    }
    return withTtl;
}
/**
 * Temp tables never need conflict detection; all other tables must be known to the
 * conflict detection manager before they may be written.
 */
private boolean validConflictDetection(TableReference tableRef) {
    return isTempTable(tableRef)
            || conflictDetectionManager.isEmptyOrContainsTable(tableRef);
}
/**
 * Buffers {@code values} into the per-table write map and tracks the transaction's
 * total queued byte count, logging once when the running total first crosses the
 * warn and error thresholds. A null value is stored as an empty array (delete marker).
 * Bytes are only counted for a cell's first write; overwrites do not re-count.
 */
private void putWritesAndLogIfTooLarge(Map<Cell, byte[]> values, SortedMap<Cell, byte[]> writes) {
    for (Map.Entry<Cell, byte[]> e : values.entrySet()) {
        byte[] val = e.getValue();
        if (val == null) {
            val = PtBytes.EMPTY_BYTE_ARRAY;
        }
        Cell cell = e.getKey();
        if (writes.put(cell, val) == null) {
            long toAdd = val.length + Cells.getApproxSizeOfCell(cell);
            long newVal = byteCount.addAndGet(toAdd);
            // The "newVal - toAdd < threshold" condition makes each message fire at
            // most once, on exactly the write that crosses the threshold.
            if (newVal >= TransactionConstants.WARN_LEVEL_FOR_QUEUED_BYTES
                    && newVal - toAdd < TransactionConstants.WARN_LEVEL_FOR_QUEUED_BYTES) {
                log.warn("A single transaction has put quite a few bytes: " + newVal,
                        new RuntimeException("This exception and stack trace are provided for debugging purposes."));
            }
            if (newVal >= TransactionConstants.ERROR_LEVEL_FOR_QUEUED_BYTES
                    && newVal - toAdd < TransactionConstants.ERROR_LEVEL_FOR_QUEUED_BYTES) {
                // Fixed: this is the ERROR threshold, so log at error level (matching the
                // read-side logging in getWithPostfiltering), and the old message was
                // missing a space ("causeout-of-memory errors").
                log.error("A single transaction has put too many bytes: " + newVal + ". This can potentially cause "
                        + "out-of-memory errors.",
                        new RuntimeException("This exception and stack trace are provided for debugging purposes."));
            }
        }
    }
}
/**
 * Aborts the transaction: transitions UNCOMMITTED -> ABORTED, drops any temp tables,
 * and — if writes were buffered — verifies our locks were still valid so callers can
 * distinguish a clean abort from one racing with lock expiry. Idempotent if already
 * aborted; throws if the transaction reached any other terminal state.
 */
@Override
public void abort() {
    if (state.get() == State.ABORTED) {
        return;
    }
    // CAS loop: retried only if another thread races the state transition.
    while (true) {
        Preconditions.checkState(state.get() == State.UNCOMMITTED, "Transaction must be uncommitted.");
        if (state.compareAndSet(State.UNCOMMITTED, State.ABORTED)) {
            dropTempTables();
            if (hasWrites()) {
                throwIfExternalAndCommitLocksNotValid(null);
            }
            return;
        }
    }
}
/** True once the transaction has transitioned to the ABORTED state. */
@Override
public boolean isAborted() {
    return state.get() == State.ABORTED;
}
/** True while the transaction has not yet begun committing, aborting, or failing. */
@Override
public boolean isUncommitted() {
    return state.get() == State.UNCOMMITTED;
}
///////////////////////////////////////////////////////////////////////////
/// Committing
///////////////////////////////////////////////////////////////////////////
/** Commits using the transaction service supplied at construction. */
@Override
public void commit() {
    commit(defaultTransactionService);
}
/**
 * Commits the transaction: transitions UNCOMMITTED -> COMMITTING, queues cells for
 * scrubbing for hard-delete transactions BEFORE writing (so a crash cannot lose the
 * scrub queue), checks constraints, performs the write/commit protocol, and finally
 * records a terminal state (COMMITTED or FAILED). Idempotent if already committed.
 */
@Override
public void commit(TransactionService transactionService) {
    if (state.get() == State.COMMITTED) {
        return;
    }
    if (state.get() == State.FAILED) {
        throw new IllegalStateException("this transaction has already failed");
    }
    // CAS loop: retried only if another thread races the state transition.
    while (true) {
        Preconditions.checkState(state.get() == State.UNCOMMITTED, "Transaction must be uncommitted.");
        if (state.compareAndSet(State.UNCOMMITTED, State.COMMITTING)) {
            break;
        }
    }
    // This must be done BEFORE we commit (otherwise if the system goes down after
    // we commit but before we queue cells for scrubbing, then we will lose track of
    // which cells we need to scrub)
    if (getTransactionType() == TransactionType.AGGRESSIVE_HARD_DELETE ||
            getTransactionType() == TransactionType.HARD_DELETE) {
        cleaner.queueCellsForScrubbing(getCellsToQueueForScrubbing(), getStartTimestamp());
    }
    boolean success = false;
    try {
        if (numWriters.get() > 0) {
            // After we set state to committing we need to make sure no one is still writing.
            throw new IllegalStateException("Cannot commit while other threads are still calling put.");
        }
        if (!getAllTempTables().isEmpty()) {
            dropTempTables();
            Validate.isTrue(getAllTempTables().containsAll(writesByTable.keySet()),
                    "Temp tables may only be used by read only transactions.");
        } else {
            checkConstraints();
            commitWrites(transactionService);
        }
        perfLogger.debug("Commited transaction {} in {}ms",
                getStartTimestamp(),
                getTrasactionTimer().elapsed(TimeUnit.MILLISECONDS));
        success = true;
    } finally {
        // Once we are in state committing, we need to try/finally to set the state to a terminal state.
        state.set(success ? State.COMMITTED : State.FAILED);
    }
}
/**
 * Runs every registered {@link ConstraintCheckable} against the pending writes for its
 * table, then either throws or logs the accumulated violations depending on the
 * configured constraint-checking mode.
 */
private void checkConstraints() {
    List<String> failures = Lists.newArrayList();
    for (Map.Entry<TableReference, ConstraintCheckable> tableAndChecker : constraintsByTableName.entrySet()) {
        SortedMap<Cell, byte[]> writesForTable = writesByTable.get(tableAndChecker.getKey());
        if (writesForTable == null) {
            // No writes for this table in this transaction; nothing to validate.
            continue;
        }
        failures.addAll(tableAndChecker.getValue().findConstraintFailures(writesForTable, this, constraintCheckingMode));
    }
    if (failures.isEmpty()) {
        return;
    }
    if (constraintCheckingMode.shouldThrowException()) {
        throw new AtlasDbConstraintException(failures);
    }
    constraintLogger.error("Constraint failure on commit.",
            new AtlasDbConstraintException(failures));
}
/**
 * Performs the write-transaction commit protocol: acquire commit locks, check for
 * write-write conflicts, write the data, fetch a commit timestamp, punch it into the
 * cleaner, re-validate the locks and finally persist the commit timestamp. The
 * ordering of these steps is required for correctness and must not be changed.
 */
private void commitWrites(TransactionService transactionService) {
    if (!hasWrites()) {
        return;
    }
    Stopwatch watch = Stopwatch.createStarted();
    // Row/cell locks for the tables we wrote plus the transaction-table row lock.
    LockRefreshToken commitLocksToken = acquireLocksForCommit();
    long millisForLocks = watch.elapsed(TimeUnit.MILLISECONDS);
    try {
        watch.reset().start();
        throwIfConflictOnCommit(commitLocksToken, transactionService);
        long millisCheckingForConflicts = watch.elapsed(TimeUnit.MILLISECONDS);
        watch.reset().start();
        // All writes go in at our start timestamp.
        keyValueService.multiPut(writesByTable, getStartTimestamp());
        long millisForWrites = watch.elapsed(TimeUnit.MILLISECONDS);
        // Now that all writes are done, get the commit timestamp
        // We must do this before we check that our locks are still valid to ensure that
        // other transactions that will hold these locks are sure to have start
        // timestamps after our commit timestamp.
        long commitTimestamp = timestampService.getFreshTimestamp();
        commitTsForScrubbing = commitTimestamp;
        // punch on commit so that if hard delete is the only thing happening on a system,
        // we won't block forever waiting for the unreadable timestamp to advance past the
        // scrub timestamp (same as the hard delete transaction's start timestamp)
        watch.reset().start();
        cleaner.punch(commitTimestamp);
        long millisForPunch = watch.elapsed(TimeUnit.MILLISECONDS);
        throwIfReadWriteConflictForSerializable(commitTimestamp);
        // Verify that our locks are still valid before we actually commit;
        // this check is required by the transaction protocol for correctness
        throwIfExternalAndCommitLocksNotValid(commitLocksToken);
        watch.reset().start();
        putCommitTimestamp(commitTimestamp, commitLocksToken, transactionService);
        long millisForCommitTs = watch.elapsed(TimeUnit.MILLISECONDS);
        // Lock expiry AFTER the commit timestamp was persisted is benign for
        // correctness, so it is only logged, not thrown.
        Set<LockRefreshToken> expiredLocks = refreshExternalAndCommitLocks(commitLocksToken);
        if (!expiredLocks.isEmpty()) {
            String errorMessage =
                "This isn't a bug but it should happen very infrequently.  Required locks are no longer" +
                " valid but we have already committed successfully.  " + getExpiredLocksErrorString(commitLocksToken, expiredLocks);
            log.error(errorMessage, new TransactionFailedRetriableException(errorMessage));
        }
        long millisSinceCreation = System.currentTimeMillis() - timeCreated;
        if (perfLogger.isDebugEnabled()) {
            perfLogger.debug("Committed {} bytes with locks, start ts {}, commit ts {}, " +
                    "acquiring locks took {} ms, checking for conflicts took {} ms, " +
                    "writing took {} ms, punch took {} ms, putCommitTs took {} ms, " +
                    "total time since tx creation {} ms, tables: {}.",
                    byteCount.get(), getStartTimestamp(),
                    commitTimestamp, millisForLocks, millisCheckingForConflicts, millisForWrites,
                    millisForPunch, millisForCommitTs, millisSinceCreation, writesByTable.keySet());
        }
    } finally {
        lockService.unlock(commitLocksToken);
    }
}
/**
 * Hook for subclasses implementing serializable isolation to detect read-write
 * conflicts at commit time; the snapshot implementation intentionally does nothing.
 *
 * @param commitTimestamp the freshly acquired commit timestamp
 */
protected void throwIfReadWriteConflictForSerializable(long commitTimestamp) {
    // This is for overriding to get serializable transactions
}
/**
 * @return true iff at least one table has a non-empty pending write map
 */
private boolean hasWrites() {
    for (SortedMap<?, ?> writesForTable : writesByTable.values()) {
        if (!writesForTable.isEmpty()) {
            // Early exit: a single pending write is enough.
            return true;
        }
    }
    return false;
}
/**
 * Looks up the conflict-detection policy configured for the given table.
 */
protected ConflictHandler getConflictHandlerForTable(TableReference tableRef) {
    Map<TableReference, ConflictHandler> tableToConflictHandler = conflictDetectionManager.get();
    // An empty map means no conflict metadata is available at all, so fall back to the
    // conservative default rather than treating every table as conflict-free.
    if (tableToConflictHandler.isEmpty()) {
        return ConflictHandler.RETRY_ON_WRITE_WRITE;
    }
    // NOTE(review): returns null when the table has no entry in a non-empty map;
    // callers appear to assume every written table is present — confirm.
    return tableToConflictHandler.get(tableRef);
}
/**
 * Builds a diagnostic summary of all lock state relevant to a lock-expiry failure.
 *
 * @param commitLocksToken commit-time lock token, or {@code null} if none was taken
 * @param expiredLocks the tokens that failed to refresh
 */
private String getExpiredLocksErrorString(@Nullable LockRefreshToken commitLocksToken,
                                          Set<LockRefreshToken> expiredLocks) {
    StringBuilder message = new StringBuilder();
    message.append("The following external locks were required: ").append(externalLocksTokens);
    message.append("; the following commit locks were required: ").append(commitLocksToken);
    message.append("; the following locks are no longer valid: ").append(expiredLocks);
    return message.toString();
}
/**
 * Refreshes the external lock tokens (plus the commit lock token, if any) and throws
 * if any of them has expired; required by the commit protocol for correctness.
 *
 * @throws TransactionLockTimeoutException if any required lock is no longer valid
 */
private void throwIfExternalAndCommitLocksNotValid(@Nullable LockRefreshToken commitLocksToken) {
    Set<LockRefreshToken> expired = refreshExternalAndCommitLocks(commitLocksToken);
    if (expired.isEmpty()) {
        return;
    }
    String errorMessage =
        "Required locks are no longer valid. " + getExpiredLocksErrorString(commitLocksToken, expired);
    TransactionLockTimeoutException timeout = new TransactionLockTimeoutException(errorMessage);
    log.error(errorMessage, timeout);
    throw timeout;
}
/**
 * Attempts to refresh the external lock tokens plus (optionally) the commit lock token.
 *
 * @param commitLocksToken the commit-time lock token, or {@code null} if none was taken
 * @return set of locks that could not be refreshed (i.e. have expired)
 */
private Set<LockRefreshToken> refreshExternalAndCommitLocks(@Nullable LockRefreshToken commitLocksToken) {
    ImmutableSet<LockRefreshToken> tokens = commitLocksToken == null
            ? externalLocksTokens
            : ImmutableSet.<LockRefreshToken>builder()
                    .addAll(externalLocksTokens)
                    .add(commitLocksToken)
                    .build();
    if (tokens.isEmpty()) {
        return ImmutableSet.of();
    }
    // Anything the lock service does not hand back from the refresh call has expired.
    return Sets.difference(tokens, lockService.refreshLockRefreshTokens(tokens)).immutableCopy();
}
/**
 * Make sure we have all the rows we are checking already locked before calling this.
 */
protected void throwIfConflictOnCommit(LockRefreshToken commitLocksToken, TransactionService transactionService) throws TransactionConflictException {
    for (Entry<TableReference, ConcurrentNavigableMap<Cell, byte[]>> tableWrites : writesByTable.entrySet()) {
        TableReference tableRef = tableWrites.getKey();
        // Each table may carry its own conflict-detection policy.
        ConflictHandler handler = getConflictHandlerForTable(tableRef);
        throwIfWriteAlreadyCommitted(tableRef, tableWrites.getValue(), handler, commitLocksToken, transactionService);
    }
}
/**
 * Detects committed writes that conflict with our pending writes for one table and
 * throws if the table's conflict handler considers them fatal.
 *
 * @param tableRef table the writes belong to
 * @param writes our pending writes for that table
 * @param conflictHandler the table's conflict-detection policy
 * @param commitLocksToken commit locks held while checking (used downstream for diagnostics)
 * @param transactionService used to roll back uncommitted competing transactions
 * @throws TransactionConflictException if a fatal conflict is found
 */
protected void throwIfWriteAlreadyCommitted(TableReference tableRef,
                                            Map<Cell, byte[]> writes,
                                            ConflictHandler conflictHandler,
                                            LockRefreshToken commitLocksToken,
                                            TransactionService transactionService)
        throws TransactionConflictException {
    if (writes.isEmpty() || conflictHandler == ConflictHandler.IGNORE_ALL) {
        return;
    }
    // Conflicts whose writer started before us but committed after our start...
    Set<CellConflict> spanningWrites = Sets.newHashSet();
    // ...and conflicts whose writer started after we did.
    Set<CellConflict> dominatingWrites = Sets.newHashSet();
    Map<Cell, Long> keysToLoad = Maps.asMap(writes.keySet(), Functions.constant(Long.MAX_VALUE));
    while (!keysToLoad.isEmpty()) {
        // Each pass may roll back uncommitted competitors and return keys to re-read.
        keysToLoad = detectWriteAlreadyCommittedInternal(tableRef, keysToLoad, spanningWrites, dominatingWrites, transactionService);
    }

    if (conflictHandler == ConflictHandler.RETRY_ON_VALUE_CHANGED) {
        // Only fatal if the conflicting write actually changed the cell's value.
        throwIfValueChangedConflict(tableRef, writes, spanningWrites, dominatingWrites, commitLocksToken);
    } else if (conflictHandler == ConflictHandler.RETRY_ON_WRITE_WRITE
            || conflictHandler == ConflictHandler.RETRY_ON_WRITE_WRITE_CELL
            || conflictHandler == ConflictHandler.SERIALIZABLE) {
        if (!spanningWrites.isEmpty() || !dominatingWrites.isEmpty()) {
            throw TransactionConflictException.create(tableRef, getStartTimestamp(), spanningWrites,
                    dominatingWrites, System.currentTimeMillis() - timeCreated);
        }
    } else {
        throw new IllegalArgumentException("Unknown conflictHandler type: " + conflictHandler);
    }
}
/**
 * This will throw if we have a value changed conflict.  This means that either we changed the
 * value and anyone did a write after our start timestamp, or we just touched the value (put the
 * same value as before) and a changed value was written after our start time.
 */
private void throwIfValueChangedConflict(TableReference table,
                                         Map<Cell, byte[]> writes,
                                         Set<CellConflict> spanningWrites,
                                         Set<CellConflict> dominatingWrites,
                                         LockRefreshToken commitLocksToken) {
    Map<Cell, CellConflict> cellToConflict = Maps.newHashMap();
    // Map each conflicting cell to (their start ts + 1) so the KVS read below returns
    // the conflicting writer's value (reads are exclusive of the given timestamp).
    Map<Cell, Long> cellToTs = Maps.newHashMap();
    for (CellConflict c : Sets.union(spanningWrites, dominatingWrites)) {
        cellToConflict.put(c.cell, c);
        cellToTs.put(c.cell, c.theirStart + 1);
    }

    // What we would have read at our start timestamp, ignoring our own local writes.
    Map<Cell, byte[]> oldValues = getIgnoringLocalWrites(table, cellToTs.keySet());
    // The conflicting writers' values.
    Map<Cell, Value> conflictingValues = keyValueService.get(table, cellToTs);

    Set<Cell> conflictingCells = Sets.newHashSet();
    for (Entry<Cell, Long> cellEntry : cellToTs.entrySet()) {
        Cell cell = cellEntry.getKey();
        if (!writes.containsKey(cell)) {
            Validate.isTrue(false, "Missing write for cell: " + cellToConflict.get(cell)
                    + " for table " + table);
        }
        if (!conflictingValues.containsKey(cell)) {
            // This error case could happen if our locks expired.
            throwIfExternalAndCommitLocksNotValid(commitLocksToken);
            Validate.isTrue(false, "Missing conflicting value for cell: " + cellToConflict.get(cell)
                    + " for table " + table);
        }
        if (conflictingValues.get(cell).getTimestamp() != (cellEntry.getValue() - 1)) {
            // This error case could happen if our locks expired.
            throwIfExternalAndCommitLocksNotValid(commitLocksToken);
            Validate.isTrue(false, "Wrong timestamp for cell in table " + table
                    + " Expected: " + cellToConflict.get(cell)
                    + " Actual: " + conflictingValues.get(cell));
        }
        @Nullable byte[] oldVal = oldValues.get(cell);
        byte[] writeVal = writes.get(cell);
        byte[] conflictingVal = conflictingValues.get(cell).getContents();
        // Fatal only if we actually changed the value, or the competing writer wrote
        // something different from what we are writing.
        if (!Transactions.cellValuesEqual(oldVal, writeVal)
                || !Arrays.equals(writeVal, conflictingVal)) {
            conflictingCells.add(cell);
        } else if (log.isInfoEnabled()) {
            log.info("Another transaction committed to the same cell before us but " +
                    "their value was the same. " + "Cell: "  + cell + " Table: " + table);
        }
    }
    if (conflictingCells.isEmpty()) {
        return;
    }
    Predicate<CellConflict> conflicting = Predicates.compose(Predicates.in(conflictingCells), CellConflict.getCellFunction());
    throw TransactionConflictException.create(table,
            getStartTimestamp(),
            Sets.filter(spanningWrites, conflicting),
            Sets.filter(dominatingWrites, conflicting),
            System.currentTimeMillis() - timeCreated);
}
/**
 * This will return the set of keys that need to be retried.  It will output any conflicts
 * it finds into the output params.
 *
 * @param tableRef table whose latest timestamps are inspected
 * @param keysToLoad cells to check, mapped to the timestamp bound for the lookup
 * @param spanningWrites out-param collecting conflicts that span our start timestamp
 * @param dominatingWrites out-param collecting conflicts that started after us
 * @param transactionService used to roll back uncommitted competing transactions
 */
protected Map<Cell, Long> detectWriteAlreadyCommittedInternal(TableReference tableRef,
                                                              Map<Cell, Long> keysToLoad,
                                                              @Output Set<CellConflict> spanningWrites,
                                                              @Output Set<CellConflict> dominatingWrites,
                                                              TransactionService transactionService) {
    Map<Cell, Long> rawResults = keyValueService.getLatestTimestamps(tableRef, keysToLoad);
    Map<Long, Long> commitTimestamps = getCommitTimestamps(tableRef, rawResults.values(), false);
    Map<Cell, Long> keysToDelete = Maps.newHashMapWithExpectedSize(0);

    for (Map.Entry<Cell, Long> e : rawResults.entrySet()) {
        Cell key = e.getKey();
        long theirStartTimestamp = e.getValue();
        // Seeing our own start timestamp here would mean timestamp reuse, which would
        // break the protocol; log loudly if it ever happens.
        AssertUtils.assertAndLog(theirStartTimestamp != getStartTimestamp(),
                "Timestamp reuse is bad:%d", getStartTimestamp());

        Long theirCommitTimestamp = commitTimestamps.get(theirStartTimestamp);
        if (theirCommitTimestamp == null
                || theirCommitTimestamp == TransactionConstants.FAILED_COMMIT_TS) {
            // The value has no commit timestamp or was explicitly rolled back.
            // This means the value is garbage from a transaction which didn't commit.
            keysToDelete.put(key, theirStartTimestamp);
            continue;
        }

        AssertUtils.assertAndLog(theirCommitTimestamp != getStartTimestamp(),
                "Timestamp reuse is bad:%d", getStartTimestamp());
        if (theirStartTimestamp > getStartTimestamp()) {
            // They started after us: a dominating write.
            dominatingWrites.add(Cells.createConflictWithMetadata(
                    keyValueService,
                    tableRef,
                    key,
                    theirStartTimestamp,
                    theirCommitTimestamp));
        } else if (theirCommitTimestamp > getStartTimestamp()) {
            // They started before us but committed after our start: a spanning write.
            spanningWrites.add(Cells.createConflictWithMetadata(
                    keyValueService,
                    tableRef,
                    key,
                    theirStartTimestamp,
                    theirCommitTimestamp));
        }
    }

    if (!keysToDelete.isEmpty()) {
        if (!rollbackFailedTransactions(tableRef, keysToDelete, commitTimestamps, transactionService)) {
            // If we can't roll back the failed transactions, we should just try again.
            return keysToLoad;
        }
    }

    // Once we successfully rollback and delete these cells we need to reload them.
    return keysToDelete;
}
/**
 * This will attempt to rollback the passed transactions.  If all are rolled back correctly this
 * method will also delete the values for the transactions that have been rolled back.
 * @return false if we cannot roll back the failed transactions because someone beat us to it.
 */
private boolean rollbackFailedTransactions(TableReference tableRef,
        Map<Cell, Long> keysToDelete, Map<Long, Long> commitTimestamps, TransactionService transactionService) {
    for (long startTs : Sets.newHashSet(keysToDelete.values())) {
        if (commitTimestamps.get(startTs) == null) {
            log.warn("Rolling back transaction: " + startTs);
            // Someone else may commit it between our read and this rollback attempt;
            // in that case we must retry the conflict detection from scratch.
            if (!rollbackOtherTransaction(startTs, transactionService)) {
                return false;
            }
        } else {
            // The only other way a start ts lands in keysToDelete is an explicit rollback.
            Validate.isTrue(commitTimestamps.get(startTs) == TransactionConstants.FAILED_COMMIT_TS);
        }
    }
    try {
        log.warn("For table: " + tableRef + " we are deleting values of an uncommitted transaction: " + keysToDelete);
        keyValueService.delete(tableRef, Multimaps.forMap(keysToDelete));
    } catch (RuntimeException e) {
        // Deletion is best-effort cleanup: the transactions are already marked rolled
        // back, so a failed delete is logged but does not fail this method.
        String msg = "This isn't a bug but it should be infrequent if all nodes of your KV service are running. "
                + "Delete has stronger consistency semantics than read/write and must talk to all nodes "
                + "instead of just talking to a quorum of nodes.  "
                + "Failed to delete keys for table" + tableRef
                + " from an uncommitted transaction: " + keysToDelete;
        log.error(msg, e);
    }
    return true;
}
/**
 * @return true if the other transaction was rolled back
 */
private boolean rollbackOtherTransaction(long startTs, TransactionService transactionService) {
    try {
        // Writing FAILED_COMMIT_TS marks the other transaction as explicitly rolled
        // back — but only if nobody (including its original committer) got there first.
        transactionService.putUnlessExists(startTs, TransactionConstants.FAILED_COMMIT_TS);
        return true;
    } catch (KeyAlreadyExistsException e) {
        // Lost the race: either it committed or another rollback won.
        String msg = "Two transactions tried to roll back someone else's request with start: " + startTs;
        log.error("This isn't a bug but it should be very infrequent. " + msg, new TransactionFailedRetriableException(msg, e));
        return false;
    }
}
///////////////////////////////////////////////////////////////////////////
/// Locking
///////////////////////////////////////////////////////////////////////////

/**
 * This method should acquire any locks needed to do proper concurrency control at commit time.
 */
protected LockRefreshToken acquireLocksForCommit() {
    SortedMap<LockDescriptor, LockMode> lockMap = getLocksForWrites();
    try {
        // Blocks until granted; anonymous client because the returned token is the
        // only handle we keep on these locks.
        return lockService.lock(LockClient.ANONYMOUS.getClientId(), LockRequest.builder(lockMap).build());
    } catch (InterruptedException e) {
        // NOTE(review): the interrupt status is not restored here — confirm whether
        // Throwables.throwUncheckedException handles that.
        throw Throwables.throwUncheckedException(e);
    }
}
/**
 * Builds the commit-time lock set: a write lock on our own transaction-table row plus,
 * per written table, either cell-level or row-level write locks depending on the
 * table's conflict handler (no locks for IGNORE_ALL tables).
 */
protected ImmutableSortedMap<LockDescriptor, LockMode> getLocksForWrites() {
    Builder<LockDescriptor, LockMode> builder = ImmutableSortedMap.naturalOrder();
    Iterable<TableReference> allTables = IterableUtils.append(writesByTable.keySet(), TransactionConstants.TRANSACTION_TABLE);
    for (TableReference tableRef : allTables) {
        if (tableRef.equals(TransactionConstants.TRANSACTION_TABLE)) {
            // Lock our own row in the transaction table; other transactions block on
            // this lock in waitForCommitToComplete.
            builder.put(AtlasRowLockDescriptor.of(TransactionConstants.TRANSACTION_TABLE.getQualifiedName(), TransactionConstants.getValueForTimestamp(getStartTimestamp())), LockMode.WRITE);
            continue;
        }
        ConflictHandler conflictHandler = getConflictHandlerForTable(tableRef);
        if (conflictHandler == ConflictHandler.RETRY_ON_WRITE_WRITE_CELL) {
            // Cell-granularity conflict detection wants cell-granularity locks.
            for (Cell cell : getLocalWrites(tableRef).keySet()) {
                builder.put(AtlasCellLockDescriptor.of(tableRef.getQualifiedName(), cell.getRowName(), cell.getColumnName()), LockMode.WRITE);
            }
        } else if (conflictHandler != ConflictHandler.IGNORE_ALL) {
            // Row-granularity locks; the cells iterate in sorted order, so comparing
            // against the previous cell's row dedupes consecutive same-row cells.
            Cell lastCell = null;
            for (Cell cell : getLocalWrites(tableRef).keySet()) {
                if (lastCell == null || !Arrays.equals(lastCell.getRowName(), cell.getRowName())) {
                    builder.put(AtlasRowLockDescriptor.of(tableRef.getQualifiedName(), cell.getRowName()), LockMode.WRITE);
                }
                lastCell = cell;
            }
        }
    }
    return builder.build();
}
/**
 * We will block here until the passed transactions have released their lock.  This means that
 * the committing transaction is either complete or it has failed and we are allowed to roll
 * it back.
 */
private void waitForCommitToComplete(Iterable<Long> startTimestamps) {
    boolean isEmpty = true;
    Builder<LockDescriptor, LockMode> builder = ImmutableSortedMap.naturalOrder();
    for (long start : startTimestamps) {
        if (start < immutableTimestamp) {
            // We don't need to block in this case because this transaction is already complete
            continue;
        }
        isEmpty = false;
        // Read lock on the committer's transaction-table row: grantable only once the
        // committer has dropped its write lock on the same row.
        builder.put(AtlasRowLockDescriptor.of(TransactionConstants.TRANSACTION_TABLE.getQualifiedName(), TransactionConstants.getValueForTimestamp(start)), LockMode.READ);
    }

    if (isEmpty) {
        return;
    }

    // TODO: This can have better performance if we have a blockAndReturn method in lock server
    // However lock server blocking is an issue if we fill up all our requests
    try {
        // lockAndRelease: we only want to wait for the lock, not hold it.
        lockService.lock(LockClient.ANONYMOUS.getClientId(), LockRequest.builder(builder.build()).lockAndRelease().build());
    } catch (InterruptedException e) {
        throw Throwables.throwUncheckedException(e);
    }
}
///////////////////////////////////////////////////////////////////////////
/// Commit timestamp management
///////////////////////////////////////////////////////////////////////////

/**
 * @return the deduplicated set of (start) timestamps carried by the given values
 */
private Set<Long> getStartTimestampsForValues(Iterable<Value> values) {
    Set<Long> startTimestamps = Sets.newHashSet();
    for (Value value : values) {
        startTimestamps.add(value.getTimestamp());
    }
    return startTimestamps;
}
/**
 * Returns a map from start timestamp to commit timestamp.  If a start timestamp wasn't
 * committed, then it will be missing from the map.  This method will block until the
 * transactions for these start timestamps are complete.
 *
 * @param tableRef table being read (used only for perf logging; may be null)
 * @param startTimestamps start timestamps to resolve
 * @param waitForCommitterToComplete if true, block until the owning transactions have
 *        released their transaction-table row locks before reading
 */
protected Map<Long, Long> getCommitTimestamps(@Nullable TableReference tableRef,
                                              Iterable<Long> startTimestamps,
                                              boolean waitForCommitterToComplete) {
    if (Iterables.isEmpty(startTimestamps)) {
        return ImmutableMap.of();
    }
    Map<Long, Long> result = Maps.newHashMap();

    // Serve what we can from the cache; commit timestamps are immutable once written.
    Set<Long> gets = Sets.newHashSet();
    for (long startTS : startTimestamps) {
        Long cached = cachedCommitTimes.get(startTS);
        if (cached != null) {
            result.put(startTS, cached);
        } else {
            gets.add(startTS);
        }
    }

    if (gets.isEmpty()) {
        return result;
    }

    // Before we do the reads, we need to make sure the committer is done writing.
    if (waitForCommitterToComplete) {
        Stopwatch watch = Stopwatch.createStarted();
        waitForCommitToComplete(startTimestamps);
        perfLogger.debug("Waited {} ms to get commit timestamps for table {}.",
                watch.elapsed(TimeUnit.MILLISECONDS), tableRef);
    }

    Map<Long, Long> rawResults = defaultTransactionService.get(gets);
    for (Map.Entry<Long, Long> e : rawResults.entrySet()) {
        if (e.getValue() != null) {
            long startTS = e.getKey();
            long commitTS = e.getValue();
            result.put(startTS, commitTS);
            // Cache for future lookups.
            cachedCommitTimes.put(startTS, commitTS);
        }
    }
    return result;
}
/**
 * This will attempt to put the commitTimestamp into the DB.
 *
 * @throws TransactionLockTimeoutException If our locks timed out while trying to commit.
 * @throws TransactionCommitFailedException failed when committing in a way that isn't retriable
 */
private void putCommitTimestamp(long commitTimestamp, LockRefreshToken locksToken, TransactionService transactionService) throws TransactionFailedException {
    Validate.isTrue(commitTimestamp > getStartTimestamp(), "commitTs must be greater than startTs");
    try {
        // putUnlessExists is the commit point: the first writer of this key wins.
        transactionService.putUnlessExists(getStartTimestamp(), commitTimestamp);
    } catch (KeyAlreadyExistsException e) {
        // Someone already wrote an entry for our start timestamp — either a retried
        // put of our own commit, or another node rolled us back.
        handleKeyAlreadyExistsException(commitTimestamp, e, locksToken);
    } catch (Exception e) {
        // Indeterminate outcome: the write may or may not have landed.
        TransactionCommitFailedException commitFailedEx = new TransactionCommitFailedException(
                "This transaction failed writing the commit timestamp. " +
                "It might have been committed, but it may not have.", e);
        log.error("failed to commit an atlasdb transaction", commitFailedEx);
        throw commitFailedEx;
    }
}
/**
 * Resolves a {@link KeyAlreadyExistsException} from putUnlessExists: either our commit
 * actually succeeded (retried put), or we were rolled back.
 *
 * @throws TransactionLockTimeoutException if we were rolled back because our locks expired
 * @throws TransactionCommitFailedException if we were rolled back for any other reason
 */
private void handleKeyAlreadyExistsException(long commitTs, KeyAlreadyExistsException e, LockRefreshToken commitLocksToken) {
    try {
        if (wasCommitSuccessful(commitTs)) {
            // We did actually commit successfully.  This case could happen if the impl
            // for putUnlessExists did a retry and we had committed already
            return;
        }
        Set<LockRefreshToken> expiredLocks = refreshExternalAndCommitLocks(commitLocksToken);
        if (!expiredLocks.isEmpty()) {
            throw new TransactionLockTimeoutException("Our commit was already rolled back at commit time " +
                    "because our locks timed out.  startTs: " + getStartTimestamp() + ".  " +
                    getExpiredLocksErrorString(commitLocksToken, expiredLocks), e);
        } else {
            // Rolled back while our locks were still valid: a protocol violation by
            // whoever rolled us back.
            AssertUtils.assertAndLog(false,
                    "BUG: Someone tried to roll back our transaction but our locks were still valid; this is not allowed." +
                    " Held external locks: " + externalLocksTokens + "; held commit locks: " + commitLocksToken);
        }
    } catch (TransactionFailedException e1) {
        throw e1;
    } catch (Exception e1) {
        // Couldn't determine retryability; fall through to the generic failure below.
        log.error("Failed to determine if we can retry this transaction. startTs: " + getStartTimestamp(), e1);
    }
    String msg = "Our commit was already rolled back at commit time. " +
            "Locking should prevent this from happening, but our locks may have timed out. " +
            "startTs: " + getStartTimestamp();
    throw new TransactionCommitFailedException(msg, e);
}
/**
 * Checks whether the commit timestamp persisted for our start timestamp matches the
 * one we tried to write.
 *
 * @return true iff the stored commit timestamp equals {@code commitTs}
 * @throws Exception if the lookup fails, no entry is found, or an unexpected value is stored
 */
private boolean wasCommitSuccessful(long commitTs) throws Exception {
    Map<Long, Long> commitTimestamps = getCommitTimestamps(null, Collections.singleton(getStartTimestamp()), false);
    Long storedCommit = commitTimestamps.get(getStartTimestamp());
    // getCommitTimestamps omits start timestamps with no persisted commit entry; fail
    // with a clear message instead of NPEing on auto-unboxing.
    Validate.isTrue(storedCommit != null,
            "No commit timestamp found for startTs " + getStartTimestamp());
    if (storedCommit != commitTs && storedCommit != TransactionConstants.FAILED_COMMIT_TS) {
        // Only our commitTs or an explicit rollback marker is a legal stored value.
        Validate.isTrue(false, "Commit value is wrong. startTs " + getStartTimestamp() + "  commitTs: " + commitTs);
    }
    return storedCommit == commitTs;
}
/**
 * Registers a table for constraint checking at commit time (see checkConstraints()).
 */
@Override
public void useTable(TableReference tableRef, ConstraintCheckable table) {
    constraintsByTableName.put(tableRef, table);
}
/**
 * @return this transaction's start timestamp
 */
private long getStartTimestamp() {
    // Supplied lazily; presumably the supplier memoizes — TODO confirm against the
    // factory that constructs this transaction.
    return startTimestamp.get();
}
/**
 * @return the underlying key-value service this transaction reads and writes through
 */
@Override
protected KeyValueService getKeyValueService() {
    return keyValueService;
}
/**
 * Cells to enqueue for scrubbing before commit; only valid while COMMITTING.
 */
private Multimap<Cell, TableReference> getCellsToQueueForScrubbing() {
    return getCellsToScrubByCell(State.COMMITTING);
}
/**
 * Cells to scrub right away; only valid once the transaction is COMMITTED.
 */
Multimap<TableReference, Cell> getCellsToScrubImmediately() {
    return getCellsToScrubByTable(State.COMMITTED);
}
/**
 * Inverts writesByTable into a cell -&gt; tables multimap, but only when the transaction
 * is in the state the caller expects; otherwise logs the mismatch and returns empty.
 */
private Multimap<Cell, TableReference> getCellsToScrubByCell(State expectedState) {
    Multimap<Cell, TableReference> cellToTableName = HashMultimap.create();
    State observed = state.get();
    if (observed != expectedState) {
        // Soft assertion: log and return an empty multimap rather than throwing.
        AssertUtils.assertAndLog(false, "Expected state: " + expectedState + "; actual state: " + observed);
        return cellToTableName;
    }
    for (Entry<TableReference, ConcurrentNavigableMap<Cell, byte[]>> tableWrites : writesByTable.entrySet()) {
        TableReference tableRef = tableWrites.getKey();
        for (Cell cell : tableWrites.getValue().keySet()) {
            cellToTableName.put(cell, tableRef);
        }
    }
    return cellToTableName;
}
/**
 * Collects writesByTable into a table -&gt; cells multimap, but only when the transaction
 * is in the state the caller expects; otherwise logs the mismatch and returns empty.
 */
private Multimap<TableReference, Cell> getCellsToScrubByTable(State expectedState) {
    Multimap<TableReference, Cell> tableRefToCells = HashMultimap.create();
    State observed = state.get();
    if (observed != expectedState) {
        // Soft assertion: log and return an empty multimap rather than throwing.
        AssertUtils.assertAndLog(false, "Expected state: " + expectedState + "; actual state: " + observed);
        return tableRefToCells;
    }
    for (Entry<TableReference, ConcurrentNavigableMap<Cell, byte[]>> tableWrites : writesByTable.entrySet()) {
        tableRefToCells.putAll(tableWrites.getKey(), tableWrites.getValue().keySet());
    }
    return tableRefToCells;
}
}
|
atlasdb-impl-shared/src/main/java/com/palantir/atlasdb/transaction/impl/SnapshotTransaction.java
|
/**
* Copyright 2015 Palantir Technologies
*
* Licensed under the BSD-3 License (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://opensource.org/licenses/BSD-3-Clause
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.atlasdb.transaction.impl;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentNavigableMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nullable;
import org.apache.commons.lang.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.base.Stopwatch;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.AbstractIterator;
import com.google.common.collect.Collections2;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.ImmutableSortedMap.Builder;
import com.google.common.collect.Iterables;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import com.palantir.atlasdb.AtlasDbConstants;
import com.palantir.atlasdb.AtlasDbPerformanceConstants;
import com.palantir.atlasdb.cleaner.Cleaner;
import com.palantir.atlasdb.cleaner.NoOpCleaner;
import com.palantir.atlasdb.encoding.PtBytes;
import com.palantir.atlasdb.keyvalue.api.Cell;
import com.palantir.atlasdb.keyvalue.api.ColumnSelection;
import com.palantir.atlasdb.keyvalue.api.KeyAlreadyExistsException;
import com.palantir.atlasdb.keyvalue.api.KeyValueService;
import com.palantir.atlasdb.keyvalue.api.RangeRequest;
import com.palantir.atlasdb.keyvalue.api.RangeRequests;
import com.palantir.atlasdb.keyvalue.api.RowResult;
import com.palantir.atlasdb.keyvalue.api.TableReference;
import com.palantir.atlasdb.keyvalue.api.Value;
import com.palantir.atlasdb.keyvalue.impl.Cells;
import com.palantir.atlasdb.keyvalue.impl.RowResults;
import com.palantir.atlasdb.protos.generated.TableMetadataPersistence.SweepStrategy;
import com.palantir.atlasdb.table.description.exceptions.AtlasDbConstraintException;
import com.palantir.atlasdb.transaction.api.AtlasDbConstraintCheckingMode;
import com.palantir.atlasdb.transaction.api.ConflictHandler;
import com.palantir.atlasdb.transaction.api.ConstraintCheckable;
import com.palantir.atlasdb.transaction.api.ConstraintCheckingTransaction;
import com.palantir.atlasdb.transaction.api.TransactionCommitFailedException;
import com.palantir.atlasdb.transaction.api.TransactionConflictException;
import com.palantir.atlasdb.transaction.api.TransactionConflictException.CellConflict;
import com.palantir.atlasdb.transaction.api.TransactionFailedException;
import com.palantir.atlasdb.transaction.api.TransactionFailedRetriableException;
import com.palantir.atlasdb.transaction.api.TransactionLockTimeoutException;
import com.palantir.atlasdb.transaction.api.TransactionReadSentinelBehavior;
import com.palantir.atlasdb.transaction.service.TransactionService;
import com.palantir.common.annotation.Output;
import com.palantir.common.base.AbortingVisitor;
import com.palantir.common.base.AbstractBatchingVisitable;
import com.palantir.common.base.BatchingVisitable;
import com.palantir.common.base.BatchingVisitableFromIterable;
import com.palantir.common.base.BatchingVisitables;
import com.palantir.common.base.ClosableIterator;
import com.palantir.common.base.ClosableIterators;
import com.palantir.common.base.ForwardingClosableIterator;
import com.palantir.common.base.Throwables;
import com.palantir.common.collect.IterableUtils;
import com.palantir.common.collect.IteratorUtils;
import com.palantir.common.collect.MapEntries;
import com.palantir.lock.AtlasCellLockDescriptor;
import com.palantir.lock.AtlasRowLockDescriptor;
import com.palantir.lock.LockClient;
import com.palantir.lock.LockDescriptor;
import com.palantir.lock.LockMode;
import com.palantir.lock.LockRefreshToken;
import com.palantir.lock.LockRequest;
import com.palantir.lock.RemoteLockService;
import com.palantir.timestamp.TimestampService;
import com.palantir.util.AssertUtils;
import com.palantir.util.DistributedCacheMgrCache;
import com.palantir.util.Pair;
import com.palantir.util.SoftCache;
import com.palantir.util.paging.TokenBackedBasicResultsPage;
/**
* This implements snapshot isolation for transactions.
* <p>
* This object is thread safe and you may do reads and writes from multiple threads.
* You may not continue reading or writing after {@link #commit()} or {@link #abort()}
* is called.
* <p>
* Things to keep in mind when dealing with snapshot transactions:
* 1. Transactions that do writes should be short lived.
* 1a. Read only transactions can be long lived (within reason).
* 2. Do not write too much data in one transaction (this relates back to #1)
* 3. A row should be able to fit in memory without any trouble. This includes
* all columns of the row. If you are thinking about making your row bigger than like 10MB, you
* should think about breaking these up into different rows and using range scans.
*/
public class SnapshotTransaction extends AbstractTransaction implements ConstraintCheckingTransaction {
// Page size used when fetching the first page of a range scan.
private static final int BATCH_SIZE_GET_FIRST_PAGE = 1000;

private final static Logger log = LoggerFactory.getLogger(SnapshotTransaction.class);
// Dedicated loggers for performance timings and constraint violations.
private static final Logger perfLogger = LoggerFactory.getLogger("dualschema.perf");
private static final Logger constraintLogger = LoggerFactory.getLogger("dualschema.constraints");

/**
 * Lifecycle of a transaction: UNCOMMITTED -> COMMITTING -> (COMMITTED | FAILED),
 * or UNCOMMITTED -> ABORTED. COMMITTED, FAILED and ABORTED are terminal.
 */
private enum State {
    UNCOMMITTED,
    COMMITTED,
    COMMITTING,
    ABORTED,
    /**
     * Commit has failed during commit.
     */
    FAILED
}
protected final TimestampService timestampService;
final KeyValueService keyValueService;
protected final RemoteLockService lockService;
final TransactionService defaultTransactionService;
private final Cleaner cleaner;
// Start timestamp is obtained through a supplier; see getStartTimestamp().
private final Supplier<Long> startTimestamp;
// Writes committed before this timestamp need no read locks (no live writers exist).
protected final long immutableTimestamp;
// External lock tokens that must remain valid for this transaction to commit.
protected final ImmutableSet<LockRefreshToken> externalLocksTokens;

protected final long timeCreated = System.currentTimeMillis();
// Pending writes keyed by table, then by cell; concurrent because puts may race.
protected final ConcurrentMap<TableReference, ConcurrentNavigableMap<Cell, byte[]>> writesByTable = Maps.newConcurrentMap();
private final ConflictDetectionManager conflictDetectionManager;
// Cache of start timestamp -> commit timestamp lookups (see getCommitTimestamps).
private final DistributedCacheMgrCache<Long, Long> cachedCommitTimes = new SoftCache<Long, Long>();
private final AtomicLong byteCount = new AtomicLong();

private final AtlasDbConstraintCheckingMode constraintCheckingMode;
// Tables registered via useTable() for constraint checking at commit time.
private final ConcurrentMap<TableReference, ConstraintCheckable> constraintsByTableName = Maps.newConcurrentMap();

// Lifecycle state machine; see the State enum above.
private final AtomicReference<State> state = new AtomicReference<State>(State.UNCOMMITTED);
// Number of threads currently writing; commit() refuses to proceed while > 0.
private final AtomicLong numWriters = new AtomicLong();
protected final SweepStrategyManager sweepStrategyManager;
protected final Long transactionReadTimeoutMillis;
private final TransactionReadSentinelBehavior readSentinelBehavior;
// Commit timestamp recorded for the scrubber; FAILED_COMMIT_TS until commit.
private volatile long commitTsForScrubbing = TransactionConstants.FAILED_COMMIT_TS;
protected final boolean allowHiddenTableAccess;
protected final Stopwatch transactionTimer = Stopwatch.createStarted();
    /**
     * Creates a fully functional read/write snapshot transaction.
     *
     * @param keyValueService underlying store that reads and writes are issued against
     * @param lockService lock service used to validate locks at commit time
     * @param timestampService source of fresh timestamps for the commit protocol
     * @param transactionService service used to record commit timestamps
     * @param cleaner used to queue cells for scrubbing for hard-delete transactions
     * @param startTimeStamp supplier of this transaction's start timestamp
     * @param immutableTimestamp If we find a row written before the immutableTimestamp we don't need to
     * grab a read lock for it because we know that no writers exist.
     * @param tokensValidForCommit These tokens need to be valid with {@link #lockService} for this transaction
     * to commit.  If these locks have expired then the commit will fail.
     * @param transactionTimeoutMillis wall-clock read timeout in milliseconds; null disables the timeout
     */
    /* package */ SnapshotTransaction(KeyValueService keyValueService,
                        RemoteLockService lockService,
                        TimestampService timestampService,
                        TransactionService transactionService,
                        Cleaner cleaner,
                        Supplier<Long> startTimeStamp,
                        ConflictDetectionManager conflictDetectionManager,
                        SweepStrategyManager sweepStrategyManager,
                        long immutableTimestamp,
                        Iterable<LockRefreshToken> tokensValidForCommit,
                        AtlasDbConstraintCheckingMode constraintCheckingMode,
                        Long transactionTimeoutMillis,
                        TransactionReadSentinelBehavior readSentinelBehavior,
                        boolean allowHiddenTableAccess) {
        this.keyValueService = keyValueService;
        this.timestampService = timestampService;
        this.defaultTransactionService = transactionService;
        this.cleaner = cleaner;
        this.lockService = lockService;
        this.startTimestamp = startTimeStamp;
        this.conflictDetectionManager = conflictDetectionManager;
        this.sweepStrategyManager = sweepStrategyManager;
        this.immutableTimestamp = immutableTimestamp;
        this.externalLocksTokens = ImmutableSet.copyOf(tokensValidForCommit);
        this.constraintCheckingMode = constraintCheckingMode;
        this.transactionReadTimeoutMillis = transactionTimeoutMillis;
        this.readSentinelBehavior = readSentinelBehavior;
        this.allowHiddenTableAccess = allowHiddenTableAccess;
    }
    // TEST ONLY
    // Convenience constructor for tests: fixed start timestamp, no external lock
    // tokens, immutableTimestamp of 0, no read timeout, and default sweep strategies.
    SnapshotTransaction(KeyValueService keyValueService,
                        RemoteLockService lockService,
                        TimestampService timestampService,
                        TransactionService transactionService,
                        Cleaner cleaner,
                        long startTimeStamp,
                        Map<TableReference, ConflictHandler> tablesToWriteWrite,
                        AtlasDbConstraintCheckingMode constraintCheckingMode,
                        TransactionReadSentinelBehavior readSentinelBehavior) {
        this.keyValueService = keyValueService;
        this.timestampService = timestampService;
        this.defaultTransactionService = transactionService;
        this.cleaner = cleaner;
        this.lockService = lockService;
        this.startTimestamp = Suppliers.ofInstance(startTimeStamp);
        this.conflictDetectionManager = ConflictDetectionManagers.fromMap(tablesToWriteWrite);
        this.sweepStrategyManager = SweepStrategyManagers.createDefault(keyValueService);
        this.immutableTimestamp = 0;
        this.externalLocksTokens = ImmutableSet.of();
        this.constraintCheckingMode = constraintCheckingMode;
        this.transactionReadTimeoutMillis = null;
        this.readSentinelBehavior = readSentinelBehavior;
        this.allowHiddenTableAccess = false;
    }
    /**
     * Used for read only transactions and subclasses that are read only and
     * bypass aspects of the transaction protocol.
     * <p>
     * Delegates to the seven-argument constructor with hidden-table access disabled.
     */
    protected SnapshotTransaction(KeyValueService keyValueService,
                                  TransactionService transactionService,
                                  RemoteLockService lockService,
                                  long startTimeStamp,
                                  AtlasDbConstraintCheckingMode constraintCheckingMode,
                                  TransactionReadSentinelBehavior readSentinelBehavior) {
        this(keyValueService, transactionService, lockService, startTimeStamp, constraintCheckingMode, readSentinelBehavior, false);
    }
    // Read-only variant: no timestamp service, a no-op cleaner, no conflict
    // detection, and immutableTimestamp pinned to the start timestamp.
    protected SnapshotTransaction(KeyValueService keyValueService,
                                  TransactionService transactionService,
                                  RemoteLockService lockService,
                                  long startTimeStamp,
                                  AtlasDbConstraintCheckingMode constraintCheckingMode,
                                  TransactionReadSentinelBehavior readSentinelBehavior,
                                  boolean allowHiddenTableAccess) {
        this.keyValueService = keyValueService;
        this.defaultTransactionService = transactionService;
        this.cleaner = NoOpCleaner.INSTANCE;
        this.lockService = lockService;
        this.startTimestamp = Suppliers.ofInstance(startTimeStamp);
        this.conflictDetectionManager = ConflictDetectionManagers.withoutConflictDetection(keyValueService);
        this.sweepStrategyManager = SweepStrategyManagers.createDefault(keyValueService);
        // No commits happen in a read-only transaction, so no timestamp service is needed.
        this.timestampService = null;
        this.immutableTimestamp = startTimeStamp;
        this.externalLocksTokens = ImmutableSet.of();
        this.constraintCheckingMode = constraintCheckingMode;
        this.transactionReadTimeoutMillis = null;
        this.readSentinelBehavior = readSentinelBehavior;
        this.allowHiddenTableAccess = allowHiddenTableAccess;
    }
    /**
     * Returns this transaction's start timestamp.
     */
    @Override
    public long getTimestamp() {
        return getStartTimestamp();
    }
    /**
     * Returns the commit timestamp recorded for scrubbing, or
     * {@link TransactionConstants#FAILED_COMMIT_TS} if none has been recorded.
     */
    long getCommitTimestamp() {
        return commitTsForScrubbing;
    }
    /**
     * Returns how this transaction reacts to read sentinels (values garbage
     * collected out from under it); see getWithPostfilteringInternal.
     */
    @Override
    public TransactionReadSentinelBehavior getReadSentinelBehavior() {
        return readSentinelBehavior;
    }
    /**
     * Returns the stopwatch started when this transaction object was created.
     * NOTE(review): the method name contains a typo ("Trasaction"); it is kept
     * as-is for compatibility with existing callers.
     */
    public Stopwatch getTrasactionTimer() {
        return transactionTimer;
    }
protected void checkGetPreconditions(TableReference tableRef) {
if (transactionReadTimeoutMillis != null && System.currentTimeMillis() - timeCreated > transactionReadTimeoutMillis) {
throw new TransactionFailedRetriableException("Transaction timed out.");
}
Preconditions.checkArgument(allowHiddenTableAccess || !AtlasDbConstants.hiddenTables.contains(tableRef));
Preconditions.checkState(state.get() == State.UNCOMMITTED || state.get() == State.COMMITTING,
"Transaction must be uncommitted.");
}
    /**
     * Reads the selected columns of the given rows, overlaying this transaction's
     * buffered local writes on top of postfiltered results from the key value service.
     * Deleted (empty) values are stripped from the returned rows.
     */
    @Override
    public SortedMap<byte[], RowResult<byte[]>> getRows(TableReference tableRef, Iterable<byte[]> rows,
                                                        ColumnSelection columnSelection) {
        Stopwatch watch = Stopwatch.createStarted();
        checkGetPreconditions(tableRef);
        if (Iterables.isEmpty(rows)) {
            return AbstractTransaction.EMPTY_SORTED_ROWS;
        }
        Map<Cell, byte[]> result = Maps.newHashMap();
        Map<Cell, Value> rawResults = Maps.newHashMap(
                keyValueService.getRows(tableRef, rows, columnSelection, getStartTimestamp()));
        SortedMap<Cell, byte[]> writes = writesByTable.get(tableRef);
        if (writes != null) {
            // Overlay any buffered local writes for the requested rows.
            for (byte[] row : rows) {
                extractLocalWritesForRow(result, writes, row);
            }
        }
        // We don't need to do work postfiltering if we have a write locally.
        rawResults.keySet().removeAll(result.keySet());
        SortedMap<byte[], RowResult<byte[]>> results = filterRowResults(tableRef, rawResults, result);
        if (perfLogger.isDebugEnabled()) {
            perfLogger.debug("getRows({}, {} rows) found {} rows, took {} ms",
                    tableRef, Iterables.size(rows), results.size(), watch.elapsed(TimeUnit.MILLISECONDS));
        }
        validateExternalAndCommitLocksIfNecessary(tableRef);
        return results;
    }
    /**
     * Like {@link #getRows(TableReference, Iterable, ColumnSelection)} but reads
     * all columns and does NOT overlay this transaction's buffered local writes.
     */
    @Override
    public SortedMap<byte[], RowResult<byte[]>> getRowsIgnoringLocalWrites(TableReference tableRef, Iterable<byte[]> rows) {
        checkGetPreconditions(tableRef);
        if (Iterables.isEmpty(rows)) {
            return AbstractTransaction.EMPTY_SORTED_ROWS;
        }
        Map<Cell, Value> rawResults = Maps.newHashMap(keyValueService.getRows(tableRef,
                rows,
                ColumnSelection.all(),
                getStartTimestamp()));
        return filterRowResults(tableRef, rawResults, Maps.<Cell, byte[]>newHashMap());
    }
    /**
     * Postfilters {@code rawResults} into {@code result} (which may already hold
     * local writes) and returns the surviving cells grouped by row, with deleted
     * (empty) values removed.
     */
    private SortedMap<byte[], RowResult<byte[]>> filterRowResults(TableReference tableRef,
                                                                  Map<Cell, Value> rawResults,
                                                                  Map<Cell, byte[]> result) {
        getWithPostfiltering(tableRef, rawResults, result, Value.GET_VALUE);
        // Zero-length values are delete markers; strip them before building rows.
        Map<Cell, byte[]> filterDeletedValues = Maps.filterValues(result, Predicates.not(Value.IS_EMPTY));
        return RowResults.viewOfSortedMap(Cells.breakCellsUpByRow(filterDeletedValues));
    }
/**
* This will add any local writes for this row to the result map.
* <p>
* If an empty value was written as a delete, this will also be included in the map.
*/
private void extractLocalWritesForRow(@Output Map<Cell, byte[]> result,
SortedMap<Cell, byte[]> writes, byte[] row) {
Cell lowCell = Cells.createSmallestCellForRow(row);
Iterator<Entry<Cell, byte[]>> it = writes.tailMap(lowCell).entrySet().iterator();
while (it.hasNext()) {
Entry<Cell, byte[]> e = it.next();
Cell cell = e.getKey();
if (!Arrays.equals(row, cell.getRowName())) {
break;
}
result.put(cell, e.getValue());
}
}
    /**
     * Reads the given cells, preferring this transaction's buffered local writes
     * over postfiltered key value service results. Deleted (empty) values are
     * filtered out of the returned map.
     */
    @Override
    public Map<Cell, byte[]> get(TableReference tableRef, Set<Cell> cells) {
        Stopwatch watch = Stopwatch.createStarted();
        checkGetPreconditions(tableRef);
        if (Iterables.isEmpty(cells)) { return ImmutableMap.of(); }
        Map<Cell, byte[]> result = Maps.newHashMap();
        SortedMap<Cell, byte[]> writes = writesByTable.get(tableRef);
        if (writes != null) {
            for (Cell cell : cells) {
                if (writes.containsKey(cell)) {
                    result.put(cell, writes.get(cell));
                }
            }
        }
        // We don't need to read any cells that were written locally.
        result.putAll(getFromKeyValueService(tableRef, Sets.difference(cells, result.keySet())));
        if (perfLogger.isDebugEnabled()) {
            perfLogger.debug("get({}, {} cells) found {} cells (some possibly deleted), took {} ms",
                    tableRef, cells.size(), result.size(), watch.elapsed(TimeUnit.MILLISECONDS));
        }
        validateExternalAndCommitLocksIfNecessary(tableRef);
        return Maps.filterValues(result, Predicates.not(Value.IS_EMPTY));
    }
@Override
public Map<Cell, byte[]> getIgnoringLocalWrites(TableReference tableRef, Set<Cell> cells) {
checkGetPreconditions(tableRef);
if (Iterables.isEmpty(cells)) { return ImmutableMap.of(); }
Map<Cell, byte[]> result = getFromKeyValueService(tableRef, cells);
return Maps.filterValues(result, Predicates.not(Value.IS_EMPTY));
}
/**
* This will load the given keys from the underlying key value service and apply postfiltering
* so we have snapshot isolation. If the value in the key value service is the empty array
* this will be included here and needs to be filtered out.
*/
private Map<Cell, byte[]> getFromKeyValueService(TableReference tableRef, Set<Cell> cells) {
Map<Cell, byte[]> result = Maps.newHashMap();
Map<Cell, Long> toRead = Cells.constantValueMap(cells, getStartTimestamp());
Map<Cell, Value> rawResults = keyValueService.get(tableRef, toRead);
getWithPostfiltering(tableRef, rawResults, result, Value.GET_VALUE);
return result;
}
private static byte[] getNextStartRowName(RangeRequest range, TokenBackedBasicResultsPage<RowResult<Value>, byte[]> prePostFilter) {
if (!prePostFilter.moreResultsAvailable()) {
return range.getEndExclusive();
}
return prePostFilter.getTokenForNextPage();
}
    /**
     * Returns one lazy {@link BatchingVisitable} per requested range. The first
     * page of each range is fetched eagerly (in groups of
     * {@code BATCH_SIZE_GET_FIRST_PAGE} ranges), postfiltered, and merged with
     * local writes; subsequent pages are fetched on demand via
     * {@link #getRange(TableReference, RangeRequest)}.
     */
    @Override
    public Iterable<BatchingVisitable<RowResult<byte[]>>> getRanges(final TableReference tableRef,
                                                                    Iterable<RangeRequest> rangeRequests) {
        checkGetPreconditions(tableRef);
        if (perfLogger.isDebugEnabled()) {
            perfLogger.debug("Passed {} ranges to getRanges({}, {})",
                    Iterables.size(rangeRequests), tableRef, rangeRequests);
        }
        return FluentIterable.from(Iterables.partition(rangeRequests, BATCH_SIZE_GET_FIRST_PAGE))
                .transformAndConcat(new Function<List<RangeRequest>, List<BatchingVisitable<RowResult<byte[]>>>>() {
                    @Override
                    public List<BatchingVisitable<RowResult<byte[]>>> apply(List<RangeRequest> input) {
                        Stopwatch timer = Stopwatch.createStarted();
                        // One KVS round trip fetches the first page of every range in this partition.
                        Map<RangeRequest, TokenBackedBasicResultsPage<RowResult<Value>, byte[]>> firstPages = keyValueService.getFirstBatchForRanges(
                                tableRef,
                                input,
                                getStartTimestamp());
                        validateExternalAndCommitLocksIfNecessary(tableRef);
                        final SortedMap<Cell, byte[]> postFiltered = postFilterPages(
                                tableRef,
                                firstPages.values());
                        List<BatchingVisitable<RowResult<byte[]>>> ret = Lists.newArrayListWithCapacity(input.size());
                        for (final RangeRequest rangeRequest : input) {
                            final TokenBackedBasicResultsPage<RowResult<Value>, byte[]> prePostFilter = firstPages.get(rangeRequest);
                            final byte[] nextStartRowName = getNextStartRowName(
                                    rangeRequest,
                                    prePostFilter);
                            final List<Entry<Cell, byte[]>> mergeIterators = getPostfilteredWithLocalWrites(
                                    tableRef,
                                    postFiltered,
                                    rangeRequest,
                                    prePostFilter.getResults(),
                                    nextStartRowName);
                            ret.add(new AbstractBatchingVisitable<RowResult<byte[]>>() {
                                @Override
                                protected <K extends Exception> void batchAcceptSizeHint(int batchSizeHint,
                                                                                         ConsistentVisitor<RowResult<byte[]>, K> v)
                                        throws K {
                                    checkGetPreconditions(tableRef);
                                    // Visit the pre-fetched first page one row at a time.
                                    final Iterator<RowResult<byte[]>> rowResults = Cells.createRowView(mergeIterators);
                                    while (rowResults.hasNext()) {
                                        if (!v.visit(ImmutableList.of(rowResults.next()))) {
                                            return;
                                        }
                                    }
                                    if ((nextStartRowName.length == 0) || !prePostFilter.moreResultsAvailable()) {
                                        return;
                                    }
                                    // More pages exist: continue from nextStartRowName via getRange.
                                    RangeRequest newRange = rangeRequest.getBuilder()
                                            .startRowInclusive(nextStartRowName)
                                            .build();
                                    getRange(tableRef, newRange)
                                            .batchAccept(batchSizeHint, v);
                                }
                            });
                        }
                        log.info("Processed {} range requests for {} in {}ms",
                                input.size(), tableRef, timer.elapsed(TimeUnit.MILLISECONDS));
                        return ret;
                    }
                });
    }
private void validateExternalAndCommitLocksIfNecessary(TableReference tableRef) {
if (!isValidationNecessary(tableRef)) {
return;
}
throwIfExternalAndCommitLocksNotValid(null);
}
    /**
     * Lock validation is only required for tables swept with the THOROUGH strategy.
     */
    private boolean isValidationNecessary(TableReference tableRef) {
        return sweepStrategyManager.get().get(tableRef) == SweepStrategy.THOROUGH;
    }
    /**
     * Restricts the shared postfiltered map to the cells of this particular
     * range's first page, then merges this transaction's local writes for the
     * range (local writes win) and materializes the result.
     */
    private List<Entry<Cell, byte[]>> getPostfilteredWithLocalWrites(final TableReference tableRef,
                                                                     final SortedMap<Cell, byte[]> postFiltered,
                                                                     final RangeRequest rangeRequest,
                                                                     List<RowResult<Value>> prePostFilter,
                                                                     final byte[] endRowExclusive) {
        Map<Cell, Value> prePostFilterCells = Cells.convertRowResultsToCells(prePostFilter);
        // postFiltered was built from ALL ranges' first pages; keep only the cells
        // that belong to this range's page.
        Collection<Entry<Cell, byte[]>> postFilteredCells = Collections2.filter(
                postFiltered.entrySet(),
                Predicates.compose(
                        Predicates.in(prePostFilterCells.keySet()),
                        MapEntries.<Cell, byte[]> getKeyFunction()));
        Collection<Entry<Cell, byte[]>> localWritesInRange = getLocalWritesForRange(
                tableRef,
                rangeRequest.getStartInclusive(),
                endRowExclusive).entrySet();
        return ImmutableList.copyOf(mergeInLocalWrites(
                postFilteredCells.iterator(),
                localWritesInRange.iterator(),
                rangeRequest.isReverse()));
    }
    /**
     * Returns a lazy visitable over one range, merging postfiltered KVS results
     * with this transaction's local writes. Reverse ranges are not supported.
     */
    @Override
    public BatchingVisitable<RowResult<byte[]>> getRange(final TableReference tableRef,
                                                         final RangeRequest range) {
        checkGetPreconditions(tableRef);
        if (range.isEmptyRange()) {
            return BatchingVisitables.emptyBatchingVisitable();
        }
        return new AbstractBatchingVisitable<RowResult<byte[]>>() {
            @Override
            public <K extends Exception> void batchAcceptSizeHint(int userRequestedSize,
                                                                  ConsistentVisitor<RowResult<byte[]>, K> v)
                    throws K {
                Preconditions.checkState(state.get() == State.UNCOMMITTED,
                        "Transaction must be uncommitted.");
                // An explicit batch hint on the range overrides the caller's size hint.
                if (range.getBatchHint() != null) {
                    userRequestedSize = range.getBatchHint();
                }
                int preFilterBatchSize = getRequestHintToKvStore(userRequestedSize);
                Validate.isTrue(!range.isReverse(), "we currently do not support reverse ranges");
                getBatchingVisitableFromIterator(
                        tableRef,
                        range,
                        userRequestedSize,
                        v,
                        preFilterBatchSize);
            }
        };
    }
    /**
     * Drives the visitor over a merged view of the postfiltered range iterator
     * and this transaction's local writes for the range, closing the underlying
     * KVS iterator when done.
     */
    private <K extends Exception> boolean getBatchingVisitableFromIterator(final TableReference tableRef,
                                                                           RangeRequest range,
                                                                           int userRequestedSize,
                                                                           AbortingVisitor<List<RowResult<byte[]>>, K> v,
                                                                           int preFilterBatchSize) throws K {
        ClosableIterator<RowResult<byte[]>> postFilterIterator =
                postFilterIterator(tableRef, range, preFilterBatchSize, Value.GET_VALUE);
        try {
            Iterator<RowResult<byte[]>> localWritesInRange =
                    Cells.createRowView(getLocalWritesForRange(tableRef, range.getStartInclusive(), range.getEndExclusive()).entrySet());
            Iterator<RowResult<byte[]>> mergeIterators =
                    mergeInLocalWritesRows(postFilterIterator, localWritesInRange, range.isReverse());
            return BatchingVisitableFromIterable.create(mergeIterators).batchAccept(userRequestedSize, v);
        } finally {
            postFilterIterator.close();
        }
    }
protected static int getRequestHintToKvStore(int userRequestedSize) {
if (userRequestedSize == 1) {
// Handle 1 specially because the underlying store could have an optimization for 1
return 1;
}
//TODO: carrino: tune the param here based on how likely we are to post filter
// rows out and have deleted rows
int preFilterBatchSize = userRequestedSize + ((userRequestedSize+9)/10);
if (preFilterBatchSize > AtlasDbPerformanceConstants.MAX_BATCH_SIZE
|| preFilterBatchSize < 0) {
preFilterBatchSize = AtlasDbPerformanceConstants.MAX_BATCH_SIZE;
}
return preFilterBatchSize;
}
    /**
     * Merges postfiltered rows with locally written rows, ordered by row name
     * (reversed for reverse ranges). On a row-name collision the local write
     * wins, and deleted columns / empty rows are filtered out of the result.
     */
    private static Iterator<RowResult<byte[]>> mergeInLocalWritesRows(Iterator<RowResult<byte[]>> postFilterIterator,
                                                                      Iterator<RowResult<byte[]>> localWritesInRange,
                                                                      boolean isReverse) {
        Ordering<RowResult<byte[]>> ordering = RowResult.<byte[]>getOrderingByRowName();
        Iterator<RowResult<byte[]>> mergeIterators = IteratorUtils.mergeIterators(
                postFilterIterator, localWritesInRange,
                isReverse ? ordering.reverse() : ordering,
                new Function<Pair<RowResult<byte[]>, RowResult<byte[]>>, RowResult<byte[]>>() {
                    @Override
                    public RowResult<byte[]> apply(Pair<RowResult<byte[]>,RowResult<byte[]>> from) {
                        // prefer local writes
                        return RowResults.merge(from.lhSide, from.rhSide);
                    }
                });
        return RowResults.filterDeletedColumnsAndEmptyRows(mergeIterators);
    }
    /**
     * Merges postfiltered cells with locally written cells, ordered by cell
     * (reversed for reverse ranges). On a collision the local write (right-hand
     * side) always wins; empty (deleted) values are filtered out afterwards.
     */
    private static Iterator<Entry<Cell, byte[]>> mergeInLocalWrites(Iterator<Entry<Cell, byte[]>> postFilterIterator,
                                                                    Iterator<Entry<Cell, byte[]>> localWritesInRange,
                                                                    boolean isReverse) {
        Ordering<Entry<Cell, byte[]>> ordering = Ordering.natural().onResultOf(MapEntries.<Cell, byte[]>getKeyFunction());
        Iterator<Entry<Cell, byte[]>> mergeIterators = IteratorUtils.mergeIterators(
                postFilterIterator, localWritesInRange,
                isReverse ? ordering.reverse() : ordering,
                new Function<Pair<Entry<Cell, byte[]>, Entry<Cell, byte[]>>, Entry<Cell, byte[]>>() {
                    @Override
                    public Map.Entry<Cell, byte[]> apply(Pair<Map.Entry<Cell, byte[]>, Map.Entry<Cell, byte[]>> from) {
                        // always override their value with written values
                        return from.rhSide;
                    }
                });
        return Iterators.filter(mergeIterators,
                Predicates.compose(Predicates.not(Value.IS_EMPTY), MapEntries.<Cell, byte[]>getValueFunction()));
    }
    /**
     * Returns a closable iterator over the range that postfilters each fetched
     * batch for snapshot isolation, growing/shrinking the underlying KVS batch
     * size based on how many rows survive postfiltering.
     */
    protected <T> ClosableIterator<RowResult<T>> postFilterIterator(final TableReference tableRef,
                                                                    RangeRequest range,
                                                                    int preFilterBatchSize,
                                                                    final Function<Value, T> transformer) {
        final BatchSizeIncreasingRangeIterator results = new BatchSizeIncreasingRangeIterator(tableRef, range, preFilterBatchSize);
        Iterator<Iterator<RowResult<T>>> batchedPostfiltered = new AbstractIterator<Iterator<RowResult<T>>>() {
            @Override
            protected Iterator<RowResult<T>> computeNext() {
                List<RowResult<Value>> batch = results.getBatch();
                if (batch.isEmpty()) {
                    return endOfData();
                }
                SortedMap<Cell, T> postFilter = postFilterRows(tableRef, batch, transformer);
                // Feed back how many rows survived so the batch size can adapt.
                results.markNumRowsNotDeleted(Cells.getRows(postFilter.keySet()).size());
                return Cells.createRowView(postFilter.entrySet());
            }
        };
        final Iterator<RowResult<T>> rows = Iterators.concat(batchedPostfiltered);
        return new ForwardingClosableIterator<RowResult<T>>() {
            @Override
            protected ClosableIterator<RowResult<T>> delegate() {
                return ClosableIterators.wrap(rows);
            }
            @Override
            public void close() {
                if (results != null) {
                    results.close();
                }
            }
        };
    }
    /**
     * Pages through a range while adaptively resizing the KVS batch hint: when
     * most fetched rows are postfiltered away (e.g. deleted), later batches grow
     * so progress is still made; the size is capped at
     * {@code AtlasDbPerformanceConstants.MAX_BATCH_SIZE}.
     */
    private class BatchSizeIncreasingRangeIterator {
        final TableReference tableRef;
        final RangeRequest range;
        final int originalBatchSize;
        // Rows handed back by getBatch() so far.
        long numReturned = 0;
        // Rows that survived postfiltering, reported via markNumRowsNotDeleted().
        long numNotDeleted = 0;
        ClosableIterator<RowResult<Value>> results = null;
        int lastBatchSize;
        // Row name of the last row returned; resume point when re-opening the iterator.
        byte[] lastRow = null;
        public BatchSizeIncreasingRangeIterator(TableReference tableRef,
                                                RangeRequest range,
                                                int originalBatchSize) {
            Validate.isTrue(originalBatchSize > 0);
            this.tableRef = tableRef;
            this.range = range;
            this.originalBatchSize = originalBatchSize;
        }
        public void markNumRowsNotDeleted(int rowsInBatch) {
            numNotDeleted += rowsInBatch;
            AssertUtils.assertAndLog(numNotDeleted <= numReturned, "NotDeleted is bigger than the number of rows we returned.");
        }
        // Scales the batch size by the observed returned/not-deleted ratio.
        int getBestBatchSize() {
            if (numReturned == 0) {
                return originalBatchSize;
            }
            final long batchSize;
            if (numNotDeleted == 0) {
                // If everything we've seen has been deleted, we should be aggressive about getting more rows.
                batchSize = numReturned*4;
            } else {
                batchSize = (long)Math.ceil(originalBatchSize * (numReturned / (double)numNotDeleted));
            }
            return (int)Math.min(batchSize, AtlasDbPerformanceConstants.MAX_BATCH_SIZE);
        }
        private void updateResultsIfNeeded() {
            if (results == null) {
                // First call: open the range with the original batch hint.
                results = keyValueService.getRange(tableRef, range.withBatchHint(originalBatchSize), getStartTimestamp());
                lastBatchSize = originalBatchSize;
                return;
            }
            Validate.isTrue(lastRow != null);
            // If the last row we got was the maximal row, then we are done.
            if (RangeRequests.isTerminalRow(range.isReverse(), lastRow)) {
                results = ClosableIterators.wrap(ImmutableList.<RowResult<Value>>of().iterator());
                return;
            }
            int bestBatchSize = getBestBatchSize();
            // Only close and throw away our old iterator if the batch size has changed by a factor of 2 or more.
            if (bestBatchSize >= lastBatchSize*2 || bestBatchSize <= lastBatchSize/2) {
                byte[] nextStartRow = RangeRequests.getNextStartRow(range.isReverse(), lastRow);
                if (Arrays.equals(nextStartRow, range.getEndExclusive())) {
                    results = ClosableIterators.wrap(ImmutableList.<RowResult<Value>>of().iterator());
                    return;
                }
                RangeRequest.Builder newRange = range.getBuilder();
                newRange.startRowInclusive(nextStartRow);
                newRange.batchHint(bestBatchSize);
                results.close();
                results = keyValueService.getRange(tableRef, newRange.build(), getStartTimestamp());
                lastBatchSize = bestBatchSize;
            }
        }
        // Returns the next batch of raw rows; an empty list signals exhaustion.
        public List<RowResult<Value>> getBatch() {
            updateResultsIfNeeded();
            Validate.isTrue(lastBatchSize > 0);
            ImmutableList<RowResult<Value>> list = ImmutableList.copyOf(Iterators.limit(results, lastBatchSize));
            numReturned += list.size();
            if (!list.isEmpty()) {
                lastRow = list.get(list.size()-1).getRowName();
            }
            return list;
        }
        public void close() {
            if (results != null) {
                results.close();
            }
        }
    }
private ConcurrentNavigableMap<Cell, byte[]> getLocalWrites(TableReference tableRef) {
ConcurrentNavigableMap<Cell, byte[]> writes = writesByTable.get(tableRef);
if (writes == null) {
writes = new ConcurrentSkipListMap<Cell, byte[]>();
ConcurrentNavigableMap<Cell, byte[]> previous = writesByTable.putIfAbsent(tableRef, writes);
if (previous != null) {
writes = previous;
}
}
return writes;
}
    /**
     * This includes deleted writes as zero length byte arrays, be sure to strip them out.
     * <p>
     * Returns the local writes bounded to [startRow, endRow); a zero-length bound
     * means "unbounded" on that side.
     */
    private SortedMap<Cell, byte[]> getLocalWritesForRange(TableReference tableRef, byte[] startRow, byte[] endRow) {
        SortedMap<Cell, byte[]> writes = getLocalWrites(tableRef);
        if (startRow.length != 0) {
            writes = writes.tailMap(Cells.createSmallestCellForRow(startRow));
        }
        if (endRow.length != 0) {
            writes = writes.headMap(Cells.createSmallestCellForRow(endRow));
        }
        return writes;
    }
    /**
     * Flattens the first pages of several ranges into one list and postfilters
     * them together in a single pass.
     */
    private SortedMap<Cell, byte[]> postFilterPages(TableReference tableRef,
            Iterable<TokenBackedBasicResultsPage<RowResult<Value>, byte[]>> rangeRows) {
        List<RowResult<Value>> results = Lists.newArrayList();
        for (TokenBackedBasicResultsPage<RowResult<Value>, byte[]> page : rangeRows) {
            results.addAll(page.getResults());
        }
        return postFilterRows(tableRef, results, Value.GET_VALUE);
    }
    /**
     * Flattens the given raw rows into cells and postfilters them for snapshot
     * isolation, returning the surviving cells (transformed) sorted by cell.
     */
    private <T> SortedMap<Cell, T> postFilterRows(TableReference tableRef,
                                                  List<RowResult<Value>> rangeRows,
                                                  Function<Value, T> transformer) {
        Preconditions.checkState(state.get() == State.UNCOMMITTED, "Transaction must be uncommitted.");
        if (rangeRows.isEmpty()) {
            return ImmutableSortedMap.of();
        }
        Map<Cell, Value> rawResults = Maps.newHashMapWithExpectedSize(estimateSize(rangeRows));
        for (RowResult<Value> rowResult : rangeRows) {
            for (Map.Entry<byte[], Value> e : rowResult.getColumns().entrySet()) {
                rawResults.put(Cell.create(rowResult.getRowName(), e.getKey()), e.getValue());
            }
        }
        SortedMap<Cell, T> postFilter = Maps.newTreeMap();
        getWithPostfiltering(tableRef, rawResults, postFilter, transformer);
        return postFilter;
    }
private int estimateSize(List<RowResult<Value>> rangeRows) {
int estimatedSize = 0;
for (RowResult<Value> rowResult : rangeRows) {
estimatedSize += rowResult.getColumns().size();
}
return estimatedSize;
}
    /**
     * Postfilters raw KVS results into {@code results}, looping until every key
     * has either been accepted or discarded (reloads happen for values written
     * by concurrent/failed transactions). Also logs when a single get returns a
     * suspiciously large number of bytes.
     */
    private <T> void getWithPostfiltering(TableReference tableRef,
                                          Map<Cell, Value> rawResults,
                                          @Output Map<Cell, T> results,
                                          Function<Value, T> transformer) {
        long bytes = 0;
        for (Map.Entry<Cell, Value> e : rawResults.entrySet()) {
            bytes += e.getValue().getContents().length + Cells.getApproxSizeOfCell(e.getKey());
        }
        if (bytes > TransactionConstants.ERROR_LEVEL_FOR_QUEUED_BYTES && !AtlasDbConstants.TABLES_KNOWN_TO_BE_POORLY_DESIGNED.contains(tableRef)) {
            log.error("A single get had a lot of bytes: " + bytes + " for table " + tableRef.getQualifiedName() + ". "
                    + "The number of results was " + rawResults.size() + ". "
                    + "The first 10 results were " + Iterables.limit(rawResults.entrySet(), 10) + ". "
                    + "This can potentially cause out-of-memory errors.",
                    new RuntimeException("This exception and stack trace are provided for debugging purposes."));
        } else if (bytes > TransactionConstants.WARN_LEVEL_FOR_QUEUED_BYTES && log.isWarnEnabled()) {
            log.warn("A single get had quite a few bytes: " + bytes + " for table " + tableRef.getQualifiedName() + ". "
                    + "The number of results was " + rawResults.size() + ". "
                    + "The first 10 results were " + Iterables.limit(rawResults.entrySet(), 10) + ". ",
                    new RuntimeException("This exception and stack trace are provided for debugging purposes."));
        }
        if (isTempTable(tableRef) || (AtlasDbConstants.SKIP_POSTFILTER_TABLES.contains(tableRef) && allowHiddenTableAccess)) {
            // If we are reading from a temp table, we can just bypass postfiltering
            // or skip postfiltering if reading the transaction or namespace table from atlasdb shell
            for (Map.Entry<Cell, Value> e : rawResults.entrySet()) {
                results.put(e.getKey(), transformer.apply(e.getValue()));
            }
            return;
        }
        // Each pass resolves some keys and returns the keys that must be re-read
        // at an earlier timestamp; loop until nothing is left.
        while (!rawResults.isEmpty()) {
            rawResults = getWithPostfilteringInternal(tableRef, rawResults, results, transformer);
        }
    }
    /**
     * This will return all the keys that still need to be postfiltered. It will output properly
     * postfiltered keys to the results output param.
     * <p>
     * A value is accepted only if its writer committed before our start timestamp.
     * Values from uncommitted/failed writers are rolled back (when allowed) and
     * re-read; values committed after our start timestamp are re-read at the
     * earlier timestamp.
     */
    private <T> Map<Cell, Value> getWithPostfilteringInternal(TableReference tableRef,
                                                              Map<Cell, Value> rawResults,
                                                              @Output Map<Cell, T> results,
                                                              Function<Value, T> transformer) {
        Set<Long> startTimestampsForValues = getStartTimestampsForValues(rawResults.values());
        Map<Long, Long> commitTimestamps = getCommitTimestamps(tableRef, startTimestampsForValues, true);
        Map<Cell, Long> keysToReload = Maps.newHashMapWithExpectedSize(0);
        Map<Cell, Long> keysToDelete = Maps.newHashMapWithExpectedSize(0);
        for (Map.Entry<Cell, Value> e : rawResults.entrySet()) {
            Cell key = e.getKey();
            Value value = e.getValue();
            if (value.getTimestamp() == Value.INVALID_VALUE_TIMESTAMP) {
                // This means that this transaction started too long ago. When we do garbage collection,
                // we clean up old values, and this transaction started at a timestamp before the garbage collection.
                switch (getReadSentinelBehavior()) {
                    case IGNORE:
                        // Silently drop the sentinel; the cell simply has no visible value.
                        break;
                    case THROW_EXCEPTION:
                        throw new TransactionFailedRetriableException("Tried to read a value that has been deleted. " +
                                " This can be caused by hard delete transactions using the type " +
                                TransactionType.AGGRESSIVE_HARD_DELETE +
                                ". It can also be caused by transactions taking too long, or" +
                                " its locks expired. Retrying it should work.");
                    default:
                        throw new IllegalStateException("Invalid read sentinel behavior " + getReadSentinelBehavior());
                }
            } else {
                Long theirCommitTimestamp = commitTimestamps.get(value.getTimestamp());
                if (theirCommitTimestamp == null || theirCommitTimestamp == TransactionConstants.FAILED_COMMIT_TS) {
                    keysToReload.put(key, value.getTimestamp());
                    if (shouldDeleteAndRollback()) {
                        // This is from a failed transaction so we can roll it back and then reload it.
                        keysToDelete.put(key, value.getTimestamp());
                    }
                } else if (theirCommitTimestamp > getStartTimestamp()) {
                    // The value's commit timestamp is after our start timestamp.
                    // This means the value is from a transaction which committed
                    // after our transaction began. We need to try reading at an
                    // earlier timestamp.
                    keysToReload.put(key, value.getTimestamp());
                } else {
                    // The value has a commit timestamp less than our start timestamp, and is visible and valid.
                    if (value.getContents().length != 0) {
                        results.put(key, transformer.apply(value));
                    }
                }
            }
        }
        if (!keysToDelete.isEmpty()) {
            // if we can't roll back the failed transactions, we should just try again
            if (!rollbackFailedTransactions(tableRef, keysToDelete, commitTimestamps, defaultTransactionService)) {
                return rawResults;
            }
        }
        if (!keysToReload.isEmpty()) {
            // Re-read the contested keys; the next pass re-evaluates their writers.
            Map<Cell, Value> nextRawResults = keyValueService.get(tableRef, keysToReload);
            return nextRawResults;
        } else {
            return ImmutableMap.of();
        }
    }
    /**
     * This is protected to allow for different post filter behavior.
     * <p>
     * Returns whether values from failed transactions should be rolled back
     * during postfiltering; requires a lock service to be present.
     */
    protected boolean shouldDeleteAndRollback() {
        Validate.notNull(lockService, "if we don't have a valid lock server we can't roll back transactions");
        return true;
    }
    /**
     * Deletes are modeled as writes of zero-length values with no TTL.
     */
    @Override
    final public void delete(TableReference tableRef, Set<Cell> cells) {
        put(tableRef, Cells.constantValueMap(cells, PtBytes.EMPTY_BYTE_ARRAY), Cell.INVALID_TTL, Cell.INVALID_TTL_TYPE);
    }
    /**
     * Buffers the given non-expiring writes. Zero-byte values are rejected
     * because they are indistinguishable from delete markers; use
     * {@link #delete(TableReference, Set)} instead.
     */
    @Override
    public void put(TableReference tableRef, Map<Cell, byte[]> values) {
        ensureNoEmptyValues(values);
        put(tableRef, values, Cell.INVALID_TTL, Cell.INVALID_TTL_TYPE);
    }
private void ensureNoEmptyValues(Map<Cell, byte[]> values) {
for (Entry<Cell, byte[]> cellEntry : values.entrySet()) {
if (cellEntry.getValue().length == 0) {
throw new IllegalArgumentException("AtlasDB does not currently support inserting empty (zero-byte) values.");
}
}
}
    /**
     * Buffers writes for commit, optionally attaching a TTL to every cell.
     * Writes to hidden tables are rejected, and temp tables may only be mixed
     * with read-only usage. The numWriters counter lets commit() detect
     * in-flight puts racing with the commit.
     */
    public void put(TableReference tableRef, Map<Cell, byte[]> values, long ttlDuration, TimeUnit ttlUnit) {
        Preconditions.checkArgument(!AtlasDbConstants.hiddenTables.contains(tableRef));
        // todo (clockfort) also check if valid table for TTL
        if (ttlDuration != Cell.INVALID_TTL && ttlUnit != Cell.INVALID_TTL_TYPE) {
            values = createExpiringValues(values, ttlDuration, ttlUnit);
        }
        if (!validConflictDetection(tableRef)) {
            // The table may have been created after this manager's last refresh; retry once.
            conflictDetectionManager.recompute();
            Preconditions.checkArgument(validConflictDetection(tableRef),
                    "Not a valid table for this transaction. Make sure this table name has a namespace: " + tableRef);
        }
        Validate.isTrue(isTempTable(tableRef) || getAllTempTables().isEmpty(),
                "Temp tables may only be used by read only transactions.");
        if (values.isEmpty()) {
            return;
        }
        numWriters.incrementAndGet();
        try {
            // We need to check the status after incrementing writers to ensure that we fail if we are committing.
            Preconditions.checkState(state.get() == State.UNCOMMITTED, "Transaction must be uncommitted.");
            ConcurrentNavigableMap<Cell, byte[]> writes = getLocalWrites(tableRef);
            if (isTempTable(tableRef)) {
                putTempTableWrites(tableRef, values, writes);
            } else {
                putWritesAndLogIfTooLarge(values, writes);
            }
        } finally {
            numWriters.decrementAndGet();
        }
    }
private Map<Cell, byte[]> createExpiringValues(Map<Cell, byte[]> values,
long ttlDuration,
TimeUnit ttlUnit) {
Map<Cell, byte[]> expiringValues = Maps.newHashMapWithExpectedSize(values.size());
for (Entry<Cell, byte[]> cellEntry : values.entrySet()) {
Cell expiringCell = Cell.create(
cellEntry.getKey().getRowName(),
cellEntry.getKey().getColumnName(),
ttlDuration, ttlUnit);
expiringValues.put(expiringCell, cellEntry.getValue());
}
return expiringValues;
}
private boolean validConflictDetection(TableReference tableRef) {
if (isTempTable(tableRef)) {
return true;
}
return conflictDetectionManager.isEmptyOrContainsTable(tableRef);
}
private void putWritesAndLogIfTooLarge(Map<Cell, byte[]> values, SortedMap<Cell, byte[]> writes) {
for (Map.Entry<Cell, byte[]> e : values.entrySet()) {
byte[] val = e.getValue();
if (val == null) {
val = PtBytes.EMPTY_BYTE_ARRAY;
}
Cell cell = e.getKey();
if (writes.put(cell, val) == null) {
long toAdd = val.length + Cells.getApproxSizeOfCell(cell);
long newVal = byteCount.addAndGet(toAdd);
if (newVal >= TransactionConstants.WARN_LEVEL_FOR_QUEUED_BYTES
&& newVal - toAdd < TransactionConstants.WARN_LEVEL_FOR_QUEUED_BYTES) {
log.warn("A single transaction has put quite a few bytes: " + newVal,
new RuntimeException("This exception and stack trace are provided for debugging purposes."));
}
if (newVal >= TransactionConstants.ERROR_LEVEL_FOR_QUEUED_BYTES
&& newVal - toAdd < TransactionConstants.ERROR_LEVEL_FOR_QUEUED_BYTES) {
log.warn("A single transaction has put too many bytes: " + newVal + ". This can potentially cause" +
"out-of-memory errors.",
new RuntimeException("This exception and stack trace are provided for debugging purposes."));
}
}
}
}
    /**
     * Aborts the transaction. Idempotent once aborted; throws if the transaction
     * has already moved past UNCOMMITTED (e.g. is committing or committed).
     */
    @Override
    public void abort() {
        if (state.get() == State.ABORTED) {
            return;
        }
        // CAS loop: retry until we win the UNCOMMITTED -> ABORTED transition or
        // the checkState observes a non-UNCOMMITTED state and throws.
        while (true) {
            Preconditions.checkState(state.get() == State.UNCOMMITTED, "Transaction must be uncommitted.");
            if (state.compareAndSet(State.UNCOMMITTED, State.ABORTED)) {
                dropTempTables();
                if (hasWrites()) {
                    throwIfExternalAndCommitLocksNotValid(null);
                }
                return;
            }
        }
    }
    /**
     * Returns whether this transaction has been aborted.
     */
    @Override
    public boolean isAborted() {
        return state.get() == State.ABORTED;
    }
@Override
public boolean isUncommitted() {
return state.get() == State.UNCOMMITTED;
}
///////////////////////////////////////////////////////////////////////////
/// Committing
///////////////////////////////////////////////////////////////////////////
// Commits this transaction using the default transaction service.
@Override
public void commit() {
    commit(defaultTransactionService);
}
/**
 * Commits this transaction through the given transaction service. Idempotent for an
 * already-committed transaction; throws if the transaction previously failed.
 */
@Override
public void commit(TransactionService transactionService) {
    if (state.get() == State.COMMITTED) {
        return;
    }
    if (state.get() == State.FAILED) {
        throw new IllegalStateException("this transaction has already failed");
    }
    // Transition UNCOMMITTED -> COMMITTING via CAS; any other state is a caller error.
    while (true) {
        Preconditions.checkState(state.get() == State.UNCOMMITTED, "Transaction must be uncommitted.");
        if (state.compareAndSet(State.UNCOMMITTED, State.COMMITTING)) {
            break;
        }
    }
    // This must be done BEFORE we commit (otherwise if the system goes down after
    // we commit but before we queue cells for scrubbing, then we will lose track of
    // which cells we need to scrub)
    if (getTransactionType() == TransactionType.AGGRESSIVE_HARD_DELETE ||
            getTransactionType() == TransactionType.HARD_DELETE) {
        cleaner.queueCellsForScrubbing(getCellsToQueueForScrubbing(), getStartTimestamp());
    }
    boolean success = false;
    try {
        if (numWriters.get() > 0) {
            // After we set state to committing we need to make sure no one is still writing.
            throw new IllegalStateException("Cannot commit while other threads are still calling put.");
        }
        if (!getAllTempTables().isEmpty()) {
            dropTempTables();
            Validate.isTrue(getAllTempTables().containsAll(writesByTable.keySet()),
                    "Temp tables may only be used by read only transactions.");
        } else {
            checkConstraints();
            commitWrites(transactionService);
        }
        // Fixed typo in the log message ("Commited" -> "Committed").
        // NOTE(review): getTrasactionTimer() is misspelled at its declaration site
        // elsewhere in this class; renaming must happen there, not here.
        perfLogger.debug("Committed transaction {} in {}ms",
                getStartTimestamp(),
                getTrasactionTimer().elapsed(TimeUnit.MILLISECONDS));
        success = true;
    } finally {
        // Once we are in state committing, we need to try/finally to set the state to a terminal state.
        state.set(success ? State.COMMITTED : State.FAILED);
    }
}
/**
 * Runs every registered constraint check against this transaction's pending writes and,
 * depending on the checking mode, either throws or logs the collected violations.
 */
private void checkConstraints() {
    List<String> violations = Lists.newArrayList();
    for (Map.Entry<TableReference, ConstraintCheckable> entry : constraintsByTableName.entrySet()) {
        SortedMap<Cell, byte[]> tableWrites = writesByTable.get(entry.getKey());
        if (tableWrites == null) {
            continue;
        }
        violations.addAll(entry.getValue().findConstraintFailures(tableWrites, this, constraintCheckingMode));
    }
    if (violations.isEmpty()) {
        return;
    }
    AtlasDbConstraintException failure = new AtlasDbConstraintException(violations);
    if (constraintCheckingMode.shouldThrowException()) {
        throw failure;
    }
    constraintLogger.error("Constraint failure on commit.", failure);
}
/**
 * Runs the commit protocol for this transaction's buffered writes: acquire commit locks,
 * check for write conflicts, write values at the start timestamp, obtain a fresh commit
 * timestamp, re-validate locks, then record the commit timestamp. Statement order here is
 * part of the correctness argument — do not reorder.
 */
private void commitWrites(TransactionService transactionService) {
    if (!hasWrites()) {
        // Read-only transactions need no commit work at all.
        return;
    }
    Stopwatch watch = Stopwatch.createStarted();
    LockRefreshToken commitLocksToken = acquireLocksForCommit();
    long millisForLocks = watch.elapsed(TimeUnit.MILLISECONDS);
    try {
        watch.reset().start();
        throwIfConflictOnCommit(commitLocksToken, transactionService);
        long millisCheckingForConflicts = watch.elapsed(TimeUnit.MILLISECONDS);
        watch.reset().start();
        // Values are written at the transaction's START timestamp; they only become
        // visible to readers once the commit timestamp is recorded below.
        keyValueService.multiPut(writesByTable, getStartTimestamp());
        long millisForWrites = watch.elapsed(TimeUnit.MILLISECONDS);
        // Now that all writes are done, get the commit timestamp
        // We must do this before we check that our locks are still valid to ensure that
        // other transactions that will hold these locks are sure to have start
        // timestamps after our commit timestamp.
        long commitTimestamp = timestampService.getFreshTimestamp();
        commitTsForScrubbing = commitTimestamp;
        // punch on commit so that if hard delete is the only thing happening on a system,
        // we won't block forever waiting for the unreadable timestamp to advance past the
        // scrub timestamp (same as the hard delete transaction's start timestamp)
        watch.reset().start();
        cleaner.punch(commitTimestamp);
        long millisForPunch = watch.elapsed(TimeUnit.MILLISECONDS);
        // Hook for serializable subclasses to detect read/write conflicts; no-op here.
        throwIfReadWriteConflictForSerializable(commitTimestamp);
        // Verify that our locks are still valid before we actually commit;
        // this check is required by the transaction protocol for correctness
        throwIfExternalAndCommitLocksNotValid(commitLocksToken);
        watch.reset().start();
        putCommitTimestamp(commitTimestamp, commitLocksToken, transactionService);
        long millisForCommitTs = watch.elapsed(TimeUnit.MILLISECONDS);
        // Locks expiring after the commit point was durably recorded is benign but rare
        // enough to be worth surfacing loudly.
        Set<LockRefreshToken> expiredLocks = refreshExternalAndCommitLocks(commitLocksToken);
        if (!expiredLocks.isEmpty()) {
            String errorMessage =
                "This isn't a bug but it should happen very infrequently. Required locks are no longer" +
                " valid but we have already committed successfully. " + getExpiredLocksErrorString(commitLocksToken, expiredLocks);
            log.error(errorMessage, new TransactionFailedRetriableException(errorMessage));
        }
        long millisSinceCreation = System.currentTimeMillis() - timeCreated;
        if (perfLogger.isDebugEnabled()) {
            perfLogger.debug("Committed {} bytes with locks, start ts {}, commit ts {}, " +
                    "acquiring locks took {} ms, checking for conflicts took {} ms, " +
                    "writing took {} ms, punch took {} ms, putCommitTs took {} ms, " +
                    "total time since tx creation {} ms, tables: {}.",
                    byteCount.get(), getStartTimestamp(),
                    commitTimestamp, millisForLocks, millisCheckingForConflicts, millisForWrites,
                    millisForPunch, millisForCommitTs, millisSinceCreation, writesByTable.keySet());
        }
    } finally {
        lockService.unlock(commitLocksToken);
    }
}
/**
 * Hook for serializable transactions: subclasses override this to detect read/write
 * conflicts at commit time. The base (snapshot-isolation) implementation does nothing.
 */
protected void throwIfReadWriteConflictForSerializable(long commitTimestamp) {
    // This is for overriding to get serializable transactions
}
/** @return true iff any table has at least one buffered write. */
private boolean hasWrites() {
    for (SortedMap<?, ?> tableWrites : writesByTable.values()) {
        if (!tableWrites.isEmpty()) {
            return true;
        }
    }
    return false;
}
/**
 * Looks up the conflict handler configured for a table, defaulting to write/write retry
 * when no per-table handlers have been configured at all.
 */
protected ConflictHandler getConflictHandlerForTable(TableReference tableRef) {
    Map<TableReference, ConflictHandler> handlers = conflictDetectionManager.get();
    return handlers.isEmpty()
            ? ConflictHandler.RETRY_ON_WRITE_WRITE
            : handlers.get(tableRef);
}
/** Builds a diagnostic string describing held and expired locks for error messages. */
private String getExpiredLocksErrorString(@Nullable LockRefreshToken commitLocksToken,
        Set<LockRefreshToken> expiredLocks) {
    StringBuilder message = new StringBuilder();
    message.append("The following external locks were required: ").append(externalLocksTokens);
    message.append("; the following commit locks were required: ").append(commitLocksToken);
    message.append("; the following locks are no longer valid: ").append(expiredLocks);
    return message.toString();
}
/**
 * Refreshes the external (and optional commit) locks and throws a logged
 * TransactionLockTimeoutException if any of them could not be refreshed.
 */
private void throwIfExternalAndCommitLocksNotValid(@Nullable LockRefreshToken commitLocksToken) {
    Set<LockRefreshToken> expiredLocks = refreshExternalAndCommitLocks(commitLocksToken);
    if (expiredLocks.isEmpty()) {
        return;
    }
    String errorMessage =
            "Required locks are no longer valid. " + getExpiredLocksErrorString(commitLocksToken, expiredLocks);
    TransactionLockTimeoutException timeout = new TransactionLockTimeoutException(errorMessage);
    log.error(errorMessage, timeout);
    throw timeout;
}
/**
 * Refreshes this transaction's external lock tokens plus, when non-null, the commit lock.
 *
 * @param commitLocksToken commit lock to refresh alongside the external locks, or null
 * @return set of locks that could not be refreshed
 */
private Set<LockRefreshToken> refreshExternalAndCommitLocks(@Nullable LockRefreshToken commitLocksToken) {
    ImmutableSet.Builder<LockRefreshToken> tokens = ImmutableSet.builder();
    tokens.addAll(externalLocksTokens);
    if (commitLocksToken != null) {
        tokens.add(commitLocksToken);
    }
    ImmutableSet<LockRefreshToken> toRefresh = tokens.build();
    if (toRefresh.isEmpty()) {
        return ImmutableSet.of();
    }
    Set<LockRefreshToken> stillValid = lockService.refreshLockRefreshTokens(toRefresh);
    return Sets.difference(toRefresh, stillValid).immutableCopy();
}
/**
 * Make sure we have all the rows we are checking already locked before calling this.
 */
protected void throwIfConflictOnCommit(LockRefreshToken commitLocksToken, TransactionService transactionService) throws TransactionConflictException {
    for (Entry<TableReference, ConcurrentNavigableMap<Cell, byte[]>> tableWrites : writesByTable.entrySet()) {
        TableReference tableRef = tableWrites.getKey();
        throwIfWriteAlreadyCommitted(
                tableRef,
                tableWrites.getValue(),
                getConflictHandlerForTable(tableRef),
                commitLocksToken,
                transactionService);
    }
}
/**
 * Checks this transaction's writes to one table against writes committed concurrently and
 * applies the table's conflict handler: value-changed detection, write/write conflict
 * detection, or nothing at all for IGNORE_ALL.
 */
protected void throwIfWriteAlreadyCommitted(TableReference tableRef,
        Map<Cell, byte[]> writes,
        ConflictHandler conflictHandler,
        LockRefreshToken commitLocksToken,
        TransactionService transactionService)
        throws TransactionConflictException {
    if (writes.isEmpty() || conflictHandler == ConflictHandler.IGNORE_ALL) {
        return;
    }
    Set<CellConflict> spanningWrites = Sets.newHashSet();
    Set<CellConflict> dominatingWrites = Sets.newHashSet();
    // Start by asking for the latest committed write to every cell we wrote.
    Map<Cell, Long> keysToLoad = Maps.asMap(writes.keySet(), Functions.constant(Long.MAX_VALUE));
    // Each pass may roll back uncommitted writers and returns the keys that must be re-read.
    while (!keysToLoad.isEmpty()) {
        keysToLoad = detectWriteAlreadyCommittedInternal(tableRef, keysToLoad, spanningWrites, dominatingWrites, transactionService);
    }
    if (conflictHandler == ConflictHandler.RETRY_ON_VALUE_CHANGED) {
        throwIfValueChangedConflict(tableRef, writes, spanningWrites, dominatingWrites, commitLocksToken);
    } else if (conflictHandler == ConflictHandler.RETRY_ON_WRITE_WRITE
            || conflictHandler == ConflictHandler.RETRY_ON_WRITE_WRITE_CELL
            || conflictHandler == ConflictHandler.SERIALIZABLE) {
        // Any spanning or dominating write is a conflict for write/write detection.
        if (!spanningWrites.isEmpty() || !dominatingWrites.isEmpty()) {
            throw TransactionConflictException.create(tableRef, getStartTimestamp(), spanningWrites,
                    dominatingWrites, System.currentTimeMillis() - timeCreated);
        }
    } else {
        throw new IllegalArgumentException("Unknown conflictHandler type: " + conflictHandler);
    }
}
/**
 * This will throw if we have a value changed conflict. This means that either we changed the
 * value and anyone did a write after our start timestamp, or we just touched the value (put the
 * same value as before) and a changed value was written after our start time.
 */
private void throwIfValueChangedConflict(TableReference table,
        Map<Cell, byte[]> writes,
        Set<CellConflict> spanningWrites,
        Set<CellConflict> dominatingWrites,
        LockRefreshToken commitLocksToken) {
    Map<Cell, CellConflict> cellToConflict = Maps.newHashMap();
    Map<Cell, Long> cellToTs = Maps.newHashMap();
    for (CellConflict c : Sets.union(spanningWrites, dominatingWrites)) {
        cellToConflict.put(c.cell, c);
        // theirStart + 1 so the read below sees exactly the conflicting writer's value.
        cellToTs.put(c.cell, c.theirStart + 1);
    }
    // Our snapshot view of the cells, excluding our own buffered writes.
    Map<Cell, byte[]> oldValues = getIgnoringLocalWrites(table, cellToTs.keySet());
    Map<Cell, Value> conflictingValues = keyValueService.get(table, cellToTs);
    Set<Cell> conflictingCells = Sets.newHashSet();
    for (Entry<Cell, Long> cellEntry : cellToTs.entrySet()) {
        Cell cell = cellEntry.getKey();
        if (!writes.containsKey(cell)) {
            // Conflicts can only arise on cells we actually wrote; anything else is a bug.
            Validate.isTrue(false, "Missing write for cell: " + cellToConflict.get(cell)
                    + " for table " + table);
        }
        if (!conflictingValues.containsKey(cell)) {
            // This error case could happen if our locks expired.
            throwIfExternalAndCommitLocksNotValid(commitLocksToken);
            Validate.isTrue(false, "Missing conflicting value for cell: " + cellToConflict.get(cell)
                    + " for table " + table);
        }
        if (conflictingValues.get(cell).getTimestamp() != (cellEntry.getValue() - 1)) {
            // This error case could happen if our locks expired.
            throwIfExternalAndCommitLocksNotValid(commitLocksToken);
            Validate.isTrue(false, "Wrong timestamp for cell in table " + table
                    + " Expected: " + cellToConflict.get(cell)
                    + " Actual: " + conflictingValues.get(cell));
        }
        @Nullable byte[] oldVal = oldValues.get(cell);
        byte[] writeVal = writes.get(cell);
        byte[] conflictingVal = conflictingValues.get(cell).getContents();
        // A real conflict requires that we changed the value relative to our snapshot, OR
        // that the concurrent writer's value differs from what we are about to write.
        if (!Transactions.cellValuesEqual(oldVal, writeVal)
                || !Arrays.equals(writeVal, conflictingVal)) {
            conflictingCells.add(cell);
        } else if (log.isInfoEnabled()) {
            log.info("Another transaction committed to the same cell before us but " +
                    "their value was the same. " + "Cell: " + cell + " Table: " + table);
        }
    }
    if (conflictingCells.isEmpty()) {
        return;
    }
    // Only report the conflicts whose cells actually conflicted by value.
    Predicate<CellConflict> conflicting = Predicates.compose(Predicates.in(conflictingCells), CellConflict.getCellFunction());
    throw TransactionConflictException.create(table,
            getStartTimestamp(),
            Sets.filter(spanningWrites, conflicting),
            Sets.filter(dominatingWrites, conflicting),
            System.currentTimeMillis() - timeCreated);
}
/**
 * This will return the set of keys that need to be retried. It will output any conflicts
 * it finds into the output params.
 */
protected Map<Cell, Long> detectWriteAlreadyCommittedInternal(TableReference tableRef,
        Map<Cell, Long> keysToLoad,
        @Output Set<CellConflict> spanningWrites,
        @Output Set<CellConflict> dominatingWrites,
        TransactionService transactionService) {
    Map<Cell, Long> rawResults = keyValueService.getLatestTimestamps(tableRef, keysToLoad);
    Map<Long, Long> commitTimestamps = getCommitTimestamps(tableRef, rawResults.values(), false);
    Map<Cell, Long> keysToDelete = Maps.newHashMapWithExpectedSize(0);
    for (Map.Entry<Cell, Long> e : rawResults.entrySet()) {
        Cell key = e.getKey();
        long theirStartTimestamp = e.getValue();
        AssertUtils.assertAndLog(theirStartTimestamp != getStartTimestamp(),
                "Timestamp reuse is bad:%d", getStartTimestamp());
        Long theirCommitTimestamp = commitTimestamps.get(theirStartTimestamp);
        if (theirCommitTimestamp == null
                || theirCommitTimestamp == TransactionConstants.FAILED_COMMIT_TS) {
            // The value has no commit timestamp or was explicitly rolled back.
            // This means the value is garbage from a transaction which didn't commit.
            keysToDelete.put(key, theirStartTimestamp);
            continue;
        }
        AssertUtils.assertAndLog(theirCommitTimestamp != getStartTimestamp(),
                "Timestamp reuse is bad:%d", getStartTimestamp());
        if (theirStartTimestamp > getStartTimestamp()) {
            // The other transaction started entirely after us: a "dominating" write.
            dominatingWrites.add(Cells.createConflictWithMetadata(
                    keyValueService,
                    tableRef,
                    key,
                    theirStartTimestamp,
                    theirCommitTimestamp));
        } else if (theirCommitTimestamp > getStartTimestamp()) {
            // The other transaction started before us but committed after: a "spanning" write.
            spanningWrites.add(Cells.createConflictWithMetadata(
                    keyValueService,
                    tableRef,
                    key,
                    theirStartTimestamp,
                    theirCommitTimestamp));
        }
    }
    if (!keysToDelete.isEmpty()) {
        if (!rollbackFailedTransactions(tableRef, keysToDelete, commitTimestamps, transactionService)) {
            // If we can't roll back the failed transactions, we should just try again.
            return keysToLoad;
        }
    }
    // Once we successfully rollback and delete these cells we need to reload them.
    return keysToDelete;
}
/**
 * This will attempt to rollback the passed transactions. If all are rolled back correctly this
 * method will also delete the values for the transactions that have been rolled back.
 * @return false if we cannot roll back the failed transactions because someone beat us to it.
 */
private boolean rollbackFailedTransactions(TableReference tableRef,
        Map<Cell, Long> keysToDelete, Map<Long, Long> commitTimestamps, TransactionService transactionService) {
    for (long startTs : Sets.newHashSet(keysToDelete.values())) {
        if (commitTimestamps.get(startTs) == null) {
            log.warn("Rolling back transaction: " + startTs);
            if (!rollbackOtherTransaction(startTs, transactionService)) {
                // Someone else recorded a commit state for this writer first; retry upstream.
                return false;
            }
        } else {
            // Already rolled back (explicitly failed) by us or by someone else.
            Validate.isTrue(commitTimestamps.get(startTs) == TransactionConstants.FAILED_COMMIT_TS);
        }
    }
    try {
        log.warn("For table: " + tableRef + " we are deleting values of an uncommitted transaction: " + keysToDelete);
        keyValueService.delete(tableRef, Multimaps.forMap(keysToDelete));
    } catch (RuntimeException e) {
        // Best-effort cleanup: correctness only needs the rollback record written above.
        String msg = "This isn't a bug but it should be infrequent if all nodes of your KV service are running. "
                + "Delete has stronger consistency semantics than read/write and must talk to all nodes "
                + "instead of just talking to a quorum of nodes. "
                // Fixed missing space between "table" and the table name in the message.
                + "Failed to delete keys for table " + tableRef
                + " from an uncommitted transaction: " + keysToDelete;
        log.error(msg, e);
    }
    return true;
}
/**
 * @return true if the other transaction was rolled back
 */
private boolean rollbackOtherTransaction(long startTs, TransactionService transactionService) {
    try {
        // Atomically mark the other transaction as failed unless it already has a commit state.
        transactionService.putUnlessExists(startTs, TransactionConstants.FAILED_COMMIT_TS);
    } catch (KeyAlreadyExistsException e) {
        String msg = "Two transactions tried to roll back someone else's request with start: " + startTs;
        log.error("This isn't a bug but it should be very infrequent. " + msg, new TransactionFailedRetriableException(msg, e));
        return false;
    }
    return true;
}
///////////////////////////////////////////////////////////////////////////
/// Locking
///////////////////////////////////////////////////////////////////////////
/**
 * This method should acquire any locks needed to do proper concurrency control at commit time.
 */
protected LockRefreshToken acquireLocksForCommit() {
    LockRequest request = LockRequest.builder(getLocksForWrites()).build();
    try {
        return lockService.lock(LockClient.ANONYMOUS.getClientId(), request);
    } catch (InterruptedException e) {
        throw Throwables.throwUncheckedException(e);
    }
}
/**
 * Builds the lock set needed at commit time: a write lock on this transaction's own row of
 * the transaction table plus, for every written table, either per-cell or per-row write
 * locks depending on the table's conflict handler.
 */
protected ImmutableSortedMap<LockDescriptor, LockMode> getLocksForWrites() {
    Builder<LockDescriptor, LockMode> builder = ImmutableSortedMap.naturalOrder();
    Iterable<TableReference> allTables = IterableUtils.append(writesByTable.keySet(), TransactionConstants.TRANSACTION_TABLE);
    for (TableReference tableRef : allTables) {
        if (tableRef.equals(TransactionConstants.TRANSACTION_TABLE)) {
            // Locking our own transaction-table row forces others to wait for us (or roll
            // us back) before acting on our commit state.
            builder.put(AtlasRowLockDescriptor.of(TransactionConstants.TRANSACTION_TABLE.getQualifiedName(), TransactionConstants.getValueForTimestamp(getStartTimestamp())), LockMode.WRITE);
            continue;
        }
        ConflictHandler conflictHandler = getConflictHandlerForTable(tableRef);
        if (conflictHandler == ConflictHandler.RETRY_ON_WRITE_WRITE_CELL) {
            // Cell-level conflict detection locks each written cell individually.
            for (Cell cell : getLocalWrites(tableRef).keySet()) {
                builder.put(AtlasCellLockDescriptor.of(tableRef.getQualifiedName(), cell.getRowName(), cell.getColumnName()), LockMode.WRITE);
            }
        } else if (conflictHandler != ConflictHandler.IGNORE_ALL) {
            // Row-level locking: one lock per distinct row. The lastCell comparison dedupes
            // adjacent cells — assumes the key set iterates grouped by row (sorted order);
            // TODO confirm against getLocalWrites' map ordering.
            Cell lastCell = null;
            for (Cell cell : getLocalWrites(tableRef).keySet()) {
                if (lastCell == null || !Arrays.equals(lastCell.getRowName(), cell.getRowName())) {
                    builder.put(AtlasRowLockDescriptor.of(tableRef.getQualifiedName(), cell.getRowName()), LockMode.WRITE);
                }
                lastCell = cell;
            }
        }
    }
    return builder.build();
}
/**
 * We will block here until the passed transactions have released their lock. This means that
 * the committing transaction is either complete or it has failed and we are allowed to roll
 * it back.
 */
private void waitForCommitToComplete(Iterable<Long> startTimestamps) {
    boolean isEmpty = true;
    Builder<LockDescriptor, LockMode> builder = ImmutableSortedMap.naturalOrder();
    for (long start : startTimestamps) {
        if (start < immutableTimestamp) {
            // We don't need to block in this case because this transaction is already complete
            continue;
        }
        isEmpty = false;
        // Request a READ lock on the committer's transaction-table row; the committer holds
        // the WRITE lock for the duration of its commit, so acquiring READ means it is done.
        builder.put(AtlasRowLockDescriptor.of(TransactionConstants.TRANSACTION_TABLE.getQualifiedName(), TransactionConstants.getValueForTimestamp(start)), LockMode.READ);
    }
    if (isEmpty) {
        return;
    }
    // TODO: This can have better performance if we have a blockAndReturn method in lock server
    // However lock server blocking is an issue if we fill up all our requests
    try {
        // lockAndRelease: we only need to observe that the locks became available.
        lockService.lock(LockClient.ANONYMOUS.getClientId(), LockRequest.builder(builder.build()).lockAndRelease().build());
    } catch (InterruptedException e) {
        throw Throwables.throwUncheckedException(e);
    }
}
///////////////////////////////////////////////////////////////////////////
/// Commit timestamp management
///////////////////////////////////////////////////////////////////////////
/** Collects the distinct (start) timestamps of the given values into a set. */
private Set<Long> getStartTimestampsForValues(Iterable<Value> values) {
    Set<Long> startTimestamps = Sets.newHashSet();
    for (Value value : values) {
        startTimestamps.add(value.getTimestamp());
    }
    return startTimestamps;
}
/**
 * Returns a map from start timestamp to commit timestamp. If a start timestamp wasn't
 * committed, then it will be missing from the map. This method will block until the
 * transactions for these start timestamps are complete.
 */
protected Map<Long, Long> getCommitTimestamps(@Nullable TableReference tableRef,
        Iterable<Long> startTimestamps,
        boolean waitForCommitterToComplete) {
    if (Iterables.isEmpty(startTimestamps)) {
        return ImmutableMap.of();
    }
    Map<Long, Long> result = Maps.newHashMap();
    Set<Long> gets = Sets.newHashSet();
    // A recorded commit timestamp never changes, so caching lookups is always safe.
    for (long startTS : startTimestamps) {
        Long cached = cachedCommitTimes.get(startTS);
        if (cached != null) {
            result.put(startTS, cached);
        } else {
            gets.add(startTS);
        }
    }
    if (gets.isEmpty()) {
        return result;
    }
    // Before we do the reads, we need to make sure the committer is done writing.
    if (waitForCommitterToComplete) {
        Stopwatch watch = Stopwatch.createStarted();
        waitForCommitToComplete(startTimestamps);
        perfLogger.debug("Waited {} ms to get commit timestamps for table {}.",
                watch.elapsed(TimeUnit.MILLISECONDS), tableRef);
    }
    Map<Long, Long> rawResults = defaultTransactionService.get(gets);
    for (Map.Entry<Long, Long> e : rawResults.entrySet()) {
        if (e.getValue() != null) {
            long startTS = e.getKey();
            long commitTS = e.getValue();
            result.put(startTS, commitTS);
            // Populate the cache for future lookups.
            cachedCommitTimes.put(startTS, commitTS);
        }
    }
    return result;
}
/**
 * This will attempt to put the commitTimestamp into the DB.
 *
 * @throws TransactionLockTimeoutException If our locks timed out while trying to commit.
 * @throws TransactionCommitFailedException failed when committing in a way that isn't retriable
 */
private void putCommitTimestamp(long commitTimestamp, LockRefreshToken locksToken, TransactionService transactionService) throws TransactionFailedException {
    Validate.isTrue(commitTimestamp > getStartTimestamp(), "commitTs must be greater than startTs");
    try {
        // putUnlessExists makes the commit decision atomic: either our commit timestamp is
        // recorded first, or someone already rolled us back (KeyAlreadyExistsException).
        transactionService.putUnlessExists(getStartTimestamp(), commitTimestamp);
    } catch (KeyAlreadyExistsException e) {
        handleKeyAlreadyExistsException(commitTimestamp, e, locksToken);
    } catch (Exception e) {
        // Ambiguous outcome: the write may or may not have taken effect remotely.
        TransactionCommitFailedException commitFailedEx = new TransactionCommitFailedException(
                "This transaction failed writing the commit timestamp. " +
                "It might have been committed, but it may not have.", e);
        log.error("failed to commit an atlasdb transaction", commitFailedEx);
        throw commitFailedEx;
    }
}
/**
 * Resolves a putUnlessExists collision on our commit record: either we actually committed
 * (idempotent retry), our locks expired and someone legitimately rolled us back (lock
 * timeout), or a rollback happened while our locks were valid (a bug).
 */
private void handleKeyAlreadyExistsException(long commitTs, KeyAlreadyExistsException e, LockRefreshToken commitLocksToken) {
    try {
        if (wasCommitSuccessful(commitTs)) {
            // We did actually commit successfully. This case could happen if the impl
            // for putUnlessExists did a retry and we had committed already
            return;
        }
        Set<LockRefreshToken> expiredLocks = refreshExternalAndCommitLocks(commitLocksToken);
        if (!expiredLocks.isEmpty()) {
            // Lock expiry legitimately allows another transaction to roll us back.
            throw new TransactionLockTimeoutException("Our commit was already rolled back at commit time " +
                    "because our locks timed out. startTs: " + getStartTimestamp() + ". " +
                    getExpiredLocksErrorString(commitLocksToken, expiredLocks), e);
        } else {
            AssertUtils.assertAndLog(false,
                    "BUG: Someone tried to roll back our transaction but our locks were still valid; this is not allowed." +
                    " Held external locks: " + externalLocksTokens + "; held commit locks: " + commitLocksToken);
        }
    } catch (TransactionFailedException e1) {
        // Propagate deliberate failures (e.g. the lock-timeout above) unchanged.
        throw e1;
    } catch (Exception e1) {
        // Could not determine retryability; fall through to the generic failure below.
        log.error("Failed to determine if we can retry this transaction. startTs: " + getStartTimestamp(), e1);
    }
    String msg = "Our commit was already rolled back at commit time. " +
            "Locking should prevent this from happening, but our locks may have timed out. " +
            "startTs: " + getStartTimestamp();
    throw new TransactionCommitFailedException(msg, e);
}
/**
 * Checks whether our commit record equals the commit timestamp we tried to write.
 * Fails loudly on an unexpected stored value (neither our commit ts nor FAILED_COMMIT_TS).
 */
private boolean wasCommitSuccessful(long commitTs) throws Exception {
    Map<Long, Long> commitTimestamps = getCommitTimestamps(null, Collections.singleton(getStartTimestamp()), false);
    Long storedCommit = commitTimestamps.get(getStartTimestamp());
    // Previously an absent entry auto-unboxed into an opaque NullPointerException;
    // fail with a clear message instead.
    if (storedCommit == null) {
        Validate.isTrue(false, "Missing commit value. startTs " + getStartTimestamp() + " commitTs: " + commitTs);
    }
    if (storedCommit != commitTs && storedCommit != TransactionConstants.FAILED_COMMIT_TS) {
        Validate.isTrue(false, "Commit value is wrong. startTs " + getStartTimestamp() + " commitTs: " + commitTs);
    }
    return storedCommit == commitTs;
}
@Override
public void useTable(TableReference tableRef, ConstraintCheckable table) {
    // Register a table for constraint checking at commit time (see checkConstraints()).
    constraintsByTableName.put(tableRef, table);
}

// Start timestamp is read through a supplier; presumably resolved lazily — TODO confirm.
private long getStartTimestamp() {
    return startTimestamp.get();
}

@Override
protected KeyValueService getKeyValueService() {
    return keyValueService;
}

// Cells are queued for scrubbing while still COMMITTING (before the commit timestamp is
// written) so a crash cannot lose track of cells needing scrub — see commit().
private Multimap<Cell, TableReference> getCellsToQueueForScrubbing() {
    return getCellsToScrubByCell(State.COMMITTING);
}

// Immediate scrubbing is only meaningful once the transaction has COMMITTED.
Multimap<TableReference, Cell> getCellsToScrubImmediately() {
    return getCellsToScrubByTable(State.COMMITTED);
}
/**
 * Maps every written cell to the tables it was written in, provided this transaction is in
 * the expected state; otherwise logs an assertion failure and returns an empty multimap.
 */
private Multimap<Cell, TableReference> getCellsToScrubByCell(State expectedState) {
    Multimap<Cell, TableReference> cellToTableName = HashMultimap.create();
    State actualState = state.get();
    if (expectedState != actualState) {
        AssertUtils.assertAndLog(false, "Expected state: " + expectedState + "; actual state: " + actualState);
        return cellToTableName;
    }
    for (Entry<TableReference, ConcurrentNavigableMap<Cell, byte[]>> entry : writesByTable.entrySet()) {
        TableReference table = entry.getKey();
        for (Cell cell : entry.getValue().keySet()) {
            cellToTableName.put(cell, table);
        }
    }
    return cellToTableName;
}
/**
 * Maps every written table to its written cells, provided this transaction is in the
 * expected state; otherwise logs an assertion failure and returns an empty multimap.
 */
private Multimap<TableReference, Cell> getCellsToScrubByTable(State expectedState) {
    Multimap<TableReference, Cell> tableRefToCells = HashMultimap.create();
    State actualState = state.get();
    if (expectedState != actualState) {
        AssertUtils.assertAndLog(false, "Expected state: " + expectedState + "; actual state: " + actualState);
        return tableRefToCells;
    }
    for (Entry<TableReference, ConcurrentNavigableMap<Cell, byte[]>> entry : writesByTable.entrySet()) {
        tableRefToCells.putAll(entry.getKey(), entry.getValue().keySet());
    }
    return tableRefToCells;
}
}
|
Make internal put private
|
atlasdb-impl-shared/src/main/java/com/palantir/atlasdb/transaction/impl/SnapshotTransaction.java
|
Make internal put private
|
|
Java
|
apache-2.0
|
59d5e70df836087c4462c52f825b6258ef913ca9
| 0
|
galderz/Aeron,mikeb01/Aeron,oleksiyp/Aeron,real-logic/Aeron,EvilMcJerkface/Aeron,galderz/Aeron,galderz/Aeron,real-logic/Aeron,mikeb01/Aeron,mikeb01/Aeron,galderz/Aeron,oleksiyp/Aeron,real-logic/Aeron,oleksiyp/Aeron,mikeb01/Aeron,EvilMcJerkface/Aeron,EvilMcJerkface/Aeron,real-logic/Aeron,EvilMcJerkface/Aeron
|
/*
* Copyright 2014-2017 Real Logic Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.aeron.archiver;
import io.aeron.Aeron;
import org.agrona.*;
import org.agrona.concurrent.*;
import org.agrona.concurrent.status.*;
import java.io.File;
import java.util.concurrent.ThreadFactory;
import java.util.function.Supplier;
public final class Archiver implements AutoCloseable
{
private final Context ctx;
private final AgentRunner conductorRunner;
private final AgentRunner replayRunner;
private final AgentRunner recorderRunner;
private final AgentInvoker invoker;
private final Aeron aeron;
/**
 * Wires up the archiver's conductor, replayer and recorder agents according to the
 * configured {@link ArchiverThreadingMode}: per-agent threads (DEDICATED), one shared
 * thread (SHARED), or caller-driven invocation (INVOKER).
 */
private Archiver(final Context ctx)
{
    this.ctx = ctx;
    ctx.clientContext.driverAgentInvoker(ctx.driverAgentInvoker());
    if (ctx.threadingMode() != ArchiverThreadingMode.DEDICATED)
    {
        // Single-threaded modes need no client locking.
        ctx.clientContext.clientLock(new NoOpLock());
    }
    aeron = Aeron.connect(ctx.clientContext);
    ctx.conclude();
    final ErrorHandler errorHandler = ctx.errorHandler();
    final AtomicCounter errorCounter = ctx.errorCounter();
    final Replayer replayer;
    final Recorder recorder;
    if (ctx.threadingMode() == ArchiverThreadingMode.DEDICATED)
    {
        // Proxies hand work off to the dedicated replay/record threads.
        replayer = new ReplayerProxy(aeron, ctx);
        recorder = new RecorderProxy(aeron, ctx);
    }
    else
    {
        // Non-dedicated modes invoke the agents directly on the conductor duty cycle.
        replayer = new Replayer(aeron, ctx);
        recorder = new Recorder(aeron, ctx);
        ctx.replayerInvoker(new AgentInvoker(errorHandler, errorCounter, replayer));
        ctx.recorderInvoker(new AgentInvoker(errorHandler, errorCounter, recorder));
    }
    ctx
        .replayer(replayer)
        .recorder(recorder);
    final ArchiveConductor archiveConductor = new ArchiveConductor(aeron, ctx);
    switch (ctx.threadingMode())
    {
        case INVOKER:
            // Caller drives the conductor via invoker(); no runners are created.
            invoker = new AgentInvoker(errorHandler, errorCounter, archiveConductor);
            conductorRunner = null;
            replayRunner = null;
            recorderRunner = null;
            break;

        case SHARED:
            // One runner hosts the conductor (which in turn invokes replayer/recorder).
            invoker = null;
            conductorRunner = new AgentRunner(
                ctx.idleStrategy(),
                errorHandler,
                errorCounter,
                archiveConductor);
            replayRunner = null;
            recorderRunner = null;
            break;

        default:
        case DEDICATED:
            // Separate runners (threads) for conductor, replayer and recorder.
            invoker = null;
            conductorRunner = new AgentRunner(
                ctx.idleStrategy(),
                errorHandler,
                errorCounter,
                archiveConductor);
            replayRunner = new AgentRunner(
                ctx.idleStrategy(),
                errorHandler,
                errorCounter,
                replayer);
            recorderRunner = new AgentRunner(
                ctx.idleStrategy(),
                errorHandler,
                errorCounter,
                recorder);
    }
}
/**
 * Closes the agent runners and then the Aeron client.
 * NOTE(review): if CloseHelper.close rethrows on a failed close, the later runners and the
 * Aeron client would be skipped — confirm whether quiet closing is intended here.
 */
public void close() throws Exception
{
    CloseHelper.close(conductorRunner);
    CloseHelper.close(replayRunner);
    CloseHelper.close(recorderRunner);
    CloseHelper.close(aeron);
}
/**
 * Starts the runner threads appropriate to the threading mode; INVOKER mode starts
 * nothing because the caller drives the conductor via {@link #invoker()}.
 */
private Archiver start()
{
    switch (ctx.threadingMode())
    {
        case SHARED:
            AgentRunner.startOnThread(conductorRunner, ctx.threadFactory());
            break;

        case DEDICATED:
            AgentRunner.startOnThread(conductorRunner, ctx.threadFactory());
            AgentRunner.startOnThread(replayRunner, ctx.threadFactory());
            AgentRunner.startOnThread(recorderRunner, ctx.threadFactory());
            break;

        default:
            break;
    }

    return this;
}
/**
 * The conductor {@link AgentInvoker} when running in INVOKER threading mode;
 * null in the other modes (see the constructor's switch).
 */
public AgentInvoker invoker()
{
    return invoker;
}

/** Launch an Archiver with a default {@link Context}. */
public static Archiver launch()
{
    return launch(new Context());
}

/** Launch an Archiver with the supplied configuration and start its agents. */
public static Archiver launch(final Context ctx)
{
    return new Archiver(ctx).start();
}
public static class Context
{
private Aeron.Context clientContext;
private File archiveDir;
private String controlRequestChannel;
private int controlRequestStreamId;
private String recordingEventsChannel;
private int recordingEventsStreamId;
private Supplier<IdleStrategy> idleStrategySupplier;
private EpochClock epochClock;
private int segmentFileLength = 128 * 1024 * 1024;
private boolean forceMetadataUpdates = false;
private boolean forceWrites = false;
private ArchiverThreadingMode threadingMode = ArchiverThreadingMode.SHARED;
private ThreadFactory threadFactory = Thread::new;
private AgentInvoker driverAgentInvoker;
private AgentInvoker replayerInvoker;
private AgentInvoker recorderInvoker;
private Replayer replayer;
private Recorder recorder;
private ErrorHandler errorHandler;
private AtomicCounter errorCounter;
/** Creates a Context with a fresh Aeron client context and a local "archive" directory. */
public Context()
{
    this(new Aeron.Context(), new File("archive"));
}

public Context(final Aeron.Context clientContext, final File archiveDir)
{
    // The archiver drives the client conductor itself rather than a client thread.
    clientContext.useConductorAgentInvoker(true);
    this.clientContext = clientContext;
    this.archiveDir = archiveDir;
    // Default control and recording-events channels on localhost; override via setters.
    controlRequestChannel = "aeron:udp?endpoint=localhost:8010";
    controlRequestStreamId = 0;
    recordingEventsChannel = "aeron:udp?endpoint=localhost:8011";
    recordingEventsStreamId = 0;
}
/**
 * Finalizes the configuration: creates the archive directory and fills in defaults for
 * idle strategy, clock, error handler and error counter that were not explicitly set.
 */
void conclude()
{
    if (!archiveDir.exists() && !archiveDir.mkdirs())
    {
        throw new IllegalArgumentException(
            "Failed to create archive dir: " + archiveDir.getAbsolutePath());
    }

    if (idleStrategySupplier == null)
    {
        // Default to a low-CPU sleeping idle strategy.
        idleStrategySupplier = () -> new SleepingMillisIdleStrategy(Aeron.IDLE_SLEEP_MS);
    }

    if (epochClock == null)
    {
        epochClock = clientContext.epochClock();
    }

    if (errorHandler == null)
    {
        errorHandler = Throwable::printStackTrace;
    }

    if (errorCounter == null)
    {
        // Allocate an error counter in the client's counters buffers.
        final CountersManager counters = new CountersManager(
            clientContext.countersMetaDataBuffer(),
            clientContext.countersValuesBuffer());
        errorCounter = counters.newCounter("archiver-errors");
    }
}
/** Directory in which recordings and the catalog are stored. */
public File archiveDir()
{
    return archiveDir;
}

/** Sets the archive directory; created by {@link #conclude()} if absent. */
public Context archiveDir(final File archiveDir)
{
    this.archiveDir = archiveDir;
    return this;
}

/** Aeron client context used to connect the archiver's Aeron client. */
public Aeron.Context clientContext()
{
    return clientContext;
}

public Context clientContext(final Aeron.Context ctx)
{
    this.clientContext = ctx;
    return this;
}

/** Channel on which control requests are received. */
public String controlRequestChannel()
{
    return controlRequestChannel;
}

public Context controlRequestChannel(final String controlRequestChannel)
{
    this.controlRequestChannel = controlRequestChannel;
    return this;
}

/** Stream id for control requests on {@link #controlRequestChannel()}. */
public int controlRequestStreamId()
{
    return controlRequestStreamId;
}

public Context controlRequestStreamId(final int controlRequestStreamId)
{
    this.controlRequestStreamId = controlRequestStreamId;
    return this;
}

/** Channel on which recording progress events are published. */
public String recordingEventsChannel()
{
    return recordingEventsChannel;
}

public Context recordingEventsChannel(final String recordingEventsChannel)
{
    this.recordingEventsChannel = recordingEventsChannel;
    return this;
}

/** Stream id for recording events on {@link #recordingEventsChannel()}. */
public int recordingEventsStreamId()
{
    return recordingEventsStreamId;
}

public Context recordingEventsStreamId(final int recordingEventsStreamId)
{
    this.recordingEventsStreamId = recordingEventsStreamId;
    return this;
}
/**
* Provides an IdleStrategy supplier for the thread responsible for publication/subscription backoff.
*
* @param idleStrategySupplier supplier of thread idle strategy for publication/subscription backoff.
* @return this Context for method chaining.
*/
public Context idleStrategySupplier(final Supplier<IdleStrategy> idleStrategySupplier)
{
this.idleStrategySupplier = idleStrategySupplier;
return this;
}
public IdleStrategy idleStrategy()
{
return idleStrategySupplier.get();
}
/**
* Set the {@link EpochClock} to be used for tracking wall clock time when interacting with the archiver.
*
* @param clock {@link EpochClock} to be used for tracking wall clock time when interacting with the archiver.
* @return this Context for method chaining
*/
public Context epochClock(final EpochClock clock)
{
this.epochClock = clock;
return this;
}
public EpochClock epochClock()
{
return epochClock;
}
int segmentFileLength()
{
return segmentFileLength;
}
public Context segmentFileLength(final int segmentFileLength)
{
this.segmentFileLength = segmentFileLength;
return this;
}
boolean forceMetadataUpdates()
{
return forceMetadataUpdates;
}
public Context forceMetadataUpdates(final boolean forceMetadataUpdates)
{
this.forceMetadataUpdates = forceMetadataUpdates;
return this;
}
boolean forceWrites()
{
return forceWrites;
}
public Context forceWrites(final boolean forceWrites)
{
this.forceWrites = forceWrites;
return this;
}
/**
* Get the {@link AgentInvoker} that should be used for the Media Driver if running in a lightweight mode.
*
* @return the {@link AgentInvoker} that should be used for the Media Driver if running in a lightweight mode.
*/
AgentInvoker driverAgentInvoker()
{
return driverAgentInvoker;
}
/**
* Set the {@link AgentInvoker} that should be used for the Media Driver if running in a lightweight mode.
*
* @param driverAgentInvoker that should be used for the Media Driver if running in a lightweight mode.
* @return this for a fluent API.
*/
public Context driverAgentInvoker(final AgentInvoker driverAgentInvoker)
{
this.driverAgentInvoker = driverAgentInvoker;
return this;
}
public AgentInvoker replayerInvoker()
{
return replayerInvoker;
}
public Context replayerInvoker(final AgentInvoker replayerInvoker)
{
this.replayerInvoker = replayerInvoker;
return this;
}
public AgentInvoker recorderInvoker()
{
return recorderInvoker;
}
public Context recorderInvoker(final AgentInvoker recorderInvoker)
{
this.recorderInvoker = recorderInvoker;
return this;
}
public ErrorHandler errorHandler()
{
return errorHandler;
}
public AtomicCounter errorCounter()
{
return errorCounter;
}
public Context errorCounter(final AtomicCounter errorCounter)
{
this.errorCounter = errorCounter;
return this;
}
public ArchiverThreadingMode threadingMode()
{
return threadingMode;
}
public Context threadingMode(final ArchiverThreadingMode threadingMode)
{
this.threadingMode = threadingMode;
return this;
}
public ThreadFactory threadFactory()
{
return threadFactory;
}
public Context threadFactory(final ThreadFactory threadFactory)
{
this.threadFactory = threadFactory;
return this;
}
public Context replayer(final Replayer replayer)
{
this.replayer = replayer;
return this;
}
public Context recorder(final Recorder recorder)
{
this.recorder = recorder;
return this;
}
Replayer replayer()
{
return replayer;
}
Recorder recorder()
{
return recorder;
}
}
}
|
aeron-archiver/src/main/java/io/aeron/archiver/Archiver.java
|
/*
* Copyright 2014-2017 Real Logic Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.aeron.archiver;
import io.aeron.Aeron;
import org.agrona.*;
import org.agrona.concurrent.*;
import org.agrona.concurrent.status.*;
import java.io.File;
import java.util.concurrent.ThreadFactory;
import java.util.function.Supplier;
public final class Archiver implements AutoCloseable
{
private final Context ctx;
private final AgentRunner conductorRunner;
private final AgentRunner replayRunner;
private final AgentRunner recorderRunner;
private final AgentInvoker invoker;
private final Aeron aeron;
private Archiver(final Context ctx)
{
this.ctx = ctx;
ctx.clientContext.driverAgentInvoker(ctx.driverAgentInvoker());
if (ctx.threadingMode() != ArchiverThreadingMode.DEDICATED)
{
ctx.clientContext.clientLock(new NoOpLock());
}
aeron = Aeron.connect(ctx.clientContext);
ctx.conclude();
final ErrorHandler errorHandler = ctx.errorHandler();
final AtomicCounter errorCounter = ctx.errorCounter();
final Replayer replayer;
final Recorder recorder;
if (ctx.threadingMode() == ArchiverThreadingMode.DEDICATED)
{
replayer = new ReplayerProxy(aeron, ctx);
recorder = new RecorderProxy(aeron, ctx);
}
else
{
replayer = new Replayer(aeron, ctx);
recorder = new Recorder(aeron, ctx);
ctx.replayerInvoker(new AgentInvoker(errorHandler, errorCounter, replayer));
ctx.recorderInvoker(new AgentInvoker(errorHandler, errorCounter, recorder));
}
ctx
.replayer(replayer)
.recorder(recorder);
final ArchiveConductor archiveConductor = new ArchiveConductor(aeron, ctx);
switch (ctx.threadingMode())
{
case INVOKER:
invoker = new AgentInvoker(errorHandler, errorCounter, archiveConductor);
conductorRunner = null;
replayRunner = null;
recorderRunner = null;
break;
case SHARED:
invoker = null;
conductorRunner = new AgentRunner(
ctx.idleStrategy(),
errorHandler,
errorCounter,
archiveConductor);
replayRunner = null;
recorderRunner = null;
break;
default:
case DEDICATED:
invoker = null;
conductorRunner = new AgentRunner(
ctx.idleStrategy(),
errorHandler,
errorCounter,
archiveConductor);
replayRunner = new AgentRunner(
ctx.idleStrategy(),
errorHandler,
errorCounter,
replayer);
recorderRunner = new AgentRunner(
ctx.idleStrategy(),
errorHandler,
errorCounter,
recorder);
}
}
public void close() throws Exception
{
CloseHelper.close(conductorRunner);
CloseHelper.close(replayRunner);
CloseHelper.close(recorderRunner);
CloseHelper.close(aeron);
}
private Archiver start()
{
if (ctx.threadingMode() == ArchiverThreadingMode.SHARED)
{
AgentRunner.startOnThread(conductorRunner, ctx.threadFactory());
}
else if (ctx.threadingMode() == ArchiverThreadingMode.DEDICATED)
{
AgentRunner.startOnThread(conductorRunner, ctx.threadFactory());
AgentRunner.startOnThread(replayRunner, ctx.threadFactory());
AgentRunner.startOnThread(recorderRunner, ctx.threadFactory());
}
return this;
}
public AgentInvoker invoker()
{
return invoker;
}
public static Archiver launch()
{
return launch(new Context());
}
public static Archiver launch(final Context ctx)
{
return new Archiver(ctx).start();
}
public static class Context
{
private Aeron.Context clientContext;
private File archiveDir;
private String controlRequestChannel;
private int controlRequestStreamId;
private String recordingEventsChannel;
private int recordingEventsStreamId;
private Supplier<IdleStrategy> idleStrategySupplier;
private EpochClock epochClock;
private int segmentFileLength = 128 * 1024 * 1024;
private boolean forceMetadataUpdates = true;
private boolean forceWrites = true;
private ArchiverThreadingMode threadingMode = ArchiverThreadingMode.SHARED;
private ThreadFactory threadFactory = Thread::new;
private AgentInvoker driverAgentInvoker;
private AgentInvoker replayerInvoker;
private AgentInvoker recorderInvoker;
private Replayer replayer;
private Recorder recorder;
private ErrorHandler errorHandler;
private AtomicCounter errorCounter;
public Context()
{
this(new Aeron.Context(), new File("archive"));
}
public Context(final Aeron.Context clientContext, final File archiveDir)
{
clientContext.useConductorAgentInvoker(true);
this.clientContext = clientContext;
this.archiveDir = archiveDir;
controlRequestChannel = "aeron:udp?endpoint=localhost:8010";
controlRequestStreamId = 0;
recordingEventsChannel = "aeron:udp?endpoint=localhost:8011";
recordingEventsStreamId = 0;
}
void conclude()
{
if (!archiveDir.exists() && !archiveDir.mkdirs())
{
throw new IllegalArgumentException(
"Failed to create archive dir: " + archiveDir.getAbsolutePath());
}
if (idleStrategySupplier == null)
{
idleStrategySupplier = () -> new SleepingMillisIdleStrategy(Aeron.IDLE_SLEEP_MS);
}
if (epochClock == null)
{
epochClock = clientContext.epochClock();
}
if (errorHandler == null)
{
errorHandler = Throwable::printStackTrace;
}
if (errorCounter == null)
{
final CountersManager counters = new CountersManager(
clientContext.countersMetaDataBuffer(),
clientContext.countersValuesBuffer());
errorCounter = counters.newCounter("archiver-errors");
}
}
public File archiveDir()
{
return archiveDir;
}
public Context archiveDir(final File archiveDir)
{
this.archiveDir = archiveDir;
return this;
}
public Aeron.Context clientContext()
{
return clientContext;
}
public Context clientContext(final Aeron.Context ctx)
{
this.clientContext = ctx;
return this;
}
public String controlRequestChannel()
{
return controlRequestChannel;
}
public Context controlRequestChannel(final String controlRequestChannel)
{
this.controlRequestChannel = controlRequestChannel;
return this;
}
public int controlRequestStreamId()
{
return controlRequestStreamId;
}
public Context controlRequestStreamId(final int controlRequestStreamId)
{
this.controlRequestStreamId = controlRequestStreamId;
return this;
}
public String recordingEventsChannel()
{
return recordingEventsChannel;
}
public Context recordingEventsChannel(final String recordingEventsChannel)
{
this.recordingEventsChannel = recordingEventsChannel;
return this;
}
public int recordingEventsStreamId()
{
return recordingEventsStreamId;
}
public Context recordingEventsStreamId(final int recordingEventsStreamId)
{
this.recordingEventsStreamId = recordingEventsStreamId;
return this;
}
/**
* Provides an IdleStrategy supplier for the thread responsible for publication/subscription backoff.
*
* @param idleStrategySupplier supplier of thread idle strategy for publication/subscription backoff.
* @return this Context for method chaining.
*/
public Context idleStrategySupplier(final Supplier<IdleStrategy> idleStrategySupplier)
{
this.idleStrategySupplier = idleStrategySupplier;
return this;
}
public IdleStrategy idleStrategy()
{
return idleStrategySupplier.get();
}
/**
* Set the {@link EpochClock} to be used for tracking wall clock time when interacting with the archiver.
*
* @param clock {@link EpochClock} to be used for tracking wall clock time when interacting with the archiver.
* @return this Context for method chaining
*/
public Context epochClock(final EpochClock clock)
{
this.epochClock = clock;
return this;
}
public EpochClock epochClock()
{
return epochClock;
}
int segmentFileLength()
{
return segmentFileLength;
}
public Context segmentFileLength(final int segmentFileLength)
{
this.segmentFileLength = segmentFileLength;
return this;
}
boolean forceMetadataUpdates()
{
return forceMetadataUpdates;
}
public Context forceMetadataUpdates(final boolean forceMetadataUpdates)
{
this.forceMetadataUpdates = forceMetadataUpdates;
return this;
}
boolean forceWrites()
{
return forceWrites;
}
public Context forceWrites(final boolean forceWrites)
{
this.forceWrites = forceWrites;
return this;
}
/**
* Get the {@link AgentInvoker} that should be used for the Media Driver if running in a lightweight mode.
*
* @return the {@link AgentInvoker} that should be used for the Media Driver if running in a lightweight mode.
*/
AgentInvoker driverAgentInvoker()
{
return driverAgentInvoker;
}
/**
* Set the {@link AgentInvoker} that should be used for the Media Driver if running in a lightweight mode.
*
* @param driverAgentInvoker that should be used for the Media Driver if running in a lightweight mode.
* @return this for a fluent API.
*/
public Context driverAgentInvoker(final AgentInvoker driverAgentInvoker)
{
this.driverAgentInvoker = driverAgentInvoker;
return this;
}
public AgentInvoker replayerInvoker()
{
return replayerInvoker;
}
public Context replayerInvoker(final AgentInvoker replayerInvoker)
{
this.replayerInvoker = replayerInvoker;
return this;
}
public AgentInvoker recorderInvoker()
{
return recorderInvoker;
}
public Context recorderInvoker(final AgentInvoker recorderInvoker)
{
this.recorderInvoker = recorderInvoker;
return this;
}
public ErrorHandler errorHandler()
{
return errorHandler;
}
public AtomicCounter errorCounter()
{
return errorCounter;
}
public Context errorCounter(final AtomicCounter errorCounter)
{
this.errorCounter = errorCounter;
return this;
}
public ArchiverThreadingMode threadingMode()
{
return threadingMode;
}
public Context threadingMode(final ArchiverThreadingMode threadingMode)
{
this.threadingMode = threadingMode;
return this;
}
public ThreadFactory threadFactory()
{
return threadFactory;
}
public Context threadFactory(final ThreadFactory threadFactory)
{
this.threadFactory = threadFactory;
return this;
}
public Context replayer(final Replayer replayer)
{
this.replayer = replayer;
return this;
}
public Context recorder(final Recorder recorder)
{
this.recorder = recorder;
return this;
}
Replayer replayer()
{
return replayer;
}
Recorder recorder()
{
return recorder;
}
}
}
|
[Java] Default archiver forcing writes to false.
|
aeron-archiver/src/main/java/io/aeron/archiver/Archiver.java
|
[Java] Default archiver forcing writes to false.
|
|
Java
|
apache-2.0
|
c0a2c6ee1534d4a6e7ec0c4b4c8a8f0e5b461bda
| 0
|
StCostea/k3po,jfallows/k3po,StCostea/k3po,mgherghe/k3po,k3po/k3po,dpwspoon/k3po,cmebarrow/k3po,dpwspoon/k3po,jfallows/k3po,mgherghe/k3po,k3po/k3po,cmebarrow/k3po
|
/**
* Copyright 2007-2015, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaazing.k3po.driver.internal.control.handler;
import static java.lang.String.format;
import static java.lang.Thread.currentThread;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.nio.file.FileSystems.newFileSystem;
import static org.kaazing.k3po.lang.internal.parser.ScriptParseStrategy.PROPERTY_NODE;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Pattern;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelStateEvent;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.logging.InternalLogger;
import org.jboss.netty.logging.InternalLoggerFactory;
import org.kaazing.k3po.driver.internal.Robot;
import org.kaazing.k3po.driver.internal.control.AwaitMessage;
import org.kaazing.k3po.driver.internal.control.DisposedMessage;
import org.kaazing.k3po.driver.internal.control.ErrorMessage;
import org.kaazing.k3po.driver.internal.control.FinishedMessage;
import org.kaazing.k3po.driver.internal.control.NotifiedMessage;
import org.kaazing.k3po.driver.internal.control.NotifyMessage;
import org.kaazing.k3po.driver.internal.control.PrepareMessage;
import org.kaazing.k3po.driver.internal.control.PreparedMessage;
import org.kaazing.k3po.driver.internal.control.StartedMessage;
import org.kaazing.k3po.lang.internal.parser.ScriptParseException;
import org.kaazing.k3po.lang.internal.parser.ScriptParserImpl;
public class ControlServerHandler extends ControlUpstreamHandler {
private static final Map<String, Object> EMPTY_ENVIRONMENT = Collections.<String, Object>emptyMap();
private static final InternalLogger logger = InternalLoggerFactory.getInstance(ControlServerHandler.class);
private static final String ERROR_MSG_NOT_PREPARED = "Script has not been prepared or is still preparing\n";
private static final String ERROR_MSG_ALREADY_PREPARED = "Script already prepared\n";
private static final String ERROR_MSG_ALREADY_STARTED = "Script has already been started\n";
private Robot robot;
private ChannelFutureListener whenAbortedOrFinished;
private BlockingQueue<CountDownLatch> notifiedLatches = new LinkedBlockingQueue<CountDownLatch>();
private final ChannelFuture channelClosedFuture = Channels.future(null);
private ClassLoader scriptLoader;
public void setScriptLoader(ClassLoader scriptLoader) {
this.scriptLoader = scriptLoader;
}
// Note that this is more than just the channel close future. It's a future that means not only
// that this channel has closed but it is a future that tells us when this obj has processed the closed event.
public ChannelFuture getChannelClosedFuture() {
return channelClosedFuture;
}
@Override
public void channelClosed(final ChannelHandlerContext ctx, final ChannelStateEvent e) throws Exception {
if (robot != null) {
robot.dispose().addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
channelClosedFuture.setSuccess();
ctx.sendUpstream(e);
}
});
}
}
@Override
public void prepareReceived(final ChannelHandlerContext ctx, MessageEvent evt) throws Exception {
if (robot != null && robot.getPreparedFuture() != null) {
sendErrorMessage(ctx, ERROR_MSG_ALREADY_PREPARED);
return;
}
final PrepareMessage prepare = (PrepareMessage) evt.getMessage();
// enforce control protocol version
String version = prepare.getVersion();
if (!"2.0".equals(version)) {
sendVersionError(ctx);
return;
}
List<String> scriptNames = prepare.getNames();
if (logger.isDebugEnabled()) {
logger.debug("preparing script(s) " + scriptNames);
}
robot = new Robot();
whenAbortedOrFinished = whenAbortedOrFinished(ctx);
String originScript = "";
String origin = prepare.getOrigin();
if (origin != null) {
try {
originScript = OriginScript.get(origin);
} catch (URISyntaxException e) {
throw new Exception("Could not find origin: ", e);
}
}
ChannelFuture prepareFuture;
try {
String aggregatedScript = originScript + aggregateScript(scriptNames, scriptLoader);
List<String> properyOverrides = prepare.getProperties();
// consider hard fail in the future, when test frameworks support
// override per test method
// Checks that it is a supported version
if (!"2.0".equals(version)) {
sendVersionError(ctx);
}
aggregatedScript = injectOverridenProperties(aggregatedScript, properyOverrides);
if (scriptLoader != null) {
Thread currentThread = currentThread();
ClassLoader contextClassLoader = currentThread.getContextClassLoader();
try {
currentThread.setContextClassLoader(scriptLoader);
prepareFuture = robot.prepare(aggregatedScript);
} finally {
currentThread.setContextClassLoader(contextClassLoader);
}
} else {
prepareFuture = robot.prepare(aggregatedScript);
}
final String scriptToRun = aggregatedScript;
prepareFuture.addListener(new ChannelFutureListener() {
@Override
public void operationComplete(final ChannelFuture f) {
PreparedMessage prepared = new PreparedMessage();
prepared.setScript(scriptToRun);
prepared.getBarriers().addAll(robot.getBarriersByName().keySet());
Channels.write(ctx, Channels.future(null), prepared);
}
});
} catch (Exception e) {
sendErrorMessage(ctx, e);
return;
}
}
private String injectOverridenProperties(String aggregatedScript, List<String> scriptProperties)
throws Exception, ScriptParseException {
ScriptParserImpl parser = new ScriptParserImpl();
for (String propertyToInject : scriptProperties) {
String propertyName = parser.parseWithStrategy(propertyToInject, PROPERTY_NODE).getPropertyName();
StringBuilder replacementScript = new StringBuilder();
Pattern pattern = Pattern.compile("property\\s+" + propertyName + "\\s+.+");
boolean matchFound = false;
for (String scriptLine : aggregatedScript.split("\\r?\\n")) {
if (pattern.matcher(scriptLine).matches()) {
matchFound = true;
replacementScript.append(propertyToInject + "\n");
} else {
replacementScript.append(scriptLine + "\n");
}
}
if (!matchFound) {
String errorMsg = "Received " + propertyToInject + " in PREPARE but found no where to substitute it";
logger.error(errorMsg);
throw new Exception(errorMsg);
}
aggregatedScript = replacementScript.toString();
}
return aggregatedScript;
}
/*
* Public static because it is used in test utils
*/
public static String aggregateScript(List<String> scriptNames, ClassLoader scriptLoader)
throws URISyntaxException, IOException {
final StringBuilder aggregatedScript = new StringBuilder();
for (String scriptName : scriptNames) {
String scriptNameWithExtension = format("%s.rpt", scriptName);
Path scriptPath = Paths.get(scriptNameWithExtension);
scriptNameWithExtension = URI.create(scriptNameWithExtension).normalize().getPath();
String script = null;
assert !scriptPath.isAbsolute();
// resolve relative scripts in local file system
if (scriptLoader != null) {
// resolve relative scripts from class loader to support
// separated specification projects that include Robot scripts only
URL resource = scriptLoader.getResource(scriptNameWithExtension);
if (resource != null) {
URI resourceURI = resource.toURI();
if ("file".equals(resourceURI.getScheme())) {
Path resourcePath = Paths.get(resourceURI);
script = readScript(resourcePath);
} else {
try (FileSystem fileSystem = newFileSystem(resourceURI, EMPTY_ENVIRONMENT)) {
Path resourcePath = Paths.get(resourceURI);
script = readScript(resourcePath);
}
}
}
}
if (script == null) {
throw new RuntimeException("Script not found: " + scriptPath);
}
aggregatedScript.append(script);
}
return aggregatedScript.toString();
}
private static String readScript(Path scriptPath) throws IOException {
List<String> lines = Files.readAllLines(scriptPath, UTF_8);
StringBuilder sb = new StringBuilder();
for (String line : lines) {
sb.append(line);
sb.append("\n");
}
String script = sb.toString();
return script;
}
@Override
public void startReceived(final ChannelHandlerContext ctx, MessageEvent evt) throws Exception {
if (robot == null || robot.getPreparedFuture() == null) {
sendErrorMessage(ctx, ERROR_MSG_NOT_PREPARED);
return;
}
if (robot.getStartedFuture().isDone()) {
sendErrorMessage(ctx, ERROR_MSG_ALREADY_STARTED);
return;
}
try {
ChannelFuture startFuture = robot.start();
startFuture.addListener(new ChannelFutureListener() {
@Override
public void operationComplete(final ChannelFuture f) {
if (f.isSuccess()) {
final StartedMessage started = new StartedMessage();
Channels.write(ctx, Channels.future(null), started);
} else {
sendErrorMessage(ctx, f.getCause());
}
}
});
} catch (Exception e) {
sendErrorMessage(ctx, e);
return;
}
assert whenAbortedOrFinished != null;
robot.finish().addListener(whenAbortedOrFinished);
}
@Override
public void abortReceived(final ChannelHandlerContext ctx, MessageEvent evt) throws Exception {
if (logger.isInfoEnabled()) {
logger.info("ABORT");
}
if (robot == null || robot.getPreparedFuture() == null) {
sendErrorMessage(ctx, ERROR_MSG_NOT_PREPARED);
return;
}
assert whenAbortedOrFinished != null;
try {
robot.abort().addListener(whenAbortedOrFinished);
} catch (Exception e) {
sendErrorMessage(ctx, e);
return;
}
}
@Override
public void notifyReceived(final ChannelHandlerContext ctx, MessageEvent evt) throws Exception {
NotifyMessage notifyMessage = (NotifyMessage) evt.getMessage();
final String barrier = notifyMessage.getBarrier();
if (logger.isDebugEnabled()) {
logger.debug("NOTIFY: " + barrier);
}
if (robot == null || robot.getPreparedFuture() == null) {
sendErrorMessage(ctx, ERROR_MSG_NOT_PREPARED);
return;
}
try {
writeNotifiedOnBarrier(barrier, ctx);
robot.notifyBarrier(barrier);
} catch (Exception e) {
sendErrorMessage(ctx, e);
return;
}
}
@Override
public void awaitReceived(final ChannelHandlerContext ctx, MessageEvent evt) throws Exception {
AwaitMessage awaitMessage = (AwaitMessage) evt.getMessage();
final String barrier = awaitMessage.getBarrier();
if (logger.isDebugEnabled()) {
logger.debug("AWAIT: " + barrier);
}
if (robot == null || robot.getPreparedFuture() == null) {
sendErrorMessage(ctx, ERROR_MSG_NOT_PREPARED);
return;
}
try {
writeNotifiedOnBarrier(barrier, ctx);
} catch (Exception e) {
sendErrorMessage(ctx, e);
return;
}
}
private void writeNotifiedOnBarrier(final String barrier, final ChannelHandlerContext ctx) throws Exception {
final CountDownLatch latch = new CountDownLatch(1);
// Make sure finished message does not get sent before this notified message
notifiedLatches.add(latch);
robot.awaitBarrier(barrier).addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
try {
if (future.isSuccess()) {
logger.debug("sending NOTIFIED: " + barrier);
final NotifiedMessage notified = new NotifiedMessage();
notified.setBarrier(barrier);
Channels.write(ctx, Channels.future(null), notified);
}
}
finally {
latch.countDown();
}
}
});
}
@Override
public void disposeReceived(final ChannelHandlerContext ctx, MessageEvent evt) throws Exception {
if (robot == null || robot.getPreparedFuture() == null) {
sendErrorMessage(ctx, ERROR_MSG_NOT_PREPARED);
return;
}
try {
robot.dispose().addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
writeDisposed(ctx);
}
});
} catch (Exception e) {
sendErrorMessage(ctx, e);
return;
}
}
private void writeDisposed(ChannelHandlerContext ctx) {
Channel channel = ctx.getChannel();
DisposedMessage disposedMessage = new DisposedMessage();
channel.write(disposedMessage);
}
private ChannelFutureListener whenAbortedOrFinished(final ChannelHandlerContext ctx) {
final AtomicBoolean oneTimeOnly = new AtomicBoolean();
return new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
if (oneTimeOnly.compareAndSet(false, true)) {
sendFinishedMessage(ctx);
}
}
};
}
private void sendFinishedMessage(ChannelHandlerContext ctx) {
Channel channel = ctx.getChannel();
String observedScript = robot.getObservedScript();
FinishedMessage finished = new FinishedMessage();
finished.setScript(observedScript);
channel.write(finished);
}
private void sendVersionError(ChannelHandlerContext ctx) {
Channel channel = ctx.getChannel();
ErrorMessage error = new ErrorMessage();
error.setSummary("Bad control protocol version");
error.setDescription("Robot requires control protocol version 2.0");
channel.write(error);
}
private void sendErrorMessage(ChannelHandlerContext ctx, Throwable throwable) {
ErrorMessage error = new ErrorMessage();
error.setDescription(throwable.getMessage());
if (throwable instanceof ScriptParseException) {
if (logger.isDebugEnabled()) {
logger.error("Caught exception trying to parse script. Sending error to client", throwable);
} else {
logger.error("Caught exception trying to parse script. Sending error to client. Due to " + throwable);
}
error.setSummary("Parse Error");
Channels.write(ctx, Channels.future(null), error);
} else {
logger.error("Internal error. Sending error to client", throwable);
error.setSummary("Internal error");
Channels.write(ctx, Channels.future(null), error);
}
}
private void sendErrorMessage(ChannelHandlerContext ctx, String description) {
ErrorMessage error = new ErrorMessage();
error.setSummary("Internal error");
error.setDescription(description);
if (logger.isDebugEnabled())
logger.error("Sending error to client:" + description);
Channels.write(ctx, Channels.future(null), error);
}
}
|
driver/src/main/java/org/kaazing/k3po/driver/internal/control/handler/ControlServerHandler.java
|
/**
* Copyright 2007-2015, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaazing.k3po.driver.internal.control.handler;
import static java.lang.String.format;
import static java.lang.Thread.currentThread;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.nio.file.FileSystems.newFileSystem;
import static org.kaazing.k3po.lang.internal.parser.ScriptParseStrategy.PROPERTY_NODE;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Pattern;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelStateEvent;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.logging.InternalLogger;
import org.jboss.netty.logging.InternalLoggerFactory;
import org.kaazing.k3po.driver.internal.Robot;
import org.kaazing.k3po.driver.internal.control.AwaitMessage;
import org.kaazing.k3po.driver.internal.control.DisposedMessage;
import org.kaazing.k3po.driver.internal.control.ErrorMessage;
import org.kaazing.k3po.driver.internal.control.FinishedMessage;
import org.kaazing.k3po.driver.internal.control.NotifiedMessage;
import org.kaazing.k3po.driver.internal.control.NotifyMessage;
import org.kaazing.k3po.driver.internal.control.PrepareMessage;
import org.kaazing.k3po.driver.internal.control.PreparedMessage;
import org.kaazing.k3po.driver.internal.control.StartedMessage;
import org.kaazing.k3po.lang.internal.parser.ScriptParseException;
import org.kaazing.k3po.lang.internal.parser.ScriptParserImpl;
/**
 * Netty 3.x upstream handler implementing the k3po control protocol (version 2.0).
 * Lifecycle per channel: PREPARE -> START -> (AWAIT/NOTIFY)* -> FINISHED/ABORT -> DISPOSE.
 * A fresh {@link Robot} is created on each PREPARE and torn down on dispose/close.
 * Not thread-safe beyond Netty's single-channel event ordering guarantees.
 */
public class ControlServerHandler extends ControlUpstreamHandler {

    private static final Map<String, Object> EMPTY_ENVIRONMENT = Collections.<String, Object>emptyMap();

    private static final InternalLogger logger = InternalLoggerFactory.getInstance(ControlServerHandler.class);

    private static final String ERROR_MSG_NOT_PREPARED = "Script has not been prepared or is still preparing\n";
    private static final String ERROR_MSG_ALREADY_PREPARED = "Script already prepared\n";
    private static final String ERROR_MSG_ALREADY_STARTED = "Script has already been started\n";

    private Robot robot;
    private ChannelFutureListener whenAbortedOrFinished;
    // Latches tracking in-flight NOTIFIED writes so FINISHED cannot overtake them.
    private BlockingQueue<CountDownLatch> notifiedLatches;
    private ChannelFuture channelClosedFuture;
    private ClassLoader scriptLoader;

    public ControlServerHandler() {
        initialize();
    }

    /** Class loader used to resolve script resources (may be null for file-system-only lookup). */
    public void setScriptLoader(ClassLoader scriptLoader) {
        this.scriptLoader = scriptLoader;
    }

    // Note that this is more than just the channel close future. It's a future that means not only
    // that this channel has closed but it is a future that tells us when this obj has processed the closed event.
    public ChannelFuture getChannelClosedFuture() {
        return channelClosedFuture;
    }

    @Override
    public void channelClosed(final ChannelHandlerContext ctx, final ChannelStateEvent e) throws Exception {
        if (robot != null) {
            robot.dispose().addListener(new ChannelFutureListener() {
                @Override
                public void operationComplete(ChannelFuture future) throws Exception {
                    channelClosedFuture.setSuccess();
                    ctx.sendUpstream(e);
                    initialize();
                }
            });
        } else {
            // BUGFIX: previously a close on a channel that never prepared a robot was
            // swallowed here, so the close event never propagated upstream and
            // getChannelClosedFuture() waiters hung forever.
            channelClosedFuture.setSuccess();
            ctx.sendUpstream(e);
        }
    }

    /**
     * Handles PREPARE: validates protocol version, aggregates the named scripts
     * (plus optional origin script), applies property overrides, and asks the
     * robot to prepare. Replies with PREPARED (or ERROR).
     */
    @Override
    public void prepareReceived(final ChannelHandlerContext ctx, MessageEvent evt) throws Exception {
        if (robot != null && robot.getPreparedFuture() != null) {
            sendErrorMessage(ctx, ERROR_MSG_ALREADY_PREPARED);
            return;
        }
        final PrepareMessage prepare = (PrepareMessage) evt.getMessage();

        // enforce control protocol version
        String version = prepare.getVersion();
        if (!"2.0".equals(version)) {
            sendVersionError(ctx);
            return;
        }

        List<String> scriptNames = prepare.getNames();
        if (logger.isDebugEnabled()) {
            logger.debug("preparing script(s) " + scriptNames);
        }

        robot = new Robot();
        whenAbortedOrFinished = whenAbortedOrFinished(ctx);

        String originScript = "";
        String origin = prepare.getOrigin();
        if (origin != null) {
            try {
                originScript = OriginScript.get(origin);
            } catch (URISyntaxException e) {
                throw new Exception("Could not find origin: ", e);
            }
        }

        ChannelFuture prepareFuture;
        try {
            String aggregatedScript = originScript + aggregateScript(scriptNames, scriptLoader);
            List<String> propertyOverrides = prepare.getProperties();
            // BUGFIX: removed a duplicated version check here — the version was already
            // validated (with an early return) above, so the second check was dead code
            // and, had it ever fired, would have sent an error without returning.
            aggregatedScript = injectOverridenProperties(aggregatedScript, propertyOverrides);

            if (scriptLoader != null) {
                // Run prepare with the script loader as TCCL so script-relative
                // resources resolve; always restore the previous loader.
                Thread currentThread = currentThread();
                ClassLoader contextClassLoader = currentThread.getContextClassLoader();
                try {
                    currentThread.setContextClassLoader(scriptLoader);
                    prepareFuture = robot.prepare(aggregatedScript);
                } finally {
                    currentThread.setContextClassLoader(contextClassLoader);
                }
            } else {
                prepareFuture = robot.prepare(aggregatedScript);
            }

            final String scriptToRun = aggregatedScript;
            prepareFuture.addListener(new ChannelFutureListener() {
                @Override
                public void operationComplete(final ChannelFuture f) {
                    PreparedMessage prepared = new PreparedMessage();
                    prepared.setScript(scriptToRun);
                    prepared.getBarriers().addAll(robot.getBarriersByName().keySet());
                    Channels.write(ctx, Channels.future(null), prepared);
                }
            });
        } catch (Exception e) {
            sendErrorMessage(ctx, e);
            return;
        }
    }

    /**
     * Replaces each {@code property <name> ...} line in the aggregated script with the
     * override supplied in the PREPARE message. Fails if an override names a property
     * that does not appear in the script.
     */
    private String injectOverridenProperties(String aggregatedScript, List<String> scriptProperties)
            throws Exception, ScriptParseException {
        ScriptParserImpl parser = new ScriptParserImpl();
        for (String propertyToInject : scriptProperties) {
            String propertyName = parser.parseWithStrategy(propertyToInject, PROPERTY_NODE).getPropertyName();
            StringBuilder replacementScript = new StringBuilder();
            Pattern pattern = Pattern.compile("property\\s+" + propertyName + "\\s+.+");
            boolean matchFound = false;
            for (String scriptLine : aggregatedScript.split("\\r?\\n")) {
                if (pattern.matcher(scriptLine).matches()) {
                    matchFound = true;
                    replacementScript.append(propertyToInject + "\n");
                } else {
                    replacementScript.append(scriptLine + "\n");
                }
            }
            if (!matchFound) {
                String errorMsg = "Received " + propertyToInject + " in PREPARE but found no where to substitute it";
                logger.error(errorMsg);
                throw new Exception(errorMsg);
            }
            aggregatedScript = replacementScript.toString();
        }
        return aggregatedScript;
    }

    /*
     * Public static because it is used in test utils.
     * Resolves each script name (with ".rpt" extension) via the class loader when
     * given, supporting both exploded (file:) and archive (jar:) resources, and
     * concatenates their contents in order.
     */
    public static String aggregateScript(List<String> scriptNames, ClassLoader scriptLoader)
            throws URISyntaxException, IOException {
        final StringBuilder aggregatedScript = new StringBuilder();
        for (String scriptName : scriptNames) {
            String scriptNameWithExtension = format("%s.rpt", scriptName);
            Path scriptPath = Paths.get(scriptNameWithExtension);
            scriptNameWithExtension = URI.create(scriptNameWithExtension).normalize().getPath();
            String script = null;

            assert !scriptPath.isAbsolute();

            // resolve relative scripts in local file system
            if (scriptLoader != null) {
                // resolve relative scripts from class loader to support
                // separated specification projects that include Robot scripts only
                URL resource = scriptLoader.getResource(scriptNameWithExtension);
                if (resource != null) {
                    URI resourceURI = resource.toURI();
                    if ("file".equals(resourceURI.getScheme())) {
                        Path resourcePath = Paths.get(resourceURI);
                        script = readScript(resourcePath);
                    } else {
                        // jar: (or similar) resource — mount it as a FileSystem to read it
                        try (FileSystem fileSystem = newFileSystem(resourceURI, EMPTY_ENVIRONMENT)) {
                            Path resourcePath = Paths.get(resourceURI);
                            script = readScript(resourcePath);
                        }
                    }
                }
            }

            if (script == null) {
                throw new RuntimeException("Script not found: " + scriptPath);
            }

            aggregatedScript.append(script);
        }
        return aggregatedScript.toString();
    }

    /** Reads a script file as UTF-8, normalizing line endings to "\n". */
    private static String readScript(Path scriptPath) throws IOException {
        List<String> lines = Files.readAllLines(scriptPath, UTF_8);
        StringBuilder sb = new StringBuilder();
        for (String line : lines) {
            sb.append(line);
            sb.append("\n");
        }
        String script = sb.toString();
        return script;
    }

    /** Handles START: begins script execution and arranges FINISHED on completion/abort. */
    @Override
    public void startReceived(final ChannelHandlerContext ctx, MessageEvent evt) throws Exception {
        if (robot == null || robot.getPreparedFuture() == null) {
            sendErrorMessage(ctx, ERROR_MSG_NOT_PREPARED);
            return;
        }
        if (robot.getStartedFuture().isDone()) {
            sendErrorMessage(ctx, ERROR_MSG_ALREADY_STARTED);
            return;
        }
        try {
            ChannelFuture startFuture = robot.start();
            startFuture.addListener(new ChannelFutureListener() {
                @Override
                public void operationComplete(final ChannelFuture f) {
                    if (f.isSuccess()) {
                        final StartedMessage started = new StartedMessage();
                        Channels.write(ctx, Channels.future(null), started);
                    } else {
                        sendErrorMessage(ctx, f.getCause());
                    }
                }
            });
        } catch (Exception e) {
            sendErrorMessage(ctx, e);
            return;
        }

        assert whenAbortedOrFinished != null;
        robot.finish().addListener(whenAbortedOrFinished);
    }

    /** Handles ABORT: aborts the running script; FINISHED is sent via whenAbortedOrFinished. */
    @Override
    public void abortReceived(final ChannelHandlerContext ctx, MessageEvent evt) throws Exception {
        if (logger.isInfoEnabled()) {
            logger.info("ABORT");
        }
        if (robot == null || robot.getPreparedFuture() == null) {
            sendErrorMessage(ctx, ERROR_MSG_NOT_PREPARED);
            return;
        }
        assert whenAbortedOrFinished != null;
        try {
            robot.abort().addListener(whenAbortedOrFinished);
        } catch (Exception e) {
            sendErrorMessage(ctx, e);
            return;
        }
    }

    /** Handles NOTIFY: triggers a barrier and replies NOTIFIED once it completes. */
    @Override
    public void notifyReceived(final ChannelHandlerContext ctx, MessageEvent evt) throws Exception {
        NotifyMessage notifyMessage = (NotifyMessage) evt.getMessage();
        final String barrier = notifyMessage.getBarrier();
        if (logger.isDebugEnabled()) {
            logger.debug("NOTIFY: " + barrier);
        }
        if (robot == null || robot.getPreparedFuture() == null) {
            sendErrorMessage(ctx, ERROR_MSG_NOT_PREPARED);
            return;
        }
        try {
            // Register the NOTIFIED reply before triggering, so it cannot be missed.
            writeNotifiedOnBarrier(barrier, ctx);
            robot.notifyBarrier(barrier);
        } catch (Exception e) {
            sendErrorMessage(ctx, e);
            return;
        }
    }

    /** Handles AWAIT: replies NOTIFIED once the named barrier completes. */
    @Override
    public void awaitReceived(final ChannelHandlerContext ctx, MessageEvent evt) throws Exception {
        AwaitMessage awaitMessage = (AwaitMessage) evt.getMessage();
        final String barrier = awaitMessage.getBarrier();
        if (logger.isDebugEnabled()) {
            logger.debug("AWAIT: " + barrier);
        }
        if (robot == null || robot.getPreparedFuture() == null) {
            sendErrorMessage(ctx, ERROR_MSG_NOT_PREPARED);
            return;
        }
        try {
            writeNotifiedOnBarrier(barrier, ctx);
        } catch (Exception e) {
            sendErrorMessage(ctx, e);
            return;
        }
    }

    private void writeNotifiedOnBarrier(final String barrier, final ChannelHandlerContext ctx) throws Exception {
        final CountDownLatch latch = new CountDownLatch(1);
        // Make sure finished message does not get sent before this notified message
        notifiedLatches.add(latch);
        robot.awaitBarrier(barrier).addListener(new ChannelFutureListener() {
            @Override
            public void operationComplete(ChannelFuture future) throws Exception {
                try {
                    if (future.isSuccess()) {
                        logger.debug("sending NOTIFIED: " + barrier);
                        final NotifiedMessage notified = new NotifiedMessage();
                        notified.setBarrier(barrier);
                        Channels.write(ctx, Channels.future(null), notified);
                    }
                } finally {
                    // Release the latch even on failure so FINISHED is never blocked.
                    latch.countDown();
                }
            }
        });
    }

    /** Handles DISPOSE: tears down the robot, replies DISPOSED, and resets handler state. */
    @Override
    public void disposeReceived(final ChannelHandlerContext ctx, MessageEvent evt) throws Exception {
        if (robot == null || robot.getPreparedFuture() == null) {
            sendErrorMessage(ctx, ERROR_MSG_NOT_PREPARED);
            return;
        }
        try {
            robot.dispose().addListener(new ChannelFutureListener() {
                @Override
                public void operationComplete(ChannelFuture future) throws Exception {
                    writeDisposed(ctx);
                    initialize();
                }
            });
        } catch (Exception e) {
            sendErrorMessage(ctx, e);
            return;
        }
    }

    private void writeDisposed(ChannelHandlerContext ctx) {
        Channel channel = ctx.getChannel();
        DisposedMessage disposedMessage = new DisposedMessage();
        channel.write(disposedMessage);
    }

    /** Listener that sends FINISHED exactly once, whether the script finished or was aborted. */
    private ChannelFutureListener whenAbortedOrFinished(final ChannelHandlerContext ctx) {
        final AtomicBoolean oneTimeOnly = new AtomicBoolean();
        return new ChannelFutureListener() {
            @Override
            public void operationComplete(ChannelFuture future) throws Exception {
                if (oneTimeOnly.compareAndSet(false, true)) {
                    sendFinishedMessage(ctx);
                }
            }
        };
    }

    private void sendFinishedMessage(ChannelHandlerContext ctx) {
        Channel channel = ctx.getChannel();
        String observedScript = robot.getObservedScript();
        FinishedMessage finished = new FinishedMessage();
        finished.setScript(observedScript);
        channel.write(finished);
    }

    private void sendVersionError(ChannelHandlerContext ctx) {
        Channel channel = ctx.getChannel();
        ErrorMessage error = new ErrorMessage();
        error.setSummary("Bad control protocol version");
        error.setDescription("Robot requires control protocol version 2.0");
        channel.write(error);
    }

    private void sendErrorMessage(ChannelHandlerContext ctx, Throwable throwable) {
        ErrorMessage error = new ErrorMessage();
        error.setDescription(throwable.getMessage());
        if (throwable instanceof ScriptParseException) {
            // Parse errors are expected client mistakes; log stack trace only at debug.
            if (logger.isDebugEnabled()) {
                logger.error("Caught exception trying to parse script. Sending error to client", throwable);
            } else {
                logger.error("Caught exception trying to parse script. Sending error to client. Due to " + throwable);
            }
            error.setSummary("Parse Error");
            Channels.write(ctx, Channels.future(null), error);
        } else {
            logger.error("Internal error. Sending error to client", throwable);
            error.setSummary("Internal error");
            Channels.write(ctx, Channels.future(null), error);
        }
    }

    private void sendErrorMessage(ChannelHandlerContext ctx, String description) {
        ErrorMessage error = new ErrorMessage();
        error.setSummary("Internal error");
        error.setDescription(description);
        if (logger.isDebugEnabled())
            logger.error("Sending error to client:" + description);
        Channels.write(ctx, Channels.future(null), error);
    }

    /** Resets all per-session state; called from the constructor and after dispose/close. */
    private void initialize() {
        robot = null;
        whenAbortedOrFinished = null;
        notifiedLatches = new LinkedBlockingQueue<CountDownLatch>();
        channelClosedFuture = Channels.future(null);
    }
}
|
Fix for issue https://github.com/k3po/k3po/issues/400 .
|
driver/src/main/java/org/kaazing/k3po/driver/internal/control/handler/ControlServerHandler.java
|
Fix for issue https://github.com/k3po/k3po/issues/400 .
|
|
Java
|
apache-2.0
|
b88b6436c9877758972a3c4376bc1196591bc076
| 0
|
ceylon/ceylon-js,ceylon/ceylon-js,ceylon/ceylon-js
|
package com.redhat.ceylon.compiler.js;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.redhat.ceylon.compiler.typechecker.model.Declaration;
import com.redhat.ceylon.compiler.typechecker.model.Functional;
import com.redhat.ceylon.compiler.typechecker.model.ProducedType;
import com.redhat.ceylon.compiler.typechecker.model.TypeParameter;
import com.redhat.ceylon.compiler.typechecker.model.UnionType;
import com.redhat.ceylon.compiler.typechecker.model.Util;
import com.redhat.ceylon.compiler.typechecker.tree.Tree;
/** Generates js code for invocation expression (named and positional). */
public class InvocationGenerator {

    // Owning visitor; all generated JS is emitted through gen.out(...).
    private final GenerateJsVisitor gen;
    // Maps Ceylon declarations to JS identifier names (also mints temp variables).
    private final JsIdentifierNames names;
    // Collects temp variable names so they can be declared/retained by the caller.
    private final RetainedVars retainedVars;

    InvocationGenerator(GenerateJsVisitor owner, JsIdentifierNames names, RetainedVars rv) {
        gen = owner;
        this.names = names;
        retainedVars = rv;
    }

    /**
     * Emits JS for an invocation expression, dispatching on whether the call
     * uses named arguments or positional arguments.
     */
    void generateInvocation(Tree.InvocationExpression that) {
        if (that.getNamedArgumentList()!=null) {
            Tree.NamedArgumentList argList = that.getNamedArgumentList();
            if (gen.isInDynamicBlock() && that.getPrimary() instanceof Tree.MemberOrTypeExpression && ((Tree.MemberOrTypeExpression)that.getPrimary()).getDeclaration() == null) {
                //Call a native js constructor passing a native js object as parameter
                that.getPrimary().visit(gen);
                gen.out("(");
                nativeObject(argList);
                gen.out(")");
            } else {
                // Named invocation: first assign each named argument to a temp var
                // (to preserve evaluation order), then call with temps in parameter order.
                gen.out("(");
                Map<String, String> argVarNames = defineNamedArguments(argList);
                that.getPrimary().visit(gen);
                Tree.TypeArguments targs = that.getPrimary() instanceof Tree.BaseMemberOrTypeExpression ?
                        ((Tree.BaseMemberOrTypeExpression)that.getPrimary()).getTypeArguments() : null;
                if (that.getPrimary() instanceof Tree.MemberOrTypeExpression) {
                    Tree.MemberOrTypeExpression mte = (Tree.MemberOrTypeExpression) that.getPrimary();
                    if (mte.getDeclaration() instanceof Functional) {
                        Functional f = (Functional) mte.getDeclaration();
                        applyNamedArguments(argList, f, argVarNames, gen.getSuperMemberScope(mte)!=null, targs);
                    }
                }
                gen.out(")");
            }
        }
        else {
            Tree.PositionalArgumentList argList = that.getPositionalArgumentList();
            boolean dyntype = false;
            if (gen.isInDynamicBlock() && that.getPrimary() instanceof Tree.BaseTypeExpression
                    && ((Tree.BaseTypeExpression)that.getPrimary()).getDeclaration() == null) {
                //Could be a dynamic object, or a Ceylon one
                // Wrap in the runtime's dyntype() helper to decide at runtime.
                gen.out(GenerateJsVisitor.getClAlias(), "dyntype(");
                dyntype = true;
            }
            that.getPrimary().visit(gen);
            if (gen.prototypeStyle && (gen.getSuperMemberScope(that.getPrimary()) != null)) {
                // Super-member access in prototype style is emitted as fn.call(this, ...)
                gen.out(".call(this");
                if (!argList.getPositionalArguments().isEmpty()) {
                    gen.out(",");
                }
            } else if (dyntype) {
                // dyntype( already opened the paren; just separate the arguments.
                if (!argList.getPositionalArguments().isEmpty()) {
                    gen.out(",");
                }
            } else {
                gen.out("(");
            }
            generatePositionalArguments(argList, argList.getPositionalArguments(), false);
            Tree.TypeArguments targs = that.getPrimary() instanceof Tree.StaticMemberOrTypeExpression
                    ? ((Tree.StaticMemberOrTypeExpression)that.getPrimary()).getTypeArguments() : null;
            if (targs != null && targs.getTypeModels() != null && !targs.getTypeModels().isEmpty()) {
                if (argList.getPositionalArguments().size() > 0) {
                    gen.out(",");
                }
                Declaration bmed = ((Tree.StaticMemberOrTypeExpression)that.getPrimary()).getDeclaration();
                if (bmed instanceof Functional) {
                    // Pad missing trailing (defaulted) parameters with "undefined" so the
                    // reified type-argument object lands in the expected trailing slot.
                    if (((Functional) bmed).getParameterLists().get(0).getParameters().size() > argList.getPositionalArguments().size()
                            // has no comprehension
                            && (argList.getPositionalArguments().isEmpty()
                            || argList.getPositionalArguments().get(argList.getPositionalArguments().size()-1) instanceof Tree.Comprehension == false)) {
                        gen.out("undefined,");
                    }
                    if (targs != null && targs.getTypeModels() != null && !targs.getTypeModels().isEmpty()) {
                        Map<TypeParameter, ProducedType> invargs = TypeUtils.matchTypeParametersWithArguments(
                                ((Functional) bmed).getTypeParameters(), targs.getTypeModels());
                        if (invargs != null) {
                            TypeUtils.printTypeArguments(that, invargs, gen);
                        } else {
                            gen.out("/*TARGS != TPARAMS!!!! WTF?????*/");
                        }
                    }
                }
            }
            gen.out(")");
        }
    }

    /**
     * Emits "tmp=<expr>," for each named argument (and the sequenced argument, if any),
     * preserving source evaluation order, and returns a map from parameter name to
     * the temp variable holding its value.
     */
    Map<String, String> defineNamedArguments(Tree.NamedArgumentList argList) {
        Map<String, String> argVarNames = new HashMap<String, String>();
        for (Tree.NamedArgument arg: argList.getNamedArguments()) {
            com.redhat.ceylon.compiler.typechecker.model.Parameter p = arg.getParameter();
            final String paramName;
            if (p == null && gen.isInDynamicBlock()) {
                // In a dynamic block there may be no model parameter; use the source identifier.
                paramName = arg.getIdentifier().getText();
            } else {
                paramName = arg.getParameter().getName();
            }
            String varName = names.createTempVariable(paramName);
            argVarNames.put(paramName, varName);
            retainedVars.add(varName);
            gen.out(varName, "=");
            arg.visit(gen);
            gen.out(",");
        }
        Tree.SequencedArgument sarg = argList.getSequencedArgument();
        if (sarg!=null) {
            String paramName = sarg.getParameter().getName();
            String varName = names.createTempVariable(paramName);
            argVarNames.put(paramName, varName);
            retainedVars.add(varName);
            gen.out(varName, "=");
            generatePositionalArguments(argList, sarg.getPositionalArguments(), true);
            gen.out(",");
        }
        return argVarNames;
    }

    /**
     * Emits the actual call for a named-argument invocation, passing the temp
     * variables created by {@link #defineNamedArguments} in declared parameter order,
     * filling gaps with "undefined" or empty iterables, and appending reified type
     * arguments when present.
     */
    void applyNamedArguments(Tree.NamedArgumentList argList, Functional func,
            Map<String, String> argVarNames, boolean superAccess, Tree.TypeArguments targs) {
        boolean firstList = true;
        for (com.redhat.ceylon.compiler.typechecker.model.ParameterList plist : func.getParameterLists()) {
            List<String> argNames = argList.getNamedArgumentList().getArgumentNames();
            boolean first=true;
            if (firstList && superAccess) {
                gen.out(".call(this");
                if (!plist.getParameters().isEmpty()) { gen.out(","); }
            }
            else {
                gen.out("(");
            }
            for (com.redhat.ceylon.compiler.typechecker.model.Parameter p : plist.getParameters()) {
                if (!first) gen.out(",");
                boolean namedArgumentGiven = argNames.contains(p.getName());
                if (namedArgumentGiven) {
                    gen.out(argVarNames.get(p.getName()));
                } else if (p.isSequenced()) {
                    gen.out(GenerateJsVisitor.getClAlias(), "getEmpty()");
                } else if (argList.getSequencedArgument()!=null) {
                    String pname = argVarNames.get(p.getName());
                    gen.out(pname==null ? "undefined" : pname);
                } else if (p.isDefaulted()) {
                    gen.out("undefined");
                } else {
                    //It's an empty Iterable
                    gen.out(GenerateJsVisitor.getClAlias(), "getEmpty()");
                }
                first = false;
            }
            if (targs != null && !targs.getTypeModels().isEmpty()) {
                Map<TypeParameter, ProducedType> invargs = TypeUtils.matchTypeParametersWithArguments(
                        func.getTypeParameters(), targs.getTypeModels());
                if (!first) gen.out(",");
                TypeUtils.printTypeArguments(argList, invargs, gen);
            }
            gen.out(")");
            firstList = false;
        }
    }

    /**
     * Emits positional arguments, packaging trailing sequenced arguments into a JS
     * array closed with a reified type, and handling spread arguments and
     * comprehensions (chained onto the sequence when preceded by other args).
     * forceSequenced treats all arguments as sequenced (used for named-arg sequences).
     */
    void generatePositionalArguments(Tree.ArgumentList that, List<Tree.PositionalArgument> args, final boolean forceSequenced) {
        if (!args.isEmpty()) {
            boolean first=true;
            boolean opened=false;
            // Union of the element types seen so far for the sequenced tail.
            ProducedType sequencedType=null;
            for (Tree.PositionalArgument arg: args) {
                Tree.Expression expr;
                if (arg instanceof Tree.ListedArgument) {
                    if (!first) gen.out(",");
                    expr = ((Tree.ListedArgument) arg).getExpression();
                    ProducedType exprType = expr.getTypeModel();
                    boolean dyncheck = gen.isInDynamicBlock() && !TypeUtils.isUnknown(arg.getParameter()) && exprType.isUnknown();
                    if (forceSequenced || (arg.getParameter() != null && arg.getParameter().isSequenced())) {
                        if (dyncheck) {
                            //We don't have a real type so get the one declared in the parameter
                            exprType = arg.getParameter().getType();
                        }
                        if (sequencedType == null) {
                            sequencedType=exprType;
                        } else {
                            // Widen the element type to the union of all listed elements.
                            ArrayList<ProducedType> cases = new ArrayList<ProducedType>(2);
                            Util.addToUnion(cases, sequencedType);
                            Util.addToUnion(cases, exprType);
                            if (cases.size() > 1) {
                                UnionType ut = new UnionType(that.getUnit());
                                ut.setCaseTypes(cases);
                                sequencedType = ut.getType();
                            } else {
                                sequencedType = cases.get(0);
                            }
                        }
                        if (!opened) gen.out("[");
                        opened=true;
                    }
                    int boxType = gen.boxUnboxStart(expr.getTerm(), arg.getParameter());
                    if (dyncheck) {
                        TypeUtils.generateDynamicCheck(((Tree.ListedArgument) arg).getExpression(),
                                arg.getParameter().getType(), gen);
                    } else {
                        arg.visit(gen);
                    }
                    gen.boxUnboxEnd(boxType);
                } else if (arg instanceof Tree.SpreadArgument || arg instanceof Tree.Comprehension) {
                    if (arg instanceof Tree.SpreadArgument) {
                        expr = ((Tree.SpreadArgument) arg).getExpression();
                    } else {
                        expr = null;
                    }
                    if (!first) {
                        // Close the sequence built so far and chain the spread/comprehension onto it.
                        gen.closeSequenceWithReifiedType(that,
                                gen.getTypeUtils().wrapAsIterableArguments(sequencedType));
                        gen.out(".chain(");
                        sequencedType=null;
                    }
                    if (arg instanceof Tree.SpreadArgument) {
                        int boxType = gen.boxUnboxStart(expr.getTerm(), arg.getParameter());
                        arg.visit(gen);
                        gen.boxUnboxEnd(boxType);
                    } else {
                        ((Tree.Comprehension)arg).visit(gen);
                    }
                    if (!first) {
                        gen.out(",");
                        if (expr == null) {
                            //it's a comprehension
                            TypeUtils.printTypeArguments(that,
                                    gen.getTypeUtils().wrapAsIterableArguments(arg.getTypeModel()), gen);
                        } else {
                            ProducedType spreadType = TypeUtils.findSupertype(gen.getTypeUtils().sequential, expr.getTypeModel());
                            TypeUtils.printTypeArguments(that, spreadType.getTypeArguments(), gen);
                        }
                        gen.out(")");
                    }
                    if (arg instanceof Tree.Comprehension) {
                        // A comprehension must be last; nothing can follow it.
                        break;
                    }
                }
                first = false;
            }
            if (sequencedType != null) {
                gen.closeSequenceWithReifiedType(that,
                        gen.getTypeUtils().wrapAsIterableArguments(sequencedType));
            }
        }
    }

    /** Generate the code to create a native js object. */
    void nativeObject(Tree.NamedArgumentList argList) {
        if (argList.getSequencedArgument() == null) {
            // Pure named arguments -> a JS object literal.
            gen.out("{");
            boolean first = true;
            for (Tree.NamedArgument arg : argList.getNamedArguments()) {
                if (first) { first = false; } else { gen.out(","); }
                gen.out(arg.getIdentifier().getText(), ":");
                arg.visit(gen);
            }
            gen.out("}");
        } else {
            // Sequenced argument -> a JS array literal; if named arguments are also
            // present, wrap in an IIFE that sets them as properties on the array.
            String arr = null;
            if (argList.getNamedArguments().size() > 0) {
                gen.out("function()");
                gen.beginBlock();
                arr = names.createTempVariable();
                gen.out("var ", arr, "=");
            }
            gen.out("[");
            boolean first = true;
            for (Tree.PositionalArgument arg : argList.getSequencedArgument().getPositionalArguments()) {
                if (first) { first = false; } else { gen.out(","); }
                arg.visit(gen);
            }
            gen.out("]");
            if (argList.getNamedArguments().size() > 0) {
                gen.endLine(true);
                for (Tree.NamedArgument arg : argList.getNamedArguments()) {
                    gen.out(arr, ".", arg.getIdentifier().getText(), "=");
                    arg.visit(gen);
                    gen.endLine(true);
                }
                gen.out("return ", arr, ";");
                gen.endBlock();
                gen.out("()");
            }
        }
    }
}
|
src/main/java/com/redhat/ceylon/compiler/js/InvocationGenerator.java
|
package com.redhat.ceylon.compiler.js;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.redhat.ceylon.compiler.typechecker.model.Declaration;
import com.redhat.ceylon.compiler.typechecker.model.Functional;
import com.redhat.ceylon.compiler.typechecker.model.ProducedType;
import com.redhat.ceylon.compiler.typechecker.model.TypeParameter;
import com.redhat.ceylon.compiler.typechecker.model.UnionType;
import com.redhat.ceylon.compiler.typechecker.model.Util;
import com.redhat.ceylon.compiler.typechecker.tree.Tree;
/** Generates js code for invocation expression (named and positional). */
public class InvocationGenerator {

    // Owning visitor; all generated JS is emitted through gen.out(...).
    private final GenerateJsVisitor gen;
    // Maps Ceylon declarations to JS identifier names (also mints temp variables).
    private final JsIdentifierNames names;
    // Collects temp variable names so they can be declared/retained by the caller.
    private final RetainedVars retainedVars;

    InvocationGenerator(GenerateJsVisitor owner, JsIdentifierNames names, RetainedVars rv) {
        gen = owner;
        this.names = names;
        retainedVars = rv;
    }

    /**
     * Emits JS for an invocation expression, dispatching on whether the call
     * uses named arguments or positional arguments.
     */
    void generateInvocation(Tree.InvocationExpression that) {
        if (that.getNamedArgumentList()!=null) {
            Tree.NamedArgumentList argList = that.getNamedArgumentList();
            if (gen.isInDynamicBlock() && that.getPrimary() instanceof Tree.MemberOrTypeExpression && ((Tree.MemberOrTypeExpression)that.getPrimary()).getDeclaration() == null) {
                //Call a native js constructor passing a native js object as parameter
                that.getPrimary().visit(gen);
                gen.out("(");
                nativeObject(argList);
                gen.out(")");
            } else {
                // Named invocation: first assign each named argument to a temp var
                // (to preserve evaluation order), then call with temps in parameter order.
                gen.out("(");
                Map<String, String> argVarNames = defineNamedArguments(argList);
                that.getPrimary().visit(gen);
                Tree.TypeArguments targs = that.getPrimary() instanceof Tree.BaseMemberOrTypeExpression ?
                        ((Tree.BaseMemberOrTypeExpression)that.getPrimary()).getTypeArguments() : null;
                if (that.getPrimary() instanceof Tree.MemberOrTypeExpression) {
                    Tree.MemberOrTypeExpression mte = (Tree.MemberOrTypeExpression) that.getPrimary();
                    if (mte.getDeclaration() instanceof Functional) {
                        Functional f = (Functional) mte.getDeclaration();
                        applyNamedArguments(argList, f, argVarNames, gen.getSuperMemberScope(mte)!=null, targs);
                    }
                }
                gen.out(")");
            }
        }
        else {
            Tree.PositionalArgumentList argList = that.getPositionalArgumentList();
            that.getPrimary().visit(gen);
            if (gen.prototypeStyle && (gen.getSuperMemberScope(that.getPrimary()) != null)) {
                // Super-member access in prototype style is emitted as fn.call(this, ...)
                gen.out(".call(this");
                if (!argList.getPositionalArguments().isEmpty()) {
                    gen.out(",");
                }
            }
            else {
                gen.out("(");
            }
            argList.visit(gen);
            Tree.TypeArguments targs = that.getPrimary() instanceof Tree.StaticMemberOrTypeExpression
                    ? ((Tree.StaticMemberOrTypeExpression)that.getPrimary()).getTypeArguments() : null;
            if (targs != null && targs.getTypeModels() != null && !targs.getTypeModels().isEmpty()) {
                if (argList.getPositionalArguments().size() > 0) {
                    gen.out(",");
                }
                Declaration bmed = ((Tree.StaticMemberOrTypeExpression)that.getPrimary()).getDeclaration();
                if (bmed instanceof Functional) {
                    // Pad missing trailing (defaulted) parameters with "undefined" so the
                    // reified type-argument object lands in the expected trailing slot.
                    if (((Functional) bmed).getParameterLists().get(0).getParameters().size() > argList.getPositionalArguments().size()
                            // has no comprehension
                            && (argList.getPositionalArguments().isEmpty()
                            || argList.getPositionalArguments().get(argList.getPositionalArguments().size()-1) instanceof Tree.Comprehension == false)) {
                        gen.out("undefined,");
                    }
                    if (targs != null && targs.getTypeModels() != null && !targs.getTypeModels().isEmpty()) {
                        Map<TypeParameter, ProducedType> invargs = TypeUtils.matchTypeParametersWithArguments(
                                ((Functional) bmed).getTypeParameters(), targs.getTypeModels());
                        if (invargs != null) {
                            TypeUtils.printTypeArguments(that, invargs, gen);
                        } else {
                            gen.out("/*TARGS != TPARAMS!!!! WTF?????*/");
                        }
                    }
                }
            }
            gen.out(")");
        }
    }

    /**
     * Emits "tmp=<expr>," for each named argument (and the sequenced argument, if any),
     * preserving source evaluation order, and returns a map from parameter name to
     * the temp variable holding its value.
     */
    Map<String, String> defineNamedArguments(Tree.NamedArgumentList argList) {
        Map<String, String> argVarNames = new HashMap<String, String>();
        for (Tree.NamedArgument arg: argList.getNamedArguments()) {
            com.redhat.ceylon.compiler.typechecker.model.Parameter p = arg.getParameter();
            final String paramName;
            if (p == null && gen.isInDynamicBlock()) {
                // In a dynamic block there may be no model parameter; use the source identifier.
                paramName = arg.getIdentifier().getText();
            } else {
                paramName = arg.getParameter().getName();
            }
            String varName = names.createTempVariable(paramName);
            argVarNames.put(paramName, varName);
            retainedVars.add(varName);
            gen.out(varName, "=");
            arg.visit(gen);
            gen.out(",");
        }
        Tree.SequencedArgument sarg = argList.getSequencedArgument();
        if (sarg!=null) {
            String paramName = sarg.getParameter().getName();
            String varName = names.createTempVariable(paramName);
            argVarNames.put(paramName, varName);
            retainedVars.add(varName);
            gen.out(varName, "=");
            generatePositionalArguments(argList, sarg.getPositionalArguments(), true);
            gen.out(",");
        }
        return argVarNames;
    }

    /**
     * Emits the actual call for a named-argument invocation, passing the temp
     * variables created by {@link #defineNamedArguments} in declared parameter order,
     * filling gaps with "undefined" or empty iterables, and appending reified type
     * arguments when present.
     */
    void applyNamedArguments(Tree.NamedArgumentList argList, Functional func,
            Map<String, String> argVarNames, boolean superAccess, Tree.TypeArguments targs) {
        boolean firstList = true;
        for (com.redhat.ceylon.compiler.typechecker.model.ParameterList plist : func.getParameterLists()) {
            List<String> argNames = argList.getNamedArgumentList().getArgumentNames();
            boolean first=true;
            if (firstList && superAccess) {
                gen.out(".call(this");
                if (!plist.getParameters().isEmpty()) { gen.out(","); }
            }
            else {
                gen.out("(");
            }
            for (com.redhat.ceylon.compiler.typechecker.model.Parameter p : plist.getParameters()) {
                if (!first) gen.out(",");
                boolean namedArgumentGiven = argNames.contains(p.getName());
                if (namedArgumentGiven) {
                    gen.out(argVarNames.get(p.getName()));
                } else if (p.isSequenced()) {
                    gen.out(GenerateJsVisitor.getClAlias(), "getEmpty()");
                } else if (argList.getSequencedArgument()!=null) {
                    String pname = argVarNames.get(p.getName());
                    gen.out(pname==null ? "undefined" : pname);
                } else if (p.isDefaulted()) {
                    gen.out("undefined");
                } else {
                    //It's an empty Iterable
                    gen.out(GenerateJsVisitor.getClAlias(), "getEmpty()");
                }
                first = false;
            }
            if (targs != null && !targs.getTypeModels().isEmpty()) {
                Map<TypeParameter, ProducedType> invargs = TypeUtils.matchTypeParametersWithArguments(
                        func.getTypeParameters(), targs.getTypeModels());
                if (!first) gen.out(",");
                TypeUtils.printTypeArguments(argList, invargs, gen);
            }
            gen.out(")");
            firstList = false;
        }
    }

    /**
     * Emits positional arguments, packaging trailing sequenced arguments into a JS
     * array closed with a reified type, and handling spread arguments and
     * comprehensions (chained onto the sequence when preceded by other args).
     * forceSequenced treats all arguments as sequenced (used for named-arg sequences).
     */
    void generatePositionalArguments(Tree.ArgumentList that, List<Tree.PositionalArgument> args, final boolean forceSequenced) {
        if (!args.isEmpty()) {
            boolean first=true;
            boolean opened=false;
            // Union of the element types seen so far for the sequenced tail.
            ProducedType sequencedType=null;
            for (Tree.PositionalArgument arg: args) {
                Tree.Expression expr;
                if (arg instanceof Tree.ListedArgument) {
                    if (!first) gen.out(",");
                    expr = ((Tree.ListedArgument) arg).getExpression();
                    ProducedType exprType = expr.getTypeModel();
                    boolean dyncheck = gen.isInDynamicBlock() && !TypeUtils.isUnknown(arg.getParameter()) && exprType.isUnknown();
                    if (forceSequenced || (arg.getParameter() != null && arg.getParameter().isSequenced())) {
                        if (dyncheck) {
                            //We don't have a real type so get the one declared in the parameter
                            exprType = arg.getParameter().getType();
                        }
                        if (sequencedType == null) {
                            sequencedType=exprType;
                        } else {
                            // Widen the element type to the union of all listed elements.
                            ArrayList<ProducedType> cases = new ArrayList<ProducedType>(2);
                            Util.addToUnion(cases, sequencedType);
                            Util.addToUnion(cases, exprType);
                            if (cases.size() > 1) {
                                UnionType ut = new UnionType(that.getUnit());
                                ut.setCaseTypes(cases);
                                sequencedType = ut.getType();
                            } else {
                                sequencedType = cases.get(0);
                            }
                        }
                        if (!opened) gen.out("[");
                        opened=true;
                    }
                    int boxType = gen.boxUnboxStart(expr.getTerm(), arg.getParameter());
                    if (dyncheck) {
                        TypeUtils.generateDynamicCheck(((Tree.ListedArgument) arg).getExpression(),
                                arg.getParameter().getType(), gen);
                    } else {
                        arg.visit(gen);
                    }
                    gen.boxUnboxEnd(boxType);
                } else if (arg instanceof Tree.SpreadArgument || arg instanceof Tree.Comprehension) {
                    if (arg instanceof Tree.SpreadArgument) {
                        expr = ((Tree.SpreadArgument) arg).getExpression();
                    } else {
                        expr = null;
                    }
                    if (!first) {
                        // Close the sequence built so far and chain the spread/comprehension onto it.
                        gen.closeSequenceWithReifiedType(that,
                                gen.getTypeUtils().wrapAsIterableArguments(sequencedType));
                        gen.out(".chain(");
                        sequencedType=null;
                    }
                    if (arg instanceof Tree.SpreadArgument) {
                        int boxType = gen.boxUnboxStart(expr.getTerm(), arg.getParameter());
                        arg.visit(gen);
                        gen.boxUnboxEnd(boxType);
                    } else {
                        ((Tree.Comprehension)arg).visit(gen);
                    }
                    if (!first) {
                        gen.out(",");
                        if (expr == null) {
                            //it's a comprehension
                            TypeUtils.printTypeArguments(that,
                                    gen.getTypeUtils().wrapAsIterableArguments(arg.getTypeModel()), gen);
                        } else {
                            ProducedType spreadType = TypeUtils.findSupertype(gen.getTypeUtils().sequential, expr.getTypeModel());
                            TypeUtils.printTypeArguments(that, spreadType.getTypeArguments(), gen);
                        }
                        gen.out(")");
                    }
                    if (arg instanceof Tree.Comprehension) {
                        // A comprehension must be last; nothing can follow it.
                        break;
                    }
                }
                first = false;
            }
            if (sequencedType != null) {
                gen.closeSequenceWithReifiedType(that,
                        gen.getTypeUtils().wrapAsIterableArguments(sequencedType));
            }
        }
    }

    /** Generate the code to create a native js object. */
    void nativeObject(Tree.NamedArgumentList argList) {
        if (argList.getSequencedArgument() == null) {
            // Pure named arguments -> a JS object literal.
            gen.out("{");
            boolean first = true;
            for (Tree.NamedArgument arg : argList.getNamedArguments()) {
                if (first) { first = false; } else { gen.out(","); }
                gen.out(arg.getIdentifier().getText(), ":");
                arg.visit(gen);
            }
            gen.out("}");
        } else {
            // Sequenced argument -> a JS array literal; if named arguments are also
            // present, wrap in an IIFE that sets them as properties on the array.
            String arr = null;
            if (argList.getNamedArguments().size() > 0) {
                gen.out("function()");
                gen.beginBlock();
                arr = names.createTempVariable();
                gen.out("var ", arr, "=");
            }
            gen.out("[");
            boolean first = true;
            for (Tree.PositionalArgument arg : argList.getSequencedArgument().getPositionalArguments()) {
                if (first) { first = false; } else { gen.out(","); }
                arg.visit(gen);
            }
            gen.out("]");
            if (argList.getNamedArguments().size() > 0) {
                gen.endLine(true);
                for (Tree.NamedArgument arg : argList.getNamedArguments()) {
                    gen.out(arr, ".", arg.getIdentifier().getText(), "=");
                    arg.visit(gen);
                    gen.endLine(true);
                }
                gen.out("return ", arr, ";");
                gen.endBlock();
                gen.out("()");
            }
        }
    }
}
|
Use internal `dyntype` function to instantiate dynamic types #173
|
src/main/java/com/redhat/ceylon/compiler/js/InvocationGenerator.java
|
Use internal `dyntype` function to instantiate dynamic types #173
|
|
Java
|
apache-2.0
|
f2a97f3cd51362ad94596b01f672d35cedd86438
| 0
|
apache/tapestry-5,apache/tapestry-5,apache/tapestry-5,apache/tapestry-5,apache/tapestry-5
|
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.ioc.internal.services;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.tapestry5.ioc.Invokable;
import org.apache.tapestry5.ioc.ObjectCreator;
import org.apache.tapestry5.ioc.internal.util.CollectionFactory;
import org.apache.tapestry5.ioc.services.PerThreadValue;
import org.apache.tapestry5.ioc.services.PerthreadManager;
import org.apache.tapestry5.ioc.services.RegistryShutdownHub;
import org.apache.tapestry5.ioc.services.ThreadCleanupListener;
import org.slf4j.Logger;
@SuppressWarnings("all")
public class PerthreadManagerImpl implements PerthreadManager
{
    /**
     * Per-thread list of cleanup callbacks. Stored as a {@link PerThreadValue}
     * so it lives in — and is discarded with — the same per-thread map as all
     * other per-thread state (see {@link #cleanup()}).
     */
    private final PerThreadValue<List<Runnable>> callbacksValue;

    /** Lazily creates one map per thread to hold all per-thread values. */
    private static class MapHolder extends ThreadLocal<Map>
    {
        @Override
        protected Map initialValue()
        {
            return CollectionFactory.newMap();
        }
    }

    private final Logger logger;

    private final MapHolder holder = new MapHolder();

    /** Source of unique keys for values/creators created without an explicit key. */
    private final AtomicInteger uuidGenerator = new AtomicInteger();

    /** Flipped once during registry shutdown; volatile for cross-thread visibility. */
    private volatile boolean shutdown = false;

    public PerthreadManagerImpl(Logger logger)
    {
        this.logger = logger;

        callbacksValue = createValue();
    }

    /**
     * Arranges for {@link #cleanup()} to run, and for further per-thread storage
     * to be effectively disabled, when the registry shuts down.
     */
    public void registerForShutdown(RegistryShutdownHub hub)
    {
        hub.addRegistryShutdownListener(new Runnable()
        {
            @Override
            public void run()
            {
                cleanup();
                shutdown = true;
            }
        });
    }

    private Map getPerthreadMap()
    {
        // This is a degenerate case; it may not even exist; but if during registry shutdown somehow code executes
        // that attempts to create new values or add new listeners, those go into a new map instance that is
        // not referenced (and so immediately GCed).
        if (shutdown)
        {
            return CollectionFactory.newMap();
        }

        return holder.get();
    }

    /** Returns this thread's callback list, creating and storing it on first use. */
    private List<Runnable> getCallbacks()
    {
        List<Runnable> result = callbacksValue.get();

        if (result == null)
        {
            result = CollectionFactory.newList();
            callbacksValue.set(result);
        }

        return result;
    }

    @Override
    public void addThreadCleanupListener(final ThreadCleanupListener listener)
    {
        assert listener != null;

        // Adapt the legacy listener interface to the Runnable-based callback API.
        addThreadCleanupCallback(new Runnable()
        {
            @Override
            public void run()
            {
                listener.threadDidCleanup();
            }
        });
    }

    @Override
    public void addThreadCleanupCallback(Runnable callback)
    {
        assert callback != null;

        getCallbacks().add(callback);
    }

    /**
     * Instructs the hub to notify all its listeners (for the current thread).
     * It also discards its list of listeners.
     */
    @Override
    public void cleanup()
    {
        List<Runnable> callbacks = getCallbacks();

        callbacksValue.set(null);

        for (Runnable callback : callbacks)
        {
            try
            {
                callback.run();
            } catch (Exception ex)
            {
                logger.warn(String.format("Error invoking callback %s: %s", callback, ex),
                        ex);
            }
        }

        // Listeners should not re-add themselves or store any per-thread state
        // here, it will be lost.

        // Discard the per-thread map of values, including the key that stores
        // the listeners. This means that if a listener attempts to register
        // new listeners, the new listeners will not be triggered and will be
        // released to the GC.
        holder.remove();
    }

    /**
     * Sentinel stored in place of null so that a value explicitly set to null
     * can be distinguished from an absent value ({@code exists()}).
     * Declared final: it is a constant and must never be reassigned.
     */
    private static final Object NULL_VALUE = new Object();

    <T> ObjectCreator<T> createValue(final Object key, final ObjectCreator<T> delegate)
    {
        return new DefaultObjectCreator<T>(key, delegate);
    }

    public <T> ObjectCreator<T> createValue(ObjectCreator<T> delegate)
    {
        return createValue(uuidGenerator.getAndIncrement(), delegate);
    }

    <T> PerThreadValue<T> createValue(final Object key)
    {
        // Parameterized (was a raw-type instantiation) to avoid an unchecked conversion.
        return new DefaultPerThreadValue<T>(key);
    }

    @Override
    public <T> PerThreadValue<T> createValue()
    {
        return createValue(uuidGenerator.getAndIncrement());
    }

    @Override
    public void run(Runnable runnable)
    {
        assert runnable != null;

        try
        {
            runnable.run();
        } finally
        {
            cleanup();
        }
    }

    @Override
    public <T> T invoke(Invokable<T> invokable)
    {
        try
        {
            return invokable.invoke();
        } finally
        {
            cleanup();
        }
    }

    /** {@link PerThreadValue} backed by a single entry of the per-thread map. */
    private final class DefaultPerThreadValue<T> implements PerThreadValue<T>
    {
        private final Object key;

        DefaultPerThreadValue(final Object key)
        {
            this.key = key;
        }

        @Override
        public T get()
        {
            return get(null);
        }

        @Override
        public T get(T defaultValue)
        {
            Map map = getPerthreadMap();

            Object storedValue = map.get(key);

            if (storedValue == null)
            {
                return defaultValue;
            }

            if (storedValue == NULL_VALUE)
            {
                return null;
            }

            return (T) storedValue;
        }

        @Override
        public T set(T newValue)
        {
            // Store the sentinel for null so exists() still reports the key as present.
            getPerthreadMap().put(key, newValue == null ? NULL_VALUE : newValue);

            return newValue;
        }

        @Override
        public boolean exists()
        {
            return getPerthreadMap().containsKey(key);
        }
    }

    /** Memoizing {@link ObjectCreator}: delegates once per thread, then caches the result. */
    private final class DefaultObjectCreator<T> implements ObjectCreator<T>
    {
        private final Object key;

        private final ObjectCreator<T> delegate;

        DefaultObjectCreator(final Object key, final ObjectCreator<T> delegate)
        {
            this.key = key;
            this.delegate = delegate;
        }

        @Override
        public T createObject()
        {
            Map map = getPerthreadMap();

            T storedValue = (T) map.get(key);

            if (storedValue != null)
            {
                return (storedValue == NULL_VALUE) ? null : storedValue;
            }

            T newValue = delegate.createObject();

            map.put(key, newValue == null ? NULL_VALUE : newValue);

            return newValue;
        }
    }
}
|
tapestry-ioc/src/main/java/org/apache/tapestry5/ioc/internal/services/PerthreadManagerImpl.java
|
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.ioc.internal.services;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.tapestry5.ioc.Invokable;
import org.apache.tapestry5.ioc.ObjectCreator;
import org.apache.tapestry5.ioc.internal.util.CollectionFactory;
import org.apache.tapestry5.ioc.services.PerThreadValue;
import org.apache.tapestry5.ioc.services.PerthreadManager;
import org.apache.tapestry5.ioc.services.RegistryShutdownHub;
import org.apache.tapestry5.ioc.services.ThreadCleanupListener;
import org.slf4j.Logger;
@SuppressWarnings("all")
public class PerthreadManagerImpl implements PerthreadManager
{
    /**
     * Per-thread list of cleanup callbacks, stored in the same per-thread map
     * as all other values so it is discarded together with them in {@link #cleanup()}.
     */
    private final PerThreadValue<List<Runnable>> callbacksValue;

    /** Lazily creates one map per thread to hold all per-thread values. */
    private static class MapHolder extends ThreadLocal<Map>
    {
        @Override
        protected Map initialValue()
        {
            return CollectionFactory.newMap();
        }
    }

    private final Logger logger;

    private final MapHolder holder = new MapHolder();

    /** Source of unique keys for values/creators created without an explicit key. */
    private final AtomicInteger uuidGenerator = new AtomicInteger();

    /** Set once at registry shutdown. */
    private final AtomicBoolean shutdown = new AtomicBoolean();

    public PerthreadManagerImpl(Logger logger)
    {
        this.logger = logger;

        callbacksValue = createValue();
    }

    /** Arranges for {@link #cleanup()} to run when the registry shuts down. */
    public void registerForShutdown(RegistryShutdownHub hub)
    {
        hub.addRegistryShutdownListener(new Runnable()
        {
            @Override
            public void run()
            {
                cleanup();
                shutdown.set(true);
            }
        });
    }

    private Map getPerthreadMap()
    {
        // This is a degenerate case; it may not even exist; but if during registry shutdown somehow code executes
        // that attempts to create new values or add new listeners, those go into a new map instance that is
        // not referenced (and so immediately GCed).
        if (shutdown.get())
        {
            return CollectionFactory.newMap();
        }

        return holder.get();
    }

    /** Returns this thread's callback list, creating and storing it on first use. */
    private List<Runnable> getCallbacks()
    {
        List<Runnable> result = callbacksValue.get();

        if (result == null)
        {
            result = CollectionFactory.newList();
            callbacksValue.set(result);
        }

        return result;
    }

    @Override
    public void addThreadCleanupListener(final ThreadCleanupListener listener)
    {
        assert listener != null;

        // Adapt the legacy listener interface to the Runnable-based callback API.
        addThreadCleanupCallback(new Runnable()
        {
            @Override
            public void run()
            {
                listener.threadDidCleanup();
            }
        });
    }

    @Override
    public void addThreadCleanupCallback(Runnable callback)
    {
        assert callback != null;

        getCallbacks().add(callback);
    }

    /**
     * Instructs the hub to notify all its listeners (for the current thread).
     * It also discards its list of listeners.
     */
    @Override
    public void cleanup()
    {
        List<Runnable> callbacks = getCallbacks();

        callbacksValue.set(null);

        for (Runnable callback : callbacks)
        {
            try
            {
                callback.run();
            } catch (Exception ex)
            {
                logger.warn(String.format("Error invoking callback %s: %s", callback, ex),
                        ex);
            }
        }

        // Listeners should not re-add themselves or store any per-thread state
        // here, it will be lost.

        // Discard the per-thread map of values, including the key that stores
        // the listeners. This means that if a listener attempts to register
        // new listeners, the new listeners will not be triggered and will be
        // released to the GC.
        holder.remove();
    }

    /**
     * Sentinel stored in place of null so that a value explicitly set to null can
     * be distinguished from an absent one. Declared final: it is a constant.
     */
    private static final Object NULL_VALUE = new Object();

    <T> ObjectCreator<T> createValue(final Object key, final ObjectCreator<T> delegate)
    {
        // Memoizing creator: delegates once per thread, then caches the result.
        return new ObjectCreator<T>()
        {
            public T createObject()
            {
                Map map = getPerthreadMap();

                T storedValue = (T) map.get(key);

                if (storedValue != null)
                {
                    return (storedValue == NULL_VALUE) ? null : storedValue;
                }

                T newValue = delegate.createObject();

                map.put(key, newValue == null ? NULL_VALUE : newValue);

                return newValue;
            }
        };
    }

    public <T> ObjectCreator<T> createValue(ObjectCreator<T> delegate)
    {
        return createValue(uuidGenerator.getAndIncrement(), delegate);
    }

    <T> PerThreadValue<T> createValue(final Object key)
    {
        // Value backed by a single entry of the per-thread map.
        return new PerThreadValue<T>()
        {
            @Override
            public T get()
            {
                return get(null);
            }

            @Override
            public T get(T defaultValue)
            {
                Map map = getPerthreadMap();

                Object storedValue = map.get(key);

                if (storedValue == null)
                {
                    return defaultValue;
                }

                if (storedValue == NULL_VALUE)
                {
                    return null;
                }

                return (T) storedValue;
            }

            @Override
            public T set(T newValue)
            {
                // Store the sentinel for null so exists() still reports the key as present.
                getPerthreadMap().put(key, newValue == null ? NULL_VALUE : newValue);

                return newValue;
            }

            @Override
            public boolean exists()
            {
                return getPerthreadMap().containsKey(key);
            }
        };
    }

    @Override
    public <T> PerThreadValue<T> createValue()
    {
        return createValue(uuidGenerator.getAndIncrement());
    }

    @Override
    public void run(Runnable runnable)
    {
        assert runnable != null;

        try
        {
            runnable.run();
        } finally
        {
            cleanup();
        }
    }

    @Override
    public <T> T invoke(Invokable<T> invokable)
    {
        try
        {
            return invokable.invoke();
        } finally
        {
            cleanup();
        }
    }
}
|
TAP5-2477: improve performance of org.apache.tapestry5.ioc.internal.services.PerthreadManagerImpl.getPerthreadMap(), extract classes
|
tapestry-ioc/src/main/java/org/apache/tapestry5/ioc/internal/services/PerthreadManagerImpl.java
|
TAP5-2477: improve performance of org.apache.tapestry5.ioc.internal.services.PerthreadManagerImpl.getPerthreadMap(), extract classes
|
|
Java
|
apache-2.0
|
f9486d46acbd1ba293cadef41401031010c2996d
| 0
|
zcmoore/plp-grinder,Progressive-Learning-Platform/plp-grinder,zcmoore/plp-grinder,Progressive-Learning-Platform/plp-grinder
|
package edu.asu.plp;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Pattern;
import edu.asu.plp.compile.lex.LexException;
import edu.asu.util.Strings;
/**
 * A single lexical token: its {@link Type} (matched by regex) and its raw text.
 * Tokens are produced from strings via {@link #makeTokens(List)}.
 */
public class Token
{
    /** Regex fragments shared between {@link Type} members. */
    public static interface Groups
    {
        // Single-character structural tokens, pre-escaped for regex use.
        public static final String[] CONTROL_TOKENS = new String[] { "\\.", "\\(", "\\)",
                "\\{", "}", "\\[", "]", ";", "," };
    }

    /**
     * Lexical categories, each defined by a regex. Declaration order matters:
     * the Token constructor assigns the FIRST matching type, so more specific
     * categories must precede general ones (e.g. literals before REFERNCE).
     */
    public static enum Type
    {
        // "\\.", "\\(", "\\)", "\\{", "\\}", "\\[", "\\]", ";"
        CONTROL(Groups.CONTROL_TOKENS),
        LITERAL_INT("(0(x|b|d))?(\\d)+"),
        LITERAL_LONG("(\\d)+[lL]"),
        LITERAL_FLOAT("(((\\d+)?(\\.\\d+))|((\\d+)(\\.\\d+)?))[fF]"),
        LITERAL_DOUBLE("(\\d+)?(\\.\\d+)"),
        LITERAL_CHAR("'[a-z A-Z]?'"),
        LITERAL_STRING("\"([^\"\\\\\\n\\r]|\\\\.)*\""),
        LITERAL_BOOLEAN("true|false"),
        LITERAL_NULL("null"),
        OPERATOR("((\\+\\+|--|\\+|<<|>>|-|\\/|\\*|\\||&)=?)|="),
        COMPARATOR(">|>=|<|<=|&&|\\|\\||==|instanceof"),
        TYPE("boolean|long|int|byte|short|char|double|float|void"),
        MODIFIER_ACCESS("public|private|protected|static"),
        MODIFIER_BEHAVIOUR("final|volitile|transient|synchronized|native|abstract|throws"),
        MODIFIER_INHERITENCE("extends|implements"),
        ACTION("return|continue|break|throw|new|assert|strictfp"),
        CONSTRUCT_BLOCK("if|else|do|while|switch|case:|default|for|try|catch|finally"),
        CONSTRUCT_TYPE_DEF("class|interface|enum"),
        SPECIAL_ORGANIZATION("package"),
        SPECIAL_RESERVED("goto|const"),
        SPECIAL_IMPORT("import"),
        REFERNCE("[a-zA-Z]+[a-zA-Z\\d]*"),
        // Union of everything this compiler does not (yet) support; checked first
        // by the Token constructor so such tokens fail fast.
        UNSUPPORTED(LITERAL_LONG, LITERAL_FLOAT, LITERAL_DOUBLE, LITERAL_CHAR,
                LITERAL_STRING, "\\/", MODIFIER_INHERITENCE, SPECIAL_RESERVED,
                "try|catch|finally|enum|interface|assert|new|throw", SPECIAL_IMPORT,
                "instanceof|double|float|volitile|transient|synchronized|native|abstract|throws|:");

        // Public and mutable for historical reasons; treat as read-only.
        public String regex;

        private Type(String regex)
        {
            this.regex = regex;
        }

        private Type(Object first, Object... objects)
        {
            this.regex = compoundRegex(first, objects);
        }

        private Type(String[] strings)
        {
            this(strings[0], (Object[]) Arrays.copyOfRange(strings, 1, strings.length));
        }

        /** @return true if the whole token matches this type's regex */
        public boolean matches(String token)
        {
            return token.matches(regex);
        }

        /**
         * Builds an alternation regex {@code (a)|(b)|...} from Types (their regex
         * is used) and arbitrary objects (their toString() is used).
         */
        public static String compoundRegex(Object first, Object... objects)
        {
            StringBuilder regexBuilder = new StringBuilder();

            regexBuilder.append("(");
            if (first instanceof Type)
                regexBuilder.append(((Type) first).regex);
            else
                regexBuilder.append(first.toString());
            regexBuilder.append(")");

            for (Object object : objects)
            {
                regexBuilder.append("|(");
                if (object instanceof Type)
                    regexBuilder.append(((Type) object).regex);
                else
                    regexBuilder.append(object.toString());
                regexBuilder.append(")");
            }

            return regexBuilder.toString();
        }
    }

    public static final Pattern STRING_LITERAL_PATTERN = Pattern
            .compile(Type.LITERAL_STRING.regex);

    private Type type;
    private String value;

    /**
     * Converts each string into one or more tokens; blank strings are skipped.
     *
     * @throws LexException if a string is unsupported or matches no Type
     */
    public static List<Token> makeTokens(List<String> strings) throws LexException
    {
        List<Token> tokens = new LinkedList<>();

        for (String string : strings)
        {
            List<Token> token = makeToken(string);
            if (token != null)
                tokens.addAll(token);
        }

        return tokens;
    }

    /**
     * Tokenizes a single string. If it matches no Type as a whole, it is split
     * on operators/comparators and retried; if splitting makes no progress the
     * original exception is rethrown (prevents infinite recursion).
     */
    private static List<Token> makeToken(String string) throws LexException
    {
        if (string.trim().length() == 0)
            return null;

        try
        {
            Token token = new Token(string);
            return Collections.<Token> singletonList(token);
        }
        catch (LexException e)
        {
            // NOTE(review): matching on the exception message is fragile; a
            // dedicated exception subtype would be more robust.
            if (e.getMessage().startsWith("Type not found for"))
            {
                String regex = Type.compoundRegex(Type.OPERATOR, Type.COMPARATOR);
                List<String> dividedToken = Strings.splitAndRetain(string, regex);

                if (dividedToken.size() > 1)
                    return makeTokens(dividedToken);
                else
                    throw e;
            }
            else
            {
                throw e;
            }
        }
    }

    /**
     * Creates a token from trimmed text, assigning the first matching Type.
     *
     * @throws LexException if the token is unsupported or matches no Type
     */
    public Token(String token) throws LexException
    {
        token = token.trim();

        if (Type.UNSUPPORTED.matches(token))
            throw new LexException("Unsupported Token: " + token);

        this.value = token;

        for (Type type : Type.values())
        {
            if (type.matches(token))
            {
                this.type = type;
                break;
            }
        }

        if (type == null)
            throw new LexException("Type not found for: " + token);
    }

    @Override
    public String toString()
    {
        return type + " " + value;
    }

    public Type getType()
    {
        return type;
    }

    public void setType(Type type)
    {
        this.type = type;
    }

    public String getValue()
    {
        return value;
    }

    public void setValue(String value)
    {
        this.value = value;
    }
}
|
src/edu/asu/plp/Token.java
|
package edu.asu.plp;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Pattern;
import edu.asu.plp.compile.lex.LexException;
import edu.asu.util.Strings;
/**
 * A single lexical token: its {@link Type} (matched by regex) and its raw text.
 * Tokens are produced from strings via {@link #makeTokens(List)}.
 */
public class Token
{
    /** Regex fragments shared between {@link Type} members. */
    public static interface Groups
    {
        // Single-character structural tokens, pre-escaped for regex use.
        // "," added so comma tokens are recognized as CONTROL instead of
        // failing lexing entirely.
        public static final String[] CONTROL_TOKENS = new String[] { "\\.", "\\(", "\\)",
                "\\{", "}", "\\[", "]", ";", "," };
    }

    /**
     * Lexical categories, each defined by a regex. Declaration order matters:
     * the Token constructor assigns the FIRST matching type.
     */
    public static enum Type
    {
        // "\\.", "\\(", "\\)", "\\{", "\\}", "\\[", "\\]", ";"
        CONTROL(Groups.CONTROL_TOKENS),
        LITERAL_INT("(0(x|b|d))?(\\d)+"),
        LITERAL_LONG("(\\d)+[lL]"),
        LITERAL_FLOAT("(((\\d+)?(\\.\\d+))|((\\d+)(\\.\\d+)?))[fF]"),
        LITERAL_DOUBLE("(\\d+)?(\\.\\d+)"),
        LITERAL_CHAR("'[a-z A-Z]?'"),
        LITERAL_STRING("\"([^\"\\\\\\n\\r]|\\\\.)*\""),
        LITERAL_BOOLEAN("true|false"),
        LITERAL_NULL("null"),
        OPERATOR("((\\+\\+|--|\\+|<<|>>|-|\\/|\\*|\\||&)=?)|="),
        COMPARATOR(">|>=|<|<=|&&|\\|\\||==|instanceof"),
        TYPE("boolean|long|int|byte|short|char|double|float|void"),
        MODIFIER_ACCESS("public|private|protected|static"),
        MODIFIER_BEHAVIOUR("final|volitile|transient|synchronized|native|abstract|throws"),
        MODIFIER_INHERITENCE("extends|implements"),
        ACTION("return|continue|break|throw|new|assert|strictfp"),
        CONSTRUCT_BLOCK("if|else|do|while|switch|case:|default|for|try|catch|finally"),
        CONSTRUCT_TYPE_DEF("class|interface|enum"),
        SPECIAL_ORGANIZATION("package"),
        SPECIAL_RESERVED("goto|const"),
        SPECIAL_IMPORT("import"),
        REFERNCE("[a-zA-Z]+[a-zA-Z\\d]*"),
        // Union of everything this compiler does not (yet) support; checked
        // first by the Token constructor so such tokens fail fast.
        UNSUPPORTED(LITERAL_LONG, LITERAL_FLOAT, LITERAL_DOUBLE, LITERAL_CHAR,
                LITERAL_STRING, "\\/", MODIFIER_INHERITENCE, SPECIAL_RESERVED,
                "try|catch|finally|enum|interface|assert|new|throw", SPECIAL_IMPORT,
                "instanceof|double|float|volitile|transient|synchronized|native|abstract|throws|:");

        // Public and mutable for historical reasons; treat as read-only.
        public String regex;

        private Type(String regex)
        {
            this.regex = regex;
        }

        private Type(Object first, Object... objects)
        {
            this.regex = compoundRegex(first, objects);
        }

        private Type(String[] strings)
        {
            this(strings[0], (Object[]) Arrays.copyOfRange(strings, 1, strings.length));
        }

        /** @return true if the whole token matches this type's regex */
        public boolean matches(String token)
        {
            return token.matches(regex);
        }

        /**
         * Builds an alternation regex {@code (a)|(b)|...} from Types (their
         * regex is used) and arbitrary objects (their toString() is used).
         */
        public static String compoundRegex(Object first, Object... objects)
        {
            StringBuilder regexBuilder = new StringBuilder();

            regexBuilder.append("(");
            if (first instanceof Type)
                regexBuilder.append(((Type) first).regex);
            else
                regexBuilder.append(first.toString());
            regexBuilder.append(")");

            for (Object object : objects)
            {
                regexBuilder.append("|(");
                if (object instanceof Type)
                    regexBuilder.append(((Type) object).regex);
                else
                    regexBuilder.append(object.toString());
                regexBuilder.append(")");
            }

            return regexBuilder.toString();
        }
    }

    public static final Pattern STRING_LITERAL_PATTERN = Pattern
            .compile(Type.LITERAL_STRING.regex);

    private Type type;
    private String value;

    /**
     * Converts each string into one or more tokens; blank strings are skipped.
     *
     * @throws LexException if a string is unsupported or matches no Type
     */
    public static List<Token> makeTokens(List<String> strings) throws LexException
    {
        List<Token> tokens = new LinkedList<>();

        for (String string : strings)
        {
            List<Token> token = makeToken(string);
            if (token != null)
                tokens.addAll(token);
        }

        return tokens;
    }

    private static List<Token> makeToken(String string) throws LexException
    {
        if (string.trim().length() == 0)
            return null;

        try
        {
            Token token = new Token(string);
            return Collections.<Token> singletonList(token);
        }
        catch (LexException e)
        {
            if (e.getMessage().startsWith("Type not found for"))
            {
                String regex = Type.compoundRegex(Type.OPERATOR, Type.COMPARATOR);
                List<String> dividedToken = Strings.splitAndRetain(string, regex);

                // BUG FIX: if splitting made no progress (a single piece equal
                // to the input), recursing would loop forever; rethrow instead.
                if (dividedToken.size() > 1)
                    return makeTokens(dividedToken);
                else
                    throw e;
            }
            else
            {
                throw e;
            }
        }
    }

    /**
     * Creates a token from trimmed text, assigning the first matching Type.
     *
     * @throws LexException if the token is unsupported or matches no Type
     */
    public Token(String token) throws LexException
    {
        token = token.trim();

        if (Type.UNSUPPORTED.matches(token))
            throw new LexException("Unsupported Token: " + token);

        this.value = token;

        for (Type type : Type.values())
        {
            if (type.matches(token))
            {
                this.type = type;
                break;
            }
        }

        if (type == null)
            throw new LexException("Type not found for: " + token);
    }

    @Override
    public String toString()
    {
        return type + " " + value;
    }

    public Type getType()
    {
        return type;
    }

    public void setType(Type type)
    {
        this.type = type;
    }

    public String getValue()
    {
        return value;
    }

    public void setValue(String value)
    {
        this.value = value;
    }
}
|
Fix invalid token handling
Fixes infinite loop
|
src/edu/asu/plp/Token.java
|
Fix invalid token handling
|
|
Java
|
apache-2.0
|
bfaf938aeaa54f8aa1bab7513b73b03f26d832f9
| 0
|
estatio/estatio,estatio/estatio,estatio/estatio,estatio/estatio
|
package org.estatio.capex.dom.impmgr;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.inject.Inject;
import org.apache.isis.applib.annotation.DomainObject;
import org.apache.isis.applib.annotation.Nature;
import org.estatio.capex.dom.invoice.IncomingInvoiceItem;
import org.estatio.capex.dom.invoice.IncomingInvoiceItemRepository;
import org.estatio.capex.dom.order.OrderItem;
import org.estatio.capex.dom.order.OrderItemRepository;
import org.estatio.capex.dom.project.Project;
import org.estatio.capex.dom.project.ProjectRepository;
@DomainObject(
        nature = Nature.VIEW_MODEL,
        objectType = "org.estatio.capex.dom.impmgr.OrderInvoiceImportReport"
)
// View model that sums order and incoming-invoice item counts/amounts per
// (project, period) combination.
public class OrderInvoiceImportReport {

    public String title(){
        return "Report of orders and incoming invoices";
    }

    // Builds one report line per project/period present on either orders or invoices.
    //
    // NOTE(review): the counters and totals below are initialised ONCE, outside
    // the project/period loops, so each successive line carries cumulative
    // figures across all previously processed periods and projects rather than
    // per-period figures — confirm whether this is intended.
    public List<OrderInvoiceImportReportLine> getLines(){
        List<OrderInvoiceImportReportLine> result = new ArrayList<>();
        Integer numberOfOrderlines = 0;
        Integer numberOfinvoicelines = 0;
        BigDecimal orderNetTotal = BigDecimal.ZERO;
        BigDecimal orderVatTotal = BigDecimal.ZERO;
        BigDecimal orderGrossTotal = BigDecimal.ZERO;
        BigDecimal invoiceNetTotal = BigDecimal.ZERO;
        BigDecimal invoiceVatTotal = BigDecimal.ZERO;
        BigDecimal invoiceGrossTotal = BigDecimal.ZERO;
        for (Project project : projectRepository.listAll()) {
            for (String period : periodsPresent(project)) {
                for (OrderItem orderItem : orderItemRepository.findByProject(project)) {
                    if (orderItem.getPeriod().equals(period)) {
                        numberOfOrderlines = numberOfOrderlines + 1;
                        orderNetTotal = orderNetTotal.add(orderItem.getNetAmount());
                        // vat amount may be null; skip it rather than NPE.
                        // NOTE(review): net/gross amounts are NOT null-guarded —
                        // confirm they can never be null.
                        orderVatTotal = orderItem.getVatAmount()==null ? orderVatTotal : orderVatTotal.add(orderItem.getVatAmount());
                        orderGrossTotal = orderGrossTotal.add(orderItem.getGrossAmount());
                    }
                }
                for (IncomingInvoiceItem invoiceItem : incomingInvoiceItemRepository.findByProject(project)) {
                    if (invoiceItem.getPeriod().equals(period)) {
                        numberOfinvoicelines = numberOfinvoicelines + 1;
                        invoiceNetTotal = invoiceNetTotal.add(invoiceItem.getNetAmount());
                        // vat amount may be null; skip it rather than NPE.
                        invoiceVatTotal = invoiceItem.getVatAmount()==null ? invoiceVatTotal : invoiceVatTotal.add(invoiceItem.getVatAmount());
                        invoiceGrossTotal = invoiceGrossTotal.add(invoiceItem.getGrossAmount());
                    }
                }
                result.add(new OrderInvoiceImportReportLine(
                        project.getReference(),
                        period,
                        numberOfOrderlines,
                        orderNetTotal,
                        orderVatTotal,
                        orderGrossTotal,
                        numberOfinvoicelines,
                        invoiceNetTotal,
                        invoiceVatTotal,
                        invoiceGrossTotal
                        )
                );
            }
        }
        return result;
    }

    // Returns the sorted union of periods appearing on the project's order
    // items and invoice items (duplicates removed).
    private List<String> periodsPresent(final Project project){
        List<String> result = new ArrayList<>();
        for (OrderItem item : orderItemRepository.findByProject(project)){
            if (!result.contains(item.getPeriod())){
                result.add(item.getPeriod());
            }
        }
        for (IncomingInvoiceItem item : incomingInvoiceItemRepository.findByProject(project)){
            if (!result.contains(item.getPeriod())){
                result.add(item.getPeriod());
            }
        }
        Collections.sort(result);
        return result;
    }

    @Inject
    ProjectRepository projectRepository;

    @Inject
    OrderItemRepository orderItemRepository;

    @Inject
    IncomingInvoiceItemRepository incomingInvoiceItemRepository;

}
|
estatioapp/module/capex/dom/src/main/java/org/estatio/capex/dom/impmgr/OrderInvoiceImportReport.java
|
package org.estatio.capex.dom.impmgr;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.inject.Inject;
import org.apache.isis.applib.annotation.DomainObject;
import org.apache.isis.applib.annotation.Nature;
import org.estatio.capex.dom.invoice.IncomingInvoiceItem;
import org.estatio.capex.dom.invoice.IncomingInvoiceItemRepository;
import org.estatio.capex.dom.order.OrderItem;
import org.estatio.capex.dom.order.OrderItemRepository;
import org.estatio.capex.dom.project.Project;
import org.estatio.capex.dom.project.ProjectRepository;
@DomainObject(
        nature = Nature.VIEW_MODEL,
        objectType = "org.estatio.capex.dom.impmgr.OrderInvoiceImportReport"
)
// View model that sums order and incoming-invoice item counts/amounts per
// (project, period) combination.
public class OrderInvoiceImportReport {

    public String title(){
        return "Report of orders and incoming invoices";
    }

    // Builds one report line per project/period present on either orders or invoices.
    //
    // NOTE(review): the counters and totals below are initialised ONCE, outside
    // the project/period loops, so each successive line carries cumulative
    // figures across all previously processed periods and projects — confirm
    // whether per-period figures were intended.
    public List<OrderInvoiceImportReportLine> getLines(){
        List<OrderInvoiceImportReportLine> result = new ArrayList<>();
        Integer numberOfOrderlines = 0;
        Integer numberOfinvoicelines = 0;
        BigDecimal orderNetTotal = BigDecimal.ZERO;
        BigDecimal orderVatTotal = BigDecimal.ZERO;
        BigDecimal orderGrossTotal = BigDecimal.ZERO;
        BigDecimal invoiceNetTotal = BigDecimal.ZERO;
        BigDecimal invoiceVatTotal = BigDecimal.ZERO;
        BigDecimal invoiceGrossTotal = BigDecimal.ZERO;
        for (Project project : projectRepository.listAll()) {
            for (String period : periodsPresent(project)) {
                for (OrderItem orderItem : orderItemRepository.findByProject(project)) {
                    if (orderItem.getPeriod().equals(period)) {
                        numberOfOrderlines = numberOfOrderlines + 1;
                        orderNetTotal = orderNetTotal.add(orderItem.getNetAmount());
                        // BUG FIX: getVatAmount() may be null; adding it blindly
                        // threw a NullPointerException. Skip null vat amounts.
                        orderVatTotal = orderItem.getVatAmount() == null ? orderVatTotal : orderVatTotal.add(orderItem.getVatAmount());
                        orderGrossTotal = orderGrossTotal.add(orderItem.getGrossAmount());
                    }
                }
                for (IncomingInvoiceItem invoiceItem : incomingInvoiceItemRepository.findByProject(project)) {
                    if (invoiceItem.getPeriod().equals(period)) {
                        numberOfinvoicelines = numberOfinvoicelines + 1;
                        invoiceNetTotal = invoiceNetTotal.add(invoiceItem.getNetAmount());
                        // BUG FIX: same null-guard for invoice vat amounts.
                        invoiceVatTotal = invoiceItem.getVatAmount() == null ? invoiceVatTotal : invoiceVatTotal.add(invoiceItem.getVatAmount());
                        invoiceGrossTotal = invoiceGrossTotal.add(invoiceItem.getGrossAmount());
                    }
                }
                result.add(new OrderInvoiceImportReportLine(
                        project.getReference(),
                        period,
                        numberOfOrderlines,
                        orderNetTotal,
                        orderVatTotal,
                        orderGrossTotal,
                        numberOfinvoicelines,
                        invoiceNetTotal,
                        invoiceVatTotal,
                        invoiceGrossTotal
                        )
                );
            }
        }
        return result;
    }

    // Returns the sorted union of periods appearing on the project's order
    // items and invoice items (duplicates removed).
    private List<String> periodsPresent(final Project project){
        List<String> result = new ArrayList<>();
        for (OrderItem item : orderItemRepository.findByProject(project)){
            if (!result.contains(item.getPeriod())){
                result.add(item.getPeriod());
            }
        }
        for (IncomingInvoiceItem item : incomingInvoiceItemRepository.findByProject(project)){
            if (!result.contains(item.getPeriod())){
                result.add(item.getPeriod());
            }
        }
        Collections.sort(result);
        return result;
    }

    @Inject
    ProjectRepository projectRepository;

    @Inject
    OrderItemRepository orderItemRepository;

    @Inject
    IncomingInvoiceItemRepository incomingInvoiceItemRepository;

}
|
fixes possible NPE in order invoice report
|
estatioapp/module/capex/dom/src/main/java/org/estatio/capex/dom/impmgr/OrderInvoiceImportReport.java
|
fixes possible NPE in order invoice report
|
|
Java
|
apache-2.0
|
5e0eefe5d3698883b83cabfcaef5af9d87b9f726
| 0
|
ClarisseSan/StockHawk
|
package com.sam_chordas.android.stockhawk.data;
import net.simonvt.schematic.annotation.Database;
import net.simonvt.schematic.annotation.Table;
/**
* Created by sam_chordas on 10/5/15.
*/
@Database(version = QuoteDatabase.VERSION)
// Schematic database definition: declares the schema version and table names.
public class QuoteDatabase {
  // Annotation holder only; never instantiated.
  private QuoteDatabase(){}

  // Schema version; bump on any table change so Schematic regenerates the DB.
  public static final int VERSION = 13;

  // Table of current stock quotes (columns defined by QuoteColumns).
  @Table(QuoteColumns.class) public static final String QUOTES = "quotes";

  // Table of stock-quote history rows (columns defined by QuoteStockHistoryColumns).
  @Table(QuoteStockHistoryColumns.class) public static final String QUOTES_HISTORY = "history";
}
|
app/src/main/java/com/sam_chordas/android/stockhawk/data/QuoteDatabase.java
|
package com.sam_chordas.android.stockhawk.data;
import net.simonvt.schematic.annotation.Database;
import net.simonvt.schematic.annotation.Table;
/**
* Created by sam_chordas on 10/5/15.
*/
@Database(version = QuoteDatabase.VERSION)
// Schematic database definition: declares the schema version and table names.
public class QuoteDatabase {
  // Annotation holder only; never instantiated.
  private QuoteDatabase(){}

  // Schema version; bump on any table change so Schematic regenerates the DB.
  public static final int VERSION = 12;

  // Table of current stock quotes (columns defined by QuoteColumns).
  @Table(QuoteColumns.class) public static final String QUOTES = "quotes";

  // Table of stock-quote history rows (columns defined by QuoteStockHistoryColumns).
  @Table(QuoteStockHistoryColumns.class) public static final String QUOTES_HISTORY = "history";
}
|
update database version
|
app/src/main/java/com/sam_chordas/android/stockhawk/data/QuoteDatabase.java
|
update database version
|
|
Java
|
apache-2.0
|
902ae946a1618f7423617c08c3cd7a5960f14e9d
| 0
|
apache/openwebbeans,apache/openwebbeans,apache/openwebbeans
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.webbeans.el;
import java.beans.FeatureDescriptor;
import java.util.Iterator;
import java.util.Set;
import javax.el.ELContext;
import javax.el.ELException;
import javax.el.ELResolver;
import javax.enterprise.context.Dependent;
import javax.enterprise.context.spi.CreationalContext;
import javax.enterprise.inject.spi.Bean;
import org.apache.webbeans.config.WebBeansContext;
import org.apache.webbeans.container.BeanManagerImpl;
/**
* JSF or JSP expression language a.k.a EL resolver.
*
* <p>
* EL is registered with the JSF in faces-config.xml if there exist a faces-config.xml
* in the application location <code>WEB-INF/</code>. Otherwise it is registered with
* JspApplicationContext at start-up.
* </p>
*
* <p>
* All <code>@Dependent</code> scoped contextual instances created during an EL
* expression evaluation are destroyed when the evaluation completes.
* </p>
*
* @version $Rev$ $Date$
*
*/
public class WebBeansELResolver extends ELResolver
{
private WebBeansContext webBeansContext;
public WebBeansELResolver()
{
webBeansContext = WebBeansContext.getInstance();
}
/**
* {@inheritDoc}
*/
@Override
public Class<?> getCommonPropertyType(ELContext arg0, Object arg1)
{
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Iterator<FeatureDescriptor> getFeatureDescriptors(ELContext arg0, Object arg1)
{
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Class<?> getType(ELContext arg0, Object arg1, Object arg2) throws ELException
{
return null;
}
/**
* {@inheritDoc}
*/
@Override
@SuppressWarnings({"unchecked","deprecation"})
public Object getValue(ELContext context, Object obj, Object property) throws ELException
{
// Check if the OWB actually got used in this application
if (!webBeansContext.getBeanManagerImpl().isInUse())
{
return null;
}
//Bean instance
Object contextualInstance = null;
ELContextStore elContextStore = null;
if (obj == null)
{
//Name of the bean
String name = (String) property;
//Local store, create if not exist
elContextStore = ELContextStore.getInstance(true);
contextualInstance = elContextStore.findBeanByName(name);
if(contextualInstance != null)
{
context.setPropertyResolved(true);
return contextualInstance;
}
//Manager instance
BeanManagerImpl manager = elContextStore.getBeanManager();
//Get beans
Set<Bean<?>> beans = manager.getBeans(name);
//Found?
if(beans != null && !beans.isEmpty())
{
//Managed bean
Bean<Object> bean = (Bean<Object>)beans.iterator().next();
if(bean.getScope().equals(Dependent.class))
{
contextualInstance = getDependentContextualInstance(manager, elContextStore, context, bean);
}
else
{
// now we check for NormalScoped beans
contextualInstance = getNormalScopedContextualInstance(manager, elContextStore, context, bean, name);
}
}
}
return contextualInstance;
}
protected Object getNormalScopedContextualInstance(BeanManagerImpl manager, ELContextStore store, ELContext context, Bean<Object> bean, String beanName)
{
CreationalContext<Object> creationalContext = manager.createCreationalContext(bean);
Object contextualInstance = manager.getReference(bean, Object.class, creationalContext);
if (contextualInstance != null)
{
context.setPropertyResolved(true);
//Adding into store
store.addNormalScoped(beanName, contextualInstance);
}
return contextualInstance;
}
protected Object getDependentContextualInstance(BeanManagerImpl manager, ELContextStore store, ELContext context, Bean<Object> bean)
{
    // Reuse a @Dependent instance created earlier in this EL evaluation.
    Object instance = store.getDependent(bean);
    if (instance != null)
    {
        context.setPropertyResolved(true);
        return instance;
    }
    // Not cached yet: create it and register it (together with its
    // CreationalContext) so it can be destroyed when evaluation ends.
    CreationalContext<Object> creationalContext = manager.createCreationalContext(bean);
    instance = manager.getReference(bean, Object.class, creationalContext);
    if (instance != null)
    {
        context.setPropertyResolved(true);
        store.addDependent(bean, instance, creationalContext);
    }
    return instance;
}
/**
 * {@inheritDoc}
 * <p>
 * Always returns {@code false}; this resolver never marks a resolved
 * name as read-only (writes are simply ignored by {@code setValue}).
 */
@Override
public boolean isReadOnly(ELContext arg0, Object arg1, Object arg2) throws ELException
{
    return false;
}
/**
 * {@inheritDoc}
 * <p>
 * Intentionally a no-op: CDI bean names cannot be reassigned through EL.
 */
@Override
public void setValue(ELContext arg0, Object arg1, Object arg2, Object arg3) throws ELException
{
}
}
|
webbeans-impl/src/main/java/org/apache/webbeans/el/WebBeansELResolver.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.webbeans.el;
import java.beans.FeatureDescriptor;
import java.util.Iterator;
import java.util.Set;
import javax.el.ELContext;
import javax.el.ELException;
import javax.el.ELResolver;
import javax.el.PropertyNotFoundException;
import javax.el.PropertyNotWritableException;
import javax.enterprise.context.Dependent;
import javax.enterprise.context.spi.CreationalContext;
import javax.enterprise.inject.spi.Bean;
import org.apache.webbeans.config.WebBeansContext;
import org.apache.webbeans.container.BeanManagerImpl;
/**
* JSF or JSP expression language a.k.a EL resolver.
*
* <p>
* EL is registered with the JSF in faces-config.xml if there exist a faces-config.xml
* in the application location <code>WEB-INF/</code>. Otherwise it is registered with
* JspApplicationContext at start-up.
* </p>
*
* <p>
* All <code>@Dependent</code> scoped contextual instances created during an EL
* expression evaluation are destroyed when the evaluation completes.
* </p>
*
* @version $Rev$ $Date$
*
*/
public class WebBeansELResolver extends ELResolver
{
private WebBeansContext webBeansContext;
public WebBeansELResolver()
{
webBeansContext = WebBeansContext.getInstance();
}
/**
* {@inheritDoc}
*/
@Override
public Class<?> getCommonPropertyType(ELContext arg0, Object arg1)
{
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Iterator<FeatureDescriptor> getFeatureDescriptors(ELContext arg0, Object arg1)
{
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Class<?> getType(ELContext arg0, Object arg1, Object arg2) throws NullPointerException, PropertyNotFoundException, ELException
{
return null;
}
/**
* {@inheritDoc}
*/
@Override
@SuppressWarnings({"unchecked","deprecation"})
public Object getValue(ELContext context, Object obj, Object property) throws NullPointerException, PropertyNotFoundException, ELException
{
// Check if the OWB actually got used in this application
if (!webBeansContext.getBeanManagerImpl().isInUse())
{
return null;
}
//Bean instance
Object contextualInstance = null;
ELContextStore elContextStore = null;
if (obj == null)
{
//Name of the bean
String name = (String) property;
//Local store, create if not exist
elContextStore = ELContextStore.getInstance(true);
contextualInstance = elContextStore.findBeanByName(name);
if(contextualInstance != null)
{
context.setPropertyResolved(true);
return contextualInstance;
}
//Manager instance
BeanManagerImpl manager = elContextStore.getBeanManager();
//Get beans
Set<Bean<?>> beans = manager.getBeans(name);
//Found?
if(beans != null && !beans.isEmpty())
{
//Managed bean
Bean<Object> bean = (Bean<Object>)beans.iterator().next();
if(bean.getScope().equals(Dependent.class))
{
contextualInstance = getDependentContextualInstance(manager, elContextStore, context, bean);
}
else
{
// now we check for NormalScoped beans
contextualInstance = getNormalScopedContextualInstance(manager, elContextStore, context, bean, name);
}
}
}
return contextualInstance;
}
protected Object getNormalScopedContextualInstance(BeanManagerImpl manager, ELContextStore store, ELContext context, Bean<Object> bean, String beanName)
{
CreationalContext<Object> creationalContext = manager.createCreationalContext(bean);
Object contextualInstance = manager.getReference(bean, Object.class, creationalContext);
if (contextualInstance != null)
{
context.setPropertyResolved(true);
//Adding into store
store.addNormalScoped(beanName, contextualInstance);
}
return contextualInstance;
}
protected Object getDependentContextualInstance(BeanManagerImpl manager, ELContextStore store, ELContext context, Bean<Object> bean)
{
Object contextualInstance = store.getDependent(bean);
if(contextualInstance != null)
{
//Object found on the store
context.setPropertyResolved(true);
}
else
{
// If no contextualInstance found on the store
CreationalContext<Object> creationalContext = manager.createCreationalContext(bean);
contextualInstance = manager.getReference(bean, Object.class, creationalContext);
if (contextualInstance != null)
{
context.setPropertyResolved(true);
//Adding into store
store.addDependent(bean, contextualInstance, creationalContext);
}
}
return contextualInstance;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isReadOnly(ELContext arg0, Object arg1, Object arg2) throws NullPointerException, PropertyNotFoundException, ELException
{
return false;
}
/**
* {@inheritDoc}
*/
@Override
public void setValue(ELContext arg0, Object arg1, Object arg2, Object arg3) throws NullPointerException, PropertyNotFoundException, PropertyNotWritableException, ELException
{
}
}
|
remove unchecked exceptions from our methods
git-svn-id: 6e2e506005f11016269006bf59d22f905406eeba@1307826 13f79535-47bb-0310-9956-ffa450edef68
|
webbeans-impl/src/main/java/org/apache/webbeans/el/WebBeansELResolver.java
|
remove unchecked exceptions from our methods
|
|
Java
|
apache-2.0
|
3c6bf14c16a4ddac28afcdbbe6a546ccd305c9d5
| 0
|
dreis2211/spring-boot,spring-projects/spring-boot,chrylis/spring-boot,vpavic/spring-boot,scottfrederick/spring-boot,wilkinsona/spring-boot,chrylis/spring-boot,vpavic/spring-boot,htynkn/spring-boot,htynkn/spring-boot,htynkn/spring-boot,aahlenst/spring-boot,dreis2211/spring-boot,wilkinsona/spring-boot,Buzzardo/spring-boot,wilkinsona/spring-boot,spring-projects/spring-boot,spring-projects/spring-boot,htynkn/spring-boot,mdeinum/spring-boot,aahlenst/spring-boot,htynkn/spring-boot,mdeinum/spring-boot,scottfrederick/spring-boot,Buzzardo/spring-boot,mdeinum/spring-boot,scottfrederick/spring-boot,htynkn/spring-boot,aahlenst/spring-boot,Buzzardo/spring-boot,scottfrederick/spring-boot,chrylis/spring-boot,aahlenst/spring-boot,dreis2211/spring-boot,michael-simons/spring-boot,michael-simons/spring-boot,scottfrederick/spring-boot,michael-simons/spring-boot,Buzzardo/spring-boot,dreis2211/spring-boot,michael-simons/spring-boot,spring-projects/spring-boot,michael-simons/spring-boot,dreis2211/spring-boot,Buzzardo/spring-boot,wilkinsona/spring-boot,spring-projects/spring-boot,mdeinum/spring-boot,aahlenst/spring-boot,vpavic/spring-boot,mdeinum/spring-boot,Buzzardo/spring-boot,aahlenst/spring-boot,wilkinsona/spring-boot,chrylis/spring-boot,chrylis/spring-boot,vpavic/spring-boot,mdeinum/spring-boot,spring-projects/spring-boot,wilkinsona/spring-boot,dreis2211/spring-boot,scottfrederick/spring-boot,vpavic/spring-boot,chrylis/spring-boot,michael-simons/spring-boot,vpavic/spring-boot
|
/*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.jdbc;
import java.lang.reflect.Method;
import java.sql.SQLException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
import javax.sql.DataSource;
import com.zaxxer.hikari.HikariDataSource;
import oracle.jdbc.datasource.OracleDataSource;
import oracle.ucp.jdbc.PoolDataSource;
import oracle.ucp.jdbc.PoolDataSourceImpl;
import org.apache.commons.dbcp2.BasicDataSource;
import org.h2.jdbcx.JdbcDataSource;
import org.postgresql.ds.PGSimpleDataSource;
import org.springframework.beans.BeanUtils;
import org.springframework.core.ResolvableType;
import org.springframework.jdbc.datasource.SimpleDriverDataSource;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
/**
* Convenience class for building a {@link DataSource}. Provides a limited subset of the
* properties supported by a typical {@link DataSource} as well as detection logic to pick
* the most suitable pooling {@link DataSource} implementation.
* <p>
* The following pooling {@link DataSource} implementations are supported by this builder.
* When no {@link #type(Class) type} has been explicitly set, the first available pool
* implementation will be picked:
* <ul>
* <li>Hikari ({@code com.zaxxer.hikari.HikariDataSource})</li>
* <li>Tomcat JDBC Pool ({@code org.apache.tomcat.jdbc.pool.DataSource})</li>
* <li>Apache DBCP2 ({@code org.apache.commons.dbcp2.BasicDataSource})</li>
* <li>Oracle UCP ({@code oracle.ucp.jdbc.PoolDataSourceImpl})</li>
* </ul>
* <p>
* The following non-pooling {@link DataSource} implementations can be used when
* explicitly set as a {@link #type(Class) type}:
* <ul>
* <li>Spring's {@code SimpleDriverDataSource}
* ({@code org.springframework.jdbc.datasource.SimpleDriverDataSource})</li>
* <li>Oracle ({@code oracle.jdbc.datasource.OracleDataSource})</li>
* <li>H2 ({@code org.h2.jdbcx.JdbcDataSource})</li>
* <li>Postgres ({@code org.postgresql.ds.PGSimpleDataSource})</li>
* <li>Any {@code DataSource} implementation with appropriately named methods</li>
* </ul>
* <p>
* This class is commonly used in an {@code @Bean} method and often combined with
* {@code @ConfigurationProperties}.
*
* @param <T> the {@link DataSource} type being built
* @author Dave Syer
* @author Madhura Bhave
* @author Fabio Grassi
* @author Phillip Webb
* @since 2.0.0
* @see #create()
* @see #create(ClassLoader)
* @see #derivedFrom(DataSource)
*/
public final class DataSourceBuilder<T extends DataSource> {
private final ClassLoader classLoader;
private final Map<DataSourceProperty, String> values = new HashMap<>();
private Class<T> type;
private final T deriveFrom;
private DataSourceBuilder(ClassLoader classLoader) {
this.classLoader = classLoader;
this.deriveFrom = null;
}
@SuppressWarnings("unchecked")
private DataSourceBuilder(T deriveFrom) {
Assert.notNull(deriveFrom, "DataSource must not be null");
this.classLoader = deriveFrom.getClass().getClassLoader();
this.type = (Class<T>) deriveFrom.getClass();
this.deriveFrom = deriveFrom;
}
/**
* Set the {@link DataSource} type that should be built.
* @param <D> the datasource type
* @param type the datasource type
* @return this builder
*/
@SuppressWarnings("unchecked")
public <D extends DataSource> DataSourceBuilder<D> type(Class<D> type) {
this.type = (Class<T>) type;
return (DataSourceBuilder<D>) this;
}
/**
* Set the URL that should be used when building the datasource.
* @param url the JDBC url
* @return this builder
*/
public DataSourceBuilder<T> url(String url) {
set(DataSourceProperty.URL, url);
return this;
}
/**
* Set the driver class name that should be used when building the datasource.
* @param driverClassName the driver class name
* @return this builder
*/
public DataSourceBuilder<T> driverClassName(String driverClassName) {
set(DataSourceProperty.DRIVER_CLASS_NAME, driverClassName);
return this;
}
/**
* Set the username that should be used when building the datasource.
* @param username the user name
* @return this builder
*/
public DataSourceBuilder<T> username(String username) {
set(DataSourceProperty.USERNAME, username);
return this;
}
/**
* Set the password that should be used when building the datasource.
* @param password the password
* @return this builder
*/
public DataSourceBuilder<T> password(String password) {
set(DataSourceProperty.PASSWORD, password);
return this;
}
private void set(DataSourceProperty property, String value) {
this.values.put(property, value);
}
/**
* Return a newly built {@link DataSource} instance.
* @return the built datasource
*/
public T build() {
DataSourceProperties<T> properties = DataSourceProperties.forType(this.classLoader, this.type);
DataSourceProperties<T> deriveFromProperties = (this.deriveFrom != null)
? DataSourceProperties.forType(this.classLoader, this.type) : null;
Class<? extends T> instanceType = (this.type != null) ? this.type : properties.getDataSourceInstanceType();
T dataSource = BeanUtils.instantiateClass(instanceType);
Set<DataSourceProperty> applied = new HashSet<>();
for (DataSourceProperty property : DataSourceProperty.values()) {
if (this.values.containsKey(property)) {
String value = this.values.get(property);
properties.set(dataSource, property, value);
applied.add(property);
}
else if (deriveFromProperties != null && properties.canSet(property)) {
String value = deriveFromProperties.get(this.deriveFrom, property);
if (value != null) {
properties.set(dataSource, property, value);
applied.add(property);
}
}
}
if (!applied.contains(DataSourceProperty.DRIVER_CLASS_NAME)
&& properties.canSet(DataSourceProperty.DRIVER_CLASS_NAME)
&& this.values.containsKey(DataSourceProperty.URL)) {
String url = this.values.get(DataSourceProperty.URL);
DatabaseDriver driver = DatabaseDriver.fromJdbcUrl(url);
properties.set(dataSource, DataSourceProperty.DRIVER_CLASS_NAME, driver.getDriverClassName());
}
return dataSource;
}
/**
* Create a new {@link DataSourceBuilder} instance.
* @return a new datasource builder instance
*/
public static DataSourceBuilder<?> create() {
return create(null);
}
/**
* Create a new {@link DataSourceBuilder} instance.
* @param classLoader the classloader used to discover preferred settings
* @return a new {@link DataSource} builder instance
*/
public static DataSourceBuilder<?> create(ClassLoader classLoader) {
return new DataSourceBuilder<>(classLoader);
}
/**
* Create a new {@link DataSourceBuilder} instance derived from the specified data
* source. The returned builder can be used to build the same type of
* {@link DataSource} with {@code username}, {@code password}, {@code url} and
* {@code driverClassName} properties copied from the original when not specifically
* set.
* @param dataSource the source {@link DataSource}
* @return a new {@link DataSource} builder
* @since 2.5.0
*/
public static DataSourceBuilder<?> derivedFrom(DataSource dataSource) {
if (dataSource instanceof EmbeddedDatabase) {
try {
dataSource = dataSource.unwrap(DataSource.class);
}
catch (SQLException ex) {
throw new IllegalStateException("Unable to unwrap embedded database", ex);
}
}
return new DataSourceBuilder<>(dataSource);
}
/**
* Find the {@link DataSource} type preferred for the given classloader.
* @param classLoader the classloader used to discover preferred settings
* @return the preferred {@link DataSource} type
*/
public static Class<? extends DataSource> findType(ClassLoader classLoader) {
MappedDataSourceProperties<?> mappings = MappedDataSourceProperties.forType(classLoader, null);
return (mappings != null) ? mappings.getDataSourceInstanceType() : null;
}
/**
* An individual DataSource property supported by the builder.
*/
private enum DataSourceProperty {
URL("url"),
DRIVER_CLASS_NAME("driverClassName"),
USERNAME("username"),
PASSWORD("password");
private final String name;
DataSourceProperty(String name) {
this.name = name;
}
@Override
public String toString() {
return this.name;
}
Method findSetter(Class<?> type) {
return ReflectionUtils.findMethod(type, "set" + StringUtils.capitalize(this.name), String.class);
}
Method findGetter(Class<?> type) {
return ReflectionUtils.findMethod(type, "get" + StringUtils.capitalize(this.name), String.class);
}
}
private interface DataSourceProperties<T extends DataSource> {
Class<? extends T> getDataSourceInstanceType();
boolean canSet(DataSourceProperty property);
void set(T dataSource, DataSourceProperty property, String value);
String get(T dataSource, DataSourceProperty property);
static <T extends DataSource> DataSourceProperties<T> forType(ClassLoader classLoader, Class<T> type) {
MappedDataSourceProperties<T> mapped = MappedDataSourceProperties.forType(classLoader, type);
return (mapped != null) ? mapped : new ReflectionDataSourceProperties<>(type);
}
}
private static class MappedDataSourceProperties<T extends DataSource> implements DataSourceProperties<T> {
private final Map<DataSourceProperty, MappedDataSourceProperty<T, ?>> mappedProperties = new HashMap<>();
private final Class<T> dataSourceType;
@SuppressWarnings("unchecked")
MappedDataSourceProperties() {
this.dataSourceType = (Class<T>) ResolvableType.forClass(MappedDataSourceProperties.class, getClass())
.resolveGeneric();
}
@Override
public Class<? extends T> getDataSourceInstanceType() {
return this.dataSourceType;
}
protected void add(DataSourceProperty property, Getter<T, String> getter, Setter<T, String> setter) {
add(property, String.class, getter, setter);
}
protected <V> void add(DataSourceProperty property, Class<V> type, Getter<T, V> getter, Setter<T, V> setter) {
this.mappedProperties.put(property, new MappedDataSourceProperty<>(property, type, getter, setter));
}
@Override
public boolean canSet(DataSourceProperty property) {
return this.mappedProperties.containsKey(property);
}
@Override
public void set(T dataSource, DataSourceProperty property, String value) {
MappedDataSourceProperty<T, ?> mappedProperty = getMapping(property);
mappedProperty.set(dataSource, value);
}
@Override
public String get(T dataSource, DataSourceProperty property) {
MappedDataSourceProperty<T, ?> mappedProperty = getMapping(property);
return mappedProperty.get(dataSource);
}
private MappedDataSourceProperty<T, ?> getMapping(DataSourceProperty property) {
MappedDataSourceProperty<T, ?> mappedProperty = this.mappedProperties.get(property);
UnsupportedDataSourcePropertyException.throwIf(mappedProperty == null,
() -> "No mapping found for " + property);
return mappedProperty;
}
static <T extends DataSource> MappedDataSourceProperties<T> forType(ClassLoader classLoader, Class<T> type) {
MappedDataSourceProperties<T> pooled = lookupPooled(classLoader, type);
if (type == null || pooled != null) {
return pooled;
}
return lookupBasic(classLoader, type);
}
private static <T extends DataSource> MappedDataSourceProperties<T> lookupPooled(ClassLoader classLoader,
Class<T> type) {
MappedDataSourceProperties<T> result = null;
result = lookup(classLoader, type, result, "com.zaxxer.hikari.HikariDataSource",
HikariDataSourceProperties::new);
result = lookup(classLoader, type, result, "org.apache.tomcat.jdbc.pool.DataSource",
TomcatPoolDataSourceProperties::new);
result = lookup(classLoader, type, result, "org.apache.commons.dbcp2.BasicDataSource",
MappedDbcp2DataSource::new);
result = lookup(classLoader, type, result, "oracle.ucp.jdbc.PoolDataSourceImpl",
OraclePoolDataSourceProperties::new, "oracle.jdbc.OracleConnection");
return result;
}
private static <T extends DataSource> MappedDataSourceProperties<T> lookupBasic(ClassLoader classLoader,
Class<T> dataSourceType) {
MappedDataSourceProperties<T> result = null;
result = lookup(classLoader, dataSourceType, result,
"org.springframework.jdbc.datasource.SimpleDriverDataSource",
() -> new SimpleDataSourceProperties());
result = lookup(classLoader, dataSourceType, result, "oracle.jdbc.datasource.OracleDataSource",
OracleDataSourceProperties::new);
result = lookup(classLoader, dataSourceType, result, "org.h2.jdbcx.JdbcDataSource",
H2DataSourceProperties::new);
result = lookup(classLoader, dataSourceType, result, "org.postgresql.ds.PGSimpleDataSource",
PostgresDataSourceProperties::new);
return result;
}
@SuppressWarnings("unchecked")
private static <T extends DataSource> MappedDataSourceProperties<T> lookup(ClassLoader classLoader,
Class<T> dataSourceType, MappedDataSourceProperties<T> existing, String dataSourceClassName,
Supplier<MappedDataSourceProperties<?>> propertyMappingsSupplier, String... requiredClassNames) {
if (existing != null || !allPresent(classLoader, dataSourceClassName, requiredClassNames)) {
return existing;
}
MappedDataSourceProperties<?> propertyMappings = propertyMappingsSupplier.get();
return (dataSourceType == null
|| propertyMappings.getDataSourceInstanceType().isAssignableFrom(dataSourceType))
? (MappedDataSourceProperties<T>) propertyMappings : null;
}
private static boolean allPresent(ClassLoader classLoader, String dataSourceClassName,
String[] requiredClassNames) {
boolean result = ClassUtils.isPresent(dataSourceClassName, classLoader);
for (String requiredClassName : requiredClassNames) {
result = result && ClassUtils.isPresent(requiredClassName, classLoader);
}
return result;
}
}
private static class MappedDataSourceProperty<T extends DataSource, V> {
private final DataSourceProperty property;
private final Class<V> type;
private final Getter<T, V> getter;
private final Setter<T, V> setter;
MappedDataSourceProperty(DataSourceProperty property, Class<V> type, Getter<T, V> getter, Setter<T, V> setter) {
this.property = property;
this.type = type;
this.getter = getter;
this.setter = setter;
}
void set(T dataSource, String value) {
try {
UnsupportedDataSourcePropertyException.throwIf(this.setter == null,
() -> "No setter mapped for '" + this.property + "' property");
this.setter.set(dataSource, convertFromString(value));
}
catch (SQLException ex) {
throw new IllegalStateException(ex);
}
}
String get(T dataSource) {
try {
UnsupportedDataSourcePropertyException.throwIf(this.getter == null,
() -> "No getter mapped for '" + this.property + "' property");
return convertToString(this.getter.get(dataSource));
}
catch (SQLException ex) {
throw new IllegalStateException(ex);
}
}
@SuppressWarnings("unchecked")
private V convertFromString(String value) {
if (String.class.equals(this.type)) {
return (V) value;
}
if (Class.class.equals(this.type)) {
return (V) ClassUtils.resolveClassName(value, null);
}
throw new IllegalStateException("Unsupported value type " + this.type);
}
private String convertToString(V value) {
if (String.class.equals(this.type)) {
return (String) value;
}
if (Class.class.equals(this.type)) {
return ((Class<?>) value).getName();
}
throw new IllegalStateException("Unsupported value type " + this.type);
}
}
private static class ReflectionDataSourceProperties<T extends DataSource> implements DataSourceProperties<T> {
private final Map<DataSourceProperty, Method> getters;
private final Map<DataSourceProperty, Method> setters;
private final Class<T> dataSourceType;
ReflectionDataSourceProperties(Class<T> dataSourceType) {
Assert.state(dataSourceType != null, "No supported DataSource type found");
Map<DataSourceProperty, Method> getters = new HashMap<>();
Map<DataSourceProperty, Method> setters = new HashMap<>();
for (DataSourceProperty property : DataSourceProperty.values()) {
putIfNotNull(getters, property, property.findGetter(dataSourceType));
putIfNotNull(setters, property, property.findSetter(dataSourceType));
}
this.dataSourceType = dataSourceType;
this.getters = Collections.unmodifiableMap(getters);
this.setters = Collections.unmodifiableMap(setters);
}
private void putIfNotNull(Map<DataSourceProperty, Method> map, DataSourceProperty property, Method method) {
if (method != null) {
map.put(property, method);
}
}
@Override
public Class<T> getDataSourceInstanceType() {
return this.dataSourceType;
}
@Override
public boolean canSet(DataSourceProperty property) {
return this.setters.containsKey(property);
}
@Override
public void set(T dataSource, DataSourceProperty property, String value) {
Method method = getMethod(property, this.setters);
ReflectionUtils.invokeMethod(method, dataSource, value);
}
@Override
public String get(T dataSource, DataSourceProperty property) {
Method method = getMethod(property, this.getters);
return (String) ReflectionUtils.invokeMethod(method, dataSource);
}
private Method getMethod(DataSourceProperty property, Map<DataSourceProperty, Method> setters2) {
Method method = setters2.get(property);
UnsupportedDataSourcePropertyException.throwIf(method == null,
() -> "Unable to find suitable method for " + property);
ReflectionUtils.makeAccessible(method);
return method;
}
}
@FunctionalInterface
private interface Getter<T, V> {
V get(T instance) throws SQLException;
}
@FunctionalInterface
private interface Setter<T, V> {
void set(T instance, V value) throws SQLException;
}
/**
* {@link DataSourceProperties} for Hikari.
*/
private static class HikariDataSourceProperties extends MappedDataSourceProperties<HikariDataSource> {
HikariDataSourceProperties() {
add(DataSourceProperty.URL, HikariDataSource::getJdbcUrl, HikariDataSource::setJdbcUrl);
add(DataSourceProperty.DRIVER_CLASS_NAME, HikariDataSource::getDriverClassName,
HikariDataSource::setDriverClassName);
add(DataSourceProperty.USERNAME, HikariDataSource::getUsername, HikariDataSource::setUsername);
add(DataSourceProperty.PASSWORD, HikariDataSource::getPassword, HikariDataSource::setPassword);
}
}
/**
* {@link DataSourceProperties} for Tomcat Pool.
*/
private static class TomcatPoolDataSourceProperties
extends MappedDataSourceProperties<org.apache.tomcat.jdbc.pool.DataSource> {
TomcatPoolDataSourceProperties() {
add(DataSourceProperty.URL, org.apache.tomcat.jdbc.pool.DataSource::getUrl,
org.apache.tomcat.jdbc.pool.DataSource::setUrl);
add(DataSourceProperty.DRIVER_CLASS_NAME, org.apache.tomcat.jdbc.pool.DataSource::getDriverClassName,
org.apache.tomcat.jdbc.pool.DataSource::setDriverClassName);
add(DataSourceProperty.USERNAME, org.apache.tomcat.jdbc.pool.DataSource::getUsername,
org.apache.tomcat.jdbc.pool.DataSource::setUsername);
add(DataSourceProperty.PASSWORD, org.apache.tomcat.jdbc.pool.DataSource::getPassword,
org.apache.tomcat.jdbc.pool.DataSource::setPassword);
}
}
/**
* {@link DataSourceProperties} for DBCP2.
*/
private static class MappedDbcp2DataSource extends MappedDataSourceProperties<BasicDataSource> {
MappedDbcp2DataSource() {
add(DataSourceProperty.URL, BasicDataSource::getUrl, BasicDataSource::setUrl);
add(DataSourceProperty.DRIVER_CLASS_NAME, BasicDataSource::getDriverClassName,
BasicDataSource::setDriverClassName);
add(DataSourceProperty.USERNAME, BasicDataSource::getUsername, BasicDataSource::setUsername);
add(DataSourceProperty.PASSWORD, BasicDataSource::getPassword, BasicDataSource::setPassword);
}
}
/**
* {@link DataSourceProperties} for Oracle Pool.
*/
private static class OraclePoolDataSourceProperties extends MappedDataSourceProperties<PoolDataSource> {
@Override
public Class<? extends PoolDataSource> getDataSourceInstanceType() {
return PoolDataSourceImpl.class;
}
OraclePoolDataSourceProperties() {
add(DataSourceProperty.URL, PoolDataSource::getURL, PoolDataSource::setURL);
add(DataSourceProperty.DRIVER_CLASS_NAME, PoolDataSource::getConnectionFactoryClassName,
PoolDataSource::setConnectionFactoryClassName);
add(DataSourceProperty.USERNAME, PoolDataSource::getUser, PoolDataSource::setUser);
add(DataSourceProperty.PASSWORD, PoolDataSource::getPassword, PoolDataSource::setPassword);
}
}
/**
* {@link DataSourceProperties} for Spring's {@link SimpleDriverDataSource}.
*/
private static class SimpleDataSourceProperties extends MappedDataSourceProperties<SimpleDriverDataSource> {
@SuppressWarnings("unchecked")
SimpleDataSourceProperties() {
add(DataSourceProperty.URL, SimpleDriverDataSource::getUrl, SimpleDriverDataSource::setUrl);
add(DataSourceProperty.DRIVER_CLASS_NAME, Class.class, (dataSource) -> dataSource.getDriver().getClass(),
(dataSource, driverClass) -> dataSource.setDriverClass(driverClass));
add(DataSourceProperty.USERNAME, SimpleDriverDataSource::getUsername, SimpleDriverDataSource::setUsername);
add(DataSourceProperty.PASSWORD, SimpleDriverDataSource::getPassword, SimpleDriverDataSource::setPassword);
}
}
/**
* {@link DataSourceProperties} for Oracle.
*/
private static class OracleDataSourceProperties extends MappedDataSourceProperties<OracleDataSource> {
OracleDataSourceProperties() {
add(DataSourceProperty.URL, OracleDataSource::getURL, OracleDataSource::setURL);
add(DataSourceProperty.USERNAME, OracleDataSource::getUser, OracleDataSource::setUser);
add(DataSourceProperty.PASSWORD, null, OracleDataSource::setPassword);
}
}
/**
* {@link DataSourceProperties} for H2.
*/
private static class H2DataSourceProperties extends MappedDataSourceProperties<JdbcDataSource> {
H2DataSourceProperties() {
add(DataSourceProperty.URL, JdbcDataSource::getUrl, JdbcDataSource::setUrl);
add(DataSourceProperty.USERNAME, JdbcDataSource::getUser, JdbcDataSource::setUser);
add(DataSourceProperty.PASSWORD, JdbcDataSource::getPassword, JdbcDataSource::setPassword);
}
}
/**
* {@link DataSourceProperties} for Postgres.
*/
private static class PostgresDataSourceProperties extends MappedDataSourceProperties<PGSimpleDataSource> {
PostgresDataSourceProperties() {
add(DataSourceProperty.URL, PGSimpleDataSource::getUrl, PGSimpleDataSource::setUrl);
add(DataSourceProperty.USERNAME, PGSimpleDataSource::getUser, PGSimpleDataSource::setUser);
add(DataSourceProperty.PASSWORD, PGSimpleDataSource::getPassword, PGSimpleDataSource::setPassword);
}
}
}
|
spring-boot-project/spring-boot/src/main/java/org/springframework/boot/jdbc/DataSourceBuilder.java
|
/*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.jdbc;
import java.lang.reflect.Method;
import java.sql.SQLException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
import javax.sql.DataSource;
import com.zaxxer.hikari.HikariDataSource;
import oracle.jdbc.datasource.OracleDataSource;
import oracle.ucp.jdbc.PoolDataSource;
import oracle.ucp.jdbc.PoolDataSourceImpl;
import org.apache.commons.dbcp2.BasicDataSource;
import org.h2.jdbcx.JdbcDataSource;
import org.postgresql.ds.PGSimpleDataSource;
import org.springframework.beans.BeanUtils;
import org.springframework.core.ResolvableType;
import org.springframework.jdbc.datasource.SimpleDriverDataSource;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabase;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StringUtils;
/**
* Convenience class for building a {@link DataSource}. Provides a limited subset of the
* properties supported by a typical {@link DataSource} as well as detection logic to pick
* the most suitable pooling {@link DataSource} implementation.
* <p>
* The following pooling {@link DataSource} implementations are supported by this builder.
* When no {@link #type(Class) type} has been explicitly set, the first available pool
* implementation will be picked:
* <ul>
* <li>Hikari ({@code com.zaxxer.hikari.HikariDataSource})</li>
* <li>Tomcat JDBC Pool ({@code org.apache.tomcat.jdbc.pool.DataSource})</li>
* <li>Apache DBCP2 ({@code org.apache.commons.dbcp2.BasicDataSource})</li>
* <li>Oracle UCP ({@code oracle.ucp.jdbc.PoolDataSourceImpl})</li>
* </ul>
* <p>
* The following non-pooling {@link DataSource} implementations can be used when
* explicitly set as a {@link #type(Class) type}:
* <ul>
* <li>Spring's {@code SimpleDriverDataSource}
* ({@code org.springframework.jdbc.datasource.SimpleDriverDataSource})</li>
* <li>Oracle ({@code oracle.jdbc.datasource.OracleDataSource})</li>
* <li>H2 ({@code org.h2.jdbcx.JdbcDataSource})</li>
* <li>Postgres ({@code org.postgresql.ds.PGSimpleDataSource})</li>
* <li>Any {@code DataSource} implementation with appropriately named methods</li>
* </ul>
* <p>
* This class is commonly used in an {@code @Bean} method and often combined with
* {@code @ConfigurationProperties}.
*
* @param <T> the {@link DataSource} type being built
* @author Dave Syer
* @author Madhura Bhave
* @author Fabio Grassi
* @author Phillip Webb
* @since 2.0.0
* @see #create()
* @see #create(ClassLoader)
* @see #derivedFrom(DataSource)
*/
public final class DataSourceBuilder<T extends DataSource> {
private final ClassLoader classLoader;
private final Map<DataSourceProperty, String> values = new HashMap<>();
private Class<T> type;
private final T deriveFrom;
private DataSourceBuilder(ClassLoader classLoader) {
this.classLoader = classLoader;
this.deriveFrom = null;
}
@SuppressWarnings("unchecked")
private DataSourceBuilder(T deriveFrom) {
Assert.notNull(deriveFrom, "DataSource must not be null");
this.classLoader = deriveFrom.getClass().getClassLoader();
this.type = (Class<T>) deriveFrom.getClass();
this.deriveFrom = deriveFrom;
}
/**
* Set the {@link DataSource} type that should be built.
* @param <D> the datasource type
* @param type the datasource type
* @return this builder
*/
@SuppressWarnings("unchecked")
public <D extends DataSource> DataSourceBuilder<D> type(Class<D> type) {
this.type = (Class<T>) type;
return (DataSourceBuilder<D>) this;
}
/**
* Set the URL that should be used when building the datasource.
* @param url the JDBC url
* @return this builder
*/
public DataSourceBuilder<T> url(String url) {
set(DataSourceProperty.URL, url);
return this;
}
/**
* Set the driver class name that should be used when building the datasource.
* @param driverClassName the driver class name
* @return this builder
*/
public DataSourceBuilder<T> driverClassName(String driverClassName) {
set(DataSourceProperty.DRIVER_CLASS_NAME, driverClassName);
return this;
}
/**
* Set the username that should be used when building the datasource.
* @param username the user name
* @return this builder
*/
public DataSourceBuilder<T> username(String username) {
set(DataSourceProperty.USERNAME, username);
return this;
}
/**
* Set the password that should be used when building the datasource.
* @param password the password
* @return this builder
*/
public DataSourceBuilder<T> password(String password) {
set(DataSourceProperty.PASSWORD, password);
return this;
}
private void set(DataSourceProperty property, String value) {
this.values.put(property, value);
}
/**
* Return a newly built {@link DataSource} instance.
* @return the built datasource
*/
public T build() {
DataSourceProperties<T> properties = DataSourceProperties.forType(this.classLoader, this.type);
DataSourceProperties<T> derriveFromProperties = (this.deriveFrom != null)
? DataSourceProperties.forType(this.classLoader, this.type) : null;
Class<? extends T> instanceType = (this.type != null) ? this.type : properties.getDataSourceInstanceType();
T dataSource = BeanUtils.instantiateClass(instanceType);
Set<DataSourceProperty> applied = new HashSet<>();
for (DataSourceProperty property : DataSourceProperty.values()) {
if (this.values.containsKey(property)) {
String value = this.values.get(property);
properties.set(dataSource, property, value);
applied.add(property);
}
else if (derriveFromProperties != null && properties.canSet(property)) {
String value = derriveFromProperties.get(this.deriveFrom, property);
if (value != null) {
properties.set(dataSource, property, value);
applied.add(property);
}
}
}
if (!applied.contains(DataSourceProperty.DRIVER_CLASS_NAME)
&& properties.canSet(DataSourceProperty.DRIVER_CLASS_NAME)
&& this.values.containsKey(DataSourceProperty.URL)) {
String url = this.values.get(DataSourceProperty.URL);
DatabaseDriver driver = DatabaseDriver.fromJdbcUrl(url);
properties.set(dataSource, DataSourceProperty.DRIVER_CLASS_NAME, driver.getDriverClassName());
}
return dataSource;
}
/**
* Create a new {@link DataSourceBuilder} instance.
* @return a new datasource builder instance
*/
public static DataSourceBuilder<?> create() {
return create(null);
}
/**
* Create a new {@link DataSourceBuilder} instance.
* @param classLoader the classloader used to discover preferred settings
* @return a new {@link DataSource} builder instance
*/
public static DataSourceBuilder<?> create(ClassLoader classLoader) {
return new DataSourceBuilder<>(classLoader);
}
/**
* Create a new {@link DataSourceBuilder} instance derived from the specified data
* source. The returned builder can be used to build the same type of
* {@link DataSource} with {@code username}, {@code password}, {@code url} and
* {@code driverClassName} properties copied from the original when not specifically
* set.
* @param dataSource the source {@link DataSource}
* @return a new {@link DataSource} builder
* @since 2.5.0
*/
public static DataSourceBuilder<?> derivedFrom(DataSource dataSource) {
if (dataSource instanceof EmbeddedDatabase) {
try {
dataSource = dataSource.unwrap(DataSource.class);
}
catch (SQLException ex) {
throw new IllegalStateException("Unable to unwap embedded database", ex);
}
}
return new DataSourceBuilder<>(dataSource);
}
/**
* Find the {@link DataSource} type preferred for the given classloader.
* @param classLoader the classloader used to discover preferred settings
* @return the preferred {@link DataSource} type
*/
public static Class<? extends DataSource> findType(ClassLoader classLoader) {
MappedDataSourceProperties<?> mappings = MappedDataSourceProperties.forType(classLoader, null);
return (mappings != null) ? mappings.getDataSourceInstanceType() : null;
}
/**
* An individual DataSource property supported by the builder.
*/
private enum DataSourceProperty {
URL("url"),
DRIVER_CLASS_NAME("driverClassName"),
USERNAME("username"),
PASSWORD("password");
private final String name;
DataSourceProperty(String name) {
this.name = name;
}
@Override
public String toString() {
return this.name;
}
Method findSetter(Class<?> type) {
return ReflectionUtils.findMethod(type, "set" + StringUtils.capitalize(this.name), String.class);
}
Method findGetter(Class<?> type) {
return ReflectionUtils.findMethod(type, "get" + StringUtils.capitalize(this.name), String.class);
}
}
private interface DataSourceProperties<T extends DataSource> {
Class<? extends T> getDataSourceInstanceType();
boolean canSet(DataSourceProperty property);
void set(T dataSource, DataSourceProperty property, String value);
String get(T dataSource, DataSourceProperty property);
static <T extends DataSource> DataSourceProperties<T> forType(ClassLoader classLoader, Class<T> type) {
MappedDataSourceProperties<T> mapped = MappedDataSourceProperties.forType(classLoader, type);
return (mapped != null) ? mapped : new ReflectionDataSourceProperties<>(type);
}
}
private static class MappedDataSourceProperties<T extends DataSource> implements DataSourceProperties<T> {
private final Map<DataSourceProperty, MappedDataSourceProperty<T, ?>> mappedProperties = new HashMap<>();
private final Class<T> dataSourceType;
@SuppressWarnings("unchecked")
MappedDataSourceProperties() {
this.dataSourceType = (Class<T>) ResolvableType.forClass(MappedDataSourceProperties.class, getClass())
.resolveGeneric();
}
@Override
public Class<? extends T> getDataSourceInstanceType() {
return this.dataSourceType;
}
protected void add(DataSourceProperty property, Getter<T, String> getter, Setter<T, String> setter) {
add(property, String.class, getter, setter);
}
protected <V> void add(DataSourceProperty property, Class<V> type, Getter<T, V> getter, Setter<T, V> setter) {
this.mappedProperties.put(property, new MappedDataSourceProperty<>(property, type, getter, setter));
}
@Override
public boolean canSet(DataSourceProperty property) {
return this.mappedProperties.containsKey(property);
}
@Override
public void set(T dataSource, DataSourceProperty property, String value) {
MappedDataSourceProperty<T, ?> mappedProperty = getMapping(property);
mappedProperty.set(dataSource, value);
}
@Override
public String get(T dataSource, DataSourceProperty property) {
MappedDataSourceProperty<T, ?> mappedProperty = getMapping(property);
return mappedProperty.get(dataSource);
}
private MappedDataSourceProperty<T, ?> getMapping(DataSourceProperty property) {
MappedDataSourceProperty<T, ?> mappedProperty = this.mappedProperties.get(property);
UnsupportedDataSourcePropertyException.throwIf(mappedProperty == null,
() -> "No mapping found for " + property);
return mappedProperty;
}
static <T extends DataSource> MappedDataSourceProperties<T> forType(ClassLoader classLoader, Class<T> type) {
MappedDataSourceProperties<T> pooled = lookupPooled(classLoader, type);
if (type == null || pooled != null) {
return pooled;
}
return lookupBasic(classLoader, type);
}
private static <T extends DataSource> MappedDataSourceProperties<T> lookupPooled(ClassLoader classLoader,
Class<T> type) {
MappedDataSourceProperties<T> result = null;
result = lookup(classLoader, type, result, "com.zaxxer.hikari.HikariDataSource",
HikariDataSourceProperties::new);
result = lookup(classLoader, type, result, "org.apache.tomcat.jdbc.pool.DataSource",
TomcatPoolDataSourceProperties::new);
result = lookup(classLoader, type, result, "org.apache.commons.dbcp2.BasicDataSource",
MappedDbcp2DataSource::new);
result = lookup(classLoader, type, result, "oracle.ucp.jdbc.PoolDataSourceImpl",
OraclePoolDataSourceProperties::new, "oracle.jdbc.OracleConnection");
return result;
}
private static <T extends DataSource> MappedDataSourceProperties<T> lookupBasic(ClassLoader classLoader,
Class<T> dataSourceType) {
MappedDataSourceProperties<T> result = null;
result = lookup(classLoader, dataSourceType, result,
"org.springframework.jdbc.datasource.SimpleDriverDataSource",
() -> new SimpleDataSourceProperties());
result = lookup(classLoader, dataSourceType, result, "oracle.jdbc.datasource.OracleDataSource",
OracleDataSourceProperties::new);
result = lookup(classLoader, dataSourceType, result, "org.h2.jdbcx.JdbcDataSource",
H2DataSourceProperties::new);
result = lookup(classLoader, dataSourceType, result, "org.postgresql.ds.PGSimpleDataSource",
PostgresDataSourceProperties::new);
return result;
}
@SuppressWarnings("unchecked")
private static <T extends DataSource> MappedDataSourceProperties<T> lookup(ClassLoader classLoader,
Class<T> dataSourceType, MappedDataSourceProperties<T> existing, String dataSourceClassName,
Supplier<MappedDataSourceProperties<?>> propertyMappingsSupplier, String... requiredClassNames) {
if (existing != null || !allPresent(classLoader, dataSourceClassName, requiredClassNames)) {
return existing;
}
MappedDataSourceProperties<?> propertyMappings = propertyMappingsSupplier.get();
return (dataSourceType == null
|| propertyMappings.getDataSourceInstanceType().isAssignableFrom(dataSourceType))
? (MappedDataSourceProperties<T>) propertyMappings : null;
}
private static boolean allPresent(ClassLoader classLoader, String dataSourceClassName,
String[] requiredClassNames) {
boolean result = ClassUtils.isPresent(dataSourceClassName, classLoader);
for (String requiredClassName : requiredClassNames) {
result = result && ClassUtils.isPresent(requiredClassName, classLoader);
}
return result;
}
}
private static class MappedDataSourceProperty<T extends DataSource, V> {
private final DataSourceProperty property;
private final Class<V> type;
private final Getter<T, V> getter;
private final Setter<T, V> setter;
MappedDataSourceProperty(DataSourceProperty property, Class<V> type, Getter<T, V> getter, Setter<T, V> setter) {
this.property = property;
this.type = type;
this.getter = getter;
this.setter = setter;
}
void set(T dataSource, String value) {
try {
UnsupportedDataSourcePropertyException.throwIf(this.setter == null,
() -> "No setter mapped for '" + this.property + "' property");
this.setter.set(dataSource, convertFromString(value));
}
catch (SQLException ex) {
throw new IllegalStateException(ex);
}
}
String get(T dataSource) {
try {
UnsupportedDataSourcePropertyException.throwIf(this.getter == null,
() -> "No getter mapped for '" + this.property + "' property");
return convertToString(this.getter.get(dataSource));
}
catch (SQLException ex) {
throw new IllegalStateException(ex);
}
}
@SuppressWarnings("unchecked")
private V convertFromString(String value) {
if (String.class.equals(this.type)) {
return (V) value;
}
if (Class.class.equals(this.type)) {
return (V) ClassUtils.resolveClassName(value, null);
}
throw new IllegalStateException("Unsupported value type " + this.type);
}
private String convertToString(V value) {
if (String.class.equals(this.type)) {
return (String) value;
}
if (Class.class.equals(this.type)) {
return ((Class<?>) value).getName();
}
throw new IllegalStateException("Unsupported value type " + this.type);
}
}
private static class ReflectionDataSourceProperties<T extends DataSource> implements DataSourceProperties<T> {
private final Map<DataSourceProperty, Method> getters;
private final Map<DataSourceProperty, Method> setters;
private Class<T> dataSourceType;
ReflectionDataSourceProperties(Class<T> dataSourceType) {
Assert.state(dataSourceType != null, "No supported DataSource type found");
Map<DataSourceProperty, Method> getters = new HashMap<>();
Map<DataSourceProperty, Method> setters = new HashMap<>();
for (DataSourceProperty property : DataSourceProperty.values()) {
putIfNotNull(getters, property, property.findGetter(dataSourceType));
putIfNotNull(setters, property, property.findSetter(dataSourceType));
}
this.dataSourceType = dataSourceType;
this.getters = Collections.unmodifiableMap(getters);
this.setters = Collections.unmodifiableMap(setters);
}
private void putIfNotNull(Map<DataSourceProperty, Method> map, DataSourceProperty property, Method method) {
if (method != null) {
map.put(property, method);
}
}
@Override
public Class<T> getDataSourceInstanceType() {
return this.dataSourceType;
}
@Override
public boolean canSet(DataSourceProperty property) {
return this.setters.containsKey(property);
}
@Override
public void set(T dataSource, DataSourceProperty property, String value) {
Method method = getMethod(property, this.setters);
ReflectionUtils.invokeMethod(method, dataSource, value);
}
@Override
public String get(T dataSource, DataSourceProperty property) {
Method method = getMethod(property, this.getters);
return (String) ReflectionUtils.invokeMethod(method, dataSource);
}
private Method getMethod(DataSourceProperty property, Map<DataSourceProperty, Method> setters2) {
Method method = setters2.get(property);
UnsupportedDataSourcePropertyException.throwIf(method == null,
() -> "Unable to find sutable method for " + property);
ReflectionUtils.makeAccessible(method);
return method;
}
}
@FunctionalInterface
private interface Getter<T, V> {
V get(T instance) throws SQLException;
}
@FunctionalInterface
private interface Setter<T, V> {
void set(T instance, V value) throws SQLException;
}
/**
* {@link MappedDataSource} for Hikari.
*/
private static class HikariDataSourceProperties extends MappedDataSourceProperties<HikariDataSource> {
HikariDataSourceProperties() {
add(DataSourceProperty.URL, HikariDataSource::getJdbcUrl, HikariDataSource::setJdbcUrl);
add(DataSourceProperty.DRIVER_CLASS_NAME, HikariDataSource::getDriverClassName,
HikariDataSource::setDriverClassName);
add(DataSourceProperty.USERNAME, HikariDataSource::getUsername, HikariDataSource::setUsername);
add(DataSourceProperty.PASSWORD, HikariDataSource::getPassword, HikariDataSource::setPassword);
}
}
/**
* {@link MappedDataSource} for Tomcat Pool.
*/
private static class TomcatPoolDataSourceProperties
extends MappedDataSourceProperties<org.apache.tomcat.jdbc.pool.DataSource> {
TomcatPoolDataSourceProperties() {
add(DataSourceProperty.URL, org.apache.tomcat.jdbc.pool.DataSource::getUrl,
org.apache.tomcat.jdbc.pool.DataSource::setUrl);
add(DataSourceProperty.DRIVER_CLASS_NAME, org.apache.tomcat.jdbc.pool.DataSource::getDriverClassName,
org.apache.tomcat.jdbc.pool.DataSource::setDriverClassName);
add(DataSourceProperty.USERNAME, org.apache.tomcat.jdbc.pool.DataSource::getUsername,
org.apache.tomcat.jdbc.pool.DataSource::setUsername);
add(DataSourceProperty.PASSWORD, org.apache.tomcat.jdbc.pool.DataSource::getPassword,
org.apache.tomcat.jdbc.pool.DataSource::setPassword);
}
}
/**
* {@link MappedDataSource} for DBCP2.
*/
private static class MappedDbcp2DataSource extends MappedDataSourceProperties<BasicDataSource> {
MappedDbcp2DataSource() {
add(DataSourceProperty.URL, BasicDataSource::getUrl, BasicDataSource::setUrl);
add(DataSourceProperty.DRIVER_CLASS_NAME, BasicDataSource::getDriverClassName,
BasicDataSource::setDriverClassName);
add(DataSourceProperty.USERNAME, BasicDataSource::getUsername, BasicDataSource::setUsername);
add(DataSourceProperty.PASSWORD, BasicDataSource::getPassword, BasicDataSource::setPassword);
}
}
/**
* {@link MappedDataSource} for Oracle Pool.
*/
private static class OraclePoolDataSourceProperties extends MappedDataSourceProperties<PoolDataSource> {
@Override
public Class<? extends PoolDataSource> getDataSourceInstanceType() {
return PoolDataSourceImpl.class;
}
OraclePoolDataSourceProperties() {
add(DataSourceProperty.URL, PoolDataSource::getURL, PoolDataSource::setURL);
add(DataSourceProperty.DRIVER_CLASS_NAME, PoolDataSource::getConnectionFactoryClassName,
PoolDataSource::setConnectionFactoryClassName);
add(DataSourceProperty.USERNAME, PoolDataSource::getUser, PoolDataSource::setUser);
add(DataSourceProperty.PASSWORD, PoolDataSource::getPassword, PoolDataSource::setPassword);
}
}
/**
* {@link MappedDataSource} for Spring's {@link SimpleDriverDataSource}.
*/
private static class SimpleDataSourceProperties extends MappedDataSourceProperties<SimpleDriverDataSource> {
@SuppressWarnings("unchecked")
SimpleDataSourceProperties() {
add(DataSourceProperty.URL, SimpleDriverDataSource::getUrl, SimpleDriverDataSource::setUrl);
add(DataSourceProperty.DRIVER_CLASS_NAME, Class.class, (dataSource) -> dataSource.getDriver().getClass(),
(dataSource, driverClass) -> dataSource.setDriverClass(driverClass));
add(DataSourceProperty.USERNAME, SimpleDriverDataSource::getUsername, SimpleDriverDataSource::setUsername);
add(DataSourceProperty.PASSWORD, SimpleDriverDataSource::getPassword, SimpleDriverDataSource::setPassword);
}
}
/**
* {@link MappedDataSource} for Oracle.
*/
	private static class OracleDataSourceProperties extends MappedDataSourceProperties<OracleDataSource> {

		OracleDataSourceProperties() {
			add(DataSourceProperty.URL, OracleDataSource::getURL, OracleDataSource::setURL);
			add(DataSourceProperty.USERNAME, OracleDataSource::getUser, OracleDataSource::setUser);
			// No getter is mapped for PASSWORD; attempting to read it from this type
			// triggers UnsupportedDataSourcePropertyException via MappedDataSourceProperty.get.
			add(DataSourceProperty.PASSWORD, null, OracleDataSource::setPassword);
		}

	}
/**
* {@link MappedDataSource} for H2.
*/
private static class H2DataSourceProperties extends MappedDataSourceProperties<JdbcDataSource> {
H2DataSourceProperties() {
add(DataSourceProperty.URL, JdbcDataSource::getUrl, JdbcDataSource::setUrl);
add(DataSourceProperty.USERNAME, JdbcDataSource::getUser, JdbcDataSource::setUser);
add(DataSourceProperty.PASSWORD, JdbcDataSource::getPassword, JdbcDataSource::setPassword);
}
}
/**
* {@link MappedDataSource} for Postgres.
*/
private static class PostgresDataSourceProperties extends MappedDataSourceProperties<PGSimpleDataSource> {
PostgresDataSourceProperties() {
add(DataSourceProperty.URL, PGSimpleDataSource::getUrl, PGSimpleDataSource::setUrl);
add(DataSourceProperty.USERNAME, PGSimpleDataSource::getUser, PGSimpleDataSource::setUser);
add(DataSourceProperty.PASSWORD, PGSimpleDataSource::getPassword, PGSimpleDataSource::setPassword);
}
}
}
|
Polish
|
spring-boot-project/spring-boot/src/main/java/org/springframework/boot/jdbc/DataSourceBuilder.java
|
Polish
|
|
Java
|
bsd-3-clause
|
e5cb0abb61dc78fa485bd4c10d1edfa1da6b6428
| 0
|
mplushnikov/lombok-intellij-plugin,mplushnikov/lombok-intellij-plugin
|
package de.plushnikov.intellij.plugin.lombokconfig;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.PathUtil;
import com.intellij.util.indexing.*;
import com.intellij.util.io.DataExternalizer;
import com.intellij.util.io.KeyDescriptor;
import de.plushnikov.intellij.plugin.language.LombokConfigFileType;
import de.plushnikov.intellij.plugin.language.psi.LombokConfigCleaner;
import de.plushnikov.intellij.plugin.language.psi.LombokConfigFile;
import de.plushnikov.intellij.plugin.language.psi.LombokConfigProperty;
import de.plushnikov.intellij.plugin.language.psi.LombokConfigPsiUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public class LombokConfigIndex extends FileBasedIndexExtension<ConfigIndexKey, ConfigValue> {
@NonNls
public static final ID<ConfigIndexKey, ConfigValue> NAME = ID.create("LombokConfigIndex");
private static final int INDEX_FORMAT_VERSION = 10;
@NotNull
@Override
public ID<ConfigIndexKey, ConfigValue> getName() {
return NAME;
}
@NotNull
@Override
public DataIndexer<ConfigIndexKey, ConfigValue, FileContent> getIndexer() {
return new DataIndexer<ConfigIndexKey, ConfigValue, FileContent>() {
@NotNull
@Override
public Map<ConfigIndexKey, ConfigValue> map(@NotNull FileContent inputData) {
Map<ConfigIndexKey, ConfigValue> result = Collections.emptyMap();
final VirtualFile directoryFile = inputData.getFile().getParent();
if (null != directoryFile) {
final String canonicalPath = PathUtil.toSystemIndependentName(directoryFile.getCanonicalPath());
if (null != canonicalPath) {
final Map<String, String> configValues = extractValues((LombokConfigFile) inputData.getPsiFile());
final boolean stopBubblingValue = Boolean.parseBoolean(configValues.get(ConfigKey.CONFIG_STOP_BUBBLING.getConfigKey()));
result = Stream.of(ConfigKey.values())
.map(ConfigKey::getConfigKey)
.collect(Collectors.toMap(
key -> new ConfigIndexKey(canonicalPath, key),
key -> new ConfigValue(configValues.get(key), stopBubblingValue)));
}
}
return result;
}
      /**
       * Collects the effective key/value pairs declared in a lombok.config file.
       * "clear" directives first reset known keys to their defaults; property
       * entries then either replace the value or, when a sign is present,
       * accumulate onto it.
       */
      private Map<String, String> extractValues(LombokConfigFile configFile) {
        Map<String, String> result = new HashMap<>();

        final LombokConfigCleaner[] configCleaners = LombokConfigUtil.getLombokConfigCleaners(configFile);
        for (LombokConfigCleaner configCleaner : configCleaners) {
          final String key = StringUtil.toLowerCase(LombokConfigPsiUtil.getKey(configCleaner));
          final ConfigKey configKey = ConfigKey.fromConfigStringKey(key);
          if (null != configKey) {
            // Reset the key to its default value; unknown keys are ignored.
            result.put(key, configKey.getConfigDefaultValue());
          }
        }

        final LombokConfigProperty[] configProperties = LombokConfigUtil.getLombokConfigProperties(configFile);
        for (LombokConfigProperty configProperty : configProperties) {
          final String key = StringUtil.toLowerCase(LombokConfigPsiUtil.getKey(configProperty));
          final String value = LombokConfigPsiUtil.getValue(configProperty);
          final String sign = LombokConfigPsiUtil.getSign(configProperty);
          if (null == sign) {
            // Plain assignment: replaces any previously accumulated value.
            result.put(key, value);
          } else {
            // Signed entry (presumably a += / -= list operation — confirm against
            // the lombok.config grammar): encoded as "<sign><value>;" segments
            // appended to the previous value.
            final String previousValue = StringUtil.defaultIfEmpty(result.get(key), "");
            final String combinedValue = previousValue + sign + value + ";";
            result.put(key, combinedValue);
          }
        }

        return result;
      }
};
}
@NotNull
@Override
public KeyDescriptor<ConfigIndexKey> getKeyDescriptor() {
return new KeyDescriptor<ConfigIndexKey>() {
@Override
public int getHashCode(ConfigIndexKey configKey) {
return configKey.hashCode();
}
@Override
public boolean isEqual(ConfigIndexKey val1, ConfigIndexKey val2) {
return val1.equals(val2);
}
@Override
public void save(@NotNull DataOutput out, ConfigIndexKey value) throws IOException {
out.writeUTF(StringUtil.notNullize(value.getDirectoryName()));
out.writeUTF(StringUtil.notNullize(value.getConfigKey()));
}
@Override
public ConfigIndexKey read(@NotNull DataInput in) throws IOException {
return new ConfigIndexKey(in.readUTF(), in.readUTF());
}
};
}
  @NotNull
  @Override
  public DataExternalizer<ConfigValue> getValueExternalizer() {
    return new DataExternalizer<ConfigValue>() {
      @Override
      public void save(@NotNull DataOutput out, ConfigValue configValue) throws IOException {
        // writeUTF cannot encode null, so a boolean flag records null-ness and
        // an empty string is written in its place.
        final boolean hasNullValue = null == configValue.getValue();
        out.writeBoolean(hasNullValue);
        out.writeUTF(hasNullValue ? "" : configValue.getValue());
        out.writeBoolean(configValue.isStopBubbling());
      }

      @Override
      public ConfigValue read(@NotNull DataInput in) throws IOException {
        // Mirror of save(): restore null when the null flag was set.
        final boolean hasNullValue = in.readBoolean();
        final String configValue = in.readUTF();
        final boolean stopBubbling = in.readBoolean();
        return new ConfigValue(hasNullValue ? null : configValue, stopBubbling);
      }
    };
  }
@NotNull
@Override
public FileBasedIndex.InputFilter getInputFilter() {
return new DefaultFileTypeSpecificInputFilter(LombokConfigFileType.INSTANCE);
}
@Override
public boolean dependsOnFileContent() {
return true;
}
// TODO: make this index shareable IDEA-253057. Avoid using canonical paths.
@Override
public boolean canBeShared() {
return false;
}
@Override
public int getVersion() {
return INDEX_FORMAT_VERSION;
}
}
|
src/main/java/de/plushnikov/intellij/plugin/lombokconfig/LombokConfigIndex.java
|
package de.plushnikov.intellij.plugin.lombokconfig;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.PathUtil;
import com.intellij.util.indexing.*;
import com.intellij.util.io.DataExternalizer;
import com.intellij.util.io.KeyDescriptor;
import de.plushnikov.intellij.plugin.language.LombokConfigFileType;
import de.plushnikov.intellij.plugin.language.psi.LombokConfigCleaner;
import de.plushnikov.intellij.plugin.language.psi.LombokConfigFile;
import de.plushnikov.intellij.plugin.language.psi.LombokConfigProperty;
import de.plushnikov.intellij.plugin.language.psi.LombokConfigPsiUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public class LombokConfigIndex extends FileBasedIndexExtension<ConfigIndexKey, ConfigValue> {
@NonNls
public static final ID<ConfigIndexKey, ConfigValue> NAME = ID.create("LombokConfigIndex");
private static final int INDEX_FORMAT_VERSION = 10;
@NotNull
@Override
public ID<ConfigIndexKey, ConfigValue> getName() {
return NAME;
}
@NotNull
@Override
public DataIndexer<ConfigIndexKey, ConfigValue, FileContent> getIndexer() {
return new DataIndexer<ConfigIndexKey, ConfigValue, FileContent>() {
@NotNull
@Override
public Map<ConfigIndexKey, ConfigValue> map(@NotNull FileContent inputData) {
Map<ConfigIndexKey, ConfigValue> result = Collections.emptyMap();
final VirtualFile directoryFile = inputData.getFile().getParent();
if (null != directoryFile) {
final String canonicalPath = PathUtil.toSystemIndependentName(directoryFile.getCanonicalPath());
if (null != canonicalPath) {
final Map<String, String> configValues = extractValues((LombokConfigFile) inputData.getPsiFile());
final boolean stopBubblingValue = Boolean.parseBoolean(configValues.get(ConfigKey.CONFIG_STOP_BUBBLING.getConfigKey()));
result = Stream.of(ConfigKey.values())
.map(ConfigKey::getConfigKey)
.collect(Collectors.toMap(
key -> new ConfigIndexKey(canonicalPath, key),
key -> new ConfigValue(configValues.get(key), stopBubblingValue)));
}
}
return result;
}
private Map<String, String> extractValues(LombokConfigFile configFile) {
Map<String, String> result = new HashMap<>();
final LombokConfigCleaner[] configCleaners = LombokConfigUtil.getLombokConfigCleaners(configFile);
for (LombokConfigCleaner configCleaner : configCleaners) {
final String key = StringUtil.toLowerCase(LombokConfigPsiUtil.getKey(configCleaner));
final ConfigKey configKey = ConfigKey.fromConfigStringKey(key);
if (null != configKey) {
result.put(key, configKey.getConfigDefaultValue());
}
}
final LombokConfigProperty[] configProperties = LombokConfigUtil.getLombokConfigProperties(configFile);
for (LombokConfigProperty configProperty : configProperties) {
final String key = StringUtil.toLowerCase(LombokConfigPsiUtil.getKey(configProperty));
final String value = LombokConfigPsiUtil.getValue(configProperty);
final String sign = LombokConfigPsiUtil.getSign(configProperty);
if (null == sign) {
result.put(key, value);
} else {
final String previousValue = StringUtil.defaultIfEmpty(result.get(key), "");
final String combinedValue = previousValue + sign + value + ";";
result.put(key, combinedValue);
}
}
return result;
}
};
}
@NotNull
@Override
public KeyDescriptor<ConfigIndexKey> getKeyDescriptor() {
return new KeyDescriptor<ConfigIndexKey>() {
@Override
public int getHashCode(ConfigIndexKey configKey) {
return configKey.hashCode();
}
@Override
public boolean isEqual(ConfigIndexKey val1, ConfigIndexKey val2) {
return val1.equals(val2);
}
@Override
public void save(@NotNull DataOutput out, ConfigIndexKey value) throws IOException {
out.writeUTF(StringUtil.notNullize(value.getDirectoryName()));
out.writeUTF(StringUtil.notNullize(value.getConfigKey()));
}
@Override
public ConfigIndexKey read(@NotNull DataInput in) throws IOException {
return new ConfigIndexKey(in.readUTF(), in.readUTF());
}
};
}
@NotNull
@Override
public DataExternalizer<ConfigValue> getValueExternalizer() {
return new DataExternalizer<ConfigValue>() {
@Override
public void save(@NotNull DataOutput out, ConfigValue configValue) throws IOException {
final boolean hasNullValue = null == configValue.getValue();
out.writeBoolean(hasNullValue);
out.writeUTF(hasNullValue ? "" : configValue.getValue());
out.writeBoolean(configValue.isStopBubbling());
}
@Override
public ConfigValue read(@NotNull DataInput in) throws IOException {
final boolean hasNullValue = in.readBoolean();
final String configValue = in.readUTF();
final boolean stopBubbling = in.readBoolean();
return new ConfigValue(hasNullValue ? null : configValue, stopBubbling);
}
};
}
@NotNull
@Override
public FileBasedIndex.InputFilter getInputFilter() {
return new DefaultFileTypeSpecificInputFilter(LombokConfigFileType.INSTANCE);
}
@Override
public boolean dependsOnFileContent() {
return true;
}
@Override
public int getVersion() {
    // INDEX_FORMAT_VERSION must be bumped whenever the serialized key/value
    // layout changes, so stale on-disk indices get rebuilt.
    return INDEX_FORMAT_VERSION;
}
}
|
shared-indexes: add FileBasedIndexExtension.canBeShared to exclude problematic indexes from sharing, exclude LombokIndex IDEA-253057.
GitOrigin-RevId: a139ff9e587ebeba1b44ba676df14bf7637cd1c9
|
src/main/java/de/plushnikov/intellij/plugin/lombokconfig/LombokConfigIndex.java
|
shared-indexes: add FileBasedIndexExtension.canBeShared to exclude problematic indexes from sharing, exclude LombokIndex IDEA-253057.
|
|
Java
|
bsd-3-clause
|
e413605605187daa8da4424c4f5dd3c4a84593c3
| 0
|
pepyakin/threetenbp,naixx/threetenbp,ThreeTen/threetenbp,jnehlmeier/threetenbp,jnehlmeier/threetenbp,ThreeTen/threetenbp,naixx/threetenbp,pepyakin/threetenbp
|
/*
* Copyright (c) 2008-2010, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package javax.time.format;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertSame;
import java.io.IOException;
import org.testng.annotations.Test;
/**
* Test CalendricalPrintException.
*
* @author Stephen Colebourne
*/
@Test
public class TestCalendricalPrintException {

    // Message-only constructor stores the message verbatim.
    @Test(groups={"tck"})
    public void test_constructor_String() throws Exception {
        CalendricalPrintException test = new CalendricalPrintException("TEST");
        assertEquals(test.getMessage(), "TEST");
    }

    // Non-IOException cause: the implementation keeps the exact cause instance,
    // and rethrowIOException() is a no-op.
    @Test(groups={"implementation"})
    public void test_constructor_StringThrowable_notIOException_same() throws Exception {
        IllegalArgumentException cause = new IllegalArgumentException("INNER");
        CalendricalPrintException test = new CalendricalPrintException("TEST", cause);
        assertEquals(test.getMessage(), "TEST");
        assertSame(test.getCause(), cause);
        test.rethrowIOException(); // no effect
    }

    // TCK variant: only equality of the cause is mandated, not identity.
    @Test(groups={"tck"})
    public void test_constructor_StringThrowable_notIOException_equal() throws Exception {
        IllegalArgumentException cause = new IllegalArgumentException("INNER");
        CalendricalPrintException test = new CalendricalPrintException("TEST", cause);
        assertEquals(test.getMessage(), "TEST");
        assertEquals(test.getCause(), cause);
        test.rethrowIOException(); // no effect
    }

    // IOException cause: rethrowIOException() must propagate it; the
    // implementation additionally keeps the exact instance.
    @Test(expectedExceptions=IOException.class, groups={"implementation"})
    public void test_constructor_StringThrowable_IOException_same() throws Exception {
        IOException cause = new IOException("INNER");
        CalendricalPrintException test = new CalendricalPrintException("TEST", cause);
        assertEquals(test.getMessage(), "TEST");
        assertSame(test.getCause(), cause);
        test.rethrowIOException(); // rethrows
    }

    // TCK variant of the IOException rethrow behaviour.
    @Test(expectedExceptions=IOException.class, groups={"tck"})
    public void test_constructor_StringThrowable_IOException() throws Exception {
        IOException cause = new IOException("INNER");
        CalendricalPrintException test = new CalendricalPrintException("TEST", cause);
        assertEquals(test.getMessage(), "TEST");
        assertEquals(test.getCause(), cause);
        test.rethrowIOException(); // rethrows
    }
}
|
src/test/java/javax/time/format/TestCalendricalPrintException.java
|
/*
* Copyright (c) 2008-2010, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package javax.time.format;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertSame;
import java.io.IOException;
import org.testng.annotations.Test;
/**
* Test CalendricalPrintException.
*
* @author Stephen Colebourne
*/
@Test
public class TestCalendricalPrintException {

    // Message-only constructor stores the message verbatim.
    // (Runs under the class-level @Test annotation.)
    public void test_constructor_String() throws Exception {
        CalendricalPrintException ex = new CalendricalPrintException("TEST");
        assertEquals(ex.getMessage(), "TEST");
    }

    // A non-IOException cause is retained as-is; rethrowIOException() does nothing.
    public void test_constructor_StringThrowable_notIOException() throws Exception {
        IllegalArgumentException iaex = new IllegalArgumentException("INNER");
        CalendricalPrintException ex = new CalendricalPrintException("TEST", iaex);
        assertEquals(ex.getMessage(), "TEST");
        assertSame(ex.getCause(), iaex);
        ex.rethrowIOException(); // no effect
    }

    // An IOException cause is retained and must be rethrown by rethrowIOException().
    @Test(expectedExceptions=IOException.class)
    public void test_constructor_StringThrowable_IOException() throws Exception {
        IOException ioex = new IOException("INNER");
        CalendricalPrintException ex = new CalendricalPrintException("TEST", ioex);
        assertEquals(ex.getMessage(), "TEST");
        assertSame(ex.getCause(), ioex);
        ex.rethrowIOException(); // rethrows
    }
}
|
TestCalendricalPrintException annotated
|
src/test/java/javax/time/format/TestCalendricalPrintException.java
|
TestCalendricalPrintException annotated
|
|
Java
|
bsd-3-clause
|
74b4c45ecc5de9c90efc68d0c7d6dc78d7a9d8a1
| 0
|
sebastiangraf/treetank,sebastiangraf/treetank,sebastiangraf/treetank
|
package com.treetank.xmlprague;
import java.io.File;
import java.util.Properties;
import com.treetank.access.Database;
import com.treetank.api.IDatabase;
import com.treetank.api.ISession;
import com.treetank.api.IWriteTransaction;
import com.treetank.exception.TTException;
import com.treetank.service.xml.shredder.EShredderInsert;
import com.treetank.service.xml.shredder.XMLShredder;
import org.perfidix.AbstractConfig;
import org.perfidix.Benchmark;
import org.perfidix.annotation.Bench;
import org.perfidix.element.KindOfArrangement;
import org.perfidix.meter.AbstractMeter;
import org.perfidix.meter.Time;
import org.perfidix.meter.TimeMeter;
import org.perfidix.ouput.AbstractOutput;
import org.perfidix.ouput.CSVOutput;
import org.perfidix.ouput.TabularSummaryOutput;
import org.perfidix.result.BenchmarkResult;
public class IncrementalShred {

    /** XML input for the current benchmark run; set by {@link #main(String[])} before each run. */
    public static File XMLFile = new File("");

    /** Folder holding the generated Treetank storages. */
    public static File TNKFolder = new File("tnk");

    // NOTE(review): benchmarks for commit ratios 0.0 and 1.0 were previously present
    // but disabled; re-enable by adding @Bench methods that delegate to shred(...).

    /** Benchmarks shredding with a commit ratio of 0.75. */
    @Bench
    public void bench75Commit() {
        shred(0.75d, "75");
    }

    /** Benchmarks shredding with a commit ratio of 0.25. */
    @Bench
    public void bench25Commit() {
        shred(0.25d, "25");
    }

    /** Benchmarks shredding with a commit ratio of 0.50. */
    @Bench
    public void bench50Commit() {
        shred(0.50d, "50");
    }

    /**
     * Shreds {@link #XMLFile} into the Treetank storage under {@link #TNKFolder},
     * committing at the given ratio, then force-closes the database.
     *
     * @param ratio commit ratio passed to {@link XMLShredderWithCommit}
     * @param label progress label printed before and after the run
     */
    private void shred(final double ratio, final String label) {
        try {
            System.out.println(label + " started");
            final File tnkFile = new File(TNKFolder, XMLFile.getName() + ".tnk");
            final IDatabase database = Database.openDatabase(tnkFile);
            final ISession session = database.getSession();
            final IWriteTransaction wtx = session.beginWriteTransaction();
            final XMLShredderWithCommit shredder =
                new XMLShredderWithCommit(wtx, XMLShredder.createReader(XMLFile), ratio);
            shredder.call();
            wtx.commit();
            Database.forceCloseDatabase(tnkFile);
            System.out.println(label + " ended");
        } catch (Exception e) {
            // Benchmark harness: report the failure but continue with the next run.
            e.printStackTrace();
        }
    }

    /**
     * Entry point: runs the benchmark suite once per XML file found in args[0],
     * writing CSV results into a per-file subfolder of args[1].
     *
     * @param args [0] folder containing XML inputs, [1] folder for CSV output
     */
    public static void main(final String[] args) {
        if (args.length != 2) {
            System.out
                .println("Please use java -jar JAR \"folder with xmls to parse\" \"folder to write csv\"");
            System.exit(-1);
        }
        // Argument is a folder with only XML in there. For each XML one benchmark should be executed.
        final File filetoshred = new File(args[0]);
        final File[] files = filetoshred.listFiles();
        final File filetoexport = new File(args[1]);
        for (final File currentFile : files) {
            XMLFile = currentFile;
            System.out.println("Starting benchmark for " + XMLFile.getName());
            // Strip the extension to name the per-file output folder.
            final int index = currentFile.getName().lastIndexOf(".");
            final File folder = new File(filetoexport, currentFile.getName().substring(0, index));
            folder.mkdirs();
            // Measures the on-disk size of the generated storage alongside wall time.
            final FilesizeMeter meter =
                new FilesizeMeter(new File(new File(new File(TNKFolder, XMLFile.getName() + ".tnk"), "tt"),
                    "tt.tnk"));
            final Benchmark bench = new Benchmark(new AbstractConfig(1, new AbstractMeter[] {
                meter, new TimeMeter(Time.MilliSeconds)
            }, new AbstractOutput[0], KindOfArrangement.SequentialMethodArrangement, 1.0d) {
            });
            bench.add(IncrementalShred.class);
            final BenchmarkResult res = bench.run();
            new TabularSummaryOutput(System.out).visitBenchmark(res);
            new CSVOutput(folder).visitBenchmark(res);
            System.out.println("Finished benchmark for " + XMLFile.getName());
        }
    }
}
|
bundles/treetank-paper/src/main/java/com/treetank/xmlprague/IncrementalShred.java
|
package com.treetank.xmlprague;
import java.io.File;
import java.util.Properties;
import com.treetank.access.Database;
import com.treetank.api.IDatabase;
import com.treetank.api.ISession;
import com.treetank.api.IWriteTransaction;
import com.treetank.exception.TTException;
import com.treetank.service.xml.shredder.EShredderInsert;
import com.treetank.service.xml.shredder.XMLShredder;
import org.perfidix.AbstractConfig;
import org.perfidix.Benchmark;
import org.perfidix.annotation.Bench;
import org.perfidix.element.KindOfArrangement;
import org.perfidix.meter.AbstractMeter;
import org.perfidix.meter.Time;
import org.perfidix.meter.TimeMeter;
import org.perfidix.ouput.AbstractOutput;
import org.perfidix.ouput.CSVOutput;
import org.perfidix.ouput.TabularSummaryOutput;
import org.perfidix.result.BenchmarkResult;
public class IncrementalShred {

    /** XML input for the current benchmark run; set by {@link #main(String[])} before each run. */
    public static File XMLFile = new File("");

    /** Folder holding the generated Treetank storages. */
    public static File TNKFolder = new File("tnk");

    // NOTE(review): benchmarks for commit ratios 0.75 and 0.25 were previously present
    // but disabled; re-enable by adding @Bench methods that delegate to shred(...).

    /** Benchmarks shredding with no incremental commits (ratio 0.0). */
    @Bench
    public void benchNormal() {
        shred(0.0d, "0");
    }

    /** Benchmarks shredding with a commit ratio of 1.0. */
    @Bench
    public void bench100Commit() {
        shred(1.0d, "100");
    }

    /** Benchmarks shredding with a commit ratio of 0.50. */
    @Bench
    public void bench50Commit() {
        shred(0.50d, "50");
    }

    /**
     * Shreds {@link #XMLFile} into the Treetank storage under {@link #TNKFolder},
     * committing at the given ratio, then force-closes the database.
     *
     * @param ratio commit ratio passed to {@link XMLShredderWithCommit}
     * @param label progress label printed before and after the run
     */
    private void shred(final double ratio, final String label) {
        try {
            System.out.println(label + " started");
            final File tnkFile = new File(TNKFolder, XMLFile.getName() + ".tnk");
            final IDatabase database = Database.openDatabase(tnkFile);
            final ISession session = database.getSession();
            final IWriteTransaction wtx = session.beginWriteTransaction();
            final XMLShredderWithCommit shredder =
                new XMLShredderWithCommit(wtx, XMLShredder.createReader(XMLFile), ratio);
            shredder.call();
            wtx.commit();
            Database.forceCloseDatabase(tnkFile);
            System.out.println(label + " ended");
        } catch (Exception e) {
            // Benchmark harness: report the failure but continue with the next run.
            e.printStackTrace();
        }
    }

    /**
     * Entry point: runs the benchmark suite once per XML file found in args[0],
     * writing CSV results into a per-file subfolder of args[1].
     *
     * @param args [0] folder containing XML inputs, [1] folder for CSV output
     */
    public static void main(final String[] args) {
        if (args.length != 2) {
            System.out
                .println("Please use java -jar JAR \"folder with xmls to parse\" \"folder to write csv\"");
            System.exit(-1);
        }
        // Argument is a folder with only XML in there. For each XML one benchmark should be executed.
        final File filetoshred = new File(args[0]);
        final File[] files = filetoshred.listFiles();
        final File filetoexport = new File(args[1]);
        for (final File currentFile : files) {
            XMLFile = currentFile;
            System.out.println("Starting benchmark for " + XMLFile.getName());
            // Strip the extension to name the per-file output folder.
            final int index = currentFile.getName().lastIndexOf(".");
            final File folder = new File(filetoexport, currentFile.getName().substring(0, index));
            folder.mkdirs();
            // Measures the on-disk size of the generated storage alongside wall time.
            final FilesizeMeter meter =
                new FilesizeMeter(new File(new File(new File(TNKFolder, XMLFile.getName() + ".tnk"), "tt"),
                    "tt.tnk"));
            final Benchmark bench = new Benchmark(new AbstractConfig(1, new AbstractMeter[] {
                meter, new TimeMeter(Time.MilliSeconds)
            }, new AbstractOutput[0], KindOfArrangement.SequentialMethodArrangement, 1.0d) {
            });
            bench.add(IncrementalShred.class);
            final BenchmarkResult res = bench.run();
            new TabularSummaryOutput(System.out).visitBenchmark(res);
            new CSVOutput(folder).visitBenchmark(res);
            System.out.println("Finished benchmark for " + XMLFile.getName());
        }
    }
}
|
adapted benchmark
git-svn-id: a5379eb5ca3beb2b6e029be3b1b7f6aa53f2352b@5756 e3ddb328-5bfe-0310-b762-aafcbcbd2528
|
bundles/treetank-paper/src/main/java/com/treetank/xmlprague/IncrementalShred.java
|
adapted benchmark
|
|
Java
|
bsd-3-clause
|
861611f9180da02e26523e8c6745fa598930e70d
| 0
|
kops/jbehave-core,skundrik/jbehave-core,jeremiehuchet/jbehave-core,irfanah/jbehave-core,eugen-eugen/eugensjbehave,jbehave/jbehave-core,eugen-eugen/eugensjbehave,bsaylor/jbehave-core,irfanah/jbehave-core,valfirst/jbehave-core,pocamin/jbehave-core,mhariri/jbehave-core,skundrik/jbehave-core,jeremiehuchet/jbehave-core,codehaus/jbehave-core,sischnei/jbehave-core,sischnei/jbehave-core,valfirst/jbehave-core,sischnei/jbehave-core,donsenior/jbehave-core,codehaus/jbehave-core,mestihudson/jbehave-core,gmandnepr/jbehave-core,pocamin/jbehave-core,mhariri/jbehave-core,bsaylor/jbehave-core,kops/jbehave-core,bsaylor/jbehave-core,gmandnepr/jbehave-core,valfirst/jbehave-core,mestihudson/jbehave-core,pocamin/jbehave-core,irfanah/jbehave-core,donsenior/jbehave-core,mestihudson/jbehave-core,skundrik/jbehave-core,sischnei/jbehave-core,sischnei/jbehave-core,kops/jbehave-core,codehaus/jbehave-core,donsenior/jbehave-core,mhariri/jbehave-core,mestihudson/jbehave-core,jbehave/jbehave-core,eugen-eugen/eugensjbehave,eugen-eugen/eugensjbehave,jeremiehuchet/jbehave-core,irfanah/jbehave-core,eugen-eugen/eugensjbehave,jbehave/jbehave-core,codehaus/jbehave-core,irfanah/jbehave-core,mestihudson/jbehave-core,codehaus/jbehave-core,jbehave/jbehave-core,gmandnepr/jbehave-core,valfirst/jbehave-core,eugen-eugen/eugensjbehave
|
package org.jbehave.core.steps;
import static java.util.Arrays.asList;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.text.DateFormat;
import java.text.NumberFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.jbehave.core.model.ExamplesTable;
/**
* <p>
* Facade responsible for converting parameter values to Java objects.
* </p>
* <p>
* A number of default converters are provided:
* <ul>
* <li>{@link ParameterConverters#NumberConverter}</li>
* <li>{@link ParameterConverters#NumberListConverter}</li>
* <li>{@link ParameterConverters#StringListConverter}</li>
* <li>{@link ParameterConverters#DateConverter}</li>
* <li>{@link ParameterConverters#ExamplesTableConverter}</li>
* </ul>
* </p>
*/
public class ParameterConverters {

    private static final String NEWLINES_PATTERN = "(\n)|(\r\n)";
    private static final String SYSTEM_NEWLINE = System
            .getProperty("line.separator");
    private static final String COMMA = ",";
    // Defaults are consulted last: user-supplied converters are prepended in addConverters().
    private static final ParameterConverter[] DEFAULT_CONVERTERS = {
            new NumberConverter(), new NumberListConverter(),
            new StringListConverter(), new DateConverter(),
            new ExamplesTableConverter() };
    private final StepMonitor monitor;
    private final List<ParameterConverter> converters = new ArrayList<ParameterConverter>();

    /** Creates converters with a {@link SilentStepMonitor} and the default converter set. */
    public ParameterConverters() {
        this(new SilentStepMonitor());
    }

    /**
     * Creates converters with the given monitor and the default converter set.
     *
     * @param monitor notified of each successful conversion
     */
    public ParameterConverters(StepMonitor monitor) {
        this.monitor = monitor;
        this.addConverters(DEFAULT_CONVERTERS);
    }

    /**
     * Adds converters, which take precedence over previously registered ones.
     *
     * @return this instance, for chaining
     */
    public ParameterConverters addConverters(ParameterConverter... converters) {
        return addConverters(asList(converters));
    }

    public ParameterConverters addConverters(List<ParameterConverter> converters) {
        // Prepend so the most recently added converters are tried first.
        this.converters.addAll(0, converters);
        return this;
    }

    /**
     * Converts the textual value to the given type via the first accepting
     * converter; if none accepts the type, returns the String value itself with
     * newlines normalised to the system line separator.
     */
    public Object convert(String value, Type type) {
        // check if any converters accepts type
        for (ParameterConverter converter : converters) {
            if (converter.accept(type)) {
                Object converted = converter.convertValue(value, type);
                monitor.convertedValueOfType(value, type, converted, converter
                        .getClass());
                return converted;
            }
        }
        // default to String
        return replaceNewlinesWithSystemNewlines(value);
    }

    private Object replaceNewlinesWithSystemNewlines(String value) {
        return value.replaceAll(NEWLINES_PATTERN, SYSTEM_NEWLINE);
    }

    /**
     * Converts a single textual value to an object of an accepted type.
     */
    public static interface ParameterConverter {

        /** Returns true if this converter can handle the given target type. */
        boolean accept(Type type);

        /** Converts the value; only called when {@link #accept(Type)} returned true. */
        Object convertValue(String value, Type type);
    }

    /**
     * Thrown when a value cannot be converted to the requested type.
     */
    @SuppressWarnings("serial")
    public static class ParameterConvertionFailed extends RuntimeException {
        public ParameterConvertionFailed(String message, Throwable cause) {
            super(message, cause);
        }
    }

    /**
     * Converts values to numbers. Supports
     * <ul>
     * <li>Integer, int</li>
     * <li>Long, long</li>
     * <li>Double, double</li>
     * <li>Float, float</li>
     * <li>BigDecimal, BigInteger</li>
     * </ul>
     */
    public static class NumberConverter implements ParameterConverter {

        private static List<Class<?>> acceptedClasses = asList(new Class<?>[] {
                Integer.class, int.class, Long.class, long.class, Double.class,
                double.class, Float.class, float.class, BigDecimal.class,
                BigInteger.class });

        public boolean accept(Type type) {
            if (type instanceof Class<?>) {
                return acceptedClasses.contains(type);
            }
            return false;
        }

        public Object convertValue(String value, Type type) {
            if (type == Integer.class || type == int.class) {
                return Integer.valueOf(value);
            } else if (type == Long.class || type == long.class) {
                return Long.valueOf(value);
            } else if (type == Double.class || type == double.class) {
                return Double.valueOf(value);
            } else if (type == Float.class || type == float.class) {
                return Float.valueOf(value);
            } else if (type == BigDecimal.class) {
                return new BigDecimal(value);
            } else if (type == BigInteger.class) {
                return new BigInteger(value);
            }
            // Unreachable for the accepted types above; kept as a safe fallback.
            return value;
        }
    }

    /**
     * Converts value to a list of numbers. Splits the value using an injectable
     * value separator (defaults to ",") and converts each element of the list
     * via the {@link NumberConverter}.
     */
    public static class NumberListConverter implements ParameterConverter {

        private NumberConverter numberConverter = new NumberConverter();
        private NumberFormat numberFormat;
        private String valueSeparator;

        public NumberListConverter() {
            this(NumberFormat.getInstance(), COMMA);
        }

        public NumberListConverter(NumberFormat numberFormat,
                String valueSeparator) {
            this.numberFormat = numberFormat;
            this.valueSeparator = valueSeparator;
        }

        public boolean accept(Type type) {
            // Accepts List<N> for any N that is a subtype of Number.
            if (type instanceof ParameterizedType) {
                Type rawType = rawType(type);
                Type argumentType = argumentType(type);
                return List.class.isAssignableFrom((Class<?>) rawType)
                        && Number.class
                                .isAssignableFrom((Class<?>) argumentType);
            }
            return false;
        }

        private Type rawType(Type type) {
            return ((ParameterizedType) type).getRawType();
        }

        private Type argumentType(Type type) {
            return ((ParameterizedType) type).getActualTypeArguments()[0];
        }

        @SuppressWarnings("unchecked")
        public Object convertValue(String value, Type type) {
            Class<? extends Number> argumentType = (Class<? extends Number>) argumentType(type);
            List<String> values = trim(asList(value.split(valueSeparator)));
            // Plain List<Number>: parse via the locale-sensitive NumberFormat;
            // concrete subtypes use the NumberConverter's exact-type parsing.
            if (argumentType.equals(Number.class)) {
                return convertWithNumberFormat(values);
            }
            return convertWithNumberConverter(values, argumentType);
        }

        private List<Number> convertWithNumberConverter(List<String> values,
                Class<? extends Number> type) {
            List<Number> numbers = new ArrayList<Number>();
            for (String value : values) {
                numbers.add((Number) numberConverter.convertValue(value, type));
            }
            return numbers;
        }

        private List<Number> convertWithNumberFormat(List<String> values) {
            List<Number> numbers = new ArrayList<Number>();
            for (String numberValue : values) {
                try {
                    numbers.add(numberFormat.parse(numberValue));
                } catch (ParseException e) {
                    throw new ParameterConvertionFailed(numberValue, e);
                }
            }
            return numbers;
        }
    }

    /**
     * Converts value to a list of Strings. Splits the value using an injectable
     * value separator (defaults to ",") and trims each element of the list.
     */
    public static class StringListConverter implements ParameterConverter {

        private String valueSeparator;

        public StringListConverter() {
            this(COMMA);
        }

        public StringListConverter(String valueSeparator) {
            this.valueSeparator = valueSeparator;
        }

        public boolean accept(Type type) {
            // Accepts List<S> for any S that is a subtype of String.
            if (type instanceof ParameterizedType) {
                ParameterizedType parameterizedType = (ParameterizedType) type;
                Type rawType = parameterizedType.getRawType();
                Type argumentType = parameterizedType.getActualTypeArguments()[0];
                return List.class.isAssignableFrom((Class<?>) rawType)
                        && String.class
                                .isAssignableFrom((Class<?>) argumentType);
            }
            return false;
        }

        public Object convertValue(String value, Type type) {
            // A blank value maps to an empty list, not a list of one empty string.
            if (value.trim().length() == 0) {
                return asList();
            }
            return trim(asList(value.split(valueSeparator)));
        }
    }

    /**
     * Returns a new list with each element of the given list trimmed.
     */
    public static List<String> trim(List<String> values) {
        List<String> trimmed = new ArrayList<String>();
        for (String value : values) {
            trimmed.add(value.trim());
        }
        return trimmed;
    }

    /**
     * Parses value to a {@link Date} using an injectable {@link DateFormat}
     * (defaults to <b>new SimpleDateFormat("dd/MM/yyyy")</b>).
     * <p>
     * NOTE(review): {@code DEFAULT_FORMAT} is a shared {@link SimpleDateFormat},
     * which is not thread-safe; concurrent conversions through the default
     * instance may corrupt parsing. Consider per-instance formats — verify
     * whether concurrent step execution is possible before relying on this.
     */
    public static class DateConverter implements ParameterConverter {

        public static final DateFormat DEFAULT_FORMAT = new SimpleDateFormat("dd/MM/yyyy");

        private final DateFormat dateFormat;

        public DateConverter() {
            this(DEFAULT_FORMAT);
        }

        public DateConverter(DateFormat dateFormat) {
            this.dateFormat = dateFormat;
        }

        public boolean accept(Type type) {
            if (type instanceof Class<?>) {
                return Date.class.isAssignableFrom((Class<?>) type);
            }
            return false;
        }

        public Object convertValue(String value, Type type) {
            try {
                return dateFormat.parse(value);
            } catch (ParseException e) {
                throw new ParameterConvertionFailed("Could not convert value "
                        + value + " with date format " + dateFormat, e);
            }
        }
    }

    /**
     * Converts a tabular textual value to an {@link ExamplesTable}, using
     * injectable header and value separators (both default to "|").
     */
    public static class ExamplesTableConverter implements ParameterConverter {

        private String headerSeparator;
        private String valueSeparator;

        public ExamplesTableConverter() {
            this("|", "|");
        }

        public ExamplesTableConverter(String headerSeparator,
                String valueSeparator) {
            this.headerSeparator = headerSeparator;
            this.valueSeparator = valueSeparator;
        }

        public boolean accept(Type type) {
            if (type instanceof Class<?>) {
                return ExamplesTable.class.isAssignableFrom((Class<?>) type);
            }
            return false;
        }

        public Object convertValue(String value, Type type) {
            return new ExamplesTable(value, headerSeparator, valueSeparator);
        }
    }

    /**
     * Converts a value by invoking a user-supplied single-argument method on a
     * given instance; accepts any type the method's return type is assignable to.
     */
    public static class MethodReturningConverter implements ParameterConverter {

        private Object instance;
        private Method method;

        public MethodReturningConverter(Method method, Object instance) {
            this.method = method;
            this.instance = instance;
        }

        public boolean accept(Type type) {
            if (type instanceof Class<?>) {
                return method.getReturnType().isAssignableFrom((Class<?>) type);
            }
            return false;
        }

        public Object convertValue(String value, Type type) {
            try {
                return method.invoke(instance, value);
            } catch (Exception e) {
                throw new ParameterConvertionFailed("Failed to invoke method "
                        + method + " with value " + value + " in " + instance,
                        e);
            }
        }
    }
}
|
jbehave-core/src/main/java/org/jbehave/core/steps/ParameterConverters.java
|
package org.jbehave.core.steps;
import static java.util.Arrays.asList;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.text.DateFormat;
import java.text.NumberFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.jbehave.core.model.ExamplesTable;
/**
* <p>
* Facade responsible for converting parameter values to Java objects.
* </p>
* <p>
* A number of default converters are provided:
* <ul>
* <li>{@link ParameterConverters#NumberConverter}</li>
* <li>{@link ParameterConverters#NumberListConverter}</li>
* <li>{@link ParameterConverters#StringListConverter}</li>
* <li>{@link ParameterConverters#DateConverter}</li>
* <li>{@link ParameterConverters#ExamplesTableConverter}</li>
* </ul>
* </p>
*/
public class ParameterConverters {
private static final String NEWLINES_PATTERN = "(\n)|(\r\n)";
private static final String SYSTEM_NEWLINE = System
.getProperty("line.separator");
private static final String COMMA = ",";
private static final ParameterConverter[] DEFAULT_CONVERTERS = {
new NumberConverter(), new NumberListConverter(),
new StringListConverter(), new DateConverter(),
new ExamplesTableConverter() };
private final StepMonitor monitor;
private final List<ParameterConverter> converters = new ArrayList<ParameterConverter>();
public ParameterConverters() {
this(new SilentStepMonitor());
}
public ParameterConverters(StepMonitor monitor) {
this.monitor = monitor;
this.addConverters(DEFAULT_CONVERTERS);
}
public ParameterConverters addConverters(ParameterConverter... converters) {
return addConverters(asList(converters));
}
public ParameterConverters addConverters(List<ParameterConverter> converters) {
this.converters.addAll(0, converters);
return this;
}
public Object convert(String value, Type type) {
// check if any converters accepts type
for (ParameterConverter converter : converters) {
if (converter.accept(type)) {
Object converted = converter.convertValue(value, type);
monitor.convertedValueOfType(value, type, converted, converter
.getClass());
return converted;
}
}
// default to String
return replaceNewlinesWithSystemNewlines(value);
}
private Object replaceNewlinesWithSystemNewlines(String value) {
return value.replaceAll(NEWLINES_PATTERN, SYSTEM_NEWLINE);
}
public static interface ParameterConverter {
boolean accept(Type type);
Object convertValue(String value, Type type);
}
@SuppressWarnings("serial")
public static class ParameterConvertionFailed extends RuntimeException {
public ParameterConvertionFailed(String message, Throwable cause) {
super(message, cause);
}
}
/**
* Converts values to numbers. Supports
* <ul>
* <li>Integer, int</li>
* <li>Long, long</li>
* <li>Double, double</li>
* <li>Float, float</li>
* <li>BigDecimale, BigInteger</li>
* </ul>
*/
public static class NumberConverter implements ParameterConverter {
@SuppressWarnings("unchecked")
private static List<Class> acceptedClasses = asList(new Class[] {
Integer.class, int.class, Long.class, long.class, Double.class,
double.class, Float.class, float.class, BigDecimal.class,
BigInteger.class });
public boolean accept(Type type) {
if (type instanceof Class<?>) {
return acceptedClasses.contains(type);
}
return false;
}
public Object convertValue(String value, Type type) {
if (type == Integer.class || type == int.class) {
return Integer.valueOf(value);
} else if (type == Long.class || type == long.class) {
return Long.valueOf(value);
} else if (type == Double.class || type == double.class) {
return Double.valueOf(value);
} else if (type == Float.class || type == float.class) {
return Float.valueOf(value);
} else if (type == BigDecimal.class) {
return new BigDecimal(value);
} else if (type == BigInteger.class) {
return new BigInteger(value);
}
return value;
}
}
/**
* Converts value to list of numbers. Splits value to a list, using an
* injectable value separator (defaults to ",") and converts each element of
 * list via the {@link NumberConverter}.
*/
    /**
     * Converts a value to a list of numbers: the value is split on an
     * injectable separator (defaults to ",") and each element is converted via
     * the {@link NumberConverter}, or parsed with a {@link NumberFormat} when
     * the target element type is Number itself.
     */
    public static class NumberListConverter implements ParameterConverter {
        private NumberConverter numberConverter = new NumberConverter();
        // Used only when the list element type is Number (no concrete subtype).
        private NumberFormat numberFormat;
        private String valueSeparator;

        /** Uses the default-locale NumberFormat and "," as separator. */
        public NumberListConverter() {
            this(NumberFormat.getInstance(), COMMA);
        }

        public NumberListConverter(NumberFormat numberFormat,
                String valueSeparator) {
            this.numberFormat = numberFormat;
            this.valueSeparator = valueSeparator;
        }

        /** Accepts parameterized List types whose element type is a Number subtype. */
        public boolean accept(Type type) {
            if (type instanceof ParameterizedType) {
                Type rawType = rawType(type);
                Type argumentType = argumentType(type);
                return List.class.isAssignableFrom((Class<?>) rawType)
                        && Number.class
                                .isAssignableFrom((Class<?>) argumentType);
            }
            return false;
        }

        private Type rawType(Type type) {
            return ((ParameterizedType) type).getRawType();
        }

        // First (and only) type argument, e.g. Integer for List<Integer>.
        private Type argumentType(Type type) {
            return ((ParameterizedType) type).getActualTypeArguments()[0];
        }

        // Cast is checked by accept(): the argument type is a Number subclass.
        @SuppressWarnings("unchecked")
        public Object convertValue(String value, Type type) {
            Class<? extends Number> argumentType = (Class<? extends Number>) argumentType(type);
            List<String> values = trim(asList(value.split(valueSeparator)));
            if (argumentType.equals(Number.class)) {
                return convertWithNumberFormat(values);
            }
            return convertWithNumberConverter(values, argumentType);
        }

        private List<Number> convertWithNumberConverter(List<String> values,
                Class<? extends Number> type) {
            List<Number> numbers = new ArrayList<Number>();
            for (String value : values) {
                numbers.add((Number) numberConverter.convertValue(value, type));
            }
            return numbers;
        }

        // Parses each element with the injected NumberFormat, wrapping any
        // ParseException in ParameterConvertionFailed.
        private List<Number> convertWithNumberFormat(List<String> values) {
            List<Number> numbers = new ArrayList<Number>();
            for (String numberValue : values) {
                try {
                    numbers.add(numberFormat.parse(numberValue));
                } catch (ParseException e) {
                    throw new ParameterConvertionFailed(numberValue, e);
                }
            }
            return numbers;
        }
    }
/**
* Converts value to list of String. Splits value to a list, using an
* injectable value separator (defaults to ",") and trimming each element of
* the list.
*/
public static class StringListConverter implements ParameterConverter {
private String valueSeparator;
public StringListConverter() {
this(COMMA);
}
public StringListConverter(String valueSeparator) {
this.valueSeparator = valueSeparator;
}
public boolean accept(Type type) {
if (type instanceof ParameterizedType) {
ParameterizedType parameterizedType = (ParameterizedType) type;
Type rawType = parameterizedType.getRawType();
Type argumentType = parameterizedType.getActualTypeArguments()[0];
return List.class.isAssignableFrom((Class<?>) rawType)
&& String.class
.isAssignableFrom((Class<?>) argumentType);
}
return false;
}
public Object convertValue(String value, Type type) {
if (value.trim().length() == 0)
return asList();
return trim(asList(value.split(valueSeparator)));
}
}
public static List<String> trim(List<String> values) {
List<String> trimmed = new ArrayList<String>();
for (String value : values) {
trimmed.add(value.trim());
}
return trimmed;
}
/**
* Parses value to a {@link Date} using an injectable {@link DateFormat}
* (defaults to <b>new SimpleDateFormat("dd/MM/yyyy")</b>)
*/
    /**
     * Parses a value to a {@link Date} using an injectable {@link DateFormat}
     * (defaults to <b>new SimpleDateFormat("dd/MM/yyyy")</b>).
     */
    public static class DateConverter implements ParameterConverter {
        // NOTE(review): SimpleDateFormat is not thread-safe and this instance is
        // shared as a constant — confirm conversions only run single-threaded.
        public static final DateFormat DEFAULT_FORMAT = new SimpleDateFormat("dd/MM/yyyy");
        private final DateFormat dateFormat;

        public DateConverter() {
            this(DEFAULT_FORMAT);
        }

        public DateConverter(DateFormat dateFormat) {
            this.dateFormat = dateFormat;
        }

        /** Accepts Date and its subclasses. */
        public boolean accept(Type type) {
            if (type instanceof Class<?>) {
                return Date.class.isAssignableFrom((Class<?>) type);
            }
            return false;
        }

        /** Parses the value, wrapping ParseException in ParameterConvertionFailed. */
        public Object convertValue(String value, Type type) {
            try {
                return dateFormat.parse(value);
            } catch (ParseException e) {
                throw new ParameterConvertionFailed("Could not convert value "
                        + value + " with date format " + dateFormat, e);
            }
        }
    }
    /**
     * Converts a value to an {@link ExamplesTable}, using injectable header
     * and value separators (both default to "|").
     */
    public static class ExamplesTableConverter implements ParameterConverter {
        private String headerSeparator;
        private String valueSeparator;

        public ExamplesTableConverter() {
            this("|", "|");
        }

        public ExamplesTableConverter(String headerSeparator,
                String valueSeparator) {
            this.headerSeparator = headerSeparator;
            this.valueSeparator = valueSeparator;
        }

        /** Accepts ExamplesTable and its subclasses. */
        public boolean accept(Type type) {
            if (type instanceof Class<?>) {
                return ExamplesTable.class.isAssignableFrom((Class<?>) type);
            }
            return false;
        }

        public Object convertValue(String value, Type type) {
            return new ExamplesTable(value, headerSeparator, valueSeparator);
        }
    }
    /**
     * Converts a value by reflectively invoking a given single-String-argument
     * method on a given instance and returning the result.
     */
    public static class MethodReturningConverter implements ParameterConverter {
        private Object instance;
        private Method method;

        public MethodReturningConverter(Method method, Object instance) {
            this.method = method;
            this.instance = instance;
        }

        // NOTE(review): this tests returnType.isAssignableFrom(type); for
        // converting TO 'type' the usual direction would be
        // type.isAssignableFrom(returnType) — confirm which is intended.
        public boolean accept(Type type) {
            if (type instanceof Class<?>) {
                return method.getReturnType().isAssignableFrom((Class<?>) type);
            }
            return false;
        }

        /** Invokes the method; wraps any failure in ParameterConvertionFailed. */
        public Object convertValue(String value, Type type) {
            try {
                return method.invoke(instance, value);
            } catch (Exception e) {
                throw new ParameterConvertionFailed("Failed to invoke method "
                        + method + " with value " + value + " in " + instance,
                        e);
            }
        }
    }
}
|
Fixed generics warnings
|
jbehave-core/src/main/java/org/jbehave/core/steps/ParameterConverters.java
|
Fixed generics warnings
|
|
Java
|
mit
|
cecedf2e87498ff9bc8c95a8ff5fac5b2f47bdb3
| 0
|
vincentzhang96/VahrhedralBot
|
package co.phoenixlab.discord;
import co.phoenixlab.discord.api.DiscordApiClient;
import co.phoenixlab.discord.api.entities.Channel;
import co.phoenixlab.discord.api.entities.Message;
import co.phoenixlab.discord.api.entities.User;
import java.time.Duration;
import java.time.Instant;
import java.util.StringJoiner;
/**
 * Registers and implements the bot's chat commands: public commands plus an
 * "admin" sub-dispatcher restricted to user ids in the configured admin list.
 */
public class Commands {

    // Set when register() runs; used to compute uptime in adminStatus.
    private Instant registerTime;
    private final CommandDispatcher adminCommandDispatcher;

    public Commands(VahrhedralBot bot) {
        // Admin sub-commands live in their own dispatcher with an empty prefix.
        adminCommandDispatcher = new CommandDispatcher(bot, "");
    }

    /** Registers all commands on the given dispatcher and records the start time. */
    public void register(CommandDispatcher dispatcher) {
        registerAdminCommands();
        // "admin" stays usable even when the bot is stopped; "admins" and
        // "info" are regular commands that only work while the bot is active.
        dispatcher.registerAlwaysActiveCommand("admin", this::admin,
                "Administrative commands");
        dispatcher.registerCommand("admins", this::listAdmins,
                "List admins");
        dispatcher.registerCommand("info", this::info,
                "Display information about the caller or the provided name, if present. @Mentions and partial front " +
                "matches are supported");
        registerTime = Instant.now();
    }

    // Sub-commands reachable via "admin ..."; all always-active so the bot can
    // be controlled while stopped.
    private void registerAdminCommands() {
        adminCommandDispatcher.registerAlwaysActiveCommand("start", this::adminStart, "Start bot");
        adminCommandDispatcher.registerAlwaysActiveCommand("stop", this::adminStop, "Stop bot");
        adminCommandDispatcher.registerAlwaysActiveCommand("status", this::adminStatus, "Bot status");
        adminCommandDispatcher.registerAlwaysActiveCommand("kill", this::adminKill, "Kill the bot (terminate app)");
        adminCommandDispatcher.registerAlwaysActiveCommand("blacklist", this::adminBlacklist,
                "Prints the blacklist, or blacklists the given user. Supports @mention and partial front matching");
        adminCommandDispatcher.registerAlwaysActiveCommand("pardon", this::adminPardon,
                "Pardons the given user. Supports @mention and partial front matching");
    }

    // Announces shutdown, then terminates the entire JVM.
    private void adminKill(MessageContext context, String args) {
        context.getApiClient().sendMessage("Sudoku time, bye", context.getMessage().getChannelId());
        System.exit(0);
    }

    // Sets the main dispatcher active; reports whether it was already running.
    private void adminStart(MessageContext context, String args) {
        DiscordApiClient apiClient = context.getApiClient();
        CommandDispatcher mainDispatcher = context.getBot().getMainCommandDispatcher();
        if (mainDispatcher.active().compareAndSet(false, true)) {
            apiClient.sendMessage("Bot started", context.getMessage().getChannelId());
        } else {
            apiClient.sendMessage("Bot was already started", context.getMessage().getChannelId());
        }
    }

    // Sets the main dispatcher inactive; reports whether it was already stopped.
    private void adminStop(MessageContext context, String args) {
        DiscordApiClient apiClient = context.getApiClient();
        CommandDispatcher mainDispatcher = context.getBot().getMainCommandDispatcher();
        if (mainDispatcher.active().compareAndSet(true, false)) {
            apiClient.sendMessage("Bot stopped", context.getMessage().getChannelId());
        } else {
            apiClient.sendMessage("Bot was already stopped", context.getMessage().getChannelId());
        }
    }

    // Reports run state, server count, uptime since register(), and JVM memory.
    private void adminStatus(MessageContext context, String args) {
        DiscordApiClient apiClient = context.getApiClient();
        CommandDispatcher mainDispatcher = context.getBot().getMainCommandDispatcher();
        Instant now = Instant.now();
        Duration duration = Duration.between(registerTime, now);
        long s = duration.getSeconds();
        // Format: days:hours:minutes:seconds
        String uptime = String.format("%d:%02d:%02d:%02d", s / 86400, (s / 3600) % 24, (s % 3600) / 60, (s % 60));
        Runtime r = Runtime.getRuntime();
        // NOTE(review): "Used" is maxMemory - freeMemory, which overstates usage
        // when the heap has not grown to its max — confirm this is intended.
        String memory = String.format("%,dMB Used %,dMB Free %,dMB Max",
                (r.maxMemory() - r.freeMemory()) / 1024 / 1024,
                r.freeMemory() / 1024 / 1024,
                r.maxMemory() / 1024 / 1024);
        String response = String.format("**Status:** %s\n**Servers:** %d\n**Uptime:** %s\n**Memory:** `%s`",
                mainDispatcher.active().get() ? "Running" : "Stopped",
                apiClient.getServers().size(),
                uptime,
                memory);
        apiClient.sendMessage(response, context.getMessage().getChannelId());
    }

    // With no args: prints the current blacklist. With args: blacklists the
    // resolved user (admins cannot be blacklisted) and persists the config.
    private void adminBlacklist(MessageContext context, String args) {
        DiscordApiClient apiClient = context.getApiClient();
        VahrhedralBot bot = context.getBot();
        if (args.isEmpty()) {
            StringJoiner joiner = new StringJoiner(", ");
            // Ids whose user lookup fails are silently skipped.
            bot.getConfig().getBlacklist().stream().
                    map(apiClient::getUserById).
                    filter(user -> user != null).
                    map(User::getUsername).
                    forEach(joiner::add);
            String res = joiner.toString();
            if (res.isEmpty()) {
                res = "None";
            }
            apiClient.sendMessage("Blacklisted users: " + res, context.getMessage().getChannelId());
            return;
        }
        User user = findUser(context, args);
        if (user == null) {
            apiClient.sendMessage("Unable to find user", context.getMessage().getChannelId());
            return;
        }
        if (bot.getConfig().getAdmins().contains(user.getId())) {
            apiClient.sendMessage("Cannot blacklist an admin", context.getMessage().getChannelId());
            return;
        }
        bot.getConfig().getBlacklist().add(user.getId());
        bot.saveConfig();
        apiClient.sendMessage(String.format("`%s` has been blacklisted", user.getUsername()),
                context.getMessage().getChannelId());
    }

    // Removes the resolved user from the blacklist and persists the config;
    // reports whether the user was actually blacklisted.
    private void adminPardon(MessageContext context, String args) {
        DiscordApiClient apiClient = context.getApiClient();
        if (args.isEmpty()) {
            apiClient.sendMessage("Please specify a user", context.getMessage().getChannelId());
            return;
        }
        User user = findUser(context, args);
        if (user == null) {
            apiClient.sendMessage("Unable to find user", context.getMessage().getChannelId());
            return;
        }
        boolean removed = context.getBot().getConfig().getBlacklist().remove(user.getId());
        context.getBot().saveConfig();
        if (removed) {
            apiClient.sendMessage(String.format("`%s` has been pardoned", user.getUsername()),
                    context.getMessage().getChannelId());
        } else {
            apiClient.sendMessage(String.format("`%s` was not blacklisted", user.getUsername()),
                    context.getMessage().getChannelId());
        }
    }

    // Resolves a user: prefers the first @mention in the message, otherwise
    // falls back to a name lookup within the channel's parent server.
    // Returns null when no match is found.
    private User findUser(MessageContext context, String username) {
        Message message = context.getMessage();
        User user = null;
        Channel channel = context.getApiClient().getChannelById(message.getChannelId());
        // Attempt to find the given user
        // If the user is @mentioned, try that first
        if (message.getMentions() != null && message.getMentions().length > 0) {
            user = message.getMentions()[0];
        } else {
            User temp = context.getApiClient().findUser(username, channel.getParent());
            if (temp != null) {
                user = temp;
            }
        }
        return user;
    }

    // Forwards the remainder of the message to the admin dispatcher, but only
    // for users on the configured admin list; silently ignores everyone else.
    private void admin(MessageContext context, String args) {
        // Permission check
        if (!context.getBot().getConfig().getAdmins().contains(context.getMessage().getAuthor().getId())) {
            return;
        }
        Message original = context.getMessage();
        // NOTE(review): original.getChannelId() is passed for two different
        // constructor arguments here — confirm against Message's parameter order.
        adminCommandDispatcher.handleCommand(new Message(original.getAuthor(), original.getChannelId(), args,
                original.getChannelId(), original.getMentions(), original.getTime()));
    }

    // Lists the usernames of all configured admins (unresolvable ids skipped).
    private void listAdmins(MessageContext context, String s) {
        DiscordApiClient apiClient = context.getApiClient();
        VahrhedralBot bot = context.getBot();
        StringJoiner joiner = new StringJoiner(", ");
        bot.getConfig().getAdmins().stream().
                map(apiClient::getUserById).
                filter(user -> user != null).
                map(User::getUsername).
                forEach(joiner::add);
        String res = joiner.toString();
        if (res.isEmpty()) {
            res = "None";
        }
        apiClient.sendMessage("Admins: " + res, context.getMessage().getChannelId());
    }

    // Shows username, id/discriminator, blacklist/admin flags and avatar URL
    // for the caller or for the user resolved from args.
    private void info(MessageContext context, String args) {
        Message message = context.getMessage();
        Configuration config = context.getBot().getConfig();
        User user;
        if (!args.isEmpty()) {
            user = findUser(context, args);
        } else {
            user = message.getAuthor();
        }
        if (user == null) {
            context.getApiClient().sendMessage("Unable to find user. Try typing their name EXACTLY or" +
                    " @mention them instead", message.getChannelId());
        } else {
            String avatar = (user.getAvatar() == null ? "N/A" : user.getAvatarUrl().toExternalForm());
            String response = String.format("**Username:** %s\n**ID:** %s:%s\n%s%s**Avatar:** %s",
                    user.getUsername(), user.getId(), user.getDiscriminator(),
                    config.getBlacklist().contains(user.getId()) ? "**Blacklisted**\n" : "",
                    config.getAdmins().contains(user.getId()) ? "**Bot Administrator**\n" : "",
                    avatar);
            context.getApiClient().sendMessage(response, message.getChannelId());
        }
    }
}
|
src/main/java/co/phoenixlab/discord/Commands.java
|
package co.phoenixlab.discord;
import co.phoenixlab.discord.api.DiscordApiClient;
import co.phoenixlab.discord.api.entities.Channel;
import co.phoenixlab.discord.api.entities.Message;
import co.phoenixlab.discord.api.entities.User;
import java.time.Duration;
import java.time.Instant;
import java.util.StringJoiner;
/**
 * Registers and implements the bot's chat commands: public commands plus an
 * "admin" sub-dispatcher restricted to user ids in the configured admin list.
 */
public class Commands {

    // Set when register() runs; used to compute uptime in adminStatus.
    private Instant registerTime;
    private final CommandDispatcher adminCommandDispatcher;

    public Commands(VahrhedralBot bot) {
        // Admin sub-commands live in their own dispatcher with an empty prefix.
        adminCommandDispatcher = new CommandDispatcher(bot, "");
    }

    /** Registers all commands on the given dispatcher and records the start time. */
    public void register(CommandDispatcher dispatcher) {
        registerAdminCommands();
        // Note: in this version "admins" is registered always-active, i.e. it
        // works even while the bot is stopped; "info" requires the bot active.
        dispatcher.registerAlwaysActiveCommand("admin", this::admin,
                "Administrative commands");
        dispatcher.registerAlwaysActiveCommand("admins", this::listAdmins,
                "List admins");
        dispatcher.registerCommand("info", this::info,
                "Display information about the caller or the provided name, if present. @Mentions and partial front " +
                "matches are supported");
        registerTime = Instant.now();
    }

    // Sub-commands reachable via "admin ..."; all always-active so the bot can
    // be controlled while stopped.
    private void registerAdminCommands() {
        adminCommandDispatcher.registerAlwaysActiveCommand("start", this::adminStart, "Start bot");
        adminCommandDispatcher.registerAlwaysActiveCommand("stop", this::adminStop, "Stop bot");
        adminCommandDispatcher.registerAlwaysActiveCommand("status", this::adminStatus, "Bot status");
        adminCommandDispatcher.registerAlwaysActiveCommand("kill", this::adminKill, "Kill the bot (terminate app)");
        adminCommandDispatcher.registerAlwaysActiveCommand("blacklist", this::adminBlacklist,
                "Prints the blacklist, or blacklists the given user. Supports @mention and partial front matching");
        adminCommandDispatcher.registerAlwaysActiveCommand("pardon", this::adminPardon,
                "Pardons the given user. Supports @mention and partial front matching");
    }

    // Announces shutdown, then terminates the entire JVM.
    private void adminKill(MessageContext context, String args) {
        context.getApiClient().sendMessage("Sudoku time, bye", context.getMessage().getChannelId());
        System.exit(0);
    }

    // Sets the main dispatcher active; reports whether it was already running.
    private void adminStart(MessageContext context, String args) {
        DiscordApiClient apiClient = context.getApiClient();
        CommandDispatcher mainDispatcher = context.getBot().getMainCommandDispatcher();
        if (mainDispatcher.active().compareAndSet(false, true)) {
            apiClient.sendMessage("Bot started", context.getMessage().getChannelId());
        } else {
            apiClient.sendMessage("Bot was already started", context.getMessage().getChannelId());
        }
    }

    // Sets the main dispatcher inactive; reports whether it was already stopped.
    private void adminStop(MessageContext context, String args) {
        DiscordApiClient apiClient = context.getApiClient();
        CommandDispatcher mainDispatcher = context.getBot().getMainCommandDispatcher();
        if (mainDispatcher.active().compareAndSet(true, false)) {
            apiClient.sendMessage("Bot stopped", context.getMessage().getChannelId());
        } else {
            apiClient.sendMessage("Bot was already stopped", context.getMessage().getChannelId());
        }
    }

    // Reports run state, server count, uptime since register(), and JVM memory.
    private void adminStatus(MessageContext context, String args) {
        DiscordApiClient apiClient = context.getApiClient();
        CommandDispatcher mainDispatcher = context.getBot().getMainCommandDispatcher();
        Instant now = Instant.now();
        Duration duration = Duration.between(registerTime, now);
        long s = duration.getSeconds();
        // Format: days:hours:minutes:seconds
        String uptime = String.format("%d:%02d:%02d:%02d", s / 86400, (s / 3600) % 24, (s % 3600) / 60, (s % 60));
        Runtime r = Runtime.getRuntime();
        // NOTE(review): "Used" is maxMemory - freeMemory, which overstates usage
        // when the heap has not grown to its max — confirm this is intended.
        String memory = String.format("%,dMB Used %,dMB Free %,dMB Max",
                (r.maxMemory() - r.freeMemory()) / 1024 / 1024,
                r.freeMemory() / 1024 / 1024,
                r.maxMemory() / 1024 / 1024);
        String response = String.format("**Status:** %s\n**Servers:** %d\n**Uptime:** %s\n**Memory:** `%s`",
                mainDispatcher.active().get() ? "Running" : "Stopped",
                apiClient.getServers().size(),
                uptime,
                memory);
        apiClient.sendMessage(response, context.getMessage().getChannelId());
    }

    // With no args: prints the current blacklist. With args: blacklists the
    // resolved user (admins cannot be blacklisted) and persists the config.
    private void adminBlacklist(MessageContext context, String args) {
        DiscordApiClient apiClient = context.getApiClient();
        VahrhedralBot bot = context.getBot();
        if (args.isEmpty()) {
            StringJoiner joiner = new StringJoiner(", ");
            // Ids whose user lookup fails are silently skipped.
            bot.getConfig().getBlacklist().stream().
                    map(apiClient::getUserById).
                    filter(user -> user != null).
                    map(User::getUsername).
                    forEach(joiner::add);
            String res = joiner.toString();
            if (res.isEmpty()) {
                res = "None";
            }
            apiClient.sendMessage("Blacklisted users: " + res, context.getMessage().getChannelId());
            return;
        }
        User user = findUser(context, args);
        if (user == null) {
            apiClient.sendMessage("Unable to find user", context.getMessage().getChannelId());
            return;
        }
        if (bot.getConfig().getAdmins().contains(user.getId())) {
            apiClient.sendMessage("Cannot blacklist an admin", context.getMessage().getChannelId());
            return;
        }
        bot.getConfig().getBlacklist().add(user.getId());
        bot.saveConfig();
        apiClient.sendMessage(String.format("`%s` has been blacklisted", user.getUsername()),
                context.getMessage().getChannelId());
    }

    // Removes the resolved user from the blacklist and persists the config;
    // reports whether the user was actually blacklisted.
    private void adminPardon(MessageContext context, String args) {
        DiscordApiClient apiClient = context.getApiClient();
        if (args.isEmpty()) {
            apiClient.sendMessage("Please specify a user", context.getMessage().getChannelId());
            return;
        }
        User user = findUser(context, args);
        if (user == null) {
            apiClient.sendMessage("Unable to find user", context.getMessage().getChannelId());
            return;
        }
        boolean removed = context.getBot().getConfig().getBlacklist().remove(user.getId());
        context.getBot().saveConfig();
        if (removed) {
            apiClient.sendMessage(String.format("`%s` has been pardoned", user.getUsername()),
                    context.getMessage().getChannelId());
        } else {
            apiClient.sendMessage(String.format("`%s` was not blacklisted", user.getUsername()),
                    context.getMessage().getChannelId());
        }
    }

    // Resolves a user: prefers the first @mention in the message, otherwise
    // falls back to a name lookup within the channel's parent server.
    // Returns null when no match is found.
    private User findUser(MessageContext context, String username) {
        Message message = context.getMessage();
        User user = null;
        Channel channel = context.getApiClient().getChannelById(message.getChannelId());
        // Attempt to find the given user
        // If the user is @mentioned, try that first
        if (message.getMentions() != null && message.getMentions().length > 0) {
            user = message.getMentions()[0];
        } else {
            User temp = context.getApiClient().findUser(username, channel.getParent());
            if (temp != null) {
                user = temp;
            }
        }
        return user;
    }

    // Forwards the remainder of the message to the admin dispatcher, but only
    // for users on the configured admin list; silently ignores everyone else.
    private void admin(MessageContext context, String args) {
        // Permission check
        if (!context.getBot().getConfig().getAdmins().contains(context.getMessage().getAuthor().getId())) {
            return;
        }
        Message original = context.getMessage();
        // NOTE(review): original.getChannelId() is passed for two different
        // constructor arguments here — confirm against Message's parameter order.
        adminCommandDispatcher.handleCommand(new Message(original.getAuthor(), original.getChannelId(), args,
                original.getChannelId(), original.getMentions(), original.getTime()));
    }

    // Lists the usernames of all configured admins (unresolvable ids skipped).
    private void listAdmins(MessageContext context, String s) {
        DiscordApiClient apiClient = context.getApiClient();
        VahrhedralBot bot = context.getBot();
        StringJoiner joiner = new StringJoiner(", ");
        bot.getConfig().getAdmins().stream().
                map(apiClient::getUserById).
                filter(user -> user != null).
                map(User::getUsername).
                forEach(joiner::add);
        String res = joiner.toString();
        if (res.isEmpty()) {
            res = "None";
        }
        apiClient.sendMessage("Admins: " + res, context.getMessage().getChannelId());
    }

    // Shows username, id/discriminator, blacklist/admin flags and avatar URL
    // for the caller or for the user resolved from args.
    private void info(MessageContext context, String args) {
        Message message = context.getMessage();
        Configuration config = context.getBot().getConfig();
        User user;
        if (!args.isEmpty()) {
            user = findUser(context, args);
        } else {
            user = message.getAuthor();
        }
        if (user == null) {
            context.getApiClient().sendMessage("Unable to find user. Try typing their name EXACTLY or" +
                    " @mention them instead", message.getChannelId());
        } else {
            String avatar = (user.getAvatar() == null ? "N/A" : user.getAvatarUrl().toExternalForm());
            String response = String.format("**Username:** %s\n**ID:** %s:%s\n%s%s**Avatar:** %s",
                    user.getUsername(), user.getId(), user.getDiscriminator(),
                    config.getBlacklist().contains(user.getId()) ? "**Blacklisted**\n" : "",
                    config.getAdmins().contains(user.getId()) ? "**Bot Administrator**\n" : "",
                    avatar);
            context.getApiClient().sendMessage(response, message.getChannelId());
        }
    }
}
|
Cmd admins should be a regular command
|
src/main/java/co/phoenixlab/discord/Commands.java
|
Cmd admins should be a regular command
|
|
Java
|
mit
|
e8de211976b31b7a273366416943cfde3d926031
| 0
|
bugsnag/bugsnag-java,bugsnag/bugsnag-java,bugsnag/bugsnag-java
|
package com.bugsnag;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import com.bugsnag.callbacks.Callback;
import com.bugsnag.delivery.Delivery;
import com.bugsnag.delivery.HttpDelivery;
import com.bugsnag.delivery.OutputStreamDelivery;
import com.bugsnag.serialization.Serializer;
import org.junit.Test;
import javax.servlet.http.HttpServletRequest;
import java.io.ByteArrayOutputStream;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
public class BugsnagTest {
    // With no delivery configured, notify() must fail gracefully (return false).
    @Test
    public void testNoDeliveryFails() {
        Bugsnag bugsnag = Bugsnag.init("apikey");
        bugsnag.setDelivery(null);
        boolean result = bugsnag.notify(new RuntimeException());
        assertFalse(result);
    }

    // Exceptions whose class name is in the ignore list are not notified.
    @Test
    public void testIgnoreClasses() {
        Bugsnag bugsnag = Bugsnag.init("apikey");
        bugsnag.setDelivery(BugsnagTestUtils.generateDelivery());
        // Ignore neither
        bugsnag.setIgnoreClasses();
        assertTrue(bugsnag.notify(new RuntimeException()));
        assertTrue(bugsnag.notify(new TestException()));
        // Ignore just RuntimeException
        bugsnag.setIgnoreClasses(RuntimeException.class.getName());
        assertFalse(bugsnag.notify(new RuntimeException()));
        assertTrue(bugsnag.notify(new TestException()));
        // Ignore both
        bugsnag.setIgnoreClasses(RuntimeException.class.getName(), TestException.class.getName());
        assertFalse(bugsnag.notify(new RuntimeException()));
        assertFalse(bugsnag.notify(new TestException()));
    }

    // Reports are only sent when the current release stage is in the notify list.
    @Test
    public void testNotifyReleaseStages() {
        Bugsnag bugsnag = Bugsnag.init("apikey");
        bugsnag.setDelivery(BugsnagTestUtils.generateDelivery());
        bugsnag.setReleaseStage("production");
        // Never send
        bugsnag.setNotifyReleaseStages();
        assertFalse(bugsnag.notify(new Throwable()));
        // Ignore 'production'
        bugsnag.setNotifyReleaseStages("staging", "development");
        assertFalse(bugsnag.notify(new Throwable()));
        // Allow 'production'
        bugsnag.setNotifyReleaseStages("production");
        assertTrue(bugsnag.notify(new Throwable()));
        // Allow 'production' and others
        bugsnag.setNotifyReleaseStages("production", "staging", "development");
        assertTrue(bugsnag.notify(new Throwable()));
    }

    // Frames from configured project packages are flagged as "in project".
    @Test
    public void testProjectPackages() {
        Bugsnag bugsnag = Bugsnag.init("apikey");
        bugsnag.setDelivery(new Delivery() {
            @Override
            public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
                Report report = ((Notification) object).getEvents().get(0);
                assertTrue(report.getExceptions().get(0).getStacktrace().get(0).isInProject());
            }
            @Override
            public void close() {
            }
        });
        bugsnag.setProjectPackages("com.bugsnag");
        assertTrue(bugsnag.notify(new Throwable()));
    }

    // setAppVersion value appears in the report's app.version field.
    @Test
    public void testAppVersion() {
        Bugsnag bugsnag = Bugsnag.init("apikey");
        bugsnag.setAppVersion("1.2.3");
        bugsnag.setDelivery(new Delivery() {
            @Override
            public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
                Report report = ((Notification) object).getEvents().get(0);
                assertEquals("1.2.3", report.getApp().get("version"));
            }
            @Override
            public void close() {
            }
        });
        assertTrue(bugsnag.notify(new Throwable()));
    }

    // setAppType value appears in the report's app.type field.
    @Test
    public void testAppType() {
        Bugsnag bugsnag = Bugsnag.init("apikey");
        bugsnag.setAppType("testtype");
        bugsnag.setDelivery(new Delivery() {
            @Override
            public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
                Report report = ((Notification) object).getEvents().get(0);
                assertEquals("testtype", report.getApp().get("type"));
            }
            @Override
            public void close() {
            }
        });
        assertTrue(bugsnag.notify(new Throwable()));
    }

    // Severity passed to notify() is carried through to the report.
    @Test
    public void testSeverity() {
        Bugsnag bugsnag = Bugsnag.init("apikey");
        bugsnag.setDelivery(new Delivery() {
            @Override
            public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
                Report report = ((Notification) object).getEvents().get(0);
                assertEquals(Severity.INFO.getValue(), report.getSeverity());
            }
            @Override
            public void close() {
            }
        });
        assertTrue(bugsnag.notify(new Throwable(), Severity.INFO));
    }

    // Metadata keys matching the configured filters are redacted to
    // "[FILTERED]" in every tab; non-matching keys pass through unchanged.
    @Test
    public void testFilters() {
        Bugsnag bugsnag = Bugsnag.init("apikey");
        bugsnag.setFilters("testfilter1", "testfilter2");
        bugsnag.setDelivery(new Delivery() {
            @SuppressWarnings("unchecked")
            @Override
            public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
                Report report = ((Notification) object).getEvents().get(0);
                Map<String, Object> firstTab =
                        (Map<String, Object>) report.getMetaData().get("firsttab");
                final Map<String, Object> secondTab =
                        (Map<String, Object>) report.getMetaData().get("secondtab");
                assertEquals("[FILTERED]", firstTab.get("testfilter1"));
                assertEquals("[FILTERED]", firstTab.get("testfilter2"));
                assertEquals("secretpassword", firstTab.get("testfilter3"));
                assertEquals("[FILTERED]", secondTab.get("testfilter1"));
            }
            @Override
            public void close() {
            }
        });
        assertTrue(bugsnag.notify(new Throwable(), new Callback() {
            @Override
            public void beforeNotify(Report report) {
                report.addToTab("firsttab", "testfilter1", "secretpassword");
                report.addToTab("firsttab", "testfilter2", "secretpassword");
                report.addToTab("firsttab", "testfilter3", "secretpassword");
                report.addToTab("secondtab", "testfilter1", "secretpassword");
            }
        }));
    }

    // Header filtering is case-sensitive here: "Authorization"/"Cookie" are
    // redacted while their lowercase variants are left intact.
    @Test
    public void testFilterHeaders() {
        Bugsnag bugsnag = Bugsnag.init("apikey");
        bugsnag.setDelivery(new Delivery() {
            @SuppressWarnings("unchecked")
            @Override
            public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
                Report report = ((Notification) object).getEvents().get(0);
                Map<String, Object> requestTab =
                        (Map<String, Object>) report.getMetaData().get("request");
                Map<String, Object> headersMap =
                        (Map<String, Object>) requestTab.get("headers");
                assertEquals("[FILTERED]", headersMap.get("Authorization"));
                assertEquals("User:Password", headersMap.get("authorization"));
                assertEquals("[FILTERED]", headersMap.get("Cookie"));
                assertEquals("123456ABCDEF", headersMap.get("cookie"));
            }
            @Override
            public void close() {
            }
        });
        assertTrue(bugsnag.notify(new Throwable(), new Callback() {
            @Override
            public void beforeNotify(Report report) {
                Map<String, String> headers = new HashMap<String, String>();
                headers.put("Authorization", "User:Password");
                headers.put("authorization", "User:Password");
                headers.put("Cookie", "123456ABCDEF");
                headers.put("cookie", "123456ABCDEF");
                report.addToTab("request", "headers", headers);
            }
        }));
    }

    // setUser values appear in the report's user id/email/name fields.
    @Test
    public void testUser() {
        Bugsnag bugsnag = Bugsnag.init("apikey");
        bugsnag.setDelivery(new Delivery() {
            @Override
            public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
                Report report = ((Notification) object).getEvents().get(0);
                assertEquals("123", report.getUser().get("id"));
                assertEquals("test@example.com", report.getUser().get("email"));
                assertEquals("test name", report.getUser().get("name"));
            }
            @Override
            public void close() {
            }
        });
        assertTrue(bugsnag.notify(new Throwable(), new Callback() {
            @Override
            public void beforeNotify(Report report) {
                report.setUser("123", "test@example.com", "test name");
            }
        }));
    }

    // A context set in a registered callback is carried through to the report.
    @Test
    public void testContext() {
        Bugsnag bugsnag = Bugsnag.init("apikey");
        bugsnag.addCallback(new Callback() {
            @Override
            public void beforeNotify(Report report) {
                report.setContext("the context");
            }
        });
        bugsnag.setDelivery(new Delivery() {
            @Override
            public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
                Report report = ((Notification) object).getEvents().get(0);
                assertEquals("the context", report.getContext());
            }
            @Override
            public void close() {
            }
        });
        assertTrue(bugsnag.notify(new Throwable()));
    }

    // A grouping hash set in a callback is carried through to the report.
    @Test
    public void testGroupingHash() {
        Bugsnag bugsnag = Bugsnag.init("apikey");
        bugsnag.addCallback(new Callback() {
            @Override
            public void beforeNotify(Report report) {
                report.setGroupingHash("the grouping hash");
            }
        });
        bugsnag.setDelivery(new Delivery() {
            @Override
            public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
                Report report = ((Notification) object).getEvents().get(0);
                assertEquals("the grouping hash", report.getGroupingHash());
            }
            @Override
            public void close() {
            }
        });
        assertTrue(bugsnag.notify(new Throwable()));
    }

    // A registered callback can mutate the report (here: the api key).
    @Test
    public void testSingleCallback() {
        Bugsnag bugsnag = Bugsnag.init("apikey");
        bugsnag.addCallback(new Callback() {
            @Override
            public void beforeNotify(Report report) {
                report.setApiKey("newapikey");
            }
        });
        bugsnag.setDelivery(new Delivery() {
            @Override
            public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
                Report report = ((Notification) object).getEvents().get(0);
                assertEquals("newapikey", report.getApiKey());
            }
            @Override
            public void close() {
            }
        });
        assertTrue(bugsnag.notify(new Throwable()));
    }

    // A callback passed directly to notify() also mutates the report.
    @Test
    public void testSingleCallbackInNotify() {
        Bugsnag bugsnag = Bugsnag.init("apikey");
        bugsnag.setDelivery(new Delivery() {
            @Override
            public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
                Report report = ((Notification) object).getEvents().get(0);
                assertEquals("newapikey", report.getApiKey());
            }
            @Override
            public void close() {
            }
        });
        assertTrue(bugsnag.notify(new Throwable(), new Callback() {
            @Override
            public void beforeNotify(Report report) {
                report.setApiKey("newapikey");
            }
        }));
    }

    // Callbacks run in registration order; the last write wins.
    @Test
    public void testCallbackOrder() {
        Bugsnag bugsnag = Bugsnag.init("apikey");
        bugsnag.addCallback(new Callback() {
            @Override
            public void beforeNotify(Report report) {
                report.setApiKey("newapikey");
            }
        });
        bugsnag.addCallback(new Callback() {
            @Override
            public void beforeNotify(Report report) {
                report.setApiKey("secondnewapikey");
            }
        });
        bugsnag.setDelivery(new Delivery() {
            @Override
            public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
                Report report = ((Notification) object).getEvents().get(0);
                assertEquals("secondnewapikey", report.getApiKey());
            }
            @Override
            public void close() {
            }
        });
        assertTrue(bugsnag.notify(new Throwable()));
    }

    // A callback that calls report.cancel() suppresses the notification.
    @Test
    public void testCallbackCancel() {
        Bugsnag bugsnag = Bugsnag.init("apikey");
        bugsnag.setDelivery(BugsnagTestUtils.generateDelivery());
        bugsnag.addCallback(new Callback() {
            @Override
            public void beforeNotify(Report report) {
                report.cancel();
            }
        });
        // Test the report is not sent
        assertFalse(bugsnag.notify(new Throwable()));
    }

    // The (deprecated) endpoint setter path still propagates to the delivery.
    @SuppressWarnings("deprecation") // ensures deprecated setEndpoint method still works correctly
    @Test
    public void testEndpoint() {
        Bugsnag bugsnag = Bugsnag.init("apikey");
        bugsnag.setDelivery(new HttpDelivery() {
            String endpoint;
            @Override
            public void setEndpoint(String endpoint) {
                this.endpoint = endpoint;
            }
            @Override
            public void setTimeout(int timeout) {
            }
            @Override
            public void setProxy(Proxy proxy) {
            }
            @Override
            public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
                assertEquals("https://www.example.com", endpoint);
            }
            @Override
            public void close() {
            }
        });
        bugsnag.setEndpoints("https://www.example.com", null);
        assertTrue(bugsnag.notify(new Throwable()));
    }

    // A proxy set on the client is propagated to the HttpDelivery.
    @Test
    public void testProxy() {
        Bugsnag bugsnag = Bugsnag.init("apikey");
        bugsnag.setDelivery(new HttpDelivery() {
            Proxy proxy;
            @Override
            public void setEndpoint(String endpoint) {
            }
            @Override
            public void setTimeout(int timeout) {
            }
            @Override
            public void setProxy(Proxy proxy) {
                this.proxy = proxy;
            }
            @Override
            public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
                assertEquals("/127.0.0.1:8080", proxy.address().toString());
            }
            @Override
            public void close() {
            }
        });
        Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress("127.0.0.1", 8080));
        bugsnag.setProxy(proxy);
        assertTrue(bugsnag.notify(new Throwable()));
    }
@Test
public void testSendThreads() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setSendThreads(true);
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
// There is information about at least one thread
assertTrue(report.getThreads().size() > 0);
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testHandledIncrementNoSession() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertNull(report.getSession());
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testHandledIncrementWithSession() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.startSession();
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
Map<String, Object> session = report.getSession();
assertNotNull(session);
@SuppressWarnings("unchecked")
Map<String, Object> handledCounts = (Map<String, Object>) session.get("events");
assertEquals(1, handledCounts.get("handled"));
assertEquals(0, handledCounts.get("unhandled"));
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testSerialization() {
ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setDelivery(new OutputStreamDelivery(byteStream));
bugsnag.notify(new RuntimeException());
// Exact content will vary with stacktrace so just check for some content
assertTrue(new String(byteStream.toByteArray()).length() > 0);
}
@Test(expected = UnsupportedOperationException.class)
public void testUncaughtHandlerModification() {
Set<Bugsnag> bugsnags = Bugsnag.uncaughtExceptionClients();
bugsnags.clear();
}
// Test exception class
private class TestException extends RuntimeException {
private static final long serialVersionUID = -458298914160798211L;
}
}
|
bugsnag/src/test/java/com/bugsnag/BugsnagTest.java
|
package com.bugsnag;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import com.bugsnag.callbacks.Callback;
import com.bugsnag.delivery.Delivery;
import com.bugsnag.delivery.HttpDelivery;
import com.bugsnag.delivery.OutputStreamDelivery;
import com.bugsnag.serialization.Serializer;
import org.junit.Test;
import java.io.ByteArrayOutputStream;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.util.Map;
import java.util.Set;
public class BugsnagTest {
@Test
public void testNoDeliveryFails() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setDelivery(null);
boolean result = bugsnag.notify(new RuntimeException());
assertFalse(result);
}
@Test
public void testIgnoreClasses() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setDelivery(BugsnagTestUtils.generateDelivery());
// Ignore neither
bugsnag.setIgnoreClasses();
assertTrue(bugsnag.notify(new RuntimeException()));
assertTrue(bugsnag.notify(new TestException()));
// Ignore just RuntimeException
bugsnag.setIgnoreClasses(RuntimeException.class.getName());
assertFalse(bugsnag.notify(new RuntimeException()));
assertTrue(bugsnag.notify(new TestException()));
// Ignore both
bugsnag.setIgnoreClasses(RuntimeException.class.getName(), TestException.class.getName());
assertFalse(bugsnag.notify(new RuntimeException()));
assertFalse(bugsnag.notify(new TestException()));
}
@Test
public void testNotifyReleaseStages() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setDelivery(BugsnagTestUtils.generateDelivery());
bugsnag.setReleaseStage("production");
// Never send
bugsnag.setNotifyReleaseStages();
assertFalse(bugsnag.notify(new Throwable()));
// Ignore 'production'
bugsnag.setNotifyReleaseStages("staging", "development");
assertFalse(bugsnag.notify(new Throwable()));
// Allow 'production'
bugsnag.setNotifyReleaseStages("production");
assertTrue(bugsnag.notify(new Throwable()));
// Allow 'production' and others
bugsnag.setNotifyReleaseStages("production", "staging", "development");
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testProjectPackages() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertTrue(report.getExceptions().get(0).getStacktrace().get(0).isInProject());
}
@Override
public void close() {
}
});
bugsnag.setProjectPackages("com.bugsnag");
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testAppVersion() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setAppVersion("1.2.3");
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertEquals("1.2.3", report.getApp().get("version"));
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testAppType() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setAppType("testtype");
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertEquals("testtype", report.getApp().get("type"));
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testSeverity() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertEquals(Severity.INFO.getValue(), report.getSeverity());
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable(), Severity.INFO));
}
@Test
public void testFilters() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setFilters("testfilter1", "testfilter2");
bugsnag.setDelivery(new Delivery() {
@SuppressWarnings("unchecked")
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
Map<String, Object> firstTab =
(Map<String, Object>) report.getMetaData().get("firsttab");
final Map<String, Object> secondTab =
(Map<String, Object>) report.getMetaData().get("secondtab");
assertEquals("[FILTERED]", firstTab.get("testfilter1"));
assertEquals("[FILTERED]", firstTab.get("testfilter2"));
assertEquals("secretpassword", firstTab.get("testfilter3"));
assertEquals("[FILTERED]", secondTab.get("testfilter1"));
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable(), new Callback() {
@Override
public void beforeNotify(Report report) {
report.addToTab("firsttab", "testfilter1", "secretpassword");
report.addToTab("firsttab", "testfilter2", "secretpassword");
report.addToTab("firsttab", "testfilter3", "secretpassword");
report.addToTab("secondtab", "testfilter1", "secretpassword");
}
}));
}
@Test
public void testUser() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertEquals("123", report.getUser().get("id"));
assertEquals("test@example.com", report.getUser().get("email"));
assertEquals("test name", report.getUser().get("name"));
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable(), new Callback() {
@Override
public void beforeNotify(Report report) {
report.setUser("123", "test@example.com", "test name");
}
}));
}
@Test
public void testContext() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.addCallback(new Callback() {
@Override
public void beforeNotify(Report report) {
report.setContext("the context");
}
});
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertEquals("the context", report.getContext());
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testGroupingHash() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.addCallback(new Callback() {
@Override
public void beforeNotify(Report report) {
report.setGroupingHash("the grouping hash");
}
});
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertEquals("the grouping hash", report.getGroupingHash());
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testSingleCallback() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.addCallback(new Callback() {
@Override
public void beforeNotify(Report report) {
report.setApiKey("newapikey");
}
});
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertEquals("newapikey", report.getApiKey());
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testSingleCallbackInNotify() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertEquals("newapikey", report.getApiKey());
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable(), new Callback() {
@Override
public void beforeNotify(Report report) {
report.setApiKey("newapikey");
}
}));
}
@Test
public void testCallbackOrder() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.addCallback(new Callback() {
@Override
public void beforeNotify(Report report) {
report.setApiKey("newapikey");
}
});
bugsnag.addCallback(new Callback() {
@Override
public void beforeNotify(Report report) {
report.setApiKey("secondnewapikey");
}
});
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertEquals("secondnewapikey", report.getApiKey());
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testCallbackCancel() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setDelivery(BugsnagTestUtils.generateDelivery());
bugsnag.addCallback(new Callback() {
@Override
public void beforeNotify(Report report) {
report.cancel();
}
});
// Test the report is not sent
assertFalse(bugsnag.notify(new Throwable()));
}
@SuppressWarnings("deprecation") // ensures deprecated setEndpoint method still works correctly
@Test
public void testEndpoint() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setDelivery(new HttpDelivery() {
String endpoint;
@Override
public void setEndpoint(String endpoint) {
this.endpoint = endpoint;
}
@Override
public void setTimeout(int timeout) {
}
@Override
public void setProxy(Proxy proxy) {
}
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
assertEquals("https://www.example.com", endpoint);
}
@Override
public void close() {
}
});
bugsnag.setEndpoints("https://www.example.com", null);
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testProxy() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setDelivery(new HttpDelivery() {
Proxy proxy;
@Override
public void setEndpoint(String endpoint) {
}
@Override
public void setTimeout(int timeout) {
}
@Override
public void setProxy(Proxy proxy) {
this.proxy = proxy;
}
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
assertEquals("/127.0.0.1:8080", proxy.address().toString());
}
@Override
public void close() {
}
});
Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress("127.0.0.1", 8080));
bugsnag.setProxy(proxy);
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testSendThreads() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setSendThreads(true);
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
// There is information about at least one thread
assertTrue(report.getThreads().size() > 0);
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testHandledIncrementNoSession() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertNull(report.getSession());
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testHandledIncrementWithSession() {
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.startSession();
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
Map<String, Object> session = report.getSession();
assertNotNull(session);
@SuppressWarnings("unchecked")
Map<String, Object> handledCounts = (Map<String, Object>) session.get("events");
assertEquals(1, handledCounts.get("handled"));
assertEquals(0, handledCounts.get("unhandled"));
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testSerialization() {
ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
Bugsnag bugsnag = Bugsnag.init("apikey");
bugsnag.setDelivery(new OutputStreamDelivery(byteStream));
bugsnag.notify(new RuntimeException());
// Exact content will vary with stacktrace so just check for some content
assertTrue(new String(byteStream.toByteArray()).length() > 0);
}
@Test(expected = UnsupportedOperationException.class)
public void testUncaughtHandlerModification() {
Set<Bugsnag> bugsnags = Bugsnag.uncaughtExceptionClients();
bugsnags.clear();
}
// Test exception class
private class TestException extends RuntimeException {
private static final long serialVersionUID = -458298914160798211L;
}
}
|
added a test for default filters on request headers
|
bugsnag/src/test/java/com/bugsnag/BugsnagTest.java
|
added a test for default filters on request headers
|
|
Java
|
agpl-3.0
|
b6a79a0e911144283c112fe84662568776ad603e
| 0
|
papparazzo/moba-appServer,papparazzo/moba-server
|
/*
* Project: moba-server
*
* Copyright (C) 2016 Stefan Paproth <pappi-@gmx.de>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.txt>.
*
*/
package moba.server.messagehandler;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import moba.server.com.SenderI;
import moba.server.database.Database;
import moba.server.datatypes.enumerations.ErrorId;
import moba.server.datatypes.objects.TrackLayoutInfoData;
import moba.server.datatypes.objects.TracklayoutSymbolData;
import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.NoSuchElementException;
import moba.server.json.JSONException;
import moba.server.messages.Message;
import moba.server.messages.MessageHandlerA;
import moba.server.messages.messageType.LayoutMessage;
import moba.server.tracklayout.utilities.TracklayoutLock;
import moba.server.utilities.config.Config;
import moba.server.utilities.config.ConfigException;
import moba.server.utilities.exceptions.ErrorException;
public class Layout extends MessageHandlerA {
protected static final Logger LOGGER = Logger.getLogger(Logger.GLOBAL_LOGGER_NAME);
protected Database database = null;
protected SenderI dispatcher = null;
protected TracklayoutLock lock = null;
protected Config config = null;
protected long activeLayout = 0;
public Layout(SenderI dispatcher, Database database, TracklayoutLock lock, Config config) {
this.database = database;
this.dispatcher = dispatcher;
this.lock = lock;
this.config = config;
}
@Override
public int getGroupId() {
return LayoutMessage.GROUP_ID;
}
@Override
public void init() {
Object o;
o = config.getSection("trackLayout.activeTracklayoutId");
if(o != null) {
activeLayout = (long)o;
}
}
@Override
public void shutdown() {
freeResources(-1);
try {
storeData();
} catch(ConfigException | IOException | JSONException e) {
Layout.LOGGER.log(Level.WARNING, "<{0}>", new Object[]{e.toString()});
}
}
@Override
public void freeResources(long appId) {
lock.freeLocks(appId);
}
@Override
public void handleMsg(Message msg) throws ErrorException {
try {
switch(LayoutMessage.fromId(msg.getMessageId())) {
case GET_LAYOUTS_REQ:
getLayouts(msg);
break;
case GET_LAYOUT_REQ:
getLayout(msg, true);
break;
case GET_LAYOUT_READ_ONLY_REQ:
getLayout(msg, false);
break;
case DELETE_LAYOUT:
deleteLayout(msg);
break;
case CREATE_LAYOUT:
createLayout(msg);
break;
case UPDATE_LAYOUT:
updateLayout(msg);
break;
case UNLOCK_LAYOUT:
unlockLayout(msg);
break;
case LOCK_LAYOUT:
lockLayout(msg);
break;
case SAVE_LAYOUT:
saveLayout(msg);
break;
default:
throw new ErrorException(ErrorId.UNKNOWN_MESSAGE_ID, "unknow msg <" + Long.toString(msg.getMessageId()) + ">.");
}
} catch(SQLException e) {
throw new ErrorException(ErrorId.DATABASE_ERROR, e.getMessage());
} catch(ConfigException | IOException | JSONException e) {
throw new ErrorException(ErrorId.UNKNOWN_ERROR, e.getMessage());
}
}
protected void getLayouts(Message msg)
throws SQLException {
String q = "SELECT * FROM `TrackLayouts`;";
ArrayList<TrackLayoutInfoData> arraylist;
Layout.LOGGER.log(Level.INFO, q);
try(ResultSet rs = database.query(q)) {
arraylist = new ArrayList();
while(rs.next()) {
long id = rs.getLong("Id");
arraylist.add(new TrackLayoutInfoData(
id,
rs.getString("Name"),
rs.getString("Description"),
rs.getInt("Locked"),
(id == activeLayout),
rs.getDate("ModificationDate"),
rs.getDate("CreationDate")
));
}
}
dispatcher.dispatch(new Message(LayoutMessage.GET_LAYOUTS_RES, arraylist), msg.getEndpoint());
}
protected void deleteLayout(Message msg)
throws SQLException, IOException, ConfigException, JSONException, ErrorException {
long id = (Long)msg.getData();
lock.isLockedByApp(id, msg.getEndpoint());
Connection con = database.getConnection();
String q = "DELETE FROM `TrackLayouts` WHERE (`locked` IS NULL OR `locked` = ?) AND `id` = ? ";
try (PreparedStatement pstmt = con.prepareStatement(q)) {
pstmt.setLong(1, msg.getEndpoint().getAppId());
pstmt.setLong(2, id);
Layout.LOGGER.log(Level.INFO, "<{0}>", new Object[]{pstmt.toString()});
if(pstmt.executeUpdate() == 0) {
throw new ErrorException(ErrorId.DATASET_MISSING, "could not delete <" + String.valueOf(id) + ">");
}
}
if(id == activeLayout) {
storeData(-1);
}
dispatcher.dispatch(new Message(LayoutMessage.LAYOUT_DELETED, id));
}
protected void createLayout(Message msg)
throws SQLException, ConfigException, IOException, JSONException {
Map<String, Object> map = (Map)msg.getData();
boolean isActive = (boolean)map.get("active");
long currAppId = msg.getEndpoint().getAppId();
TrackLayoutInfoData tl = new TrackLayoutInfoData((String)map.get("name"), (String)map.get("description"), currAppId, isActive);
Connection con = database.getConnection();
String q = "INSERT INTO `TrackLayouts` (`Name`, `Description`, `CreationDate`, `ModificationDate`, `Locked`) VALUES (?, ?, NOW(), NOW(), ?)";
try(PreparedStatement pstmt = con.prepareStatement(q, PreparedStatement.RETURN_GENERATED_KEYS)) {
pstmt.setString(1, tl.getName());
pstmt.setString(2, tl.getDescription());
pstmt.setLong(3, currAppId);
pstmt.executeUpdate();
Layout.LOGGER.log(Level.INFO, pstmt.toString());
try(ResultSet rs = pstmt.getGeneratedKeys()) {
rs.next();
int id = rs.getInt(1);
if(isActive) {
storeData(id);
}
tl.setId(id);
}
}
dispatcher.dispatch(new Message(LayoutMessage.LAYOUT_CREATED, tl));
}
protected void updateLayout(Message msg)
throws SQLException, ConfigException, IOException, JSONException, ErrorException {
Map<String, Object> map = (Map)msg.getData();
long id = (Long)map.get("id");
lock.isLockedByApp(id, msg.getEndpoint());
TrackLayoutInfoData tl;
boolean active = (boolean)map.get("active");
long appId = msg.getEndpoint().getAppId();
tl = new TrackLayoutInfoData(id, (String)map.get("name"), (String)map.get("description"), appId, active, new Date(), getCreationDate(id));
Connection con = database.getConnection();
String q = "UPDATE `TrackLayouts` SET `Name` = ?, `Description` = ?, `ModificationDate` = ? WHERE (`locked` IS NULL OR `locked` = ?) AND `id` = ? ";
try (PreparedStatement pstmt = con.prepareStatement(q)) {
pstmt.setString(1, tl.getName());
pstmt.setString(2, tl.getDescription());
pstmt.setDate(3, new java.sql.Date(tl.getModificationDate().getTime()));
pstmt.setLong(4, appId);
pstmt.setLong(5, id);
Layout.LOGGER.log(Level.INFO, pstmt.toString());
if(pstmt.executeUpdate() == 0) {
throw new ErrorException(ErrorId.DATASET_MISSING, "could not update <" + String.valueOf(id) + ">");
}
if(active) {
storeData(id);
}
dispatcher.dispatch(new Message(LayoutMessage.LAYOUT_UPDATED, tl));
}
}
protected void unlockLayout(Message msg)
throws SQLException, ErrorException {
long id = getId(msg.getData());
lock.unlockLayout(id, msg.getEndpoint());
dispatcher.dispatch(new Message(LayoutMessage.LAYOUT_UNLOCKED, id));
}
protected void lockLayout(Message msg)
throws SQLException, ErrorException {
long id = getId(msg.getData());
lock.lockLayout(id, msg.getEndpoint());
dispatcher.dispatch(new Message(LayoutMessage.LAYOUT_LOCKED, id));
}
protected void getLayout(Message msg, boolean tryLock)
throws SQLException, ErrorException {
long id = getId(msg.getData());
if(tryLock) {
lock.lockLayout(id, msg.getEndpoint());
}
Connection con = database.getConnection();
HashMap<String, Object> map = new HashMap<>();
map.put("id", id);
String q = "SELECT `Id`, `XPos`, `YPos`, `Symbol` FROM `TrackLayoutSymbols` WHERE `TrackLayoutId` = ?";
try (PreparedStatement pstmt = con.prepareStatement(q)) {
pstmt.setLong(1, id);
Layout.LOGGER.log(Level.INFO, pstmt.toString());
ArrayList<TracklayoutSymbolData> arraylist;
ResultSet rs = pstmt.executeQuery();
arraylist = new ArrayList();
while(rs.next()) {
arraylist.add(new TracklayoutSymbolData(
rs.getLong("Id"),
rs.getLong("XPos"),
rs.getLong("YPos"),
rs.getLong("Symbol")
));
}
map.put("symbols", arraylist);
dispatcher.dispatch(new Message(LayoutMessage.GET_LAYOUT_RES, map), msg.getEndpoint());
}
}
protected void saveLayout(Message msg)
throws SQLException, ErrorException {
Map<String, Object> map = (Map<String, Object>)msg.getData();
long id = getId(map.get("id"));
if(!lock.isLockedByApp(id, msg.getEndpoint())) {
throw new ErrorException(ErrorId.DATASET_NOT_LOCKED, "layout <" + String.valueOf(id) + "> not locked");
}
Connection con = database.getConnection();
String stmt = "UPDATE `TrackLayouts` SET `ModificationDate` = NOW() WHERE `Id` = ? ";
try (PreparedStatement pstmt = con.prepareStatement(stmt)) {
pstmt.setLong(1, id);
Layout.LOGGER.log(Level.INFO, pstmt.toString());
if(pstmt.executeUpdate() == 0) {
throw new ErrorException(ErrorId.DATASET_MISSING, "could not save <" + String.valueOf(id) + ">");
}
}
ArrayList<Object> arrayList = (ArrayList<Object>)map.get("symbols");
stmt = "DELETE FROM `TrackLayoutSymbols` WHERE `TrackLayoutId` = ?";
try(PreparedStatement pstmt = con.prepareStatement(stmt)) {
pstmt.setLong(1, id);
Layout.LOGGER.log(Level.INFO, pstmt.toString());
pstmt.executeUpdate();
}
for(Object item : arrayList) {
Map<String, Object> symbol = (Map<String, Object>)item;
stmt =
"INSERT INTO `TrackLayoutSymbols` (`Id`, `TrackLayoutId`, `XPos`, `YPos`, `Symbol`) " +
"VALUES (?, ?, ?, ?, ?)";
try(PreparedStatement pstmt = con.prepareStatement(stmt)) {
if(symbol.get("id") == null) {
pstmt.setNull(1, java.sql.Types.INTEGER);
} else {
pstmt.setInt(1, (int)symbol.get("id"));
}
pstmt.setLong(2, id);
pstmt.setLong(3, (long)symbol.get("xPos"));
pstmt.setLong(4, (long)symbol.get("yPos"));
pstmt.setLong(5, (long)symbol.get("symbol"));
Layout.LOGGER.log(Level.INFO, pstmt.toString());
pstmt.executeUpdate();
}
}
dispatcher.dispatch(new Message(LayoutMessage.LAYOUT_CHANGED, map));
}
protected Date getCreationDate(long id)
throws SQLException {
String q = "SELECT `CreationDate` FROM `TrackLayouts` WHERE `Id` = ?;";
Connection con = database.getConnection();
try (PreparedStatement pstmt = con.prepareStatement(q)) {
pstmt.setLong(1, id);
Layout.LOGGER.log(Level.INFO, pstmt.toString());
ResultSet rs = pstmt.executeQuery();
if(!rs.next()) {
throw new NoSuchElementException(String.format("no elements found for layout <%4d>", id));
}
return rs.getDate("CreationDate");
}
}
protected void storeData(long id)
throws ConfigException, IOException, JSONException {
activeLayout = id;
storeData();
}
protected void storeData()
throws ConfigException, IOException, JSONException {
HashMap<String, Object> map = new HashMap<>();
map.put("activeTracklayoutId", activeLayout);
config.setSection("trackLayout", map);
config.writeFile();
}
protected long getId(Object o)
throws ErrorException {
if(o != null) {
return (long)o;
}
if(activeLayout >= 0) {
return activeLayout;
}
throw new ErrorException(ErrorId.NO_DEFAULT_GIVEN, "no default-tracklayout given");
}
}
|
src/main/java/moba/server/messagehandler/Layout.java
|
/*
* Project: moba-server
*
* Copyright (C) 2016 Stefan Paproth <pappi-@gmx.de>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.txt>.
*
*/
package moba.server.messagehandler;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import moba.server.com.SenderI;
import moba.server.database.Database;
import moba.server.datatypes.enumerations.ErrorId;
import moba.server.datatypes.objects.TrackLayoutInfoData;
import moba.server.datatypes.objects.TracklayoutSymbolData;
import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.NoSuchElementException;
import moba.server.json.JSONException;
import moba.server.messages.Message;
import moba.server.messages.MessageHandlerA;
import moba.server.messages.messageType.LayoutMessage;
import moba.server.tracklayout.utilities.TracklayoutLock;
import moba.server.utilities.config.Config;
import moba.server.utilities.config.ConfigException;
import moba.server.utilities.exceptions.ErrorException;
public class Layout extends MessageHandlerA {
    protected static final Logger LOGGER = Logger.getLogger(Logger.GLOBAL_LOGGER_NAME);

    protected Database        database   = null;
    protected SenderI         dispatcher = null;
    protected TracklayoutLock lock       = null;
    protected Config          config     = null;

    // Id of the currently active track layout; -1 marks "no active layout"
    // (see storeData(-1) in deleteLayout()).
    protected long activeLayout = 0;

    public Layout(SenderI dispatcher, Database database, TracklayoutLock lock, Config config) {
        this.database   = database;
        this.dispatcher = dispatcher;
        this.lock       = lock;
        this.config     = config;
    }

    @Override
    public int getGroupId() {
        return LayoutMessage.GROUP_ID;
    }

    /**
     * Restores the id of the active track layout from the configuration,
     * if one was persisted by a previous run.
     */
    @Override
    public void init() {
        Object o = config.getSection("trackLayout.activeTracklayoutId");
        if(o != null) {
            // FIX: the config backend may return any boxed numeric type
            // (Integer, Long, ...). A blind (long) cast throws a
            // ClassCastException for anything but Long.
            activeLayout = ((Number)o).longValue();
        }
    }

    /**
     * Releases all locks and persists the active-layout id. Failures while
     * writing the configuration are only logged, since we are shutting down.
     */
    @Override
    public void shutdown() {
        freeResources(-1);
        try {
            storeData();
        } catch(ConfigException | IOException | JSONException e) {
            Layout.LOGGER.log(Level.WARNING, "<{0}>", new Object[]{e.toString()});
        }
    }

    /** Frees all layout locks held by the given application (-1: all). */
    @Override
    public void freeResources(long appId) {
        lock.freeLocks(appId);
    }

    /**
     * Dispatches an incoming layout message to the matching handler method.
     *
     * @throws ErrorException wraps database/config/IO failures and unknown
     *         message ids into the protocol's error type
     */
    @Override
    public void handleMsg(Message msg) throws ErrorException {
        try {
            switch(LayoutMessage.fromId(msg.getMessageId())) {
                case GET_LAYOUTS_REQ:
                    getLayouts(msg);
                    break;

                case GET_LAYOUT_REQ:
                    getLayout(msg, true);
                    break;

                case GET_LAYOUT_READ_ONLY_REQ:
                    getLayout(msg, false);
                    break;

                case DELETE_LAYOUT:
                    deleteLayout(msg);
                    break;

                case CREATE_LAYOUT:
                    createLayout(msg);
                    break;

                case UPDATE_LAYOUT:
                    updateLayout(msg);
                    break;

                case UNLOCK_LAYOUT:
                    unlockLayout(msg);
                    break;

                case LOCK_LAYOUT:
                    lockLayout(msg);
                    break;

                case SAVE_LAYOUT:
                    saveLayout(msg);
                    break;

                default:
                    throw new ErrorException(ErrorId.UNKNOWN_MESSAGE_ID, "unknow msg <" + Long.toString(msg.getMessageId()) + ">.");
            }
        } catch(SQLException e) {
            throw new ErrorException(ErrorId.DATABASE_ERROR, e.getMessage());
        } catch(ConfigException | IOException | JSONException e) {
            throw new ErrorException(ErrorId.UNKNOWN_ERROR, e.getMessage());
        }
    }

    /**
     * Sends the list of all stored track layouts back to the requesting
     * endpoint (GET_LAYOUTS_RES).
     */
    protected void getLayouts(Message msg)
    throws SQLException {
        String q = "SELECT * FROM `TrackLayouts`;";

        ArrayList<TrackLayoutInfoData> arraylist;

        Layout.LOGGER.log(Level.INFO, q);
        try(ResultSet rs = database.query(q)) {
            arraylist = new ArrayList<>();
            while(rs.next()) {
                long id = rs.getLong("Id");
                arraylist.add(new TrackLayoutInfoData(
                    id,
                    rs.getString("Name"),
                    rs.getString("Description"),
                    rs.getInt("Locked"),
                    (id == activeLayout),          // flag the active layout
                    rs.getDate("ModificationDate"),
                    rs.getDate("CreationDate")
                ));
            }
        }
        dispatcher.dispatch(new Message(LayoutMessage.GET_LAYOUTS_RES, arraylist), msg.getEndpoint());
    }

    /**
     * Deletes a track layout. The row is only removed when it is unlocked or
     * locked by the requesting application; on success LAYOUT_DELETED is
     * broadcast. Deleting the active layout resets the active-layout id.
     */
    protected void deleteLayout(Message msg)
    throws SQLException, IOException, ConfigException, JSONException, ErrorException {
        long id = (Long)msg.getData();
        lock.isLockedByApp(id, msg.getEndpoint());
        Connection con = database.getConnection();

        String q = "DELETE FROM `TrackLayouts` WHERE (`locked` IS NULL OR `locked` = ?) AND `id` = ? ";

        try (PreparedStatement pstmt = con.prepareStatement(q)) {
            pstmt.setLong(1, msg.getEndpoint().getAppId());
            pstmt.setLong(2, id);
            Layout.LOGGER.log(Level.INFO, "<{0}>", new Object[]{pstmt.toString()});
            if(pstmt.executeUpdate() == 0) {
                throw new ErrorException(ErrorId.DATASET_MISSING, "could not delete <" + String.valueOf(id) + ">");
            }
        }
        if(id == activeLayout) {
            // The active layout is gone; persist "no active layout".
            storeData(-1);
        }
        dispatcher.dispatch(new Message(LayoutMessage.LAYOUT_DELETED, id));
    }

    /**
     * Creates a new track layout, locked by the creating application, and
     * broadcasts LAYOUT_CREATED with the new record (including its id).
     */
    protected void createLayout(Message msg)
    throws SQLException, ConfigException, IOException, JSONException {
        Map<String, Object> map = (Map<String, Object>)msg.getData();
        boolean isActive = (boolean)map.get("active");
        long currAppId = msg.getEndpoint().getAppId();

        TrackLayoutInfoData tl = new TrackLayoutInfoData((String)map.get("name"), (String)map.get("description"), currAppId, isActive);

        Connection con = database.getConnection();
        String q = "INSERT INTO `TrackLayouts` (`Name`, `Description`, `CreationDate`, `ModificationDate`, `Locked`) VALUES (?, ?, NOW(), NOW(), ?)";

        try(PreparedStatement pstmt = con.prepareStatement(q, PreparedStatement.RETURN_GENERATED_KEYS)) {
            pstmt.setString(1, tl.getName());
            pstmt.setString(2, tl.getDescription());
            pstmt.setLong(3, currAppId);
            pstmt.executeUpdate();
            Layout.LOGGER.log(Level.INFO, pstmt.toString());
            try(ResultSet rs = pstmt.getGeneratedKeys()) {
                rs.next();
                int id = rs.getInt(1);
                if(isActive) {
                    // Persist the newly created layout as the active one.
                    storeData(id);
                }
                tl.setId(id);
            }
        }
        dispatcher.dispatch(new Message(LayoutMessage.LAYOUT_CREATED, tl));
    }

    /**
     * Updates name/description of a layout that is unlocked or locked by the
     * requesting application and broadcasts LAYOUT_UPDATED.
     */
    protected void updateLayout(Message msg)
    throws SQLException, ConfigException, IOException, JSONException, ErrorException {
        Map<String, Object> map = (Map<String, Object>)msg.getData();
        long id = (Long)map.get("id");
        lock.isLockedByApp(id, msg.getEndpoint());

        boolean active = (boolean)map.get("active");
        long appId = msg.getEndpoint().getAppId();

        TrackLayoutInfoData tl =
            new TrackLayoutInfoData(id, (String)map.get("name"), (String)map.get("description"), appId, active, new Date(), getCreationDate(id));

        Connection con = database.getConnection();
        String q = "UPDATE `TrackLayouts` SET `Name` = ?, `Description` = ?, `ModificationDate` = ? WHERE (`locked` IS NULL OR `locked` = ?) AND `id` = ? ";

        try (PreparedStatement pstmt = con.prepareStatement(q)) {
            pstmt.setString(1, tl.getName());
            pstmt.setString(2, tl.getDescription());
            pstmt.setDate(3, new java.sql.Date(tl.getModificationDate().getTime()));
            pstmt.setLong(4, appId);
            pstmt.setLong(5, id);
            Layout.LOGGER.log(Level.INFO, pstmt.toString());
            if(pstmt.executeUpdate() == 0) {
                throw new ErrorException(ErrorId.DATASET_MISSING, "could not update <" + String.valueOf(id) + ">");
            }
            if(active) {
                storeData(id);
            }
            dispatcher.dispatch(new Message(LayoutMessage.LAYOUT_UPDATED, tl));
        }
    }

    /** Releases the requesting application's lock and broadcasts LAYOUT_UNLOCKED. */
    protected void unlockLayout(Message msg)
    throws SQLException, ErrorException {
        long id = getId(msg.getData());
        lock.unlockLayout(id, msg.getEndpoint());
        dispatcher.dispatch(new Message(LayoutMessage.LAYOUT_UNLOCKED, id));
    }

    /** Acquires a lock for the requesting application and broadcasts LAYOUT_LOCKED. */
    protected void lockLayout(Message msg)
    throws SQLException, ErrorException {
        long id = getId(msg.getData());
        lock.lockLayout(id, msg.getEndpoint());
        dispatcher.dispatch(new Message(LayoutMessage.LAYOUT_LOCKED, id));
    }

    /**
     * Sends all symbols of a layout to the requesting endpoint.
     *
     * @param tryLock when true the layout is locked for the requester first
     *        (read-write access); false means read-only access
     */
    protected void getLayout(Message msg, boolean tryLock)
    throws SQLException, ErrorException {
        long id = getId(msg.getData());
        if(tryLock) {
            lock.lockLayout(id, msg.getEndpoint());
        }
        Connection con = database.getConnection();

        HashMap<String, Object> map = new HashMap<>();
        map.put("id", id);

        String q = "SELECT `Id`, `XPos`, `YPos`, `Symbol` FROM `TrackLayoutSymbols` WHERE `TrackLayoutId` = ?";

        try (PreparedStatement pstmt = con.prepareStatement(q)) {
            pstmt.setLong(1, id);
            Layout.LOGGER.log(Level.INFO, pstmt.toString());

            ArrayList<TracklayoutSymbolData> arraylist = new ArrayList<>();
            // FIX: close the ResultSet deterministically as well.
            try (ResultSet rs = pstmt.executeQuery()) {
                while(rs.next()) {
                    arraylist.add(new TracklayoutSymbolData(
                        rs.getLong("Id"),
                        rs.getLong("XPos"),
                        rs.getLong("YPos"),
                        rs.getLong("Symbol")
                    ));
                }
            }
            map.put("symbols", arraylist);
            dispatcher.dispatch(new Message(LayoutMessage.GET_LAYOUT_RES, map), msg.getEndpoint());
        }
    }

    /**
     * Persists the symbols of a layout: touches the layout's modification
     * date, then deletes symbols without coordinates and upserts the rest.
     * Broadcasts LAYOUT_CHANGED on success.
     */
    protected void saveLayout(Message msg)
    throws SQLException, ErrorException {
        Map<String, Object> map = (Map<String, Object>)msg.getData();
        long id = getId(map.get("id"));

        // FIX: validate the lock BEFORE writing; the old code updated the
        // modification date first and checked the lock afterwards.
        if(!lock.isLockedByApp(id, msg.getEndpoint())) {
            throw new ErrorException(ErrorId.DATASET_NOT_LOCKED, "layout <" + String.valueOf(id) + "> not locked");
        }

        Connection con = database.getConnection();

        String touchStmt = "UPDATE `TrackLayouts` SET `ModificationDate` = NOW() WHERE `Id` = ? ";
        try (PreparedStatement pstmt = con.prepareStatement(touchStmt)) {
            pstmt.setLong(1, id);
            Layout.LOGGER.log(Level.INFO, pstmt.toString());
            if(pstmt.executeUpdate() == 0) {
                throw new ErrorException(ErrorId.DATASET_MISSING, "could not save <" + String.valueOf(id) + ">");
            }
        }

        // FIX: the old code prepared ONE statement (the touch-update above)
        // and merely reassigned the SQL *string* inside the loop, so the
        // delete/upsert below were never actually executed. Each branch now
        // prepares and executes the statement it really needs, with all
        // parameters bound (the old upsert bound only 7 of its 9 markers and
        // wrote the layout id into the `Symbol` column).
        String deleteStmt = "DELETE FROM `TrackLayoutSymbols` WHERE `Id` = ? AND `TrackLayoutId` = ?";
        String upsertStmt =
            "INSERT INTO `TrackLayoutSymbols` (`Id`, `TrackLayoutId`, `XPos`, `YPos`, `Symbol`) " +
            "VALUES (?, ?, ?, ?, ?) " +
            "ON DUPLICATE KEY UPDATE `TrackLayoutId` = ?, `XPos` = ?, `YPos` = ?, `Symbol` = ?";

        ArrayList<Object> arrayList = (ArrayList<Object>)map.get("symbols");
        for(Object item : arrayList) {
            Map<String, Object> symbol = (Map<String, Object>)item;

            // A symbol without coordinates was removed in the editor.
            if(symbol.get("xPos") == null || symbol.get("yPos") == null) {
                try (PreparedStatement pstmt = con.prepareStatement(deleteStmt)) {
                    pstmt.setInt(1, (int)symbol.get("id"));
                    pstmt.setLong(2, id);
                    Layout.LOGGER.log(Level.INFO, pstmt.toString());
                    if(pstmt.executeUpdate() == 0) {
                        throw new ErrorException(ErrorId.DATASET_MISSING, "could not save <" + String.valueOf(id) + ">");
                    }
                }
                continue;
            }

            try (PreparedStatement pstmt = con.prepareStatement(upsertStmt)) {
                pstmt.setInt(1, (int)symbol.get("id"));
                pstmt.setLong(2, id);
                pstmt.setInt(3, (int)symbol.get("xPos"));
                pstmt.setInt(4, (int)symbol.get("yPos"));
                // NOTE(review): key name "symbol" mirrors the lower-camel keys
                // used elsewhere in this message payload ("xPos"/"yPos") and
                // the `Symbol` column read in getLayout() — TODO confirm
                // against the client that fills this map.
                pstmt.setInt(5, (int)symbol.get("symbol"));
                pstmt.setLong(6, id);
                pstmt.setInt(7, (int)symbol.get("xPos"));
                pstmt.setInt(8, (int)symbol.get("yPos"));
                pstmt.setInt(9, (int)symbol.get("symbol"));
                Layout.LOGGER.log(Level.INFO, pstmt.toString());
                if(pstmt.executeUpdate() == 0) {
                    throw new ErrorException(ErrorId.DATASET_MISSING, "could not save <" + String.valueOf(id) + ">");
                }
            }
        }
        dispatcher.dispatch(new Message(LayoutMessage.LAYOUT_CHANGED, map));
    }

    /**
     * Returns the creation date of the given layout.
     *
     * @throws NoSuchElementException when no row exists for the id
     */
    protected Date getCreationDate(long id)
    throws SQLException {
        String q = "SELECT `CreationDate` FROM `TrackLayouts` WHERE `Id` = ?;";
        Connection con = database.getConnection();

        try (PreparedStatement pstmt = con.prepareStatement(q)) {
            pstmt.setLong(1, id);
            Layout.LOGGER.log(Level.INFO, pstmt.toString());
            try (ResultSet rs = pstmt.executeQuery()) {
                if(!rs.next()) {
                    throw new NoSuchElementException(String.format("no elements found for layout <%4d>", id));
                }
                return rs.getDate("CreationDate");
            }
        }
    }

    /** Sets the active layout id and persists it to the configuration. */
    protected void storeData(long id)
    throws ConfigException, IOException, JSONException {
        activeLayout = id;
        storeData();
    }

    /** Writes the current active layout id into the "trackLayout" config section. */
    protected void storeData()
    throws ConfigException, IOException, JSONException {
        HashMap<String, Object> map = new HashMap<>();
        map.put("activeTracklayoutId", activeLayout);
        config.setSection("trackLayout", map);
        config.writeFile();
    }

    /**
     * Resolves a layout id from a message payload: an explicit id wins,
     * otherwise the active layout is used.
     *
     * @throws ErrorException when neither an explicit nor an active id exists
     */
    protected long getId(Object o)
    throws ErrorException {
        if(o != null) {
            return (long)o;
        }
        if(activeLayout >= 0) {
            return activeLayout;
        }
        throw new ErrorException(ErrorId.NO_DEFAULT_GIVEN, "no default-tracklayout given");
    }
}
|
wip...
|
src/main/java/moba/server/messagehandler/Layout.java
|
wip...
|
|
Java
|
agpl-3.0
|
0f8fdd3c804c925ac07910e4667cf9b86e04b703
| 0
|
quikkian-ua-devops/will-financials,ua-eas/kfs-devops-automation-fork,ua-eas/kfs-devops-automation-fork,smith750/kfs,kkronenb/kfs,UniversityOfHawaii/kfs,smith750/kfs,quikkian-ua-devops/kfs,bhutchinson/kfs,ua-eas/kfs,ua-eas/kfs,UniversityOfHawaii/kfs,quikkian-ua-devops/kfs,ua-eas/kfs,UniversityOfHawaii/kfs,ua-eas/kfs,quikkian-ua-devops/will-financials,ua-eas/kfs-devops-automation-fork,bhutchinson/kfs,ua-eas/kfs-devops-automation-fork,bhutchinson/kfs,kuali/kfs,UniversityOfHawaii/kfs,kkronenb/kfs,kuali/kfs,quikkian-ua-devops/kfs,smith750/kfs,quikkian-ua-devops/kfs,quikkian-ua-devops/kfs,ua-eas/kfs-devops-automation-fork,kuali/kfs,quikkian-ua-devops/will-financials,kuali/kfs,quikkian-ua-devops/kfs,ua-eas/kfs,smith750/kfs,quikkian-ua-devops/will-financials,bhutchinson/kfs,kkronenb/kfs,quikkian-ua-devops/will-financials,quikkian-ua-devops/will-financials,UniversityOfHawaii/kfs,kkronenb/kfs,kuali/kfs
|
/*
* Copyright 2006 The Kuali Foundation.
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.module.kra.routingform.document;
import org.kuali.core.authorization.MaintenanceDocumentAuthorizations;
import org.kuali.core.bo.user.UniversalUser;
import org.kuali.core.document.MaintenanceDocument;
import org.kuali.core.document.MaintenanceDocumentAuthorizerBase;
import org.kuali.core.util.ObjectUtils;
import org.kuali.module.kra.KraPropertyConstants;
import org.kuali.module.kra.routingform.bo.ResearchRiskType;
/**
 * Authorizer for the ResearchRiskType maintenance document. Marks the
 * description and control-attribute fields read-only once the research risk
 * type already exists in the database, so only new records may set them.
 */
public class ResearchRiskTypeMaintenanceDocumentAuthorizer extends MaintenanceDocumentAuthorizerBase {

    /**
     * Builds the field-level authorizations for the given document.
     *
     * @param document the maintenance document being edited
     * @param user     the current user (not consulted here)
     * @return authorizations with targeted fields set read-only for existing records
     */
    @Override
    public MaintenanceDocumentAuthorizations getFieldAuthorizations(MaintenanceDocument document, UniversalUser user) {
        MaintenanceDocumentAuthorizations authorizations = new MaintenanceDocumentAuthorizations();
        ResearchRiskType riskType = (ResearchRiskType) document.getNewMaintainableObject().getBusinessObject();

        // A non-null type code means the record was loaded from the database,
        // not freshly created — lock down the immutable fields.
        boolean existsInDatabase = ObjectUtils.isNotNull(riskType) && riskType.getResearchRiskTypeCode() != null;
        if (existsInDatabase) {
            authorizations.addReadonlyAuthField(KraPropertyConstants.RESEARCH_RISK_TYPE_DESCRIPTION);
            authorizations.addReadonlyAuthField(KraPropertyConstants.CONTROL_ATTRIBUTE_TYPE_CODE);
        }

        return authorizations;
    }
}
|
work/src/org/kuali/kfs/module/cg/document/authorization/ResearchRiskTypeMaintenanceDocumentAuthorizer.java
|
/*
* Copyright 2006 The Kuali Foundation.
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.module.kra.routingform.document;
import java.util.List;
import org.kuali.core.authorization.MaintenanceDocumentAuthorizations;
import org.kuali.core.bo.user.UniversalUser;
import org.kuali.core.document.MaintenanceDocument;
import org.kuali.core.document.MaintenanceDocumentAuthorizerBase;
import org.kuali.core.service.BusinessObjectService;
import org.kuali.core.util.ObjectUtils;
import org.kuali.core.util.SpringServiceLocator;
import org.kuali.module.kra.KraPropertyConstants;
import org.kuali.module.kra.routingform.bo.ResearchRiskType;
import org.kuali.module.purap.PurapPropertyConstants;
import org.kuali.module.purap.bo.VendorSupplierDiversity;
import org.kuali.module.purap.util.PurapUtils;
/**
 * Authorizer for the ResearchRiskType maintenance document. Marks the
 * description and control-attribute fields read-only when the edited research
 * risk type already has a persisted counterpart in the database.
 */
public class ResearchRiskTypeMaintenanceDocumentAuthorizer extends MaintenanceDocumentAuthorizerBase {

    /**
     * Builds the field-level authorizations for the given document.
     *
     * @param document the maintenance document being edited
     * @param user     the current user (not consulted here)
     * @return authorizations with targeted fields set read-only for persisted records
     */
    @Override
    public MaintenanceDocumentAuthorizations getFieldAuthorizations(MaintenanceDocument document, UniversalUser user) {
        MaintenanceDocumentAuthorizations authorizations = new MaintenanceDocumentAuthorizations();
        ResearchRiskType editedRiskType = (ResearchRiskType) document.getNewMaintainableObject().getBusinessObject();

        // Look the record up; a hit means we are editing, not creating.
        BusinessObjectService businessObjectService = SpringServiceLocator.getBusinessObjectService();
        ResearchRiskType persistedRiskType = (ResearchRiskType) businessObjectService.retrieve(editedRiskType);

        if (ObjectUtils.isNotNull(persistedRiskType)) {
            authorizations.addReadonlyAuthField(KraPropertyConstants.RESEARCH_RISK_TYPE_DESCRIPTION);
            authorizations.addReadonlyAuthField(KraPropertyConstants.CONTROL_ATTRIBUTE_TYPE_CODE);
        }

        return authorizations;
    }
}
|
Refresh object; don't need business object service
|
work/src/org/kuali/kfs/module/cg/document/authorization/ResearchRiskTypeMaintenanceDocumentAuthorizer.java
|
Refresh object; don't need business object service
|
|
Java
|
lgpl-2.1
|
ade129b7b564eb3c11bde20e8cc01c24c8b5c1c4
| 0
|
koying/libbluray,mwgoldsmith/bluray,Azzuro/libbluray,ShiftMediaProject/libbluray,tourettes/libbluray,ShiftMediaProject/libbluray,tourettes/libbluray,UIKit0/libbluray,koying/libbluray,EdwardNewK/libbluray,Distrotech/libbluray,Distrotech/libbluray,vlc-mirror/libbluray,ShiftMediaProject/libbluray,Azzuro/libbluray,vlc-mirror/libbluray,koying/libbluray,ShiftMediaProject/libbluray,UIKit0/libbluray,EdwardNewK/libbluray,ace20022/libbluray,Distrotech/libbluray,UIKit0/libbluray,ace20022/libbluray,mwgoldsmith/bluray,ace20022/libbluray,tourettes/libbluray,vlc-mirror/libbluray,vlc-mirror/libbluray,mwgoldsmith/bluray,Distrotech/libbluray,tourettes/libbluray,ace20022/libbluray,koying/libbluray,Azzuro/libbluray,EdwardNewK/libbluray,Azzuro/libbluray,UIKit0/libbluray,EdwardNewK/libbluray,mwgoldsmith/bluray
|
/*
* This file is part of libbluray
* Copyright (C) 2012 libbluray
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library. If not, see
* <http://www.gnu.org/licenses/>.
*/
package java.awt;
import java.lang.reflect.Field;
import java.text.AttributedCharacterIterator;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.awt.image.AreaAveragingScaleFilter;
import java.awt.image.BufferedImage;
import java.awt.image.ImageConsumer;
import java.awt.image.ImageObserver;
import org.dvb.ui.DVBBufferedImage;
import sun.awt.ConstrainableGraphics;
import org.videolan.Logger;
class BDGraphics extends Graphics2D implements ConstrainableGraphics {
private static final Color DEFAULT_COLOR = Color.BLACK;
private static final Font DEFAULT_FONT = new Font("Dialog", Font.PLAIN, 12);
private int width;
private int height;
private int[] backBuffer;
private Area dirty;
private GraphicsConfiguration gc;
private Color foreground;
private Color background;
private Font font;
private BDFontMetrics fontMetrics;
private AlphaComposite composite;
/** The current xor color. If null then we are in paint mode. */
private Color xorColor;
/** Translated X, Y offset from native offset. */
private int originX;
private int originY;
/** The actual clip rectangle that is intersection of user clip and constrained rectangle. */
private Rectangle actualClip;
/** The current user clip rectangle or null if no clip has been set. This is stored in the
native coordinate system and not the (possibly) translated Java coordinate system. */
private Rectangle clip = null;
/** The rectangle this graphics object has been constrained too. This is stored in the
native coordinate system and not the (possibly) translated Java coordinate system.
If it is null then this graphics has not been constrained. The constrained rectangle
is another layer of clipping independant of the user clip. */
private Rectangle constrainedRect = null;
BDGraphics(BDGraphics g) {
backBuffer = g.backBuffer;
dirty = g.dirty;
width = g.width;
height = g.height;
gc = g.gc;
foreground = g.foreground;
background = g.background;
composite = g.composite;
font = g.font;
fontMetrics = g.fontMetrics;
originX = g.originX;
originY = g.originY;
if (g.clip != null) {
clip = new Rectangle(g.clip);
}
setupClip();
}
BDGraphics(BDRootWindow window) {
width = window.getWidth();
height = window.getHeight();
backBuffer = window.getBdBackBuffer();
dirty = window.getDirtyArea();
gc = window.getGraphicsConfiguration();
foreground = window.getForeground();
background = window.getBackground();
font = window.getFont();
if (foreground == null)
foreground = DEFAULT_COLOR;
if (background == null)
background = DEFAULT_COLOR;
if (font == null)
font = DEFAULT_FONT;
fontMetrics = BDFontMetrics.getFontMetrics(font);
composite = AlphaComposite.SrcOver;
setupClip();
}
BDGraphics(BDImage image) {
width = image.getWidth();
height = image.getHeight();
backBuffer = image.getBdBackBuffer();
dirty = image.getDirtyArea();
gc = image.getGraphicsConfiguration();
Component component = image.getComponent();
if (component != null) {
foreground = component.getForeground();
background = component.getBackground();
font = component.getFont();
}
if (foreground == null)
foreground = DEFAULT_COLOR;
if (background == null)
background = new Color(0, 0, 0, 0);
if (font == null)
font = DEFAULT_FONT;
fontMetrics = BDFontMetrics.getFontMetrics(font);
composite = AlphaComposite.SrcOver;
setupClip();
}
public Graphics create() {
return new BDGraphics(this);
}
public void translate(int x, int y) {
originX += x;
originY += y;
}
public void setFont(Font font) {
if (font != null && !font.equals(this.font)) {
this.font = font;
fontMetrics = BDFontMetrics.getFontMetrics(font);
}
}
public Font getFont() {
return font;
}
public FontMetrics getFontMetrics() {
return fontMetrics;
}
public FontMetrics getFontMetrics(Font font) {
return BDFontMetrics.getFontMetrics(font);
}
public void setColor(Color c) {
if ((c != null) && (c != foreground))
foreground = c;
}
public Color getColor() {
return foreground;
}
public Composite getComposite() {
return composite;
}
public GraphicsConfiguration getDeviceConfiguration() {
return gc;
}
public void setComposite(Composite comp) {
if ((comp != null) && (comp != composite)) {
if (!(comp instanceof AlphaComposite))
throw new IllegalArgumentException("Only AlphaComposite is supported");
composite = (AlphaComposite) comp;
}
}
public void setPaintMode() {
xorColor = null;
composite = AlphaComposite.SrcOver;
}
public void setXORMode(Color color) {
xorColor = color;
}
/** Gets the current clipping area. */
public Rectangle getClipBounds() {
if (clip != null)
return new Rectangle (clip.x - originX, clip.y - originY, clip.width, clip.height);
return null;
}
public void constrain(int x, int y, int w, int h) {
Rectangle rect;
if (constrainedRect != null)
rect = constrainedRect;
else
rect = new Rectangle(0, 0, width, height);
constrainedRect = rect.intersection(new Rectangle(rect.x + x, rect.y + y, w, h));
originX = constrainedRect.x;
originY = constrainedRect.y;
setupClip();
}
/** Returns a Shape object representing the clip. */
public Shape getClip() {
return getClipBounds();
}
/** Crops the clipping rectangle. */
public void clipRect(int x, int y, int w, int h) {
Rectangle rect = new Rectangle(x + originX, y + originY, w, h);
if (clip != null)
clip = clip.intersection(rect);
else
clip = rect;
setupClip();
}
/** Sets the clipping rectangle. */
public void setClip(int x, int y, int w, int h) {
clip = new Rectangle (x + originX, y + originY, w, h);
setupClip();
}
/** Sets the clip to a Shape (only Rectangle allowed). */
public void setClip(Shape clip) {
if (clip == null) {
this.clip = null;
setupClip();
} else if (clip instanceof Rectangle) {
Rectangle rect = (Rectangle) clip;
setClip(rect.x, rect.y, rect.width, rect.height);
} else
throw new IllegalArgumentException("setClip(Shape) only supports Rectangle objects");
}
private void setupClip() {
Rectangle rect;
if (constrainedRect != null)
rect = constrainedRect;
else
rect = new Rectangle(0, 0, width, height);
if (clip != null)
actualClip = clip.intersection(rect);
else
actualClip = rect;
}
    /**
     * Source-over alpha blend of two non-premultiplied ARGB pixels using
     * integer fixed-point arithmetic (no floating point, no division until
     * the final normalization).
     *
     * @param dest the existing (background) ARGB pixel
     * @param src  the incoming (foreground) ARGB pixel
     * @return the blended ARGB pixel
     */
    private int alphaBlend(int dest, int src) {
        int As = src >>> 24;
        // Fully transparent source: background is unchanged.
        if (As == 0)
            return dest;
        // Fully opaque source: it completely covers the background.
        if (As == 255)
            return src;
        int Ad = (dest >>> 24);
        // Transparent background: source wins as-is.
        if (Ad == 0)
            return src;
        int R, G, B;
        // Source contribution, scaled by 255*As to stay in integer math.
        R = ((src >>> 16) & 255) * As * 255;
        G = ((src >>> 8) & 255) * As * 255;
        B = (src & 255) * As * 255;
        // Destination weight: Ad * (1 - As), in the same fixed-point scale.
        Ad = Ad * (255 - As);
        // Combined output alpha (scaled by 255): As + Ad*(1 - As).
        As = As * 255 + Ad;
        // Weighted average of source and destination channels.
        R = (R + ((dest >>> 16) & 255) * Ad) / As;
        G = (G + ((dest >>> 8) & 255) * Ad) / As;
        B = (B + (dest & 255) * Ad) / As;
        // Clamp against rounding overshoot.
        R = Math.min(255, R);
        G = Math.min(255, G);
        B = Math.min(255, B);
        // Bring alpha back to 0..255 and clamp.
        Ad = As / 255;
        Ad = Math.min(255, Ad);
        return (Ad << 24) | (R << 16) | (G << 8) | B;
    }
private int applyComposite(int rgb) {
return ((int)((rgb >>> 24) * composite.getAlpha()) << 24) | (rgb & 0x00FFFFFF);
}
private void drawSpanN(int x, int y, int length, int rgb) {
Rectangle rect = new Rectangle(x, y, length, 1);
rect = actualClip.intersection(rect);
if (rect.width <= 0 || rect.height <= 0 || rect.x < 0 || rect.y < 0) {
return;
}
x = rect.x;
length = rect.width;
dirty.add(rect);
if (xorColor != null) {
for (int i = 0; i < length; i++) {
backBuffer[y * width + x + i] ^= xorColor.getRGB() ^ rgb;
}
return;
}
switch (composite.getRule()) {
case AlphaComposite.CLEAR:
for (int i = 0; i < length; i++) {
backBuffer[y * width + x + i] = 0;
}
break;
case AlphaComposite.SRC:
rgb = applyComposite(rgb);
for (int i = 0; i < length; i++) {
backBuffer[y * width + x + i] = rgb;
}
break;
case AlphaComposite.SRC_OVER:
rgb = applyComposite(rgb);
for (int i = 0; i < length; i++) {
backBuffer[y * width + x + i] = alphaBlend(backBuffer[y * width + x + i], rgb);
}
break;
}
}
private void drawSpanN(int x, int y, int length, int src[], int srcOffset) {
Rectangle rect = new Rectangle(x, y, length, 1);
rect = actualClip.intersection(rect);
if (rect.width <= 0 || rect.height <= 0 || rect.x < 0 || rect.y < 0) {
return;
}
srcOffset += rect.x - x;
x = rect.x;
length = rect.width;
dirty.add(rect);
if (xorColor != null) {
for (int i = 0; i < length; i++) {
backBuffer[y * width + x + i] ^= xorColor.getRGB() ^ src[srcOffset + i];
}
return;
}
switch (composite.getRule()) {
case AlphaComposite.CLEAR:
for (int i = 0; i < length; i++) {
backBuffer[y * width + x + i] = 0;
}
break;
case AlphaComposite.SRC:
for (int i = 0; i < length; i++) {
backBuffer[y * width + x + i] = applyComposite(src[srcOffset + i]);
}
break;
case AlphaComposite.SRC_OVER:
for (int i = 0; i < length; i++) {
backBuffer[y * width + x + i] = alphaBlend(backBuffer[y * width + x + i], applyComposite(src[srcOffset + i]));
}
break;
}
}
private void drawSpan(int x, int y, int length, int rgb) {
x += originX;
y += originY;
drawSpanN(x, y, length, rgb);
}
private void drawSpan(int x, int y, int length, int src[], int srcOffset) {
x += originX;
y += originY;
drawSpanN(x, y, length, src, srcOffset);
}
private void drawPointN(int x, int y, int rgb) {
drawSpanN(x, y, 1, rgb);
}
private void drawGlyph(int[] rgbArray, int x0, int y0, int w, int h) {
for (int y = 0; y < h; y++)
for (int x = 0; x < w; x++)
drawPoint(x + x0, y + y0, rgbArray[y * w + x]);
}
private void drawPoint(int x, int y, int rgb) {
x += originX;
y += originY;
if (actualClip.contains(x, y))
drawPointN(x, y, rgb);
}
public void clearRect(int x, int y, int w, int h) {
x += originX;
y += originY;
Rectangle rect = new Rectangle(x, y, w, h);
rect = actualClip.intersection(rect);
if (rect.isEmpty()) {
return;
}
x = rect.x;
y = rect.y;
w = rect.width;
h = rect.height;
int rgb = background.getRGB();
for (int i = 0; i < h; i++)
Arrays.fill(backBuffer, (y + i) * width + x, (y + i) * width + x + w, rgb);
dirty.add(rect);
}
public void fillRect(int x, int y, int w, int h) {
x += originX;
y += originY;
Rectangle rect = new Rectangle(x, y, w, h);
rect = actualClip.intersection(rect);
x = rect.x;
y = rect.y;
w = rect.width;
h = rect.height;
int rgb = foreground.getRGB();
for (int Y = y; Y < (y + h); Y++)
drawSpanN(x, Y, w, rgb);
}
public void drawRect(int x, int y, int w, int h) {
x += originX;
y += originY;
drawLineN(x, y, x + w, y);
drawLineN(x, y + h, x + w, y + h);
drawLineN(x, y, x, y + h);
drawLineN(x + w, y, x + w, y + h);
}
    /**
     * Draws a line from (x1,y1) to (x2,y2) in native (untranslated)
     * coordinates using Bresenham's integer line algorithm, in the current
     * foreground color. Clipping happens per-pixel inside drawPointN().
     */
    private void drawLineN(int x1, int y1, int x2, int y2) {
        int rgb = foreground.getRGB();

        int dy = y2 - y1;
        int dx = x2 - x1;
        int stepx, stepy;
        int fraction;

        // Normalize deltas to positive values, remembering step direction.
        if (dy < 0) {
            dy = -dy;
            stepy = -1;
        } else {
            stepy = 1;
        }
        if (dx < 0) {
            dx = -dx;
            stepx = -1;
        } else {
            stepx = 1;
        }
        // Work in doubled deltas so the error term stays integral.
        dy <<= 1;
        dx <<= 1;
        drawPointN(x1, y1, rgb);

        if (dx > dy) {
            // X-major: advance x every step, y when the error flips sign.
            fraction = dy - (dx >> 1);
            while (x1 != x2) {
                if (fraction >= 0) {
                    y1 += stepy;
                    fraction -= dx;
                }
                x1 += stepx;
                fraction += dy;
                drawPointN(x1, y1, rgb);
            }
        } else {
            // Y-major: advance y every step, x when the error flips sign.
            fraction = dx - (dy >> 1);
            while (y1 != y2) {
                if (fraction >= 0) {
                    x1 += stepx;
                    fraction -= dy;
                }
                y1 += stepy;
                fraction += dx;
                drawPointN(x1, y1, rgb);
            }
        }
    }
public void drawLine(int x1, int y1, int x2, int y2) {
x1 += originX;
y1 += originY;
x2 += originX;
y2 += originY;
drawLineN(x1, y1, x2, y2);
}
/**
* Copies an area of the canvas that this graphics context paints to.
* @param X the x-coordinate of the source.
* @param Y the y-coordinate of the source.
* @param W the width.
* @param H the height.
* @param dx the horizontal distance to copy the pixels.
* @param dy the vertical distance to copy the pixels.
*/
public void copyArea(int x, int y, int w, int h, int dx, int dy) {
x += originX;
y += originY;
Rectangle rect = new Rectangle(x, y, w, h);
rect = actualClip.intersection(rect);
if (rect.width <= 0 || rect.height <= 0) {
return;
}
x = rect.x;
y = rect.y;
w = rect.width;
h = rect.height;
int subImage[] = new int[w * h];
// copy back buffer
for (int i = 0; i < h; i++) {
System.arraycopy(backBuffer, ((y + i) * width) + x, subImage, w * i, w);
}
// draw sub image
for (int i = 0; i < h; i++) {
drawSpanN(x + dx, y + i + dy, w, subImage, w * i);
}
}
/** Draws lines defined by an array of x points and y points */
public void drawPolyline(int xPoints[], int yPoints[], int nPoints) {
if (nPoints == 1) {
drawPoint(xPoints[0], yPoints[0], foreground.getRGB());
} else {
for (int i = 0; i < (nPoints - 1); i++)
drawLine(xPoints[i], xPoints[i], xPoints[i + 1], xPoints[i + 1]);
}
}
/** Draws a polygon defined by an array of x points and y points */
public void drawPolygon(int xPoints[], int yPoints[], int nPoints) {
if (nPoints == 1) {
drawPoint(xPoints[0], yPoints[0], foreground.getRGB());
} else {
for (int i = 0; i < (nPoints - 1); i++)
drawLine(xPoints[i], xPoints[i], xPoints[i + 1], xPoints[i + 1]);
if (nPoints > 2)
drawLine(xPoints[0], xPoints[0], xPoints[nPoints - 1], xPoints[nPoints - 1]);
}
}
/** Fills a polygon with the current fill mask */
public void fillPolygon(int xPoints[], int yPoints[], int nPoints) {
int minY = Integer.MAX_VALUE;
int maxY = Integer.MIN_VALUE;
int colour = foreground.getRGB();
if (nPoints < 3) {
return;
}
for (int i = 0; i < nPoints; i++) {
if (yPoints[i] > maxY) {
maxY = yPoints[i];
}
if (yPoints[i] < minY) {
minY = yPoints[i];
}
}
// check the last point to see if its the same as the first
if (xPoints[0] == xPoints[nPoints - 1] && yPoints[0] == yPoints[nPoints - 1]) {
nPoints--;
}
PolyEdge[] polyEdges = new PolyEdge[nPoints];
for (int i = 0; i < nPoints - 1; i++) {
polyEdges[i] = new PolyEdge(xPoints[i], yPoints[i], xPoints[i + 1], yPoints[i + 1]);
}
// add the last one
polyEdges[nPoints - 1] = new PolyEdge(xPoints[nPoints - 1], yPoints[nPoints - 1], xPoints[0], yPoints[0]);
ArrayList xList = new ArrayList();
for (int i = minY; i <= maxY; i++) {
for (int j = 0; j < nPoints; j++) {
if (polyEdges[j].intersects(i)) {
int x = polyEdges[j].intersectionX(i);
xList.add(new Integer(x));
}
}
// probably a better way of doing this (removing duplicates);
HashSet hs = new HashSet();
hs.addAll(xList);
xList.clear();
xList.addAll(hs);
if (xList.size() % 2 > 0) {
xList.clear();
continue; // this should be impossible unless the poly is open somewhere
}
Collections.sort(xList);
for (int j = 0; j < xList.size(); j +=2 ) {
int x1 = ((Integer)xList.get(j)).intValue();
int x2 = ((Integer)xList.get(j + 1)).intValue();
drawSpan(x1, i, x2 - x1, colour);
}
xList.clear();
}
}
/** Draws an oval to fit in the given rectangle */
public void drawOval(int x, int y, int w, int h) {
int startX;
int endX;
int offset;
int[] xList;
int[] yList;
int numPoints;
int count;
float as;
float bs;
if (w <= 0 || h <=0 ) {
return;
}
count = 0;
numPoints = ((h/2) + (h/2) + 1) * 2;
numPoints += 1; // to close
xList = new int[numPoints];
yList = new int[numPoints];
as = (w/2.0f) * (w/2.0f);
bs = (h/2.0f) * (h/2.0f);
for (int i = -h/2; i <= h/2; i++) {
offset = (int) Math.sqrt( (1.0 - ((i*i)/bs)) * as );
startX = x - offset + w/2;
xList[count] = startX;
yList[count] = y + i + h/2;
count++;
}
for (int i = h/2; i >= -h/2; i--) {
offset = (int) Math.sqrt( (1.0 - ((i*i)/bs)) * as );
endX = x + offset + w/2;
xList[count] = endX;
yList[count] = y + i + h/2;
count++;
}
xList[count] = xList[0]; // close the loop
yList[count] = yList[0]; // close the loop
drawPolyline(xList, yList, numPoints);
}
/** Fills an oval to fit in the given rectangle */
public void fillOval(int x, int y, int w, int h) {
int startX;
int endX;
int offset;
int colour;
float as;
float bs;
if (w <= 0 || h <= 0) {
return;
}
as = (w/2.0f) * (w/2.0f);
bs = (h/2.0f) * (h/2.0f);
colour = foreground.getRGB();
for(int i=-h/2; i<=h/2; i++) {
offset = (int) Math.sqrt( (1.0 - ((i*i)/bs)) * as );
startX = x - offset + w/2;
endX = x + offset + w/2;
drawSpan(startX, y + i + h/2, endX - startX + 1, colour);
}
}
/**
* Draws an arc bounded by the given rectangle from startAngle to
* endAngle. 0 degrees is a vertical line straight up from the
* center of the rectangle. Positive start angle indicate clockwise
* rotations, negative angle are counter-clockwise.
*/
public void drawArc(int x, int y, int w, int h, int startAngle, int endAngle) {
logger.unimplemented("drawArc");
}
/** fills an arc. arguments are the same as drawArc. */
public void fillArc(int x, int y, int w, int h, int startAngle, int endAngle) {
logger.unimplemented("fillArc");
}
/** Draws a rounded rectangle. */
public void drawRoundRect(int x, int y, int w, int h, int arcWidth, int arcHeight) {
int[] xList;
int[] yList;
int numPoints;
int count;
int startX;
int endX;
int offset;
if (w <= 0 || h <= 0) {
return;
}
if (arcWidth == 0 || arcHeight == 0) {
drawRect(x, y, w, h);
return;
}
if (arcWidth < 0) { // matches behaviour of normal java version
arcWidth *= -1;
}
if (arcHeight < 0) {
arcHeight *= -1;
}
count = 0;
numPoints = ((arcHeight/2) + 1) * 2;
numPoints += ((arcHeight/2) + 1) * 2;
numPoints += 1; // last point to close the loop
xList = new int[numPoints];
yList = new int[numPoints];
float as = (arcWidth/2.0f) * (arcWidth/2.0f);
float bs = (arcHeight/2.0f) * (arcHeight/2.0f);
// draw top curved half of box
for (int i = 0; -arcHeight/2 <= i; i--) {
offset = (int) Math.sqrt( (1.0 - ((i*i)/bs)) * as );
startX = x - offset + arcWidth/2;
xList[count] = startX;
yList[count] = y+i+(arcHeight/2);
count++;
}
for (int i = -arcHeight / 2; i <= 0; i++) {
offset = (int) Math.sqrt( (1.0 - ((i*i)/bs)) * as );
endX = x + offset + (w-arcWidth) + arcWidth/2;
xList[count] = endX;
yList[count] = y + i + (arcHeight/2);
count++;
}
// draw bottom box
for (int i = 0; i <= arcHeight / 2; i++) {
offset = (int) Math.sqrt( (1.0 - ((i*i)/bs)) * as );
startX = x - offset + arcWidth/2;
endX = x + offset + (w - arcWidth) + arcWidth/2;
xList[count] = endX;
yList[count] = y + i + h - arcHeight/2;
count++;
}
// draw bottom box
for (int i = arcHeight / 2; i >= 0; i--) {
offset = (int) Math.sqrt( (1.0 - ((i*i)/bs)) * as );
startX = x - offset + arcWidth/2;
endX = x + offset + (w-arcWidth) + arcWidth/2;
xList[count] = startX;
yList[count] = y+i+h-arcHeight/2;
count++;
}
xList[count] = xList[0];
yList[count] = yList[0];
drawPolyline(xList, yList, numPoints);
}
/** Draws a filled rounded rectangle. */
public void fillRoundRect(int x, int y, int w, int h, int arcWidth, int arcHeight) {
int startX;
int endX;
int offset;
int colour;
if (w <= 0 || h <= 0) {
return;
}
if (arcWidth == 0 || arcHeight == 0) {
fillRect(x,y,w,h);
return;
}
if (arcWidth < 0) { // matches behaviour of normal java version
arcWidth *= -1;
}
if (arcHeight < 0) {
arcHeight *= -1;
}
float as = (arcWidth/2.0f) * (arcWidth/2.0f);
float bs = (arcHeight/2.0f) * (arcHeight/2.0f);
colour = foreground.getRGB();
// draw top curved half of box
for (int i = -arcHeight/2; i < 0; i++) {
offset = (int) Math.sqrt( (1.0 - ((i*i)/bs)) * as );
startX = x - offset + arcWidth/2;
endX = x + offset + (w - arcWidth) + arcWidth/2;
drawSpan(startX, y + i + (arcHeight/2), endX - startX + 1, colour);
}
// draw middle section
for (int i = 0; i < h - arcHeight; i++) {
drawSpan(x, y + i + arcHeight/2, w, colour);
}
// draw bottom box
for (int i = 0; i <= arcHeight/2; i++) {
offset = (int) Math.sqrt( (1.0 - ((i*i)/bs)) * as );
startX = x - offset + arcWidth/2;
endX = x + offset + (w - arcWidth) + arcWidth/2;
drawSpan(startX, y + i + h - 1 - arcHeight/2, endX - startX + 1, colour);
}
}
protected native void drawStringN(long ftFace, String string, int x, int y, int rgb);
/** Draws the given string. */
/** Draws the given string at (x, y) in the current color, if font metrics are available. */
public void drawString(String string, int x, int y) {
    if (fontMetrics == null) {
        return;
    }
    fontMetrics.drawString(this, string, x, y, foreground.getRGB());
}
/** Draws the given character array. */
/** Draws a slice of the given character array by delegating to drawString. */
public void drawChars(char chars[], int offset, int length, int x, int y) {
    drawString(String.valueOf(chars, offset, length), x, y);
}
public void drawString(AttributedCharacterIterator arg0, int arg1, int arg2) {
    // Styled-text rendering is not implemented in this port.
    logger.unimplemented("drawString");
}
/**
* Draws an image at x,y in nonblocking mode with a callback object.
*/
public boolean drawImage(Image img, int x, int y, ImageObserver observer) {
return drawImage(img, x, y, null, observer);
}
/**
* Draws an image at x,y in nonblocking mode with a solid background
* color and a callback object.
*/
public boolean drawImage(Image img, int x, int y, Color bg,
ImageObserver observer) {
return drawImageN(img, x, y, -1, -1, 0, 0, -1, -1, bg, observer);
}
/**
* Draws an image scaled to x,y,w,h in nonblocking mode with a
* callback object.
*/
public boolean drawImage(Image img, int x, int y, int w, int h,
ImageObserver observer) {
return drawImage(img, x, y, w, h, null, observer);
}
/**
* Draws an image scaled to x,y,w,h in nonblocking mode with a
* solid background color and a callback object.
*/
public boolean drawImage(Image img, int x, int y, int w, int h,
Color bg, ImageObserver observer) {
return drawImageN(img, x, y, w, h, 0, 0, -1, -1, bg, observer);
}
/**
* Draws a subrectangle of an image scaled to a destination rectangle
* in nonblocking mode with a callback object.
*/
public boolean drawImage(Image img,
int dx1, int dy1, int dx2, int dy2,
int sx1, int sy1, int sx2, int sy2,
ImageObserver observer) {
return drawImage(img, dx1, dy1, dx2, dy2, sx1, sy1, sx2, sy2, null, observer);
}
/**
* Draws a subrectangle of an image scaled to a destination rectangle in
* nonblocking mode with a solid background color and a callback object.
*/
/**
 * Draws a subrectangle of an image scaled to a destination rectangle in
 * nonblocking mode with a solid background color and a callback object.
 * Coordinate pairs may be given in any order; both rectangles are
 * normalized so that (x1, y1) is the top-left corner before delegating.
 */
public boolean drawImage(Image img,
                         int dx1, int dy1, int dx2, int dy2,
                         int sx1, int sy1, int sx2, int sy2,
                         Color bg, ImageObserver observer) {
    int dxMin = Math.min(dx1, dx2);
    int dxMax = Math.max(dx1, dx2);
    int dyMin = Math.min(dy1, dy2);
    int dyMax = Math.max(dy1, dy2);
    int sxMin = Math.min(sx1, sx2);
    int sxMax = Math.max(sx1, sx2);
    int syMin = Math.min(sy1, sy2);
    int syMax = Math.max(sy1, sy2);
    return drawImageN(img, dxMin, dyMin, dxMax - dxMin, dyMax - dyMin,
                      sxMin, syMin, sxMax - sxMin, syMax - syMin, bg, observer);
}
/**
* Draws a subrectangle of an image scaled to a destination rectangle in
* nonblocking mode with a solid background color and a callback object.
*/
protected boolean drawImageN(Image img,
int dx, int dy, int dw, int dh,
int sx, int sy, int sw, int sh,
Color bg, ImageObserver observer) {
if ((sx < 0) || (sy < 0) ||
(sw == 0) || (sh == 0) || (dw == 0) || (dh == 0))
return false;
BDImage bdImage;
if (img instanceof BDImage) {
bdImage = (BDImage)img;
} else if (img instanceof DVBBufferedImage) {
bdImage = (BDImage)getBufferedImagePeer(
(BufferedImage)(((DVBBufferedImage)img).getImage()));
} else if (img instanceof BufferedImage) {
bdImage = (BDImage)getBufferedImagePeer((BufferedImage)img);
} else {
logger.unimplemented("drawImageN: unsupported image type " + img.getClass().getName());
return false;
}
if (bdImage instanceof BDImageConsumer) {
BDImageConsumer consumer = (BDImageConsumer)bdImage;
if (!consumer.isComplete(observer)) {
return false;
}
}
if(sx + sw > bdImage.width || sy + sh > bdImage.height) {
logger.info("drawImageN: fixing too small src image (src " + sx + "," + sy + " " + sw + "x" + sh + " ; img " + bdImage.width + "x" + bdImage.height + ")");
BDImage subImage = new BDImage(null, sw, sh, null);
bdImage.getRGB(sx, sy, Math.min(sw, bdImage.width - sx), Math.min(sh, bdImage.height - sy), subImage.getBdBackBuffer(), 0, sw);
bdImage = subImage;
sx = 0;
sy = 0;
}
/*
if(sx + sw > bdImage.width) {
int n = sx + sw - bdImage.width;
dw -= dw * n / sw;
sw -= n;
}
if(sy + sh > bdImage.height) {
int n = sy + sh - bdImage.height;
dh -= dh * n / sh;
sh -= n;
}
*/
if ((sw > 0) && (sh > 0) &&
((sx != 0) || (sy != 0) || (sw != bdImage.width) || (sh != bdImage.height))) {
BDImage subImage = new BDImage(null, sw, sh, null);
bdImage.getRGB(sx, sy, sw, sh, subImage.getBdBackBuffer(), 0, sw);
bdImage = subImage;
}
if ((dw > 0) && (dh > 0) &&
((dw != bdImage.width) || (dh != bdImage.height))) {
BDImageConsumer scaledImage = new BDImageConsumer(null);
AreaAveragingScaleFilter scaleFilter =
new AreaAveragingScaleFilter(dw, dh);
scaleFilter = (AreaAveragingScaleFilter)scaleFilter.getFilterInstance(scaledImage);
scaleFilter.setDimensions(bdImage.width, bdImage.height);
scaleFilter.setPixels(0, 0, bdImage.width, bdImage.height,
bdImage.getColorModel(), bdImage.getBdBackBuffer(),
0, bdImage.width);
scaleFilter.imageComplete(ImageConsumer.STATICIMAGEDONE);
bdImage = scaledImage;
}
int[] rgbArray = bdImage.getBdBackBuffer();
int bgColor;
if (bg != null)
bgColor = bg.getRGB();
else
bgColor = 0;
for (int y = dy; y < (dy + bdImage.height); y++) {
if (bg != null) {
drawSpan(dx, y, bdImage.width, bgColor);
}
drawSpan(dx, y, bdImage.width, rgbArray, (y - dy) * bdImage.width);
}
return true;
}
public Stroke getStroke() {
    // Strokes are not supported in this port; callers cannot proceed.
    logger.unimplemented("getStroke");
    throw new Error();
}
public void setStroke(Stroke stroke) {
    // Strokes are not supported in this port; the request is logged and ignored.
    logger.unimplemented("setStroke");
}
public void dispose() {
    // Nothing to release: the back buffer and dirty area are owned by the
    // window/image this context was created from.
}
/** Returns the class name followed by the current translation origin. */
public String toString() {
    StringBuffer buf = new StringBuffer(getClass().getName());
    buf.append('[').append(originX).append(',').append(originY).append(']');
    return buf.toString();
}
/**
 * Extracts the platform peer image from a BufferedImage via the cached
 * reflective 'peer' field. Returns null if the reflective read fails
 * (the failure is only printed, not rethrown).
 */
private static Image getBufferedImagePeer(BufferedImage image) {
    try {
        return (Image)bufferedImagePeer.get(image);
    } catch (IllegalArgumentException e) {
        e.printStackTrace();
    } catch (IllegalAccessException e) {
        e.printStackTrace();
    }
    return null;
}
private static Field bufferedImagePeer;
static {
try {
Class c = Class.forName("java.awt.image.BufferedImage");
bufferedImagePeer = c.getDeclaredField("peer");
bufferedImagePeer.setAccessible(true);
} catch (ClassNotFoundException e) {
throw new AWTError("java.awt.image.BufferedImage not found");
} catch (SecurityException e) {
throw new AWTError("java.awt.image.BufferedImage.peer not accessible");
} catch (NoSuchFieldException e) {
throw new AWTError("java.awt.image.BufferedImage.peer not found");
}
}
private static final Logger logger = Logger.getLogger(BDGraphics.class.getName());
}
|
src/libbluray/bdj/java-j2me/java/awt/BDGraphics.java
|
/*
* This file is part of libbluray
* Copyright (C) 2012 libbluray
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library. If not, see
* <http://www.gnu.org/licenses/>.
*/
package java.awt;
import java.lang.reflect.Field;
import java.text.AttributedCharacterIterator;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.awt.image.AreaAveragingScaleFilter;
import java.awt.image.BufferedImage;
import java.awt.image.ImageConsumer;
import java.awt.image.ImageObserver;
import org.dvb.ui.DVBBufferedImage;
import sun.awt.ConstrainableGraphics;
import org.videolan.Logger;
class BDGraphics extends Graphics2D implements ConstrainableGraphics {
private static final Color DEFAULT_COLOR = Color.BLACK;
private static final Font DEFAULT_FONT = new Font("Dialog", Font.PLAIN, 12);
private int width;
private int height;
private int[] backBuffer;
private Area dirty;
private GraphicsConfiguration gc;
private Color foreground;
private Color background;
private Font font;
private BDFontMetrics fontMetrics;
private AlphaComposite composite;
/** The current xor color. If null then we are in paint mode. */
private Color xorColor;
/** Translated X, Y offset from native offset. */
private int originX;
private int originY;
/** The actual clip rectangle that is intersection of user clip and constrained rectangle. */
private Rectangle actualClip;
/** The current user clip rectangle or null if no clip has been set. This is stored in the
native coordinate system and not the (possibly) translated Java coordinate system. */
private Rectangle clip = null;
/** The rectangle this graphics object has been constrained too. This is stored in the
native coordinate system and not the (possibly) translated Java coordinate system.
If it is null then this graphics has not been constrained. The constrained rectangle
is another layer of clipping independant of the user clip. */
private Rectangle constrainedRect = null;
BDGraphics(BDGraphics g) {
backBuffer = g.backBuffer;
dirty = g.dirty;
width = g.width;
height = g.height;
gc = g.gc;
foreground = g.foreground;
background = g.background;
composite = g.composite;
font = g.font;
fontMetrics = g.fontMetrics;
originX = g.originX;
originY = g.originY;
if (g.clip != null) {
clip = new Rectangle(g.clip);
}
setupClip();
}
BDGraphics(BDRootWindow window) {
width = window.getWidth();
height = window.getHeight();
backBuffer = window.getBdBackBuffer();
dirty = window.getDirtyArea();
gc = window.getGraphicsConfiguration();
foreground = window.getForeground();
background = window.getBackground();
font = window.getFont();
if (foreground == null)
foreground = DEFAULT_COLOR;
if (background == null)
background = DEFAULT_COLOR;
if (font == null)
font = DEFAULT_FONT;
fontMetrics = BDFontMetrics.getFontMetrics(font);
composite = AlphaComposite.SrcOver;
setupClip();
}
BDGraphics(BDImage image) {
width = image.getWidth();
height = image.getHeight();
backBuffer = image.getBdBackBuffer();
dirty = image.getDirtyArea();
gc = image.getGraphicsConfiguration();
Component component = image.getComponent();
if (component != null) {
foreground = component.getForeground();
background = component.getBackground();
font = component.getFont();
}
if (foreground == null)
foreground = DEFAULT_COLOR;
if (background == null)
background = new Color(0, 0, 0, 0);
if (font == null)
font = DEFAULT_FONT;
fontMetrics = BDFontMetrics.getFontMetrics(font);
composite = AlphaComposite.SrcOver;
setupClip();
}
/** Creates a new context that shares this one's back buffer, clip and state. */
public Graphics create() {
    return new BDGraphics(this);
}
/** Shifts the translation origin of this context by (x, y). */
public void translate(int x, int y) {
    originX = originX + x;
    originY = originY + y;
}
/** Sets the current font and refreshes the cached metrics; null or an equal font is a no-op. */
public void setFont(Font font) {
    if (font == null || font.equals(this.font)) {
        return;
    }
    this.font = font;
    fontMetrics = BDFontMetrics.getFontMetrics(font);
}
public Font getFont() {
return font;
}
public FontMetrics getFontMetrics() {
return fontMetrics;
}
public FontMetrics getFontMetrics(Font font) {
return BDFontMetrics.getFontMetrics(font);
}
/** Sets the foreground color; null is ignored. */
public void setColor(Color c) {
    if (c == null) {
        return;
    }
    if (c != foreground) {
        foreground = c;
    }
}
public Color getColor() {
return foreground;
}
public Composite getComposite() {
return composite;
}
public GraphicsConfiguration getDeviceConfiguration() {
return gc;
}
/**
 * Sets the compositing rule. Only AlphaComposite instances are accepted;
 * null or the already-installed composite is silently ignored.
 */
public void setComposite(Composite comp) {
    if (comp == null || comp == composite) {
        return;
    }
    if (!(comp instanceof AlphaComposite)) {
        throw new IllegalArgumentException("Only AlphaComposite is supported");
    }
    composite = (AlphaComposite) comp;
}
public void setPaintMode() {
xorColor = null;
}
public void setXORMode(Color color) {
xorColor = color;
}
/** Gets the current clipping area. */
/** Gets the current clipping area in user (translated) coordinates, or null if unset. */
public Rectangle getClipBounds() {
    if (clip == null) {
        return null;
    }
    return new Rectangle(clip.x - originX, clip.y - originY, clip.width, clip.height);
}
/**
 * Constrains all drawing to the given rectangle (interpreted relative to
 * any existing constraint). Note the side effect: the translation origin
 * is moved to the constrained rectangle's top-left corner. This clip
 * layer is independent of the user clip set via setClip()/clipRect().
 */
public void constrain(int x, int y, int w, int h) {
    Rectangle rect;
    if (constrainedRect != null)
        rect = constrainedRect;
    else
        rect = new Rectangle(0, 0, width, height);
    constrainedRect = rect.intersection(new Rectangle(rect.x + x, rect.y + y, w, h));
    originX = constrainedRect.x;
    originY = constrainedRect.y;
    setupClip();
}
/** Returns a Shape object representing the clip. */
public Shape getClip() {
return getClipBounds();
}
/** Crops the clipping rectangle. */
/** Crops the clipping rectangle by intersecting it with the given user-space rectangle. */
public void clipRect(int x, int y, int w, int h) {
    Rectangle r = new Rectangle(x + originX, y + originY, w, h);
    clip = (clip == null) ? r : clip.intersection(r);
    setupClip();
}
/** Sets the clipping rectangle. */
public void setClip(int x, int y, int w, int h) {
clip = new Rectangle (x + originX, y + originY, w, h);
setupClip();
}
/** Sets the clip to a Shape (only Rectangle allowed). */
/** Sets the clip from a Shape; only null and Rectangle are supported. */
public void setClip(Shape clip) {
    if (clip == null) {
        this.clip = null;
        setupClip();
        return;
    }
    if (!(clip instanceof Rectangle)) {
        throw new IllegalArgumentException("setClip(Shape) only supports Rectangle objects");
    }
    Rectangle rect = (Rectangle) clip;
    setClip(rect.x, rect.y, rect.width, rect.height);
}
/**
 * Recomputes actualClip as the intersection of the user clip and the
 * constrained rectangle (or the full surface when either is unset).
 */
private void setupClip() {
    Rectangle bounds = (constrainedRect != null)
        ? constrainedRect
        : new Rectangle(0, 0, width, height);
    actualClip = (clip != null) ? clip.intersection(bounds) : bounds;
}
/**
 * Composites src over dest (both non-premultiplied ARGB) and returns the
 * blended ARGB value. Fast paths handle a fully transparent src, a fully
 * opaque src, and a fully transparent dest.
 */
private int alphaBlend(int dest, int src) {
    int As = src >>> 24;
    if (As == 0)
        return dest;
    if (As == 255)
        return src;
    int Ad = (dest >>> 24);
    if (Ad == 0)
        return src;
    int R, G, B;
    // Scale src channels by src alpha; the extra x255 keeps all of the
    // arithmetic in integers until the final divide.
    R = ((src >>> 16) & 255) * As * 255;
    G = ((src >>> 8) & 255) * As * 255;
    B = (src & 255) * As * 255;
    // Ad becomes the dest contribution weight; As the combined alpha x255.
    Ad = Ad * (255 - As);
    As = As * 255 + Ad;
    R = (R + ((dest >>> 16) & 255) * Ad) / As;
    G = (G + ((dest >>> 8) & 255) * Ad) / As;
    B = (B + (dest & 255) * Ad) / As;
    R = Math.min(255, R);
    G = Math.min(255, G);
    B = Math.min(255, B);
    // Scale the combined alpha back down to 0..255.
    Ad = As / 255;
    Ad = Math.min(255, Ad);
    return (Ad << 24) | (R << 16) | (G << 8) | B;
}
/**
 * Applies the current AlphaComposite's extra alpha to an ARGB value by
 * scaling only the alpha channel; color channels are left untouched.
 */
private int applyComposite(int rgb) {
    return ((int)((rgb >>> 24) * composite.getAlpha()) << 24) | (rgb & 0x00FFFFFF);
}
/**
 * Draws a horizontal run of 'length' pixels of solid color at native
 * (already translated) coordinates: clips against actualClip, honors XOR
 * mode and the current AlphaComposite rule, and marks the span dirty.
 */
private void drawSpanN(int x, int y, int length, int rgb) {
    Rectangle rect = new Rectangle(x, y, length, 1);
    rect = actualClip.intersection(rect);
    // Fully clipped (or degenerate intersection): nothing to draw.
    if (rect.width <= 0 || rect.height <= 0 || rect.x < 0 || rect.y < 0) {
        return;
    }
    x = rect.x;
    length = rect.width;
    dirty.add(rect);
    if (xorColor != null) {
        // XOR paint mode: flip pixels with (xorColor ^ rgb); reversible.
        for (int i = 0; i < length; i++) {
            backBuffer[y * width + x + i] ^= xorColor.getRGB() ^ rgb;
        }
        return;
    }
    switch (composite.getRule()) {
    case AlphaComposite.CLEAR:
        for (int i = 0; i < length; i++) {
            backBuffer[y * width + x + i] = 0;
        }
        break;
    case AlphaComposite.SRC:
        rgb = applyComposite(rgb);
        for (int i = 0; i < length; i++) {
            backBuffer[y * width + x + i] = rgb;
        }
        break;
    case AlphaComposite.SRC_OVER:
        rgb = applyComposite(rgb);
        for (int i = 0; i < length; i++) {
            backBuffer[y * width + x + i] = alphaBlend(backBuffer[y * width + x + i], rgb);
        }
        break;
    }
}
private void drawSpanN(int x, int y, int length, int src[], int srcOffset) {
Rectangle rect = new Rectangle(x, y, length, 1);
rect = actualClip.intersection(rect);
if (rect.width <= 0 || rect.height <= 0 || rect.x < 0 || rect.y < 0) {
return;
}
srcOffset += rect.x - x;
x = rect.x;
length = rect.width;
dirty.add(rect);
if (xorColor != null) {
for (int i = 0; i < length; i++) {
backBuffer[y * width + x + i] ^= xorColor.getRGB() ^ src[srcOffset + i];
}
return;
}
switch (composite.getRule()) {
case AlphaComposite.CLEAR:
for (int i = 0; i < length; i++) {
backBuffer[y * width + x + i] = 0;
}
break;
case AlphaComposite.SRC:
for (int i = 0; i < length; i++) {
backBuffer[y * width + x + i] = applyComposite(src[srcOffset + i]);
}
break;
case AlphaComposite.SRC_OVER:
for (int i = 0; i < length; i++) {
backBuffer[y * width + x + i] = alphaBlend(backBuffer[y * width + x + i], applyComposite(src[srcOffset + i]));
}
break;
}
}
/** Translates user coordinates to native ones and draws a solid-color span. */
private void drawSpan(int x, int y, int length, int rgb) {
    drawSpanN(x + originX, y + originY, length, rgb);
}
/** Translates user coordinates to native ones and draws a span of source pixels. */
private void drawSpan(int x, int y, int length, int src[], int srcOffset) {
    drawSpanN(x + originX, y + originY, length, src, srcOffset);
}
private void drawPointN(int x, int y, int rgb) {
drawSpanN(x, y, 1, rgb);
}
/** Blits a w-by-h glyph bitmap (row-major ARGB) at (x0, y0), one pixel at a time. */
private void drawGlyph(int[] rgbArray, int x0, int y0, int w, int h) {
    int idx = 0;
    for (int row = 0; row < h; row++) {
        for (int col = 0; col < w; col++) {
            drawPoint(x0 + col, y0 + row, rgbArray[idx++]);
        }
    }
}
/** Plots a single pixel in user coordinates, discarding it if outside the clip. */
private void drawPoint(int x, int y, int rgb) {
    int nx = x + originX;
    int ny = y + originY;
    if (actualClip.contains(nx, ny)) {
        drawPointN(nx, ny, rgb);
    }
}
/**
 * Clears the given rectangle to the background color. Pixels are written
 * directly (composite and XOR mode are bypassed).
 */
public void clearRect(int x, int y, int w, int h) {
    x += originX;
    y += originY;
    Rectangle rect = new Rectangle(x, y, w, h);
    rect = actualClip.intersection(rect);
    // Bug fix: guard against an empty/degenerate intersection. Without it,
    // Rectangle.intersection can yield non-positive width (with positive
    // height) or out-of-range x/y, and Arrays.fill() was then invoked with
    // toIndex < fromIndex or negative indices. Mirrors drawSpanN's guard.
    if (rect.width <= 0 || rect.height <= 0 || rect.x < 0 || rect.y < 0) {
        return;
    }
    x = rect.x;
    y = rect.y;
    w = rect.width;
    h = rect.height;
    int rgb = background.getRGB();
    for (int i = 0; i < h; i++)
        Arrays.fill(backBuffer, (y + i) * width + x, (y + i) * width + x + w, rgb);
    dirty.add(rect);
}
public void fillRect(int x, int y, int w, int h) {
x += originX;
y += originY;
Rectangle rect = new Rectangle(x, y, w, h);
rect = actualClip.intersection(rect);
x = rect.x;
y = rect.y;
w = rect.width;
h = rect.height;
int rgb = foreground.getRGB();
for (int Y = y; Y < (y + h); Y++)
drawSpanN(x, Y, w, rgb);
}
/**
 * Outlines a rectangle in the current color. Per AWT semantics the
 * outline covers (w+1) x (h+1) pixels: the right and bottom edges at
 * x+w and y+h are included.
 */
public void drawRect(int x, int y, int w, int h) {
    x += originX;
    y += originY;
    drawLineN(x, y, x + w, y);
    drawLineN(x, y + h, x + w, y + h);
    drawLineN(x, y, x, y + h);
    drawLineN(x + w, y, x + w, y + h);
}
/**
 * Bresenham line draw between native coordinates in the current
 * foreground color. Handles all octants via stepx/stepy; 'fraction' is
 * the scaled error term (dx and dy are pre-doubled so no fractional
 * arithmetic is needed). Both endpoints are plotted.
 */
private void drawLineN(int x1, int y1, int x2, int y2) {
    int rgb = foreground.getRGB();
    int dy = y2 - y1;
    int dx = x2 - x1;
    int stepx, stepy;
    int fraction;
    if (dy < 0) {
        dy = -dy;
        stepy = -1;
    } else {
        stepy = 1;
    }
    if (dx < 0) {
        dx = -dx;
        stepx = -1;
    } else {
        stepx = 1;
    }
    dy <<= 1;
    dx <<= 1;
    drawPointN(x1, y1, rgb);
    if (dx > dy) {
        // X-major: step x every iteration, y when the error overflows.
        fraction = dy - (dx >> 1);
        while (x1 != x2) {
            if (fraction >= 0) {
                y1 += stepy;
                fraction -= dx;
            }
            x1 += stepx;
            fraction += dy;
            drawPointN(x1, y1, rgb);
        }
    } else {
        // Y-major: step y every iteration, x when the error overflows.
        fraction = dx - (dy >> 1);
        while (y1 != y2) {
            if (fraction >= 0) {
                x1 += stepx;
                fraction -= dy;
            }
            y1 += stepy;
            fraction += dx;
            drawPointN(x1, y1, rgb);
        }
    }
}
/** Draws a line between two user-space points in the current color. */
public void drawLine(int x1, int y1, int x2, int y2) {
    drawLineN(x1 + originX, y1 + originY, x2 + originX, y2 + originY);
}
/**
* Copies an area of the canvas that this graphics context paints to.
* @param X the x-coordinate of the source.
* @param Y the y-coordinate of the source.
* @param W the width.
* @param H the height.
* @param dx the horizontal distance to copy the pixels.
* @param dy the vertical distance to copy the pixels.
*/
public void copyArea(int x, int y, int w, int h, int dx, int dy) {
x += originX;
y += originY;
Rectangle rect = new Rectangle(x, y, w, h);
rect = actualClip.intersection(rect);
if (rect.width <= 0 || rect.height <= 0) {
return;
}
x = rect.x;
y = rect.y;
w = rect.width;
h = rect.height;
int subImage[] = new int[w * h];
// copy back buffer
for (int i = 0; i < h; i++) {
System.arraycopy(backBuffer, ((y + i) * width) + x, subImage, w * i, w);
}
// draw sub image
for (int i = 0; i < h; i++) {
drawSpanN(x + dx, y + i + dy, w, subImage, w * i);
}
}
/** Draws lines defined by an array of x points and y points */
/**
 * Draws connected line segments defined by arrays of x and y points.
 * A single point is plotted directly; the polyline is not closed.
 */
public void drawPolyline(int xPoints[], int yPoints[], int nPoints) {
    if (nPoints == 1) {
        drawPoint(xPoints[0], yPoints[0], foreground.getRGB());
    } else {
        // Bug fix: the y coordinates were previously read from xPoints
        // (drawLine(xPoints[i], xPoints[i], ...)), which collapsed every
        // polyline -- and drawOval/drawRoundRect, which delegate here --
        // onto the x == y diagonal.
        for (int i = 0; i < (nPoints - 1); i++) {
            drawLine(xPoints[i], yPoints[i], xPoints[i + 1], yPoints[i + 1]);
        }
    }
}
/** Draws a polygon defined by an array of x points and y points */
/**
 * Draws a closed polygon defined by arrays of x and y points. A single
 * point is plotted directly; for three or more points the outline is
 * closed by joining the last vertex back to the first.
 */
public void drawPolygon(int xPoints[], int yPoints[], int nPoints) {
    if (nPoints == 1) {
        drawPoint(xPoints[0], yPoints[0], foreground.getRGB());
    } else {
        // Bug fix: the y coordinates (including those of the closing
        // edge) were previously read from xPoints, collapsing the shape
        // onto the x == y diagonal.
        for (int i = 0; i < (nPoints - 1); i++) {
            drawLine(xPoints[i], yPoints[i], xPoints[i + 1], yPoints[i + 1]);
        }
        if (nPoints > 2) {
            drawLine(xPoints[nPoints - 1], yPoints[nPoints - 1], xPoints[0], yPoints[0]);
        }
    }
}
/** Fills a polygon with the current fill mask */
public void fillPolygon(int xPoints[], int yPoints[], int nPoints) {
int minY = Integer.MAX_VALUE;
int maxY = Integer.MIN_VALUE;
int colour = foreground.getRGB();
if (nPoints < 3) {
return;
}
for (int i = 0; i < nPoints; i++) {
if (yPoints[i] > maxY) {
maxY = yPoints[i];
}
if (yPoints[i] < minY) {
minY = yPoints[i];
}
}
// check the last point to see if its the same as the first
if (xPoints[0] == xPoints[nPoints - 1] && yPoints[0] == yPoints[nPoints - 1]) {
nPoints--;
}
PolyEdge[] polyEdges = new PolyEdge[nPoints];
for (int i = 0; i < nPoints - 1; i++) {
polyEdges[i] = new PolyEdge(xPoints[i], yPoints[i], xPoints[i + 1], yPoints[i + 1]);
}
// add the last one
polyEdges[nPoints - 1] = new PolyEdge(xPoints[nPoints - 1], yPoints[nPoints - 1], xPoints[0], yPoints[0]);
ArrayList xList = new ArrayList();
for (int i = minY; i <= maxY; i++) {
for (int j = 0; j < nPoints; j++) {
if (polyEdges[j].intersects(i)) {
int x = polyEdges[j].intersectionX(i);
xList.add(new Integer(x));
}
}
// probably a better way of doing this (removing duplicates);
HashSet hs = new HashSet();
hs.addAll(xList);
xList.clear();
xList.addAll(hs);
if (xList.size() % 2 > 0) {
xList.clear();
continue; // this should be impossible unless the poly is open somewhere
}
Collections.sort(xList);
for (int j = 0; j < xList.size(); j +=2 ) {
int x1 = ((Integer)xList.get(j)).intValue();
int x2 = ((Integer)xList.get(j + 1)).intValue();
drawSpan(x1, i, x2 - x1, colour);
}
xList.clear();
}
}
/** Draws an oval to fit in the given rectangle */
public void drawOval(int x, int y, int w, int h) {
int startX;
int endX;
int offset;
int[] xList;
int[] yList;
int numPoints;
int count;
float as;
float bs;
if (w <= 0 || h <=0 ) {
return;
}
count = 0;
numPoints = ((h/2) + (h/2) + 1) * 2;
numPoints += 1; // to close
xList = new int[numPoints];
yList = new int[numPoints];
as = (w/2.0f) * (w/2.0f);
bs = (h/2.0f) * (h/2.0f);
for (int i = -h/2; i <= h/2; i++) {
offset = (int) Math.sqrt( (1.0 - ((i*i)/bs)) * as );
startX = x - offset + w/2;
xList[count] = startX;
yList[count] = y + i + h/2;
count++;
}
for (int i = h/2; i >= -h/2; i--) {
offset = (int) Math.sqrt( (1.0 - ((i*i)/bs)) * as );
endX = x + offset + w/2;
xList[count] = endX;
yList[count] = y + i + h/2;
count++;
}
xList[count] = xList[0]; // close the loop
yList[count] = yList[0]; // close the loop
drawPolyline(xList, yList, numPoints);
}
/** Fills an oval to fit in the given rectangle */
/** Fills an oval to fit in the given rectangle */
public void fillOval(int x, int y, int w, int h) {
    if (w <= 0 || h <= 0) {
        return;
    }
    // Squared semi-axes of the ellipse.
    float a2 = (w / 2.0f) * (w / 2.0f);
    float b2 = (h / 2.0f) * (h / 2.0f);
    int rgb = foreground.getRGB();
    int centerX = x + w / 2;
    int centerY = y + h / 2;
    // For each scanline solve the ellipse equation for the half-width,
    // then fill the symmetric span around the center column.
    for (int i = -h / 2; i <= h / 2; i++) {
        int halfSpan = (int) Math.sqrt((1.0 - ((i * i) / b2)) * a2);
        drawSpan(centerX - halfSpan, centerY + i, 2 * halfSpan + 1, rgb);
    }
}
/**
* Draws an arc bounded by the given rectangle from startAngle to
* endAngle. 0 degrees is a vertical line straight up from the
* center of the rectangle. Positive start angle indicate clockwise
* rotations, negative angle are counter-clockwise.
*/
public void drawArc(int x, int y, int w, int h, int startAngle, int endAngle) {
logger.unimplemented("drawArc");
}
/** fills an arc. arguments are the same as drawArc. */
public void fillArc(int x, int y, int w, int h, int startAngle, int endAngle) {
logger.unimplemented("fillArc");
}
/** Draws a rounded rectangle. */
public void drawRoundRect(int x, int y, int w, int h, int arcWidth, int arcHeight) {
int[] xList;
int[] yList;
int numPoints;
int count;
int startX;
int endX;
int offset;
if (w <= 0 || h <= 0) {
return;
}
if (arcWidth == 0 || arcHeight == 0) {
drawRect(x, y, w, h);
return;
}
if (arcWidth < 0) { // matches behaviour of normal java version
arcWidth *= -1;
}
if (arcHeight < 0) {
arcHeight *= -1;
}
count = 0;
numPoints = ((arcHeight/2) + 1) * 2;
numPoints += ((arcHeight/2) + 1) * 2;
numPoints += 1; // last point to close the loop
xList = new int[numPoints];
yList = new int[numPoints];
float as = (arcWidth/2.0f) * (arcWidth/2.0f);
float bs = (arcHeight/2.0f) * (arcHeight/2.0f);
// draw top curved half of box
for (int i = 0; -arcHeight/2 <= i; i--) {
offset = (int) Math.sqrt( (1.0 - ((i*i)/bs)) * as );
startX = x - offset + arcWidth/2;
xList[count] = startX;
yList[count] = y+i+(arcHeight/2);
count++;
}
for (int i = -arcHeight / 2; i <= 0; i++) {
offset = (int) Math.sqrt( (1.0 - ((i*i)/bs)) * as );
endX = x + offset + (w-arcWidth) + arcWidth/2;
xList[count] = endX;
yList[count] = y + i + (arcHeight/2);
count++;
}
// draw bottom box
for (int i = 0; i <= arcHeight / 2; i++) {
offset = (int) Math.sqrt( (1.0 - ((i*i)/bs)) * as );
startX = x - offset + arcWidth/2;
endX = x + offset + (w - arcWidth) + arcWidth/2;
xList[count] = endX;
yList[count] = y + i + h - arcHeight/2;
count++;
}
// draw bottom box
for (int i = arcHeight / 2; i >= 0; i--) {
offset = (int) Math.sqrt( (1.0 - ((i*i)/bs)) * as );
startX = x - offset + arcWidth/2;
endX = x + offset + (w-arcWidth) + arcWidth/2;
xList[count] = startX;
yList[count] = y+i+h-arcHeight/2;
count++;
}
xList[count] = xList[0];
yList[count] = yList[0];
drawPolyline(xList, yList, numPoints);
}
/** Draws a filled rounded rectangle. */
public void fillRoundRect(int x, int y, int w, int h, int arcWidth, int arcHeight) {
int startX;
int endX;
int offset;
int colour;
if (w <= 0 || h <= 0) {
return;
}
if (arcWidth == 0 || arcHeight == 0) {
fillRect(x,y,w,h);
return;
}
if (arcWidth < 0) { // matches behaviour of normal java version
arcWidth *= -1;
}
if (arcHeight < 0) {
arcHeight *= -1;
}
float as = (arcWidth/2.0f) * (arcWidth/2.0f);
float bs = (arcHeight/2.0f) * (arcHeight/2.0f);
colour = foreground.getRGB();
// draw top curved half of box
for (int i = -arcHeight/2; i < 0; i++) {
offset = (int) Math.sqrt( (1.0 - ((i*i)/bs)) * as );
startX = x - offset + arcWidth/2;
endX = x + offset + (w - arcWidth) + arcWidth/2;
drawSpan(startX, y + i + (arcHeight/2), endX - startX + 1, colour);
}
// draw middle section
for (int i = 0; i < h - arcHeight; i++) {
drawSpan(x, y + i + arcHeight/2, w, colour);
}
// draw bottom box
for (int i = 0; i <= arcHeight/2; i++) {
offset = (int) Math.sqrt( (1.0 - ((i*i)/bs)) * as );
startX = x - offset + arcWidth/2;
endX = x + offset + (w - arcWidth) + arcWidth/2;
drawSpan(startX, y + i + h - 1 - arcHeight/2, endX - startX + 1, colour);
}
}
protected native void drawStringN(long ftFace, String string, int x, int y, int rgb);
/** Draws the given string. */
/** Draws the given string at (x, y) in the current color, if font metrics are available. */
public void drawString(String string, int x, int y) {
    if (fontMetrics == null) {
        return;
    }
    fontMetrics.drawString(this, string, x, y, foreground.getRGB());
}
/** Draws the given character array. */
/** Draws a slice of the given character array by delegating to drawString. */
public void drawChars(char chars[], int offset, int length, int x, int y) {
    drawString(String.valueOf(chars, offset, length), x, y);
}
public void drawString(AttributedCharacterIterator arg0, int arg1, int arg2) {
logger.unimplemented("drawString");
}
/**
* Draws an image at x,y in nonblocking mode with a callback object.
*/
public boolean drawImage(Image img, int x, int y, ImageObserver observer) {
return drawImage(img, x, y, null, observer);
}
/**
* Draws an image at x,y in nonblocking mode with a solid background
* color and a callback object.
*/
public boolean drawImage(Image img, int x, int y, Color bg,
ImageObserver observer) {
return drawImageN(img, x, y, -1, -1, 0, 0, -1, -1, bg, observer);
}
/**
* Draws an image scaled to x,y,w,h in nonblocking mode with a
* callback object.
*/
public boolean drawImage(Image img, int x, int y, int w, int h,
ImageObserver observer) {
return drawImage(img, x, y, w, h, null, observer);
}
/**
* Draws an image scaled to x,y,w,h in nonblocking mode with a
* solid background color and a callback object.
*/
public boolean drawImage(Image img, int x, int y, int w, int h,
Color bg, ImageObserver observer) {
return drawImageN(img, x, y, w, h, 0, 0, -1, -1, bg, observer);
}
/**
* Draws a subrectangle of an image scaled to a destination rectangle
* in nonblocking mode with a callback object.
*/
public boolean drawImage(Image img,
int dx1, int dy1, int dx2, int dy2,
int sx1, int sy1, int sx2, int sy2,
ImageObserver observer) {
return drawImage(img, dx1, dy1, dx2, dy2, sx1, sy1, sx2, sy2, null, observer);
}
/**
* Draws a subrectangle of an image scaled to a destination rectangle in
* nonblocking mode with a solid background color and a callback object.
*/
public boolean drawImage(Image img,
int dx1, int dy1, int dx2, int dy2,
int sx1, int sy1, int sx2, int sy2,
Color bg, ImageObserver observer) {
if (dx1 > dx2) {
int swap = dx1;
dx1 = dx2;
dx2 = swap;
}
if (dy1 > dy2) {
int swap = dy1;
dy1 = dy2;
dy2 = swap;
}
if (sx1 > sx2) {
int swap = sx1;
sx1 = sx2;
sx2 = swap;
}
if (sy1 > sy2) {
int swap = sy1;
sy1 = sy2;
sy2 = swap;
}
return drawImageN(img, dx1, dy1, dx2 - dx1, dy2 - dy1,
sx1, sy1, sx2 - sx1, sy2 - sy1, bg, observer);
}
    /**
     * Implementation behind the drawImage variants: resolves {@code img} to
     * the internal BDImage type, extracts the source subrectangle
     * (sx,sy,sw,sh), scales it to (dw,dh) when the sizes differ, and blits it
     * at (dx,dy), optionally painting a solid background color behind each
     * scanline first. Returns false when the image is not yet complete
     * (nonblocking contract) or the arguments are degenerate.
     *
     * NOTE(review): negative sw/sh/dw/dh are not rejected here, only zero —
     * callers such as drawImage normalize coordinates first; confirm all
     * other callers do too.
     */
    protected boolean drawImageN(Image img,
                                 int dx, int dy, int dw, int dh,
                                 int sx, int sy, int sw, int sh,
                                 Color bg, ImageObserver observer) {
        // Degenerate source or destination rectangles draw nothing.
        if ((sx < 0) || (sy < 0) ||
            (sw == 0) || (sh == 0) || (dw == 0) || (dh == 0))
            return false;
        // Resolve the public image types down to the internal BDImage.
        BDImage bdImage;
        if (img instanceof BDImage) {
            bdImage = (BDImage)img;
        } else if (img instanceof DVBBufferedImage) {
            bdImage = (BDImage)getBufferedImagePeer(
                      (BufferedImage)(((DVBBufferedImage)img).getImage()));
        } else if (img instanceof BufferedImage) {
            bdImage = (BDImage)getBufferedImagePeer((BufferedImage)img);
        } else {
            logger.unimplemented("drawImageN: unsupported image type " + img.getClass().getName());
            return false;
        }
        // Nonblocking mode: if the image is still being produced, report
        // failure; the observer is registered to hear about completion.
        if (bdImage instanceof BDImageConsumer) {
            BDImageConsumer consumer = (BDImageConsumer)bdImage;
            if (!consumer.isComplete(observer)) {
                return false;
            }
        }
        // If the requested source rectangle extends past the image bounds,
        // copy the part that exists into a sw x sh scratch image and draw
        // from that instead (the uncovered area stays at the default pixels).
        if(sx + sw > bdImage.width || sy + sh > bdImage.height) {
            logger.info("drawImageN: fixing too small src image (src " + sx + "," + sy + " " + sw + "x" + sh + " ; img " + bdImage.width + "x" + bdImage.height + ")");
            BDImage subImage = new BDImage(null, sw, sh, null);
            bdImage.getRGB(sx, sy, Math.min(sw, bdImage.width - sx), Math.min(sh, bdImage.height - sy), subImage.getBdBackBuffer(), 0, sw);
            bdImage = subImage;
            sx = 0;
            sy = 0;
        }
        /* Dead alternative kept from upstream: clip by shrinking both the
           source and destination rectangles instead of padding the source.
        if(sx + sw > bdImage.width) {
            int n = sx + sw - bdImage.width;
            dw -= dw * n / sw;
            sw -= n;
        }
        if(sy + sh > bdImage.height) {
            int n = sy + sh - bdImage.height;
            dh -= dh * n / sh;
            sh -= n;
        }
        */
        // Extract the source subrectangle when it is not the whole image.
        if ((sw > 0) && (sh > 0) &&
            ((sx != 0) || (sy != 0) || (sw != bdImage.width) || (sh != bdImage.height))) {
            BDImage subImage = new BDImage(null, sw, sh, null);
            bdImage.getRGB(sx, sy, sw, sh, subImage.getBdBackBuffer(), 0, sw);
            bdImage = subImage;
        }
        // Scale to the destination size with area averaging when it differs
        // from the (possibly already cropped) source size.
        if ((dw > 0) && (dh > 0) &&
            ((dw != bdImage.width) || (dh != bdImage.height))) {
            BDImageConsumer scaledImage = new BDImageConsumer(null);
            AreaAveragingScaleFilter scaleFilter =
                new AreaAveragingScaleFilter(dw, dh);
            scaleFilter = (AreaAveragingScaleFilter)scaleFilter.getFilterInstance(scaledImage);
            scaleFilter.setDimensions(bdImage.width, bdImage.height);
            scaleFilter.setPixels(0, 0, bdImage.width, bdImage.height,
                                  bdImage.getColorModel(), bdImage.getBdBackBuffer(),
                                  0, bdImage.width);
            scaleFilter.imageComplete(ImageConsumer.STATICIMAGEDONE);
            bdImage = scaledImage;
        }
        int[] rgbArray = bdImage.getBdBackBuffer();
        int bgColor;
        if (bg != null)
            bgColor = bg.getRGB();
        else
            bgColor = 0;
        // TODO: optimize this
        // Composite row by row: optionally fill the span with the background
        // color first, then draw the corresponding image row over it.
        for (int y = dy; y < (dy + bdImage.height); y++) {
            if (bg != null) {
                drawSpan(dx, y, bdImage.width, bgColor);
            }
            drawSpan(dx, y, bdImage.width, rgbArray, (y - dy) * bdImage.width);
        }
        return true;
    }
    /** Stroke support is not implemented in this port; reports the call and
     * fails hard. */
    public Stroke getStroke() {
        logger.unimplemented("getStroke");
        throw new Error();
    }
    /** Stroke support is not implemented in this port; the call is reported
     * and otherwise ignored. */
    public void setStroke(Stroke stroke) {
        logger.unimplemented("setStroke");
    }
    /** Intentionally empty — presumably no native resources need explicit
     * release here; confirm against surface ownership. */
    public void dispose() {
    }
public String toString() {
return getClass().getName() + "[" + originX + "," + originY + "]";
}
private static Image getBufferedImagePeer(BufferedImage image) {
try {
return (Image)bufferedImagePeer.get(image);
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
}
return null;
}
    /** Reflective handle to the package-private java.awt.image.BufferedImage
     * "peer" field; resolved once at class load. */
    private static Field bufferedImagePeer;
    static {
        try {
            Class c = Class.forName("java.awt.image.BufferedImage");
            bufferedImagePeer = c.getDeclaredField("peer");
            // the field is not public, so accessibility must be forced
            bufferedImagePeer.setAccessible(true);
        } catch (ClassNotFoundException e) {
            throw new AWTError("java.awt.image.BufferedImage not found");
        } catch (SecurityException e) {
            throw new AWTError("java.awt.image.BufferedImage.peer not accessible");
        } catch (NoSuchFieldException e) {
            throw new AWTError("java.awt.image.BufferedImage.peer not found");
        }
    }
    private static final Logger logger = Logger.getLogger(BDGraphics.class.getName());
}
|
BDGraphics: merge from SE to ME
|
src/libbluray/bdj/java-j2me/java/awt/BDGraphics.java
|
BDGraphics: merge from SE to ME
|
|
Java
|
lgpl-2.1
|
4b100da3bd8fc781c18b1ddfec1aebe9a808d0c1
| 0
|
threerings/narya,threerings/narya,threerings/narya,threerings/narya,threerings/narya
|
//
// $Id: CharacterSprite.java,v 1.23 2002/03/08 22:35:55 mdb Exp $
package com.threerings.cast;
import com.threerings.media.sprite.MultiFrameImage;
import com.threerings.media.sprite.Path;
import com.threerings.media.sprite.Sprite;
/**
 * A character sprite is a sprite that animates itself while walking
 * about in a scene. Animation frames are kept per orientation, and the
 * action sequence in use switches automatically between the resting and
 * path-following actions as the sprite stops and starts moving.
 */
public class CharacterSprite
    extends Sprite implements StandardActions
{
    /**
     * Initializes this character sprite with the specified character
     * descriptor and character manager. It will obtain animation data
     * from the supplied character manager.
     */
    public void init (CharacterDescriptor descrip, CharacterManager charmgr)
    {
        // keep track of this stuff
        _descrip = descrip;
        _charmgr = charmgr;
        // assign an arbitrary starting orientation
        _orient = NORTH;
    }
    /**
     * Reconfigures this sprite to use the specified character descriptor.
     *
     * @param descrip the new character descriptor.
     */
    public void setCharacterDescriptor (CharacterDescriptor descrip)
    {
        // keep the new descriptor
        _descrip = descrip;
        // reset our action which will reload our frames
        // NOTE(review): if no action sequence was ever set, _action is null
        // here and setActionSequence() will throw — confirm callers always
        // set an action before swapping descriptors
        setActionSequence(_action);
    }
    /**
     * Specifies the action to use when the sprite is at rest. The default
     * is <code>STANDING</code>.
     */
    public void setRestingAction (String action)
    {
        _restingAction = action;
    }
    /**
     * Specifies the action to use when the sprite is following a path.
     * The default is <code>WALKING</code>.
     */
    public void setFollowingPathAction (String action)
    {
        _followingPathAction = action;
    }
    /**
     * Sets the action sequence used when rendering the character, from
     * the set of available sequences.
     *
     * @param action the name of the action sequence to use.
     * @throws IllegalArgumentException if the character manager knows of
     * no action sequence with the supplied name.
     */
    public void setActionSequence (String action)
    {
        // keep track of our current action in case someone swaps out our
        // character description
        _action = action;
        // get a reference to the action sequence so that we can obtain
        // our animation frames and configure our frames per second
        ActionSequence actseq = _charmgr.getActionSequence(action);
        if (actseq == null) {
            String errmsg = "No such action '" + action + "'.";
            throw new IllegalArgumentException(errmsg);
        }
        try {
            // obtain our animation frames for this action sequence
            _frames = _charmgr.getActionFrames(_descrip, action);
            // update the sprite render attributes
            setOrigin(actseq.origin.x, actseq.origin.y);
            setFrameRate(actseq.framesPerSecond);
            setFrames(_frames[_orient]);
        } catch (NoSuchComponentException nsce) {
            // the sprite keeps whatever frames it had before the failure
            Log.warning("Character sprite references non-existent " +
                        "component [sprite=" + this + ", err=" + nsce + "].");
        }
    }
    // documentation inherited
    public void setOrientation (int orient)
    {
        super.setOrientation(orient);
        // update the sprite frames to reflect the direction
        if (_frames != null) {
            setFrames(_frames[orient]);
        }
    }
    /**
     * Sets the origin coordinates representing the "base" of the
     * sprite, which in most cases corresponds to the center of the
     * bottom of the sprite image.
     */
    public void setOrigin (int x, int y)
    {
        _xorigin = x;
        _yorigin = y;
        updateRenderOffset();
        updateRenderOrigin();
    }
    // documentation inherited
    protected void updateRenderOffset ()
    {
        super.updateRenderOffset();
        if (_frame != null) {
            // our location is based on the character origin coordinates,
            // so the render offset is the negated origin
            _rxoff = -_xorigin;
            _ryoff = -_yorigin;
        }
    }
    // documentation inherited
    public void cancelMove ()
    {
        super.cancelMove();
        halt();
    }
    // documentation inherited
    protected void pathBeginning ()
    {
        super.pathBeginning();
        // enable walking animation
        setActionSequence(_followingPathAction);
        setAnimationMode(TIME_BASED);
    }
    // documentation inherited
    protected void pathCompleted ()
    {
        super.pathCompleted();
        halt();
    }
    /**
     * Updates the sprite animation frame to reflect the cessation of
     * movement and disables any further animation.
     */
    protected void halt ()
    {
        // disable animation
        setAnimationMode(NO_ANIMATION);
        // come to a halt looking settled and at peace
        setActionSequence(_restingAction);
    }
    /** The action to use when at rest. */
    protected String _restingAction = STANDING;
    /** The action to use when following a path. */
    protected String _followingPathAction = WALKING;
    /** A reference to the descriptor for the character that we're
     * visualizing. */
    protected CharacterDescriptor _descrip;
    /** A reference to the character manager that created us. */
    protected CharacterManager _charmgr;
    /** The action we are currently displaying. */
    protected String _action;
    /** The animation frames for the active action sequence in each
     * orientation. */
    protected MultiFrameImage[] _frames;
    /** The origin of the sprite. */
    protected int _xorigin, _yorigin;
}
|
src/java/com/threerings/cast/CharacterSprite.java
|
//
// $Id: CharacterSprite.java,v 1.22 2002/03/04 22:47:06 mdb Exp $
package com.threerings.cast;
import com.threerings.media.sprite.MultiFrameImage;
import com.threerings.media.sprite.Path;
import com.threerings.media.sprite.Sprite;
/**
 * A character sprite is a sprite that animates itself while walking
 * about in a scene. Animation frames are kept per orientation, and the
 * action sequence in use switches automatically between the resting and
 * path-following actions as the sprite stops and starts moving.
 */
public class CharacterSprite
    extends Sprite implements StandardActions
{
    /**
     * Initializes this character sprite with the specified character
     * descriptor and character manager. It will obtain animation data
     * from the supplied character manager.
     */
    public void init (CharacterDescriptor descrip, CharacterManager charmgr)
    {
        // keep track of this stuff
        _descrip = descrip;
        _charmgr = charmgr;
        // assign an arbitrary starting orientation
        _orient = NORTH;
    }
    /**
     * Specifies the action to use when the sprite is at rest. The default
     * is <code>STANDING</code>.
     */
    public void setRestingAction (String action)
    {
        _restingAction = action;
    }
    /**
     * Specifies the action to use when the sprite is following a path.
     * The default is <code>WALKING</code>.
     */
    public void setFollowingPathAction (String action)
    {
        _followingPathAction = action;
    }
    /**
     * Sets the action sequence used when rendering the character, from
     * the set of available sequences.
     *
     * @param action the name of the action sequence to use.
     * @throws IllegalArgumentException if the character manager knows of
     * no action sequence with the supplied name.
     */
    public void setActionSequence (String action)
    {
        // get a reference to the action sequence so that we can obtain
        // our animation frames and configure our frames per second
        ActionSequence actseq = _charmgr.getActionSequence(action);
        if (actseq == null) {
            String errmsg = "No such action '" + action + "'.";
            throw new IllegalArgumentException(errmsg);
        }
        try {
            // obtain our animation frames for this action sequence
            _frames = _charmgr.getActionFrames(_descrip, action);
            // update the sprite render attributes
            setOrigin(actseq.origin.x, actseq.origin.y);
            setFrameRate(actseq.framesPerSecond);
            setFrames(_frames[_orient]);
        } catch (NoSuchComponentException nsce) {
            // the sprite keeps whatever frames it had before the failure
            Log.warning("Character sprite references non-existent " +
                        "component [sprite=" + this + ", err=" + nsce + "].");
        }
    }
    // documentation inherited
    public void setOrientation (int orient)
    {
        super.setOrientation(orient);
        // update the sprite frames to reflect the direction
        if (_frames != null) {
            setFrames(_frames[orient]);
        }
    }
    /**
     * Sets the origin coordinates representing the "base" of the
     * sprite, which in most cases corresponds to the center of the
     * bottom of the sprite image.
     */
    public void setOrigin (int x, int y)
    {
        _xorigin = x;
        _yorigin = y;
        updateRenderOffset();
        updateRenderOrigin();
    }
    // documentation inherited
    protected void updateRenderOffset ()
    {
        super.updateRenderOffset();
        if (_frame != null) {
            // our location is based on the character origin coordinates,
            // so the render offset is the negated origin
            _rxoff = -_xorigin;
            _ryoff = -_yorigin;
        }
    }
    // documentation inherited
    public void cancelMove ()
    {
        super.cancelMove();
        halt();
    }
    // documentation inherited
    protected void pathBeginning ()
    {
        super.pathBeginning();
        // enable walking animation
        setActionSequence(_followingPathAction);
        setAnimationMode(TIME_BASED);
    }
    // documentation inherited
    protected void pathCompleted ()
    {
        super.pathCompleted();
        halt();
    }
    /**
     * Updates the sprite animation frame to reflect the cessation of
     * movement and disables any further animation.
     */
    protected void halt ()
    {
        // disable animation
        setAnimationMode(NO_ANIMATION);
        // come to a halt looking settled and at peace
        setActionSequence(_restingAction);
    }
    /** The action to use when at rest. */
    protected String _restingAction = STANDING;
    /** The action to use when following a path. */
    protected String _followingPathAction = WALKING;
    /** A reference to the descriptor for the character that we're
     * visualizing. */
    protected CharacterDescriptor _descrip;
    /** A reference to the character manager that created us. */
    protected CharacterManager _charmgr;
    /** The animation frames for the active action sequence in each
     * orientation. */
    protected MultiFrameImage[] _frames;
    /** The origin of the sprite. */
    protected int _xorigin, _yorigin;
}
|
Added support for changing a sprite's character descriptor in situ.
git-svn-id: a1a4b28b82a3276cc491891159dd9963a0a72fae@1110 542714f4-19e9-0310-aa3c-eee0fc999fb1
|
src/java/com/threerings/cast/CharacterSprite.java
|
Added support for changing a sprite's character descriptor in situ.
|
|
Java
|
apache-2.0
|
649e327c59dba37accbf2a0b3c4013a908034860
| 0
|
SpineEventEngine/core-java,SpineEventEngine/core-java,SpineEventEngine/core-java
|
/*
* Copyright 2018, TeamDev Ltd. All rights reserved.
*
* Redistribution and use in source and/or binary forms, with or without
* modification, must retain the above copyright notice and the following
* disclaimer.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package io.spine.server.bus;
import com.google.common.testing.NullPointerTester;
import com.google.protobuf.Any;
import com.google.protobuf.Message;
import io.spine.base.Error;
import io.spine.core.Ack;
import io.spine.core.Rejection;
import io.spine.grpc.MemoizingObserver;
import io.spine.server.bus.given.BusesTestEnv.Exceptions.DeadMessageException;
import io.spine.server.bus.given.BusesTestEnv.Exceptions.FailedValidationException;
import io.spine.server.bus.given.BusesTestEnv.Exceptions.FailingFilterException;
import io.spine.server.bus.given.BusesTestEnv.Filters.FailingFilter;
import io.spine.server.bus.given.BusesTestEnv.Filters.PassingFilter;
import io.spine.server.bus.given.BusesTestEnv.TestMessageBus;
import io.spine.test.bus.BusMessage;
import org.junit.Test;
import java.util.List;
import static io.spine.grpc.StreamObservers.memoizingObserver;
import static io.spine.server.bus.given.BusesTestEnv.STATUS_OK;
import static io.spine.server.bus.given.BusesTestEnv.busMessage;
import static io.spine.server.bus.given.BusesTestEnv.errorType;
import static io.spine.server.bus.given.BusesTestEnv.testContents;
import static io.spine.test.Tests.assertHasPrivateParameterlessCtor;
import static io.spine.test.Verify.assertSize;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Tests the {@link Buses} utility class and the ordering of the message bus
 * filter chain: registered filters run first, then validation, then
 * dead-message detection.
 *
 * @author Dmytro Dashenkov
 */
public class BusesShould {
    @Test
    public void have_private_util_ctor() {
        assertHasPrivateParameterlessCtor(Buses.class);
    }
    @Test
    public void not_accept_nulls() {
        // Non-trivial defaults for the message types let the tester exercise
        // every public static method of Buses.
        new NullPointerTester()
                .setDefault(Message.class, Any.getDefaultInstance())
                .setDefault(Error.class, Error.newBuilder()
                                              .setCode(1)
                                              .build())
                .setDefault(Rejection.class, Rejection.newBuilder()
                                                      .setMessage(Any.getDefaultInstance())
                                                      .build())
                .testAllPublicStaticMethods(Buses.class);
    }
    @Test
    public void deliver_a_valid_message_with_registered_dispatcher() {
        final TestMessageBus bus = busBuilder().build();
        final BusMessage message = busMessage(testContents());
        final MemoizingObserver<Ack> observer = memoizingObserver();
        bus.post(message, observer);
        // a single posted message yields a single OK acknowledgement
        final List<Ack> responses = observer.responses();
        assertSize(1, responses);
        final Ack response = responses.get(0);
        assertEquals(STATUS_OK, response.getStatus());
    }
    @Test
    public void apply_the_validating_filter_prior_to_the_dead_message_filter() {
        // both validation and dead-message detection would reject this
        // message; the validation error is the one that must be reported
        final TestMessageBus deadBusFailingValidation = busBuilder().withNoDispatchers()
                                                                    .failingValidation()
                                                                    .build();
        assertBusPostErrs(deadBusFailingValidation, FailedValidationException.TYPE);
    }
    @Test
    public void apply_registered_filters_prior_to_the_validating_filter() {
        // both the custom filter and validation would reject this message;
        // the custom filter's error is the one that must be reported
        final TestMessageBus deadBusFailingValidation = busBuilder().withNoDispatchers()
                                                                    .failingValidation()
                                                                    .addFilter(new FailingFilter())
                                                                    .build();
        assertBusPostErrs(deadBusFailingValidation, FailingFilterException.TYPE);
    }
    @Test
    public void apply_the_validating_filter() {
        final TestMessageBus busFailingValidation = busBuilder().failingValidation()
                                                                .build();
        assertBusPostErrs(busFailingValidation, FailedValidationException.TYPE);
    }
    @Test
    public void apply_a_registered_filter() {
        final TestMessageBus bus = busBuilder().addFilter(new FailingFilter())
                                               .build();
        assertBusPostErrs(bus, FailingFilterException.TYPE);
    }
    @Test
    public void apply_registered_filters() {
        // every filter ahead of the failing one must still have been invoked
        final PassingFilter passingFilter = new PassingFilter();
        final PassingFilter passingFilter2 = new PassingFilter();
        final TestMessageBus bus = busBuilder().addFilter(passingFilter)
                                               .addFilter(passingFilter2)
                                               .addFilter(new FailingFilter())
                                               .build();
        assertBusPostErrs(bus, FailingFilterException.TYPE);
        assertTrue(passingFilter.passed());
        assertTrue(passingFilter2.passed());
    }
    @Test
    public void apply_the_dead_message_filter() {
        final TestMessageBus deadBus = busBuilder().withNoDispatchers()
                                                   .build();
        assertBusPostErrs(deadBus, DeadMessageException.TYPE);
    }
    private static TestMessageBus.Builder busBuilder() {
        return TestMessageBus.newBuilder();
    }
    /**
     * Asserts that bus acknowledges the error when posting a single message.
     *
     * @param bus the bus to post a test message to.
     * @param type the expected error type of the single acknowledgement.
     */
    private static void assertBusPostErrs(TestMessageBus bus, String type) {
        final BusMessage message = busMessage(testContents());
        final MemoizingObserver<Ack> observer = memoizingObserver();
        bus.post(message, observer);
        final List<Ack> responses = observer.responses();
        assertSize(1, responses);
        final Ack response = responses.get(0);
        assertEquals(type, errorType(response));
        // a rejected message must not reach the bus storage
        assertSize(0, bus.storedMessages());
    }
}
|
server/src/test/java/io/spine/server/bus/BusesShould.java
|
/*
* Copyright 2018, TeamDev Ltd. All rights reserved.
*
* Redistribution and use in source and/or binary forms, with or without
* modification, must retain the above copyright notice and the following
* disclaimer.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package io.spine.server.bus;
import com.google.common.testing.NullPointerTester;
import com.google.protobuf.Any;
import com.google.protobuf.Message;
import io.spine.base.Error;
import io.spine.core.Ack;
import io.spine.core.Rejection;
import io.spine.grpc.MemoizingObserver;
import io.spine.server.bus.given.BusesTestEnv.Exceptions.DeadMessageException;
import io.spine.server.bus.given.BusesTestEnv.Exceptions.FailedValidationException;
import io.spine.server.bus.given.BusesTestEnv.Exceptions.FailingFilterException;
import io.spine.server.bus.given.BusesTestEnv.Filters.FailingFilter;
import io.spine.server.bus.given.BusesTestEnv.Filters.PassingFilter;
import io.spine.server.bus.given.BusesTestEnv.TestMessageBus;
import io.spine.test.bus.BusMessage;
import org.junit.Test;
import java.util.List;
import static io.spine.grpc.StreamObservers.memoizingObserver;
import static io.spine.server.bus.given.BusesTestEnv.STATUS_OK;
import static io.spine.server.bus.given.BusesTestEnv.busMessage;
import static io.spine.server.bus.given.BusesTestEnv.errorType;
import static io.spine.server.bus.given.BusesTestEnv.testContents;
import static io.spine.test.Tests.assertHasPrivateParameterlessCtor;
import static io.spine.test.Verify.assertSize;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Tests the {@link Buses} utility class and the ordering of the message bus
 * filter chain: registered filters run first, then validation, then
 * dead-message detection.
 *
 * @author Dmytro Dashenkov
 */
public class BusesShould {
    @Test
    public void have_private_util_ctor() {
        assertHasPrivateParameterlessCtor(Buses.class);
    }
    @Test
    public void not_accept_nulls() {
        // Non-trivial defaults for the message types let the tester exercise
        // every public static method of Buses.
        new NullPointerTester()
                .setDefault(Message.class, Any.getDefaultInstance())
                .setDefault(Error.class, Error.newBuilder()
                                              .setCode(1)
                                              .build())
                .setDefault(Rejection.class, Rejection.newBuilder()
                                                      .setMessage(Any.getDefaultInstance())
                                                      .build())
                .testAllPublicStaticMethods(Buses.class);
    }
    @Test
    public void deliver_a_valid_message_with_registered_dispatcher() {
        final TestMessageBus bus = busBuilder().build();
        final BusMessage message = busMessage(testContents());
        final MemoizingObserver<Ack> observer = memoizingObserver();
        bus.post(message, observer);
        // a single posted message yields a single OK acknowledgement
        final List<Ack> responses = observer.responses();
        assertSize(1, responses);
        final Ack response = responses.get(0);
        assertEquals(STATUS_OK, response.getStatus());
    }
    @Test
    public void apply_the_validating_filter_prior_to_the_dead_message_filter() {
        // both validation and dead-message detection would reject this
        // message; the validation error is the one that must be reported
        final TestMessageBus deadBusFailingValidation = busBuilder().withNoDispatchers()
                                                                    .failingValidation()
                                                                    .build();
        testBusForError(deadBusFailingValidation, FailedValidationException.TYPE);
    }
    @Test
    public void apply_registered_filters_prior_to_the_validating_filter() {
        // both the custom filter and validation would reject this message;
        // the custom filter's error is the one that must be reported
        final TestMessageBus deadBusFailingValidation = busBuilder().withNoDispatchers()
                                                                    .failingValidation()
                                                                    .addFilter(new FailingFilter())
                                                                    .build();
        testBusForError(deadBusFailingValidation, FailingFilterException.TYPE);
    }
    @Test
    public void apply_the_validating_filter() {
        final TestMessageBus busFailingValidation = busBuilder().failingValidation()
                                                                .build();
        testBusForError(busFailingValidation, FailedValidationException.TYPE);
    }
    @Test
    public void apply_a_registered_filter() {
        final TestMessageBus bus = busBuilder().addFilter(new FailingFilter())
                                               .build();
        testBusForError(bus, FailingFilterException.TYPE);
    }
    @Test
    public void apply_registered_filters() {
        // every filter ahead of the failing one must still have been invoked
        final PassingFilter filter1 = new PassingFilter();
        final PassingFilter filter2 = new PassingFilter();
        final TestMessageBus bus = busBuilder().addFilter(filter1)
                                               .addFilter(filter2)
                                               .addFilter(new FailingFilter())
                                               .build();
        testBusForError(bus, FailingFilterException.TYPE);
        assertTrue(filter1.passed());
        assertTrue(filter2.passed());
    }
    @Test
    public void apply_the_dead_message_filter() {
        final TestMessageBus deadBus = busBuilder().withNoDispatchers()
                                                   .build();
        testBusForError(deadBus, DeadMessageException.TYPE);
    }
    private static TestMessageBus.Builder busBuilder() {
        return TestMessageBus.newBuilder();
    }
    /**
     * Asserts that bus acknowledges the error when posting a single message.
     *
     * @param bus the bus to post a test message to.
     * @param type the expected error type of the single acknowledgement.
     */
    private static void testBusForError(TestMessageBus bus, String type) {
        final BusMessage message = busMessage(testContents());
        final MemoizingObserver<Ack> observer = memoizingObserver();
        bus.post(message, observer);
        final List<Ack> responses = observer.responses();
        assertSize(1, responses);
        final Ack response = responses.get(0);
        assertEquals(type, errorType(response));
        // a rejected message must not reach the bus storage
        assertSize(0, bus.storedMessages());
    }
}
|
Rename `testBusForError` to `assertBusPostErrs`.
|
server/src/test/java/io/spine/server/bus/BusesShould.java
|
Rename `testBusForError` to `assertBusPostErrs`.
|
|
Java
|
apache-2.0
|
52f8efa3a4b25c875650fd8a104642818e8e6ef6
| 0
|
zace-yuan/retrofit,c0deh4xor/retrofit,enieber/retrofit,ggchxx/retrofit,YlJava110/retrofit,zhupengGitHub/retrofit,b-cuts/retrofit,aurae/retrofit,vignesh-iopex/retrofit,java02014/retrofit,yoslabs/retrofit,avbk/retrofit,janzoner/retrofit,xfumihiro/retrofit,square/retrofit,zace-yuan/retrofit,equinoxel/retrofit,ianrumac/retrofit,nsmolenskii/retrofit,hgl888/retrofit,elijah513/retrofit,davidcrotty/retrofit,ztelur/retrofit,nsmolenskii/retrofit,pmk2429/retrofit,siilobv/retrofit,f2prateek/retrofit,bhargav1/retrofit,square/retrofit,java02014/retrofit,Jackear/retrofit,equinoxel/retrofit,thangtc/retrofit,michelangelo13/retrofit,cnso/retrofit,cnso/retrofit,janzoner/retrofit,checkdroid/retrofit,viacheslavokolitiy/retrofit,Appstrakt/retrofit,c0deh4xor/retrofit,mbStavola/retrofit,enieber/retrofit,huihui4045/retrofit,ggchxx/retrofit,loiclefloch/retrofit,cnso/retrofit,NikoYuwono/retrofit,murat8505/REST_client_retrofit,msdgwzhy6/retrofit,Pannarrow/retrofit,dlew/retrofit,xfumihiro/retrofit,squery/retrofit,vamsirajendra/retrofit,ztelur/retrofit,yuhuayi/retrofit,deshion/retrofit,zero21ke/retrofit,PlumaBrava/retrofit,google/retrofit,tmxdyf/retrofit,airbnb/retrofit,ltshddx/retrofit,J-Sizzle/retrofit,mbStavola/retrofit,dmitryustimov/retrofit,squery/retrofit,ChinaKim/retrofit,siilobv/retrofit,vignesh-iopex/retrofit,MaTriXy/retrofit,maany/retrofit,FilippoMito/retrofit,PlumaBrava/retrofit,melbic/retrofit,Gary111/retrofit,Sellegit/retrofit,michelangelo13/retrofit,vignesh-iopex/retrofit,huihui4045/retrofit,AungWinnHtut/retrofit,squery/retrofit,nsmolenskii/retrofit,timehop/retrofit,aurae/retrofit,Jackear/retrofit,shermax/retrofit,andforce/retrofit,xfumihiro/retrofit,junenn/retrofit,yongjhih/retrofit2,wlrhnh-David/retrofit,java02014/retrofit,segmentio/retrofit,michelangelo13/retrofit,fjg1989/retrofit,andypliu/retrofit,bestwpw/retrofit,zero21ke/retrofit,GovindaPaliwal/retrofit,junenn/retrofit,xsingHu/retrofit,yoslabs/retrofit,NightlyNexus/retrofit,msdgwzhy6/retrofit,WiiliamChik/retrofit,yuhuayi/retr
ofit,ze-pequeno/retrofit,barodapride/retrofit,sunios/retrofit,lgx0955/retrofit,ltshddx/retrofit,guoGavin/retrofit,f2prateek/retrofit,larsgrefer/retrofit,Sellegit/retrofit,NightlyNexus/retrofit,bestwpw/retrofit,timehop/retrofit,lncosie/retrofit,thangtc/retrofit,larsgrefer/retrofit,zhupengGitHub/retrofit,square/retrofit,jimxj/retrofit,bhargav1/retrofit,segmentio/retrofit,c0deh4xor/retrofit,iagreen/retrofit,zero21ke/retrofit,wanjingyan001/retrofit,deshion/retrofit,bhargav1/retrofit,square/retrofit,ajju4455/retrofit,lemaiyan/retrofit,aurae/retrofit,sitexa/retrofit,msdgwzhy6/retrofit,benoitdion/retrofit,NikoYuwono/retrofit,PlumaBrava/retrofit,bestwpw/retrofit,thangtc/retrofit,vabym8/NotRetrofit,TomkeyZhang/retrofit,andforce/retrofit,ruhaly/retrofit,junenn/retrofit,fjg1989/retrofit,MaTriXy/retrofit,sitexa/retrofit,melbic/retrofit,wlrhnh-David/retrofit,geekinpink/retrofit,dlew/retrofit,vamsirajendra/retrofit,YlJava110/retrofit,AungWinnHtut/retrofit,maany/retrofit,juliendn/retrofit,Sellegit/retrofit,FilippoMito/retrofit,juliendn/retrofit,J-Sizzle/retrofit,Pannarrow/retrofit,tmxdyf/retrofit,ruhaly/retrofit,juliendn/retrofit,AmauryEsparza/retrofit,lemaiyan/retrofit,JunyiZhou/retrofit,Heart2009/retrofit,JunyiZhou/retrofit,GovindaPaliwal/retrofit,huihui4045/retrofit,Pannarrow/retrofit,google/retrofit,larsgrefer/retrofit,wanjingyan001/retrofit,barodapride/retrofit,noikiy/retrofit,avbk/retrofit,Gary111/retrofit,guoGavin/retrofit,lemaiyan/retrofit,loiclefloch/retrofit,MaTriXy/retrofit,jianxiansining/retrofit,pitatensai/retrofit,Heart2009/retrofit,yuhuayi/retrofit,xsingHu/retrofit,artem-zinnatullin/retrofit,wanjingyan001/retrofit,xiaozuzu/retrofit,hgl888/retrofit,pmk2429/retrofit,iagreen/retrofit,sarvex/retrofit,renatohsc/retrofit,Appstrakt/retrofit,b-cuts/retrofit,ianrumac/retrofit,barodapride/retrofit,leasual/retrofit,enieber/retrofit,ajju4455/retrofit,andypliu/retrofit,10045125/retrofit,ltshddx/retrofit,WiiliamChik/retrofit,hgl888/retrofit,benoitdion/retrofit,promeG/retrofit,loi
clefloch/retrofit,shauvik/retrofit,jimxj/retrofit,geekinpink/retrofit,ruhaly/retrofit,guoGavin/retrofit,zheng1733/retrofit,YOLOSPAGHETTI/final-project,WiiliamChik/retrofit,AungWinnHtut/retrofit,shihabmi7/retrofit,shihabmi7/retrofit,chundongwang/retrofit,deshion/retrofit,renatohsc/retrofit,tmxdyf/retrofit,chundongwang/retrofit,maduhu/retrofit,davidcrotty/retrofit,messipuyol/retrofit,fjg1989/retrofit,checkdroid/retrofit,lncosie/retrofit,J-Sizzle/retrofit,sitexa/retrofit,viacheslavokolitiy/retrofit,YlJava110/retrofit,lichblitz/retrofit,maduhu/retrofit,shauvik/retrofit,dmitryustimov/retrofit,mbStavola/retrofit,airbnb/retrofit,viacheslavokolitiy/retrofit,YOLOSPAGHETTI/final-project,ianrumac/retrofit,pmk2429/retrofit,murat8505/REST_client_retrofit,artem-zinnatullin/retrofit,dmitryustimov/retrofit,geekinpink/retrofit,f2prateek/retrofit,ze-pequeno/retrofit,Heart2009/retrofit,Jackear/retrofit,artem-zinnatullin/retrofit,pitatensai/retrofit,sarvex/retrofit,promeG/retrofit,equinoxel/retrofit,YOLOSPAGHETTI/final-project,checkdroid/retrofit,andforce/retrofit,sunios/retrofit,jianxiansining/retrofit,xiaozuzu/retrofit,shihabmi7/retrofit,vamsirajendra/retrofit,xiaomeixw/NotRetrofit,FilippoMito/retrofit,ajju4455/retrofit,sarvex/retrofit,zace-yuan/retrofit,TomkeyZhang/retrofit,wlrhnh-David/retrofit,chundongwang/retrofit,iagreen/retrofit,murat8505/REST_client_retrofit,pitatensai/retrofit,jimxj/retrofit,JunyiZhou/retrofit,Gary111/retrofit,siilobv/retrofit,elijah513/retrofit,renatohsc/retrofit,NikoYuwono/retrofit,shauvik/retrofit,Gaantz/retrofit,maany/retrofit,maduhu/retrofit,ChinaKim/retrofit,janzoner/retrofit,ChinaKim/retrofit,b-cuts/retrofit,dlew/retrofit,lichblitz/retrofit,zheng1733/retrofit,messipuyol/retrofit,messipuyol/retrofit,melbic/retrofit,lgx0955/retrofit,airbnb/retrofit,ztelur/retrofit,jianxiansining/retrofit,NightlyNexus/retrofit,zheng1733/retrofit,noikiy/retrofit,ze-pequeno/retrofit,ze-pequeno/retrofit,yoslabs/retrofit,lichblitz/retrofit,Gaantz/retrofit,noikiy/retrofit,xsin
gHu/retrofit,davidcrotty/retrofit,GovindaPaliwal/retrofit,zhupengGitHub/retrofit,leasual/retrofit,Gaantz/retrofit,segmentio/retrofit,yongjhih/NotRetrofit,leasual/retrofit,shermax/retrofit,ggchxx/retrofit,10045125/retrofit,lncosie/retrofit,promeG/retrofit,avbk/retrofit,andypliu/retrofit,shermax/retrofit,xiaozuzu/retrofit,elijah513/retrofit,AmauryEsparza/retrofit,Appstrakt/retrofit,lgx0955/retrofit,sunios/retrofit
|
package retrofit.http;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.ResponseHandler;
import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
import org.apache.http.client.methods.HttpUriRequest;
import javax.inject.Provider;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.lang.reflect.WildcardType;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicReference;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Converts Java method calls to Rest calls.
*
* @author Bob Lee (bob@squareup.com)
*/
public class RestAdapter {
    private static final Logger LOGGER = Logger.getLogger(RestAdapter.class.getName());
    /** Per-thread time-of-day formatter ("HH:mm:ss") used when logging
     * request start times; a ThreadLocal is used because SimpleDateFormat
     * instances are not safe for concurrent use. */
    static final ThreadLocal<DateFormat> DATE_FORMAT = new ThreadLocal<DateFormat>() {
        @Override protected DateFormat initialValue() {
            return new SimpleDateFormat("HH:mm:ss");
        }
    };
    // Collaborators, all supplied at construction time and immutable after.
    private final Server server;
    private final Provider<HttpClient> httpClientProvider;
    private final Executor executor; // runs HTTP requests off the caller's thread
    private final MainThread mainThread; // delivers callbacks on the UI thread
    private final Headers headers;
    private final Converter converter;
    private final HttpProfiler profiler;
    /**
     * Creates a new REST adapter.
     *
     * @param server provides the base API URL for requests.
     * @param httpClientProvider supplies the HTTP client used to execute requests.
     * @param executor executes HTTP requests in the background.
     * @param mainThread used to deliver callbacks in the UI thread.
     * @param headers headers attached to each outgoing request.
     * @param converter converts between HTTP bodies and Java objects.
     * @param profiler profiles server calls; pass {@code HttpProfiler.NONE} to disable profiling.
     */
    public RestAdapter(Server server, Provider<HttpClient> httpClientProvider, Executor executor, MainThread mainThread,
            Headers headers, Converter converter, HttpProfiler profiler) {
        this.server = server;
        this.httpClientProvider = httpClientProvider;
        this.executor = executor;
        this.mainThread = mainThread;
        this.headers = headers;
        this.converter = converter;
        this.profiler = profiler;
    }
    /**
     * Adapts a Java interface to a REST API. HTTP requests happen in a background thread. Callbacks
     * happen in the UI thread.
     *
     * <p>Gets the relative path for a given method from a {@link GET}, {@link POST}, {@link PUT}, or
     * {@link DELETE} annotation on the method. Gets the names of URL parameters from {@link
     * javax.inject.Named} annotations on the method parameters.
     *
     * <p>The last method parameter should be of type {@link Callback}. The JSON HTTP response will be
     * converted to the callback's parameter type using GSON. If the callback parameter type uses a
     * wildcard, the lower bound will be used as the conversion type.
     *
     * <p>For example:
     *
     * <pre>
     *   public interface MyApi {
     *     &#64;POST("go") public void go(@Named("a") String a, @Named("b") int b,
     *         Callback&lt;? super MyResult> callback);
     *   }
     * </pre>
     *
     * @param type to implement
     */
    @SuppressWarnings("unchecked")
    public <T> T create(Class<T> type) {
        // A dynamic proxy routes every method of the interface through RestHandler.
        return (T) Proxy.newProxyInstance(type.getClassLoader(),
                new Class<?>[] {type}, new RestHandler());
    }
private class RestHandler implements InvocationHandler {
private final Map<Method, Type> responseTypeCache = new HashMap<Method, Type>();
@Override public Object invoke(Object proxy, final Method method, final Object[] args) {
// Determine whether or not the execution will be synchronous.
boolean isSynchronousInvocation = methodWantsSynchronousInvocation(method);
if (isSynchronousInvocation) {
// TODO support synchronous invocations!
throw new UnsupportedOperationException("Synchronous invocation not supported.");
}
// Construct HTTP request.
final Callback<?> callback =
UiCallback.create((Callback<?>) args[args.length - 1], mainThread);
String url = server.apiUrl();
String startTime = "NULL";
try {
// Build the request and headers.
final HttpUriRequest request = new HttpRequestBuilder(converter) //
.setMethod(method)
.setArgs(args)
.setApiUrl(server.apiUrl())
.setHeaders(headers)
.build();
url = request.getURI().toString();
// Determine deserialization type by method return type or generic parameter to Callback argument.
Type type = responseTypeCache.get(method);
if (type == null) {
type = getResponseObjectType(method, isSynchronousInvocation);
responseTypeCache.put(method, type);
}
LOGGER.fine("Sending " + request.getMethod() + " to " + request.getURI());
final Date start = new Date();
startTime = DATE_FORMAT.get().format(start);
ResponseHandler<Void> rh = new CallbackResponseHandler(callback, type, converter, url, start, DATE_FORMAT);
// Optionally wrap the response handler for server call profiling.
if (profiler != HttpProfiler.NONE) {
rh = createProfiler(rh, (HttpProfiler<?>) profiler, getRequestInfo(method, request), start);
}
// Execute HTTP request in the background.
final String finalUrl = url;
final String finalStartTime = startTime;
final ResponseHandler<Void> finalResponseHandler = rh;
executor.execute(new Runnable() {
@Override public void run() {
invokeRequest(request, finalResponseHandler, callback, finalUrl, finalStartTime);
}
});
} catch (Throwable t) {
LOGGER.log(Level.WARNING, t.getMessage() + " from " + url + " at " + startTime, t);
callback.unexpectedError(t);
}
// Methods should return void.
return null;
}
private HttpProfiler.RequestInformation getRequestInfo(Method method, HttpUriRequest request) {
RequestLine requestLine = RequestLine.fromMethod(method);
HttpMethodType httpMethod = requestLine.getHttpMethod();
HttpProfiler.Method profilerMethod = httpMethod.profilerMethod();
long contentLength = 0;
String contentType = null;
if (request instanceof HttpEntityEnclosingRequestBase) {
HttpEntityEnclosingRequestBase entityReq = (HttpEntityEnclosingRequestBase) request;
HttpEntity entity = entityReq.getEntity();
contentLength = entity.getContentLength();
Header entityContentType = entity.getContentType();
contentType = entityContentType != null ? entityContentType.getValue() : null;
}
return new HttpProfiler.RequestInformation(profilerMethod, server.apiUrl(), requestLine.getRelativePath(),
contentLength, contentType);
}
private void invokeRequest(HttpUriRequest request, ResponseHandler<Void> rh,
Callback<?> callback, String url, String startTime) {
try {
httpClientProvider.get().execute(request, rh);
} catch (IOException e) {
LOGGER.log(Level.WARNING, e.getMessage() + " from " + url + " at " + startTime, e);
callback.networkError();
} catch (Throwable t) {
LOGGER.log(Level.WARNING, t.getMessage() + " from " + url + " at " + startTime, t);
callback.unexpectedError(t);
}
}
/** Wraps a {@code GsonResponseHandler} with a {@code ProfilingResponseHandler}. */
private <T> ProfilingResponseHandler<T> createProfiler(ResponseHandler<Void> handlerToWrap,
HttpProfiler<T> profiler, HttpProfiler.RequestInformation requestInfo, Date start) {
ProfilingResponseHandler<T> responseHandler = new ProfilingResponseHandler<T>(handlerToWrap, profiler,
requestInfo, start.getTime());
responseHandler.beforeCall();
return responseHandler;
}
}
/**
* Determine whether or not execution for a method should be done synchronously.
*
* @throws IllegalArgumentException if the supplied {@code method} has both a return type and {@link Callback}
* argument or neither of the two.
*/
static boolean methodWantsSynchronousInvocation(Method method) {
boolean hasReturnType = method.getReturnType() != void.class;
Class<?>[] parameterTypes = method.getParameterTypes();
boolean hasCallback = parameterTypes.length > 0
&& Callback.class.isAssignableFrom(parameterTypes[parameterTypes.length - 1]);
if ((hasReturnType && hasCallback) || (!hasReturnType && !hasCallback)) {
throw new IllegalArgumentException("Method must have either a return type or Callback as last argument.");
}
return hasReturnType;
}
/** Get the callback parameter types. */
static Type getResponseObjectType(Method method, boolean isSynchronousInvocation) {
if (isSynchronousInvocation) {
return method.getGenericReturnType();
}
Type[] parameterTypes = method.getGenericParameterTypes();
Type callbackType = parameterTypes[parameterTypes.length - 1];
Class<?> callbackClass;
if (callbackType instanceof Class) {
callbackClass = (Class<?>) callbackType;
} else if (callbackType instanceof ParameterizedType) {
callbackClass = (Class<?>) ((ParameterizedType) callbackType).getRawType();
} else {
throw new ClassCastException(
String.format("Last parameter of %s must be a Class or ParameterizedType", method));
}
if (Callback.class.isAssignableFrom(callbackClass)) {
callbackType = Types.getGenericSupertype(callbackType, callbackClass, Callback.class);
if (callbackType instanceof ParameterizedType) {
Type[] types = ((ParameterizedType) callbackType).getActualTypeArguments();
for (int i = 0; i < types.length; i++) {
Type type = types[i];
if (type instanceof WildcardType) {
types[i] = ((WildcardType) type).getUpperBounds()[0];
}
}
return types[0];
}
}
throw new IllegalArgumentException(
String.format("Last parameter of %s must be of type Callback<X,Y,Z> or Callback<? super X,..,..>.", method));
}
/** Sends server call times and response status codes to {@link HttpProfiler}. */
private static class ProfilingResponseHandler<T> implements ResponseHandler<Void> {
private final ResponseHandler<Void> delegate;
private final HttpProfiler<T> profiler;
private final HttpProfiler.RequestInformation requestInfo;
private final long startTime;
private final AtomicReference<T> beforeCallData = new AtomicReference<T>();
/** Wraps the delegate response handler. */
private ProfilingResponseHandler(ResponseHandler<Void> delegate, HttpProfiler<T> profiler,
HttpProfiler.RequestInformation requestInfo, long startTime) {
this.delegate = delegate;
this.profiler = profiler;
this.requestInfo = requestInfo;
this.startTime = startTime;
}
public void beforeCall() {
try {
beforeCallData.set(profiler.beforeCall());
} catch (Exception e) {
LOGGER.log(Level.SEVERE, "Error occurred in HTTP profiler beforeCall().", e);
}
}
@Override public Void handleResponse(HttpResponse httpResponse) throws IOException {
// Intercept the response and send data to profiler.
long elapsedTime = System.currentTimeMillis() - startTime;
int statusCode = httpResponse.getStatusLine().getStatusCode();
try {
profiler.afterCall(requestInfo, elapsedTime, statusCode, beforeCallData.get());
} catch (Exception e) {
LOGGER.log(Level.SEVERE, "Error occurred in HTTP profiler afterCall().", e);
}
// Pass along the response to the normal handler.
return delegate.handleResponse(httpResponse);
}
}
}
|
http/src/main/java/retrofit/http/RestAdapter.java
|
package retrofit.http;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.ResponseHandler;
import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
import org.apache.http.client.methods.HttpUriRequest;
import javax.inject.Inject;
import javax.inject.Provider;
import javax.inject.Singleton;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.lang.reflect.WildcardType;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicReference;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Converts Java method calls to Rest calls.
*
* @author Bob Lee (bob@squareup.com)
*/
@Singleton public class RestAdapter {
private static final Logger LOGGER = Logger.getLogger(RestAdapter.class.getName());
static final ThreadLocal<DateFormat> DATE_FORMAT = new ThreadLocal<DateFormat>() {
@Override protected DateFormat initialValue() {
return new SimpleDateFormat("HH:mm:ss");
}
};
private final Server server;
private final Provider<HttpClient> httpClientProvider;
private final Executor executor;
private final MainThread mainThread;
private final Headers headers;
private final Converter converter;
private final HttpProfiler profiler;
@Inject public RestAdapter(Server server, Provider<HttpClient> httpClientProvider, Executor executor,
MainThread mainThread, Headers headers, Converter converter, HttpProfiler profiler) {
this.server = server;
this.httpClientProvider = httpClientProvider;
this.executor = executor;
this.mainThread = mainThread;
this.headers = headers;
this.converter = converter;
this.profiler = profiler;
}
/**
* Adapts a Java interface to a REST API. HTTP requests happen in a background thread. Callbacks
* happen in the UI thread.
*
* <p>Gets the relative path for a given method from a {@link GET}, {@link POST}, {@link PUT}, or
* {@link DELETE} annotation on the method. Gets the names of URL parameters from {@link
* javax.inject.Named} annotations on the method parameters.
*
* <p>The last method parameter should be of type {@link Callback}. The JSON HTTP response will be
* converted to the callback's parameter type using GSON. If the callback parameter type uses a
* wildcard, the lower bound will be used as the conversion type.
*
* <p>For example:
*
* <pre>
* public interface MyApi {
* @POST("go") public void go(@Named("a") String a, @Named("b") int b,
* Callback<? super MyResult> callback);
* }
* </pre>
*
* @param type to implement
*/
@SuppressWarnings("unchecked")
public <T> T create(Class<T> type) {
return (T) Proxy.newProxyInstance(type.getClassLoader(),
new Class<?>[] {type}, new RestHandler());
}
private class RestHandler implements InvocationHandler {
private final Map<Method, Type> responseTypeCache = new HashMap<Method, Type>();
@Override public Object invoke(Object proxy, final Method method, final Object[] args) {
// Determine whether or not the execution will be synchronous.
boolean isSynchronousInvocation = methodWantsSynchronousInvocation(method);
if (isSynchronousInvocation) {
// TODO support synchronous invocations!
throw new UnsupportedOperationException("Synchronous invocation not supported.");
}
// Construct HTTP request.
final Callback<?> callback =
UiCallback.create((Callback<?>) args[args.length - 1], mainThread);
String url = server.apiUrl();
String startTime = "NULL";
try {
// Build the request and headers.
final HttpUriRequest request = new HttpRequestBuilder(converter) //
.setMethod(method)
.setArgs(args)
.setApiUrl(server.apiUrl())
.setHeaders(headers)
.build();
url = request.getURI().toString();
// Determine deserialization type by method return type or generic parameter to Callback argument.
Type type = responseTypeCache.get(method);
if (type == null) {
type = getResponseObjectType(method, isSynchronousInvocation);
responseTypeCache.put(method, type);
}
LOGGER.fine("Sending " + request.getMethod() + " to " + request.getURI());
final Date start = new Date();
startTime = DATE_FORMAT.get().format(start);
ResponseHandler<Void> rh = new CallbackResponseHandler(callback, type, converter, url, start, DATE_FORMAT);
// Optionally wrap the response handler for server call profiling.
if (profiler != HttpProfiler.NONE) {
rh = createProfiler(rh, (HttpProfiler<?>) profiler, getRequestInfo(method, request), start);
}
// Execute HTTP request in the background.
final String finalUrl = url;
final String finalStartTime = startTime;
final ResponseHandler<Void> finalResponseHandler = rh;
executor.execute(new Runnable() {
@Override public void run() {
invokeRequest(request, finalResponseHandler, callback, finalUrl, finalStartTime);
}
});
} catch (Throwable t) {
LOGGER.log(Level.WARNING, t.getMessage() + " from " + url + " at " + startTime, t);
callback.unexpectedError(t);
}
// Methods should return void.
return null;
}
private HttpProfiler.RequestInformation getRequestInfo(Method method, HttpUriRequest request) {
RequestLine requestLine = RequestLine.fromMethod(method);
HttpMethodType httpMethod = requestLine.getHttpMethod();
HttpProfiler.Method profilerMethod = httpMethod.profilerMethod();
long contentLength = 0;
String contentType = null;
if (request instanceof HttpEntityEnclosingRequestBase) {
HttpEntityEnclosingRequestBase entityReq = (HttpEntityEnclosingRequestBase) request;
HttpEntity entity = entityReq.getEntity();
contentLength = entity.getContentLength();
Header entityContentType = entity.getContentType();
contentType = entityContentType != null ? entityContentType.getValue() : null;
}
return new HttpProfiler.RequestInformation(profilerMethod, server.apiUrl(), requestLine.getRelativePath(),
contentLength, contentType);
}
private void invokeRequest(HttpUriRequest request, ResponseHandler<Void> rh,
Callback<?> callback, String url, String startTime) {
try {
httpClientProvider.get().execute(request, rh);
} catch (IOException e) {
LOGGER.log(Level.WARNING, e.getMessage() + " from " + url + " at " + startTime, e);
callback.networkError();
} catch (Throwable t) {
LOGGER.log(Level.WARNING, t.getMessage() + " from " + url + " at " + startTime, t);
callback.unexpectedError(t);
}
}
/** Wraps a {@code GsonResponseHandler} with a {@code ProfilingResponseHandler}. */
private <T> ProfilingResponseHandler<T> createProfiler(ResponseHandler<Void> handlerToWrap,
HttpProfiler<T> profiler, HttpProfiler.RequestInformation requestInfo, Date start) {
ProfilingResponseHandler<T> responseHandler = new ProfilingResponseHandler<T>(handlerToWrap, profiler,
requestInfo, start.getTime());
responseHandler.beforeCall();
return responseHandler;
}
}
/**
* Determine whether or not execution for a method should be done synchronously.
*
* @throws IllegalArgumentException if the supplied {@code method} has both a return type and {@link Callback}
* argument or neither of the two.
*/
static boolean methodWantsSynchronousInvocation(Method method) {
boolean hasReturnType = method.getReturnType() != void.class;
Class<?>[] parameterTypes = method.getParameterTypes();
boolean hasCallback = parameterTypes.length > 0
&& Callback.class.isAssignableFrom(parameterTypes[parameterTypes.length - 1]);
if ((hasReturnType && hasCallback) || (!hasReturnType && !hasCallback)) {
throw new IllegalArgumentException("Method must have either a return type or Callback as last argument.");
}
return hasReturnType;
}
/** Get the callback parameter types. */
static Type getResponseObjectType(Method method, boolean isSynchronousInvocation) {
if (isSynchronousInvocation) {
return method.getGenericReturnType();
}
Type[] parameterTypes = method.getGenericParameterTypes();
Type callbackType = parameterTypes[parameterTypes.length - 1];
Class<?> callbackClass;
if (callbackType instanceof Class) {
callbackClass = (Class<?>) callbackType;
} else if (callbackType instanceof ParameterizedType) {
callbackClass = (Class<?>) ((ParameterizedType) callbackType).getRawType();
} else {
throw new ClassCastException(
String.format("Last parameter of %s must be a Class or ParameterizedType", method));
}
if (Callback.class.isAssignableFrom(callbackClass)) {
callbackType = Types.getGenericSupertype(callbackType, callbackClass, Callback.class);
if (callbackType instanceof ParameterizedType) {
Type[] types = ((ParameterizedType) callbackType).getActualTypeArguments();
for (int i = 0; i < types.length; i++) {
Type type = types[i];
if (type instanceof WildcardType) {
types[i] = ((WildcardType) type).getUpperBounds()[0];
}
}
return types[0];
}
}
throw new IllegalArgumentException(
String.format("Last parameter of %s must be of type Callback<X,Y,Z> or Callback<? super X,..,..>.", method));
}
/** Sends server call times and response status codes to {@link HttpProfiler}. */
private static class ProfilingResponseHandler<T> implements ResponseHandler<Void> {
private final ResponseHandler<Void> delegate;
private final HttpProfiler<T> profiler;
private final HttpProfiler.RequestInformation requestInfo;
private final long startTime;
private final AtomicReference<T> beforeCallData = new AtomicReference<T>();
/** Wraps the delegate response handler. */
private ProfilingResponseHandler(ResponseHandler<Void> delegate, HttpProfiler<T> profiler,
HttpProfiler.RequestInformation requestInfo, long startTime) {
this.delegate = delegate;
this.profiler = profiler;
this.requestInfo = requestInfo;
this.startTime = startTime;
}
public void beforeCall() {
try {
beforeCallData.set(profiler.beforeCall());
} catch (Exception e) {
LOGGER.log(Level.SEVERE, "Error occurred in HTTP profiler beforeCall().", e);
}
}
@Override public Void handleResponse(HttpResponse httpResponse) throws IOException {
// Intercept the response and send data to profiler.
long elapsedTime = System.currentTimeMillis() - startTime;
int statusCode = httpResponse.getStatusLine().getStatusCode();
try {
profiler.afterCall(requestInfo, elapsedTime, statusCode, beforeCallData.get());
} catch (Exception e) {
LOGGER.log(Level.SEVERE, "Error occurred in HTTP profiler afterCall().", e);
}
// Pass along the response to the normal handler.
return delegate.handleResponse(httpResponse);
}
}
}
|
Remove support for implicit injection.
|
http/src/main/java/retrofit/http/RestAdapter.java
|
Remove support for implicit injection.
|
|
Java
|
apache-2.0
|
c89358f3b7b8f3fa5cf75db3e4e908bef8e34b11
| 0
|
nyholmniklas/finvoice2csv
|
package finvoice2csv;
import static org.junit.Assert.*;
import java.io.File;
import java.math.BigDecimal;
import org.junit.Before;
import org.junit.Test;
import org.niklas.finvoice2csv.model.Finvoice;
import org.niklas.finvoice2csv.util.mappers.Xml2ModelMapper;
import org.niklas.finvoice2csv.util.mappers.Xml2ModelMapperImpl;
/**
 * Verifies that {@link Xml2ModelMapperImpl} maps each field of the sample Finvoice XML
 * fixture into the {@link Finvoice} model.
 */
public class Xml2ModelMapperTest {
  private Finvoice finvoice;

  /** Parses the repo-relative sample invoice once before each test. */
  @Before
  public void setUp() {
    Xml2ModelMapper mapper = new Xml2ModelMapperImpl();
    finvoice = mapper.getFinvoiceFromXml(new File("test/res/lasku.xml"));
  }

  @Test
  public void testBuyerPartyIdentifier() {
    // assertEquals (not assertTrue(..equals..)) so failures report expected vs. actual.
    assertEquals("0836922-4", finvoice.getBuyerPartyDetails().getBuyerPartyIdentifier());
  }

  @Test
  public void testBuyerOrganisationName() {
    assertEquals("ProCountor International Oy",
        finvoice.getBuyerPartyDetails().getBuyerOrganisationName());
  }

  @Test
  public void testBuyerStreetName() {
    assertEquals("Maapallonkuja 1 A",
        finvoice.getBuyerPartyDetails().getBuyerPostalAddressDetails().getBuyerStreetName());
  }

  @Test
  public void testBuyerTownName() {
    assertEquals("ESPOO",
        finvoice.getBuyerPartyDetails().getBuyerPostalAddressDetails().getBuyerTownName());
  }

  @Test
  public void testBuyerPostcodeIdentifier() {
    assertEquals("02150",
        finvoice.getBuyerPartyDetails().getBuyerPostalAddressDetails()
            .getBuyerPostCodeIdentifier());
  }

  @Test
  public void testBuyerCountryCode() {
    assertEquals("FI",
        finvoice.getBuyerPartyDetails().getBuyerPostalAddressDetails().getCountryCode());
  }

  @Test
  public void testDeliveryOrganisationName() {
    assertEquals("ProCountor International Oy",
        finvoice.getDeliveryPartyDetails().getDeliveryOrganisationName());
  }

  @Test
  public void testDeliveryStreetName() {
    assertEquals("Keilaranta 8",
        finvoice.getDeliveryPartyDetails().getDeliveryPostalAddressDetails()
            .getDeliveryStreetName());
  }

  @Test
  public void testDeliveryTownName() {
    assertEquals("ESPOO",
        finvoice.getDeliveryPartyDetails().getDeliveryPostalAddressDetails()
            .getDeliveryTownName());
  }

  @Test
  public void testDeliveryPostcodeIdentifier() {
    assertEquals("02150",
        finvoice.getDeliveryPartyDetails().getDeliveryPostalAddressDetails()
            .getDeliveryPostCodeIdentifier());
  }

  @Test
  public void testDeliveryCountryCode() {
    assertEquals("FI",
        finvoice.getDeliveryPartyDetails().getDeliveryPostalAddressDetails().getCountryCode());
  }

  // TODO TEST DATES!!!!

  @Test
  public void testInvoiceTypeCode() {
    assertEquals("M", finvoice.getInvoiceDetails().getInvoiceTypeCode());
  }

  @Test
  public void testInvoiceTotalVatExcludedAmount() {
    // BigDecimal.equals is scale-sensitive; assertEquals preserves that check.
    assertEquals(new BigDecimal("2450.00"),
        finvoice.getInvoiceDetails().getInvoiceTotalVatExcludedAmount());
  }

  @Test
  public void testInvoiceTotalVatAmount() {
    assertEquals(new BigDecimal("563.50"),
        finvoice.getInvoiceDetails().getInvoiceTotalVatAmount());
  }

  @Test
  public void testPaymentOverDueFinePercent() {
    assertEquals(new BigDecimal("10.5"),
        finvoice.getInvoiceDetails().getPaymentTermsDetails().getPaymentOverDueFineDetails()
            .getPaymentOverDueFinePercent());
  }

  @Test
  public void testInvoiceRowArticleIdentifier() {
    assertEquals("SA2", finvoice.getInvoiceRow().get(1).getArticleIdentifier());
  }

  @Test
  public void testInvoiceRowArticleName() {
    assertEquals("Premium Saula Cafe espressonapit",
        finvoice.getInvoiceRow().get(1).getArticleName());
  }

  @Test
  public void testInvoiceRowOrderedQuantity() {
    // Primitive comparison kept as-is; getter return type is numeric.
    assertTrue(finvoice.getInvoiceRow().get(1).getOrderedQuantity() == 1000);
  }

  @Test
  public void testInvoiceRowUnitPrice() {
    assertEquals(new BigDecimal("0.15"),
        finvoice.getInvoiceRow().get(1).getUnitPriceAmount());
  }

  @Test
  public void testInvoiceRowVatPercent() {
    assertTrue(finvoice.getInvoiceRow().get(1).getRowVatRatePercent() == 23);
  }

  @Test
  public void testInvoiceRowVatAmount() {
    assertEquals(new BigDecimal("34.50"),
        finvoice.getInvoiceRow().get(1).getRowVatAmount());
  }

  @Test
  public void testInvoiceRowVatExcludedAmount() {
    assertEquals(new BigDecimal("150.00"),
        finvoice.getInvoiceRow().get(1).getRowVatExcludedAmount());
  }

  @Test
  public void testInvoiceRowAmount() {
    assertEquals(new BigDecimal("184.50"),
        finvoice.getInvoiceRow().get(1).getRowAmount());
  }
}
|
test/main/java/finvoice2csv/Xml2ModelMapperTest.java
|
package finvoice2csv;
import static org.junit.Assert.*;
import java.io.File;
import java.math.BigDecimal;
import org.junit.Before;
import org.junit.Test;
import org.niklas.finvoice2csv.model.Finvoice;
import org.niklas.finvoice2csv.util.mappers.Xml2ModelMapper;
import org.niklas.finvoice2csv.util.mappers.Xml2ModelMapperImpl;
/**
 * Verifies that {@link Xml2ModelMapperImpl} maps each field of the sample Finvoice XML
 * fixture into the {@link Finvoice} model.
 */
public class Xml2ModelMapperTest {
  private Finvoice finvoice;

  /**
   * Parses the sample invoice once before each test. Uses a repo-relative fixture path so the
   * suite runs on any machine (previously a hardcoded Windows path, c:\temp\lasku.xml).
   */
  @Before
  public void setUp() {
    Xml2ModelMapper mapper = new Xml2ModelMapperImpl();
    finvoice = mapper.getFinvoiceFromXml(new File("test/res/lasku.xml"));
  }

  @Test
  public void testBuyerPartyIdentifier() {
    // assertEquals (not assertTrue(..equals..)) so failures report expected vs. actual.
    assertEquals("0836922-4", finvoice.getBuyerPartyDetails().getBuyerPartyIdentifier());
  }

  @Test
  public void testBuyerOrganisationName() {
    assertEquals("ProCountor International Oy",
        finvoice.getBuyerPartyDetails().getBuyerOrganisationName());
  }

  @Test
  public void testBuyerStreetName() {
    assertEquals("Maapallonkuja 1 A",
        finvoice.getBuyerPartyDetails().getBuyerPostalAddressDetails().getBuyerStreetName());
  }

  @Test
  public void testBuyerTownName() {
    assertEquals("ESPOO",
        finvoice.getBuyerPartyDetails().getBuyerPostalAddressDetails().getBuyerTownName());
  }

  @Test
  public void testBuyerPostcodeIdentifier() {
    assertEquals("02150",
        finvoice.getBuyerPartyDetails().getBuyerPostalAddressDetails()
            .getBuyerPostCodeIdentifier());
  }

  @Test
  public void testBuyerCountryCode() {
    assertEquals("FI",
        finvoice.getBuyerPartyDetails().getBuyerPostalAddressDetails().getCountryCode());
  }

  @Test
  public void testDeliveryOrganisationName() {
    assertEquals("ProCountor International Oy",
        finvoice.getDeliveryPartyDetails().getDeliveryOrganisationName());
  }

  @Test
  public void testDeliveryStreetName() {
    assertEquals("Keilaranta 8",
        finvoice.getDeliveryPartyDetails().getDeliveryPostalAddressDetails()
            .getDeliveryStreetName());
  }

  @Test
  public void testDeliveryTownName() {
    assertEquals("ESPOO",
        finvoice.getDeliveryPartyDetails().getDeliveryPostalAddressDetails()
            .getDeliveryTownName());
  }

  @Test
  public void testDeliveryPostcodeIdentifier() {
    assertEquals("02150",
        finvoice.getDeliveryPartyDetails().getDeliveryPostalAddressDetails()
            .getDeliveryPostCodeIdentifier());
  }

  @Test
  public void testDeliveryCountryCode() {
    assertEquals("FI",
        finvoice.getDeliveryPartyDetails().getDeliveryPostalAddressDetails().getCountryCode());
  }

  // TODO TEST DATES!!!!

  @Test
  public void testInvoiceTypeCode() {
    assertEquals("M", finvoice.getInvoiceDetails().getInvoiceTypeCode());
  }

  @Test
  public void testInvoiceTotalVatExcludedAmount() {
    // BigDecimal.equals is scale-sensitive; assertEquals preserves that check.
    assertEquals(new BigDecimal("2450.00"),
        finvoice.getInvoiceDetails().getInvoiceTotalVatExcludedAmount());
  }

  @Test
  public void testInvoiceTotalVatAmount() {
    assertEquals(new BigDecimal("563.50"),
        finvoice.getInvoiceDetails().getInvoiceTotalVatAmount());
  }

  @Test
  public void testPaymentOverDueFinePercent() {
    assertEquals(new BigDecimal("10.5"),
        finvoice.getInvoiceDetails().getPaymentTermsDetails().getPaymentOverDueFineDetails()
            .getPaymentOverDueFinePercent());
  }

  @Test
  public void testInvoiceRowArticleIdentifier() {
    assertEquals("SA2", finvoice.getInvoiceRow().get(1).getArticleIdentifier());
  }

  @Test
  public void testInvoiceRowArticleName() {
    assertEquals("Premium Saula Cafe espressonapit",
        finvoice.getInvoiceRow().get(1).getArticleName());
  }

  @Test
  public void testInvoiceRowOrderedQuantity() {
    // Primitive comparison kept as-is; getter return type is numeric.
    assertTrue(finvoice.getInvoiceRow().get(1).getOrderedQuantity() == 1000);
  }

  @Test
  public void testInvoiceRowUnitPrice() {
    assertEquals(new BigDecimal("0.15"),
        finvoice.getInvoiceRow().get(1).getUnitPriceAmount());
  }

  @Test
  public void testInvoiceRowVatPercent() {
    assertTrue(finvoice.getInvoiceRow().get(1).getRowVatRatePercent() == 23);
  }

  @Test
  public void testInvoiceRowVatAmount() {
    assertEquals(new BigDecimal("34.50"),
        finvoice.getInvoiceRow().get(1).getRowVatAmount());
  }

  @Test
  public void testInvoiceRowVatExcludedAmount() {
    assertEquals(new BigDecimal("150.00"),
        finvoice.getInvoiceRow().get(1).getRowVatExcludedAmount());
  }

  @Test
  public void testInvoiceRowAmount() {
    assertEquals(new BigDecimal("184.50"),
        finvoice.getInvoiceRow().get(1).getRowAmount());
  }
}
|
Changed JUnit xml file path to relative
|
test/main/java/finvoice2csv/Xml2ModelMapperTest.java
|
Changed JUnit xml file path to relative
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.