| focal_method (string, length 13–60.9k) | test_case (string, length 25–109k) |
|---|---|
/**
 * Reads the next two bytes as an unsigned short and reinterprets them as a UTF-16 char.
 * Delegates to {@code readUnsignedShort()}, so the read position advances by two bytes.
 *
 * @throws EOFException if fewer than two bytes remain in the stream
 */
@Override
public char readChar()
throws EOFException {
return (char) readUnsignedShort();
}
|
/**
 * Verifies readChar() returns the same char as the backing buffer at offset 0
 * and advances the stream offset by exactly Character.BYTES.
 */
@Test
void testReadChar()
throws EOFException {
char read = _dataBufferPinotInputStream.readChar();
assertEquals(read, _byteBuffer.getChar(0));
assertEquals(_dataBufferPinotInputStream.getCurrentOffset(), Character.BYTES);
}
|
/**
 * Renders an AST back into SQL text by walking it with {@code Formatter},
 * then strips any trailing newlines from the result.
 *
 * @param root root of the AST to format
 * @return the formatted SQL string without trailing newline characters
 */
public static String formatSql(final AstNode root) {
final StringBuilder builder = new StringBuilder();
new Formatter(builder).process(root, 0);
return StringUtils.stripEnd(builder.toString(), "\n");
}
|
/**
 * Verifies that a CreateTable node with the OR REPLACE flag set formats as a
 * "CREATE OR REPLACE TABLE ..." statement including columns and WITH properties.
 */
@Test
public void shouldFormatCreateOrReplaceTableStatement() {
// Given:
final CreateSourceProperties props = CreateSourceProperties.from(
new ImmutableMap.Builder<String, Literal>()
.putAll(SOME_WITH_PROPS.copyOfOriginalLiterals())
.build()
);
// orReplace=true, notExists=false, isSource=false
final CreateTable createTable = new CreateTable(
TEST,
ELEMENTS_WITH_PRIMARY_KEY,
true,
false,
props,
false);
// When:
final String sql = SqlFormatter.formatSql(createTable);
// Then:
assertThat(sql, is("CREATE OR REPLACE TABLE TEST (`k3` STRING PRIMARY KEY, `Foo` STRING) "
+ "WITH (KAFKA_TOPIC='topic_test', VALUE_FORMAT='JSON');"));
}
|
/**
 * Delegates size determination to the internal {@code sizeDeterminer}; the callback
 * is invoked once the view's dimensions are known.
 */
@Override
public final void getSize(@NonNull SizeReadyCallback cb) {
sizeDeterminer.getSize(cb);
}
|
/**
 * Registers two size callbacks before layout, then triggers pre-draw twice and
 * verifies each callback is notified exactly once with the layout dimensions.
 */
@Test
public void testCallbacksFromMultipleRequestsAreNotifiedOnPreDraw() {
SizeReadyCallback firstCb = mock(SizeReadyCallback.class);
SizeReadyCallback secondCb = mock(SizeReadyCallback.class);
target.getSize(firstCb);
target.getSize(secondCb);
int width = 68;
int height = 875;
LayoutParams layoutParams = new FrameLayout.LayoutParams(width, height);
view.setLayoutParams(layoutParams);
activity.visible();
// Two pre-draw passes must not produce duplicate notifications.
view.getViewTreeObserver().dispatchOnPreDraw();
view.getViewTreeObserver().dispatchOnPreDraw();
verify(firstCb, times(1)).onSizeReady(eq(width), eq(height));
verify(secondCb, times(1)).onSizeReady(eq(width), eq(height));
}
|
/**
 * Ensures the segment carries a usable id: when the current id is null or empty,
 * a new one is generated from SEGMENTID_TEMPLATE (model name + index); otherwise
 * the existing id is sanitized against the model name.
 *
 * @param segment   segment whose id is (re)assigned in place
 * @param modelName name of the enclosing model, used in the generated/sanitized id
 * @param i         positional index of the segment, used in the generated id
 */
static void populateCorrectSegmentId(final Segment segment, final String modelName, final int i) {
    final String currentId = segment.getId();
    final boolean idMissing = currentId == null || currentId.isEmpty();
    segment.setId(idMissing
            ? String.format(SEGMENTID_TEMPLATE, modelName, i)
            : getSanitizedId(currentId, modelName));
}
|
/**
 * Loads a sample PMML whose first segment has no id and verifies that
 * populateCorrectSegmentId assigns the id generated from SEGMENTID_TEMPLATE.
 */
@Test
void populateCorrectSegmentId() throws Exception {
final InputStream inputStream = getFileInputStream(NO_MODELNAME_NO_SEGMENTID_SAMPLE_NAME);
final PMML pmml = org.jpmml.model.PMMLUtil.unmarshal(inputStream);
final Model retrieved = pmml.getModels().get(0);
assertThat(retrieved).isInstanceOf(MiningModel.class);
MiningModel miningModel = (MiningModel) retrieved;
Segment toPopulate = miningModel.getSegmentation().getSegments().get(0);
// Precondition: the sample segment has no id, so the template path is exercised.
assertThat(toPopulate.getId()).isNull();
String modelName = "MODEL_NAME";
int i = 0;
KiePMMLUtil.populateCorrectSegmentId(toPopulate, modelName, i);
assertThat(toPopulate.getId()).isNotNull();
String expected = String.format(SEGMENTID_TEMPLATE,
modelName,
i);
assertThat(toPopulate.getId()).isEqualTo(expected);
}
|
/**
 * Builds a human-readable failure message for a controller event, composed of:
 * whether the event started processing (deltaUs present), the internal exception
 * class (plus the externally-exposed one, if different), the epoch when a failover
 * is caused, the processing time, and the failover consequence for this node.
 *
 * @param epoch               controller epoch to report when a failover is caused
 * @param deltaUs             processing time in microseconds; empty if the event never started
 * @param isActiveController  whether this node was the active controller
 * @param lastCommittedOffset offset reported when the active controller renounces leadership
 * @return the assembled message, terminated with a period and optionally the exception message
 */
public String failureMessage(
int epoch,
OptionalLong deltaUs,
boolean isActiveController,
long lastCommittedOffset
) {
StringBuilder bld = new StringBuilder();
if (deltaUs.isPresent()) {
bld.append("event failed with ");
} else {
bld.append("event unable to start processing because of ");
}
bld.append(internalException.getClass().getSimpleName());
// External exception differs when the internal one is translated for clients.
if (externalException.isPresent()) {
bld.append(" (treated as ").
append(externalException.get().getClass().getSimpleName()).append(")");
}
if (causesFailover()) {
bld.append(" at epoch ").append(epoch);
}
if (deltaUs.isPresent()) {
bld.append(" in ").append(deltaUs.getAsLong()).append(" microseconds");
}
if (causesFailover()) {
if (isActiveController) {
bld.append(". Renouncing leadership and reverting to the last committed offset ");
bld.append(lastCommittedOffset);
} else {
bld.append(". The controller is already in standby mode");
}
}
bld.append(".");
// Non-fault exceptions carry caller-relevant detail, so surface their message.
if (!isFault && internalException.getMessage() != null) {
bld.append(" Exception message: ");
bld.append(internalException.getMessage());
}
return bld.toString();
}
|
/**
 * With no processing time recorded (empty deltaUs), the message must use the
 * "unable to start processing" form and show the external TimeoutException mapping.
 */
@Test
public void testRejectedExecutionExceptionFailureMessage() {
assertEquals("event unable to start processing because of RejectedExecutionException (treated " +
"as TimeoutException).",
REJECTED_EXECUTION.failureMessage(123, OptionalLong.empty(), true, 456L));
}
|
/**
 * Asynchronously sends a request to the given address and completes the returned
 * future with the response command, or exceptionally with a connect/remoting error.
 * The channel is obtained asynchronously; all failure paths complete the future
 * rather than throwing to the caller.
 *
 * @param addr          remote address to connect to
 * @param request       command to send
 * @param timeoutMillis timeout applied by invokeImpl to the round trip
 * @return a future holding the response, or the failure cause
 */
@Override
public CompletableFuture<RemotingCommand> invoke(String addr, RemotingCommand request,
long timeoutMillis) {
CompletableFuture<RemotingCommand> future = new CompletableFuture<>();
try {
final ChannelFuture channelFuture = this.getAndCreateChannelAsync(addr);
if (channelFuture == null) {
future.completeExceptionally(new RemotingConnectException(addr));
return future;
}
channelFuture.addListener(f -> {
if (f.isSuccess()) {
Channel channel = channelFuture.channel();
if (channel != null && channel.isActive()) {
// On success, record the response time for channel liveness tracking.
invokeImpl(channel, request, timeoutMillis).whenComplete((v, t) -> {
if (t == null) {
updateChannelLastResponseTime(addr);
}
}).thenApply(ResponseFuture::getResponseCommand).whenComplete((v, t) -> {
if (t != null) {
future.completeExceptionally(t);
} else {
future.complete(v);
}
});
} else {
// Connected but channel unusable: close it and report a connect failure.
this.closeChannel(addr, channel);
future.completeExceptionally(new RemotingConnectException(addr));
}
} else {
future.completeExceptionally(new RemotingConnectException(addr));
}
});
} catch (Throwable t) {
// Defensive: any synchronous failure is surfaced through the future, not thrown.
future.completeExceptionally(t);
}
return future;
}
|
/**
 * Stubs invoke() to return a future failed with RemotingTimeoutException and
 * verifies the caller observes that exception as the cause of the ExecutionException.
 */
@Test
public void testRemotingTimeoutException() throws Exception {
RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.PULL_MESSAGE, null);
RemotingCommand response = RemotingCommand.createResponseCommand(null);
response.setCode(ResponseCode.SUCCESS);
CompletableFuture<RemotingCommand> future0 = new CompletableFuture<>();
future0.completeExceptionally(new RemotingTimeoutException(""));
doReturn(future0).when(remotingClient).invoke(anyString(), any(RemotingCommand.class), anyLong());
CompletableFuture<RemotingCommand> future = remotingClient.invoke("0.0.0.0", request, 1000);
Throwable thrown = catchThrowable(future::get);
assertThat(thrown.getCause()).isInstanceOf(RemotingTimeoutException.class);
}
|
/**
 * Derives the schema used when producing to a sink from the input schema and an
 * optionally user-declared schema. Three cases: no declared schema (derive fully
 * from input, erasing time attributes); declared schema with no physical columns
 * (merge derived physical columns with the declared metadata/primary-key info);
 * otherwise the declared schema is used as-is.
 *
 * @param inputSchema    resolved schema of the input
 * @param declaredSchema schema declared by the user, or null when absent
 * @return the producing result wrapping the effective schema
 */
public static ProducingResult createProducingResult(
ResolvedSchema inputSchema, @Nullable Schema declaredSchema) {
// no schema has been declared by the user,
// the schema will be entirely derived from the input
if (declaredSchema == null) {
// go through data type to erase time attributes
final DataType physicalDataType = inputSchema.toSourceRowDataType();
final Schema schema = Schema.newBuilder().fromRowDataType(physicalDataType).build();
return new ProducingResult(null, schema, null);
}
final List<UnresolvedColumn> declaredColumns = declaredSchema.getColumns();
// the declared schema does not contain physical information,
// thus, it only replaces physical columns with metadata rowtime or adds a primary key
if (declaredColumns.stream().noneMatch(SchemaTranslator::isPhysical)) {
// go through data type to erase time attributes
final DataType sourceDataType = inputSchema.toSourceRowDataType();
final DataType physicalDataType =
patchDataTypeWithoutMetadataRowtime(sourceDataType, declaredColumns);
final Schema.Builder builder = Schema.newBuilder();
builder.fromRowDataType(physicalDataType);
builder.fromSchema(declaredSchema);
return new ProducingResult(null, builder.build(), null);
}
// Declared schema contains physical columns: trust it entirely.
return new ProducingResult(null, declaredSchema, null);
}
|
/**
 * When the declared schema has only metadata/bridging columns (no physical ones),
 * the result schema must merge the derived physical columns with the declared
 * metadata rowtime column and conversion class.
 */
@Test
void testOutputToDeclaredSchema() {
final ResolvedSchema tableSchema =
ResolvedSchema.of(
Column.physical("id", BIGINT()),
Column.physical("rowtime", TIMESTAMP_LTZ(3)),
Column.physical("name", STRING()));
final ProducingResult result =
SchemaTranslator.createProducingResult(
tableSchema,
Schema.newBuilder()
.column("id", BIGINT())
.columnByMetadata("rowtime", TIMESTAMP_LTZ(3))
.column("name", STRING().bridgedTo(StringData.class))
.build());
assertThat(result.getSchema())
.isEqualTo(
Schema.newBuilder()
.column("id", BIGINT())
.columnByMetadata("rowtime", TIMESTAMP_LTZ(3))
.column("name", STRING().bridgedTo(StringData.class))
.build());
}
|
/**
 * Returns the indices that are active in this vector but not in {@code other},
 * using a merge-style walk over both sorted index iterators.
 *
 * @param other the vector to subtract from this one's active index set
 * @return sorted array of indices active only in this vector
 */
public int[] difference(SparseVector other) {
List<Integer> diffIndicesList = new ArrayList<>();
if (other.numActiveElements() == 0) {
// Nothing to subtract: the difference is this vector's full active index set.
return Arrays.copyOf(indices,indices.length);
} else if (indices.length == 0) {
return new int[0];
} else {
Iterator<VectorTuple> itr = iterator();
Iterator<VectorTuple> otherItr = other.iterator();
VectorTuple tuple = itr.next();
VectorTuple otherTuple = otherItr.next();
// Merge walk: advance whichever iterator has the smaller index; indices present
// only on this side are collected.
while (itr.hasNext() && otherItr.hasNext()) {
if (tuple.index == otherTuple.index) {
tuple = itr.next();
otherTuple = otherItr.next();
} else if (tuple.index < otherTuple.index) {
diffIndicesList.add(tuple.index);
tuple = itr.next();
} else {
otherTuple = otherItr.next();
}
}
// Drain remaining indices on this side; only otherTuple's final index can still match.
while (itr.hasNext()) {
if (tuple.index != otherTuple.index) {
diffIndicesList.add(tuple.index);
}
tuple = itr.next();
}
// Advance the other side until it could match the last tuple on this side.
while (otherItr.hasNext()) {
if (tuple.index == otherTuple.index) {
break; // break out of loop as we've found the last value.
}
otherTuple = otherItr.next();
}
// Final element of this side is in the difference unless matched above.
if (tuple.index != otherTuple.index) {
diffIndicesList.add(tuple.index);
}
}
return Util.toPrimitiveInt(diffIndicesList);
}
|
/**
 * Checks difference() on three fixture vectors: identical overlap yields an empty
 * array, and asymmetric overlaps yield each side's exclusive indices.
 */
@Test
public void difference() {
SparseVector a = generateVectorA();
SparseVector b = generateVectorB();
SparseVector c = generateVectorC();
assertArrayEquals(a.difference(b), new int[0]);
assertArrayEquals(a.difference(c), new int[]{0,4,8});
assertArrayEquals(c.difference(a), new int[]{6,7,9});
}
|
/**
 * Decodes an ABI-encoded function result into typed values, delegating to the
 * configured decoder.
 *
 * @param rawInput         hex-encoded return data (with 0x prefix)
 * @param outputParameters expected output parameter types, in declaration order
 * @return decoded values matching {@code outputParameters}
 */
public static List<Type> decode(String rawInput, List<TypeReference<Type>> outputParameters) {
return decoder.decodeFunctionResult(rawInput, outputParameters);
}
|
/**
 * Decodes a fixture payload containing a mix of static and dynamic struct arrays
 * (StaticArray3&lt;Bar&gt;, StaticArray3&lt;Nar&gt;, DynamicArray&lt;Foo&gt;, DynamicArray&lt;Nar&gt;,
 * StaticArray3&lt;Foo&gt;) and compares against the expected object graph.
 */
@Test
public void testDecodeStructMultipleDynamicStaticArray2() {
// ABI-encoded return data: head words with offsets followed by tails with
// string lengths and UTF-8 payloads ("4", "nestedFoo", "id", "name").
String rawInput =
"0x0000000000000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000000"
+ "000000000000000000000000000000000000000000000000000000000000007b"
+ "000000000000000000000000000000000000000000000000000000000000007b"
+ "0000000000000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000140"
+ "0000000000000000000000000000000000000000000000000000000000000460"
+ "0000000000000000000000000000000000000000000000000000000000000560"
+ "00000000000000000000000000000000000000000000000000000000000008a0"
+ "0000000000000000000000000000000000000000000000000000000000000060"
+ "0000000000000000000000000000000000000000000000000000000000000160"
+ "0000000000000000000000000000000000000000000000000000000000000220"
+ "0000000000000000000000000000000000000000000000000000000000000020"
+ "0000000000000000000000000000000000000000000000000000000000000020"
+ "0000000000000000000000000000000000000000000000000000000000000040"
+ "0000000000000000000000000000000000000000000000000000000000000080"
+ "0000000000000000000000000000000000000000000000000000000000000001"
+ "3400000000000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000009"
+ "6e6573746564466f6f0000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000020"
+ "0000000000000000000000000000000000000000000000000000000000000020"
+ "0000000000000000000000000000000000000000000000000000000000000040"
+ "0000000000000000000000000000000000000000000000000000000000000060"
+ "0000000000000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000020"
+ "0000000000000000000000000000000000000000000000000000000000000020"
+ "0000000000000000000000000000000000000000000000000000000000000040"
+ "0000000000000000000000000000000000000000000000000000000000000080"
+ "0000000000000000000000000000000000000000000000000000000000000001"
+ "3400000000000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000009"
+ "6e6573746564466f6f0000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000001"
+ "0000000000000000000000000000000000000000000000000000000000000020"
+ "0000000000000000000000000000000000000000000000000000000000000040"
+ "0000000000000000000000000000000000000000000000000000000000000080"
+ "0000000000000000000000000000000000000000000000000000000000000002"
+ "6964000000000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000004"
+ "6e616d6500000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000003"
+ "0000000000000000000000000000000000000000000000000000000000000060"
+ "0000000000000000000000000000000000000000000000000000000000000160"
+ "0000000000000000000000000000000000000000000000000000000000000260"
+ "0000000000000000000000000000000000000000000000000000000000000020"
+ "0000000000000000000000000000000000000000000000000000000000000020"
+ "0000000000000000000000000000000000000000000000000000000000000040"
+ "0000000000000000000000000000000000000000000000000000000000000080"
+ "0000000000000000000000000000000000000000000000000000000000000001"
+ "3400000000000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000009"
+ "6e6573746564466f6f0000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000020"
+ "0000000000000000000000000000000000000000000000000000000000000020"
+ "0000000000000000000000000000000000000000000000000000000000000040"
+ "0000000000000000000000000000000000000000000000000000000000000080"
+ "0000000000000000000000000000000000000000000000000000000000000001"
+ "3400000000000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000009"
+ "6e6573746564466f6f0000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000020"
+ "0000000000000000000000000000000000000000000000000000000000000020"
+ "0000000000000000000000000000000000000000000000000000000000000040"
+ "0000000000000000000000000000000000000000000000000000000000000060"
+ "0000000000000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000060"
+ "0000000000000000000000000000000000000000000000000000000000000120"
+ "00000000000000000000000000000000000000000000000000000000000001e0"
+ "0000000000000000000000000000000000000000000000000000000000000040"
+ "0000000000000000000000000000000000000000000000000000000000000080"
+ "0000000000000000000000000000000000000000000000000000000000000002"
+ "6964000000000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000004"
+ "6e616d6500000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000040"
+ "0000000000000000000000000000000000000000000000000000000000000080"
+ "0000000000000000000000000000000000000000000000000000000000000002"
+ "6964000000000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000004"
+ "6e616d6500000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000040"
+ "0000000000000000000000000000000000000000000000000000000000000080"
+ "0000000000000000000000000000000000000000000000000000000000000002"
+ "6964000000000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000004"
+ "6e616d6500000000000000000000000000000000000000000000000000000000";
// Expected decode: arrays of Bar/Nar/Foo fixtures mirroring the payload above.
assertEquals(
FunctionReturnDecoder.decode(
rawInput,
AbiV2TestFixture.idBarNarFooNarFooArraysFunction.getOutputParameters()),
Arrays.asList(
new StaticArray3<>(
AbiV2TestFixture.Bar.class,
new AbiV2TestFixture.Bar(BigInteger.ZERO, BigInteger.ZERO),
new AbiV2TestFixture.Bar(
BigInteger.valueOf(123), BigInteger.valueOf(123)),
new AbiV2TestFixture.Bar(BigInteger.ZERO, BigInteger.ZERO)),
new StaticArray3<>(
AbiV2TestFixture.Nar.class,
new AbiV2TestFixture.Nar(
new AbiV2TestFixture.Nuu(
new AbiV2TestFixture.Foo("4", "nestedFoo"))),
new AbiV2TestFixture.Nar(
new AbiV2TestFixture.Nuu(new AbiV2TestFixture.Foo("", ""))),
new AbiV2TestFixture.Nar(
new AbiV2TestFixture.Nuu(
new AbiV2TestFixture.Foo("4", "nestedFoo")))),
new DynamicArray<>(
AbiV2TestFixture.Foo.class, new AbiV2TestFixture.Foo("id", "name")),
new DynamicArray<>(
AbiV2TestFixture.Nar.class,
new AbiV2TestFixture.Nar(
new AbiV2TestFixture.Nuu(
new AbiV2TestFixture.Foo("4", "nestedFoo"))),
new AbiV2TestFixture.Nar(
new AbiV2TestFixture.Nuu(
new AbiV2TestFixture.Foo("4", "nestedFoo"))),
new AbiV2TestFixture.Nar(
new AbiV2TestFixture.Nuu(
new AbiV2TestFixture.Foo("", "")))),
new StaticArray3<>(
AbiV2TestFixture.Foo.class,
new AbiV2TestFixture.Foo("id", "name"),
new AbiV2TestFixture.Foo("id", "name"),
new AbiV2TestFixture.Foo("id", "name"))));
}
|
/**
 * Returns whether the promise has reached a terminal state, as reported by the
 * internal state machine.
 */
@Override
public boolean isDone() {
return state.isDoneState();
}
|
/**
 * Verifies the promise is not done before a value is set and done afterwards.
 */
@Test
public void testIsDone() throws Exception {
assertThat(promise.isDone()).isEqualTo(false);
promise.set("Done");
assertThat(promise.isDone()).isEqualTo(true);
}
|
/**
 * Runs the given tasks sequentially on the calling thread and returns the result of
 * the first one that completes without throwing. Every failure is remembered so
 * that, if no task succeeds, the last exception becomes the cause of the thrown
 * ExecutionException.
 *
 * @param tasks candidate tasks, tried in iteration order
 * @return the result of the first successfully completed task
 * @throws ExecutionException if every task threw; cause is the last failure
 */
@Override
@Nonnull
public <T> T invokeAny(@Nonnull Collection<? extends Callable<T>> tasks)
throws ExecutionException {
    throwRejectedExecutionExceptionIfShutdown();
    Exception lastFailure = null;
    for (final Callable<T> candidate : tasks) {
        try {
            // First normal completion wins.
            return candidate.call();
        } catch (Exception e) {
            // Remember the failure and fall through to the next candidate.
            lastFailure = e;
        }
    }
    throw new ExecutionException("No tasks finished successfully.", lastFailure);
}
|
/**
 * Verifies the timed invokeAny overload executes the task on the calling thread
 * before shutdown completes.
 */
@Test
void testInvokeAnyWithTimeout() {
final CompletableFuture<Thread> future = new CompletableFuture<>();
testTaskSubmissionBeforeShutdown(
testInstance ->
testInstance.invokeAny(
callableCollectionFromFuture(future), 1, TimeUnit.DAYS));
// Direct executor: the task must have run on this very thread.
assertThat(future).isCompletedWithValue(Thread.currentThread());
}
|
/**
 * Creates a GCS bucket in the given project using the default backoff policy and
 * sleeper; delegates to the four-argument overload.
 *
 * @param projectId project to create the bucket in
 * @param bucket    bucket definition to create
 * @throws IOException if the creation request ultimately fails
 */
public void createBucket(String projectId, Bucket bucket) throws IOException {
createBucket(projectId, bucket, createBackOff(), Sleeper.DEFAULT);
}
|
/**
 * Stubs the Storage client so the first insert attempt times out and the second
 * succeeds, verifying createBucket retries transient failures via backoff.
 */
@Test
public void testCreateBucket() throws IOException {
GcsOptions pipelineOptions = gcsOptionsWithTestCredential();
GcsUtil gcsUtil = pipelineOptions.getGcsUtil();
Storage.Buckets mockStorageObjects = Mockito.mock(Storage.Buckets.class);
Storage mockStorage = Mockito.mock(Storage.class);
gcsUtil.setStorageClient(mockStorage);
Storage.Buckets.Insert mockStorageInsert = Mockito.mock(Storage.Buckets.Insert.class);
BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff());
when(mockStorage.buckets()).thenReturn(mockStorageObjects);
when(mockStorageObjects.insert(any(String.class), any(Bucket.class)))
.thenReturn(mockStorageInsert);
// First call fails with a retriable timeout, second returns successfully.
when(mockStorageInsert.execute())
.thenThrow(new SocketTimeoutException("SocketException"))
.thenReturn(new Bucket());
gcsUtil.createBucket("a", new Bucket(), mockBackOff, new FastNanoClockAndSleeper()::sleep);
}
|
/**
 * Validates that the given value is a String (or null, which is accepted as an
 * absent value). Any other type yields a ValidationFailed describing the value.
 *
 * @param value the candidate value; may be null
 * @return ValidationPassed for null/String, ValidationFailed otherwise
 */
@Override
public ValidationResult validate(Object value) {
    final boolean acceptable = value == null || value instanceof String;
    if (!acceptable) {
        return new ValidationResult.ValidationFailed("Value \"" + value + "\" is not a valid string!");
    }
    return new ValidationResult.ValidationPassed();
}
|
/**
 * An empty string is still a String, so validation must pass.
 */
@Test
public void validateEmptyString() {
assertThat(validator.validate("")).isInstanceOf(ValidationResult.ValidationPassed.class);
}
|
/**
 * Marks the bag state as cleared and resets any locally buffered values.
 * Must not be called after the state has been closed.
 *
 * @throws IllegalStateException if the state is already closed
 */
public void clear() {
checkState(
!isClosed,
"Bag user state is no longer usable because it is closed for %s",
request.getStateKey());
// Clearing discards the persisted contents logically; new appends start fresh.
isCleared = true;
newValues = new ArrayList<>();
}
|
/**
 * Exercises clear(): it empties the bag, subsequent appends repopulate it,
 * the cleared state persists as a deletion on close, and calling clear() after
 * close throws IllegalStateException.
 */
@Test
public void testClear() throws Exception {
FakeBeamFnStateClient fakeClient =
new FakeBeamFnStateClient(
StringUtf8Coder.of(), ImmutableMap.of(key("A"), asList("A1", "A2", "A3")));
BagUserState<String> userState =
new BagUserState<>(
Caches.noop(), fakeClient, "instructionId", key("A"), StringUtf8Coder.of());
assertArrayEquals(
new String[] {"A1", "A2", "A3"}, Iterables.toArray(userState.get(), String.class));
userState.clear();
assertFalse(userState.get().iterator().hasNext());
userState.append("A4");
assertArrayEquals(new String[] {"A4"}, Iterables.toArray(userState.get(), String.class));
userState.clear();
assertFalse(userState.get().iterator().hasNext());
userState.asyncClose();
// The clear must be persisted as a deletion of the backing data.
assertNull(fakeClient.getData().get(key("A")));
assertThrows(IllegalStateException.class, () -> userState.clear());
}
|
/**
 * Returns the cell at the given column of the current in-memory row, recording
 * whether it was SQL NULL. Stream-like types are rejected up front because they
 * cannot be materialized from a memory-merged result.
 *
 * @param columnIndex 1-based column index
 * @param type        requested Java type; must not be a stream type
 * @throws SQLException (SQLFeatureNotSupportedException) for unsupported memory types
 */
@Override
public final Object getValue(final int columnIndex, final Class<?> type) throws SQLException {
ShardingSpherePreconditions.checkNotContains(INVALID_MEMORY_TYPES, type, () -> new SQLFeatureNotSupportedException(String.format("Get value from `%s`", type.getName())));
Object result = currentResultSetRow.getCell(columnIndex);
wasNull = null == result;
return result;
}
|
/**
 * Requesting an InputStream from a memory-merged result must be rejected.
 */
@Test
void assertGetValueForInputStream() {
assertThrows(SQLFeatureNotSupportedException.class, () -> memoryMergedResult.getValue(1, InputStream.class));
}
|
/**
 * Creates a producer bound to the given JCR endpoint; all configuration comes
 * from the endpoint passed to the superclass.
 */
public JcrProducer(JcrEndpoint jcrEndpoint) {
super(jcrEndpoint);
}
|
/**
 * Sends a message through the JCR producer route and verifies a node is created
 * at the expected path with the body stored in the configured property.
 * The verification session is always logged out, even on assertion failure.
 */
@Test
public void testJcrProducer() throws Exception {
Exchange exchange = createExchangeWithBody("<hello>world!</hello>");
exchange.getIn().setHeader(JcrConstants.JCR_NODE_NAME, "node");
exchange.getIn().setHeader("my.contents.property", exchange.getIn().getBody());
Exchange out = template.send("direct:a", exchange);
assertNotNull(out);
// The producer returns the identifier (UUID) of the created node.
String uuid = out.getMessage().getBody(String.class);
Session session = openSession();
try {
Node node = session.getNodeByIdentifier(uuid);
assertNotNull(node);
assertEquals("/home/test/node", node.getPath());
assertEquals("<hello>world!</hello>", node.getProperty("my.contents.property").getString());
} finally {
if (session != null && session.isLive()) {
session.logout();
}
}
}
|
/**
 * REST entry point returning cluster information as JSON or XML (UTF-8);
 * delegates to getClusterInfo().
 */
@GET
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public ClusterInfo get() {
return getClusterInfo();
}
|
/**
 * Requests /ws/v1/cluster/info with an XML Accept header and verifies both the
 * UTF-8 XML content type and the structure of the returned payload.
 */
@Test
public void testInfoXML() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("info").accept("application/xml").get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8,
response.getType().toString());
String xml = response.getEntity(String.class);
verifyClusterInfoXML(xml);
}
|
/**
 * Returns the code-generation column records belonging to the given table,
 * delegating the query to the column mapper.
 *
 * @param tableId id of the table whose columns are fetched
 */
@Override
public List<CodegenColumnDO> getCodegenColumnListByTableId(Long tableId) {
return codegenColumnMapper.selectListByTableId(tableId);
}
|
/**
 * Inserts two random columns and verifies the query returns only the one whose
 * tableId matches the requested id.
 */
@Test
public void testGetCodegenColumnListByTableId() {
// mock data: two columns with (almost certainly) different random tableIds
CodegenColumnDO column01 = randomPojo(CodegenColumnDO.class);
codegenColumnMapper.insert(column01);
CodegenColumnDO column02 = randomPojo(CodegenColumnDO.class);
codegenColumnMapper.insert(column02);
// prepare parameter
Long tableId = column01.getTableId();
// call
List<CodegenColumnDO> result = codegenService.getCodegenColumnListByTableId(tableId);
// assert: only column01 matches
assertEquals(1, result.size());
assertPojoEquals(column01, result.get(0));
}
|
/**
 * Returns the positional arguments left over after option parsing.
 * Never returns null: an empty array is returned when no command line
 * has been parsed yet.
 *
 * @return the remaining (non-option) arguments, possibly empty
 */
public String[] getRemainingArgs() {
    if (commandLine == null) {
        return new String[]{};
    }
    return commandLine.getArgs();
}
|
/**
 * Parsing a null argument array must not make getRemainingArgs() throw.
 */
@Test
public void testNullArgs() throws IOException {
GenericOptionsParser parser = new GenericOptionsParser(conf, null);
parser.getRemainingArgs();
}
|
/**
 * Returns the raw KSQL statement text (no masking applied), serialized under the
 * JSON property "ksql".
 */
@JsonProperty("ksql")
public String getUnmaskedKsql() {
return ksql;
}
|
/**
 * A null statement passed to the request must be normalized to an empty string.
 */
@Test
public void shouldHandleNullStatement() {
assertThat(
new KsqlRequest(null, SOME_PROPS, SOME_REQUEST_PROPS, SOME_COMMAND_NUMBER).getUnmaskedKsql(),
is(""));
}
|
/**
 * Checks that the smart contract deployed at {@code contractAddress} matches this
 * wrapper's compiled binary. Any Solidity metadata hash suffix is stripped from the
 * on-chain code before comparison, and since the binary may contain several
 * contracts, only containment of the deployed code is required.
 *
 * @return true if the deployed runtime code is non-empty and contained in the
 *         wrapper binary; false if the code could not be fetched or differs
 * @throws IOException if the node request fails at the transport level
 * @throws UnsupportedOperationException if the wrapper has no binary or no address
 */
public boolean isValid() throws IOException {
    if (contractBinary.equals(BIN_NOT_PROVIDED)) {
        throw new UnsupportedOperationException(
                "Contract binary not present in contract wrapper, "
                        + "please generate your wrapper using -abiFile=<file>");
    }
    // BUG FIX: this guard checks the *address*, but the original message claimed the
    // binary was missing — corrected to describe the actual failure.
    if (contractAddress.isEmpty()) {
        throw new UnsupportedOperationException(
                "Contract address not present, you will need to regenerate your smart "
                        + "contract wrapper with web3j v2.2.0+");
    }
    EthGetCode ethGetCode =
            transactionManager.getCode(contractAddress, DefaultBlockParameterName.LATEST);
    if (ethGetCode.hasError()) {
        return false;
    }
    String code = cleanHexPrefix(ethGetCode.getCode());
    // Strip the Solidity metadata hash (and anything after it) so compiler
    // metadata differences do not cause false negatives.
    int metadataIndex = -1;
    for (String metadataIndicator : METADATA_HASH_INDICATORS) {
        metadataIndex = code.indexOf(metadataIndicator);
        if (metadataIndex != -1) {
            code = code.substring(0, metadataIndex);
            break;
        }
    }
    // There may be multiple contracts in the Solidity bytecode, hence we only check for a
    // match with a subset
    return !code.isEmpty() && contractBinary.contains(code);
}
|
/**
 * Deployed code that differs from the wrapper binary (extra trailing byte) must
 * make isValid() return false.
 */
@Test
public void testIsValidDifferentCode() throws Exception {
prepareEthGetCode(TEST_CONTRACT_BINARY + "0");
Contract contract = deployContract(createTransactionReceipt());
assertFalse(contract.isValid());
}
|
/**
 * Returns the current count of failed bulk-activities retrievals from the
 * underlying metric.
 */
public int getBulkActivitiesFailedRetrieved(){
return numGetBulkActivitiesFailedRetrieved.value();
}
|
/**
 * Triggers one failed bulk-activities call and verifies the failure counter is
 * incremented by exactly one.
 */
@Test
public void testGetBulkActivitiesRetrievedFailed() {
long totalBadBefore = metrics.getBulkActivitiesFailedRetrieved();
badSubCluster.getBulkActivitiesFailed();
Assert.assertEquals(totalBadBefore + 1,
metrics.getBulkActivitiesFailedRetrieved());
}
|
/**
 * Writes the new value only if it differs from the old serialized value or its
 * timestamp is not increasing; the write latency is recorded via the put sensor.
 *
 * @param key                the record key
 * @param newValue           candidate value with timestamp
 * @param oldSerializedValue previously stored serialized value, used for comparison
 * @return true if the value was written, false if it was skipped as a duplicate
 * @throws ProcessorStateException rethrown with key/value formatted into the message
 */
public boolean putIfDifferentValues(final K key,
final ValueAndTimestamp<V> newValue,
final byte[] oldSerializedValue) {
try {
return maybeMeasureLatency(
() -> {
final byte[] newSerializedValue = serdes.rawValue(newValue);
if (ValueAndTimestampSerializer.valuesAreSameAndTimeIsIncreasing(oldSerializedValue, newSerializedValue)) {
return false;
} else {
wrapped().put(keyBytes(key), newSerializedValue);
return true;
}
},
time,
putSensor
);
} catch (final ProcessorStateException e) {
// The wrapped exception message is a format template; fill in key and value.
final String message = String.format(e.getMessage(), key, newValue);
throw new ProcessorStateException(message, e);
}
}
|
/**
 * An out-of-order value (older timestamp than the stored one) must still be
 * written: putIfDifferentValues returns true because time is not increasing.
 */
@Test
public void shouldPutIfOutOfOrder() {
setUp();
doNothing().when(inner).put(KEY_BYTES, VALUE_AND_TIMESTAMP_BYTES);
init();
metered.put(KEY, VALUE_AND_TIMESTAMP);
final ValueAndTimestampSerde<String> stringSerde = new ValueAndTimestampSerde<>(Serdes.String());
final byte[] encodedOldValue = stringSerde.serializer().serialize("TOPIC", VALUE_AND_TIMESTAMP);
// Same value but earlier timestamp (95L) than the stored one.
final ValueAndTimestamp<String> outOfOrderValueAndTimestamp = ValueAndTimestamp.make("value", 95L);
assertTrue(metered.putIfDifferentValues(KEY, outOfOrderValueAndTimestamp, encodedOldValue));
}
|
/**
 * Initializes the SPNEGO authentication handler from configuration: resolves the
 * principal(s) and keytab, populates the server Subject with credentials and
 * principals, applies optional Kerberos name/rule settings and an endpoint
 * whitelist, and creates the GSSManager under the server Subject.
 *
 * @param config handler configuration properties
 * @throws ServletException if principal/keytab are missing or invalid, the
 *         whitelist is malformed, or GSSManager creation fails
 */
@Override
public void init(Properties config) throws ServletException {
try {
String principal = config.getProperty(PRINCIPAL);
if (principal == null || principal.trim().length() == 0) {
throw new ServletException("Principal not defined in configuration");
}
keytab = config.getProperty(KEYTAB, keytab);
if (keytab == null || keytab.trim().length() == 0) {
throw new ServletException("Keytab not defined in configuration");
}
File keytabFile = new File(keytab);
if (!keytabFile.exists()) {
throw new ServletException("Keytab does not exist: " + keytab);
}
// use all SPNEGO principals in the keytab if a principal isn't
// specifically configured
final String[] spnegoPrincipals;
if (principal.equals("*")) {
spnegoPrincipals = KerberosUtil.getPrincipalNames(
keytab, Pattern.compile("HTTP/.*"));
if (spnegoPrincipals.length == 0) {
throw new ServletException("Principals do not exist in the keytab");
}
} else {
spnegoPrincipals = new String[]{principal};
}
KeyTab keytabInstance = KeyTab.getInstance(keytabFile);
serverSubject.getPrivateCredentials().add(keytabInstance);
for (String spnegoPrincipal : spnegoPrincipals) {
Principal krbPrincipal = new KerberosPrincipal(spnegoPrincipal);
LOG.info("Using keytab {}, for principal {}",
keytab, krbPrincipal);
serverSubject.getPrincipals().add(krbPrincipal);
}
// Optional auth_to_local name rules and rule mechanism.
String nameRules = config.getProperty(NAME_RULES, null);
if (nameRules != null) {
KerberosName.setRules(nameRules);
}
String ruleMechanism = config.getProperty(RULE_MECHANISM, null);
if (ruleMechanism != null) {
KerberosName.setRuleMechanism(ruleMechanism);
}
// Optional endpoint whitelist: comma/newline separated paths starting with '/'.
final String whitelistStr = config.getProperty(ENDPOINT_WHITELIST, null);
if (whitelistStr != null) {
final String[] strs = whitelistStr.trim().split("\\s*[,\n]\\s*");
for (String s: strs) {
if (s.isEmpty()) continue;
if (ENDPOINT_PATTERN.matcher(s).matches()) {
whitelist.add(s);
} else {
throw new ServletException(
"The element of the whitelist: " + s + " must start with '/'"
+ " and must not contain special characters afterwards");
}
}
}
// GSSManager must be created with the server Subject's credentials in scope.
try {
gssManager = Subject.doAs(serverSubject,
new PrivilegedExceptionAction<GSSManager>() {
@Override
public GSSManager run() throws Exception {
return GSSManager.getInstance();
}
});
} catch (PrivilegedActionException ex) {
throw ex.getException();
}
} catch (Exception ex) {
// Wrap every initialization failure in a ServletException for the container.
throw new ServletException(ex);
}
}
|
/**
 * After init, the handler must expose the configured keytab and exactly one
 * Kerberos principal matching the test server principal.
 */
@Test
public void testInit() throws Exception {
Assert.assertEquals(KerberosTestUtils.getKeytabFile(), handler.getKeytab());
Set<KerberosPrincipal> principals = handler.getPrincipals();
Principal expectedPrincipal =
new KerberosPrincipal(KerberosTestUtils.getServerPrincipal());
Assert.assertTrue(principals.contains(expectedPrincipal));
Assert.assertEquals(1, principals.size());
}
|
/**
 * Parses a timestamp string and resolves it against a default ZonedDateTime,
 * applying every ChronoField present in the parse result. The zone embedded in
 * the text wins over {@code zoneId}; a leap day (day-of-year 366) requires an
 * explicit year and forces the year before the other fields are applied.
 *
 * @param text   the timestamp text to parse with the configured formatter
 * @param zoneId fallback zone when the text carries none
 * @return the fully resolved ZonedDateTime
 * @throws KsqlException on unsupported fields or a leap day without a year
 */
@VisibleForTesting
ZonedDateTime parseZoned(final String text, final ZoneId zoneId) {
final TemporalAccessor parsed = formatter.parse(text);
final ZoneId parsedZone = parsed.query(TemporalQueries.zone());
ZonedDateTime resolved = DEFAULT_ZONED_DATE_TIME.apply(
ObjectUtils.defaultIfNull(parsedZone, zoneId));
for (final TemporalField override : ChronoField.values()) {
if (parsed.isSupported(override)) {
if (!resolved.isSupported(override)) {
throw new KsqlException(
"Unsupported temporal field in timestamp: " + text + " (" + override + ")");
}
final long value = parsed.getLong(override);
if (override == ChronoField.DAY_OF_YEAR && value == LEAP_DAY_OF_THE_YEAR) {
if (!parsed.isSupported(ChronoField.YEAR)) {
throw new KsqlException("Leap day cannot be parsed without supplying the year field");
}
// eagerly override year, to avoid mismatch with epoch year, which is not a leap year
resolved = resolved.withYear(parsed.get(ChronoField.YEAR));
}
resolved = resolved.with(override, value);
}
}
return resolved;
}
|
/**
 * A day-of-year pattern ("yyyy-DDD HH") must resolve to the same instant as the
 * equivalent calendar date at the given hour.
 */
@Test
public void shouldParseDateTimeWithDayOfYear() {
// Given
final String format = "yyyy-DDD HH";
final String timestamp = String.format("1605-%d 10", FIFTH_OF_NOVEMBER.getDayOfYear());
// When
final ZonedDateTime ts = new StringToTimestampParser(format).parseZoned(timestamp, ZID);
// Then
assertThat(ts, is(sameInstant(FIFTH_OF_NOVEMBER.withHour(10))));
}
|
/**
 * Decides whether a failed request should be retried. A retry is refused when the
 * retry budget is exhausted, the exception is on the non-retriable list (exact
 * class or subclass), or the request was cancelled; otherwise retry only if the
 * HTTP method is idempotent.
 *
 * @param request   the request that failed
 * @param exception the I/O failure that occurred
 * @param execCount how many times the request has been executed so far
 * @param context   the execution context (unused here)
 * @return true if the request should be retried
 */
@Override
public boolean retryRequest(
    HttpRequest request, IOException exception, int execCount, HttpContext context) {
    // Retry budget exhausted.
    if (execCount > maxRetries) {
        return false;
    }
    // Exact-class fast path against the deny list.
    if (nonRetriableExceptions.contains(exception.getClass())) {
        return false;
    }
    // Subclass scan over the same deny list.
    for (final Class<? extends IOException> nonRetriable : nonRetriableExceptions) {
        if (nonRetriable.isInstance(exception)) {
            return false;
        }
    }
    // Never replay a request the caller has already aborted.
    if (request instanceof CancellableDependency
            && ((CancellableDependency) request).isCancelled()) {
        return false;
    }
    // Retry if the request is considered idempotent
    return Method.isIdempotent(request.getMethod());
}
|
/**
 * A cancelled request must never be retried, even within the retry budget.
 */
@Test
public void noRetryOnAbortedRequests() {
HttpGet request = new HttpGet("/");
request.cancel();
assertThat(retryStrategy.retryRequest(request, new IOException(), 1, null)).isFalse();
}
|
/**
 * Builds a fresh ServiceConfig from the builder's accumulated state. Each call
 * produces a new instance. Note: setInterface is invoked twice on purpose —
 * these are distinct overloads for the interface name and the interface class.
 *
 * @return a newly constructed ServiceConfig populated from this builder
 */
@Override
public ServiceConfig<U> build() {
ServiceConfig<U> serviceConfig = new ServiceConfig<>();
super.build(serviceConfig);
serviceConfig.setInterface(interfaceName);
serviceConfig.setInterface(interfaceClass);
serviceConfig.setRef(ref);
serviceConfig.setPath(path);
serviceConfig.setMethods(methods);
serviceConfig.setProvider(provider);
serviceConfig.setProviderIds(providerIds);
serviceConfig.setGeneric(generic);
return serviceConfig;
}
|
/**
 * Verifies the builder copies every configured property into the resulting
 * ServiceConfig and that successive build() calls return distinct instances.
 */
@Test
void build() {
MethodConfig method = new MethodConfig();
ProviderConfig provider = new ProviderConfig();
ServiceBuilder builder = new ServiceBuilder();
builder.path("path")
.addMethod(method)
.provider(provider)
.providerIds("providerIds")
.generic(GENERIC_SERIALIZATION_DEFAULT);
ServiceConfig config = builder.build();
ServiceConfig config2 = builder.build();
assertThat(config.getGeneric(), equalTo(GENERIC_SERIALIZATION_DEFAULT));
Assertions.assertEquals("path", config.getPath());
Assertions.assertEquals("providerIds", config.getProviderIds());
Assertions.assertSame(provider, config.getProvider());
Assertions.assertTrue(config.getMethods().contains(method));
Assertions.assertEquals(1, config.getMethods().size());
// Each build() must create a new, independent instance.
Assertions.assertNotSame(config, config2);
}
|
/**
 * Maps a Hadoop compression codec class name to the corresponding Avro
 * {@link CodecFactory}.
 *
 * @param hadoopCodecClass fully qualified Hadoop codec class name
 * @return the matching Avro codec factory, or {@code null} if the name is
 *         not present in the mapping
 * @throws AvroRuntimeException if the mapped Avro codec cannot be constructed
 */
public static CodecFactory fromHadoopString(String hadoopCodecClass) {
  try {
    final String avroCodec = HADOOP_AVRO_NAME_MAP.get(hadoopCodecClass);
    return avroCodec == null ? null : CodecFactory.fromString(avroCodec);
  } catch (Exception e) {
    throw new AvroRuntimeException("Unrecognized hadoop codec: " + hadoopCodecClass, e);
  }
}
|
@Test
void hadoopCodecFactoryGZip() {
  // Locals renamed: this test exercises the GZip -> deflate mapping; the
  // previous names ("hadoopSnappyCodec"/"avroSnappyCodec") were misleading.
  CodecFactory hadoopGzipCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.GZipCodec");
  CodecFactory avroDeflateCodec = CodecFactory.fromString("deflate");
  assertEquals(hadoopGzipCodec.getClass(), avroDeflateCodec.getClass());
}
|
/**
 * Filters an outgoing stream request: advertises the accepted response
 * encodings when the operation qualifies, and compresses the request entity
 * when request compression is enabled and the entity meets the size threshold.
 * When compression applies, the entity is handed to a {@code PartialReader}
 * and the next filter is invoked asynchronously from its callback.
 */
public void onStreamRequest(StreamRequest req, final RequestContext requestContext, final Map<String, String> wireAttrs,
                            final NextFilter<StreamRequest, StreamResponse> nextFilter)
{
  //Set accepted encoding for compressed response
  String operation = (String) requestContext.getLocalAttr(R2Constants.OPERATION);
  if (!_acceptEncodingHeader.isEmpty() && _helper.shouldCompressResponseForOperation(operation))
  {
    CompressionOption responseCompressionOverride =
        (CompressionOption) requestContext.getLocalAttr(R2Constants.RESPONSE_COMPRESSION_OVERRIDE);
    req = addResponseCompressionHeaders(responseCompressionOverride, req);
  }
  if (_requestContentEncoding != StreamEncodingType.IDENTITY)
  {
    // Effectively-final copy for use inside the callback below.
    final StreamRequest request = req;
    final StreamingCompressor compressor = _requestContentEncoding.getCompressor(_executor);
    CompressionOption option = (CompressionOption) requestContext.getLocalAttr(R2Constants.REQUEST_COMPRESSION_OVERRIDE);
    if (option == null || option != CompressionOption.FORCE_OFF)
    {
      // FORCE_ON compresses regardless of size; otherwise the configured threshold applies.
      final int threshold = option == CompressionOption.FORCE_ON ? 0 : _requestCompressionConfig.getCompressionThreshold();
      PartialReader reader = new PartialReader(threshold, new Callback<EntityStream[]>()
      {
        @Override
        public void onError(Throwable ex)
        {
          nextFilter.onError(ex, requestContext, wireAttrs);
        }
        @Override
        public void onSuccess(EntityStream[] result)
        {
          if (result.length == 1)
          {
            // Entity was below the threshold: forward uncompressed.
            StreamRequest uncompressedRequest = request.builder().build(result[0]);
            nextFilter.onRequest(uncompressedRequest, requestContext, wireAttrs);
          }
          else
          {
            // Entity exceeded the threshold: recombine the partial streams,
            // compress, drop the now-stale Content-Length and mark the encoding.
            StreamRequestBuilder builder = request.builder();
            EntityStream compressedStream = compressor.deflate(EntityStreams.newEntityStream(new CompositeWriter(result)));
            Map<String, String> headers = stripHeaders(builder.getHeaders(), HttpConstants.CONTENT_LENGTH);
            StreamRequest compressedRequest = builder.setHeaders(headers)
                .setHeader(HttpConstants.CONTENT_ENCODING, compressor.getContentEncodingName())
                .build(compressedStream);
            nextFilter.onRequest(compressedRequest, requestContext, wireAttrs);
          }
        }
      });
      req.getEntityStream().setReader(reader);
      // The callback above continues the filter chain asynchronously.
      return;
    }
  }
  nextFilter.onRequest(req, requestContext, wireAttrs);
}
|
@Test(dataProvider = "requestData")
public void testAcceptEncodingHeader(CompressionConfig requestCompressionConfig,
                                     CompressionOption requestCompressionOverride,
                                     boolean headerShouldBePresent,
                                     String operation)
    throws CompressionException, URISyntaxException, InterruptedException, ExecutionException, TimeoutException {
  Executor executor = Executors.newCachedThreadPool();
  ClientStreamCompressionFilter clientCompressionFilter = new ClientStreamCompressionFilter(
      StreamEncodingType.GZIP.getHttpName(),
      requestCompressionConfig,
      ACCEPT_COMPRESSIONS,
      new CompressionConfig(Integer.MAX_VALUE),
      Arrays.asList(ClientCompressionHelper.COMPRESS_ALL_RESPONSES_INDICATOR),
      executor);
  // The entity should be compressible for this test.
  int original = 100;
  byte[] entity = new byte[original];
  Arrays.fill(entity, (byte)'A');
  StreamRequest streamRequest =
      new StreamRequestBuilder(new URI(URI))
          .setMethod(RestMethod.POST)
          .build(EntityStreams.newEntityStream(new ByteStringWriter(ByteString.copy(entity))));
  int compressed = EncodingType.GZIP.getCompressor().deflate(new ByteArrayInputStream(entity)).length;
  RequestContext context = new RequestContext();
  if (operation != null)
  {
    context.putLocalAttr(R2Constants.OPERATION, operation);
  }
  context.putLocalAttr(R2Constants.REQUEST_COMPRESSION_OVERRIDE, requestCompressionOverride);
  // NOTE(review): entityLength is computed but never asserted against --
  // consider removing it or verifying the forwarded entity length.
  int entityLength = headerShouldBePresent ? compressed : original;
  // HeaderCaptureFilter asserts the presence of the Accept-Encoding header.
  clientCompressionFilter.onStreamRequest(streamRequest, context, Collections.<String, String>emptyMap(),
      new HeaderCaptureFilter(HttpConstants.ACCEPT_ENCODING, true, null));
}
|
/**
 * Integer overload of atan2: widens both coordinates to doubles and delegates
 * to the double overload. Null inputs propagate as null.
 */
@Udf(description = "Returns the inverse (arc) tangent of y / x")
public Double atan2(
    @UdfParameter(
        value = "y",
        description = "The ordinate (y) coordinate."
    ) final Integer y,
    @UdfParameter(
        value = "x",
        description = "The abscissa (x) coordinate."
    ) final Integer x
) {
  return atan2(y == null ? null : y.doubleValue(), x == null ? null : x.doubleValue());
}
|
@Test
public void shouldHandleNegativeYZeroX() {
  // atan2(negative, 0) is -pi/2 regardless of magnitude; exercised across
  // the double, int and long overloads.
  assertThat(udf.atan2(-1.1, 0.0), closeTo(-1.5707963267948966, 0.000000000000001));
  assertThat(udf.atan2(-6.0, 0.0), closeTo(-1.5707963267948966, 0.000000000000001));
  assertThat(udf.atan2(-2, 0), closeTo(-1.5707963267948966, 0.000000000000001));
  assertThat(udf.atan2(-2L, 0L), closeTo(-1.5707963267948966, 0.000000000000001));
}
|
/**
 * Compares local and remote files by size alone. Returns {@code unknown} when
 * either size is unavailable; an empty side loses to the non-empty side.
 */
@Override
public Comparison compare(final Path.Type type, final PathAttributes local, final PathAttributes remote) {
    final long localSize = local.getSize();
    final long remoteSize = remote.getSize();
    if(TransferStatus.UNKNOWN_LENGTH == localSize || TransferStatus.UNKNOWN_LENGTH == remoteSize) {
        // At least one size is unavailable; no verdict possible
        return Comparison.unknown;
    }
    if(localSize == remoteSize) {
        if(log.isDebugEnabled()) {
            log.debug(String.format("Equal size %s", remote.getSize()));
        }
        return Comparison.equal;
    }
    if(remoteSize == 0) {
        // Remote is empty; local copy wins
        return Comparison.local;
    }
    if(localSize == 0) {
        // Local is empty; remote copy wins
        return Comparison.remote;
    }
    if(log.isDebugEnabled()) {
        log.debug(String.format("Local size %s not equal remote %s", local.getSize(), remote.getSize()));
    }
    // Different file size
    return Comparison.notequal;
}
|
@Test
public void testCompare() {
    ComparisonService s = new SizeComparisonService();
    // Empty local -> prefer remote; empty remote -> prefer local
    assertEquals(Comparison.remote, s.compare(Path.Type.file, new PathAttributes().withSize(0L), new PathAttributes().withSize(1L)));
    assertEquals(Comparison.local, s.compare(Path.Type.file, new PathAttributes().withSize(1L), new PathAttributes().withSize(0L)));
    assertEquals(Comparison.equal, s.compare(Path.Type.file, new PathAttributes().withSize(1L), new PathAttributes().withSize(1L)));
    assertEquals(Comparison.notequal, s.compare(Path.Type.file, new PathAttributes().withSize(2L), new PathAttributes().withSize(1L)));
}
|
/**
 * Generates a QR code for the given content and writes it to the target file.
 * The output format is selected by the file extension: {@code svg} and
 * {@code txt} produce vector/ASCII-art text output, anything else an image.
 *
 * @param content    content to encode
 * @param width      output width
 * @param height     output height
 * @param targetFile destination file; its extension selects the output format
 * @return the target file that was written
 */
public static File generate(String content, int width, int height, File targetFile) {
    String extName = FileUtil.extName(targetFile);
    // extName may be null when the file has no extension; fall through to the
    // default image branch instead of raising an NPE from switch(null).
    switch (extName == null ? "" : extName) {
        case QR_TYPE_SVG:
            String svg = generateAsSvg(content, new QrConfig(width, height));
            FileUtil.writeString(svg, targetFile, StandardCharsets.UTF_8);
            break;
        case QR_TYPE_TXT:
            String txt = generateAsAsciiArt(content, new QrConfig(width, height));
            FileUtil.writeString(txt, targetFile, StandardCharsets.UTF_8);
            break;
        default:
            final BufferedImage image = generate(content, width, height);
            ImgUtil.write(image, targetFile);
            break;
    }
    return targetFile;
}
|
@Test
@Disabled
public void generateWithLogoTest() {
    // Manual test: writes a QR code with an embedded logo to a local path,
    // which depends on files existing on the developer's machine.
    final String icon = FileUtil.isWindows() ? "d:/test/pic/face.jpg" : "~/Desktop/hutool/pic/face.jpg";
    final String targetPath = FileUtil.isWindows() ? "d:/test/qrcodeWithLogo.jpg" : "~/Desktop/hutool/qrcodeWithLogo.jpg";
    QrCodeUtil.generate(//
        "https://hutool.cn/", //
        QrConfig.create().setImg(icon), //
        FileUtil.touch(targetPath));
}
|
/**
 * Merges the given parameters into this builder's parameter map.
 *
 * @param appendParameters parameters to merge; presumably later values
 *        override existing keys -- TODO(review): confirm in the static
 *        appendParameters(Map, Map) helper
 * @return this builder for chaining
 */
public ConfigCenterBuilder appendParameters(Map<String, String> appendParameters) {
    this.parameters = appendParameters(this.parameters, appendParameters);
    return getThis();
}
|
@Test
void appendParameters() {
    Map<String, String> source = new HashMap<>();
    source.put("default.num", "one");
    source.put("num", "ONE");
    ConfigCenterBuilder builder = ConfigCenterBuilder.newBuilder();
    builder.appendParameters(source);
    Map<String, String> parameters = builder.build().getParameters();
    // Both keys must survive the merge into the built config
    Assertions.assertTrue(parameters.containsKey("default.num"));
    Assertions.assertEquals("ONE", parameters.get("num"));
}
|
/**
 * Handles a request to write task configurations for a connector.
 * Requires a valid internal request signature (when enforced), and only the
 * leader may perform the write for a connector known to the config state.
 * The outcome is reported through {@code callback}.
 */
@Override
public void putTaskConfigs(final String connName, final List<Map<String, String>> configs, final Callback<Void> callback, InternalRequestSignature requestSignature) {
    log.trace("Submitting put task configuration request {}", connName);
    // Signature check completes the callback with an error itself on failure.
    if (requestNotSignedProperly(requestSignature, callback)) {
        return;
    }
    // Defer the actual write to the herder's request queue (tick thread).
    addRequest(
        () -> {
            if (!isLeader())
                callback.onCompletion(new NotLeaderException("Only the leader may write task configurations.", leaderUrl()), null);
            else if (!configState.contains(connName))
                callback.onCompletion(new NotFoundException("Connector " + connName + " not found"), null);
            else {
                writeTaskConfigs(connName, configs);
                callback.onCompletion(null, null);
            }
            return null;
        },
        forwardErrorAndTickThreadStages(callback)
    );
}
|
@Test
public void testPutTaskConfigsSignatureNotRequiredV1() {
    // Under protocol V1 a null request signature must be accepted.
    when(member.currentProtocolVersion()).thenReturn(CONNECT_PROTOCOL_V1);
    Callback<Void> taskConfigCb = mock(Callback.class);
    List<String> stages = expectRecordStages(taskConfigCb);
    herder.putTaskConfigs(CONN1, TASK_CONFIGS, taskConfigCb, null);
    // Expect a wakeup call after the request to write task configs is added to the herder's request queue
    verify(member).wakeup();
    verifyNoMoreInteractions(member, taskConfigCb);
    assertEquals(
        singletonList("awaiting startup"),
        stages
    );
}
|
/**
 * Returns the set of flags this FST was built or loaded with.
 */
@Override
public Set<FSTFlags> getFlags() {
  return _flags;
}
|
@Test
public void testVersion5()
    throws IOException {
  // Loads a version-5 native FST resource and verifies its flags and contents.
  try (InputStream inputStream = getClass().getClassLoader().getResourceAsStream("data/abc.native.fst")) {
    FST fst = FST.read(inputStream);
    // This fixture was built without the NUMBERS flag.
    assertFalse(fst.getFlags().contains(FSTFlags.NUMBERS));
    verifyContent(fst, _expected);
  }
}
|
/**
 * Returns the current count of failed application-creation attempts.
 */
@VisibleForTesting
public int getAppsFailedCreated() {
  return numAppsFailedCreated.value();
}
|
@Test
public void testAppsFailedCreated() {
  long totalBadbefore = metrics.getAppsFailedCreated();
  // A getNewApplication call against the bad subcluster must fail and
  // increment the failure counter by exactly one.
  badSubCluster.getNewApplication();
  Assert.assertEquals(totalBadbefore + 1, metrics.getAppsFailedCreated());
}
|
/**
 * Records a usage entry for a finished step; only passed pickle steps
 * contribute to usage statistics.
 */
void handleTestStepFinished(TestStepFinished event) {
    // Ignore hooks and other non-pickle steps.
    if (!(event.getTestStep() instanceof PickleStepTestStep)) {
        return;
    }
    // Only passed results are counted.
    if (!event.getResult().getStatus().is(Status.PASSED)) {
        return;
    }
    PickleStepTestStep testStep = (PickleStepTestStep) event.getTestStep();
    addUsageEntry(event.getResult(), testStep);
}
|
@Test
void resultWithZeroDuration() {
    OutputStream out = new ByteArrayOutputStream();
    UsageFormatter usageFormatter = new UsageFormatter(out);
    TestStep testStep = mockTestStep();
    // A passed step with Duration.ZERO must still be recorded, as 0.0 seconds.
    Result result = new Result(Status.PASSED, Duration.ZERO, null);
    usageFormatter
        .handleTestStepFinished(new TestStepFinished(Instant.EPOCH, mock(TestCase.class), testStep, result));
    Map<String, List<UsageFormatter.StepContainer>> usageMap = usageFormatter.usageMap;
    assertThat(usageMap.size(), is(equalTo(1)));
    List<UsageFormatter.StepContainer> durationEntries = usageMap.get("stepDef");
    assertThat(durationEntries.size(), is(equalTo(1)));
    assertThat(durationEntries.get(0).getName(), is(equalTo("step")));
    assertThat(durationEntries.get(0).getDurations().size(), is(equalTo(1)));
    assertThat(durationEntries.get(0).getDurations().get(0).getDuration(), is(equalTo(0.0)));
}
|
/**
 * Exports this service. Idempotent: repeated calls are no-ops (checked both
 * before and after acquiring the lock). Ensures the owning module is
 * prepared/started, refreshes the config if needed, then exports either
 * delayed, manually-registered, or with the requested registration type.
 */
@Override
public void export(RegisterTypeEnum registerType) {
    if (this.exported) {
        return;
    }
    if (getScopeModel().isLifeCycleManagedExternally()) {
        // prepare model for reference
        getScopeModel().getDeployer().prepare();
    } else {
        // ensure start module, compatible with old api usage
        getScopeModel().getDeployer().start();
    }
    synchronized (this) {
        // Double-check under the lock: another thread may have exported already.
        if (this.exported) {
            return;
        }
        if (!this.isRefreshed()) {
            this.refresh();
        }
        if (this.shouldExport()) {
            this.init();
            if (shouldDelay()) {
                // should register if delay export
                doDelayExport();
            } else if (Integer.valueOf(-1).equals(getDelay())
                    && Boolean.parseBoolean(ConfigurationUtils.getProperty(
                            getScopeModel(), CommonConstants.DUBBO_MANUAL_REGISTER_KEY, "false"))) {
                // should not register by default
                doExport(RegisterTypeEnum.MANUAL_REGISTER);
            } else {
                doExport(registerType);
            }
        }
    }
}
|
@Test
void testMethodConfigWithUnknownArgumentType() {
    // An ArgumentConfig without a matching argument index/type on the target
    // method must make export() fail fast with IllegalArgumentException.
    Assertions.assertThrows(IllegalArgumentException.class, () -> {
        ServiceConfig<DemoServiceImpl> service = new ServiceConfig<>();
        service.setInterface(DemoService.class);
        service.setRef(new DemoServiceImpl());
        service.setProtocol(new ProtocolConfig() {
            {
                setName("dubbo");
            }
        });
        MethodConfig methodConfig = new MethodConfig();
        methodConfig.setName("sayName");
        // invalid argument index.
        methodConfig.setArguments(Lists.newArrayList(new ArgumentConfig() {
            {
                setType(Integer.class.getName());
                setCallback(false);
            }
        }));
        service.setMethods(Lists.newArrayList(methodConfig));
        service.export();
    });
}
|
/**
 * Deprecated single-argument variant: delegates to the {@code Produced}-based
 * overload using this stream's own key/value serdes.
 */
@Deprecated
@Override
public KStream<K, V> through(final String topic) {
    return through(topic, Produced.with(keySerde, valueSerde, null));
}
|
@Deprecated // specifically testing the deprecated variant
@Test
public void shouldNotAllowNullTopicOnThrough() {
    // A null topic must be rejected with a descriptive NPE
    final NullPointerException exception = assertThrows(
        NullPointerException.class,
        () -> testStream.through(null));
    assertThat(exception.getMessage(), equalTo("topic can't be null"));
}
|
/**
 * Validates a DROP READWRITE_SPLITTING RULE statement before execution.
 */
@Override
public void checkBeforeUpdate(final DropReadwriteSplittingRuleStatement sqlStatement) {
    // IF EXISTS suppresses the "rule must exist" check
    if (!sqlStatement.isIfExists()) {
        checkToBeDroppedRuleNames(sqlStatement);
    }
    // A rule still referenced elsewhere may never be dropped
    checkToBeDroppedInUsed(sqlStatement);
}
|
@Test
void assertCheckSQLStatementWithInUsed() throws RuleDefinitionException {
    // Arrange rule metadata so the to-be-dropped rule is referenced by a data
    // node ("readwrite_ds"), which must cause an InUsedRuleException.
    DataSourceMapperRuleAttribute dataSourceMapperRuleAttribute = mock(DataSourceMapperRuleAttribute.class);
    when(database.getRuleMetaData().getAttributes(DataSourceMapperRuleAttribute.class)).thenReturn(Collections.singleton(dataSourceMapperRuleAttribute));
    DataNodeRuleAttribute dataNodeRuleAttribute = mock(DataNodeRuleAttribute.class);
    when(dataNodeRuleAttribute.getAllDataNodes()).thenReturn(Collections.singletonMap("foo_ds", Collections.singleton(new DataNode("readwrite_ds.tbl"))));
    ReadwriteSplittingRule rule = mock(ReadwriteSplittingRule.class);
    when(rule.getAttributes()).thenReturn(new RuleAttributes(dataNodeRuleAttribute));
    when(database.getRuleMetaData().getRules()).thenReturn(Collections.singleton(rule));
    executor.setDatabase(database);
    when(rule.getConfiguration()).thenReturn(createCurrentRuleConfiguration());
    executor.setRule(rule);
    assertThrows(InUsedRuleException.class, () -> executor.checkBeforeUpdate(createSQLStatement()));
}
|
/**
 * Checks whether a newer version of the tool is available, at most once per
 * day. The time of the last check is persisted in {@code configDir}; the
 * check itself fetches a small version document from {@code versionUrl}.
 *
 * @return the latest version string if it differs from {@code currentVersion},
 *         otherwise empty (also empty on any failure, which is only logged)
 */
@VisibleForTesting
static Optional<String> performUpdateCheck(
    Path configDir,
    String currentVersion,
    String versionUrl,
    String toolName,
    Consumer<LogEvent> log) {
  Path lastUpdateCheck = configDir.resolve(LAST_UPDATE_CHECK_FILENAME);
  try {
    // Check time of last update check
    if (Files.exists(lastUpdateCheck)) {
      try {
        String fileContents =
            new String(Files.readAllBytes(lastUpdateCheck), StandardCharsets.UTF_8);
        Instant modifiedTime = Instant.parse(fileContents);
        // Less than a day since the last check: skip.
        if (modifiedTime.plus(Duration.ofDays(1)).isAfter(Instant.now())) {
          return Optional.empty();
        }
      } catch (DateTimeParseException | IOException ex) {
        // If reading update time failed, file might be corrupt, so delete it
        log.accept(LogEvent.debug("Failed to read lastUpdateCheck; " + ex.getMessage()));
        Files.delete(lastUpdateCheck);
      }
    }
    // Check for update
    FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});
    try {
      Response response =
          httpClient.get(
              new URL(versionUrl),
              Request.builder()
                  .setHttpTimeout(3000)
                  .setUserAgent("jib " + currentVersion + " " + toolName)
                  .build());
      VersionJsonTemplate version =
          JsonTemplateMapper.readJson(response.getBody(), VersionJsonTemplate.class);
      // Record the check time via temp-file + atomic-ish move to avoid a
      // partially written timestamp file.
      Path lastUpdateCheckTemp =
          Files.createTempFile(configDir, LAST_UPDATE_CHECK_FILENAME, null);
      lastUpdateCheckTemp.toFile().deleteOnExit();
      Files.write(lastUpdateCheckTemp, Instant.now().toString().getBytes(StandardCharsets.UTF_8));
      Files.move(lastUpdateCheckTemp, lastUpdateCheck, StandardCopyOption.REPLACE_EXISTING);
      if (currentVersion.equals(version.latest)) {
        return Optional.empty();
      }
      return Optional.of(version.latest);
    } finally {
      httpClient.shutDown();
    }
  } catch (IOException ex) {
    // Update checks are best-effort; never fail the build over them.
    log.accept(LogEvent.debug("Update check failed; " + ex.getMessage()));
  }
  return Optional.empty();
}
|
@Test
public void testPerformUpdateCheck_noLastUpdateCheck() throws IOException, InterruptedException {
  Instant before = Instant.now();
  // Small sleep so the written lastUpdateCheck timestamp is strictly after 'before'.
  Thread.sleep(100);
  Optional<String> message =
      UpdateChecker.performUpdateCheck(
          configDir, "1.0.2", testWebServer.getEndpoint(), "tool-name", ignored -> {});
  assertThat(message).hasValue("2.0.0");
  // The check must persist the time it ran.
  String modifiedTime =
      new String(
          Files.readAllBytes(configDir.resolve("lastUpdateCheck")), StandardCharsets.UTF_8);
  assertThat(Instant.parse(modifiedTime)).isGreaterThan(before);
}
|
/**
 * Decrypts a JSON wallet file and returns the contained credentials.
 *
 * @param password password protecting the wallet
 * @param content  JSON wallet file content
 * @return credentials recovered from the decrypted wallet
 * @throws IOException     if the JSON cannot be parsed
 * @throws CipherException if decryption fails (e.g. wrong password)
 */
public static Credentials loadJsonCredentials(String password, String content)
    throws IOException, CipherException {
  WalletFile walletFile = objectMapper.readValue(content, WalletFile.class);
  return Credentials.create(Wallet.decrypt(password, walletFile));
}
|
@Test
public void testLoadJsonCredentials() throws Exception {
    // Decrypt a checked-in UTC keyfile fixture and compare against the
    // known-good CREDENTIALS constant.
    Credentials credentials =
        WalletUtils.loadJsonCredentials(
            PASSWORD,
            convertStreamToString(
                WalletUtilsTest.class.getResourceAsStream(
                    "/keyfiles/"
                        + "UTC--2016-11-03T05-55-06."
                        + "340672473Z--ef678007d18427e6022059dbc264f27507cd1ffc")));
    assertEquals(credentials, (CREDENTIALS));
}
|
/**
 * @return {@code true} if more than one source schema is present,
 *         i.e. this set of sources represents a join.
 */
public boolean isJoin() {
  return sourceSchemas.size() > 1;
}
|
@Test
public void shouldNotBeJoinIfSingleSchema() {
  // When: only one source schema is registered
  sourceSchemas = new SourceSchemas(ImmutableMap.of(ALIAS_1, SCHEMA_1));
  // Then
  assertThat(sourceSchemas.isJoin(), is(false));
}
|
/**
 * Validates an issued-at style claim: the value may be absent ({@code null})
 * or any non-negative number.
 *
 * @param claimName  claim name, used in the error message
 * @param claimValue claim value to validate; may be {@code null}
 * @return the validated claim value (possibly {@code null})
 * @throws ValidateException if the value is negative
 */
public static Long validateIssuedAt(String claimName, Long claimValue) throws ValidateException {
    final boolean isNegative = claimValue != null && claimValue < 0;
    if (isNegative) {
        throw new ValidateException(String.format("%s value must be null or non-negative; value given was \"%s\"", claimName, claimValue));
    }
    return claimValue;
}
|
@Test
public void testValidateIssuedAtAllowsNull() {
    // A null "iat" claim is valid and must be returned unchanged.
    Long expected = null;
    Long actual = ClaimValidationUtils.validateIssuedAt("iat", expected);
    assertEquals(expected, actual);
}
|
/**
 * Aggregates binary column statistics across partitions: max column length is
 * the overall max, average length is the max of the averages (a conservative
 * upper bound), and null counts are summed.
 */
@Override
public ColumnStatisticsObj aggregate(List<ColStatsObjWithSourceInfo> colStatsWithSourceInfo,
    List<String> partNames, boolean areAllPartsFound) throws MetaException {
  checkStatisticsList(colStatsWithSourceInfo);
  ColumnStatisticsObj statsObj = null;
  String colType;
  String colName;
  BinaryColumnStatsData aggregateData = null;
  for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
    ColumnStatisticsObj cso = csp.getColStatsObj();
    // Initialize the result object from the first entry's column name/type.
    if (statsObj == null) {
      colName = cso.getColName();
      colType = cso.getColType();
      statsObj = ColumnStatsAggregatorFactory.newColumnStaticsObj(colName, colType,
          cso.getStatsData().getSetField());
    }
    BinaryColumnStatsData newData = cso.getStatsData().getBinaryStats();
    if (aggregateData == null) {
      // Deep copy so later merges don't mutate the source statistics.
      aggregateData = newData.deepCopy();
    } else {
      aggregateData.setMaxColLen(Math.max(aggregateData.getMaxColLen(), newData.getMaxColLen()));
      aggregateData.setAvgColLen(Math.max(aggregateData.getAvgColLen(), newData.getAvgColLen()));
      aggregateData.setNumNulls(aggregateData.getNumNulls() + newData.getNumNulls());
    }
  }
  ColumnStatisticsData columnStatisticsData = initColumnStatisticsData();
  columnStatisticsData.setBinaryStats(aggregateData);
  statsObj.setStatsData(columnStatisticsData);
  return statsObj;
}
|
@Test
public void testAggregateMultiStatsWhenOnlySomeAvailable() throws MetaException {
  // Stats exist for only 3 of 4 partitions; aggregation must still work:
  // numNulls sums (1+3+2=6), avgColLen/maxColLen take the max (17.5 / 18).
  List<String> partitions = Arrays.asList("part1", "part2", "part3", "part4");
  ColumnStatisticsData data1 = new ColStatsBuilder<>(byte[].class).numNulls(1).avgColLen(20.0 / 3).maxColLen(13).build();
  ColumnStatisticsData data3 = new ColStatsBuilder<>(byte[].class).numNulls(3).avgColLen(17.5).maxColLen(18).build();
  ColumnStatisticsData data4 = new ColStatsBuilder<>(byte[].class).numNulls(2).avgColLen(14).maxColLen(18).build();
  List<ColStatsObjWithSourceInfo> statsList = Arrays.asList(
      createStatsWithInfo(data1, TABLE, COL, partitions.get(0)),
      createStatsWithInfo(data3, TABLE, COL, partitions.get(2)),
      createStatsWithInfo(data4, TABLE, COL, partitions.get(3)));
  BinaryColumnStatsAggregator aggregator = new BinaryColumnStatsAggregator();
  ColumnStatisticsObj computedStatsObj = aggregator.aggregate(statsList, partitions, false);
  ColumnStatisticsData expectedStats = new ColStatsBuilder<>(byte[].class).numNulls(6).avgColLen(17.5).maxColLen(18).build();
  Assert.assertEquals(expectedStats, computedStatsObj.getStatsData());
}
|
/**
 * Reconciles all Kafka Exporter resources in sequence: service account,
 * certificate secret, network policy, deployment, and finally waits for the
 * deployment to become ready.
 */
public Future<Void> reconcile(boolean isOpenShift, ImagePullPolicy imagePullPolicy, List<LocalObjectReference> imagePullSecrets, Clock clock) {
    return serviceAccount()
            .compose(i -> certificatesSecret(clock))
            .compose(i -> networkPolicy())
            .compose(i -> deployment(isOpenShift, imagePullPolicy, imagePullSecrets))
            .compose(i -> waitForDeploymentReadiness());
}
|
@Test
public void reconcileWithDisabledExporterWithoutNetworkPolicies(VertxTestContext context) {
    // With the exporter disabled and network policy generation turned off,
    // reconciliation must delete (reconcile-to-null) SA, secret and deployment,
    // and never touch network policies.
    ResourceOperatorSupplier supplier = ResourceUtils.supplierWithMocks(false);
    ServiceAccountOperator mockSaOps = supplier.serviceAccountOperations;
    ArgumentCaptor<ServiceAccount> saCaptor = ArgumentCaptor.forClass(ServiceAccount.class);
    when(mockSaOps.reconcile(any(), eq(NAMESPACE), eq(KafkaExporterResources.componentName(NAME)), saCaptor.capture())).thenReturn(Future.succeededFuture());
    SecretOperator mockSecretOps = supplier.secretOperations;
    ArgumentCaptor<Secret> secretCaptor = ArgumentCaptor.forClass(Secret.class);
    when(mockSecretOps.reconcile(any(), eq(NAMESPACE), eq(KafkaExporterResources.secretName(NAME)), secretCaptor.capture())).thenReturn(Future.succeededFuture());
    NetworkPolicyOperator mockNetPolicyOps = supplier.networkPolicyOperator;
    when(mockNetPolicyOps.reconcile(any(), eq(NAMESPACE), eq(KafkaExporterResources.componentName(NAME)), any())).thenReturn(Future.succeededFuture());
    DeploymentOperator mockDepOps = supplier.deploymentOperations;
    ArgumentCaptor<Deployment> depCaptor = ArgumentCaptor.forClass(Deployment.class);
    when(mockDepOps.reconcile(any(), eq(NAMESPACE), eq(KafkaExporterResources.componentName(NAME)), depCaptor.capture())).thenReturn(Future.succeededFuture());
    when(mockDepOps.waitForObserved(any(), eq(NAMESPACE), eq(KafkaExporterResources.componentName(NAME)), anyLong(), anyLong())).thenReturn(Future.succeededFuture());
    when(mockDepOps.readiness(any(), eq(NAMESPACE), eq(KafkaExporterResources.componentName(NAME)), anyLong(), anyLong())).thenReturn(Future.succeededFuture());
    KafkaExporterReconciler reconciler = new KafkaExporterReconciler(
            Reconciliation.DUMMY_RECONCILIATION,
            new ClusterOperatorConfig.ClusterOperatorConfigBuilder(ResourceUtils.dummyClusterOperatorConfig(), KafkaVersionTestUtils.getKafkaVersionLookup())
                    .with(ClusterOperatorConfig.NETWORK_POLICY_GENERATION.key(), "false").build(),
            supplier,
            KAFKA,
            VERSIONS,
            CLUSTER_CA
    );
    Checkpoint async = context.checkpoint();
    reconciler.reconcile(false, null, null, Clock.systemUTC())
            .onComplete(context.succeeding(v -> context.verify(() -> {
                // null captured values mean the resources were reconciled to deletion.
                assertThat(saCaptor.getAllValues().size(), is(1));
                assertThat(saCaptor.getValue(), is(nullValue()));
                assertThat(secretCaptor.getAllValues().size(), is(1));
                assertThat(secretCaptor.getAllValues().get(0), is(nullValue()));
                verify(mockNetPolicyOps, never()).reconcile(any(), eq(NAMESPACE), any(), any());
                assertThat(depCaptor.getAllValues().size(), is(1));
                assertThat(depCaptor.getValue(), is(nullValue()));
                async.flag();
            })));
}
|
/**
 * Lists all B2 buckets as top-level volume directories, translating B2 and
 * I/O failures into background exceptions.
 */
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
    try {
        final AttributedList<Path> buckets = new AttributedList<Path>();
        for(B2BucketResponse bucket : session.getClient().listBuckets()) {
            buckets.add(new Path(PathNormalizer.normalize(bucket.getBucketName()), EnumSet.of(Path.Type.directory, Path.Type.volume),
                new B2AttributesFinderFeature(session, fileid).toAttributes(bucket)));
        }
        // Notify the listener once with the complete bucket list.
        listener.chunk(directory, buckets);
        return buckets;
    }
    catch(B2ApiException e) {
        throw new B2ExceptionMappingService(fileid).map("Listing directory {0} failed", e, directory);
    }
    catch(IOException e) {
        throw new DefaultIOExceptionMappingService().map(e);
    }
}
|
@Test
@Ignore
public void testList() throws Exception {
    // Integration test against a live B2 session: listed bucket attributes
    // must match what the attributes finder reports for each bucket.
    final B2VersionIdProvider fileid = new B2VersionIdProvider(session);
    final AttributedList<Path> list = new B2BucketListService(session, fileid).list(
        new Path(String.valueOf(Path.DELIMITER), EnumSet.of(Path.Type.volume, Path.Type.directory)), new DisabledListProgressListener());
    assertFalse(list.isEmpty());
    for(Path bucket : list) {
        assertEquals(bucket.attributes(), new B2AttributesFinderFeature(session, fileid).find(bucket, new DisabledListProgressListener()));
    }
}
|
/**
 * Parses the given XML payload and records its root element name and
 * namespace; both must be present and non-empty.
 *
 * @param xmlPayload well-formed XML of a single extension element
 */
public SimplePayload(String xmlPayload) {
    XmlPullParser parser;
    try {
        parser = PacketParserUtils.getParserFor(xmlPayload);
    }
    catch (XmlPullParserException | IOException e) {
        // The payload is supplied by the caller; failing to obtain a parser
        // for it indicates a programming error, hence AssertionError.
        throw new AssertionError(e);
    }
    QName qname = parser.getQName();
    payload = xmlPayload;
    elemName = StringUtils.requireNotNullNorEmpty(qname.getLocalPart(), "Could not determine element name from XML payload");
    ns = StringUtils.requireNotNullNorEmpty(qname.getNamespaceURI(), "Could not determine namespace from XML payload");
}
|
@Test
public void simplePayloadTest() {
    String xmlPayload = "<element xmlns='https://example.org'><foo>Test</foo><bar/></element>";
    SimplePayload simplePayload = new SimplePayload(xmlPayload);
    // Element name and namespace come from the root element; toXML round-trips.
    assertEquals("element", simplePayload.getElementName());
    assertEquals("https://example.org", simplePayload.getNamespace());
    assertEquals(xmlPayload, simplePayload.toXML().toString());
}
|
/**
 * Derives the field name to null out for a Salesforce update.
 * Without external IDs the field name is returned as-is. With external IDs,
 * a field of the form {@code object:externalId/lookupField} is rewritten to
 * the underlying lookup field; custom relationship fields ({@code ..._r})
 * become the corresponding custom field ({@code ..._c}), other lookups get
 * an {@code Id} suffix. Malformed external-key syntax falls back to the
 * original field name (logged in debug mode).
 */
public static String getFieldToNullName( LogChannelInterface log, String field, boolean isUseExtId ) {
  String fieldToNullName = field;
  if ( isUseExtId ) {
    // verify if the field has correct syntax
    if ( !FIELD_NAME_WITH_EXTID_PATTERN.matcher( field ).matches() ) {
      if ( log.isDebug() ) {
        log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Warn.IncorrectExternalKeySyntax", field,
            fieldToNullName ) );
      }
      return fieldToNullName;
    }
    String lookupField = field.substring( field.indexOf( EXTID_SEPARATOR ) + 1 );
    // working with custom objects and relationship
    // cut off _r and then add _c in the end of the name
    if ( lookupField.endsWith( CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX ) ) {
      fieldToNullName =
          lookupField.substring( 0, lookupField.length() - CUSTOM_OBJECT_RELATIONSHIP_FIELD_SUFFIX.length() )
              + CUSTOM_OBJECT_SUFFIX;
      if ( log.isDebug() ) {
        log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) );
      }
      return fieldToNullName;
    }
    fieldToNullName = lookupField + "Id";
  }
  if ( log.isDebug() ) {
    log.logDebug( BaseMessages.getString( PKG, "SalesforceUtils.Debug.NullFieldName", fieldToNullName ) );
  }
  return fieldToNullName;
}
|
@Test
public void testIncorrectExternalKeySyntaxWarnIsLoggedInDebugMode() {
  when( logMock.isDebug() ).thenReturn( true );
  // "AccountId" lacks the required object:externalId/lookupField syntax
  inputFieldName = "AccountId";
  // Sanity: nothing logged before the call under test
  verify( logMock, never() ).logDebug( anyString() );
  SalesforceUtils.getFieldToNullName( logMock, inputFieldName, true );
  verify( logMock ).logDebug(
      "The field has incorrect external key syntax: AccountId. Syntax for external key should be : object:externalId/lookupField. Trying to use fieldToNullName=AccountId." );
}
|
/**
 * Returns a matcher that matches {@code Flatten.PCollections} transforms
 * that have no inputs.
 */
public static PTransformMatcher emptyFlatten() {
  return new PTransformMatcher() {
    @Override
    public boolean matches(AppliedPTransform<?, ?, ?> application) {
      // Must be a PCollections flatten AND have an empty input map.
      return (application.getTransform() instanceof Flatten.PCollections)
          && application.getInputs().isEmpty();
    }
    @Override
    public String toString() {
      return MoreObjects.toStringHelper("EmptyFlattenMatcher").toString();
    }
  };
}
|
@Test
public void emptyFlattenWithNonFlatten() {
  // A Flatten.Iterables (not Flatten.PCollections) with no inputs must NOT match.
  AppliedPTransform application =
      AppliedPTransform
          .<PCollection<Iterable<Integer>>, PCollection<Integer>, Flatten.Iterables<Integer>>of(
              "EmptyFlatten",
              Collections.emptyMap(),
              Collections.singletonMap(
                  new TupleTag<Integer>(),
                  PCollection.createPrimitiveOutputInternal(
                      p, WindowingStrategy.globalDefault(), IsBounded.BOUNDED, VarIntCoder.of())),
              /* This isn't actually possible to construct, but for the sake of example */
              Flatten.iterables(),
              ResourceHints.create(),
              p);
  assertThat(PTransformMatchers.emptyFlatten().matches(application), is(false));
}
|
/**
 * Encodes an image-removal event into the buffer.
 * Layout after the standard log header: sessionId (int), streamId (int),
 * correlation id (long), then the channel as a trailing string truncated to
 * fit the remaining capture length.
 */
static void encodeImageRemoval(
    final UnsafeBuffer encodingBuffer,
    final int offset,
    final int captureLength,
    final int length,
    final String channel,
    final int sessionId,
    final int streamId,
    final long id)
{
    int encodedLength = encodeLogHeader(encodingBuffer, offset, captureLength, length);
    encodingBuffer.putInt(offset + encodedLength, sessionId, LITTLE_ENDIAN);
    encodedLength += SIZE_OF_INT;
    encodingBuffer.putInt(offset + encodedLength, streamId, LITTLE_ENDIAN);
    encodedLength += SIZE_OF_INT;
    encodingBuffer.putLong(offset + encodedLength, id, LITTLE_ENDIAN);
    encodedLength += SIZE_OF_LONG;
    // Remaining capture budget = captureLength minus the fixed fields above.
    encodeTrailingString(
        encodingBuffer, offset + encodedLength, captureLength - SIZE_OF_INT * 2 - SIZE_OF_LONG, channel);
}
|
@Test
void encodeImageRemovalShouldTruncateChannelIfItExceedsMaxMessageLength()
{
    // Channel longer than the max event length: the encoded string must be
    // truncated and suffixed with "..." while the fixed fields stay intact.
    final char[] data = new char[MAX_EVENT_LENGTH + 8];
    fill(data, 'a');
    final int offset = 0;
    final int length = data.length + SIZE_OF_LONG + SIZE_OF_INT * 3;
    final int captureLength = captureLength(length);
    final String channel = new String(data);
    final int sessionId = -1;
    final int streamId = 1;
    final long id = 0;
    encodeImageRemoval(buffer, offset, captureLength, length, channel, sessionId, streamId, id);
    // Header: capture length, full length, non-zero timestamp.
    assertEquals(captureLength, buffer.getInt(offset, LITTLE_ENDIAN));
    assertEquals(length, buffer.getInt(offset + SIZE_OF_INT, LITTLE_ENDIAN));
    assertNotEquals(0, buffer.getLong(offset + SIZE_OF_INT * 2, LITTLE_ENDIAN));
    // Fixed fields after the header.
    assertEquals(sessionId, buffer.getInt(offset + LOG_HEADER_LENGTH, LITTLE_ENDIAN));
    assertEquals(streamId, buffer.getInt(offset + LOG_HEADER_LENGTH + SIZE_OF_INT, LITTLE_ENDIAN));
    assertEquals(id, buffer.getLong(offset + LOG_HEADER_LENGTH + SIZE_OF_INT * 2, LITTLE_ENDIAN));
    assertEquals(channel.substring(0, captureLength - SIZE_OF_LONG - SIZE_OF_INT * 3 - 3) + "...",
        buffer.getStringAscii(offset + LOG_HEADER_LENGTH + SIZE_OF_INT * 2 + SIZE_OF_LONG, LITTLE_ENDIAN));
}
|
/**
 * Tells whether a value carries the encrypted format "{algorithm}payload":
 * a '{' at position 0 with the first '}' appearing after at least one
 * algorithm character.
 */
public boolean isEncrypted(String value) {
  final int closingBrace = value.indexOf('}');
  return value.startsWith("{") && closingBrace > 1;
}
|
@Test
public void isEncrypted() {
  Encryption encryption = new Encryption(null);
  // Any "{algorithm}payload" shape counts as encrypted, even unknown algorithms.
  assertThat(encryption.isEncrypted("{aes}ADASDASAD")).isTrue();
  assertThat(encryption.isEncrypted("{b64}ADASDASAD")).isTrue();
  assertThat(encryption.isEncrypted("{abc}ADASDASAD")).isTrue();
  // Empty braces, an unterminated prefix and a non-leading brace do not qualify.
  assertThat(encryption.isEncrypted("{}")).isFalse();
  assertThat(encryption.isEncrypted("{foo")).isFalse();
  assertThat(encryption.isEncrypted("foo{aes}")).isFalse();
}
|
/**
 * Acquires the lock, blocking indefinitely until it becomes available.
 * {@link java.util.concurrent.locks.Lock#lock()} may not throw
 * {@link InterruptedException}, so interruption is surfaced as an
 * {@link IllegalStateException} carrying the original exception as cause,
 * after restoring the thread's interrupt status.
 */
@Override
public void lock() {
    try {
        // -1 lease time: hold the lock until it is explicitly released.
        lockInterruptibly(-1, null);
    } catch (InterruptedException e) {
        // Preserve the interrupt status for callers instead of swallowing it,
        // and keep the original exception as the cause for diagnosability.
        Thread.currentThread().interrupt();
        throw new IllegalStateException(e);
    }
}
|
@Test
public void testConcurrency_MultiInstance() throws InterruptedException {
    int iterations = 100;
    final AtomicInteger lockedCounter = new AtomicInteger();
    // Each concurrent client acquires the same named spin lock; if mutual
    // exclusion holds, every lock/unlock pair increments the counter exactly once.
    testMultiInstanceConcurrency(iterations, r -> {
        Lock lock = r.getSpinLock("testConcurrency_MultiInstance2");
        lock.lock();
        lockedCounter.incrementAndGet();
        lock.unlock();
    });
    Assertions.assertEquals(iterations, lockedCounter.get());
}
|
/**
 * Logs a debug message if the configured logger supports debug level, and
 * mirrors it into the in-memory log buffer. Synchronized so concurrent
 * callers do not interleave logger and buffer updates.
 */
public static synchronized void d(final String tag, String text) {
    if (msLogger.supportsD()) {
        msLogger.d(tag, text);
        addLog(LVL_D, tag, text);
    }
}
|
@Test
public void testD() throws Exception {
    // The varargs overload must format the message before delegating.
    Logger.d("mTag", "Text with %d digits", 1);
    Mockito.verify(mMockLog).d("mTag", "Text with 1 digits");
    // A message without format arguments passes through unchanged.
    Logger.d("mTag", "Text with no digits");
    Mockito.verify(mMockLog).d("mTag", "Text with no digits");
}
|
/**
 * Encodes a {@link RemoteMepEntry} as a JSON object. The id, state and RDI
 * flag are always present; the remaining attributes are emitted only when
 * non-null.
 */
@Override
public ObjectNode encode(RemoteMepEntry remoteMepEntry, CodecContext context) {
    checkNotNull(remoteMepEntry, "Mep cannot be null");
    final ObjectNode result = context.mapper().createObjectNode();
    // Mandatory attributes.
    result.put("remoteMepId", remoteMepEntry.remoteMepId().toString());
    result.put("remoteMepState", remoteMepEntry.state().name());
    result.put("rdi", remoteMepEntry.rdi());
    // Optional attributes: ObjectNode.put mutates in place (fluent API), so
    // no reassignment is needed.
    if (remoteMepEntry.failedOrOkTime() != null) {
        result.put("failedOrOkTime", remoteMepEntry.failedOrOkTime().toString());
    }
    if (remoteMepEntry.macAddress() != null) {
        result.put("macAddress", remoteMepEntry.macAddress().toString());
    }
    if (remoteMepEntry.portStatusTlvType() != null) {
        result.put("portStatusTlvType", remoteMepEntry.portStatusTlvType().name());
    }
    if (remoteMepEntry.interfaceStatusTlvType() != null) {
        result.put("interfaceStatusTlvType", remoteMepEntry.interfaceStatusTlvType().name());
    }
    if (remoteMepEntry.senderIdTlvType() != null) {
        result.put("senderIdTlvType", remoteMepEntry.senderIdTlvType().name());
    }
    return result;
}
|
/**
 * Encoding a list of remote MEP entries must yield a JSON array whose
 * elements each carry a state name with the "RMEP_" prefix.
 */
@Test
public void testEncodeIterableOfRemoteMepEntryCodecContext()
        throws CfmConfigException {
    RemoteMepEntry remoteMep2 = DefaultRemoteMepEntry
            .builder(MepId.valueOf((short) 20), RemoteMepState.RMEP_IDLE)
            .build();
    ArrayList<RemoteMepEntry> remoteMeps = new ArrayList<>();
    remoteMeps.add(remoteMep1);
    remoteMeps.add(remoteMep2);
    ObjectNode node = mapper.createObjectNode();
    node.set("remoteMep", context.codec(RemoteMepEntry.class)
            .encode(remoteMeps, context));
    // Every encoded element must expose a RemoteMepState enum name.
    Iterator<JsonNode> an = node.get("remoteMep").elements();
    while (an.hasNext()) {
        JsonNode jn = an.next();
        assertEquals("RMEP_", jn.get("remoteMepState").asText().substring(0, 5));
    }
}
|
/**
 * Converts a legacy {@link TypeInformation} into a {@link DataType} and
 * delegates to the DataType-based overload with top-level records enabled.
 */
public static ConsumingResult createConsumingResult(
        DataTypeFactory dataTypeFactory,
        TypeInformation<?> inputTypeInfo,
        @Nullable Schema declaredSchema) {
    return createConsumingResult(
            dataTypeFactory,
            TypeInfoDataTypeConverter.toDataType(dataTypeFactory, inputTypeInfo),
            declaredSchema,
            true);
}
|
/**
 * A declared schema that reorders, enriches and adds computed columns over a
 * ROW type input must produce: a physical data type in the input's field
 * order, a merged schema, and projections following the declared order.
 */
@Test
void testInputFromRowWithPhysicalDeclaredSchema() {
    final TypeInformation<?> inputTypeInfo =
            Types.ROW(Types.INT, Types.LONG, Types.GENERIC(BigDecimal.class), Types.BOOLEAN);
    final ConsumingResult result =
            SchemaTranslator.createConsumingResult(
                    dataTypeFactoryWithRawType(BigDecimal.class),
                    inputTypeInfo,
                    Schema.newBuilder()
                            .primaryKeyNamed("pk", "f0")
                            .column("f1", BIGINT()) // reordered
                            .column("f0", INT())
                            .columnByExpression("computed", "f1 + 42")
                            .column("f2", DECIMAL(10, 2)) // enriches
                            .columnByExpression("computed2", "f1 - 1")
                            .build());
    // Physical type keeps the input's positional field order, not the
    // declared order; f2 gets the enriched DECIMAL type.
    assertThat(result.getPhysicalDataType())
            .isEqualTo(
                    ROW(
                                    FIELD("f0", INT()),
                                    FIELD("f1", BIGINT()),
                                    FIELD("f2", DECIMAL(10, 2)),
                                    FIELD("f3", BOOLEAN()))
                            .notNull());
    assertThat(result.isTopLevelRecord()).isTrue();
    // Merged schema: physical columns first, then computed columns, then the
    // named primary key.
    assertThat(result.getSchema())
            .isEqualTo(
                    Schema.newBuilder()
                            .column("f0", INT())
                            .column("f1", BIGINT())
                            .column("f2", DECIMAL(10, 2))
                            .column("f3", BOOLEAN())
                            .columnByExpression("computed", "f1 + 42")
                            .columnByExpression("computed2", "f1 - 1")
                            .primaryKeyNamed("pk", "f0")
                            .build());
    // Projections follow the declared column order.
    assertThat(result.getProjections())
            .isEqualTo(Arrays.asList("f1", "f0", "computed", "f2", "computed2"));
}
|
/**
 * Parses/converts the given value into a {@link List}.
 *
 * <p>NOTE(review): the {@code schema} parameter is ignored — conversion
 * always uses {@code ARRAY_SELECTOR_SCHEMA}. Confirm this is intentional
 * before relying on schema-specific element conversion here.
 */
public static List<?> convertToList(Schema schema, Object value) {
    return convertToArray(ARRAY_SELECTOR_SCHEMA, value);
}
|
/**
 * Parsing a string-encoded array must yield a 3-element list whose integral
 * entries compare equal as ints and whose float literal is preserved.
 */
@Test
public void shouldConvertIntegralTypesToFloat() {
    float thirdValue = Float.MAX_VALUE;
    // The value is a STRING; convertToList parses the bracketed literal.
    List<?> list = Values.convertToList(Schema.STRING_SCHEMA, "[1, 2, " + thirdValue + "]");
    assertEquals(3, list.size());
    assertEquals(1, ((Number) list.get(0)).intValue());
    assertEquals(2, ((Number) list.get(1)).intValue());
    assertEquals(thirdValue, list.get(2));
}
|
/**
 * Returns a human-readable description (name, lock, owner, stack trace) of
 * every deadlocked thread, or an empty set when no deadlock is detected.
 */
public Set<String> getDeadlockedThreads() {
    final long[] deadlockedIds = threads.findDeadlockedThreads();
    if (deadlockedIds == null) {
        // The MXBean reports null (not an empty array) when nothing is deadlocked.
        return Collections.emptySet();
    }
    final Set<String> descriptions = new HashSet<>();
    for (ThreadInfo info : threads.getThreadInfo(deadlockedIds, MAX_STACK_TRACE_DEPTH)) {
        final StringBuilder trace = new StringBuilder();
        for (StackTraceElement frame : info.getStackTrace()) {
            trace.append("\t at ")
                    .append(frame.toString())
                    .append(String.format("%n"));
        }
        descriptions.add(
                String.format("%s locked on %s (owned by %s):%n%s",
                        info.getThreadName(),
                        info.getLockName(),
                        info.getLockOwnerName(),
                        trace.toString()
                )
        );
    }
    return Collections.unmodifiableSet(descriptions);
}
|
/**
 * When the thread MXBean reports two mutually deadlocked threads, the
 * detector must render one formatted description per thread, including lock
 * name, owner and the stubbed stack frames. Mock stubbing order matters:
 * findDeadlockedThreads must return the ids that getThreadInfo is keyed on.
 */
@Test
public void returnsASetOfThreadsIfAnyAreDeadlocked() {
    final ThreadInfo thread1 = mock(ThreadInfo.class);
    when(thread1.getThreadName()).thenReturn("thread1");
    when(thread1.getLockName()).thenReturn("lock2");
    when(thread1.getLockOwnerName()).thenReturn("thread2");
    when(thread1.getStackTrace()).thenReturn(new StackTraceElement[]{
            new StackTraceElement("Blah", "bloo", "Blah.java", 150),
            new StackTraceElement("Blah", "blee", "Blah.java", 100)
    });
    final ThreadInfo thread2 = mock(ThreadInfo.class);
    when(thread2.getThreadName()).thenReturn("thread2");
    when(thread2.getLockName()).thenReturn("lock1");
    when(thread2.getLockOwnerName()).thenReturn("thread1");
    when(thread2.getStackTrace()).thenReturn(new StackTraceElement[]{
            new StackTraceElement("Blah", "blee", "Blah.java", 100),
            new StackTraceElement("Blah", "bloo", "Blah.java", 150)
    });
    final long[] ids = {1, 2};
    when(threads.findDeadlockedThreads()).thenReturn(ids);
    when(threads.getThreadInfo(eq(ids), anyInt()))
            .thenReturn(new ThreadInfo[]{thread1, thread2});
    assertThat(detector.getDeadlockedThreads())
            .containsOnly(String.format(Locale.US,
                    "thread1 locked on lock2 (owned by thread2):%n" +
                            "\t at Blah.bloo(Blah.java:150)%n" +
                            "\t at Blah.blee(Blah.java:100)%n"),
                    String.format(Locale.US,
                            "thread2 locked on lock1 (owned by thread1):%n" +
                                    "\t at Blah.blee(Blah.java:100)%n" +
                                    "\t at Blah.bloo(Blah.java:150)%n"));
}
|
/**
 * Returns the union of this range set with {@code other}, which must be of a
 * compatible type. The builder merges overlapping and adjacent ranges from
 * both operands into a canonical sorted form.
 */
@Override
public SortedRangeSet union(ValueSet other)
{
    SortedRangeSet that = checkCompatibility(other);
    Builder builder = new Builder(type);
    builder.addAll(this.getOrderedRanges());
    builder.addAll(that.getOrderedRanges());
    return builder.build();
}
|
/**
 * Union cases: identities with none/all, value-set merging with duplicates,
 * range coalescing across overlapping bounds, and two open half-lines whose
 * union is everything except the shared endpoint.
 */
@Test
public void testUnion()
{
    assertUnion(SortedRangeSet.none(BIGINT), SortedRangeSet.none(BIGINT), SortedRangeSet.none(BIGINT));
    assertUnion(SortedRangeSet.all(BIGINT), SortedRangeSet.all(BIGINT), SortedRangeSet.all(BIGINT));
    assertUnion(SortedRangeSet.none(BIGINT), SortedRangeSet.all(BIGINT), SortedRangeSet.all(BIGINT));
    // Duplicate point values are deduplicated.
    assertUnion(
            SortedRangeSet.of(Range.equal(BIGINT, 1L), Range.equal(BIGINT, 2L)),
            SortedRangeSet.of(Range.equal(BIGINT, 2L), Range.equal(BIGINT, 3L)),
            SortedRangeSet.of(Range.equal(BIGINT, 1L), Range.equal(BIGINT, 2L), Range.equal(BIGINT, 3L)));
    assertUnion(SortedRangeSet.all(BIGINT), SortedRangeSet.of(Range.equal(BIGINT, 0L)), SortedRangeSet.all(BIGINT));
    // Overlapping ranges coalesce into a single range.
    assertUnion(
            SortedRangeSet.of(Range.range(BIGINT, 0L, true, 4L, false)),
            SortedRangeSet.of(Range.greaterThan(BIGINT, 3L)),
            SortedRangeSet.of(Range.greaterThanOrEqual(BIGINT, 0L)));
    assertUnion(
            SortedRangeSet.of(Range.greaterThanOrEqual(BIGINT, 0L)),
            SortedRangeSet.of(Range.lessThanOrEqual(BIGINT, 0L)),
            SortedRangeSet.of(Range.all(BIGINT)));
    // (-inf,0) ∪ (0,+inf) = complement of {0}.
    assertUnion(
            SortedRangeSet.of(Range.greaterThan(BIGINT, 0L)),
            SortedRangeSet.of(Range.lessThan(BIGINT, 0L)),
            SortedRangeSet.of(BIGINT, 0L).complement());
}
|
/**
 * Looks up the stored record for {@code user} and validates {@code hmac}
 * against the stored password.
 *
 * @return a future resolving to NOT_STORED when no record exists,
 *         PASSWORD_MISMATCH when the hmac differs, or the found record
 */
public CompletableFuture<GetResult> get(byte[] user, byte[] hmac) {
    return getUser(user).thenApply(response -> {
        if (!response.hasItem()) {
            return GetResult.NOT_STORED;
        }
        final Record record = Record.from(user, response.item());
        // MessageDigest.isEqual performs a constant-time comparison, avoiding
        // a timing side channel on the password check.
        return MessageDigest.isEqual(hmac, record.password)
                ? GetResult.found(record)
                : GetResult.PASSWORD_MISMATCH;
    });
}
|
/**
 * Covers the three get() outcomes: correct credentials return the stored
 * record, a wrong password yields PASSWORD_MISMATCH, and an unknown user
 * yields NOT_STORED (record is null in both failure cases).
 */
@Test
void testGet() {
    byte[] wrongUser = TestRandomUtil.nextBytes(16);
    byte[] wrongPassword = TestRandomUtil.nextBytes(16);
    assertThat(subscriptions.create(user, password, created)).succeedsWithin(DEFAULT_TIMEOUT);
    // Correct user + password: record is found and matches the creation data.
    assertThat(subscriptions.get(user, password)).succeedsWithin(DEFAULT_TIMEOUT).satisfies(getResult -> {
        assertThat(getResult.type).isEqualTo(FOUND);
        assertThat(getResult.record).isNotNull().satisfies(checkFreshlyCreatedRecord(user, password, created));
    });
    // Correct user, wrong password: mismatch, no record leaked.
    assertThat(subscriptions.get(user, wrongPassword)).succeedsWithin(DEFAULT_TIMEOUT)
            .satisfies(getResult -> {
                assertThat(getResult.type).isEqualTo(PASSWORD_MISMATCH);
                assertThat(getResult.record).isNull();
            });
    // Unknown user: nothing stored.
    assertThat(subscriptions.get(wrongUser, password)).succeedsWithin(DEFAULT_TIMEOUT)
            .satisfies(getResult -> {
                assertThat(getResult.type).isEqualTo(NOT_STORED);
                assertThat(getResult.record).isNull();
            });
}
|
/**
 * Validates that the dict-data value is unique within its dict type.
 *
 * @param id       id of the row being updated, or {@code null} when creating
 * @param dictType dictionary type to search in
 * @param value    dictionary value that must be unique
 * @throws ServiceException with DICT_DATA_VALUE_DUPLICATE when another row
 *                          already holds the value
 */
@VisibleForTesting
public void validateDictDataValueUnique(Long id, String dictType, String value) {
    DictDataDO existing = dictDataMapper.selectByDictTypeAndValue(dictType, value);
    if (existing == null) {
        return;
    }
    // A null id means "create": any existing row is a duplicate. Otherwise the
    // value only conflicts when it belongs to a different row.
    if (id == null || !existing.getId().equals(id)) {
        throw exception(DICT_DATA_VALUE_DUPLICATE);
    }
}
|
/**
 * Creating (id == null) with a value that already exists for the dict type
 * must raise DICT_DATA_VALUE_DUPLICATE.
 */
@Test
public void testValidateDictDataValueUnique_valueDuplicateForCreate() {
    // Prepare parameters.
    String dictType = randomString();
    String value = randomString();
    // Mock data: an existing row with the same type and value.
    dictDataMapper.insert(randomDictDataDO(o -> {
        o.setDictType(dictType);
        o.setValue(value);
    }));
    // Invoke and expect the duplicate-value service exception.
    assertServiceException(() -> dictDataService.validateDictDataValueUnique(null, dictType, value),
            DICT_DATA_VALUE_DUPLICATE);
}
|
/**
 * Resolves the identity provider for the request and hands it off. When
 * resolution fails, the resolver has already written the response (it
 * returns {@code null} in that case), so nothing further is done here.
 */
@Override
public void doFilter(HttpRequest request, HttpResponse response, FilterChain chain) {
    final IdentityProvider provider =
            resolveProviderOrHandleResponse(request, response, INIT_CONTEXT);
    if (provider == null) {
        return;
    }
    handleProvider(request, response, provider);
}
|
/**
 * A provider that throws during init must cause a redirect to the
 * "unauthorized" page, prefixed with the servlet context path.
 */
@Test
public void redirect_with_context_when_failing_because_of_Exception() throws Exception {
    when(request.getContextPath()).thenReturn("/sonarqube");
    IdentityProvider identityProvider = new FailWithIllegalStateException("failing");
    when(request.getRequestURI()).thenReturn("/sessions/init/" + identityProvider.getKey());
    identityProviderRepository.addIdentityProvider(identityProvider);
    underTest.doFilter(request, response, chain);
    verify(response).sendRedirect("/sonarqube/sessions/unauthorized");
}
|
/**
 * Runs a KSQL script file: validates that exactly one argument (the file
 * path) was supplied, loads the file's contents and submits them as a
 * single KSQL request.
 */
@Override
public void execute(final List<String> args, final PrintWriter terminal) {
    CliCmdUtil.ensureArgCountBounds(args, 1, 1, HELP);
    final String scriptPath = args.get(0);
    requestExecutor.makeKsqlRequest(loadScript(scriptPath));
}
|
@Test
public void shouldThrowIfNoArgSupplied() {
    // When: executing with an empty argument list.
    final Exception e = assertThrows(
        KsqlException.class,
        () -> cmd.execute(ImmutableList.of(), terminal));

    // Then: the arg-count validation rejects the call.
    assertThat(e.getMessage(), containsString("Too few parameters"));
}
|
/**
 * Creates an S3-backed {@code InputFile} for the given location, sharing
 * this FileIO's client, properties and metrics.
 */
@Override
public InputFile newInputFile(String path) {
    return S3InputFile.fromLocation(path, client(), s3FileIOProperties, metrics);
}
|
/**
 * Opening a stream for a non-existent S3 object must fail lazily: the
 * InputFile is created fine, but the first read raises NotFoundException
 * naming the missing location.
 */
@Test
public void testReadMissingLocation() {
    String location = "s3://bucket/path/to/data.parquet";
    InputFile in = s3FileIO.newInputFile(location);
    assertThatThrownBy(() -> in.newStream().read())
        .isInstanceOf(NotFoundException.class)
        .hasMessage("Location does not exist: " + location);
}
|
/**
 * Converts a generic {@code Acl} to an S3 {@code AccessControlList}.
 *
 * <p>Well-known canned ACLs map to their REST canned counterparts, and
 * {@code Acl.EMPTY} maps to {@code null} (no ACL set). Otherwise each valid
 * user/role pair is translated to the matching grantee type. Returns
 * {@code null} when the resulting list has no owner, since such a list
 * cannot be applied.
 */
protected AccessControlList toAcl(final Acl acl) {
    if(Acl.EMPTY.equals(acl)) {
        return null;
    }
    // Canned ACLs are matched before building a grant list.
    if(Acl.CANNED_PRIVATE.equals(acl)) {
        return AccessControlList.REST_CANNED_PRIVATE;
    }
    if(Acl.CANNED_BUCKET_OWNER_FULLCONTROL.equals(acl)) {
        return AccessControlList.REST_CANNED_BUCKET_OWNER_FULLCONTROL;
    }
    if(Acl.CANNED_BUCKET_OWNER_READ.equals(acl)) {
        return AccessControlList.REST_CANNED_BUCKET_OWNER_READ;
    }
    if(Acl.CANNED_AUTHENTICATED_READ.equals(acl)) {
        return AccessControlList.REST_CANNED_AUTHENTICATED_READ;
    }
    if(Acl.CANNED_PUBLIC_READ.equals(acl)) {
        return AccessControlList.REST_CANNED_PUBLIC_READ;
    }
    if(Acl.CANNED_PUBLIC_READ_WRITE.equals(acl)) {
        return AccessControlList.REST_CANNED_PUBLIC_READ_WRITE;
    }
    final AccessControlList list = new AccessControlList();
    for(Acl.UserAndRole userAndRole : acl.asList()) {
        // Skip incomplete entries rather than failing the whole conversion.
        if(!userAndRole.isValid()) {
            continue;
        }
        // NOTE: the instanceof chain is order-sensitive; more specific user
        // types must be tested before more general ones.
        if(userAndRole.getUser() instanceof Acl.Owner) {
            list.setOwner(new StorageOwner(userAndRole.getUser().getIdentifier(),
                    userAndRole.getUser().getDisplayName()));
        }
        else if(userAndRole.getUser() instanceof Acl.EmailUser) {
            list.grantPermission(new EmailAddressGrantee(userAndRole.getUser().getIdentifier()),
                    Permission.parsePermission(userAndRole.getRole().getName()));
        }
        else if(userAndRole.getUser() instanceof Acl.GroupUser) {
            // Handle special cases
            if(userAndRole.getUser().getIdentifier().equals(Acl.GroupUser.EVERYONE)) {
                list.grantPermission(GroupGrantee.ALL_USERS,
                        Permission.parsePermission(userAndRole.getRole().getName()));
            }
            else if(userAndRole.getUser().getIdentifier().equals(Acl.GroupUser.AUTHENTICATED)) {
                list.grantPermission(GroupGrantee.AUTHENTICATED_USERS,
                        Permission.parsePermission(userAndRole.getRole().getName()));
            }
            else {
                // Generic mappings
                list.grantPermission(new GroupGrantee(userAndRole.getUser().getIdentifier()),
                        Permission.parsePermission(userAndRole.getRole().getName()));
            }
        }
        else if(userAndRole.getUser() instanceof Acl.CanonicalUser) {
            list.grantPermission(new CanonicalGrantee(userAndRole.getUser().getIdentifier()),
                    Permission.parsePermission(userAndRole.getRole().getName()));
        }
        else {
            log.warn(String.format("Unsupported user %s", userAndRole.getUser()));
        }
    }
    // An ACL without an owner is unusable; signal "no ACL" instead.
    if(null == list.getOwner()) {
        log.warn(String.format("Missing owner in %s", acl));
        return null;
    }
    return list;
}
|
/**
 * Canned ACLs must round-trip in both directions: each JetS3t REST canned
 * list maps to the matching {@code Acl} constant, each {@code Acl} canned
 * constant maps back to the same JetS3t singleton, and the REST header value
 * agrees with the Acl's canned string.
 */
@Test
public void testCannedLists() {
    final S3AccessControlListFeature f = new S3AccessControlListFeature(session);
    assertSame(Acl.CANNED_PRIVATE, f.toAcl(AccessControlList.REST_CANNED_PRIVATE));
    assertSame(Acl.CANNED_PUBLIC_READ, f.toAcl(AccessControlList.REST_CANNED_PUBLIC_READ));
    assertSame(Acl.CANNED_PUBLIC_READ_WRITE, f.toAcl(AccessControlList.REST_CANNED_PUBLIC_READ_WRITE));
    assertSame(Acl.CANNED_AUTHENTICATED_READ, f.toAcl(AccessControlList.REST_CANNED_AUTHENTICATED_READ));
    assertSame(Acl.CANNED_BUCKET_OWNER_FULLCONTROL, f.toAcl(AccessControlList.REST_CANNED_BUCKET_OWNER_FULLCONTROL));
    assertSame(Acl.CANNED_BUCKET_OWNER_READ, f.toAcl(AccessControlList.REST_CANNED_BUCKET_OWNER_READ));
    assertSame(AccessControlList.REST_CANNED_PRIVATE, f.toAcl(Acl.CANNED_PRIVATE));
    assertEquals(Acl.CANNED_PRIVATE.getCannedString(), f.toAcl(Acl.CANNED_PRIVATE).getValueForRESTHeaderACL());
    assertSame(AccessControlList.REST_CANNED_PUBLIC_READ, f.toAcl(Acl.CANNED_PUBLIC_READ));
    assertEquals(Acl.CANNED_PUBLIC_READ.getCannedString(), f.toAcl(Acl.CANNED_PUBLIC_READ).getValueForRESTHeaderACL());
    assertSame(AccessControlList.REST_CANNED_PUBLIC_READ_WRITE, f.toAcl(Acl.CANNED_PUBLIC_READ_WRITE));
    assertEquals(Acl.CANNED_PUBLIC_READ_WRITE.getCannedString(), f.toAcl(Acl.CANNED_PUBLIC_READ_WRITE).getValueForRESTHeaderACL());
    assertSame(AccessControlList.REST_CANNED_AUTHENTICATED_READ, f.toAcl(Acl.CANNED_AUTHENTICATED_READ));
    assertEquals(Acl.CANNED_AUTHENTICATED_READ.getCannedString(), f.toAcl(Acl.CANNED_AUTHENTICATED_READ).getValueForRESTHeaderACL());
    assertSame(AccessControlList.REST_CANNED_BUCKET_OWNER_FULLCONTROL, f.toAcl(Acl.CANNED_BUCKET_OWNER_FULLCONTROL));
    assertEquals(Acl.CANNED_BUCKET_OWNER_FULLCONTROL.getCannedString(), f.toAcl(Acl.CANNED_BUCKET_OWNER_FULLCONTROL).getValueForRESTHeaderACL());
    assertSame(AccessControlList.REST_CANNED_BUCKET_OWNER_READ, f.toAcl(Acl.CANNED_BUCKET_OWNER_READ));
    assertEquals(Acl.CANNED_BUCKET_OWNER_READ.getCannedString(), f.toAcl(Acl.CANNED_BUCKET_OWNER_READ).getValueForRESTHeaderACL());
}
|
/**
 * Downloads the plugin zip from {@code source} into a temp file under
 * {@code targetPath} and verifies its MD5 checksum.
 *
 * <p>When no checksum was configured, it is fetched from
 * {@code source + ".md5"}. Fix over the previous version: the digest is now
 * computed once — a leftover duplicate {@code DigestUtils.md5Hex} call whose
 * result was discarded re-read the whole file for nothing.
 *
 * @param targetPath directory in which the temp zip is created
 * @return path of the downloaded, checksum-verified zip (registered for cleanup)
 * @throws UserException when the checksum cannot be obtained or does not match
 * @throws IOException   on download or file-system errors
 */
Path downloadRemoteZip(Path targetPath) throws IOException, UserException {
    LOG.info("download plugin zip from: " + source);
    Path zip = Files.createTempFile(targetPath, ".plugin_", ".zip");
    cleanPathList.add(zip);
    // download zip
    try (InputStream in = getInputStreamFromUrl(source)) {
        Files.copy(in, zip, StandardCopyOption.REPLACE_EXISTING);
    }
    // .md5 check: fall back to fetching the published checksum next to the zip.
    if (Strings.isNullOrEmpty(expectedChecksum)) {
        try (InputStream in = getInputStreamFromUrl(source + ".md5")) {
            BufferedReader br = new BufferedReader(new InputStreamReader(in));
            expectedChecksum = br.readLine();
        } catch (IOException e) {
            throw new UserException(e.getMessage() +
                    ". you should set md5sum in plugin properties or provide a md5 URI to check plugin file");
        }
    }
    final String actualChecksum = DigestUtils.md5Hex(Files.readAllBytes(zip));
    if (!StringUtils.equalsIgnoreCase(expectedChecksum, actualChecksum)) {
        throw new UserException(
                "MD5 check mismatch, expected " + expectedChecksum + " but actual " + actualChecksum);
    }
    return zip;
}
|
/**
 * Downloading from an unreachable URL must raise an IOException.
 *
 * <p>Fix over the previous version: the test passed silently when no
 * exception was thrown at all. An {@link AssertionError} is now raised on
 * the success path; being an {@code Error}, it is not swallowed by the
 * {@code catch (Exception)} below.
 */
@Test
public void testDownloadAndValidateZipIOException() {
    PluginZip util = new PluginZip("http://io-exception", null);
    try {
        util.downloadRemoteZip(PluginTestUtil.getTestPath("target"));
        throw new AssertionError("expected downloadRemoteZip to throw an IOException");
    } catch (Exception e) {
        assertTrue(e instanceof IOException);
    }
}
|
/**
 * Alters the committed offsets of a consumer group. The call is driven by an
 * {@code AlterConsumerGroupOffsetsHandler} through the admin api driver and
 * the result is keyed by the group's coordinator key.
 */
@Override
public AlterConsumerGroupOffsetsResult alterConsumerGroupOffsets(
    String groupId,
    Map<TopicPartition, OffsetAndMetadata> offsets,
    AlterConsumerGroupOffsetsOptions options
) {
    SimpleAdminApiFuture<CoordinatorKey, Map<TopicPartition, Errors>> future =
        AlterConsumerGroupOffsetsHandler.newFuture(groupId);
    AlterConsumerGroupOffsetsHandler handler = new AlterConsumerGroupOffsetsHandler(groupId, offsets, logContext);
    invokeDriver(handler, future, options.timeoutMs);
    return new AlterConsumerGroupOffsetsResult(future.get(CoordinatorKey.byGroupId(groupId)));
}
|
/**
 * Two retriable FindCoordinator errors (COORDINATOR_NOT_AVAILABLE then
 * COORDINATOR_LOAD_IN_PROGRESS) must be retried transparently; the third
 * response succeeds and the subsequent offset commit completes normally.
 * Prepared responses are consumed in order.
 */
@Test
public void testAlterConsumerGroupOffsetsFindCoordinatorRetriableErrors() throws Exception {
    // Retriable FindCoordinatorResponse errors should be retried
    final TopicPartition tp1 = new TopicPartition("foo", 0);
    try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(mockCluster(1, 0))) {
        env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
        env.kafkaClient().prepareResponse(
            prepareFindCoordinatorResponse(Errors.COORDINATOR_NOT_AVAILABLE, Node.noNode()));
        env.kafkaClient().prepareResponse(
            prepareFindCoordinatorResponse(Errors.COORDINATOR_LOAD_IN_PROGRESS, Node.noNode()));
        env.kafkaClient().prepareResponse(
            prepareFindCoordinatorResponse(Errors.NONE, env.cluster().controller()));
        env.kafkaClient().prepareResponse(
            prepareOffsetCommitResponse(tp1, Errors.NONE));
        Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
        offsets.put(tp1, new OffsetAndMetadata(123L));
        final AlterConsumerGroupOffsetsResult result = env.adminClient()
            .alterConsumerGroupOffsets(GROUP_ID, offsets);
        assertNull(result.all().get());
        assertNull(result.partitionResult(tp1).get());
    }
}
|
/**
 * Prints records from each source file to the console, optionally limited to
 * {@code numRecords} records and projected to {@code columns}.
 *
 * <p>All schemas/projections are resolved up front so a missing column fails
 * before any file is printed. The {@code threw} flag drives
 * {@code Closeables.close}: when an exception is propagating, close errors
 * are suppressed instead of masking the original failure.
 *
 * @return 0 on success (exit code convention)
 */
@Override
public int run() throws IOException {
    Preconditions.checkArgument(sourceFiles != null && !sourceFiles.isEmpty(), "Missing file name");
    // Ensure all source files have the columns specified first
    Map<String, Schema> schemas = new HashMap<>();
    for (String source : sourceFiles) {
        Schema schema = getAvroSchema(source);
        schemas.put(source, Expressions.filterSchema(schema, columns));
    }
    for (String source : sourceFiles) {
        Schema projection = schemas.get(source);
        Iterable<Object> reader = openDataFile(source, projection);
        boolean threw = true;
        long count = 0;
        try {
            for (Object record : reader) {
                // numRecords <= 0 means "no limit".
                if (numRecords > 0 && count >= numRecords) {
                    break;
                }
                // A single requested column is printed as its bare value;
                // otherwise the whole record is printed.
                if (columns == null || columns.size() != 1) {
                    console.info(String.valueOf(record));
                } else {
                    console.info(String.valueOf(select(projection, record, columns.get(0))));
                }
                count += 1;
            }
            threw = false;
        } catch (RuntimeException e) {
            // Wrap with position context so the failing record can be located.
            throw new RuntimeException("Failed on record " + count + " in file " + source, e);
        } finally {
            if (reader instanceof Closeable) {
                Closeables.close((Closeable) reader, threw);
            }
        }
    }
    return 0;
}
|
/**
 * Running cat with two selected columns on a parquet file must complete
 * successfully (exit code 0).
 */
@Test
public void testCatCommandWithSpecificColumns() throws IOException {
    File file = parquetFile();
    CatCommand command = new CatCommand(createLogger(), 0);
    command.sourceFiles = Arrays.asList(file.getAbsolutePath());
    command.columns = Arrays.asList(INT32_FIELD, INT64_FIELD);
    command.setConf(new Configuration());
    Assert.assertEquals(0, command.run());
}
|
/**
 * Registers the used/committed/max memory gauges for the JVM's non-heap
 * memory pool, sourced from {@code ManagementFactory.getMemoryMXBean()}.
 */
@VisibleForTesting
static void instantiateNonHeapMemoryMetrics(final MetricGroup metricGroup) {
    instantiateMemoryUsageMetrics(
            metricGroup, () -> ManagementFactory.getMemoryMXBean().getNonHeapMemoryUsage());
}
|
/**
 * All three non-heap memory gauges (used, committed, max) must be registered
 * on the metric group.
 */
@Test
void testNonHeapMetricsCompleteness() {
    final InterceptingOperatorMetricGroup nonHeapMetrics =
            new InterceptingOperatorMetricGroup();
    MetricUtils.instantiateNonHeapMemoryMetrics(nonHeapMetrics);
    assertThat(nonHeapMetrics.get(MetricNames.MEMORY_USED)).isNotNull();
    assertThat(nonHeapMetrics.get(MetricNames.MEMORY_COMMITTED)).isNotNull();
    assertThat(nonHeapMetrics.get(MetricNames.MEMORY_MAX)).isNotNull();
}
|
/**
 * Telnet "count" command: repeatedly samples and prints invocation
 * statistics for a service (optionally a single method), once per second for
 * the requested number of times.
 *
 * <p>The service may come from the channel attribute set by "cd", in which
 * case the positional arguments shift left by one. Sampling runs on a
 * daemon thread so the telnet session is not blocked; results are pushed to
 * the channel asynchronously and the method returns immediately.
 */
@Override
public String execute(CommandContext commandContext, String[] args) {
    Channel channel = commandContext.getRemote();
    // Default service previously selected with the "cd" command, if any.
    String service = channel.attr(ChangeTelnet.SERVICE_KEY).get();
    if ((service == null || service.length() == 0) && (args == null || args.length == 0)) {
        return "Please input service name, eg: \r\ncount XxxService\r\ncount XxxService xxxMethod\r\ncount XxxService xxxMethod 10\r\nor \"cd XxxService\" firstly.";
    }
    StringBuilder buf = new StringBuilder();
    if (service != null && service.length() > 0) {
        buf.append("Use default service ").append(service).append(".\r\n");
    }
    String method;
    String times;
    // With a default service the first argument is the method; otherwise it
    // is the service name.
    if (service == null || service.length() == 0) {
        service = args[0];
        method = args.length > 1 ? args[1] : null;
    } else {
        method = args.length > 0 ? args[0] : null;
    }
    // A numeric "method" is actually the repeat count ("count Svc 10").
    if (StringUtils.isNumber(method)) {
        times = method;
        method = null;
    } else {
        // NOTE(review): when the service came from the channel, arguments are
        // shifted left by one, yet the repeat count is still read from
        // args[2] — confirm this index is intended for the "cd" case.
        times = args.length > 2 ? args[2] : "1";
    }
    if (!StringUtils.isNumber(times)) {
        return "Illegal times " + times + ", must be integer.";
    }
    final int t = Integer.parseInt(times);
    // Locate the exported invoker by simple name, FQN, URL path or service key.
    Invoker<?> invoker = null;
    for (Exporter<?> exporter : dubboProtocol.getExporters()) {
        if (service.equals(exporter.getInvoker().getInterface().getSimpleName())
                || service.equals(exporter.getInvoker().getInterface().getName())
                || service.equals(exporter.getInvoker().getUrl().getPath())
                || service.equals(exporter.getInvoker().getUrl().getServiceKey())) {
            invoker = exporter.getInvoker();
            break;
        }
    }
    if (invoker != null) {
        if (t > 0) {
            final String mtd = method;
            final Invoker<?> inv = invoker;
            Thread thread = new Thread(
                    () -> {
                        for (int i = 0; i < t; i++) {
                            String result = count(inv, mtd);
                            try {
                                send(channel, "\r\n" + result);
                            } catch (RemotingException e1) {
                                // Channel gone; stop sampling.
                                return;
                            }
                            // Sleep between samples, but not after the last one.
                            if (i < t - 1) {
                                try {
                                    Thread.sleep(1000);
                                } catch (InterruptedException ignored) {
                                }
                            }
                        }
                        try {
                            send(channel, "\r\n" + PROMPT);
                        } catch (RemotingException ignored) {
                        }
                    },
                    "TelnetCount");
            thread.setDaemon(true);
            thread.start();
        }
    } else {
        buf.append("No such service ").append(service);
    }
    return buf.toString();
}
|
/**
 * Counting by the full service key ("group/interface:version") must locate
 * the exported invoker and render a statistics table with the recorded
 * succeeded call. The latch waits for the asynchronous sampling thread to
 * push its output before asserting.
 */
@Test
void testCountByServiceKey() throws Exception {
    String methodName = "sayHello";
    RpcStatus.removeStatus(url, methodName);
    String[] args = new String[] {"g/demo:1.0.0", "sayHello", "1"};
    ExtensionLoader.getExtensionLoader(Protocol.class)
            .getExtension(DubboProtocol.NAME)
            .export(mockInvoker);
    // Record one successful 10ms invocation so the table has data.
    RpcStatus.beginCount(url, methodName);
    RpcStatus.endCount(url, methodName, 10L, true);
    count.execute(mockCommandContext, args);
    latch.await();
    StringBuilder sb = new StringBuilder();
    for (Object o : mockChannel.getReceivedObjects()) {
        sb.append(o.toString());
    }
    assertThat(sb.toString(), containsString(buildTable(methodName, 10, 10, "1", "0", "0")));
}
|
/**
 * Entry point of the step builder: returns the first step, forcing callers
 * to supply the character name before anything else.
 */
public static NameStep newBuilder() {
    return new CharacterSteps();
}
|
@Test
void testBuildWarrior() {
    final var warrior = CharacterStepBuilder.newBuilder()
        .name("Cuauhtemoc")
        .fighterClass("aztec")
        .withWeapon("spear")
        .withAbility("speed")
        .withAbility("strength")
        .noMoreAbilities()
        .build();

    // The basic attributes survive the step chain.
    assertEquals("Cuauhtemoc", warrior.getName());
    assertEquals("aztec", warrior.getFighterClass());
    assertEquals("spear", warrior.getWeapon());
    assertNotNull(warrior.toString());

    // Both registered abilities are captured, nothing more.
    final var abilities = warrior.getAbilities();
    assertNotNull(abilities);
    assertEquals(2, abilities.size());
    assertTrue(abilities.contains("speed"));
    assertTrue(abilities.contains("strength"));
}
|
/**
 * Starts the ZooKeeper connection manager. The startup callback slot is
 * claimed with a CAS, so a second concurrent start fails fast with
 * {@link IllegalStateException}.
 *
 * <p>The store may start later than this call returns: if the connection is
 * not yet ready, {@code tryStartStore()} returns immediately and the
 * connection event triggers the actual store startup.
 */
public void start(Callback<None> callback)
{
    _managerStarted = true;
    if (!_startupCallback.compareAndSet(null, callback))
    {
        throw new IllegalStateException("Already starting");
    }
    try
    {
        _zkConnection.start();
        //Trying to start store here. If the connection is not ready, will return immediately.
        //The connection event will trigger the actual store startup
        tryStartStore();
        LOG.info("Started ZooKeeper connection to {}", _zkConnectString);
    }
    catch (Exception e)
    {
        // Release the callback slot so a later start attempt is possible,
        // then report the failure to the caller.
        _startupCallback.set(null);
        callback.onError(e);
    }
}
|
/**
 * Dark warm-up flow: the announcer first marks up to the warm-up cluster,
 * waits out the warm-up duration, marks down from it, and finally marks up
 * to the regular cluster. Verifies the resulting uri properties in the store
 * and the exact sequence of emitted service-discovery status events.
 */
@Test (invocationCount = 1, timeOut = 5000)
public void testWarmup() throws Exception
{
    ScheduledExecutorService warmupExecutorService = Executors.newSingleThreadScheduledExecutor();
    boolean isDarkWarmupEnabled = true;
    String warmupClusterName = "warmup" + _cluster;
    String expectedWarmupUriNodePath = "/d2/uris/" + warmupClusterName + "/ephemoral-0000000000";
    int warmupDuration = 2; //run warm-up for 2 seconds
    Map<Integer, PartitionData> partitions = Collections.singletonMap(0, new PartitionData(0.5));
    UriProperties warmupProperties = new UriProperties(warmupClusterName, Collections.singletonMap(URI.create(_uri), partitions));
    ZooKeeperAnnouncer announcer = getZooKeeperWarmupAnnouncer(_cluster, _uri, WEIGHT, isDarkWarmupEnabled, warmupClusterName, warmupDuration, warmupExecutorService);
    TestDataHelper.MockServiceDiscoveryEventEmitter eventEmitter = getMockServiceDiscoveryEventEmitter();
    announcer.setEventEmitter(eventEmitter);
    ZooKeeperConnectionManager manager = createManagerForWarmupTests(false, warmupDuration, announcer);
    ZooKeeperEphemeralStore<UriProperties> store = createAndStartUriStore(announcer);
    FutureCallback<None> managerStartCallback = new FutureCallback<>();
    manager.start(managerStartCallback);
    // Wait till warm up completes and announcer successfully marks up to the regular cluster
    managerStartCallback.get();
    UriProperties properties = store.get(_cluster);
    assertNotNull(properties);
    assertEquals(properties.getPartitionDataMap(URI.create(_uri)).get(DefaultPartitionAccessor.DEFAULT_PARTITION_ID).getWeight(), WEIGHT);
    assertEquals(properties.Uris().size(), 1);
    // If warm up has happened, mark down for the warm up cluster should be successful
    UriProperties warmupPropertiesAfterMarkdown = store.get(warmupClusterName);
    assertNotNull(warmupPropertiesAfterMarkdown);
    assertEquals(warmupPropertiesAfterMarkdown.Uris().size(), 0);
    // Expected event order: ready on warm-up cluster, down on warm-up
    // cluster, ready on the regular cluster.
    List<ServiceDiscoveryEventEmitter.StatusUpdateActionType> expectedActionTypes = Arrays.asList(
        ServiceDiscoveryEventEmitter.StatusUpdateActionType.MARK_READY,
        ServiceDiscoveryEventEmitter.StatusUpdateActionType.MARK_DOWN,
        ServiceDiscoveryEventEmitter.StatusUpdateActionType.MARK_READY);
    List<String> expectedUriNodePaths = Arrays.asList(expectedWarmupUriNodePath, expectedWarmupUriNodePath, _expectedUriNodePath);
    eventEmitter.verifySDStatusActiveUpdateIntentEvents(
        Arrays.asList(Collections.singletonList(warmupClusterName), Collections.singletonList(warmupClusterName), Collections.singletonList(_cluster)),
        expectedActionTypes,
        expectedUriNodePaths
    );
    List<String> expectedWriteClusters = Arrays.asList(warmupClusterName, warmupClusterName, _cluster);
    eventEmitter.verifySDStatusWriteEvents(expectedWriteClusters, Arrays.asList(_cluster, _cluster, _cluster),
        expectedActionTypes, expectedUriNodePaths, Arrays.asList(warmupProperties.toString(), warmupProperties.toString(), properties.toString()),
        Arrays.asList(0, 0, 0), expectedUriNodePaths, Arrays.asList(true, true, true));
    shutdownManager(manager);
}
|
/**
 * Injects Aliyun RAM signing parameters (signature, sign data, access key —
 * and a security token when STS is active) into the login identity context
 * for a naming request. Does nothing when the context has no valid
 * credentials.
 */
@Override
public void doInject(RequestResource resource, RamContext context, LoginIdentityContext result) {
    if (context.validate()) {
        try {
            String accessKey = context.getAccessKey();
            String secretKey = context.getSecretKey();
            // STS temporary credentials take priority over long-lived AK/SK.
            if (StsConfig.getInstance().isStsOn()) {
                StsCredential stsCredential = StsCredentialHolder.getInstance().getStsCredential();
                accessKey = stsCredential.getAccessKeyId();
                secretKey = stsCredential.getAccessKeySecret();
                result.setParameter(IdentifyConstants.SECURITY_TOKEN_HEADER, stsCredential.getSecurityToken());
            }
            String signatureKey = secretKey;
            // With a region configured, derive a V4 signing key and mark the
            // signature version accordingly.
            if (StringUtils.isNotEmpty(context.getRegionId())) {
                signatureKey = CalculateV4SigningKeyUtil
                        .finalSigningKeyStringWithDefaultInfo(secretKey, context.getRegionId());
                result.setParameter(RamConstants.SIGNATURE_VERSION, RamConstants.V4);
            }
            String signData = getSignData(getGroupedServiceName(resource));
            String signature = SignUtil.sign(signData, signatureKey);
            result.setParameter(SIGNATURE_FILED, signature);
            result.setParameter(DATA_FILED, signData);
            result.setParameter(AK_FILED, accessKey);
        } catch (Exception e) {
            // Best effort: signing failures are logged and the request
            // proceeds without the injected parameters.
            NAMING_LOGGER.error("inject ak/sk failed.", e);
        }
    }
}
|
/**
 * Injecting for a grouped naming resource must set exactly the three
 * parameters (ak, data, signature); the sign data ends with the grouped
 * service name and the signature matches a reference signing of that data.
 */
@Test
void testDoInjectWithGroup() throws Exception {
    resource = RequestResource.namingBuilder().setResource("test@@aaa").setGroup("group").build();
    LoginIdentityContext actual = new LoginIdentityContext();
    namingResourceInjector.doInject(resource, ramContext, actual);
    assertEquals(3, actual.getAllKey().size());
    assertEquals(PropertyKeyConst.ACCESS_KEY, actual.getParameter("ak"));
    assertTrue(actual.getParameter("data").endsWith("@@test@@aaa"));
    String expectSign = SignUtil.sign(actual.getParameter("data"), PropertyKeyConst.SECRET_KEY);
    assertEquals(expectSign, actual.getParameter("signature"));
}
|
/**
 * Resolves the target broker name from the request's ext fields and forwards
 * the command: one-way RPCs are fired and forgotten, otherwise the response
 * (or error) is written back asynchronously.
 *
 * @return {@code null} whenever the request was forwarded (responses are
 *         written asynchronously); only a missing broker-name field yields
 *         an immediate error response
 */
protected RemotingCommand request(ChannelHandlerContext ctx, RemotingCommand request,
    ProxyContext context, long timeoutMillis) throws Exception {
    // SEND_MESSAGE_V2 carries the broker name under an abbreviated field key.
    final String brokerNameField = request.getCode() == RequestCode.SEND_MESSAGE_V2
        ? BROKER_NAME_FIELD_FOR_SEND_MESSAGE_V2
        : BROKER_NAME_FIELD;
    final String brokerName = request.getExtFields().get(brokerNameField);
    if (brokerName == null) {
        return RemotingCommand.buildErrorResponse(ResponseCode.VERSION_NOT_SUPPORTED,
            "Request doesn't have field bname");
    }
    if (request.isOnewayRPC()) {
        messagingProcessor.requestOneway(context, brokerName, request, timeoutMillis);
        return null;
    }
    messagingProcessor.request(context, brokerName, request, timeoutMillis)
        .thenAccept(r -> writeResponse(ctx, context, request, r))
        .exceptionally(t -> {
            writeErrResponse(ctx, context, request, t);
            return null;
        });
    return null;
}
|
/**
 * When the downstream request future completes exceptionally with a broker
 * exception, the activity must return null (async handling) and write an
 * error response carrying the broker's response code to the channel.
 */
@Test
public void testRequestBrokerException() throws Exception {
    ArgumentCaptor<RemotingCommand> captor = ArgumentCaptor.forClass(RemotingCommand.class);
    String brokerName = "broker";
    String remark = "exception";
    CompletableFuture<RemotingCommand> future = new CompletableFuture<>();
    future.completeExceptionally(new MQBrokerException(ResponseCode.FLUSH_DISK_TIMEOUT, remark));
    when(messagingProcessorMock.request(any(), eq(brokerName), any(), anyLong())).thenReturn(future);
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.PULL_MESSAGE, null);
    request.addExtField(AbstractRemotingActivity.BROKER_NAME_FIELD, brokerName);
    RemotingCommand remotingCommand = remotingActivity.request(ctx, request, null, 10000);
    assertThat(remotingCommand).isNull();
    verify(ctx, times(1)).writeAndFlush(captor.capture());
    assertThat(captor.getValue().getCode()).isEqualTo(ResponseCode.FLUSH_DISK_TIMEOUT);
}
|
/**
 * Returns {@code true} when any of the 8 ASCII bytes packed into
 * {@code word} is an upper-case letter, using the SWAR bit pattern computed
 * by {@code applyUpperCasePattern} (non-zero iff a match exists).
 */
public static boolean containsUpperCase(final long word) {
    return applyUpperCasePattern(word) != 0;
}
|
/**
 * Exhaustive check over a shuffled extended-ASCII table: for every 4-byte
 * window, the int variant of containsUpperCase must agree with a reference
 * computed via Character.isUpperCase on each byte.
 */
@Test
void containsUpperCaseInt() {
    // given
    final byte[] asciiTable = getExtendedAsciiTable();
    shuffleArray(asciiTable, random);
    // when
    for (int idx = 0; idx < asciiTable.length; idx += Integer.BYTES) {
        final int value = getInt(asciiTable, idx);
        final boolean containsUpperCase = SWARUtil.containsUpperCase(value);
        boolean expectedContainsUpperCase = false;
        for (int i = 0; i < Integer.BYTES; i++) {
            expectedContainsUpperCase |= Character.isUpperCase(asciiTable[idx + i]);
        }
        // then
        assertEquals(expectedContainsUpperCase, containsUpperCase);
    }
}
|
/**
 * Executes one frontend command: decodes the inbound payload, runs the
 * command, and — in the finally block, in a fixed order — clears the query
 * context, releases connection resources, flushes pending output, reports
 * close failures, signals write completion and releases the buffer.
 */
@Override
public void run() {
    boolean isNeedFlush = false;
    boolean sqlShowEnabled = ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getProps().getValue(ConfigurationPropertyKey.SQL_SHOW);
    try {
        if (sqlShowEnabled) {
            fillLogMDC();
        }
        isNeedFlush = executeCommand(context,
                databaseProtocolFrontendEngine.getCodecEngine().createPacketPayload((ByteBuf) message, context.channel().attr(CommonConstants.CHARSET_ATTRIBUTE_KEY).get()));
        // CHECKSTYLE:OFF
    } catch (final Exception ex) {
        // CHECKSTYLE:ON
        processException(ex);
        // CHECKSTYLE:OFF
    } catch (final Error error) {
        // CHECKSTYLE:ON
        // Errors are wrapped so the common exception path renders them.
        processException(new RuntimeException(error));
    } finally {
        connectionSession.clearQueryContext();
        Collection<SQLException> exceptions = Collections.emptyList();
        try {
            connectionSession.getDatabaseConnectionManager().closeExecutionResources();
        } catch (final BackendConnectionException ex) {
            // Collect only SQL-level close failures for reporting below.
            exceptions = ex.getExceptions().stream().filter(SQLException.class::isInstance).map(SQLException.class::cast).collect(Collectors.toList());
        }
        if (isNeedFlush) {
            context.flush();
        }
        processClosedExceptions(exceptions);
        context.pipeline().fireUserEventTriggered(new WriteCompleteEvent());
        if (sqlShowEnabled) {
            clearLogMDC();
        }
        // Composite buffers need their components released before the
        // top-level release.
        if (message instanceof CompositeByteBuf) {
            releaseCompositeByteBuf((CompositeByteBuf) message);
        }
        ((ByteBuf) message).release();
    }
}
|
/**
 * Happy-path run: the decoded packet is executed, the channel is flushed,
 * and both the executor and the connection resources are closed.
 */
@Test
void assertRunByCommandExecutor() throws SQLException, BackendConnectionException {
    when(commandExecutor.execute()).thenReturn(Collections.singleton(databasePacket));
    when(engine.getCommandExecuteEngine().getCommandPacket(payload, commandPacketType, connectionSession)).thenReturn(commandPacket);
    when(engine.getCommandExecuteEngine().getCommandExecutor(commandPacketType, commandPacket, connectionSession)).thenReturn(commandExecutor);
    when(engine.getCommandExecuteEngine().getCommandPacketType(payload)).thenReturn(commandPacketType);
    when(engine.getCodecEngine().createPacketPayload(message, StandardCharsets.UTF_8)).thenReturn(payload);
    CommandExecutorTask actual = new CommandExecutorTask(engine, connectionSession, handlerContext, message);
    actual.run();
    verify(handlerContext).flush();
    verify(commandExecutor).close();
    verify(databaseConnectionManager).closeExecutionResources();
}
|
/**
 * Builds {@code CreateSourceProperties} from the supplied WITH-clause literals.
 * Any {@code ConfigException} is re-thrown as a {@code KsqlException} with
 * "configuration" reworded as "property" so the message matches KSQL terminology.
 *
 * @param literals the raw property literals from the statement
 * @return the parsed properties
 */
public static CreateSourceProperties from(final Map<String, Literal> literals) {
    try {
        return new CreateSourceProperties(literals, DurationParser::parse, false);
    } catch (final ConfigException e) {
        throw new KsqlException(e.getMessage().replace("configuration", "property"), e);
    }
}
|
/**
 * Supplying both VALUE_FORMAT and FORMAT must be rejected, since FORMAT
 * already fixes both key and value formats.
 */
@Test
public void shouldThrowIfValueFormatAndFormatProvided() {
    // When:
    final Exception e = assertThrows(
        KsqlException.class,
        () -> CreateSourceProperties.from(
            ImmutableMap.<String, Literal>builder()
                .putAll(MINIMUM_VALID_PROPS)
                .put(VALUE_FORMAT_PROPERTY, new StringLiteral("JSON"))
                .put(FORMAT_PROPERTY, new StringLiteral("KAFKA"))
                .build())
    );
    // Then:
    assertThat(e.getMessage(), containsString("Cannot supply both 'VALUE_FORMAT' and 'FORMAT' properties, "
        + "as 'FORMAT' sets both key and value formats."));
    assertThat(e.getMessage(), containsString("Either use just 'FORMAT', or use 'KEY_FORMAT' and 'VALUE_FORMAT'."));
}
|
/**
 * Decodes a feign {@link Response} body into the requested JSON type.
 * <p>
 * 404/204 responses are mapped to an empty {@code JSONObject}/{@code JSONArray}
 * (or {@code null} for {@code String}); {@code null} or empty bodies decode to
 * {@code null}.
 *
 * @param type the target type; must be {@code JSONObject}, {@code JSONArray} or {@code String}
 * @throws DecodeException if the type is unsupported or the body is not valid JSON
 * @throws IOException     if reading the response body fails
 */
@Override
public Object decode(Response response, Type type) throws IOException, DecodeException {
    // Treat "no content" statuses as empty values of the requested type.
    if (response.status() == 404 || response.status() == 204) {
        if (JSONObject.class.isAssignableFrom((Class<?>) type)) {
            return new JSONObject();
        }
        if (JSONArray.class.isAssignableFrom((Class<?>) type)) {
            return new JSONArray();
        }
        if (String.class.equals(type)) {
            return null;
        }
        throw new DecodeException(response.status(),
                format("%s is not a type supported by this decoder.", type), response.request());
    }
    if (response.body() == null) {
        return null;
    }
    try (Reader reader = response.body().asReader(response.charset())) {
        // mark/reset is needed for the empty-body probe below; buffer readers that lack it.
        Reader bodyReader = reader.markSupported() ? reader : new BufferedReader(reader);
        bodyReader.mark(1);
        if (bodyReader.read() == -1) {
            return null; // Empty body
        }
        bodyReader.reset();
        return decodeBody(response, type, bodyReader);
    } catch (JSONException jsonException) {
        // instanceof is null-safe, so no separate null check is needed here.
        if (jsonException.getCause() instanceof IOException) {
            // Surface I/O failures (e.g. connection reset) as the original IOException.
            throw (IOException) jsonException.getCause();
        }
        throw new DecodeException(response.status(), jsonException.getMessage(), response.request(),
                jsonException);
    }
}
|
/**
 * When reading the body throws an {@link IOException}, the decoder must
 * propagate that exact exception rather than wrapping it.
 */
@Test
void checkedException() throws IOException {
    Response.Body body = mock(Response.Body.class);
    when(body.asReader(any())).thenThrow(new IOException("test exception"));
    Response response = Response.builder()
        .status(200)
        .reason("OK")
        .headers(Collections.emptyMap())
        .body(body)
        .request(request)
        .build();
    Exception exception = assertThrows(IOException.class,
        () -> new JsonDecoder().decode(response, JSONArray.class));
    assertThat(exception.getMessage()).isEqualTo("test exception");
}
|
/**
 * Converts a Kafka cluster template into the equivalent node pool template,
 * copying only the fields that apply to node pools.
 *
 * @param template the Kafka cluster template, possibly {@code null}
 * @return the node pool template, or {@code null} when no template was given
 */
static KafkaNodePoolTemplate convertTemplate(KafkaClusterTemplate template) {
    if (template == null) {
        return null;
    }
    return new KafkaNodePoolTemplateBuilder()
            .withPodSet(template.getPodSet())
            .withPod(template.getPod())
            .withPerPodService(template.getPerPodService())
            .withPerPodRoute(template.getPerPodRoute())
            .withPerPodIngress(template.getPerPodIngress())
            .withPersistentVolumeClaim(template.getPersistentVolumeClaim())
            .withKafkaContainer(template.getKafkaContainer())
            .withInitContainer(template.getInitContainer())
            .build();
}
|
/**
 * Converts a template where only some fields are set: the copied fields
 * (container env, PVC metadata) must survive, fields that do not apply to
 * node pools (e.g. bootstrap service) are dropped, and unset fields stay null.
 */
@Test
public void testConvertTemplateWithSomeValues() {
    KafkaClusterTemplate kafkaTemplate = new KafkaClusterTemplateBuilder()
            .withNewKafkaContainer()
                .addToEnv(new ContainerEnvVarBuilder().withName("MY_ENV_VAR").withValue("my-env-var-value").build())
            .endKafkaContainer()
            .withNewPersistentVolumeClaim()
                .withNewMetadata()
                    .addToAnnotations(Map.of("custom-anno", "custom-anno-value"))
                .endMetadata()
            .endPersistentVolumeClaim()
            .withNewBootstrapService()
                .withNewMetadata()
                    .addToAnnotations(Map.of("other-custom-anno", "other-custom-anno-value"))
                .endMetadata()
            .endBootstrapService()
            .build();
    KafkaNodePoolTemplate template = VirtualNodePoolConverter.convertTemplate(kafkaTemplate);
    assertThat(template, is(notNullValue()));
    // Unset source fields remain null on the converted template.
    assertThat(template.getInitContainer(), is(nullValue()));
    assertThat(template.getPodSet(), is(nullValue()));
    assertThat(template.getPod(), is(nullValue()));
    assertThat(template.getPerPodService(), is(nullValue()));
    assertThat(template.getPerPodRoute(), is(nullValue()));
    assertThat(template.getPerPodIngress(), is(nullValue()));
    // Set source fields are copied verbatim.
    assertThat(template.getKafkaContainer(), is(notNullValue()));
    assertThat(template.getKafkaContainer().getEnv().size(), is(1));
    assertThat(template.getKafkaContainer().getEnv().get(0).getName(), is("MY_ENV_VAR"));
    assertThat(template.getKafkaContainer().getEnv().get(0).getValue(), is("my-env-var-value"));
    assertThat(template.getPersistentVolumeClaim(), is(notNullValue()));
    assertThat(template.getPersistentVolumeClaim().getMetadata().getAnnotations(), is(Map.of("custom-anno", "custom-anno-value")));
}
|
/**
 * Builds an {@code Arrays.asList(...)} expression statement whose arguments
 * are the literal representations of the given values.
 *
 * @param source the values to embed as call arguments
 * @return the generated {@code java.util.Arrays.asList(...)} statement
 */
public static ExpressionStmt createArraysAsListFromList(List<?> source) {
    final ExpressionStmt statement = createArraysAsListExpression();
    final MethodCallExpr asListCall = statement.getExpression().asMethodCallExpr();
    final NodeList<Expression> arguments = new NodeList<>();
    for (Object value : source) {
        arguments.add(getExpressionForObject(value));
    }
    asListCall.setArguments(arguments);
    statement.setExpression(asListCall);
    return statement;
}
|
/**
 * The generated statement must render as {@code java.util.Arrays.asList(...)}
 * with string values quoted and numeric values printed verbatim.
 */
@Test
void createArraysAsListFromList() {
    List<String> strings = IntStream.range(0, 3)
            .mapToObj(i -> "Element" + i)
            .collect(Collectors.toList());
    ExpressionStmt retrieved = CommonCodegenUtils.createArraysAsListFromList(strings);
    assertThat(retrieved).isNotNull();
    // Strings appear as quoted literals in the generated source.
    String arguments = strings.stream()
            .map(string -> "\"" + string + "\"")
            .collect(Collectors.joining(", "));
    String expected = String.format("java.util.Arrays.asList(%s);", arguments);
    String retrievedString = retrieved.toString();
    assertThat(retrievedString).isEqualTo(expected);
    List<Double> doubles = IntStream.range(0, 3)
            .mapToObj(i -> i * 0.17)
            .collect(Collectors.toList());
    retrieved = CommonCodegenUtils.createArraysAsListFromList(doubles);
    assertThat(retrieved).isNotNull();
    // Numbers appear unquoted, using their String.valueOf form.
    arguments = doubles.stream()
            .map(String::valueOf)
            .collect(Collectors.joining(", "));
    expected = String.format("java.util.Arrays.asList(%s);", arguments);
    retrievedString = retrieved.toString();
    assertThat(retrievedString).isEqualTo(expected);
}
|
/**
 * Convenience overload: returns the logger for {@code name} with no
 * additional tags, delegating to the two-argument variant.
 */
@Override
public ProcessingLogger getLogger(
    final String name
) {
    return getLogger(name, Collections.emptyMap());
}
|
/**
 * Requesting the same logger name repeatedly (even with different tags) must
 * reuse the cached instance: the underlying factories are invoked only once.
 */
@Test
public void shouldReturnExistingLogger() {
    // When:
    factory.getLogger("boo.far", Collections.singletonMap("tag-value", "some-id-1"));
    factory.getLogger("boo.far", Collections.singletonMap("tag-value", "some-id-2"));
    factory.getLogger("boo.far", Collections.singletonMap("tag-value", "some-id-3"));
    final Sensor sensor = metricCollectors.getMetrics().getSensor("boo.far");
    // Then:
    verify(innerFactory, times(1)).getLogger("boo.far");
    verify(loggerFactory, times(1)).apply(config, innerLogger);
    verify(loggerWithMetricsFactory, times(1)).apply(metricCollectors.getMetrics());
    verify(loggerWithMetricsFactoryHelper, times(1)).apply(logger, sensor);
}
|
/**
 * Logs a failed logout attempt at DEBUG level, including the error message
 * and the caller's IP addresses. No-op when DEBUG logging is disabled.
 *
 * @param errorMessage description of the failure; must not be {@code null}
 */
@Override
public void logoutFailure(HttpRequest request, String errorMessage) {
    checkRequest(request);
    requireNonNull(errorMessage, "error message can't be null");
    // Skip building the log arguments entirely when DEBUG is off.
    if (!LOGGER.isDebugEnabled()) {
        return;
    }
    // NOTE(review): emptyIfNull looks redundant after requireNonNull above — confirm.
    LOGGER.debug("logout failure [error|{}][IP|{}|{}]",
        emptyIfNull(errorMessage),
        request.getRemoteAddr(), getAllIps(request));
}
|
/**
 * The logout-failure log line must include both the remote address and the
 * X-Forwarded-For addresses from the request.
 */
@Test
public void logout_logs_X_Forwarded_For_header_from_request() {
    HttpRequest request = mockRequest("1.2.3.4", List.of("2.3.4.5"));
    underTest.logoutFailure(request, "bad token");
    verifyLog("logout failure [error|bad token][IP|1.2.3.4|2.3.4.5]", Set.of("login", "logout success"));
}
|
/**
 * Lists partition names for the given table by delegating to the Hive
 * metastore operations; the version range is ignored by this implementation.
 */
@Override
public List<String> listPartitionNames(String dbName, String tblName, TableVersionRange version) {
    return hmsOps.getPartitionKeys(dbName, tblName);
}
|
/**
 * listPartitionNames must return the partition keys reported by the
 * underlying metastore operations.
 */
@Test
public void testGetPartitionKeys() {
    Assert.assertEquals(
        Lists.newArrayList("col1"), hiveMetadata.listPartitionNames("db1", "tbl1", TableVersionRange.empty()));
}
|
/**
 * {@inheritDoc}
 * <p>Only a single column (index 1) is exposed, and it carries no table name.
 */
@Override
public String getTableName(final int column) {
    Preconditions.checkArgument(column == 1);
    return "";
}
|
/** The single supported column reports an empty table name. */
@Test
void assertGetTableName() throws SQLException {
    assertThat(actualMetaData.getTableName(1), is(""));
}
|
/**
 * Decodes an ABI-encoded function result into typed values, delegating to
 * the shared decoder instance.
 *
 * @param rawInput         hex-encoded return data
 * @param outputParameters the expected output parameter types
 * @return the decoded values, in declaration order
 */
public static List<Type> decode(String rawInput, List<TypeReference<Type>> outputParameters) {
    return decoder.decodeFunctionResult(rawInput, outputParameters);
}
|
/**
 * An ABI-encoded zero-length string result must decode to the empty string.
 */
@Test
public void testFunctionEmptyStringResultDecode() {
    Function function =
        new Function(
            "test",
            Collections.emptyList(),
            Collections.singletonList(new TypeReference<Utf8String>() {}));
    // Offset word (0x20) followed by a zero length word == empty string.
    List<Type> utf8Strings =
        FunctionReturnDecoder.decode(
            "0x0000000000000000000000000000000000000000000000000000000000000020"
                + "0000000000000000000000000000000000000000000000000000000000000000",
            function.getOutputParameters());
    assertEquals(utf8Strings.get(0).getValue(), (""));
}
|
/**
 * Registers the RPC engine to use for the given protocol, unless one has
 * already been configured (existing settings win).
 *
 * @param conf     configuration to update
 * @param protocol the protocol interface
 * @param engine   the RpcEngine implementation to register
 */
public static void setProtocolEngine(Configuration conf,
    Class<?> protocol, Class<?> engine) {
    final String engineKey = ENGINE_PROP + "." + protocol.getName();
    if (conf.get(engineKey) == null) {
        conf.setClass(engineKey, engine, RpcEngine.class);
    }
}
|
/**
 * setProtocolEngine must not overwrite an engine that is already configured
 * for the protocol: the second call with a different engine is a no-op.
 */
@Test
public void testSetProtocolEngine() {
    Configuration conf = new Configuration();
    RPC.setProtocolEngine(conf, StoppedProtocol.class, StoppedRpcEngine.class);
    RpcEngine rpcEngine = RPC.getProtocolEngine(StoppedProtocol.class, conf);
    assertTrue(rpcEngine instanceof StoppedRpcEngine);
    // Attempting to replace the engine leaves the original registration intact.
    RPC.setProtocolEngine(conf, StoppedProtocol.class, ProtobufRpcEngine.class);
    rpcEngine = RPC.getProtocolEngine(StoppedProtocol.class, conf);
    assertTrue(rpcEngine instanceof StoppedRpcEngine);
}
|
/**
 * Resolves how this POJO serializer snapshot relates to a previous one.
 * <p>
 * The checks run from strictest-failure to weakest-success: snapshot type and
 * POJO class must match and all previous subclass serializer snapshots must be
 * present, then field/registration compatibility decides between
 * incompatible, compatible-after-migration, compatible-with-reconfiguration,
 * and finally compatible-as-is.
 */
@Override
public TypeSerializerSchemaCompatibility<T> resolveSchemaCompatibility(
        TypeSerializerSnapshot<T> oldSerializerSnapshot) {
    if (!(oldSerializerSnapshot instanceof PojoSerializerSnapshot)) {
        return TypeSerializerSchemaCompatibility.incompatible();
    }

    PojoSerializerSnapshot<T> previousPojoSerializerSnapshot =
            (PojoSerializerSnapshot<T>) oldSerializerSnapshot;

    final Class<T> previousPojoClass =
            previousPojoSerializerSnapshot.snapshotData.getPojoClass();
    final LinkedOptionalMap<Field, TypeSerializerSnapshot<?>> fieldSerializerSnapshots =
            previousPojoSerializerSnapshot.snapshotData.getFieldSerializerSnapshots();
    final LinkedOptionalMap<Class<?>, TypeSerializerSnapshot<?>>
            registeredSubclassSerializerSnapshots =
                    previousPojoSerializerSnapshot.snapshotData
                            .getRegisteredSubclassSerializerSnapshots();
    final LinkedOptionalMap<Class<?>, TypeSerializerSnapshot<?>>
            nonRegisteredSubclassSerializerSnapshots =
                    previousPojoSerializerSnapshot.snapshotData
                            .getNonRegisteredSubclassSerializerSnapshots();

    // A different POJO class can never be compatible.
    if (previousPojoClass != snapshotData.getPojoClass()) {
        return TypeSerializerSchemaCompatibility.incompatible();
    }

    // Absent keys/values mean classes or snapshots could not be restored.
    if (registeredSubclassSerializerSnapshots.hasAbsentKeysOrValues()) {
        return TypeSerializerSchemaCompatibility.incompatible();
    }

    if (nonRegisteredSubclassSerializerSnapshots.hasAbsentKeysOrValues()) {
        return TypeSerializerSchemaCompatibility.incompatible();
    }

    final IntermediateCompatibilityResult<T> preExistingFieldSerializersCompatibility =
            getCompatibilityOfPreExistingFields(fieldSerializerSnapshots);

    if (preExistingFieldSerializersCompatibility.isIncompatible()) {
        return TypeSerializerSchemaCompatibility.incompatible();
    }

    final IntermediateCompatibilityResult<T> preExistingRegistrationsCompatibility =
            getCompatibilityOfPreExistingRegisteredSubclasses(
                    registeredSubclassSerializerSnapshots);

    if (preExistingRegistrationsCompatibility.isIncompatible()) {
        return TypeSerializerSchemaCompatibility.incompatible();
    }

    // Migration takes precedence over reconfiguration when either would apply.
    if (newPojoSerializerIsCompatibleAfterMigration(
            preExistingFieldSerializersCompatibility,
            preExistingRegistrationsCompatibility,
            fieldSerializerSnapshots)) {
        return TypeSerializerSchemaCompatibility.compatibleAfterMigration();
    }

    if (newPojoSerializerIsCompatibleWithReconfiguredSerializer(
            preExistingFieldSerializersCompatibility,
            preExistingRegistrationsCompatibility,
            registeredSubclassSerializerSnapshots,
            nonRegisteredSubclassSerializerSnapshots)) {
        return TypeSerializerSchemaCompatibility.compatibleWithReconfiguredSerializer(
                constructReconfiguredPojoSerializer(
                        preExistingFieldSerializersCompatibility,
                        registeredSubclassSerializerSnapshots,
                        preExistingRegistrationsCompatibility,
                        nonRegisteredSubclassSerializerSnapshots));
    }

    return TypeSerializerSchemaCompatibility.compatibleAsIs();
}
|
/**
 * When the new snapshot drops fields that existed in the old one, the result
 * must be "compatible after migration" rather than incompatible.
 */
@Test
void testResolveSchemaCompatibilityWithRemovedFields() {
    final PojoSerializerSnapshot<TestPojo> oldSnapshot =
        buildTestSnapshot(
            Arrays.asList(
                mockRemovedField(ID_FIELD),
                NAME_FIELD,
                mockRemovedField(HEIGHT_FIELD)));

    final PojoSerializerSnapshot<TestPojo> newSnapshot =
        buildTestSnapshot(Collections.singletonList(NAME_FIELD));

    final TypeSerializerSchemaCompatibility<TestPojo> resultCompatibility =
        newSnapshot.resolveSchemaCompatibility(oldSnapshot);

    assertThat(resultCompatibility.isCompatibleAfterMigration()).isTrue();
}
|
/**
 * Attempts to authenticate and create a session for the given token.
 * <p>
 * Binds a Shiro {@link Subject} (reusing {@code currentSessionId} if present)
 * to the current thread before logging in. Invalid credentials yield
 * {@code Optional.empty()}; an unavailable authentication service is
 * re-thrown. Both failure paths emit an audit event.
 *
 * @param currentSessionId existing session id to resume, or blank/{@code null} for a new session
 * @param host             remote host of the caller, recorded for auditing
 * @param authToken        credentials plus the acting principal
 * @return the created session, or empty when credentials were rejected
 * @throws AuthenticationServiceUnavailableException if the backing auth service cannot be reached
 */
public Optional<Session> login(@Nullable String currentSessionId, String host,
                               ActorAwareAuthenticationToken authToken) throws AuthenticationServiceUnavailableException {
    // Blank session ids are treated the same as "no previous session".
    final String previousSessionId = StringUtils.defaultIfBlank(currentSessionId, null);
    final Subject subject = new Subject.Builder().sessionId(previousSessionId).host(host).buildSubject();

    // Shiro's login/getSession rely on the subject bound to the current thread.
    ThreadContext.bind(subject);

    try {
        final Session session = subject.getSession();
        subject.login(authToken);

        return createSession(subject, session, host);
    } catch (AuthenticationServiceUnavailableException e) {
        log.info("Session creation failed due to authentication service being unavailable. Actor: \"{}\"",
                authToken.getActor().urn());
        final Map<String, Object> auditEventContext = ImmutableMap.of(
                "remote_address", host,
                "message", "Authentication service unavailable: " + e.getMessage()
        );
        auditEventSender.failure(authToken.getActor(), SESSION_CREATE, auditEventContext);
        // Propagate so callers can distinguish outage from bad credentials.
        throw e;
    } catch (AuthenticationException e) {
        log.info("Invalid credentials in session create request. Actor: \"{}\"", authToken.getActor().urn());
        final Map<String, Object> auditEventContext = ImmutableMap.of(
                "remote_address", host
        );
        auditEventSender.failure(authToken.getActor(), SESSION_CREATE, auditEventContext);
        return Optional.empty();
    }
}
|
/**
 * When a realm signals the auth service is unavailable, login must re-throw,
 * leave the subject unauthenticated, and audit the failure with a message
 * mentioning the unavailability.
 */
@Test
public void serviceUnavailable() {
    setUpUserMock();

    assertFalse(SecurityUtils.getSubject().isAuthenticated());

    // First realm will throw, second realm will be unable to authenticate because user has no account
    securityManager.setRealms(ImmutableList.of(throwingRealm(), new SimpleAccountRealm()));

    assertThatThrownBy(() -> sessionCreator.login(null, "host", validToken)).isInstanceOf(
        AuthenticationServiceUnavailableException.class);

    assertThat(SecurityUtils.getSubject().isAuthenticated()).isFalse();
    verify(auditEventSender).failure(eq(AuditActor.user("username")), anyString(),
        argThat(map -> StringUtils.containsIgnoreCase((String) map.get("message"), "unavailable")));
}
|
/**
 * Checks whether this window and {@code other} share at least one millisecond.
 * Windows are half-open intervals, so touching endpoints do not overlap.
 *
 * @param other the window to compare against; must be a {@code TimeWindow}
 * @throws IllegalArgumentException if {@code other} is a different window type
 */
@Override
public boolean overlap(final Window other) throws IllegalArgumentException {
    if (!getClass().equals(other.getClass())) {
        throw new IllegalArgumentException("Cannot compare windows of different type. Other window has type "
            + other.getClass() + ".");
    }
    final TimeWindow that = (TimeWindow) other;
    // Two half-open intervals intersect iff each starts before the other ends.
    return startMs < that.endMs && that.startMs < endMs;
}
|
/**
 * Any other window whose start falls inside this window must overlap,
 * regardless of where it ends.
 */
@Test
public void shouldOverlapIfOtherWindowStartIsWithinThisWindow() {
    /*
     * This:        [-------)
     * Other:            [-------)
     */
    assertTrue(window.overlap(new TimeWindow(start, end + 1)));
    assertTrue(window.overlap(new TimeWindow(start, 150)));
    assertTrue(window.overlap(new TimeWindow(75, end + 1)));
    assertTrue(window.overlap(new TimeWindow(75, 150)));
}
|
/**
 * Looks up a dictionary data entry by dictionary type and label via the mapper.
 *
 * @return the matching entry, or {@code null} if none exists
 */
@Override
public DictDataDO parseDictData(String dictType, String label) {
    return dictDataMapper.selectByDictTypeAndLabel(dictType, label);
}
|
/**
 * With two entries sharing a dictionary type, parseDictData must return
 * exactly the one whose label matches.
 */
@Test
public void testParseDictData() {
    // mock data
    DictDataDO dictDataDO = randomDictDataDO().setDictType("yunai").setLabel("1");
    dictDataMapper.insert(dictDataDO);
    DictDataDO dictDataDO02 = randomDictDataDO().setDictType("yunai").setLabel("2");
    dictDataMapper.insert(dictDataDO02);
    // prepare parameters
    String dictType = "yunai";
    String label = "1";
    // invoke
    DictDataDO dbDictData = dictDataService.parseDictData(dictType, label);
    // assert
    assertEquals(dictDataDO, dbDictData);
}
|
/**
 * Collapses runs of consecutive slashes in the URL path into a single slash.
 * Only the path is rewritten; the scheme/authority root and the query string
 * are left untouched.
 *
 * @return this instance, for method chaining
 */
public URLNormalizer removeDuplicateSlashes() {
    final String root = HttpURL.getRoot(url);
    final String path = toURL().getPath();
    final String collapsedPath = path.replaceAll("/{2,}", "/");
    // Replace only the first occurrence (the root+path prefix), leaving the query alone.
    url = StringUtils.replaceOnce(url, root + path, root + collapsedPath);
    return this;
}
|
/**
 * Duplicate slashes are collapsed in the path only — the query string keeps
 * its slashes — and a trailing "//" on a bare host becomes "/".
 */
@Test
public void testRemoveDuplicateSlashes() {
    s = "http://www.example.com/a//b///c////d/////e.html";
    t = "http://www.example.com/a/b/c/d/e.html";
    assertEquals(t, n(s).removeDuplicateSlashes().toString());

    // Query-string slashes must be preserved verbatim.
    s = "http://www.example.com/a//b//c.html?path=/d//e///f";
    t = "http://www.example.com/a/b/c.html?path=/d//e///f";
    assertEquals(t, n(s).removeDuplicateSlashes().toString());

    // This one is for HTTP Collector GitHub issue #163:
    s = "http://www.example.com//";
    t = "http://www.example.com/";
    assertEquals(t, n(s).removeDuplicateSlashes().toString());
}
|
/**
 * Obtains an XA connection from the underlying data source and wraps it in
 * the XA-aware connection proxy.
 *
 * @throws SQLException if the XA connection cannot be obtained
 */
@Override
public Connection getConnection() throws SQLException {
    XAConnection xaConnection = xaDataSource.getXAConnection();
    return getConnectionProxy(xaConnection);
}
|
/**
 * getConnection must return a ConnectionProxyXA that wraps exactly the
 * XAConnection and physical Connection produced by the XADataSource.
 */
@Test
public void testGetConnection() throws SQLException {
    // Mock the full chain: XADataSource -> XAConnection -> Connection -> metadata.
    Connection connection = Mockito.mock(Connection.class);
    Mockito.when(connection.getAutoCommit()).thenReturn(true);
    DatabaseMetaData metaData = Mockito.mock(DatabaseMetaData.class);
    Mockito.when(metaData.getURL()).thenReturn("jdbc:mysql:xxx");
    Mockito.when(connection.getMetaData()).thenReturn(metaData);
    XAConnection xaConnection = Mockito.mock(XAConnection.class);
    Mockito.when(xaConnection.getConnection()).thenReturn(connection);
    XADataSource xaDataSource = Mockito.mock(XADataSource.class);
    Mockito.when(xaDataSource.getXAConnection()).thenReturn(xaConnection);

    DataSourceProxyXANative dataSourceProxyXANative = new DataSourceProxyXANative(xaDataSource);
    Connection connFromDataSourceProxyXANative = dataSourceProxyXANative.getConnection();
    assertTrue(connFromDataSourceProxyXANative instanceof ConnectionProxyXA);
    // The proxy must hold the exact same instances produced by the mocks.
    XAConnection xaConnectionFromProxy = ((ConnectionProxyXA)connFromDataSourceProxyXANative).getWrappedXAConnection();
    Assertions.assertSame(xaConnection, xaConnectionFromProxy);
    Connection connectionFromProxy = ((ConnectionProxyXA)connFromDataSourceProxyXANative).getWrappedConnection();
    Assertions.assertSame(connection, connectionFromProxy);
}
|
/**
 * Registers the rewrite plugin bean used by the ShenYu plugin chain.
 *
 * @return a new {@code RewritePlugin} instance
 */
@Bean
public ShenyuPlugin rewritePlugin() {
    return new RewritePlugin();
}
|
/**
 * The auto-configuration must expose a bean named "rewritePlugin" whose
 * plugin name matches the REWRITE enum entry.
 */
@Test
public void testRewritePlugin() {
    new ApplicationContextRunner()
        .withConfiguration(AutoConfigurations.of(RewritePluginConfiguration.class))
        .withBean(RewritePluginConfigurationTest.class)
        .withPropertyValues("debug=true")
        .run(context -> {
            ShenyuPlugin plugin = context.getBean("rewritePlugin", ShenyuPlugin.class);
            assertNotNull(plugin);
            assertThat(plugin.named()).isEqualTo(PluginEnum.REWRITE.getName());
        });
}
|
/**
 * Returns the deserialized record value, deserializing lazily on first
 * access and caching the result for subsequent calls.
 * <p>
 * NOTE(review): the lazy initialization is unsynchronized — assumes
 * single-threaded access or benign duplicate deserialization; confirm
 * with callers.
 */
@Override
public Object getValue() {
    if (value == null) {
        value = serializationService.toObject(record.getValue());
    }
    return value;
}
|
/** The view must expose the deserialized record value. */
@Test
public void test_getValue() {
    assertEquals(value, view.getValue());
}
|
/**
 * Injects the scope/module/application/framework models into any
 * {@code ScopeModelAware} extension instance after initialization.
 * Non-aware instances pass through untouched.
 *
 * @return the same instance that was passed in
 */
@Override
public Object postProcessAfterInitialization(Object instance, String name) throws Exception {
    if (!(instance instanceof ScopeModelAware)) {
        return instance;
    }
    ScopeModelAware aware = (ScopeModelAware) instance;
    aware.setScopeModel(scopeModel);
    // The narrower models are only injected when this processor has them.
    if (moduleModel != null) {
        aware.setModuleModel(moduleModel);
    }
    if (applicationModel != null) {
        aware.setApplicationModel(applicationModel);
    }
    if (frameworkModel != null) {
        aware.setFrameworkModel(frameworkModel);
    }
    return instance;
}
|
/**
 * A ScopeModelAware instance processed with a module-scoped processor must
 * receive the module model plus its enclosing application and framework models.
 */
@Test
void testPostProcessAfterInitialization() throws Exception {
    ScopeModelAwareExtensionProcessor processor = new ScopeModelAwareExtensionProcessor(moduleModel);
    MockScopeModelAware mockScopeModelAware = new MockScopeModelAware();
    Object object = processor.postProcessAfterInitialization(
        mockScopeModelAware, mockScopeModelAware.getClass().getName());
    // The processor returns the same instance it was given.
    Assertions.assertEquals(object, mockScopeModelAware);
    Assertions.assertEquals(mockScopeModelAware.getScopeModel(), moduleModel);
    Assertions.assertEquals(mockScopeModelAware.getFrameworkModel(), frameworkModel);
    Assertions.assertEquals(mockScopeModelAware.getApplicationModel(), applicationModel);
    Assertions.assertEquals(mockScopeModelAware.getModuleModel(), moduleModel);
}
|
/**
 * Calls the payment service to process a payment.
 *
 * @return the service's result, or {@code false} when the service is
 *         unreachable or rejects the request with a client error
 */
Boolean processPayment() {
    final String paymentUrl = "http://localhost:30301/payment/process";
    try {
        ResponseEntity<Boolean> response = restTemplateBuilder
            .build()
            .postForEntity(paymentUrl, "processing payment", Boolean.class);
        LOGGER.info("Payment processing result: {}", response.getBody());
        return response.getBody();
    } catch (ResourceAccessException | HttpClientErrorException e) {
        LOGGER.error("Error communicating with payment service: {}", e.getMessage());
        return false;
    }
}
|
/**
 * A ResourceAccessException from the payment endpoint must be swallowed and
 * reported as a {@code false} payment result.
 */
@Test
void testProcessPayment_ResourceAccessException() {
    // Arrange
    when(restTemplate.postForEntity(eq("http://localhost:30301/payment/process"), anyString(), eq(Boolean.class)))
        .thenThrow(new ResourceAccessException("Service unavailable"));

    // Act
    Boolean result = orderService.processPayment();

    // Assert
    assertEquals(false, result);
}
|
/**
 * Counts consecutive empty DATA frames without end_of_stream and aborts the
 * connection with ENHANCE_YOUR_CALM once the configured limit is exceeded.
 * Any non-empty frame or end_of_stream resets the counter.
 */
@Override
public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding, boolean endOfStream)
        throws Http2Exception {
    if (endOfStream || data.isReadable()) {
        emptyDataFrames = 0;
    // Note: the post-increment makes this fire exactly once, on the
    // (maxConsecutiveEmptyFrames + 1)-th consecutive empty frame.
    } else if (emptyDataFrames++ == maxConsecutiveEmptyFrames && !violationDetected) {
        violationDetected = true;
        throw Http2Exception.connectionError(Http2Error.ENHANCE_YOUR_CALM,
                "Maximum number %d of empty data frames without end_of_stream flag received",
                maxConsecutiveEmptyFrames);
    }

    return super.onDataRead(ctx, streamId, data, padding, endOfStream);
}
|
/**
 * A non-empty frame resets the empty-frame counter, so the violation only
 * triggers after the limit of consecutive empty frames is exceeded again;
 * all frames up to the violation are forwarded to the delegate.
 */
@Test
public void testEmptyDataFramesWithNonEmptyInBetween() throws Http2Exception {
    final Http2EmptyDataFrameListener listener = new Http2EmptyDataFrameListener(frameListener, 2);
    listener.onDataRead(ctx, 1, Unpooled.EMPTY_BUFFER, 0, false);
    // The non-empty frame resets the consecutive-empty counter.
    listener.onDataRead(ctx, 1, nonEmpty, 0, false);
    listener.onDataRead(ctx, 1, Unpooled.EMPTY_BUFFER, 0, false);
    listener.onDataRead(ctx, 1, Unpooled.EMPTY_BUFFER, 0, false);

    // Third consecutive empty frame exceeds the limit of 2.
    assertThrows(Http2Exception.class, new Executable() {
        @Override
        public void execute() throws Throwable {
            listener.onDataRead(ctx, 1, Unpooled.EMPTY_BUFFER, 0, false);
        }
    });
    verify(frameListener, times(4)).onDataRead(eq(ctx), eq(1), any(ByteBuf.class), eq(0), eq(false));
}
|
/**
 * Adds a pipeline to the named group, creating the group if it does not yet
 * exist. The group name is sanitized first; an empty name maps to the default
 * group naming rules of {@code BasicPipelineConfigs}.
 *
 * @param groupName raw group name from the caller
 * @param pipeline  the pipeline to add
 */
public void addPipeline(String groupName, PipelineConfig pipeline) {
    final String group = BasicPipelineConfigs.sanitizedGroupName(groupName);
    if (!hasGroup(group)) {
        createNewGroup(group, pipeline);
        return;
    }
    // An existing group accepts the pipeline via the first PipelineConfigs that saves it.
    for (PipelineConfigs candidate : this) {
        if (candidate.save(pipeline, group)) {
            return;
        }
    }
}
|
/**
 * Adding a pipeline with an empty group name must land it in the existing
 * default group rather than creating a new group.
 */
@Test
public void shouldReturnTrueWhenGroupNameIsEmptyAndDefaultGroupExists() {
    PipelineConfig existingPipeline = createPipelineConfig("pipeline1", "stage1");
    PipelineConfigs defaultGroup = createGroup("defaultGroup", existingPipeline);
    PipelineGroups pipelineGroups = new PipelineGroups(defaultGroup);
    PipelineConfig newPipeline = createPipelineConfig("pipeline3", "stage1");

    pipelineGroups.addPipeline("", newPipeline);

    // Still one group; the default group now contains both pipelines.
    assertThat(pipelineGroups.size(), is(1));
    assertThat(defaultGroup, hasItem(existingPipeline));
    assertThat(defaultGroup, hasItem(newPipeline));
}
|
/**
 * Component shutdown: detaches the map listener and drops both map
 * references so the component holds no state after deactivation.
 */
@Deactivate
public void deactivate() {
    deviceToPipeconf.removeListener(mapListener);
    deviceToPipeconf = null;
    pipeconfToDevices = null;
    log.info("Stopped");
}
|
/** After deactivation both internal maps must be nulled out. */
@Test
public void deactivate() {
    store.deactivate();
    assertNull("Should be null", store.deviceToPipeconf);
    assertNull("Should be null", store.pipeconfToDevices);
}
|
/**
 * Scans every method of the bean with the recurring-job finder callback
 * (which registers annotated methods) and returns the bean unchanged.
 */
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
    ReflectionUtils.doWithMethods(bean.getClass(), recurringJobFinderMethodCallback);
    return bean;
}
|
/**
 * A @Recurring method that specifies neither a cron expression nor an
 * interval must be rejected with an IllegalArgumentException during
 * post-processing.
 */
@Test
void beansWithMethodsAnnotatedWithRecurringAnnotationNoCronOrIntervalWillThrowException() {
    // GIVEN
    final RecurringJobPostProcessor recurringJobPostProcessor = getRecurringJobPostProcessor();

    // WHEN & THEN
    assertThatThrownBy(() -> recurringJobPostProcessor.postProcessAfterInitialization(new MyServiceWithRecurringJobWithoutCronAndInterval(), "not important"))
        .isInstanceOf(IllegalArgumentException.class);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.