focal_method stringlengths 13 60.9k | test_case stringlengths 25 109k |
|---|---|
@Override
@SuppressWarnings("MissingDefault")
public boolean offer(final E e) {
if (e == null) {
throw new NullPointerException();
}
long mask;
E[] buffer;
long pIndex;
while (true) {
long producerLimit = lvProducerLimit();
pIndex = lvProducerIndex(this);
// lower bit is indicative of resize, if we see it we spin until it's cleared
if ((pIndex & 1) == 1) {
continue;
}
// pIndex is even (lower bit is 0) -> actual index is (pIndex >> 1)
// mask/buffer may get changed by resizing -> only use for array access after successful CAS.
mask = this.producerMask;
buffer = this.producerBuffer;
// a successful CAS ties the ordering, lv(pIndex)-[mask/buffer]->cas(pIndex)
// assumption behind this optimization is that queue is almost always empty or near empty
if (producerLimit <= pIndex) {
int result = offerSlowPath(mask, pIndex, producerLimit);
switch (result) {
case 0:
break;
case 1:
continue;
case 2:
return false;
case 3:
resize(mask, buffer, pIndex, e);
return true;
}
}
if (casProducerIndex(this, pIndex, pIndex + 2)) {
break;
}
}
// INDEX visible before ELEMENT, consistent with consumer expectation
final long offset = modifiedCalcElementOffset(pIndex, mask);
soElement(buffer, offset, e);
return true;
} | @Test(dataProvider = "full")
public void offer_whenFull(MpscGrowableArrayQueue<Integer> queue) {
assertThat(queue.offer(1)).isFalse();
assertThat(queue).hasSize(FULL_SIZE);
} |
@Override
public Observable<Void> fetchObservable() {
return this.toObservable();
} | @Test
public void testFetchObservable() {
assertNotNull(hystrixCommand.fetchObservable());
} |
static ProjectMeasuresQuery newProjectMeasuresQuery(List<Criterion> criteria, @Nullable Set<String> projectUuids) {
ProjectMeasuresQuery query = new ProjectMeasuresQuery();
Optional.ofNullable(projectUuids).ifPresent(query::setProjectUuids);
criteria.forEach(criterion -> processCriterion(criterion, query));
return query;
} | @Test
public void accept_empty_query() {
ProjectMeasuresQuery result = newProjectMeasuresQuery(emptyList(), emptySet());
assertThat(result.getMetricCriteria()).isEmpty();
} |
public static String loadMigrationRule(Set<ClassLoader> classLoaders, String fileName) {
String rawRule = "";
if (checkFileNameExist(fileName)) {
try {
try (FileInputStream input = new FileInputStream(fileName)) {
return readString(input);
}
} catch (Throwable e) {
logger.warn(
COMMON_IO_EXCEPTION,
"",
"",
"Failed to load " + fileName + " file from " + fileName + "(ignore this file): "
+ e.getMessage(),
e);
}
}
try {
List<ClassLoader> classLoadersToLoad = new LinkedList<>();
classLoadersToLoad.add(ClassUtils.getClassLoader());
classLoadersToLoad.addAll(classLoaders);
for (Set<URL> urls : ClassLoaderResourceLoader.loadResources(fileName, classLoadersToLoad)
.values()) {
for (URL url : urls) {
InputStream is = url.openStream();
if (is != null) {
return readString(is);
}
}
}
} catch (Throwable e) {
logger.warn(
COMMON_IO_EXCEPTION,
"",
"",
"Failed to load " + fileName + " file from " + fileName + "(ignore this file): " + e.getMessage(),
e);
}
return rawRule;
} | @Test
void testLoadMigrationRule() {
Set<ClassLoader> classLoaderSet = new HashSet<>();
classLoaderSet.add(ClassUtils.getClassLoader());
String rule = ConfigUtils.loadMigrationRule(classLoaderSet, "dubbo-migration.yaml");
Assertions.assertNotNull(rule);
} |
public int validate(
final ServiceContext serviceContext,
final List<ParsedStatement> statements,
final SessionProperties sessionProperties,
final String sql
) {
requireSandbox(serviceContext);
final KsqlExecutionContext ctx = requireSandbox(snapshotSupplier.apply(serviceContext));
final Injector injector = injectorFactory.apply(ctx, serviceContext);
final KsqlConfig ksqlConfig = ctx.getKsqlConfig();
int numPersistentQueries = 0;
for (final ParsedStatement parsed : statements) {
final PreparedStatement<?> prepared = ctx.prepare(
parsed,
(isVariableSubstitutionEnabled(sessionProperties, ksqlConfig)
? sessionProperties.getSessionVariables()
: Collections.emptyMap())
);
final ConfiguredStatement<?> configured = ConfiguredStatement.of(prepared,
SessionConfig.of(ksqlConfig, sessionProperties.getMutableScopedProperties())
);
final int currNumPersistentQueries = validate(
serviceContext,
configured,
sessionProperties,
ctx,
injector
);
numPersistentQueries += currNumPersistentQueries;
if (currNumPersistentQueries > 0
&& QueryCapacityUtil.exceedsPersistentQueryCapacity(ctx, ksqlConfig)) {
QueryCapacityUtil.throwTooManyActivePersistentQueriesException(ctx, ksqlConfig, sql);
}
}
return numPersistentQueries;
} | @Test
public void shouldExecuteWithSpecifiedServiceContext() {
// Given:
final List<ParsedStatement> statements = givenParsed(SOME_STREAM_SQL);
final ServiceContext otherServiceContext =
SandboxedServiceContext.create(TestServiceContext.create());
// When:
validator.validate(otherServiceContext, statements, sessionProperties, "sql");
// Then:
verify(distributedStatementValidator).create(
argThat(configured(preparedStatement(instanceOf(CreateStream.class)))),
same(otherServiceContext),
any()
);
} |
@Override
public int union(String... names) {
return get(unionAsync(names));
} | @Test
public void testUnion() {
RScoredSortedSet<String> set1 = redisson.getScoredSortedSet("simple1");
set1.add(1, "one");
set1.add(2, "two");
RScoredSortedSet<String> set2 = redisson.getScoredSortedSet("simple2");
set2.add(1, "one");
set2.add(2, "two");
set2.add(3, "three");
RScoredSortedSet<String> out = redisson.getScoredSortedSet("out");
assertThat(out.union(set1.getName(), set2.getName())).isEqualTo(3);
assertThat(out.readAll()).containsOnly("one", "two", "three");
assertThat(out.getScore("one")).isEqualTo(2);
assertThat(out.getScore("two")).isEqualTo(4);
assertThat(out.getScore("three")).isEqualTo(3);
} |
public static String wrapWithColorTag(final String str, final Color color)
{
return prependColorTag(str, color) + CLOSING_COLOR_TAG;
} | @Test
public void wrapWithColorTag()
{
COLOR_HEXSTRING_MAP.forEach((color, hex) ->
{
assertEquals("<col=" + hex + ">test</col>", ColorUtil.wrapWithColorTag("test", color));
assertEquals("<col=" + hex + "></col>", ColorUtil.wrapWithColorTag("", color));
});
} |
@Override
public Num calculate(BarSeries series, Position position) {
if (position.isClosed()) {
Num entryPrice = position.getEntry().getValue();
return position.getProfit().dividedBy(entryPrice).multipliedBy(series.hundred());
}
return series.zero();
} | @Test
public void calculateWithWinningShortPositions() {
MockBarSeries series = new MockBarSeries(numFunction, 100, 90, 100, 95, 95, 100);
TradingRecord tradingRecord = new BaseTradingRecord(Trade.sellAt(0, series), Trade.buyAt(1, series),
Trade.sellAt(2, series), Trade.buyAt(3, series));
AnalysisCriterion profit = getCriterion();
assertNumEquals(10 + 5, profit.calculate(series, tradingRecord));
} |
@Override
public MetadataNode child(String name) {
if (name.equals(ClusterImageBrokersNode.NAME)) {
return new ClusterImageBrokersNode(image);
} else if (name.equals(ClusterImageControllersNode.NAME)) {
return new ClusterImageControllersNode(image);
} else {
return null;
}
} | @Test
public void testControllersChild() {
MetadataNode child = NODE.child("controllers");
assertNotNull(child);
assertEquals(ClusterImageControllersNode.class, child.getClass());
} |
public static NetworkInterface findNetworkInterface() {
List<NetworkInterface> validNetworkInterfaces = emptyList();
try {
validNetworkInterfaces = getValidNetworkInterfaces();
} catch (Throwable e) {
logger.warn(e);
}
NetworkInterface result = null;
// Try to find the preferred one
for (NetworkInterface networkInterface : validNetworkInterfaces) {
if (isPreferredNetworkInterface(networkInterface)) {
result = networkInterface;
break;
}
}
if (result == null) { // If not found, try to get the first one
for (NetworkInterface networkInterface : validNetworkInterfaces) {
Enumeration<InetAddress> addresses = networkInterface.getInetAddresses();
while (addresses.hasMoreElements()) {
Optional<InetAddress> addressOp = toValidAddress(addresses.nextElement());
if (addressOp.isPresent()) {
try {
if (addressOp.get().isReachable(100)) {
return networkInterface;
}
} catch (IOException e) {
// ignore
}
}
}
}
}
if (result == null) {
result = first(validNetworkInterfaces);
}
return result;
} | @Test
void testIgnoreGivenPrefixInterfaceName() {
// store the origin ignored interfaces
String originIgnoredInterfaces = this.getIgnoredInterfaces();
try {
NetworkInterface networkInterface = NetUtils.findNetworkInterface();
assertNotNull(networkInterface);
// ignore the given prefix network interface's display name
String displayName = networkInterface.getDisplayName();
if (StringUtils.isNotEmpty(displayName) && displayName.length() > 2) {
String ignoredInterfaces = Pattern.quote(displayName.substring(0, 1)) + ".*";
this.setIgnoredInterfaces(ignoredInterfaces);
NetworkInterface newNetworkInterface = NetUtils.findNetworkInterface();
if (newNetworkInterface != null) {
assertTrue(!newNetworkInterface.getDisplayName().startsWith(displayName.substring(0, 1)));
}
}
} finally {
// recover the origin ignored interfaces
this.setIgnoredInterfaces(originIgnoredInterfaces);
}
} |
public Model parse(File file) throws PomParseException {
try (FileInputStream fis = new FileInputStream(file)) {
return parse(fis);
} catch (IOException ex) {
if (ex instanceof PomParseException) {
throw (PomParseException) ex;
}
LOGGER.debug("", ex);
throw new PomParseException(String.format("Unable to parse pom '%s'", file), ex);
}
} | @Test
public void testParse_File() throws Exception {
File file = BaseTest.getResourceAsFile(this, "pom/mailapi-1.4.3.pom");
PomParser instance = new PomParser();
String expVersion = "1.4.3";
Model result = instance.parse(file);
assertEquals("Invalid version extracted", expVersion, result.getParentVersion());
} |
public static Find find(String regex) {
return find(regex, 0);
} | @Test
@Category(NeedsRunner.class)
public void testFindNameGroup() {
PCollection<String> output =
p.apply(Create.of("aj", "xj", "yj", "zj"))
.apply(Regex.find("(?<namedgroup>[xyz])", "namedgroup"));
PAssert.that(output).containsInAnyOrder("x", "y", "z");
p.run();
} |
public void assignStates() {
checkStateMappingCompleteness(allowNonRestoredState, operatorStates, tasks);
Map<OperatorID, OperatorState> localOperators = new HashMap<>(operatorStates);
// find the states of all operators belonging to this task and compute additional
// information in first pass
for (ExecutionJobVertex executionJobVertex : tasks) {
List<OperatorIDPair> operatorIDPairs = executionJobVertex.getOperatorIDs();
Map<OperatorID, OperatorState> operatorStates =
CollectionUtil.newHashMapWithExpectedSize(operatorIDPairs.size());
for (OperatorIDPair operatorIDPair : operatorIDPairs) {
OperatorID operatorID =
operatorIDPair
.getUserDefinedOperatorID()
.filter(localOperators::containsKey)
.orElse(operatorIDPair.getGeneratedOperatorID());
OperatorState operatorState = localOperators.remove(operatorID);
if (operatorState == null) {
operatorState =
new OperatorState(
operatorID,
executionJobVertex.getParallelism(),
executionJobVertex.getMaxParallelism());
}
operatorStates.put(operatorIDPair.getGeneratedOperatorID(), operatorState);
}
final TaskStateAssignment stateAssignment =
new TaskStateAssignment(
executionJobVertex,
operatorStates,
consumerAssignment,
vertexAssignments);
vertexAssignments.put(executionJobVertex, stateAssignment);
for (final IntermediateResult producedDataSet : executionJobVertex.getInputs()) {
consumerAssignment.put(producedDataSet.getId(), stateAssignment);
}
}
// repartition state
for (TaskStateAssignment stateAssignment : vertexAssignments.values()) {
if (stateAssignment.hasNonFinishedState
// FLINK-31963: We need to run repartitioning for stateless operators that have
// upstream output or downstream input states.
|| stateAssignment.hasUpstreamOutputStates()
|| stateAssignment.hasDownstreamInputStates()) {
assignAttemptState(stateAssignment);
}
}
// actually assign the state
for (TaskStateAssignment stateAssignment : vertexAssignments.values()) {
// If upstream has output states or downstream has input states, even the empty task
// state should be assigned for the current task in order to notify this task that the
// old states will send to it which likely should be filtered.
if (stateAssignment.hasNonFinishedState
|| stateAssignment.isFullyFinished
|| stateAssignment.hasUpstreamOutputStates()
|| stateAssignment.hasDownstreamInputStates()) {
assignTaskStateToExecutionJobVertices(stateAssignment);
}
}
} | @Test
public void testChannelStateAssignmentTwoGatesPartiallyDownscaling()
throws JobException, JobExecutionException {
JobVertex upstream1 = createJobVertex(new OperatorID(), 2);
JobVertex upstream2 = createJobVertex(new OperatorID(), 2);
JobVertex downstream = createJobVertex(new OperatorID(), 3);
List<OperatorID> operatorIds =
Stream.of(upstream1, upstream2, downstream)
.map(v -> v.getOperatorIDs().get(0).getGeneratedOperatorID())
.collect(Collectors.toList());
Map<OperatorID, OperatorState> states = buildOperatorStates(operatorIds, 3);
connectVertices(upstream1, downstream, ARBITRARY, FULL);
connectVertices(upstream2, downstream, ROUND_ROBIN, ROUND_ROBIN);
Map<OperatorID, ExecutionJobVertex> vertices =
toExecutionVertices(upstream1, upstream2, downstream);
new StateAssignmentOperation(0, new HashSet<>(vertices.values()), states, false)
.assignStates();
assertThat(
getAssignedState(vertices.get(operatorIds.get(2)), operatorIds.get(2), 0)
.getInputChannelState()
.size())
.isEqualTo(6);
assertThat(
getAssignedState(vertices.get(operatorIds.get(2)), operatorIds.get(2), 1)
.getInputChannelState()
.size())
.isEqualTo(6);
assertThat(
getAssignedState(vertices.get(operatorIds.get(2)), operatorIds.get(2), 2)
.getInputChannelState()
.size())
.isEqualTo(6);
} |
public synchronized <K, V> KStream<K, V> stream(final String topic) {
return stream(Collections.singleton(topic));
} | @Test
public void shouldThrowWhenSubscribedToAPatternWithSetAndUnsetResetPolicies() {
builder.stream(Pattern.compile("some-regex"), Consumed.with(AutoOffsetReset.EARLIEST));
builder.stream(Pattern.compile("some-regex"));
assertThrows(TopologyException.class, builder::build);
} |
public static Optional<Expression> convert(
org.apache.flink.table.expressions.Expression flinkExpression) {
if (!(flinkExpression instanceof CallExpression)) {
return Optional.empty();
}
CallExpression call = (CallExpression) flinkExpression;
Operation op = FILTERS.get(call.getFunctionDefinition());
if (op != null) {
switch (op) {
case IS_NULL:
return onlyChildAs(call, FieldReferenceExpression.class)
.map(FieldReferenceExpression::getName)
.map(Expressions::isNull);
case NOT_NULL:
return onlyChildAs(call, FieldReferenceExpression.class)
.map(FieldReferenceExpression::getName)
.map(Expressions::notNull);
case LT:
return convertFieldAndLiteral(Expressions::lessThan, Expressions::greaterThan, call);
case LT_EQ:
return convertFieldAndLiteral(
Expressions::lessThanOrEqual, Expressions::greaterThanOrEqual, call);
case GT:
return convertFieldAndLiteral(Expressions::greaterThan, Expressions::lessThan, call);
case GT_EQ:
return convertFieldAndLiteral(
Expressions::greaterThanOrEqual, Expressions::lessThanOrEqual, call);
case EQ:
return convertFieldAndLiteral(
(ref, lit) -> {
if (NaNUtil.isNaN(lit)) {
return Expressions.isNaN(ref);
} else {
return Expressions.equal(ref, lit);
}
},
call);
case NOT_EQ:
return convertFieldAndLiteral(
(ref, lit) -> {
if (NaNUtil.isNaN(lit)) {
return Expressions.notNaN(ref);
} else {
return Expressions.notEqual(ref, lit);
}
},
call);
case NOT:
return onlyChildAs(call, CallExpression.class)
.flatMap(FlinkFilters::convert)
.map(Expressions::not);
case AND:
return convertLogicExpression(Expressions::and, call);
case OR:
return convertLogicExpression(Expressions::or, call);
case STARTS_WITH:
return convertLike(call);
}
}
return Optional.empty();
} | @Test
public void testIsNotNull() {
Expression expr = resolve(Expressions.$("field1").isNotNull());
Optional<org.apache.iceberg.expressions.Expression> actual = FlinkFilters.convert(expr);
assertThat(actual).isPresent();
UnboundPredicate<Object> expected =
org.apache.iceberg.expressions.Expressions.notNull("field1");
assertPredicatesMatch(expected, actual.get());
} |
public KsqlTarget target(final URI server) {
return target(server, Collections.emptyMap());
} | @Test
public void shouldHandleArbitraryErrorsOnGetRequests() {
// Given:
server.setErrorCode(417);
// When:
KsqlTarget target = ksqlClient.target(serverUri);
RestResponse<ServerInfo> response = target.getServerInfo();
// Then:
assertThat(server.getHttpMethod(), is(HttpMethod.GET));
assertThat(response.isErroneous(), is(true));
assertThat(response.getErrorMessage().getMessage(),
is("The server returned an unexpected error: Expectation Failed"));
} |
public Object toIdObject(String baseId) throws AmqpProtocolException {
if (baseId == null) {
return null;
}
try {
if (hasAmqpUuidPrefix(baseId)) {
String uuidString = strip(baseId, AMQP_UUID_PREFIX_LENGTH);
return UUID.fromString(uuidString);
} else if (hasAmqpUlongPrefix(baseId)) {
String longString = strip(baseId, AMQP_ULONG_PREFIX_LENGTH);
return UnsignedLong.valueOf(longString);
} else if (hasAmqpStringPrefix(baseId)) {
return strip(baseId, AMQP_STRING_PREFIX_LENGTH);
} else if (hasAmqpBinaryPrefix(baseId)) {
String hexString = strip(baseId, AMQP_BINARY_PREFIX_LENGTH);
byte[] bytes = convertHexStringToBinary(hexString);
return new Binary(bytes);
} else {
// We have a string without any type prefix, transmit it as-is.
return baseId;
}
} catch (IllegalArgumentException e) {
throw new AmqpProtocolException("Unable to convert ID value");
}
} | @Test
public void testToIdObjectWithEncodedUlong() throws Exception {
UnsignedLong longId = UnsignedLong.valueOf(123456789L);
String provided = AMQPMessageIdHelper.AMQP_ULONG_PREFIX + "123456789";
Object idObject = messageIdHelper.toIdObject(provided);
assertNotNull("null object should not have been returned", idObject);
assertEquals("expected id object was not returned", longId, idObject);
} |
public static Path getJobAttemptPath(JobContext context, Path out) {
return getJobAttemptPath(getAppAttemptId(context), out);
} | @Test
public void testJobCommit() throws Exception {
Path jobAttemptPath = jobCommitter.getJobAttemptPath(job);
FileSystem fs = jobAttemptPath.getFileSystem(conf);
Set<String> uploads = runTasks(job, 4, 3);
assertNotEquals(0, uploads.size());
assertPathExists(fs, "No job attempt path", jobAttemptPath);
jobCommitter.commitJob(job);
assertEquals("Should have aborted no uploads",
0, results.getAborts().size());
assertEquals("Should have deleted no uploads",
0, results.getDeletes().size());
assertEquals("Should have committed all uploads",
uploads, getCommittedIds(results.getCommits()));
assertPathDoesNotExist(fs, "jobAttemptPath not deleted", jobAttemptPath);
} |
public boolean isProactiveSupportEnabled() {
if (properties == null) {
return false;
}
return getMetricsEnabled();
} | @Test
public void isProactiveSupportDisabledFull() {
// Given
Properties serverProperties = new Properties();
serverProperties
.setProperty(BaseSupportConfig.CONFLUENT_SUPPORT_METRICS_ENABLE_CONFIG, "false");
serverProperties.setProperty(
BaseSupportConfig.CONFLUENT_SUPPORT_METRICS_ENDPOINT_INSECURE_ENABLE_CONFIG,
"true"
);
serverProperties.setProperty(
BaseSupportConfig.CONFLUENT_SUPPORT_METRICS_ENDPOINT_SECURE_ENABLE_CONFIG,
"true"
);
BaseSupportConfig supportConfig = new TestSupportConfig(serverProperties);
// When/Then
assertFalse(supportConfig.isProactiveSupportEnabled());
} |
@Override
public boolean tryInit(long expectedInsertions, double falseProbability) {
return get(tryInitAsync(expectedInsertions, falseProbability));
} | @Test
public void test() {
RBloomFilter<String> filter = redisson.getBloomFilter("filter");
filter.tryInit(550000000L, 0.5);
test(filter);
filter.delete();
assertThat(filter.tryInit(550000000L, 0.03)).isTrue();
test(filter);
} |
@Override
public ChannelFuture writeHeaders(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int padding,
boolean endStream, ChannelPromise promise) {
return writeHeaders0(ctx, streamId, headers, false, 0, (short) 0, false, padding, endStream, promise);
} | @Test
public void headersWriteShouldOpenStreamForPush() throws Exception {
writeAllFlowControlledFrames();
Http2Stream parent = createStream(STREAM_ID, false);
reservePushStream(PUSH_STREAM_ID, parent);
ChannelPromise promise = newPromise();
encoder.writeHeaders(ctx, PUSH_STREAM_ID, EmptyHttp2Headers.INSTANCE, 0, false, promise);
assertEquals(HALF_CLOSED_REMOTE, stream(PUSH_STREAM_ID).state());
verify(writer).writeHeaders(eq(ctx), eq(PUSH_STREAM_ID), eq(EmptyHttp2Headers.INSTANCE),
eq(0), eq(false), eq(promise));
} |
static JavaType constructType(Type type) {
try {
return constructTypeInner(type);
} catch (Exception e) {
throw new InvalidDataTableTypeException(type, e);
}
} | @Test
void wild_card_list_types_use_upper_bound_in_equality() {
JavaType javaType = TypeFactory.constructType(LIST_OF_WILD_CARD_NUMBER);
JavaType other = TypeFactory.constructType(LIST_OF_NUMBER);
assertThat(javaType, equalTo(other));
TypeFactory.ListType listType = (TypeFactory.ListType) javaType;
JavaType elementType = listType.getElementType();
assertThat(elementType.getOriginal(), equalTo(Number.class));
} |
public Map<K, V> getAll(Iterable<K> keys) {
List<K> missingKeys = new ArrayList<>();
Map<K, V> result = new HashMap<>();
for (K key : keys) {
V value = map.get(key);
if (value == null && !map.containsKey(key)) {
missingKeys.add(key);
} else {
result.put(key, value);
}
}
if (!missingKeys.isEmpty()) {
Map<K, V> missingValues = loader.loadAll(missingKeys);
map.putAll(missingValues);
result.putAll(missingValues);
for (K missingKey : missingKeys) {
if (!map.containsKey(missingKey)) {
map.put(missingKey, null);
result.put(missingKey, null);
}
}
}
return result;
} | @Test
public void getAllNullable() {
// ask for 3 keys but only 2 are available in backed (third key is missing)
List<String> keys = Arrays.asList("one", "two", "three");
Map<String, String> values = new HashMap<>();
values.put("one", "un");
values.put("two", "deux");
when(loader.loadAll(keys)).thenReturn(values);
assertThat(cache.getAll(keys))
.hasSize(3)
.containsEntry("one", "un")
.containsEntry("two", "deux")
.containsEntry("three", null);
// ask for 4 keys. Only a single one was never loaded. The 3 others are kept from cache
when(loader.loadAll(Arrays.asList("four"))).thenReturn(ImmutableMap.of("four", "quatre"));
assertThat(cache.getAll(Arrays.asList("one", "two", "three", "four")))
.hasSize(4)
.containsEntry("one", "un")
.containsEntry("two", "deux")
.containsEntry("three", null)
.containsEntry("four", "quatre");
verify(loader, times(2)).loadAll(anyCollection());
} |
public boolean isStructType() {
return this instanceof StructType;
} | @Test
public void testStructSerialAndDeser() {
// "struct<struct_test:int,c1:struct<c1:int,cc1:string>>"
StructType c1 = new StructType(Lists.newArrayList(
new StructField("c1", ScalarType.createType(PrimitiveType.INT)),
new StructField("cc1", ScalarType.createDefaultCatalogString())
));
StructType root = new StructType(Lists.newArrayList(
new StructField("struct_test", ScalarType.createType(PrimitiveType.INT), "comment test"),
new StructField("c1", c1)
));
String json = GsonUtils.GSON.toJson(root);
Type deType = GsonUtils.GSON.fromJson(json, Type.class);
Assert.assertTrue(deType.isStructType());
Assert.assertEquals("struct<struct_test int(11) COMMENT 'comment test', c1 struct<c1 int(11), cc1 varchar(1073741824)>>",
deType.toString());
// test initialed fieldMap by ctor in deserializer.
Assert.assertEquals(1, ((StructType) deType).getFieldPos("c1"));
} |
public synchronized void submitRunTaskCommand(long jobId, long taskId, JobConfig jobConfig,
Object taskArgs, long workerId) {
RunTaskCommand.Builder runTaskCommand = RunTaskCommand.newBuilder();
runTaskCommand.setJobId(jobId);
runTaskCommand.setTaskId(taskId);
try {
runTaskCommand.setJobConfig(ByteString.copyFrom(SerializationUtils.serialize(jobConfig)));
if (taskArgs != null) {
runTaskCommand.setTaskArgs(ByteString.copyFrom(SerializationUtils.serialize(taskArgs)));
}
} catch (IOException e) {
// TODO(yupeng) better exception handling
LOG.info("Failed to serialize the run task command:" + e);
return;
}
JobCommand.Builder command = JobCommand.newBuilder();
command.setRunTaskCommand(runTaskCommand);
submit(workerId, command);
} | @Test
public void submitRunTaskCommand() throws Exception {
long jobId = 0L;
int taskId = 1;
JobConfig jobConfig = new TestPlanConfig("/test");
long workerId = 2L;
List<Integer> args = Lists.newArrayList(1);
mManager.submitRunTaskCommand(jobId, taskId, jobConfig, args, workerId);
List<JobCommand> commands = mManager.pollAllPendingCommands(workerId);
Assert.assertEquals(1, commands.size());
JobCommand command = commands.get(0);
Assert.assertEquals(jobId, command.getRunTaskCommand().getJobId());
Assert.assertEquals(taskId, command.getRunTaskCommand().getTaskId());
Assert.assertEquals(jobConfig,
SerializationUtils.deserialize(command.getRunTaskCommand().getJobConfig().toByteArray()));
Assert.assertEquals(args,
SerializationUtils.deserialize(command.getRunTaskCommand().getTaskArgs().toByteArray()));
} |
public CompletableFuture<Void> handlePullQuery(
final ServiceContext serviceContext,
final PullPhysicalPlan pullPhysicalPlan,
final ConfiguredStatement<Query> statement,
final RoutingOptions routingOptions,
final PullQueryWriteStream pullQueryQueue,
final CompletableFuture<Void> shouldCancelRequests
) {
final List<KsqlPartitionLocation> allLocations = pullPhysicalPlan.getMaterialization().locator()
.locate(
pullPhysicalPlan.getKeys(),
routingOptions,
routingFilterFactory,
pullPhysicalPlan.getPlanType() == PullPhysicalPlanType.RANGE_SCAN
);
final Map<Integer, List<Host>> emptyPartitions = allLocations.stream()
.filter(loc -> loc.getNodes().stream().noneMatch(node -> node.getHost().isSelected()))
.collect(Collectors.toMap(
KsqlPartitionLocation::getPartition,
loc -> loc.getNodes().stream().map(KsqlNode::getHost).collect(Collectors.toList())));
if (!emptyPartitions.isEmpty()) {
final MaterializationException materializationException = new MaterializationException(
"Unable to execute pull query. "
+ emptyPartitions.entrySet()
.stream()
.map(kv -> String.format(
"Partition %s failed to find valid host. Hosts scanned: %s",
kv.getKey(), kv.getValue()))
.collect(Collectors.joining(", ", "[", "]")));
LOG.debug(materializationException.getMessage());
throw materializationException;
}
// at this point we should filter out the hosts that we should not route to
final List<KsqlPartitionLocation> locations = allLocations
.stream()
.map(KsqlPartitionLocation::removeFilteredHosts)
.collect(Collectors.toList());
final CompletableFuture<Void> completableFuture = new CompletableFuture<>();
coordinatorExecutorService.submit(() -> {
try {
executeRounds(serviceContext, pullPhysicalPlan, statement, routingOptions,
locations, pullQueryQueue, shouldCancelRequests);
completableFuture.complete(null);
} catch (Throwable t) {
completableFuture.completeExceptionally(t);
}
});
return completableFuture;
} | @Test
public void shouldNotRouteToFilteredHost() throws InterruptedException, ExecutionException {
// Given:
location1 = new PartitionLocation(Optional.empty(), 1, ImmutableList.of(badNode, node1));
when(ksqlClient.makeQueryRequest(any(), any(), any(), any(), any(), any(), any()))
.then(invocationOnMock -> RestResponse.successful(200, 2));
locate(location1, location2, location3, location4);
// When:
final CompletableFuture<Void> fut = haRouting.handlePullQuery(
serviceContext,
pullPhysicalPlan,
statement,
routingOptions,
pullQueryQueue,
disconnect
);
fut.get();
// Then:
verify(ksqlClient, never())
.makeQueryRequest(eq(badNode.location()), any(), any(), any(), any(), any(), any());
} |
public static long calculate(PhysicalRel rel, ExpressionEvalContext evalContext) {
GcdCalculatorVisitor visitor = new GcdCalculatorVisitor(evalContext);
visitor.go(rel);
if (visitor.gcd == 0) {
// there's no window aggr in the rel, return the value for joins, which is already capped at some reasonable value
return visitor.maximumIntervalForJoins;
}
// if there's window aggr, cap it with the maximumIntervalForJoins
return Math.min(visitor.gcd, visitor.maximumIntervalForJoins);
} | @Test
public void when_unionAboveSlidingWindows_then_returnGcdOfWindowsSize() {
HazelcastTable table = streamGeneratorTable("s1", 1);
HazelcastTable table2 = streamGeneratorTable("s2", 10);
HazelcastTable table3 = streamGeneratorTable("s3", 100);
List<QueryDataType> parameterTypes = Collections.singletonList(INT);
int expectedGcd = 12;
final String query = "SELECT * FROM "
+ hop("s1", expectedGcd * 4, expectedGcd)
+ " UNION ALL "
+ hop("s2", expectedGcd * 3, expectedGcd)
+ " UNION ALL "
+ hop("s3", expectedGcd * 2, expectedGcd);
PhysicalRel optimizedPhysicalRel = optimizePhysical(query, parameterTypes, table, table2, table3).getPhysical();
// assert plan for better visibility of what was generated
assertPlan(optimizedPhysicalRel, plan(
planRow(0, UnionPhysicalRel.class),
planRow(1, SlidingWindowAggregatePhysicalRel.class),
planRow(2, CalcPhysicalRel.class),
planRow(3, FullScanPhysicalRel.class),
planRow(1, SlidingWindowAggregatePhysicalRel.class),
planRow(2, CalcPhysicalRel.class),
planRow(3, FullScanPhysicalRel.class),
planRow(1, SlidingWindowAggregatePhysicalRel.class),
planRow(2, CalcPhysicalRel.class),
planRow(3, FullScanPhysicalRel.class)
));
// GCD(48, 36, 24) = 12
assertThat(WatermarkThrottlingFrameSizeCalculator.calculate(optimizedPhysicalRel, MOCK_EEC))
.isEqualTo(expectedGcd);
} |
public static <InputT> Builder<InputT> withoutHold(AppliedPTransform<?, ?, ?> transform) {
return new Builder(transform, BoundedWindow.TIMESTAMP_MAX_VALUE);
} | @Test
public void producedBundlesAndAdditionalOutputProducedOutputs() {
TransformResult<Integer> result =
StepTransformResult.<Integer>withoutHold(transform)
.addOutput(bundleFactory.createBundle(pc))
.withAdditionalOutput(OutputType.PCOLLECTION_VIEW)
.build();
assertThat(result.getOutputTypes(), hasItem(OutputType.PCOLLECTION_VIEW));
} |
public static WindowBytesStoreSupplier persistentTimestampedWindowStore(final String name,
final Duration retentionPeriod,
final Duration windowSize,
final boolean retainDuplicates) throws IllegalArgumentException {
return persistentWindowStore(name, retentionPeriod, windowSize, retainDuplicates, true);
} | @Test
public void shouldThrowIfIPersistentTimestampedWindowStoreStoreNameIsNull() {
final Exception e = assertThrows(NullPointerException.class, () -> Stores.persistentTimestampedWindowStore(null, ZERO, ZERO, false));
assertEquals("name cannot be null", e.getMessage());
} |
@Override
public @Nullable SplitResult<TimestampRange> trySplit(double fractionOfRemainder) {
if (InitialPartition.isInitialPartition(partition.getPartitionToken())) {
return null;
}
return super.trySplit(fractionOfRemainder);
} | @Test
public void testTrySplitReturnsNullForInitialPartition() {
when(partition.getPartitionToken()).thenReturn(InitialPartition.PARTITION_TOKEN);
assertNull(tracker.trySplit(0.0D));
} |
@Deprecated
public static String getJwt(JwtClaims claims) throws JoseException {
String jwt;
RSAPrivateKey privateKey = (RSAPrivateKey) getPrivateKey(
jwtConfig.getKey().getFilename(),jwtConfig.getKey().getPassword(), jwtConfig.getKey().getKeyName());
// A JWT is a JWS and/or a JWE with JSON claims as the payload.
// In this example it is a JWS nested inside a JWE
// So we first create a JsonWebSignature object.
JsonWebSignature jws = new JsonWebSignature();
// The payload of the JWS is JSON content of the JWT Claims
jws.setPayload(claims.toJson());
// The JWT is signed using the sender's private key
jws.setKey(privateKey);
// Get provider from security config file, it should be two digit
// And the provider id will set as prefix for keyid in the token header, for example: 05100
// if there is no provider id, we use "00" for the default value
String provider_id = "";
if (jwtConfig.getProviderId() != null) {
provider_id = jwtConfig.getProviderId();
if (provider_id.length() == 1) {
provider_id = "0" + provider_id;
} else if (provider_id.length() > 2) {
logger.error("provider_id defined in the security.yml file is invalid; the length should be 2");
provider_id = provider_id.substring(0, 2);
}
}
jws.setKeyIdHeaderValue(provider_id + jwtConfig.getKey().getKid());
// Set the signature algorithm on the JWT/JWS that will integrity protect the claims
jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.RSA_USING_SHA256);
// Sign the JWS and produce the compact serialization, which will be the inner JWT/JWS
// representation, which is a string consisting of three dot ('.') separated
// base64url-encoded parts in the form Header.Payload.Signature
jwt = jws.getCompactSerialization();
return jwt;
} | @Test
public void longLivedReferenceJwt() throws Exception {
Map<String, String> custom = new HashMap<>();
custom.put("consumer_application_id", "361");
custom.put("request_transit", "67");
JwtClaims claims = ClaimsUtil.getCustomClaims("steve", "EMPLOYEE", "f7d42348-c647-4efb-a52d-4c5787421e72", Arrays.asList("party.util.reference.read", "server.info.r"), custom, "user admin");
claims.setExpirationTimeMinutesInTheFuture(5256000);
String jwt = JwtIssuer.getJwt(claims, long_kid, KeyUtil.deserializePrivateKey(long_key, KeyUtil.RSA));
System.out.println("***LongLived reference JWT***: " + jwt);
} |
public void computeCpd(Component component, Collection<Block> originBlocks, Collection<Block> duplicationBlocks) {
CloneIndex duplicationIndex = new PackedMemoryCloneIndex();
populateIndex(duplicationIndex, originBlocks);
populateIndex(duplicationIndex, duplicationBlocks);
List<CloneGroup> duplications = SuffixTreeCloneDetectionAlgorithm.detect(duplicationIndex, originBlocks);
Iterable<CloneGroup> filtered = duplications.stream()
.filter(getNumberOfUnitsNotLessThan(component.getFileAttributes().getLanguageKey()))
.toList();
addDuplications(component, filtered);
} | @Test
public void add_duplication_for_java_even_when_no_token() {
Component javaFile = builder(FILE, 1)
.setKey(ORIGIN_FILE_KEY)
.setFileAttributes(new FileAttributes(false, "java", 10))
.build();
Collection<Block> originBlocks = singletonList(
// This block contains 0 token
new Block.Builder()
.setResourceId(ORIGIN_FILE_KEY)
.setBlockHash(new ByteArray("a8998353e96320ec"))
.setIndexInFile(0)
.setLines(30, 45)
.setUnit(0, 0)
.build());
Collection<Block> duplicatedBlocks = singletonList(
new Block.Builder()
.setResourceId(OTHER_FILE_KEY)
.setBlockHash(new ByteArray("a8998353e96320ec"))
.setIndexInFile(0)
.setLines(40, 55)
.build());
underTest.computeCpd(javaFile, originBlocks, duplicatedBlocks);
assertThat(duplicationRepository.getDuplications(ORIGIN_FILE))
.containsExactly(
crossProjectDuplication(new TextBlock(30, 45), OTHER_FILE_KEY, new TextBlock(40, 55)));
} |
private PlantUmlDiagram createDiagram(List<String> rawDiagramLines) {
List<String> diagramLines = filterOutComments(rawDiagramLines);
Set<PlantUmlComponent> components = parseComponents(diagramLines);
PlantUmlComponents plantUmlComponents = new PlantUmlComponents(components);
List<ParsedDependency> dependencies = parseDependencies(plantUmlComponents, diagramLines);
return new PlantUmlDiagram.Builder(plantUmlComponents)
.withDependencies(dependencies)
.build();
} | @Test
@UseDataProvider("dependency_arrow_testcases")
public void parses_various_types_of_dependency_arrows(String dependency) {
PlantUmlDiagram diagram = createDiagram(TestDiagram.in(temporaryFolder)
.component("SomeOrigin").withStereoTypes("..origin..")
.component("SomeTarget").withStereoTypes("..target..")
.rawLine(dependency)
.write());
PlantUmlComponent target = getOnlyElement(getComponentWithName("SomeOrigin", diagram).getDependencies());
assertThat(target.getComponentName())
.as("dependency component name")
.isEqualTo(new ComponentName("SomeTarget"));
} |
void setDeprecationHeaders(Request request, Response response, DeprecatedAPI controller) {
String deprecatedRelease = controller.deprecatedIn();
String removalRelease = controller.removalIn();
String entityName = controller.entityName();
ApiVersion deprecatedApiVersion = controller.deprecatedApiVersion();
ApiVersion successorApiVersion = controller.successorApiVersion();
String changelogUrl = format("https://api.gocd.org/%s/#api-changelog", deprecatedRelease);
String link = format("<%s>; Accept=\"%s\"; rel=\"successor-version\"", request.url(), successorApiVersion.mimeType());
String warning = format("299 GoCD/v%s \"The %s API version %s has been deprecated in GoCD Release v%s. This version will be removed in GoCD Release v%s. Version %s of the API is available, and users are encouraged to use it\"", deprecatedRelease, entityName, deprecatedApiVersion, deprecatedRelease, removalRelease, successorApiVersion);
response.header("X-GoCD-API-Deprecated-In", format("v%s", deprecatedRelease));
response.header("X-GoCD-API-Removal-In", format("v%s", removalRelease));
response.header("X-GoCD-API-Deprecation-Info", changelogUrl);
response.header("Link", link);
response.header("Warning", warning);
} | @Test
void shouldAddDeprecationHeaders() {
DoNothingApiV1 doNothingApiV1 = new DoNothingApiV1();
RoutesHelper helper = new RoutesHelper(doNothingApiV1);
Request request = mock(Request.class);
Response response = mock(Response.class);
when(request.url()).thenReturn("http://test.host:80/go");
helper.setDeprecationHeaders(request, response, doNothingApiV1.getClass().getAnnotation(DeprecatedAPI.class));
verify(response).header("X-GoCD-API-Deprecated-In", "v20.2.0");
verify(response).header("X-GoCD-API-Removal-In", "v20.5.0");
verify(response).header("X-GoCD-API-Deprecation-Info", "https://api.gocd.org/20.2.0/#api-changelog");
verify(response).header("Link", "<http://test.host:80/go>; Accept=\"application/vnd.go.cd.v2+json\"; rel=\"successor-version\"");
verify(response).header("Warning", "299 GoCD/v20.2.0 \"The Do Nothing API version v1 has been deprecated in GoCD Release v20.2.0. This version will be removed in GoCD Release v20.5.0. Version v2 of the API is available, and users are encouraged to use it\"");
} |
protected void saveAndRunJobFilters(List<Job> jobs) {
if (jobs.isEmpty()) return;
try {
jobFilterUtils.runOnStateElectionFilter(jobs);
storageProvider.save(jobs);
jobFilterUtils.runOnStateAppliedFilters(jobs);
} catch (ConcurrentJobModificationException concurrentJobModificationException) {
try {
backgroundJobServer.getConcurrentJobModificationResolver().resolve(concurrentJobModificationException);
} catch (UnresolvableConcurrentJobModificationException unresolvableConcurrentJobModificationException) {
throw new SevereJobRunrException("Could not resolve ConcurrentJobModificationException", unresolvableConcurrentJobModificationException);
}
}
} | @Test
void ifStateChangeHappensStateChangeFiltersAreInvoked() {
Job aJobInProgress = aJobInProgress().build();
aJobInProgress.succeeded();
task.saveAndRunJobFilters(singletonList(aJobInProgress));
assertThat(logAllStateChangesFilter.getStateChanges(aJobInProgress)).containsExactly("PROCESSING->SUCCEEDED");
} |
@SuppressWarnings({"unchecked", "rawtypes"})
@Override
public GenericRow apply(
final GenericKey k,
final GenericRow rowValue,
final GenericRow aggRowValue
) {
final GenericRow result = GenericRow.fromList(aggRowValue.values());
for (int idx = 0; idx < nonAggColumnCount; idx++) {
result.set(idx, rowValue.get(idx));
}
for (int idx = nonAggColumnCount; idx < columnCount; idx++) {
final TableAggregationFunction function = aggregateFunctions.get(idx - nonAggColumnCount);
final Object argument = getCurrentValue(
rowValue,
function.getArgIndicesInValue(),
function::convertToInput
);
final Object previous = result.get(idx);
result.set(idx, function.undo(argument, previous));
}
return result;
} | @Test
public void shouldApplyUndoableMultiParamAggregateFunctions() {
when(func1.convertToInput(any())).thenAnswer(
(invocation) -> {
List<?> inputs = invocation.getArgument(0, List.class);
return Pair.of(inputs.get(0), inputs.get(1));
}
);
when(func1.getArgIndicesInValue()).thenReturn(Arrays.asList(0, 1));
// Given:
final GenericRow value = genericRow(1, 2L);
final GenericRow aggRow = genericRow(1, 2L, 3);
// When:
final GenericRow resultRow = aggregator.apply(key, value, aggRow);
// Then:
assertThat(resultRow, equalTo(genericRow(1, 2L, "func1-undone")));
} |
@Override
public ManageSnapshots replaceTag(String name, long snapshotId) {
updateSnapshotReferencesOperation().replaceTag(name, snapshotId);
return this;
} | @TestTemplate
public void testReplaceTag() {
table.newAppend().appendFile(FILE_A).commit();
long snapshotId = table.currentSnapshot().snapshotId();
table.manageSnapshots().createTag("tag1", snapshotId).commit();
// Create a new snapshot and replace the tip of branch1 to be the new snapshot
table.newAppend().appendFile(FILE_B).commit();
long currentSnapshot = table.ops().refresh().currentSnapshot().snapshotId();
table.manageSnapshots().replaceTag("tag1", currentSnapshot).commit();
assertThat(currentSnapshot).isEqualTo(table.ops().refresh().ref("tag1").snapshotId());
} |
@Override
public <U> ParSeqBasedCompletionStage<U> thenApply(Function<? super T, ? extends U> fn)
{
return nextStageByComposingTask(_task.map("thenApply", fn::apply));
} | @Test
public void testThenApply_unFinish() throws Exception
{
CountDownLatch waitLatch = new CountDownLatch(1);
CompletionStage<String> stage2 = createTestStage(TESTVALUE1, 200).thenApply(v -> {
waitLatch.countDown();
return TESTVALUE2;
});
assertFalse(waitLatch.await(100, TimeUnit.MILLISECONDS));
finish(stage2);
assertTrue(waitLatch.await(100, TimeUnit.MILLISECONDS));
} |
@Override
public void run() {
try { // make sure we call afterRun() even on crashes
// and operate countdown latches, else we may hang the parallel runner
if (steps == null) {
beforeRun();
}
if (skipped) {
return;
}
int count = steps.size();
int index = 0;
while ((index = nextStepIndex()) < count) {
currentStep = steps.get(index);
execute(currentStep);
if (currentStepResult != null) { // can be null if debug step-back or hook skip
result.addStepResult(currentStepResult);
}
}
} catch (Exception e) {
if (currentStepResult != null) {
result.addStepResult(currentStepResult);
}
logError("scenario [run] failed\n" + StringUtils.throwableToString(e));
currentStepResult = result.addFakeStepResult("scenario [run] failed", e);
} finally {
if (!skipped) {
afterRun();
if (isFailed() && engine.getConfig().isAbortSuiteOnFailure()) {
featureRuntime.suite.abort();
}
}
if (caller.isNone()) {
logAppender.close(); // reclaim memory
}
}
} | @Test
void testCallSingle() {
run(
"def first = karate.callSingle('uuid.js')",
"def second = karate.callSingle('uuid.js')"
);
matchVar("first", get("second"));
} |
public String decrypt(String encryptedText) {
Matcher matcher = ENCRYPTED_PATTERN.matcher(encryptedText);
if (matcher.matches()) {
Cipher cipher = ciphers.get(matcher.group(1).toLowerCase(Locale.ENGLISH));
if (cipher != null) {
return cipher.decrypt(matcher.group(2));
}
}
return encryptedText;
} | @Test
public void should_notDecryptText_whenBadBraceSyntax(){
Encryption encryption = new Encryption(null);
assertThat(encryption.decrypt("}xxx{Zm9v")).isEqualTo("}xxx{Zm9v");
assertThat(encryption.decrypt("}dcd}59LK")).isEqualTo("}dcd}59LK");
assertThat(encryption.decrypt("}rrrRg6")).isEqualTo("}rrrRg6");
assertThat(encryption.decrypt("{closedjdk")).isEqualTo("{closedjdk");
} |
public static IndicesBlockStatus parseBlockSettings(final GetSettingsResponse settingsResponse) {
IndicesBlockStatus result = new IndicesBlockStatus();
final ImmutableOpenMap<String, Settings> indexToSettingsMap = settingsResponse.getIndexToSettings();
final String[] indicesInResponse = indexToSettingsMap.keys().toArray(String.class);
for (String index : indicesInResponse) {
final Settings blockSettings = indexToSettingsMap.get(index).getByPrefix(BLOCK_SETTINGS_PREFIX);
if (!blockSettings.isEmpty()) {
final Set<String> blockSettingsNames = blockSettings.names();
final Set<String> blockSettingsSetToTrue = blockSettingsNames.stream()
.filter(s -> blockSettings.getAsBoolean(s, false))
.map(s -> BLOCK_SETTINGS_PREFIX + s)
.collect(Collectors.toSet());
if (!blockSettingsSetToTrue.isEmpty()) {
result.addIndexBlocks(index, blockSettingsSetToTrue);
}
}
}
return result;
} | @Test
public void noBlockedIndicesIdentifiedIfEmptyResponseParsed() {
GetSettingsResponse emptyResponse = new GetSettingsResponse(ImmutableOpenMap.of(), ImmutableOpenMap.of());
final IndicesBlockStatus indicesBlockStatus = BlockSettingsParser.parseBlockSettings(emptyResponse);
assertNotNull(indicesBlockStatus);
assertEquals(0, indicesBlockStatus.countBlockedIndices());
} |
@NonNull
@Override
public Object configure(CNode config, ConfigurationContext context) throws ConfiguratorException {
return Stapler.lookupConverter(target)
.convert(
target,
context.getSecretSourceResolver()
.resolve(config.asScalar().toString()));
} | @Test
public void _int_env_default() throws Exception {
Configurator c = registry.lookupOrFail(Integer.class);
final Object value = c.configure(new Scalar("${ENV_FOR_TEST:-123}"), context);
assertEquals(123, value);
} |
@SuppressWarnings("unchecked")
public String uploadFile(
String parentPath,
String name,
InputStream inputStream,
String mediaType,
Date modified,
String description)
throws IOException, InvalidTokenException, DestinationMemoryFullException {
String url;
try {
URIBuilder builder =
getUriBuilder()
.setPath(CONTENT_API_PATH_PREFIX + "/mounts/primary/files/put")
.setParameter("path", parentPath)
.setParameter("filename", name)
.setParameter("autorename", "true")
.setParameter("info", "true");
if (description != null && description.length() > 0) {
builder.setParameter("tags", "description=" + description);
}
if (modified != null) {
builder.setParameter("modified", Long.toString(modified.getTime()));
}
url = builder.build().toString();
} catch (URISyntaxException e) {
throw new IllegalStateException("Could not produce url.", e);
}
Request.Builder requestBuilder = getRequestBuilder(url);
RequestBody uploadBody = new InputStreamRequestBody(MediaType.parse(mediaType), inputStream);
requestBuilder.post(uploadBody);
// We need to reset the input stream because the request could already read some data
try (Response response =
getResponse(fileUploadClient, requestBuilder, inputStream::reset)) {
int code = response.code();
ResponseBody body = response.body();
if (code == 413) {
throw new DestinationMemoryFullException(
"Koofr quota exceeded", new Exception("Koofr file upload response code " + code));
}
if (code < 200 || code > 299) {
throw new KoofrClientIOException(response);
}
Map<String, Object> responseData = objectMapper.readValue(body.bytes(), Map.class);
String newName = (String) responseData.get("name");
Preconditions.checkState(
!Strings.isNullOrEmpty(newName), "Expected name value to be present in %s", responseData);
return parentPath + "/" + newName;
}
} | @Test
public void testUploadFileTokenExpired() throws Exception {
when(credentialFactory.refreshCredential(credential))
.then(
(InvocationOnMock invocation) -> {
final Credential cred = invocation.getArgument(0);
cred.setAccessToken("acc1");
return cred;
});
server.enqueue(new MockResponse().setResponseCode(401));
server.enqueue(
new MockResponse()
.setResponseCode(200)
.setHeader("Content-Type", "application/json")
.setBody(
"{\"name\":\"image.jpg\",\"type\":\"file\",\"modified\":1591868314156,\"size\":5,\"contentType\":\"image/jpeg\",\"hash\":\"d05374dc381d9b52806446a71c8e79b1\",\"tags\":{}}"));
final InputStream inputStream = new ByteArrayInputStream(new byte[] {0, 1, 2, 3, 4});
String fullPath =
client.uploadFile("/path/to/folder", "image.jpg", inputStream, "image/jpeg", null, null);
assertEquals("/path/to/folder/image.jpg", fullPath);
assertEquals(2, server.getRequestCount());
RecordedRequest recordedRequest = server.takeRequest();
assertEquals("POST", recordedRequest.getMethod());
assertEquals(
"/content/api/v2/mounts/primary/files/put?path=%2Fpath%2Fto%2Ffolder&filename=image.jpg&autorename=true&info=true",
recordedRequest.getPath());
assertEquals("Bearer acc", recordedRequest.getHeader("Authorization"));
assertEquals("2.1", recordedRequest.getHeader("X-Koofr-Version"));
assertEquals("image/jpeg", recordedRequest.getHeader("Content-Type"));
assertEquals(5, recordedRequest.getBodySize());
recordedRequest = server.takeRequest();
assertEquals("POST", recordedRequest.getMethod());
assertEquals(
"/content/api/v2/mounts/primary/files/put?path=%2Fpath%2Fto%2Ffolder&filename=image.jpg&autorename=true&info=true",
recordedRequest.getPath());
assertEquals("Bearer acc1", recordedRequest.getHeader("Authorization"));
assertEquals("2.1", recordedRequest.getHeader("X-Koofr-Version"));
assertEquals("image/jpeg", recordedRequest.getHeader("Content-Type"));
assertEquals(5, recordedRequest.getBodySize());
} |
public static boolean canDrop(
FilterPredicate pred, List<ColumnChunkMetaData> columns, DictionaryPageReadStore dictionaries) {
Objects.requireNonNull(pred, "pred cannnot be null");
Objects.requireNonNull(columns, "columns cannnot be null");
return pred.accept(new DictionaryFilter(columns, dictionaries));
} | @Test
public void testNotEqBinary() throws Exception {
BinaryColumn sharp = binaryColumn("single_value_field");
BinaryColumn sharpAndNull = binaryColumn("optional_single_value_field");
BinaryColumn b = binaryColumn("binary_field");
assertTrue(
"Should drop block with only the excluded value",
canDrop(notEq(sharp, Binary.fromString("sharp")), ccmd, dictionaries));
assertFalse(
"Should not drop block with any other value",
canDrop(notEq(sharp, Binary.fromString("applause")), ccmd, dictionaries));
assertFalse(
"Should not drop block with only the excluded value and null",
canDrop(notEq(sharpAndNull, Binary.fromString("sharp")), ccmd, dictionaries));
assertFalse(
"Should not drop block with any other value",
canDrop(notEq(sharpAndNull, Binary.fromString("applause")), ccmd, dictionaries));
assertFalse(
"Should not drop block with a known value",
canDrop(notEq(b, Binary.fromString("x")), ccmd, dictionaries));
assertFalse(
"Should not drop block with a known value",
canDrop(notEq(b, Binary.fromString("B")), ccmd, dictionaries));
assertFalse("Should not drop block for null", canDrop(notEq(b, null), ccmd, dictionaries));
} |
public static String downloadString(String url, String customCharsetName) {
return downloadString(url, CharsetUtil.charset(customCharsetName), null);
} | @Test
@Disabled
public void downloadStringTest() {
final String url = "https://www.baidu.com";
// 从远程直接读取字符串,需要自定义编码,直接调用JDK方法
final String content2 = HttpUtil.downloadString(url, CharsetUtil.UTF_8);
Console.log(content2);
} |
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
data.results().forEach(topicResult ->
topicResult.partitions().forEach(partitionResult ->
updateErrorCounts(errorCounts, Errors.forCode(partitionResult.errorCode()))));
return errorCounts;
} | @Test
public void testErrorCounts() {
AlterReplicaLogDirsResponseData data = new AlterReplicaLogDirsResponseData()
.setResults(asList(
new AlterReplicaLogDirTopicResult()
.setTopicName("t0")
.setPartitions(asList(
new AlterReplicaLogDirPartitionResult()
.setPartitionIndex(0)
.setErrorCode(Errors.LOG_DIR_NOT_FOUND.code()),
new AlterReplicaLogDirPartitionResult()
.setPartitionIndex(1)
.setErrorCode(Errors.NONE.code()))),
new AlterReplicaLogDirTopicResult()
.setTopicName("t1")
.setPartitions(Collections.singletonList(
new AlterReplicaLogDirPartitionResult()
.setPartitionIndex(0)
.setErrorCode(Errors.LOG_DIR_NOT_FOUND.code())))));
Map<Errors, Integer> counts = new AlterReplicaLogDirsResponse(data).errorCounts();
assertEquals(2, counts.size());
assertEquals(Integer.valueOf(2), counts.get(Errors.LOG_DIR_NOT_FOUND));
assertEquals(Integer.valueOf(1), counts.get(Errors.NONE));
} |
public static HiveColumnHandle rowIdColumnHandle()
{
return new HiveColumnHandle(ROW_ID_COLUMN_NAME, ROW_ID_TYPE, ROW_ID_TYPE_SIGNATURE, ROW_ID_COLUMN_INDEX, SYNTHESIZED, Optional.empty(), ImmutableList.of(), Optional.empty());
} | @Test
public void testRowIdIsSynthesized()
{
HiveColumnHandle rowIdColumn = HiveColumnHandle.rowIdColumnHandle();
assertEquals(rowIdColumn.getColumnType(), SYNTHESIZED);
} |
public abstract ResponseType getResponseType(); | @Test(dataProvider = "provideDynamicallyDeterminedResponseTypeData")
public void testEnvelopeDynamicallyDeterminedResponseType(RestLiResponseEnvelope responseEnvelope, ResponseType expectedResponseType)
{
Assert.assertEquals(responseEnvelope.getResponseType(), expectedResponseType);
} |
protected final void safeRegister(final Class type, final Serializer serializer) {
safeRegister(type, createSerializerAdapter(serializer));
} | @Test(expected = IllegalStateException.class)
public void testSafeRegister_alreadyRegisteredType() {
abstractSerializationService.safeRegister(StringBuffer.class, new StringBufferSerializer(true));
abstractSerializationService.safeRegister(StringBuffer.class, new TheOtherGlobalSerializer(true));
} |
public void load(ScannerReport.Metadata metadata) {
if (delegate != null) {
delegate.load(metadata);
} else if (hasBranchProperties(metadata)) {
throw MessageException.of("Current edition does not support branch feature");
} else {
metadataHolder.setBranch(new DefaultBranchImpl(defaultBranchNameResolver.getEffectiveMainBranchName()));
}
} | @Test
public void regular_analysis_of_project_is_enabled_if_delegate_is_absent() {
ScannerReport.Metadata metadata = ScannerReport.Metadata.newBuilder()
.build();
DefaultBranchNameResolver branchNameResolver = mock(DefaultBranchNameResolver.class);
when(branchNameResolver.getEffectiveMainBranchName()).thenReturn(DEFAULT_MAIN_BRANCH_NAME);
new BranchLoader(metadataHolder, branchNameResolver).load(metadata);
assertThat(metadataHolder.getBranch()).isNotNull();
Branch branch = metadataHolder.getBranch();
assertThat(branch.isMain()).isTrue();
assertThat(branch.getName()).isEqualTo(DEFAULT_MAIN_BRANCH_NAME);
} |
public static Map<String, Object> compare(byte[] baselineImg, byte[] latestImg, Map<String, Object> options,
Map<String, Object> defaultOptions) throws MismatchException {
boolean allowScaling = toBool(defaultOptions.get("allowScaling"));
ImageComparison imageComparison = new ImageComparison(baselineImg, latestImg, options, allowScaling);
imageComparison.configure(defaultOptions);
if (imageComparison.baselineMissing) {
imageComparison.result.put("isBaselineMissing", true);
throw new MismatchException("baseline image was empty or not found", imageComparison.result);
}
if (imageComparison.scaleMismatch) {
imageComparison.result.put("isScaleMismatch", true);
throw new MismatchException("latest image dimensions != baseline image dimensions", imageComparison.result);
}
double mismatchPercentage = 100.0;
for (String engine : imageComparison.engines) {
double currentMismatchPercentage;
switch (engine) {
case RESEMBLE:
currentMismatchPercentage = imageComparison.execResemble();
break;
case SSIM:
currentMismatchPercentage = imageComparison.execSSIM();
break;
default:
logger.error("skipping unsupported image comparison engine: {}", engine);
continue;
}
if (currentMismatchPercentage <= mismatchPercentage) {
mismatchPercentage = currentMismatchPercentage;
}
if (mismatchPercentage < imageComparison.stopWhenMismatchIsLessThan) {
break;
}
}
return imageComparison.checkMismatch(mismatchPercentage);
} | @Test
void testScale() {
Map<String, Object> result = ImageComparison.compare(R_1x1_IMG, R_2x2_IMG, opts(), opts("allowScaling", true));
assertEquals(0.0, result.get("mismatchPercentage"));
} |
public String msg(String msg, Object... args) {
LogMessage l = new LogMessage();
l.kvs = this.kvs;
return l.setMsgString(msg, args);
} | @Test
public void testMsgShouldContainsMessageAndThrowableMessage() {
final String message = logMessage.msg(MESSAGE, new Throwable(THROWABLE_MESSAGE));
assertNotNull(message);
assertTrue(message.contains(MESSAGE));
assertTrue(message.contains(THROWABLE_MESSAGE));
} |
public static double minimize(DifferentiableMultivariateFunction func, double[] x, double gtol, int maxIter) {
if (gtol <= 0.0) {
throw new IllegalArgumentException("Invalid gradient tolerance: " + gtol);
}
if (maxIter <= 0) {
throw new IllegalArgumentException("Invalid maximum number of iterations: " + maxIter);
}
double den, fac, fad, fae, sumdg, sumxi, temp, test;
int n = x.length;
double[] dg = new double[n];
double[] g = new double[n];
double[] hdg = new double[n];
double[] xnew = new double[n];
double[] xi = new double[n];
double[][] hessin = new double[n][n];
// Calculate starting function value and gradient and initialize the
// inverse Hessian to the unit matrix.
double f = func.g(x, g);
logger.info(String.format("BFGS: initial function value: %.5f", f));
for (int i = 0; i < n; i++) {
hessin[i][i] = 1.0;
// Initialize line direction.
xi[i] = -g[i];
}
double stpmax = STPMX * max(norm(x), n);
for (int iter = 1; iter <= maxIter; iter++) {
// The new function evaluation occurs in line search.
f = linesearch(func, x, f, g, xi, xnew, stpmax);
if (iter % 100 == 0) {
logger.info(String.format("BFGS: the function value after %3d iterations: %.5f", iter, f));
}
// update the line direction and current point.
for (int i = 0; i < n; i++) {
xi[i] = xnew[i] - x[i];
x[i] = xnew[i];
}
// Test for convergence on x.
test = 0.0;
for (int i = 0; i < n; i++) {
temp = abs(xi[i]) / max(abs(x[i]), 1.0);
if (temp > test) {
test = temp;
}
}
if (test < TOLX) {
logger.info(String.format("BFGS converges on x after %d iterations: %.5f", iter, f));
return f;
}
System.arraycopy(g, 0, dg, 0, n);
func.g(x, g);
// Test for convergence on zero gradient.
den = max(f, 1.0);
test = 0.0;
for (int i = 0; i < n; i++) {
temp = abs(g[i]) * max(abs(x[i]), 1.0) / den;
if (temp > test) {
test = temp;
}
}
if (test < gtol) {
logger.info(String.format("BFGS converges on gradient after %d iterations: %.5f", iter, f));
return f;
}
for (int i = 0; i < n; i++) {
dg[i] = g[i] - dg[i];
}
for (int i = 0; i < n; i++) {
hdg[i] = 0.0;
for (int j = 0; j < n; j++) {
hdg[i] += hessin[i][j] * dg[j];
}
}
fac = fae = sumdg = sumxi = 0.0;
for (int i = 0; i < n; i++) {
fac += dg[i] * xi[i];
fae += dg[i] * hdg[i];
sumdg += dg[i] * dg[i];
sumxi += xi[i] * xi[i];
}
// Skip update if fac is not sufficiently positive.
if (fac > sqrt(EPSILON * sumdg * sumxi)) {
fac = 1.0 / fac;
fad = 1.0 / fae;
// The vector that makes BFGS different from DFP.
for (int i = 0; i < n; i++) {
dg[i] = fac * xi[i] - fad * hdg[i];
}
// BFGS updating formula.
for (int i = 0; i < n; i++) {
for (int j = i; j < n; j++) {
hessin[i][j] += fac * xi[i] * xi[j] - fad * hdg[i] * hdg[j] + fae * dg[i] * dg[j];
hessin[j][i] = hessin[i][j];
}
}
}
// Calculate the next direction to go.
Arrays.fill(xi, 0.0);
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
xi[i] -= hessin[i][j] * g[j];
}
}
}
logger.warn(String.format("BFGS reaches maximum %d iterations: %.5f", maxIter, f));
return f;
} | @Test
public void testLBFGS() {
System.out.println("L-BFGS");
double[] x = new double[100];
for (int j = 1; j <= x.length; j += 2) {
x[j - 1] = -1.2;
x[j] = 1.2;
}
double result = BFGS.minimize(func, 5, x, 1E-5, 500);
System.out.println(Arrays.toString(x));
assertEquals(2.2877072513327043E-15, result, 1E-15);
} |
public static String toJsonString(Pipeline pipeline, ConfigContext ctx) {
final PipelineJsonRenderer visitor = new PipelineJsonRenderer(ctx);
pipeline.traverseTopologically(visitor);
return visitor.jsonBuilder.toString();
} | @Test
public void testEmptyPipeline() {
SamzaPipelineOptions options = PipelineOptionsFactory.create().as(SamzaPipelineOptions.class);
options.setRunner(SamzaRunner.class);
Pipeline p = Pipeline.create(options);
final Map<PValue, String> idMap = PViewToIdMapper.buildIdMap(p);
final Set<String> nonUniqueStateIds = StateIdParser.scan(p);
final ConfigContext ctx = new ConfigContext(idMap, nonUniqueStateIds, options);
String jsonDag =
"{ \"RootNode\": ["
+ " { \"fullName\":\"OuterMostNode\","
+ " \"ChildNodes\":[ ]}],\"graphLinks\": [],\"transformIOInfo\": []"
+ "}";
assertEquals(
JsonParser.parseString(jsonDag),
JsonParser.parseString(
PipelineJsonRenderer.toJsonString(p, ctx).replaceAll(System.lineSeparator(), "")));
} |
@Udf
public <T> boolean contains(
@UdfParameter final List<T> array,
@UdfParameter final T val
) {
return array != null && array.contains(val);
} | @Test
public void shouldFindStringInList() {
assertTrue(udf.contains(Arrays.asList("abc", "bd", "DC"), "DC"));
assertFalse(udf.contains(Arrays.asList("abc", "bd", "DC"), "dc"));
assertFalse(udf.contains(Arrays.asList("abc", "bd", "1"), 1));
} |
@Override
public int read(final byte[] b) throws IOException {
return this.read(b, 0, b.length);
} | @Test
public void testEncryptDecryptWithContentSizeMultipleOfEncryptingBufferSize() throws Exception {
final byte[] content = RandomUtils.nextBytes(1024 * 1024);
final ByteArrayInputStream plain = new ByteArrayInputStream(content);
final PlainFileKey key = Crypto.generateFileKey(PlainFileKey.Version.AES256GCM);
final SDSSession session = new SDSSession(new Host(new TestProtocol()), new DisabledX509TrustManager(), new DefaultX509KeyManager()) {
@Override
public SDSApiClient getClient() {
return new SDSApiClient(new MockHttpClient());
}
};
final TransferStatus status = new TransferStatus();
final ObjectWriter writer = session.getClient().getJSON().getContext(null).writerFor(FileKey.class);
final ByteArrayOutputStream out = new ByteArrayOutputStream();
writer.writeValue(out, TripleCryptConverter.toSwaggerFileKey(key));
status.setFilekey(ByteBuffer.wrap(out.toByteArray()));
final TripleCryptEncryptingInputStream encryptInputStream = new TripleCryptEncryptingInputStream(session, plain, Crypto.createFileEncryptionCipher(key), status);
final ByteArrayOutputStream os = new ByteArrayOutputStream();
new StreamCopier(StreamCancelation.noop, StreamProgress.noop).withLimit((long) content.length).withChunksize(32768).transfer(encryptInputStream, os);
encryptInputStream.close();
out.close();
final ByteArrayInputStream is = new ByteArrayInputStream(os.toByteArray());
final ObjectReader reader = session.getClient().getJSON().getContext(null).readerFor(FileKey.class);
final FileKey fileKey = reader.readValue(status.getFilekey().array());
assertNotNull(fileKey.getTag());
final TripleCryptDecryptingInputStream cryptInputStream = new TripleCryptDecryptingInputStream(is,
Crypto.createFileDecryptionCipher(TripleCryptConverter.toCryptoPlainFileKey(fileKey)),
TripleCryptConverter.base64StringToByteArray(fileKey.getTag()));
final byte[] compare = new byte[content.length];
IOUtils.read(cryptInputStream, compare);
assertArrayEquals(content, compare);
} |
protected boolean closeFile( String filename ) {
try {
data.getFileStreamsCollection().closeFile( filename );
} catch ( Exception e ) {
logError( "Exception trying to close file: " + e.toString() );
setErrors( 1 );
return false;
}
return true;
} | @Test
public void testCloseFileDataOutIsNotNullCase() {
textFileOutput =
new TextFileOutput( stepMockHelper.stepMeta, stepMockHelper.stepDataInterface, 0, stepMockHelper.transMeta,
stepMockHelper.trans );
textFileOutput.data = mock( TextFileOutputData.class );
textFileOutput.data.out = mock( CompressionOutputStream.class );
textFileOutput.closeFile();
assertNull( textFileOutput.data.out );
} |
@Description("compute xxhash64 hash")
@ScalarFunction
@SqlType(StandardTypes.VARBINARY)
public static Slice xxhash64(@SqlType(StandardTypes.VARBINARY) Slice slice)
{
Slice hash = Slices.allocate(Long.BYTES);
hash.setLong(0, Long.reverseBytes(XxHash64.hash(slice)));
return hash;
} | @Test
public void testXxhash64()
{
assertFunction("xxhash64(CAST('' AS VARBINARY))", VARBINARY, sqlVarbinaryHex("EF46DB3751D8E999"));
assertFunction("xxhash64(CAST('hashme' AS VARBINARY))", VARBINARY, sqlVarbinaryHex("F9D96E0E1165E892"));
} |
StreamsProducer streamsProducerForTask(final TaskId taskId) {
return activeTaskCreator.streamsProducerForTask(taskId);
} | @Test
public void shouldCloseActiveTasksAndPropagateExceptionsOnCleanShutdownWithExactlyOnceV1() {
when(activeTaskCreator.streamsProducerForTask(any())).thenReturn(mock(StreamsProducer.class));
shouldCloseActiveTasksAndPropagateExceptionsOnCleanShutdown(ProcessingMode.EXACTLY_ONCE_ALPHA);
} |
@SuppressWarnings("MethodMayBeStatic") // Non-static to support DI.
public long parse(final String text) {
final String date;
final String time;
final String timezone;
if (text.contains("T")) {
date = text.substring(0, text.indexOf('T'));
final String withTimezone = text.substring(text.indexOf('T') + 1);
timezone = getTimezone(withTimezone);
time = completeTime(withTimezone.substring(0, withTimezone.length() - timezone.length())
.replaceAll("Z$",""));
} else {
date = completeDate(text);
time = completeTime("");
timezone = "";
}
try {
final ZoneId zoneId = parseTimezone(timezone);
return PARSER.parse(date + "T" + time, zoneId);
} catch (final RuntimeException e) {
throw new KsqlException("Failed to parse timestamp '" + text
+ "': " + e.getMessage()
+ HELP_MESSAGE,
e
);
}
} | @Test
public void shouldParseYearMonth() {
// When:
assertThat(parser.parse("2020-02"), is(fullParse("2020-02-01T00:00:00.000+0000")));
} |
@Override
public Configuration toConfiguration(CommandLine commandLine) throws FlinkException {
final Configuration resultingConfiguration = super.toConfiguration(commandLine);
if (commandLine.hasOption(addressOption.getOpt())) {
String addressWithPort = commandLine.getOptionValue(addressOption.getOpt());
InetSocketAddress jobManagerAddress = NetUtils.parseHostPortAddress(addressWithPort);
setJobManagerAddressInConfig(resultingConfiguration, jobManagerAddress);
URL url = NetUtils.getCorrectHostnamePort(addressWithPort);
resultingConfiguration.set(RestOptions.PATH, url.getPath());
resultingConfiguration.set(SecurityOptions.SSL_REST_ENABLED, isHttpsProtocol(url));
}
resultingConfiguration.set(DeploymentOptions.TARGET, RemoteExecutor.NAME);
DynamicPropertiesUtil.encodeDynamicProperties(commandLine, resultingConfiguration);
return resultingConfiguration;
} | @Test
void testCommandLineMaterialization() throws Exception {
final String hostname = "home-sweet-home";
final String urlPath = "/some/other/path/index.html";
final int port = 1234;
final String[] args = {"-m", hostname + ':' + port + urlPath};
final AbstractCustomCommandLine defaultCLI = new DefaultCLI();
final CommandLine commandLine = defaultCLI.parseCommandLineOptions(args, false);
Configuration configuration = defaultCLI.toConfiguration(commandLine);
assertThat(configuration.get(RestOptions.ADDRESS)).isEqualTo(hostname);
assertThat(configuration.get(RestOptions.PORT)).isEqualTo(port);
final String httpProtocol = "http";
assertThat(configuration.get(SecurityOptions.SSL_REST_ENABLED)).isEqualTo(false);
assertThat(configuration.get(RestOptions.PATH)).isEqualTo(urlPath);
final String hostnameWithHttpScheme = httpProtocol + "://" + hostname;
final String[] httpArgs = {"-m", hostnameWithHttpScheme + ':' + port + urlPath};
final CommandLine httpCommandLine = defaultCLI.parseCommandLineOptions(httpArgs, false);
Configuration httpConfiguration = defaultCLI.toConfiguration(httpCommandLine);
assertThat(httpConfiguration.get(RestOptions.ADDRESS)).isEqualTo(hostname);
assertThat(httpConfiguration.get(RestOptions.PORT)).isEqualTo(port);
assertThat(httpConfiguration.get(SecurityOptions.SSL_REST_ENABLED)).isEqualTo(false);
assertThat(httpConfiguration.get(RestOptions.PATH)).isEqualTo(urlPath);
final String httpsProtocol = "https";
final String hostnameWithHttpsScheme = httpsProtocol + "://" + hostname;
final String[] httpsArgs = {"-m", hostnameWithHttpsScheme + ':' + port + urlPath};
final CommandLine httpsCommandLine = defaultCLI.parseCommandLineOptions(httpsArgs, false);
Configuration httpsConfiguration = defaultCLI.toConfiguration(httpsCommandLine);
assertThat(httpsConfiguration.get(RestOptions.ADDRESS)).isEqualTo(hostname);
assertThat(httpsConfiguration.get(RestOptions.PORT)).isEqualTo(port);
assertThat(httpsConfiguration.get(SecurityOptions.SSL_REST_ENABLED)).isEqualTo(true);
assertThat(httpsConfiguration.get(RestOptions.PATH)).isEqualTo(urlPath);
} |
@Override
protected SchemaTransform from(BigQueryFileLoadsWriteSchemaTransformConfiguration configuration) {
return new BigQueryWriteSchemaTransform(configuration);
} | @Test
public void testToWrite() {
List<
Pair<
BigQueryFileLoadsWriteSchemaTransformConfiguration.Builder,
BigQueryIO.Write<TableRow>>>
cases =
Arrays.asList(
Pair.of(
BigQueryFileLoadsWriteSchemaTransformConfiguration.builder()
.setTableSpec(BigQueryHelpers.toTableSpec(TABLE_REFERENCE))
.setCreateDisposition(CreateDisposition.CREATE_NEVER.name())
.setWriteDisposition(WriteDisposition.WRITE_EMPTY.name()),
BigQueryIO.writeTableRows()
.to(TABLE_REFERENCE)
.withCreateDisposition(CreateDisposition.CREATE_NEVER)
.withWriteDisposition(WriteDisposition.WRITE_EMPTY)
.withSchema(TABLE_SCHEMA)),
Pair.of(
BigQueryFileLoadsWriteSchemaTransformConfiguration.builder()
.setTableSpec(BigQueryHelpers.toTableSpec(TABLE_REFERENCE))
.setCreateDisposition(CreateDisposition.CREATE_IF_NEEDED.name())
.setWriteDisposition(WriteDisposition.WRITE_TRUNCATE.name()),
BigQueryIO.writeTableRows()
.to(TABLE_REFERENCE)
.withCreateDisposition(CreateDisposition.CREATE_IF_NEEDED)
.withWriteDisposition(WriteDisposition.WRITE_TRUNCATE)
.withSchema(TABLE_SCHEMA)));
for (Pair<
BigQueryFileLoadsWriteSchemaTransformConfiguration.Builder, BigQueryIO.Write<TableRow>>
caze : cases) {
BigQueryWriteSchemaTransform transform = transformFrom(caze.getLeft().build());
Map<Identifier, Item> gotDisplayData = DisplayData.from(transform.toWrite(SCHEMA)).asMap();
Map<Identifier, Item> wantDisplayData = DisplayData.from(caze.getRight()).asMap();
Set<Identifier> keys = new HashSet<>();
keys.addAll(gotDisplayData.keySet());
keys.addAll(wantDisplayData.keySet());
for (Identifier key : keys) {
Item got = null;
Item want = null;
if (gotDisplayData.containsKey(key)) {
got = gotDisplayData.get(key);
}
if (wantDisplayData.containsKey(key)) {
want = wantDisplayData.get(key);
}
assertEquals(want, got);
}
}
} |
@Override
public YamlShardingAutoTableRuleConfiguration swapToYamlConfiguration(final ShardingAutoTableRuleConfiguration data) {
YamlShardingAutoTableRuleConfiguration result = new YamlShardingAutoTableRuleConfiguration();
result.setLogicTable(data.getLogicTable());
result.setActualDataSources(data.getActualDataSources());
if (null != data.getShardingStrategy()) {
result.setShardingStrategy(shardingStrategySwapper.swapToYamlConfiguration(data.getShardingStrategy()));
}
if (null != data.getKeyGenerateStrategy()) {
result.setKeyGenerateStrategy(keyGenerateStrategySwapper.swapToYamlConfiguration(data.getKeyGenerateStrategy()));
}
if (null != data.getAuditStrategy()) {
result.setAuditStrategy(auditStrategySwapper.swapToYamlConfiguration(data.getAuditStrategy()));
}
return result;
} | @Test
void assertSwapToYamlConfiguration() {
YamlShardingAutoTableRuleConfigurationSwapper swapper = new YamlShardingAutoTableRuleConfigurationSwapper();
YamlShardingAutoTableRuleConfiguration actual = swapper.swapToYamlConfiguration(createShardingAutoTableRuleConfiguration());
assertThat(actual.getShardingStrategy().getStandard().getShardingAlgorithmName(), is("hash_mod"));
assertThat(actual.getKeyGenerateStrategy().getKeyGeneratorName(), is("auto_increment"));
assertThat(actual.getAuditStrategy().getAuditorNames(), is(Collections.singletonList("audit_algorithm")));
} |
@Override
public V put(K key, V value, Duration ttl) {
return get(putAsync(key, value, ttl));
} | @Test
public void testEntrySetByPatternTTL() {
RMapCacheNative<String, String> map = redisson.getMapCacheNative("simple", StringCodec.INSTANCE);
map.put("10", "100");
map.put("20", "200", Duration.ofMinutes(1));
map.put("30", "300");
assertThat(map.entrySet("?0")).containsExactlyInAnyOrder(new AbstractMap.SimpleEntry("10", "100"), new AbstractMap.SimpleEntry("20", "200"), new AbstractMap.SimpleEntry("30", "300"));
assertThat(map.entrySet("1")).isEmpty();
assertThat(map.entrySet("10")).containsExactlyInAnyOrder(new AbstractMap.SimpleEntry("10", "100"));
map.destroy();
} |
public static boolean isEditionBundled(Plugin plugin) {
return SONARSOURCE_ORGANIZATION.equalsIgnoreCase(plugin.getOrganization())
&& Arrays.stream(SONARSOURCE_COMMERCIAL_LICENSES).anyMatch(s -> s.equalsIgnoreCase(plugin.getLicense()));
} | @Test
public void isEditionBundled_on_Plugin_returns_false_for_license_Commercial_and_non_SonarSource_organization() {
Plugin plugin = newPlugin(randomAlphanumeric(3), randomizeCase("Commercial"));
assertThat(EditionBundledPlugins.isEditionBundled(plugin)).isFalse();
} |
@Override
public List<Feature> get() {
List<URI> featurePaths = featureOptions.getFeaturePaths();
List<Feature> features = loadFeatures(featurePaths);
if (features.isEmpty()) {
if (featurePaths.isEmpty()) {
log.warn(() -> "Got no path to feature directory or feature file");
} else {
log.warn(
() -> "No features found at " + featurePaths.stream().map(URI::toString).collect(joining(", ")));
}
}
return features;
} | @Test
void logs_message_if_no_feature_paths_are_given(LogRecordListener logRecordListener) {
Options featureOptions = Collections::emptyList;
FeaturePathFeatureSupplier supplier = new FeaturePathFeatureSupplier(classLoader, featureOptions, parser);
supplier.get();
assertThat(logRecordListener.getLogRecords().get(1).getMessage(),
containsString("Got no path to feature directory or feature file"));
} |
@Override
public void add(String field, String value, Map<String, String[]> data) {
if (! include(field, value)) {
return;
}
if (ALWAYS_SET_FIELDS.contains(field)) {
setAlwaysInclude(field, value, data);
return;
} else if (ALWAYS_ADD_FIELDS.contains(field)) {
addAlwaysInclude(field, value, data);
return;
}
StringSizePair filterKey = filterKey(field, value, data);
if (! data.containsKey(filterKey.string)) {
setFilterKey(filterKey, value, data);
return;
}
String[] vals = data.get(filterKey.string);
if (vals != null && vals.length >= maxValuesPerField) {
setTruncated(data);
return;
}
Integer fieldSizeInteger = fieldSizes.get(filterKey.string);
int fieldSize = fieldSizeInteger == null ? 0 : fieldSizeInteger;
int maxAllowed = maxAllowedToAdd(filterKey);
if (maxAllowed <= 0) {
setTruncated(data);
return;
}
int valueLength = estimateSize(value);
String toAdd = value;
if (valueLength > maxAllowed) {
toAdd = truncate(value, maxAllowed, data);
valueLength = estimateSize(toAdd);
if (valueLength == 0) {
return;
}
}
int addedOverall = valueLength;
if (fieldSizeInteger == null) {
//if there was no value before, we're adding
//a key. If there was a value before, do not
//add the key length.
addedOverall += filterKey.size;
}
estimatedSize += addedOverall;
fieldSizes.put(filterKey.string, valueLength + fieldSize);
data.put(filterKey.string, appendValue(data.get(filterKey.string), toAdd ));
} | @Test
public void testMaxFieldValues() throws Exception {
Metadata metadata = filter(100, 10000, 10000, 3, null, true);
for (int i = 0; i < 10; i++) {
metadata.add(TikaCoreProperties.SUBJECT, "ab");
}
assertEquals(3, metadata.getValues(TikaCoreProperties.SUBJECT).length);
} |
@Override
public String toString() {
return String.format("Request [%s %s: %s headers and %s]", method, url,
headers == null ? "without" : "with " + headers,
charset == null ? "no charset" : "charset " + charset);
} | @Test
void testToString() throws Exception {
assertThat(requestKey.toString()).startsWith("Request [GET a: ");
assertThat(requestKey.toString()).contains(" with my-header=[val] ", " UTF-16]");
} |
@Udf
public <T extends Comparable<? super T>> T arrayMin(@UdfParameter(
description = "Array of values from which to find the minimum") final List<T> input) {
if (input == null) {
return null;
}
T candidate = null;
for (T thisVal : input) {
if (thisVal != null) {
if (candidate == null) {
candidate = thisVal;
} else if (thisVal.compareTo(candidate) < 0) {
candidate = thisVal;
}
}
}
return candidate;
} | @Test
public void shouldReturnNullForListOfNullInput() {
final List<Integer> input = Arrays.asList(null, null, null);
assertThat(udf.arrayMin(input), is(nullValue()));
} |
static Object parseValue( String key, String value, Class<?> valueType )
throws IllegalArgumentException
{
return parseValue( key, value, valueType, null, v -> v, Collections.emptyList() );
} | @Test
void parseValue() {
assertEquals( null, UIDefaultsLoader.parseValue( "dummy", "null", null ) );
assertEquals( false, UIDefaultsLoader.parseValue( "dummy", "false", null ) );
assertEquals( true, UIDefaultsLoader.parseValue( "dummy", "true", null ) );
assertEquals( "hello", UIDefaultsLoader.parseValue( "dummy", "hello", null ) );
assertEquals( "hello", UIDefaultsLoader.parseValue( "dummy", "\"hello\"", null ) );
assertEquals( "null", UIDefaultsLoader.parseValue( "dummy", "\"null\"", null ) );
assertEquals( 'a', UIDefaultsLoader.parseValue( "dummyChar", "a", null ) );
assertEquals( 123, UIDefaultsLoader.parseValue( "dummy", "123", null ) );
assertEquals( 123, UIDefaultsLoader.parseValue( "dummyWidth", "123", null ) );
assertEquals( 1.23f, UIDefaultsLoader.parseValue( "dummy", "1.23", null ) );
assertEquals( 1.23f, UIDefaultsLoader.parseValue( "dummyWidth", "1.23", null ) );
assertEquals( new Insets( 1,2,3,4 ), UIDefaultsLoader.parseValue( "dummyInsets", "1,2,3,4", null ) );
assertEquals( new Dimension( 1,2 ), UIDefaultsLoader.parseValue( "dummySize", "1,2", null ) );
assertEquals( new Color( 0xff0000 ), UIDefaultsLoader.parseValue( "dummy", "#f00", null ) );
assertEquals( new Color( 0xff0000 ), UIDefaultsLoader.parseValue( "dummyColor", "#f00", null ) );
} |
@Override
public void refreshAll() {
// refresh all configs here
getApplication().ifPresent(ApplicationConfig::refresh);
getMonitor().ifPresent(MonitorConfig::refresh);
getMetrics().ifPresent(MetricsConfig::refresh);
getTracing().ifPresent(TracingConfig::refresh);
getSsl().ifPresent(SslConfig::refresh);
getProtocols().forEach(ProtocolConfig::refresh);
getRegistries().forEach(RegistryConfig::refresh);
getConfigCenters().forEach(ConfigCenterConfig::refresh);
getMetadataConfigs().forEach(MetadataReportConfig::refresh);
} | @Test
void testRefreshAll() {
configManager.refreshAll();
moduleConfigManager.refreshAll();
} |
@VisibleForTesting
static String formatTimestamp(Long timestampMicro) {
// timestampMicro is in "microseconds since epoch" format,
// e.g., 1452062291123456L means "2016-01-06 06:38:11.123456 UTC".
// Separate into seconds and microseconds.
long timestampSec = timestampMicro / 1_000_000;
long micros = timestampMicro % 1_000_000;
if (micros < 0) {
micros += 1_000_000;
timestampSec -= 1;
}
String dayAndTime = DATE_AND_SECONDS_FORMATTER.print(timestampSec * 1000);
if (micros == 0) {
return String.format("%s UTC", dayAndTime);
}
return String.format("%s.%06d UTC", dayAndTime, micros);
} | @Test
public void testFormatTimestampLeadingZeroesOnMicros() {
assertThat(
BigQueryAvroUtils.formatTimestamp(1452062291000456L),
equalTo("2016-01-06 06:38:11.000456 UTC"));
} |
@Override
public int getHoldability() {
return 0;
} | @Test
void assertGetHoldability() {
assertThat(connection.getHoldability(), is(0));
} |
@Override
public void write(int b) {
ensureAvailable(1);
buffer[pos++] = (byte) (b);
} | @Test(expected = IndexOutOfBoundsException.class)
public void testWriteForBOffLen_negativeOff() {
out.write(TEST_DATA, -1, 3);
} |
public static Uuid fromString(String str) {
if (str.length() > 24) {
throw new IllegalArgumentException("Input string with prefix `"
+ str.substring(0, 24) + "` is too long to be decoded as a base64 UUID");
}
ByteBuffer uuidBytes = ByteBuffer.wrap(Base64.getUrlDecoder().decode(str));
if (uuidBytes.remaining() != 16) {
throw new IllegalArgumentException("Input string `" + str + "` decoded as "
+ uuidBytes.remaining() + " bytes, which is not equal to the expected 16 bytes "
+ "of a base64-encoded UUID");
}
return new Uuid(uuidBytes.getLong(), uuidBytes.getLong());
} | @Test
public void testFromStringWithInvalidInput() {
String oversizeString = Base64.getUrlEncoder().withoutPadding().encodeToString(new byte[32]);
assertThrows(IllegalArgumentException.class, () -> Uuid.fromString(oversizeString));
String undersizeString = Base64.getUrlEncoder().withoutPadding().encodeToString(new byte[4]);
assertThrows(IllegalArgumentException.class, () -> Uuid.fromString(undersizeString));
} |
public static String getPrettyStringMs(long timestampMs) {
StringBuilder builder = new StringBuilder();
printTimeMs(timestampMs, builder);
return builder.toString();
} | @Test
public void testGetPrettyStringMs() {
// 6hour1min
Assert.assertEquals(DebugUtil.getPrettyStringMs(21660222), "6h1m");
// 1min222ms
Assert.assertEquals(DebugUtil.getPrettyStringMs(60222), "1m");
// 2s222ms
Assert.assertEquals(DebugUtil.getPrettyStringMs(2222), "2s222ms");
// 22ms
Assert.assertEquals(DebugUtil.getPrettyStringMs(22), "22ms");
} |
@CheckForNull
public Duration calculate(DefaultIssue issue) {
if (issue.isFromExternalRuleEngine()) {
return issue.effort();
}
Rule rule = ruleRepository.getByKey(issue.ruleKey());
DebtRemediationFunction fn = rule.getRemediationFunction();
if (fn != null) {
verifyEffortToFix(issue, fn);
Duration debt = Duration.create(0);
String gapMultiplier = fn.gapMultiplier();
if (fn.type().usesGapMultiplier() && !Strings.isNullOrEmpty(gapMultiplier)) {
int effortToFixValue = MoreObjects.firstNonNull(issue.gap(), 1).intValue();
// TODO convert to Duration directly in Rule#remediationFunction -> better performance + error handling
debt = durations.decode(gapMultiplier).multiply(effortToFixValue);
}
String baseEffort = fn.baseEffort();
if (fn.type().usesBaseEffort() && !Strings.isNullOrEmpty(baseEffort)) {
// TODO convert to Duration directly in Rule#remediationFunction -> better performance + error handling
debt = debt.add(durations.decode(baseEffort));
}
return debt;
}
return null;
} | @Test
public void constant_function() {
int constant = 2;
issue.setGap(null);
rule.setFunction(new DefaultDebtRemediationFunction(DebtRemediationFunction.Type.CONSTANT_ISSUE, null, constant + "min"));
assertThat(underTest.calculate(issue).toMinutes()).isEqualTo(2);
} |
public static Read<Solace.Record> read() {
return new Read<Solace.Record>(
Read.Configuration.<Solace.Record>builder()
.setTypeDescriptor(TypeDescriptor.of(Solace.Record.class))
.setParseFn(SolaceRecordMapper::map)
.setTimestampFn(SENDER_TIMESTAMP_FUNCTION)
.setDeduplicateRecords(DEFAULT_DEDUPLICATE_RECORDS)
.setWatermarkIdleDurationThreshold(DEFAULT_WATERMARK_IDLE_DURATION_THRESHOLD));
} | @Test
public void testNoQueueAndTopicSet() {
Read<Record> spec = SolaceIO.read();
assertThrows(IllegalStateException.class, () -> spec.validate(pipeline.getOptions()));
} |
static BytecodeExpression greaterThan(BytecodeExpression left, BytecodeExpression right)
{
checkArgumentTypes(left, right);
OpCode comparisonInstruction;
OpCode noMatchJumpInstruction;
Class<?> type = left.getType().getPrimitiveType();
if (type == int.class) {
comparisonInstruction = null;
noMatchJumpInstruction = IF_ICMPLE;
}
else if (type == long.class) {
comparisonInstruction = LCMP;
noMatchJumpInstruction = IFLE;
}
else if (type == float.class) {
comparisonInstruction = FCMPL;
noMatchJumpInstruction = IFLE;
}
else if (type == double.class) {
comparisonInstruction = DCMPL;
noMatchJumpInstruction = IFLE;
}
else {
throw new IllegalArgumentException("Greater than does not support " + type);
}
return new ComparisonBytecodeExpression(">", comparisonInstruction, noMatchJumpInstruction, left, right);
} | @Test
public void testGreaterThan()
throws Exception
{
assertBytecodeExpression(greaterThan(constantInt(3), constantInt(7)), 3 > 7, "(3 > 7)");
assertBytecodeExpression(greaterThan(constantInt(7), constantInt(3)), 7 > 3, "(7 > 3)");
assertBytecodeExpression(greaterThan(constantInt(7), constantInt(7)), 7 > 7, "(7 > 7)");
assertBytecodeExpression(greaterThan(constantLong(3L), constantLong(7L)), 3L > 7L, "(3L > 7L)");
assertBytecodeExpression(greaterThan(constantLong(7L), constantLong(3L)), 7L > 3L, "(7L > 3L)");
assertBytecodeExpression(greaterThan(constantLong(7L), constantLong(7L)), 7L > 7L, "(7L > 7L)");
assertBytecodeExpression(greaterThan(constantFloat(3.3f), constantFloat(7.7f)), 3.3f > 7.7f, "(3.3f > 7.7f)");
assertBytecodeExpression(greaterThan(constantFloat(7.7f), constantFloat(3.3f)), 7.7f > 3.3f, "(7.7f > 3.3f)");
assertBytecodeExpression(greaterThan(constantFloat(7.7f), constantFloat(7.7f)), 7.7f > 7.7f, "(7.7f > 7.7f)");
assertBytecodeExpression(greaterThan(constantFloat(Float.NaN), constantFloat(7.7f)), Float.NaN > 7.7f, "(NaNf > 7.7f)");
assertBytecodeExpression(greaterThan(constantFloat(7.7f), constantFloat(Float.NaN)), 7.7f > Float.NaN, "(7.7f > NaNf)");
assertBytecodeExpression(greaterThan(constantDouble(3.3), constantDouble(7.7)), 3.3 > 7.7, "(3.3 > 7.7)");
assertBytecodeExpression(greaterThan(constantDouble(7.7), constantDouble(3.3)), 7.7 > 3.3, "(7.7 > 3.3)");
assertBytecodeExpression(greaterThan(constantDouble(7.7), constantDouble(7.7)), 7.7 > 7.7, "(7.7 > 7.7)");
assertBytecodeExpression(greaterThan(constantDouble(Double.NaN), constantDouble(7.7)), Double.NaN > 7.7, "(NaN > 7.7)");
assertBytecodeExpression(greaterThan(constantDouble(7.7), constantDouble(Double.NaN)), 7.7 > Double.NaN, "(7.7 > NaN)");
} |
public static List<String> resolveCompsDependency(Service service) {
List<String> components = new ArrayList<String>();
for (Component component : service.getComponents()) {
int depSize = component.getDependencies().size();
if (!components.contains(component.getName())) {
components.add(component.getName());
}
if (depSize != 0) {
for (String depComp : component.getDependencies()) {
if (!components.contains(depComp)) {
components.add(0, depComp);
}
}
}
}
return components;
} | @Test
public void testResolveCompsCircularDependency() {
Service service = createExampleApplication();
List<String> dependencies = new ArrayList<String>();
List<String> dependencies2 = new ArrayList<String>();
dependencies.add("compb");
dependencies2.add("compa");
Component compa = createComponent("compa");
compa.setDependencies(dependencies);
Component compb = createComponent("compb");
compa.setDependencies(dependencies2);
service.addComponent(compa);
service.addComponent(compb);
List<String> order = ServiceApiUtil.resolveCompsDependency(service);
List<String> expected = new ArrayList<String>();
expected.add("compa");
expected.add("compb");
for (int i = 0; i < expected.size(); i++) {
Assert.assertEquals("Components are not equal.", expected.get(i),
order.get(i));
}
} |
@Override
public Column convert(BasicTypeDefine typeDefine) {
PhysicalColumn.PhysicalColumnBuilder builder =
PhysicalColumn.builder()
.name(typeDefine.getName())
.sourceType(typeDefine.getColumnType())
.nullable(typeDefine.isNullable())
.defaultValue(typeDefine.getDefaultValue())
.comment(typeDefine.getComment());
String xuguDataType = typeDefine.getDataType().toUpperCase();
switch (xuguDataType) {
case XUGU_BOOLEAN:
case XUGU_BOOL:
builder.dataType(BasicType.BOOLEAN_TYPE);
break;
case XUGU_TINYINT:
builder.dataType(BasicType.BYTE_TYPE);
break;
case XUGU_SMALLINT:
builder.dataType(BasicType.SHORT_TYPE);
break;
case XUGU_INT:
case XUGU_INTEGER:
builder.dataType(BasicType.INT_TYPE);
break;
case XUGU_BIGINT:
builder.dataType(BasicType.LONG_TYPE);
break;
case XUGU_FLOAT:
builder.dataType(BasicType.FLOAT_TYPE);
break;
case XUGU_DOUBLE:
builder.dataType(BasicType.DOUBLE_TYPE);
break;
case XUGU_NUMBER:
case XUGU_DECIMAL:
case XUGU_NUMERIC:
DecimalType decimalType;
if (typeDefine.getPrecision() != null && typeDefine.getPrecision() > 0) {
decimalType =
new DecimalType(
typeDefine.getPrecision().intValue(), typeDefine.getScale());
} else {
decimalType = new DecimalType(DEFAULT_PRECISION, DEFAULT_SCALE);
}
builder.dataType(decimalType);
builder.columnLength(Long.valueOf(decimalType.getPrecision()));
builder.scale(decimalType.getScale());
break;
case XUGU_CHAR:
case XUGU_NCHAR:
builder.dataType(BasicType.STRING_TYPE);
if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) {
builder.columnLength(TypeDefineUtils.charTo4ByteLength(1L));
} else {
builder.columnLength(typeDefine.getLength());
}
break;
case XUGU_VARCHAR:
case XUGU_VARCHAR2:
builder.dataType(BasicType.STRING_TYPE);
if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) {
builder.columnLength(TypeDefineUtils.charTo4ByteLength(MAX_VARCHAR_LENGTH));
} else {
builder.columnLength(typeDefine.getLength());
}
break;
case XUGU_CLOB:
builder.dataType(BasicType.STRING_TYPE);
builder.columnLength(BYTES_2GB - 1);
break;
case XUGU_JSON:
case XUGU_GUID:
builder.dataType(BasicType.STRING_TYPE);
break;
case XUGU_BINARY:
builder.dataType(PrimitiveByteArrayType.INSTANCE);
builder.columnLength(MAX_BINARY_LENGTH);
break;
case XUGU_BLOB:
builder.dataType(PrimitiveByteArrayType.INSTANCE);
builder.columnLength(BYTES_2GB - 1);
break;
case XUGU_DATE:
builder.dataType(LocalTimeType.LOCAL_DATE_TYPE);
break;
case XUGU_TIME:
case XUGU_TIME_WITH_TIME_ZONE:
builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
break;
case XUGU_DATETIME:
case XUGU_DATETIME_WITH_TIME_ZONE:
builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
break;
case XUGU_TIMESTAMP:
case XUGU_TIMESTAMP_WITH_TIME_ZONE:
builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
if (typeDefine.getScale() == null) {
builder.scale(TIMESTAMP_DEFAULT_SCALE);
} else {
builder.scale(typeDefine.getScale());
}
break;
default:
throw CommonError.convertToSeaTunnelTypeError(
DatabaseIdentifier.XUGU, xuguDataType, typeDefine.getName());
}
return builder.build();
} | @Test
public void testConvertFloat() {
BasicTypeDefine<Object> typeDefine =
BasicTypeDefine.builder()
.name("test")
.columnType("float")
.dataType("float")
.build();
Column column = XuguTypeConverter.INSTANCE.convert(typeDefine);
Assertions.assertEquals(typeDefine.getName(), column.getName());
Assertions.assertEquals(BasicType.FLOAT_TYPE, column.getDataType());
Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
} |
@Override
public void addListener(String key, String group, ConfigurationListener listener) {
String listenerKey = buildListenerKey(key, group);
NacosConfigListener nacosConfigListener = ConcurrentHashMapUtils.computeIfAbsent(
watchListenerMap, listenerKey, k -> createTargetListener(key, group));
nacosConfigListener.addListener(listener);
try {
configService.addListener(key, group, nacosConfigListener);
} catch (NacosException e) {
logger.error(CONFIG_ERROR_NACOS, "", "", e.getMessage(), e);
}
} | @Test
void testAddListener() throws Exception {
CountDownLatch latch = new CountDownLatch(4);
TestListener listener1 = new TestListener(latch);
TestListener listener2 = new TestListener(latch);
TestListener listener3 = new TestListener(latch);
TestListener listener4 = new TestListener(latch);
config.addListener("AService.configurators", listener1);
config.addListener("AService.configurators", listener2);
config.addListener("testapp.tag-router", listener3);
config.addListener("testapp.tag-router", listener4);
put("AService.configurators", "new value1");
Thread.sleep(200);
put("testapp.tag-router", "new value2");
Thread.sleep(200);
put("testapp", "new value3");
Thread.sleep(5000);
latch.await();
Assertions.assertEquals(1, listener1.getCount("AService.configurators"));
Assertions.assertEquals(1, listener2.getCount("AService.configurators"));
Assertions.assertEquals(1, listener3.getCount("testapp.tag-router"));
Assertions.assertEquals(1, listener4.getCount("testapp.tag-router"));
Assertions.assertEquals("new value1", listener1.getValue());
Assertions.assertEquals("new value1", listener2.getValue());
Assertions.assertEquals("new value2", listener3.getValue());
Assertions.assertEquals("new value2", listener4.getValue());
} |
public ResT receive(long timeoutMs) throws IOException {
if (mCompleted) {
return null;
}
if (mCanceled) {
throw new CancelledException(formatErrorMessage("Stream is already canceled."));
}
long startMs = System.currentTimeMillis();
while (true) {
long waitedForMs = System.currentTimeMillis() - startMs;
if (waitedForMs >= timeoutMs) {
throw new DeadlineExceededException(formatErrorMessage(
"Timeout waiting for response after %dms. clientClosed: %s clientCancelled: %s "
+ "serverClosed: %s", timeoutMs, mClosed, mCanceled, mClosedFromRemote));
}
// Wait for a minute max
long waitMs = Math.min(timeoutMs - waitedForMs, Constants.MINUTE_MS);
try {
Object response = mResponses.poll(waitMs, TimeUnit.MILLISECONDS);
if (response == null) {
checkError(); // The stream could have errored while we were waiting
// Log a warning before looping again
LOG.warn("Client did not receive message from stream, will wait again. totalWaitMs: {} "
+ "clientClosed: {} clientCancelled: {} serverClosed: {} description: {}",
System.currentTimeMillis() - startMs, mClosed, mCanceled, mClosedFromRemote,
mDescription);
continue;
}
if (response == mResponseObserver) {
mCompleted = true;
return null;
}
checkError();
return (ResT) response;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new CancelledException(
formatErrorMessage("Interrupted while waiting for response."), e);
}
}
} | @Test
public void receiveAfterResponseArrives() throws Exception {
WriteResponse response = WriteResponse.newBuilder().build();
EXECUTOR.submit(() -> {
try {
// push response after a short period of time
Thread.sleep(SHORT_TIMEOUT);
mResponseObserver.onNext(response);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
});
WriteResponse actualResponse = mStream.receive(TIMEOUT);
assertEquals(response, actualResponse);
} |
public SchemaProvider getSchemaProvider() {
if (batch.isPresent() && schemaProvider == null) {
throw new HoodieException("Please provide a valid schema provider class!");
}
return Option.ofNullable(schemaProvider).orElseGet(NullSchemaProvider::getInstance);
} | @Test
public void getSchemaProviderShouldReturnGivenSchemaProvider() {
SchemaProvider schemaProvider = new RowBasedSchemaProvider(null);
final InputBatch<String> inputBatch = new InputBatch<>(Option.of("foo"), null, schemaProvider);
assertSame(schemaProvider, inputBatch.getSchemaProvider());
} |
@Override
public ImmutableSet<E> removed(E e) {
return new PCollectionsImmutableSet<>(underlying().minus(e));
} | @Test
public void testDelegationOfRemoved() {
new PCollectionsHashSetWrapperDelegationChecker<>()
.defineMockConfigurationForFunctionInvocation(mock -> mock.minus(eq(this)), SINGLETON_SET)
.defineWrapperFunctionInvocationAndMockReturnValueTransformation(wrapper -> wrapper.removed(this), identity())
.expectWrapperToWrapMockFunctionReturnValue()
.doFunctionDelegationCheck();
} |
@Override
public void start() {
if (isStarted())
return;
try {
ServerSocket socket = getServerSocketFactory().createServerSocket(getPort(), getBacklog(),
getInetAddress());
ServerListener<RemoteReceiverClient> listener = createServerListener(socket);
runner = createServerRunner(listener, getContext().getExecutorService());
runner.setContext(getContext());
getContext().getExecutorService().execute(runner);
super.start();
} catch (Exception ex) {
addError("server startup error: " + ex, ex);
}
} | @Test
public void testStartWhenAlreadyStarted() throws Exception {
appender.start();
appender.start();
Assertions.assertEquals(1, runner.getStartCount());
} |
// Reads all lines from files matching `filePattern`, retrying on IOException
// or an empty match until the supplied BackOff is exhausted.
// Throws IOException (wrapping the last failure, possibly null) when retries run out.
@Override
public List<String> readFilesWithRetries(Sleeper sleeper, BackOff backOff)
throws IOException, InterruptedException {
IOException lastException = null;
do {
try {
Collection<Metadata> files = FileSystems.match(filePattern).metadata();
LOG.debug(
"Found file(s) {} by matching the path: {}",
files.stream()
.map(Metadata::resourceId)
.map(ResourceId::getFilename)
.collect(Collectors.joining(",")),
filePattern);
if (files.isEmpty()) {
// `continue` in a do-while jumps to the while-condition, so an empty
// match still consumes a backoff step before retrying.
continue;
}
// Read data from file paths
return readLines(files);
} catch (IOException e) {
// Ignore and retry
lastException = e;
LOG.warn("Error in file reading. Ignore and retry.");
}
} while (BackOffUtils.next(sleeper, backOff));
// Failed after max retries
throw new IOException(
String.format("Unable to read file(s) after retrying %d times", MAX_READ_RETRIES),
lastException);
} | @Test
// An existing-but-empty shard file yields an empty line list (not a retry failure).
public void testReadEmpty() throws Exception {
File emptyFile = tmpFolder.newFile("result-000-of-001");
Files.asCharSink(emptyFile, StandardCharsets.UTF_8).write("");
FilePatternMatchingShardedFile shardedFile = new FilePatternMatchingShardedFile(filePattern);
assertThat(shardedFile.readFilesWithRetries(), empty());
} |
// Parses a PEM bundle (certificates + one private key, optionally PKCS#8-encrypted)
// into a CA object using Bouncy Castle.
// Throws CACreationException when the key is encrypted without a password, when the
// password is wrong, or when the bundle lacks a key or certificate.
public static CA readCA(final String pemFileContent, final String keyPassword) throws CACreationException {
try (var bundleReader = new StringReader(pemFileContent)) {
PEMParser pemParser = new PEMParser(bundleReader);
JcaPEMKeyConverter converter = new JcaPEMKeyConverter().setProvider("BC");
var certificates = new ArrayList<Certificate>();
PrivateKey privateKey = null;
var pemObjects = readPemObjects(pemParser);
for (var pemObject : pemObjects) {
if (pemObject instanceof X509Certificate cert) {
certificates.add(cert);
} else if (pemObject instanceof X509CertificateHolder cert) {
// BC holder form — convert to a JCA certificate.
certificates.add(new JcaX509CertificateConverter().getCertificate(cert));
} else if (pemObject instanceof PKCS8EncryptedPrivateKeyInfo encryptedPrivateKey) {
if (keyPassword == null || keyPassword.isBlank()) {
throw new CACreationException("Private key is encrypted, but no password was supplied!");
}
var decryptorBuilder = new JceOpenSSLPKCS8DecryptorProviderBuilder().setProvider("BC");
var keyDecryptorBuilder = decryptorBuilder.build(keyPassword.toCharArray());
var privateKeyInfo = encryptedPrivateKey.decryptPrivateKeyInfo(keyDecryptorBuilder);
privateKey = converter.getPrivateKey(privateKeyInfo);
} else if (pemObject instanceof PrivateKeyInfo privateKeyInfo) {
// Unencrypted key — last key in the bundle wins if there are several.
privateKey = converter.getPrivateKey(privateKeyInfo);
}
}
if (privateKey == null) {
throw new CACreationException("No private key supplied in CA bundle!");
}
if (certificates.isEmpty()) {
throw new CACreationException("No certificate supplied in CA bundle!");
}
return new CA(certificates, privateKey);
} catch (PKCSException e) {
// Thrown by decryptPrivateKeyInfo — most likely a bad password.
throw new CACreationException("Error while decrypting private key. Wrong password?", e);
} catch (CertificateException | IOException | OperatorCreationException e) {
throw new CACreationException("Failed to parse CA bundle: ", e);
}
} | @Test
// A bundle containing only a certificate must be rejected with a clear message.
void throwsExceptionIfPrivateKeyIsMissing() throws Exception {
assertThatThrownBy(() -> PemCaReader.readCA(PEM_CERT, null))
.isInstanceOf(CACreationException.class)
.hasMessage("No private key supplied in CA bundle!");
} |
// Convenience overload: resolves the staging directory for the current login user
// by delegating to the three-argument variant.
public static Path getStagingDir(Cluster cluster, Configuration conf)
throws IOException, InterruptedException {
UserGroupInformation user = UserGroupInformation.getLoginUser();
return getStagingDir(cluster, conf, user);
} | @Test
// Staging dir owned under the user's SHORT name must still match a caller whose
// UGI carries the FULL user name (short-name comparison path).
public void testGetStagingDirWhenShortFileOwnerNameAndFullUserName()
throws IOException, InterruptedException {
Cluster cluster = mock(Cluster.class);
Configuration conf = new Configuration();
String stagingDirOwner = USER_1_SHORT_NAME;
Path stagingPath = mock(Path.class);
UserGroupInformation user = UserGroupInformation
.createUserForTesting(USER_1, GROUP_NAMES);
assertEquals(USER_1, user.getUserName());
FileSystem fs = new FileSystemTestHelper.MockFileSystem();
FileStatus fileStatus = new FileStatus(1, true, 1, 1, 100L, 100L,
FsPermission.getDefault(), stagingDirOwner, stagingDirOwner,
stagingPath);
when(stagingPath.getFileSystem(conf)).thenReturn(fs);
when(fs.getFileStatus(stagingPath)).thenReturn(fileStatus);
when(cluster.getStagingAreaDir()).thenReturn(stagingPath);
assertEquals(stagingPath,
JobSubmissionFiles.getStagingDir(cluster, conf, user));
} |
// True only when REST SSL is enabled AND mutual-auth is explicitly switched on;
// the authentication flag alone is not sufficient.
public static boolean isRestSSLAuthenticationEnabled(Configuration sslConfig) {
checkNotNull(sslConfig, "sslConfig");
return isRestSSLEnabled(sslConfig) && sslConfig.get(SSL_REST_AUTHENTICATION_ENABLED);
} | @Test
// Covers all three combinations: SSL off, SSL on with default auth, both on.
void checkEnableRestSSLAuthentication() {
// SSL has to be enabled
Configuration noSSLOptions = new Configuration();
noSSLOptions.set(SecurityOptions.SSL_REST_ENABLED, false);
noSSLOptions.set(SecurityOptions.SSL_REST_AUTHENTICATION_ENABLED, true);
assertThat(SecurityOptions.isRestSSLAuthenticationEnabled(noSSLOptions)).isFalse();
// authentication is disabled by default
Configuration defaultOptions = new Configuration();
defaultOptions.set(SecurityOptions.SSL_REST_ENABLED, true);
assertThat(SecurityOptions.isRestSSLAuthenticationEnabled(defaultOptions)).isFalse();
Configuration options = new Configuration();
options.set(SecurityOptions.SSL_REST_ENABLED, true);
options.set(SecurityOptions.SSL_REST_AUTHENTICATION_ENABLED, true);
assertThat(SecurityOptions.isRestSSLAuthenticationEnabled(options)).isTrue();
} |
// Returns `percentage` of the currently available memory (bytes, truncated).
// Valid range is (0, 1]; anything else is rejected up front.
public static long calculate(final float percentage) {
if (percentage <= 0 || percentage > 1) {
throw new IllegalArgumentException();
}
// Refresh the cached availability figure if it is stale before sizing.
checkAndScheduleRefresh();
return (long) (maxAvailable() * percentage);
} | @Test
// Out-of-range percentages (>1, 0, negative) must all throw.
public void testCalculateWhenIllegalPercentage() {
float largerThanOne = 2;
float zero = 0;
float lessThanZero = -1;
assertThrows(IllegalArgumentException.class, () -> MemoryLimitCalculator.calculate(largerThanOne));
assertThrows(IllegalArgumentException.class, () -> MemoryLimitCalculator.calculate(zero));
assertThrows(IllegalArgumentException.class, () -> MemoryLimitCalculator.calculate(lessThanZero));
} |
// Blocks indefinitely (-1 = no timeout) until a semaphore lease is acquired,
// then wraps the lock node path in a Lease handle.
public Lease acquire() throws Exception {
String path = internals.attemptLock(-1, null, null);
return makeLease(path);
} | @Test
// Concurrency test: CLIENT_QTY clients share a semaphore of MAX permits; the
// observed peak of simultaneous holders must never exceed MAX, and every
// client must eventually get a lease.
public void testRelease1AtATime() throws Exception {
final Timing timing = new Timing();
final int CLIENT_QTY = 10;
final int MAX = CLIENT_QTY / 2;
final AtomicInteger maxLeases = new AtomicInteger(0);
final AtomicInteger activeQty = new AtomicInteger(0);
final AtomicInteger uses = new AtomicInteger(0);
List<Future<Object>> futures = Lists.newArrayList();
ExecutorService service = Executors.newFixedThreadPool(CLIENT_QTY);
for (int i = 0; i < CLIENT_QTY; ++i) {
Future<Object> f = service.submit(new Callable<Object>() {
@Override
public Object call() throws Exception {
CuratorFramework client = CuratorFrameworkFactory.newClient(
server.getConnectString(), timing.session(), timing.connection(), new RetryOneTime(1));
client.start();
try {
InterProcessSemaphoreV2 semaphore = new InterProcessSemaphoreV2(client, "/test", MAX);
Lease lease = semaphore.acquire(timing.forWaiting().seconds(), TimeUnit.SECONDS);
assertNotNull(lease);
uses.incrementAndGet();
try {
// Track the high-water mark of concurrent lease holders.
synchronized (maxLeases) {
int qty = activeQty.incrementAndGet();
if (qty > maxLeases.get()) {
maxLeases.set(qty);
}
}
timing.sleepABit();
} finally {
activeQty.decrementAndGet();
lease.close();
}
} finally {
TestCleanState.closeAndTestClean(client);
}
return null;
}
});
futures.add(f);
}
for (Future<Object> f : futures) {
f.get();
}
assertEquals(uses.get(), CLIENT_QTY);
assertEquals(maxLeases.get(), MAX);
} |
// Entry point of the Commander saga: placing an order kicks off the shipping
// request, which drives the rest of the workflow.
void placeOrder(Order order) {
sendShippingRequest(order);
} | @Test
// Exercises placeOrder under progressively scaled-down time limits and checks
// an order id is always assigned.
void testPlaceOrderShortDuration() throws Exception {
long paymentTime = timeLimits.paymentTime();
long queueTaskTime = timeLimits.queueTaskTime();
long messageTime = timeLimits.messageTime();
long employeeTime = timeLimits.employeeTime();
long queueTime = timeLimits.queueTime();
for (double d = 0.1; d < 2; d = d + 0.1) {
// NOTE(review): `*=` compounds across iterations rather than rescaling from
// the original limits — presumably intentional shrink/grow sweep; confirm.
paymentTime *= d;
queueTaskTime *= d;
messageTime *= d;
employeeTime *= d;
queueTime *= d;
Commander c = buildCommanderObject(true);
var order = new Order(new User("K", "J"), "pen", 1f);
for (Order.MessageSent ms : Order.MessageSent.values()) {
c.placeOrder(order);
assertFalse(StringUtils.isBlank(order.id));
}
}
} |
// Resolves the most specific template for a locale: candidate keys derived from
// the locale are tried from most specific (e.g. "zh_CN") down to the default,
// returning the first present entry or null when nothing matches.
@Nullable
static NotificationTemplate lookupTemplateByLocale(Locale locale,
Map<String, Optional<NotificationTemplate>> map) {
return LanguageUtils.computeLangFromLocale(locale).stream()
// reverse order to ensure that the variant is the first element and the default
// is the last element
.sorted(Collections.reverseOrder())
.map(key -> map.getOrDefault(key, Optional.empty()))
.filter(Optional::isPresent)
.map(Optional::get)
.findFirst()
.orElse(null);
} | @Test
// zh_CN hits the variant, zh the language, en falls through to the default.
void lookupTemplateByLocaleTest() {
Map<String, Optional<NotificationTemplate>> map = new HashMap<>();
map.put("zh_CN", Optional.of(createNotificationTemplate("zh_CN-template")));
map.put("zh", Optional.of(createNotificationTemplate("zh-template")));
map.put("default", Optional.of(createNotificationTemplate("default-template")));
var sc = ReasonNotificationTemplateSelectorImpl
.lookupTemplateByLocale(Locale.SIMPLIFIED_CHINESE, map);
assertThat(sc).isNotNull();
assertThat(sc.getMetadata().getName()).isEqualTo("zh_CN-template");
var c = ReasonNotificationTemplateSelectorImpl
.lookupTemplateByLocale(Locale.CHINESE, map);
assertThat(c).isNotNull();
assertThat(c.getMetadata().getName()).isEqualTo("zh-template");
var e = ReasonNotificationTemplateSelectorImpl
.lookupTemplateByLocale(Locale.ENGLISH, map);
assertThat(e).isNotNull();
assertThat(e.getMetadata().getName()).isEqualTo("default-template");
} |
// UDF: returns a new list with every occurrence of `victim` filtered out.
// Null input array -> null; Objects.equals makes a null victim removable too.
@Udf
public <T> List<T> remove(
@UdfParameter(description = "Array of values") final List<T> array,
@UdfParameter(description = "Value to remove") final T victim) {
if (array == null) {
return null;
}
return array.stream()
.filter(el -> !Objects.equals(el, victim))
.collect(Collectors.toList());
} | @Test
// Removing "foo" from [null, "foo"] must keep the null element intact.
public void shouldRetainNulls() {
final List<String> input1 = Arrays.asList(null, "foo");
final String input2 = "foo";
final List<String> result = udf.remove(input1, input2);
assertThat(result, contains((String) null));
} |
// iWork media-type detection, newest format first: try the 2018 layout, then
// the 2013 layout, and finally fall back to the classic iWork detector.
@Override
public MediaType detect(ZipFile zipFile, TikaInputStream tis) throws IOException {
MediaType mt = detectIWork18(zipFile);
if (mt != null) {
return mt;
}
mt = detectIWork13(zipFile);
if (mt != null) {
return mt;
}
return detectIWork(zipFile);
} | @Test
// A Keynote 2013 package must be classified as the KEYNOTE13 type.
public void testDetectKeynote13() throws Exception {
String testFile = "/test-documents/testKeynote2013.detect";
IWorkDetector detector = new IWorkDetector();
try (TikaInputStream tis = TikaInputStream.get(getResourceAsStream(testFile));
ZipFile zipFile = ZipFile.builder().setFile(tis.getFile()).get()) {
MediaType result = detector.detect(zipFile, tis);
assertEquals(IWork13DocumentType.KEYNOTE13.getType(), result);
}
} |
// UDF: length of a JSON array string. Null input or JSON that parses to a
// non-array yields null; unparseable JSON propagates the parser's exception.
@Udf
public Integer length(@UdfParameter final String jsonArray) {
if (jsonArray == null) {
return null;
}
final JsonNode node = UdfJsonMapper.parseJson(jsonArray);
if (node.isMissingNode() || !node.isArray()) {
return null;
}
return node.size();
} | @Test(expected = KsqlFunctionException.class)
// Malformed JSON ("abc") must raise KsqlFunctionException from the parser.
public void shouldThrowForInvalidJson() {
udf.length("abc");
} |
// Convenience overload: compiles the regex string and delegates to the
// Pattern-based findAll transform.
public static FindAll findAll(String regex) {
return findAll(Pattern.compile(regex));
} | @Test
@Category(NeedsRunner.class)
// Each output list is [full match, group 1, group 2]; non-matching input ("aj")
// produces no element.
public void testFindAllGroups() {
PCollection<List<String>> output =
p.apply(Create.of("aj", "xjx", "yjy", "zjz")).apply(Regex.findAll("([xyz])j([xyz])"));
PAssert.that(output)
.containsInAnyOrder(
Arrays.asList("xjx", "x", "x"),
Arrays.asList("yjy", "y", "y"),
Arrays.asList("zjz", "z", "z"));
p.run();
} |
// Convenience overload: commits a cluster-state change without the extra
// boolean flag (delegates with `false`).
void commitClusterState(ClusterStateChange newState, Address initiator, UUID txnId) {
commitClusterState(newState, initiator, txnId, false);
} | @Test(expected = NullPointerException.class)
// A null state change must be rejected with NullPointerException.
public void test_changeLocalClusterState_nullState() throws Exception {
clusterStateManager.commitClusterState(null, newAddress(), TXN);
} |
// No-op close: this implementation holds no resources that need releasing.
@Override
public void close() {
} | @Test
// With retries disabled, a continuation-token gap on the local node must surface
// as OFFSET_GAP_FOUND after the rows seen before the gap were delivered.
public void shouldSucceed_gapDetectedLocal_noRetry()
throws ExecutionException, InterruptedException {
// Given:
final AtomicReference<Set<KsqlNode>> nodes = new AtomicReference<>(
ImmutableSet.of(ksqlNodeLocal, ksqlNodeRemote));
final PushRouting routing = new PushRouting(sqr -> nodes.get(), 50, false);
// When:
final PushConnectionsHandle handle = handlePushRouting(routing);
context.runOnContext(v -> {
localPublisher.accept(LOCAL_ROW1);
localPublisher.accept(LOCAL_CONTINUATION_TOKEN1);
localPublisher.accept(LOCAL_ROW2);
localPublisher.accept(LOCAL_CONTINUATION_TOKEN_GAP);
});
Set<List<?>> rows = waitOnRows(2);
waitOnNodeStatus(handle, ksqlNodeLocal, RoutingResultStatus.OFFSET_GAP_FOUND);
handle.close();
// Then:
assertThat(rows.contains(LOCAL_ROW1.value().values()), is(true));
assertThat(rows.contains(LOCAL_ROW2.value().values()), is(true));
assertThat(handle.get(ksqlNodeLocal).get().getStatus(),
is(RoutingResultStatus.OFFSET_GAP_FOUND));
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.