| focal_method (string, 13–60.9k chars) | test_case (string, 25–109k chars) |
|---|---|
/**
 * Merges two RLE-encoded sparse resource allocations over the window
 * [start, end] using the given point-wise operator.
 *
 * @param resCalc resource calculator used for the point-wise operation
 * @param clusterResource total cluster capacity, passed to the calculator
 * @param a first allocation
 * @param b second allocation
 * @param operator point-wise combiner (e.g. add/subtract)
 * @param start inclusive start of the merge window
 * @param end end of the merge window
 * @return a new allocation holding the merged cumulative values
 * @throws PlanningException if the underlying merge rejects the inputs
 */
public static RLESparseResourceAllocation merge(ResourceCalculator resCalc,
    Resource clusterResource, RLESparseResourceAllocation a,
    RLESparseResourceAllocation b, RLEOperator operator, long start, long end)
    throws PlanningException {
  // Restrict both operands to the requested window before combining their
  // cumulative step functions.
  final NavigableMap<Long, Resource> cumulativeA =
      a.getRangeOverlapping(start, end).getCumulative();
  final NavigableMap<Long, Resource> cumulativeB =
      b.getRangeOverlapping(start, end).getCumulative();
  final NavigableMap<Long, Resource> merged =
      merge(resCalc, clusterResource, cumulativeA, cumulativeB, start, end,
          operator);
  return new RLESparseResourceAllocation(merged, resCalc);
}
|
/**
 * Micro-benchmark for {@code RLESparseResourceAllocation.merge}: builds two
 * random step functions (1k-10k intervals each), merges them with
 * {@code RLEOperator.add} over the full long range, and prints wall-clock
 * time per iteration. Marked {@code @Ignore} because it measures speed and
 * asserts nothing.
 */
@Test
@Ignore
public void testMergeSpeed() throws PlanningException {
  for (int j = 0; j < 100; j++) {
    TreeMap<Long, Resource> a = new TreeMap<>();
    TreeMap<Long, Resource> b = new TreeMap<>();
    Random rand = new Random();
    long startA = 0;
    long startB = 0;
    // Keys are non-decreasing; equal keys simply overwrite earlier entries.
    for (int i = 0; i < 1000 + rand.nextInt(9000); i++) {
      startA += rand.nextInt(100);
      startB += rand.nextInt(100);
      a.put(startA,
          Resource.newInstance(rand.nextInt(10240), rand.nextInt(10)));
      b.put(startB,
          Resource.newInstance(rand.nextInt(10240), rand.nextInt(10)));
    }
    RLESparseResourceAllocation rleA =
        new RLESparseResourceAllocation(a, new DefaultResourceCalculator());
    RLESparseResourceAllocation rleB =
        new RLESparseResourceAllocation(b, new DefaultResourceCalculator());
    // Time only the merge itself, not fixture construction.
    long start = System.currentTimeMillis();
    RLESparseResourceAllocation out =
        RLESparseResourceAllocation.merge(new DefaultResourceCalculator(),
            Resource.newInstance(100 * 128 * 1024, 100 * 32), rleA, rleB,
            RLEOperator.add, Long.MIN_VALUE, Long.MAX_VALUE);
    long end = System.currentTimeMillis();
    System.out.println(" Took: " + (end - start) + "ms ");
  }
}
|
/**
 * Builds the next span for a Kafka Streams processing step: extracts any
 * propagated trace context from the record headers, strips those headers so
 * the context is not re-consumed downstream, and tags the span from the
 * processing context unless it is a no-op.
 */
<C extends ProcessingContext> Span nextSpan(C context, Headers headers) {
  TraceContextOrSamplingFlags extracted = extractor.extract(headers);
  // Only scrub headers when something was actually extracted.
  boolean carriesPropagationState = !extracted.equals(emptyExtraction);
  if (carriesPropagationState) {
    clearHeaders(headers);
  }
  Span result = tracer.nextSpan(extracted);
  if (result.isNoop()) {
    return result; // Skip tagging work for unsampled spans.
  }
  addTags(context, result);
  return result;
}
|
/**
 * A span created while a trace context is in scope must become a child of
 * that context: the child's parent id equals the scoped parent's span id.
 */
@Test void nextSpan_uses_current_context() {
  ProcessorContext<String, String> fakeProcessorContext = processorV2ContextSupplier.get();
  Span child;
  try (Scope scope = tracing.currentTraceContext().newScope(parent)) {
    child = kafkaStreamsTracing.nextSpan(fakeProcessorContext, new RecordHeaders());
  }
  child.finish();
  assertThat(child.context().parentIdString()).isEqualTo(parent.spanIdString());
}
|
/**
 * A key is "mine" exactly when this node is the current leader of the
 * partition the key hashes into.
 */
@Override
public <K> boolean isMine(K id, Function<K, Long> hasher) {
  // Objects.equals is null-safe and symmetric, so operand order is free.
  return Objects.equals(getLeader(id, hasher), localNodeId);
}
|
/**
 * Exercises {@code isMine} as partition ownership changes: owning only the
 * first partition makes only keys hashing to it ours; expanding ownership to
 * four partitions makes additional keys ours, while keys hashing outside the
 * owned range remain foreign.
 */
@Test
public void testIsMine() {
  // We'll own only the first partition
  setUpLeadershipService(1);
  replay(leadershipService);
  Key myKey = new ControllableHashKey(0);
  Key notMyKey = new ControllableHashKey(1);
  assertTrue(partitionManager.isMine(myKey, Key::hash));
  assertFalse(partitionManager.isMine(notMyKey, Key::hash));
  // Make us the owner of 4 partitions now
  reset(leadershipService);
  setUpLeadershipService(4);
  replay(leadershipService);
  assertTrue(partitionManager.isMine(myKey, Key::hash));
  // notMyKey is now my key because we're in control of that
  // partition now
  assertTrue(partitionManager.isMine(notMyKey, Key::hash));
  assertFalse(partitionManager.isMine(new ControllableHashKey(4), Key::hash));
}
|
/**
 * Packs the six address bytes big-endian (byte 0 most significant) into the
 * low 48 bits of a long; the top 16 bits are always zero.
 */
public long toLong() {
  long value = 0;
  for (int i = 0; i < 6; i++) {
    // Shift the accumulator up one byte, then append the next octet.
    value = (value << 8) | (this.address[i] & 0xffL);
  }
  return value;
}
|
/** Verifies the big-endian packing of the ONOS MAC address into a long. */
@Test
public void testToLong() throws Exception {
  assertEquals(MAC_ONOS_LONG, MAC_ONOS.toLong());
}
|
/**
 * Creates an array of the class named by {@code fqn}, with one length per
 * nesting level in {@code dimensions} (e.g. {3, 4} -> a 3x4 matrix).
 *
 * @param fqn fully-qualified class name of the component type
 * @param dimensions length of each array dimension, outermost first
 * @return the newly allocated array
 * @throws Py4JException if the class cannot be resolved or the array
 *     cannot be instantiated
 */
public Object createArray(String fqn, int[] dimensions) {
  try {
    final Class<?> componentType = TypeUtil.forName(fqn);
    return Array.newInstance(componentType, dimensions);
  } catch (Exception e) {
    // Log before wrapping so the server log retains the root cause.
    logger.log(Level.WARNING, "Class FQN does not exist: " + fqn, e);
    throw new Py4JException(e);
  }
}
|
/**
 * Creates a 1-D primitive int array and a 3x4 String matrix, and checks the
 * lengths of each resulting dimension.
 */
@Test
public void testCreateArray() {
  Object array1 = rEngine.createArray("int", new int[] { 2 });
  int[] array1int = (int[]) array1;
  assertEquals(2, array1int.length);
  array1 = rEngine.createArray("java.lang.String", new int[] { 3, 4 });
  String[][] array1String = (String[][]) array1;
  assertEquals(3, array1String.length);
  assertEquals(4, array1String[0].length);
}
|
/**
 * Builds the stream for this data-source node by delegating to the
 * stream factory with a context stacked under this node's id and the
 * source-operation name.
 */
@Override
public SchemaKStream<?> buildStream(final PlanBuildContext buildContext) {
  final Stacker nodeContext = buildContext.buildNodeContext(getId().toString());
  final Stacker sourceContext = nodeContext.push(SOURCE_OP_NAME);
  return schemaKStreamFactory.create(buildContext, dataSource, sourceContext);
}
|
/**
 * When the data source is a KTable, {@code buildStream} must produce a
 * {@code SchemaKTable} rather than a plain {@code SchemaKStream}.
 */
@Test
public void shouldBuildSchemaKTableWhenKTableSource() {
  // Given:
  final KsqlTable<String> table = new KsqlTable<>("sqlExpression",
      SourceName.of("datasource"),
      REAL_SCHEMA,
      Optional.of(TIMESTAMP_COLUMN),
      false,
      new KsqlTopic(
          "topic2",
          KeyFormat.nonWindowed(FormatInfo.of(FormatFactory.KAFKA.name()), SerdeFeatures.of()),
          ValueFormat.of(FormatInfo.of(FormatFactory.JSON.name()), SerdeFeatures.of())
      ),
      false
  );
  node = new DataSourceNode(
      PLAN_NODE_ID,
      table,
      table.getName(),
      false
  );
  // When:
  final SchemaKStream<?> result = buildStream(node);
  // Then:
  assertThat(result.getClass(), equalTo(SchemaKTable.class));
}
|
/**
 * Parses an XEP-0082 (ISO 8601) date-time string, with or without
 * milliseconds, into a {@link Date}.
 *
 * @param dateString the date-time string; may be null or empty
 * @return the parsed date, or {@code null} when the input is null/empty
 * @throws ParseException if the input matches neither supported pattern
 */
public Date parseString(String dateString) throws ParseException {
  if (dateString == null || dateString.isEmpty()) {
    return null;
  }
  Matcher xep82WoMillisMatcher = xep80DateTimeWoMillisPattern.matcher(dateString);
  Matcher xep82Matcher = xep80DateTimePattern.matcher(dateString);
  if (xep82WoMillisMatcher.matches() || xep82Matcher.matches()) {
    String rfc822Date;
    // Convert the ISO 8601 time zone string to a RFC822 compatible format
    // since SimpleDateFormat supports ISO8601 only with Java7 or higher
    if (dateString.charAt(dateString.length() - 1) == 'Z') {
      rfc822Date = dateString.replace("Z", "+0000");
    } else {
      // If the time zone wasn't specified with 'Z', then it's in
      // ISO8601 format (i.e. '(+|-)HH:mm')
      // RFC822 needs a similar format just without the colon (i.e.
      // '(+|-)HHmm)'), so remove it
      int lastColon = dateString.lastIndexOf(':');
      rfc822Date = dateString.substring(0, lastColon) + dateString.substring(lastColon + 1);
    }
    if (xep82WoMillisMatcher.matches()) {
      // SimpleDateFormat is not thread-safe, hence the synchronization.
      synchronized (dateTimeFormatWoMillies) {
        return dateTimeFormatWoMillies.parse(rfc822Date);
      }
    } else {
      // OF-898: Replace any number of millisecond-characters with at most three of them.
      rfc822Date = rfc822Date.replaceAll("(\\.[0-9]{3})[0-9]*", "$1");
      synchronized (dateTimeFormat) {
        return dateTimeFormat.parse(rfc822Date);
      }
    }
  }
  throw new ParseException("Date String could not be parsed: \"" + dateString + "\"", 0);
}
|
/**
 * A ParseException thrown for unparseable input should echo the offending
 * value in its message, to aid debugging.
 */
@Test
public void testExceptionContainsOffendingValue() throws Exception
{
  // Setup fixture
  final String testValue = "This is not a valid date value";
  // Execute system under test
  try {
    xmppDateTimeFormat.parseString(testValue);
    // Verify results
    fail("An exception should have been thrown (but was not).");
  } catch (ParseException e) {
    assertTrue(e.getMessage().contains(testValue));
  }
}
|
/**
 * Returns the table's cells as rows of column values.
 * NOTE(review): the sibling test expects the returned rows to be
 * unmodifiable, so {@code raw} is presumably built immutable — confirm
 * against the constructor.
 */
public List<List<String>> cells() {
  return raw;
}
|
/** Removing a cell from a row returned by cells() must throw: rows are unmodifiable. */
@Test
void cells_col_is_immutable() {
  assertThrows(UnsupportedOperationException.class, () -> createSimpleTable().cells().get(0).remove(0));
}
|
/**
 * Collects the config key for every concrete {@code ConfigProducer} interface
 * implemented (directly or transitively) by the given producer class, each
 * key scoped to {@code configId}.
 *
 * @param producerClass the producer whose produced configs are collected
 * @param configId      the config id each resulting key is associated with
 * @return insertion-ordered set of produced config keys
 */
public static Set<ConfigKey<?>> getAllConfigsProduced(Class<? extends ConfigProducer> producerClass, String configId) {
    // TypeToken is @Beta in guava, so consider implementing a simple recursive method instead.
    TypeToken<? extends ConfigProducer>.TypeSet interfaces = TypeToken.of(producerClass).getTypes().interfaces();
    return interfaces.rawTypes().stream()
            .filter(ReflectionUtil::isConcreteProducer)
            .map(i -> createConfigKeyFromInstance(i.getEnclosingClass(), configId))
            // LinkedHashSet keeps deterministic insertion order; the method
            // reference replaces the equivalent lambda supplier.
            .collect(Collectors.toCollection(LinkedHashSet::new));
}
|
/** A producer directly implementing one config interface yields exactly that one config key. */
@Test
void getAllConfigsProduced_includes_configs_directly_implemented_by_producer() {
  Set<ConfigKey<?>> configs = getAllConfigsProduced(SimpleProducer.class, "foo");
  assertEquals(1, configs.size());
  assertTrue(configs.contains(new ConfigKey<>(SimpletypesConfig.CONFIG_DEF_NAME, "foo", SimpletypesConfig.CONFIG_DEF_NAMESPACE)));
}
|
/**
 * Returns the filter's resolved configuration properties.
 * NOTE(review): presumably populated during filter init from the servlet
 * FilterConfig — confirm against the enclosing class.
 */
protected Properties getConfiguration() {
  return config;
}
|
/**
 * getConfiguration must copy init parameters into Properties, stripping the
 * given prefix: once with an empty prefix, once with "foo." so that "foo.a"
 * surfaces as "a".
 */
@Test
public void testGetConfiguration() throws Exception {
  AuthenticationFilter filter = new AuthenticationFilter();
  // Case 1: empty prefix — parameter names pass through unchanged.
  FilterConfig config = Mockito.mock(FilterConfig.class);
  Mockito.when(config.getInitParameter(AuthenticationFilter.CONFIG_PREFIX)).thenReturn("");
  Mockito.when(config.getInitParameter("a")).thenReturn("A");
  Mockito.when(config.getInitParameterNames()).thenReturn(new Vector<String>(Arrays.asList("a")).elements());
  Properties props = filter.getConfiguration("", config);
  Assert.assertEquals("A", props.getProperty("a"));
  // Case 2: "foo." prefix — "foo.a" is exposed as plain "a".
  config = Mockito.mock(FilterConfig.class);
  Mockito.when(config.getInitParameter(AuthenticationFilter.CONFIG_PREFIX)).thenReturn("foo");
  Mockito.when(config.getInitParameter("foo.a")).thenReturn("A");
  Mockito.when(config.getInitParameterNames()).thenReturn(new Vector<String>(Arrays.asList("foo.a")).elements());
  props = filter.getConfiguration("foo.", config);
  Assert.assertEquals("A", props.getProperty("a"));
}
|
/**
 * REST endpoint returning overall node information, served as JSON or XML
 * (both with an explicit UTF-8 charset in the content type).
 */
@GET
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
    MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public NodeInfo get() {
  return getNodeInfo();
}
|
/**
 * With no Accept header, /ws/v1/node/info must default to JSON (UTF-8) and
 * return a well-formed node-info document.
 */
@Test
public void testNodeInfoDefault() throws JSONException, Exception {
  WebResource r = resource();
  ClientResponse response = r.path("ws").path("v1").path("node").path("info")
      .get(ClientResponse.class);
  assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
      response.getType().toString());
  JSONObject json = response.getEntity(JSONObject.class);
  verifyNodeInfo(json);
}
|
/**
 * Registers the origin-registry switch as a composite property source:
 * placed directly after the dynamic-config source when that is present,
 * otherwise as the highest-priority source.
 */
@Override
public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) {
    final CompositePropertySource composite = new CompositePropertySource(SOURCE_NAME);
    composite.addPropertySource(new OriginRegistrySwitchSource(SOURCE_NAME));
    final boolean dynamicPresent =
            environment.getPropertySources().contains(DYNAMIC_PROPERTY_NAME);
    if (dynamicPresent) {
        // Keep dynamic configuration above us in precedence.
        environment.getPropertySources().addAfter(DYNAMIC_PROPERTY_NAME, composite);
    } else {
        environment.getPropertySources().addFirst(composite);
    }
}
|
/**
 * Covers both placement branches of postProcessEnvironment: with no dynamic
 * source the composite is added first; with the dynamic source present the
 * composite lands immediately after it (index 1, i.e. the 2nd element).
 */
@Test
public void test() {
  final SpringEnvironmentProcessor springEnvironmentProcessor = new SpringEnvironmentProcessor();
  final ConfigurableEnvironment environment = Mockito.mock(ConfigurableEnvironment.class);
  final MutablePropertySources propertySources = new MutablePropertySources();
  Mockito.when(environment.getPropertySources()).thenReturn(propertySources);
  // Branch 1: empty sources -> composite added as the only/first source.
  springEnvironmentProcessor.postProcessEnvironment(environment, null);
  Assert.assertTrue(propertySources.size() > 0);
  final Optional<Object> sourceName = ReflectUtils.getFieldValue(springEnvironmentProcessor, "SOURCE_NAME");
  Assert.assertTrue(sourceName.isPresent());
  propertySources.remove((String) sourceName.get());
  assertEquals(0, propertySources.size());
  // Branch 2: pre-populate several sources plus the dynamic one at the top.
  propertySources.addFirst(new CompositePropertySource("test1"));
  propertySources.addFirst(new CompositePropertySource("test2"));
  propertySources.addFirst(new CompositePropertySource("test3"));
  final Optional<Object> dynamicPropertyName = ReflectUtils.getFieldValue(springEnvironmentProcessor, "DYNAMIC_PROPERTY_NAME");
  Assert.assertTrue(dynamicPropertyName.isPresent());
  propertySources.addFirst(new CompositePropertySource((String) dynamicPropertyName.get()));
  springEnvironmentProcessor.postProcessEnvironment(environment, null);
  // Walk to the 2nd source: it must be our composite, right after the dynamic one.
  final Iterator<PropertySource<?>> iterator = propertySources.iterator();
  int index = 2;
  PropertySource<?> expectedSource = null;
  while (iterator.hasNext() && index-- > 0) {
    expectedSource = iterator.next();
  }
  Assert.assertNotNull(expectedSource);
  Assert.assertEquals(expectedSource.getName(), sourceName.get());
}
|
/**
 * Returns the timestamped values whose sort key lies in
 * [minTimestamp, limitTimestamp): locally pending additions, merged (unless
 * the state was cleared) with persisted values fetched from the state
 * backend, minus any pending removals. The returned iterable is a snapshot:
 * later mutations of this state object do not change it.
 *
 * @throws IllegalStateException if this state object has been closed
 */
public Iterable<TimestampedValue<T>> readRange(Instant minTimestamp, Instant limitTimestamp) {
  checkState(
      !isClosed,
      "OrderedList user state is no longer usable because it is closed for %s",
      requestTemplate.getStateKey());
  // Store pendingAdds whose sort key is in the query range and values are truncated by the
  // current size. The values (collections) of pendingAdds are kept, so that they will still be
  // accessible in pre-existing iterables even after:
  // (1) a sort key is added to or removed from pendingAdds, or
  // (2) a new value is added to an existing sort key
  ArrayList<PrefetchableIterable<TimestampedValue<T>>> pendingAddsInRange = new ArrayList<>();
  for (Entry<Instant, Collection<T>> kv :
      pendingAdds.subMap(minTimestamp, limitTimestamp).entrySet()) {
    pendingAddsInRange.add(
        PrefetchableIterables.limit(
            Iterables.transform(kv.getValue(), (v) -> TimestampedValue.of(v, kv.getKey())),
            kv.getValue().size()));
  }
  Iterable<TimestampedValue<T>> valuesInRange = Iterables.concat(pendingAddsInRange);
  if (!isCleared) {
    // Build a backend read request restricted to the queried range.
    StateRequest.Builder getRequestBuilder = this.requestTemplate.toBuilder();
    getRequestBuilder
        .getStateKeyBuilder()
        .getOrderedListUserStateBuilder()
        .getRangeBuilder()
        .setStart(minTimestamp.getMillis())
        .setEnd(limitTimestamp.getMillis());
    // TODO: consider use cache here
    CachingStateIterable<TimestampedValue<T>> persistentValues =
        StateFetchingIterators.readAllAndDecodeStartingFrom(
            Caches.noop(),
            this.beamFnStateClient,
            getRequestBuilder.build(),
            this.timestampedValueCoder);
    // Make a snapshot of the current pendingRemoves and use them to filter persistent values.
    // The values of pendingRemoves are copied, so that they will still be accessible in
    // pre-existing iterables even after a sort key is removed.
    TreeRangeSet<Instant> pendingRemovesSnapshot = TreeRangeSet.create(pendingRemoves);
    Iterable<TimestampedValue<T>> persistentValuesAfterRemoval =
        Iterables.filter(
            persistentValues, v -> !pendingRemovesSnapshot.contains(v.getTimestamp()));
    // Both inputs are ordered by timestamp, so a sorted merge preserves order.
    return Iterables.mergeSorted(
        ImmutableList.of(persistentValuesAfterRemoval, valuesInRange),
        Comparator.comparing(TimestampedValue::getTimestamp));
  }
  return valuesInRange;
}
|
/**
 * readRange over [2, 4) must return persisted values in range, previously
 * returned iterables must stay stable snapshots as new values are added, and
 * reads after close must fail with IllegalStateException.
 */
@Test
public void testReadRange() throws Exception {
  FakeBeamFnStateClient fakeClient =
      new FakeBeamFnStateClient(
          timestampedValueCoder,
          ImmutableMap.of(
              createOrderedListStateKey("A", 1), asList(A1, B1),
              createOrderedListStateKey("A", 4), Collections.singletonList(A4),
              createOrderedListStateKey("A", 2), Collections.singletonList(A2)));
  OrderedListUserState<String> userState =
      new OrderedListUserState<>(
          Caches.noop(),
          fakeClient,
          "instructionId",
          createOrderedListStateKey("A"),
          StringUtf8Coder.of());
  Iterable<TimestampedValue<String>> stateBeforeB2 =
      userState.readRange(Instant.ofEpochMilli(2), Instant.ofEpochMilli(4));
  assertArrayEquals(
      Collections.singletonList(A2).toArray(),
      Iterables.toArray(stateBeforeB2, TimestampedValue.class));
  // Add a new value to an existing sort key
  userState.add(B2);
  // The earlier iterable is a snapshot and must not see B2.
  assertArrayEquals(
      Collections.singletonList(A2).toArray(),
      Iterables.toArray(stateBeforeB2, TimestampedValue.class));
  assertArrayEquals(
      asList(A2, B2).toArray(),
      Iterables.toArray(
          userState.readRange(Instant.ofEpochMilli(2), Instant.ofEpochMilli(4)),
          TimestampedValue.class));
  // Add a new value to a new sort key
  userState.add(A3);
  assertArrayEquals(
      Collections.singletonList(A2).toArray(),
      Iterables.toArray(stateBeforeB2, TimestampedValue.class));
  assertArrayEquals(
      asList(A2, B2, A3).toArray(),
      Iterables.toArray(
          userState.readRange(Instant.ofEpochMilli(2), Instant.ofEpochMilli(4)),
          TimestampedValue.class));
  userState.asyncClose();
  assertThrows(
      IllegalStateException.class,
      () -> userState.readRange(Instant.ofEpochMilli(1), Instant.ofEpochMilli(2)));
}
|
/**
 * FUSE getattr callback: delegates to {@code getattrInternal}, wrapped in
 * the shared call helper for uniform logging/metrics under
 * {@code FUSE_GETATTR}.
 */
@Override
public int getattr(String path, FileStat stat) {
  return AlluxioFuseUtils.call(
      LOG, () -> getattrInternal(path, stat), FuseConstants.FUSE_GETATTR, "path=%s", path);
}
|
/**
 * getattr on an incomplete, not-being-written file should poll getStatus
 * until the file completes, then report the final length.
 * NOTE(review): relies on a fixed 1s sleep and a background thread for
 * timing — potentially flaky on slow CI; confirm before re-enabling.
 */
@Test
@DoraTestTodoItem(action = DoraTestTodoItem.Action.FIX, owner = "LuQQiu")
@Ignore
public void getattrWithDelay() throws Exception {
  String path = "/foo/bar";
  AlluxioURI expectedPath = BASE_EXPECTED_URI.join("/foo/bar");
  // set up status
  FileInfo info = new FileInfo();
  info.setLength(0);
  info.setCompleted(false);
  URIStatus status = new URIStatus(info);
  // mock fs
  when(mFileSystem.getStatus(any(AlluxioURI.class))).thenReturn(status);
  FileStat stat = FileStat.of(ByteBuffer.allocateDirect(256));
  // Use another thread to open file so that
  // we could change the file status when opening it
  Thread t = new Thread(() -> mFuseFs.getattr(path, stat));
  t.start();
  Thread.sleep(1000);
  // If the file is not being written and is not completed,
  // we will wait for the file to complete
  verify(mFileSystem, atLeast(10)).getStatus(expectedPath);
  assertEquals(0, stat.st_size.longValue());
  info.setCompleted(true);
  info.setLength(1000);
  t.join();
  assertEquals(1000, stat.st_size.longValue());
}
|
/**
 * Reads a PEM-encoded private key file and returns the decoded DER bytes of
 * the first key block found.
 *
 * @param path file expected to contain a PEM private key
 * @return the Base64-decoded key material
 * @throws KeyException if the file cannot be read or contains no key block
 */
static byte[] readPrivateKey(Path path) throws KeyException {
  final String content;
  try {
    content = new String(Files.readAllBytes(path), StandardCharsets.US_ASCII);
  } catch (IOException e) {
    throw new KeyException("Couldn't read private key from file: " + path, e);
  }
  final Matcher matcher = KEY_PATTERN.matcher(content);
  if (!matcher.find()) {
    throw new KeyException("No private key found in file: " + path);
  }
  // Strip all whitespace/newlines from the captured Base64 body, then decode.
  final String base64 = CharMatcher.breakingWhitespace().removeFrom(matcher.group(1));
  return Base64.getDecoder().decode(base64.getBytes(StandardCharsets.US_ASCII));
}
|
/** An empty file contains no PEM key block, so reading it must raise KeyException. */
@Test(expected = KeyException.class)
public void readPrivateKeyFailsOnInvalidFile() throws Exception {
  final File file = temporaryFolder.newFile();
  PemReader.readPrivateKey(file.toPath());
}
|
/**
 * Creates a new index set from the submitted configuration: validates data
 * tiering and the derived config, persists it, and returns the summary
 * flagged with whether the new set is the default.
 *
 * @param indexSet the index-set configuration to create
 * @return summary of the persisted index set
 * @throws BadRequestException on validation failure or duplicate index set
 */
@POST
@Timed
@ApiOperation(value = "Create index set")
@RequiresPermissions(RestPermissions.INDEXSETS_CREATE)
@Consumes(MediaType.APPLICATION_JSON)
@AuditEvent(type = AuditEventTypes.INDEX_SET_CREATE)
@ApiResponses(value = {
    @ApiResponse(code = 403, message = "Unauthorized"),
})
public IndexSetSummary save(@ApiParam(name = "Index set configuration", required = true)
                            @Valid @NotNull IndexSetSummary indexSet) {
  try {
    checkDataTieringNotNull(indexSet.useLegacyRotation(), indexSet.dataTieringConfig());
    final IndexSetConfig indexSetConfig = indexSet.toIndexSetConfig(true);
    final Optional<Violation> violation = indexSetValidator.validate(indexSetConfig);
    if (violation.isPresent()) {
      throw new BadRequestException(violation.get().message());
    }
    final IndexSetConfig savedObject = indexSetService.save(indexSetConfig);
    // Report whether the just-saved set happens to be the default one.
    final IndexSetConfig defaultIndexSet = indexSetService.getDefault();
    return IndexSetSummary.fromIndexSetConfig(savedObject, savedObject.equals(defaultIndexSet));
  } catch (DuplicateKeyException e) {
    // Surface a duplicate index set as a client error, not a 500.
    throw new BadRequestException(e.getMessage());
  }
}
|
/**
 * Saving an index set without permission must be rejected with
 * ForbiddenException before the service layer is ever touched.
 * Ignored: the permission check via {@code @RequiresPermissions} is not
 * active in this unit-test setup.
 */
@Test
@Ignore("Currently doesn't work with @RequiresPermissions")
public void saveDenied() {
  notPermitted();
  final IndexSetConfig indexSetConfig = IndexSetConfig.create(
      "title",
      "description",
      true, true,
      "prefix",
      1,
      0,
      MessageCountRotationStrategy.class.getCanonicalName(),
      MessageCountRotationStrategyConfig.create(1000),
      NoopRetentionStrategy.class.getCanonicalName(),
      NoopRetentionStrategyConfig.create(1),
      ZonedDateTime.of(2016, 10, 10, 12, 0, 0, 0, ZoneOffset.UTC),
      "standard",
      "index-template",
      null,
      1,
      false
  );
  expectedException.expect(ForbiddenException.class);
  expectedException.expectMessage("Not authorized to access resource id <id>");
  try {
    indexSetsResource.save(IndexSetSummary.fromIndexSetConfig(indexSetConfig, false));
  } finally {
    // The service must never be reached when authorization fails.
    verifyNoMoreInteractions(indexSetService);
  }
}
|
/**
 * Routes a message to streams by evaluating each stream rule. AND-matching
 * streams are dropped (and blacklisted) on the first failing rule; an
 * OR-matching stream is blacklisted once it has matched. Finally, streams
 * configured to remove matches from the default stream strip the default
 * stream from the message.
 *
 * @param message the message to route
 * @return immutable list of streams the message belongs to
 */
public List<Stream> match(Message message) {
  final Set<Stream> result = Sets.newHashSet();
  // Streams whose outcome is already decided; skip their remaining rules.
  final Set<String> blackList = Sets.newHashSet();
  for (final Rule rule : rulesList) {
    if (blackList.contains(rule.getStreamId())) {
      continue;
    }
    final StreamRule streamRule = rule.getStreamRule();
    final StreamRuleType streamRuleType = streamRule.getType();
    final Stream.MatchingType matchingType = rule.getMatchingType();
    // A missing field fails the rule unless the rule type doesn't need one.
    if (!ruleTypesNotNeedingFieldPresence.contains(streamRuleType)
        && !message.hasField(streamRule.getField())) {
      if (matchingType == Stream.MatchingType.AND) {
        result.remove(rule.getStream());
        // blacklist stream because it can't match anymore
        blackList.add(rule.getStreamId());
      }
      continue;
    }
    final Stream stream;
    if (streamRuleType != StreamRuleType.REGEX) {
      stream = rule.match(message);
    } else {
      // Regex evaluation is bounded to guard against catastrophic backtracking.
      stream = rule.matchWithTimeOut(message, streamProcessingTimeout, TimeUnit.MILLISECONDS);
    }
    if (stream == null) {
      if (matchingType == Stream.MatchingType.AND) {
        result.remove(rule.getStream());
        // blacklist stream because it can't match anymore
        blackList.add(rule.getStreamId());
      }
    } else {
      result.add(stream);
      if (matchingType == Stream.MatchingType.OR) {
        // blacklist stream because it is already matched
        blackList.add(rule.getStreamId());
      }
    }
  }
  final Stream defaultStream = defaultStreamProvider.get();
  boolean alreadyRemovedDefaultStream = false;
  for (Stream stream : result) {
    if (stream.getRemoveMatchesFromDefaultStream()) {
      if (alreadyRemovedDefaultStream || message.removeStream(defaultStream)) {
        alreadyRemovedDefaultStream = true;
        if (LOG.isTraceEnabled()) {
          LOG.trace("Successfully removed default stream <{}> from message <{}>", defaultStream.getId(), message.getId());
        }
      } else {
        // A previously executed message processor (or Illuminate) has likely already removed the
        // default stream from the message. Now, the message has matched a stream in the Graylog
        // MessageFilterChain, and the matching stream is also set to remove the default stream.
        // This is usually from user-defined stream rules, and is generally not a problem.
        cannotRemoveDefaultMeter.inc();
        if (LOG.isTraceEnabled()) {
          LOG.trace("Couldn't remove default stream <{}> from message <{}>", defaultStream.getId(), message.getId());
        }
      }
    }
  }
  return ImmutableList.copyOf(result);
}
|
/**
 * A GREATER rule with threshold 1 must not match a field value of 1 but must
 * match 2.
 */
@Test
public void testGreaterMatch() throws Exception {
  final StreamMock stream = getStreamMock("test");
  final StreamRuleMock rule = new StreamRuleMock(ImmutableMap.of(
      "_id", new ObjectId(),
      "field", "testfield",
      "value", "1",
      "type", StreamRuleType.GREATER.toInteger(),
      "stream_id", stream.getId()
  ));
  stream.setStreamRules(Lists.newArrayList(rule));
  final StreamRouterEngine engine = newEngine(Lists.newArrayList(stream));
  final Message message = getMessage();
  // With smaller value.
  message.addField("testfield", "1");
  assertTrue(engine.match(message).isEmpty());
  // With greater value.
  message.addField("testfield", "2");
  assertEquals(Lists.newArrayList(stream), engine.match(message));
}
|
/**
 * Returns {@code true} when {@code t} or any throwable in its cause chain is
 * an instance of {@code exceptionClass} (or a subclass of it).
 *
 * @param t the throwable whose cause chain is inspected; must not be null
 * @param exceptionClass the exception type to search for
 * @return whether the chain contains an instance of the given type
 */
public static boolean hasCause(Throwable t, Class<? extends Throwable> exceptionClass) {
    // Bug fix: the original tested t.getClass().isAssignableFrom(exceptionClass),
    // which checks the inverse relation (exceptionClass being a subtype of t's
    // class). That misses e.g. a FileNotFoundException when searching for
    // IOException. isInstance gives the intended instanceof semantics.
    if (exceptionClass.isInstance(t)) {
        return true;
    }
    if (t.getCause() != null) {
        return hasCause(t.getCause(), exceptionClass);
    }
    return false;
}
|
/** A RuntimeException -> IllegalStateException chain contains no InterruptedException. */
@Test
void hasCauseReturnsFalseIfExceptionDoesNotHaveGivenCause() {
  final boolean hasInterruptedExceptionAsCause = Exceptions.hasCause(new RuntimeException(new IllegalStateException()), InterruptedException.class);
  assertThat(hasInterruptedExceptionAsCause).isFalse();
}
|
/**
 * Parses the given string as a JSON array.
 *
 * @param jsonStr JSON array text, e.g. {@code "[1,2,3]"}
 * @return the parsed {@link JSONArray}
 */
public static JSONArray parseArray(String jsonStr) {
  return new JSONArray(jsonStr);
}
|
/**
 * Parsing a bare number with ignoreError enabled should still yield a
 * non-null array.
 * NOTE(review): this exercises the (Object, JSONConfig) overload of
 * parseArray, not the single-String overload shown as the focal method.
 */
@Test
public void parseNumberToJSONArrayTest2() {
  final JSONArray json = JSONUtil.parseArray(123L,
      JSONConfig.create().setIgnoreError(true));
  assertNotNull(json);
}
|
/**
 * Submits a create-topics request for the given topics. Unrepresentable
 * topic names fail their future immediately with InvalidTopicException;
 * duplicate names share a single future; only representable, deduplicated
 * topics are sent to the broker.
 *
 * @param newTopics topics to create
 * @param options request options (timeout etc.)
 * @return a result holding one future per distinct topic name
 */
@Override
public CreateTopicsResult createTopics(final Collection<NewTopic> newTopics,
                                       final CreateTopicsOptions options) {
  final Map<String, KafkaFutureImpl<TopicMetadataAndConfig>> topicFutures = new HashMap<>(newTopics.size());
  final CreatableTopicCollection topics = new CreatableTopicCollection();
  for (NewTopic newTopic : newTopics) {
    if (topicNameIsUnrepresentable(newTopic.name())) {
      // Fail fast locally; the broker never sees this topic.
      KafkaFutureImpl<TopicMetadataAndConfig> future = new KafkaFutureImpl<>();
      future.completeExceptionally(new InvalidTopicException("The given topic name '" +
          newTopic.name() + "' cannot be represented in a request."));
      topicFutures.put(newTopic.name(), future);
    } else if (!topicFutures.containsKey(newTopic.name())) {
      topicFutures.put(newTopic.name(), new KafkaFutureImpl<>());
      topics.add(newTopic.convertToCreatableTopic());
    }
  }
  // Only issue a network call when at least one topic is actually sendable.
  if (!topics.isEmpty()) {
    final long now = time.milliseconds();
    final long deadline = calcDeadlineMs(now, options.timeoutMs());
    final Call call = getCreateTopicsCall(options, topicFutures, topics,
        Collections.emptyMap(), now, deadline);
    runnable.call(call, now);
  }
  // Defensive copy so callers can't mutate our internal future map.
  return new CreateTopicsResult(new HashMap<>(topicFutures));
}
|
/**
 * With retryOnQuotaViolation enabled, a THROTTLING_QUOTA_EXCEEDED error must
 * be retried (only for the throttled topic) until it succeeds, while
 * TOPIC_ALREADY_EXISTS still fails its future.
 */
@Test
public void testCreateTopicsRetryThrottlingExceptionWhenEnabled() throws Exception {
  try (AdminClientUnitTestEnv env = mockClientEnv()) {
    env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
    // First response: topic1 ok, topic2 throttled, topic3 already exists.
    env.kafkaClient().prepareResponse(
        expectCreateTopicsRequestWithTopics("topic1", "topic2", "topic3"),
        prepareCreateTopicsResponse(1000,
            creatableTopicResult("topic1", Errors.NONE),
            creatableTopicResult("topic2", Errors.THROTTLING_QUOTA_EXCEEDED),
            creatableTopicResult("topic3", Errors.TOPIC_ALREADY_EXISTS)));
    // Retry 1: topic2 throttled again.
    env.kafkaClient().prepareResponse(
        expectCreateTopicsRequestWithTopics("topic2"),
        prepareCreateTopicsResponse(1000,
            creatableTopicResult("topic2", Errors.THROTTLING_QUOTA_EXCEEDED)));
    // Retry 2: topic2 finally succeeds.
    env.kafkaClient().prepareResponse(
        expectCreateTopicsRequestWithTopics("topic2"),
        prepareCreateTopicsResponse(0,
            creatableTopicResult("topic2", Errors.NONE)));
    CreateTopicsResult result = env.adminClient().createTopics(
        asList(
            new NewTopic("topic1", 1, (short) 1),
            new NewTopic("topic2", 1, (short) 1),
            new NewTopic("topic3", 1, (short) 1)),
        new CreateTopicsOptions().retryOnQuotaViolation(true));
    assertNull(result.values().get("topic1").get());
    assertNull(result.values().get("topic2").get());
    TestUtils.assertFutureThrows(result.values().get("topic3"), TopicExistsException.class);
  }
}
|
/** Exposes the underlying DataSource of the wrapped JdbcTemplate. */
public DataSource getDatasource() {
  return jt.getDataSource();
}
|
/** getDatasource must pass through the JdbcTemplate's DataSource unchanged. */
@Test
void testGetDataSource() {
  HikariDataSource dataSource = new HikariDataSource();
  dataSource.setJdbcUrl("test.jdbc.url");
  when(jt.getDataSource()).thenReturn(dataSource);
  assertEquals(dataSource.getJdbcUrl(), ((HikariDataSource) service.getDatasource()).getJdbcUrl());
}
|
/**
 * Issues an asynchronous fetch of all (tag, value) entries whose state key
 * starts with the given prefix, returning a future over a paging iterable
 * that follows continuation positions transparently.
 *
 * @param prefix key prefix to match
 * @param stateFamily the state family to read from
 * @param valueCoder coder used to decode each value
 */
public <V> Future<Iterable<Map.Entry<ByteString, V>>> valuePrefixFuture(
    ByteString prefix, String stateFamily, Coder<V> valueCoder) {
  // First request has no continuation position.
  // NOTE(review): toBuilder().build() looks like a no-op round-trip on the
  // tag — confirm whether it exists to normalize/copy before use.
  StateTag<ByteString> stateTag =
      StateTag.<ByteString>of(Kind.VALUE_PREFIX, prefix, stateFamily).toBuilder().build();
  return valuesToPagingIterableFuture(
      stateTag, valueCoder, this.stateFuture(stateTag, valueCoder));
}
|
/**
 * A value-prefix read must be lazy (no RPC until the future is consumed),
 * send a correctly shaped KeyedGetDataRequest, and decode all tag/value
 * pairs from the response.
 */
@Test
public void testReadTagValuePrefix() throws Exception {
  Future<Iterable<Map.Entry<ByteString, Integer>>> future =
      underTest.valuePrefixFuture(STATE_KEY_PREFIX, STATE_FAMILY, INT_CODER);
  // Creating the future alone must not trigger any Windmill call.
  Mockito.verifyNoMoreInteractions(mockWindmill);
  Windmill.KeyedGetDataRequest.Builder expectedRequest =
      Windmill.KeyedGetDataRequest.newBuilder()
          .setKey(DATA_KEY)
          .setShardingKey(SHARDING_KEY)
          .setWorkToken(WORK_TOKEN)
          .setMaxBytes(WindmillStateReader.MAX_KEY_BYTES)
          .addTagValuePrefixesToFetch(
              Windmill.TagValuePrefixRequest.newBuilder()
                  .setTagPrefix(STATE_KEY_PREFIX)
                  .setStateFamily(STATE_FAMILY)
                  .setFetchMaxBytes(WindmillStateReader.MAX_TAG_VALUE_PREFIX_BYTES));
  Windmill.KeyedGetDataResponse.Builder response =
      Windmill.KeyedGetDataResponse.newBuilder()
          .setKey(DATA_KEY)
          .addTagValuePrefixes(
              Windmill.TagValuePrefixResponse.newBuilder()
                  .setTagPrefix(STATE_KEY_PREFIX)
                  .setStateFamily(STATE_FAMILY)
                  .addTagValues(
                      Windmill.TagValue.newBuilder()
                          .setTag(STATE_KEY_1)
                          .setStateFamily(STATE_FAMILY)
                          .setValue(intValue(8)))
                  .addTagValues(
                      Windmill.TagValue.newBuilder()
                          .setTag(STATE_KEY_2)
                          .setStateFamily(STATE_FAMILY)
                          .setValue(intValue(9))));
  Mockito.when(mockWindmill.getStateData(COMPUTATION, expectedRequest.build()))
      .thenReturn(response.build());
  // Consuming the future performs exactly one getStateData call.
  Iterable<Map.Entry<ByteString, Integer>> result = future.get();
  Mockito.verify(mockWindmill).getStateData(COMPUTATION, expectedRequest.build());
  Mockito.verifyNoMoreInteractions(mockWindmill);
  assertThat(
      result,
      Matchers.containsInAnyOrder(
          new AbstractMap.SimpleEntry<>(STATE_KEY_1, 8),
          new AbstractMap.SimpleEntry<>(STATE_KEY_2, 9)));
  assertNoReader(future);
}
|
/**
 * Fetches the partitions of a JDBC-backed table by querying the remote
 * metadata tables for partition names and their last-modified time.
 *
 * @param connection open JDBC connection to the remote database
 * @param table the table (must be a {@code JDBCTable}) to list partitions for
 * @return immutable list of partitions, one per name in the NAME column
 * @throws StarRocksConnectorException on SQL failure or unexpected nulls
 */
public List<Partition> getPartitions(Connection connection, Table table) {
    JDBCTable jdbcTable = (JDBCTable) table;
    String query = getPartitionQuery(table);
    try (PreparedStatement ps = connection.prepareStatement(query)) {
        ps.setString(1, jdbcTable.getDbName());
        ps.setString(2, jdbcTable.getJdbcTable());
        // Fix: close the ResultSet via try-with-resources (previously leaked).
        // The former 'rs == null' branch was dead code: per the JDBC spec,
        // executeQuery never returns null.
        try (ResultSet rs = ps.executeQuery()) {
            ImmutableList.Builder<Partition> list = ImmutableList.builder();
            while (rs.next()) {
                // NAME holds a quoted, comma-separated list of partition names.
                String[] partitionNames = rs.getString("NAME").
                        replace("'", "").split(",");
                long createTime = rs.getTimestamp("MODIFIED_TIME").getTime();
                for (String partitionName : partitionNames) {
                    list.add(new Partition(partitionName, createTime));
                }
            }
            return list.build();
        }
    } catch (SQLException | NullPointerException e) {
        // NPE is caught deliberately: a null NAME or MODIFIED_TIME column from
        // the driver should surface as a connector error, not crash the caller.
        throw new StarRocksConnectorException(e.getMessage(), e);
    }
}
|
/**
 * With the partition cache enabled, repeated getPartitions calls return
 * results (from cache on the second call); after disabling and refreshing
 * the cache, the same lookup yields no cached partitions.
 */
@Test
public void testGetPartitionsWithCache() {
  try {
    JDBCCacheTestUtil.openCacheEnable(connectContext);
    JDBCMetadata jdbcMetadata = new JDBCMetadata(properties, "catalog", dataSource);
    JDBCTable jdbcTable = new JDBCTable(100000, "tbl1", Arrays.asList(new Column("d", Type.VARCHAR)),
        Arrays.asList(new Column("d", Type.VARCHAR)), "test", "catalog", properties);
    // First call populates the cache, second call is served from it.
    int size = jdbcMetadata.getPartitions(jdbcTable, Arrays.asList("20230810")).size();
    Assert.assertTrue(size > 0);
    int sizeWithCache = jdbcMetadata.getPartitions(jdbcTable, Arrays.asList("20230810")).size();
    Assert.assertTrue(sizeWithCache > 0);
    // Disable caching and refresh; the lookup should now return nothing.
    JDBCCacheTestUtil.closeCacheEnable(connectContext);
    Map<String, String> properties = new HashMap<>();
    jdbcMetadata.refreshCache(properties);
    int sizeWithOutCache = jdbcMetadata.getPartitions(jdbcTable, Arrays.asList("20230810")).size();
    Assert.assertEquals(0, sizeWithOutCache);
  } catch (Exception e) {
    // Any unexpected exception is a test failure; print it for diagnosis.
    System.out.println(e.getMessage());
    Assert.fail();
  }
}
|
/**
 * Formats epoch milliseconds as an ISO-8601 UTC timestamp using the explicit
 * "+00:00" offset form instead of the "Z" suffix,
 * e.g. 1 -> "1970-01-01T00:00:00.001+00:00".
 */
public static String formatTimestampMillis(long millis) {
  return Instant.ofEpochMilli(millis).toString().replace("Z", "+00:00");
}
|
/**
 * Round-trips millisecond timestamps: the formatted "+00:00" string must
 * parse back to the original epoch millis, both with and without a
 * fractional-seconds component.
 */
@Test
public void formatTimestampMillis() {
  String timestamp = "1970-01-01T00:00:00.001+00:00";
  assertThat(DateTimeUtil.formatTimestampMillis(1L)).isEqualTo(timestamp);
  assertThat(ZonedDateTime.parse(timestamp).toInstant().toEpochMilli()).isEqualTo(1L);
  // Whole-second value: Instant.toString omits the fractional part.
  timestamp = "1970-01-01T00:16:40+00:00";
  assertThat(DateTimeUtil.formatTimestampMillis(1000000L)).isEqualTo(timestamp);
  assertThat(ZonedDateTime.parse(timestamp).toInstant().toEpochMilli()).isEqualTo(1000000L);
}
|
// Deprecation suppressed: HandleImpl's constructor is deprecated but still
// the supported way to seed a handle with this allocator's bounds.
@SuppressWarnings("deprecation")
@Override
public Handle newHandle() {
  return new HandleImpl(minIndex, maxIndex, initialIndex, minCapacity, maxCapacity);
}
|
/**
 * An allocator configured with minimum 81 must produce handles that allocate
 * at least 81 bytes after reset.
 */
@Test
public void doesSetCorrectMinBounds() {
  AdaptiveRecvByteBufAllocator recvByteBufAllocator = new AdaptiveRecvByteBufAllocator(81, 95, 95);
  RecvByteBufAllocator.ExtendedHandle handle =
      (RecvByteBufAllocator.ExtendedHandle) recvByteBufAllocator.newHandle();
  handle.reset(config);
  allocReadExpected(handle, alloc, 81);
}
|
/**
 * Instantiates and registers a service implementation at runtime, replacing
 * (and destroying) any existing service bound to the same interface. On any
 * failure the whole server is torn down and a ServerException is raised.
 *
 * @param klass service implementation class; needs a no-arg constructor
 * @throws ServerException if instantiation or initialization fails
 * @throws IllegalStateException if the server is shutting down
 */
public void setService(Class<? extends Service> klass) throws ServerException {
  ensureOperational();
  Check.notNull(klass, "serviceKlass");
  if (getStatus() == Status.SHUTTING_DOWN) {
    throw new IllegalStateException("Server shutting down");
  }
  try {
    Service newService = klass.newInstance();
    Service oldService = services.get(newService.getInterface());
    if (oldService != null) {
      try {
        // Best effort: a failing destroy must not block the replacement.
        oldService.destroy();
      } catch (Throwable ex) {
        log.error("Could not destroy service [{}], {}",
            new Object[]{oldService.getInterface(), ex.getMessage(), ex});
      }
    }
    // Initialize before publishing so a failed init never becomes visible.
    newService.init(this);
    services.put(newService.getInterface(), newService);
  } catch (Exception ex) {
    // A failed programmatic swap leaves the server in an unknown state:
    // tear everything down before propagating.
    log.error("Could not set service [{}] programmatically -server shutting down-, {}", klass, ex);
    destroy();
    throw new ServerException(ServerException.ERROR.S09, klass, ex.getMessage(), ex);
  }
}
|
/**
 * Calling setService on a server that was never initialized must fail the
 * ensureOperational check with IllegalStateException.
 */
@Test(expected = IllegalStateException.class)
@TestDir
public void illegalState3() throws Exception {
  Server server = new Server("server", TestDirHelper.getTestDir().getAbsolutePath(), new Configuration(false));
  server.setService(null);
}
|
/**
 * Returns the subset of the provided certificates that are currently valid
 * (neither expired nor not-yet-valid). A null array and null entries are
 * silently skipped. The returned set is never null.
 */
public static Set<X509Certificate> filterValid( X509Certificate... certificates )
{
    final Set<X509Certificate> valid = new HashSet<>();
    if ( certificates == null )
    {
        return valid;
    }
    for ( final X509Certificate candidate : certificates )
    {
        if ( candidate == null )
        {
            continue;
        }
        try
        {
            // Throws when 'now' falls outside the certificate's validity window.
            candidate.checkValidity();
            valid.add( candidate );
        }
        catch ( CertificateExpiredException | CertificateNotYetValidException e )
        {
            // Not yet or no longer valid: exclude from the result.
        }
    }
    return valid;
}
|
@Test
public void testFilterValidWithValidAndInvalidCerts() throws Exception
{
    // Setup fixture: one certificate inside its validity window, one past it.
    final X509Certificate validCert = KeystoreTestUtils.generateValidCertificate().getCertificate();
    final X509Certificate expiredCert = KeystoreTestUtils.generateExpiredCertificate().getCertificate();
    final Collection<X509Certificate> certs = new ArrayList<>();
    certs.add( validCert );
    certs.add( expiredCert );
    // Execute system under test.
    final Collection<X509Certificate> filtered = CertificateUtils.filterValid( certs );
    // Verify: only the valid certificate survives the filter.
    assertEquals( 1, filtered.size() );
    assertTrue( filtered.contains( validCert ) );
}
|
/**
 * Computes a best-effort "length" for an arbitrary object.
 * <ul>
 *   <li>null → 0</li>
 *   <li>CharSequence → its character length</li>
 *   <li>Collection / Map → its size</li>
 *   <li>Iterator / Enumeration → number of remaining elements
 *       (NOTE: the iterator/enumeration is consumed by this call)</li>
 *   <li>array → its length</li>
 *   <li>anything else → -1 (length is not defined)</li>
 * </ul>
 *
 * @param obj the object to measure; may be null
 * @return the length as defined above, or -1 if not measurable
 */
public static int length(Object obj) {
    if (obj == null) {
        return 0;
    }
    if (obj instanceof CharSequence) {
        return ((CharSequence) obj).length();
    }
    if (obj instanceof Collection) {
        return ((Collection<?>) obj).size();
    }
    if (obj instanceof Map) {
        return ((Map<?, ?>) obj).size();
    }
    if (obj instanceof Iterator) {
        // Counting drains the iterator; callers must not reuse it afterwards.
        final Iterator<?> iter = (Iterator<?>) obj;
        int count = 0;
        while (iter.hasNext()) {
            count++;
            iter.next();
        }
        return count;
    }
    if (obj instanceof Enumeration) {
        // Counting drains the enumeration; callers must not reuse it afterwards.
        final Enumeration<?> enumeration = (Enumeration<?>) obj;
        int count = 0;
        while (enumeration.hasMoreElements()) {
            count++;
            enumeration.nextElement();
        }
        return count;
    }
    // Reflective length covers both primitive and object arrays.
    if (obj.getClass().isArray()) {
        return Array.getLength(obj);
    }
    return -1;
}
|
@Test
public void lengthTest() {
    // Arrays report their element count.
    final int[] numbers = {1, 2, 3, 4, 5};
    assertEquals(5, ObjectUtil.length(numbers));
    // Maps report their entry count.
    final Map<String, String> lookup = new HashMap<>();
    lookup.put("a", "a1");
    lookup.put("b", "b1");
    lookup.put("c", "c1");
    assertEquals(3, ObjectUtil.length(lookup));
}
|
/**
 * Sends the given API request synchronously and returns its typed response.
 * Thin delegation to the underlying API client.
 */
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) {
    return api.send(request);
}
|
@Test
public void deleteChatPhoto() {
    BaseResponse response = bot.execute(new DeleteChatPhoto(groupId));
    // The API may answer 400 CHAT_NOT_MODIFIED (e.g. when there is no photo to
    // delete); this integration test accepts that as a valid outcome.
    if (!response.isOk()) {
        assertEquals(400, response.errorCode());
        assertEquals("Bad Request: CHAT_NOT_MODIFIED", response.description());
    }
}
|
@Override
protected Mono<Void> handleSelectorIfNull(final String pluginName, final ServerWebExchange exchange, final ShenyuPluginChain chain) {
    // No selector matched this request: short-circuit with the standard
    // "no selector" result instead of continuing down the plugin chain.
    return WebFluxResultUtils.noSelectorResult(pluginName, exchange);
}
|
@Test
public void handleSelectorIfNullTest() {
    // Register a mocked application context so the plugin can resolve ShenyuResult.
    final ConfigurableApplicationContext applicationContext = mock(ConfigurableApplicationContext.class);
    SpringBeanUtils.getInstance().setApplicationContext(applicationContext);
    when(applicationContext.getBean(ShenyuResult.class)).thenReturn(new DefaultShenyuResult());
    // The "no selector" path must complete without emitting any element.
    StepVerifier.create(sofaPlugin.handleSelectorIfNull("pluginName", this.exchange, this.chain))
            .expectSubscription()
            .verifyComplete();
}
|
/**
 * Normalizes the given URL with encoding disabled.
 * Convenience overload of {@code normalize(url, false)}.
 *
 * @param url the URL to normalize
 * @return the normalized URL
 */
public static String normalize(String url) {
    return normalize(url, false);
}
|
@Test
public void normalizeTest3() {
    // Backslashes are normalized to forward slashes; duplicate slashes survive.
    String normalized = URLUtil.normalize("http://www.hutool.cn//aaa/\\bbb?a=1&b=2", true);
    assertEquals("http://www.hutool.cn//aaa//bbb?a=1&b=2", normalized);
    // A missing scheme defaults to http://.
    normalized = URLUtil.normalize("www.hutool.cn//aaa/bbb?a=1&b=2", true);
    assertEquals("http://www.hutool.cn//aaa/bbb?a=1&b=2", normalized);
    // Leading slash noise ahead of the host is stripped.
    normalized = URLUtil.normalize("\\/www.hutool.cn//aaa/bbb?a=1&b=2", true);
    assertEquals("http://www.hutool.cn//aaa/bbb?a=1&b=2", normalized);
}
|
/**
 * Stores the value under the lower-cased key, making lookups case-insensitive.
 *
 * @param key   the key; lower-cased before delegation (NPE if null)
 * @param value the value to store
 * @return the previous value associated with the lower-cased key, or null
 */
// NOTE(review): toLowerCase() uses the default locale; keys containing e.g. 'I'
// behave differently under the Turkish locale — confirm whether Locale.ROOT is needed.
public T put(String key, T value) {
    return delegate.put(key.toLowerCase(), value);
}
|
@Test
public void testPut() throws Exception {
    final String someKey = "someKey";
    final Object someValue = mock(Object.class);
    final Object anotherValue = mock(Object.class);
    // The wrapper must lower-case the key before delegating to the backing map.
    when(someMap.put(someKey.toLowerCase(), someValue)).thenReturn(anotherValue);
    assertEquals(anotherValue, caseInsensitiveMapWrapper.put(someKey, someValue));
    verify(someMap, times(1)).put(someKey.toLowerCase(), someValue);
}
|
@Override
public PathAttributes toAttributes(final DavResource resource) {
    final PathAttributes attributes = super.toAttributes(resource);
    final Map<QName, String> properties = resource.getCustomPropsNS();
    if(null == properties) {
        return attributes;
    }
    if(properties.containsKey(OC_FILEID_CUSTOM_NAMESPACE)) {
        attributes.setFileId(properties.get(OC_FILEID_CUSTOM_NAMESPACE));
    }
    // A size property is only consulted for directories
    if(resource.isDirectory() && properties.containsKey(OC_SIZE_CUSTOM_NAMESPACE)) {
        attributes.setSize(Long.parseLong(properties.get(OC_SIZE_CUSTOM_NAMESPACE)));
    }
    if(properties.containsKey(OC_CHECKSUMS_CUSTOM_NAMESPACE)) {
        // Space-separated list of <algorithm>:<hash> tokens
        for(String token : StringUtils.split(properties.get(OC_CHECKSUMS_CUSTOM_NAMESPACE), StringUtils.SPACE)) {
            try {
                final String[] parts = StringUtils.split(token, ":");
                attributes.setChecksum(new Checksum(HashAlgorithm.valueOf(StringUtils.lowerCase(parts[0])),
                        StringUtils.lowerCase(parts[1])));
            }
            catch(IllegalArgumentException e) {
                log.warn(String.format("Unsupported checksum %s", token));
            }
        }
    }
    return attributes;
}
|
@Test
public void testCustomModified_Modified() {
    // Presumably: when the custom timestamp properties disagree with the resource's
    // own modified date, the plain modified date wins — TODO confirm against the
    // DAVTimestampFeature contract.
    final NextcloudAttributesFinderFeature f = new NextcloudAttributesFinderFeature(null);
    final DavResource mock = mock(DavResource.class);
    Map<QName, String> map = new HashMap<>();
    map.put(DAVTimestampFeature.LAST_MODIFIED_CUSTOM_NAMESPACE, "Mon, 29 Oct 2018 21:14:06 UTC");
    map.put(DAVTimestampFeature.LAST_MODIFIED_SERVER_CUSTOM_NAMESPACE, "Thu, 01 Nov 2018 15:31:57 UTC");
    final Date modified = new DateTime("2018-11-02T15:31:57Z").toDate();
    when(mock.getModified()).thenReturn(modified);
    when(mock.getCustomPropsNS()).thenReturn(map);
    final PathAttributes attrs = f.toAttributes(mock);
    assertEquals(modified.getTime(), attrs.getModificationDate());
}
|
@Override
public int encode(PortNumber resource) {
    // Narrow the long representation to int for codec storage.
    return (int) resource.toLong();
}
|
@Test
public void testEncode() {
    // Encoding a port number yields its plain integer value.
    assertThat(sut.encode(PortNumber.portNumber(100)), is(100));
}
|
@Override
public boolean unRegistry() {
    // Delegate de-registration to whichever service the manager selects.
    return zkServiceManager.chooseService().unRegistry();
}
|
@Test
public void unRegistry() {
    // The client must surface the chosen service's de-registration result.
    Mockito.when(zkService34.unRegistry()).thenReturn(true);
    Assert.assertTrue(zkDiscoveryClient.unRegistry());
}
|
/**
 * Pads the list at the head with {@code padObj} until its size reaches
 * {@code minLen}. A list that is already long enough is left untouched.
 *
 * @param list   the list to pad in place; must not be null
 * @param minLen the minimum length to guarantee
 * @param padObj the element to prepend (the same reference is reused)
 */
public static <T> void padLeft(List<T> list, int minLen, T padObj) {
    Objects.requireNonNull(list);
    // Prepend all missing elements with one bulk operation instead of repeated
    // add(0, ...) calls, which cost O(n) each on ArrayList (O(n^2) total).
    // For an empty list, prepending is equivalent to the old padRight shortcut.
    int missing = minLen - list.size();
    if (missing > 0) {
        list.addAll(0, java.util.Collections.nCopies(missing, padObj));
    }
}
|
@Test
public void testPadLeft() {
    // Padding an empty list grows it to the requested minimum length.
    List<String> actual = CollUtil.newArrayList();
    CollUtil.padLeft(actual, 1, "b");
    CollUtil.padLeft(actual, 2, "a");
    assertEquals(actual, CollUtil.newArrayList("a", "b"));
    // A list already at the minimum length is left untouched.
    actual = CollUtil.newArrayList("a", "b");
    CollUtil.padLeft(actual, 2, "a");
    assertEquals(actual, CollUtil.newArrayList("a", "b"));
    // Shorter lists are padded at the head.
    actual = CollUtil.newArrayList("c");
    CollUtil.padLeft(actual, 3, "a");
    assertEquals(actual, CollUtil.newArrayList("a", "a", "c"));
}
|
@Override
public void touch(final Local file) throws AccessDeniedException {
    // Creates an empty file, creating a missing parent directory on demand.
    // An already-existing file, as well as any I/O failure, is reported as
    // LocalAccessDeniedException.
    try {
        try {
            Files.createFile(Paths.get(file.getAbsolute()));
        }
        catch(NoSuchFileException e) {
            // Parent directory is missing: create it, then retry once.
            final Local parent = file.getParent();
            new DefaultLocalDirectoryFeature().mkdir(parent);
            if(log.isDebugEnabled()) {
                log.debug(String.format("Created folder %s", parent));
            }
            Files.createFile(Paths.get(file.getAbsolute()));
        }
        catch(FileAlreadyExistsException e) {
            // An existing file is treated as a failure rather than a no-op.
            log.warn(String.format("File %s already exists", file));
            throw new LocalAccessDeniedException(MessageFormat.format(
                LocaleFactory.localizedString("Cannot create {0}", "Error"), file.getAbsolute()), e);
        }
    }
    catch(IOException e) {
        throw new LocalAccessDeniedException(MessageFormat.format(
            LocaleFactory.localizedString("Cannot create {0}", "Error"), file.getAbsolute()), e);
    }
    if(log.isDebugEnabled()) {
        log.debug(String.format("Created file %s", file));
    }
}
|
@Test
public void testTouch() throws Exception {
    Local parent = new Local(System.getProperty("java.io.tmpdir"), new AlphanumericRandomStringService().random());
    Local l = new Local(parent, UUID.randomUUID().toString());
    final DefaultLocalTouchFeature f = new DefaultLocalTouchFeature();
    // Test create missing parent directory
    f.touch(l);
    assertTrue(parent.exists());
    assertTrue(l.exists());
    // Touching an existing file must fail rather than silently succeed.
    assertThrows(LocalAccessDeniedException.class, () -> f.touch(l));
    l.delete();
    parent.delete();
}
|
/**
 * Creates a {@code Write} transform with the builder defaults: 60s max
 * connection idle time, batch size 1024, SSL disabled and ordered writes.
 */
public static Write write() {
    return new AutoValue_MongoDbIO_Write.Builder()
        .setMaxConnectionIdleTime(60000)
        .setBatchSize(1024L)
        .setSslEnabled(false)
        .setIgnoreSSLCertificate(false)
        .setSslInvalidHostNameAllowed(false)
        .setOrdered(true)
        .build();
}
|
@Test
public void testUpdate() {
    // Writing with an update configuration keyed on "id" must update the
    // matching document's fields in place rather than inserting a new one.
    final String collectionName = "testUpdate";
    final int numElements = 100;
    Document doc = Document.parse("{\"id\":1,\"scientist\":\"Updated\",\"country\":\"India\"}");
    database.getCollection(collectionName).insertMany(createDocuments(numElements, true));
    assertEquals(numElements, countElements(collectionName));
    List<Document> docs = new ArrayList<>();
    docs.add(doc);
    pipeline
        .apply(Create.of(docs))
        .apply(
            MongoDbIO.write()
                .withUri("mongodb://localhost:" + port)
                .withDatabase(DATABASE_NAME)
                .withCollection(collectionName)
                .withUpdateConfiguration(
                    UpdateConfiguration.create()
                        .withUpdateKey("id")
                        .withUpdateFields(
                            UpdateField.fieldUpdate("$set", "scientist", "scientist"),
                            UpdateField.fieldUpdate("$set", "country", "country"))));
    pipeline.run();
    // The targeted document must carry the updated field values.
    Document out = database.getCollection(collectionName).find(new Document("_id", 1)).first();
    assertEquals("Updated", out.get("scientist"));
    assertEquals("India", out.get("country"));
}
|
@Override
public Object getNativeDataType( Object object ) throws KettleValueException {
    // The native representation of this value type is its string form.
    return getString( object );
}
|
@Test
public void testGetNativeData_emptyIsNull() throws Exception {
    // With "nulls and empty are different" disabled, conversion to the native
    // (string) type is exercised across trim modes. The commented-out
    // assertions record an open question about whether empty/blank input
    // should map to null instead of the empty string.
    meta.setNullsAndEmptyAreDifferent( false );
    assertEquals( BASE_VALUE, meta.getNativeDataType( BASE_VALUE ) );
    assertEquals( TEST_VALUE, meta.getNativeDataType( TEST_VALUE ) );
    assertEquals( null, meta.getNativeDataType( null ) );
    assertEquals( "1", meta.getNativeDataType( 1 ) );
    assertEquals( "1.0", meta.getNativeDataType( 1.0 ) );
    Date d = ( new SimpleDateFormat( "yyyy-MM-dd HH:mm:ss.SSS" ) ).parse( "2012-11-10 09:08:07.654" );
    assertEquals( d.toString(), meta.getNativeDataType( d ) );
    Timestamp ts = Timestamp.valueOf( "2012-11-10 09:08:07.654321" );
    assertEquals( "2012-11-10 09:08:07.654321", meta.getNativeDataType( ts ) );
    // No trimming: whitespace is preserved verbatim.
    meta.setTrimType( ValueMetaInterface.TRIM_TYPE_NONE );
    // assertEquals( null, meta.getNativeDataType( "" ) ); //TODO: is it correct?
    assertEquals( "", meta.getNativeDataType( "" ) ); // TODO: is it correct?
    assertEquals( "1", meta.getNativeDataType( "1" ) );
    assertEquals( " ", meta.getNativeDataType( " " ) );
    assertEquals( " 1 ", meta.getNativeDataType( " 1 " ) );
    // Left trim: leading whitespace removed only.
    meta.setTrimType( ValueMetaInterface.TRIM_TYPE_LEFT );
    // assertEquals( null, meta.getNativeDataType( "" ) ); //TODO: is it correct?
    assertEquals( "", meta.getNativeDataType( "" ) ); // TODO: is it correct?
    assertEquals( "1", meta.getNativeDataType( "1" ) );
    // assertEquals( null, meta.getNativeDataType( " " ) ); //TODO: is it correct?
    assertEquals( "", meta.getNativeDataType( " " ) ); // TODO: is it correct?
    assertEquals( "1 ", meta.getNativeDataType( " 1 " ) );
    // Right trim: trailing whitespace removed only.
    meta.setTrimType( ValueMetaInterface.TRIM_TYPE_RIGHT );
    // assertEquals( null, meta.getNativeDataType( "" ) ); //TODO: is it correct?
    assertEquals( "", meta.getNativeDataType( "" ) ); // TODO: is it correct?
    assertEquals( "1", meta.getNativeDataType( "1" ) );
    // assertEquals( null, meta.getNativeDataType( " " ) ); //TODO: is it correct?
    assertEquals( "", meta.getNativeDataType( " " ) ); // TODO: is it correct?
    assertEquals( " 1", meta.getNativeDataType( " 1 " ) );
    // Both: whitespace removed on both ends.
    meta.setTrimType( ValueMetaInterface.TRIM_TYPE_BOTH );
    // assertEquals( null, meta.getNativeDataType( "" ) ); //TODO: is it correct?
    assertEquals( "", meta.getNativeDataType( "" ) ); // TODO: is it correct?
    assertEquals( "1", meta.getNativeDataType( "1" ) );
    // assertEquals( null, meta.getNativeDataType( " " ) ); //TODO: is it correct?
    assertEquals( "", meta.getNativeDataType( " " ) ); // TODO: is it correct?
    assertEquals( "1", meta.getNativeDataType( " 1 " ) );
}
|
/**
 * Builds the REST path for a namespace's properties endpoint,
 * e.g. {@code v1/<prefix>/namespaces/<encoded-ns>/properties}.
 * The namespace is encoded so that separators such as '/' survive routing.
 */
public String namespaceProperties(Namespace ns) {
    return SLASH.join("v1", prefix, "namespaces", RESTUtil.encodeNamespace(ns), "properties");
}
|
@Test
public void testNamespacePropertiesWithSlash() {
    // A '/' inside a namespace name must be percent-encoded (%2F) in the path.
    Namespace ns = Namespace.of("n/s");
    assertThat(withPrefix.namespaceProperties(ns))
        .isEqualTo("v1/ws/catalog/namespaces/n%2Fs/properties");
    assertThat(withoutPrefix.namespaceProperties(ns)).isEqualTo("v1/namespaces/n%2Fs/properties");
}
|
@VisibleForTesting
WorkerIdentity get(String key, int index) {
    // Read the field once into a local so the null check and the lookup
    // operate on the same snapshot of the hash ring.
    NavigableMap<Integer, WorkerIdentity> map = mActiveNodesByConsistentHashing;
    Preconditions.checkState(map != null, "Hash provider is not properly initialized");
    return get(map, key, index);
}
|
@Test
public void uninitializedThrowsException() {
    // Looking up a worker before the provider is initialized must fail fast.
    ConsistentHashProvider provider = new ConsistentHashProvider(
        1, WORKER_LIST_TTL_MS, NUM_VIRTUAL_NODES);
    assertThrows(IllegalStateException.class, () -> provider.get(OBJECT_KEY, 0));
}
|
/**
 * Evaluates the expression against the given variable context.
 * Convenience overload that passes an empty resource list.
 */
public static Object eval(String expression, Map<String, Object> context) {
    return eval(expression, context, ListUtil.empty());
}
|
@Test
public void mvelTest(){
    // Evaluate a compound arithmetic expression against a variable context.
    final ExpressionEngine engine = new MvelEngine();
    final Dict context = Dict.create()
            .set("a", 100.3)
            .set("b", 45)
            .set("c", -199.100);
    final Object result = engine.eval("a-(b-c)", context, null);
    // 100.3 - (45 - (-199.1)) = -143.8
    assertEquals(-143.8, (double) result, 0);
}
|
public void run() {
    // Validate first; only proceed with registration when validation
    // recorded no errors in the notification.
    validate();
    if (!super.getNotification().hasErrors()) {
        LOGGER.info("Register worker in backend system");
    }
}
|
@Test
void runWithMissingDOB() {
    // A worker registration without a date of birth must be rejected with
    // the MISSING_DOB error (plus the dependent age validation error).
    RegisterWorkerDto workerDto = createValidWorkerDto();
    workerDto.setupWorkerDto("name", "occupation", null);
    RegisterWorker registerWorker = new RegisterWorker(workerDto);
    // Run the registration process
    registerWorker.run();
    // Verify that the notification contains the missing DOB error
    assertTrue(registerWorker.getNotification().hasErrors());
    assertTrue(registerWorker.getNotification().getErrors().contains(RegisterWorkerDto.MISSING_DOB));
    // JUnit's assertEquals takes (expected, actual) — arguments were reversed before.
    assertEquals(2, registerWorker.getNotification().getErrors().size());
}
|
/**
 * Blocks until the metadata version advances past {@code lastVersion}, the
 * timeout elapses, or this metadata instance is closed.
 *
 * @throws InterruptedException if the waiting thread is interrupted
 */
public synchronized void awaitUpdate(final int lastVersion, final long timeoutMs) throws InterruptedException {
    long currentTimeMs = time.milliseconds();
    // Guard against overflow: a huge timeout would push the deadline negative.
    long deadlineMs = currentTimeMs + timeoutMs < 0 ? Long.MAX_VALUE : currentTimeMs + timeoutMs;
    time.waitObject(this, () -> {
        // Throw fatal exceptions, if there are any. Recoverable topic errors will be handled by the caller.
        maybeThrowFatalException();
        return updateVersion() > lastVersion || isClosed();
    }, deadlineMs);
    if (isClosed())
        throw new KafkaException("Requested metadata update after close");
}
|
@Test
public void testMetadataUpdateWaitTime() throws Exception {
    final long nowMs = 0;
    metadata.updateWithCurrentRequestVersion(responseWithCurrentTopics(), false, nowMs);
    assertTrue(metadata.timeToNextUpdate(nowMs) > 0, "No update needed.");
    // A zero max-wait must return promptly instead of blocking forever.
    try {
        metadata.awaitUpdate(metadata.requestUpdate(true), 0);
        fail("Wait on metadata update was expected to timeout, but it didn't");
    } catch (TimeoutException te) {
        // expected
    }
    // A longer, non-zero wait must also time out when no update arrives.
    final long twoSecondWait = 2000;
    try {
        metadata.awaitUpdate(metadata.requestUpdate(true), twoSecondWait);
        fail("Wait on metadata update was expected to timeout, but it didn't");
    } catch (TimeoutException te) {
        // expected
    }
}
|
@Override
public void close() {
    // Close the wrapped store; this layer holds no resources of its own.
    internal.close();
}
|
@Test
public void shouldDelegateAndRemoveMetricsOnClose() {
    // Closing must both delegate to the inner store and unregister its metrics.
    assertThat(storeMetrics(), not(empty()));
    store.close();
    verify(inner).close();
    assertThat(storeMetrics(), empty());
}
|
/**
 * Converts a Pinot schema into a Calcite row type, mapping every field spec
 * to a field and honoring the schema's column-based null handling flag.
 */
public RelDataType createRelDataTypeFromSchema(Schema schema) {
    Builder typeBuilder = new Builder(this);
    boolean nullHandlingEnabled = schema.isEnableColumnBasedNullHandling();
    schema.getFieldSpecMap().forEach(
        (columnName, fieldSpec) -> typeBuilder.add(columnName, toRelDataType(fieldSpec, nullHandlingEnabled)));
    return typeBuilder.build();
}
|
@Test(dataProvider = "relDataTypeConversion")
public void testScalarTypes(FieldSpec.DataType dataType, RelDataType scalarType, boolean columnNullMode) {
    // A single-column schema must convert to a row type whose only field has the
    // expected scalar type, with nullability derived from the schema settings.
    TypeFactory typeFactory = new TypeFactory();
    Schema testSchema = new Schema.SchemaBuilder()
        .addSingleValueDimension("col", dataType)
        .setEnableColumnBasedNullHandling(columnNullMode)
        .build();
    RelDataType relDataTypeFromSchema = typeFactory.createRelDataTypeFromSchema(testSchema);
    List<RelDataTypeField> fieldList = relDataTypeFromSchema.getFieldList();
    RelDataTypeField field = fieldList.get(0);
    boolean colNullable = isColNullable(testSchema);
    Assert.assertEquals(field.getType(), typeFactory.createTypeWithNullability(scalarType, colNullable));
}
|
/**
 * Converts a coordinate from microdegrees (int) to degrees (double)
 * by dividing by {@code CONVERSION_FACTOR}.
 */
public static double microdegreesToDegrees(int coordinate) {
    return coordinate / CONVERSION_FACTOR;
}
|
@Test
public void intToDoubleTest() {
    // Converting the microdegree fixture must yield the exact degree fixture.
    double degrees = LatLongUtils.microdegreesToDegrees(MICRO_DEGREES);
    Assert.assertEquals(DEGREES, degrees, 0);
}
|
/**
 * Builds a response describing the running state of the requested consumer
 * group, optionally including a jstack dump of this JVM.
 */
private RemotingCommand getConsumerRunningInfo(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    final GetConsumerRunningInfoRequestHeader requestHeader =
        (GetConsumerRunningInfoRequestHeader) request.decodeCommandCustomHeader(GetConsumerRunningInfoRequestHeader.class);
    final ConsumerRunningInfo consumerRunningInfo = this.mqClientFactory.consumerRunningInfo(requestHeader.getConsumerGroup());
    if (null == consumerRunningInfo) {
        // Unknown consumer group on this client instance.
        response.setCode(ResponseCode.SYSTEM_ERROR);
        response.setRemark(String.format("The Consumer Group <%s> not exist in this consumer", requestHeader.getConsumerGroup()));
        return response;
    }
    if (requestHeader.isJstackEnable()) {
        // Capture a thread dump of the current JVM on demand.
        consumerRunningInfo.setJstack(UtilAll.jstack(Thread.getAllStackTraces()));
    }
    response.setCode(ResponseCode.SUCCESS);
    response.setBody(consumerRunningInfo.encode());
    return response;
}
|
@Test
public void testGetConsumerRunningInfo() throws Exception {
    // A known consumer group with jstack enabled must yield a SUCCESS response.
    ChannelHandlerContext ctx = mock(ChannelHandlerContext.class);
    RemotingCommand request = mock(RemotingCommand.class);
    when(request.getCode()).thenReturn(RequestCode.GET_CONSUMER_RUNNING_INFO);
    ConsumerRunningInfo consumerRunningInfo = new ConsumerRunningInfo();
    consumerRunningInfo.setJstack("jstack");
    when(mQClientFactory.consumerRunningInfo(anyString())).thenReturn(consumerRunningInfo);
    GetConsumerRunningInfoRequestHeader requestHeader = new GetConsumerRunningInfoRequestHeader();
    requestHeader.setJstackEnable(true);
    requestHeader.setConsumerGroup(defaultGroup);
    when(request.decodeCommandCustomHeader(GetConsumerRunningInfoRequestHeader.class)).thenReturn(requestHeader);
    RemotingCommand command = processor.processRequest(ctx, request);
    assertNotNull(command);
    assertEquals(ResponseCode.SUCCESS, command.getCode());
}
|
/**
 * @return the block length, in bytes, stored in this reset table
 */
public long getBlockLen() {
    return block_len;
}
|
@Test
public void testGetBlockLen() {
    // The parsed reset table must report the block length from the fixture.
    assertEquals(TestParameters.VP_RES_TBL_BLOCK_LENGTH, chmLzxcResetTable.getBlockLen());
}
|
/**
 * Parses game chat messages to track boss kill counts, personal bests,
 * Duel Arena records, Hallowed Sepulchre times, hunter rumours, pets and
 * Guardians of the Rift completions, persisting them to the RS profile.
 */
@Subscribe
public void onChatMessage(ChatMessage chatMessage)
{
	if (chatMessage.getType() != ChatMessageType.TRADE
		&& chatMessage.getType() != ChatMessageType.GAMEMESSAGE
		&& chatMessage.getType() != ChatMessageType.SPAM
		&& chatMessage.getType() != ChatMessageType.FRIENDSCHATNOTIFICATION)
	{
		return;
	}
	String message = chatMessage.getMessage();
	Matcher matcher = KILLCOUNT_PATTERN.matcher(message);
	if (matcher.find())
	{
		final String boss = matcher.group("boss");
		final int kc = Integer.parseInt(matcher.group("kc"));
		final String pre = matcher.group("pre");
		final String post = matcher.group("post");
		if (Strings.isNullOrEmpty(pre) && Strings.isNullOrEmpty(post))
		{
			unsetKc(boss);
			return;
		}
		String renamedBoss = KILLCOUNT_RENAMES
			.getOrDefault(boss, boss)
			// The config service doesn't support keys with colons in them
			.replace(":", "");
		// Bug fix: use equals() instead of the reference comparison (boss != renamedBoss),
		// which only worked by relying on String.replace and Map.getOrDefault returning
		// the identical instance when nothing changed — an implementation detail.
		if (!boss.equals(renamedBoss))
		{
			// Unset old TOB kc
			unsetKc(boss);
			unsetPb(boss);
			unsetKc(boss.replace(":", "."));
			unsetPb(boss.replace(":", "."));
			// Unset old story mode
			unsetKc("Theatre of Blood Story Mode");
			unsetPb("Theatre of Blood Story Mode");
		}
		setKc(renamedBoss, kc);
		// We either already have the pb, or need to remember the boss for the upcoming pb
		if (lastPb > -1)
		{
			log.debug("Got out-of-order personal best for {}: {}", renamedBoss, lastPb);
			if (renamedBoss.contains("Theatre of Blood"))
			{
				// TOB team size isn't sent in the kill message, but can be computed from varbits
				int tobTeamSize = tobTeamSize();
				lastTeamSize = tobTeamSize == 1 ? "Solo" : (tobTeamSize + " players");
			}
			else if (renamedBoss.contains("Tombs of Amascut"))
			{
				// TOA team size isn't sent in the kill message, but can be computed from varbits
				int toaTeamSize = toaTeamSize();
				lastTeamSize = toaTeamSize == 1 ? "Solo" : (toaTeamSize + " players");
			}
			final double pb = getPb(renamedBoss);
			// If a raid with a team size, only update the pb if it is lower than the existing pb
			// so that the pb is the overall lowest of any team size
			if (lastTeamSize == null || pb == 0 || lastPb < pb)
			{
				log.debug("Setting overall pb (old: {})", pb);
				setPb(renamedBoss, lastPb);
			}
			if (lastTeamSize != null)
			{
				log.debug("Setting team size pb: {}", lastTeamSize);
				setPb(renamedBoss + " " + lastTeamSize, lastPb);
			}
			lastPb = -1;
			lastTeamSize = null;
		}
		else
		{
			lastBossKill = renamedBoss;
			lastBossTime = client.getTickCount();
		}
		return;
	}
	matcher = DUEL_ARENA_WINS_PATTERN.matcher(message);
	if (matcher.find())
	{
		final int oldWins = getKc("Duel Arena Wins");
		final int wins = matcher.group(2).equals("one") ? 1 :
			Integer.parseInt(matcher.group(2).replace(",", ""));
		final String result = matcher.group(1);
		int winningStreak = getKc("Duel Arena Win Streak");
		int losingStreak = getKc("Duel Arena Lose Streak");
		if (result.equals("won") && wins > oldWins)
		{
			losingStreak = 0;
			winningStreak += 1;
		}
		else if (result.equals("were defeated"))
		{
			losingStreak += 1;
			winningStreak = 0;
		}
		else
		{
			log.warn("unrecognized duel streak chat message: {}", message);
		}
		setKc("Duel Arena Wins", wins);
		setKc("Duel Arena Win Streak", winningStreak);
		setKc("Duel Arena Lose Streak", losingStreak);
	}
	matcher = DUEL_ARENA_LOSSES_PATTERN.matcher(message);
	if (matcher.find())
	{
		int losses = matcher.group(1).equals("one") ? 1 :
			Integer.parseInt(matcher.group(1).replace(",", ""));
		setKc("Duel Arena Losses", losses);
	}
	// Several personal-best message variants all funnel through matchPb().
	matcher = KILL_DURATION_PATTERN.matcher(message);
	if (matcher.find())
	{
		matchPb(matcher);
	}
	matcher = NEW_PB_PATTERN.matcher(message);
	if (matcher.find())
	{
		matchPb(matcher);
	}
	matcher = RAIDS_PB_PATTERN.matcher(message);
	if (matcher.find())
	{
		matchPb(matcher);
	}
	matcher = RAIDS_DURATION_PATTERN.matcher(message);
	if (matcher.find())
	{
		matchPb(matcher);
	}
	matcher = HS_PB_PATTERN.matcher(message);
	if (matcher.find())
	{
		// Hallowed Sepulchre reports both per-floor and overall times.
		int floor = Integer.parseInt(matcher.group("floor"));
		String floortime = matcher.group("floortime");
		String floorpb = matcher.group("floorpb");
		String otime = matcher.group("otime");
		String opb = matcher.group("opb");
		String pb = MoreObjects.firstNonNull(floorpb, floortime);
		setPb("Hallowed Sepulchre Floor " + floor, timeStringToSeconds(pb));
		if (otime != null)
		{
			pb = MoreObjects.firstNonNull(opb, otime);
			setPb("Hallowed Sepulchre", timeStringToSeconds(pb));
		}
	}
	matcher = HS_KC_FLOOR_PATTERN.matcher(message);
	if (matcher.find())
	{
		int floor = Integer.parseInt(matcher.group(1));
		int kc = Integer.parseInt(matcher.group(2).replaceAll(",", ""));
		setKc("Hallowed Sepulchre Floor " + floor, kc);
	}
	matcher = HS_KC_GHC_PATTERN.matcher(message);
	if (matcher.find())
	{
		int kc = Integer.parseInt(matcher.group(1).replaceAll(",", ""));
		setKc("Hallowed Sepulchre", kc);
	}
	matcher = HUNTER_RUMOUR_KC_PATTERN.matcher(message);
	if (matcher.find())
	{
		int kc = Integer.parseInt(matcher.group(1).replaceAll(",", ""));
		setKc("Hunter Rumours", kc);
	}
	// A remembered boss kill only pairs with a pb message on the same tick.
	if (lastBossKill != null && lastBossTime != client.getTickCount())
	{
		lastBossKill = null;
		lastBossTime = -1;
	}
	matcher = COLLECTION_LOG_ITEM_PATTERN.matcher(message);
	if (matcher.find())
	{
		String item = matcher.group(1);
		int petId = findPet(item);
		if (petId != -1)
		{
			final List<Integer> petList = new ArrayList<>(getPetList());
			if (!petList.contains(petId))
			{
				log.debug("New pet added: {}/{}", item, petId);
				petList.add(petId);
				setPetList(petList);
			}
		}
	}
	matcher = GUARDIANS_OF_THE_RIFT_PATTERN.matcher(message);
	if (matcher.find())
	{
		int kc = Integer.parseInt(matcher.group(1));
		setKc("Guardians of the Rift", kc);
	}
}
|
@Test
public void testJadNewPbWithLeagueTask()
{
    // A league-task completion message arriving between the kill-count and
    // duration messages must not break the pb association with TzTok-Jad.
    ChatMessage chatMessage = new ChatMessage(null, GAMEMESSAGE, "", "Your TzTok-Jad kill count is: <col=ff0000>2</col>.", null, 0);
    chatCommandsPlugin.onChatMessage(chatMessage);
    chatMessage = new ChatMessage(null, GAMEMESSAGE, "", "Congratulations, you've completed a master task: <col=7f3700>Complete the Fight Caves in 25:00</col>.", null, 0);
    chatCommandsPlugin.onChatMessage(chatMessage);
    chatMessage = new ChatMessage(null, GAMEMESSAGE, "", "Duration: <col=ff0000>21:58</col> (new personal best)", null, 0);
    chatCommandsPlugin.onChatMessage(chatMessage);
    verify(configManager).setRSProfileConfiguration("personalbest", "tztok-jad", 21 * 60 + 58.0);
    verify(configManager).setRSProfileConfiguration("killcount", "tztok-jad", 2);
    // Precise times
    chatMessage = new ChatMessage(null, GAMEMESSAGE, "", "Duration: <col=ff0000>21:58.40</col> (new personal best)", null, 0);
    chatCommandsPlugin.onChatMessage(chatMessage);
    verify(configManager).setRSProfileConfiguration("personalbest", "tztok-jad", 21 * 60 + 58.4);
}
|
/**
 * Handles a consumer group heartbeat: gets or creates the group and member,
 * persists member/subscription changes, recomputes the target assignment when
 * the group epoch advances, reconciles the member's assignment and builds the
 * heartbeat response.
 *
 * @throws ApiException if the group is full or the request is otherwise invalid
 */
private CoordinatorResult<ConsumerGroupHeartbeatResponseData, CoordinatorRecord> consumerGroupHeartbeat(
    String groupId,
    String memberId,
    int memberEpoch,
    String instanceId,
    String rackId,
    int rebalanceTimeoutMs,
    String clientId,
    String clientHost,
    List<String> subscribedTopicNames,
    String assignorName,
    List<ConsumerGroupHeartbeatRequestData.TopicPartitions> ownedTopicPartitions
) throws ApiException {
    final long currentTimeMs = time.milliseconds();
    final List<CoordinatorRecord> records = new ArrayList<>();
    // Get or create the consumer group. Epoch 0 marks a joining member.
    boolean createIfNotExists = memberEpoch == 0;
    final ConsumerGroup group = getOrMaybeCreateConsumerGroup(groupId, createIfNotExists, records);
    throwIfConsumerGroupIsFull(group, memberId);
    // Get or create the member. A blank member id means the coordinator assigns one.
    if (memberId.isEmpty()) memberId = Uuid.randomUuid().toString();
    final ConsumerGroupMember member;
    if (instanceId == null) {
        member = getOrMaybeSubscribeDynamicConsumerGroupMember(
            group,
            memberId,
            memberEpoch,
            ownedTopicPartitions,
            createIfNotExists,
            false
        );
    } else {
        member = getOrMaybeSubscribeStaticConsumerGroupMember(
            group,
            memberId,
            memberEpoch,
            instanceId,
            ownedTopicPartitions,
            createIfNotExists,
            false,
            records
        );
    }
    // 1. Create or update the member. If the member is new or has changed, a ConsumerGroupMemberMetadataValue
    // record is written to the __consumer_offsets partition to persist the change. If the subscriptions have
    // changed, the subscription metadata is updated and persisted by writing a ConsumerGroupPartitionMetadataValue
    // record to the __consumer_offsets partition. Finally, the group epoch is bumped if the subscriptions have
    // changed, and persisted by writing a ConsumerGroupMetadataValue record to the partition.
    ConsumerGroupMember updatedMember = new ConsumerGroupMember.Builder(member)
        .maybeUpdateInstanceId(Optional.ofNullable(instanceId))
        .maybeUpdateRackId(Optional.ofNullable(rackId))
        .maybeUpdateRebalanceTimeoutMs(ofSentinel(rebalanceTimeoutMs))
        .maybeUpdateServerAssignorName(Optional.ofNullable(assignorName))
        .maybeUpdateSubscribedTopicNames(Optional.ofNullable(subscribedTopicNames))
        .setClientId(clientId)
        .setClientHost(clientHost)
        .setClassicMemberMetadata(null)
        .build();
    boolean bumpGroupEpoch = hasMemberSubscriptionChanged(
        groupId,
        member,
        updatedMember,
        records
    );
    int groupEpoch = group.groupEpoch();
    Map<String, TopicMetadata> subscriptionMetadata = group.subscriptionMetadata();
    Map<String, Integer> subscribedTopicNamesMap = group.subscribedTopicNames();
    SubscriptionType subscriptionType = group.subscriptionType();
    if (bumpGroupEpoch || group.hasMetadataExpired(currentTimeMs)) {
        // The subscription metadata is updated in two cases:
        // 1) The member has updated its subscriptions;
        // 2) The refresh deadline has been reached.
        subscribedTopicNamesMap = group.computeSubscribedTopicNames(member, updatedMember);
        subscriptionMetadata = group.computeSubscriptionMetadata(
            subscribedTopicNamesMap,
            metadataImage.topics(),
            metadataImage.cluster()
        );
        // Count the joining member if it is not yet part of the group.
        int numMembers = group.numMembers();
        if (!group.hasMember(updatedMember.memberId()) && !group.hasStaticMember(updatedMember.instanceId())) {
            numMembers++;
        }
        subscriptionType = ModernGroup.subscriptionType(
            subscribedTopicNamesMap,
            numMembers
        );
        if (!subscriptionMetadata.equals(group.subscriptionMetadata())) {
            log.info("[GroupId {}] Computed new subscription metadata: {}.",
                groupId, subscriptionMetadata);
            bumpGroupEpoch = true;
            records.add(newConsumerGroupSubscriptionMetadataRecord(groupId, subscriptionMetadata));
        }
        if (bumpGroupEpoch) {
            groupEpoch += 1;
            records.add(newConsumerGroupEpochRecord(groupId, groupEpoch));
            log.info("[GroupId {}] Bumped group epoch to {}.", groupId, groupEpoch);
            metrics.record(CONSUMER_GROUP_REBALANCES_SENSOR_NAME);
        }
        group.setMetadataRefreshDeadline(currentTimeMs + consumerGroupMetadataRefreshIntervalMs, groupEpoch);
    }
    // 2. Update the target assignment if the group epoch is larger than the target assignment epoch. The delta between
    // the existing and the new target assignment is persisted to the partition.
    final int targetAssignmentEpoch;
    final Assignment targetAssignment;
    if (groupEpoch > group.assignmentEpoch()) {
        targetAssignment = updateTargetAssignment(
            group,
            groupEpoch,
            member,
            updatedMember,
            subscriptionMetadata,
            subscriptionType,
            records
        );
        targetAssignmentEpoch = groupEpoch;
    } else {
        targetAssignmentEpoch = group.assignmentEpoch();
        targetAssignment = group.targetAssignment(updatedMember.memberId(), updatedMember.instanceId());
    }
    // 3. Reconcile the member's assignment with the target assignment if the member is not
    // fully reconciled yet.
    updatedMember = maybeReconcile(
        groupId,
        updatedMember,
        group::currentPartitionEpoch,
        targetAssignmentEpoch,
        targetAssignment,
        ownedTopicPartitions,
        records
    );
    scheduleConsumerGroupSessionTimeout(groupId, memberId);
    // Prepare the response.
    ConsumerGroupHeartbeatResponseData response = new ConsumerGroupHeartbeatResponseData()
        .setMemberId(updatedMember.memberId())
        .setMemberEpoch(updatedMember.memberEpoch())
        .setHeartbeatIntervalMs(consumerGroupHeartbeatIntervalMs(groupId));
    // The assignment is only provided in the following cases:
    // 1. The member sent a full request. It does so when joining or rejoining the group with zero
    //    as the member epoch; or on any errors (e.g. timeout). We use all the non-optional fields
    //    (rebalanceTimeoutMs, subscribedTopicNames and ownedTopicPartitions) to detect a full request
    //    as those must be set in a full request.
    // 2. The member's assignment has been updated.
    boolean isFullRequest = memberEpoch == 0 || (rebalanceTimeoutMs != -1 && subscribedTopicNames != null && ownedTopicPartitions != null);
    if (isFullRequest || hasAssignedPartitionsChanged(member, updatedMember)) {
        response.setAssignment(createConsumerGroupResponseAssignment(updatedMember));
    }
    return new CoordinatorResult<>(records, response);
}
|
@Test
public void testMemberIdGeneration() {
    // Joining with an empty member id must make the coordinator generate one.
    MockPartitionAssignor assignor = new MockPartitionAssignor("range");
    GroupMetadataManagerTestContext context = new GroupMetadataManagerTestContext.Builder()
        .withConsumerGroupAssignors(Collections.singletonList(assignor))
        .withMetadataImage(MetadataImage.EMPTY)
        .build();
    assignor.prepareGroupAssignment(new GroupAssignment(
        Collections.emptyMap()
    ));
    CoordinatorResult<ConsumerGroupHeartbeatResponseData, CoordinatorRecord> result = context.consumerGroupHeartbeat(
        new ConsumerGroupHeartbeatRequestData()
            .setGroupId("group-foo")
            .setMemberEpoch(0)
            .setServerAssignor("range")
            .setRebalanceTimeoutMs(5000)
            .setSubscribedTopicNames(Arrays.asList("foo", "bar"))
            .setTopicPartitions(Collections.emptyList()));
    // Verify that a member id was generated for the new member.
    String memberId = result.response().memberId();
    assertNotNull(memberId);
    assertNotEquals("", memberId);
    // The response should get a bumped epoch and should not
    // contain any assignment because we did not provide
    // topics metadata.
    assertEquals(
        new ConsumerGroupHeartbeatResponseData()
            .setMemberId(memberId)
            .setMemberEpoch(1)
            .setHeartbeatIntervalMs(5000)
            .setAssignment(new ConsumerGroupHeartbeatResponseData.Assignment()),
        result.response()
    );
}
|
/**
 * Converts a degree coordinate into its fixed-point int representation,
 * saturating "infinite" inputs at the extremes.
 *
 * <p>Note the negative saturation value is deliberately
 * {@code -Integer.MAX_VALUE} (not {@code Integer.MIN_VALUE}) so the two
 * extremes stay symmetric.
 */
public static int degreeToInt(double deg) {
    return deg >= Double.MAX_VALUE
            ? Integer.MAX_VALUE
            : deg <= -Double.MAX_VALUE
                    ? -Integer.MAX_VALUE
                    : (int) Math.round(deg * DEGREE_FACTOR);
}
|
// Round-trips a fixed-point coordinate through intToDegree/degreeToInt and
// checks the conversion is lossless at this precision.
@Test
void degreeToInt() {
    int storedInt = 444_494_395;
    double lat = Helper.intToDegree(storedInt);
    assertEquals(44.4494395, lat);
    assertEquals(storedInt, Helper.degreeToInt(lat));
}
|
/**
 * Marks the submission meter, then delegates to the wrapped executor with the
 * callable wrapped so running/completion/duration metrics are recorded.
 */
@Override
public <T> Future<T> submit(Callable<T> task) {
    submitted.mark();
    return delegate.submit(new InstrumentedCallable<>(task));
}
|
// Submits a runnable and asserts the metric counters both while the task is
// running (inside the lambda) and after it completes: submitted stays 1,
// running drops back to 0, completed/duration reach 1, and none of the
// schedule-related metrics are touched by a plain submit().
@Test
public void testSubmitRunnable() throws Exception {
    assertThat(submitted.getCount()).isZero();
    assertThat(running.getCount()).isZero();
    assertThat(completed.getCount()).isZero();
    assertThat(duration.getCount()).isZero();
    assertThat(scheduledOnce.getCount()).isZero();
    assertThat(scheduledRepetitively.getCount()).isZero();
    assertThat(scheduledOverrun.getCount()).isZero();
    assertThat(percentOfPeriod.getCount()).isZero();
    Future<?> theFuture = instrumentedScheduledExecutor.submit(() -> {
        assertThat(submitted.getCount()).isEqualTo(1);
        assertThat(running.getCount()).isEqualTo(1);
        assertThat(completed.getCount()).isZero();
        assertThat(duration.getCount()).isZero();
        assertThat(scheduledOnce.getCount()).isZero();
        assertThat(scheduledRepetitively.getCount()).isZero();
        assertThat(scheduledOverrun.getCount()).isZero();
        assertThat(percentOfPeriod.getCount()).isZero();
    });
    theFuture.get();
    assertThat(submitted.getCount()).isEqualTo(1);
    assertThat(running.getCount()).isZero();
    assertThat(completed.getCount()).isEqualTo(1);
    assertThat(duration.getCount()).isEqualTo(1);
    assertThat(duration.getSnapshot().size()).isEqualTo(1);
    assertThat(scheduledOnce.getCount()).isZero();
    assertThat(scheduledRepetitively.getCount()).isZero();
    assertThat(scheduledOverrun.getCount()).isZero();
    assertThat(percentOfPeriod.getCount()).isZero();
}
|
/**
 * Returns the set of values that occur more than once across the given
 * collections (duplicates within a single collection count too).
 *
 * @param collections the collections to inspect; must not be null
 * @param <V> element type
 * @return the (possibly empty) set of duplicated values
 */
@SafeVarargs // the varargs array is only iterated; nothing is stored into it
@Nonnull
public static <V> Set<V> findDuplicates(@Nonnull final Collection<V>... collections)
{
    final Set<V> merged = new HashSet<>();
    final Set<V> duplicates = new HashSet<>();
    for (Collection<V> collection : collections) {
        for (V o : collection) {
            // Set.add returns false when the element was already present,
            // which is exactly the "seen before" signal we need.
            if (!merged.add(o)) {
                duplicates.add(o);
            }
        }
    }
    return duplicates;
}
|
// Three disjoint input lists must yield an empty duplicate set.
@Test
public void testMultipleCollectionsWithoutDuplicates() throws Exception
{
    // Setup test fixture.
    final List<String> input1 = Arrays.asList("a", "b");
    final List<String> input2 = Arrays.asList("c", "d");
    final List<String> input3 = Arrays.asList("e", "f", "g");
    // Execute system under test.
    @SuppressWarnings("unchecked")
    final Set<String> result = CollectionUtils.findDuplicates(input1, input2, input3);
    // Verify results.
    assertTrue(result.isEmpty());
}
|
/**
 * Resolves the requested log file inside the configured log directory, or
 * returns {@code null} when no log directory is configured.
 */
@Override
protected File getFile(HandlerRequest<EmptyRequestBody> handlerRequest) {
    if (logDir == null) {
        return null;
    }
    // Wrapping the path parameter in another File and taking getName() strips
    // any directory components, so "../" segments cannot escape logDir --
    // we're solely interested in the filename.
    String filename =
        new File(handlerRequest.getPathParameter(LogFileNamePathParameter.class)).getName();
    return new File(logDir, filename);
}
|
// A path-traversal prefix ("foobar/../../") must be stripped so the request
// still resolves to the valid log file inside the log directory.
@Test
void testGetJobManagerCustomLogsValidFilenameWithLongInvalidPath() throws Exception {
    File actualFile =
            testInstance.getFile(
                    createHandlerRequest(String.format("foobar/../../%s", VALID_LOG_FILENAME)));
    assertThat(actualFile).isNotNull();
    String actualContent = String.join("", Files.readAllLines(actualFile.toPath()));
    assertThat(actualContent).isEqualTo(VALID_LOG_CONTENT);
}
|
/**
 * Assembles the OAuth2 token-introspection response map for an active access
 * token.
 *
 * <p>If the token carries UMA permissions they are reported instead of the
 * plain scope claim; otherwise "scope" is the intersection of the caller's
 * authorized scopes with the token's scopes. "sub" prefers the UserInfo
 * subject over the authentication name.
 */
@Override
public Map<String, Object> assembleFrom(OAuth2AccessTokenEntity accessToken, UserInfo userInfo, Set<String> authScopes) {
    Map<String, Object> result = newLinkedHashMap();
    OAuth2Authentication authentication = accessToken.getAuthenticationHolder().getAuthentication();
    result.put(ACTIVE, true);
    if (accessToken.getPermissions() != null && !accessToken.getPermissions().isEmpty()) {
        // UMA-style token: report resource-set permissions, not scopes.
        Set<Object> permissions = Sets.newHashSet();
        for (Permission perm : accessToken.getPermissions()) {
            Map<String, Object> o = newLinkedHashMap();
            o.put("resource_set_id", perm.getResourceSet().getId().toString());
            Set<String> scopes = Sets.newHashSet(perm.getScopes());
            o.put("scopes", scopes);
            permissions.add(o);
        }
        result.put("permissions", permissions);
    } else {
        // Only scopes the introspecting client is authorized for are exposed.
        Set<String> scopes = Sets.intersection(authScopes, accessToken.getScope());
        result.put(SCOPE, Joiner.on(SCOPE_SEPARATOR).join(scopes));
    }
    if (accessToken.getExpiration() != null) {
        try {
            // Both a formatted timestamp and the epoch-second "exp" claim.
            result.put(EXPIRES_AT, dateFormat.valueToString(accessToken.getExpiration()));
            result.put(EXP, accessToken.getExpiration().getTime() / 1000L);
        } catch (ParseException e) {
            // Best-effort: a formatting failure drops the claims but keeps
            // the rest of the introspection result usable.
            logger.error("Parse exception in token introspection", e);
        }
    }
    if (userInfo != null) {
        // if we have a UserInfo, use that for the subject
        result.put(SUB, userInfo.getSub());
    } else {
        // otherwise, use the authentication's username
        result.put(SUB, authentication.getName());
    }
    if(authentication.getUserAuthentication() != null) {
        result.put(USER_ID, authentication.getUserAuthentication().getName());
    }
    result.put(CLIENT_ID, authentication.getOAuth2Request().getClientId());
    result.put(TOKEN_TYPE, accessToken.getTokenType());
    return result;
}
|
// Without a UserInfo, "sub" must fall back to the authentication's name; the
// reported scope is the intersection of auth scopes and token scopes.
@Test
public void shouldAssembleExpectedResultForRefreshTokenWithoutUserInfo() throws ParseException {
    // given
    OAuth2RefreshTokenEntity refreshToken = refreshToken(new Date(123 * 1000L),
        oauth2AuthenticationWithUser(oauth2Request("clientId", scopes("foo", "bar")), "name"));
    Set<String> authScopes = scopes("foo", "bar", "baz");
    // when
    Map<String, Object> result = assembler.assembleFrom(refreshToken, null, authScopes);
    // then
    Map<String, Object> expected = new ImmutableMap.Builder<String, Object>()
        .put("sub", "name")
        .put("exp", 123L)
        .put("expires_at", dateFormat.valueToString(new Date(123 * 1000L)))
        .put("scope", "bar foo")
        .put("active", Boolean.TRUE)
        .put("user_id", "name")
        .put("client_id", "clientId")
        .build();
    assertThat(result, is(equalTo(expected)));
}
|
/**
 * Returns the children of the given ZooKeeper path, or {@code null} when the
 * node does not exist.
 *
 * @throws IllegalStateException wrapping any other Curator/ZooKeeper failure
 */
@Override
public List<String> getChildren(String path) {
    try {
        return client.getChildren().forPath(path);
    } catch (NoNodeException e) {
        // A missing node is an expected condition, signalled with null.
        return null;
    } catch (Exception e) {
        throw new IllegalStateException(e.getMessage(), e);
    }
}
|
// Creates a parent node with two children and expects getChildren to list
// exactly those two.
@Test
void testChildrenPath() {
    String path = "/dubbo/org.apache.dubbo.demo.DemoService/providers";
    curatorClient.create(path, false, true);
    curatorClient.create(path + "/provider1", false, true);
    curatorClient.create(path + "/provider2", false, true);
    List<String> children = curatorClient.getChildren(path);
    assertThat(children.size(), is(2));
}
|
/**
 * Builds a JAAS configuration entry of the form
 * {@code <moduleName> required key1="value1" key2="value2";}.
 *
 * @param moduleName login module name; must be non-empty and contain
 *                   neither {@code '='} nor {@code ';'}
 * @param options    option map; keys must not contain {@code '='} or
 *                   {@code ';'}, and neither keys nor values may be null
 * @return the formatted JAAS config string
 * @throws IllegalArgumentException if the module name or an option key is invalid
 * @throws NullPointerException     if an option key or value is null
 */
public static String jaasConfig(String moduleName, Map<String, String> options) {
    // Validate the module name up front so an invalid name is rejected even
    // when the options map is empty (previously this check only ran inside
    // the loop and was skipped entirely for empty options).
    if (moduleName.isEmpty() || moduleName.contains(";") || moduleName.contains("=")) {
        throw new IllegalArgumentException("module name must be not empty and must not contain '=' or ';'");
    }
    StringJoiner joiner = new StringJoiner(" ");
    for (Entry<String, String> entry : options.entrySet()) {
        String key = Objects.requireNonNull(entry.getKey());
        String value = Objects.requireNonNull(entry.getValue());
        if (key.contains("=") || key.contains(";")) {
            throw new IllegalArgumentException("Keys must not contain '=' or ';'");
        }
        joiner.add(key + "=\"" + value + "\"");
    }
    return moduleName + " required " + joiner + ";";
}
|
// A null option value must be rejected with a NullPointerException
// (Objects.requireNonNull in jaasConfig).
@Test
public void testConfigWithNullOptionValue() {
    String moduleName = "ExampleModule";
    Map<String, String> options = new HashMap<>();
    options.put("option1", null);
    assertThrows(NullPointerException.class, () -> AuthenticationUtils.jaasConfig(moduleName, options));
}
|
/**
 * Converts a PMML {@code MapValues} element into its KiePMML counterpart,
 * copying the optional data type, inline table, default/missing values and
 * field-column pairs. The generated instance gets a random UUID name.
 */
static KiePMMLMapValues getKiePMMLMapValues(final MapValues mapValues) {
    // DataType is optional in the PMML model.
    DATA_TYPE dataType = mapValues.getDataType() != null ? DATA_TYPE.byName(mapValues.getDataType().value()) : null;
    KiePMMLMapValues.Builder builder = KiePMMLMapValues.builder(UUID.randomUUID().toString(),
                                                                getKiePMMLExtensions(mapValues.getExtensions()),
                                                                mapValues.getOutputColumn())
            .withKiePMMLInlineTable(getKiePMMLInlineTable(mapValues.getInlineTable()))
            .withDataType(dataType);
    if (mapValues.getDefaultValue() != null) {
        builder = builder.withDefaultValue(mapValues.getDefaultValue().toString());
    }
    if (mapValues.getMapMissingTo() != null) {
        builder = builder.withMapMissingTo(mapValues.getMapMissingTo().toString());
    }
    if (mapValues.hasFieldColumnPairs()) {
        builder = builder.withKiePMMLFieldColumnPairs(getKiePMMLFieldColumnPairs(mapValues.getFieldColumnPairs()));
    }
    return builder.build();
}
|
// Converts a random MapValues and delegates field-by-field verification to
// the shared commonVerifyKiePMMLMapValues helper.
@Test
void getKiePMMLMapValues() {
    MapValues toConvert = getRandomMapValues();
    KiePMMLMapValues retrieved = KiePMMLMapValuesInstanceFactory.getKiePMMLMapValues(toConvert);
    commonVerifyKiePMMLMapValues(retrieved, toConvert);
}
|
/**
 * Loads a single workflow instance run.
 *
 * <p>When {@code runId} equals {@link Constants#LATEST_ONE} the latest-run
 * query is used and the run-id parameter is omitted; otherwise the exact run
 * is looked up.
 *
 * @throws MaestroNotFoundException if no matching row exists
 */
public WorkflowInstance getWorkflowInstanceRun(String workflowId, long instanceId, long runId) {
    WorkflowInstance ret =
        withMetricLogError(
            () ->
                withRetryableQuery(
                    runId == Constants.LATEST_ONE
                        ? GET_LATEST_WORKFLOW_INSTANCE_RUN_QUERY
                        : GET_WORKFLOW_INSTANCE_QUERY,
                    stmt -> {
                        // Bind parameters in query order; the run id is only
                        // present in the exact-run query.
                        int idx = 0;
                        stmt.setString(++idx, workflowId);
                        stmt.setLong(++idx, instanceId);
                        if (runId != Constants.LATEST_ONE) {
                            stmt.setLong(++idx, runId);
                        }
                    },
                    result -> {
                        if (result.next()) {
                            return workflowInstanceFromResult(result);
                        }
                        // No row: translated into MaestroNotFoundException below.
                        return null;
                    }),
            "getWorkflowInstance",
            "Failed to get the workflow instance for [{}][{}][{}]",
            workflowId,
            instanceId,
            runId);
    if (ret == null) {
        throw new MaestroNotFoundException(
            "workflow instance [%s][%s][%s] not found (either not created or deleted)",
            workflowId, instanceId, runId);
    }
    return ret;
}
|
// Fetches the fixture instance's exact run and compares it to the stored
// object (modify time is cleared because the DB sets it on write).
@Test
public void testGetWorkflowInstanceRun() {
    WorkflowInstance instanceRun =
        instanceDao.getWorkflowInstanceRun(
            wfi.getWorkflowId(), wfi.getWorkflowInstanceId(), wfi.getWorkflowRunId());
    instanceRun.setModifyTime(null);
    assertEquals(wfi, instanceRun);
}
|
/**
 * Initializes the metrics recorder for this task before delegating to the
 * parent's open logic.
 */
@Override
public void openExisting(final ProcessorContext context, final long streamTime) {
    metricsRecorder.init(ProcessorContextUtils.metricsImpl(context), context.taskId());
    super.openExisting(context, streamTime);
}
|
// Creates segment files on disk using the legacy date-based naming scheme,
// then opens the store and expects each file to have been renamed to the new
// "<storeName>.<segmentStartMs>" scheme.
@Test
public void shouldUpdateSegmentFileNameFromOldDateFormatToNewFormat() throws Exception {
    final long segmentInterval = 60_000L; // the old segment file's naming system maxes out at 1 minute granularity.
    segments = new TimestampedSegments(storeName, METRICS_SCOPE, NUM_SEGMENTS * segmentInterval, segmentInterval);
    final String storeDirectoryPath = stateDirectory.getAbsolutePath() + File.separator + storeName;
    final File storeDirectory = new File(storeDirectoryPath);
    //noinspection ResultOfMethodCallIgnored
    storeDirectory.mkdirs();
    // Legacy names were minute-granularity UTC timestamps.
    final SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMddHHmm");
    formatter.setTimeZone(new SimpleTimeZone(0, "UTC"));
    for (int segmentId = 0; segmentId < NUM_SEGMENTS; ++segmentId) {
        final File oldSegment = new File(storeDirectoryPath + File.separator + storeName + "-" + formatter.format(new Date(segmentId * segmentInterval)));
        //noinspection ResultOfMethodCallIgnored
        Files.createFile(oldSegment.toPath());
    }
    segments.openExisting(context, -1L);
    for (int segmentId = 0; segmentId < NUM_SEGMENTS; ++segmentId) {
        final String segmentName = storeName + "." + (long) segmentId * segmentInterval;
        final File newSegment = new File(storeDirectoryPath + File.separator + segmentName);
        assertTrue(Files.exists(newSegment.toPath()));
    }
}
|
/**
 * Dispatches the CONNECT message to every registered CONNECT interceptor,
 * asynchronously on the interceptor executor.
 */
@Override
public void notifyClientConnected(final MqttConnectMessage msg) {
    for (final InterceptHandler handler : this.handlers.get(InterceptConnectMessage.class)) {
        LOG.debug("Sending MQTT CONNECT message to interceptor. CId={}, interceptorId={}",
                  msg.payload().clientIdentifier(), handler.getID());
        executor.execute(() -> handler.onConnect(new InterceptConnectMessage(msg)));
    }
}
|
// Fires a CONNECT notification, waits for the async executor (interval()),
// then checks the shared counter. NOTE(review): the expected value 40
// presumably reflects the fixture's registered interceptors -- confirm
// against the test setup outside this view.
@Test
public void testNotifyClientConnected() throws Exception {
    interceptor.notifyClientConnected(MqttMessageBuilders.connect().build());
    interval();
    assertEquals(40, n.get());
}
|
/**
 * Creates an RSA crypto helper using the default RSA algorithm constant;
 * key material handling is delegated to the superclass.
 */
public RSA() {
    super(ALGORITHM_RSA);
}
|
// Round-trip smoke test: key accessors are non-null, and both directions
// (public-encrypt/private-decrypt and private-encrypt/public-decrypt)
// recover the original plaintext.
@Test
public void rsaTest() {
    final RSA rsa = new RSA();
    // Fetch the private and public keys, raw and Base64-encoded.
    assertNotNull(rsa.getPrivateKey());
    assertNotNull(rsa.getPrivateKeyBase64());
    assertNotNull(rsa.getPublicKey());
    // Fixed copy-paste bug: this previously asserted getPrivateKeyBase64() a
    // second time instead of checking the public key's Base64 form.
    assertNotNull(rsa.getPublicKeyBase64());
    // Encrypt with the public key, decrypt with the private key.
    final byte[] encrypt = rsa.encrypt(StrUtil.bytes("我是一段测试aaaa", CharsetUtil.CHARSET_UTF_8), KeyType.PublicKey);
    final byte[] decrypt = rsa.decrypt(encrypt, KeyType.PrivateKey);
    assertEquals("我是一段测试aaaa", StrUtil.str(decrypt, CharsetUtil.CHARSET_UTF_8));
    // Encrypt with the private key, decrypt with the public key.
    final byte[] encrypt2 = rsa.encrypt(StrUtil.bytes("我是一段测试aaaa", CharsetUtil.CHARSET_UTF_8), KeyType.PrivateKey);
    final byte[] decrypt2 = rsa.decrypt(encrypt2, KeyType.PublicKey);
    assertEquals("我是一段测试aaaa", StrUtil.str(decrypt2, CharsetUtil.CHARSET_UTF_8));
}
|
/**
 * ABI-encodes a dynamic array: the element count head word, followed by the
 * per-element offsets, followed by the encoded element payloads.
 */
static <T extends Type> String encodeDynamicArray(DynamicArray<T> value) {
    // Element count, encoded as a uint head word.
    final String head = encode(new Uint(BigInteger.valueOf(value.getValue().size())));
    // Offsets of each element, then the element payloads themselves.
    final String offsets = encodeArrayValuesOffsets(value);
    final String payload = encodeArrayValues(value);
    return head + offsets + payload;
}
|
// Pins the exact ABI encoding of a dynamic string array: count word, three
// element offsets, then each string's length word and right-padded bytes.
@Test
public void testDynamicStringsArray() {
    DynamicArray<Utf8String> array =
            new DynamicArray<>(
                    Utf8String.class,
                    new Utf8String("web3j"),
                    new Utf8String("arrays"),
                    new Utf8String("encoding"));
    assertEquals(
            ("0000000000000000000000000000000000000000000000000000000000000003"
                    + "0000000000000000000000000000000000000000000000000000000000000060"
                    + "00000000000000000000000000000000000000000000000000000000000000a0"
                    + "00000000000000000000000000000000000000000000000000000000000000e0"
                    + "0000000000000000000000000000000000000000000000000000000000000005"
                    + "776562336a000000000000000000000000000000000000000000000000000000"
                    + "0000000000000000000000000000000000000000000000000000000000000006"
                    + "6172726179730000000000000000000000000000000000000000000000000000"
                    + "0000000000000000000000000000000000000000000000000000000000000008"
                    + "656e636f64696e67000000000000000000000000000000000000000000000000"),
            TypeEncoder.encodeDynamicArray(array));
}
|
/**
 * Converts an array of Spark predicates into a single Iceberg expression by
 * AND-ing the individual conversions, starting from alwaysTrue().
 *
 * @throws IllegalArgumentException if any predicate cannot be converted
 */
public static Expression convert(Predicate[] predicates) {
    Expression expression = Expressions.alwaysTrue();
    for (Predicate predicate : predicates) {
        Expression converted = convert(predicate);
        Preconditions.checkArgument(
            converted != null, "Cannot convert Spark predicate to Iceberg expression: %s", predicate);
        expression = Expressions.and(expression, converted);
    }
    return expression;
}
|
// "=" against a null literal must be rejected (NPE with an explanatory
// message) regardless of operand order, while null-safe "<=>" converts to an
// isNull expression for both operand orders.
@Test
public void testEqualToNull() {
    String col = "col";
    NamedReference namedReference = FieldReference.apply(col);
    LiteralValue value = new LiteralValue(null, DataTypes.IntegerType);
    org.apache.spark.sql.connector.expressions.Expression[] attrAndValue =
        new org.apache.spark.sql.connector.expressions.Expression[] {namedReference, value};
    org.apache.spark.sql.connector.expressions.Expression[] valueAndAttr =
        new org.apache.spark.sql.connector.expressions.Expression[] {value, namedReference};
    Predicate eq1 = new Predicate("=", attrAndValue);
    assertThatThrownBy(() -> SparkV2Filters.convert(eq1))
        .isInstanceOf(NullPointerException.class)
        .hasMessageContaining("Expression is always false");
    Predicate eq2 = new Predicate("=", valueAndAttr);
    assertThatThrownBy(() -> SparkV2Filters.convert(eq2))
        .isInstanceOf(NullPointerException.class)
        .hasMessageContaining("Expression is always false");
    Predicate eqNullSafe1 = new Predicate("<=>", attrAndValue);
    Expression expectedEqNullSafe = Expressions.isNull(col);
    Expression actualEqNullSafe1 = SparkV2Filters.convert(eqNullSafe1);
    assertThat(actualEqNullSafe1.toString()).isEqualTo(expectedEqNullSafe.toString());
    Predicate eqNullSafe2 = new Predicate("<=>", valueAndAttr);
    Expression actualEqNullSafe2 = SparkV2Filters.convert(eqNullSafe2);
    assertThat(actualEqNullSafe2.toString()).isEqualTo(expectedEqNullSafe.toString());
}
|
/**
 * Compares two lists of rows for equality, requiring identical order
 * (delegates with {@code ignoreOrder = false}).
 */
public static boolean compareRows(List<Row> l1, List<Row> l2) {
    return compareRows(l1, l2, false);
}
|
// Exercises compareRows with both ordered and unordered comparison: equal
// lists match, reordered lists match only when order is ignored, and a list
// with a different multiset of rows never matches.
@Test
void testCompareRowsUnordered() {
    final List<Row> originalList =
            Arrays.asList(
                    Row.of("a", 12, false),
                    Row.of("b", 12, false),
                    Row.of("b", 12, false),
                    Row.of("b", 12, true));
    {
        final List<Row> list =
                Arrays.asList(
                        Row.of("a", 12, false),
                        Row.of("b", 12, false),
                        Row.of("b", 12, false),
                        Row.of("b", 12, true));
        assertThat(RowUtils.compareRows(originalList, list, false)).isTrue();
    }
    {
        final List<Row> list =
                Arrays.asList(
                        Row.of("a", 12, false),
                        Row.of("b", 12, false),
                        Row.of("b", 12, true), // diff order here
                        Row.of("b", 12, false));
        assertThat(RowUtils.compareRows(originalList, list, false)).isFalse();
    }
    {
        final List<Row> list =
                Arrays.asList(
                        Row.of("a", 12, false),
                        Row.of("b", 12, false),
                        Row.of("b", 12, true), // diff order here
                        Row.of("b", 12, false));
        assertThat(RowUtils.compareRows(originalList, list, true)).isTrue();
    }
    {
        final List<Row> list =
                Arrays.asList(
                        Row.of("a", 12, false),
                        Row.of("b", 12, false),
                        Row.of("b", 12, false),
                        Row.of("b", 12, true),
                        Row.of("b", 12, true)); // diff here
        assertThat(RowUtils.compareRows(originalList, list, true)).isFalse();
    }
}
|
/**
 * Builds the MySQL packet sequence for a query response: a field-count
 * packet, one column definition (protocol 4.1) per query header, and a
 * terminating EOF packet carrying the status flags.
 */
public static Collection<DatabasePacket> buildQueryResponsePackets(final QueryResponseHeader queryResponseHeader, final int characterSet, final int statusFlags) {
    Collection<DatabasePacket> result = new LinkedList<>();
    List<QueryHeader> queryHeaders = queryResponseHeader.getQueryHeaders();
    result.add(new MySQLFieldCountPacket(queryHeaders.size()));
    for (QueryHeader each : queryHeaders) {
        // The table name doubles as the "org table" field here.
        result.add(new MySQLColumnDefinition41Packet(characterSet, getColumnDefinitionFlag(each), each.getSchema(), each.getTable(), each.getTable(),
                each.getColumnLabel(), each.getColumnName(), each.getColumnLength(), MySQLBinaryColumnType.valueOfJDBCType(each.getColumnType()), each.getDecimals(), false));
    }
    result.add(new MySQLEofPacket(statusFlags));
    return result;
}
|
// Serializes the two column-definition packets and inspects byte 43, where
// the BINARY column flag (0x80) must be clear for VARCHAR and set for
// VARBINARY.
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
void assertBuildQueryResponsePacketsWithBinaryColumnType() {
    QueryHeader nonBinaryHeader = new QueryHeader("s", "t", "columnLabel1", "columnName1", 5, "VARCHAR", 1, 1, false, false, false, false);
    QueryHeader binaryHeader = new QueryHeader("s", "t", "columnLabel2", "columnName2", 8, "VARBINARY", 1, 1, false, false, false, false);
    List<QueryHeader> queryHeaders = Arrays.asList(nonBinaryHeader, binaryHeader);
    QueryResponseHeader queryResponseHeader = new QueryResponseHeader(queryHeaders);
    List<DatabasePacket> actual = new ArrayList(ResponsePacketBuilder.buildQueryResponsePackets(queryResponseHeader, 255, 0));
    // field count + 2 column definitions + EOF
    assertThat(actual.size(), is(4));
    byte[] actualNonBinaryData = new byte[48];
    actual.get(1).write(new MySQLPacketPayload(Unpooled.wrappedBuffer(actualNonBinaryData).writerIndex(0), StandardCharsets.UTF_8));
    assertThat(actualNonBinaryData[43] & 0x80, is(0));
    byte[] actualBinaryData = new byte[48];
    actual.get(2).write(new MySQLPacketPayload(Unpooled.wrappedBuffer(actualBinaryData).writerIndex(0), StandardCharsets.UTF_8));
    assertThat(actualBinaryData[43] & 0x80, is(0x80));
}
|
/**
 * Confirms a previously added key by atomically flipping its cached value
 * from {@code false} to {@code true}.
 *
 * @return {@code true} only if the key was present and still unconfirmed
 */
@Override
public boolean confirm(String key) {
    return cache.replace(key, false, true);
}
|
// Confirming an added key succeeds; confirming an unknown key fails.
@Test
void testConfirm() {
    // add first key and confirm
    assertTrue(repo.add(key01));
    assertTrue(repo.confirm(key01));
    // try to confirm a key that isn't there
    assertFalse(repo.confirm(key02));
}
|
/**
 * Writes the given form parts as a multipart body, generating a random
 * {@code "blob:<uuid>"} boundary to avoid collisions with part content.
 */
public static HttpBodyOutput write(List<? extends FormMultipart.FormPart> parts) {
    return write("blob:" + UUID.randomUUID(), parts);
}
|
// Renders five parts (plain fields, byte-array files and a streaming file)
// with a fixed boundary and compares the exact wire format, including CRLF
// separators and the closing "--boundary--" terminator.
@Test
void testMultipart() {
    var e = """
            --boundary\r
            content-disposition: form-data; name="field1"\r
            content-type: text/plain; charset=utf-8\r
            \r
            value1\r
            --boundary\r
            content-disposition: form-data; name="field2"; filename="example1.txt"\r
            content-type: text/plain\r
            \r
            value2\r
            --boundary\r
            content-disposition: form-data; name="field3"; filename="example2.txt"\r
            content-type: text/plain\r
            \r
            value3\r
            --boundary\r
            content-disposition: form-data; name="field4"; filename="example3.txt"\r
            content-type: text/plain\r
            \r
            some streaming data\r
            --boundary\r
            content-disposition: form-data; name="field5"\r
            content-type: text/plain; charset=utf-8\r
            \r
            value5\r
            --boundary--""";
    var b = MultipartWriter.write("boundary", List.of(
        FormMultipart.data("field1", "value1"),
        FormMultipart.file("field2", "example1.txt", "text/plain", "value2".getBytes(StandardCharsets.UTF_8)),
        FormMultipart.file("field3", "example2.txt", "text/plain", "value3".getBytes(StandardCharsets.UTF_8)),
        FormMultipart.file("field4", "example3.txt", "text/plain", JdkFlowAdapter.publisherToFlowPublisher(Flux.just("some ", "streaming ", "data").map(StandardCharsets.UTF_8::encode))),
        FormMultipart.data("field5", "value5")
    ));
    var s = FlowUtils.toByteArrayFuture(b)
        .thenApply(_b -> new String(_b, StandardCharsets.UTF_8))
        .join();
    assertThat(s).isEqualTo(e);
    assertThat(b.contentType()).isEqualTo("multipart/form-data;boundary=\"boundary\"");
}
|
/**
 * Synchronously computes the distance between two geo members in the given
 * unit by blocking on the async variant.
 */
@Override
public Double dist(V firstMember, V secondMember, GeoUnit geoUnit) {
    return get(distAsync(firstMember, secondMember, geoUnit));
}
|
// Adds two known cities and checks the Redis GEODIST result in meters.
@Test
public void testDist() {
    RGeo<String> geo = redisson.getGeo("test");
    geo.add(new GeoEntry(13.361389, 38.115556, "Palermo"), new GeoEntry(15.087269, 37.502669, "Catania"));
    assertThat(geo.dist("Palermo", "Catania", GeoUnit.METERS)).isEqualTo(166274.1516D);
}
|
/**
 * Installs/uninstalls domain intents for the given operation context:
 * updates resource tracking, then submits the domain operations with a
 * callback that reports success or failure back to the coordinator.
 */
@Override
public void apply(IntentOperationContext<DomainIntent> context) {
    Optional<IntentData> toUninstall = context.toUninstall();
    Optional<IntentData> toInstall = context.toInstall();
    List<DomainIntent> uninstallIntents = context.intentsToUninstall();
    List<DomainIntent> installIntents = context.intentsToInstall();
    // Nothing to do: report immediate success.
    if (!toInstall.isPresent() && !toUninstall.isPresent()) {
        intentInstallCoordinator.intentInstallSuccess(context);
        return;
    }
    if (toUninstall.isPresent()) {
        IntentData intentData = toUninstall.get();
        trackerService.removeTrackedResources(intentData.key(), intentData.intent().resources());
        // NOTE(review): this branch keys installables by intentData.intent().key()
        // while the install branch uses intentData.key() -- confirm the
        // asymmetry is intentional.
        uninstallIntents.forEach(installable ->
                                         trackerService.removeTrackedResources(intentData.intent().key(),
                                                                               installable.resources()));
    }
    if (toInstall.isPresent()) {
        IntentData intentData = toInstall.get();
        trackerService.addTrackedResources(intentData.key(), intentData.intent().resources());
        installIntents.forEach(installable ->
                                       trackerService.addTrackedResources(intentData.key(),
                                                                          installable.resources()));
    }
    // Generate domain Intent operations
    DomainIntentOperations.Builder builder = DomainIntentOperations.builder();
    DomainIntentOperationsContext domainOperationsContext;
    uninstallIntents.forEach(builder::remove);
    installIntents.forEach(builder::add);
    domainOperationsContext = new DomainIntentOperationsContext() {
        @Override
        public void onSuccess(DomainIntentOperations idops) {
            intentInstallCoordinator.intentInstallSuccess(context);
        }
        @Override
        public void onError(DomainIntentOperations idos) {
            intentInstallCoordinator.intentInstallFailed(context);
        }
    };
    log.debug("submitting domain intent {} -> {}",
              toUninstall.map(x -> x.key().toString()).orElse("<empty>"),
              toInstall.map(x -> x.key().toString()).orElse("<empty>"));
    // Submit domain Intent operations with domain context.
    // (The service method name "sumbit" is a typo in the service API itself.)
    domainIntentService.sumbit(builder.build(domainOperationsContext));
}
|
// Applies an uninstall-only operation context and expects the installer to
// report success to the coordinator with the same context instance.
@Test
public void testUninstall() {
    List<Intent> intentsToUninstall = createDomainIntents();
    List<Intent> intentsToInstall = Lists.newArrayList();
    IntentData toUninstall = new IntentData(createP2PIntent(),
                                            IntentState.WITHDRAWING,
                                            new WallClockTimestamp());
    IntentData toInstall = null;
    toUninstall = IntentData.compiled(toUninstall, intentsToUninstall);
    IntentOperationContext<DomainIntent> operationContext;
    IntentInstallationContext context = new IntentInstallationContext(toUninstall, toInstall);
    operationContext = new IntentOperationContext(intentsToUninstall, intentsToInstall, context);
    installer.apply(operationContext);
    assertEquals(intentInstallCoordinator.successContext, operationContext);
}
|
/**
 * Writes a log header followed by {@code captureLength} bytes copied from
 * {@code srcBuffer} into {@code encodingBuffer} at {@code offset}.
 *
 * @return total number of bytes written (header + captured payload)
 */
static int encode(
    final UnsafeBuffer encodingBuffer,
    final int offset,
    final int captureLength,
    final int length,
    final DirectBuffer srcBuffer,
    final int srcOffset)
{
    final int encodedLength = encodeLogHeader(encodingBuffer, offset, captureLength, length);
    encodingBuffer.putBytes(offset + encodedLength, srcBuffer, srcOffset, captureLength);
    return encodedLength + captureLength;
}
|
// Fixed method-name typo: "Bugger" -> "Bigger". When the source exceeds the
// max capture size, only MAX_CAPTURE_LENGTH bytes are copied, the header
// records both capture length and original length, and the event totals
// MAX_EVENT_LENGTH bytes.
@Test
void encodeBufferBiggerThanMaxCaptureSize()
{
    final UnsafeBuffer srcBuffer = new UnsafeBuffer(new byte[MAX_EVENT_LENGTH * 2]);
    final int offset = 256;
    final int srcOffset = 20;
    final int length = MAX_EVENT_LENGTH + 1000;
    srcBuffer.setMemory(srcOffset, length, (byte)-5);
    final int encodedLength = encode(buffer, offset, MAX_CAPTURE_LENGTH, length, srcBuffer, srcOffset);
    assertEquals(MAX_EVENT_LENGTH, encodedLength);
    assertEquals(MAX_CAPTURE_LENGTH, buffer.getInt(offset, LITTLE_ENDIAN));
    assertEquals(length, buffer.getInt(offset + SIZE_OF_INT, LITTLE_ENDIAN));
    // Timestamp field of the header must have been populated.
    assertNotEquals(0, buffer.getLong(offset + SIZE_OF_INT * 2, LITTLE_ENDIAN));
    for (int i = 0; i < MAX_CAPTURE_LENGTH; i++)
    {
        assertEquals(-5, buffer.getByte(offset + LOG_HEADER_LENGTH + i));
    }
}
|
/**
 * Uses the schema's full string representation as its fingerprint.
 */
@Override
public String getFingerprint(Schema schema) {
    return schema.toString();
}
|
// Two distinct schemas must produce distinct fingerprints.
@Test
public void testDifferentFPs() {
    String fp1 = reg.getFingerprint(schema1);
    String fp2 = reg.getFingerprint(schema2);
    assertNotEquals(fp1, fp2);
}
|
/**
 * Resolves (and caches) the fallback methods matching the given name and the
 * original method's signature on the target class.
 *
 * @throws NoSuchMethodException if no compatible fallback method exists,
 *         with a message describing the expected signature
 */
public static FallbackMethod create(String fallbackMethodName, Method originalMethod,
    Object[] args, Object original, Object proxy) throws NoSuchMethodException {
    MethodMeta methodMeta = new MethodMeta(
        fallbackMethodName,
        originalMethod.getParameterTypes(),
        originalMethod.getReturnType(),
        original.getClass());
    // Resolution is expensive; cache per (name, signature, target class).
    Map<Class<?>, Method> methods = FALLBACK_METHODS_CACHE
        .computeIfAbsent(methodMeta, FallbackMethod::extractMethods);
    if (!methods.isEmpty()) {
        return new FallbackMethod(methods, originalMethod.getReturnType(), args, original, proxy);
    } else {
        throw new NoSuchMethodException(String.format("%s %s.%s(%s,%s)",
            methodMeta.returnType, methodMeta.targetClass, methodMeta.fallbackMethodName,
            StringUtils.arrayToDelimitedString(methodMeta.params, ","), Throwable.class));
    }
}
|
// A fallback whose return type mismatches the original method must be
// rejected with a NoSuchMethodException describing the expected signature.
@Test
public void shouldFailIf2FallBackMethodsHandleSameException() throws Throwable {
    FallbackMethodTest target = new FallbackMethodTest();
    Method testMethod = target.getClass().getMethod("testMethod", String.class);
    assertThatThrownBy(() -> FallbackMethod
        .create("returnMismatchFallback", testMethod, new Object[]{"test"}, target, target))
        .isInstanceOf(NoSuchMethodException.class)
        .hasMessage(
            "class java.lang.String class io.github.resilience4j.spring6.fallback.FallbackMethodTest.returnMismatchFallback(class java.lang.String,class java.lang.Throwable)");
}
|
/**
 * Two-argument sublist variant: delegates with a null length, i.e. takes
 * everything from {@code start} to the end of the list.
 */
public FEELFnResult<List> invoke(@ParameterName("list") List list, @ParameterName("start position") BigDecimal start) {
    return invoke( list, start, null );
}
|
// A requested length reaching past the end of the list must produce an
// InvalidParametersEvent, for both positive and negative start positions.
@Test
void invokeLengthOutOfListBounds() {
    FunctionTestUtil.assertResultError(sublistFunction.invoke(Arrays.asList(1, 2), BigDecimal.ONE,
                                                              BigDecimal.valueOf(3)), InvalidParametersEvent.class);
    FunctionTestUtil.assertResultError(sublistFunction.invoke(Arrays.asList(1, 2), BigDecimal.valueOf(-1),
                                                              BigDecimal.valueOf(3)), InvalidParametersEvent.class);
}
|
/**
 * Processes the given resource class, delegating with a null second argument
 * (presumably "no parent resource" -- see the two-argument overload).
 */
public static ResourceModel processResource(final Class<?> resourceClass)
{
    return processResource(resourceClass, null);
}
|
// A @BatchFinder with an empty batchParam must be rejected during resource
// annotation processing with a ResourceConfigException.
@Test(expectedExceptions = ResourceConfigException.class)
public void failsOnEmptyBatchFinderMethodBatchParamParameter() {
    @RestLiCollection(name = "batchFinderWithEmptyBatchParam")
    class LocalClass extends CollectionResourceTemplate<Long, EmptyRecord>
    {
        @BatchFinder(value = "batchFinderWithEmptyBatchParam", batchParam = "")
        public List<EmptyRecord> batchFinderWithEmptyBatchParam(@QueryParam("criteria") EmptyRecord[] criteria) {
            return Collections.emptyList();
        }
    }
    RestLiAnnotationReader.processResource(LocalClass.class);
    Assert.fail("#validateBatchFinderMethod should fail throwing a ResourceConfigException");
}
|
/**
 * Soft-deletes a dictionary type after validating that it exists and that no
 * dictionary data still references it.
 */
@Override
public void deleteDictType(Long id) {
    // Validate that the dictionary type exists.
    DictTypeDO dictType = validateDictTypeExists(id);
    // Reject deletion while dictionary data still references this type.
    if (dictDataService.getDictDataCountByDictType(dictType.getType()) > 0) {
        throw exception(DICT_TYPE_HAS_CHILDREN);
    }
    // Soft-delete the dictionary type, stamping the deletion time.
    dictTypeMapper.updateToDelete(id, LocalDateTime.now());
}
|
// Deleting a dictionary type that still has dictionary data must raise
// DICT_TYPE_HAS_CHILDREN.
@Test
public void testDeleteDictType_hasChildren() {
    // Mock data: insert an existing dictionary type row.
    DictTypeDO dbDictType = randomDictTypeDO();
    dictTypeMapper.insert(dbDictType);
    // Prepare the parameter.
    Long id = dbDictType.getId();
    // Mock the data count so the type appears to have children.
    when(dictDataService.getDictDataCountByDictType(eq(dbDictType.getType()))).thenReturn(1L);
    // Invoke and assert the expected service exception.
    assertServiceException(() -> dictTypeService.deleteDictType(id), DICT_TYPE_HAS_CHILDREN);
}
|
/**
 * Validates that the NATIVE Near Cache in-memory format is only used when
 * native memory is enabled; non-enterprise deployments and non-NATIVE
 * formats are always accepted.
 */
static void checkNearCacheNativeMemoryConfig(InMemoryFormat inMemoryFormat, NativeMemoryConfig nativeMemoryConfig,
                                             boolean isEnterprise) {
    // Native memory is only required for NATIVE format on enterprise.
    boolean requiresNativeMemory = isEnterprise && inMemoryFormat == NATIVE;
    boolean nativeMemoryEnabled = nativeMemoryConfig != null && nativeMemoryConfig.isEnabled();
    if (requiresNativeMemory && !nativeMemoryEnabled) {
        throw new InvalidConfigurationException("Enable native memory config to use NATIVE in-memory-format for Near Cache");
    }
}
|
// NATIVE format on enterprise with native memory enabled is valid: no throw.
@Test
public void checkNearCacheNativeMemoryConfig_shouldNotThrowExceptionWithNativeMemoryConfig_NATIVE_onEE() {
    NativeMemoryConfig nativeMemoryConfig = new NativeMemoryConfig()
        .setEnabled(true);
    checkNearCacheNativeMemoryConfig(NATIVE, nativeMemoryConfig, true);
}
|
/**
 * Registers a glue class with this factory.
 *
 * <p>Rejects classes carrying Spring component annotations, and records (at
 * most) one class annotated with {@code @CucumberContextConfiguration}.
 *
 * @return always {@code true}
 */
@Override
public boolean addClass(final Class<?> stepClass) {
    // Idempotent: re-adding a known class skips all validation.
    if (stepClasses.contains(stepClass)) {
        return true;
    }
    checkNoComponentAnnotations(stepClass);
    if (hasCucumberContextConfiguration(stepClass)) {
        checkOnlyOneClassHasCucumberContextConfiguration(stepClass);
        withCucumberContextConfiguration = stepClass;
    }
    stepClasses.add(stepClass);
    return true;
}
|
// Glue classes (meta-)annotated with a Spring stereotype (@Controller here)
// must be rejected with an explanatory CucumberBackendException.
@Test
void shouldFailIfClassWithAnnotationAnnotatedWithSpringComponentAnnotationsIsFound() {
    final ObjectFactory factory = new SpringFactory();
    Executable testMethod = () -> factory.addClass(WithControllerAnnotation.class);
    CucumberBackendException actualThrown = assertThrows(CucumberBackendException.class, testMethod);
    assertThat(actualThrown.getMessage(), is(equalTo(
        "Glue class io.cucumber.spring.componentannotation.WithControllerAnnotation was (meta-)annotated with @Component; marking it as a candidate for auto-detection by Spring. Glue classes are detected and registered by Cucumber. Auto-detection of glue classes by spring may lead to duplicate bean definitions. Please remove the @Component (meta-)annotation")));
}
|
/**
 * Returns the boundary of the n-th rollover period relative to {@code now}:
 * the time is first rounded down to the current period's start, then advanced
 * by {@code numPeriods} periods (which may be negative).
 *
 * @throws IllegalStateException if the periodicity type is unknown
 */
public Date getEndOfNextNthPeriod(Date now, int numPeriods) {
    // This object is itself a Calendar; mutate it in place as before.
    Calendar cal = this;
    cal.setTime(now);
    roundDownTime(cal, this.datePattern);
    // Select the calendar field matching the periodicity, then advance once.
    final int field;
    switch (this.periodicityType) {
    case TOP_OF_MILLISECOND:
        field = Calendar.MILLISECOND;
        break;
    case TOP_OF_SECOND:
        field = Calendar.SECOND;
        break;
    case TOP_OF_MINUTE:
        field = Calendar.MINUTE;
        break;
    case TOP_OF_HOUR:
        field = Calendar.HOUR_OF_DAY;
        break;
    case TOP_OF_DAY:
        field = Calendar.DATE;
        break;
    case TOP_OF_WEEK:
        // Snap to the locale's first day of week before advancing.
        cal.set(Calendar.DAY_OF_WEEK, cal.getFirstDayOfWeek());
        field = Calendar.WEEK_OF_YEAR;
        break;
    case TOP_OF_MONTH:
        field = Calendar.MONTH;
        break;
    default:
        throw new IllegalStateException("Unknown periodicity type.");
    }
    cal.add(field, numPeriods);
    return cal.getTime();
}
|
// Walks p from +20 down to -99 periods around a fixed instant and checks that
// getEndOfNextNthPeriod lands exactly p whole days from the start of "now"'s day.
@Test
public void testVaryingNumberOfDailyPeriods() {
    RollingCalendar rc = new RollingCalendar("yyyy-MM-dd");
    final long MILLIS_IN_DAY = 24 * 3600 * 1000;
    for (int p = 20; p > -100; p--) {
        long now = 1223325293589L; // Mon Oct 06 22:34:53 CEST 2008
        Date nowDate = new Date(now);
        Date result = rc.getEndOfNextNthPeriod(nowDate, p);
        // NOTE(review): rawOffset + DSTSavings assumes DST is in effect at the
        // fixed instant (true for CEST in October) -- confirm this holds for
        // every time zone the suite runs under.
        long offset = rc.getTimeZone().getRawOffset() + rc.getTimeZone().getDSTSavings();
        long origin = now - ((now + offset) % (MILLIS_IN_DAY));
        long expected = origin + p * MILLIS_IN_DAY;
        assertEquals("p=" + p, expected, result.getTime());
    }
}
|
/**
 * Resolves the generated implementation class for the given mapper type,
 * searching the class loaders derived from the mapper's own loader.
 * A missing implementation surfaces as an unchecked exception.
 */
public static <T> Class<? extends T> getMapperClass(Class<T> clazz) {
    try {
        return getMapperClass( clazz, collectClassLoaders( clazz.getClassLoader() ) );
    }
    catch ( ClassNotFoundException e ) {
        throw new RuntimeException( e );
    }
}
|
// getMapperClass must resolve the generated implementation type, not hand back
// the mapper interface itself.
@Test
public void shouldReturnImplementationClass() {
    Class<? extends Foo> mapperClass = Mappers.getMapperClass( Foo.class );
    assertThat( mapperClass ).isNotNull();
    assertThat( mapperClass ).isNotExactlyInstanceOf( Foo.class );
}
|
/**
 * Returns the id of the newest journal entry, or -1 when no environment or
 * no journal database exists.
 */
@Override
public long getMaxJournalId() {
    long ret = -1;
    if (bdbEnvironment == null) {
        return ret;
    }
    List<Long> dbNames = bdbEnvironment.getDatabaseNamesWithPrefix(prefix);
    if (dbNames == null || dbNames.isEmpty()) {
        return ret;
    }
    // Database names are numeric first-entry ids; the last element is taken as
    // the newest database.
    // NOTE(review): assumes getDatabaseNamesWithPrefix returns names sorted
    // ascending -- confirm against BDBEnvironment.
    int index = dbNames.size() - 1;
    String dbName = getFullDatabaseName(dbNames.get(index));
    long dbNumberName = dbNames.get(index);
    // open database temporarily and close after count
    try (Database database = bdbEnvironment.openDatabase(dbName).getDb()) {
        // Max id = first id of this db + number of entries - 1.
        ret = dbNumberName + database.count() - 1;
    }
    return ret;
}
|
// Covers getMaxJournalId against a mocked BDB environment:
// (1) null database list -> -1, (2) empty list -> -1,
// (3) dbs named 3/23/45 with 10 entries in db 45 -> 45 + 10 - 1 = 54.
@Test
public void testGetMaxJournalId(@Mocked CloseSafeDatabase closeSafeDatabase,
                                @Mocked BDBEnvironment environment,
                                @Mocked Database database) throws Exception {
    BDBJEJournal journal = new BDBJEJournal(environment);
    // failed to get database names; return -1
    new Expectations(environment) {
        {
            environment.getDatabaseNamesWithPrefix("");
            times = 1;
            result = null;
        }
    };
    Assert.assertEquals(-1, journal.getMaxJournalId());
    // no databases; return -1
    new Expectations(environment) {
        {
            environment.getDatabaseNamesWithPrefix("");
            times = 1;
            result = new ArrayList<>();
        }
    };
    Assert.assertEquals(-1, journal.getMaxJournalId());
    // db 3, 23, 45; open 45 get its size 10
    new Expectations(environment) {
        {
            environment.getDatabaseNamesWithPrefix("");
            times = 1;
            result = Arrays.asList(3L, 23L, 45L);
            environment.openDatabase("45");
            times = 1;
            result = closeSafeDatabase;
        }
    };
    new Expectations(closeSafeDatabase) {
        {
            closeSafeDatabase.getDb();
            times = 1;
            result = database;
        }
    };
    // The db must be closed exactly once by the try-with-resources block.
    new Expectations(database) {
        {
            database.count();
            times = 1;
            result = 10;
            database.close();
            times = 1;
        }
    };
    Assert.assertEquals(54, journal.getMaxJournalId());
    journal.close(); // no db will closed
}
|
/**
 * Parses an XEP-0082 date/time string (with or without fractional seconds)
 * into a {@link Date}.
 *
 * @param dateString the timestamp to parse; null or empty yields null
 * @return the parsed date, or null for null/empty input
 * @throws ParseException if the string matches neither supported XEP-0082 form
 */
public Date parseString(String dateString) throws ParseException {
    if (dateString == null || dateString.isEmpty()) {
        return null;
    }
    Matcher xep82WoMillisMatcher = xep80DateTimeWoMillisPattern.matcher(dateString);
    Matcher xep82Matcher = xep80DateTimePattern.matcher(dateString);
    if (xep82WoMillisMatcher.matches() || xep82Matcher.matches()) {
        String rfc822Date;
        // Convert the ISO 8601 time zone string to a RFC822 compatible format
        // since SimpleDateFormat supports ISO8601 only with Java7 or higher
        if (dateString.charAt(dateString.length() - 1) == 'Z') {
            rfc822Date = dateString.replace("Z", "+0000");
        } else {
            // If the time zone wasn't specified with 'Z', then it's in
            // ISO8601 format (i.e. '(+|-)HH:mm')
            // RFC822 needs a similar format just without the colon (i.e.
            // '(+|-)HHmm)'), so remove it
            int lastColon = dateString.lastIndexOf(':');
            rfc822Date = dateString.substring(0, lastColon) + dateString.substring(lastColon + 1);
        }
        if (xep82WoMillisMatcher.matches()) {
            // SimpleDateFormat is not thread-safe; serialize access to the
            // shared formatter instances.
            synchronized (dateTimeFormatWoMillies) {
                return dateTimeFormatWoMillies.parse(rfc822Date);
            }
        } else {
            // OF-898: Replace any number of millisecond-characters with at most three of them.
            rfc822Date = rfc822Date.replaceAll("(\\.[0-9]{3})[0-9]*", "$1");
            synchronized (dateTimeFormat) {
                return dateTimeFormat.parse(rfc822Date);
            }
        }
    }
    throw new ParseException("Date String could not be parsed: \"" + dateString + "\"", 0);
}
|
// A XEP-0082 timestamp without second fractions must parse to the exact epoch value.
@Test
public void testFormatNoSecondFractions() throws Exception
{
    // Setup fixture
    final String testValue = "2015-03-19T22:54:15+00:00"; // Thu, 19 Mar 2015 22:54:15 GMT
    // Execute system under test
    final Date result = xmppDateTimeFormat.parseString(testValue);
    // Verify results
    long expected = 1426805655000L; // Epoch value of Thu, 19 Mar 2015 22:54:15 GMT
    assertEquals( expected, result.getTime() );
}
|
/**
 * Returns the shared Gson instance, lazily created via the
 * initialization-on-demand holder idiom (thread-safe without locking).
 */
public static Gson instance() {
    return SingletonHolder.INSTANCE;
}
|
// Charsets must serialize to their canonical names, regardless of the alias
// used to obtain them (e.g. "ascii" -> "US-ASCII").
@Test
void serializesCharsets() {
    assertThatJson(Serialization.instance().toJson(StandardCharsets.UTF_8)).isEqualTo("UTF-8");
    //noinspection CharsetObjectCanBeUsed
    assertThatJson(Serialization.instance().toJson(Charset.forName("ascii"))).isEqualTo("US-ASCII");
}
|
/**
 * Runs the state-applied job filters (default filters plus any configured on
 * the job itself) for the given job.
 */
public void runOnStateAppliedFilters(Job job) {
    new JobPerformingFilters(job, jobDefaultFilters).runOnStateAppliedFilters();
}
|
// A job that transitions state must have its state change observed by the
// state-applied filters.
@Test
void jobFiltersAreExecutedIfJobHasStateChange() {
    // GIVEN
    Job aJob = anEnqueuedJob().build();
    aJob.getStateChangesForJobFilters(); // clear
    // WHEN
    aJob.startProcessingOn(backgroundJobServer);
    jobFilterUtils.runOnStateAppliedFilters(List.of(aJob));
    // THEN
    assertThat(logAllStateChangesFilter.getStateChanges(aJob)).containsExactly("ENQUEUED->PROCESSING");
}
|
/**
 * Converts the attribute map of a child EIP into per-attribute statistics on
 * the given statistic holder. EipAttribute entries are indexed by their own
 * index; plain String attributes are stored under index 0 with the raw value
 * kept in a "value" property. Other value types are ignored.
 */
protected void generateChildEipStatistics(ChildEip childEip, ChildEipStatistic childEipStatistic) {
    childEip.getEipAttributeMap().forEach((attributeId, attributeValue) -> {
        if (attributeValue instanceof EipAttribute) {
            EipAttribute attribute = (EipAttribute) attributeValue;
            EipStatistic statistic = new EipStatistic();
            statistic.setId(attributeId);
            // An attribute counts as tested once it has processed any exchange.
            statistic.setTested(attribute.getExchangesTotal() > 0);
            statistic.setTotalProcessingTime(attribute.getTotalProcessingTime());
            statistic.setProperties(attribute.getProperties());
            childEipStatistic.getEipStatisticMap().put(attribute.getIndex(), statistic);
        } else if (attributeValue instanceof String) {
            Properties valueHolder = new Properties();
            valueHolder.put("value", attributeValue);
            EipStatistic statistic = new EipStatistic();
            statistic.setId(attributeId);
            statistic.setProperties(valueHolder);
            childEipStatistic.getEipStatisticMap().put(0, statistic);
        }
    });
}
|
// TODO(review): empty test body -- this passes vacuously and provides no
// coverage for generateChildEipStatistics. Implement assertions for both the
// EipAttribute and the String branch, or remove the stub.
@Test
public void testGenerateChildEipStatistics() {
}
|
/** Signals the loop to halt by setting the game status to STOPPED. */
public void stop() {
    status = GameStatus.STOPPED;
}
|
// stop() must flip the loop status to STOPPED.
@Test
void testStop() {
    gameLoop.stop();
    Assertions.assertEquals(GameStatus.STOPPED, gameLoop.status);
}
|
/**
 * Builds a function that, given a result object, navigates to the property
 * addressed by the expected result's fact mapping and wraps the comparison of
 * actual versus expected value into a {@link ValueWrapper}.
 *
 * @param expressionEvaluator   evaluator used to compare actual and expected values
 * @param expectedResult        the expected value plus its fact/expression identifiers
 * @param scesimModelDescriptor descriptor used to resolve the fact mapping
 * @return extractor producing a ValueWrapper for any checked object
 */
protected Function<Object, ValueWrapper> createExtractorFunction(ExpressionEvaluator expressionEvaluator,
                                                                 FactMappingValue expectedResult,
                                                                 ScesimModelDescriptor scesimModelDescriptor) {
    return objectToCheck -> {
        ExpressionIdentifier expressionIdentifier = expectedResult.getExpressionIdentifier();
        // A missing mapping means the model and the expected result are out of sync.
        FactMapping factMapping = scesimModelDescriptor.getFactMapping(expectedResult.getFactIdentifier(), expressionIdentifier)
                .orElseThrow(() -> new IllegalStateException("Wrong expression, this should not happen"));
        // Property path steps (e.g. ["firstName"]) without the leading class element.
        List<String> pathToValue = factMapping.getExpressionElementsWithoutClass().stream().map(ExpressionElement::getStep).collect(toList());
        ScenarioBeanWrapper<?> scenarioBeanWrapper = ScenarioBeanUtil.navigateToObject(objectToCheck, pathToValue, false);
        Object resultValue = scenarioBeanWrapper.getBean();
        Object expectedResultValue = expectedResult.getRawValue();
        return getResultWrapper(factMapping.getClassName(),
                                expectedResult,
                                expressionEvaluator,
                                expectedResultValue,
                                resultValue,
                                scenarioBeanWrapper.getBeanClass());
    };
}
|
// The extractor must accept a matching property value, reject a differing one,
// and treat a null expected value against a fresh bean (null field) as valid.
@Test
public void createExtractorFunction() {
    String personName = "Test";
    FactMappingValue factMappingValue = new FactMappingValue(personFactIdentifier, firstNameGivenExpressionIdentifier, personName);
    Function<Object, ValueWrapper> extractorFunction = runnerHelper.createExtractorFunction(expressionEvaluator, factMappingValue, simulation.getScesimModelDescriptor());
    Person person = new Person();
    person.setFirstName(personName);
    assertThat(extractorFunction.apply(person).isValid()).isTrue();
    person.setFirstName("OtherString");
    assertThat(extractorFunction.apply(person).isValid()).isFalse();
    Function<Object, ValueWrapper> extractorFunction1 = runnerHelper.createExtractorFunction(expressionEvaluator,
                                                                                            new FactMappingValue(personFactIdentifier,
                                                                                                                 firstNameGivenExpressionIdentifier,
                                                                                                                 null),
                                                                                            simulation.getScesimModelDescriptor());
    ValueWrapper nullValue = extractorFunction1.apply(new Person());
    assertThat(nullValue.isValid()).isTrue();
    assertThat(nullValue.getValue()).isNull();
}
|
/**
 * Lists the names of all services known to Polaris discovery.
 *
 * @return service names, or an empty list when discovery reports none
 * @throws PolarisException if the discovery lookup fails
 */
public List<String> getServices() throws PolarisException {
    // Fetch the response once: the original code called
    // polarisDiscoveryHandler.getServices() twice, issuing two lookups that
    // could observe different snapshots between the emptiness check and the
    // mapping step.
    List<ServiceInfo> serviceInfos = polarisDiscoveryHandler.getServices().getServices();
    if (CollectionUtils.isEmpty(serviceInfos)) {
        return Collections.emptyList();
    }
    return serviceInfos.stream()
            .map(ServiceInfo::getService).collect(Collectors.toList());
}
|
// With one mocked service registered, getServices must surface exactly one name.
@Test
public void testGetServices() throws PolarisException {
    ServiceInfo mockServiceInfo = mock(ServiceInfo.class);
    when(mockServiceInfo.getService()).thenReturn(SERVICE_PROVIDER);
    ServicesResponse mockServicesResponse = mock(ServicesResponse.class);
    when(mockServicesResponse.getServices()).thenReturn(singletonList(mockServiceInfo));
    when(polarisDiscoveryHandler.getServices()).thenReturn(mockServicesResponse);
    List<String> services = polarisServiceDiscovery.getServices();
    assertThat(services).size().isEqualTo(1);
}
|
/**
 * Returns the effective wanted state for this node. A config-retired node is
 * reported as RETIRED unless an operator has explicitly set a state below
 * RETIRED (e.g. Down or Maintenance), which takes precedence.
 */
public NodeState getWantedState() {
    NodeState wanted = wantedState;
    if (configuredRetired) {
        NodeState retiredState = new NodeState(node.getType(), State.RETIRED);
        // Don't let configure retired state override explicitly set Down and Maintenance.
        if (wanted.above(retiredState)) {
            return retiredState;
        }
    }
    return wanted;
}
|
// An explicitly proposed MAINTENANCE state must win over config-driven retirement.
@Test
void maintenance_wanted_state_overrides_config_retired_state() {
    ClusterFixture fixture = ClusterFixture.forFlatCluster(3)
            .markNodeAsConfigRetired(1)
            .proposeStorageNodeWantedState(1, State.MAINTENANCE);
    NodeInfo nodeInfo = fixture.cluster.getNodeInfo(new Node(NodeType.STORAGE, 1));
    assertEquals(State.MAINTENANCE, nodeInfo.getWantedState().getState());
}
|
/**
 * Loads all server plugins in three phases -- bundled (shipped with the
 * product), external (operator-installed) and downloaded (pending updates) --
 * resolving key conflicts, moving downloads into place, and finally dropping
 * plugins whose requirements are not met.
 *
 * @return the set of loadable plugins
 */
public Collection<ServerPluginInfo> loadPlugins() {
    // Phase 1: bundled plugins. Two jars with the same key in the bundled
    // directory is a packaging error.
    Map<String, ServerPluginInfo> bundledPluginsByKey = new LinkedHashMap<>();
    for (ServerPluginInfo bundled : getBundledPluginsMetadata()) {
        failIfContains(bundledPluginsByKey, bundled,
            plugin -> MessageException.of(format("Found two versions of the plugin %s [%s] in the directory %s. Please remove one of %s or %s.",
                bundled.getName(), bundled.getKey(), getRelativeDir(fs.getInstalledBundledPluginsDir()), bundled.getNonNullJarFile().getName(), plugin.getNonNullJarFile().getName())));
        bundledPluginsByKey.put(bundled.getKey(), bundled);
    }
    // Phase 2: external plugins. They must clash neither with a bundled
    // plugin's key nor with each other.
    Map<String, ServerPluginInfo> externalPluginsByKey = new LinkedHashMap<>();
    for (ServerPluginInfo external : getExternalPluginsMetadata()) {
        failIfContains(bundledPluginsByKey, external,
            plugin -> MessageException.of(format("Found a plugin '%s' in the directory '%s' with the same key [%s] as a built-in feature '%s'. Please remove '%s'.",
                external.getName(), getRelativeDir(fs.getInstalledExternalPluginsDir()), external.getKey(), plugin.getName(),
                new File(getRelativeDir(fs.getInstalledExternalPluginsDir()), external.getNonNullJarFile().getName()))));
        failIfContains(externalPluginsByKey, external,
            plugin -> MessageException.of(format("Found two versions of the plugin '%s' [%s] in the directory '%s'. Please remove %s or %s.", external.getName(), external.getKey(),
                getRelativeDir(fs.getInstalledExternalPluginsDir()), external.getNonNullJarFile().getName(), plugin.getNonNullJarFile().getName())));
        externalPluginsByKey.put(external.getKey(), external);
    }
    // Phase 3: downloaded plugins are applied as installs or updates of
    // external plugins; a key collision with a bundled plugin is fatal.
    for (PluginInfo downloaded : getDownloadedPluginsMetadata()) {
        failIfContains(bundledPluginsByKey, downloaded,
            plugin -> MessageException.of(format("Fail to update plugin: %s. Built-in feature with same key already exists: %s. Move or delete plugin from %s directory",
                plugin.getName(), plugin.getKey(), getRelativeDir(fs.getDownloadedPluginsDir()))));
        ServerPluginInfo installedPlugin;
        if (externalPluginsByKey.containsKey(downloaded.getKey())) {
            // Update: remove the old jar before moving the new one in place.
            deleteQuietly(externalPluginsByKey.get(downloaded.getKey()).getNonNullJarFile());
            installedPlugin = moveDownloadedPluginToExtensions(downloaded);
            LOG.info("Plugin {} [{}] updated to version {}", installedPlugin.getName(), installedPlugin.getKey(), installedPlugin.getVersion());
        } else {
            installedPlugin = moveDownloadedPluginToExtensions(downloaded);
            LOG.info("Plugin {} [{}] installed", installedPlugin.getName(), installedPlugin.getKey());
        }
        externalPluginsByKey.put(downloaded.getKey(), installedPlugin);
    }
    // Merge both maps (the earlier failIfContains checks guarantee the key
    // sets are disjoint) and unload anything with unmet requirements.
    Map<String, ServerPluginInfo> plugins = new HashMap<>(externalPluginsByKey.size() + bundledPluginsByKey.size());
    plugins.putAll(externalPluginsByKey);
    plugins.putAll(bundledPluginsByKey);
    PluginRequirementsValidator.unloadIncompatiblePlugins(plugins);
    return plugins.values();
}
|
// An external plugin that reuses a bundled plugin's key must abort loading
// with a message pointing at the jar to remove.
@Test
public void fail_if_external_plugin_has_same_key_has_bundled_plugin() throws IOException {
    File jar = createJar(fs.getInstalledExternalPluginsDir(), "plugin1", "main", null);
    createJar(fs.getInstalledBundledPluginsDir(), "plugin1", "main", null);
    String dir = getDirName(fs.getInstalledExternalPluginsDir());
    assertThatThrownBy(() -> underTest.loadPlugins())
        .isInstanceOf(MessageException.class)
        .hasMessageContaining("Found a plugin 'plugin1' in the directory '" + dir + "' with the same key [plugin1] as a built-in feature 'plugin1'. "
            + "Please remove '" + new File(dir, jar.getName()) + "'");
}
|
/**
 * Rewrites an uncorrelated ApplyNode whose single subquery assignment is an
 * IN-predicate into a SemiJoinNode producing the same boolean output
 * variable. Returns Result.empty() when the node does not match the pattern.
 */
@Override
public Result apply(ApplyNode applyNode, Captures captures, Context context)
{
    // A semi-join produces exactly one output variable, so only a single
    // subquery assignment can be rewritten.
    if (applyNode.getSubqueryAssignments().size() != 1) {
        return Result.empty();
    }
    RowExpression expression = getOnlyElement(applyNode.getSubqueryAssignments().getExpressions());
    if (!(expression instanceof InSubqueryExpression)) {
        return Result.empty();
    }
    InSubqueryExpression inPredicate = (InSubqueryExpression) expression;
    VariableReferenceExpression semiJoinVariable = getOnlyElement(applyNode.getSubqueryAssignments().getVariables());
    SemiJoinNode replacement = new SemiJoinNode(
            applyNode.getSourceLocation(),
            context.getIdAllocator().getNextId(),
            applyNode.getInput(),
            applyNode.getSubquery(),
            inPredicate.getValue(),
            inPredicate.getSubquery(),
            semiJoinVariable,
            // No hash variables or distribution type at this stage; they are
            // decided by later optimizations.
            Optional.empty(),
            Optional.empty(),
            Optional.empty(),
            ImmutableMap.of());
    return Result.ofPlanNode(replacement);
}
|
// An ApplyNode without any subquery assignment must not trigger the rewrite.
@Test
public void testDoesNotFireOnNoCorrelation()
{
    tester().assertThat(new TransformUncorrelatedInPredicateSubqueryToSemiJoin())
            .on(p -> p.apply(
                    Assignments.of(),
                    emptyList(),
                    p.values(),
                    p.values()))
            .doesNotFire();
}
|
/**
 * Executes a repository transformation identified by the servlet parameters.
 * Known options (rep/user/pass/trans/level) configure the run; every other
 * request parameter is forwarded to the transformation as a named parameter
 * or variable. Responds 400 when the mandatory trans parameter is missing,
 * 401 on repository authentication failure, 404 when the transformation
 * cannot be found and 500 on execution or unexpected errors.
 */
public void doGet( HttpServletRequest request, HttpServletResponse response ) throws ServletException,
  IOException {
  if ( isJettyMode() && !request.getContextPath().startsWith( CONTEXT_PATH ) ) {
    return;
  }

  if ( log.isDebug() ) {
    logDebug( BaseMessages.getString( PKG, "ExecuteTransServlet.Log.ExecuteTransRequested" ) );
  }

  // Options taken from PAN
  //
  String[] knownOptions = new String[] { REP, USER, PASS, TRANS, LEVEL };

  String repOption = request.getParameter( REP );
  String userOption = request.getParameter( USER );
  String passOption = Encr.decryptPasswordOptionallyEncrypted( request.getParameter( PASS ) );
  String transOption = request.getParameter( TRANS );
  String levelOption = request.getParameter( LEVEL );

  response.setStatus( HttpServletResponse.SC_OK );

  String encoding = System.getProperty( "KETTLE_DEFAULT_SERVLET_ENCODING", null );
  if ( encoding != null && !Utils.isEmpty( encoding.trim() ) ) {
    response.setCharacterEncoding( encoding );
    response.setContentType( "text/html; charset=" + encoding );
  }

  PrintWriter out = response.getWriter();

  // The transformation name/path is mandatory.
  if ( transOption == null ) {
    response.setStatus( HttpServletResponse.SC_BAD_REQUEST );
    out.println( new WebResult( WebResult.STRING_ERROR, BaseMessages.getString(
      PKG, "ExecuteTransServlet.Error.MissingMandatoryParameter", TRANS ) ) );
    return;
  }

  try {

    final Repository repository = openRepository( repOption, userOption, passOption );
    final TransMeta transMeta = loadTransformation( repository, transOption );

    // Set the servlet parameters as variables in the transformation
    //
    String[] parameters = transMeta.listParameters();
    Enumeration<?> parameterNames = request.getParameterNames();
    while ( parameterNames.hasMoreElements() ) {
      String parameter = (String) parameterNames.nextElement();
      String[] values = request.getParameterValues( parameter );

      // Ignore the known options. set the rest as variables
      //
      if ( Const.indexOfString( parameter, knownOptions ) < 0 ) {
        // If it's a trans parameter, set it, otherwise simply set the variable
        //
        if ( Const.indexOfString( parameter, parameters ) < 0 ) {
          transMeta.setVariable( parameter, values[0] );
        } else {
          transMeta.setParameterValue( parameter, values[0] );
        }
      }
    }

    TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration();
    LogLevel logLevel = LogLevel.getLogLevelForCode( levelOption );
    transExecutionConfiguration.setLogLevel( logLevel );
    TransConfiguration transConfiguration = new TransConfiguration( transMeta, transExecutionConfiguration );

    String carteObjectId = UUID.randomUUID().toString();
    SimpleLoggingObject servletLoggingObject =
      new SimpleLoggingObject( CONTEXT_PATH, LoggingObjectType.CARTE, null );
    servletLoggingObject.setContainerObjectId( carteObjectId );
    servletLoggingObject.setLogLevel( logLevel );

    // Create the transformation and store in the list...
    //
    final Trans trans = new Trans( transMeta, servletLoggingObject );

    trans.setRepository( repository );
    trans.setSocketRepository( getSocketRepository() );

    getTransformationMap().addTransformation( transMeta.getName(), carteObjectId, trans, transConfiguration );
    trans.setContainerObjectId( carteObjectId );

    if ( repository != null ) {
      // The repository connection is open: make sure we disconnect from the repository once we
      // are done with this transformation.
      //
      trans.addTransListener( new TransAdapter() {
        @Override public void transFinished( Trans trans ) {
          repository.disconnect();
        }
      } );
    }

    // Pass the servlet print writer to the transformation...
    //
    trans.setServletPrintWriter( out );
    trans.setServletReponse( response );
    trans.setServletRequest( request );

    try {
      // Execute the transformation...
      //
      executeTrans( trans );
      String logging = KettleLogStore.getAppender().getBuffer( trans.getLogChannelId(), false ).toString();
      if ( trans.isFinishedOrStopped() && trans.getErrors() > 0 ) {
        response.setStatus( HttpServletResponse.SC_INTERNAL_SERVER_ERROR );
        out.println( new WebResult( WebResult.STRING_ERROR, BaseMessages.getString(
          PKG, "ExecuteTransServlet.Error.ErrorExecutingTrans", logging ) ) );
      }
      out.flush();
    } catch ( Exception executionException ) {
      String logging = KettleLogStore.getAppender().getBuffer( trans.getLogChannelId(), false ).toString();
      throw new KettleException( BaseMessages.getString( PKG, "ExecuteTransServlet.Error.ErrorExecutingTrans", logging ), executionException );
    }
  } catch ( Exception ex ) {
    // When we get to this point KettleAuthenticationException has already been wrapped in an Execution Exception
    // and that in a KettleException
    Throwable kettleExceptionCause = ex.getCause();
    if ( kettleExceptionCause instanceof ExecutionException ) {
      Throwable executionExceptionCause = kettleExceptionCause.getCause();
      if ( executionExceptionCause instanceof KettleAuthenticationException ) {
        response.setStatus( HttpServletResponse.SC_UNAUTHORIZED );
        out.println( new WebResult( WebResult.STRING_ERROR, BaseMessages.getString(
          PKG, "ExecuteTransServlet.Error.Authentication", getContextPath() ) ) );
      }
      // NOTE(review): an ExecutionException with a different cause currently
      // produces an empty 200 response -- confirm whether it should fall
      // through to the generic error handling below instead.
    } else if ( ex.getMessage() != null && ex.getMessage().contains( UNABLE_TO_FIND_TRANS ) ) {
      // Null-guard added: exceptions created without a message previously
      // caused an NPE here, masking the real error with a blank 500.
      response.setStatus( HttpServletResponse.SC_NOT_FOUND );
      out.println( new WebResult( WebResult.STRING_ERROR, BaseMessages.getString(
        PKG, "ExecuteTransServlet.Error.UnableToFindTransformation", transOption ) ) );
    } else {
      response.setStatus( HttpServletResponse.SC_INTERNAL_SERVER_ERROR );
      out.println( new WebResult( WebResult.STRING_ERROR, BaseMessages.getString(
        PKG, "ExecuteTransServlet.Error.UnexpectedError", Const.CR + Const.getStackTracker( ex ) ) ) );
    }
  }
}
|
// Verifies that a repository authentication failure (KettleException wrapping
// an ExecutionException wrapping KettleAuthenticationException) results in a
// 401 response. Ignored: constructing RepositoriesMeta inside doGet requires
// PowerMock's whenNew, which is unavailable here.
@Ignore("Unable to run this test without PowerMock") @Test
public void doGetRepositoryAuthenticationFailTest() throws Exception {
    HttpServletRequest mockHttpServletRequest = mock( HttpServletRequest.class );
    HttpServletResponse mockHttpServletResponse = mock( HttpServletResponse.class );
    RepositoriesMeta repositoriesMeta = mock( RepositoriesMeta.class );
    RepositoryMeta repositoryMeta = mock( RepositoryMeta.class );
    Repository repository = mock( Repository.class );
    PluginRegistry pluginRegistry = mock( PluginRegistry.class );
    KettleException kettleException = mock( KettleException.class );
    ExecutionException executionException = mock( ExecutionException.class );
    KettleLogStore.init();
    StringWriter out = new StringWriter();
    PrintWriter printWriter = new PrintWriter( out );
    when( mockHttpServletRequest.getParameter( "rep" ) ).thenReturn( "Repo" );
    when( mockHttpServletRequest.getParameter( "trans" ) ).thenReturn( "Trans" );
    when( mockHttpServletRequest.getParameter( "user" ) ).thenReturn( "wrongUser" );
    when( mockHttpServletRequest.getParameter( "pass" ) ).thenReturn( "wrongPass" );
    when( mockHttpServletResponse.getWriter() ).thenReturn( printWriter );
    // whenNew( RepositoriesMeta.class ).withNoArguments().thenReturn( repositoriesMeta );
    when( repositoriesMeta.findRepository( "Repo" ) ).thenReturn( repositoryMeta );
    when( PluginRegistry.getInstance() ).thenReturn( pluginRegistry );
    when( pluginRegistry.loadClass( RepositoryPluginType.class, repositoryMeta, Repository.class ) ).thenReturn( repository );
    // Simulate the auth failure chain doGet unwraps.
    doThrow( kettleException ).when( repository ).connect( "wrongUser", "wrongPass" );
    when( kettleException.getCause() ).thenReturn( executionException );
    when( executionException.getCause() ).thenReturn( new KettleAuthenticationException() );
    when( repository.isConnected() ).thenReturn( false );
    executeTransServlet.doGet( mockHttpServletRequest, mockHttpServletResponse );
    // 200 is set eagerly, then replaced by 401 once authentication fails.
    verify( mockHttpServletResponse ).setStatus( HttpServletResponse.SC_OK );
    verify( mockHttpServletResponse ).setStatus( HttpServletResponse.SC_UNAUTHORIZED );
}
|
/**
 * Builds the relative path from a canonical base file to another canonical
 * file. Returns the target's own path when the two live under different
 * roots (e.g. different Windows drives), and "." when they are identical.
 */
static String getRelativeFileInternal(File canonicalBaseFile, File canonicalFileToRelativize) {
    List<String> base = getPathComponents(canonicalBaseFile);
    List<String> target = getPathComponents(canonicalFileToRelativize);
    // Different roots: no relative path can be constructed.
    if (!base.get(0).equals(target.get(0))) {
        return canonicalFileToRelativize.getPath();
    }
    // Find the first index at which the paths diverge (index 0 is the shared root).
    int common = 1;
    while (common < base.size() && common < target.size()
            && base.get(common).equals(target.get(common))) {
        common++;
    }
    StringBuilder relative = new StringBuilder();
    // One ".." for every base component below the common prefix...
    for (int i = common; i < base.size(); i++) {
        if (relative.length() > 0) {
            relative.append(File.separatorChar);
        }
        relative.append("..");
    }
    // ...then descend through the remaining target components.
    for (int i = common; i < target.size(); i++) {
        if (relative.length() > 0) {
            relative.append(File.separatorChar);
        }
        relative.append(target.get(i));
    }
    return relative.length() == 0 ? "." : relative.toString();
}
|
// Relativizing a direct child directory against its parent must yield just the
// child's name.
@Test
public void pathUtilTest8() {
    File[] roots = File.listRoots();
    File basePath = new File(roots[0] + "some" + File.separatorChar);
    File relativePath = new File(roots[0] + "some" + File.separatorChar + "dir" + File.separatorChar);
    String path = PathUtil.getRelativeFileInternal(basePath, relativePath);
    Assert.assertEquals(path, "dir");
}
|
/**
 * Atomically raises the field managed by {@code updater} on {@code obj} to
 * {@code value}, but never lowers it. Lock-free: retries the CAS until the
 * stored value is at least {@code value}.
 */
public static <E> void setMax(E obj, AtomicLongFieldUpdater<E> updater, long value) {
    long observed = updater.get(obj);
    // Keep retrying while our candidate is still the larger value and another
    // thread beat our compare-and-set.
    while (observed < value && !updater.compareAndSet(obj, observed, value)) {
        observed = updater.get(obj);
    }
}
|
// Exercises a setMax(int, int) helper overload (defined elsewhere in this
// class). NOTE(review): presumably the pairs are (current, candidate) --
// confirm against the helper's implementation.
@Test
public void setMax() {
    setMax(8, 7);
    setMax(9, 9);
    setMax(10, 11);
}
|
/**
 * Returns a masked placeholder instead of the value when the variable name
 * contains "PASSWORD" (case-sensitive); otherwise returns the value unchanged.
 */
public static String maskPassword(String key, String value) {
    return key.contains("PASSWORD") ? "********" : value;
}
|
// Values are masked whenever "PASSWORD" appears anywhere in the variable name,
// and passed through untouched otherwise.
@Test
public void testMaskedPasswords() {
    String noPassword = "SOME_VARIABLE";
    String passwordAtTheEnd = "SOME_PASSWORD";
    String passwordInTheMiddle = "SOME_PASSWORD_TO_THE_BIG_SECRET";
    assertThat(Util.maskPassword(noPassword, "123456"), is("123456"));
    assertThat(Util.maskPassword(passwordAtTheEnd, "123456"), is("********"));
    assertThat(Util.maskPassword(passwordInTheMiddle, "123456"), is("********"));
}
|
/**
 * Validates a topic name: it must be non-blank, contain only characters from
 * ^[%|a-zA-Z0-9_-]+$, and not exceed the maximum topic length. Checks are
 * applied in that order; the first failure determines the remark.
 */
public static ValidateTopicResult validateTopic(String topic) {
    String failure = null;
    if (UtilAll.isBlank(topic)) {
        failure = "The specified topic is blank.";
    } else if (isTopicOrGroupIllegal(topic)) {
        failure = "The specified topic contains illegal characters, allowing only ^[%|a-zA-Z0-9_-]+$";
    } else if (topic.length() > TOPIC_MAX_LENGTH) {
        failure = "The specified topic is longer than topic max length.";
    }
    return failure == null
        ? new ValidateTopicResult(true, "")
        : new ValidateTopicResult(false, failure);
}
|
// A well-formed topic must validate with an empty remark.
@Test
public void testTopicValidator_Pass() {
    TopicValidator.ValidateTopicResult res = TopicValidator.validateTopic("TestTopic");
    assertThat(res.isValid()).isTrue();
    assertThat(res.getRemark()).isEmpty();
}
|
/**
 * Feign {@link Client} decorator that surrounds the delegated call with the
 * enhanced-plugin lifecycle: PRE plugins before the request, POST plugins
 * after a successful response, EXCEPTION plugins when an IOException escapes,
 * and FINALLY plugins in every case. The call duration is recorded into the
 * plugin context.
 */
@Override
public Response execute(Request request, Options options) throws IOException {
    EnhancedPluginContext enhancedPluginContext = new EnhancedPluginContext();
    // Copy the outgoing headers into a mutable HttpHeaders view for the plugins.
    HttpHeaders requestHeaders = new HttpHeaders();
    request.headers().forEach((s, strings) -> requestHeaders.addAll(s, new ArrayList<>(strings)));
    URI url = URI.create(request.url());
    EnhancedRequestContext enhancedRequestContext = EnhancedRequestContext.builder()
            .httpHeaders(requestHeaders)
            .httpMethod(HttpMethod.valueOf(request.httpMethod().name()))
            .url(url)
            .build();
    enhancedPluginContext.setRequest(enhancedRequestContext);
    enhancedPluginContext.setOriginRequest(request);
    enhancedPluginContext.setLocalServiceInstance(pluginRunner.getLocalServiceInstance());
    String svcName = request.requestTemplate().feignTarget().name();
    // NOTE(review): url.getScheme().equals("https") assumes the request URL
    // always carries a scheme -- "https".equals(url.getScheme()) would be
    // null-safe; confirm feign guarantees a scheme here.
    DefaultServiceInstance serviceInstance = new DefaultServiceInstance(
            String.format("%s-%s-%d", svcName, url.getHost(), url.getPort()),
            svcName, url.getHost(), url.getPort(), url.getScheme().equals("https"));
    // -1 means access directly by url
    if (serviceInstance.getPort() == -1) {
        enhancedPluginContext.setTargetServiceInstance(null, url);
    }
    else {
        enhancedPluginContext.setTargetServiceInstance(serviceInstance, url);
    }
    // Run pre enhanced plugins.
    pluginRunner.run(EnhancedPluginType.Client.PRE, enhancedPluginContext);
    long startMillis = System.currentTimeMillis();
    try {
        Response response = delegate.execute(request, options);
        enhancedPluginContext.setDelay(System.currentTimeMillis() - startMillis);
        // Expose the response headers and status to the POST plugins.
        HttpHeaders responseHeaders = new HttpHeaders();
        response.headers().forEach((s, strings) -> responseHeaders.addAll(s, new ArrayList<>(strings)));
        EnhancedResponseContext enhancedResponseContext = EnhancedResponseContext.builder()
                .httpStatus(response.status())
                .httpHeaders(responseHeaders)
                .build();
        enhancedPluginContext.setResponse(enhancedResponseContext);
        // Run post enhanced plugins.
        pluginRunner.run(EnhancedPluginType.Client.POST, enhancedPluginContext);
        return response;
    }
    catch (IOException origin) {
        enhancedPluginContext.setDelay(System.currentTimeMillis() - startMillis);
        enhancedPluginContext.setThrowable(origin);
        // Run exception enhanced feign plugins.
        pluginRunner.run(EnhancedPluginType.Client.EXCEPTION, enhancedPluginContext);
        throw origin;
    }
    finally {
        // Run finally enhanced plugins.
        pluginRunner.run(EnhancedPluginType.Client.FINALLY, enhancedPluginContext);
    }
}
|
// The decorator must pass through the delegate's 200 and 502 responses and
// rethrow the delegate's IOException unchanged.
@Test
public void testExecute() throws IOException {
    // mock Client.class
    Client delegate = mock(Client.class);
    doAnswer(invocation -> {
        Request request = invocation.getArgument(0);
        if (request.httpMethod().equals(Request.HttpMethod.GET)) {
            return Response.builder().request(request).status(200).build();
        }
        else if (request.httpMethod().equals(Request.HttpMethod.POST)) {
            return Response.builder().request(request).status(502).build();
        }
        throw new IOException("Mock exception.");
    }).when(delegate).execute(any(Request.class), nullable(Request.Options.class));
    // mock target
    Target<Object> target = Target.EmptyTarget.create(Object.class);
    // mock RequestTemplate.class
    RequestTemplate requestTemplate = new RequestTemplate();
    requestTemplate.feignTarget(target);
    EnhancedFeignClient polarisFeignClient = new EnhancedFeignClient(delegate,
        new DefaultEnhancedPluginRunner(getMockEnhancedFeignPlugins(), null, polarisSDKContextManager.getSDKContext()));
    // 200
    Response response = polarisFeignClient.execute(Request.create(Request.HttpMethod.GET, "http://localhost:8080/test",
        Collections.emptyMap(), null, requestTemplate), null);
    assertThat(response.status()).isEqualTo(200);
    // 502
    response = polarisFeignClient.execute(Request.create(Request.HttpMethod.POST, "http://localhost:8080/test",
        Collections.emptyMap(), null, requestTemplate), null);
    assertThat(response.status()).isEqualTo(502);
    // Exception
    try {
        polarisFeignClient.execute(Request.create(Request.HttpMethod.DELETE, "http://localhost:8080/test",
            Collections.emptyMap(), null, requestTemplate), null);
        fail("IOException should be thrown.");
    }
    catch (Throwable t) {
        assertThat(t).isInstanceOf(IOException.class);
        assertThat(t.getMessage()).isEqualTo("Mock exception.");
    }
}
|
/**
 * Returns the number of elements currently in the queue, clamped to
 * Integer.MAX_VALUE. May over-estimate under concurrent polls/offers.
 */
@Override
public final int size() {
    // NOTE: because indices are on even numbers we cannot use the size util.
    /*
     * It is possible for a thread to be interrupted or reschedule between the read of the producer
     * and consumer indices, therefore protection is required to ensure size is within valid range.
     * In the event of concurrent polls/offers to this method the size is OVER estimated as we read
     * consumer index BEFORE the producer index.
     */
    long after = lvConsumerIndex(this);
    long size;
    while (true) {
        final long before = after;
        final long currentProducerIndex = lvProducerIndex(this);
        after = lvConsumerIndex(this);
        // Only trust the producer index if the consumer index did not move
        // while we read it; otherwise retry for a consistent snapshot.
        if (before == after) {
            // Indices advance by 2 per element, hence the shift.
            size = ((currentProducerIndex - after) >> 1);
            break;
        }
    }
    // Long overflow is impossible, so size is always positive. Integer overflow is possible for the
    // unbounded indexed queues.
    if (size > Integer.MAX_VALUE) {
        return Integer.MAX_VALUE;
    } else {
        return (int) size;
    }
}
|
// An empty queue (supplied by the data provider) must report size 0.
@Test(dataProvider = "empty")
public void size_whenEmpty(MpscGrowableArrayQueue<Integer> queue) {
    assertThat(queue.size()).isEqualTo(0);
}
|
/** Creates a new builder backed by the AutoValue-generated implementation. */
public static Builder newBuilder() {
    return new AutoValue_SplunkEvent.Builder();
}
|
// Two events built from identical field values must be equal; changing the
// event text (and omitting fields) must break equality.
@Test
public void testEquals() {
    String event = "test-event";
    String host = "test-host";
    String index = "test-index";
    String source = "test-source";
    String sourceType = "test-source-type";
    Long time = 123456789L;
    JsonObject fields = new JsonObject();
    fields.addProperty("test-key", "test-value");
    SplunkEvent actualEvent =
        SplunkEvent.newBuilder()
            .withEvent(event)
            .withHost(host)
            .withIndex(index)
            .withSource(source)
            .withSourceType(sourceType)
            .withTime(time)
            .withFields(fields)
            .create();
    assertEquals(
        SplunkEvent.newBuilder()
            .withEvent(event)
            .withHost(host)
            .withIndex(index)
            .withSource(source)
            .withSourceType(sourceType)
            .withTime(time)
            .withFields(fields)
            .create(),
        actualEvent);
    // Differs in event text and also omits withFields -- either difference
    // alone is enough to make it unequal.
    assertNotEquals(
        SplunkEvent.newBuilder()
            .withEvent("a-different-test-event")
            .withHost(host)
            .withIndex(index)
            .withSource(source)
            .withSourceType(sourceType)
            .withTime(time)
            .create(),
        actualEvent);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.