| focal_method | test_case |
|---|---|
public CreateStreamCommand createStreamCommand(final KsqlStructuredDataOutputNode outputNode) {
return new CreateStreamCommand(
outputNode.getSinkName().get(),
outputNode.getSchema(),
outputNode.getTimestampColumn(),
outputNode.getKsqlTopic().getKafkaTopicName(),
Formats.from(outputNode.getKsqlTopic()),
outputNode.getKsqlTopic().getKeyFormat().getWindowInfo(),
Optional.of(outputNode.getOrReplace()),
Optional.of(false)
);
}
|
@Test
public void shouldBuildSchemaWithExplicitKeyFieldForStream() {
// Given:
final CreateStream statement = new CreateStream(
SOME_NAME,
TableElements.of(
tableElement("k", new Type(SqlTypes.STRING), KEY_CONSTRAINT),
ELEMENT1,
ELEMENT2
),
false,
true,
withProperties,
false
);
// When:
final CreateStreamCommand result = createSourceFactory.createStreamCommand(
statement,
ksqlConfig
);
// Then:
assertThat(result.getSchema(), is(LogicalSchema.builder()
.keyColumn(ColumnName.of("k"), SqlTypes.STRING)
.valueColumn(ColumnName.of("bob"), SqlTypes.STRING)
.valueColumn(ColumnName.of("hojjat"), BIGINT)
.build()
));
}
|
@Override
public void getConfig(FederationConfig.Builder builder) {
for (Target target : resolvedTargets.values())
builder.target(target.getTargetConfig());
targetSelector.ifPresent(selector -> builder.targetSelector(selector.getGlobalComponentId().stringValue()));
}
|
@Test
void source_groups_are_not_inherited_when_inheritDefaultSources_is_false() throws Exception {
FederationFixture f = new ProvidersWithSourceFixture();
FederationSearcher federationSearcherWithoutDefaultSources = newFederationSearcher(false, List.of());
f.initializeFederationSearcher(federationSearcherWithoutDefaultSources);
FederationConfig federationConfig = getConfig(federationSearcherWithoutDefaultSources);
assertEquals(0, federationConfig.target().size());
}
|
@Udf(description = "Converts a string representation of a date in the given format"
+ " into the TIMESTAMP value."
+ " Single quotes in the timestamp format can be escaped with '',"
+ " for example: 'yyyy-MM-dd''T''HH:mm:ssX'.")
public Timestamp parseTimestamp(
@UdfParameter(
description = "The string representation of a date.") final String formattedTimestamp,
@UdfParameter(
description = "The format pattern should be in the format expected by"
+ " java.time.format.DateTimeFormatter.") final String formatPattern) {
return parseTimestamp(formattedTimestamp, formatPattern, ZoneId.of("GMT").getId());
}
|
@Test
public void shouldSupportPSTTimeZone() {
// When:
final Object result = udf.parseTimestamp("2018-08-15 10:10:43",
"yyyy-MM-dd HH:mm:ss", "America/Los_Angeles");
// Then:
assertThat(result, is(new Timestamp(1534353043000L)));
}
|
public Account update(Account account, Consumer<Account> updater) {
return update(account, a -> {
updater.accept(a);
// assume that all updaters passed to the public method actually modify the account
return true;
});
}
|
@Test
void testChangePhoneNumberViaUpdate() {
final String originalNumber = "+14152222222";
final String targetNumber = "+14153333333";
final UUID uuid = UUID.randomUUID();
final Account account = AccountsHelper.generateTestAccount(originalNumber, uuid, UUID.randomUUID(), new ArrayList<>(), new byte[UnidentifiedAccessUtil.UNIDENTIFIED_ACCESS_KEY_LENGTH]);
assertThrows(AssertionError.class, () -> accountsManager.update(account, a -> a.setNumber(targetNumber, UUID.randomUUID())));
}
|
@Override
public void remove(final MetaData metaData) {
metaData.updateContextPath();
List<TarsInvokePrx> prxList = ApplicationConfigCache.getInstance()
.get(metaData.getPath()).getTarsInvokePrxList();
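// Collect every cached invoker whose host matches this metadata's app name, then drop them.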
List<TarsInvokePrx> removePrxList = prxList.stream()
.filter(tarsInvokePrx -> tarsInvokePrx.getHost().equals(metaData.getAppName()))
.collect(Collectors.toList());
prxList.removeAll(removePrxList);
if (CollectionUtils.isEmpty(prxList)) {
META_DATA.remove(metaData.getPath());
}
}
|
@Test
public void testUnSubscribe() {
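// Only verifies that removal completes without throwing; no state is asserted.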
tarsMetaDataHandler.remove(metaData);
}
|
@Override
public void init() {
if (inited.compareAndSet(false, true)) {
this.scanPeriod = CommonUtils.parseInt(registryConfig.getParameter("registry.domain.scan.period"),
scanPeriod);
Runnable task = () -> {
try {
refreshDomain();
notifyListener();
} catch (Throwable e) {
LOGGER.error(e.getMessage(), e);
}
};
scheduledExecutorService = new ScheduledService("DomainRegistry-Back-Load",
ScheduledService.MODE_FIXEDDELAY,
task, // scheduled load task
scanPeriod, // initial delay of one period
scanPeriod, // repeat every period
TimeUnit.MILLISECONDS
).start();
}
}
|
@Test
public void testInit() {
DomainRegistry domainRegistry = new DomainRegistry(new RegistryConfig());
assertNull(domainRegistry.scheduledExecutorService);
domainRegistry.init();
assertTrue(domainRegistry.scheduledExecutorService.isStarted());
}
|
public OptExpression next() {
// For logical scan to physical scan, we only need to match once
if (isPatternWithoutChildren && groupExpressionIndex.get(0) > 0) {
return null;
}
OptExpression expression;
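// Advance the match index of the deepest group and retry until a match is found or the search space is exhausted.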
do {
this.groupTraceKey = 0;
// Match with the next groupExpression of the last group node
int lastNode = this.groupExpressionIndex.size() - 1;
int lastNodeIndex = this.groupExpressionIndex.get(lastNode);
this.groupExpressionIndex.set(lastNode, lastNodeIndex + 1);
expression = match(pattern, groupExpression);
} while (expression == null && this.groupExpressionIndex.size() != 1);
nextIdx++;
return expression;
}
|
@Test
public void testBinderDepth2Repeat4() {
OptExpression expr1 = OptExpression.create(new MockOperator(OperatorType.LOGICAL_JOIN, 0),
OptExpression.create(new MockOperator(OperatorType.LOGICAL_OLAP_SCAN, 1)),
OptExpression.create(new MockOperator(OperatorType.LOGICAL_OLAP_SCAN, 2)));
OptExpression expr2 = OptExpression.create(new MockOperator(OperatorType.LOGICAL_OLAP_SCAN, 3));
OptExpression expr3 = OptExpression.create(new MockOperator(OperatorType.LOGICAL_OLAP_SCAN, 4));
Memo memo = new Memo();
GroupExpression ge = memo.init(expr1);
memo.copyIn(ge.inputAt(0), expr2);
memo.copyIn(ge.inputAt(1), expr3);
Pattern pattern = Pattern.create(OperatorType.LOGICAL_JOIN)
.addChildren(Pattern.create(OperatorType.LOGICAL_OLAP_SCAN))
.addChildren(Pattern.create(OperatorType.LOGICAL_OLAP_SCAN));
Binder binder = new Binder(pattern, ge);
OptExpression result;
result = binder.next();
assertEquals(OperatorType.LOGICAL_JOIN, result.getOp().getOpType());
assertEquals(OperatorType.LOGICAL_OLAP_SCAN, result.inputAt(0).getOp().getOpType());
assertEquals(1, ((MockOperator) result.inputAt(0).getOp()).getValue());
assertEquals(OperatorType.LOGICAL_OLAP_SCAN, result.inputAt(1).getOp().getOpType());
assertEquals(2, ((MockOperator) result.inputAt(1).getOp()).getValue());
result = binder.next();
assertEquals(OperatorType.LOGICAL_JOIN, result.getOp().getOpType());
assertEquals(OperatorType.LOGICAL_OLAP_SCAN, result.inputAt(0).getOp().getOpType());
assertEquals(1, ((MockOperator) result.inputAt(0).getOp()).getValue());
assertEquals(OperatorType.LOGICAL_OLAP_SCAN, result.inputAt(1).getOp().getOpType());
assertEquals(4, ((MockOperator) result.inputAt(1).getOp()).getValue());
result = binder.next();
assertEquals(OperatorType.LOGICAL_JOIN, result.getOp().getOpType());
assertEquals(OperatorType.LOGICAL_OLAP_SCAN, result.inputAt(0).getOp().getOpType());
assertEquals(3, ((MockOperator) result.inputAt(0).getOp()).getValue());
assertEquals(OperatorType.LOGICAL_OLAP_SCAN, result.inputAt(1).getOp().getOpType());
assertEquals(2, ((MockOperator) result.inputAt(1).getOp()).getValue());
result = binder.next();
assertEquals(OperatorType.LOGICAL_JOIN, result.getOp().getOpType());
assertEquals(OperatorType.LOGICAL_OLAP_SCAN, result.inputAt(0).getOp().getOpType());
assertEquals(3, ((MockOperator) result.inputAt(0).getOp()).getValue());
assertEquals(OperatorType.LOGICAL_OLAP_SCAN, result.inputAt(1).getOp().getOpType());
assertEquals(4, ((MockOperator) result.inputAt(1).getOp()).getValue());
assertNull(binder.next());
}
|
@Override
public Flux<ChatResponse> stream(Prompt prompt) {
WatsonxAiRequest request = request(prompt);
Flux<WatsonxAiResponse> response = this.watsonxAiApi.generateStreaming(request);
return response.map(chunk -> {
Generation generation = new Generation(chunk.results().get(0).generatedText());
if (chunk.system() != null) {
generation = generation.withGenerationMetadata(
ChatGenerationMetadata.from(chunk.results().get(0).stopReason(), chunk.system()));
}
return new ChatResponse(List.of(generation));
});
}
|
@Test
public void testStreamMethod() {
WatsonxAiApi mockChatApi = mock(WatsonxAiApi.class);
WatsonxAiChatModel chatModel = new WatsonxAiChatModel(mockChatApi);
Prompt prompt = new Prompt(List.of(new SystemMessage("Your prompt here")),
WatsonxAiChatOptions.builder().withModel("google/flan-ul2").build());
WatsonxAiChatOptions parameters = WatsonxAiChatOptions.builder().withModel("google/flan-ul2").build();
WatsonxAiResults fakeResultsFirst = new WatsonxAiResults("LLM resp", 0, 0, "max_tokens");
WatsonxAiResults fakeResultsSecond = new WatsonxAiResults("onse", 4, 3, "not_finished");
WatsonxAiResponse fakeResponseFirst = new WatsonxAiResponse("google/flan-ul2", new Date(),
List.of(fakeResultsFirst),
Map.of("warnings", List.of(Map.of("message", "the message", "id", "disclaimer_warning"))));
WatsonxAiResponse fakeResponseSecond = new WatsonxAiResponse("google/flan-ul2", new Date(),
List.of(fakeResultsSecond), null);
Flux<WatsonxAiResponse> fakeResponse = Flux.just(fakeResponseFirst, fakeResponseSecond);
when(mockChatApi.generateStreaming(any(WatsonxAiRequest.class))).thenReturn(fakeResponse);
Generation firstGen = new Generation("LLM resp")
.withGenerationMetadata(ChatGenerationMetadata.from("max_tokens",
Map.of("warnings", List.of(Map.of("message", "the message", "id", "disclaimer_warning")))));
Generation secondGen = new Generation("onse");
Flux<ChatResponse> response = chatModel.stream(prompt);
StepVerifier.create(response).assertNext(current -> {
ChatResponse expected = new ChatResponse(List.of(firstGen));
Assert.assertEquals(expected.getResults().size(), current.getResults().size());
Assert.assertEquals(expected.getResult().getOutput(), current.getResult().getOutput());
}).assertNext(current -> {
ChatResponse expected = new ChatResponse(List.of(secondGen));
Assert.assertEquals(expected.getResults().size(), current.getResults().size());
Assert.assertEquals(expected.getResult().getOutput(), current.getResult().getOutput());
}).expectComplete().verify();
}
|
@Override
public boolean isDetected() {
return system.envVariable("bamboo_buildNumber") != null;
}
|
@Test
public void isDetected() {
assertThat(underTest.isDetected()).isFalse();
setEnvVariable("bamboo_buildNumber", "41");
assertThat(underTest.isDetected()).isTrue();
}
|
static CounterResult fromJson(String json) {
return JsonUtil.parse(json, CounterResultParser::fromJson);
}
|
@Test
public void unsupportedUnit() {
assertThatThrownBy(() -> CounterResultParser.fromJson("{\"unit\":\"unknown\",\"value\":23}"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Invalid unit: unknown");
}
|
@Subscribe
public void inputCreated(InputCreated inputCreatedEvent) {
final String inputId = inputCreatedEvent.id();
LOG.debug("Input created: {}", inputId);
final Input input;
try {
input = inputService.find(inputId);
} catch (NotFoundException e) {
LOG.warn("Received InputCreated event but could not find input {}", inputId, e);
return;
}
final IOState<MessageInput> inputState = inputRegistry.getInputState(inputId);
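// If the input is already running, stop it first so it can be restarted with the new definition.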
if (inputState != null) {
inputRegistry.remove(inputState);
}
if (input.isGlobal() || this.nodeId.getNodeId().equals(input.getNodeId())) {
startInput(input);
}
}
|
@Test
@SuppressWarnings("unchecked")
public void inputCreatedDoesNotStopInputIfItIsNotRunning() throws Exception {
final String inputId = "input-id";
final Input input = mock(Input.class);
when(inputService.find(inputId)).thenReturn(input);
when(inputRegistry.getInputState(inputId)).thenReturn(null);
listener.inputCreated(InputCreated.create(inputId));
verify(inputRegistry, never()).remove(any(IOState.class));
}
|
public static <T extends PipelineOptions> T as(Class<T> klass) {
return new Builder().as(klass);
}
|
@Test
public void testGettersAnnotatedWithInconsistentDefault() throws Exception {
// Initial construction is valid.
GetterWithDefault options = PipelineOptionsFactory.as(GetterWithDefault.class);
expectedException.expect(IllegalArgumentException.class);
// Make sure the error message says what the problem is, generally
expectedException.expectMessage("contradictory annotations");
// Make sure the error message gives actionable details about what
// annotations were contradictory.
// Note that the quotes in the unparsed string are present in Java 11 but absent in Java 8
expectedException.expectMessage(
anyOf(
containsString("Default.String(value=\"abc\")"),
containsString("Default.String(value=abc)")));
expectedException.expectMessage("Default.Integer(value=1");
// When we attempt to convert, we should error at this moment.
options.as(GetterWithInconsistentDefaultType.class);
}
|
@Operation(summary = "list", description = "List stack components")
@GetMapping("/{stackName}/{stackVersion}/components")
public ResponseEntity<List<ServiceComponentVO>> components(
@PathVariable String stackName, @PathVariable String stackVersion) {
return ResponseEntity.success(stackService.components(stackName, stackVersion));
}
|
@Test
void componentsReturnsAllComponentsForValidStack() {
String stackName = "bigtop";
String stackVersion = "1.0.0";
List<ServiceComponentVO> components = Arrays.asList(new ServiceComponentVO(), new ServiceComponentVO());
when(stackService.components(stackName, stackVersion)).thenReturn(components);
ResponseEntity<List<ServiceComponentVO>> response = stackController.components(stackName, stackVersion);
assertTrue(response.isSuccess());
assertEquals(components, response.getData());
}
|
public final Logger getLogger(final Class<?> clazz) {
return getLogger(clazz.getName());
}
|
@Test
public void loggerNameEndingInDotOrDollarShouldWork() {
{
String loggerName = "toto.x.";
Logger logger = lc.getLogger(loggerName);
assertEquals(loggerName, logger.getName());
}
{
String loggerName = "toto.x$";
Logger logger = lc.getLogger(loggerName);
assertEquals(loggerName, logger.getName());
}
}
|
public String getPath() {
return mUri.getPath();
}
|
@Test
public void getPathTests() {
assertEquals(".", new AlluxioURI(".").getPath());
assertEquals("/", new AlluxioURI("/").getPath());
assertEquals("/", new AlluxioURI("alluxio:/").getPath());
assertEquals("/", new AlluxioURI("alluxio://localhost:80/").getPath());
assertEquals("/a.txt", new AlluxioURI("alluxio://localhost:80/a.txt").getPath());
assertEquals("/b", new AlluxioURI("alluxio://localhost:80/a/../b").getPath());
assertEquals("/b", new AlluxioURI("alluxio://localhost:80/a/c/../../b").getPath());
assertEquals("/a/b", new AlluxioURI("alluxio://localhost:80/a/./b").getPath());
assertEquals("/a/b", new AlluxioURI("/a/b").getPath());
assertEquals("/a/b", new AlluxioURI("file:///a/b").getPath());
assertEquals("/a/b", new AlluxioURI("alluxio://localhost:80/a/b/").getPath());
}
|
public static Schema create(Type type) {
switch (type) {
case STRING:
return new StringSchema();
case BYTES:
return new BytesSchema();
case INT:
return new IntSchema();
case LONG:
return new LongSchema();
case FLOAT:
return new FloatSchema();
case DOUBLE:
return new DoubleSchema();
case BOOLEAN:
return new BooleanSchema();
case NULL:
return new NullSchema();
default:
throw new AvroRuntimeException("Can't create a: " + type);
}
}
|
@Test
void intAsFloatDefaultValue() {
Schema.Field field = new Schema.Field("myField", Schema.create(Schema.Type.FLOAT), "doc", 1);
assertTrue(field.hasDefaultValue());
assertEquals(1.0f, field.defaultVal());
assertEquals(1.0f, GenericData.get().getDefaultValue(field));
}
|
public synchronized void clear() {
count = 0;
bufferCount = 0;
samples.clear();
}
|
@Test
public void testClear() throws IOException {
for (int i = 0; i < 1000; i++) {
estimator.insert(i);
}
estimator.clear();
assertThat(estimator.getCount()).isZero();
assertThat(estimator.getSampleCount()).isZero();
assertThat(estimator.snapshot()).isNull();
}
|
public void addLicense(License license) {
licenses.add(license);
}
|
@Test
public void testAddLicense() {
License license = new License("name", "url");
Model instance = new Model();
instance.addLicense(license);
assertNotNull(instance.getLicenses());
}
|
@Override
public ClassLoader getDefaultClassLoader() {
return DEFAULT_CLASS_LOADER;
}
|
@Test
public void loadClass_notFound() {
runWithClassloader(provider -> {
assertThrows(ClassNotFoundException.class, () ->
provider.getDefaultClassLoader().loadClass("a.b.c"));
});
}
|
public static URL socketToUrl(InetSocketAddress socketAddress) {
String hostString = socketAddress.getHostString();
// If the hostString is an IPv6 address, it needs to be enclosed in square brackets
// at the beginning and end.
if (socketAddress.getAddress() != null
&& socketAddress.getAddress() instanceof Inet6Address
&& hostString.equals(socketAddress.getAddress().getHostAddress())) {
hostString = "[" + hostString + "]";
}
String hostPort = hostString + ":" + socketAddress.getPort();
return validateHostPortString(hostPort);
}
|
@Test
void testIpv4SocketToUrl() throws MalformedURLException {
InetSocketAddress socketAddress = new InetSocketAddress("192.168.0.1", 8080);
URL expectedResult = new URL("http://192.168.0.1:8080");
assertThat(socketToUrl(socketAddress)).isEqualTo(expectedResult);
}
|
public static Collection<? extends Certificate> loadCertificates(Path certificatePath) throws CertificateException, IOException {
final CertificateFactory cf = CertificateFactory.getInstance("X.509");
File certFile = certificatePath.toFile();
if (certFile.isDirectory()) {
final ByteArrayOutputStream certStream = new ByteArrayOutputStream();
try (DirectoryStream<Path> ds = Files.newDirectoryStream(certFile.toPath())) {
for (Path f : ds) {
certStream.write(Files.readAllBytes(f));
}
}
return cf.generateCertificates(new ByteArrayInputStream(certStream.toByteArray()));
} else {
try (InputStream inputStream = Files.newInputStream(certificatePath)) {
return cf.generateCertificates(inputStream);
}
}
}
|
@Test
public void testLoadCertificates() throws Exception {
final File certFile = resourceToFile(CERTIFICATES.get(keyAlgorithm));
final Collection<? extends Certificate> certificates = KeyUtil.loadCertificates(certFile.toPath());
assertThat(certificates)
.isNotEmpty()
.hasOnlyElementsOfType(X509Certificate.class);
}
|
public void setMaxHeaderTableSize(ByteBuf out, long maxHeaderTableSize) throws Http2Exception {
if (maxHeaderTableSize < MIN_HEADER_TABLE_SIZE || maxHeaderTableSize > MAX_HEADER_TABLE_SIZE) {
throw connectionError(PROTOCOL_ERROR, "Header Table Size must be >= %d and <= %d but was %d",
MIN_HEADER_TABLE_SIZE, MAX_HEADER_TABLE_SIZE, maxHeaderTableSize);
}
if (this.maxHeaderTableSize == maxHeaderTableSize) {
return;
}
this.maxHeaderTableSize = maxHeaderTableSize;
ensureCapacity(0);
// Casting to integer is safe as we verified the maxHeaderTableSize is a valid unsigned int.
encodeInteger(out, 0x20, 5, maxHeaderTableSize);
}
|
@Test
public void testSetMaxHeaderTableSizeOverflow() {
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
hpackEncoder.setMaxHeaderTableSize(buf, MAX_HEADER_TABLE_SIZE + 1);
}
});
}
|
void addPartitionEpochs(
Map<Uuid, Set<Integer>> assignment,
int epoch
) {
assignment.forEach((topicId, assignedPartitions) -> {
currentPartitionEpoch.compute(topicId, (__, partitionsOrNull) -> {
if (partitionsOrNull == null) {
partitionsOrNull = new TimelineHashMap<>(snapshotRegistry, assignedPartitions.size());
}
for (Integer partitionId : assignedPartitions) {
Integer prevValue = partitionsOrNull.put(partitionId, epoch);
if (prevValue != null) {
throw new IllegalStateException(
String.format("Cannot set the epoch of %s-%s to %d because the partition is " +
"still owned at epoch %d", topicId, partitionId, epoch, prevValue));
}
}
return partitionsOrNull;
});
});
}
|
@Test
public void testAddPartitionEpochs() {
Uuid fooTopicId = Uuid.randomUuid();
ConsumerGroup consumerGroup = createConsumerGroup("foo");
consumerGroup.addPartitionEpochs(
mkAssignment(
mkTopicAssignment(fooTopicId, 1)
),
10
);
// Changing the epoch should fail because the owner of the partition
// should remove it first.
assertThrows(IllegalStateException.class, () -> consumerGroup.addPartitionEpochs(
mkAssignment(
mkTopicAssignment(fooTopicId, 1)
),
11
));
}
|
@Override
public boolean equals(java.lang.Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Paths paths = (Paths) o;
return Objects.equals(this.extensions, paths.extensions) &&
super.equals(o);
}
|
@Test
public void testEquals() {
Paths paths = new Paths();
Assert.assertTrue(paths.equals(paths));
Assert.assertTrue(paths.equals(new Paths()));
Assert.assertFalse(paths.equals(null));
Assert.assertFalse(paths.equals(new String()));
}
|
@Override
public void getConfig(ComponentsConfig.Builder builder) {
builder.setApplyOnRestart(getDeferChangesUntilRestart()); // Sufficient to set on one config
builder.components.addAll(ComponentsConfigGenerator.generate(getAllComponents()));
builder.components(new ComponentsConfig.Components.Builder().id("com.yahoo.container.core.config.HandlersConfigurerDi$RegistriesHack"));
}
|
@Test
void search_and_docproc_bundles_are_installed_for_application_clusters_with_search() {
ApplicationContainerCluster cluster = newClusterWithSearch(createRoot(false), false, null);
var bundleBuilder = new PlatformBundlesConfig.Builder();
cluster.getConfig(bundleBuilder);
List<Path> installedBundles = bundleBuilder.build().bundlePaths().stream().map(Paths::get).toList();
PlatformBundles.SEARCH_AND_DOCPROC_BUNDLES.forEach(bundle -> assertTrue(installedBundles.contains(bundle)));
}
|
public LinkedHashMap<String, String> getKeyPropertyList(ObjectName mbeanName) {
LinkedHashMap<String, String> keyProperties = keyPropertiesPerBean.get(mbeanName);
if (keyProperties == null) {
keyProperties = new LinkedHashMap<>();
String properties = mbeanName.getKeyPropertyListString();
Matcher match = PROPERTY_PATTERN.matcher(properties);
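// Consume the property list pair by pair; the pattern keeps quoted values (which may contain commas) intact.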
while (match.lookingAt()) {
keyProperties.put(match.group(1), match.group(2));
properties = properties.substring(match.end());
if (properties.startsWith(",")) {
properties = properties.substring(1);
}
match.reset(properties);
}
keyPropertiesPerBean.put(mbeanName, keyProperties);
}
return keyProperties;
}
|
@Test
public void testQuotedObjectNameWithComma() throws Throwable {
JmxMBeanPropertyCache testCache = new JmxMBeanPropertyCache();
LinkedHashMap<String, String> parameterList =
testCache.getKeyPropertyList(
new ObjectName("com.organisation:name=\"value,more\",name2=value2"));
assertSameElementsAndOrder(parameterList, "name", "\"value,more\"", "name2", "value2");
}
|
@Override
public TableDataConsistencyCheckResult swapToObject(final YamlTableDataConsistencyCheckResult yamlConfig) {
if (null == yamlConfig) {
return null;
}
if (!Strings.isNullOrEmpty(yamlConfig.getIgnoredType())) {
return new TableDataConsistencyCheckResult(TableDataConsistencyCheckIgnoredType.valueOf(yamlConfig.getIgnoredType()));
}
return new TableDataConsistencyCheckResult(yamlConfig.isMatched());
}
|
@Test
void assertSwapToObjectWithNullYamlTableDataConsistencyCheckResultIgnoredType() {
YamlTableDataConsistencyCheckResult yamlConfig = new YamlTableDataConsistencyCheckResult();
yamlConfig.setIgnoredType(null);
TableDataConsistencyCheckResult result = yamlTableDataConsistencyCheckResultSwapper.swapToObject(yamlConfig);
assertNull(result.getIgnoredType());
assertFalse(result.isMatched());
}
|
@Override
public SentWebAppMessage deserializeResponse(String answer) throws TelegramApiRequestException {
return deserializeResponse(answer, SentWebAppMessage.class);
}
|
@Test
public void testAnswerWebAppQueryDeserializeValidResponse() {
String responseText = "{\"ok\":true,\"result\": { \"inline_message_id\": \"123456\" } }";
AnswerWebAppQuery answerWebAppQuery = AnswerWebAppQuery
.builder()
.webAppQueryId("123456789")
.queryResult(InlineQueryResultArticle
.builder()
.id("")
.title("Text")
.inputMessageContent(InputTextMessageContent
.builder()
.messageText("My own text")
.build())
.build())
.build();
try {
SentWebAppMessage result = answerWebAppQuery.deserializeResponse(responseText);
assertNotNull(result);
assertEquals("123456", result.getInlineMessageId());
} catch (TelegramApiRequestException e) {
fail(e.getMessage());
}
}
|
public int startWithRunStrategy(
@NotNull WorkflowInstance instance, @NotNull RunStrategy runStrategy) {
return withMetricLogError(
() ->
withRetryableTransaction(
conn -> {
final long nextInstanceId =
getLatestInstanceId(conn, instance.getWorkflowId()) + 1;
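// A duplicate workflow uuid means this instance was already started; treat it as a no-op.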
if (isDuplicated(conn, instance)) {
return 0;
}
completeInstanceInit(conn, nextInstanceId, instance);
int res;
if (instance.getStatus().isTerminal()) {
// Save it directly and send a terminate event
res = addTerminatedInstance(conn, instance);
} else {
switch (runStrategy.getRule()) {
case SEQUENTIAL:
case PARALLEL:
case STRICT_SEQUENTIAL:
res = insertInstance(conn, instance, true, null);
break;
case FIRST_ONLY:
res = startFirstOnlyInstance(conn, instance);
break;
case LAST_ONLY:
res = startLastOnlyInstance(conn, instance);
break;
default:
throw new MaestroInternalError(
"When start, run strategy [%s] is not supported.", runStrategy);
}
}
if (instance.getWorkflowInstanceId() == nextInstanceId) {
updateLatestInstanceId(conn, instance.getWorkflowId(), nextInstanceId);
}
return res;
}),
"startWithRunStrategy",
"Failed to start a workflow [{}][{}] with run strategy [{}]",
instance.getWorkflowId(),
instance.getWorkflowUuid(),
runStrategy);
}
|
@Test
public void testStartRunStrategyWithFirstOnly() {
MaestroTestHelper.removeWorkflowInstance(dataSource, TEST_WORKFLOW_ID, 1);
wfi.setWorkflowUuid("test-uuid");
wfi.setWorkflowInstanceId(0);
int res = runStrategyDao.startWithRunStrategy(wfi, RunStrategy.create("FIRST_ONLY"));
assertEquals(1, res);
assertEquals(1, wfi.getWorkflowInstanceId());
assertEquals(1, wfi.getWorkflowRunId());
assertEquals("test-uuid", wfi.getWorkflowUuid());
WorkflowInstance latestRun =
dao.getLatestWorkflowInstanceRun(wfi.getWorkflowId(), wfi.getWorkflowInstanceId());
assertEquals(1, latestRun.getWorkflowInstanceId());
assertEquals("test-uuid", latestRun.getWorkflowUuid());
assertEquals(WorkflowInstance.Status.CREATED, latestRun.getStatus());
verifyPublish(0, 0, 0, 1, 0);
wfi.setWorkflowInstanceId(0);
res = runStrategyDao.startWithRunStrategy(wfi, RunStrategy.create("FIRST_ONLY"));
assertEquals(0, res);
assertEquals(0, wfi.getWorkflowInstanceId());
assertEquals(1, wfi.getWorkflowRunId());
assertEquals("test-uuid", wfi.getWorkflowUuid());
verifyPublish(0, 0, 0, 0, 0);
wfi.setWorkflowUuid("test-uuid-1");
wfi.setWorkflowInstanceId(0);
res = runStrategyDao.startWithRunStrategy(wfi, RunStrategy.create("FIRST_ONLY"));
assertEquals(-1, res);
assertEquals(2, wfi.getWorkflowInstanceId());
assertEquals(1, wfi.getWorkflowRunId());
assertEquals("test-uuid-1", wfi.getWorkflowUuid());
latestRun = dao.getLatestWorkflowInstanceRun(wfi.getWorkflowId(), wfi.getWorkflowInstanceId());
assertEquals(2, latestRun.getWorkflowInstanceId());
assertEquals("test-uuid-1", latestRun.getWorkflowUuid());
assertEquals(WorkflowInstance.Status.STOPPED, latestRun.getStatus());
verifyPublish(0, 0, 0, 0, 1);
MaestroTestHelper.removeWorkflowInstance(dataSource, TEST_WORKFLOW_ID, 2);
}
|
@Override
public ProcNodeInterface lookup(String beIdStr) throws AnalysisException {
if (Strings.isNullOrEmpty(beIdStr)) {
throw new AnalysisException("Backend id is null");
}
long backendId = -1L;
try {
backendId = Long.parseLong(beIdStr);
} catch (NumberFormatException e) {
throw new AnalysisException("Invalid backend id format: " + beIdStr);
}
Backend backend = clusterInfoService.getBackend(backendId);
if (backend == null) {
throw new AnalysisException("Backend[" + backendId + "] does not exist.");
}
return new BackendProcNode(backend);
}
|
@Test
public void testLookupInvalid() {
BackendsProcDir dir = new BackendsProcDir(systemInfoService);
ExceptionChecker.expectThrows(AnalysisException.class, () -> dir.lookup(null));
ExceptionChecker.expectThrows(AnalysisException.class, () -> dir.lookup(""));
}
|
static Class<?> obtainActualTypeInStreamObserver(Type typeInStreamObserver) {
return (Class<?>)
(typeInStreamObserver instanceof ParameterizedType
? ((ParameterizedType) typeInStreamObserver).getRawType()
: typeInStreamObserver);
}
|
@Test
void testObtainActualType() throws NoSuchMethodException {
Method method1 = DescriptorService.class.getMethod("serverStream1", Object.class, StreamObserver.class);
Class<?> clazz1 = ReflectionPackableMethod.obtainActualTypeInStreamObserver(
((ParameterizedType) method1.getGenericParameterTypes()[1]).getActualTypeArguments()[0]);
Assertions.assertEquals(clazz1.getName(), String.class.getName());
Method method2 = DescriptorService.class.getMethod("serverStream2", Object.class, StreamObserver.class);
Class<?> clazz2 = ReflectionPackableMethod.obtainActualTypeInStreamObserver(
((ParameterizedType) method2.getGenericParameterTypes()[1]).getActualTypeArguments()[0]);
Assertions.assertEquals(clazz2.getName(), DataWrapper.class.getName());
Method method3 = DescriptorService.class.getMethod("biStream1", StreamObserver.class);
Class<?> clazz31 = ReflectionPackableMethod.obtainActualTypeInStreamObserver(
((ParameterizedType) method3.getGenericReturnType()).getActualTypeArguments()[0]);
Assertions.assertEquals(clazz31.getName(), String.class.getName());
Class<?> clazz32 = ReflectionPackableMethod.obtainActualTypeInStreamObserver(
((ParameterizedType) method3.getGenericParameterTypes()[0]).getActualTypeArguments()[0]);
Assertions.assertEquals(clazz32.getName(), String.class.getName());
Method method4 = DescriptorService.class.getMethod("biStream2", StreamObserver.class);
Class<?> clazz41 = ReflectionPackableMethod.obtainActualTypeInStreamObserver(
((ParameterizedType) method4.getGenericReturnType()).getActualTypeArguments()[0]);
Assertions.assertEquals(clazz41.getName(), DataWrapper.class.getName());
Class<?> clazz42 = ReflectionPackableMethod.obtainActualTypeInStreamObserver(
((ParameterizedType) method4.getGenericParameterTypes()[0]).getActualTypeArguments()[0]);
Assertions.assertEquals(clazz42.getName(), DataWrapper.class.getName());
}
|
@Override
public void apply(IntentOperationContext<FlowRuleIntent> context) {
Optional<IntentData> toUninstall = context.toUninstall();
Optional<IntentData> toInstall = context.toInstall();
if (toInstall.isPresent() && toUninstall.isPresent()) {
Intent intentToInstall = toInstall.get().intent();
if (requireNonDisruptive(intentToInstall) && INSTALLED.equals(toUninstall.get().state())) {
reallocate(context);
return;
}
}
if (!toInstall.isPresent() && !toUninstall.isPresent()) {
// Nothing to do.
intentInstallCoordinator.intentInstallSuccess(context);
return;
}
List<FlowRuleIntent> uninstallIntents = context.intentsToUninstall();
List<FlowRuleIntent> installIntents = context.intentsToInstall();
List<FlowRule> flowRulesToUninstall;
List<FlowRule> flowRulesToInstall;
if (toUninstall.isPresent()) {
// Remove tracked resource from both Intent and installable Intents.
trackIntentResources(toUninstall.get(), uninstallIntents, REMOVE);
// Retrieves all flow rules from all flow rule Intents.
flowRulesToUninstall = uninstallIntents.stream()
.map(FlowRuleIntent::flowRules)
.flatMap(Collection::stream)
.filter(flowRule -> flowRuleService.getFlowEntry(flowRule) != null)
.collect(Collectors.toList());
} else {
// No flow rules to be uninstalled.
flowRulesToUninstall = Collections.emptyList();
}
if (toInstall.isPresent()) {
// Track resource from both Intent and installable Intents.
trackIntentResources(toInstall.get(), installIntents, ADD);
// Retrieves all flow rules from all flow rule Intents.
flowRulesToInstall = installIntents.stream()
.map(FlowRuleIntent::flowRules)
.flatMap(Collection::stream)
.collect(Collectors.toList());
} else {
// No flow rules to be installed.
flowRulesToInstall = Collections.emptyList();
}
List<FlowRule> flowRuleToModify;
List<FlowRule> dontTouch;
// If the uninstall and install lists contain equal FlowRules (i.e. with equal match conditions),
// omit them from the remove list, since they will be overwritten by the install
flowRuleToModify = flowRulesToInstall.stream()
.filter(flowRule -> flowRulesToUninstall.stream().anyMatch(flowRule::equals))
.collect(Collectors.toList());
// If both lists contain exactly matching FlowRules, remove them from both lists,
// since reinstalling them would be a no-op.
dontTouch = flowRulesToInstall.stream()
.filter(flowRule -> flowRulesToUninstall.stream().anyMatch(flowRule::exactMatch))
.collect(Collectors.toList());
flowRulesToUninstall.removeAll(flowRuleToModify);
flowRulesToUninstall.removeAll(dontTouch);
flowRulesToInstall.removeAll(flowRuleToModify);
flowRulesToInstall.removeAll(dontTouch);
flowRuleToModify.removeAll(dontTouch);
if (flowRulesToInstall.isEmpty() && flowRulesToUninstall.isEmpty() && flowRuleToModify.isEmpty()) {
// There is no flow rules to install/uninstall
intentInstallCoordinator.intentInstallSuccess(context);
return;
}
FlowRuleOperations.Builder builder = FlowRuleOperations.builder();
// Add flows
flowRulesToInstall.forEach(builder::add);
// Modify flows
flowRuleToModify.forEach(builder::modify);
// Remove flows
flowRulesToUninstall.forEach(builder::remove);
FlowRuleOperationsContext flowRuleOperationsContext = new FlowRuleOperationsContext() {
@Override
public void onSuccess(FlowRuleOperations ops) {
intentInstallCoordinator.intentInstallSuccess(context);
}
@Override
public void onError(FlowRuleOperations ops) {
intentInstallCoordinator.intentInstallFailed(context);
}
};
FlowRuleOperations operations = builder.build(flowRuleOperationsContext);
log.debug("applying intent {} -> {} with {} rules: {}",
toUninstall.map(x -> x.key().toString()).orElse("<empty>"),
toInstall.map(x -> x.key().toString()).orElse("<empty>"),
operations.stages().stream().mapToLong(Set::size).sum(),
operations.stages());
flowRuleService.apply(operations);
}
|
@Test
public void testUninstallAndInstallUnchanged() {
List<Intent> intentsToInstall = createFlowRuleIntents();
List<Intent> intentsToUninstall = createFlowRuleIntents();
IntentData toInstall = new IntentData(createP2PIntent(),
IntentState.INSTALLING,
new WallClockTimestamp());
toInstall = IntentData.compiled(toInstall, intentsToInstall);
IntentData toUninstall = new IntentData(createP2PIntent(),
IntentState.INSTALLED,
new WallClockTimestamp());
toUninstall = IntentData.compiled(toUninstall, intentsToUninstall);
IntentOperationContext<FlowRuleIntent> operationContext;
IntentInstallationContext context = new IntentInstallationContext(toUninstall, toInstall);
operationContext = new IntentOperationContext(intentsToUninstall, intentsToInstall, context);
flowRuleService.load(operationContext.intentsToUninstall());
installer.apply(operationContext);
IntentOperationContext successContext = intentInstallCoordinator.successContext;
assertEquals(successContext, operationContext);
assertEquals(0, flowRuleService.flowRulesRemove.size());
assertEquals(0, flowRuleService.flowRulesAdd.size());
assertEquals(0, flowRuleService.flowRulesModify.size());
}
|
Map<String, File> scanExistingUsers() throws IOException {
Map<String, File> users = new HashMap<>();
File[] userDirectories = listUserDirectories();
if (userDirectories != null) {
for (File directory : userDirectories) {
String userId = idStrategy.idFromFilename(directory.getName());
users.put(userId, directory);
}
}
addEmptyUsernameIfExists(users);
return users;
}
|
@Test
public void scanExistingUsersNone() throws IOException {
File usersDirectory = createTestDirectory(getClass(), name);
UserIdMigrator migrator = new UserIdMigrator(usersDirectory, IdStrategy.CASE_INSENSITIVE);
Map<String, File> userMappings = migrator.scanExistingUsers();
assertThat(userMappings.keySet(), empty());
}
|
public static String toString(InetAddress inetAddress) {
if (inetAddress instanceof Inet6Address) {
String address = InetAddresses.toAddrString(inetAddress);
// toAddrString() returns any interface/scope as a %-suffix,
// see https://github.com/google/guava/commit/3f61870ac6e5b18dbb74ce6f6cb2930ad8750a43
int percentIndex = address.indexOf('%');
return percentIndex < 0 ? address : address.substring(0, percentIndex);
} else {
return inetAddress.getHostAddress();
}
}
|
@Test
void testToStringWithInterface2() throws UnknownHostException {
byte[] bytes = new byte[] { 0x10,(byte)0x80, 0,0, 0,0, 0,0, 0,8, 8,0, 0x20,0x0c, 0x41,0x7a };
Inet6Address address = Inet6Address.getByAddress(null, bytes, 1);
// Verify Guava's InetAddresses.toAddrString() includes the interface.
// If this assert fails, we can use InetAddresses.toAddrString() instead of InetAddressUtil.toString().
assertNotEquals(-1, InetAddresses.toAddrString(address).indexOf('%'));
assertEquals("1080::8:800:200c:417a", InetAddressUtil.toString(address));
}
|
public List<String> getDatacentersFor(
InetAddress address,
String continent,
String country,
Optional<String> subdivision
) {
final int NUM_DATACENTERS = 3;
if(this.isEmpty()) {
return Collections.emptyList();
}
List<String> dcsBySubnet = getDatacentersBySubnet(address);
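// Geo-based results exclude the first two subnet-based entries so the merged list stays distinct.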
List<String> dcsByGeo = getDatacentersByGeo(continent, country, subdivision).stream()
.limit(NUM_DATACENTERS)
.filter(dc ->
(dcsBySubnet.isEmpty() || !dc.equals(dcsBySubnet.getFirst()))
&& (dcsBySubnet.size() < 2 || !dc.equals(dcsBySubnet.get(1)))
).toList();
return Stream.concat(
dcsBySubnet.stream().limit(dcsByGeo.isEmpty() ? NUM_DATACENTERS : NUM_DATACENTERS - 1),
dcsByGeo.stream())
.limit(NUM_DATACENTERS)
.toList();
}
|
@Test
void testGetFastestDatacentersDistinct() throws UnknownHostException {
var v6address = Inet6Address.getByName("2001:db8:b0ac:aaaa:aaaa:aaaa:aaaa:0001");
var actual = basicTable.getDatacentersFor(v6address, "NA", "US", Optional.of("VA"));
assertThat(actual).isEqualTo(List.of("datacenter-2", "datacenter-1"));
}
|
@Override
public void invoke() throws Exception {
// --------------------------------------------------------------------
// Initialize
// --------------------------------------------------------------------
LOG.debug(getLogString("Start registering input and output"));
// initialize OutputFormat
initOutputFormat();
// initialize input readers
try {
initInputReaders();
} catch (Exception e) {
throw new RuntimeException(
"Initializing the input streams failed"
+ (e.getMessage() == null ? "." : ": " + e.getMessage()),
e);
}
LOG.debug(getLogString("Finished registering input and output"));
// --------------------------------------------------------------------
// Invoke
// --------------------------------------------------------------------
LOG.debug(getLogString("Starting data sink operator"));
RuntimeContext ctx = createRuntimeContext();
final Counter numRecordsIn;
{
Counter tmpNumRecordsIn;
try {
InternalOperatorIOMetricGroup ioMetricGroup =
((InternalOperatorMetricGroup) ctx.getMetricGroup()).getIOMetricGroup();
ioMetricGroup.reuseInputMetricsForTask();
ioMetricGroup.reuseOutputMetricsForTask();
tmpNumRecordsIn = ioMetricGroup.getNumRecordsInCounter();
} catch (Exception e) {
LOG.warn("An exception occurred during the metrics setup.", e);
tmpNumRecordsIn = new SimpleCounter();
}
numRecordsIn = tmpNumRecordsIn;
}
if (RichOutputFormat.class.isAssignableFrom(this.format.getClass())) {
((RichOutputFormat) this.format).setRuntimeContext(ctx);
LOG.debug(getLogString("Rich Sink detected. Initializing runtime context."));
}
ExecutionConfig executionConfig = getExecutionConfig();
boolean objectReuseEnabled = executionConfig.isObjectReuseEnabled();
try {
// initialize local strategies
MutableObjectIterator<IT> input1;
switch (this.config.getInputLocalStrategy(0)) {
case NONE:
// nothing to do
localStrategy = null;
input1 = reader;
break;
case SORT:
// initialize sort local strategy
try {
// get type comparator
TypeComparatorFactory<IT> compFact =
this.config.getInputComparator(0, getUserCodeClassLoader());
if (compFact == null) {
throw new Exception(
"Missing comparator factory for local strategy on input " + 0);
}
// initialize sorter
Sorter<IT> sorter =
ExternalSorter.newBuilder(
getEnvironment().getMemoryManager(),
this,
this.inputTypeSerializerFactory.getSerializer(),
compFact.createComparator())
.maxNumFileHandles(this.config.getFilehandlesInput(0))
.enableSpilling(
getEnvironment().getIOManager(),
this.config.getSpillingThresholdInput(0))
.memoryFraction(this.config.getRelativeMemoryInput(0))
.objectReuse(
this.getExecutionConfig().isObjectReuseEnabled())
.largeRecords(this.config.getUseLargeRecordHandler())
.build(this.reader);
this.localStrategy = sorter;
input1 = sorter.getIterator();
} catch (Exception e) {
throw new RuntimeException(
"Initializing the input processing failed"
+ (e.getMessage() == null ? "." : ": " + e.getMessage()),
e);
}
break;
default:
throw new RuntimeException("Invalid local strategy for DataSinkTask");
}
// read the reader and write it to the output
final TypeSerializer<IT> serializer = this.inputTypeSerializerFactory.getSerializer();
final MutableObjectIterator<IT> input = input1;
final OutputFormat<IT> format = this.format;
// check if task has been canceled
if (this.taskCanceled) {
return;
}
LOG.debug(getLogString("Starting to produce output"));
// open
format.open(
new InitializationContext() {
@Override
public int getNumTasks() {
return getEnvironment().getTaskInfo().getNumberOfParallelSubtasks();
}
@Override
public int getTaskNumber() {
return getEnvironment().getTaskInfo().getIndexOfThisSubtask();
}
@Override
public int getAttemptNumber() {
return getEnvironment().getTaskInfo().getAttemptNumber();
}
});
if (objectReuseEnabled) {
IT record = serializer.createInstance();
// work!
while (!this.taskCanceled && ((record = input.next(record)) != null)) {
numRecordsIn.inc();
format.writeRecord(record);
}
} else {
IT record;
// work!
while (!this.taskCanceled && ((record = input.next()) != null)) {
numRecordsIn.inc();
format.writeRecord(record);
}
}
// close. We close here such that a regular close throwing an exception marks a task as
// failed.
if (!this.taskCanceled) {
this.format.close();
this.format = null;
}
} catch (Exception ex) {
// make a best effort to clean up
try {
if (!cleanupCalled && format instanceof CleanupWhenUnsuccessful) {
cleanupCalled = true;
((CleanupWhenUnsuccessful) format).tryCleanupOnError();
}
} catch (Throwable t) {
LOG.error("Cleanup on error failed.", t);
}
ex = ExceptionInChainedStubException.exceptionUnwrap(ex);
if (ex instanceof CancelTaskException) {
// forward canceling exception
throw ex;
}
// drop, if the task was canceled
else if (!this.taskCanceled) {
if (LOG.isErrorEnabled()) {
LOG.error(getLogString("Error in user code: " + ex.getMessage()), ex);
}
throw ex;
}
} finally {
if (this.format != null) {
// close format, if it has not been closed, yet.
// This should only be the case if we had a previous error, or were canceled.
try {
this.format.close();
} catch (Throwable t) {
if (LOG.isWarnEnabled()) {
LOG.warn(getLogString("Error closing the output format"), t);
}
}
}
// close local strategy if necessary
if (localStrategy != null) {
try {
this.localStrategy.close();
} catch (Throwable t) {
LOG.error("Error closing local strategy", t);
}
}
BatchTask.clearReaders(new MutableReader<?>[] {inputReader});
}
if (!this.taskCanceled) {
LOG.debug(getLogString("Finished data sink operator"));
} else {
LOG.debug(getLogString("Data sink operator cancelled"));
}
}
|
@Test
@SuppressWarnings("unchecked")
void testCancelSortingDataSinkTask() {
double memoryFraction = 1.0;
super.initEnvironment(MEMORY_MANAGER_SIZE, NETWORK_BUFFER_SIZE);
super.addInput(new InfiniteInputIterator(), 0);
final DataSinkTask<Record> testTask = new DataSinkTask<>(this.mockEnv);
Configuration stubParams = new Configuration();
// set sorting
super.getTaskConfig().setInputLocalStrategy(0, LocalStrategy.SORT);
super.getTaskConfig()
.setInputComparator(
new RecordComparatorFactory(new int[] {1}, (new Class[] {IntValue.class})),
0);
super.getTaskConfig().setRelativeMemoryInput(0, memoryFraction);
super.getTaskConfig().setFilehandlesInput(0, 8);
super.getTaskConfig().setSpillingThresholdInput(0, 0.8f);
File tempTestFile = new File(tempFolder.toFile(), UUID.randomUUID().toString());
super.registerFileOutputTask(
MockOutputFormat.class, tempTestFile.toURI().toString(), stubParams);
Thread taskRunner =
new Thread() {
@Override
public void run() {
try {
testTask.invoke();
} catch (Exception ie) {
ie.printStackTrace();
fail("Task threw exception although it was properly canceled");
}
}
};
taskRunner.start();
TaskCancelThread tct = new TaskCancelThread(2, taskRunner, testTask);
tct.start();
try {
tct.join();
taskRunner.join();
} catch (InterruptedException ie) {
fail("Joining threads failed");
}
}
|
public List<HandleAndLocalPath> uploadFilesToCheckpointFs(
@Nonnull List<Path> files,
CheckpointStreamFactory checkpointStreamFactory,
CheckpointedStateScope stateScope,
CloseableRegistry closeableRegistry,
CloseableRegistry tmpResourcesRegistry)
throws Exception {
List<CompletableFuture<HandleAndLocalPath>> futures =
createUploadFutures(
files,
checkpointStreamFactory,
stateScope,
closeableRegistry,
tmpResourcesRegistry);
List<HandleAndLocalPath> handles = new ArrayList<>(files.size());
try {
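// Block until every upload finishes so that any failure surfaces here.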
FutureUtils.waitForAll(futures).get();
for (CompletableFuture<HandleAndLocalPath> future : futures) {
handles.add(future.get());
}
} catch (ExecutionException e) {
Throwable throwable = ExceptionUtils.stripExecutionException(e);
throwable = ExceptionUtils.stripException(throwable, RuntimeException.class);
if (throwable instanceof IOException) {
throw (IOException) throwable;
} else {
throw new FlinkRuntimeException("Failed to upload data for state handles.", e);
}
}
return handles;
}
|
@Test
void testMultiThreadUploadCorrectly() throws Exception {
File checkpointPrivateFolder = TempDirUtils.newFolder(temporaryFolder, "private");
org.apache.flink.core.fs.Path checkpointPrivateDirectory =
org.apache.flink.core.fs.Path.fromLocalFile(checkpointPrivateFolder);
File checkpointSharedFolder = TempDirUtils.newFolder(temporaryFolder, "shared");
org.apache.flink.core.fs.Path checkpointSharedDirectory =
org.apache.flink.core.fs.Path.fromLocalFile(checkpointSharedFolder);
FileSystem fileSystem = checkpointPrivateDirectory.getFileSystem();
int fileStateSizeThreshold = 1024;
int writeBufferSize = 4096;
FsCheckpointStreamFactory checkpointStreamFactory =
new FsCheckpointStreamFactory(
fileSystem,
checkpointPrivateDirectory,
checkpointSharedDirectory,
fileStateSizeThreshold,
writeBufferSize);
String localFolder = "local";
TempDirUtils.newFolder(temporaryFolder, localFolder);
int sstFileCount = 6;
List<Path> sstFilePaths =
generateRandomSstFiles(localFolder, sstFileCount, fileStateSizeThreshold);
try (RocksDBStateUploader rocksDBStateUploader = new RocksDBStateUploader(5)) {
List<HandleAndLocalPath> sstFiles =
rocksDBStateUploader.uploadFilesToCheckpointFs(
sstFilePaths,
checkpointStreamFactory,
CheckpointedStateScope.SHARED,
new CloseableRegistry(),
new CloseableRegistry());
for (Path path : sstFilePaths) {
assertStateContentEqual(
path,
sstFiles.stream()
.filter(e -> e.getLocalPath().equals(path.getFileName().toString()))
.findFirst()
.get()
.getHandle()
.openInputStream());
}
}
}
|
@Override
public DataNode readNode(ResourceId path, Filter filter) {
// FIXME transform filter? is it possible?
return super.readNode(toAbsoluteId(path), filter);
}
|
@Test
public void testReadNode() {
Filter filter = null;
DataNode returned = view.readNode(relIntf, filter);
assertTrue(ResourceIds.isPrefix(rid, realPath));
// FIXME test realFilter
// TODO do we expect something to happen on returned?
}
|
@Override
public long recalculateRevision() {
return revision.addAndGet(1);
}
|
@Test
void testRecalculateRevision() {
assertEquals(0, connectionBasedClient.getRevision());
connectionBasedClient.recalculateRevision();
assertEquals(1, connectionBasedClient.getRevision());
}
|
boolean hasAsPathLoop(long localAsNumber) {
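// Walk every AS_PATH segment looking for the local AS number.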
for (PathSegment pathSegment : asPath.getPathSegments()) {
for (Long asNumber : pathSegment.getSegmentAsNumbers()) {
if (asNumber.equals(localAsNumber)) {
return true;
}
}
}
return false;
}
|
@Test
public void testHasAsPathLoop() {
BgpRouteEntry bgpRouteEntry = generateBgpRouteEntry();
// Test for loops: test each AS number in the interval [1, 6]
for (int i = 1; i <= 6; i++) {
assertThat(bgpRouteEntry.hasAsPathLoop(i), is(true));
}
// Test for non-loops
assertThat(bgpRouteEntry.hasAsPathLoop(500), is(false));
}
|
public static String implodeMultiline(List<String> lines) {
if (lines == null) return null;
return implode(lines.toArray(new String[0]), "\n");
}
|
@Test
public void testImplodeMultiline() {
assertEquals(StringUtilities.implodeMultiline(List.of("foo", "bar")), "foo\nbar");
assertEquals(StringUtilities.implodeMultiline(List.of("")), "");
assertNull(StringUtilities.implodeMultiline(null));
assertEquals(StringUtilities.implodeMultiline(List.of("\n")), "\n");
}
|
protected List<List<Comparable>> getPartitionTransInfo(long txnId, long tableId) throws AnalysisException {
List<List<Comparable>> partitionInfos = new ArrayList<List<Comparable>>();
readLock();
try {
TransactionState transactionState = unprotectedGetTransactionState(txnId);
if (null == transactionState) {
throw new AnalysisException("Transaction[" + txnId + "] does not exist.");
}
TableCommitInfo tableCommitInfo = transactionState.getIdToTableCommitInfos().get(tableId);
Map<Long, PartitionCommitInfo> idToPartitionCommitInfo = tableCommitInfo.getIdToPartitionCommitInfo();
for (Map.Entry<Long, PartitionCommitInfo> entry : idToPartitionCommitInfo.entrySet()) {
List<Comparable> partitionInfo = new ArrayList<Comparable>();
partitionInfo.add(entry.getKey());
partitionInfo.add(entry.getValue().getVersion());
partitionInfos.add(partitionInfo);
}
} finally {
readUnlock();
}
return partitionInfos;
}
|
@Test
public void testGetPartitionTransInfo() throws AnalysisException {
DatabaseTransactionMgr masterDbTransMgr =
masterTransMgr.getDatabaseTransactionMgr(GlobalStateMgrTestUtil.testDbId1);
Long txnId = lableToTxnId.get(GlobalStateMgrTestUtil.testTxnLable1);
List<List<Comparable>> partitionTransInfos =
masterDbTransMgr.getPartitionTransInfo(txnId, GlobalStateMgrTestUtil.testTableId1);
assertEquals(1, partitionTransInfos.size());
List<Comparable> partitionTransInfo = partitionTransInfos.get(0);
assertEquals(2, partitionTransInfo.size());
assertEquals(3L, partitionTransInfo.get(0));
assertEquals(13L, partitionTransInfo.get(1));
}
|
@Override
protected Set<StepField> getUsedFields( RestMeta stepMeta ) {
Set<StepField> usedFields = new HashSet<>();
// add url field
if ( stepMeta.isUrlInField() && StringUtils.isNotEmpty( stepMeta.getUrlField() ) ) {
usedFields.addAll( createStepFields( stepMeta.getUrlField(), getInputs() ) );
}
// add method field
if ( stepMeta.isDynamicMethod() && StringUtils.isNotEmpty( stepMeta.getMethodFieldName() ) ) {
usedFields.addAll( createStepFields( stepMeta.getMethodFieldName(), getInputs() ) );
}
// add body field
if ( StringUtils.isNotEmpty( stepMeta.getBodyField() ) ) {
usedFields.addAll( createStepFields( stepMeta.getBodyField(), getInputs() ) );
}
// add parameters as used fields
String[] parameterFields = stepMeta.getParameterField();
if ( ArrayUtils.isNotEmpty( parameterFields ) ) {
for ( String paramField : parameterFields ) {
usedFields.addAll( createStepFields( paramField, getInputs() ) );
}
}
// add headers as used fields
String[] headerFields = stepMeta.getHeaderField();
if ( ArrayUtils.isNotEmpty( headerFields ) ) {
for ( String headerField : headerFields ) {
usedFields.addAll( createStepFields( headerField, getInputs() ) );
}
}
return usedFields;
}
|
@Test
public void testGetUsedFields_urlInFieldNoFieldSet() throws Exception {
Set<StepField> fields = new HashSet<>();
when( meta.isUrlInField() ).thenReturn( true );
when( meta.getUrlField() ).thenReturn( null );
Set<StepField> usedFields = analyzer.getUsedFields( meta );
verify( analyzer, never() ).createStepFields( anyString(), any( StepNodes.class ) );
}
|
@Override
public boolean isActive() {
return isActive;
}
|
@Test(timeOut = 30000)
public void testSendCommandBeforeCreatingProducer() throws Exception {
resetChannel();
setChannelConnected();
// test SEND before producer is created
sendMessage();
// Then expect channel to close
Awaitility.await().atMost(10, TimeUnit.SECONDS).until(() -> !channel.isActive());
channel.finish();
}
|
public static boolean safeRangeEquals(final Range<Comparable<?>> sourceRange, final Range<Comparable<?>> targetRange) {
Class<?> clazz = getRangeTargetNumericType(sourceRange, targetRange);
if (null == clazz) {
return sourceRange.equals(targetRange);
}
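// Convert both ranges to the shared numeric type before comparing.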
Range<Comparable<?>> newSourceRange = createTargetNumericTypeRange(sourceRange, clazz);
Range<Comparable<?>> newTargetRange = createTargetNumericTypeRange(targetRange, clazz);
return newSourceRange.equals(newTargetRange);
}
|
@Test
void assertSafeRangeEqualsForBigDecimal() {
assertTrue(SafeNumberOperationUtils.safeRangeEquals(Range.greaterThan(BigDecimal.valueOf(1.1)), Range.greaterThan(1.1F)));
}
|
@Override
public void close() throws IOException {
channel.close();
}
|
@Test
public void testVersion() throws IOException {
RecordIOWriter writer = new RecordIOWriter(file);
FileChannel channel = FileChannel.open(file, StandardOpenOption.READ);
ByteBuffer versionBuffer = ByteBuffer.allocate(1);
channel.read(versionBuffer);
versionBuffer.rewind();
channel.close();
assertThat(versionBuffer.get() == VERSION, equalTo(true));
}
|
@VisibleForTesting
boolean applyRaisingException() throws Exception {
Boolean outcome = apply();
if (thrown != null) {
throw thrown;
}
return outcome;
}
|
@Test
public void testReadFailureDoesNotSurfaceInAbort() throws Throwable {
int threshold = 50;
SDKStreamDrainer drainer = new SDKStreamDrainer("s3://example/",
new FakeSDKInputStream(BYTES, threshold),
true,
BYTES,
EMPTY_INPUT_STREAM_STATISTICS, "test");
drainer.applyRaisingException();
assertAborted(drainer);
}
|
public Map<String, String> parse(String body) {
final ImmutableMap.Builder<String, String> newLookupBuilder = ImmutableMap.builder();
final String[] lines = body.split(lineSeparator);
for (String line : lines) {
if (line.startsWith(this.ignorechar)) {
continue;
}
final String[] values = line.split(this.splitPattern);
if (values.length <= Math.max(keyColumn, keyOnly ? 0 : valueColumn)) {
continue;
}
final String key = this.caseInsensitive ? values[keyColumn].toLowerCase(Locale.ENGLISH) : values[keyColumn];
final String value = this.keyOnly ? "" : values[valueColumn].trim();
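// Strip a leading/trailing quote character from key and value, if one is configured.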
final String finalKey = Strings.isNullOrEmpty(quoteChar) ? key.trim() : key.trim().replaceAll("^" + quoteChar + "|" + quoteChar + "$", "");
final String finalValue = Strings.isNullOrEmpty(quoteChar) ? value.trim() : value.trim().replaceAll("^" + quoteChar + "|" + quoteChar + "$", "");
newLookupBuilder.put(finalKey, finalValue);
}
return newLookupBuilder.build();
}
|
@Test
public void parseKeyOnlyFile() throws Exception {
final String input = "# Sample file for testing\n" +
"foo\n" +
"bar\n" +
"baz";
final DSVParser dsvParser = new DSVParser("#", "\n", ":", "", true, false, 0, Optional.empty());
final Map<String, String> result = dsvParser.parse(input);
assertThat(result)
.isNotNull()
.isNotEmpty()
.hasSize(3)
.containsExactly(
new AbstractMap.SimpleEntry<>("foo", ""),
new AbstractMap.SimpleEntry<>("bar", ""),
new AbstractMap.SimpleEntry<>("baz", "")
);
}
|
@Override
public IMetaverseNode createResourceNode( IExternalResourceInfo resource ) throws MetaverseException {
return createFileNode( resource.getName(), descriptor );
}
|
@Test
public void testCreateResourceNode() throws Exception {
IExternalResourceInfo res = mock( IExternalResourceInfo.class );
when( res.getName() ).thenReturn( "file:///Users/home/tmp/xyz.xml" );
IMetaverseNode resourceNode = analyzer.createResourceNode( res );
assertNotNull( resourceNode );
assertEquals( DictionaryConst.NODE_TYPE_FILE, resourceNode.getType() );
}
|
public static Table getTableMeta(DataSource ds, String tableName) {
return getTableMeta(ds, null, null, tableName);
}
|
@Test
public void getTableIndexInfoTest() {
final Table table = MetaUtil.getTableMeta(ds, "user_1");
assertEquals(table.getIndexInfoList().size(), 2);
}
|
Mono<Locale> getLocaleFromSubscriber(Subscriber subscriber) {
// TODO get locale from subscriber
return Mono.just(Locale.getDefault());
}
|
@Test
void getLocaleFromSubscriberTest() {
var subscription = mock(Subscriber.class);
notificationCenter.getLocaleFromSubscriber(subscription)
.as(StepVerifier::create)
.expectNext(Locale.getDefault())
.verifyComplete();
}
|
@Override
public void onStartup(Set<Class<?>> c, ServletContext ctx) throws ServletException {
if (isDisabledByConfiguration(ctx)) {
StatusViaSLF4JLoggerFactory.addInfo("Due to deployment instructions will NOT register an instance of "
+ LogbackServletContextListener.class + " to the current web-app", this);
return;
}
StatusViaSLF4JLoggerFactory.addInfo(
"Adding an instance of " + LogbackServletContextListener.class + " to the current web-app", this);
LogbackServletContextListener lscl = new LogbackServletContextListener();
ctx.addListener(lscl);
}
|
@Test
public void noListenerShouldBeAddedWhenDisabled() throws ServletException {
ServletContext mockedServletContext = mock(ServletContext.class);
when(mockedServletContext.getInitParameter(CoreConstants.DISABLE_SERVLET_CONTAINER_INITIALIZER_KEY))
.thenReturn("true");
lsci.onStartup(null, mockedServletContext);
verify(mockedServletContext, times(0)).addListener(any(LogbackServletContextListener.class));
}
|
public static <T> T visit(final Schema start, final SchemaVisitor<T> visitor) {
// Set of Visited Schemas
IdentityHashMap<Schema, Schema> visited = new IdentityHashMap<>();
// Stack that contains the Schemas to process and afterVisitNonTerminal
// functions.
// Deque<Either<Schema, Supplier<SchemaVisitorAction>>>
// Using either has a cost which we want to avoid...
Deque<Object> dq = new ArrayDeque<>();
dq.addLast(start);
Object current;
while ((current = dq.pollLast()) != null) {
if (current instanceof Supplier) {
// we are executing a non terminal post visit.
SchemaVisitorAction action = ((Supplier<SchemaVisitorAction>) current).get();
switch (action) {
case CONTINUE:
break;
case SKIP_SUBTREE:
throw new UnsupportedOperationException();
case SKIP_SIBLINGS:
while (dq.getLast() instanceof Schema) {
dq.removeLast();
}
break;
case TERMINATE:
return visitor.get();
default:
throw new UnsupportedOperationException("Invalid action " + action);
}
} else {
Schema schema = (Schema) current;
boolean terminate;
if (!visited.containsKey(schema)) {
Schema.Type type = schema.getType();
switch (type) {
case ARRAY:
terminate = visitNonTerminal(visitor, schema, dq, Collections.singleton(schema.getElementType()));
visited.put(schema, schema);
break;
case RECORD:
terminate = visitNonTerminal(visitor, schema, dq, () -> schema.getFields().stream().map(Field::schema)
.collect(Collectors.toCollection(ArrayDeque::new)).descendingIterator());
visited.put(schema, schema);
break;
case UNION:
terminate = visitNonTerminal(visitor, schema, dq, schema.getTypes());
visited.put(schema, schema);
break;
case MAP:
terminate = visitNonTerminal(visitor, schema, dq, Collections.singleton(schema.getValueType()));
visited.put(schema, schema);
break;
case NULL:
case BOOLEAN:
case BYTES:
case DOUBLE:
case ENUM:
case FIXED:
case FLOAT:
case INT:
case LONG:
case STRING:
terminate = visitTerminal(visitor, schema, dq);
break;
default:
throw new UnsupportedOperationException("Invalid type " + type);
}
} else {
terminate = visitTerminal(visitor, schema, dq);
}
if (terminate) {
return visitor.get();
}
}
}
return visitor.get();
}
|
@Test
void visit2() {
String s2 = "{\"type\": \"record\", \"name\": \"c1\", \"fields\": [" + "{\"name\": \"f1\", \"type\": \"int\"}"
+ "]}";
assertEquals("c1.\"int\"!", Schemas.visit(new Schema.Parser().parse(s2), new TestVisitor()));
}
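// A hedged sketch of a visitor for the traversal above, assuming the
// Avro-style SchemaVisitor contract (visitTerminal / visitNonTerminal /
// afterVisitNonTerminal returning a SchemaVisitorAction, plus get() for the
// result). This one just counts the nodes it sees.
class CountingVisitor implements SchemaVisitor<Integer> {
    private int count = 0;

    @Override
    public SchemaVisitorAction visitTerminal(Schema terminal) {
        count++;
        return SchemaVisitorAction.CONTINUE;
    }

    @Override
    public SchemaVisitorAction visitNonTerminal(Schema nonTerminal) {
        count++;
        return SchemaVisitorAction.CONTINUE;
    }

    @Override
    public SchemaVisitorAction afterVisitNonTerminal(Schema nonTerminal) {
        return SchemaVisitorAction.CONTINUE;
    }

    @Override
    public Integer get() {
        return count;
    }
}
// For the record schema in the test above this would return 2: the record
// itself plus its single int field.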
|
@Override
public Character getChar(K name) {
return null;
}
|
@Test
public void testGetCharDefault() {
assertEquals('x', HEADERS.getChar("name1", 'x'));
}
|
@Override
public SchemaKStream<?> buildStream(final PlanBuildContext buildContext) {
final Stacker contextStacker = buildContext.buildNodeContext(getId().toString());
return schemaKStreamFactory.create(
buildContext,
dataSource,
contextStacker.push(SOURCE_OP_NAME)
);
}
|
@Test
public void shouldBuildTableByConvertingFromStream() {
// Given:
givenNodeWithMockSource();
// When:
final SchemaKStream<?> returned = node.buildStream(buildContext);
// Then:
assertThat(returned, is(table));
}
|
@Override
protected void decode(final ChannelHandlerContext ctx, final ByteBuf in, final List<Object> out) {
while (in.readableBytes() >= 1 + MySQLBinlogEventHeader.MYSQL_BINLOG_EVENT_HEADER_LENGTH) {
in.markReaderIndex();
MySQLPacketPayload payload = new MySQLPacketPayload(in, ctx.channel().attr(CommonConstants.CHARSET_ATTRIBUTE_KEY).get());
checkPayload(payload);
MySQLBinlogEventHeader binlogEventHeader = new MySQLBinlogEventHeader(payload, binlogContext.getChecksumLength());
if (!checkEventIntegrity(in, binlogEventHeader)) {
return;
}
Optional<MySQLBaseBinlogEvent> binlogEvent = decodeEvent(binlogEventHeader, payload);
if (!binlogEvent.isPresent()) {
skipChecksum(binlogEventHeader.getEventType(), in);
return;
}
if (binlogEvent.get() instanceof PlaceholderBinlogEvent) {
out.add(binlogEvent.get());
skipChecksum(binlogEventHeader.getEventType(), in);
return;
}
if (decodeWithTX) {
processEventWithTX(binlogEvent.get(), out);
} else {
processEventIgnoreTX(binlogEvent.get(), out);
}
skipChecksum(binlogEventHeader.getEventType(), in);
}
}
|
@Test
void assertDecodeQueryEvent() {
ByteBuf byteBuf = Unpooled.buffer();
byteBuf.writeBytes(StringUtil.decodeHexDump("00f3e25665020100000087000000c2740f0a0400c9150000000000000400002d000000000000012000a045000000000603737464042d002d00e0000c0164735f3000116df40b00000"
+ "0000012ff0064735f300044524f50205441424c452060745f70726f76696e636560202f2a2067656e65726174656420627920736572766572202a2fcefe4ec6"));
List<Object> decodedEvents = new LinkedList<>();
binlogEventPacketDecoder.decode(channelHandlerContext, byteBuf, decodedEvents);
assertFalse(decodedEvents.isEmpty());
Object actual = decodedEvents.get(0);
assertInstanceOf(MySQLQueryBinlogEvent.class, actual);
assertThat(((MySQLQueryBinlogEvent) actual).getTimestamp(), is(1700193011L));
assertThat(((MySQLQueryBinlogEvent) actual).getPosition(), is(168785090L));
}
|
public static String strip(CharSequence str, CharSequence prefixOrSuffix) {
if (equals(str, prefixOrSuffix)) {
// Handle the special case where the string equals the prefix/suffix
return EMPTY;
}
return strip(str, prefixOrSuffix, prefixOrSuffix);
}
|
@Test
public void stripTest() {
final String SOURCE_STRING = "aaa_STRIPPED_bbb";
// ---------------------------- test strip ----------------------------
// Normal test
assertEquals("aa_STRIPPED_bbb", CharSequenceUtil.strip(SOURCE_STRING, "a"));
assertEquals(SOURCE_STRING, CharSequenceUtil.strip(SOURCE_STRING, ""));
assertEquals("aa_STRIPPED_bb", CharSequenceUtil.strip(SOURCE_STRING, "a", "b"));
// test null param
assertEquals(SOURCE_STRING, CharSequenceUtil.strip(SOURCE_STRING, null, null));
assertEquals(SOURCE_STRING, CharSequenceUtil.strip(SOURCE_STRING, "", ""));
assertEquals("aaa_STRIPPED_bb", CharSequenceUtil.strip(SOURCE_STRING, "", "b"));
assertEquals("aaa_STRIPPED_bb", CharSequenceUtil.strip(SOURCE_STRING, null, "b"));
assertEquals("aa_STRIPPED_bbb", CharSequenceUtil.strip(SOURCE_STRING, "a", ""));
assertEquals("aa_STRIPPED_bbb", CharSequenceUtil.strip(SOURCE_STRING, "a", null));
// A case that failed before this commit
assertEquals("", CharSequenceUtil.strip("a", "a", "a"));
// When the prefix and suffix overlap, the prefix is stripped first
assertEquals("a", CharSequenceUtil.strip("aba", "ab", "ba"));
}
|
public ServiceInfo getServiceInfo(final String serviceName, final String groupName, final String clusters) {
String groupedServiceName = NamingUtils.getGroupedName(serviceName, groupName);
String key = ServiceInfo.getKey(groupedServiceName, clusters);
return serviceInfoMap.get(key);
}
|
@Test
void testGetServiceInfo() {
ServiceInfo info = new ServiceInfo("a@@b@@c");
Instance instance1 = createInstance("1.1.1.1", 1);
List<Instance> hosts = new ArrayList<>();
hosts.add(instance1);
info.setHosts(hosts);
ServiceInfo expect = holder.processServiceInfo(info);
String serviceName = "b";
String groupName = "a";
String clusters = "c";
ServiceInfo actual = holder.getServiceInfo(serviceName, groupName, clusters);
assertEquals(expect.getKey(), actual.getKey());
assertEquals(expect.getHosts().size(), actual.getHosts().size());
assertEquals(expect.getHosts().get(0), actual.getHosts().get(0));
}
|
@Override
public OpticalConnectivityId setupConnectivity(ConnectPoint ingress, ConnectPoint egress,
Bandwidth bandwidth, Duration latency) {
checkNotNull(ingress);
checkNotNull(egress);
log.info("setupConnectivity({}, {}, {}, {})", ingress, egress, bandwidth, latency);
Bandwidth bw = (bandwidth == null) ? NO_BW_REQUIREMENT : bandwidth;
Stream<Path> paths = topologyService.getKShortestPaths(
topologyService.currentTopology(),
ingress.deviceId(), egress.deviceId(),
new BandwidthLinkWeight(bandwidth));
// Path service calculates from node to node; we're only interested in port to port
Optional<OpticalConnectivityId> id =
paths.filter(p -> p.src().equals(ingress) && p.dst().equals(egress))
.limit(maxPaths)
.map(p -> setupPath(p, bw, latency))
.filter(Objects::nonNull)
.findFirst();
if (id.isPresent()) {
log.info("Assigned OpticalConnectivityId: {}", id);
} else {
log.error("setupConnectivity({}, {}, {}, {}) failed.", ingress, egress, bandwidth, latency);
}
return id.orElse(null);
}
|
@Test
public void testInstalledEventRemote() {
// set the master for ingress device of intent to remote node
mastershipService.setMastership(DEVICE2.id(), MastershipRole.NONE);
Bandwidth bandwidth = Bandwidth.bps(100);
Duration latency = Duration.ofMillis(10);
OpticalConnectivityId cid = target.setupConnectivity(CP12, CP71, bandwidth, latency);
// notify all intents are installed
intentService.notifyInstalled();
// remote nodes must not receive the event before the distributed map is updated
assertEquals(0, listener.events.size());
}
|
static JavaType constructType(Type type) {
try {
return constructTypeInner(type);
} catch (Exception e) {
throw new InvalidDataTableTypeException(type, e);
}
}
|
@Test
void lists_are_list_types() {
JavaType javaType = TypeFactory.constructType(LIST_OF_LIST_OF_OBJECT);
assertThat(javaType.getClass(), equalTo(TypeFactory.ListType.class));
assertThat(javaType.getOriginal(), is(LIST_OF_LIST_OF_OBJECT));
TypeFactory.ListType listType = (TypeFactory.ListType) javaType;
JavaType elementType = listType.getElementType();
assertThat(elementType.getClass(), equalTo(TypeFactory.ListType.class));
assertThat(elementType.getOriginal(), is(LIST_OF_OBJECT));
}
|
@Udf(description = "Returns the tangent of an INT value")
public Double tan(
@UdfParameter(
value = "value",
description = "The value in radians to get the tangent of."
) final Integer value
) {
return tan(value == null ? null : value.doubleValue());
}
|
@Test
public void shouldHandleNull() {
assertThat(udf.tan((Integer) null), is(nullValue()));
assertThat(udf.tan((Long) null), is(nullValue()));
assertThat(udf.tan((Double) null), is(nullValue()));
}
|
public static MqttTopicFilter toFilter(String topicFilter) {
if (topicFilter == null || topicFilter.isEmpty()) {
throw new IllegalArgumentException("Topic filter can't be empty!");
}
return filters.computeIfAbsent(topicFilter, filter -> {
if (filter.equals("#")) {
return new AlwaysTrueTopicFilter();
} else if (filter.contains("+") || filter.contains("#")) {
String regex = filter
.replace("\\", "\\\\")
.replace("+", "[^/]+")
.replace("/#", "($|/.*)");
log.debug("Converting [{}] to [{}]", filter, regex);
return new RegexTopicFilter(regex);
} else {
return new EqualsTopicFilter(filter);
}
});
}
|
@Test
public void testTopicFilters() throws ScriptException {
MqttTopicFilter filter = MqttTopicFilterFactory.toFilter("Sensor/Temperature/House/+");
assertTrue(filter.filter(TEST_STR_1));
assertFalse(filter.filter(TEST_STR_2));
filter = MqttTopicFilterFactory.toFilter("Sensor/+/House/#");
assertTrue(filter.filter(TEST_STR_1));
assertFalse(filter.filter(TEST_STR_2));
filter = MqttTopicFilterFactory.toFilter("Sensor/#");
assertTrue(filter.filter(TEST_STR_1));
assertTrue(filter.filter(TEST_STR_2));
assertTrue(filter.filter(TEST_STR_3));
filter = MqttTopicFilterFactory.toFilter("Sensor/Temperature/#");
assertTrue(filter.filter(TEST_STR_1));
assertTrue(filter.filter(TEST_STR_2));
assertFalse(filter.filter(TEST_STR_3));
filter = MqttTopicFilterFactory.toFilter("#");
assertTrue(filter.filter(TEST_STR_1));
assertTrue(filter.filter(TEST_STR_2));
assertTrue(filter.filter(TEST_STR_3));
assertTrue(filter.filter(TEST_STR_4));
assertTrue(filter.filter(TEST_STR_5));
assertTrue(filter.filter(TEST_STR_6));
filter = MqttTopicFilterFactory.toFilter("Sensor/Temperature#");
assertFalse(filter.filter(TEST_STR_2));
}
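// A self-contained sketch of the wildcard-to-regex conversion toFilter
// applies, for the cases the test exercises: '+' matches exactly one topic
// level and a trailing '/#' matches the remainder. Illustrative only.
String wildcard = "Sensor/+/House/#";
String regex = wildcard
        .replace("\\", "\\\\")
        .replace("+", "[^/]+")
        .replace("/#", "($|/.*)");
// String.matches anchors the whole input; the RegexTopicFilter presumably
// does the same.
System.out.println("Sensor/Temperature/House/Kitchen".matches(regex)); // true
System.out.println("Sensor/Temperature/Garage/Kitchen".matches(regex)); // false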
|
@Override
public @Nullable State waitUntilFinish() {
return waitUntilFinish(Duration.millis(-1));
}
|
@Test
public void testWaitToFinishFail() throws Exception {
Dataflow.Projects.Locations.Jobs.Get statusRequest =
mock(Dataflow.Projects.Locations.Jobs.Get.class);
when(mockJobs.get(eq(PROJECT_ID), eq(REGION_ID), eq(JOB_ID))).thenReturn(statusRequest);
when(statusRequest.execute()).thenThrow(IOException.class);
DataflowPipelineJob job =
new DataflowPipelineJob(DataflowClient.create(options), JOB_ID, options, ImmutableMap.of());
long startTime = fastClock.nanoTime();
State state =
job.waitUntilFinish(
Duration.standardMinutes(5), null, fastClock::sleep, fastClock::nanoTime);
assertEquals(null, state);
long timeDiff = TimeUnit.NANOSECONDS.toMillis(fastClock.nanoTime() - startTime);
checkValidInterval(
DataflowPipelineJob.MESSAGES_POLLING_INTERVAL,
DataflowPipelineJob.MESSAGES_POLLING_RETRIES,
timeDiff);
}
|
Flux<DataEntityList> export(KafkaCluster cluster) {
return kafkaConnectService.getConnects(cluster)
.flatMap(connect -> kafkaConnectService.getConnectorNamesWithErrorsSuppress(cluster, connect.getName())
.flatMap(connectorName -> kafkaConnectService.getConnector(cluster, connect.getName(), connectorName))
.flatMap(connectorDTO ->
kafkaConnectService.getConnectorTopics(cluster, connect.getName(), connectorDTO.getName())
.map(topics -> createConnectorDataEntity(cluster, connect, connectorDTO, topics)))
.buffer(100)
.map(connectDataEntities -> {
String dsOddrn = Oddrn.connectDataSourceOddrn(connect.getAddress());
return new DataEntityList()
.dataSourceOddrn(dsOddrn)
.items(connectDataEntities);
})
);
}
|
@Test
void exportsConnectorsAsDataTransformers() {
ConnectDTO connect = new ConnectDTO();
connect.setName("testConnect");
connect.setAddress("http://kconnect:8083");
ConnectorDTO sinkConnector = new ConnectorDTO();
sinkConnector.setName("testSink");
sinkConnector.setType(ConnectorTypeDTO.SINK);
sinkConnector.setConnect(connect.getName());
sinkConnector.setConfig(
Map.of(
"connector.class", "FileStreamSink",
"file", "filePathHere",
"topic", "inputTopic"
)
);
ConnectorDTO sourceConnector = new ConnectorDTO();
sourceConnector.setName("testSource");
sourceConnector.setConnect(connect.getName());
sourceConnector.setType(ConnectorTypeDTO.SOURCE);
sourceConnector.setConfig(
Map.of(
"connector.class", "FileStreamSource",
"file", "filePathHere",
"topic", "outputTopic"
)
);
when(kafkaConnectService.getConnects(CLUSTER))
.thenReturn(Flux.just(connect));
when(kafkaConnectService.getConnectorNamesWithErrorsSuppress(CLUSTER, connect.getName()))
.thenReturn(Flux.just(sinkConnector.getName(), sourceConnector.getName()));
when(kafkaConnectService.getConnector(CLUSTER, connect.getName(), sinkConnector.getName()))
.thenReturn(Mono.just(sinkConnector));
when(kafkaConnectService.getConnector(CLUSTER, connect.getName(), sourceConnector.getName()))
.thenReturn(Mono.just(sourceConnector));
when(kafkaConnectService.getConnectorTopics(CLUSTER, connect.getName(), sourceConnector.getName()))
.thenReturn(Mono.just(new ConnectorTopics().topics(List.of("outputTopic"))));
when(kafkaConnectService.getConnectorTopics(CLUSTER, connect.getName(), sinkConnector.getName()))
.thenReturn(Mono.just(new ConnectorTopics().topics(List.of("inputTopic"))));
StepVerifier.create(exporter.export(CLUSTER))
.assertNext(dataEntityList -> {
assertThat(dataEntityList.getDataSourceOddrn())
.isEqualTo("//kafkaconnect/host/kconnect:8083");
assertThat(dataEntityList.getItems())
.hasSize(2);
assertThat(dataEntityList.getItems())
.filteredOn(DataEntity::getOddrn, "//kafkaconnect/host/kconnect:8083/connectors/testSink")
.singleElement()
.satisfies(sink -> {
assertThat(sink.getMetadata().get(0).getMetadata())
.containsOnlyKeys("type", "connector.class", "file", "topic");
assertThat(sink.getDataTransformer().getInputs()).contains(
"//kafka/cluster/localhost:9092/topics/inputTopic");
});
assertThat(dataEntityList.getItems())
.filteredOn(DataEntity::getOddrn, "//kafkaconnect/host/kconnect:8083/connectors/testSource")
.singleElement()
.satisfies(source -> {
assertThat(source.getMetadata().get(0).getMetadata())
.containsOnlyKeys("type", "connector.class", "file", "topic");
assertThat(source.getDataTransformer().getOutputs()).contains(
"//kafka/cluster/localhost:9092/topics/outputTopic");
});
})
.verifyComplete();
}
|
public void recordNanos(long durationNanos) {
// nano clock is not guaranteed to be monotonic, so
// let's record negative durations as zero; that way we can at least count them.
if (durationNanos < 0) {
durationNanos = 0;
}
long d = NANOSECONDS.toMicros(durationNanos);
int durationMicros = d > Integer.MAX_VALUE ? Integer.MAX_VALUE : (int) d;
COUNT.addAndGet(this, 1);
TOTAL_MICROS.addAndGet(this, durationMicros);
for (; ; ) {
long currentMax = maxMicros;
if (durationMicros <= currentMax) {
break;
}
if (MAX_MICROS.compareAndSet(this, currentMax, durationMicros)) {
break;
}
}
try {
buckets.incrementAndGet(usToBucketIndex(durationMicros));
} catch (RuntimeException e) {
throw new RuntimeException("duration nanos:" + durationNanos, e);
}
}
|
@Test
public void recordNanos() {
recordNanos(0, 0);
recordNanos(200, 0);
recordNanos(TimeUnit.MICROSECONDS.toNanos(1), 0);
recordNanos(TimeUnit.MICROSECONDS.toNanos(2), 1);
recordNanos(TimeUnit.MICROSECONDS.toNanos(3), 1);
recordNanos(TimeUnit.MICROSECONDS.toNanos(4), 2);
recordNanos(TimeUnit.MICROSECONDS.toNanos(5), 2);
recordNanos(TimeUnit.MICROSECONDS.toNanos(6), 2);
recordNanos(TimeUnit.MICROSECONDS.toNanos(8), 3);
recordNanos(TimeUnit.MICROSECONDS.toNanos(11), 3);
recordNanos(TimeUnit.MICROSECONDS.toNanos(12), 3);
recordNanos(TimeUnit.MICROSECONDS.toNanos(23), 4);
recordNanos(TimeUnit.MICROSECONDS.toNanos(24), 4);
recordNanos(TimeUnit.MICROSECONDS.toNanos(33), 5);
}
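// A self-contained sketch of the lock-free "record the maximum" pattern that
// the CAS loop in recordNanos implements, using a plain AtomicLong instead
// of a field updater. The loop retries only when another thread won the race
// between the read and the compareAndSet.
import java.util.concurrent.atomic.AtomicLong;

class MaxRecorder {
    private final AtomicLong max = new AtomicLong(Long.MIN_VALUE);

    void record(long value) {
        for (;;) {
            long current = max.get();
            if (value <= current) {
                return; // current max already covers this value
            }
            if (max.compareAndSet(current, value)) {
                return; // published the new maximum
            }
            // lost the race; re-read and try again
        }
    }
}
// On Java 8+ the same effect is available in one call:
// max.getAndAccumulate(value, Math::max).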
|
public Object[][] getAllValuesInOrder() {
return messages().stream()
.map(this::valuesFrom)
.toArray(Object[][]::new);
}
|
@Test
void getsValuesInOrder() {
Object[] msg1Values = {"2015-01-01 01:00:00.000", "source-1"};
Object[] msg2Values = {"2015-01-02 01:00:00.000", "source-2"};
SimpleMessageChunk sut = simpleMessageChunk("timestamp,source",
msg1Values,
msg2Values
);
assertThat(Arrays.stream(sut.getAllValuesInOrder()).toList()).asList()
.containsExactly(msg1Values, msg2Values);
}
|
public static int MAXIM(@NonNull final byte[] data, final int offset, final int length) {
return CRC(0x8005, 0x0000, data, offset, length, true, true, 0xFFFF);
}
|
@Test
public void MAXIM_123456789() {
final byte[] data = "123456789".getBytes();
assertEquals(0x44C2, CRC16.MAXIM(data, 0, 9));
}
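// A self-contained sketch of CRC-16/MAXIM computed bit by bit with the
// reflected polynomial 0xA001 (the bit-reversal of 0x8005), init 0x0000 and
// final XOR 0xFFFF, the same parameters the CRC(...) call above passes.
static int crc16Maxim(byte[] data, int offset, int length) {
    int crc = 0x0000;
    for (int i = offset; i < offset + length; i++) {
        crc ^= (data[i] & 0xFF);
        for (int bit = 0; bit < 8; bit++) {
            crc = (crc & 1) != 0 ? (crc >>> 1) ^ 0xA001 : crc >>> 1;
        }
    }
    return crc ^ 0xFFFF;
}
// crc16Maxim("123456789".getBytes(), 0, 9) == 0x44C2, the check value
// asserted in the test above.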
|
@Override
public List<SmsReceiveRespDTO> parseSmsReceiveStatus(String text) {
JSONArray statuses = JSONUtil.parseArray(text);
// Field reference
return convertList(statuses, status -> {
JSONObject statusObj = (JSONObject) status;
return new SmsReceiveRespDTO()
.setSuccess("SUCCESS".equals(statusObj.getStr("report_status"))) // whether delivery succeeded
.setErrorCode(statusObj.getStr("errmsg")) // status report code
.setMobile(statusObj.getStr("mobile")) // mobile number
.setReceiveTime(statusObj.getLocalDateTime("user_receive_time", null)) // status report time
.setSerialNo(statusObj.getStr("sid")); // send serial number
});
}
|
@Test
public void testParseSmsReceiveStatus() {
// Prepare parameters
String text = "[\n" +
" {\n" +
" \"user_receive_time\": \"2015-10-17 08:03:04\",\n" +
" \"nationcode\": \"86\",\n" +
" \"mobile\": \"13900000001\",\n" +
" \"report_status\": \"SUCCESS\",\n" +
" \"errmsg\": \"DELIVRD\",\n" +
" \"description\": \"用户短信送达成功\",\n" +
" \"sid\": \"12345\",\n" +
" \"ext\": {\"logId\":\"67890\"}\n" +
" }\n" +
"]";
// Invoke
List<SmsReceiveRespDTO> statuses = smsClient.parseSmsReceiveStatus(text);
// Assert
assertEquals(1, statuses.size());
assertTrue(statuses.get(0).getSuccess());
assertEquals("DELIVRD", statuses.get(0).getErrorCode());
assertEquals("13900000001", statuses.get(0).getMobile());
assertEquals(LocalDateTime.of(2015, 10, 17, 8, 3, 4), statuses.get(0).getReceiveTime());
assertEquals("12345", statuses.get(0).getSerialNo());
}
|
public synchronized String getDatabaseName() {
return databaseName;
}
|
@Test
public void testGetDatabaseNameShouldReturnCorrectValue() {
assertThat(testManager.getDatabaseName()).matches(TEST_ID + "-\\d{8}-\\d{6}-\\d{6}");
}
|
public static String subPath(String rootDir, File file) {
try {
return subPath(rootDir, file.getCanonicalPath());
} catch (IOException e) {
throw new IORuntimeException(e);
}
}
|
@Test
public void subPathTest2() {
String subPath = FileUtil.subPath("d:/aaa/bbb/", "d:/aaa/bbb/ccc/");
assertEquals("ccc/", subPath);
subPath = FileUtil.subPath("d:/aaa/bbb", "d:/aaa/bbb/ccc/");
assertEquals("ccc/", subPath);
subPath = FileUtil.subPath("d:/aaa/bbb", "d:/aaa/bbb/ccc/test.txt");
assertEquals("ccc/test.txt", subPath);
subPath = FileUtil.subPath("d:/aaa/bbb/", "d:/aaa/bbb/ccc");
assertEquals("ccc", subPath);
subPath = FileUtil.subPath("d:/aaa/bbb", "d:/aaa/bbb/ccc");
assertEquals("ccc", subPath);
subPath = FileUtil.subPath("d:/aaa/bbb", "d:/aaa/bbb");
assertEquals("", subPath);
subPath = FileUtil.subPath("d:/aaa/bbb/", "d:/aaa/bbb");
assertEquals("", subPath);
}
|
static String createPackageName(Class<?> cls) {
return getPackageName(cls) + ".generated";
}
|
@Test
public void testCreatePackageName() {
assertEquals("java.lang.generated", AbiTypesGenerator.createPackageName(String.class));
}
|
@CanDistro
@PutMapping(value = {"", "/instance"})
@Secured(action = ActionTypes.WRITE)
public Result<String> update(UpdateHealthForm updateHealthForm) throws NacosException {
updateHealthForm.validate();
healthOperatorV2.updateHealthStatusForPersistentInstance(updateHealthForm.getNamespaceId(), buildCompositeServiceName(updateHealthForm),
updateHealthForm.getClusterName(), updateHealthForm.getIp(), updateHealthForm.getPort(),
updateHealthForm.getHealthy());
return Result.success("ok");
}
|
@Test
void testUpdate() throws Exception {
doNothing().when(healthOperatorV2).updateHealthStatusForPersistentInstance(TEST_NAMESPACE,
NamingUtils.getGroupedName(updateHealthForm.getServiceName(), updateHealthForm.getGroupName()), TEST_CLUSTER_NAME,
"123.123.123.123", 8888, true);
MockHttpServletRequestBuilder builder = convert(updateHealthForm,
MockMvcRequestBuilders.put(UtilsAndCommons.DEFAULT_NACOS_NAMING_CONTEXT_V2 + UtilsAndCommons.NACOS_NAMING_HEALTH_CONTEXT));
MockHttpServletResponse response = mockmvc.perform(builder).andReturn().getResponse();
assertEquals(200, response.getStatus());
assertEquals("ok", JacksonUtils.toObj(response.getContentAsString()).get("data").asText());
}
|
public static Map<Integer, Map<RowExpression, VariableReferenceExpression>> collectCSEByLevel(List<? extends RowExpression> expressions)
{
if (expressions.isEmpty()) {
return ImmutableMap.of();
}
CommonSubExpressionCollector expressionCollector = new CommonSubExpressionCollector();
expressions.forEach(expression -> expression.accept(expressionCollector, null));
if (expressionCollector.cseByLevel.isEmpty()) {
return ImmutableMap.of();
}
Map<Integer, Map<RowExpression, Integer>> cseByLevel = removeRedundantCSE(expressionCollector.cseByLevel, expressionCollector.expressionCount);
VariableAllocator variableAllocator = new VariableAllocator();
ImmutableMap.Builder<Integer, Map<RowExpression, VariableReferenceExpression>> commonSubExpressions = ImmutableMap.builder();
Map<RowExpression, VariableReferenceExpression> rewriteWith = new HashMap<>();
int startCSELevel = cseByLevel.keySet().stream().reduce(Math::min).get();
int maxCSELevel = cseByLevel.keySet().stream().reduce(Math::max).get();
for (int i = startCSELevel; i <= maxCSELevel; i++) {
if (cseByLevel.containsKey(i)) {
ExpressionRewriter rewriter = new ExpressionRewriter(rewriteWith);
ImmutableMap.Builder<RowExpression, VariableReferenceExpression> expressionVariableMapBuilder = ImmutableMap.builder();
for (Map.Entry<RowExpression, Integer> entry : cseByLevel.get(i).entrySet()) {
RowExpression rewrittenExpression = entry.getKey().accept(rewriter, null);
expressionVariableMapBuilder.put(rewrittenExpression, variableAllocator.newVariable(rewrittenExpression, "cse"));
}
Map<RowExpression, VariableReferenceExpression> expressionVariableMap = expressionVariableMapBuilder.build();
commonSubExpressions.put(i, expressionVariableMap);
rewriteWith.putAll(expressionVariableMap.entrySet().stream().collect(toImmutableMap(Map.Entry::getKey, entry -> entry.getValue())));
}
}
return commonSubExpressions.build();
}
|
@Test
void testCollectCSEByLevelCaseStatement()
{
List<RowExpression> expressions = ImmutableList.of(rowExpression("1 + CASE WHEN x = 1 THEN y + z WHEN x = 2 THEN z * 2 END"), rowExpression("2 + CASE WHEN x = 1 THEN y + z WHEN x = 2 THEN z * 2 END"));
Map<Integer, Map<RowExpression, VariableReferenceExpression>> cseByLevel = collectCSEByLevel(expressions);
assertEquals(cseByLevel, ImmutableMap.of(3, ImmutableMap.of(rowExpression("CASE WHEN x = 1 THEN y + z WHEN x = 2 THEN z * 2 END"), rowExpression("\"expr$cse\""))));
}
|
public Status currentStatus(FetchRequest request) {
final DocumentStatus ds = fetchStatus(request);
if (MUStatusType.ACTIEF == ds.getStatusMu() || ds.getDocType() == DocTypeType.NI) {
switch (ds.getStatus()) {
case GEACTIVEERD:
return Status.ACTIVE;
case UITGEREIKT:
return Status.ISSUED;
case GEBLOKKEERD:
return Status.BLOCKED;
default:
break;
}
}
return Status.INACTIVE;
}
|
@Test
public void getIssuedStatusWithSuccessTest() throws Exception {
final DocumentStatus dummyDocumentStatus = new DocumentStatus();
dummyDocumentStatus.setId(1L);
dummyDocumentStatus.setDocType(DocTypeType.NL_RIJBEWIJS);
dummyDocumentStatus.setPseudonym(pseudonym);
dummyDocumentStatus.setSequenceNo("SSSSSSSSSSSSS");
dummyDocumentStatus.setStatus(StatusType.UITGEREIKT);
dummyDocumentStatus.setStatusMu(MUStatusType.ACTIEF);
when(bsnkPseudonymDecryptorMock.decryptEp(anyString(), anyString(), anyString())).thenReturn(pseudonym);
when(documentStatusRepositoryMock.findByPseudonymAndDocTypeAndSequenceNo(anyString(), any(DocTypeType.class), anyString())).thenReturn(Optional.of(dummyDocumentStatus));
FetchRequest request = new FetchRequest();
request.setDocType(dummyDocumentStatus.getDocType());
request.setEpsc(encrypted);
request.setSequenceNo(dummyDocumentStatus.getSequenceNo());
Status status = documentStatusService.currentStatus(request);
assertEquals(Status.ISSUED, status);
}
|
@Bean
@ConfigurationPropertiesBinding
public StringToMatchTypeConverter stringToMatchTypeConverter() {
return new StringToMatchTypeConverter();
}
|
@Test
public void testStringToMatchTypeConverter() {
contextRunner.withPropertyValues(PREFIX + ".repository=BUCKET4J_JCACHE")
.run((context) -> assertThat(context).hasSingleBean(StringToMatchTypeConverter.class));
}
|
@Override
public AppResponse process(Flow flow, AppSessionRequest request) {
if (appSession.getRegistrationId() == null) {
return new NokResponse();
}
Map<String, String> result = digidClient.getExistingAccount(appSession.getRegistrationId(), appSession.getLanguage());
if (result.get(lowerUnderscore(STATUS)).equals("OK") && result.get(lowerUnderscore(ACCOUNT_ID)) != null) {
appSession.setAccountId(Long.valueOf(result.get(lowerUnderscore(ACCOUNT_ID))));
digidClient.remoteLog("54", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId()));
return new OkResponse();
} else if (result.get(lowerUnderscore(STATUS)).equals("PENDING")) {
// switch state to require replace action
appSession.setState(State.EXISTING_ACCOUNT_FOUND.name());
return new StatusResponse("PENDING");
} else {
return new NokResponse();
}
}
|
@Test
void processNOKMissingRegistrationTest(){
checkExistingAccount.getAppSession().setRegistrationId(null);
AppResponse appResponse = checkExistingAccount.process(flowMock, null);
assertTrue(appResponse instanceof NokResponse);
assertEquals("NOK", ((NokResponse) appResponse).getStatus());
}
|
@Override
public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext) throws IOException {
responseContext.getHeaders().add(X_FRAME_OPTIONS, (httpAllowEmbedding ? EmbeddingOptions.SAMEORIGIN : EmbeddingOptions.DENY).toString());
}
|
@Test
void disallowsEmbeddingIfConfigurationSettingIsFalse() throws IOException {
final EmbeddingControlFilter filter = new EmbeddingControlFilter(false);
final ContainerResponseContext responseContext = new ContainerResponse(requestContext, Response.ok().build());
filter.filter(requestContext, responseContext);
assertThat(responseContext.getHeaders())
.containsEntry("X-Frame-Options", Collections.singletonList("DENY"));
}
|
public static boolean isTotalCommentCounter(Counter counter) {
String sceneValue = counter.getId().getTag(SCENE);
if (StringUtils.isBlank(sceneValue)) {
return false;
}
return TOTAL_COMMENT_SCENE.equals(sceneValue);
}
|
@Test
void isTotalCommentCounter() {
MeterRegistry meterRegistry = new SimpleMeterRegistry();
Counter totalCommentCounter =
MeterUtils.totalCommentCounter(meterRegistry, "posts.content.halo.run/fake-post");
assertThat(MeterUtils.isTotalCommentCounter(totalCommentCounter)).isTrue();
assertThat(MeterUtils.isVisitCounter(totalCommentCounter)).isFalse();
}
|
@Override
public Point calculatePositionForPreview(
Keyboard.Key key, PreviewPopupTheme theme, int[] windowOffset) {
Point point = new Point(key.x + windowOffset[0], windowOffset[1]);
Rect padding = new Rect();
theme.getPreviewKeyBackground().getPadding(padding);
point.offset((key.width / 2), padding.bottom - theme.getVerticalOffset());
return point;
}
|
@Test
public void testCalculatePositionForPreviewWithNoneExtendAnimation() throws Exception {
mTheme.setPreviewAnimationType(PreviewPopupTheme.ANIMATION_STYLE_APPEAR);
int[] offsets = new int[] {50, 60};
Point result = mUnderTest.calculatePositionForPreview(mTestKey, mTheme, offsets);
Assert.assertEquals(mTestKey.x + mTestKey.width / 2 + offsets[0], result.x);
Assert.assertEquals(offsets[1], result.y);
}
|
public static List<SubscriptionItem> readFrom(
final InputStream in, @Nullable final ImportExportEventListener eventListener)
throws InvalidSourceException {
if (in == null) {
throw new InvalidSourceException("input is null");
}
final List<SubscriptionItem> channels = new ArrayList<>();
try {
final JsonObject parentObject = JsonParser.object().from(in);
if (!parentObject.has(JSON_SUBSCRIPTIONS_ARRAY_KEY)) {
throw new InvalidSourceException("Channels array is null");
}
final JsonArray channelsArray = parentObject.getArray(JSON_SUBSCRIPTIONS_ARRAY_KEY);
if (eventListener != null) {
eventListener.onSizeReceived(channelsArray.size());
}
for (final Object o : channelsArray) {
if (o instanceof JsonObject) {
final JsonObject itemObject = (JsonObject) o;
final int serviceId = itemObject.getInt(JSON_SERVICE_ID_KEY, 0);
final String url = itemObject.getString(JSON_URL_KEY);
final String name = itemObject.getString(JSON_NAME_KEY);
if (url != null && name != null && !url.isEmpty() && !name.isEmpty()) {
channels.add(new SubscriptionItem(serviceId, url, name));
if (eventListener != null) {
eventListener.onItemCompleted(name);
}
}
}
}
} catch (final Throwable e) {
throw new InvalidSourceException("Couldn't parse json", e);
}
return channels;
}
|
@Test
public void testInvalidSource() {
final List<String> invalidList = Arrays.asList(
"{}",
"",
null,
"gibberish");
for (final String invalidContent : invalidList) {
try {
if (invalidContent != null) {
final byte[] bytes = invalidContent.getBytes(StandardCharsets.UTF_8);
ImportExportJsonHelper.readFrom((new ByteArrayInputStream(bytes)), null);
} else {
ImportExportJsonHelper.readFrom(null, null);
}
fail("didn't throw exception");
} catch (final Exception e) {
final boolean isExpectedException =
e instanceof SubscriptionExtractor.InvalidSourceException;
assertTrue("\"" + e.getClass().getSimpleName()
+ "\" is not the expected exception", isExpectedException);
}
}
}
|
public static CloudConfiguration buildCloudConfigurationForStorage(Map<String, String> properties) {
return buildCloudConfigurationForStorage(properties, false);
}
|
@Test
public void testAzureBlobCloudConfiguration() {
Map<String, String> map = new HashMap<String, String>() {
{
put(CloudConfigurationConstants.AZURE_BLOB_SHARED_KEY, "XX");
put(CloudConfigurationConstants.AZURE_BLOB_CONTAINER, "XX");
put(CloudConfigurationConstants.AZURE_BLOB_SAS_TOKEN, "XX");
put(CloudConfigurationConstants.AZURE_BLOB_STORAGE_ACCOUNT, "XX");
put(CloudConfigurationConstants.AZURE_BLOB_ENDPOINT, "XX");
}
};
CloudConfiguration cc = CloudConfigurationFactory.buildCloudConfigurationForStorage(map);
Assert.assertEquals(cc.getCloudType(), CloudType.AZURE);
TCloudConfiguration tc = new TCloudConfiguration();
cc.toThrift(tc);
Configuration conf = new Configuration();
cc.applyToConfiguration(conf);
cc.toFileStoreInfo();
Assert.assertEquals(cc.toConfString(),
"AzureCloudConfiguration{resources='', jars='', hdpuser='', cred=AzureBlobCloudCredential{endpoint='XX', " +
"storageAccount='XX', sharedKey='XX', container='XX', sasToken='XX'}}");
}
|
@Deprecated
public DefaultMQPushConsumerImpl getDefaultMQPushConsumerImpl() {
return defaultMQPushConsumerImpl;
}
|
@Test
public void testPullMessage_ExceptionOccursWhenComputePullFromWhere() throws MQClientException {
final CountDownLatch countDownLatch = new CountDownLatch(1);
final MessageExt[] messageExts = new MessageExt[1];
pushConsumer.getDefaultMQPushConsumerImpl().setConsumeMessageService(
new ConsumeMessageConcurrentlyService(pushConsumer.getDefaultMQPushConsumerImpl(),
new MessageListenerConcurrently() {
@Override
public ConsumeConcurrentlyStatus consumeMessage(List<MessageExt> msgs,
ConsumeConcurrentlyContext context) {
messageExts[0] = msgs.get(0);
return null;
}
}));
pushConsumer.getDefaultMQPushConsumerImpl().setConsumeOrderly(true);
PullMessageService pullMessageService = mQClientFactory.getPullMessageService();
pullMessageService.executePullRequestImmediately(createPullRequest());
assertThat(messageExts[0]).isNull();
}
|
public Model<T> reproduceFromProvenance() throws ClassNotFoundException {
// Until now the object only holds the configuration for these objects; the following
// functions will actually re-instantiate them.
Trainer<T> newTrainer = recoverTrainer();
Dataset<T> newDataset = recoverDataset();
// Exposing the configuration manager means there could be an edge case where
// the invocation count is changed before the model is trained.
// Pass through a desired invocation count to prevent this behavior
// TODO: does not apply to inner trainers, figure out how to address this or if it needs to be addressed
int trainedInvocationCount = (int) this.modelProvenance
.getTrainerProvenance()
.getInstanceValues()
.get("train-invocation-count")
.getValue();
// This function actually re-trains a model rather than copying the original
return newTrainer.train(newDataset);
}
|
@Test
public void testBaggingTrainerAllInvocationsChange() throws IOException, ClassNotFoundException {
// This example has multiple trainers in the form of an ensemble, and all need to be set to the correct value
CARTRegressionTrainer subsamplingTree = new CARTRegressionTrainer(Integer.MAX_VALUE,
MIN_EXAMPLES, 0.0f, 0.5f, false, new MeanSquaredError(), Trainer.DEFAULT_SEED);
RandomForestTrainer<Regressor> rfT = new RandomForestTrainer<>(subsamplingTree, new AveragingCombiner(), 10);
Dataset<Regressor> trainData = new MutableDataset<>(getConfigurableRegressionDenseTrain());
Model<Regressor> model1 = rfT.train(trainData);
subsamplingTree.setInvocationCount(15);
Model<Regressor> model2 = rfT.train(trainData);
ReproUtil<Regressor> reproUtil = new ReproUtil<>(model2.getProvenance(), Regressor.class);
Model<Regressor> reproducedModel = reproUtil.reproduceFromProvenance();
// Make sure the inner trainer's setInvocationCount has taken effect
assertEquals(((TrainerProvenanceImpl) model2.getProvenance()
.getTrainerProvenance()
.getConfiguredParameters()
.get("innerTrainer"))
.getInstanceValues()
.get("train-invocation-count"),
((TrainerProvenanceImpl) reproducedModel.getProvenance()
.getTrainerProvenance()
.getConfiguredParameters()
.get("innerTrainer"))
.getInstanceValues()
.get("train-invocation-count"));
// Make sure the main RandomForestTrainer's setInvocationCount has taken effect correctly.
assertEquals(model2.getProvenance().getTrainerProvenance().getInstanceValues().get("train-invocation-count"),
reproducedModel.getProvenance().getTrainerProvenance().getInstanceValues().get("train-invocation-count"));
}
|
public static CharSequence escapeCsv(CharSequence value) {
return escapeCsv(value, false);
}
|
@Test
public void escapeCsvWithLineFeed() {
CharSequence value = "some text\n more text";
CharSequence expected = "\"some text\n more text\"";
escapeCsv(value, expected);
}
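// A hedged sketch of the RFC 4180 quoting rule the test relies on: a value
// containing a comma, double quote, or line break is wrapped in double
// quotes, with any embedded quotes doubled. Illustrative only; the real
// escapeCsv also takes a second boolean flag (passed as false above).
static CharSequence quoteIfNeeded(String value) {
    if (value.contains(",") || value.contains("\"")
            || value.contains("\n") || value.contains("\r")) {
        return "\"" + value.replace("\"", "\"\"") + "\"";
    }
    return value;
}
// quoteIfNeeded("some text\n more text") yields "\"some text\n more text\"",
// matching the expected value in the test.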
|
public static BaseNCodec of(String alphabet) {
return new BaseNCodec(alphabet);
}
|
@Test
void codec_generalizes_down_to_base_10() {
var b10 = BaseNCodec.of("0123456789");
verifyRoundtrip(b10, unhex("00"), "0");
verifyRoundtrip(b10, unhex("000f"), "015");
verifyRoundtrip(b10, unhex("ffff"), "65535");
// A large prime number: 2^252 + 27742317777372353535851937790883648493 (Curve25519 order)
var numStr = "7237005577332262213973186563042994240857116359379907606001950938285454250989";
var numBN = new BigInteger(numStr);
verifyRoundtrip(b10, numBN.toByteArray(), numStr);
}
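// A hedged sketch of the big-integer flavor of base-N encoding the test
// exercises: the input bytes are read as an unsigned big-endian integer and
// repeatedly divided by the alphabet size. Note the real codec preserves
// leading zero bytes (unhex("000f") encodes to "015" above); this sketch
// does not, so it is illustrative rather than a drop-in implementation.
static String encodeBaseN(byte[] data, String alphabet) {
    BigInteger base = BigInteger.valueOf(alphabet.length());
    BigInteger n = new BigInteger(1, data); // treat bytes as unsigned
    StringBuilder sb = new StringBuilder();
    do {
        BigInteger[] divRem = n.divideAndRemainder(base);
        sb.append(alphabet.charAt(divRem[1].intValue()));
        n = divRem[0];
    } while (n.signum() > 0);
    return sb.reverse().toString();
}
// encodeBaseN(unhex("ffff"), "0123456789") returns "65535", matching the
// round-trip assertion above.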
|
public void init() {
try {
ObjectName oName = new ObjectName(this.name + ":type=" + TaskManager.class.getSimpleName());
ManagementFactory.getPlatformMBeanServer().registerMBean(this, oName);
} catch (Exception e) {
LOGGER.error("registerMBean_fail", e);
}
}
|
@Test
void testInit() throws Exception {
taskManager.init();
ObjectName oName = new ObjectName(TaskManagerTest.class.getName() + ":type=" + TaskManager.class.getSimpleName());
assertTrue(ManagementFactory.getPlatformMBeanServer().isRegistered(oName));
}
|
static String canonicalQueryString(Map<String, String> attributes) {
List<String> components = getListOfEntries(attributes);
Collections.sort(components);
return canonicalQueryString(components);
}
|
@Test
public void canonicalQueryString() {
// given
Map<String, String> attributes = new HashMap<>();
attributes.put("second-attribute", "second-attribute+value");
attributes.put("attribute", "attribute+value");
attributes.put("name", "Name*");
// when
String result = AwsRequestUtils.canonicalQueryString(attributes);
assertEquals("attribute=attribute%2Bvalue&name=Name%2A&second-attribute=second-attribute%2Bvalue", result);
}
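// A hedged sketch of the per-component encoding the assertion above implies:
// RFC 3986 percent-encoding, where '+' becomes %2B and '*' becomes %2A.
// java.net.URLEncoder does form encoding instead, so it needs the usual
// three fix-ups. This helper is illustrative, not the class's actual code.
static String rfc3986Encode(String s) throws java.io.UnsupportedEncodingException {
    return java.net.URLEncoder.encode(s, "UTF-8")
            .replace("+", "%20")  // URLEncoder emits '+' for spaces; SigV4 wants %20
            .replace("*", "%2A")  // URLEncoder leaves '*' unencoded
            .replace("%7E", "~"); // '~' is unreserved and must stay literal
}
// rfc3986Encode("attribute+value") returns "attribute%2Bvalue", as in the
// expected canonical string above.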
|
public CoordinatorResult<OffsetCommitResponseData, CoordinatorRecord> commitOffset(
RequestContext context,
OffsetCommitRequestData request
) throws ApiException {
Group group = validateOffsetCommit(context, request);
// In the old consumer group protocol, the offset commits maintain the session if
// the group is in Stable or PreparingRebalance state.
if (group.type() == Group.GroupType.CLASSIC) {
ClassicGroup classicGroup = (ClassicGroup) group;
if (classicGroup.isInState(ClassicGroupState.STABLE) || classicGroup.isInState(ClassicGroupState.PREPARING_REBALANCE)) {
groupMetadataManager.rescheduleClassicGroupMemberHeartbeat(
classicGroup,
classicGroup.member(request.memberId())
);
}
}
final OffsetCommitResponseData response = new OffsetCommitResponseData();
final List<CoordinatorRecord> records = new ArrayList<>();
final long currentTimeMs = time.milliseconds();
final OptionalLong expireTimestampMs = expireTimestampMs(request.retentionTimeMs(), currentTimeMs);
request.topics().forEach(topic -> {
final OffsetCommitResponseTopic topicResponse = new OffsetCommitResponseTopic().setName(topic.name());
response.topics().add(topicResponse);
topic.partitions().forEach(partition -> {
if (isMetadataInvalid(partition.committedMetadata())) {
topicResponse.partitions().add(new OffsetCommitResponsePartition()
.setPartitionIndex(partition.partitionIndex())
.setErrorCode(Errors.OFFSET_METADATA_TOO_LARGE.code()));
} else {
log.debug("[GroupId {}] Committing offsets {} for partition {}-{} from member {} with leader epoch {}.",
request.groupId(), partition.committedOffset(), topic.name(), partition.partitionIndex(),
request.memberId(), partition.committedLeaderEpoch());
topicResponse.partitions().add(new OffsetCommitResponsePartition()
.setPartitionIndex(partition.partitionIndex())
.setErrorCode(Errors.NONE.code()));
final OffsetAndMetadata offsetAndMetadata = OffsetAndMetadata.fromRequest(
partition,
currentTimeMs,
expireTimestampMs
);
records.add(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
request.groupId(),
topic.name(),
partition.partitionIndex(),
offsetAndMetadata,
metadataImage.features().metadataVersion()
));
}
});
});
if (!records.isEmpty()) {
metrics.record(GroupCoordinatorMetrics.OFFSET_COMMITS_SENSOR_NAME, records.size());
}
return new CoordinatorResult<>(records, response);
}
|
@Test
public void testConsumerGroupOffsetCommitWithIllegalGenerationId() {
OffsetMetadataManagerTestContext context = new OffsetMetadataManagerTestContext.Builder().build();
// Create an empty group.
ConsumerGroup group = context.groupMetadataManager.getOrMaybeCreatePersistedConsumerGroup(
"foo",
true
);
// Add member.
group.updateMember(new ConsumerGroupMember.Builder("member")
.setMemberEpoch(10)
.setPreviousMemberEpoch(10)
.setClassicMemberMetadata(new ConsumerGroupMemberMetadataValue.ClassicMemberMetadata())
.build()
);
OffsetCommitRequestData request = new OffsetCommitRequestData()
.setGroupId("foo")
.setMemberId("member")
.setGenerationIdOrMemberEpoch(9)
.setTopics(Collections.singletonList(
new OffsetCommitRequestData.OffsetCommitRequestTopic()
.setName("bar")
.setPartitions(Collections.singletonList(
new OffsetCommitRequestData.OffsetCommitRequestPartition()
.setPartitionIndex(0)
.setCommittedOffset(100L)
))
));
// Verify that a smaller epoch is rejected.
assertThrows(IllegalGenerationException.class, () -> context.commitOffset(request));
// Verify that a larger epoch is rejected.
request.setGenerationIdOrMemberEpoch(11);
assertThrows(IllegalGenerationException.class, () -> context.commitOffset(request));
}
|
@Udf
public String concat(@UdfParameter final String... jsonStrings) {
if (jsonStrings == null) {
return null;
}
final List<JsonNode> nodes = new ArrayList<>(jsonStrings.length);
boolean allObjects = true;
for (final String jsonString : jsonStrings) {
if (jsonString == null) {
return null;
}
final JsonNode node = UdfJsonMapper.parseJson(jsonString);
if (node.isMissingNode()) {
return null;
}
if (allObjects && !node.isObject()) {
allObjects = false;
}
nodes.add(node);
}
JsonNode result = nodes.get(0);
if (allObjects) {
for (int i = 1; i < nodes.size(); i++) {
result = concatObjects((ObjectNode) result, (ObjectNode) nodes.get(i));
}
} else {
for (int i = 1; i < nodes.size(); i++) {
result = concatArrays(toArrayNode(result), toArrayNode(nodes.get(i)));
}
}
return UdfJsonMapper.writeValueAsJson(result);
}
|
@Test
public void shouldReturnNullIfArgumentIsNull() {
assertNull(udf.concat(null));
}
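// A brief usage sketch based on the branches above: when every argument
// parses to a JSON object the results are merged key by key; otherwise all
// arguments are treated as arrays and concatenated (the assumption here is
// that toArrayNode wraps non-array values in a single-element array).
udf.concat("{\"a\": 1}", "{\"b\": 2}"); // {"a":1,"b":2}
udf.concat("[1, 2]", "[3]");            // [1,2,3]
udf.concat("[1, 2]", "{\"a\": 1}");     // [1,2,{"a":1}] under that assumption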
|
public Rating getRatingForDensity(double value) {
return ratingBounds.entrySet().stream()
.filter(e -> e.getValue().match(value))
.map(Map.Entry::getKey)
.findFirst()
.orElseThrow(() -> new IllegalArgumentException(format("Invalid value '%s'", value)));
}
|
@Test
public void density_matching_exact_grid_values() {
assertThat(ratingGrid.getRatingForDensity(0.1)).isEqualTo(A);
assertThat(ratingGrid.getRatingForDensity(0.2)).isEqualTo(B);
assertThat(ratingGrid.getRatingForDensity(0.5)).isEqualTo(C);
assertThat(ratingGrid.getRatingForDensity(1)).isEqualTo(D);
}
|
@Override
public void getConfig(RuleBasedFilterConfig.Builder builder) {
Set<String> hostNames = endpoints.stream()
.flatMap(e -> e.names().stream())
.collect(Collectors.toCollection(() -> new LinkedHashSet<>()));
if (!hostNames.isEmpty()) {
Collection<String> hostnamesSorted = hostNames.stream().sorted().toList();
RuleBasedFilterConfig.Rule.Builder rule = new RuleBasedFilterConfig.Rule.Builder()
.hostNames(hostnamesSorted)
.pathExpressions(ContainerCluster.RESERVED_URI_PREFIX + "/{*}")
.pathExpressions(ContainerDocumentApi.DOCUMENT_V1_PREFIX + "/{*}")
.methods(List.of(PUT, POST, DELETE))
.action(BLOCK)
.name("block-feed-global-endpoints")
.blockResponseMessage("Feed to global endpoints are not allowed")
.blockResponseCode(405)
.blockResponseHeaders(new RuleBasedFilterConfig.Rule.BlockResponseHeaders.Builder()
.name("Allow")
.value("GET, OPTIONS, HEAD"));
builder.rule(rule);
}
builder.dryrun(dryRun);
builder.defaultRule.action(ALLOW);
}
|
@Test
void does_not_setup_blocking_rule_when_endpoints_empty() {
var filter = new BlockFeedGlobalEndpointsFilter(Set.of(), true);
var config = getConfig(filter);
assertEquals(0, config.rule().size());
}
|
@Override
public List<Node> sniff(List<Node> nodes) {
if (attribute == null || value == null) {
return nodes;
}
return nodes.stream()
.filter(node -> nodeMatchesFilter(node, attribute, value))
.collect(Collectors.toList());
}
|
@Test
void returnsNodesMatchingGivenFilter() throws Exception {
final List<Node> nodes = mockNodes();
final NodesSniffer nodesSniffer = new FilteredElasticsearchNodesSniffer("rack", "42");
assertThat(nodesSniffer.sniff(nodes)).containsExactly(nodeOnRack42);
}
|
public static <T> void concat(T[] sourceFirst, T[] sourceSecond, T[] dest) {
System.arraycopy(sourceFirst, 0, dest, 0, sourceFirst.length);
System.arraycopy(sourceSecond, 0, dest, sourceFirst.length, sourceSecond.length);
}
|
@Test(expected = NullPointerException.class)
public void concat_whenDestNull() {
Integer[] first = new Integer[]{1, 2, 3};
Integer[] second = new Integer[]{4};
Integer[] concatenated = null;
ArrayUtils.concat(first, second, concatenated);
fail();
}
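// A minimal usage sketch: the caller must pre-size the destination to hold
// both sources, because concat just issues two arraycopy calls into it and,
// as the test above shows, a null dest fails with NullPointerException.
Integer[] a = {1, 2, 3};
Integer[] b = {4};
Integer[] dest = new Integer[a.length + b.length];
ArrayUtils.concat(a, b, dest); // dest is now {1, 2, 3, 4}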
|
public ManagedChannel get(WindmillServiceAddress windmillServiceAddress) {
return channelCache.getUnchecked(windmillServiceAddress);
}
|
@Test
public void testLoadingCacheLoadsChannelWhenNotPresent() {
String channelName = "existingChannel";
ManagedChannel channel = newChannel(channelName);
Function<WindmillServiceAddress, ManagedChannel> channelFactory =
spy(
new Function<WindmillServiceAddress, ManagedChannel>() {
@Override
public ManagedChannel apply(WindmillServiceAddress windmillServiceAddress) {
return channel;
}
});
cache = newCache(channelFactory);
WindmillServiceAddress someAddress = mock(WindmillServiceAddress.class);
ManagedChannel cachedChannel = cache.get(someAddress);
assertSame(channel, cachedChannel);
verify(channelFactory, times(1)).apply(eq(someAddress));
}
|
public static ProxyBackendHandler newInstance(final SQLStatementContext sqlStatementContext, final String sql, final ConnectionSession connectionSession) {
TCLStatement tclStatement = (TCLStatement) sqlStatementContext.getSqlStatement();
if (tclStatement instanceof BeginTransactionStatement || tclStatement instanceof StartTransactionStatement) {
return new TransactionBackendHandler(tclStatement, TransactionOperationType.BEGIN, connectionSession);
}
if (tclStatement instanceof SetAutoCommitStatement) {
return new TransactionBackendHandler(tclStatement, TransactionOperationType.SET_AUTOCOMMIT, connectionSession);
}
if (tclStatement instanceof SavepointStatement) {
return new TransactionBackendHandler(tclStatement, TransactionOperationType.SAVEPOINT, connectionSession);
}
if (tclStatement instanceof ReleaseSavepointStatement) {
return new TransactionBackendHandler(tclStatement, TransactionOperationType.RELEASE_SAVEPOINT, connectionSession);
}
if (tclStatement instanceof CommitStatement) {
return new TransactionBackendHandler(tclStatement, TransactionOperationType.COMMIT, connectionSession);
}
if (tclStatement instanceof RollbackStatement) {
return ((RollbackStatement) tclStatement).getSavepointName().isPresent()
? new TransactionBackendHandler(tclStatement, TransactionOperationType.ROLLBACK_TO_SAVEPOINT, connectionSession)
: new TransactionBackendHandler(tclStatement, TransactionOperationType.ROLLBACK, connectionSession);
}
if (tclStatement instanceof SetTransactionStatement && OperationScope.GLOBAL != ((SetTransactionStatement) tclStatement).getScope()) {
return new TransactionSetHandler((SetTransactionStatement) tclStatement, connectionSession);
}
if (tclStatement instanceof XAStatement) {
return new TransactionXAHandler(sqlStatementContext, sql, connectionSession);
}
QueryContext queryContext = new QueryContext(sqlStatementContext, sql, Collections.emptyList(), new HintValueContext(), connectionSession.getConnectionContext(),
ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData());
return DatabaseConnectorFactory.getInstance().newInstance(queryContext, connectionSession.getDatabaseConnectionManager(), false);
}
|
@Test
void assertBroadcastBackendHandlerReturnedWhenTCLStatementNotHit() {
SQLStatementContext context = mock(SQLStatementContext.class);
when(context.getSqlStatement()).thenReturn(mock(TCLStatement.class));
DatabaseConnectorFactory mockFactory = mock(DatabaseConnectorFactory.class);
when(DatabaseConnectorFactory.getInstance()).thenReturn(mockFactory);
when(mockFactory.newInstance(any(QueryContext.class), nullable(ProxyDatabaseConnectionManager.class), anyBoolean())).thenReturn(mock(DatabaseConnector.class));
ShardingSphereMetaData metaData = mock(ShardingSphereMetaData.class);
when(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData()).thenReturn(metaData);
ConnectionSession connectionSession = mock(ConnectionSession.class);
ConnectionContext connectionContext = mockConnectionContext();
when(connectionSession.getConnectionContext()).thenReturn(connectionContext);
assertThat(TransactionBackendHandlerFactory.newInstance(context, null, connectionSession), instanceOf(DatabaseConnector.class));
}
|
@VisibleForTesting
static Map<String, String> generatePostfix(Set<java.nio.file.Path> pathKeys) {
Map<String, String> rawPathToMethodVer = new HashMap<>();
for (java.nio.file.Path path : pathKeys) {
java.nio.file.Path firstLevel = path.subpath(0, 1);
String version = isVersion(firstLevel.toString()) ? firstLevel.toString() : "";
rawPathToMethodVer.put(path.toString(), version.toUpperCase());
}
return rawPathToMethodVer;
}
|
@Test
public void testRawPathMapping() {
// The following is the exhaustive list of paths in the Tables service across versions
Set<String> paths = new HashSet<>();
paths.add("/v0/databases");
paths.add("/v0.9/databases");
paths.add("/v1/databases");
paths.add("/databases");
paths.add("/databases/{databaseId}/aclPolicies");
paths.add("/v0/databases/{databaseId}/aclPolicies");
paths.add("/databases/{databaseId}/tables/{tableId}/aclPolicies");
paths.add("/v0/databases/{databaseId}/tables/{tableId}/aclPolicies");
paths.add("/v0/databases/{databaseId}/tables");
paths.add("/databases/{databaseId}/tables");
paths.add("/databases/{databaseId}/tables/{tableId}");
paths.add("/v0/databases/{databaseId}/tables/{tableId}");
paths.add("/v0/databases/{databaseId}/tables/{tableId}/iceberg/v2/snapshots");
paths.add("/databases/{databaseId}/tables/{tableId}/iceberg/v2/snapshots");
Map<String, String> result =
MainApplicationConfig.generatePostfix(
paths.stream().map(Paths::get).collect(Collectors.toSet()));
Assertions.assertEquals(result.size(), paths.size());
Assertions.assertEquals(result.get("/v0/databases"), "V0");
Assertions.assertEquals(result.get("/v1/databases"), "V1");
Assertions.assertEquals(result.get("/databases"), "");
Assertions.assertEquals(result.get("/databases/{databaseId}/aclPolicies"), "");
Assertions.assertEquals(result.get("/v0/databases/{databaseId}/aclPolicies"), "V0");
Assertions.assertEquals(result.get("/databases/{databaseId}/tables/{tableId}/aclPolicies"), "");
Assertions.assertEquals(
result.get("/v0/databases/{databaseId}/tables/{tableId}/aclPolicies"), "V0");
Assertions.assertEquals(result.get("/v0/databases/{databaseId}/tables"), "V0");
Assertions.assertEquals(result.get("/databases/{databaseId}/tables"), "");
Assertions.assertEquals(result.get("/databases/{databaseId}/tables/{tableId}"), "");
Assertions.assertEquals(result.get("/v0/databases/{databaseId}/tables/{tableId}"), "V0");
Assertions.assertEquals(
result.get("/v0/databases/{databaseId}/tables/{tableId}/iceberg/v2/snapshots"), "V0");
Assertions.assertEquals(
result.get("/databases/{databaseId}/tables/{tableId}/iceberg/v2/snapshots"), "");
}
|
public boolean isEnable() {
return rpcClient.isRunning();
}
|
@Test
void testIsEnable() {
when(this.rpcClient.isRunning()).thenReturn(true);
assertTrue(client.isEnable());
verify(this.rpcClient, times(1)).isRunning();
}
|
public static Map<String, Map<String, String>> revertRegister(Map<String, Map<String, String>> register) {
Map<String, Map<String, String>> newRegister = new HashMap<>();
for (Map.Entry<String, Map<String, String>> entry : register.entrySet()) {
String serviceName = entry.getKey();
Map<String, String> serviceUrls = entry.getValue();
if (StringUtils.isContains(serviceName, ':') && StringUtils.isContains(serviceName, '/')) {
for (Map.Entry<String, String> entry2 : serviceUrls.entrySet()) {
String serviceUrl = entry2.getKey();
String serviceQuery = entry2.getValue();
Map<String, String> params = StringUtils.parseQueryString(serviceQuery);
String name = serviceName;
int i = name.indexOf('/');
if (i >= 0) {
params.put(GROUP_KEY, name.substring(0, i));
name = name.substring(i + 1);
}
i = name.lastIndexOf(':');
if (i >= 0) {
params.put(VERSION_KEY, name.substring(i + 1));
name = name.substring(0, i);
}
Map<String, String> newUrls = newRegister.computeIfAbsent(name, k -> new HashMap<String, String>());
newUrls.put(serviceUrl, StringUtils.toQueryString(params));
}
} else {
newRegister.put(serviceName, serviceUrls);
}
}
return newRegister;
}
|
@Test
void testRevertRegister() {
String key = "perf/dubbo.test.api.HelloService:1.0.0";
Map<String, Map<String, String>> register = new HashMap<String, Map<String, String>>();
Map<String, String> service = new HashMap<String, String>();
service.put("dubbo://127.0.0.1:20880/com.xxx.XxxService", null);
register.put(key, service);
Map<String, Map<String, String>> newRegister = UrlUtils.revertRegister(register);
Map<String, Map<String, String>> expectedRegister = new HashMap<String, Map<String, String>>();
service.put("dubbo://127.0.0.1:20880/com.xxx.XxxService", "group=perf&version=1.0.0");
expectedRegister.put("dubbo.test.api.HelloService", service);
assertEquals(expectedRegister, newRegister);
}
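// A minimal sketch of the key decomposition revertRegister performs: a key
// shaped like "group/interface:version" loses its group (before the first
// '/') and version (after the last ':'), which move into the query string.
String key = "perf/dubbo.test.api.HelloService:1.0.0";
int slash = key.indexOf('/');
String group = key.substring(0, slash);              // "perf"
String rest = key.substring(slash + 1);              // "dubbo.test.api.HelloService:1.0.0"
int colon = rest.lastIndexOf(':');
String version = rest.substring(colon + 1);          // "1.0.0"
String serviceInterface = rest.substring(0, colon);  // "dubbo.test.api.HelloService"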
|
@PreAuthorize("hasAnyAuthority('SYS_ADMIN', 'TENANT_ADMIN')")
@PostMapping(value = "/api/image", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
public TbResourceInfo uploadImage(@RequestPart MultipartFile file,
@RequestPart(required = false) String title,
@RequestPart(required = false) String imageSubType) throws Exception {
SecurityUser user = getCurrentUser();
TbResource image = new TbResource();
image.setTenantId(user.getTenantId());
accessControlService.checkPermission(user, Resource.TB_RESOURCE, Operation.CREATE, null, image);
resourceValidator.validateResourceSize(user.getTenantId(), null, file.getSize());
image.setFileName(file.getOriginalFilename());
if (StringUtils.isNotEmpty(title)) {
image.setTitle(title);
} else {
image.setTitle(file.getOriginalFilename());
}
ResourceSubType subType = ResourceSubType.IMAGE;
if (StringUtils.isNotEmpty(imageSubType)) {
subType = ResourceSubType.valueOf(imageSubType);
}
image.setResourceType(ResourceType.IMAGE);
image.setResourceSubType(subType);
ImageDescriptor descriptor = new ImageDescriptor();
descriptor.setMediaType(file.getContentType());
image.setDescriptorValue(descriptor);
image.setData(file.getBytes());
image.setPublic(true);
return tbImageService.save(image, user);
}
|
@Test
public void testUploadImageWithSameFilename() throws Exception {
String filename = "my_jpeg_image.jpg";
TbResourceInfo imageInfo1 = uploadImage(HttpMethod.POST, "/api/image", filename, "image/jpeg", JPEG_IMAGE);
assertThat(imageInfo1.getTitle()).isEqualTo(filename);
assertThat(imageInfo1.getFileName()).isEqualTo(filename);
assertThat(imageInfo1.getResourceKey()).isEqualTo(filename);
TbResourceInfo imageInfo2 = uploadImage(HttpMethod.POST, "/api/image", filename, "image/jpeg", JPEG_IMAGE);
assertThat(imageInfo2.getTitle()).isEqualTo(filename);
assertThat(imageInfo2.getFileName()).isEqualTo(filename);
assertThat(imageInfo2.getResourceKey()).isEqualTo("my_jpeg_image_(1).jpg");
TbResourceInfo imageInfo3 = uploadImage(HttpMethod.POST, "/api/image", filename, "image/jpeg", JPEG_IMAGE);
assertThat(imageInfo3.getTitle()).isEqualTo(filename);
assertThat(imageInfo3.getFileName()).isEqualTo(filename);
assertThat(imageInfo3.getResourceKey()).isEqualTo("my_jpeg_image_(2).jpg");
}
|
public boolean performCrashDetectingFlow() {
final File newCrashFile = new File(mApp.getFilesDir(), NEW_CRASH_FILENAME);
if (newCrashFile.isFile()) {
String ackReportFilename = getAckReportFilename();
StringBuilder header = new StringBuilder();
StringBuilder report = new StringBuilder();
try (BufferedReader reader =
new BufferedReader(
new InputStreamReader(
mApp.openFileInput(NEW_CRASH_FILENAME), Charset.forName("UTF-8")))) {
try (BufferedWriter writer =
new BufferedWriter(
new OutputStreamWriter(
mApp.openFileOutput(ackReportFilename, Context.MODE_PRIVATE),
Charset.forName("UTF-8")))) {
Logger.i(TAG, "Archiving crash report to %s.", ackReportFilename);
Logger.d(TAG, "Crash report:");
String line;
boolean stillInHeader = true;
while (null != (line = reader.readLine())) {
writer.write(line);
writer.newLine();
report.append(line).append(NEW_LINE);
if (line.equals(HEADER_BREAK_LINE)) stillInHeader = false;
if (stillInHeader) header.append(line).append(NEW_LINE);
Logger.d(TAG, "err: %s", line);
}
}
} catch (Exception e) {
Logger.e(TAG, "Failed to write crash report to archive!");
return false;
}
if (!newCrashFile.delete()) {
Logger.e(TAG, "Failed to delete crash log! %s", newCrashFile.getAbsolutePath());
}
sendNotification(
header.toString(), report.toString(), new File(mApp.getFilesDir(), ackReportFilename));
return true;
}
return false;
}
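// A hedged wiring sketch (the constructor arguments and call site are
// assumptions; the source only shows performCrashDetectingFlow itself). A prior
// run's uncaught-exception handler persists NEW_CRASH_FILENAME; the next launch
// archives it and raises the notification:
@Override
public void onCreate() {
    super.onCreate();
    ChewbaccaUncaughtExceptionHandler crashHandler =
        new ChewbaccaUncaughtExceptionHandler(this, Thread.getDefaultUncaughtExceptionHandler());
    Thread.setDefaultUncaughtExceptionHandler(crashHandler);
    crashHandler.performCrashDetectingFlow();
}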
|
@Test
public void testCallsDetectedIfPreviouslyCrashed() throws Exception {
Context app = ApplicationProvider.getApplicationContext();
var notificationDriver = Mockito.mock(NotificationDriver.class);
var notificationBuilder = Mockito.mock(NotifyBuilder.class);
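    // The builder mocks return themselves so the fluent setter chain keeps working.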
Mockito.doReturn(notificationBuilder)
.when(notificationDriver)
.buildNotification(Mockito.any(), Mockito.anyInt(), Mockito.anyInt());
Mockito.doReturn(notificationBuilder).when(notificationBuilder).setContentText(Mockito.any());
Mockito.doReturn(notificationBuilder).when(notificationBuilder).setColor(Mockito.anyInt());
Mockito.doReturn(notificationBuilder).when(notificationBuilder).setDefaults(Mockito.anyInt());
Mockito.doReturn(notificationBuilder).when(notificationBuilder).setContentIntent(Mockito.any());
Mockito.doReturn(notificationBuilder)
.when(notificationBuilder)
.setAutoCancel(Mockito.anyBoolean());
Mockito.doReturn(notificationBuilder)
.when(notificationBuilder)
.setOnlyAlertOnce(Mockito.anyBoolean());
TestableChewbaccaUncaughtExceptionHandler underTest =
new TestableChewbaccaUncaughtExceptionHandler(app, null, notificationDriver);
File newReport =
new File(app.getFilesDir(), ChewbaccaUncaughtExceptionHandler.NEW_CRASH_FILENAME);
List<String> reportTextLines =
Arrays.asList(
"header text",
"header 2",
ChewbaccaUncaughtExceptionHandler.HEADER_BREAK_LINE,
"report text 1",
"report text 2");
Files.write(newReport.toPath(), reportTextLines);
Assert.assertTrue(newReport.exists());
Assert.assertTrue(underTest.performCrashDetectingFlow());
Assert.assertFalse(newReport.exists());
File[] ackFiles = app.getFilesDir().listFiles();
Assert.assertEquals(1, ackFiles.length);
Matcher matcher =
Pattern.compile(
ChewbaccaUncaughtExceptionHandler.ACK_CRASH_FILENAME_TEMPLATE.replace(
"{TIME}", "\\d+"))
.matcher(ackFiles[0].getName());
Assert.assertTrue(ackFiles[0].getName() + " did not match", matcher.find());
List<String> text = Files.readAllLines(ackFiles[0].toPath());
Assert.assertEquals(5, text.size());
for (int lineIndex = 0; lineIndex < reportTextLines.size(); lineIndex++) {
Assert.assertEquals(
"line " + lineIndex + " not equals", reportTextLines.get(lineIndex), text.get(lineIndex));
}
Mockito.verify(notificationDriver)
.buildNotification(
NotificationIds.CrashDetected,
R.drawable.ic_crash_detected,
R.string.ime_crashed_title);
Mockito.verify(notificationDriver).notify(Mockito.notNull(), Mockito.eq(true));
ArgumentCaptor<PendingIntent> captor = ArgumentCaptor.forClass(PendingIntent.class);
Mockito.verify(notificationBuilder).setContentIntent(captor.capture());
Intent savedIntent = Shadows.shadowOf(captor.getValue()).getSavedIntent();
    Assert.assertEquals(
        Intent.FLAG_ACTIVITY_NEW_TASK, savedIntent.getFlags() & Intent.FLAG_ACTIVITY_NEW_TASK);
Assert.assertEquals(Intent.ACTION_VIEW, savedIntent.getAction());
Assert.assertEquals(Uri.parse("https://example.com"), savedIntent.getData());
BugReportDetails reportDetails =
savedIntent.getParcelableExtra(BugReportDetails.EXTRA_KEY_BugReportDetails);
Assert.assertNotNull(reportDetails);
Assert.assertEquals(
reportDetails.crashHeader.trim(), String.join("\n", reportTextLines.subList(0, 2)));
Assert.assertEquals(reportDetails.crashReportText.trim(), String.join("\n", reportTextLines));
Assert.assertEquals("file", reportDetails.fullReport.getScheme());
Assert.assertEquals(Uri.fromFile(ackFiles[0]), reportDetails.fullReport);
}
|