| focal_method (string, length 13–60.9k) | test_case (string, length 25–109k) |
|---|---|
/**
 * Renders {@code value} as a percentage string with at most one fraction digit,
 * e.g. {@code 12.3%}. English locale symbols are used so the decimal separator
 * is always a dot, regardless of the default locale.
 */
static String formatPercent(double value) {
    DecimalFormatSymbols englishSymbols = DecimalFormatSymbols.getInstance(Locale.ENGLISH);
    DecimalFormat oneFractionDigit = new DecimalFormat("#.#", englishSymbols);
    return oneFractionDigit.format(value).concat("%");
}
|
// Verifies formatPercent: whole numbers render without a trailing ".0" and
// fractions are rounded (DecimalFormat default half-even) to one digit.
@Test
public void format_percent() {
assertThat(formatPercent(0d)).isEqualTo("0%");
assertThat(formatPercent(12.345)).isEqualTo("12.3%");
assertThat(formatPercent(12.56)).isEqualTo("12.6%");
}
|
/**
 * Reads one byte from the underlying UFS stream, advancing the logical
 * position and incrementing the bytes-read-from-UFS metric on success.
 *
 * @return the byte read (0-255), or -1 at end of file
 * @throws IOException if the underlying stream read fails
 */
@Override
public int read() throws IOException {
if (mPosition == mLength) { // at end of file
return -1;
}
// (Re)open or reposition the underlying UFS stream as required.
updateStreamIfNeeded();
int res = mUfsInStream.get().read();
if (res == -1) {
// Underlying stream hit EOF before mLength; report EOF without
// advancing mPosition or counting a byte.
return -1;
}
mPosition++;
Metrics.BYTES_READ_FROM_UFS.inc(1);
return res;
}
|
// Reading into a ByteBuffer larger than the file must return only the
// available bytes (CHUNK_SIZE), then -1 on the subsequent read at EOF.
@Test
public void readOutOfBoundByteBuffer() throws IOException, AlluxioException {
AlluxioURI ufsPath = getUfsPath();
createFile(ufsPath, CHUNK_SIZE);
// Buffer is deliberately twice the file size.
ByteBuffer buffer = ByteBuffer.allocate(CHUNK_SIZE * 2);
try (FileInStream inStream = getStream(ufsPath)) {
assertEquals(CHUNK_SIZE, inStream.read(buffer));
assertTrue(BufferUtils.matchIncreasingByteBuffer(0, CHUNK_SIZE, buffer));
assertEquals(-1, inStream.read(buffer));
}
}
|
/**
 * Handles a task failure while stopping with a savepoint. The first failure
 * schedules exactly one state transition once the savepoint future settles;
 * later failures are ignored because the resulting transition is identical.
 *
 * @param cause the task failure
 * @param failureLabels labels to enrich failure handling, completed async
 */
@Override
void onFailure(Throwable cause, CompletableFuture<Map<String, String>> failureLabels) {
if (hasPendingStateTransition) {
// the error handling remains the same independent of how many tasks have failed
// we don't want to initiate the same state transition multiple times, so we exit early
// this could also be achieved via Context#runIfState, but that'd spam the logs
return;
}
hasPendingStateTransition = true;
FutureUtils.assertNoException(
internalSavepointFuture.handle(
(savepoint, savepointError) -> {
// if savepointError is null then the savepoint has been created
// successfully, but the job failed while committing side effects,
// so we enrich the exception for the user
final Throwable ex =
savepointError != null
? cause
: new StopWithSavepointStoppingException(
savepoint, getJobId(), cause);
operationFailureCause = ex;
// Decide restart vs. fail based on the context's failure handling.
FailureResultUtil.restartOrFail(
context.howToHandleFailure(ex, failureLabels), context, this);
return null;
}));
}
|
// A task failure arriving before the savepoint settles must not transition
// immediately; once the savepoint fails too, a restart must be scheduled.
@Test
void testConcurrentSavepointFailureAndGloballyTerminalStateCauseRestart() throws Exception {
try (MockStopWithSavepointContext ctx = new MockStopWithSavepointContext()) {
CheckpointScheduling mockStopWithSavepointOperations = new MockCheckpointScheduling();
CompletableFuture<String> savepointFuture = new CompletableFuture<>();
StateTrackingMockExecutionGraph executionGraph = new StateTrackingMockExecutionGraph();
StopWithSavepoint sws =
createStopWithSavepoint(
ctx, mockStopWithSavepointOperations, executionGraph, savepointFuture);
ctx.setStopWithSavepoint(sws);
ctx.setHowToHandleFailure(failure -> FailureResult.canRestart(failure, Duration.ZERO));
sws.onFailure(
new Exception("task failure"),
CompletableFuture.completedFuture(Collections.emptyMap()));
// this is a sanity check that we haven't scheduled a state transition
ctx.triggerExecutors();
ctx.setExpectRestarting(assertNonNull());
savepointFuture.completeExceptionally(new Exception("savepoint failure"));
ctx.triggerExecutors();
}
}
|
/** Tracks a single event by delegating all work to {@link #process}. */
@Override
public void trackEvent(InputData input) {
process(input);
}
|
// End-to-end H5 (JS bridge) event test: feeds a raw $WebClick payload through
// TrackEventProcessor and asserts the SDK injects the expected device/app
// preset properties via the track-event callback.
@Test
public void trackEventH5() {
initSensors();
SensorsDataAPI.sharedInstance().setTrackEventCallBack(new SensorsDataTrackEventCallBack() {
@Override
public boolean onTrackEvent(String eventName, JSONObject eventProperties) {
assertEquals("$WebClick", eventName);
// $os/$os_version depend on whether the device runs HarmonyOS.
String version = DeviceUtils.getHarmonyOSVersion();
if (TextUtils.isEmpty(version)) {
assertEquals(eventProperties.opt("$os"), "Android");
assertEquals(eventProperties.opt("$os_version"), DeviceUtils.getOS());
} else {
assertEquals(eventProperties.opt("$os"), "HarmonyOS");
assertEquals(eventProperties.opt("$os_version"), version);
}
// Library identity comes from the H5 payload; device/app presets are
// expected to be filled in natively by the SDK.
assertEquals(eventProperties.opt("$lib"), "js");
assertEquals(eventProperties.opt("$lib_version"), "1.14.23");
assertEquals(eventProperties.opt("$manufacturer"), DeviceUtils.getManufacturer());
assertEquals(eventProperties.opt("$model"), DeviceUtils.getModel());
assertEquals(eventProperties.opt("$brand"), DeviceUtils.getBrand());
assertEquals(eventProperties.opt("$app_version"), AppInfoUtils.getAppVersionName(mApplication));
int[] size = DeviceUtils.getDeviceSize(mApplication);
assertEquals(eventProperties.opt("$screen_width"), size[0]);
assertEquals(eventProperties.opt("$screen_height"), size[1]);
assertEquals(eventProperties.opt("$carrier"), SensorsDataUtils.getOperator(mApplication));
assertEquals(eventProperties.opt("$timezone_offset"), TimeUtils.getZoneOffset());
assertEquals(eventProperties.opt("$app_id"), AppInfoUtils.getProcessName(mApplication));
return true;
}
});
// Raw H5 payload as delivered over the JS bridge (kept verbatim).
InputData inputData = new InputData();
inputData.setExtras("{\"server_url\":\"https://sdkdebugtest.datasink.sensorsdata.cn/sa.gif?project=default&token=cfb8b60e42e0ae9b\"," +
"\"distinct_id\":\"181b8fcc33747-0e98015efae734-5f2b2f1c-277920-181b8fcc33bec\",\"lib\":{\"$lib\":\"js\",\"$lib_method\":\"code\",\"$lib_version\":\"1.14.23\"}," +
"\"properties\":{\"$screen_height\":772,\"$screen_width\":360,\"$lib\":\"js\",\"$lib_version\":\"1.14.23\",\"$latest_traffic_source_type\":\"url的domain解析失败\",\"$latest_search_keyword\":\"url的domain解析失败\",\"$latest_referrer\":\"url的domain解析失败\",\"$device_id\":\"181b8fcc33747-0e98015efae734-5f2b2f1c-277920-181b8fcc33bec\",\"$element_type\":\"button\",\"$element_class_name\":\"\",\"$element_content\":\"test\",\"$url\":\"file:///android_asset/new_h5_test/index.html\",\"$url_path\":\"/android_asset/new_h5_test/index.html\",\"$title\":\"sdk demo ls\",\"$viewport_width\":360,\"$element_selector\":\"body > div:nth-of-type(1) > button:nth-of-type(1)\",\"timepppp\":\"2022-07-01 17:00:50.771\",\"$is_first_day\":false},\"anonymous_id\":\"181b8fcc33747-0e98015efae734-5f2b2f1c-277920-181b8fcc33bec\"," +
"\"type\":\"track\",\"event\":\"$WebClick\",\"time\":1656666050772,\"_track_id\":887940781,\"_flush_time\":1656666050781}");
TrackEventProcessor eventProcessor = new TrackEventProcessor(SensorsDataAPI.sharedInstance().getSAContextManager());
eventProcessor.trackEvent(inputData);
}
|
/**
 * Copies properties from {@code source} to {@code target} using the default
 * converter, skipping the property names listed in {@code ignore}.
 * Delegates to the converter-accepting overload.
 *
 * @return target, for chaining
 */
public static <T, S> T copy(S source, T target, String... ignore) {
return copy(source, target, DEFAULT_CONVERT, ignore);
}
|
// FastBeanCopier must work with JDK dynamic proxies: reading via the proxy's
// getName and writing back through setName (captured in the AtomicReference).
@Test
public void testProxy() {
AtomicReference<Object> reference = new AtomicReference<>();
ProxyTest test = (ProxyTest) Proxy.newProxyInstance(ClassUtils.getDefaultClassLoader(),
new Class[]{ProxyTest.class}, (proxy, method, args) -> {
if (method.getName().equals("getName")) {
return "test";
}
if (method.getName().equals("setName")) {
reference.set(args[0]);
return null;
}
return null;
});
Target source = new Target();
// Proxy -> bean: the proxy's getName value must land on the bean.
FastBeanCopier.copy(test, source);
Assert.assertEquals(source.getName(), test.getName());
source.setName("test2");
// Bean -> proxy: the bean's value must be delivered to the proxy's setName.
FastBeanCopier.copy(source, test);
Assert.assertEquals(reference.get(), source.getName());
}
|
/**
 * Parses {@code javaInput} with javac, fails on parse diagnostics, then walks
 * the AST with a JDK-version-appropriate visitor to emit formatted output
 * into {@code javaOutput}.
 *
 * @throws FormatterException if the source contains javac error diagnostics
 */
static void format(final JavaInput javaInput, JavaOutput javaOutput, JavaFormatterOptions options)
throws FormatterException {
Context context = new Context();
DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<>();
context.put(DiagnosticListener.class, diagnostics);
// Keep string concatenations un-folded so original token positions survive.
Options.instance(context).put("allowStringFolding", "false");
Options.instance(context).put("--enable-preview", "true");
JCCompilationUnit unit;
JavacFileManager fileManager = new JavacFileManager(context, true, UTF_8);
try {
fileManager.setLocation(StandardLocation.PLATFORM_CLASS_PATH, ImmutableList.of());
} catch (IOException e) {
// impossible
throw new IOError(e);
}
// In-memory source wrapper so javac reads the input text directly.
SimpleJavaFileObject source =
new SimpleJavaFileObject(URI.create("source"), JavaFileObject.Kind.SOURCE) {
@Override
public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException {
return javaInput.getText();
}
};
Log.instance(context).useSource(source);
ParserFactory parserFactory = ParserFactory.instance(context);
JavacParser parser =
parserFactory.newParser(
javaInput.getText(),
/* keepDocComments= */ true,
/* keepEndPos= */ true,
/* keepLineMap= */ true);
unit = parser.parseCompilationUnit();
unit.sourcefile = source;
javaInput.setCompilationUnit(unit);
// Abort on any error-severity diagnostic collected during parsing.
Iterable<Diagnostic<? extends JavaFileObject>> errorDiagnostics =
Iterables.filter(diagnostics.getDiagnostics(), Formatter::errorDiagnostic);
if (!Iterables.isEmpty(errorDiagnostics)) {
throw FormatterException.fromJavacDiagnostics(errorDiagnostics);
}
OpsBuilder builder = new OpsBuilder(javaInput, javaOutput);
// Output the compilation unit.
// Pick the newest AST visitor the running JDK supports (loaded reflectively
// because the newer visitors only compile on newer JDKs).
JavaInputAstVisitor visitor;
if (Runtime.version().feature() >= 21) {
visitor =
createVisitor(
"com.google.googlejavaformat.java.java21.Java21InputAstVisitor", builder, options);
} else if (Runtime.version().feature() >= 17) {
visitor =
createVisitor(
"com.google.googlejavaformat.java.java17.Java17InputAstVisitor", builder, options);
} else {
visitor = new JavaInputAstVisitor(builder, options.indentationMultiplier());
}
visitor.scan(unit, null);
builder.sync(javaInput.getText().length());
builder.drain();
// Compute line breaks against the column limit and write the result.
Doc doc = new DocBuilder().withOps(builder.build()).build();
doc.computeBreaks(javaOutput.getCommentsHelper(), MAX_LINE_LENGTH, new Doc.State(+0, 0));
doc.write(javaOutput);
javaOutput.flush();
}
|
// An --offset/--length range beyond the end of the file must fail with exit
// code 1 and a descriptive error, not format or crash.
@Test
public void testFormatOffsetOutOfRange() throws Exception {
String input = "class Foo{}\n";
Path tmpdir = testFolder.newFolder().toPath();
Path path = tmpdir.resolve("Foo.java");
Files.writeString(path, input);
StringWriter out = new StringWriter();
StringWriter err = new StringWriter();
Main main = new Main(new PrintWriter(out, true), new PrintWriter(err, true), System.in);
String[] args = {"--offset", "9998", "--length", "1", path.toString()};
assertThat(main.format(args)).isEqualTo(1);
assertThat(err.toString())
.contains("error: invalid offset (9998) or length (1); offset + length (9999)");
}
|
/**
 * Returns whether this plugin may run on {@code currentOS}: true when no
 * target operating systems are declared (no restriction), or when any
 * declared name matches case-insensitively.
 */
public boolean isCurrentOSValidForThisPlugin(String currentOS) {
    if (about == null || about.targetOperatingSystems.isEmpty()) {
        return true;
    }
    return about.targetOperatingSystems.stream()
            .anyMatch(targetOs -> targetOs.equalsIgnoreCase(currentOS));
}
|
// OS matching must be case-insensitive, reject non-matching OS lists, and
// accept when any entry of a multi-OS list matches.
@Test
void shouldDoACaseInsensitiveMatchForValidOSesAgainstCurrentOS() {
assertThat(descriptorWithTargetOSes("linux").isCurrentOSValidForThisPlugin("Linux")).isTrue();
assertThat(descriptorWithTargetOSes("LiNuX").isCurrentOSValidForThisPlugin("Linux")).isTrue();
assertThat(descriptorWithTargetOSes("windows").isCurrentOSValidForThisPlugin("Linux")).isFalse();
assertThat(descriptorWithTargetOSes("windOWS").isCurrentOSValidForThisPlugin("Linux")).isFalse();
assertThat(descriptorWithTargetOSes("WinDOWs", "LINUx").isCurrentOSValidForThisPlugin("Linux")).isTrue();
assertThat(descriptorWithTargetOSes("WINDows", "Sunos", "Mac os x").isCurrentOSValidForThisPlugin("Linux")).isFalse();
}
|
/**
 * Builds the composite group key for (dataId, group, tenant) by delegating
 * to {@link #doGetKey}.
 */
public static String getKeyTenant(String dataId, String group, String tenant) {
return doGetKey(dataId, group, tenant);
}
|
// Key parts are joined with '+': dataId+group+<third part>.
// NOTE(review): the local is named "datumStr" but it is passed as the tenant
// argument of getKeyTenant — presumably just a naming leftover; confirm.
@Test
public void getKeyTenant() {
String dataId = "dataId";
String group = "group";
String datumStr = "datumStr";
String expected = "dataId+group+datumStr";
String keyTenant = GroupKey.getKeyTenant(dataId, group, datumStr);
Assert.isTrue(keyTenant.equals(expected));
}
|
/**
 * Deep-clones {@code object}, choosing a strategy by type: strings are
 * returned as-is (immutable); collections/maps holding non-Serializable
 * elements are round-tripped through JSON with their element types preserved;
 * JsonNodes use deepCopy; Serializable objects use Java serialization with a
 * JSON fallback; everything else falls back to JSON cloning.
 */
@Override
public <T> T clone(T object) {
if (object instanceof String) {
return object;
} else if (object instanceof Collection) {
// Probe one element to decide whether Java serialization can work.
Object firstElement = findFirstNonNullElement((Collection) object);
if (firstElement != null && !(firstElement instanceof Serializable)) {
JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), firstElement.getClass());
return objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type);
}
} else if (object instanceof Map) {
// Probe one entry; either a non-Serializable key or value forces JSON.
Map.Entry firstEntry = this.findFirstNonNullEntry((Map) object);
if (firstEntry != null) {
Object key = firstEntry.getKey();
Object value = firstEntry.getValue();
if (!(key instanceof Serializable) || !(value instanceof Serializable)) {
JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), key.getClass(), value.getClass());
return (T) objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type);
}
}
} else if (object instanceof JsonNode) {
return (T) ((JsonNode) object).deepCopy();
}
if (object instanceof Serializable) {
try {
return (T) SerializationHelper.clone((Serializable) object);
} catch (SerializationException e) {
//it is possible that object itself implements java.io.Serializable, but underlying structure does not
//in this case we switch to the other JSON marshaling strategy which doesn't use the Java serialization
}
}
return jsonClone(object);
}
|
// A map whose only entry has a null value must clone to an equal but
// distinct map instance (null values must not break the probe logic).
@Test
public void should_clone_map_with_null_value() {
Map<String, Object> original = new HashMap<>();
original.put("null", null);
Object cloned = serializer.clone(original);
assertEquals(original, cloned);
assertNotSame(original, cloned);
}
|
/**
 * Returns {@code source} unchanged, or the empty string when it is null.
 */
public static String nullToEmpty( String source ) {
    return source == null ? EMPTY_STRING : source;
}
|
// null maps to "", non-null values pass through untouched.
@Test
public void testNullToEmpty() {
assertEquals( "", Const.nullToEmpty( null ) );
assertEquals( "value", Const.nullToEmpty( "value" ) );
}
|
/**
 * Sends {@code pushNotification} via FCM. The Android priority follows the
 * notification's urgency, the payload key follows its type, and the send is
 * timed. FCM messaging errors are mapped to a failed
 * {@link SendPushNotificationResult} (flagging UNREGISTERED tokens); any
 * other error is rethrown wrapped.
 */
@Override
public CompletableFuture<SendPushNotificationResult> sendNotification(PushNotification pushNotification) {
Message.Builder builder = Message.builder()
.setToken(pushNotification.deviceToken())
.setAndroidConfig(AndroidConfig.builder()
.setPriority(pushNotification.urgent() ? AndroidConfig.Priority.HIGH : AndroidConfig.Priority.NORMAL)
.build());
// Exhaustive switch: one data key per notification type.
final String key = switch (pushNotification.notificationType()) {
case NOTIFICATION -> "newMessageAlert";
case ATTEMPT_LOGIN_NOTIFICATION_HIGH_PRIORITY -> "attemptLoginContext";
case CHALLENGE -> "challenge";
case RATE_LIMIT_CHALLENGE -> "rateLimitChallenge";
};
builder.putData(key, pushNotification.data() != null ? pushNotification.data() : "");
final Timer.Sample sample = Timer.start();
return GoogleApiUtil.toCompletableFuture(firebaseMessagingClient.sendAsync(builder.build()), executor)
// Record the send duration whether the call succeeded or failed.
.whenComplete((ignored, throwable) -> sample.stop(SEND_NOTIFICATION_TIMER))
.thenApply(ignored -> new SendPushNotificationResult(true, Optional.empty(), false, Optional.empty()))
.exceptionally(throwable -> {
if (ExceptionUtils.unwrap(throwable) instanceof final FirebaseMessagingException firebaseMessagingException) {
final String errorCode;
if (firebaseMessagingException.getMessagingErrorCode() != null) {
errorCode = firebaseMessagingException.getMessagingErrorCode().name();
} else {
logger.warn("Received an FCM exception with no error code", firebaseMessagingException);
errorCode = "unknown";
}
final boolean unregistered =
firebaseMessagingException.getMessagingErrorCode() == MessagingErrorCode.UNREGISTERED;
return new SendPushNotificationResult(false, Optional.of(errorCode), unregistered, Optional.empty());
} else {
// Non-FCM failures are unexpected; propagate them.
throw ExceptionUtils.wrap(throwable);
}
});
}
|
// An UNREGISTERED FCM error must surface as a rejected result with the
// matching error code and the unregistered flag set.
@Test
void testSendMessageUnregistered() {
final PushNotification pushNotification = new PushNotification("foo", PushNotification.TokenType.FCM, PushNotification.NotificationType.NOTIFICATION, null, null, null, true);
final FirebaseMessagingException unregisteredException = mock(FirebaseMessagingException.class);
when(unregisteredException.getMessagingErrorCode()).thenReturn(MessagingErrorCode.UNREGISTERED);
final SettableApiFuture<String> sendFuture = SettableApiFuture.create();
sendFuture.setException(unregisteredException);
when(firebaseMessaging.sendAsync(any())).thenReturn(sendFuture);
final SendPushNotificationResult result = fcmSender.sendNotification(pushNotification).join();
verify(firebaseMessaging).sendAsync(any(Message.class));
assertFalse(result.accepted());
assertEquals(Optional.of("UNREGISTERED"), result.errorCode());
assertTrue(result.unregistered());
}
|
/**
 * Composes {@code function} with a bi-handler: on success the handler receives
 * (result, null); on failure it receives (null, throwable).
 *
 * NOTE(review): the catch also covers a Throwable thrown by the success-path
 * handler call itself, in which case the handler runs a second time with that
 * Throwable — confirm this re-entry is intended.
 */
public static <T, R> CheckedFunction0<R> andThen(CheckedFunction0<T> function,
CheckedFunction2<T, Throwable, R> handler) {
return () -> {
try {
return handler.apply(function.apply(), null);
} catch (Throwable throwable) {
return handler.apply(null, throwable);
}
};
}
|
// A throwing function composed with a recovering handler must yield the
// handler's fallback value instead of propagating the exception.
@Test
public void shouldRecoverFromException2() throws Throwable {
CheckedFunction0<String> callable = () -> {
throw new IllegalArgumentException("BAM!");
};
CheckedFunction0<String> callableWithRecovery = VavrCheckedFunctionUtils.andThen(callable, (result, ex) -> {
if(ex instanceof IllegalArgumentException){
return "Bla";
}
return result;
});
String result = callableWithRecovery.apply();
assertThat(result).isEqualTo("Bla");
}
|
/**
 * Materializes every field of this record into a freshly allocated list,
 * in field order: get(0) .. get(size() - 1).
 */
@Override
public List<Object> getAll() {
    List<Object> fields = new ArrayList<Object>(this.size());
    for (int index = 0; index < this.size(); index++) {
        fields.add(get(index));
    }
    return fields;
}
|
// getAll must return all fields in declaration order with their lazy values
// resolved to the expected constants.
@Test
public void testGetAll() throws Exception {
HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
List<Object> list = r.getAll();
Assert.assertEquals(INT_CONST, ((Integer) list.get(0)).intValue());
Assert.assertEquals(LONG_CONST, ((Long) list.get(1)).longValue());
Assert.assertEquals(DOUBLE_CONST, ((Double) list.get(2)).doubleValue(), 0);
Assert.assertEquals(STRING_CONST, list.get(3));
}
|
/**
 * Reads the configured amount for every external resource in the config.
 * Resources with a missing or non-positive amount are logged and skipped.
 *
 * @return resource name -> positive amount; empty if no resources configured
 */
@VisibleForTesting
static Map<String, Long> getExternalResourceAmountMap(Configuration config) {
final Set<String> resourceSet = getExternalResourceSet(config);
if (resourceSet.isEmpty()) {
return Collections.emptyMap();
}
final Map<String, Long> externalResourceAmountMap = new HashMap<>();
for (String resourceName : resourceSet) {
// The amount option key is derived per resource name.
final ConfigOption<Long> amountOption =
key(ExternalResourceOptions.getAmountConfigOptionForResource(resourceName))
.longType()
.noDefaultValue();
final Optional<Long> amountOpt = config.getOptional(amountOption);
if (!amountOpt.isPresent()) {
LOG.warn(
"The amount of the {} should be configured. Will ignore that resource.",
resourceName);
} else if (amountOpt.get() <= 0) {
// NOTE(review): the placeholders receive (amount, resourceName) — the
// message reads as if the order were reversed; confirm intended args.
LOG.warn(
"The amount of the {} should be positive while finding {}. Will ignore that resource.",
amountOpt.get(),
resourceName);
} else {
externalResourceAmountMap.put(resourceName, amountOpt.get());
}
}
return externalResourceAmountMap;
}
|
// A configured amount of 0 is illegal: the resource must be dropped and the
// resulting amount map must be empty.
@Test
public void testGetExternalResourceAmountMapWithIllegalAmount() {
final Configuration config = new Configuration();
config.set(
ExternalResourceOptions.EXTERNAL_RESOURCE_LIST,
Collections.singletonList(RESOURCE_NAME_1));
config.setLong(
ExternalResourceOptions.getAmountConfigOptionForResource(RESOURCE_NAME_1), 0);
final Map<String, Long> externalResourceAmountMap =
ExternalResourceUtils.getExternalResourceAmountMap(config);
assertThat(externalResourceAmountMap.entrySet(), is(empty()));
}
|
/**
 * Returns an immutable snapshot of this directory's entry names, sorted by
 * display order, excluding reserved entries (e.g. "." and "..").
 */
public ImmutableSortedSet<Name> snapshot() {
ImmutableSortedSet.Builder<Name> builder =
new ImmutableSortedSet.Builder<>(Name.displayOrdering());
for (DirectoryEntry entry : this) {
if (!isReserved(entry.name())) {
builder.add(entry.name());
}
}
return builder.build();
}
|
// Snapshot must be sorted by name and omit "." / "..".
// NOTE(review): "foo" is expected but not linked here — presumably created in
// the test fixture's setup; verify.
@Test
public void testSnapshot() {
root.link(Name.simple("bar"), regularFile(10));
root.link(Name.simple("abc"), regularFile(10));
// does not include . or .. and is sorted by the name
assertThat(root.snapshot())
.containsExactly(Name.simple("abc"), Name.simple("bar"), Name.simple("foo"))
.inOrder();
}
|
/**
 * Performs a snapshot-diff based sync of the target directory: applies the
 * rename/delete portion of the diff (via a temp dir on the target FS), then —
 * success or failure — repoints the context's source path at the "to"
 * snapshot so the subsequent copy reads a consistent view.
 *
 * @return true if the diff-based sync was applied; false if preconditions
 *         failed or any error occurred (caller should fall back to full copy)
 * @throws IOException if temp-dir cleanup fails
 */
public boolean sync() throws IOException {
if (!preSyncCheck()) {
return false;
}
if (!getAllDiffs()) {
return false;
}
List<Path> sourcePaths = context.getSourcePaths();
final Path sourceDir = sourcePaths.get(0);
final Path targetDir = context.getTargetPath();
final FileSystem tfs = targetDir.getFileSystem(conf);
Path tmpDir = null;
try {
tmpDir = createTargetTmpDir(tfs, targetDir);
DiffInfo[] renameAndDeleteDiffs =
getRenameAndDeleteDiffsForSync(targetDir);
if (renameAndDeleteDiffs.length > 0) {
// do the real sync work: deletion and rename
syncDiff(renameAndDeleteDiffs, tfs, tmpDir);
}
return true;
} catch (Exception e) {
// Any failure falls back to a regular distcp run.
DistCp.LOG.warn("Failed to use snapshot diff for distcp", e);
return false;
} finally {
deleteTargetTmpDir(tfs, tmpDir);
// TODO: since we have tmp directory, we can support "undo" with failures
// set the source path using the snapshot path
context.setSourcePaths(Arrays.asList(getSnapshotPath(sourceDir,
context.getToSnapshot())));
}
}
|
// End-to-end DistCp snapshot sync: changes between snapshots s1 and s2 are
// synced and copied (with append enabled); later, un-snapshotted changes on
// the source must not leak into the copy.
@Test
public void testSync() throws Exception {
initData(source);
initData(target);
enableAndCreateFirstSnapshot();
// make changes under source
int numCreatedModified = changeData(dfs, source);
dfs.createSnapshot(source, "s2");
// before sync, make some further changes on source. this should not affect
// the later distcp since we're copying (s2-s1) to target
final Path toDelete = new Path(source, "foo/d1/foo/f1");
dfs.delete(toDelete, true);
final Path newdir = new Path(source, "foo/d1/foo/newdir");
dfs.mkdirs(newdir);
SnapshotDiffReport report = dfs.getSnapshotDiffReport(source, "s1", "s2");
System.out.println(report);
DistCpSync distCpSync = new DistCpSync(context, conf);
// do the sync
Assert.assertTrue(distCpSync.sync());
// make sure the source path has been updated to the snapshot path
final Path spath = new Path(source,
HdfsConstants.DOT_SNAPSHOT_DIR + Path.SEPARATOR + "s2");
Assert.assertEquals(spath, context.getSourcePaths().get(0));
// build copy listing
final Path listingPath = new Path("/tmp/META/fileList.seq");
CopyListing listing = new SimpleCopyListing(conf, new Credentials(), distCpSync);
listing.buildListing(listingPath, context);
Map<Text, CopyListingFileStatus> copyListing = getListing(listingPath);
CopyMapper copyMapper = new CopyMapper();
StubContext stubContext = new StubContext(conf, null, 0);
Mapper<Text, CopyListingFileStatus, Text, Text>.Context mapContext =
stubContext.getContext();
// Enable append
mapContext.getConfiguration().setBoolean(
DistCpOptionSwitch.APPEND.getConfigLabel(), true);
copyMapper.setup(mapContext);
for (Map.Entry<Text, CopyListingFileStatus> entry : copyListing.entrySet()) {
copyMapper.map(entry.getKey(), entry.getValue(), mapContext);
}
// verify that we only list modified and created files/directories
Assert.assertEquals(numCreatedModified, copyListing.size());
// verify that we only copied new appended data of f2 and the new file f1
Assert.assertEquals(BLOCK_SIZE * 3, stubContext.getReporter()
.getCounter(CopyMapper.Counter.BYTESCOPIED).getValue());
// verify the source and target now has the same structure
verifyCopy(dfs.getFileStatus(spath), dfs.getFileStatus(target), false);
}
|
/**
 * CLI entry point: parses arguments (handling --help and docs generation),
 * validates the configuration-store address, then creates the tenant and
 * namespace for every requested namespace in the configuration metadata
 * store.
 *
 * @return process exit code: 0 on success, 1 on usage/argument errors
 */
public static int doMain(String[] args) throws Exception {
Arguments arguments = new Arguments();
CommandLine commander = new CommandLine(arguments);
try {
commander.parseArgs(args);
if (arguments.help) {
commander.usage(commander.getOut());
return 0;
}
// Docs-generation mode: emit CLI docs and exit without touching metadata.
if (arguments.generateDocs) {
CmdGenerateDocs cmd = new CmdGenerateDocs("pulsar");
cmd.addCommand("initialize-namespace", commander);
cmd.run(null);
return 0;
}
} catch (Exception e) {
commander.getErr().println(e);
return 1;
}
if (arguments.configurationStore == null) {
System.err.println("Configuration store address argument is required (--configuration-store)");
commander.usage(commander.getOut());
return 1;
}
// try-with-resources ensures the metadata store is closed on all paths.
try (MetadataStore configStore = PulsarClusterMetadataSetup
.initConfigMetadataStore(arguments.configurationStore, arguments.zkSessionTimeoutMillis)) {
PulsarResources pulsarResources = new PulsarResources(null, configStore);
for (String namespace : arguments.namespaces) {
NamespaceName namespaceName = null;
try {
namespaceName = NamespaceName.get(namespace);
} catch (Exception e) {
System.out.println("Invalid namespace name.");
return 1;
}
// Create specified tenant
PulsarClusterMetadataSetup
.createTenantIfAbsent(pulsarResources, namespaceName.getTenant(), arguments.cluster);
// Create specified namespace
PulsarClusterMetadataSetup.createNamespaceIfAbsent(pulsarResources, namespaceName,
arguments.cluster);
}
}
System.out.println("Initial namespace setup success");
return 0;
}
|
// Docs generation (-g) must mention every @Option name declared on the
// Arguments class; stdout is captured and restored afterwards.
@Test
public void testMainGenerateDocs() throws Exception {
PrintStream oldStream = System.out;
try {
ByteArrayOutputStream baoStream = new ByteArrayOutputStream();
System.setOut(new PrintStream(baoStream));
Class argumentsClass =
Class.forName("org.apache.pulsar.PulsarInitialNamespaceSetup$Arguments");
PulsarInitialNamespaceSetup.doMain(new String[]{"-cs", "cs", "-c", "c", "-g", "demo"});
String message = baoStream.toString();
Field[] fields = argumentsClass.getDeclaredFields();
for (Field field : fields) {
boolean fieldHasAnno = field.isAnnotationPresent(Option.class);
if (fieldHasAnno) {
Option fieldAnno = field.getAnnotation(Option.class);
String[] names = fieldAnno.names();
if (names.length == 0) {
continue;
}
// Render ["-a", "--b"] as "-a, --b" to match the docs output format.
String nameStr = Arrays.asList(names).toString();
nameStr = nameStr.substring(1, nameStr.length() - 1);
assertTrue(message.indexOf(nameStr) > 0);
}
}
} finally {
System.setOut(oldStream);
}
}
|
/**
 * Sets the number of synchronous backups, validated together with the current
 * async backup count by {@code checkBackupCount}.
 *
 * @return this config, for chaining
 * @throws IllegalArgumentException if the combined backup count is invalid
 */
public CardinalityEstimatorConfig setBackupCount(int backupCount) {
this.backupCount = checkBackupCount(backupCount, asyncBackupCount);
return this;
}
|
// 7 exceeds the allowed backup count and must be rejected.
@Test(expected = IllegalArgumentException.class)
public void testSetBackupCount_withInvalidValue() {
config.setBackupCount(7);
}
|
/**
 * Returns true when all four bytes of {@code value} are ASCII digits
 * ('0'..'9'), i.e. the int holds a packed four-character ASCII number.
 *
 * Per byte: adding 0x46 flags (bit 7) bytes above '9', and subtracting 0x30
 * borrows into bit 7 for bytes below '0'; the value passes when neither mask
 * flags any byte. (Inter-byte carries only arise from bytes that already
 * fail, so the combined check stays correct.)
 */
public static boolean isFourDigitsAsciiEncodedNumber(final int value)
{
    final int aboveNineMask = value + 0x46464646;
    final int belowZeroMask = value - 0x30303030;
    return ((aboveNineMask | belowZeroMask) & 0x80808080) == 0;
}
|
// Exhaustive check: numbers below 1000 encode to fewer than four digits and
// must be rejected; 1000-9999 must be accepted; a non-digit byte must reject.
// Both byte orders are exercised since the check is endianness-agnostic.
@Test
void shouldDetectFourDigitsAsciiEncodedNumbers()
{
final int index = 2;
final UnsafeBuffer buffer = new UnsafeBuffer(new byte[8]);
for (int i = 0; i < 1000; i++)
{
buffer.putIntAscii(index, i);
assertFalse(isFourDigitsAsciiEncodedNumber(buffer.getInt(index, LITTLE_ENDIAN)));
assertFalse(isFourDigitsAsciiEncodedNumber(buffer.getInt(index, BIG_ENDIAN)));
}
for (int i = 1000; i < 10000; i++)
{
buffer.putIntAscii(index, i);
assertTrue(isFourDigitsAsciiEncodedNumber(buffer.getInt(index, LITTLE_ENDIAN)));
assertTrue(isFourDigitsAsciiEncodedNumber(buffer.getInt(index, BIG_ENDIAN)));
}
// Overwrite the first digit with a letter: no longer four digits.
buffer.putIntAscii(index, 1234);
buffer.putByte(index, (byte)'a');
assertFalse(isFourDigitsAsciiEncodedNumber(buffer.getInt(index, LITTLE_ENDIAN)));
assertFalse(isFourDigitsAsciiEncodedNumber(buffer.getInt(index, BIG_ENDIAN)));
}
|
/**
 * @deprecated use the constructor taking an explicit {@link Cloud}; this
 * overload substitutes an empty cloud name.
 */
@Deprecated(forRemoval = true)
public SystemInfo(ApplicationId application, Zone zone, Cluster cluster, Node node) {
this(application, zone, new Cloud(""), cluster.id(), node);
}
|
// All constructor arguments must be exposed unchanged through the accessors.
@Test
@SuppressWarnings("removal")
void testSystemInfo() {
ApplicationId application = new ApplicationId("tenant1", "application1", "instance1");
Zone zone = new Zone(Environment.dev, "us-west-1");
Cloud cloud = new Cloud("aws");
String cluster = "clusterName";
Node node = new Node(0);
SystemInfo info = new SystemInfo(application, zone, cloud, cluster, node);
assertEquals(application, info.application());
assertEquals(zone, info.zone());
assertEquals(cloud, info.cloud());
assertEquals(cluster, info.clusterName());
assertEquals(node, info.node());
}
|
/**
 * Validates a widgets bundle before persisting: the title must be a valid
 * string, a missing tenant defaults to the system tenant (NULL_UUID), and a
 * non-system tenant must actually exist.
 */
@Override
protected void validateDataImpl(TenantId tenantId, WidgetsBundle widgetsBundle) {
    validateString("Widgets bundle title", widgetsBundle.getTitle());
    if (widgetsBundle.getTenantId() == null) {
        // Treat an absent tenant as the system tenant.
        widgetsBundle.setTenantId(TenantId.fromUUID(ModelConstants.NULL_UUID));
    }
    boolean systemTenant = widgetsBundle.getTenantId().getId().equals(ModelConstants.NULL_UUID);
    if (!systemTenant && !tenantService.tenantExists(widgetsBundle.getTenantId())) {
        throw new DataValidationException("Widgets bundle is referencing to non-existent tenant!");
    }
}
|
// validateDataImpl must delegate title validation to validateString with the
// exact field label and value (verified via the spied validator).
@Test
void testValidateNameInvocation() {
WidgetsBundle widgetsBundle = new WidgetsBundle();
widgetsBundle.setTitle("my fancy WB");
widgetsBundle.setTenantId(tenantId);
validator.validateDataImpl(tenantId, widgetsBundle);
verify(validator).validateString("Widgets bundle title", widgetsBundle.getTitle());
}
|
/**
 * Instantiates a {@link MsgAllocator} from {@code clazz} via its no-arg
 * constructor.
 *
 * Uses getDeclaredConstructor().newInstance() instead of the deprecated
 * Class.newInstance(), so constructor-thrown exceptions arrive wrapped in
 * InvocationTargetException rather than propagating unchecked.
 *
 * @param clazz a class implementing MsgAllocator with a no-arg constructor
 * @throws IllegalArgumentException if clazz is not a MsgAllocator, lacks a
 *         no-arg constructor, or instantiation fails
 */
private MsgAllocator allocator(Class<?> clazz)
{
    try {
        Class<? extends MsgAllocator> msgAllocator = clazz.asSubclass(MsgAllocator.class);
        return msgAllocator.getDeclaredConstructor().newInstance();
    }
    catch (ReflectiveOperationException e) {
        throw new IllegalArgumentException(e);
    }
}
|
// Setting ZMQ_MSG_ALLOCATOR must store the instance and return the same
// allocator from getSocketOpt.
@Test
public void testAllocator()
{
options.setSocketOpt(ZMQ.ZMQ_MSG_ALLOCATOR, new MsgAllocatorDirect());
assertThat(options.getSocketOpt(ZMQ.ZMQ_MSG_ALLOCATOR), is(options.allocator));
}
|
/**
 * Sets the on-throw callback method name.
 *
 * @return this builder, for chaining
 */
public MethodBuilder onthrowMethod(String onthrowMethod) {
this.onthrowMethod = onthrowMethod;
return getThis();
}
|
// The builder must carry the on-throw method name through to the built object.
@Test
void onthrowMethod() {
MethodBuilder builder = MethodBuilder.newBuilder();
builder.onthrowMethod("on-throw-method");
Assertions.assertEquals("on-throw-method", builder.build().getOnthrowMethod());
}
|
/**
 * Creates an unconfigured {@code Read} transform: no configuration, an empty
 * table id, and a default full-table {@link Scan}. Callers refine it via the
 * {@code with*} builder methods.
 */
public static Read read() {
return new Read(null, "", new Scan());
}
|
// Reading a table populated with numRows rows must yield exactly numRows
// elements through the HBaseIO read transform.
@Test
public void testReading() throws Exception {
final String table = tmpTable.getName();
final int numRows = 1001;
createAndWriteData(table, numRows);
runReadTestLength(HBaseIO.read().withConfiguration(conf).withTableId(table), false, numRows);
}
|
/**
 * Initializes session state for an authentication request: a federation
 * session first when the request names a federation, then always the SAML
 * session — both keyed by the HTTP session id.
 *
 * @throws SamlSessionException if SAML session setup fails
 * @throws SharedServiceClientException if a shared-service call fails
 */
public void initializeSession(AuthenticationRequest authenticationRequest, SAMLBindingContext bindingContext) throws SamlSessionException, SharedServiceClientException {
final String httpSessionId = authenticationRequest.getRequest().getSession().getId();
if (authenticationRequest.getFederationName() != null) {
findOrInitializeFederationSession(authenticationRequest, httpSessionId);
}
findOrInitializeSamlSession(authenticationRequest, httpSessionId, bindingContext);
}
|
// A scoping RequesterID that is not recognized as valid must leave the SAML
// session's requesterId unset after initialization.
@Test
public void noValidRequesterIdIsPresentTest() throws SamlSessionException, SharedServiceClientException {
RequesterID requesterID = OpenSAMLUtils.buildSAMLObject(RequesterID.class);
requesterID.setRequesterID("requesterId");
Scoping scoping = OpenSAMLUtils.buildSAMLObject(Scoping.class);
scoping.getRequesterIDs().add(requesterID);
authnRequest.setScoping(scoping);
samlSessionService.initializeSession(authenticationRequest, bindingContext);
assertNull(authenticationRequest.getSamlSession().getRequesterId());
}
|
/**
 * Rotates the index set, delegating the rotation decision to this strategy's
 * {@code shouldRotate}.
 */
@Override
public void rotate(IndexSet indexSet) {
indexRotator.rotate(indexSet, this::shouldRotate);
}
|
// With rotateEmptyIndexSet=true, an empty index older than the rotation
// period must still trigger exactly one rotation cycle.
@Test
public void testRotationOnEmptyIndexSetWhenEnabled() {
final DateTime initialTime = new DateTime(2022, 7, 21, 13, 00, 00, 0, DateTimeZone.UTC);
final Period period = hours(1);
// Pin "now" to a controllable clock for deterministic period checks.
final InstantMillisProvider clock = new InstantMillisProvider(initialTime);
DateTimeUtils.setCurrentMillisProvider(clock);
when(indexSet.getConfig()).thenReturn(indexSetConfig);
when(indexSetConfig.rotationStrategyConfig()).thenReturn(TimeBasedRotationStrategyConfig.builder().rotateEmptyIndexSet(true).rotationPeriod(period).build());
when(indices.indexCreationDate(anyString())).thenReturn(Optional.of(initialTime.minus(minutes(11))));
when(indexSet.getNewestIndex()).thenReturn(IGNORED);
when(indexSet.getConfig()).thenReturn(indexSetConfig);
when(indices.numberOfMessages(anyString())).thenReturn(0L);
when(indices.getIndices(eq(indexSet))).thenReturn(ImmutableSet.of("emptyIndex", IGNORED));
// Advance past the one-hour rotation period.
clock.tick(minutes(60));
rotationStrategy.rotate(indexSet);
verify(indexSet, times(1)).cycle();
}
|
/**
 * Enlists the wrapped XA resource's delegate into the current JTA
 * transaction. The checked SystemException/RollbackException are rethrown
 * unchecked via Lombok's @SneakyThrows.
 */
@SneakyThrows({SystemException.class, RollbackException.class})
@Override
public void enlistResource(final SingleXAResource singleXAResource) {
transactionManager.getTransaction().enlistResource(singleXAResource.getDelegate());
}
|
// enlistResource must unwrap the SingleXAResource and enlist its delegate on
// the transaction returned by the transaction manager.
@Test
void assertEnlistResource() throws SystemException, RollbackException {
SingleXAResource singleXAResource = mock(SingleXAResource.class);
Transaction transaction = mock(Transaction.class);
when(transactionManager.getTransaction()).thenReturn(transaction);
transactionManagerProvider.enlistResource(singleXAResource);
verify(transaction).enlistResource(singleXAResource.getDelegate());
}
|
/**
 * Returns the matched URL parameters.
 * NOTE(review): exposes the internal map directly — callers could mutate it;
 * confirm whether a defensive copy is warranted.
 */
public Map<String, String> getParams() {
return params;
}
|
// Matching a concrete URL against a ":state" route must expose the captured
// path segment under the ":state" parameter key.
@Test
void testRequestUrlParams() {
RequestUrl requestUrl = new MatchUrl("/api/jobs/enqueued?offset=2&limit=2").toRequestUrl("/api/jobs/:state");
assertThat(requestUrl.getParams()).containsEntry(":state", "enqueued");
}
|
/** Returns the configured client name. */
@Override
public String name() {
return this.name;
}
|
// The name set on the builder must be returned by the built config's name().
@Test
void testName() {
String name = "test";
DefaultGrpcClientConfig.Builder builder = DefaultGrpcClientConfig.newBuilder();
builder.setName(name);
DefaultGrpcClientConfig config = (DefaultGrpcClientConfig) builder.build();
assertEquals(name, config.name());
}
|
/**
 * Returns whether a partition reassignment is still in progress, i.e. whether
 * any replicas are still being removed or added (delegates to the static
 * overload with this instance's removing/adding sets).
 */
boolean isReassignmentInProgress() {
return isReassignmentInProgress(
removing,
adding);
}
|
// Covers the four cases: removing+adding, removing only, adding only, and no
// reassignment — only the last one must report "not in progress".
@Test
public void testIsReassignmentInProgress() {
    // Both removing and adding replicas present.
    assertTrue(PartitionReassignmentReplicas.isReassignmentInProgress(
        new PartitionRegistration.Builder().
            setReplicas(new int[]{0, 1, 3, 2}).
            setDirectories(new Uuid[]{
                Uuid.fromString("HEKOeWDdQOqr2cmHrnjqjA"),
                Uuid.fromString("I8kmmcM5TjOwNFnGvJLCjA"),
                Uuid.fromString("x8osEoRkQdupZNYpU5c3Lw"),
                Uuid.fromString("OT6qgtRqTiuiX8EikvAVow")}).
            setIsr(new int[]{0, 1, 3, 2}).
            setRemovingReplicas(new int[]{2}).
            setAddingReplicas(new int[]{3}).
            setLeader(0).
            setLeaderRecoveryState(LeaderRecoveryState.RECOVERED).
            setLeaderEpoch(0).
            setPartitionEpoch(0).
            build()));
    // Removing replicas only.
    assertTrue(PartitionReassignmentReplicas.isReassignmentInProgress(
        new PartitionRegistration.Builder().
            setReplicas(new int[]{0, 1, 3, 2}).
            setDirectories(new Uuid[]{
                Uuid.fromString("QrbOddSYQg6JgFu7hLvOTg"),
                Uuid.fromString("S585FNNoSmiSH6ZYCrNqCg"),
                Uuid.fromString("wjT5ieLARfKYMWIzTFwcag"),
                Uuid.fromString("qzX9qWPVTWuLbiEQL0cgeg")
            }).
            setIsr(new int[]{0, 1, 3, 2}).
            setRemovingReplicas(new int[]{2}).
            setLeader(0).
            setLeaderRecoveryState(LeaderRecoveryState.RECOVERED).
            setLeaderEpoch(0).
            setPartitionEpoch(0).
            build()));
    // Adding replicas only.
    assertTrue(PartitionReassignmentReplicas.isReassignmentInProgress(
        new PartitionRegistration.Builder().
            setReplicas(new int[]{0, 1, 3, 2}).
            setDirectories(new Uuid[]{
                Uuid.fromString("QIyJnfdUSz6laFLCgj3AjA"),
                Uuid.fromString("1QIvvBx2QVqNw2dsnYXUZg"),
                Uuid.fromString("yPvPnGrxR0q8KC2Q5k0FIg"),
                Uuid.fromString("a0lnxzleTcWVf1IyalE9cA")
            }).
            setIsr(new int[]{0, 1, 3, 2}).
            setAddingReplicas(new int[]{3}).
            setLeader(0).
            setLeaderRecoveryState(LeaderRecoveryState.RECOVERED).
            setLeaderEpoch(0).
            setPartitionEpoch(0).
            build()));
    // Neither removing nor adding: no reassignment in progress.
    assertFalse(PartitionReassignmentReplicas.isReassignmentInProgress(
        new PartitionRegistration.Builder().
            setReplicas(new int[]{0, 1, 2}).
            setDirectories(new Uuid[]{
                Uuid.fromString("I4qCCBe9TYGOB0xvmvTI7w"),
                Uuid.fromString("JvzGem0nTxiNPM5jIzNzlA"),
                Uuid.fromString("EfWjZ2EsSKSvEn9PkG7lWQ")
            }).
            setIsr(new int[]{0, 1, 2}).
            setLeader(0).
            setLeaderRecoveryState(LeaderRecoveryState.RECOVERED).
            setLeaderEpoch(0).
            setPartitionEpoch(0).
            build()));
}
|
/**
 * Delivers up to {@code n} characters, refilling the {@code pushBack} buffer from the
 * underlying stream as needed. Returns fewer than {@code n} characters only at end of
 * stream.
 *
 * @param n maximum number of characters to deliver
 * @return a builder holding the characters read (possibly empty at EOF)
 * @throws IOException if the underlying stream fails
 */
private StringBuilder read(int n) throws IOException {
    final StringBuilder out = new StringBuilder(n);
    boolean endOfStream = false;
    while (out.length() < n && !endOfStream) {
        if (pushBack.length() == 0) {
            // Buffer exhausted: pull more deliverable codes from the stream.
            endOfStream = readIntoPushBack();
        }
        if (pushBack.length() > 0) {
            // Consume exactly one buffered character.
            out.append(pushBack.charAt(0));
            pushBack.deleteCharAt(0);
        }
    }
    return out;
}
|
// Exercises read(byte[], off, len) on plain text and on text containing entities
// that the stream expands (e.g. "&" -> "&amp;"), checking both count and content.
@Test
public void testRead_3args() throws Exception {
    // Plain ASCII: first 10 bytes delivered verbatim.
    byte[] data = new byte[10];
    int offset = 0;
    int length = 10;
    byte[] expected = "abcdefghij".getBytes(StandardCharsets.UTF_8);
    String text = "abcdefghijklmnopqrstuvwxyz";
    InputStream stream = new ByteArrayInputStream(text.getBytes(StandardCharsets.UTF_8));
    XmlInputStream instance = new XmlInputStream(stream);
    int expResult = 10;
    int result = instance.read(data, offset, length);
    assertEquals(expResult, result);
    assertArrayEquals(expected, data);
    // A bare "&" is expanded to the 5-byte entity "&amp;".
    data = new byte[5];
    offset = 0;
    length = 5;
    expected = "&amp;".getBytes(StandardCharsets.UTF_8);
    text = "&";
    stream = new ByteArrayInputStream(text.getBytes(StandardCharsets.UTF_8));
    instance = new XmlInputStream(stream);
    expResult = 5;
    result = instance.read(data, offset, length);
    assertEquals(expResult, result);
    assertArrayEquals(expected, data);
    // Entity expansion mixed with ordinary text.
    data = new byte[10];
    offset = 0;
    length = 10;
    expected = "&amp; test".getBytes(StandardCharsets.UTF_8);
    text = "& test";
    stream = new ByteArrayInputStream(text.getBytes(StandardCharsets.UTF_8));
    instance = new XmlInputStream(stream);
    expResult = 10;
    result = instance.read(data, offset, length);
    assertEquals(expResult, result);
    assertArrayEquals(expected, data);
}
|
/**
 * Returns the actual type arguments of this key, as defined by
 * {@link java.lang.reflect.ParameterizedType#getActualTypeArguments()}.
 *
 * @return a defensive copy of the type-argument array; empty (never {@code null})
 *         when the key was created without type arguments
 */
@Override
public Type[] getActualTypeArguments() {
    // Return a copy so callers cannot mutate this key's internal state;
    // ParameterizedType implementations conventionally return a fresh array.
    return typeArguments.clone();
}
|
// Covers keys built with explicit type arguments, an explicit empty array, and no
// arguments at all — the getter must never return null.
@Test
void getActualTypeArguments() {
    List<Type> typeArguments = Arrays.asList(String.class, Boolean.class, Long.class);
    EfestoClassKey keyListWithTypes = new EfestoClassKey(List.class, typeArguments.toArray(new Type[0]));
    Type[] retrieved = keyListWithTypes.getActualTypeArguments();
    assertThat(retrieved).isNotNull();
    assertThat(retrieved).hasSameSizeAs(typeArguments);
    assertThat(typeArguments).containsAll(Arrays.asList(retrieved));
    // Explicit empty argument array.
    typeArguments = Collections.emptyList();
    EfestoClassKey keyListWithEmptyTypes = new EfestoClassKey(List.class, typeArguments.toArray(new Type[0]));
    retrieved = keyListWithEmptyTypes.getActualTypeArguments();
    assertThat(retrieved).isNotNull();
    assertThat(retrieved).isEmpty();
    // No arguments supplied at all (varargs omitted).
    EfestoClassKey keyListWithoutTypes = new EfestoClassKey(List.class);
    retrieved = keyListWithoutTypes.getActualTypeArguments();
    assertThat(retrieved).isNotNull();
    assertThat(retrieved).isEmpty();
}
|
/**
 * Accepts a type only when it is an array type.
 */
@Override
public boolean accept(ProcessingEnvironment processingEnv, TypeMirror type) {
    return isArrayType(type);
}
|
// All fixture fields are array-typed, so accept() must return true for each.
@Test
void testAccept() {
    assertTrue(builder.accept(processingEnv, integersField.asType()));
    assertTrue(builder.accept(processingEnv, stringsField.asType()));
    assertTrue(builder.accept(processingEnv, primitiveTypeModelsField.asType()));
    assertTrue(builder.accept(processingEnv, modelsField.asType()));
    assertTrue(builder.accept(processingEnv, colorsField.asType()));
}
|
/**
 * Creates a subscription on the given topic, which must have been created by this
 * manager instance.
 *
 * @param topicName        topic to subscribe to; must be managed by this instance
 * @param subscriptionName subscription name; must not be empty
 * @return the name of the created subscription
 * @throws IllegalArgumentException if the name is empty or the topic is not managed here
 */
public SubscriptionName createSubscription(TopicName topicName, String subscriptionName) {
    checkArgument(!subscriptionName.isEmpty(), "subscriptionName can not be empty");
    checkIsUsable();
    if (!createdTopics.contains(topicName)) {
        throw new IllegalArgumentException(
            "Can not create a subscription for a topic not managed by this instance.");
    }
    LOG.info("Creating subscription '{}' for topic '{}'", subscriptionName, topicName);
    // Retry the RPC when it fails with a deadline-exceeded error.
    Subscription subscription =
        Failsafe.with(retryOnDeadlineExceeded())
            .get(
                () ->
                    subscriptionAdminClient.createSubscription(
                        getSubscriptionName(subscriptionName),
                        topicName,
                        PushConfig.getDefaultInstance(),
                        DEFAULT_ACK_DEADLINE_SECONDS));
    SubscriptionName reference = PubsubUtils.toSubscriptionName(subscription);
    // Track the subscription so it can be cleaned up later.
    createdSubscriptions.add(getSubscriptionName(subscriptionName));
    LOG.info(
        "Subscription '{}' for topic '{}' was created successfully!",
        subscription.getName(),
        topicName);
    return reference;
}
|
// An empty subscription name must be rejected before any RPC is attempted.
@Test
public void testCreateSubscriptionWithInvalidNameShouldFail() {
    IllegalArgumentException exception =
        assertThrows(
            IllegalArgumentException.class,
            () -> testManager.createSubscription(TopicName.of(PROJECT_ID, "topic-a"), ""));
    assertThat(exception).hasMessageThat().contains("subscriptionName can not be empty");
}
|
/**
 * Sends the message, delegating with the {@code sent} flag set to false
 * (presumably meaning "do not wait for write confirmation" — verify against the
 * two-argument send contract).
 */
@Override
public void send(Object message) throws RemotingException {
    send(message, false);
}
|
/**
 * Verifies that a message sent through the header channel ends up, unchanged,
 * as the first object recorded by the underlying channel.
 */
@Test
void sendTest04() throws RemotingException {
    String payload = "this is a test message";
    header.send(payload);
    Assertions.assertEquals("this is a test message", channel.getSentObjects().get(0));
}
|
/**
 * No-op implementation: deferred deep-link requests are intentionally ignored here.
 */
@Override
public void requestDeferredDeepLink(JSONObject params) {
}
|
// Smoke test: calling the no-op with an empty JSON object must not throw.
@Test
public void requestDeferredDeepLink() {
    mSensorsAPI.requestDeferredDeepLink(new JSONObject());
}
|
/**
 * TINYINT division operator.
 *
 * @throws PrestoException with {@code DIVISION_BY_ZERO} when {@code right} is zero
 */
@ScalarOperator(DIVIDE)
@SqlType(StandardTypes.TINYINT)
public static long divide(@SqlType(StandardTypes.TINYINT) long left, @SqlType(StandardTypes.TINYINT) long right)
{
    try {
        return left / right;
    }
    catch (ArithmeticException e) {
        // long division only throws ArithmeticException on division by zero.
        throw new PrestoException(DIVISION_BY_ZERO, e);
    }
}
|
// Checks truncating integer division for TINYINT operands and the zero-divisor error.
@Test
public void testDivide()
{
    assertFunction("TINYINT'37' / TINYINT'37'", TINYINT, (byte) 1);
    assertFunction("TINYINT'37' / TINYINT'17'", TINYINT, (byte) (37 / 17));
    assertFunction("TINYINT'17' / TINYINT'37'", TINYINT, (byte) (17 / 37));
    assertFunction("TINYINT'17' / TINYINT'17'", TINYINT, (byte) 1);
    assertInvalidFunction("TINYINT'17' / TINYINT'0'", DIVISION_BY_ZERO);
}
|
/**
 * Returns the classic group with the given id, optionally creating an empty one
 * when it does not exist.
 *
 * @param groupId           the group id to look up
 * @param createIfNotExists whether a missing group should be created
 * @return the classic group
 * @throws GroupIdNotFoundException if the group is missing and creation is disabled,
 *         or if a group with that id exists but is not a classic group
 */
ClassicGroup getOrMaybeCreateClassicGroup(
    String groupId,
    boolean createIfNotExists
) throws GroupIdNotFoundException {
    Group existing = groups.get(groupId);
    if (existing != null) {
        // Protocol upgrade/downgrade is unsupported, so a group of another type
        // cannot be returned as a classic group.
        if (existing.type() != CLASSIC) {
            throw new GroupIdNotFoundException(String.format("Group %s is not a classic group.",
                groupId));
        }
        return (ClassicGroup) existing;
    }
    if (!createIfNotExists) {
        throw new GroupIdNotFoundException(String.format("Classic group %s not found.", groupId));
    }
    ClassicGroup created = new ClassicGroup(logContext, groupId, ClassicGroupState.EMPTY, time, metrics);
    groups.put(groupId, created);
    metrics.onClassicGroupStateTransition(null, created.currentState());
    return created;
}
|
// A static follower re-joining as an unknown member while the group was illegally
// moved PREPARING_REBALANCE -> EMPTY must surface an IllegalStateException that
// names both the group and the instance id.
@Test
public void testStaticMemberReJoinWithIllegalStateAsUnknownMember() throws Exception {
    GroupMetadataManagerTestContext context = new GroupMetadataManagerTestContext.Builder()
        .build();
    context.staticMembersJoinAndRebalance(
        "group-id",
        "leader-instance-id",
        "follower-instance-id"
    );
    ClassicGroup group = context.groupMetadataManager.getOrMaybeCreateClassicGroup("group-id", false);
    // Force the group into an inconsistent state for the re-join.
    group.transitionTo(PREPARING_REBALANCE);
    group.transitionTo(EMPTY);
    JoinGroupRequestData request = new GroupMetadataManagerTestContext.JoinGroupRequestBuilder()
        .withGroupId("group-id")
        .withGroupInstanceId("follower-instance-id")
        .withMemberId(UNKNOWN_MEMBER_ID)
        .withProtocolSuperset()
        .build();
    // Illegal state exception shall trigger since follower id resides in pending member bucket.
    IllegalStateException exception = assertThrows(IllegalStateException.class, () -> context.sendClassicGroupJoin(
        request,
        true,
        true
    ));
    String message = exception.getMessage();
    assertTrue(message.contains(group.groupId()));
    assertTrue(message.contains("follower-instance-id"));
}
|
/**
 * Returns the symmetric crypto key with the given name in the given keyring,
 * creating the keyring and/or key in KMS when they do not already exist.
 *
 * @param keyRingId keyring to use (created lazily on first call)
 * @param keyName   name of the symmetric ENCRYPT_DECRYPT key
 * @return the existing or newly created key
 */
public synchronized CryptoKey getOrCreateCryptoKey(String keyRingId, String keyName) {
    // Get the keyring, creating it if it does not already exist
    if (keyRing == null) {
        maybeCreateKeyRing(keyRingId);
    }
    // Client is closed automatically when the lookup/creation finishes.
    try (KeyManagementServiceClient client = clientFactory.getKMSClient()) {
        // Build the symmetric key to create.
        CryptoKey keyToCreate =
            CryptoKey.newBuilder()
                .setPurpose(CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT)
                .setVersionTemplate(
                    CryptoKeyVersionTemplate.newBuilder()
                        .setAlgorithm(
                            CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION))
                .build();
        LOG.info("Checking if symmetric key {} already exists in KMS.", keyName);
        // Loop through the existing keys in the given keyring to see if the
        // key already exists.
        String newKeyName = CryptoKeyName.of(projectId, region, keyRingId, keyName).toString();
        Optional<CryptoKey> existingKey =
            StreamSupport.stream(
                    client.listCryptoKeys(keyRing.getName()).iterateAll().spliterator(), false)
                .filter(kRing -> kRing.getName().equals(newKeyName))
                .findFirst();
        // Create the symmetric key if it does not exist, otherwise, return the found key.
        CryptoKey cryptoKey;
        if (!existingKey.isPresent()) {
            LOG.info("Symmetric key {} does not exist. Creating the key in KMS.", keyName);
            cryptoKey = client.createCryptoKey(keyRing.getName(), keyName, keyToCreate);
            LOG.info("Created symmetric key {}.", cryptoKey.getName());
        } else {
            LOG.info("Symmetric key {} already exists. Retrieving the key from KMS.", keyName);
            cryptoKey = existingKey.get();
            LOG.info("Retrieved symmetric key {}.", cryptoKey.getName());
        }
        return cryptoKey;
    }
}
|
// With an empty key listing, getOrCreateCryptoKey must call createCryptoKey.
@Test
public void testGetOrCreateCryptoKeyShouldCreateCryptoKeyWhenItDoesNotExist() {
    KeyRing keyRing =
        KeyRing.newBuilder()
            .setName(KeyRingName.of(PROJECT_ID, REGION, KEYRING_ID).toString())
            .build();
    when(kmsClientFactory.getKMSClient()).thenReturn(serviceClient);
    when(serviceClient.createKeyRing(any(LocationName.class), anyString(), any(KeyRing.class)))
        .thenReturn(keyRing);
    // No keys exist yet in the keyring.
    when(serviceClient.listCryptoKeys(KEYRING_ID).iterateAll()).thenReturn(ImmutableList.of());
    testManager.getOrCreateCryptoKey(KEYRING_ID, KEY_ID);
    verify(serviceClient).createCryptoKey(anyString(), anyString(), any(CryptoKey.class));
}
|
/**
 * Returns whether the underlying source contains no records.
 *
 * @throws IOException if reading the source fails
 */
public abstract boolean isEmpty() throws IOException;
|
// isEmpty() on a reader over an empty file must return true and, as a side effect,
// initialize the reader.
@Test
public void testIsEmpty() throws Exception {
    File tmpFile = tmpFolder.newFile();
    List<IsmRecord<byte[]>> data = new ArrayList<>();
    writeElementsToFile(data, tmpFile);
    IsmReader<byte[]> reader =
        new IsmReaderImpl<byte[]>(
            FileSystems.matchSingleFileSpec(tmpFile.getAbsolutePath()).resourceId(), CODER, cache);
    assertFalse(reader.isInitialized());
    assertTrue(reader.isEmpty());
    assertTrue(reader.isInitialized());
}
|
/**
 * Wraps the given buffer and charset in a PostgreSQL packet payload.
 */
@Override
public PostgreSQLPacketPayload createPacketPayload(final ByteBuf message, final Charset charset) {
    return new PostgreSQLPacketPayload(message, charset);
}
|
// The payload must wrap the exact ByteBuf instance it was given.
@Test
void assertCreatePacketPayload() {
    assertThat(new PostgreSQLPacketCodecEngine().createPacketPayload(byteBuf, StandardCharsets.UTF_8).getByteBuf(), is(byteBuf));
}
|
/**
 * Drives the state updater: hands over newly added tasks, surfaces failed tasks,
 * and processes restored active tasks.
 *
 * @param now           current wall-clock time in milliseconds
 * @param offsetResetter callback invoked for partitions whose offsets must be reset
 * @return true when no active task is still restoring and no task awaits initialization
 */
public boolean checkStateUpdater(final long now,
                                 final java.util.function.Consumer<Set<TopicPartition>> offsetResetter) {
    addTasksToStateUpdater();
    if (stateUpdater.hasExceptionsAndFailedTasks()) {
        handleExceptionsFromStateUpdater();
    }
    if (stateUpdater.restoresActiveTasks()) {
        handleRestoredTasksFromStateUpdater(now, offsetResetter);
    }
    // Deliberately re-query restoresActiveTasks(): handling restored tasks above may
    // have changed the updater's state since the earlier check, so do not hoist it.
    return !stateUpdater.restoresActiveTasks()
        && !tasks.hasPendingTasksToInit();
}
|
// Two tasks failing with TaskCorruptedException in the state updater must be merged
// into a single rethrown TaskCorruptedException covering both task ids.
@Test
public void shouldRethrowTaskCorruptedExceptionFromStateUpdater() {
    final StreamTask statefulTask0 = statefulTask(taskId00, taskId00ChangelogPartitions)
        .inState(State.RESTORING)
        .withInputPartitions(taskId00Partitions).build();
    final StreamTask statefulTask1 = statefulTask(taskId01, taskId01ChangelogPartitions)
        .inState(State.RESTORING)
        .withInputPartitions(taskId01Partitions).build();
    final ExceptionAndTask exceptionAndTasks0 =
        new ExceptionAndTask(new TaskCorruptedException(Collections.singleton(taskId00)), statefulTask0);
    final ExceptionAndTask exceptionAndTasks1 =
        new ExceptionAndTask(new TaskCorruptedException(Collections.singleton(taskId01)), statefulTask1);
    when(stateUpdater.hasExceptionsAndFailedTasks()).thenReturn(true);
    when(stateUpdater.drainExceptionsAndFailedTasks()).thenReturn(Arrays.asList(exceptionAndTasks0, exceptionAndTasks1));
    final TasksRegistry tasks = mock(TasksRegistry.class);
    final TaskManager taskManager = setUpTaskManager(ProcessingMode.AT_LEAST_ONCE, tasks, true);
    final TaskCorruptedException thrown = assertThrows(
        TaskCorruptedException.class,
        () -> taskManager.checkStateUpdater(time.milliseconds(), noOpResetter)
    );
    // Both corrupted task ids must be reported together.
    assertEquals(mkSet(taskId00, taskId01), thrown.corruptedTasks());
    assertEquals("Tasks [0_1, 0_0] are corrupted and hence need to be re-initialized", thrown.getMessage());
}
|
/**
 * Adds security-related settings to any non-null configuration before handing it
 * to the superclass; a null configuration is passed through untouched.
 */
@Override
public void setConf(Configuration conf) {
    super.setConf(conf == null ? null : addSecurityConfiguration(conf));
}
|
// A failover with --forceactive against a ready standby must succeed (exit code 0).
@Test
public void testFailoverWithForceActive() throws Exception {
    Mockito.doReturn(STANDBY_READY_RESULT).when(mockProtocol).getServiceStatus();
    HdfsConfiguration conf = getHAConf();
    conf.set(DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY, getFencerTrueCommand());
    tool.setConf(conf);
    assertEquals(0, runTool("-failover", "nn1", "nn2", "--forceactive"));
}
|
/**
 * Builds the Spark batch for this scan from the current read configuration,
 * planned task groups, and expected schema.
 */
@Override
public Batch toBatch() {
    return new SparkBatch(
        sparkContext, table, readConf, groupingKeyType(), taskGroups(), expectedSchema, hashCode());
}
|
// Pushing a days(ts) < ... predicate (and its negation) down to a days-partitioned
// table must still plan the same number of input partitions.
@Test
public void testPartitionedDays() throws Exception {
    createPartitionedTable(spark, tableName, "days(ts)");
    SparkScanBuilder builder = scanBuilder();
    DaysFunction.TimestampToDaysFunction function = new DaysFunction.TimestampToDaysFunction();
    UserDefinedScalarFunc udf = toUDF(function, expressions(fieldRef("ts")));
    Predicate predicate =
        new Predicate(
            "<",
            expressions(
                udf, dateLit(timestampStrToDayOrdinal("2018-11-20T00:00:00.000000+00:00"))));
    pushFilters(builder, predicate);
    Batch scan = builder.build().toBatch();
    assertThat(scan.planInputPartitions().length).isEqualTo(5);
    // NOT LT
    builder = scanBuilder();
    predicate = new Not(predicate);
    pushFilters(builder, predicate);
    scan = builder.build().toBatch();
    assertThat(scan.planInputPartitions().length).isEqualTo(5);
}
|
/**
 * Computes the number of bytes needed to encode {@code value} as a protobuf
 * base-128 varint: one byte per 7 significant bits, so 1-5 bytes for an int
 * (negative values always take 5).
 */
static int computeRawVarint32Size(final int value) {
    int size = 1;
    // Each additional 7-bit group above the lowest one costs one more byte.
    int remaining = value >>> 7;
    while (remaining != 0) {
        size++;
        remaining >>>= 7;
    }
    return size;
}
|
// 0x4000 needs a 3-byte varint (80 80 01); the prepender must emit that prefix
// followed by the unmodified payload.
@Test
public void testSize3Varint() {
    final int size = 3;
    final int num = 0x4000;
    assertThat(ProtobufVarint32LengthFieldPrepender.computeRawVarint32Size(num), is(size));
    final byte[] buf = new byte[size + num];
    /**
     * 8 0 8 0 0 1
     * 1000 0000 1000 0000 0000 0001
     * 0000 0000 0000 0000 0000 0001
     * 0000 0001 0000 0000 0000 0000
     * 000 0001 000 0000 000 0000
     *
     * 0 0000 0100 0000 0000 0000
     * 0 0 4 0 0 0
     *
     */
    // Expected varint prefix bytes for 0x4000.
    buf[0] = (byte) (0x80 & 0xFF);
    buf[1] = (byte) (0x80 & 0xFF);
    buf[2] = 0x01;
    for (int i = size; i < num + size; ++i) {
        buf[i] = 1;
    }
    // Write only the payload; the channel must prepend the 3-byte varint itself.
    assertTrue(ch.writeOutbound(wrappedBuffer(buf, size, buf.length - size)));
    ByteBuf expected = wrappedBuffer(buf);
    ByteBuf actual = ch.readOutbound();
    assertThat(expected, is(actual));
    assertFalse(ch.finish());
    expected.release();
    actual.release();
}
|
/**
 * Merges a source table schema with columns, watermarks, and a primary key derived
 * from a CREATE TABLE ... LIKE statement, applying the per-feature merging strategies.
 *
 * @param mergingStrategies     strategy (including/excluding/overwriting) per feature
 * @param sourceSchema          schema of the referenced source table
 * @param derivedColumns        column declarations from the derived table
 * @param derivedWatermarkSpecs watermark declarations from the derived table
 * @param derivedPrimaryKey     primary-key constraint from the derived table, may be null
 * @return the merged schema
 */
public Schema mergeTables(
        Map<FeatureOption, MergingStrategy> mergingStrategies,
        Schema sourceSchema,
        List<SqlNode> derivedColumns,
        List<SqlWatermark> derivedWatermarkSpecs,
        SqlTableConstraint derivedPrimaryKey) {
    SchemaBuilder schemaBuilder =
        new SchemaBuilder(
            mergingStrategies,
            sourceSchema,
            (FlinkTypeFactory) validator.getTypeFactory(),
            dataTypeFactory,
            validator,
            escapeExpression);
    // Order matters: columns first, then watermarks that may reference them,
    // then the primary key over the final column set.
    schemaBuilder.appendDerivedColumns(mergingStrategies, derivedColumns);
    schemaBuilder.appendDerivedWatermarks(mergingStrategies, derivedWatermarkSpecs);
    schemaBuilder.appendDerivedPrimaryKey(derivedPrimaryKey);
    return schemaBuilder.build();
}
|
// With METADATA = EXCLUDING, a derived metadata column with the same name replaces
// the source's metadata column instead of clashing with it.
@Test
void mergeExcludingMetadataColumnsDuplicate() {
    Schema sourceSchema =
        Schema.newBuilder()
            .column("one", DataTypes.INT())
            .columnByMetadata("two", DataTypes.INT())
            .build();
    List<SqlNode> derivedColumns =
        Collections.singletonList(metadataColumn("two", DataTypes.BOOLEAN(), false));
    Map<FeatureOption, MergingStrategy> mergingStrategies = getDefaultMergingStrategies();
    mergingStrategies.put(FeatureOption.METADATA, MergingStrategy.EXCLUDING);
    Schema mergedSchema =
        util.mergeTables(
            mergingStrategies,
            sourceSchema,
            derivedColumns,
            Collections.emptyList(),
            null);
    Schema expectedSchema =
        Schema.newBuilder()
            .column("one", DataTypes.INT())
            .columnByMetadata("two", DataTypes.BOOLEAN())
            .build();
    assertThat(mergedSchema).isEqualTo(expectedSchema);
}
|
/**
 * Validates a precondition about the named argument.
 *
 * @param isValid result of the caller's validity check
 * @param argName argument name used in the error message
 * @throws IllegalArgumentException when {@code isValid} is false
 */
public static void checkValid(boolean isValid, String argName) {
    checkArgument(isValid, "'%s' is invalid.", argName);
}
|
// Exercises the checkValid overload that also reports the list of valid values.
@Test
public void testCheckValidWithValues() throws Exception {
    String validValues = "foo, bar";
    // Should not throw.
    Validate.checkValid(true, "arg", validValues);
    // Verify it throws.
    intercept(IllegalArgumentException.class,
        "'arg' is invalid. Valid values are: foo, bar",
        () -> Validate.checkValid(false, "arg", validValues));
}
|
/**
 * Delegates registration to the underlying client.
 */
@Override
public void register() {
    client.register();
}
|
/**
 * Verifies registration is delegated to the underlying client exactly once.
 */
@Test
public void register() {
    nacosRegister.register();
    // verify(client) defaults to times(1).
    Mockito.verify(client).register();
}
|
/**
 * Instantiates and initializes a custom {@link Catalog} implementation via its
 * no-arg constructor.
 *
 * @param impl        fully-qualified class name of the Catalog implementation
 * @param catalogName name passed to {@code Catalog.initialize}
 * @param properties  catalog properties passed to {@code Catalog.initialize}
 * @param hadoopConf  Hadoop configuration applied when the catalog is Configurable
 * @return the initialized catalog
 * @throws IllegalArgumentException if the class has no usable no-arg constructor
 *         or does not implement Catalog
 */
public static Catalog loadCatalog(
        String impl, String catalogName, Map<String, String> properties, Object hadoopConf) {
    Preconditions.checkNotNull(impl, "Cannot initialize custom Catalog, impl class name is null");
    DynConstructors.Ctor<Catalog> ctor;
    try {
        ctor = DynConstructors.builder(Catalog.class).impl(impl).buildChecked();
    } catch (NoSuchMethodException e) {
        throw new IllegalArgumentException(
            String.format("Cannot initialize Catalog implementation %s: %s", impl, e.getMessage()),
            e);
    }
    Catalog catalog;
    try {
        catalog = ctor.newInstance();
    } catch (ClassCastException e) {
        // The class exists but is not assignable to Catalog.
        throw new IllegalArgumentException(
            String.format("Cannot initialize Catalog, %s does not implement Catalog.", impl), e);
    }
    configureHadoopConf(catalog, hadoopConf);
    catalog.initialize(catalogName, properties);
    return catalog;
}
|
// A catalog class without a no-arg constructor must be rejected with a descriptive
// IllegalArgumentException naming the missing constructor.
@Test
public void loadCustomCatalog_NoArgConstructorNotFound() {
    Map<String, String> options = Maps.newHashMap();
    options.put("key", "val");
    Configuration hadoopConf = new Configuration();
    String name = "custom";
    assertThatThrownBy(
            () ->
                CatalogUtil.loadCatalog(
                    TestCatalogBadConstructor.class.getName(), name, options, hadoopConf))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessageStartingWith("Cannot initialize Catalog implementation")
        .hasMessageContaining(
            "NoSuchMethodException: org.apache.iceberg.TestCatalogUtil$TestCatalogBadConstructor.<init>()");
}
|
/**
 * Builds root resource models from the given set of Rest.li-annotated classes.
 *
 * @param restliAnnotatedClasses classes carrying Rest.li resource annotations
 * @return map from root resource path to its resource model
 */
public static Map<String, ResourceModel> buildResourceModels(final Set<Class<?>> restliAnnotatedClasses)
{
    final Map<String, ResourceModel> rootResourceModels = new HashMap<>();
    // Tracks every processed class so parents are only built once.
    final Map<Class<?>, ResourceModel> processedResources = new HashMap<>();
    for (Class<?> resourceClass : restliAnnotatedClasses)
    {
        processResourceInOrder(resourceClass, processedResources, rootResourceModels);
    }
    return rootResourceModels;
}
|
// Building models for a parent and its sub-resource must register the sub-resource
// under the parent model.
@Test
public void testProcessResource()
{
    Set<Class<?>> set = new HashSet<>();
    set.add(ParentResource.class);
    set.add(TestResource.class);
    Map<String, ResourceModel> models = RestLiApiBuilder.buildResourceModels(set);
    ResourceModel parentResource = models.get("/ParentResource");
    Assert.assertNotNull(parentResource.getSubResource("TestResource"));
}
|
/**
 * Returns the base images whose manifests, container configs, and all layers are
 * fully cached; returns an empty list when anything required is missing so that a
 * fresh pull is triggered.
 */
@VisibleForTesting
List<Image> getCachedBaseImages()
    throws IOException, CacheCorruptedException, BadContainerConfigurationFormatException,
        LayerCountMismatchException, UnlistedPlatformInManifestListException,
        PlatformNotFoundInBaseImageException {
    ImageReference baseImage = buildContext.getBaseImageConfiguration().getImage();
    Optional<ImageMetadataTemplate> metadata =
        buildContext.getBaseImageLayersCache().retrieveMetadata(baseImage);
    if (!metadata.isPresent()) {
        // Nothing cached for this base image at all.
        return Collections.emptyList();
    }
    ManifestTemplate manifestList = metadata.get().getManifestList();
    List<ManifestAndConfigTemplate> manifestsAndConfigs = metadata.get().getManifestsAndConfigs();
    if (manifestList == null) {
        // Single-platform image: exactly one manifest/config pair is expected.
        Verify.verify(manifestsAndConfigs.size() == 1);
        ManifestAndConfigTemplate manifestAndConfig = manifestsAndConfigs.get(0);
        Optional<Image> cachedImage = getBaseImageIfAllLayersCached(manifestAndConfig, true);
        if (!cachedImage.isPresent()) {
            return Collections.emptyList();
        }
        return Collections.singletonList(cachedImage.get());
    }
    // Manifest list cached. Identify matching platforms and check if all of them are cached.
    ImmutableList.Builder<Image> images = ImmutableList.builder();
    for (Platform platform : buildContext.getContainerConfiguration().getPlatforms()) {
        String manifestDigest =
            lookUpPlatformSpecificImageManifest((ManifestListTemplate) manifestList, platform);
        Optional<ManifestAndConfigTemplate> manifestAndConfigFound =
            manifestsAndConfigs.stream()
                .filter(entry -> manifestDigest.equals(entry.getManifestDigest()))
                .findFirst();
        if (!manifestAndConfigFound.isPresent()) {
            // Any missing platform invalidates the whole cached set.
            return Collections.emptyList();
        }
        Optional<Image> cachedImage =
            getBaseImageIfAllLayersCached(manifestAndConfigFound.get(), false);
        if (!cachedImage.isPresent()) {
            return Collections.emptyList();
        }
        images.add(cachedImage.get());
    }
    return images.build();
}
|
// A fully cached single manifest (all layers present) must yield exactly one image
// carrying the cached container config's architecture and OS.
@Test
public void testGetCachedBaseImages_manifestCached()
    throws InvalidImageReferenceException, IOException, CacheCorruptedException,
        UnlistedPlatformInManifestListException, BadContainerConfigurationFormatException,
        LayerCountMismatchException, PlatformNotFoundInBaseImageException {
    ImageReference imageReference = ImageReference.parse("cat");
    Mockito.when(buildContext.getBaseImageConfiguration())
        .thenReturn(ImageConfiguration.builder(imageReference).build());
    ContainerConfigurationTemplate containerConfigJson = new ContainerConfigurationTemplate();
    containerConfigJson.setArchitecture("slim arch");
    containerConfigJson.setOs("fat system");
    ManifestAndConfigTemplate manifestAndConfig =
        new ManifestAndConfigTemplate(
            Mockito.mock(BuildableManifestTemplate.class), containerConfigJson, "sha256:digest");
    // null manifest list -> single-platform path.
    ImageMetadataTemplate imageMetadata =
        new ImageMetadataTemplate(null, Arrays.asList(manifestAndConfig));
    Mockito.when(cache.retrieveMetadata(imageReference)).thenReturn(Optional.of(imageMetadata));
    Mockito.when(cache.areAllLayersCached(manifestAndConfig.getManifest())).thenReturn(true);
    List<Image> images = pullBaseImageStep.getCachedBaseImages();
    Assert.assertEquals(1, images.size());
    Assert.assertEquals("slim arch", images.get(0).getArchitecture());
    Assert.assertEquals("fat system", images.get(0).getOs());
}
|
/**
 * Validates that the role exists and is allowed to be updated.
 *
 * @param id role id to check
 * @return the role when it exists and is not a built-in system role
 */
@VisibleForTesting
RoleDO validateRoleForUpdate(Long id) {
    RoleDO role = roleMapper.selectById(id);
    if (role == null) {
        throw exception(ROLE_NOT_EXISTS);
    }
    // Built-in (system) roles must not be updated.
    if (RoleTypeEnum.SYSTEM.getType().equals(role.getType())) {
        throw exception(ROLE_CAN_NOT_UPDATE_SYSTEM_TYPE_ROLE);
    }
    return role;
}
|
// Validating a random, non-existent role id must raise ROLE_NOT_EXISTS.
@Test
public void testValidateUpdateRole_roleIdNotExist() {
    assertServiceException(() -> roleService.validateRoleForUpdate(randomLongId()), ROLE_NOT_EXISTS);
}
|
/**
 * Creates a timer measuring in the given unit.
 * NOTE(review): the {@code name} argument is ignored by this implementation —
 * confirm that is intentional.
 */
@Override
public Timer timer(String name, TimeUnit unit) {
    return new DefaultTimer(unit);
}
|
// A recorded duration must be reflected in the timer's total duration.
@Test
public void timer() {
    MetricsContext metricsContext = new DefaultMetricsContext();
    Timer timer = metricsContext.timer("test", TimeUnit.MICROSECONDS);
    timer.record(10, TimeUnit.MINUTES);
    assertThat(timer.totalDuration()).isEqualTo(Duration.ofMinutes(10L));
}
|
/**
 * Returns the mini (popup) keyboard view, or null when none has been created.
 */
protected final AnyKeyboardViewBase getMiniKeyboard() {
    return mMiniKeyboard;
}
|
// A key with no primary code and no popup but with a longPressCode must emit that
// code on long-press instead of opening a mini keyboard.
@Test
public void testLongPressWhenNoPrimaryKeyAndNoPopupItemsButLongPressCodeShouldOutputLongPress()
    throws Exception {
    ExternalAnyKeyboard anyKeyboard =
        new ExternalAnyKeyboard(
            new DefaultAddOn(getApplicationContext(), getApplicationContext()),
            getApplicationContext(),
            keyboard_with_keys_with_no_codes,
            keyboard_with_keys_with_no_codes,
            "test",
            0,
            0,
            "en",
            "",
            "",
            KEYBOARD_ROW_MODE_NORMAL);
    anyKeyboard.loadKeyboard(mViewUnderTest.mKeyboardDimens);
    mViewUnderTest.setKeyboard(anyKeyboard, 0);
    // Sanity-check the fixture key: no codes, no popup, long-press code 45 ("-").
    final AnyKeyboard.AnyKey key = (AnyKeyboard.AnyKey) anyKeyboard.getKeys().get(2);
    Assert.assertEquals(0, key.getPrimaryCode());
    Assert.assertEquals(0, key.getCodesCount());
    Assert.assertEquals(0, key.popupResId);
    Assert.assertEquals(45, key.longPressCode);
    Assert.assertEquals("c", key.label);
    Assert.assertNull(key.popupCharacters);
    // Simulate a long press on the key.
    ViewTestUtils.navigateFromTo(mViewUnderTest, key, key, 1000, true, false);
    TestRxSchedulers.foregroundAdvanceBy(1);
    // No mini keyboard should appear; the long-press code is delivered instead.
    Assert.assertNull(mViewUnderTest.getMiniKeyboard());
    Assert.assertFalse(mViewUnderTest.mMiniKeyboardPopup.isShowing());
    Mockito.verify(mMockKeyboardListener)
        .onKey(eq(45), same(key), eq(0), Mockito.nullable(int[].class), Mockito.anyBoolean());
}
|
/**
 * Delegates a backward session-range lookup to the wrapped store.
 */
@Override
public KeyValueIterator<Windowed<Bytes>, byte[]> backwardFindSessions(final Bytes key,
                                                                      final long earliestSessionEndTime,
                                                                      final long latestSessionStartTime) {
    return wrapped().backwardFindSessions(key, earliestSessionEndTime, latestSessionStartTime);
}
|
// The keyed-range overload must be forwarded to the inner store with identical arguments.
@Test
public void shouldDelegateToUnderlyingStoreWhenBackwardFindingSessionRange() {
    store.backwardFindSessions(bytesKey, bytesKey, 0, 1);
    verify(inner).backwardFindSessions(bytesKey, bytesKey, 0, 1);
}
|
/**
 * Creates an empty mastership info: term 0, no master, and no role assignments.
 */
public MastershipInfo() {
    this(0, Optional.empty(), ImmutableMap.of());
}
|
// Verifies term, master, backups, and per-role node lists of the populated fixture.
@Test
public void testMastershipInfo() throws Exception {
    assertEquals(1, mastershipInfo.term());
    assertEquals(node1, mastershipInfo.master().get());
    assertEquals(Lists.newArrayList(node1), mastershipInfo.getRoles(MastershipRole.MASTER));
    assertEquals(Lists.newArrayList(node2, node3), mastershipInfo.backups());
    assertEquals(Lists.newArrayList(node2, node3), mastershipInfo.getRoles(MastershipRole.STANDBY));
    assertEquals(Lists.newArrayList(node4), mastershipInfo.getRoles(MastershipRole.NONE));
}
|
/**
 * No-op: this implementation requires no configuration properties.
 */
@Override
public void setProperties(final Properties properties) {
}
|
// setProperties must accept arbitrary properties without throwing.
@Test
public void setPropertiesTest() {
    final OracleSQLPrepareInterceptor oracleSQLPrepareInterceptor = new OracleSQLPrepareInterceptor();
    Assertions.assertDoesNotThrow(() -> oracleSQLPrepareInterceptor.setProperties(mock(Properties.class)));
}
|
/**
 * Sends a request synchronously over gRPC and waits for the response.
 *
 * @param request  request to send
 * @param timeouts timeout in milliseconds; a non-positive value waits indefinitely
 * @return the parsed response
 * @throws NacosException with {@code SERVER_ERROR} when the call fails, is
 *         interrupted, or times out
 */
@Override
public Response request(Request request, long timeouts) throws NacosException {
    Payload grpcRequest = GrpcUtils.convert(request);
    ListenableFuture<Payload> requestFuture = grpcFutureServiceStub.request(grpcRequest);
    Payload grpcResponse;
    try {
        if (timeouts <= 0) {
            grpcResponse = requestFuture.get();
        } else {
            grpcResponse = requestFuture.get(timeouts, TimeUnit.MILLISECONDS);
        }
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers can still observe the interruption.
        Thread.currentThread().interrupt();
        throw new NacosException(NacosException.SERVER_ERROR, e);
    } catch (Exception e) {
        throw new NacosException(NacosException.SERVER_ERROR, e);
    }
    return (Response) GrpcUtils.parse(grpcResponse);
}
|
// A synchronous health-check request (no timeout) must yield a HealthCheckResponse.
@Test
void testRequestSuccessSync() throws NacosException {
    Response response = connection.request(new HealthCheckRequest(), -1);
    assertTrue(response instanceof HealthCheckResponse);
}
|
/**
 * Serializes the given config to XML on the output stream, optionally running the
 * preprocessing/validation pipeline first, then validating the produced DOM against
 * the XSD before writing.
 *
 * @param configForEdit                 local (non-merged) configuration to serialize
 * @param output                        destination stream
 * @param skipPreprocessingAndValidation when true, skips preprocess-and-validate
 * @throws GoConfigInvalidException if the config origin is not local
 */
public void write(CruiseConfig configForEdit, OutputStream output, boolean skipPreprocessingAndValidation) throws Exception {
    LOGGER.debug("[Serializing Config] Starting to write. Validation skipped? {}", skipPreprocessingAndValidation);
    MagicalGoConfigXmlLoader loader = new MagicalGoConfigXmlLoader(configCache, registry);
    // Merged configs (with remote partials) must never be written back to disk.
    if (!configForEdit.getOrigin().isLocal()) {
        throw new GoConfigInvalidException(configForEdit, "Attempted to save merged configuration with partials");
    }
    if (!skipPreprocessingAndValidation) {
        loader.preprocessAndValidate(configForEdit);
        LOGGER.debug("[Serializing Config] Done with cruise config validators.");
    }
    Document document = createEmptyCruiseConfigDocument();
    write(configForEdit, document.getRootElement(), configCache, registry);
    LOGGER.debug("[Serializing Config] XSD and DOM validation.");
    verifyXsdValid(document);
    MagicalGoConfigXmlLoader.validateDom(document.getRootElement(), registry);
    LOGGER.info("[Serializing Config] Generating config partial.");
    XmlUtils.writeXml(document, output);
    LOGGER.debug("[Serializing Config] Finished writing config partial.");
}
|
// Two package repositories sharing a (case-insensitive) name must fail XML
// serialization with a descriptive validation error.
@Test
public void shouldNotAllowMultipleRepositoriesWithSameName() throws Exception {
    Configuration packageConfiguration = new Configuration(getConfigurationProperty("name", false, "go-agent"));
    Configuration repositoryConfiguration = new Configuration(getConfigurationProperty("url", false, "http://go"));
    PackageRepository packageRepository = createPackageRepository("plugin-id", "version", "id1", "name", repositoryConfiguration,
        new Packages(new PackageDefinition("id1", "name1", packageConfiguration)));
    PackageRepository anotherPackageRepository = createPackageRepository("plugin-id", "version", "id2", "name", repositoryConfiguration,
        new Packages(new PackageDefinition("id2", "name2", packageConfiguration)));
    cruiseConfig.setPackageRepositories(new PackageRepositories(packageRepository, anotherPackageRepository));
    try {
        xmlWriter.write(cruiseConfig, output, false);
        fail("should not have allowed two repositories with same id");
    } catch (GoConfigInvalidException e) {
        assertThat(e.getMessage(), is("You have defined multiple repositories called 'name'. Repository names are case-insensitive and must be unique."));
    }
}
|
@Override
public EncodedMessage transform(ActiveMQMessage message) throws Exception {
if (message == null) {
return null;
}
long messageFormat = 0;
Header header = null;
Properties properties = null;
Map<Symbol, Object> daMap = null;
Map<Symbol, Object> maMap = null;
Map<String,Object> apMap = null;
Map<Object, Object> footerMap = null;
Section body = convertBody(message);
if (message.isPersistent()) {
if (header == null) {
header = new Header();
}
header.setDurable(true);
}
byte priority = message.getPriority();
if (priority != Message.DEFAULT_PRIORITY) {
if (header == null) {
header = new Header();
}
header.setPriority(UnsignedByte.valueOf(priority));
}
String type = message.getType();
if (type != null) {
if (properties == null) {
properties = new Properties();
}
properties.setSubject(type);
}
MessageId messageId = message.getMessageId();
if (messageId != null) {
if (properties == null) {
properties = new Properties();
}
properties.setMessageId(getOriginalMessageId(message));
}
ActiveMQDestination destination = message.getDestination();
if (destination != null) {
if (properties == null) {
properties = new Properties();
}
properties.setTo(destination.getQualifiedName());
if (maMap == null) {
maMap = new HashMap<>();
}
maMap.put(JMS_DEST_TYPE_MSG_ANNOTATION, destinationType(destination));
}
ActiveMQDestination replyTo = message.getReplyTo();
if (replyTo != null) {
if (properties == null) {
properties = new Properties();
}
properties.setReplyTo(replyTo.getQualifiedName());
if (maMap == null) {
maMap = new HashMap<>();
}
maMap.put(JMS_REPLY_TO_TYPE_MSG_ANNOTATION, destinationType(replyTo));
}
String correlationId = message.getCorrelationId();
if (correlationId != null) {
if (properties == null) {
properties = new Properties();
}
try {
properties.setCorrelationId(AMQPMessageIdHelper.INSTANCE.toIdObject(correlationId));
} catch (AmqpProtocolException e) {
properties.setCorrelationId(correlationId);
}
}
long expiration = message.getExpiration();
if (expiration != 0) {
long ttl = expiration - System.currentTimeMillis();
if (ttl < 0) {
ttl = 1;
}
if (header == null) {
header = new Header();
}
header.setTtl(new UnsignedInteger((int) ttl));
if (properties == null) {
properties = new Properties();
}
properties.setAbsoluteExpiryTime(new Date(expiration));
}
long timeStamp = message.getTimestamp();
if (timeStamp != 0) {
if (properties == null) {
properties = new Properties();
}
properties.setCreationTime(new Date(timeStamp));
}
// JMSX Message Properties
int deliveryCount = message.getRedeliveryCounter();
if (deliveryCount > 0) {
if (header == null) {
header = new Header();
}
header.setDeliveryCount(UnsignedInteger.valueOf(deliveryCount));
}
String userId = message.getUserID();
if (userId != null) {
if (properties == null) {
properties = new Properties();
}
properties.setUserId(new Binary(userId.getBytes(StandardCharsets.UTF_8)));
}
String groupId = message.getGroupID();
if (groupId != null) {
if (properties == null) {
properties = new Properties();
}
properties.setGroupId(groupId);
}
int groupSequence = message.getGroupSequence();
if (groupSequence > 0) {
if (properties == null) {
properties = new Properties();
}
properties.setGroupSequence(UnsignedInteger.valueOf(groupSequence));
}
final Map<String, Object> entries;
try {
entries = message.getProperties();
} catch (IOException e) {
throw JMSExceptionSupport.create(e);
}
for (Map.Entry<String, Object> entry : entries.entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
if (key.startsWith(JMS_AMQP_PREFIX)) {
if (key.startsWith(NATIVE, JMS_AMQP_PREFIX_LENGTH)) {
// skip transformer appended properties
continue;
} else if (key.startsWith(ORIGINAL_ENCODING, JMS_AMQP_PREFIX_LENGTH)) {
// skip transformer appended properties
continue;
} else if (key.startsWith(MESSAGE_FORMAT, JMS_AMQP_PREFIX_LENGTH)) {
messageFormat = (long) TypeConversionSupport.convert(entry.getValue(), Long.class);
continue;
} else if (key.startsWith(HEADER, JMS_AMQP_PREFIX_LENGTH)) {
if (header == null) {
header = new Header();
}
continue;
} else if (key.startsWith(PROPERTIES, JMS_AMQP_PREFIX_LENGTH)) {
if (properties == null) {
properties = new Properties();
}
continue;
} else if (key.startsWith(MESSAGE_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
if (maMap == null) {
maMap = new HashMap<>();
}
String name = key.substring(JMS_AMQP_MESSAGE_ANNOTATION_PREFIX.length());
maMap.put(Symbol.valueOf(name), value);
continue;
} else if (key.startsWith(FIRST_ACQUIRER, JMS_AMQP_PREFIX_LENGTH)) {
if (header == null) {
header = new Header();
}
header.setFirstAcquirer((boolean) TypeConversionSupport.convert(value, Boolean.class));
continue;
} else if (key.startsWith(CONTENT_TYPE, JMS_AMQP_PREFIX_LENGTH)) {
if (properties == null) {
properties = new Properties();
}
properties.setContentType(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class)));
continue;
} else if (key.startsWith(CONTENT_ENCODING, JMS_AMQP_PREFIX_LENGTH)) {
if (properties == null) {
properties = new Properties();
}
properties.setContentEncoding(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class)));
continue;
} else if (key.startsWith(REPLYTO_GROUP_ID, JMS_AMQP_PREFIX_LENGTH)) {
if (properties == null) {
properties = new Properties();
}
properties.setReplyToGroupId((String) TypeConversionSupport.convert(value, String.class));
continue;
} else if (key.startsWith(DELIVERY_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
if (daMap == null) {
daMap = new HashMap<>();
}
String name = key.substring(JMS_AMQP_DELIVERY_ANNOTATION_PREFIX.length());
daMap.put(Symbol.valueOf(name), value);
continue;
} else if (key.startsWith(FOOTER_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
if (footerMap == null) {
footerMap = new HashMap<>();
}
String name = key.substring(JMS_AMQP_FOOTER_PREFIX.length());
footerMap.put(Symbol.valueOf(name), value);
continue;
}
} else if (key.startsWith(AMQ_SCHEDULED_MESSAGE_PREFIX )) {
// strip off the scheduled message properties
continue;
}
// The property didn't map into any other slot so we store it in the
// Application Properties section of the message.
if (apMap == null) {
apMap = new HashMap<>();
}
apMap.put(key, value);
int messageType = message.getDataStructureType();
if (messageType == CommandTypes.ACTIVEMQ_MESSAGE) {
// Type of command to recognize advisory message
Object data = message.getDataStructure();
if(data != null) {
apMap.put("ActiveMqDataStructureType", data.getClass().getSimpleName());
}
}
}
final AmqpWritableBuffer buffer = new AmqpWritableBuffer();
encoder.setByteBuffer(buffer);
if (header != null) {
encoder.writeObject(header);
}
if (daMap != null) {
encoder.writeObject(new DeliveryAnnotations(daMap));
}
if (maMap != null) {
encoder.writeObject(new MessageAnnotations(maMap));
}
if (properties != null) {
encoder.writeObject(properties);
}
if (apMap != null) {
encoder.writeObject(new ApplicationProperties(apMap));
}
if (body != null) {
encoder.writeObject(body);
}
if (footerMap != null) {
encoder.writeObject(new Footer(footerMap));
}
return new EncodedMessage(messageFormat, buffer.getArray(), 0, buffer.getArrayLength());
}
|
@Test
public void testConvertRemoveInfo() throws Exception {
// Builds an ActiveMQ message whose data structure is a RemoveInfo command,
// runs it through the JMS -> AMQP outbound transformer, and verifies that
// the command's type name and connection id survive the mapping.
String connectionId = "myConnectionId";
RemoveInfo dataStructure = new RemoveInfo(new ConnectionId(connectionId));
ActiveMQMessage outbound = createMessage();
Map<String, String> properties = new HashMap<String, String>();
properties.put("originUrl", "localhost");
outbound.setProperties(properties);
outbound.setDataStructure(dataStructure);
outbound.onSend();
outbound.storeContent();
JMSMappingOutboundTransformer transformer = new JMSMappingOutboundTransformer();
EncodedMessage encoded = transformer.transform(outbound);
assertNotNull(encoded);
Message amqp = encoded.decode();
// The transformer records the command's simple class name under the
// "ActiveMqDataStructureType" application property (see transform above).
assertNotNull(amqp.getApplicationProperties());
Map<String, Object> apMap = amqp.getApplicationProperties().getValue();
assertEquals(RemoveInfo.class.getSimpleName(), apMap.get("ActiveMqDataStructureType"));
// The body is expected to be an AmqpValue wrapping a map of the marshalled
// command fields, including the original connection id.
assertNotNull(amqp.getBody());
assertTrue(amqp.getBody() instanceof AmqpValue);
assertTrue(((AmqpValue) amqp.getBody()).getValue() instanceof Map);
@SuppressWarnings("unchecked")
Map<Object, Object> amqpMap = (Map<Object, Object>) ((AmqpValue) amqp.getBody()).getValue();
assertTrue(connectionId.equals(amqpMap.get("ConnectionId")));
}
|
// Encodes the given synthetic links as JSON by delegating to
// collateSynthLinks(); see that helper for the exact JSON shape produced.
protected JsonNode jsonLinks(List<UiSynthLink> links) {
return collateSynthLinks(links);
}
|
@Test
public void encodeSynthLinks() {
    title("encodeSynthLinks()");

    // Encode the synthetic links and check the resulting JSON array.
    ArrayNode encoded = (ArrayNode) t2.jsonLinks(createSynthLinks());
    print(encoded);
    assertEquals("wrong size", 2, encoded.size());

    ObjectNode nodeA = (ObjectNode) encoded.get(0);
    ObjectNode nodeB = (ObjectNode) encoded.get(1);

    // Array ordering is not guaranteed; dispatch on the id of the first entry.
    if (nodeA.get("id").asText().equals("rA~rB")) {
        validateSynthLinks(nodeA, nodeB);
    } else {
        validateSynthLinks(nodeB, nodeA);
    }
}
|
@VisibleForTesting
void removeDisableUsers(Set<Long> assigneeUserIds) {
    // Nothing to filter for an empty candidate set.
    if (CollUtil.isEmpty(assigneeUserIds)) {
        return;
    }
    // Resolve every candidate in a single batch call, then drop ids that are
    // unknown or whose account status is not ENABLE.
    Map<Long, AdminUserRespDTO> users = adminUserApi.getUserMap(assigneeUserIds);
    assigneeUserIds.removeIf(userId -> {
        AdminUserRespDTO candidate = users.get(userId);
        if (candidate == null) {
            return true;
        }
        return !CommonStatusEnum.ENABLE.getStatus().equals(candidate.getStatus());
    });
}
|
@Test
public void testRemoveDisableUsers() {
// Prepare arguments: 1L exists and is enabled; 2L is disabled; 3L cannot be found.
Set<Long> assigneeUserIds = asSet(1L, 2L, 3L);
// Mock the user lookup so only users 1 and 2 are returned.
AdminUserRespDTO user1 = randomPojo(AdminUserRespDTO.class, o -> o.setId(1L)
.setStatus(CommonStatusEnum.ENABLE.getStatus()));
AdminUserRespDTO user2 = randomPojo(AdminUserRespDTO.class, o -> o.setId(2L)
.setStatus(CommonStatusEnum.DISABLE.getStatus()));
Map<Long, AdminUserRespDTO> userMap = MapUtil.builder(user1.getId(), user1)
.put(user2.getId(), user2).build();
when(adminUserApi.getUserMap(eq(assigneeUserIds))).thenReturn(userMap);
// Invoke the filter under test.
taskCandidateInvoker.removeDisableUsers(assigneeUserIds);
// Assert: only the enabled, existing user (1L) remains.
assertEquals(asSet(1L), assigneeUserIds);
}
|
@Override
public <T> T persist(T detachedObject) {
    // Track objects persisted during this call so that cyclic object graphs
    // are stored only once by the recursive overload.
    Map<Object, Object> visited = new HashMap<>();
    return persist(detachedObject, visited, RCascadeType.PERSIST);
}
|
@Test
public void testPersist() {
// Persists a detached live object graph and verifies the stored state,
// then checks that persisting the same entity twice is rejected.
RLiveObjectService service = redisson.getLiveObjectService();
TestClass ts = new TestClass(new ObjectId(100));
ts.setValue("VALUE");
ts.setContent(new TestREntity("123"));
ts.addEntry("1", "2");
TestClass persisted = service.persist(ts);
// Three keys expected in Redis after persisting — presumably the entity,
// its nested content, and the map entry; confirm against the mapping.
assertEquals(3, redisson.getKeys().count());
assertEquals(1, persisted.getValues().size());
assertEquals("123", ((TestREntity)persisted.getContent()).getName());
assertEquals(new ObjectId(100), persisted.getId());
assertEquals("VALUE", persisted.getValue());
// A second persist of the same entity must fail with a descriptive message.
try {
service.persist(ts);
fail("Should not be here");
} catch (Exception e) {
assertEquals("This REntity already exists.", e.getMessage());
}
}
|
/**
 * Opens a download stream for the given file. The file's download URI is
 * resolved via the Brick Files API, then a plain HTTP GET is issued against
 * that URI. When the transfer status indicates an append/resume, a Range
 * header is added so only the remaining bytes are fetched.
 */
@Override
public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
try {
// Resolve the pre-signed/download URI for the file from the API.
final FileEntity entity = new FilesApi(new BrickApiClient(session))
.download(StringUtils.removeStart(file.getAbsolute(), String.valueOf(Path.DELIMITER)),
null, null, null, null);
final HttpUriRequest request = new HttpGet(entity.getDownloadUri());
if(status.isAppend()) {
// Request only the byte range still missing; an open-ended range is
// used when the total length is unknown.
final HttpRange range = HttpRange.withStatus(status);
final String header;
if(TransferStatus.UNKNOWN_LENGTH == range.getEnd()) {
header = String.format("bytes=%d-", range.getStart());
}
else {
header = String.format("bytes=%d-%d", range.getStart(), range.getEnd());
}
if(log.isDebugEnabled()) {
log.debug(String.format("Add range header %s for file %s", header, file));
}
request.addHeader(new BasicHeader(HttpHeaders.RANGE, header));
// Disable compression
request.addHeader(new BasicHeader(HttpHeaders.ACCEPT_ENCODING, "identity"));
}
final HttpResponse response = session.getClient().execute(request);
switch(response.getStatusLine().getStatusCode()) {
case HttpStatus.SC_OK:
case HttpStatus.SC_PARTIAL_CONTENT:
// The returned stream releases the HTTP connection on close.
return new HttpMethodReleaseInputStream(response, status);
default:
// Map any other status to a backend-specific failure.
throw new DefaultHttpResponseExceptionMappingService().map("Download {0} failed", new HttpResponseException(
response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase()), file);
}
}
catch(ApiException e) {
throw new BrickExceptionMappingService().map("Download {0} failed", e, file);
}
catch(IOException e) {
throw new DefaultIOExceptionMappingService().map("Download {0} failed", e, file);
}
}
|
@Test
public void testReadCloseReleaseEntity() throws Exception {
// Uploads random content, opens a read stream, and closes it immediately
// without consuming any bytes — closing must release the connection and
// must not implicitly drain the entity (byte count stays 0).
final TransferStatus status = new TransferStatus();
final byte[] content = RandomUtils.nextBytes(32769);
final TransferStatus writeStatus = new TransferStatus();
writeStatus.setLength(content.length);
final Path room = new BrickDirectoryFeature(session).mkdir(
new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus());
final Path test = new Path(room, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
final BrickMultipartWriteFeature writer = new BrickMultipartWriteFeature(session);
final HttpResponseOutputStream<FileEntity> out = writer.write(test, writeStatus, new DisabledConnectionCallback());
assertNotNull(out);
new StreamCopier(writeStatus, writeStatus).transfer(new ByteArrayInputStream(content), out);
final CountingInputStream in = new CountingInputStream(new BrickReadFeature(session).read(test, status, new DisabledConnectionCallback()));
in.close();
assertEquals(0L, in.getByteCount(), 0L);
// Cleanup: remove the test directory and its contents.
new BrickDeleteFeature(session).delete(Collections.singletonList(room), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
|
@Override
protected void refresh() {
    Iterable<ServerConfig> dbConfigs = serverConfigRepository.findAll();
    Map<String, Object> merged = Maps.newHashMap();

    // Layer the configs so that more specific scopes win:
    // default cluster < data center < explicitly configured cluster.
    putConfigsForCluster(dbConfigs, ConfigConsts.CLUSTER_NAME_DEFAULT, merged);
    putConfigsForCluster(dbConfigs, getCurrentDataCenter(), merged);
    String cluster = System.getProperty(ConfigConsts.APOLLO_CLUSTER_KEY);
    if (!Strings.isNullOrEmpty(cluster)) {
        putConfigsForCluster(dbConfigs, cluster, merged);
    }

    // Publish the merged values into the property source, logging newly
    // seen keys and changed values.
    for (Map.Entry<String, Object> entry : merged.entrySet()) {
        String key = entry.getKey();
        Object value = entry.getValue();
        Object previous = this.source.get(key);
        if (previous == null) {
            logger.info("Load config from DB : {} = {}", key, value);
        } else if (!Objects.equals(previous, value)) {
            logger.info("Load config from DB : {} = {}. Old value = {}", key,
                value, previous);
        }
        this.source.put(key, value);
    }
}

// Copies every config entry belonging to the given cluster into the target map.
private void putConfigsForCluster(Iterable<ServerConfig> configs, String cluster,
                                  Map<String, Object> target) {
    for (ServerConfig config : configs) {
        if (Objects.equals(cluster, config.getCluster())) {
            target.put(config.getKey(), config.getValue());
        }
    }
}
|
@Test
public void testGetNull() {
// After a refresh, a key that was never stored must resolve to null
// rather than throwing.
propertySource.refresh();
assertNull(propertySource.getProperty("noKey"));
}
|
/**
 * Extends the currently active (innermost) lock held by this thread.
 *
 * @throws NoActiveLockException if no lock is currently held
 * @throws LockCanNotBeExtendedException if the provider refuses the extension
 */
public static void extendActiveLock(Duration lockAtMostFor, Duration lockAtLeastFor) {
    // The most recently acquired lock sits at the tail of the thread-local deque.
    SimpleLock active = locks().peekLast();
    if (active == null) {
        throw new NoActiveLockException();
    }
    Optional<SimpleLock> extended = active.extend(lockAtMostFor, lockAtLeastFor);
    if (!extended.isPresent()) {
        throw new LockCanNotBeExtendedException();
    }
    // Swap the old lock for the extended one. The deque is thread-local, so
    // the remove-then-add pair is never observed by another thread.
    locks().removeLast();
    locks().addLast(extended.get());
}
|
@Test
void shouldFailIfLockCanNotBeExtended() {
// The mocked lock refuses the extension (empty Optional), so the task's
// call to extendActiveLock must surface LockCanNotBeExtendedException.
when(lock.extend(extendBy, ZERO)).thenReturn(Optional.empty());
Runnable task = () -> LockExtender.extendActiveLock(extendBy, ZERO);
assertThatThrownBy(() -> executor.executeWithLock(task, configuration))
.isInstanceOf(LockCanNotBeExtendedException.class);
}
|
/**
 * Returns the sink's dynamic destinations cast to the expected generic shape.
 * The unchecked cast looks safe only when the instance was constructed with
 * matching type parameters — presumably the field is stored with a wider
 * type; confirm against the declaration site.
 */
@SuppressWarnings("unchecked")
public DynamicDestinations<UserT, DestinationT, OutputT> getDynamicDestinations() {
return (DynamicDestinations<UserT, DestinationT, OutputT>) dynamicDestinations;
}
|
@Test
public void testGenerateOutputFilenamesWithoutExtension() {
    ResourceId root = getBaseOutputDirectory();
    SimpleSink<Void> sink =
        SimpleSink.makeSimpleSink(root, "file", "-SSSSS-of-NNNNN", "", Compression.UNCOMPRESSED);
    FilenamePolicy policy = sink.getDynamicDestinations().getFilenamePolicy(null);

    // Three shards: shard index and shard count are zero-padded per template.
    List<ResourceId> expected =
        Arrays.asList(
            root.resolve("file-00000-of-00003", StandardResolveOptions.RESOLVE_FILE),
            root.resolve("file-00001-of-00003", StandardResolveOptions.RESOLVE_FILE),
            root.resolve("file-00002-of-00003", StandardResolveOptions.RESOLVE_FILE));
    List<ResourceId> actual = generateDestinationFilenames(policy, 3);
    assertEquals(expected, actual);

    // Single shard.
    expected =
        Collections.singletonList(
            root.resolve("file-00000-of-00001", StandardResolveOptions.RESOLVE_FILE));
    actual = generateDestinationFilenames(policy, 1);
    assertEquals(expected, actual);

    // Zero shards yields an empty list.
    expected = new ArrayList<>();
    actual = generateDestinationFilenames(policy, 0);
    assertEquals(expected, actual);
}
|
/**
 * Returns the byte array backing the slice via {@code slice.byteArray()}.
 * NOTE(review): this appears to expose the raw backing buffer rather than a
 * defensive copy — callers should treat it as read-only; confirm against the
 * Slice implementation.
 */
public byte[] getByteArray()
{
return slice.byteArray();
}
|
@Test
public void testGetByteArray()
{
    int numElements = 100;
    Slice slice = Slices.allocate(2 * numElements);
    byte[] expected = new byte[2 * numElements];

    // Write the decimal representations of 0..99 back to back into the slice
    // and mirror the same bytes into the expected array; the tail of both
    // buffers stays zero-filled.
    int position = 0;
    for (int value = 0; value < numElements; value++) {
        byte[] bytes = String.valueOf(value).getBytes();
        slice.setBytes(position, bytes);
        System.arraycopy(bytes, 0, expected, position, bytes.length);
        position += bytes.length;
    }

    SimpleSliceInputStream stream = new SimpleSliceInputStream(slice);
    byte[] actual = stream.getByteArray();
    assertEquals(actual, expected);
}
|
/**
 * Writes the SCRAM-SHA256 authentication request payload. The tail of the
 * packet is protocol-version dependent: versions older than 350 additionally
 * carry the server signature, and exactly version 351 appends the server
 * iteration count.
 */
@Override
protected void write(final PostgreSQLPacketPayload payload) {
payload.writeInt4(AUTH_REQ_SHA256);
payload.writeInt4(PASSWORD_STORED_METHOD_SHA256);
payload.writeBytes(authHexData.getSalt().getBytes());
payload.writeBytes(authHexData.getNonce().getBytes());
// Pre-350 peers expect the server signature after salt and nonce.
if (version < OpenGaussProtocolVersion.PROTOCOL_350.getVersion()) {
payload.writeBytes(serverSignature.getBytes());
}
// Only protocol 351 carries the iteration count.
if (OpenGaussProtocolVersion.PROTOCOL_351.getVersion() == version) {
payload.writeInt4(serverIteration);
}
}
|
@Test
void assertWriteProtocol350Packet() {
// For protocol 350 neither the server signature (pre-350 only) nor the
// iteration count (351 only) is written: exactly two writeInt4 headers
// plus salt and nonce, i.e. two writeBytes calls in total.
PostgreSQLPacketPayload payload = mock(PostgreSQLPacketPayload.class);
OpenGaussAuthenticationSCRAMSha256Packet packet = new OpenGaussAuthenticationSCRAMSha256Packet(OpenGaussProtocolVersion.PROTOCOL_350.getVersion(), 2048, authHexData, "");
packet.write(payload);
verify(payload).writeInt4(10);
verify(payload).writeInt4(2);
verify(payload).writeBytes(authHexData.getSalt().getBytes());
verify(payload).writeBytes(authHexData.getNonce().getBytes());
verify(payload, times(2)).writeBytes(any());
}
|
@Override
public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) {
    // A monotonically increasing per-topic counter drives the round-robin choice.
    int counter = nextValue(topic);
    List<PartitionInfo> available = cluster.availablePartitionsForTopic(topic);
    if (available.isEmpty()) {
        // No partition currently has a leader; still return a deterministic
        // (non-available) partition so the produce attempt can proceed.
        int numPartitions = cluster.partitionsForTopic(topic).size();
        return Utils.toPositive(counter) % numPartitions;
    }
    int index = Utils.toPositive(counter) % available.size();
    return available.get(index).partition();
}
|
@Test
public void testRoundRobinWithUnavailablePartitions() {
// Intentionally make the partition list not in partition order to test the edge
// cases. Partition 1 has no leader (null), so only partitions 0 and 2 are
// considered available.
List<PartitionInfo> partitions = asList(
new PartitionInfo("test", 1, null, NODES, NODES),
new PartitionInfo("test", 2, NODES[1], NODES, NODES),
new PartitionInfo("test", 0, NODES[0], NODES, NODES));
// When there are some unavailable partitions, we want to make sure that (1) we
// always pick an available partition,
// and (2) the available partitions are selected in a round robin way.
int countForPart0 = 0;
int countForPart2 = 0;
Partitioner partitioner = new RoundRobinPartitioner();
Cluster cluster = new Cluster("clusterId", asList(NODES[0], NODES[1], NODES[2]), partitions,
Collections.emptySet(), Collections.emptySet());
// 100 iterations over 2 available partitions should split exactly 50/50.
for (int i = 1; i <= 100; i++) {
int part = partitioner.partition("test", null, null, null, null, cluster);
assertTrue(part == 0 || part == 2, "We should never choose a leader-less node in round robin");
if (part == 0)
countForPart0++;
else
countForPart2++;
}
assertEquals(countForPart0, countForPart2, "The distribution between two available partitions should be even");
}
|
/**
 * Returns true when {@code date2} is the same as or after {@code date1}.
 * Relies on lexicographic ordering of the date strings, which is only valid
 * for sortable formats such as {@code yyyy-MM-dd HH:mm:ss}.
 */
@VisibleForTesting
public static boolean isDateAfterOrSame( String date1, String date2 ) {
    int comparison = date2.compareTo( date1 );
    return comparison >= 0;
}
|
@Test
public void isDateAfterOrSame_AfterTest() {
// date2 (BEFORE_DATE_STR) sorts before date1 (AFTER_DATE_STR), so the
// "after or same" check must be false.
assertFalse( TransPreviewProgressDialog.isDateAfterOrSame( AFTER_DATE_STR, BEFORE_DATE_STR ) );
}
|
/**
 * Triggers a scheduler-log dump on every active subcluster and concatenates
 * the per-subcluster results. The {@code time} parameter is the dump period
 * in seconds and must be a positive integer. Every failure path increments
 * the failure metric and writes an audit-log entry before (re)throwing.
 */
@Override
public String dumpSchedulerLogs(String time, HttpServletRequest hsr)
throws IOException {
// Step1. We will check the time parameter to
// ensure that the time parameter is not empty and greater than 0.
if (StringUtils.isBlank(time)) {
routerMetrics.incrDumpSchedulerLogsFailedRetrieved();
RouterAuditLogger.logFailure(getUser().getShortUserName(), DUMP_SCHEDULERLOGS, UNKNOWN,
TARGET_WEB_SERVICE, "Parameter error, the time is empty or null.");
throw new IllegalArgumentException("Parameter error, the time is empty or null.");
}
try {
int period = Integer.parseInt(time);
if (period <= 0) {
throw new IllegalArgumentException("time must be greater than 0.");
}
} catch (NumberFormatException e) {
// NumberFormatException is a subclass of IllegalArgumentException, so
// this catch must come first to translate the message for non-numeric input.
routerMetrics.incrDumpSchedulerLogsFailedRetrieved();
RouterAuditLogger.logFailure(getUser().getShortUserName(), DUMP_SCHEDULERLOGS, UNKNOWN,
TARGET_WEB_SERVICE, e.getLocalizedMessage());
throw new IllegalArgumentException("time must be a number.");
} catch (IllegalArgumentException e) {
// The "time must be greater than 0." case: record metrics/audit, rethrow as-is.
routerMetrics.incrDumpSchedulerLogsFailedRetrieved();
RouterAuditLogger.logFailure(getUser().getShortUserName(), DUMP_SCHEDULERLOGS, UNKNOWN,
TARGET_WEB_SERVICE, e.getLocalizedMessage());
throw e;
}
// Step2. Call dumpSchedulerLogs of each subcluster.
try {
long startTime = clock.getTime();
Collection<SubClusterInfo> subClustersActive = federationFacade.getActiveSubClusters();
// Clone the request so each concurrent subcluster call gets its own copy.
final HttpServletRequest hsrCopy = clone(hsr);
Class[] argsClasses = new Class[]{String.class, HttpServletRequest.class};
Object[] args = new Object[]{time, hsrCopy};
ClientMethod remoteMethod = new ClientMethod("dumpSchedulerLogs", argsClasses, args);
Map<SubClusterInfo, String> dumpSchedulerLogsMap = invokeConcurrent(
subClustersActive, remoteMethod, String.class);
// Concatenate per-subcluster responses into a single summary string.
StringBuilder stringBuilder = new StringBuilder();
dumpSchedulerLogsMap.forEach((subClusterInfo, msg) -> {
SubClusterId subClusterId = subClusterInfo.getSubClusterId();
stringBuilder.append("subClusterId")
.append(subClusterId).append(" : ").append(msg).append("; ");
});
long stopTime = clock.getTime();
RouterAuditLogger.logSuccess(getUser().getShortUserName(), DUMP_SCHEDULERLOGS,
TARGET_WEB_SERVICE);
routerMetrics.succeededDumpSchedulerLogsRetrieved(stopTime - startTime);
return stringBuilder.toString();
} catch (IllegalArgumentException e) {
routerMetrics.incrDumpSchedulerLogsFailedRetrieved();
RouterAuditLogger.logFailure(getUser().getShortUserName(), DUMP_SCHEDULERLOGS, UNKNOWN,
TARGET_WEB_SERVICE, e.getLocalizedMessage());
RouterServerUtil.logAndThrowRunTimeException(e,
"Unable to dump SchedulerLogs by time: %s.", time);
} catch (YarnException e) {
routerMetrics.incrDumpSchedulerLogsFailedRetrieved();
RouterAuditLogger.logFailure(getUser().getShortUserName(), DUMP_SCHEDULERLOGS, UNKNOWN,
TARGET_WEB_SERVICE, e.getLocalizedMessage());
RouterServerUtil.logAndThrowRunTimeException(e,
"dumpSchedulerLogs by time = %s error .", time);
}
// Unreachable in practice (logAndThrowRunTimeException throws), but kept
// as a defensive terminal failure path.
routerMetrics.incrDumpSchedulerLogsFailedRetrieved();
RouterAuditLogger.logFailure(getUser().getShortUserName(), DUMP_SCHEDULERLOGS, UNKNOWN,
TARGET_WEB_SERVICE, "dumpSchedulerLogs Failed.");
throw new RuntimeException("dumpSchedulerLogs Failed.");
}
|
@Test
public void testDumpSchedulerLogsError() throws Exception {
// Verifies the three argument-validation failure modes of dumpSchedulerLogs:
// empty, non-positive, and non-numeric time parameters.
HttpServletRequest mockHsr = mockHttpServletRequestByUserName("admin");
// time is empty
LambdaTestUtils.intercept(IllegalArgumentException.class,
"Parameter error, the time is empty or null.",
() -> interceptor.dumpSchedulerLogs(null, mockHsr));
// time is negative
LambdaTestUtils.intercept(IllegalArgumentException.class,
"time must be greater than 0.",
() -> interceptor.dumpSchedulerLogs("-1", mockHsr));
// time is non-numeric
LambdaTestUtils.intercept(IllegalArgumentException.class,
"time must be a number.",
() -> interceptor.dumpSchedulerLogs("abc", mockHsr));
}
|
/**
 * Sends a heartbeat to the broker and decodes the V2 result. The V2
 * capabilities (subscription change, V2 support) are negotiated via the
 * ext fields of the response rather than a dedicated request code.
 *
 * @throws MQBrokerException when the broker returns a non-success code
 */
public HeartbeatV2Result sendHeartbeatV2(
    final String addr,
    final HeartbeatData heartbeatData,
    final long timeoutMillis
) throws RemotingException, MQBrokerException, InterruptedException {
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.HEART_BEAT, new HeartbeatRequestHeader());
    request.setLanguage(clientConfig.getLanguage());
    request.setBody(heartbeatData.encode());

    RemotingCommand response = this.remotingClient.invokeSync(addr, request, timeoutMillis);
    assert response != null;

    if (response.getCode() == ResponseCode.SUCCESS) {
        // Without ext fields, fall back to "no change / no V2 support".
        if (response.getExtFields() == null) {
            return new HeartbeatV2Result(response.getVersion(), false, false);
        }
        boolean subChange = Boolean.parseBoolean(response.getExtFields().get(MixAll.IS_SUB_CHANGE));
        boolean supportV2 = Boolean.parseBoolean(response.getExtFields().get(MixAll.IS_SUPPORT_HEART_BEAT_V2));
        return new HeartbeatV2Result(response.getVersion(), subChange, supportV2);
    }
    throw new MQBrokerException(response.getCode(), response.getRemark());
}
|
@Test
public void assertSendHeartbeatV2() throws MQBrokerException, RemotingException, InterruptedException {
// With a mocked successful invokeSync response, the decoded result should
// report version 1 and default (false) V2 flags.
mockInvokeSync();
HeartbeatData heartbeatData = new HeartbeatData();
HeartbeatV2Result actual = mqClientAPI.sendHeartbeatV2(defaultBrokerAddr, heartbeatData, defaultTimeout);
assertNotNull(actual);
assertEquals(1, actual.getVersion());
assertFalse(actual.isSubChange());
assertFalse(actual.isSupportV2());
}
|
// Populates the cache from the backing services.
// NOTE(review): the call order looks deliberate — device links and hosts are
// loaded after devices, presumably because they reference them; confirm
// before reordering.
void load() {
loadClusterMembers();
loadRegions();
loadDevices();
loadDeviceLinks();
loadHosts();
}
|
@Test
public void load() {
// Loads the cache from the mocked services and checks every counter
// against the fixture data.
title("load");
cache.load();
print(cache.dumpString());
// See mock service bundle for expected values (AbstractTopoModelTest)
assertEquals("unex # cnodes", 3, cache.clusterMemberCount());
assertEquals("unex # regions", 3, cache.regionCount());
assertEquals("unex # devices", 9, cache.deviceCount());
assertEquals("unex # hosts", 18, cache.hostCount());
assertEquals("unex # device-links", 8, cache.deviceLinkCount());
assertEquals("unex # edge-links", 18, cache.edgeLinkCount());
// Synthetic links are computed on demand, so none exist right after load.
assertEquals("unex # synth-links", 0, cache.synthLinkCount());
}
|
/**
 * Maps a JCA message-digest algorithm name to its model representation.
 * Several spellings (with/without dash) of the same digest are accepted;
 * unknown names yield an empty Optional.
 */
@Nonnull
@Override
public Optional<MessageDigest> parse(
        @Nullable final String str, @Nonnull DetectionLocation detectionLocation) {
    if (str == null) {
        return Optional.empty();
    }
    // Normalise the algorithm name once before dispatching.
    final String algorithm = str.toUpperCase().trim();
    return switch (algorithm) {
        case "MD2" -> Optional.of(new MD2(detectionLocation));
        case "MD5" -> Optional.of(new MD5(detectionLocation));
        case "SHA", "SHA1", "SHA-1" -> Optional.of(new SHA(detectionLocation));
        case "SHA-224", "SHA224" -> Optional.of(new SHA2(224, detectionLocation));
        case "SHA-256", "SHA256" -> Optional.of(new SHA2(256, detectionLocation));
        case "SHA-384", "SHA384" -> Optional.of(new SHA2(384, detectionLocation));
        case "SHA-512", "SHA512" -> Optional.of(new SHA2(512, detectionLocation));
        // Truncated SHA-512 variants are modelled as SHA2 with a SHA2(512) pre-image.
        case "SHA-512/224", "SHA512/224" ->
                Optional.of(new SHA2(224, new SHA2(512, detectionLocation), detectionLocation));
        case "SHA-512/256", "SHA512/256" ->
                Optional.of(new SHA2(256, new SHA2(512, detectionLocation), detectionLocation));
        case "SHA3-224" -> Optional.of(new SHA3(224, detectionLocation));
        case "SHA3-256" -> Optional.of(new SHA3(256, detectionLocation));
        case "SHA3-384" -> Optional.of(new SHA3(384, detectionLocation));
        case "SHA3-512" -> Optional.of(new SHA3(512, detectionLocation));
        default -> Optional.empty();
    };
}
|
@Test
void base() {
// Parses a SHA3-224 name and checks both the mapped model name and the
// reported digest size in bits.
DetectionLocation testDetectionLocation =
new DetectionLocation("testfile", 1, 1, List.of("test"), () -> "SSL");
JcaMessageDigestMapper jcaMessageDigestMapper = new JcaMessageDigestMapper();
Optional<MessageDigest> messageDigestOptional =
jcaMessageDigestMapper.parse("SHA3-224", testDetectionLocation);
assertThat(messageDigestOptional).isPresent();
assertThat(messageDigestOptional.get().getName()).isEqualTo("SHA3-224");
assertThat(messageDigestOptional.get().getDigestSize()).isPresent();
assertThat(messageDigestOptional.get().getDigestSize().get().getValue()).isEqualTo(224);
}
|
/**
 * Parses natural-language date expressions from the given text and wraps
 * natty's results in this library's DateGroup implementation.
 */
public List<DateGroup> parseSyntax(String language)
{
    // Convert number words ("three") to digits before handing the text to natty.
    String normalized = words2numbers(language);
    List<com.joestelmach.natty.DateGroup> groups = parser.parse(normalized);

    // Capture a single reference instant so every group is interpreted
    // relative to the same "now".
    Date referenceTime = new Date();
    List<DateGroup> result = new ArrayList<DateGroup>();
    for (com.joestelmach.natty.DateGroup group : groups) {
        result.add(new DateGroupImpl(referenceTime, group));
    }
    return result;
}
|
@Test
public void testParseSyntax()
{
// Parses a relative date expression and checks the formatted result plus
// the group's positional and recurrence metadata.
List<DateGroup> parse = new PrettyTimeParser().parseSyntax("I did it three days ago");
Assert.assertFalse(parse.isEmpty());
String formatted = new PrettyTime(Locale.ENGLISH).format(parse.get(0).getDates().get(0));
Assert.assertEquals("3 days ago", formatted);
// "three days ago" starts on line 1 at character position 10 of the input.
Assert.assertEquals(1, parse.get(0).getLine());
Assert.assertEquals(10, parse.get(0).getPosition());
Assert.assertEquals(1, parse.get(0).getDates().size());
// A one-off (non-recurring) expression has no recurrence metadata.
Assert.assertNull(parse.get(0).getRecursUntil());
Assert.assertFalse(parse.get(0).isRecurring());
Assert.assertEquals(-1, parse.get(0).getRecurInterval());
}
|
/**
 * Handles an image-available event from the driver. The owning subscription
 * is looked up by its registration id; events for unknown registrations are
 * silently ignored. For a known subscription a new Image is constructed,
 * added to the subscription, and the subscription's AvailableImageHandler
 * (if any) is invoked. Exceptions thrown by the user handler are routed to
 * handleError rather than propagated.
 */
void onAvailableImage(
final long correlationId,
final int sessionId,
final long subscriptionRegistrationId,
final int subscriberPositionId,
final String logFileName,
final String sourceIdentity)
{
final Subscription subscription = (Subscription)resourceByRegIdMap.get(subscriptionRegistrationId);
if (null != subscription)
{
final Image image = new Image(
subscription,
sessionId,
new UnsafeBufferPosition(counterValuesBuffer, subscriberPositionId),
logBuffers(correlationId, logFileName, subscription.channel()),
ctx.subscriberErrorHandler(),
sourceIdentity,
correlationId);
subscription.addImage(image);
final AvailableImageHandler handler = subscription.availableImageHandler();
if (null != handler)
{
// isInCallback is set while user code runs — presumably checked
// elsewhere to reject reentrant client calls; confirm at the usage sites.
isInCallback = true;
try
{
handler.onAvailableImage(image);
}
catch (final Exception ex)
{
handleError(ex);
}
finally
{
isInCallback = false;
}
}
}
}
|
@Test
void shouldIgnoreUnknownNewImage()
{
// An image event whose registration id does not match any subscription
// must be dropped without mapping log buffers or notifying handlers.
conductor.onAvailableImage(
CORRELATION_ID_2,
SESSION_ID_2,
SUBSCRIPTION_POSITION_REGISTRATION_ID,
SUBSCRIPTION_POSITION_ID,
SESSION_ID_2 + "-log",
SOURCE_INFO);
verify(logBuffersFactory, never()).map(anyString());
verify(mockAvailableImageHandler, never()).onAvailableImage(any(Image.class));
}
|
/**
 * Maps a SeaTunnel column definition back to a SAP HANA column type
 * definition. Precision/scale for DECIMAL and TIMESTAMP are clamped into
 * HANA's supported ranges, with a warning logged for every adjustment.
 * Unsupported SQL types raise a convert-to-connector-type error.
 */
@Override
public BasicTypeDefine reconvert(Column column) {
BasicTypeDefine.BasicTypeDefineBuilder builder =
BasicTypeDefine.builder()
.name(column.getName())
.nullable(column.isNullable())
.comment(column.getComment())
.defaultValue(column.getDefaultValue());
switch (column.getDataType().getSqlType()) {
case BOOLEAN:
builder.columnType(HANA_BOOLEAN);
builder.dataType(HANA_BOOLEAN);
builder.length(2L);
break;
case TINYINT:
builder.columnType(HANA_TINYINT);
builder.dataType(HANA_TINYINT);
break;
case SMALLINT:
builder.columnType(HANA_SMALLINT);
builder.dataType(HANA_SMALLINT);
break;
case INT:
builder.columnType(HANA_INTEGER);
builder.dataType(HANA_INTEGER);
break;
case BIGINT:
builder.columnType(HANA_BIGINT);
builder.dataType(HANA_BIGINT);
break;
case FLOAT:
builder.columnType(HANA_REAL);
builder.dataType(HANA_REAL);
break;
case DOUBLE:
builder.columnType(HANA_DOUBLE);
builder.dataType(HANA_DOUBLE);
break;
case DECIMAL:
// Clamp precision first (non-positive -> defaults; too large -> MAX,
// shrinking scale by the same amount), then clamp scale into [0, MAX_SCALE].
DecimalType decimalType = (DecimalType) column.getDataType();
long precision = decimalType.getPrecision();
int scale = decimalType.getScale();
if (precision <= 0) {
precision = DEFAULT_PRECISION;
scale = DEFAULT_SCALE;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which is precision less than 0, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
precision,
scale);
} else if (precision > MAX_PRECISION) {
scale = (int) Math.max(0, scale - (precision - MAX_PRECISION));
precision = MAX_PRECISION;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which exceeds the maximum precision of {}, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
MAX_PRECISION,
precision,
scale);
}
if (scale < 0) {
scale = 0;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which is scale less than 0, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
precision,
scale);
} else if (scale > MAX_SCALE) {
scale = MAX_SCALE;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which exceeds the maximum scale of {}, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
MAX_SCALE,
precision,
scale);
}
builder.columnType(String.format("%s(%s,%s)", HANA_DECIMAL, precision, scale));
builder.dataType(HANA_DECIMAL);
builder.precision(precision);
builder.scale(scale);
break;
case BYTES:
builder.columnType(HANA_BLOB);
builder.dataType(HANA_BLOB);
break;
case STRING:
// Short (or unspecified-length) strings map to NVARCHAR; anything
// longer than the NVARCHAR maximum falls back to CLOB.
if (column.getColumnLength() == null
|| column.getColumnLength() <= MAX_NVARCHAR_LENGTH) {
builder.columnType(HANA_NVARCHAR);
builder.dataType(HANA_NVARCHAR);
builder.length(
column.getColumnLength() == null
? MAX_NVARCHAR_LENGTH
: column.getColumnLength());
} else {
builder.columnType(HANA_CLOB);
builder.dataType(HANA_CLOB);
}
break;
case DATE:
builder.columnType(HANA_DATE);
builder.dataType(HANA_DATE);
break;
case TIME:
builder.columnType(HANA_TIME);
builder.dataType(HANA_TIME);
break;
case TIMESTAMP:
// Without sub-second precision use SECONDDATE; otherwise TIMESTAMP
// with the scale clamped to HANA's maximum.
if (column.getScale() == null || column.getScale() <= 0) {
builder.columnType(HANA_SECONDDATE);
builder.dataType(HANA_SECONDDATE);
} else {
int timestampScale = column.getScale();
if (column.getScale() > MAX_TIMESTAMP_SCALE) {
timestampScale = MAX_TIMESTAMP_SCALE;
log.warn(
"The timestamp column {} type timestamp({}) is out of range, "
+ "which exceeds the maximum scale of {}, "
+ "it will be converted to timestamp({})",
column.getName(),
column.getScale(),
MAX_TIMESTAMP_SCALE,
timestampScale);
}
builder.columnType(HANA_TIMESTAMP);
builder.dataType(HANA_TIMESTAMP);
builder.scale(timestampScale);
}
break;
default:
throw CommonError.convertToConnectorTypeError(
DatabaseIdentifier.SAP_HANA,
column.getDataType().getSqlType().name(),
column.getName());
}
BasicTypeDefine typeDefine = builder.build();
// Append "(length[,scale])" to the column type where applicable.
typeDefine.setColumnType(
appendColumnSizeIfNeed(
typeDefine.getColumnType(), typeDefine.getLength(), typeDefine.getScale()));
return typeDefine;
}
|
@Test
public void testReconvertLong() {
    // A LONG source column must map to HANA BIGINT for both the column type
    // and the data type, and the column name must carry over unchanged.
    Column column = PhysicalColumn.builder().name("test").dataType(BasicType.LONG_TYPE).build();
    BasicTypeDefine typeDefine = SapHanaTypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(SapHanaTypeConverter.HANA_BIGINT, typeDefine.getDataType());
    Assertions.assertEquals(SapHanaTypeConverter.HANA_BIGINT, typeDefine.getColumnType());
}
|
@Override
public void upgrade() {
    // Idempotency guard: do nothing if this migration already ran.
    if (clusterConfigService.get(MigrationCompleted.class) != null) {
        LOG.debug("Migration already completed.");
        return;
    }
    final LegacyAWSPluginConfiguration legacyConfiguration =
            clusterConfigService.get(CLUSTER_CONFIG_TYPE, LegacyAWSPluginConfiguration.class);
    final boolean hasSecret = legacyConfiguration != null
            && !Strings.isNullOrEmpty(legacyConfiguration.secretKey());
    if (hasSecret) {
        // Convert the legacy config (encrypting its secret key) and persist it
        // under the same cluster-config type.
        final AWSPluginConfiguration migratedPluginConfiguration =
                AWSPluginConfiguration.fromLegacyConfig(legacyConfiguration, systemConfiguration);
        clusterConfigService.write(CLUSTER_CONFIG_TYPE, migratedPluginConfiguration);
    }
    // Record completion regardless, so the guard above short-circuits next time.
    clusterConfigService.write(MigrationCompleted.create());
}
|
@Test
public void doesNotDoAnyThingForExistingPluginConfigWithoutSecretKey() {
    // Legacy config exists but has an empty secret key: the migration must not
    // rewrite the plugin config, only record its own completion marker.
    final V20200505121200_EncryptAWSSecretKey.LegacyAWSPluginConfiguration legacyConfig =
            V20200505121200_EncryptAWSSecretKey.LegacyAWSPluginConfiguration.create(
                    true, "lookupRegions", "something", "", true);
    mockExistingConfig(legacyConfig);
    this.migration.upgrade();
    verify(clusterConfigService, times(1)).write(any(V20200505121200_EncryptAWSSecretKey.MigrationCompleted.class));
    verify(clusterConfigService, never()).write(anyString(), any());
}
|
@Override
public void appendEdge(E edge) {
    checkNotNull(edge, "Edge cannot be null");
    // A non-empty path may only be extended by an edge departing from its
    // current destination; the very first edge is always accepted.
    final boolean contiguous = edges.isEmpty() || dst().equals(edge.src());
    checkArgument(contiguous, "Edge source must be the same as the current path destination");
    edges.add(edge);
}
|
@Test
public void appendEdge() {
    // Build A->B->C one edge at a time and verify the resulting two-hop path.
    MutablePath<TestVertex, TestEdge> path = new DefaultMutablePath<>();
    for (TestEdge edge : new TestEdge[]{new TestEdge(A, B), new TestEdge(B, C)}) {
        path.appendEdge(edge);
    }
    validatePath(path, A, C, 2);
}
|
@Operation(summary = "Get single organization")
@GetMapping(value = "{id}", produces = "application/json")
@ResponseBody
public Organization getById(@PathVariable("id") Long id) {
    // Thin pass-through to the service layer; JSON serialization is done by Spring.
    final Organization organization = organizationService.getOrganizationById(id);
    return organization;
}
|
@Test
public void getOrganizationById() {
    // Stub the service and verify the controller passes its result straight through.
    Organization expected = newOrganization();
    when(organizationServiceMock.getOrganizationById(1L)).thenReturn(expected);
    Organization result = controllerMock.getById(1L);
    assertNotNull(result);
    assertEquals(newOrganization().getName(), result.getName());
    verify(organizationServiceMock, times(1)).getOrganizationById(anyLong());
}
|
@Override
public void execute(Exchange exchange) throws SmppException {
    // One Camel exchange may expand into several SubmitMulti PDUs (segmentation
    // is handled by createSubmitMulti); each PDU is sent individually below.
    SubmitMulti[] submitMulties = createSubmitMulti(exchange);
    List<SubmitMultiResult> results = new ArrayList<>(submitMulties.length);
    for (SubmitMulti submitMulti : submitMulties) {
        SubmitMultiResult result;
        if (log.isDebugEnabled()) {
            log.debug("Sending multiple short messages for exchange id '{}'...", exchange.getExchangeId());
        }
        try {
            // Forward every PDU field to the jSMPP session; raw byte fields are
            // wrapped into their jSMPP value types here.
            result = session.submitMultiple(
                    submitMulti.getServiceType(),
                    TypeOfNumber.valueOf(submitMulti.getSourceAddrTon()),
                    NumberingPlanIndicator.valueOf(submitMulti.getSourceAddrNpi()),
                    submitMulti.getSourceAddr(),
                    (Address[]) submitMulti.getDestAddresses(),
                    new ESMClass(submitMulti.getEsmClass()),
                    submitMulti.getProtocolId(),
                    submitMulti.getPriorityFlag(),
                    submitMulti.getScheduleDeliveryTime(),
                    submitMulti.getValidityPeriod(),
                    new RegisteredDelivery(submitMulti.getRegisteredDelivery()),
                    new ReplaceIfPresentFlag(submitMulti.getReplaceIfPresentFlag()),
                    DataCodings.newInstance(submitMulti.getDataCoding()),
                    submitMulti.getSmDefaultMsgId(),
                    submitMulti.getShortMessage(),
                    submitMulti.getOptionalParameters());
            results.add(result);
        } catch (Exception e) {
            // Wrap any session/protocol failure in the component's checked exception.
            // Note: a failure mid-loop discards results of PDUs already sent.
            throw new SmppException(e);
        }
    }
    if (log.isDebugEnabled()) {
        log.debug("Sent multiple short messages for exchange id '{}' and received results '{}'", exchange.getExchangeId(),
                results);
    }
    List<String> messageIDs = new ArrayList<>(results.size());
    // {messageID : [{destAddr : address, error : errorCode}]}
    Map<String, List<Map<String, Object>>> errors = new HashMap<>();
    for (SubmitMultiResult result : results) {
        // Collect per-destination delivery failures, keyed by the SMSC message id.
        UnsuccessDelivery[] deliveries = result.getUnsuccessDeliveries();
        if (deliveries != null) {
            List<Map<String, Object>> undelivered = new ArrayList<>();
            for (UnsuccessDelivery delivery : deliveries) {
                Map<String, Object> error = new HashMap<>();
                error.put(SmppConstants.DEST_ADDR, delivery.getDestinationAddress().getAddress());
                error.put(SmppConstants.ERROR, delivery.getErrorStatusCode());
                undelivered.add(error);
            }
            if (!undelivered.isEmpty()) {
                errors.put(result.getMessageId(), undelivered);
            }
        }
        messageIDs.add(result.getMessageId());
    }
    // Publish the outcome on the result message headers; ERROR is only set when
    // at least one destination failed.
    Message message = ExchangeHelper.getResultMessage(exchange);
    message.setHeader(SmppConstants.ID, messageIDs);
    message.setHeader(SmppConstants.SENT_MESSAGE_COUNT, messageIDs.size());
    if (!errors.isEmpty()) {
        message.setHeader(SmppConstants.ERROR, errors);
    }
}
|
@Test
public void executeWithOptionalParameterNewStyle() throws Exception {
    // Configure a SubmitMulti command with a single destination and a full set
    // of PDU headers, plus optional parameters given in the "new style" map form.
    Exchange exchange = new DefaultExchange(new DefaultCamelContext(), ExchangePattern.InOut);
    exchange.getIn().setHeader(SmppConstants.COMMAND, "SubmitMulti");
    exchange.getIn().setHeader(SmppConstants.ID, "1");
    exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR_TON, TypeOfNumber.NATIONAL.value());
    exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR_NPI, NumberingPlanIndicator.NATIONAL.value());
    exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR, "1818");
    exchange.getIn().setHeader(SmppConstants.DEST_ADDR_TON, TypeOfNumber.INTERNATIONAL.value());
    exchange.getIn().setHeader(SmppConstants.DEST_ADDR_NPI, NumberingPlanIndicator.INTERNET.value());
    exchange.getIn().setHeader(SmppConstants.DEST_ADDR, Arrays.asList("1919"));
    exchange.getIn().setHeader(SmppConstants.SCHEDULE_DELIVERY_TIME, new Date(1111111));
    exchange.getIn().setHeader(SmppConstants.VALIDITY_PERIOD, new Date(2222222));
    exchange.getIn().setHeader(SmppConstants.PROTOCOL_ID, (byte) 1);
    exchange.getIn().setHeader(SmppConstants.PRIORITY_FLAG, (byte) 2);
    exchange.getIn().setHeader(SmppConstants.REGISTERED_DELIVERY,
            new RegisteredDelivery(SMSCDeliveryReceipt.FAILURE).value());
    exchange.getIn().setHeader(SmppConstants.REPLACE_IF_PRESENT_FLAG, ReplaceIfPresentFlag.REPLACE.value());
    // Keys are raw tag values; LinkedHashMap keeps the order stable so the
    // stubbed varargs expectation below matches positionally.
    Map<Short, Object> optionalParameters = new LinkedHashMap<>();
    // standard optional parameter
    optionalParameters.put((short) 0x0202, "1292".getBytes("UTF-8"));
    optionalParameters.put((short) 0x001D, "urgent");
    optionalParameters.put((short) 0x0005, Byte.valueOf("4"));
    optionalParameters.put((short) 0x0008, (short) 2);
    optionalParameters.put((short) 0x0017, 3600000);
    optionalParameters.put((short) 0x130C, null);
    // vendor specific optional parameter
    optionalParameters.put((short) 0x2150, "0815".getBytes("UTF-8"));
    optionalParameters.put((short) 0x2151, "0816");
    optionalParameters.put((short) 0x2152, Byte.valueOf("6"));
    optionalParameters.put((short) 0x2153, (short) 9);
    optionalParameters.put((short) 0x2154, 7400000);
    optionalParameters.put((short) 0x2155, null);
    exchange.getIn().setHeader(SmppConstants.OPTIONAL_PARAMETER, optionalParameters);
    exchange.getIn().setBody("short message body");
    // Expect each header to map 1:1 onto the jSMPP call, with the value type of
    // every optional parameter (byte[] / String / Byte / Short / Integer / null)
    // selecting the matching OptionalParameter subclass.
    when(session.submitMultiple(eq("CMT"), eq(TypeOfNumber.NATIONAL), eq(NumberingPlanIndicator.NATIONAL), eq("1818"),
            eq(new Address[] { new Address(TypeOfNumber.INTERNATIONAL, NumberingPlanIndicator.INTERNET, "1919") }),
            eq(new ESMClass()), eq((byte) 1), eq((byte) 2), eq("-300101001831100+"), eq("-300101003702200+"),
            eq(new RegisteredDelivery(SMSCDeliveryReceipt.FAILURE)),
            eq(ReplaceIfPresentFlag.REPLACE), eq(DataCodings.newInstance((byte) 0)), eq((byte) 0),
            eq("short message body".getBytes()),
            eq(new OptionalParameter.OctetString(Tag.SOURCE_SUBADDRESS, "1292")),
            eq(new OptionalParameter.COctetString(Tag.ADDITIONAL_STATUS_INFO_TEXT.code(), "urgent")),
            eq(new OptionalParameter.Byte(Tag.DEST_ADDR_SUBUNIT, (byte) 4)),
            eq(new OptionalParameter.Short(Tag.DEST_TELEMATICS_ID.code(), (short) 2)),
            eq(new OptionalParameter.Int(Tag.QOS_TIME_TO_LIVE, 3600000)),
            eq(new OptionalParameter.Null(Tag.ALERT_ON_MESSAGE_DELIVERY)),
            eq(new OptionalParameter.OctetString((short) 0x2150, "1292", "UTF-8")),
            eq(new OptionalParameter.COctetString((short) 0x2151, "0816")),
            eq(new OptionalParameter.Byte((short) 0x2152, (byte) 6)),
            eq(new OptionalParameter.Short((short) 0x2153, (short) 9)),
            eq(new OptionalParameter.Int((short) 0x2154, 7400000)),
            eq(new OptionalParameter.Null((short) 0x2155))))
            .thenReturn(new SubmitMultiResult("1", null, null));
    command.execute(exchange);
    // One message id returned, no unsuccessful deliveries -> no ERROR header.
    assertEquals(Collections.singletonList("1"), exchange.getMessage().getHeader(SmppConstants.ID));
    assertEquals(1, exchange.getMessage().getHeader(SmppConstants.SENT_MESSAGE_COUNT));
    assertNull(exchange.getMessage().getHeader(SmppConstants.ERROR));
}
|
/**
 * Runs {@code callable}, retrying with this manager's retry settings whenever it
 * throws one of the intercepted exception types; other exceptions propagate.
 *
 * @param callable the operation to run (and possibly re-run)
 * @param exceptionsToIntercept exception classes that should trigger a retry
 * @return the value returned by the first successful invocation
 */
public <V> V retryCallable(
    Callable<V> callable, Set<Class<? extends Exception>> exceptionsToIntercept) {
  return RetryHelper.runWithRetries(
      callable,
      getRetrySettings(),
      getExceptionHandlerForExceptions(exceptionsToIntercept),
      NanoClock.getDefaultClock());
}
|
@Test
public void testRetryCallable_ReturnsExpected() {
    // The callable fails once with MyException, then succeeds on attempt 2; the
    // retry manager should swallow the first failure and return the second value.
    AtomicInteger attempts = new AtomicInteger(0);
    Callable<Integer> incrementingFunction = () -> {
        final int count = attempts.incrementAndGet();
        if (count < 2) {
            throw new MyException();
        }
        return count;
    };
    Integer result =
        retryCallableManager.retryCallable(
            incrementingFunction, ImmutableSet.of(MyException.class));
    assertEquals(String.format("Should return 2, instead returned %d.", result), 2, (int) result);
}
|
Set<SourceName> analyzeExpression(
    final Expression expression,
    final String clauseType
) {
  // Walk the expression tree, validating column references for the given clause
  // type, and report every source the expression references.
  final Validator validator = new Validator(clauseType);
  validator.process(expression, null);
  return validator.referencedSources;
}
|
@Test
public void shouldThrowOnPossibleSyntheticKeyColumnIfQualifiedColumnReference() {
    // Given: a join (where ROWKEY could be a synthetic key), but the reference is
    // qualified with a source name, so it must resolve against that source only.
    when(sourceSchemas.isJoin()).thenReturn(true);
    final Expression notSyntheticKey = new QualifiedColumnReferenceExp(
        SourceName.of("Bob"), ColumnName.of("ROWKEY")
    );
    // When:
    final Exception e = assertThrows(
        UnknownColumnException.class,
        () -> analyzer.analyzeExpression(notSyntheticKey, "SELECT")
    );
    // Then: the error names both the clause and the unresolved qualified column.
    assertThat(e.getMessage(), containsString(
        "SELECT column 'Bob.ROWKEY' cannot be resolved."));
}
|
/**
 * Renders the event through the converter chain; a layout that has not been
 * started (or has been stopped) produces the empty string.
 */
public String doLayout(ILoggingEvent event) {
    return isStarted() ? writeLoopOnConverters(event) : CoreConstants.EMPTY_STRING;
}
|
@Test
public void contextNameTest() {
    // %contextName should render the logger context's configured name.
    final String expectedName = "aValue";
    loggerContext.setName(expectedName);
    pl.setPattern("%contextName");
    pl.start();
    assertEquals(expectedName, pl.doLayout(getEventObject()));
}
|
/**
 * Entry point for building a composed combine function; callers chain
 * {@code with(...)} calls on the returned builder.
 */
public static ComposeCombineFnBuilder compose() {
  return new ComposeCombineFnBuilder();
}
|
@Test
public void testDuplicatedTags() {
    // Registering the same TupleTag twice in one composition must be rejected.
    expectedException.expect(IllegalArgumentException.class);
    expectedException.expectMessage("it is already present in the composition");
    TupleTag<Integer> duplicateTag = new TupleTag<>();
    CombineFns.compose()
        .with(new GetIntegerFunction(), Max.ofIntegers(), duplicateTag)
        .with(new GetIntegerFunction(), Min.ofIntegers(), duplicateTag);
}
|
/**
 * Builds fetch requests for the currently fetchable partitions and hands them to
 * the internal poll loop, wiring in the success/failure response handlers.
 *
 * NOTE(review): {@code currentTimeMs} is unused in this body — presumably
 * required by the pollable interface; confirm.
 */
@Override
public PollResult poll(long currentTimeMs) {
    return pollInternal(
        prepareFetchRequests(),
        this::handleFetchSuccess,
        this::handleFetchFailure
    );
}
|
@Test
public void testClearBufferedDataForTopicPartitions() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    // normal fetch
    assertEquals(1, sendFetches());
    assertFalse(fetcher.hasCompletedFetches());
    // Deliver a full fetch response so the fetcher buffers completed data for tp0.
    client.prepareResponse(fullFetchResponse(tidp0, records, Errors.NONE, 100L, 0));
    networkClientDelegate.poll(time.timer(0));
    assertTrue(fetcher.hasCompletedFetches());
    // New assignment contains only tp1: buffered data for the now-unassigned tp0
    // must be discarded, leaving no completed fetches.
    Set<TopicPartition> newAssignedTopicPartitions = new HashSet<>();
    newAssignedTopicPartitions.add(tp1);
    fetcher.clearBufferedDataForUnassignedPartitions(newAssignedTopicPartitions);
    assertFalse(fetcher.hasCompletedFetches());
}
|
/**
 * Strips the "!" separator of Spring Boot nested-jar resource paths
 * (e.g. "BOOT-INF/classes!/...") so the path reads as a plain resource path;
 * any other path is returned unchanged.
 */
public static String normalizeSpringBootResourceUrlPath(String resourceUrlPath) {
    if (!resourceUrlPath.startsWith(SPRING_BOOT_URL_PREFIX)) {
        return resourceUrlPath;
    }
    return resourceUrlPath.replace(SPRING_BOOT_URL_PREFIX, SPRING_BOOT_PREFIX); // Remove "!"
}
|
@Test
public void normalizeSpringBootResourceUrlPath() {
    // The "!" marker of a nested Spring Boot classpath entry must be stripped.
    final String raw = "BOOT-INF/classes!/org/example/MyClass.class";
    final String expected = "BOOT-INF/classes/org/example/MyClass.class";
    assertThat(JarUtils.normalizeSpringBootResourceUrlPath(raw)).isEqualTo(expected);
}
|
@Override
public Map<String, String> findBundlesToSplit(final LoadData loadData, final PulsarService pulsar) {
    // The result map and the per-namespace split counters are instance state
    // reused across invocations; reset them first.
    bundleCache.clear();
    namespaceBundleCount.clear();
    final ServiceConfiguration conf = pulsar.getConfiguration();
    // Split thresholds, all taken from the broker configuration.
    int maxBundleCount = conf.getLoadBalancerNamespaceMaximumBundles();
    long maxBundleTopics = conf.getLoadBalancerNamespaceBundleMaxTopics();
    long maxBundleSessions = conf.getLoadBalancerNamespaceBundleMaxSessions();
    long maxBundleMsgRate = conf.getLoadBalancerNamespaceBundleMaxMsgRate();
    long maxBundleBandwidth = conf.getLoadBalancerNamespaceBundleMaxBandwidthMbytes() * LoadManagerShared.MIBI;
    loadData.getBrokerData().forEach((broker, brokerData) -> {
        LocalBrokerData localData = brokerData.getLocalData();
        for (final Map.Entry<String, NamespaceBundleStats> entry : localData.getLastStats().entrySet()) {
            final String bundle = entry.getKey();
            final NamespaceBundleStats stats = entry.getValue();
            // A bundle hosting a single topic cannot be split any further.
            if (stats.topics < 2) {
                if (log.isDebugEnabled()) {
                    log.debug("The count of topics on the bundle {} is less than 2, skip split!", bundle);
                }
                continue;
            }
            double totalMessageRate = 0;
            double totalMessageThroughput = 0;
            // Attempt to consider long-term message data, otherwise effectively ignore.
            if (loadData.getBundleData().containsKey(bundle)) {
                final TimeAverageMessageData longTermData = loadData.getBundleData().get(bundle).getLongTermData();
                totalMessageRate = longTermData.totalMsgRate();
                totalMessageThroughput = longTermData.totalMsgThroughput();
            }
            // Split when any threshold is exceeded; the session limit only applies
            // when it is enabled (maxBundleSessions > 0).
            if (stats.topics > maxBundleTopics || (maxBundleSessions > 0 && (stats.consumerCount
                    + stats.producerCount > maxBundleSessions))
                    || totalMessageRate > maxBundleMsgRate || totalMessageThroughput > maxBundleBandwidth) {
                final String namespace = LoadManagerShared.getNamespaceNameFromBundleName(bundle);
                try {
                    final int bundleCount = pulsar.getNamespaceService()
                            .getBundleCount(NamespaceName.get(namespace));
                    // Respect the per-namespace bundle cap, counting splits already
                    // scheduled during this pass via namespaceBundleCount.
                    if ((bundleCount + namespaceBundleCount.getOrDefault(namespace, 0))
                            < maxBundleCount) {
                        // NOTE(review): the message says "unload" but this task
                        // selects bundles to *split* — wording looks copy-pasted;
                        // confirm before relying on it in log scraping.
                        log.info("The bundle {} is considered to be unload. Topics: {}/{}, Sessions: ({}+{})/{}, "
                                        + "Message Rate: {}/{} (msgs/s), Message Throughput: {}/{} (MB/s)",
                                bundle, stats.topics, maxBundleTopics, stats.producerCount, stats.consumerCount,
                                maxBundleSessions, totalMessageRate, maxBundleMsgRate,
                                totalMessageThroughput / LoadManagerShared.MIBI,
                                maxBundleBandwidth / LoadManagerShared.MIBI);
                        bundleCache.put(bundle, broker);
                        int bundleNum = namespaceBundleCount.getOrDefault(namespace, 0);
                        namespaceBundleCount.put(namespace, bundleNum + 1);
                    } else {
                        if (log.isDebugEnabled()) {
                            log.debug(
                                    "Could not split namespace bundle {} because namespace {} has too many bundles:"
                                            + "{}", bundle, namespace, bundleCount);
                        }
                    }
                } catch (Exception e) {
                    // Best-effort: a metadata lookup failure only skips this bundle.
                    log.warn("Error while getting bundle count for namespace {}", namespace, e);
                }
            }
        }
    });
    // Map of bundle name -> broker that currently reports it.
    return bundleCache;
}
|
@Test
public void testSplitTaskWhenTopicJustOne() {
    final BundleSplitterTask bundleSplitterTask = new BundleSplitterTask();
    LoadData loadData = new LoadData();
    LocalBrokerData brokerData = new LocalBrokerData();
    Map<String, NamespaceBundleStats> lastStats = new HashMap<>();
    // A bundle with a single topic can never be split, even when its message
    // rate exceeds the configured maximum (set below).
    final NamespaceBundleStats namespaceBundleStats = new NamespaceBundleStats();
    namespaceBundleStats.topics = 1;
    lastStats.put("ten/ns/0x00000000_0x80000000", namespaceBundleStats);
    brokerData.setLastStats(lastStats);
    loadData.getBrokerData().put("broker", new BrokerData(brokerData));
    BundleData bundleData = new BundleData();
    TimeAverageMessageData averageMessageData = new TimeAverageMessageData();
    // Push the long-term rate over the threshold so only the topic-count guard
    // can prevent the split.
    averageMessageData.setMsgRateIn(pulsar.getConfiguration().getLoadBalancerNamespaceBundleMaxMsgRate());
    averageMessageData.setMsgRateOut(1);
    bundleData.setLongTermData(averageMessageData);
    loadData.getBundleData().put("ten/ns/0x00000000_0x80000000", bundleData);
    final Map<String, String> bundlesToSplit = bundleSplitterTask.findBundlesToSplit(loadData, pulsar);
    Assert.assertEquals(bundlesToSplit.size(), 0);
}
|
@Override
public String getFileId(final DriveItem.Metadata metadata) {
    // File ids are "driveId/itemId" tuples joined by the path delimiter; shared
    // ("remote") items additionally carry the remote drive/item pair so both
    // sides of the share can be resolved.
    final ItemReference parent = metadata.getParentReference();
    final DriveItem.Metadata remote = metadata.getRemoteItem();
    final String delimiter = String.valueOf(Path.DELIMITER);
    if (remote == null) {
        // NOTE(review): assumes parent is non-null for non-remote items — confirm.
        return String.join(delimiter, parent.getDriveId(), metadata.getId());
    }
    final ItemReference remoteParent = remote.getParentReference();
    if (parent == null) {
        // No local parent: only the remote pair identifies the item.
        return String.join(delimiter, remoteParent.getDriveId(), remote.getId());
    }
    return String.join(delimiter,
            parent.getDriveId(), metadata.getId(),
            remoteParent.getDriveId(), remote.getId());
}
|
@Test
public void testSharedFolderIdInSharedWithMeDrive() throws Exception {
    // Parse a shared-with-me fixture and verify the composite file id contains
    // both the local and the remote drive/item pairs.
    final DriveItem.Metadata metadata;
    try (final InputStream test = getClass().getResourceAsStream("/SharedFolderIdInSharedWithMeDrive.json")) {
        metadata = DriveItem.parseJson(session.getClient(), (JsonObject) Json.parse(new InputStreamReader(test)));
    }
    assertEquals("ParentDriveId/MyId/RemoteParentDriveId/RemoteId", session.getFileId(metadata));
}
|
/**
 * Registers the immutable client-level "application-id" metric (INFO recording
 * level) exposing the given application id.
 *
 * @param streamsMetrics metrics registry to register with
 * @param applicationId value reported by the metric
 */
public static void addApplicationIdMetric(final StreamsMetricsImpl streamsMetrics, final String applicationId) {
    streamsMetrics.addClientLevelImmutableMetric(
        APPLICATION_ID,
        APPLICATION_ID_DESCRIPTION,
        RecordingLevel.INFO,
        applicationId
    );
}
|
@Test
public void shouldAddApplicationIdMetric() {
    // Name and description are fixed by ClientMetrics; only the value varies.
    final String applicationId = "thisIsAnID";
    setUpAndVerifyImmutableMetric(
        "application-id",
        "The application ID of the Kafka Streams client",
        applicationId,
        () -> ClientMetrics.addApplicationIdMetric(streamsMetrics, applicationId)
    );
}
|
/**
 * Builds the rake command-line arguments: an optional quoted {@code -f <file>}
 * flag (build file path normalized to forward slashes) followed by the optional
 * target, joined by single spaces.
 *
 * @return the space-joined argument string; empty when neither field is set
 */
@Override
public String arguments() {
    ArrayList<String> args = new ArrayList<>();
    if (buildFile != null) {
        // Normalize separators so the generated command line is platform independent.
        args.add("-f \"" + FilenameUtils.separatorsToUnix(buildFile) + "\"");
    }
    if (target != null) {
        args.add(target);
    }
    // JDK String.join replaces the commons-lang StringUtils.join dependency;
    // output is identical for a space separator.
    return String.join(" ", args);
}
|
@Test
public void shouldUseRakeFileFromAnyDirectoryUnderRoot() throws Exception {
    // The -f flag should quote the build-file path verbatim, followed by the target.
    final String rakeFile = "build/myrakefile.rb";
    final String target = "db:migrate VERSION=0";
    RakeTask rakeTask = new RakeTask();
    rakeTask.setBuildFile(rakeFile);
    rakeTask.setTarget(target);
    assertThat(rakeTask.arguments(), is("-f \"" + rakeFile + "\" " + target));
}
|
@SuppressWarnings({
  "nullness" // TODO(https://github.com/apache/beam/issues/20497)
})
@Override
protected SchemaTransform from(KafkaReadSchemaTransformConfiguration configuration) {
  // Wrap the configuration in the concrete Kafka read transform.
  final KafkaReadSchemaTransform transform = new KafkaReadSchemaTransform(configuration);
  return transform;
}
|
@Test
public void testBuildTransformWithAvroSchema() {
    // Locate the Kafka read provider through ServiceLoader, as a runner would.
    ServiceLoader<SchemaTransformProvider> serviceLoader =
        ServiceLoader.load(SchemaTransformProvider.class);
    List<SchemaTransformProvider> providers =
        StreamSupport.stream(serviceLoader.spliterator(), false)
            .filter(provider -> provider.getClass() == KafkaReadSchemaTransformProvider.class)
            .collect(Collectors.toList());
    KafkaReadSchemaTransformProvider kafkaProvider =
        (KafkaReadSchemaTransformProvider) providers.get(0);
    // Building with AVRO format plus an explicit schema must not throw; the test
    // asserts nothing further.
    kafkaProvider.from(
        KafkaReadSchemaTransformConfiguration.builder()
            .setFormat("AVRO")
            .setTopic("anytopic")
            .setBootstrapServers("anybootstrap")
            .setSchema(AVRO_SCHEMA)
            .build());
}
|
/**
 * Resolves the metrics type from the endpoint URI remainder: the text before
 * the first ':' names the type; no prefix selects the default.
 *
 * @throws RuntimeCamelException when the prefix names an unknown type
 */
MetricsType getMetricsType(String remaining) {
    final String name = StringHelper.before(remaining, ":");
    final MetricsType type = name == null
            ? DEFAULT_METRICS_TYPE
            : MetricsType.getByName(name);
    if (type == null) {
        throw new RuntimeCamelException("Unknown metrics type \"" + name + "\"");
    }
    return type;
}
|
@Test
public void testGetMetricsTypeNotSet() {
    // A remaining part without a ":"-separated prefix selects the default type.
    final MetricsType resolved = component.getMetricsType("no-metrics-type");
    assertThat(resolved, is(MetricsComponent.DEFAULT_METRICS_TYPE));
}
|
/**
 * Runs the CLI once and returns its exit code. Precedence of modes:
 * -e/--execute, then a script file, then interactive.
 */
int run() {
    // Properties from the optional config file; empty map when none was given.
    final Map<String, String> configProps = options.getConfigFile()
        .map(Ksql::loadProperties)
        .orElseGet(Collections::emptyMap);
    final Map<String, String> sessionVariables = options.getVariables();
    // Nested try-with-resources: the CLI is closed before the REST client it uses.
    try (KsqlRestClient restClient = buildClient(configProps)) {
      try (Cli cli = cliBuilder.build(
          options.getStreamedQueryRowLimit(),
          options.getStreamedQueryTimeoutMs(),
          options.getOutputFormat(),
          restClient)
      ) {
        // Add CLI variables If defined by parameters
        cli.addSessionVariables(sessionVariables);
        if (options.getExecute().isPresent()) {
          return cli.runCommand(options.getExecute().get());
        } else if (options.getScriptFile().isPresent()) {
          final File scriptFile = new File(options.getScriptFile().get());
          // Fail fast with a clear message rather than letting the CLI choke on
          // a missing or non-regular file.
          if (scriptFile.exists() && scriptFile.isFile()) {
            return cli.runScript(scriptFile.getPath());
          } else {
            throw new KsqlException("No such script file: " + scriptFile.getPath());
          }
        } else {
          return cli.runInteractively();
        }
      }
    }
}
|
@Test
public void shouldRunInteractively() {
    // When: run() is invoked (mocked options presumably supply neither --execute
    // nor a script file — confirm against the test fixture setup)
    ksql.run();
    // Then: the CLI falls through to interactive mode
    verify(cli).runInteractively();
}
|
@Override
public Map<String, List<TopicPartition>> assign(Map<String, Integer> partitionsPerTopic,
                                                Map<String, Subscription> subscriptions) {
    // Pre-create an (initially empty) assignment entry for every member so that
    // members with no matching topics still appear in the result.
    Map<String, List<TopicPartition>> assignment = new HashMap<>();
    List<MemberInfo> memberInfoList = new ArrayList<>();
    for (Map.Entry<String, Subscription> memberSubscription : subscriptions.entrySet()) {
        assignment.put(memberSubscription.getKey(), new ArrayList<>());
        memberInfoList.add(new MemberInfo(memberSubscription.getKey(),
                memberSubscription.getValue().groupInstanceId()));
    }
    // Members are sorted for determinism, then visited round-robin; each partition
    // goes to the next member that is subscribed to the partition's topic.
    CircularIterator<MemberInfo> assigner = new CircularIterator<>(Utils.sorted(memberInfoList));
    for (TopicPartition partition : allPartitionsSorted(partitionsPerTopic, subscriptions)) {
        final String topic = partition.topic();
        // Skip unsubscribed members. NOTE(review): assumes allPartitionsSorted
        // only yields topics with at least one subscriber — otherwise this loop
        // never terminates; confirm.
        while (!subscriptions.get(assigner.peek().memberId).topics().contains(topic))
            assigner.next();
        assignment.get(assigner.next().memberId).add(partition);
    }
    return assignment;
}
|
@Test
public void testOneConsumerNoTopic() {
    // With no partitions at all, the single subscriber still gets an (empty) entry.
    Map<String, List<TopicPartition>> assignment = assignor.assign(
        new HashMap<>(),
        Collections.singletonMap(consumerId, new Subscription(Collections.emptyList())));
    assertEquals(Collections.singleton(consumerId), assignment.keySet());
    assertTrue(assignment.get(consumerId).isEmpty());
}
|
/**
 * Returns the parsed JSON body of this result.
 */
public JsonNode getJson() {
    return json;
}
|
@Test
void testCopyConstructor() {
    // Copying a plain HttpResult yields an empty JSON payload.
    final JsonHttpResult copy = new JsonHttpResult(new HttpResult());
    assertEquals("{}", copy.getJson().toString());
}
|
/**
 * Injects schema information into CREATE (STREAM|TABLE) and CREATE ... AS SELECT
 * statements; any other statement passes through untouched.
 */
@SuppressWarnings("unchecked")
@Override
public <T extends Statement> ConfiguredStatement<T> inject(
    final ConfiguredStatement<T> statement
) {
    if (!(statement.getStatement() instanceof CreateSource)
        && !(statement.getStatement() instanceof CreateAsSelect)) {
      return statement;
    }
    try {
      if (statement.getStatement() instanceof CreateSource) {
        final ConfiguredStatement<CreateSource> createStatement =
            (ConfiguredStatement<CreateSource>) statement;
        // If no injection is applicable, the original statement is returned as-is.
        return (ConfiguredStatement<T>) forCreateStatement(createStatement).orElse(createStatement);
      } else {
        final ConfiguredStatement<CreateAsSelect> createStatement =
            (ConfiguredStatement<CreateAsSelect>) statement;
        return (ConfiguredStatement<T>) forCreateAsStatement(createStatement).orElse(
            createStatement);
      }
    } catch (final KsqlStatementException e) {
      // Already carries statement context; rethrow unchanged.
      throw e;
    } catch (final KsqlException e) {
      // Re-wrap with the masked statement text so errors don't leak raw literals.
      throw new KsqlStatementException(
          ErrorMessageUtil.buildErrorMessage(e),
          statement.getMaskedStatementText(),
          e.getCause());
    }
}
|
@Test
public void shouldInjectValueForCsas() {
    // Given: key/value formats and a VALUE_SCHEMA_ID property on the statement,
    // with a DDL schema whose value columns are missing (to be injected).
    givenFormatsAndProps("kafka", "protobuf",
        ImmutableMap.of("VALUE_SCHEMA_ID", new IntegerLiteral(42)));
    givenDDLSchemaAndFormats(LOGICAL_SCHEMA_VALUE_MISSING, "kafka", "protobuf",
        SerdeFeature.WRAP_SINGLES, SerdeFeature.WRAP_SINGLES);
    // When:
    final ConfiguredStatement<CreateStreamAsSelect> result = injector.inject(csasStatement);
    // Then: the rewritten statement carries the schema full name and id in WITH.
    assertThat(result.getMaskedStatementText(), is(
        "CREATE STREAM `csas` "
        + "WITH (KAFKA_TOPIC='some-topic', KEY_FORMAT='kafka', VALUE_FORMAT='protobuf', "
        + "VALUE_SCHEMA_FULL_NAME='myrecord', VALUE_SCHEMA_ID=42) AS SELECT *\nFROM TABLE `sink`"
    ));
}
|
/**
 * Decodes the readable region (from readerIndex, readableBytes() long) with the
 * given charset by delegating to the ranged overload.
 */
@Override
public String toString(Charset charset) {
    return toString(readerIndex, readableBytes(), charset);
}
|
@Test
public void testToString() {
    // Round-trip an ISO-8859-1 string through the buffer and decode it back.
    final String text = "Hello, World!";
    ByteBuf copied = copiedBuffer(text, CharsetUtil.ISO_8859_1);
    buffer.clear();
    buffer.writeBytes(copied);
    assertEquals(text, buffer.toString(CharsetUtil.ISO_8859_1));
    copied.release();
}
|
@Bean
@ConditionalOnMissingBean(WebsocketDataChangedListener.class)
public DataChangedListener websocketDataChangedListener() {
    // Default websocket-based listener, registered only when no other bean of
    // this type has been provided.
    final DataChangedListener listener = new WebsocketDataChangedListener();
    return listener;
}
|
@Test
public void testWebsocketDataChangedListener() {
    // The configuration must always produce a websocket listener bean.
    final WebSocketSyncConfiguration configuration = new WebSocketSyncConfiguration();
    assertNotNull(configuration.websocketDataChangedListener());
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.