focal_method
stringlengths 13
60.9k
| test_case
stringlengths 25
109k
|
|---|---|
/**
 * Starts a dedicated per-job MiniCluster and submits the given job graph to it.
 *
 * <p>The returned future completes with a {@code JobClient} once job initialization has
 * finished; the client is configured to shut the cluster down when the job finalizes
 * (SHUTDOWN_CLUSTER). If anything in the pipeline fails, the cluster is shut down eagerly
 * so no resources leak.
 *
 * @param jobGraph the job to execute; its maximum parallelism sizes the cluster
 * @param userCodeClassloader classloader used to deserialize user-code results/exceptions
 * @return future yielding a client attached to the newly started cluster
 * @throws Exception if the MiniCluster fails to start
 */
public CompletableFuture<JobClient> submitJob(
JobGraph jobGraph, ClassLoader userCodeClassloader) throws Exception {
MiniClusterConfiguration miniClusterConfig =
getMiniClusterConfig(jobGraph.getMaximumParallelism());
MiniCluster miniCluster = miniClusterFactory.apply(miniClusterConfig);
miniCluster.start();
return miniCluster
.submitJob(jobGraph)
// Block (asynchronously) until the job has left the INITIALIZING state so that
// the JobClient handed to the caller refers to a fully initialized job.
.thenApplyAsync(
FunctionUtils.uncheckedFunction(
submissionResult -> {
org.apache.flink.client.ClientUtils
.waitUntilJobInitializationFinished(
() ->
miniCluster
.getJobStatus(
submissionResult
.getJobID())
.get(),
() ->
miniCluster
.requestJobResult(
submissionResult
.getJobID())
.get(),
userCodeClassloader);
return submissionResult;
}))
.thenApply(
result ->
new MiniClusterJobClient(
result.getJobID(),
miniCluster,
userCodeClassloader,
MiniClusterJobClient.JobFinalizationBehavior
.SHUTDOWN_CLUSTER))
.whenComplete(
(ignored, throwable) -> {
if (throwable != null) {
// We failed to create the JobClient and must shutdown to ensure
// cleanup.
shutDownCluster(miniCluster);
}
})
.thenApply(Function.identity());
}
|
@Test
void testJobClientSavepoint() throws Exception {
PerJobMiniClusterFactory perJobMiniClusterFactory = initializeMiniCluster();
JobClient jobClient =
perJobMiniClusterFactory
.submitJob(getCancellableJobGraph(), ClassLoader.getSystemClassLoader())
.get();
// Busy-wait until the job is actually running; savepoint operations on a
// non-running job would fail for the wrong reason.
while (jobClient.getJobStatus().get() != JobStatus.RUNNING) {
Thread.sleep(50);
}
// Savepoints are only supported for streaming jobs, so both trigger and
// stop-with-savepoint are expected to fail for this (batch-style) job.
assertThatThrownBy(
() -> jobClient.triggerSavepoint(null, SavepointFormatType.DEFAULT).get(),
"is not a streaming job.")
.isInstanceOf(ExecutionException.class);
assertThatFuture(jobClient.stopWithSavepoint(true, null, SavepointFormatType.DEFAULT))
.eventuallyFailsWith(ExecutionException.class)
.withMessageContaining("is not a streaming job.");
}
|
/**
 * Returns the last element of the given collection by delegating to
 * {@code get(collection, -1)} — presumably the delegate interprets the negative
 * index as counting from the end (TODO confirm against the delegate's contract).
 *
 * @param collection the collection to read from
 * @return the last element
 */
public static <T> T getLast(Collection<T> collection) {
return get(collection, -1);
}
|
@Test
public void getLastTest() {
// An empty list should yield null instead of throwing an exception.
final List<String> test = CollUtil.newArrayList();
final String last = CollUtil.getLast(test);
assertNull(last);
}
|
/**
 * Pure pass-through: forwards the session lookup to the wrapped store
 * without altering arguments or result.
 */
@Override
public KeyValueIterator<Windowed<Bytes>, byte[]> findSessions(final Bytes key, final long earliestSessionEndTime, final long latestSessionStartTime) {
return wrapped().findSessions(key, earliestSessionEndTime, latestSessionStartTime);
}
|
@Test
public void shouldDelegateToUnderlyingStoreWhenFindingSessions() {
// The wrapper must forward findSessions to the inner store with identical arguments.
store.findSessions(bytesKey, 0, 1);
verify(inner).findSessions(bytesKey, 0, 1);
}
|
/**
 * Returns the element at the given index from a container of unknown static type.
 *
 * <p>Supported container types: {@code Map} (indexes into its entry set, returning a
 * {@code Map.Entry}), {@code List}, {@code Object[]}, {@code Iterator},
 * {@code Collection}, {@code Enumeration}, and primitive arrays (via reflection).
 * Iterators and enumerations are consumed up to the requested position.
 *
 * @param object the container to read from; must not be {@code null}
 * @param index  the zero-based position; must be non-negative
 * @return the element at {@code index}
 * @throws IndexOutOfBoundsException if {@code index} is negative or past the end
 * @throws IllegalArgumentException  if {@code object} is {@code null} or not a
 *                                   supported container type
 */
public static Object get(Object object, int index) {
    if (index < 0) {
        throw new IndexOutOfBoundsException("Index cannot be negative: " + index);
    }
    if (object instanceof Map) {
        // Index into the map's entry set; iteration order is the map's own.
        Map<?, ?> map = (Map<?, ?>) object;
        Iterator<?> iterator = map.entrySet().iterator();
        return get(iterator, index);
    } else if (object instanceof List) {
        return ((List<?>) object).get(index);
    } else if (object instanceof Object[]) {
        return ((Object[]) object)[index];
    } else if (object instanceof Iterator) {
        Iterator<?> it = (Iterator<?>) object;
        while (it.hasNext()) {
            index--;
            if (index == -1) {
                return it.next();
            } else {
                // Skip elements before the requested position.
                it.next();
            }
        }
        throw new IndexOutOfBoundsException("Entry does not exist: " + index);
    } else if (object instanceof Collection) {
        Iterator<?> iterator = ((Collection<?>) object).iterator();
        return get(iterator, index);
    } else if (object instanceof Enumeration) {
        Enumeration<?> it = (Enumeration<?>) object;
        while (it.hasMoreElements()) {
            index--;
            if (index == -1) {
                return it.nextElement();
            } else {
                it.nextElement();
            }
        }
        throw new IndexOutOfBoundsException("Entry does not exist: " + index);
    } else if (object == null) {
        throw new IllegalArgumentException("Unsupported object type: null");
    } else {
        try {
            // Last resort: reflective array access covers primitive arrays.
            return Array.get(object, index);
        } catch (IllegalArgumentException ex) {
            // Preserve the original exception as the cause instead of discarding it.
            throw new IllegalArgumentException("Unsupported object type: " + object.getClass().getName(), ex);
        }
    }
}
|
@Test
void testGetCollection2() {
// A negative index is rejected up front, regardless of container type.
assertThrows(IndexOutOfBoundsException.class, () -> {
CollectionUtils.get(Collections.emptySet(), -1);
});
}
|
/**
 * Returns true only when the job master service future has completed successfully
 * AND this process is still running. Reads the state under {@code lock} so the
 * combination is observed consistently.
 */
@Override
public boolean isInitializedAndRunning() {
synchronized (lock) {
return jobMasterServiceFuture.isDone()
&& !jobMasterServiceFuture.isCompletedExceptionally()
&& isRunning;
}
}
|
@Test
void testIsInitialized() {
final CompletableFuture<JobMasterService> jobMasterServiceFuture =
new CompletableFuture<>();
DefaultJobMasterServiceProcess serviceProcess = createTestInstance(jobMasterServiceFuture);
// Completing the future normally flips the process into the initialized state.
jobMasterServiceFuture.complete(new TestingJobMasterService());
assertThat(serviceProcess.isInitializedAndRunning()).isTrue();
}
|
/**
 * Computes the type name to render in generated code: short name when the type
 * needs no import or is already importable in the compilation unit, fully
 * qualified name otherwise. Arrays and parameterized types are handled first.
 *
 * @param compilationUnit the unit being generated (may be null, forcing short-name checks to pass)
 * @param fqjt the type whose rendered name is requested
 * @return the short or fully qualified type name
 */
public static String calculateTypeName(CompilationUnit compilationUnit, FullyQualifiedJavaType fqjt) {
    if (fqjt.isArray()) {
        // Resolve the name of the component (non-array) type, then re-append
        // the original "[...]" suffix unchanged.
        String fullName = fqjt.getFullyQualifiedName();
        int bracketIndex = fullName.indexOf('[');
        String componentName = calculateTypeName(compilationUnit,
                new FullyQualifiedJavaType(fullName.substring(0, bracketIndex)));
        return componentName + fullName.substring(bracketIndex);
    }
    if (!fqjt.getTypeArguments().isEmpty()) {
        // Generic types are delegated to the parameterized-type variant.
        return calculateParameterizedTypeName(compilationUnit, fqjt);
    }
    // Use the short name whenever an import is unnecessary or already present.
    boolean shortNameUsable = compilationUnit == null
            || typeDoesNotRequireImport(fqjt)
            || typeIsInSamePackage(compilationUnit, fqjt)
            || typeIsAlreadyImported(compilationUnit, fqjt);
    return shortNameUsable ? fqjt.getShortName() : fqjt.getFullyQualifiedName();
}
|
@Test
void testGenericTypeBaseTypeImportedImported() {
Interface interfaze = new Interface(new FullyQualifiedJavaType("com.foo.UserMapper"));
// Only the raw base type (java.util.Map) is imported; the type arguments are not,
// so they must stay fully qualified in the rendered name.
interfaze.addImportedType(new FullyQualifiedJavaType("java.util.Map"));
FullyQualifiedJavaType fqjt = new FullyQualifiedJavaType("java.util.Map<java.math.BigDecimal, java.util.List<com.beeant.dto.User>>");
assertEquals("Map<java.math.BigDecimal, java.util.List<com.beeant.dto.User>>",
JavaDomUtils.calculateTypeName(interfaze, fqjt));
}
|
/**
 * Discards this async checkpoint if it is still running: atomically flips the
 * state from RUNNING to DISCARDED, cleans up resources and reports aborted
 * snapshot stats. If the state transition fails (already completed/discarded),
 * only a failed-cleanup message is logged.
 */
@Override
public void close() {
if (asyncCheckpointState.compareAndSet(
AsyncCheckpointState.RUNNING, AsyncCheckpointState.DISCARDED)) {
try {
// tuple presumably carries the stats to report for the aborted snapshot
// (f0/f1 semantics not visible here — confirm against cleanup()).
final Tuple2<Long, Long> tuple = cleanup();
reportAbortedSnapshotStats(tuple.f0, tuple.f1);
} catch (Exception cleanupException) {
// Best-effort cleanup: failure is logged, not propagated.
LOG.warn(
"Could not properly clean up the async checkpoint runnable.",
cleanupException);
}
} else {
logFailedCleanupAttempt();
}
}
|
@Test
void testReportIncompleteStats() {
long checkpointId = 1000L;
TestEnvironment env = new TestEnvironment();
// Closing an un-started runnable should still report the (aborted) checkpoint id
// to the task state manager.
new AsyncCheckpointRunnable(
new HashMap<>(),
new CheckpointMetaData(checkpointId, 1),
new CheckpointMetricsBuilder(),
0,
"Task Name",
r -> {},
env,
(msg, ex) -> {},
false,
false,
() -> true)
.close();
assertThat(((TestTaskStateManager) env.getTaskStateManager()).getReportedCheckpointId())
.isEqualTo(checkpointId);
}
|
/**
 * Searches the GitLab instance for projects the token's user is a member of,
 * optionally filtered by name and paginated. Pagination metadata is read from
 * GitLab's X-Page / X-Per-Page / X-Total response headers.
 *
 * @param gitlabUrl base API URL of the GitLab instance
 * @param personalAccessToken token sent via the Private-Token header
 * @param projectName optional search term (URL-encoded); null means no filter
 * @param pageNumber optional 1-based page; null omits the parameter
 * @param pageSize optional page size; null omits the parameter
 * @return the matching projects plus the returned pagination info
 * @throws IllegalArgumentException if the response body is not valid JSON
 * @throws IllegalStateException on I/O failure talking to GitLab
 */
public ProjectList searchProjects(String gitlabUrl, String personalAccessToken, @Nullable String projectName,
@Nullable Integer pageNumber, @Nullable Integer pageSize) {
String url = format("%s/projects?archived=false&simple=true&membership=true&order_by=name&sort=asc&search=%s%s%s",
gitlabUrl,
projectName == null ? "" : urlEncode(projectName),
pageNumber == null ? "" : format("&page=%d", pageNumber),
pageSize == null ? "" : format("&per_page=%d", pageSize)
);
LOG.debug("get projects : [{}]", url);
Request request = new Request.Builder()
.addHeader(PRIVATE_TOKEN, personalAccessToken)
.url(url)
.get()
.build();
try (Response response = client.newCall(request).execute()) {
Headers headers = response.headers();
checkResponseIsSuccessful(response, "Could not get projects from GitLab instance");
List<Project> projectList = Project.parseJsonArray(response.body().string());
int returnedPageNumber = parseAndGetIntegerHeader(headers.get("X-Page"));
int returnedPageSize = parseAndGetIntegerHeader(headers.get("X-Per-Page"));
// X-Total may be absent on large instances; a missing header maps to null.
String xtotal = headers.get("X-Total");
Integer totalProjects = Strings.isEmpty(xtotal) ? null : parseAndGetIntegerHeader(xtotal);
return new ProjectList(projectList, returnedPageNumber, returnedPageSize, totalProjects);
} catch (JsonSyntaxException e) {
throw new IllegalArgumentException("Could not parse GitLab answer to search projects. Got a non-json payload as result.");
} catch (IOException e) {
logException(url, e);
throw new IllegalStateException(e.getMessage(), e);
}
}
|
@Test
public void throws_ISE_when_get_projects_not_http_200() {
// NOTE(review): despite the method name, the asserted type is
// IllegalArgumentException — presumably what checkResponseIsSuccessful throws;
// consider renaming the test for clarity.
MockResponse projects = new MockResponse()
.setResponseCode(500)
.setBody("test");
server.enqueue(projects);
assertThatThrownBy(() -> underTest.searchProjects(gitlabUrl, "pat", "example", 1, 2))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Could not get projects from GitLab instance");
}
|
/**
 * Reports whether the given string parses as valid JSON.
 * Null input, a parse failure, or a missing node all yield {@code false}.
 *
 * @param input the candidate JSON string (may be null)
 * @return true iff the input parses to a present JSON node
 */
@Udf
public boolean check(@UdfParameter(description = "The input JSON string") final String input) {
    if (input == null) {
        return false;
    }
    try {
        boolean missing = UdfJsonMapper.parseJson(input).isMissingNode();
        return !missing;
    } catch (KsqlFunctionException invalidJson) {
        // Unparseable input counts as "not JSON" rather than an error.
        return false;
    }
}
|
@Test
public void shouldInterpretNumber() {
// A bare numeric literal is valid JSON.
assertTrue(udf.check("1"));
}
|
/** Convenience overload: builds the OpenAPI description for the JVM default locale. */
private void getOpenApi() {
this.getOpenApi(Locale.getDefault());
}
|
@Test
void preLoadingModeShouldNotOverwriteServers() throws InterruptedException {
// Use real implementations for the server-related methods of the mocked service
// so the customizer's server list can actually be cached and observed.
doCallRealMethod().when(openAPIService).updateServers(any());
when(openAPIService.getCachedOpenAPI(any())).thenCallRealMethod();
doAnswer(new CallsRealMethods()).when(openAPIService).setServersPresent(true);
doAnswer(new CallsRealMethods()).when(openAPIService).setServerBaseUrl(any(), any());
doAnswer(new CallsRealMethods()).when(openAPIService).setCachedOpenAPI(any(), any());
String customUrl = "https://custom.com";
String generatedUrl = "https://generated.com";
OpenApiCustomizer openApiCustomizer = openApi -> openApi.setServers(singletonList(new Server().url(customUrl)));
SpringDocConfigProperties properties = new SpringDocConfigProperties();
properties.setPreLoadingEnabled(true);
resource = new EmptyPathsOpenApiResource(
GROUP_NAME,
openAPIBuilderObjectFactory,
requestBuilder,
responseBuilder,
operationParser,
properties, springDocProviders, new SpringDocCustomizers(Optional.of(singletonList(openApiCustomizer)),Optional.empty(),Optional.empty(),Optional.empty())
);
// wait for executor to be done
// NOTE(review): fixed sleep makes this test timing-sensitive; a latch/await
// on the pre-loading executor would be more robust.
Thread.sleep(1_000);
// emulate generating base url
openAPIService.setServerBaseUrl(generatedUrl, new MockClientHttpRequest());
openAPIService.updateServers(openAPI);
Locale locale = Locale.US;
OpenAPI after = resource.getOpenApi(locale);
// The customizer-provided server must win over the generated base URL.
assertThat(after.getServers().get(0).getUrl(), is(customUrl));
}
|
/**
 * Static factory for a primitive UType. Rejects any {@code TypeKind} that this
 * model does not treat as (de facto) primitive.
 *
 * @throws IllegalArgumentException if {@code typeKind} is not a de facto primitive
 */
public static UPrimitiveType create(TypeKind typeKind) {
checkArgument(
isDeFactoPrimitive(typeKind), "Non-primitive type %s passed to UPrimitiveType", typeKind);
return new AutoValue_UPrimitiveType(typeKind);
}
|
@Test
public void equality() {
// Each freshly created instance must equal its predefined singleton constant,
// and instances of different kinds must be mutually unequal.
new EqualsTester()
.addEqualityGroup(UPrimitiveType.create(TypeKind.INT), UPrimitiveType.INT)
.addEqualityGroup(UPrimitiveType.create(TypeKind.LONG), UPrimitiveType.LONG)
.addEqualityGroup(UPrimitiveType.create(TypeKind.DOUBLE), UPrimitiveType.DOUBLE)
.addEqualityGroup(UPrimitiveType.create(TypeKind.FLOAT), UPrimitiveType.FLOAT)
.addEqualityGroup(UPrimitiveType.create(TypeKind.CHAR), UPrimitiveType.CHAR)
.addEqualityGroup(UPrimitiveType.create(TypeKind.VOID), UPrimitiveType.VOID)
.addEqualityGroup(UPrimitiveType.create(TypeKind.NULL), UPrimitiveType.NULL)
.addEqualityGroup(UPrimitiveType.create(TypeKind.BOOLEAN), UPrimitiveType.BOOLEAN)
.addEqualityGroup(UPrimitiveType.create(TypeKind.BYTE), UPrimitiveType.BYTE)
.addEqualityGroup(UPrimitiveType.create(TypeKind.SHORT), UPrimitiveType.SHORT)
.testEquals();
}
|
/**
 * Returns the element at the given logical index by walking the chunk list and
 * translating the global index into a (chunk, local offset) pair.
 *
 * @param idx global zero-based index
 * @return the element at that index
 * @throws IndexOutOfBoundsException if {@code idx} is negative or past the end
 */
@Override
public T get(int idx) {
    if (idx < 0) {
        throw new IndexOutOfBoundsException();
    }
    // Walk the chunks, reducing the global index by each chunk's size until it
    // falls inside the current chunk.
    int offset = idx;
    for (List<T> chunk : chunks) {
        int chunkSize = chunk.size();
        if (offset < chunkSize) {
            return chunk.get(offset);
        }
        offset -= chunkSize;
    }
    throw new IndexOutOfBoundsException();
}
|
@Test
public void testGet() throws Exception {
// Large enough to span multiple internal chunks.
final int NUM_ELEMS = 100001;
ChunkedArrayList<Integer> list = new ChunkedArrayList<Integer>();
for (int i = 0; i < NUM_ELEMS; i++) {
list.add(i);
}
Assert.assertEquals(Integer.valueOf(100), list.get(100));
Assert.assertEquals(Integer.valueOf(1000), list.get(1000));
Assert.assertEquals(Integer.valueOf(10000), list.get(10000));
Assert.assertEquals(Integer.valueOf(100000), list.get(100000));
// Removing via iterator must shift subsequent indices by one.
Iterator<Integer> iter = list.iterator();
iter.next();
iter.remove();
Assert.assertEquals(Integer.valueOf(1), list.get(0));
// Removing element at position 500 shifts everything after it again
// (values are now offset by the two removals).
iter = list.iterator();
for (int i = 0; i < 500; i++) {
iter.next();
}
iter.remove();
Assert.assertEquals(Integer.valueOf(502), list.get(500));
Assert.assertEquals(Integer.valueOf(602), list.get(600));
}
|
/** Returns the analyzed instructions as a list view of the backing collection. */
public List<AnalyzedInstruction> getAnalyzedInstructions() {
return analyzedInstructions.getValues();
}
|
@Test
public void testInstanceOfNarrowingAfterMove_dalvik() throws IOException {
// Builds:  move-object v1, v2 ; instance-of v0, v1, Lmain; ; if-eqz v0, :not_instance_of
// Without art semantics (last ctor arg false), instance-of must NOT narrow the
// register type in either branch — both v1 and v2 stay Ljava/lang/Object;.
MethodImplementationBuilder builder = new MethodImplementationBuilder(3);
builder.addInstruction(new BuilderInstruction12x(Opcode.MOVE_OBJECT, 1, 2));
builder.addInstruction(new BuilderInstruction22c(Opcode.INSTANCE_OF, 0, 1,
new ImmutableTypeReference("Lmain;")));
builder.addInstruction(new BuilderInstruction21t(Opcode.IF_EQZ, 0, builder.getLabel("not_instance_of")));
builder.addInstruction(new BuilderInstruction10x(Opcode.RETURN_VOID));
builder.addLabel("not_instance_of");
builder.addInstruction(new BuilderInstruction10x(Opcode.RETURN_VOID));
MethodImplementation methodImplementation = builder.getMethodImplementation();
Method method = new ImmutableMethod("Lmain;", "narrowing",
Collections.singletonList(new ImmutableMethodParameter("Ljava/lang/Object;", null, null)), "V",
AccessFlags.PUBLIC.getValue(), null, null, methodImplementation);
ClassDef classDef = new ImmutableClassDef("Lmain;", AccessFlags.PUBLIC.getValue(), "Ljava/lang/Object;", null,
null, null, null, Collections.singletonList(method));
DexFile dexFile = new ImmutableDexFile(Opcodes.getDefault(), Collections.singletonList(classDef));
ClassPath classPath = new ClassPath(new DexClassProvider(dexFile));
MethodAnalyzer methodAnalyzer = new MethodAnalyzer(classPath, method, null, false);
List<AnalyzedInstruction> analyzedInstructions = methodAnalyzer.getAnalyzedInstructions();
// Instructions 3 and 4 are the two return-void branches after the if-eqz.
Assert.assertEquals("Ljava/lang/Object;",
analyzedInstructions.get(3).getPreInstructionRegisterType(1).type.getType());
Assert.assertEquals("Ljava/lang/Object;",
analyzedInstructions.get(3).getPreInstructionRegisterType(2).type.getType());
Assert.assertEquals("Ljava/lang/Object;",
analyzedInstructions.get(4).getPreInstructionRegisterType(1).type.getType());
Assert.assertEquals("Ljava/lang/Object;",
analyzedInstructions.get(4).getPreInstructionRegisterType(2).type.getType());
}
|
/**
 * Adds graph edges from the given input entity to every lookup table referenced
 * by its extractors (LOOKUP_TABLE extractors) or by any of their LOOKUP_TABLE
 * converters. Lookup-table names that cannot be resolved are silently skipped.
 */
private void resolveNativeEntityLookupTable(EntityDescriptor entityDescriptor,
InputWithExtractors inputWithExtractors,
MutableGraph<EntityDescriptor> mutableGraph) {
// Table names referenced directly by lookup-table extractors.
final Stream<String> extractorLookupNames = inputWithExtractors.extractors().stream()
.filter(e -> e.getType().equals(Extractor.Type.LOOKUP_TABLE))
.map(e -> (String) e.getExtractorConfig().get(LookupTableExtractor.CONFIG_LUT_NAME));
// Table names referenced by lookup-table converters attached to any extractor.
final Stream<String> converterLookupNames = inputWithExtractors.extractors().stream()
.flatMap(e -> e.getConverters().stream())
.filter(c -> c.getType().equals(Converter.Type.LOOKUP_TABLE))
.map(c -> (String) c.getConfig().get("lookup_table_name"));
Stream.concat(extractorLookupNames, converterLookupNames)
.map(lookupTableService::get)
.filter(Optional::isPresent)
.map(Optional::get)
.forEach(lookupTableDto -> {
EntityDescriptor lookupTable = EntityDescriptor.create(
ModelId.of(lookupTableDto.id()), ModelTypes.LOOKUP_TABLE_V1);
mutableGraph.putEdge(entityDescriptor, lookupTable);
});
}
|
@Test
@MongoDBFixtures("InputFacadeTest.json")
public void resolveNativeEntityLookupTable() throws NotFoundException {
// Stub the lookup-table service so both referenced tables resolve to known ids.
when(lookupuptableBuilder.lookupTable("whois")).thenReturn(lookupuptableBuilder);
when(lookupuptableBuilder.lookupTable("tor-exit-node-list")).thenReturn(lookupuptableBuilder);
when(lookupuptableBuilder.build()).thenReturn(lookupTable);
when(lookupTableService.newBuilder()).thenReturn(lookupuptableBuilder);
when(lookupTableService.hasTable("whois")).thenReturn(true);
when(lookupTableService.hasTable("tor-exit-node-list")).thenReturn(true);
when(lookupTableWhois.id()).thenReturn("dead-beef");
when(dbLookupTableService.get("whois")).thenReturn(Optional.of(lookupTableWhois));
when(lookupTableTor.id()).thenReturn("dead-feed");
when(dbLookupTableService.get("tor-exit-node-list")).thenReturn(Optional.of(lookupTableTor));
// Input id comes from the MongoDB fixture file.
final Input input = inputService.find("5ae2eb0a3d27464477f0fd8b");
EntityDescriptor entityDescriptor = EntityDescriptor.create(ModelId.of(input.getId()), ModelTypes.INPUT_V1);
EntityDescriptor expectedEntitiyDescriptorWhois = EntityDescriptor.create(ModelId.of("dead-beef"), ModelTypes.LOOKUP_TABLE_V1);
EntityDescriptor expectedEntitiyDescriptorTor = EntityDescriptor.create(ModelId.of("dead-feed"), ModelTypes.LOOKUP_TABLE_V1);
Graph<EntityDescriptor> graph = facade.resolveNativeEntity(entityDescriptor);
// Both lookup tables must appear as nodes reachable from the input entity.
assertThat(graph.nodes()).contains(expectedEntitiyDescriptorWhois);
assertThat(graph.nodes()).contains(expectedEntitiyDescriptorTor);
}
|
/**
 * Dispatches deserialization to the concrete task implementation, selected by
 * the JSON's type / artifact-origin discriminator fields.
 */
@Override
public CRTask deserialize(JsonElement json,
Type type,
JsonDeserializationContext context) throws JsonParseException {
return determineJsonElementForDistinguishingImplementers(json, context, TYPE, ARTIFACT_ORIGIN);
}
|
@Test
public void shouldInstantiateATaskForTypeFetch() {
// type=fetch with a GoCD artifact origin must map to CRFetchArtifactTask.
JsonObject jsonObject = new JsonObject();
jsonObject.addProperty("type", "fetch");
jsonObject.addProperty(TypeAdapter.ARTIFACT_ORIGIN, "gocd");
taskTypeAdapter.deserialize(jsonObject, type, jsonDeserializationContext);
verify(jsonDeserializationContext).deserialize(jsonObject, CRFetchArtifactTask.class);
}
|
/**
 * Null-safe trim: returns {@code null} for {@code null} input, otherwise the
 * string with leading and trailing whitespace removed.
 *
 * @param str the string to trim (may be null)
 * @return the trimmed string, or null if the input was null
 */
public static String trim(final String str) {
    if (str == null) {
        return null;
    }
    return str.trim();
}
|
@Test
public void testTrim() {
// null passes through; whitespace-only input collapses to empty.
assertThat(StringUtils.trim(null)).isNull();
assertThat(StringUtils.trim("abc")).isEqualTo("abc");
assertThat(StringUtils.trim("")).isEqualTo("");
assertThat(StringUtils.trim(" ")).isEqualTo("");
}
|
/**
 * Applies node-manager configuration: resolves the resources handler, the optional
 * container scheduling priority, and the non-secure-mode local user settings
 * (user name, user-name pattern, and whether to limit users). Warns loudly when
 * user limiting is disabled, since that permits impersonation without authentication.
 */
@Override
public void setConf(Configuration conf) {
super.setConf(conf);
resourcesHandler = getResourcesHandler(conf);
// Only honor the priority adjustment when it is explicitly configured.
containerSchedPriorityIsSet = false;
if (conf.get(YarnConfiguration.NM_CONTAINER_EXECUTOR_SCHED_PRIORITY)
!= null) {
containerSchedPriorityIsSet = true;
containerSchedPriorityAdjustment = conf
.getInt(YarnConfiguration.NM_CONTAINER_EXECUTOR_SCHED_PRIORITY,
YarnConfiguration.DEFAULT_NM_CONTAINER_EXECUTOR_SCHED_PRIORITY);
}
nonsecureLocalUser = conf.get(
YarnConfiguration.NM_NONSECURE_MODE_LOCAL_USER_KEY,
YarnConfiguration.DEFAULT_NM_NONSECURE_MODE_LOCAL_USER);
nonsecureLocalUserPattern = Pattern.compile(
conf.get(YarnConfiguration.NM_NONSECURE_MODE_USER_PATTERN_KEY,
YarnConfiguration.DEFAULT_NM_NONSECURE_MODE_USER_PATTERN));
containerLimitUsers = conf.getBoolean(
YarnConfiguration.NM_NONSECURE_MODE_LIMIT_USERS,
YarnConfiguration.DEFAULT_NM_NONSECURE_MODE_LIMIT_USERS);
if (!containerLimitUsers) {
LOG.warn("{}: impersonation without authentication enabled",
YarnConfiguration.NM_NONSECURE_MODE_LIMIT_USERS);
}
}
|
@Test
public void testNonSecureRunAsSubmitter() throws Exception {
// Only meaningful in non-secure mode on hosts configured to run these tests.
Assume.assumeTrue(shouldRun());
Assume.assumeFalse(UserGroupInformation.isSecurityEnabled());
String expectedRunAsUser = appSubmitter;
// With user limiting off, containers run as the submitting user directly.
conf.set(YarnConfiguration.NM_NONSECURE_MODE_LIMIT_USERS, "false");
exec.setConf(conf);
File touchFile = new File(workSpace, "touch-file");
int ret = runAndBlock("touch", touchFile.getAbsolutePath());
assertEquals(0, ret);
// The created file's owner proves which user the container actually ran as.
FileStatus fileStatus =
FileContext.getLocalFSFileContext().getFileStatus(
new Path(touchFile.getAbsolutePath()));
assertEquals(expectedRunAsUser, fileStatus.getOwner());
cleanupAppFiles(expectedRunAsUser);
// reset conf
conf.unset(YarnConfiguration.NM_NONSECURE_MODE_LIMIT_USERS);
exec.setConf(conf);
}
|
/**
 * Dispatches a map-style method call (size, get, containsKey, put, getOrDefault)
 * against the wrapped map, selected by method name and argument count.
 * Missing keys map to NULL rather than Java null.
 *
 * NOTE(review): only the "size" and "containsKey" branches guard against a null
 * backing map; "get", "put" and "getOrDefault" would NPE if val is null —
 * confirm whether val can actually be null on those paths.
 */
@Override
public SelType call(String methodName, SelType[] args) {
if (args.length == 0 && "size".equals(methodName)) {
return SelLong.of(val == null ? 0 : val.size());
} else if (args.length == 1 && "get".equals(methodName)) {
if (!val.containsKey((SelString) args[0])) {
return NULL;
}
return val.get((SelString) args[0]);
} else if (args.length == 1 && "containsKey".equals(methodName)) {
return SelBoolean.of(val != null && val.containsKey((SelString) args[0]));
} else if (args.length == 2 && "put".equals(methodName)) {
// Normalize a Java null value to the NULL sentinel before storing.
SelType value = args[1] == null ? NULL : args[1];
SelType res = val.put((SelString) args[0], value);
if (res == null) {
return NULL;
}
return res;
} else if (args.length == 2 && "getOrDefault".equals(methodName)) {
if (!val.containsKey((SelString) args[0])) {
return args[1];
}
return val.get((SelString) args[0]);
}
throw new UnsupportedOperationException(
type()
+ " DO NOT support calling method: "
+ methodName
+ " with args: "
+ Arrays.toString(args));
}
|
// Passing the NULL sentinel (not a SelString) as the key must fail the
// (SelString) cast inside call().
@Test(expected = ClassCastException.class)
public void testCallGetNullKey() {
orig.call("get", new SelType[] {SelType.NULL});
}
|
/**
 * Creates a new Hazelcast instance from the given config, loading the default
 * configuration when {@code config} is null, and using the default node context.
 *
 * @param config the instance configuration, or null to load defaults
 * @return the newly created instance
 */
public static HazelcastInstance newHazelcastInstance(Config config) {
if (config == null) {
config = Config.load();
}
return newHazelcastInstance(
config,
config.getInstanceName(),
new DefaultNodeContext()
);
}
|
// Verifies that a failure thrown from NodeExtension.beforeShutdown during
// terminate propagates, and that a second terminate attempt still succeeds.
@Test(expected = ExpectedRuntimeException.class)
public void test_NewInstance_failed_beforeNodeShutdown() throws Exception {
NodeContext context = new TestNodeContext() {
@Override
public NodeExtension createNodeExtension(Node node) {
NodeExtension nodeExtension = super.createNodeExtension(node);
doAnswer(new Answer() {
// Fail only on the first beforeShutdown call; subsequent calls succeed.
final AtomicBoolean throwException = new AtomicBoolean(false);
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
if (throwException.compareAndSet(false, true)) {
throw new ExpectedRuntimeException();
}
return null;
}
}).when(nodeExtension).beforeShutdown(true);
return nodeExtension;
}
};
Config config = new Config();
config.getNetworkConfig().getJoin().getAutoDetectionConfig().setEnabled(false);
hazelcastInstance = HazelcastInstanceFactory.newHazelcastInstance(config, randomString(), context);
try {
hazelcastInstance.getLifecycleService().terminate();
} catch (ExpectedRuntimeException expected) {
// Retry terminate (now succeeds), then rethrow to satisfy the @Test expectation.
hazelcastInstance.getLifecycleService().terminate();
throw expected;
}
}
|
/** Logs a marker line at the start of each monitoring round; no other side effects. */
@Override
public void beginRound() {
logger.info(LOG_PREFIX + "=========================================beginRound");
}
|
@Test
public void testBeginRound() {
// Smoke test: beginRound only logs, so it must simply not throw.
defaultMonitorListener.beginRound();
}
|
/**
 * CLI entry point: lazily resolves the VPLS and interface services, maps the
 * textual command to a {@code VplsCommandEnum}, and dispatches to the matching
 * handler. Unknown commands print VPLS_COMMAND_NOT_FOUND.
 */
@Override
protected void doExecute() {
// Lazy service lookup allows tests to pre-inject fakes before execution.
if (vpls == null) {
vpls = get(Vpls.class);
}
if (interfaceService == null) {
interfaceService = get(InterfaceService.class);
}
VplsCommandEnum enumCommand = VplsCommandEnum.enumFromString(command);
if (enumCommand != null) {
switch (enumCommand) {
case ADD_IFACE:
addIface(vplsName, optArg);
break;
case CREATE:
create(vplsName);
break;
case DELETE:
delete(vplsName);
break;
case LIST:
list();
break;
case REMOVE_IFACE:
removeIface(vplsName, optArg);
break;
case SET_ENCAP:
setEncap(vplsName, optArg);
break;
case SHOW:
show(vplsName);
break;
case CLEAN:
cleanVpls();
break;
default:
// Defensive: enum values without a handler fall through to not-found.
print(VPLS_COMMAND_NOT_FOUND, command);
}
} else {
print(VPLS_COMMAND_NOT_FOUND, command);
}
}
|
@Test
public void testShowOne() {
((TestVpls) vplsCommand.vpls).initSampleData();
// Capture stdout so the command's printed output can be asserted verbatim.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
System.setOut(ps);
vplsCommand.command = VplsCommandEnum.SHOW.toString();
vplsCommand.vplsName = VPLS1;
vplsCommand.doExecute();
String result = baos.toString();
assertEquals(SHOW_ONE_RES, result);
}
|
/**
 * Adds a key/value pair after validating the key (validKey presumably rejects
 * malformed keys such as ones containing spaces — confirm its contract).
 * Returns this for fluent chaining.
 */
@CanIgnoreReturnValue
public PrefItem addValue(String key, String value) {
mValues.put(validKey(key), value);
return this;
}
|
// A key containing whitespace must be rejected by validKey().
@Test(expected = java.lang.IllegalArgumentException.class)
public void testFailsIfKeyHasSpaces() {
mPrefItem.addValue("key ", "value");
}
|
/**
 * Clears the registration lock for the authenticated account. Requires the
 * caller to be the primary device; an unknown account identifier maps to
 * UNAUTHENTICATED.
 */
@Override
public Mono<ClearRegistrationLockResponse> clearRegistrationLock(final ClearRegistrationLockRequest request) {
final AuthenticatedDevice authenticatedDevice = AuthenticationUtil.requireAuthenticatedPrimaryDevice();
return Mono.fromFuture(() -> accountsManager.getByAccountIdentifierAsync(authenticatedDevice.accountIdentifier()))
.map(maybeAccount -> maybeAccount.orElseThrow(Status.UNAUTHENTICATED::asRuntimeException))
// Null registration lock and recovery password clears the lock entirely.
.flatMap(account -> Mono.fromFuture(() -> accountsManager.updateAsync(account,
a -> a.setRegistrationLock(null, null))))
.map(ignored -> ClearRegistrationLockResponse.newBuilder().build());
}
|
@Test
void clearRegistrationLockLinkedDevice() {
// A linked (non-primary) device must be rejected before any account mutation.
getMockAuthenticationInterceptor().setAuthenticatedDevice(AUTHENTICATED_ACI, (byte) (Device.PRIMARY_ID + 1));
//noinspection ResultOfMethodCallIgnored
GrpcTestUtils.assertStatusException(Status.PERMISSION_DENIED,
() -> authenticatedServiceStub().clearRegistrationLock(ClearRegistrationLockRequest.newBuilder().build()));
verify(accountsManager, never()).updateAsync(any(), any());
}
|
/**
 * Applies a one-input process function to this non-keyed stream: validates the
 * function's state declarations (only NONE / IDENTICAL redistribution is allowed
 * here), infers the output type, wires a ProcessOperator into a new one-input
 * transformation, and returns the resulting configurable stream.
 */
@Override
public <OUT> ProcessConfigurableAndNonKeyedPartitionStream<OUT> process(
OneInputStreamProcessFunction<T, OUT> processFunction) {
validateStates(
processFunction.usesStates(),
new HashSet<>(
Arrays.asList(
StateDeclaration.RedistributionMode.NONE,
StateDeclaration.RedistributionMode.IDENTICAL)));
TypeInformation<OUT> outType =
StreamUtils.getOutputTypeForOneInputProcessFunction(processFunction, getType());
ProcessOperator<T, OUT> operator = new ProcessOperator<>(processFunction);
OneInputTransformation<T, OUT> outputTransform =
StreamUtils.getOneInputTransformation("Process", this, outType, operator);
environment.addOperator(outputTransform);
return StreamUtils.wrapWithConfigureHandle(
new NonKeyedPartitionStreamImpl<>(environment, outputTransform));
}
|
@Test
void testProcessTwoOutput() throws Exception {
ExecutionEnvironmentImpl env = StreamTestUtils.getEnv();
NonKeyedPartitionStreamImpl<Integer> stream =
new NonKeyedPartitionStreamImpl<>(
env, new TestingTransformation<>("t1", Types.INT, 1));
NonKeyedPartitionStream.TwoNonKeyedPartitionStreams<Integer, Long> resultStream =
stream.process(new StreamTestUtils.NoOpTwoOutputStreamProcessFunction());
// Both output sides are non-keyed streams, but only one transformation
// should be registered with the environment.
assertThat(resultStream.getFirst()).isInstanceOf(NonKeyedPartitionStream.class);
assertThat(resultStream.getSecond()).isInstanceOf(NonKeyedPartitionStream.class);
List<Transformation<?>> transformations = env.getTransformations();
assertThat(transformations).hasSize(1);
assertProcessType(transformations.get(0), OneInputTransformation.class, Types.INT);
}
|
/**
 * Filters the given properties down to the allowed {@code sonar.jdbc.*} pool
 * settings and re-keys them for Hikari. Deprecated pool properties are warned
 * about and dropped; conflicting values that resolve to the same Hikari key
 * raise an {@code IllegalStateException}.
 *
 * @param properties raw configuration properties
 * @return a new {@code Properties} holding only the resolved Hikari keys
 * @throws IllegalStateException if two source keys resolve to the same Hikari
 *         key with different values
 */
@VisibleForTesting
static Properties extractCommonsHikariProperties(Properties properties) {
    Properties result = new Properties();
    for (Map.Entry<Object, Object> entry : properties.entrySet()) {
        String key = (String) entry.getKey();
        if (!ALLOWED_SONAR_PROPERTIES.contains(key)) {
            if (DEPRECATED_SONAR_PROPERTIES.contains(key)) {
                LOG.warn("Property [{}] has no effect as pool connection implementation changed, check 9.7 upgrade notes.", key);
            }
            continue;
        }
        if (StringUtils.startsWith(key, SONAR_JDBC)) {
            String resolvedKey = toHikariPropertyKey(key);
            // setProperty both stores the new value and returns any previous one,
            // so a second setProperty call (as in the original code) is redundant.
            String existingValue = (String) result.setProperty(resolvedKey, (String) entry.getValue());
            checkState(existingValue == null || existingValue.equals(entry.getValue()),
                "Duplicate property declaration for resolved jdbc key '%s': conflicting values are '%s' and '%s'", resolvedKey, existingValue, entry.getValue());
        }
    }
    return result;
}
|
@Test
@UseDataProvider("sonarJdbcAndHikariProperties")
public void shouldThrowISEIfDuplicatedResolvedPropertiesWithDifferentValue(String jdbcProperty, String hikariProperty) {
// Two source keys resolving to the same Hikari key but with different
// values must be rejected rather than silently overwritten.
Properties props = new Properties();
props.setProperty(jdbcProperty, "100");
props.setProperty(hikariProperty, "200");
assertThatThrownBy(() -> DefaultDatabase.extractCommonsHikariProperties(props))
.isInstanceOf(IllegalStateException.class)
.hasMessageContaining(String.format("Duplicate property declaration for resolved jdbc key '%s': conflicting values are", removeStart(hikariProperty, SONAR_JDBC)));
}
|
/**
 * Parses the JSON string into a map and converts it to an Avro record for the
 * given schema, optionally sanitizing field names (per the instance's
 * shouldSanitize / invalidCharMask settings).
 *
 * @throws HoodieIOException if the JSON cannot be parsed
 */
public GenericRecord convert(String json, Schema schema) {
try {
Map<String, Object> jsonObjectMap = mapper.readValue(json, Map.class);
return convertJsonToAvro(jsonObjectMap, schema, shouldSanitize, invalidCharMask);
} catch (IOException e) {
throw new HoodieIOException(e.getMessage(), e);
}
}
|
@Test
public void conversionWithFieldNameSanitization() throws IOException {
// Schema already uses sanitized names; the JSON input uses the raw, invalid
// field names ($, -, ., !) which the converter must map onto the sanitized ones.
String sanitizedSchemaString = "{\"namespace\": \"example.avro\", \"type\": \"record\", \"name\": \"User\", \"fields\": [{\"name\": \"__name\", \"type\": \"string\"}, "
+ "{\"name\": \"favorite__number\", \"type\": \"int\"}, {\"name\": \"favorite__color__\", \"type\": \"string\"}]}";
Schema sanitizedSchema = Schema.parse(sanitizedSchemaString);
String name = "John Smith";
int number = 1337;
String color = "Blue. No yellow!";
Map<String, Object> data = new HashMap<>();
data.put("$name", name);
data.put("favorite-number", number);
data.put("favorite.color!", color);
String json = MAPPER.writeValueAsString(data);
GenericRecord rec = new GenericData.Record(sanitizedSchema);
rec.put("__name", name);
rec.put("favorite__number", number);
rec.put("favorite__color__", color);
Assertions.assertEquals(rec, CONVERTER.convert(json, sanitizedSchema));
}
|
/**
 * Returns the configured JobManager pod annotations, or an empty map when the
 * option is unset (never null).
 */
@Override
public Map<String, String> getAnnotations() {
return flinkConfig
.getOptional(KubernetesConfigOptions.JOB_MANAGER_ANNOTATIONS)
.orElse(Collections.emptyMap());
}
|
@Test
void testGetEmptyAnnotations() {
// With no annotations configured, the getter must return an empty map, not null.
assertThat(kubernetesJobManagerParameters.getAnnotations()).isEmpty();
}
|
/**
 * Returns whether {@code left} and {@code right} differ by at most
 * {@code |tolerance|}. If the difference overflows a long, the values are
 * treated as unequal. The {@code delta >= 0} check is deliberate: when the
 * difference is {@code Long.MIN_VALUE}, {@code Math.abs} returns a negative
 * value and the comparison must fail.
 */
static boolean equalWithinTolerance(long left, long right, long tolerance) {
    final long maxDelta = Math.abs(tolerance);
    try {
        // subtractExact is always desugared.
        @SuppressWarnings("Java7ApiChecker")
        long delta = Math.abs(subtractExact(left, right));
        return delta >= 0 && delta <= maxDelta;
    } catch (ArithmeticException tooFarApart) {
        // The numbers are so far apart their difference isn't even a long.
        return false;
    }
}
|
@Test
public void equalsDifferentTypes() {
// NOTE(review): these calls use floating-point arguments, so they resolve to
// a double/float overload rather than the long-based method above.
assertThat(equalWithinTolerance(1.3d, 1.3f, 0.00000000000001d)).isFalse();
assertThat(equalWithinTolerance(1.3f, 1.3d, 0.00000000000001f)).isFalse();
}
|
/**
 * Counts how many captured log entries were recorded at exactly the given level.
 *
 * @param expectedLevel the level to count
 * @return the number of matching entries in {@code DubboAppender.logList}
 */
public static int findLevel(Level expectedLevel) {
    int matches = 0;
    for (Log entry : DubboAppender.logList) {
        if (entry.getLogLevel().equals(expectedLevel)) {
            matches++;
        }
    }
    return matches;
}
|
@Test
void testFindLevel() {
// One ERROR entry in the appender: ERROR counts 1, any other level counts 0.
Log log = mock(Log.class);
DubboAppender.logList.add(log);
when(log.getLogLevel()).thenReturn(Level.ERROR);
assertThat(LogUtil.findLevel(Level.ERROR), equalTo(1));
assertThat(LogUtil.findLevel(Level.INFO), equalTo(0));
}
|
/**
 * Opens the socket connection to the Graphite endpoint and prepares a buffered
 * writer over it using the configured charset.
 *
 * @throws IllegalStateException if already connected
 * @throws UnknownHostException if the hostname cannot be resolved
 * @throws IOException on socket creation failure
 */
@Override
public void connect() throws IllegalStateException, IOException {
if (isConnected()) {
throw new IllegalStateException("Already connected");
}
// Copy the field to a local so the pre-resolved address (if any) is read once.
InetSocketAddress address = this.address;
if (address == null) {
address = new InetSocketAddress(hostname, port);
}
// A null InetAddress means resolution failed for this host.
if (address.getAddress() == null) {
throw new UnknownHostException(address.getHostName());
}
this.socket = socketFactory.createSocket(address.getAddress(), address.getPort());
this.writer = new BufferedWriter(new OutputStreamWriter(socket.getOutputStream(), charset));
}
|
@Test
public void doesNotAllowDoubleConnections() throws Exception {
graphite.connect();
// A second connect on an already-connected client must fail fast.
try {
graphite.connect();
failBecauseExceptionWasNotThrown(IllegalStateException.class);
} catch (IllegalStateException e) {
assertThat(e.getMessage())
.isEqualTo("Already connected");
}
}
|
/** Static factory for the AutoValue-generated builder of {@code HttpHeaders}. */
public static Builder builder() {
return new AutoValue_HttpHeaders.Builder();
}
|
@Test
public void builderAddHeader_withNullValue_throwsNullPointerException() {
// Header values are null-hostile; a null value must be rejected immediately.
assertThrows(
NullPointerException.class, () -> HttpHeaders.builder().addHeader("test_header", null));
}
|
/**
 * Returns this domain minus the other: the value sets are subtracted, and null
 * remains allowed only if this domain allowed it and the other did not (i.e.
 * the other did not subtract null away).
 *
 * @param other the domain to subtract; must be compatible with this one
 */
public Domain subtract(Domain other)
{
checkCompatibility(other);
return new Domain(values.subtract(other.getValues()), this.isNullAllowed() && !other.isNullAllowed());
}
|
// Exhaustively checks Domain.subtract over the five canonical BIGINT domains
// (all, none, notNull, onlyNull, singleValue) in every pairing, plus two
// mixed range/null cases at the end.
@Test
public void testSubtract()
{
    assertEquals(
            Domain.all(BIGINT).subtract(Domain.all(BIGINT)),
            Domain.none(BIGINT));
    assertEquals(
            Domain.all(BIGINT).subtract(Domain.none(BIGINT)),
            Domain.all(BIGINT));
    assertEquals(
            Domain.all(BIGINT).subtract(Domain.notNull(BIGINT)),
            Domain.onlyNull(BIGINT));
    assertEquals(
            Domain.all(BIGINT).subtract(Domain.onlyNull(BIGINT)),
            Domain.notNull(BIGINT));
    assertEquals(
            Domain.all(BIGINT).subtract(Domain.singleValue(BIGINT, 0L)),
            Domain.create(ValueSet.ofRanges(Range.lessThan(BIGINT, 0L), Range.greaterThan(BIGINT, 0L)), true));
    assertEquals(
            Domain.none(BIGINT).subtract(Domain.all(BIGINT)),
            Domain.none(BIGINT));
    assertEquals(
            Domain.none(BIGINT).subtract(Domain.none(BIGINT)),
            Domain.none(BIGINT));
    assertEquals(
            Domain.none(BIGINT).subtract(Domain.notNull(BIGINT)),
            Domain.none(BIGINT));
    assertEquals(
            Domain.none(BIGINT).subtract(Domain.onlyNull(BIGINT)),
            Domain.none(BIGINT));
    assertEquals(
            Domain.none(BIGINT).subtract(Domain.singleValue(BIGINT, 0L)),
            Domain.none(BIGINT));
    assertEquals(
            Domain.notNull(BIGINT).subtract(Domain.all(BIGINT)),
            Domain.none(BIGINT));
    assertEquals(
            Domain.notNull(BIGINT).subtract(Domain.none(BIGINT)),
            Domain.notNull(BIGINT));
    assertEquals(
            Domain.notNull(BIGINT).subtract(Domain.notNull(BIGINT)),
            Domain.none(BIGINT));
    assertEquals(
            Domain.notNull(BIGINT).subtract(Domain.onlyNull(BIGINT)),
            Domain.notNull(BIGINT));
    assertEquals(
            Domain.notNull(BIGINT).subtract(Domain.singleValue(BIGINT, 0L)),
            Domain.create(ValueSet.ofRanges(Range.lessThan(BIGINT, 0L), Range.greaterThan(BIGINT, 0L)), false));
    assertEquals(
            Domain.onlyNull(BIGINT).subtract(Domain.all(BIGINT)),
            Domain.none(BIGINT));
    assertEquals(
            Domain.onlyNull(BIGINT).subtract(Domain.none(BIGINT)),
            Domain.onlyNull(BIGINT));
    assertEquals(
            Domain.onlyNull(BIGINT).subtract(Domain.notNull(BIGINT)),
            Domain.onlyNull(BIGINT));
    assertEquals(
            Domain.onlyNull(BIGINT).subtract(Domain.onlyNull(BIGINT)),
            Domain.none(BIGINT));
    assertEquals(
            Domain.onlyNull(BIGINT).subtract(Domain.singleValue(BIGINT, 0L)),
            Domain.onlyNull(BIGINT));
    assertEquals(
            Domain.singleValue(BIGINT, 0L).subtract(Domain.all(BIGINT)),
            Domain.none(BIGINT));
    assertEquals(
            Domain.singleValue(BIGINT, 0L).subtract(Domain.none(BIGINT)),
            Domain.singleValue(BIGINT, 0L));
    assertEquals(
            Domain.singleValue(BIGINT, 0L).subtract(Domain.notNull(BIGINT)),
            Domain.none(BIGINT));
    assertEquals(
            Domain.singleValue(BIGINT, 0L).subtract(Domain.onlyNull(BIGINT)),
            Domain.singleValue(BIGINT, 0L));
    assertEquals(
            Domain.singleValue(BIGINT, 0L).subtract(Domain.singleValue(BIGINT, 0L)),
            Domain.none(BIGINT));
    assertEquals(
            Domain.create(ValueSet.ofRanges(Range.equal(BIGINT, 1L)), true).subtract(Domain.create(ValueSet.ofRanges(Range.equal(BIGINT, 2L)), true)),
            Domain.singleValue(BIGINT, 1L));
    assertEquals(
            Domain.create(ValueSet.ofRanges(Range.equal(BIGINT, 1L)), true).subtract(Domain.create(ValueSet.ofRanges(Range.equal(BIGINT, 1L), Range.equal(BIGINT, 2L)), false)),
            Domain.onlyNull(BIGINT));
}
|
/**
 * Starts a bulk change for the given {@code Saveable}, suppressing
 * intermediate saves until the change is committed/closed.
 * <p>
 * Side effects at construction: the previous in-scope change is captured as
 * the parent, and this instance becomes the thread's current in-scope change.
 */
public BulkChange(Saveable saveable) {
    this.parent = current();
    this.saveable = saveable;
    // remember who allocated this object in case
    // someone forgot to call save() at the end.
    allocator = new Exception();
    // in effect at construction
    INSCOPE.set(this);
}
|
// While a BulkChange is open, pt.set() should not trigger a save; the single
// save happens when the change is committed — hence saveCount == 1.
@Test
public void bulkChange() throws Exception {
    Point pt = new Point();
    BulkChange bc = new BulkChange(pt);
    try {
        pt.set(0, 0);
    } finally {
        bc.commit();
    }
    assertEquals(1, pt.saveCount);
}
|
/**
 * Rewrites each assignment so that remote function calls are split into
 * separate projection stages, then merges and de-duplicates the per-assignment
 * stage lists into a single pipeline of {@code ProjectionContext}s.
 *
 * @param assignments       the output-variable-to-expression assignments to plan
 * @param variableAllocator allocator used by the rewrite visitor for intermediate variables
 * @return merged, de-duplicated projection stages covering all assignments
 */
@VisibleForTesting
public List<ProjectionContext> planRemoteAssignments(Assignments assignments, VariableAllocator variableAllocator)
{
    ImmutableList.Builder<List<ProjectionContext>> assignmentProjections = ImmutableList.builder();
    for (Map.Entry<VariableReferenceExpression, RowExpression> entry : assignments.getMap().entrySet()) {
        List<ProjectionContext> rewritten = entry.getValue().accept(new Visitor(functionAndTypeManager, variableAllocator), null);
        if (rewritten.isEmpty()) {
            // No remote calls inside: keep the original expression as one local projection.
            assignmentProjections.add(ImmutableList.of(new ProjectionContext(ImmutableMap.of(entry.getKey(), entry.getValue()), false)));
        }
        else {
            checkState(rewritten.get(rewritten.size() - 1).getProjections().size() == 1, "Expect at most 1 assignment from last projection in rewrite");
            // Re-key the final stage's single projection under the assignment's output variable.
            ProjectionContext last = rewritten.get(rewritten.size() - 1);
            ImmutableList.Builder<ProjectionContext> projectionContextBuilder = ImmutableList.builder();
            projectionContextBuilder.addAll(rewritten.subList(0, rewritten.size() - 1));
            projectionContextBuilder.add(new ProjectionContext(ImmutableMap.of(entry.getKey(), getOnlyElement(last.getProjections().values())), last.isRemote()));
            assignmentProjections.add(projectionContextBuilder.build());
        }
    }
    List<ProjectionContext> mergedProjectionContexts = mergeProjectionContexts(assignmentProjections.build());
    return dedupVariables(mergedProjectionContexts);
}
|
// Plans five assignments mixing remote calls (remote_foo) with local special
// forms (OR, IF, literals) and checks the pipeline collapses to 4 stages with
// all 5 outputs produced by the final stage.
@Test
void testSpecialForm()
{
    PlanBuilder planBuilder = new PlanBuilder(TEST_SESSION, new PlanNodeIdAllocator(), getMetadata());
    planBuilder.variable("x", INTEGER);
    planBuilder.variable("y", INTEGER);
    PlanRemoteProjections rule = new PlanRemoteProjections(getFunctionAndTypeManager());
    List<ProjectionContext> rewritten = rule.planRemoteAssignments(Assignments.builder()
            .put(planBuilder.variable("a"), planBuilder.rowExpression("unittest.memory.remote_foo(x, y + unittest.memory.remote_foo(x))"))
            .put(planBuilder.variable("b"), planBuilder.rowExpression("x IS NULL OR y IS NULL"))
            .put(planBuilder.variable("c"), planBuilder.rowExpression("IF(abs(unittest.memory.remote_foo()) > 0, x, y)"))
            .put(planBuilder.variable("d"), planBuilder.rowExpression("unittest.memory.remote_foo(x + y, abs(x))"))
            .put(planBuilder.variable("e"), planBuilder.rowExpression("TRUE OR FALSE"))
            .build(), new VariableAllocator(planBuilder.getTypes().allVariables()));
    assertEquals(rewritten.size(), 4);
    assertEquals(rewritten.get(3).getProjections().size(), 5);
}
|
/**
 * Renders the given AST node back into SQL text, with any trailing newlines
 * stripped from the result.
 */
public static String formatSql(final AstNode root) {
    final StringBuilder sql = new StringBuilder();
    final Formatter formatter = new Formatter(sql);
    formatter.process(root, 0);
    return StringUtils.stripEnd(sql.toString(), "\n");
}
|
// Backtick-quoted aliases must keep their original (lower) case in the
// formatted SQL, while unquoted identifiers are upper-cased.
@Test
public void shouldFormatSelectWithLowerCaseAlias() {
    final String statementString = "CREATE STREAM S AS SELECT address AS `foO` FROM address;";
    final Statement statement = parseSingle(statementString);
    assertThat(SqlFormatter.formatSql(statement),
        equalTo("CREATE STREAM S AS SELECT"
            + " ADDRESS `foO`\n"
            + "FROM ADDRESS ADDRESS\nEMIT CHANGES"));
}
|
/**
 * Advances the skeleton one step in its current patrol direction, reversing
 * direction whenever a patrol boundary is reached, then logs the new position.
 */
@Override
public void update() {
    if (patrollingLeft) {
        position -= 1;
        // Keep heading left until the left boundary is hit.
        patrollingLeft = position != PATROLLING_LEFT_BOUNDING;
    } else {
        position += 1;
        if (position == PATROLLING_RIGHT_BOUNDING) {
            patrollingLeft = true;
        }
    }
    logger.info("Skeleton {} is on position {}.", id, position);
}
|
// Starting one step short of the right boundary while moving right, a single
// update should land on the boundary and flip the direction to left.
@Test
void testUpdateForReverseDirectionFromRightToLeft() {
    skeleton.patrollingLeft = false;
    skeleton.setPosition(99);
    skeleton.update();
    assertEquals(100, skeleton.getPosition());
    assertTrue(skeleton.patrollingLeft);
}
|
/**
 * Adds every element of {@code c} to this collection.
 *
 * @return {@code true} if the collection changed as a result of the call, per
 *         the {@link java.util.Collection#addAll} contract. The previous
 *         implementation unconditionally returned {@code true} (even for an
 *         empty argument) and ignored the result of each {@code add}.
 */
@Override
public boolean addAll(Collection<? extends E> c) {
    boolean modified = false;
    for (E e : c) {
        // Collection.add reports whether the element was actually inserted.
        modified |= add(e);
    }
    return modified;
}
|
// addAll should insert each element of the argument; both values must be
// present afterwards and the size must reflect the additions.
@Test
public void testAddAll() {
    queue.addAll(asList(23, 42));
    assertEquals(2, queue.size());
    assertContains(queue, 23);
    assertContains(queue, 42);
}
|
/**
 * Initializes the client SSL key store on disk from Base64-encoded key/cert
 * material found in the consumer properties.
 * <p>
 * Behavior: any existing key store at the configured path is deleted and
 * recreated; if creation races with another writer (FileAlreadyExistsException)
 * the method logs a warning and returns without touching the store. The
 * private key and certificate are decoded from Base64 properties, combined
 * into a single PrivateKeyEntry under the default client alias, and the
 * resulting store is written to the path protected by the configured
 * passwords. Side effect: SSL_KEYSTORE_TYPE is written back into
 * {@code consumerProps} (defaulted if absent).
 *
 * @throws RuntimeException wrapping any I/O, decoding, or keystore failure
 */
@VisibleForTesting
static void initKeyStore(Properties consumerProps) {
    Path keyStorePath = getKeyStorePath(consumerProps);
    if (Files.exists(keyStorePath)) {
        deleteFile(keyStorePath);
    }
    LOGGER.info("Initializing the SSL key store");
    try {
        // Create the key store path
        createFile(keyStorePath);
    } catch (FileAlreadyExistsException fex) {
        LOGGER.warn("SSL key store initialization failed as key store already exists.");
        return;
    } catch (IOException iex) {
        throw new RuntimeException(String.format("Failed to create the key store path: %s", keyStorePath), iex);
    }
    String keyStorePassword = consumerProps.getProperty(SSL_KEYSTORE_PASSWORD);
    String keyPassword = consumerProps.getProperty(SSL_KEY_PASSWORD);
    String clientCertificate = consumerProps.getProperty(STREAM_KAFKA_SSL_CLIENT_CERTIFICATE);
    String certificateType = consumerProps.getProperty(STREAM_KAFKA_SSL_CERTIFICATE_TYPE, DEFAULT_CERTIFICATE_TYPE);
    String privateKeyString = consumerProps.getProperty(STREAM_KAFKA_SSL_CLIENT_KEY);
    String privateKeyAlgorithm = consumerProps.getProperty(STREAM_KAFKA_SSL_CLIENT_KEY_ALGORITHM,
        DEFAULT_KEY_ALGORITHM);
    String keyStoreType = consumerProps.getProperty(SSL_KEYSTORE_TYPE, DEFAULT_KEYSTORE_TYPE);
    // Persist the resolved (possibly defaulted) store type back into the props.
    consumerProps.setProperty(SSL_KEYSTORE_TYPE, keyStoreType);
    try {
        // decode the private key and certificate into bytes
        byte[] pkBytes = Base64.getDecoder().decode(privateKeyString);
        byte[] certBytes = Base64.getDecoder().decode(clientCertificate);
        // Create the private key object
        PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(pkBytes);
        KeyFactory keyFactory = KeyFactory.getInstance(privateKeyAlgorithm);
        PrivateKey privateKey = keyFactory.generatePrivate(keySpec);
        // Create the Certificate object
        CertificateFactory certFactory = CertificateFactory.getInstance(certificateType);
        InputStream certInputStream = new ByteArrayInputStream(certBytes);
        Certificate certificate = certFactory.generateCertificate(certInputStream);
        // Create a KeyStore object and load a new empty keystore
        KeyStore keyStore = KeyStore.getInstance(keyStoreType);
        keyStore.load(null, null);
        // Add the key pair and certificate to the keystore
        KeyStore.PrivateKeyEntry privateKeyEntry = new KeyStore.PrivateKeyEntry(
            privateKey, new Certificate[]{certificate}
        );
        KeyStore.PasswordProtection keyPasswordProtection = new KeyStore.PasswordProtection(keyPassword.toCharArray());
        keyStore.setEntry(DEFAULT_CLIENT_ALIAS, privateKeyEntry, keyPasswordProtection);
        // Save the keystore to the specified location
        try (FileOutputStream fos = new FileOutputStream(keyStorePath.toString())) {
            keyStore.store(fos, keyStorePassword.toCharArray());
        }
        LOGGER.info("Initialized the SSL key store.");
    } catch (Exception ex) {
        throw new RuntimeException("Error initializing the SSL key store", ex);
    }
}
|
// Smoke test: with valid key-store properties, initKeyStore must complete
// without exceptions and produce a store holding exactly one certificate.
@Test
public void testInitKeyStore()
    throws CertificateException, NoSuchAlgorithmException, OperatorCreationException, NoSuchProviderException,
           IOException, KeyStoreException {
    Properties consumerProps = new Properties();
    setKeyStoreProps(consumerProps);
    // should not throw any exceptions
    KafkaSSLUtils.initKeyStore(consumerProps);
    validateKeyStoreCertificateCount(1);
}
|
/**
 * Returns the named setting parsed as an {@code int}.
 *
 * @param key the settings key to look up
 * @return the parsed integer value
 * @throws InvalidSettingException if the value cannot be parsed as an int
 */
public int getInt(@NotNull final String key) throws InvalidSettingException {
    final String value = getString(key);
    try {
        return Integer.parseInt(value);
    } catch (NumberFormatException ex) {
        throw new InvalidSettingException("Could not convert property '" + key + "' to an int.", ex);
    }
}
|
// Round-trip: a setting stored as the string "85" parses back to the int 85.
@Test
public void testGetInt() throws InvalidSettingException {
    String key = "SomeNumber";
    int expResult = 85;
    getSettings().setString(key, "85");
    int result = getSettings().getInt(key);
    Assert.assertEquals(expResult, result);
}
|
/**
 * Aggregates cluster health from the application nodes only: each sub-check is
 * evaluated against the application-node subset and the results are merged,
 * starting from GREEN.
 */
@Override
public Health check(Set<NodeHealth> nodeHealths) {
    // Sub-checks only consider application nodes; filter once up front.
    Set<NodeHealth> applicationNodes = nodeHealths.stream()
        .filter(nodeHealth -> nodeHealth.getDetails().getType() == NodeDetails.Type.APPLICATION)
        .collect(Collectors.toSet());
    Health aggregate = Health.GREEN;
    for (AppNodeClusterHealthSubChecks subCheck : AppNodeClusterHealthSubChecks.values()) {
        aggregate = HealthReducer.merge(aggregate, subCheck.check(applicationNodes));
    }
    return aggregate;
}
|
// With at least two GREEN application nodes present, the overall status is
// GREEN regardless of how many additional GREEN nodes are mixed in.
@Test
public void status_GREEN_when_two_GREEN_application_node_and_any_number_of_other_is_GREEN() {
    Set<NodeHealth> nodeHealths = of(
        // at least 1 extra GREEN
        of(appNodeHealth(GREEN)),
        // 0 to 10 GREEN
        randomNumberOfAppNodeHealthOfAnyStatus(GREEN),
        // 2 GREEN
        nodeHealths(GREEN, GREEN))
        .flatMap(s -> s)
        .collect(toSet());
    Health check = underTest.check(nodeHealths);
    assertThat(check)
        .forInput(nodeHealths)
        .hasStatus(Health.Status.GREEN)
        .andCauses();
}
|
/**
 * Adds a new method named {@code methodName} to {@code tableTemplate}, copying
 * the body, modifiers, parameters, and return type from {@code methodTemplate}.
 *
 * @return the newly added method declaration
 */
public static MethodDeclaration addMethod(final MethodDeclaration methodTemplate,
                                          final ClassOrInterfaceDeclaration tableTemplate,
                                          final String methodName) {
    // A template without a body is a programming error in the template itself.
    final BlockStmt templateBody = methodTemplate.getBody()
            .orElseThrow(() -> new KiePMMLInternalException(String.format(MISSING_BODY_TEMPLATE, methodTemplate.getName())));
    final MethodDeclaration created = tableTemplate.addMethod(methodName).setBody(templateBody);
    created.setModifiers(methodTemplate.getModifiers());
    methodTemplate.getParameters().forEach(created::addParameter);
    created.setType(methodTemplate.getType());
    return created;
}
|
// Adding a method from a template must create exactly one method with the
// requested name on the target class, carrying the template's body.
@Test
void addMethod() {
    final MethodDeclaration methodTemplate = new MethodDeclaration();
    methodTemplate.setName("methodTemplate");
    final BlockStmt body = new BlockStmt();
    methodTemplate.setBody(body);
    final String methodName = "METHOD_NAME";
    final ClassOrInterfaceDeclaration classOrInterfaceDeclaration = new ClassOrInterfaceDeclaration();
    assertThat(classOrInterfaceDeclaration.getMethodsByName(methodName)).isEmpty();
    CommonCodegenUtils.addMethod(methodTemplate, classOrInterfaceDeclaration, methodName);
    assertThat(classOrInterfaceDeclaration.getMethodsByName(methodName)).hasSize(1);
    assertThat(classOrInterfaceDeclaration.getMethodsByName(methodName).get(0).getBody().get()).isEqualTo(body);
}
|
/**
 * Ensures the given email is not already taken by a different user.
 * Blank emails are skipped. On create ({@code id == null}) any existing user
 * with the email is a conflict; on update, only a match belonging to another
 * user id is a conflict.
 */
@VisibleForTesting
void validateEmailUnique(Long id, String email) {
    if (StrUtil.isBlank(email)) {
        return;
    }
    AdminUserDO existing = userMapper.selectByEmail(email);
    if (existing == null) {
        return;
    }
    // When id is null this is a create, so any match conflicts; otherwise only
    // a match owned by a different user conflicts.
    if (id == null || !existing.getId().equals(id)) {
        throw exception(USER_EMAIL_EXISTS);
    }
}
|
// Create path (id == null): when a user with the email already exists, the
// check must raise USER_EMAIL_EXISTS.
@Test
public void testValidateEmailUnique_emailExistsForCreate() {
    // Prepare parameters
    String email = randomString();
    // Mock data: an existing user already owns this email
    userMapper.insert(randomAdminUserDO(o -> o.setEmail(email)));
    // Invoke and verify the expected service exception
    assertServiceException(() -> userService.validateEmailUnique(null, email),
        USER_EMAIL_EXISTS);
}
|
/**
 * Validates, converts, and (if accepted) records a raw issue for the scanner report.
 *
 * @return {@code true} only when the issue passed the NOSONAR check, matched an
 *         active rule, passed the issue filters, and was written to the report;
 *         {@code false} in every other case (the issue is silently dropped)
 */
public boolean initAndAddIssue(Issue issue) {
    DefaultInputComponent inputComponent = (DefaultInputComponent) issue.primaryLocation().inputComponent();
    // Lines flagged with NOSONAR suppress issues (except for NOSONAR-related rules).
    if (noSonar(inputComponent, issue)) {
        return false;
    }
    ActiveRule activeRule = activeRules.find(issue.ruleKey());
    if (activeRule == null) {
        // rule does not exist or is not enabled -> ignore the issue
        return false;
    }
    ScannerReport.Issue rawIssue = createReportIssue(issue, inputComponent.scannerId(), activeRule.severity());
    if (filters.accept(inputComponent, rawIssue)) {
        write(inputComponent.scannerId(), rawIssue);
        return true;
    }
    return false;
}
|
// Issues raised BY the "No Sonar" rule itself must not be suppressed by a
// NOSONAR marker on the same line — they should still be written to the report.
@Test
public void should_accept_issues_on_no_sonar_rules() {
    // The "No Sonar" rule logs violations on the lines that are flagged with "NOSONAR" !!
    activeRulesBuilder.addRule(new NewActiveRule.Builder()
        .setRuleKey(NOSONAR_RULE_KEY)
        .setSeverity(Severity.INFO)
        .setQProfileKey("qp-1")
        .build());
    initModuleIssues();
    file.noSonarAt(new HashSet<>(Collections.singletonList(3)));
    DefaultIssue issue = new DefaultIssue(project)
        .at(new DefaultIssueLocation().on(file).at(file.selectLine(3)).message(""))
        .forRule(NOSONAR_RULE_KEY);
    when(filters.accept(any(InputComponent.class), any(ScannerReport.Issue.class))).thenReturn(true);
    boolean added = moduleIssues.initAndAddIssue(issue);
    assertThat(added).isTrue();
    verify(reportPublisher.getWriter()).appendComponentIssue(eq(file.scannerId()), any());
}
|
/**
 * Converts an Avro {@code SpecificRecord} class into a Flink Row
 * {@code TypeInformation} by delegating to the two-argument overload with the
 * boolean fixed to {@code true}.
 * <p>
 * NOTE(review): the meaning of the boolean flag is defined by the overload,
 * which is not visible here — presumably it selects the (legacy) timestamp
 * mapping; confirm at the delegate before relying on this.
 */
@SuppressWarnings("unchecked")
public static <T extends SpecificRecord> TypeInformation<Row> convertToTypeInfo(
        Class<T> avroClass) {
    return convertToTypeInfo(avroClass, true);
}
|
// Converts a timestamp-bearing Avro schema string with the flag set to false
// (the "new mapping" path) and validates the resulting type information.
@Test
void testTimestampsSchemaToTypeInfoNewMapping() {
    final Tuple4<Class<? extends SpecificRecord>, SpecificRecord, GenericRecord, Row> testData =
            AvroTestUtils.getTimestampTestData();
    String schemaStr = testData.f1.getSchema().toString();
    TypeInformation<Row> typeInfo = AvroSchemaConverter.convertToTypeInfo(schemaStr, false);
    validateTimestampsSchema(typeInfo);
}
|
/**
 * Instantiates a {@code KeyProvider} for every URI listed under
 * {@code KEY_PROVIDER_PATH} in the configuration.
 *
 * @return the providers, in configuration order
 * @throws IOException if a configured path is not a valid URI, or if no
 *                     factory produces a provider for it
 */
public static List<KeyProvider> getProviders(Configuration conf
                                             ) throws IOException {
    List<KeyProvider> providers = new ArrayList<KeyProvider>();
    for (String path : conf.getStringCollection(KEY_PROVIDER_PATH)) {
        final URI uri;
        try {
            uri = new URI(path);
        } catch (URISyntaxException error) {
            throw new IOException("Bad configuration of " + KEY_PROVIDER_PATH +
                " at " + path, error);
        }
        KeyProvider provider = get(uri, conf);
        if (provider == null) {
            throw new IOException("No KeyProviderFactory for " + uri + " in " +
                KEY_PROVIDER_PATH);
        }
        providers.add(provider);
    }
    return providers;
}
|
// An unknown provider scheme must produce an IOException with the exact
// "No KeyProviderFactory for ..." message rather than returning silently.
@Test
public void testFactoryErrors() throws Exception {
    Configuration conf = new Configuration();
    conf.set(KeyProviderFactory.KEY_PROVIDER_PATH, "unknown:///");
    try {
        List<KeyProvider> providers = KeyProviderFactory.getProviders(conf);
        assertTrue("should throw!", false);
    } catch (IOException e) {
        assertEquals("No KeyProviderFactory for unknown:/// in " +
                KeyProviderFactory.KEY_PROVIDER_PATH,
            e.getMessage());
    }
}
|
/**
 * Sets the eviction configuration for this query cache.
 *
 * @param evictionConfig the eviction configuration; must not be {@code null}
 * @return this config instance, for fluent chaining
 * @throws NullPointerException if {@code evictionConfig} is {@code null}
 */
public QueryCacheConfig setEvictionConfig(EvictionConfig evictionConfig) {
    checkNotNull(evictionConfig, "evictionConfig cannot be null");
    this.evictionConfig = evictionConfig;
    return this;
}
|
// Passing null must be rejected with NullPointerException (null-hostile setter).
@Test(expected = NullPointerException.class)
public void testSetEvictionConfig_throwsException_whenNull() {
    QueryCacheConfig config = new QueryCacheConfig();
    config.setEvictionConfig(null);
}
|
/**
 * FEEL remove() function: returns a new list with the element at the 1-based
 * {@code position} removed. Negative positions count from the end (-1 is the
 * last element). Errors are reported for null arguments, position zero, and
 * positions whose magnitude exceeds the list size.
 */
public FEELFnResult<List<Object>> invoke(@ParameterName( "list" ) List list, @ParameterName( "position" ) BigDecimal position) {
    if ( list == null ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null"));
    }
    if ( position == null ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "position", "cannot be null"));
    }
    if ( position.intValue() == 0 ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "position", "cannot be zero (parameter 'position' is 1-based)"));
    }
    if ( position.abs().intValue() > list.size() ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "position", "inconsistent with 'list' size"));
    }
    // spec requires us to return a new list
    List<Object> result = new ArrayList<>( list );
    if( position.intValue() > 0 ) {
        // 1-based from the front -> 0-based index
        result.remove( position.intValue()-1 );
    } else {
        // negative: -1 maps to the last element, -size to the first
        result.remove( list.size()+position.intValue() );
    }
    return FEELFnResult.ofResult( result );
}
|
// Position 0 is invalid (positions are 1-based) and must yield a parameter error.
@Test
void invokePositionZero() {
    FunctionTestUtil.assertResultError(removeFunction.invoke(Collections.singletonList(1), BigDecimal.ZERO),
        InvalidParametersEvent.class);
}
|
/**
 * Removes a linked device from the account, serialized under the account's
 * phone-number lock and executed on the lock executor.
 *
 * @return a future completing with the updated account
 * @throws IllegalArgumentException immediately if asked to remove the primary device
 */
public CompletableFuture<Account> removeDevice(final Account account, final byte deviceId) {
    if (deviceId == Device.PRIMARY_ID) {
        throw new IllegalArgumentException("Cannot remove primary device");
    }
    return accountLockManager.withLockAsync(List.of(account.getNumber()),
        () -> removeDevice(account.getIdentifier(IdentityType.ACI), deviceId, MAX_UPDATE_ATTEMPTS),
        accountLockExecutor);
}
|
// Removing a linked device must drop it from the account and trigger the full
// cleanup fan-out: message/key deletion, key/public-key write items, and a
// presence disconnect for that device.
@Test
void testRemoveDevice() {
    final Device primaryDevice = new Device();
    primaryDevice.setId(Device.PRIMARY_ID);
    final Device linkedDevice = new Device();
    linkedDevice.setId((byte) (Device.PRIMARY_ID + 1));
    Account account = AccountsHelper.generateTestAccount("+14152222222", List.of(primaryDevice, linkedDevice));
    when(accounts.getByAccountIdentifierAsync(account.getUuid()))
        .thenReturn(CompletableFuture.completedFuture(Optional.of(account)));
    when(keysManager.deleteSingleUsePreKeys(any(), anyByte())).thenReturn(CompletableFuture.completedFuture(null));
    when(messagesManager.clear(any(), anyByte())).thenReturn(CompletableFuture.completedFuture(null));
    assertTrue(account.getDevice(linkedDevice.getId()).isPresent());
    account = accountsManager.removeDevice(account, linkedDevice.getId()).join();
    assertFalse(account.getDevice(linkedDevice.getId()).isPresent());
    verify(messagesManager, times(2)).clear(account.getUuid(), linkedDevice.getId());
    verify(keysManager, times(2)).deleteSingleUsePreKeys(account.getUuid(), linkedDevice.getId());
    verify(keysManager).buildWriteItemsForRemovedDevice(account.getUuid(), account.getPhoneNumberIdentifier(), linkedDevice.getId());
    verify(clientPublicKeysManager).buildTransactWriteItemForDeletion(account.getUuid(), linkedDevice.getId());
    verify(clientPresenceManager).disconnectPresence(account.getUuid(), linkedDevice.getId());
}
|
/**
 * Reports whether cert-based support is available: requires the cert manager
 * to exist and be connected. The {@code address} argument is not consulted.
 */
@Override
public boolean isSupport(URL address) {
    return dubboCertManager != null && dubboCertManager.isConnected();
}
|
// Simulates an environment where the gRPC Channel classes cannot be loaded
// (class loader throws for io.grpc.Channel*): the provider must still be
// constructible and report isSupport == false instead of failing.
@Test
void testEnable1() {
    ClassLoader originClassLoader = Thread.currentThread().getContextClassLoader();
    ClassLoader newClassLoader = new ClassLoader(originClassLoader) {
        @Override
        public Class<?> loadClass(String name) throws ClassNotFoundException {
            if (name.startsWith("io.grpc.Channel")) {
                throw new ClassNotFoundException("Test");
            }
            return super.loadClass(name);
        }
    };
    Thread.currentThread().setContextClassLoader(newClassLoader);
    try (MockedConstruction<DubboCertManager> construction =
             Mockito.mockConstruction(DubboCertManager.class, (mock, context) -> {
                 // ignore
             })) {
        FrameworkModel frameworkModel = new FrameworkModel();
        DubboCertProvider provider = new DubboCertProvider(frameworkModel);
        Assertions.assertFalse(provider.isSupport(null));
        frameworkModel.destroy();
    }
    // Always restore the original context class loader.
    Thread.currentThread().setContextClassLoader(originClassLoader);
}
|
/**
 * Generates the OpenAPI documentation for the given REST endpoint/version and
 * writes it as pretty-printed YAML (UTF-8) to {@code outputFile}, replacing
 * any existing file. The spec is built before the old file is deleted, so a
 * generation failure leaves the previous file intact.
 */
@VisibleForTesting
static void createDocumentationFile(
        String title,
        DocumentingRestEndpoint restEndpoint,
        RestAPIVersion apiVersion,
        Path outputFile)
        throws IOException {
    final OpenAPI openApi = createDocumentation(title, restEndpoint, apiVersion);
    Files.deleteIfExists(outputFile);
    Files.write(outputFile, Yaml.pretty(openApi).getBytes(StandardCharsets.UTF_8));
}
|
// Headers marked as excluded must not appear anywhere in the generated YAML,
// while ordinary headers (paths and descriptions) must.
@Test
void testExcludeFromDocumentation(@TempDir Path tmpDir) throws Exception {
    final Path file = tmpDir.resolve("openapi_spec.yaml");
    OpenApiSpecGenerator.createDocumentationFile(
            "title",
            DocumentingRestEndpoint.forRestHandlerSpecifications(
                    new TestEmptyMessageHeaders("/test/empty1", "This is a testing REST API."),
                    new TestEmptyMessageHeaders(
                            "/test/empty2", "This is another testing REST API."),
                    new TestExcludeMessageHeaders(
                            "/test/exclude1",
                            "This REST API should not appear in the generated documentation."),
                    new TestExcludeMessageHeaders(
                            "/test/exclude2",
                            "This REST API should also not appear in the generated documentation.")),
            RuntimeRestAPIVersion.V0,
            file);
    final String actual = new String(Files.readAllBytes(file), StandardCharsets.UTF_8);
    assertThat(actual).contains("/test/empty1");
    assertThat(actual).contains("This is a testing REST API.");
    assertThat(actual).contains("/test/empty2");
    assertThat(actual).contains("This is another testing REST API.");
    assertThat(actual).doesNotContain("/test/exclude1");
    assertThat(actual)
            .doesNotContain("This REST API should not appear in the generated documentation.");
    assertThat(actual).doesNotContain("/test/exclude2");
    assertThat(actual)
            .doesNotContain(
                    "This REST API should also not appear in the generated documentation.");
}
|
/**
 * Builds the message headers for a "BRPAfnemersberichtAanDGL" message: the
 * basic header map plus the fixed action/activity/process headers, overlaid
 * with the caller-supplied headers (which win on key collisions).
 *
 * @param additionalHeaders extra headers; validated before use
 */
public static MessageHeaders createAfnemersberichtAanDGLHeaders(Map<String, Object> additionalHeaders) {
    validateHeaders(additionalHeaders);
    Map<String, Object> merged = createBasicHeaderMap();
    merged.put(nl.logius.digid.digilevering.lib.model.Headers.X_AUX_ACTION, "BRPAfnemersberichtAanDGL");
    merged.put(nl.logius.digid.digilevering.lib.model.Headers.X_AUX_ACTIVITY, "dgl:objecten:1.0");
    merged.put(nl.logius.digid.digilevering.lib.model.Headers.X_AUX_PROCESS_TYPE, "dgl:objecten:1.0");
    merged.put(nl.logius.digid.digilevering.lib.model.Headers.X_AUX_PROCESS_VERSION, "1.0");
    // Caller-supplied headers override the defaults above.
    merged.putAll(additionalHeaders);
    return new MessageHeaders(merged);
}
|
// Validation must reject a header map that carries a sender id but no
// receiver id — the receiver header is mandatory.
@Test
public void testReceiverHeaderPresent() {
    Map<String, Object> map = new HashMap<>();
    map.put(Headers.X_AUX_SENDER_ID, "senderId");
    assertThrows(IllegalArgumentException.class, () -> HeaderUtil.createAfnemersberichtAanDGLHeaders(map), "x_aux_receiver_id receiver header is mandatory");
}
|
/**
 * Returns a new {@code Builder} pre-populated with this context's flags and
 * both halves of the 128-bit trace ID, for building modified copies.
 */
public Builder toBuilder() {
    Builder result = new Builder();
    result.flags = flags;
    result.traceIdHigh = traceIdHigh;
    result.traceId = traceId;
    return result;
}
|
// Builder's primitive boolean overloads (sampled/debug) must behave the same
// as the boxed Boolean overloads when passing false.
@Test void canUsePrimitiveOverloads_false() {
    base = base.toBuilder().debug(true).build();
    TraceIdContext primitives = base.toBuilder()
        .sampled(false)
        .debug(false)
        .build();
    TraceIdContext objects = base.toBuilder()
        .sampled(Boolean.FALSE)
        .debug(Boolean.FALSE)
        .build();
    assertThat(primitives)
        .isEqualToComparingFieldByField(objects);
    assertThat(primitives.debug())
        .isFalse();
    assertThat(primitives.sampled())
        .isFalse();
}
|
/**
 * Parses the named property as an int, using 0 as the default value
 * (delegates to {@code toInt(String, int)}; per the accompanying test, 0 is
 * returned for missing, empty, or non-integer values).
 */
public int toInt(String name) {
    return toInt(name, 0);
}
|
// Covers toInt(String) across value shapes: valid positive/negative integers
// parse; empty, non-numeric, boolean, float, char, and missing keys all fall
// back to the default of 0.
@Test
public void testToInt_String() {
    System.out.println("toInt");
    int expResult;
    int result;
    Properties props = new Properties();
    props.put("value1", "123");
    props.put("value2", "-54");
    props.put("empty", "");
    props.put("str", "abc");
    props.put("boolean", "true");
    props.put("float", "24.98");
    props.put("int", "12");
    props.put("char", "a");
    PropertyParser instance = new PropertyParser(props);
    expResult = 123;
    result = instance.toInt("value1");
    assertEquals(expResult, result);
    expResult = -54;
    result = instance.toInt("value2");
    assertEquals(expResult, result);
    expResult = 0;
    result = instance.toInt("empty");
    assertEquals(expResult, result);
    expResult = 0;
    result = instance.toInt("str");
    assertEquals(expResult, result);
    expResult = 0;
    result = instance.toInt("boolean");
    assertEquals(expResult, result);
    expResult = 0;
    result = instance.toInt("float");
    assertEquals(expResult, result);
    expResult = 12;
    result = instance.toInt("int");
    assertEquals(expResult, result);
    expResult = 0;
    result = instance.toInt("char");
    assertEquals(expResult, result);
    expResult = 0;
    result = instance.toInt("nonexistent");
    assertEquals(expResult, result);
}
|
/**
 * JSON-deserialization factory: builds a {@code RefinementInfo} from the
 * required {@code outputRefinement} property.
 */
@JsonCreator
public static RefinementInfo of(
    @JsonProperty(value = "outputRefinement", required = true)
    final OutputRefinement outputRefinement
) {
    return new RefinementInfo(outputRefinement);
}
|
// equals/hashCode contract: instances with the same refinement are equal;
// FINAL and CHANGES instances are not.
@Test
public void shouldImplementEquals() {
    new EqualsTester()
        .addEqualityGroup(
            RefinementInfo.of(OutputRefinement.FINAL),
            RefinementInfo.of(OutputRefinement.FINAL)
        )
        .addEqualityGroup(
            RefinementInfo.of(OutputRefinement.CHANGES)
        )
        .testEquals();
}
|
/**
 * Serializes the state object; if serialization fails, the state is discarded
 * first (any discard failure is attached as a suppressed exception) and the
 * original serialization error is rethrown.
 *
 * @return the serialized bytes on success
 */
public static byte[] serializeOrDiscard(StateObject stateObject) throws Exception {
    try {
        return InstantiationUtil.serializeObject(stateObject);
    } catch (Exception e) {
        try {
            stateObject.discardState();
        } catch (Exception discardException) {
            e.addSuppressed(discardException);
        }
        ExceptionUtils.rethrowException(e);
    }
    // will never happen but is added to please the compiler
    return new byte[0];
}
|
// When serialization fails, the state's discard hook must have been invoked
// and the original IOException must propagate to the caller.
@Test
void testSerializeOrDiscardFailureHandling() throws Exception {
    final AtomicBoolean discardCalled = new AtomicBoolean(false);
    final StateObject original =
            new FailingSerializationStateObject(() -> discardCalled.set(true));
    assertThatThrownBy(() -> StateHandleStoreUtils.serializeOrDiscard(original))
            .withFailMessage("An IOException is expected to be thrown.")
            .isInstanceOf(IOException.class);
    assertThat(discardCalled).isTrue();
}
|
/**
 * Handles only the exact router rule key: the base class's checks must pass
 * AND the key must equal the prefix itself — sub-keys such as
 * "&lt;prefix&gt;.foo" are rejected.
 */
@Override
public boolean shouldHandle(String key) {
    return super.shouldHandle(key) && RouterConstant.ROUTER_KEY_PREFIX.equals(key);
}
|
// Exact-match semantics: the bare route-rule key is handled, a dotted sub-key is not.
@Test
public void testShouldHandle() {
    Assert.assertTrue(handler.shouldHandle("servicecomb.routeRule"));
    Assert.assertFalse(handler.shouldHandle("servicecomb.routeRule.foo"));
}
|
/**
 * Forwards or holds a watermark depending on bundle state: when no bundle is
 * open and no previously closed bundle is still pending, the watermark is
 * propagated immediately; otherwise it is held. On the max (end-of-input)
 * watermark, any open bundle is force-finished and the method blocks on the
 * pending watermark futures before emitting downstream.
 */
@Override
public void processWatermark(Instant watermark, OpEmitter<OutT> emitter) {
    // propagate watermark immediately if no bundle is in progress and all the previous bundles have
    // completed.
    if (!isBundleStarted() && pendingBundleCount.get() == 0) {
        LOG.debug("Propagating watermark: {} directly since no bundle in progress.", watermark);
        bundleProgressListener.onWatermark(watermark, emitter);
        return;
    }
    // hold back the watermark since there is either a bundle in progress or previously closed
    // bundles are unfinished.
    this.bundleWatermarkHold = watermark;
    // for batch mode, the max watermark should force the bundle to close
    if (BoundedWindow.TIMESTAMP_MAX_VALUE.equals(watermark)) {
        /*
         * Due to lack of async watermark function, we block on the previous watermark futures before propagating the watermark
         * downstream. If a bundle is in progress tryFinishBundle() fill force the bundle to close and emit watermark.
         * If no bundle in progress, we progress watermark explicitly after the completion of previous watermark futures.
         */
        if (isBundleStarted()) {
            LOG.info(
                "Received max watermark. Triggering finish bundle before flushing the watermark downstream.");
            tryFinishBundle(emitter);
            watermarkFuture.toCompletableFuture().join();
        } else {
            LOG.info(
                "Received max watermark. Waiting for previous bundles to complete before flushing the watermark downstream.");
            watermarkFuture.toCompletableFuture().join();
            bundleProgressListener.onWatermark(watermark, emitter);
        }
    }
}
|
// With no bundle open, a watermark must be forwarded to the listener exactly once.
@Test
public void testProcessWatermarkWhenNoBundleInProgress() {
    Instant now = Instant.now();
    OpEmitter<String> mockEmitter = mock(OpEmitter.class);
    bundleManager.processWatermark(now, mockEmitter);
    verify(bundleProgressListener, times(1)).onWatermark(now, mockEmitter);
}
|
@Override
public void trackFragmentAppViewScreen() {
    // Intentionally a no-op: this implementation keeps fragment screen-view
    // tracking disabled (see the companion test asserting
    // isTrackFragmentAppViewScreenEnabled() stays false after this call).
}
|
// Calling the no-op tracker must not enable fragment screen-view tracking.
@Test
public void trackFragmentAppViewScreen() {
    mSensorsAPI.trackFragmentAppViewScreen();
    Assert.assertFalse(mSensorsAPI.isTrackFragmentAppViewScreenEnabled());
}
|
/**
 * Builds a {@code Session} for a query from the HTTP request context.
 * <p>
 * Steps, in order: (1) if an authorized identity is supplied, rebuild the
 * identity carrying the authorized user and selection reason, and log the
 * mapping; (2) copy all context attributes into a {@code SessionBuilder};
 * (3) apply the forced session time zone when configured, else the client's;
 * (4) apply locale, system properties, per-catalog session properties,
 * prepared statements, client-transaction support, and session functions;
 * (5) create the warning collector AFTER system properties are set (its
 * handling level is itself a system property); (6) begin the transaction on
 * the built session when the context carries a transaction id.
 */
@Override
public Session createSession(QueryId queryId, SessionContext context, WarningCollectorFactory warningCollectorFactory, Optional<AuthorizedIdentity> authorizedIdentity)
{
    Identity identity = context.getIdentity();
    if (authorizedIdentity.isPresent()) {
        // Rebuild the identity so it carries the authorized user and the reason it was selected.
        identity = new Identity(
                identity.getUser(),
                identity.getPrincipal(),
                identity.getRoles(),
                identity.getExtraCredentials(),
                identity.getExtraAuthenticators(),
                Optional.of(authorizedIdentity.get().getUserName()),
                authorizedIdentity.get().getReasonForSelect());
        log.info(String.format(
                "For query %s, given user is %s, authorized user is %s",
                queryId.getId(),
                identity.getUser(),
                authorizedIdentity.get().getUserName()));
    }
    SessionBuilder sessionBuilder = Session.builder(sessionPropertyManager)
            .setQueryId(queryId)
            .setIdentity(identity)
            .setSource(context.getSource())
            .setCatalog(context.getCatalog())
            .setSchema(context.getSchema())
            .setRemoteUserAddress(context.getRemoteUserAddress())
            .setUserAgent(context.getUserAgent())
            .setClientInfo(context.getClientInfo())
            .setClientTags(context.getClientTags())
            .setTraceToken(context.getTraceToken())
            .setResourceEstimates(context.getResourceEstimates())
            .setTracer(context.getTracer())
            .setRuntimeStats(context.getRuntimeStats());
    // A server-configured time zone overrides whatever the client sent.
    if (forcedSessionTimeZone.isPresent()) {
        sessionBuilder.setTimeZoneKey(forcedSessionTimeZone.get());
    }
    else if (context.getTimeZoneId() != null) {
        sessionBuilder.setTimeZoneKey(getTimeZoneKey(context.getTimeZoneId()));
    }
    if (context.getLanguage() != null) {
        sessionBuilder.setLocale(Locale.forLanguageTag(context.getLanguage()));
    }
    for (Entry<String, String> entry : context.getSystemProperties().entrySet()) {
        sessionBuilder.setSystemProperty(entry.getKey(), entry.getValue());
    }
    for (Entry<String, Map<String, String>> catalogProperties : context.getCatalogSessionProperties().entrySet()) {
        String catalog = catalogProperties.getKey();
        for (Entry<String, String> entry : catalogProperties.getValue().entrySet()) {
            sessionBuilder.setCatalogSessionProperty(catalog, entry.getKey(), entry.getValue());
        }
    }
    for (Entry<String, String> preparedStatement : context.getPreparedStatements().entrySet()) {
        sessionBuilder.addPreparedStatement(preparedStatement.getKey(), preparedStatement.getValue());
    }
    if (context.supportClientTransaction()) {
        sessionBuilder.setClientTransactionSupport();
    }
    for (Entry<SqlFunctionId, SqlInvokedFunction> entry : context.getSessionFunctions().entrySet()) {
        sessionBuilder.addSessionFunction(entry.getKey(), entry.getValue());
    }
    // Put after setSystemProperty are called
    WarningCollector warningCollector = warningCollectorFactory.create(sessionBuilder.getSystemProperty(WARNING_HANDLING, WarningHandlingLevel.class));
    sessionBuilder.setWarningCollector(warningCollector);
    Session session = sessionBuilder.build();
    if (context.getTransactionId().isPresent()) {
        session = session.beginTransactionId(context.getTransactionId().get(), transactionManager, accessControl);
    }
    return session;
}
|
// End-to-end: a session created from the canned test request must faithfully
// carry every context attribute — identity, catalog/schema, locale/time zone,
// client info/tags, system properties, prepared statements, and session functions.
@Test
public void testCreateSession()
{
    HttpRequestSessionContext context = new HttpRequestSessionContext(TEST_REQUEST, new SqlParserOptions());
    QuerySessionSupplier sessionSupplier = new QuerySessionSupplier(
            createTestTransactionManager(),
            new AllowAllAccessControl(),
            new SessionPropertyManager(),
            new SqlEnvironmentConfig());
    WarningCollectorFactory warningCollectorFactory = new WarningCollectorFactory()
    {
        @Override
        public WarningCollector create(WarningHandlingLevel warningHandlingLevel)
        {
            return WarningCollector.NOOP;
        }
    };
    Session session = sessionSupplier.createSession(new QueryId("test_query_id"), context, warningCollectorFactory, Optional.empty());
    assertEquals(session.getQueryId(), new QueryId("test_query_id"));
    assertEquals(session.getUser(), "testUser");
    assertEquals(session.getSource().get(), "testSource");
    assertEquals(session.getCatalog().get(), "testCatalog");
    assertEquals(session.getSchema().get(), "testSchema");
    assertEquals(session.getLocale(), Locale.TAIWAN);
    assertEquals(session.getTimeZoneKey(), getTimeZoneKey("Asia/Taipei"));
    assertEquals(session.getRemoteUserAddress().get(), "testRemote");
    assertEquals(session.getClientInfo().get(), "client-info");
    assertEquals(session.getClientTags(), ImmutableSet.of("tag1", "tag2", "tag3"));
    assertEquals(session.getSystemProperties(), ImmutableMap.<String, String>builder()
            .put(QUERY_MAX_MEMORY, "1GB")
            .put(JOIN_DISTRIBUTION_TYPE, "partitioned")
            .put(HASH_PARTITION_COUNT, "43")
            .build());
    assertEquals(session.getPreparedStatements(), ImmutableMap.<String, String>builder()
            .put("query1", "select * from foo")
            .put("query2", "select * from bar")
            .build());
    assertEquals(session.getSessionFunctions(), ImmutableMap.of(SQL_FUNCTION_ID_ADD, SQL_FUNCTION_ADD));
}
|
/**
 * Entry point of the PostgreSQL authentication handshake.
 * <p>
 * First detects the optional SSLRequest probe: a message whose length field
 * equals SSL_REQUEST_PAYLOAD_LENGTH and whose request code equals
 * SSL_REQUEST_CODE. If the proxy supports SSL, an SslHandler is installed and
 * an SSL-willing packet is answered; otherwise an SSL-unwilling packet is
 * sent. In both cases the handshake stays open ("continued").
 * Any other message is rewound and dispatched either as a password message
 * (when the startup message was already received) or as the startup message.
 */
@Override
public AuthenticationResult authenticate(final ChannelHandlerContext context, final PacketPayload payload) {
    // Peek at the first two ints; markReaderIndex lets us rewind if this is
    // not an SSLRequest probe.
    if (SSL_REQUEST_PAYLOAD_LENGTH == payload.getByteBuf().markReaderIndex().readInt() && SSL_REQUEST_CODE == payload.getByteBuf().readInt()) {
        if (ProxySSLContext.getInstance().isSSLEnabled()) {
            SslHandler sslHandler = new SslHandler(ProxySSLContext.getInstance().newSSLEngine(context.alloc()), true);
            // The SslHandler must be first in the pipeline so it can decrypt
            // all subsequent inbound traffic.
            context.pipeline().addFirst(SslHandler.class.getSimpleName(), sslHandler);
            context.writeAndFlush(new PostgreSQLSSLWillingPacket());
        } else {
            context.writeAndFlush(new PostgreSQLSSLUnwillingPacket());
        }
        return AuthenticationResultBuilder.continued();
    }
    // Not an SSLRequest: rewind so the real handler re-reads from the start.
    payload.getByteBuf().resetReaderIndex();
    AuthorityRule rule = ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getGlobalRuleMetaData().getSingleRule(AuthorityRule.class);
    return startupMessageReceived ? processPasswordMessage(context, (PostgreSQLPacketPayload) payload, rule) : processStartupMessage(context, (PostgreSQLPacketPayload) payload, rule);
}
|
/**
 * When SSL is enabled and the client sends an SSLRequest (length 8,
 * code 80877103), the engine must answer with an SSL-willing packet, install
 * an SslHandler at the head of the pipeline, and keep the handshake open.
 */
@Test
void assertSSLWilling() {
    ByteBuf byteBuf = createByteBuf(8, 8);
    byteBuf.writeInt(8);
    // 80877103 is the PostgreSQL SSLRequest code.
    byteBuf.writeInt(80877103);
    PacketPayload payload = new PostgreSQLPacketPayload(byteBuf, StandardCharsets.UTF_8);
    ChannelHandlerContext context = mock(ChannelHandlerContext.class, RETURNS_DEEP_STUBS);
    when(ProxySSLContext.getInstance().isSSLEnabled()).thenReturn(true);
    AuthenticationResult actual = new PostgreSQLAuthenticationEngine().authenticate(context, payload);
    verify(context).writeAndFlush(any(PostgreSQLSSLWillingPacket.class));
    verify(context.pipeline()).addFirst(eq(SslHandler.class.getSimpleName()), any(SslHandler.class));
    assertFalse(actual.isFinished());
}
|
/**
 * Parses forwarded / non-forwarded / read-field annotation strings into the
 * given {@link SingleInputSemanticProperties}.
 * Delegates to the seven-argument overload with the trailing boolean flag
 * disabled (NOTE(review): presumably a "skip incompatible types" switch —
 * confirm against the overload's declaration).
 */
public static void getSemanticPropsSingleFromString(
        SingleInputSemanticProperties result,
        String[] forwarded,
        String[] nonForwarded,
        String[] readSet,
        TypeInformation<?> inType,
        TypeInformation<?> outType) {
    getSemanticPropsSingleFromString(result, forwarded, nonForwarded, readSet, inType, outType, false);
}
|
/**
 * "*" as a read-field expression must mark field 0 as read, both for an
 * atomic int type and when only the input side is atomic.
 */
@Test
void testReadFieldsBasic() {
    String[] readFields = {"*"};
    SingleInputSemanticProperties sp = new SingleInputSemanticProperties();
    SemanticPropUtil.getSemanticPropsSingleFromString(
            sp, null, null, readFields, intType, intType);
    FieldSet fs = sp.getReadFields(0);
    assertThat(fs).containsExactly(0);
    // Same wildcard, different output type: read set still refers to input.
    sp = new SingleInputSemanticProperties();
    SemanticPropUtil.getSemanticPropsSingleFromString(
            sp, null, null, readFields, intType, fiveIntTupleType);
    fs = sp.getReadFields(0);
    assertThat(fs).containsExactly(0);
}
|
/**
 * Strips the heavy payload from a request/response before logging, unless
 * debug logging is enabled (then the full message is preserved).
 * Note: the message is mutated in place and returned, not copied.
 *
 * @param message the message to lighten; unknown types pass through untouched
 * @return the same message instance, possibly with its payload nulled
 */
public static Object getRequestWithoutData(Object message) {
    if (logger.isDebugEnabled()) {
        // Keep the payload for debugging.
        return message;
    }
    if (message instanceof Request) {
        ((Request) message).setData(null);
    } else if (message instanceof Response) {
        ((Response) message).setResult(null);
    }
    return message;
}
|
/**
 * getRequestWithoutData must null the payload of requests and responses
 * while preserving their ids, and pass any other object through unchanged.
 */
@Test
void test() {
    Request request = new Request(1);
    request.setData(new Object());
    Request requestWithoutData = (Request) PayloadDropper.getRequestWithoutData(request);
    Assertions.assertEquals(requestWithoutData.getId(), request.getId());
    Assertions.assertNull(requestWithoutData.getData());
    Response response = new Response(1);
    response.setResult(new Object());
    Response responseWithoutData = (Response) PayloadDropper.getRequestWithoutData(response);
    Assertions.assertEquals(responseWithoutData.getId(), response.getId());
    Assertions.assertNull(responseWithoutData.getResult());
    // Non-request/response objects are returned as-is.
    Object object = new Object();
    Assertions.assertEquals(object, PayloadDropper.getRequestWithoutData(object));
}
|
/**
 * Returns the (cached) {@link Wrapper} for the given class.
 * Dynamically generated classes cannot be wrapped, so we climb to the first
 * non-dynamic superclass first; {@code Object} gets a shared no-op wrapper.
 */
public static Wrapper getWrapper(Class<?> c) {
    while (ClassGenerator.isDynamicClass(c)) // can not wrapper on dynamic class.
    {
        c = c.getSuperclass();
    }
    if (c == Object.class) {
        return OBJECT_WRAPPER;
    }
    // Cache per class so bytecode generation happens at most once.
    return ConcurrentHashMapUtils.computeIfAbsent(WRAPPER_MAP, c, Wrapper::makeWrapper);
}
|
/**
 * Primitive types (here byte) cannot be wrapped and must be rejected with
 * an IllegalArgumentException.
 */
@Test
void testWrapPrimitive() throws Exception {
    Assertions.assertThrows(IllegalArgumentException.class, () -> {
        Wrapper.getWrapper(Byte.TYPE);
    });
}
|
/**
 * Adds the action view supplied by the given provider to the strip.
 * Adding the same provider twice is a no-op. High-priority actions go to the
 * front of both the view hierarchy (when the strip is shown) and the
 * bookkeeping list; the list keeps the view even while the strip is hidden.
 */
public void addStripAction(@NonNull StripActionProvider provider, boolean highPriority) {
    // Duplicate check: the provider instance is stored as a tag on its view.
    for (var existingView : mStripActionViews) {
        if (existingView.getTag(PROVIDER_TAG_ID) == provider) {
            return;
        }
    }
    final var actionView = provider.inflateActionView(this);
    if (actionView.getParent() != null) {
        throw new IllegalStateException("StripActionProvider inflated a view with a parent!");
    }
    actionView.setTag(PROVIDER_TAG_ID, provider);
    if (mShowActionStrip) {
        if (highPriority) {
            addView(actionView, FIRST_PROVIDER_VIEW_INDEX);
        } else {
            addView(actionView);
        }
    }
    if (highPriority) {
        mStripActionViews.add(0, actionView);
    } else {
        mStripActionViews.add(actionView);
    }
    invalidate();
}
|
/**
 * Adding the same provider twice must inflate and attach its view only once.
 */
@Test
public void testDoubleAddDoesNotAddAgain() {
    View view = new View(mUnderTest.getContext());
    KeyboardViewContainerView.StripActionProvider provider =
            Mockito.mock(KeyboardViewContainerView.StripActionProvider.class);
    Mockito.doReturn(view).when(provider).inflateActionView(any());
    mUnderTest.addStripAction(provider, false);
    mUnderTest.addStripAction(provider, false);
    // Exactly one inflation, no removal, one extra child at the end.
    Mockito.verify(provider).inflateActionView(mUnderTest);
    Mockito.verify(provider, Mockito.never()).onRemoved();
    Assert.assertEquals(3, mUnderTest.getChildCount());
    Assert.assertSame(view, mUnderTest.getChildAt(2));
}
|
/**
 * Deserializes the given YAML document into an instance of {@code type},
 * using the project's restricted constructor and custom representer.
 *
 * @param content YAML document text
 * @param type    target class
 * @return the populated instance
 */
public static <T> T loadObject(String content, Class<T> type) {
    Yaml yaml = new Yaml(new YamlParserConstructor(), new CustomRepresenter());
    return yaml.loadAs(content, type);
}
|
/**
 * Loading YAML into a type the restricted constructor does not allow must
 * fail with a ConstructorException.
 */
@Test
void testNotSupportType() {
    assertThrows(ConstructorException.class, () -> {
        YamlParserUtil.loadObject("name: test", YamlTest.class);
    });
}
|
/**
 * Captures the servlet context and derives container-specific flags from
 * {@link ServletContext#getServerInfo()} (JBoss/WildFly, GlassFish family,
 * WebLogic), plus whether connection-information gathering is allowed.
 */
void initServletContext(ServletContext context) {
    assert context != null;
    this.servletContext = context;
    final String serverInfo = servletContext.getServerInfo();
    // Flags are simple substring checks against the container's banner.
    jboss = serverInfo.contains("JBoss") || serverInfo.contains("WildFly");
    glassfish = serverInfo.contains("GlassFish")
            || serverInfo.contains("Sun Java System Application Server")
            || serverInfo.contains("Payara");
    weblogic = serverInfo.contains("WebLogic");
    // Connection details are collected only when system actions are enabled
    // and a database is actually in use.
    connectionInformationsEnabled = Parameters.isSystemActionsEnabled()
            && !Parameters.isNoDatabase();
}
|
/**
 * initServletContext must consult getServerInfo() for each known container
 * banner without failing; the mocks verify the interaction per server name.
 */
@Test
public void testInitServletContext() {
    final String[] servers = { "JBoss", "WildFly", "GlassFish",
            "Sun Java System Application Server", "WebLogic", };
    for (final String serverName : servers) {
        final ServletContext servletContext = createNiceMock(ServletContext.class);
        expect(servletContext.getServerInfo()).andReturn(serverName).anyTimes();
        replay(servletContext);
        jdbcWrapper.initServletContext(servletContext);
        verify(servletContext);
    }
}
|
/**
 * Checks whether the stop is hit at the given bar index.
 * Without trading history, or without an open position, the rule is never
 * satisfied. For an open position, the stop check compares the entry net
 * price with the current close price, on the buy or sell side depending on
 * the entry trade's direction.
 */
@Override
public boolean isSatisfied(int index, TradingRecord tradingRecord) {
    boolean satisfied = false;
    if (tradingRecord != null) {
        final Position position = tradingRecord.getCurrentPosition();
        if (position.isOpened()) {
            final Num entryPrice = position.getEntry().getNetPrice();
            final Num currentPrice = closePrice.getValue(index);
            satisfied = position.getEntry().isBuy()
                    ? isBuyStopSatisfied(entryPrice, currentPrice)
                    : isSellStopSatisfied(entryPrice, currentPrice);
        }
    }
    traceIsSatisfied(index, satisfied);
    return satisfied;
}
|
/**
 * A 5% stop-loss on long positions: not satisfied without a record or open
 * position, satisfied once price drops at least 5% below the entry price.
 */
@Test
public void isSatisfiedWorksForBuy() {
    final TradingRecord tradingRecord = new BaseTradingRecord(Trade.TradeType.BUY);
    final Num tradedAmount = numOf(1);
    // 5% stop-loss
    StopLossRule rule = new StopLossRule(closePrice, numOf(5));
    assertFalse(rule.isSatisfied(0, null));
    assertFalse(rule.isSatisfied(1, tradingRecord));
    // Enter at 114
    tradingRecord.enter(2, numOf(114), tradedAmount);
    assertFalse(rule.isSatisfied(2, tradingRecord));
    assertFalse(rule.isSatisfied(3, tradingRecord));
    assertTrue(rule.isSatisfied(4, tradingRecord));
    // Exit
    tradingRecord.exit(5);
    // Enter at 128
    tradingRecord.enter(5, numOf(128), tradedAmount);
    assertFalse(rule.isSatisfied(5, tradingRecord));
    assertTrue(rule.isSatisfied(6, tradingRecord));
    assertTrue(rule.isSatisfied(7, tradingRecord));
}
|
/**
 * Closes the command consumer and the command-topic backup.
 * The backup is closed in a finally block so that an exception while closing
 * the consumer cannot leak the backup resource.
 */
public void close() {
    try {
        commandConsumer.close();
    } finally {
        commandTopicBackup.close();
    }
}
|
/**
 * close() must release every owned resource — the consumer AND the
 * command-topic backup — not just the consumer.
 */
@Test
public void shouldCloseAllResources() {
    // When:
    commandTopic.close();
    // Then:
    verify(commandConsumer).close();
    verify(commandTopicBackup).close();
}
|
/**
 * Computes duplication measures for the whole component tree by crawling it
 * with a path-aware visitor that executes the configured duplication formula.
 */
@Override
public void execute(ComputationStep.Context context) {
    new PathAwareCrawler<>(
            FormulaExecutorComponentVisitor.newBuilder(metricRepository, measureRepository)
                    .buildFor(List.of(duplicationFormula)))
            .visit(treeRootHolder.getRoot());
}
|
/**
 * Overlapping duplications of the same original block must count each line
 * at most once: original 1-10 plus inner duplicate 2-2 still yields 11
 * duplicated new lines (10 original + duplicate at line 10... derived from
 * the configured blocks), not a double count.
 */
@Test
public void compute_duplicated_lines_counts_lines_from_original_and_InnerDuplicate_only_once() {
    TextBlock original = new TextBlock(1, 10);
    duplicationRepository.addDuplication(FILE_1_REF, original, new TextBlock(10, 11), new TextBlock(11, 12));
    duplicationRepository.addDuplication(FILE_1_REF, new TextBlock(2, 2), new TextBlock(4, 4));
    setNewLines(FILE_1);
    underTest.execute(new TestComputationStepContext());
    assertRawMeasureValue(FILE_1_REF, NEW_DUPLICATED_LINES_KEY, 11);
}
|
/**
 * Runs the feature: delegates entirely to a custom processor when one is
 * installed; otherwise runs the before-feature hook, all remaining scenarios
 * (only if the hook allowed it), and finally the after-feature hook.
 */
@Override
public void run() {
    if (processor != null) {
        processor.execute();
        return;
    }
    if (beforeHook()) {
        scenarios.forEachRemaining(this::processScenario);
    } else {
        logger.info("before-feature hook returned [false], aborting: {}", this);
    }
    // Runs regardless of whether the before-hook aborted the scenarios.
    afterFeature();
}
|
/**
 * Smoke-runs the scenario-outline feature file through the runner.
 */
@Test
void testOutline() {
    run("outline.feature");
}
|
/**
 * Verifies a decoded JWT's signature and required claims against the given
 * public key.
 * <p>
 * Supported algorithms are RS256/384/512 and ES256/384/512; any other value,
 * a null value, or a key type that does not match the algorithm family is
 * rejected. The verifier enforces presence of iat, exp, nbf and sub, accepts
 * the configured audiences and clock leeway, and additionally requires the
 * role claim when it differs from the subject claim. Every failure path
 * increments the matching failure metric before throwing.
 *
 * @param publicKey    key used to check the signature
 * @param publicKeyAlg the algorithm the key was published for
 * @param jwt          the already-decoded token to verify
 * @return the verified token
 * @throws AuthenticationException on any validation or verification failure
 */
DecodedJWT verifyJWT(PublicKey publicKey,
                     String publicKeyAlg,
                     DecodedJWT jwt) throws AuthenticationException {
    if (publicKeyAlg == null) {
        incrementFailureMetric(AuthenticationExceptionCode.UNSUPPORTED_ALGORITHM);
        throw new AuthenticationException("PublicKey algorithm cannot be null");
    }
    Algorithm alg;
    try {
        // Map the advertised algorithm name to the auth0 Algorithm; the cast
        // to RSA/EC key types may fail if the key family does not match.
        switch (publicKeyAlg) {
            case ALG_RS256:
                alg = Algorithm.RSA256((RSAPublicKey) publicKey, null);
                break;
            case ALG_RS384:
                alg = Algorithm.RSA384((RSAPublicKey) publicKey, null);
                break;
            case ALG_RS512:
                alg = Algorithm.RSA512((RSAPublicKey) publicKey, null);
                break;
            case ALG_ES256:
                alg = Algorithm.ECDSA256((ECPublicKey) publicKey, null);
                break;
            case ALG_ES384:
                alg = Algorithm.ECDSA384((ECPublicKey) publicKey, null);
                break;
            case ALG_ES512:
                alg = Algorithm.ECDSA512((ECPublicKey) publicKey, null);
                break;
            default:
                incrementFailureMetric(AuthenticationExceptionCode.UNSUPPORTED_ALGORITHM);
                throw new AuthenticationException("Unsupported algorithm: " + publicKeyAlg);
        }
    } catch (ClassCastException e) {
        incrementFailureMetric(AuthenticationExceptionCode.ALGORITHM_MISMATCH);
        throw new AuthenticationException("Expected PublicKey alg [" + publicKeyAlg + "] does match actual alg.");
    }
    // We verify issuer when retrieving the PublicKey, so it is not verified here.
    // The claim presence requirements are based on https://openid.net/specs/openid-connect-basic-1_0.html#IDToken
    Verification verifierBuilder = JWT.require(alg)
            .acceptLeeway(acceptedTimeLeewaySeconds)
            .withAnyOfAudience(allowedAudiences)
            .withClaimPresence(RegisteredClaims.ISSUED_AT)
            .withClaimPresence(RegisteredClaims.EXPIRES_AT)
            .withClaimPresence(RegisteredClaims.NOT_BEFORE)
            .withClaimPresence(RegisteredClaims.SUBJECT);
    if (isRoleClaimNotSubject) {
        verifierBuilder = verifierBuilder.withClaimPresence(roleClaim);
    }
    JWTVerifier verifier = verifierBuilder.build();
    try {
        return verifier.verify(jwt);
    } catch (TokenExpiredException e) {
        incrementFailureMetric(AuthenticationExceptionCode.EXPIRED_JWT);
        throw new AuthenticationException("JWT expired: " + e.getMessage());
    } catch (SignatureVerificationException e) {
        incrementFailureMetric(AuthenticationExceptionCode.ERROR_VERIFYING_JWT_SIGNATURE);
        throw new AuthenticationException("JWT signature verification exception: " + e.getMessage());
    } catch (InvalidClaimException e) {
        incrementFailureMetric(AuthenticationExceptionCode.INVALID_JWT_CLAIM);
        throw new AuthenticationException("JWT contains invalid claim: " + e.getMessage());
    } catch (AlgorithmMismatchException e) {
        incrementFailureMetric(AuthenticationExceptionCode.ALGORITHM_MISMATCH);
        throw new AuthenticationException("JWT algorithm does not match Public Key algorithm: " + e.getMessage());
    } catch (JWTDecodeException e) {
        incrementFailureMetric(AuthenticationExceptionCode.ERROR_DECODING_JWT);
        throw new AuthenticationException("Error while decoding JWT: " + e.getMessage());
    } catch (JWTVerificationException | IllegalArgumentException e) {
        // Catch-all for any other verification failure from the auth0 library.
        incrementFailureMetric(AuthenticationExceptionCode.ERROR_VERIFYING_JWT);
        throw new AuthenticationException("JWT verification failed: " + e.getMessage());
    }
}
|
/**
 * For every supported signature algorithm, a freshly signed token with all
 * mandatory claims must verify successfully against the matching public key.
 */
@Test(dataProvider = "supportedAlgorithms")
public void testThatSupportedAlgsWork(SignatureAlgorithm alg) throws AuthenticationException {
    KeyPair keyPair = Keys.keyPairFor(alg);
    DefaultJwtBuilder defaultJwtBuilder = new DefaultJwtBuilder();
    addValidMandatoryClaims(defaultJwtBuilder, basicProviderAudience);
    defaultJwtBuilder.signWith(keyPair.getPrivate());
    // Convert to the right class
    DecodedJWT expectedValue = JWT.decode(defaultJwtBuilder.compact());
    DecodedJWT actualValue = basicProvider.verifyJWT(keyPair.getPublic(), alg.getValue(), expectedValue);
    Assert.assertEquals(expectedValue, actualValue);
}
|
/**
 * Converts a TupleDomain over symbol names into a conjunction of per-column
 * predicates. Columns are sorted by name so the generated expression is
 * deterministic; a "none" domain short-circuits to FALSE.
 */
public Expression toPredicate(TupleDomain<String> tupleDomain)
{
    if (tupleDomain.isNone()) {
        return FALSE_LITERAL;
    }
    Map<String, Domain> domains = tupleDomain.getDomains().get();
    return domains.entrySet().stream()
            .sorted(Map.Entry.comparingByKey())
            .map(entry -> toPredicate(entry.getValue(), new SymbolReference(entry.getKey())))
            .collect(collectingAndThen(toImmutableList(), ExpressionUtils::combineConjuncts));
}
|
/**
 * Exhaustive mapping checks from Domain shapes to SQL predicates:
 * null handling, open/closed ranges, single values, IN lists, nullable
 * unions, and non-orderable types (COLOR, HYPER_LOG_LOG).
 */
@Test
public void testToPredicate()
{
    TupleDomain<String> tupleDomain;
    tupleDomain = withColumnDomains(ImmutableMap.of(C_BIGINT, Domain.notNull(BIGINT)));
    assertEquals(toPredicate(tupleDomain), isNotNull(C_BIGINT));
    tupleDomain = withColumnDomains(ImmutableMap.of(C_BIGINT, Domain.onlyNull(BIGINT)));
    assertEquals(toPredicate(tupleDomain), isNull(C_BIGINT));
    tupleDomain = withColumnDomains(ImmutableMap.of(C_BIGINT, Domain.none(BIGINT)));
    assertEquals(toPredicate(tupleDomain), FALSE_LITERAL);
    tupleDomain = withColumnDomains(ImmutableMap.of(C_BIGINT, Domain.all(BIGINT)));
    assertEquals(toPredicate(tupleDomain), TRUE_LITERAL);
    // Range domains map to comparison operators.
    tupleDomain = withColumnDomains(ImmutableMap.of(C_BIGINT, Domain.create(ValueSet.ofRanges(Range.greaterThan(BIGINT, 1L)), false)));
    assertEquals(toPredicate(tupleDomain), greaterThan(C_BIGINT, bigintLiteral(1L)));
    tupleDomain = withColumnDomains(ImmutableMap.of(C_BIGINT, Domain.create(ValueSet.ofRanges(Range.greaterThanOrEqual(BIGINT, 1L)), false)));
    assertEquals(toPredicate(tupleDomain), greaterThanOrEqual(C_BIGINT, bigintLiteral(1L)));
    tupleDomain = withColumnDomains(ImmutableMap.of(C_BIGINT, Domain.create(ValueSet.ofRanges(Range.lessThan(BIGINT, 1L)), false)));
    assertEquals(toPredicate(tupleDomain), lessThan(C_BIGINT, bigintLiteral(1L)));
    tupleDomain = withColumnDomains(ImmutableMap.of(C_BIGINT, Domain.create(ValueSet.ofRanges(Range.range(BIGINT, 0L, false, 1L, true)), false)));
    assertEquals(toPredicate(tupleDomain), and(greaterThan(C_BIGINT, bigintLiteral(0L)), lessThanOrEqual(C_BIGINT, bigintLiteral(1L))));
    tupleDomain = withColumnDomains(ImmutableMap.of(C_BIGINT, Domain.create(ValueSet.ofRanges(Range.lessThanOrEqual(BIGINT, 1L)), false)));
    assertEquals(toPredicate(tupleDomain), lessThanOrEqual(C_BIGINT, bigintLiteral(1L)));
    tupleDomain = withColumnDomains(ImmutableMap.of(C_BIGINT, Domain.singleValue(BIGINT, 1L)));
    assertEquals(toPredicate(tupleDomain), equal(C_BIGINT, bigintLiteral(1L)));
    tupleDomain = withColumnDomains(ImmutableMap.of(C_BIGINT, Domain.create(ValueSet.ofRanges(Range.equal(BIGINT, 1L), Range.equal(BIGINT, 2L)), false)));
    assertEquals(toPredicate(tupleDomain), in(C_BIGINT, ImmutableList.of(1L, 2L)));
    // nullAllowed = true adds an OR IS NULL branch.
    tupleDomain = withColumnDomains(ImmutableMap.of(C_BIGINT, Domain.create(ValueSet.ofRanges(Range.lessThan(BIGINT, 1L)), true)));
    assertEquals(toPredicate(tupleDomain), or(lessThan(C_BIGINT, bigintLiteral(1L)), isNull(C_BIGINT)));
    tupleDomain = withColumnDomains(ImmutableMap.of(C_COLOR, Domain.create(ValueSet.of(COLOR, COLOR_VALUE_1), true)));
    assertEquals(toPredicate(tupleDomain), or(equal(C_COLOR, colorLiteral(COLOR_VALUE_1)), isNull(C_COLOR)));
    tupleDomain = withColumnDomains(ImmutableMap.of(C_COLOR, Domain.create(ValueSet.of(COLOR, COLOR_VALUE_1).complement(), true)));
    assertEquals(toPredicate(tupleDomain), or(not(equal(C_COLOR, colorLiteral(COLOR_VALUE_1))), isNull(C_COLOR)));
    tupleDomain = withColumnDomains(ImmutableMap.of(C_HYPER_LOG_LOG, Domain.onlyNull(HYPER_LOG_LOG)));
    assertEquals(toPredicate(tupleDomain), isNull(C_HYPER_LOG_LOG));
    tupleDomain = withColumnDomains(ImmutableMap.of(C_HYPER_LOG_LOG, Domain.notNull(HYPER_LOG_LOG)));
    assertEquals(toPredicate(tupleDomain), isNotNull(C_HYPER_LOG_LOG));
}
|
/**
 * Request filter that installs a Shiro-backed SecurityContext on every
 * request. If a Basic Authorization header is present, user and password are
 * decoded from it; otherwise an anonymous context is created. Either way the
 * remote host is resolved through the trusted-proxy configuration.
 *
 * @throws IOException declared by the ContainerRequestFilter contract
 */
@Override
public void filter(ContainerRequestContext requestContext) throws IOException {
    // Drop any subject bound to this thread by a previous request (threads
    // are pooled).
    ThreadContext.unbindSubject();
    final boolean secure = requestContext.getSecurityContext().isSecure();
    final MultivaluedMap<String, String> headers = requestContext.getHeaders();
    final Map<String, Cookie> cookies = requestContext.getCookies();
    final Request grizzlyRequest = grizzlyRequestProvider.get();
    final String host = RestTools.getRemoteAddrFromRequest(grizzlyRequest, trustedProxies);
    final String authHeader = headers.getFirst(HttpHeaders.AUTHORIZATION);
    final Set<Class<?>> matchedResources = requestContext.getUriInfo().getMatchedResources().stream()
            .map(Object::getClass).collect(Collectors.toSet());
    final SecurityContext securityContext;
    if (authHeader != null && authHeader.startsWith("Basic")) {
        // "Basic <base64(user:pass)>" — split on the first ':' only, so
        // passwords may themselves contain colons.
        final String base64UserPass = authHeader.substring(authHeader.indexOf(' ') + 1);
        final String userPass = decodeBase64(base64UserPass);
        final String[] split = userPass.split(":", 2);
        if (split.length != 2) {
            throw new BadRequestException("Invalid credentials in Authorization header");
        }
        securityContext = createSecurityContext(split[0],
                split[1],
                secure,
                SecurityContext.BASIC_AUTH,
                host,
                grizzlyRequest.getRemoteAddr(),
                headers,
                cookies,
                matchedResources);
    } else {
        // No (or non-Basic) credentials: anonymous security context.
        securityContext = createSecurityContext(null, null, secure, null, host,
                grizzlyRequest.getRemoteAddr(),
                headers,
                cookies,
                matchedResources);
    }
    requestContext.setSecurityContext(securityContext);
}
|
/**
 * Basic credentials with the password "session" must produce a Shiro
 * security context carrying a SessionIdToken (session-based authentication).
 */
@Test
public void filterWithBasicAuthAndSessionIdShouldCreateShiroSecurityContextWithSessionIdToken() throws Exception {
    final MultivaluedHashMap<String, String> headers = new MultivaluedHashMap<>();
    final String credentials = Base64.getEncoder().encodeToString("test:session".getBytes(StandardCharsets.US_ASCII));
    headers.putSingle(HttpHeaders.AUTHORIZATION, "Basic " + credentials);
    when(requestContext.getHeaders()).thenReturn(headers);
    filter.filter(requestContext);
    final ArgumentCaptor<ShiroSecurityContext> argument = ArgumentCaptor.forClass(ShiroSecurityContext.class);
    verify(requestContext).setSecurityContext(argument.capture());
    final ShiroSecurityContext securityContext = argument.getValue();
    assertThat(securityContext).isExactlyInstanceOf(ShiroSecurityContext.class);
    assertThat(securityContext.getAuthenticationScheme()).isEqualTo(SecurityContext.BASIC_AUTH);
    assertThat(securityContext.getToken()).isExactlyInstanceOf(SessionIdToken.class);
}
|
/**
 * CLI entry point: validates that a config file was supplied, loads it, and
 * delegates to the overload with the default ksql-client factory.
 * Returns 1 on missing config or load failure, otherwise the delegate's
 * exit status.
 */
@Override
protected int command() {
    if (!validateConfigFilePresent()) {
        return 1;
    }
    final MigrationConfig config;
    try {
        config = MigrationConfig.load(getConfigFile());
    } catch (KsqlException | MigrationException e) {
        // Load failures are reported, not rethrown: this is a CLI boundary.
        LOGGER.error(e.getMessage());
        return 1;
    }
    return command(config, MigrationsUtil::getKsqlClient);
}
|
/**
 * Successful execution must return status 0 and issue the expected
 * CREATE TABLE AS statement for the migrations table.
 */
@Test
public void shouldCreateMigrationsTable() {
    // When:
    final int status = command.command(config, cfg -> client);
    // Then:
    assertThat(status, is(0));
    verify(client).executeStatement(EXPECTED_CTAS_STATEMENT);
}
|
/**
 * DFSAdmin command dispatcher.
 * <p>
 * Phase 1 validates the argument count for the given command (printing usage
 * and returning -1 on mismatch); phase 2 initializes the client and dispatches
 * to the handler. Errors are reported on stderr: for RemoteException only the
 * first line of the server message is shown; the full exception is logged at
 * debug level.
 * <p>
 * Fix: "-getBalancerBandwidth" was matched case-insensitively during
 * validation but case-sensitively during dispatch, so a case variant passed
 * validation and then fell through to "Unknown command"; dispatch now uses
 * equalsIgnoreCase to match the validation.
 *
 * @param argv command name followed by its arguments
 * @return 0 on success, -1 (or a handler-specific status) on failure
 */
@Override
public int run(String[] argv) {
    if (argv.length < 1) {
        printUsage("");
        return -1;
    }
    int exitCode = -1;
    int i = 0;
    String cmd = argv[i++];
    //
    // verify that we have enough command line parameters
    //
    if ("-safemode".equals(cmd)) {
        if (argv.length != 2) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-allowSnapshot".equalsIgnoreCase(cmd)) {
        if (argv.length != 2) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-disallowSnapshot".equalsIgnoreCase(cmd)) {
        if (argv.length != 2) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-provisionSnapshotTrash".equalsIgnoreCase(cmd)) {
        if (argv.length != 2) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-report".equals(cmd)) {
        if (argv.length > DFS_REPORT_ARGS.length + 1) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-saveNamespace".equals(cmd)) {
        if (argv.length != 1 && argv.length != 2) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-rollEdits".equals(cmd)) {
        if (argv.length != 1) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-restoreFailedStorage".equals(cmd)) {
        if (argv.length != 2) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-refreshNodes".equals(cmd)) {
        if (argv.length != 1) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-finalizeUpgrade".equals(cmd)) {
        if (argv.length != 1) {
            printUsage(cmd);
            return exitCode;
        }
    } else if (RollingUpgradeCommand.matches(cmd)) {
        if (argv.length > 2) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-upgrade".equals(cmd)) {
        if (argv.length != 2) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-metasave".equals(cmd)) {
        if (argv.length != 2) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-refreshServiceAcl".equals(cmd)) {
        if (argv.length != 1) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-refresh".equals(cmd)) {
        if (argv.length < 3) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-refreshUserToGroupsMappings".equals(cmd)) {
        if (argv.length != 1) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-printTopology".equals(cmd)) {
        if (argv.length != 1) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-refreshNamenodes".equals(cmd)) {
        if (argv.length != 2) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-getVolumeReport".equals(cmd)) {
        if (argv.length != 2) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-reconfig".equals(cmd)) {
        if (argv.length != 4) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-deleteBlockPool".equals(cmd)) {
        if ((argv.length != 3) && (argv.length != 4)) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-setBalancerBandwidth".equals(cmd)) {
        if (argv.length != 2) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-getBalancerBandwidth".equalsIgnoreCase(cmd)) {
        if (argv.length != 2) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-fetchImage".equals(cmd)) {
        if (argv.length != 2) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-shutdownDatanode".equals(cmd)) {
        if ((argv.length != 2) && (argv.length != 3)) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-getDatanodeInfo".equals(cmd)) {
        if (argv.length != 2) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-triggerBlockReport".equals(cmd)) {
        if ((argv.length < 2) || (argv.length > 5)) {
            printUsage(cmd);
            return exitCode;
        }
    } else if ("-listOpenFiles".equals(cmd)) {
        if ((argv.length > 4)) {
            printUsage(cmd);
            return exitCode;
        }
    }
    // initialize DFSAdmin
    init();
    Exception debugException = null;
    exitCode = 0;
    try {
        if ("-report".equals(cmd)) {
            report(argv, i);
        } else if ("-safemode".equals(cmd)) {
            setSafeMode(argv, i);
        } else if ("-allowSnapshot".equalsIgnoreCase(cmd)) {
            allowSnapshot(argv);
        } else if ("-disallowSnapshot".equalsIgnoreCase(cmd)) {
            disallowSnapshot(argv);
        } else if ("-provisionSnapshotTrash".equalsIgnoreCase(cmd)) {
            provisionSnapshotTrash(argv);
        } else if ("-saveNamespace".equals(cmd)) {
            exitCode = saveNamespace(argv);
        } else if ("-rollEdits".equals(cmd)) {
            exitCode = rollEdits();
        } else if ("-restoreFailedStorage".equals(cmd)) {
            exitCode = restoreFailedStorage(argv[i]);
        } else if ("-refreshNodes".equals(cmd)) {
            exitCode = refreshNodes();
        } else if ("-finalizeUpgrade".equals(cmd)) {
            exitCode = finalizeUpgrade();
        } else if (RollingUpgradeCommand.matches(cmd)) {
            exitCode = RollingUpgradeCommand.run(getDFS(), argv, i);
        } else if ("-upgrade".equals(cmd)) {
            exitCode = upgrade(argv[i]);
        } else if ("-metasave".equals(cmd)) {
            exitCode = metaSave(argv, i);
        } else if (ClearQuotaCommand.matches(cmd)) {
            exitCode = new ClearQuotaCommand(argv, i, getConf()).runAll();
        } else if (SetQuotaCommand.matches(cmd)) {
            exitCode = new SetQuotaCommand(argv, i, getConf()).runAll();
        } else if (ClearSpaceQuotaCommand.matches(cmd)) {
            exitCode = new ClearSpaceQuotaCommand(argv, i, getConf()).runAll();
        } else if (SetSpaceQuotaCommand.matches(cmd)) {
            exitCode = new SetSpaceQuotaCommand(argv, i, getConf()).runAll();
        } else if ("-refreshServiceAcl".equals(cmd)) {
            exitCode = refreshServiceAcl();
        } else if ("-refreshUserToGroupsMappings".equals(cmd)) {
            exitCode = refreshUserToGroupsMappings();
        } else if ("-refreshSuperUserGroupsConfiguration".equals(cmd)) {
            exitCode = refreshSuperUserGroupsConfiguration();
        } else if ("-refreshCallQueue".equals(cmd)) {
            exitCode = refreshCallQueue();
        } else if ("-refresh".equals(cmd)) {
            exitCode = genericRefresh(argv, i);
        } else if ("-printTopology".equals(cmd)) {
            exitCode = printTopology();
        } else if ("-refreshNamenodes".equals(cmd)) {
            exitCode = refreshNamenodes(argv, i);
        } else if ("-getVolumeReport".equals(cmd)) {
            exitCode = getVolumeReport(argv, i);
        } else if ("-deleteBlockPool".equals(cmd)) {
            exitCode = deleteBlockPool(argv, i);
        } else if ("-setBalancerBandwidth".equals(cmd)) {
            exitCode = setBalancerBandwidth(argv, i);
        } else if ("-getBalancerBandwidth".equalsIgnoreCase(cmd)) {
            // equalsIgnoreCase to stay consistent with the validation above.
            exitCode = getBalancerBandwidth(argv, i);
        } else if ("-fetchImage".equals(cmd)) {
            exitCode = fetchImage(argv, i);
        } else if ("-shutdownDatanode".equals(cmd)) {
            exitCode = shutdownDatanode(argv, i);
        } else if ("-evictWriters".equals(cmd)) {
            exitCode = evictWriters(argv, i);
        } else if ("-getDatanodeInfo".equals(cmd)) {
            exitCode = getDatanodeInfo(argv, i);
        } else if ("-reconfig".equals(cmd)) {
            exitCode = reconfig(argv, i);
        } else if ("-triggerBlockReport".equals(cmd)) {
            exitCode = triggerBlockReport(argv);
        } else if ("-listOpenFiles".equals(cmd)) {
            exitCode = listOpenFiles(argv);
        } else if ("-help".equals(cmd)) {
            if (i < argv.length) {
                printHelp(argv[i]);
            } else {
                printHelp("");
            }
        } else {
            exitCode = -1;
            System.err.println(cmd.substring(1) + ": Unknown command");
            printUsage("");
        }
    } catch (IllegalArgumentException arge) {
        debugException = arge;
        exitCode = -1;
        System.err.println(cmd.substring(1) + ": " + arge.getLocalizedMessage());
        printUsage(cmd);
    } catch (RemoteException e) {
        //
        // This is a error returned by hadoop server. Print
        // out the first line of the error message, ignore the stack trace.
        exitCode = -1;
        debugException = e;
        try {
            String[] content;
            content = e.getLocalizedMessage().split("\n");
            System.err.println(cmd.substring(1) + ": "
                    + content[0]);
        } catch (Exception ex) {
            System.err.println(cmd.substring(1) + ": "
                    + ex.getLocalizedMessage());
            debugException = ex;
        }
    } catch (Exception e) {
        exitCode = -1;
        debugException = e;
        System.err.println(cmd.substring(1) + ": "
                + e.getLocalizedMessage());
    }
    if (LOG.isDebugEnabled() && debugException != null) {
        LOG.debug("Exception encountered:", debugException);
    }
    return exitCode;
}
|
/**
 * allowSnapshot behavior when a .Trash entry already exists under the target
 * directory: succeeds if the trash is a directory with the expected
 * permission, fails when the permission differs or when the path is occupied
 * by a file; disallowSnapshot removes the empty trash afterwards.
 */
@Test
public void testAllowSnapshotWhenTrashExists() throws Exception {
    final Path dirPath = new Path("/ssdir3");
    final Path trashRoot = new Path(dirPath, ".Trash");
    final DistributedFileSystem dfs = cluster.getFileSystem();
    final DFSAdmin dfsAdmin = new DFSAdmin(conf);
    // Case 1: trash directory exists and permission matches
    dfs.mkdirs(trashRoot);
    dfs.setPermission(trashRoot, TRASH_PERMISSION);
    // allowSnapshot should still succeed even when trash exists
    assertEquals(0, ToolRunner.run(dfsAdmin,
            new String[]{"-allowSnapshot", dirPath.toString()}));
    // Clean up. disallowSnapshot should remove the empty trash
    assertEquals(0, ToolRunner.run(dfsAdmin,
            new String[]{"-disallowSnapshot", dirPath.toString()}));
    assertFalse(dfs.exists(trashRoot));
    // Case 2: trash directory exists and but permission doesn't match
    dfs.mkdirs(trashRoot);
    dfs.setPermission(trashRoot, new FsPermission((short)0755));
    // allowSnapshot should fail here
    assertEquals(-1, ToolRunner.run(dfsAdmin,
            new String[]{"-allowSnapshot", dirPath.toString()}));
    // Correct trash permission and retry
    dfs.setPermission(trashRoot, TRASH_PERMISSION);
    assertEquals(0, ToolRunner.run(dfsAdmin,
            new String[]{"-allowSnapshot", dirPath.toString()}));
    // Clean up
    assertEquals(0, ToolRunner.run(dfsAdmin,
            new String[]{"-disallowSnapshot", dirPath.toString()}));
    assertFalse(dfs.exists(trashRoot));
    // Case 3: trash directory path is taken by a file
    dfs.create(trashRoot).close();
    // allowSnapshot should fail here
    assertEquals(-1, ToolRunner.run(dfsAdmin,
            new String[]{"-allowSnapshot", dirPath.toString()}));
    // Remove the file and retry
    dfs.delete(trashRoot, false);
    assertEquals(0, ToolRunner.run(dfsAdmin,
            new String[]{"-allowSnapshot", dirPath.toString()}));
    // Clean up
    assertEquals(0, ToolRunner.run(dfsAdmin,
            new String[]{"-disallowSnapshot", dirPath.toString()}));
    assertFalse(dfs.exists(trashRoot));
    // Cleanup
    dfs.delete(dirPath, true);
}
|
/**
 * Batch-deletes APIs by id.
 *
 * @param ids ids of the APIs to delete; must be non-empty, each non-blank
 * @return an error result carrying the service's message on failure,
 *         a DELETE_SUCCESS result otherwise
 */
@DeleteMapping("/batch")
@RequiresPermissions("system:api:delete")
public ShenyuAdminResult deleteApis(@RequestBody @NotEmpty final List<@NotBlank String> ids) {
    final String result = apiService.delete(ids);
    // isNotBlank is the single-argument check; isNoneBlank is the varargs
    // form and obscured the intent here (same behavior for one argument).
    if (StringUtils.isNotBlank(result)) {
        return ShenyuAdminResult.error(result);
    }
    return ShenyuAdminResult.success(ShenyuResultMessage.DELETE_SUCCESS);
}
|
/**
 * Batch delete over HTTP: an empty service result means success, and the
 * endpoint must respond with the DELETE_SUCCESS message.
 */
@Test
public void testDeleteApis() throws Exception {
    given(this.apiService.delete(Collections.singletonList("123"))).willReturn(StringUtils.EMPTY);
    this.mockMvc.perform(MockMvcRequestBuilders.delete("/api/batch")
            .contentType(MediaType.APPLICATION_JSON)
            .content("[\"123\"]"))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$.message", is(ShenyuResultMessage.DELETE_SUCCESS)))
            .andReturn();
}
|
/**
 * Scores a single position: 1 when it closed break-even, 0 otherwise.
 */
@Override
public Num calculate(BarSeries series, Position position) {
    if (isBreakEvenPosition(position)) {
        return series.one();
    }
    return series.zero();
}
|
/**
 * A long position entered at 100 and exited at 100 is break-even, so the
 * criterion must yield 1.
 */
@Test
public void calculateWithOneLongPosition() {
    MockBarSeries series = new MockBarSeries(numFunction, 100, 105, 110, 100, 95, 105);
    Position position = new Position(Trade.buyAt(0, series), Trade.sellAt(3, series));
    assertNumEquals(1, getCriterion().calculate(series, position));
}
|
/**
 * Spring Data Redis SCAN over a single cluster node, implemented as a
 * stateful cursor: each doScan call issues one SCAN command against the node
 * the previous reply came from, until the server returns cursor "0".
 * Not usable in pipeline/transaction mode.
 */
@Override
public Cursor<byte[]> scan(RedisClusterNode node, ScanOptions options) {
    return new ScanCursor<byte[]>(0, options) {
        // Node to send the next SCAN to; null once iteration is exhausted.
        private RedisClient client = getEntry(node);
        @Override
        protected ScanIteration<byte[]> doScan(long cursorId, ScanOptions options) {
            if (isQueueing() || isPipelined()) {
                throw new UnsupportedOperationException("'SSCAN' cannot be called in pipeline / transaction mode.");
            }
            if (client == null) {
                // Previous reply ended the scan; signal completion.
                return null;
            }
            List<Object> args = new ArrayList<Object>();
            // NOTE(review): 101010101010101010L appears to be a sentinel
            // cursor value mapped back to a fresh scan (cursor 0) — confirm
            // against the ScanCursor/Redisson contract.
            if (cursorId == 101010101010101010L) {
                cursorId = 0;
            }
            // Redis cursors are unsigned 64-bit values.
            args.add(Long.toUnsignedString(cursorId));
            if (options.getPattern() != null) {
                args.add("MATCH");
                args.add(options.getPattern());
            }
            if (options.getCount() != null) {
                args.add("COUNT");
                args.add(options.getCount());
            }
            RFuture<ListScanResult<byte[]>> f = executorService.readAsync(client, ByteArrayCodec.INSTANCE, RedisCommands.SCAN, args.toArray());
            ListScanResult<byte[]> res = syncFuture(f);
            String pos = res.getPos();
            // Continue against whichever node answered; "0" terminates.
            client = res.getRedisClient();
            if ("0".equals(pos)) {
                client = null;
            }
            return new ScanIteration<byte[]>(Long.parseUnsignedLong(pos), res.getValues());
        }
    }.open();
}
|
@Test
public void testScan() {
    // Seed 1000 distinct keys so the scan spans multiple SCAN pages.
    for (int i = 0; i < 1000; i++) {
        connection.set(("" + i).getBytes(StandardCharsets.UTF_8), ("" + i).getBytes(StandardCharsets.UTF_8));
    }

    // Iterating the cursor to exhaustion must visit every key exactly once.
    Cursor<byte[]> b = connection.scan(ScanOptions.scanOptions().build());
    int counter = 0;
    while (b.hasNext()) {
        b.next();
        counter++;
    }
    assertThat(counter).isEqualTo(1000);
}
|
/**
 * Creates a proxy wrapping the target statement; all state handling is
 * delegated to the superclass.
 *
 * @param connectionWrapper proxied connection that created the statement
 * @param targetStatement   the real statement being wrapped
 * @param targetSQL         the SQL text associated with the statement
 * @throws SQLException if the superclass initialization fails
 */
public StatementProxy(AbstractConnectionProxy connectionWrapper, T targetStatement, String targetSQL)
    throws SQLException {
    super(connectionWrapper, targetStatement, targetSQL);
}
|
@Test
public void testStatementProxy() {
    // Smoke test: the fixture's proxy must have been constructed successfully.
    Assertions.assertNotNull(statementProxy);
}
|
/**
 * Builds Kafka producer properties from the raw option map, then layers the
 * key- and value-side serializer settings on top.
 *
 * @param options     raw connector options
 * @param keySchema   schema describing the record key (may drive serde choice)
 * @param valueSchema schema describing the record value
 * @return the assembled producer properties
 */
static Properties resolveProducerProperties(Map<String, String> options, Object keySchema, Object valueSchema) {
    Properties producerProps = from(options);
    // Apply serde settings for both sides: key first (true), then value (false).
    for (boolean isKey : new boolean[] {true, false}) {
        withSerdeProducerProperties(isKey, options, isKey ? keySchema : valueSchema, producerProps);
    }
    return producerProps;
}
|
@Test
public void test_producerProperties_json() {
    // NOTE(review): the single-argument resolveProducerProperties overload is
    // not visible here -- presumably it defaults both schemas; verify.
    // key: flat JSON format maps the key serializer to raw byte arrays.
    assertThat(resolveProducerProperties(Map.of(OPTION_KEY_FORMAT, JSON_FLAT_FORMAT)))
            .containsExactlyEntriesOf(Map.of(KEY_SERIALIZER, ByteArraySerializer.class.getCanonicalName()));

    // value: only the recognised value format contributes a serializer entry.
    assertThat(resolveProducerProperties(Map.of(
            OPTION_KEY_FORMAT, UNKNOWN_FORMAT,
            OPTION_VALUE_FORMAT, JSON_FLAT_FORMAT
    ))).containsExactlyEntriesOf(Map.of(VALUE_SERIALIZER, ByteArraySerializer.class.getCanonicalName()));
}
|
/**
 * Returns the CPU time consumed by the current thread, in nanoseconds,
 * as reported by the JVM's ThreadMXBean.
 */
@Override
public long getTick() {
    return THREAD_MX_BEAN.getCurrentThreadCpuTime();
}
|
@Test
public void cpuTimeClock() {
    final CpuTimeClock clock = new CpuTimeClock();

    // Wall-clock time must track System.currentTimeMillis within 250ms.
    assertThat((double) clock.getTime())
            .isEqualTo(System.currentTimeMillis(),
                    offset(250D));
    // Tick must track the thread's CPU time within 1ms (values in nanos).
    assertThat((double) clock.getTick())
            .isEqualTo(ManagementFactory.getThreadMXBean().getCurrentThreadCpuTime(),
                    offset(1000000.0));
}
|
/**
 * Validates a module's NOTICE file against the dependencies actually bundled
 * by that module.
 *
 * Checks performed: the NOTICE must not be empty (CRITICAL); its first line
 * must be the module name (TOLERATED); no dependency may be declared twice
 * (CRITICAL); every bundled non-Flink dependency must be declared (CRITICAL);
 * declared-but-not-bundled dependencies are TOLERATED, or SUPPRESSED for
 * modules known to over-declare.
 *
 * @param modulesWithShadedDependencies bundled dependencies keyed by module name
 * @param moduleName                    module whose NOTICE is being checked
 * @param noticeContents                parsed NOTICE, or null if the file was empty
 * @return problems found, grouped by severity
 */
@VisibleForTesting
static Map<Severity, List<String>> checkNoticeFile(
        Map<String, Set<Dependency>> modulesWithShadedDependencies,
        String moduleName,
        @Nullable NoticeContents noticeContents) {

    final Map<Severity, List<String>> problemsBySeverity = new HashMap<>();

    if (noticeContents == null) {
        addProblem(problemsBySeverity, Severity.CRITICAL, "The NOTICE file was empty.");
    } else {
        // first line must be the module name.
        if (!noticeContents.getNoticeModuleName().equals(moduleName)) {
            addProblem(
                    problemsBySeverity,
                    Severity.TOLERATED,
                    String.format(
                            "First line does not start with module name. firstLine=%s",
                            noticeContents.getNoticeModuleName()));
        }

        // collect all declared dependencies from NOTICE file, flagging duplicates
        Set<Dependency> declaredDependencies = new HashSet<>();
        for (Dependency declaredDependency : noticeContents.getDeclaredDependencies()) {
            if (!declaredDependencies.add(declaredDependency)) {
                addProblem(
                        problemsBySeverity,
                        Severity.CRITICAL,
                        String.format("Dependency %s is declared twice.", declaredDependency));
            }
        }

        // find all dependencies missing from NOTICE file; Flink's own artifacts
        // never need to be declared
        Collection<Dependency> expectedDependencies =
                modulesWithShadedDependencies.getOrDefault(moduleName, Collections.emptySet())
                        .stream()
                        .filter(
                                dependency ->
                                        !dependency.getGroupId().equals("org.apache.flink"))
                        .collect(Collectors.toList());

        for (Dependency expectedDependency : expectedDependencies) {
            if (!declaredDependencies.contains(expectedDependency)) {
                addProblem(
                        problemsBySeverity,
                        Severity.CRITICAL,
                        String.format("Dependency %s is not listed.", expectedDependency));
            }
        }

        boolean moduleDefinesExcessDependencies =
                MODULES_DEFINING_EXCESS_DEPENDENCIES.contains(moduleName);

        // find all dependencies defined in NOTICE file, which were not expected
        for (Dependency declaredDependency : declaredDependencies) {
            if (!expectedDependencies.contains(declaredDependency)) {
                // over-declaration is downgraded for allow-listed modules
                final Severity severity =
                        moduleDefinesExcessDependencies
                                ? Severity.SUPPRESSED
                                : Severity.TOLERATED;
                addProblem(
                        problemsBySeverity,
                        severity,
                        String.format(
                                "Dependency %s is not bundled, but listed.",
                                declaredDependency));
            }
        }
    }

    return problemsBySeverity;
}
|
@Test
void testCheckNoticeFileRejectsMissingDependency() {
    final String moduleName = "test";

    // The module bundles one non-Flink dependency...
    final Map<String, Set<Dependency>> bundleDependencies = new HashMap<>();
    bundleDependencies.put(
            moduleName, Collections.singleton(Dependency.create("a", "b", "c", null)));

    // ...but the NOTICE declares nothing, so the only problem must be CRITICAL.
    assertThat(
                    NoticeFileChecker.checkNoticeFile(
                            bundleDependencies,
                            moduleName,
                            new NoticeContents(moduleName, Collections.emptyList())))
            .containsOnlyKeys(NoticeFileChecker.Severity.CRITICAL);
}
|
/**
 * OOM recovery loop: keeps killing containers until the memory cgroup no
 * longer reports an under-OOM condition.
 */
@Override
public void run() {
    try {
        // We kill containers until the kernel reports the OOM situation resolved
        // Note: If the kernel has a delay this may kill more than necessary
        while (true) {
            String status = cgroups.getCGroupParam(
                    CGroupsHandler.CGroupController.MEMORY,
                    "",
                    CGROUP_PARAM_MEMORY_OOM_CONTROL);
            if (!status.contains(CGroupsHandler.UNDER_OOM)) {
                break;
            }
            boolean containerKilled = killContainer();
            if (!containerKilled) {
                // This can happen, if SIGKILL did not clean up
                // non-PGID or containers or containers launched by other users
                // or if a process was put to the root YARN cgroup.
                // Still under OOM with nothing left to kill: abort loudly.
                throw new YarnRuntimeException(
                        "Could not find any containers but CGroups " +
                                "reserved for containers ran out of memory. " +
                                "I am giving up");
            }
        }
    } catch (ResourceHandlerException ex) {
        // Reading cgroup params fails once cgroups are torn down at shutdown;
        // treat it as a normal exit rather than an error.
        LOG.warn("Could not fetch OOM status. " +
                "This is expected at shutdown. Exiting.", ex);
    }
}
|
/**
 * Under OOM, when the first kill attempt targets a process that has already
 * exited, the handler must retry with the next candidate PID and succeed,
 * issuing exactly one successful kill overall.
 */
@Test
public void testKillGuaranteedContainerWithKillFailuresUponOOM()
    throws Exception {
    ConcurrentHashMap<ContainerId, Container> containers =
        new ConcurrentHashMap<>();
    Container c1 = createContainer(1, false, 1L, true);
    containers.put(c1.getContainerId(), c1);
    Container c2 = createContainer(2, false, 2L, true);
    containers.put(c2.getContainerId(), c2);
    Container c3 = createContainer(3, true, 2L, true);
    containers.put(c3.getContainerId(), c3);
    ContainerExecutor ex = createContainerExecutor(containers);
    Context context = mock(Context.class);
    when(context.getContainers()).thenReturn(containers);
    when(context.getContainerExecutor()).thenReturn(ex);

    CGroupsHandler cGroupsHandler = mock(CGroupsHandler.class);
    // Report under_oom once, then resolved, so the handler loop runs one pass.
    when(cGroupsHandler.getCGroupParam(
        CGroupsHandler.CGroupController.MEMORY,
        "",
        CGROUP_PARAM_MEMORY_OOM_CONTROL))
        .thenReturn("under_oom 1").thenReturn("under_oom 0");
    // c1 process has not started, hence no cgroup.procs file yet
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c1.getContainerId().toString(), CGROUP_PROCS_FILE))
        .thenThrow(
            new ResourceHandlerException(CGROUP_PROCS_FILE + " not found"));
    // c2 process has not started, hence no cgroup.procs file yet
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c2.getContainerId().toString(), CGROUP_PROCS_FILE))
        .thenThrow(
            new ResourceHandlerException(CGROUP_PROCS_FILE + " not found"));
    // c3 first reports pid 1234, then an empty procs file on the retry.
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c3.getContainerId().toString(), CGROUP_PROCS_FILE))
        .thenReturn("1234").thenReturn("");
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c3.getContainerId().toString(), CGROUP_PARAM_MEMORY_USAGE_BYTES))
        .thenReturn(getMB(9));
    when(cGroupsHandler.getCGroupParam(CGroupsHandler.CGroupController.MEMORY,
        c3.getContainerId().toString(), CGROUP_PARAM_MEMORY_MEMSW_USAGE_BYTES))
        .thenReturn(getMB(9));

    DefaultOOMHandler handler =
        new DefaultOOMHandler(context, false) {
          @Override
          protected CGroupsHandler getCGroupsHandler() {
            return cGroupsHandler;
          }
        };
    handler.run();

    // NOTE(review): the successful kill is expected against pid 1235 --
    // presumably the executor's fallback PID for c3; confirm in the fixture.
    verify(ex, times(1)).signalContainer(
        new ContainerSignalContext.Builder()
            .setPid("1235")
            .setContainer(c3)
            .setSignal(ContainerExecutor.Signal.KILL)
            .build()
    );
    // Exactly one kill signal in total.
    verify(ex, times(1)).signalContainer(any());
}
|
/**
 * Returns the OWASP ASVS 4.0 requirement ids of the given level that belong
 * to the given category (requirement ids use the "category.section.item"
 * dotted form, so matching is a prefix check on "category.").
 *
 * @param category category id, e.g. "2"
 * @param level    ASVS level whose requirement list is consulted
 * @return the matching requirement ids (possibly empty)
 */
public static Set<String> getRequirementsForCategoryAndLevel(String category, int level) {
    // Hoist the invariant prefix out of the filter lambda so it is built once,
    // not once per requirement.
    String prefix = category + ".";
    return OWASP_ASVS_40_REQUIREMENTS_BY_LEVEL.get(level).stream()
        .filter(req -> req.startsWith(prefix))
        .collect(Collectors.toSet());
}
|
@Test
void owaspAsvs40_requirements_by_category_and_level_check() {
    // Pins the level-1 requirement counts for every ASVS 4.0 category, so an
    // accidental change to the requirement tables is caught immediately.
    assertEquals(0, getRequirementsForCategoryAndLevel(OwaspAsvs.C1, 1).size());
    assertEquals(31, getRequirementsForCategoryAndLevel(OwaspAsvs.C2, 1).size());
    assertEquals(12, getRequirementsForCategoryAndLevel(OwaspAsvs.C3, 1).size());
    assertEquals(9, getRequirementsForCategoryAndLevel(OwaspAsvs.C4, 1).size());
    assertEquals(27, getRequirementsForCategoryAndLevel(OwaspAsvs.C5, 1).size());
    assertEquals(1, getRequirementsForCategoryAndLevel(OwaspAsvs.C6, 1).size());
    assertEquals(3, getRequirementsForCategoryAndLevel(OwaspAsvs.C7, 1).size());
    assertEquals(7, getRequirementsForCategoryAndLevel(OwaspAsvs.C8, 1).size());
    assertEquals(3, getRequirementsForCategoryAndLevel(OwaspAsvs.C9, 1).size());
    assertEquals(3, getRequirementsForCategoryAndLevel(OwaspAsvs.C10, 1).size());
    assertEquals(5, getRequirementsForCategoryAndLevel(OwaspAsvs.C11, 1).size());
    assertEquals(11, getRequirementsForCategoryAndLevel(OwaspAsvs.C12, 1).size());
    assertEquals(7, getRequirementsForCategoryAndLevel(OwaspAsvs.C13, 1).size());
    assertEquals(16, getRequirementsForCategoryAndLevel(OwaspAsvs.C14, 1).size());
}
|
/**
 * Formats the event through the converter chain; a layout that has not been
 * started produces the empty string instead.
 */
public String doLayout(ILoggingEvent event) {
    if (isStarted()) {
        return writeLoopOnConverters(event);
    }
    return CoreConstants.EMPTY_STRING;
}
|
@Test
public void testWithLettersComingFromLog4j() {
    // Letters: p = level and c = logger
    // Verifies the log4j-style conversion words are accepted by the pattern layout.
    pl.setPattern("%d %p [%t] %c{30} - %m%n");
    pl.start();
    String val = pl.doLayout(getEventObject());
    // 2006-02-01 22:38:06,212 INFO [main] c.q.l.pattern.ConverterTest - Some
    // message
    String regex = ClassicTestConstants.ISO_REGEX + " INFO " + MAIN_REGEX
            + " c.q.l.c.pattern.ConverterTest - Some message\\s*";
    assertThat(val, matchesPattern(regex));
}
|
/**
 * Schedules a task on the shared long-polling executor with a fixed delay
 * between the end of one execution and the start of the next.
 *
 * @param runnable     task to run
 * @param initialDelay delay before the first execution
 * @param delay        delay between consecutive executions
 * @param unit         time unit for both delays
 */
public static void scheduleLongPolling(Runnable runnable, long initialDelay, long delay, TimeUnit unit) {
    LONG_POLLING_EXECUTOR.scheduleWithFixedDelay(runnable, initialDelay, delay, unit);
}
|
@Test
void testScheduleLongPollingV2() throws InterruptedException {
    AtomicInteger atomicInteger = new AtomicInteger();
    Runnable runnable = atomicInteger::incrementAndGet;

    // NOTE(review): this uses a three-argument overload (delay, unit) that is
    // not visible here -- presumably a one-shot schedule; confirm in ConfigExecutor.
    ConfigExecutor.scheduleLongPolling(runnable, 20, TimeUnit.MILLISECONDS);

    // Not yet executed immediately after scheduling...
    assertEquals(0, atomicInteger.get());
    // ...but exactly once after the 20ms delay has elapsed.
    TimeUnit.MILLISECONDS.sleep(40);
    assertEquals(1, atomicInteger.get());
}
|
/**
 * Handles client events by replicating them to peer servers. Standalone
 * deployments have no peers, so the event is ignored there. Verify-failure
 * events are re-synced only to the server that failed verification; every
 * other client event is synced to all servers.
 */
@Override
public void onEvent(Event event) {
    if (EnvUtil.getStandaloneMode()) {
        return;
    }
    if (event instanceof ClientEvent.ClientVerifyFailedEvent) {
        syncToVerifyFailedServer((ClientEvent.ClientVerifyFailedEvent) event);
        return;
    }
    syncToAllServer((ClientEvent) event);
}
|
@Test
void testOnClientChangedEventWithoutClient() {
    // A change event carrying no client must not trigger any distro sync,
    // neither targeted nor broadcast.
    distroClientDataProcessor.onEvent(new ClientEvent.ClientChangedEvent(null));
    verify(distroProtocol, never()).syncToTarget(any(), any(), anyString(), anyLong());
    verify(distroProtocol, never()).sync(any(), any());
}
|
/**
 * Renders the rule's description sections as HTML in the rule's declared
 * description format.
 *
 * @param ruleDto rule whose description is rendered
 * @return the HTML description, or null when the rule declares no format
 */
@CheckForNull
public String getDescriptionAsHtml(RuleDto ruleDto) {
    // Read the format once: the original re-read it through a redundant
    // Objects.requireNonNull after this explicit null check.
    RuleDto.Format format = ruleDto.getDescriptionFormat();
    if (format == null) {
        return null;
    }
    Collection<RuleDescriptionSectionDto> ruleDescriptionSectionDtos = ruleDto.getRuleDescriptionSectionDtos();
    return retrieveDescription(ruleDescriptionSectionDtos, format);
}
|
@Test
public void getHtmlDescriptionAsIs() {
    // A rule already in HTML format must be returned verbatim, untouched.
    RuleDto rule = new RuleDto().setDescriptionFormat(RuleDto.Format.HTML).addRuleDescriptionSectionDto(HTML_SECTION).setType(RuleType.BUG);
    String html = ruleDescriptionFormatter.getDescriptionAsHtml(rule);
    assertThat(html).isEqualTo(HTML_SECTION.getContent());
}
|
/**
 * Processes the NVD JSON file into the CVE database, choosing the item-source
 * and decompression strategy from the file name: ".jsonarray.gz" is a gzipped
 * JSON array, any other ".gz" is the gzipped CVE API 2.0 format, and anything
 * else is read as a plain JSON array. Streams are closed in declaration order
 * by try-with-resources.
 */
@Override
public NvdApiProcessor call() throws Exception {
    if (jsonFile.getName().endsWith(".jsonarray.gz")) {
        try (InputStream fis = Files.newInputStream(jsonFile.toPath());
                InputStream is = new BufferedInputStream(new GZIPInputStream(fis));
                CveItemSource<DefCveItem> itemSource = new JsonArrayCveItemSource(is)) {
            updateCveDb(itemSource);
        }
    } else if (jsonFile.getName().endsWith(".gz")) {
        try (InputStream fis = Files.newInputStream(jsonFile.toPath());
                InputStream is = new BufferedInputStream(new GZIPInputStream(fis));
                CveItemSource<DefCveItem> itemSource = new CveApiJson20CveItemSource(is)) {
            updateCveDb(itemSource);
        }
    } else {
        try (InputStream fis = Files.newInputStream(jsonFile.toPath());
                InputStream is = new BufferedInputStream(fis);
                CveItemSource<DefCveItem> itemSource = new JsonArrayCveItemSource(is)) {
            updateCveDb(itemSource);
        }
    }
    // Record completion time for progress/duration reporting.
    endTime = System.currentTimeMillis();
    return this;
}
|
@Test
public void unspecifiedFileName() throws Exception {
    // An empty file whose name matches no known suffix must fall through to
    // the plain JSON-array branch and complete without throwing.
    try (CveDB cve = new CveDB(getSettings())) {
        File file = File.createTempFile("test", "test");
        writeFileString(file, "");
        NvdApiProcessor processor = new NvdApiProcessor(null, file);
        processor.call();
    }
}
|
/**
 * Returns an iterator over the service-definition classes registered under
 * the given factory id, loading each class and checking it against the
 * expected type.
 *
 * @param expectedType type every returned class must be assignable to
 * @param factoryId    service-definition lookup key
 * @param classLoader  loader used to locate and define the classes
 */
public static <T> Iterator<Class<T>> classIterator(Class<T> expectedType, String factoryId, ClassLoader classLoader) {
    Set<ServiceDefinition> serviceDefinitions = getServiceDefinitions(factoryId, classLoader);
    return new ClassIterator<>(serviceDefinitions, expectedType);
}
|
@Test
public void testMultipleClassloaderLoadsTheSameClass() throws Exception {
    ClassLoader parent = this.getClass().getClassLoader();

    //child classloader will steal bytecode from the parent and will define classes on its own
    ClassLoader childLoader = new StealingClassloader(parent);

    Class<?> interfaceClass = childLoader.loadClass(DataSerializerHook.class.getName());
    Iterator<? extends Class<?>> iterator
            = ServiceLoader.classIterator(interfaceClass, "com.hazelcast.DataSerializerHook", childLoader);

    //make sure some hook were found.
    assertTrue(iterator.hasNext());
    // Every discovered hook must have been defined by the child loader, not
    // delegated to the parent.
    while (iterator.hasNext()) {
        Class<?> hook = iterator.next();
        assertEquals(childLoader, hook.getClassLoader());
    }
}
|
/**
 * Renders the ratio as a percentage string using the instance's percent
 * formatter (locale-dependent output).
 */
@Override
public String toString() {
    return pf.format(ratioValue());
}
|
@Test
public void testToString() {
    // Build the expected strings with the same locale-dependent formatter so
    // the test is stable across machines.
    NumberFormat pf = DecimalFormat.getPercentInstance(); //Varies by machine
    pf.setMaximumFractionDigits(3);
    assertEquals(pf.format(0.12345f), MilliPct.ofMilliPct(12345).toString());
    assertEquals(pf.format(-0.12345f), MilliPct.ofMilliPct(-12345).toString());
}
|
/**
 * Resolves parameter references inside the given object. When the object is
 * itself a ParamScope, its own definitions are layered over this resolver
 * before its leaves and child nodes are resolved.
 */
public <T> void resolve(T resolvable) {
    ParamResolver effectiveResolver = ParamScope.class.isAssignableFrom(resolvable.getClass())
            ? ((ParamScope) resolvable).applyOver(this)
            : this;
    resolveStringLeaves(resolvable, effectiveResolver);
    resolveNonStringLeaves(resolvable, effectiveResolver);
    resolveNodes(resolvable, effectiveResolver);
}
|
@Test
public void shouldUseValidationErrorKeyAnnotationForFieldNameInCaseOfException() {
    PipelineConfig pipelineConfig = PipelineConfigMother.createPipelineConfig("cruise", "dev", "ant", "nant");
    // "#a" is an invalid param reference: '#' must introduce a param pattern
    // or be escaped as '##'.
    FetchTask task = new FetchTask(new CaseInsensitiveString("cruise"), new CaseInsensitiveString("dev"), new CaseInsensitiveString("ant"), "#a", "dest");
    pipelineConfig.get(0).getJobs().getJob(new CaseInsensitiveString("nant")).addTask(task);

    new ParamResolver(new ParamSubstitutionHandlerFactory(params(param("foo", "pavan"), param("bar", "jj"))), fieldCache).resolve(pipelineConfig);

    // The error must be keyed by the annotated field name (src), not the raw
    // Java field name.
    assertThat(task.errors().isEmpty(), is(false));
    assertThat(task.errors().on(FetchTask.SRC), is("Error when processing params for '#a' used in field 'src', # must be followed by a parameter pattern or escaped by another #"));
}
|
/**
 * Reflectively instantiates a PluginConfig subclass and populates its fields
 * from the params map.
 *
 * For every non-static field (declared anywhere up the class hierarchy), the
 * value is looked up under the field's {@code @Name} annotation value and
 * assigned only when its runtime class exactly equals the field type (note:
 * subclasses and primitives are therefore skipped). The special macro-fields
 * field is always initialized to an empty set.
 *
 * @param params      plugin parameters keyed by @Name value
 * @param configClass config class to instantiate; must not be null
 * @return the populated config, or null if instantiation produced null
 * @throws IllegalArgumentException if configClass is null
 */
static @Nullable <T extends PluginConfig> T getPluginConfig(
    Map<String, Object> params, Class<T> configClass) {
    // Validate configClass
    if (configClass == null) {
        throw new IllegalArgumentException("Config class must be not null!");
    }

    // Collect non-static fields from the class and all its superclasses
    // (stopping before Object).
    List<Field> allFields = new ArrayList<>();
    Class<?> currClass = configClass;
    while (currClass != null && !currClass.equals(Object.class)) {
        allFields.addAll(
            Arrays.stream(currClass.getDeclaredFields())
                .filter(f -> !Modifier.isStatic(f.getModifiers()))
                .collect(Collectors.toList()));
        currClass = currClass.getSuperclass();
    }
    InstantiatorFactory instantiatorFactory = new InstantiatorFactory(false);
    @Initialized T config = instantiatorFactory.get(TypeToken.of(configClass)).create();

    if (config != null) {
        for (Field field : allFields) {
            field.setAccessible(true);

            Class<?> fieldType = field.getType();
            Name declaredAnnotation = field.getDeclaredAnnotation(Name.class);
            Object fieldValue =
                declaredAnnotation != null ? params.get(declaredAnnotation.value()) : null;
            if (fieldValue != null && fieldType.equals(fieldValue.getClass())) {
                try {
                    field.set(config, fieldValue);
                } catch (IllegalAccessException e) {
                    // Best-effort population: log and continue with other fields.
                    LOG.error("Can not set a field with value {}", fieldValue);
                }
            } else if (field.getName().equals(MACRO_FIELDS_FIELD_NAME)) {
                // The framework's macro-fields member gets a safe empty default.
                try {
                    field.set(config, Collections.emptySet());
                } catch (IllegalAccessException e) {
                    LOG.error("Can not set macro fields");
                }
            }
        }
    }
    return config;
}
|
@Test
public void testBuildingPluginConfigFromParamsMap() {
    // Building a ServiceNow config from the params map must produce a non-null,
    // fully populated object; any exception is a failure.
    try {
        ServiceNowSourceConfig config =
            PluginConfigInstantiationUtils.getPluginConfig(
                TEST_SERVICE_NOW_PARAMS_MAP, ServiceNowSourceConfig.class);
        assertNotNull(config);
        validateServiceNowConfigObject(TEST_SERVICE_NOW_PARAMS_MAP, config);
    } catch (Exception e) {
        LOG.error("Error occurred while building the config object", e);
        fail();
    }
}
|
/**
 * Reads a little-endian medical float (SFLOAT: 16-bit, FLOAT: 32-bit) from
 * the value array at the given offset.
 *
 * Reserved bit patterns are mapped to the corresponding Java float specials
 * (+Inf, NaN, -Inf) before the normal mantissa/exponent decode; the patterns
 * appear to follow the IEEE 11073-20601 reserved values -- TODO confirm.
 *
 * @param formatType FORMAT_SFLOAT or FORMAT_FLOAT
 * @param offset     index of the first byte of the value
 * @return the decoded value, or null when the value does not fit or the
 *         format type is not a float format
 */
@Nullable
public Float getFloatValue(@FloatFormat final int formatType,
                           @IntRange(from = 0) final int offset) {
    // Bounds check: the full encoded value must fit inside the data array.
    if ((offset + getTypeLen(formatType)) > size()) return null;

    switch (formatType) {
        case FORMAT_SFLOAT -> {
            // 0x07FE -> +infinity
            if (mValue[offset + 1] == 0x07 && mValue[offset] == (byte) 0xFE)
                return Float.POSITIVE_INFINITY;
            // 0x07FF, 0x0800, 0x0801 -> NaN (not-a-number / reserved / NRes)
            if ((mValue[offset + 1] == 0x07 && mValue[offset] == (byte) 0xFF) ||
                (mValue[offset + 1] == 0x08 && mValue[offset] == 0x00) ||
                (mValue[offset + 1] == 0x08 && mValue[offset] == 0x01))
                return Float.NaN;
            // 0x0802 -> -infinity
            if (mValue[offset + 1] == 0x08 && mValue[offset] == 0x02)
                return Float.NEGATIVE_INFINITY;

            return bytesToFloat(mValue[offset], mValue[offset + 1]);
        }
        case FORMAT_FLOAT -> {
            // Special values only occur when the top exponent byte is zero.
            if (mValue[offset + 3] == 0x00) {
                if (mValue[offset + 2] == 0x7F && mValue[offset + 1] == (byte) 0xFF) {
                    // 0x007FFFFE -> +infinity, 0x007FFFFF -> NaN
                    if (mValue[offset] == (byte) 0xFE)
                        return Float.POSITIVE_INFINITY;
                    if (mValue[offset] == (byte) 0xFF)
                        return Float.NaN;
                } else if (mValue[offset + 2] == (byte) 0x80 && mValue[offset + 1] == 0x00) {
                    // 0x00800000 / 0x00800001 -> NaN, 0x00800002 -> -infinity
                    if (mValue[offset] == 0x00 || mValue[offset] == 0x01)
                        return Float.NaN;
                    if (mValue[offset] == 0x02)
                        return Float.NEGATIVE_INFINITY;
                }
            }

            return bytesToFloat(mValue[offset], mValue[offset + 1],
                    mValue[offset + 2], mValue[offset + 3]);
        }
    }
    // Not a float format.
    return null;
}
|
@Test
public void setValue_SFLOAT_cutPrecision() {
    // SFLOAT has a 12-bit mantissa, so 1000400 cannot be represented exactly;
    // the round trip must come back as the nearest representable 1000000.
    final MutableData data = new MutableData(new byte[2]);
    data.setValue(1000400f, Data.FORMAT_SFLOAT, 0);
    final float value = data.getFloatValue(Data.FORMAT_SFLOAT, 0);
    assertEquals(1000000f, value, 0.00);
}
|
/**
 * Removes the shard root directories backing every physical partition of the
 * table, deduplicating by file path so shared paths are removed once.
 *
 * @return true only if every directory was removed; false when any removal
 *         failed or any partition's shard info could not be fetched
 */
public boolean cleanTable() {
    boolean allRemoved = true;
    // Paths already handled; partitions can share the same root directory.
    Set<String> removedPaths = new HashSet<>();
    for (PhysicalPartition partition : table.getAllPhysicalPartitions()) {
        try {
            WarehouseManager manager = GlobalStateMgr.getCurrentState().getWarehouseMgr();
            Warehouse warehouse = manager.getBackgroundWarehouse();
            ShardInfo shardInfo = LakeTableHelper.getAssociatedShardInfo(partition, warehouse.getId()).orElse(null);
            // No shard info (e.g. shard gone) or path already processed: skip.
            if (shardInfo == null || removedPaths.contains(shardInfo.getFilePath().getFullPath())) {
                continue;
            }
            removedPaths.add(shardInfo.getFilePath().getFullPath());
            if (!LakeTableHelper.removeShardRootDirectory(shardInfo)) {
                allRemoved = false;
            }
        } catch (StarClientException e) {
            // Best-effort cleanup: record the failure and continue with the
            // remaining partitions.
            LOG.warn("Fail to get shard info of partition {}: {}", partition.getId(), e.getMessage());
            allRemoved = false;
        }
    }
    return allRemoved;
}
|
/**
 * When the shard no longer exists (NOT_EXIST from StarOS), getAssociatedShardInfo
 * yields no shard info, the partition is skipped, and cleanTable still reports
 * success.
 */
@Test
public void testShardNotFound(@Mocked LakeTable table,
                              @Mocked PhysicalPartition partition,
                              @Mocked MaterializedIndex index,
                              @Mocked LakeTablet tablet,
                              @Mocked LakeService lakeService) throws StarClientException {
    LakeTableCleaner cleaner = new LakeTableCleaner(table);
    // Both warehouse lookups resolve to the default warehouse.
    new MockUp<WarehouseManager>() {
        @Mock
        public Warehouse getWarehouse(String warehouseName) {
            return new DefaultWarehouse(WarehouseManager.DEFAULT_WAREHOUSE_ID, WarehouseManager.DEFAULT_WAREHOUSE_NAME);
        }

        @Mock
        public Warehouse getWarehouse(long warehouseId) {
            return new DefaultWarehouse(WarehouseManager.DEFAULT_WAREHOUSE_ID, WarehouseManager.DEFAULT_WAREHOUSE_NAME);
        }
    };
    // One partition -> one index -> one tablet, each fetched exactly once.
    new Expectations() {
        {
            table.getAllPhysicalPartitions();
            result = Lists.newArrayList(partition);
            minTimes = 1;
            maxTimes = 1;

            partition.getMaterializedIndices(MaterializedIndex.IndexExtState.ALL);
            result = Lists.newArrayList(index);
            minTimes = 1;
            maxTimes = 1;

            index.getTablets();
            result = Lists.newArrayList(tablet);
            minTimes = 1;
            maxTimes = 1;
        }
    };
    // Inject the shard-missing condition.
    new MockUp<StarOSAgent>() {
        @Mock
        public ShardInfo getShardInfo(long shardId, long workerGroupId) throws StarClientException {
            throw new StarClientException(StatusCode.NOT_EXIST, "injected error");
        }
    };

    Assert.assertTrue(cleaner.cleanTable());
}
|
/**
 * Converts a protobuf-cached issue back into a DefaultIssue.
 *
 * Optional protobuf fields (checked via has*) map to null when absent; epoch
 * millis map to Date; serialized tag/variant lists are split back into sets;
 * comments, impacts and field-diff changes are re-attached one by one.
 */
@VisibleForTesting
static DefaultIssue toDefaultIssue(IssueCache.Issue next) {
    DefaultIssue defaultIssue = new DefaultIssue();
    defaultIssue.setKey(next.getKey());
    defaultIssue.setType(RuleType.valueOf(next.getRuleType()));
    defaultIssue.setComponentUuid(next.hasComponentUuid() ? next.getComponentUuid() : null);
    defaultIssue.setComponentKey(next.getComponentKey());
    defaultIssue.setProjectUuid(next.getProjectUuid());
    defaultIssue.setProjectKey(next.getProjectKey());
    defaultIssue.setRuleKey(RuleKey.parse(next.getRuleKey()));
    defaultIssue.setLanguage(next.hasLanguage() ? next.getLanguage() : null);
    defaultIssue.setSeverity(next.hasSeverity() ? next.getSeverity() : null);
    defaultIssue.setManualSeverity(next.getManualSeverity());
    defaultIssue.setMessage(next.hasMessage() ? next.getMessage() : null);
    defaultIssue.setMessageFormattings(next.hasMessageFormattings() ? next.getMessageFormattings() : null);
    defaultIssue.setLine(next.hasLine() ? next.getLine() : null);
    defaultIssue.setGap(next.hasGap() ? next.getGap() : null);
    // Effort is stored as raw minutes/units; wrap it back into a Duration.
    defaultIssue.setEffort(next.hasEffort() ? Duration.create(next.getEffort()) : null);
    defaultIssue.setStatus(next.getStatus());
    defaultIssue.setResolution(next.hasResolution() ? next.getResolution() : null);
    defaultIssue.setAssigneeUuid(next.hasAssigneeUuid() ? next.getAssigneeUuid() : null);
    defaultIssue.setAssigneeLogin(next.hasAssigneeLogin() ? next.getAssigneeLogin() : null);
    defaultIssue.setChecksum(next.hasChecksum() ? next.getChecksum() : null);
    defaultIssue.setAuthorLogin(next.hasAuthorLogin() ? next.getAuthorLogin() : null);
    next.getCommentsList().forEach(c -> defaultIssue.addComment(toDefaultIssueComment(c)));
    // Tags and code variants are persisted as delimiter-joined strings.
    defaultIssue.setTags(ImmutableSet.copyOf(STRING_LIST_SPLITTER.split(next.getTags())));
    defaultIssue.setCodeVariants(ImmutableSet.copyOf(STRING_LIST_SPLITTER.split(next.getCodeVariants())));
    defaultIssue.setRuleDescriptionContextKey(next.hasRuleDescriptionContextKey() ? next.getRuleDescriptionContextKey() : null);
    defaultIssue.setLocations(next.hasLocations() ? next.getLocations() : null);
    defaultIssue.setIsFromExternalRuleEngine(next.getIsFromExternalRuleEngine());
    // Timestamps are cached as epoch millis.
    defaultIssue.setCreationDate(new Date(next.getCreationDate()));
    defaultIssue.setUpdateDate(next.hasUpdateDate() ? new Date(next.getUpdateDate()) : null);
    defaultIssue.setCloseDate(next.hasCloseDate() ? new Date(next.getCloseDate()) : null);
    defaultIssue.setCurrentChangeWithoutAddChange(next.hasCurrentChanges() ? toDefaultIssueChanges(next.getCurrentChanges()) : null);
    defaultIssue.setNew(next.getIsNew());
    defaultIssue.setIsOnChangedLine(next.getIsOnChangedLine());
    defaultIssue.setIsNewCodeReferenceIssue(next.getIsNewCodeReferenceIssue());
    defaultIssue.setCopied(next.getIsCopied());
    defaultIssue.setBeingClosed(next.getBeingClosed());
    defaultIssue.setOnDisabledRule(next.getOnDisabledRule());
    defaultIssue.setChanged(next.getIsChanged());
    defaultIssue.setSendNotifications(next.getSendNotifications());
    defaultIssue.setSelectedAt(next.hasSelectedAt() ? next.getSelectedAt() : null);
    defaultIssue.setQuickFixAvailable(next.getQuickFixAvailable());
    defaultIssue.setPrioritizedRule(next.getIsPrioritizedRule());
    defaultIssue.setIsNoLongerNewCodeReferenceIssue(next.getIsNoLongerNewCodeReferenceIssue());
    defaultIssue.setCleanCodeAttribute(next.hasCleanCodeAttribute() ? CleanCodeAttribute.valueOf(next.getCleanCodeAttribute()) : null);
    if (next.hasAnticipatedTransitionUuid()) {
        defaultIssue.setAnticipatedTransitionUuid(next.getAnticipatedTransitionUuid());
    }

    // Impacts and change history are stored as repeated messages.
    for (IssueCache.Impact impact : next.getImpactsList()) {
        defaultIssue.addImpact(SoftwareQuality.valueOf(impact.getSoftwareQuality()), Severity.valueOf(impact.getSeverity()));
    }

    for (IssueCache.FieldDiffs protoFieldDiffs : next.getChangesList()) {
        defaultIssue.addChange(toDefaultIssueChanges(protoFieldDiffs));
    }

    return defaultIssue;
}
|
@Test
public void toDefaultIssue_whenRuleDescriptionContextKeyAbsent_shouldNotSetItInDefaultIssue() {
    // An issue without the optional context key must round-trip to an empty
    // Optional, not a spurious value.
    IssueCache.Issue issue = prepareIssueWithCompulsoryFields()
        .build();

    DefaultIssue defaultIssue = ProtobufIssueDiskCache.toDefaultIssue(issue);

    assertThat(defaultIssue.getRuleDescriptionContextKey()).isEmpty();
}
|
/**
 * Appends a length-prefixed short string at the current write index; writing
 * null is a no-op. A duplicated buffer view is used so this Msg's own buffer
 * position is left untouched.
 *
 * @param data string to append, at most 255 bytes once encoded; may be null
 * @return this Msg, for chaining
 */
public Msg putShortString(String data)
{
    if (data != null) {
        ByteBuffer view = buf.duplicate();
        view.position(writeIndex);
        writeIndex += Wire.putShortString(view, data);
    }
    return this;
}
|
@Test(expected = IllegalArgumentException.class)
public void testPutStringLongerThan255InBuilder()
{
    // A short string carries a single length byte, so 256 characters must be
    // rejected with IllegalArgumentException.
    final Msg.Builder msg = new Msg.Builder();
    char[] charArray = new char[256];
    Arrays.fill(charArray, ' ');
    String str = new String(charArray);
    msg.putShortString(str);
}
|
/**
 * Determines the meter type from the endpoint's remaining part. An optional
 * "&lt;type&gt;:" prefix selects the type by name; without one the default
 * meter type is used.
 */
Meter.Type getMetricsType(String remaining) {
    String prefix = StringHelper.before(remaining, ":");
    if (prefix == null) {
        return DEFAULT_METER_TYPE;
    }
    return MicrometerUtils.getByName(prefix);
}
|
@Test
public void testGetMetricsTypeNotFound() {
    // An unrecognized type prefix must surface as a RuntimeCamelException.
    assertThrows(RuntimeCamelException.class,
            () -> component.getMetricsType("unknown-metrics:metrics-name"));
}
|
/**
 * Returns the configured Splunk HEC endpoint path.
 */
public String getSplunkEndpoint() {
    return this.splunkEndpoint;
}
|
@Test
public void testDefaultEndpoint() {
    // A fresh configuration must default to the standard HEC event endpoint.
    SplunkHECConfiguration config = new SplunkHECConfiguration();
    assertEquals("/services/collector/event", config.getSplunkEndpoint());
}
|
/**
 * Emits one request-log line as an INFO logback event on the "http.request"
 * logger, appending it to all attached appenders.
 *
 * @param entry the formatted request-log line
 * @throws IOException declared by the interface; not thrown here
 */
@Override
public void write(String entry) throws IOException {
    final LoggingEvent event = new LoggingEvent();
    event.setLevel(Level.INFO);
    event.setLoggerName("http.request");
    event.setMessage(entry);
    event.setTimeStamp(System.currentTimeMillis());
    event.setLoggerContext(loggerContext);

    appenders.appendLoopOnAppenders(event);
}
|
@Test
void logsRequestsToTheAppenders() throws Exception {
    final String requestLine = "1, 2 buckle my shoe";
    slf4jRequestLog.write(requestLine);

    // The appender may be invoked asynchronously; allow up to 1s.
    final ArgumentCaptor<ILoggingEvent> captor = ArgumentCaptor.forClass(ILoggingEvent.class);
    verify(appender, timeout(1000)).doAppend(captor.capture());

    // The captured event must carry the raw line at INFO level.
    final ILoggingEvent event = captor.getValue();
    assertThat(event.getFormattedMessage()).isEqualTo(requestLine);
    assertThat(event.getLevel()).isEqualTo(Level.INFO);
    assertThat(event).hasToString("[INFO] 1, 2 buckle my shoe");
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.