| focal_method (string, lengths 13–60.9k) | test_case (string, lengths 25–109k) |
|---|---|
@Operation(summary= "Cancel current session from digid")
@PostMapping(value = "cancel", consumes = "application/json", produces = "application/json")
public Map<String, String> cancel(@Valid @RequestBody AppRequest request) {
    RdaSession session = null;
    try {
        session = findSession(request, null);
    } catch (NotFoundException notFoundException) {
        // Pass the exception to the logger so the cause of the lookup failure is not
        // lost; cancelling an unknown session is not an error for the caller.
        logger.info("Session not found", notFoundException);
    }
    if (session != null) {
        session.setStatus(Status.CANCELLED);
        sessionRepo.save(session);
    }
    // Cancellation is idempotent: report OK even when no session was found.
    return ImmutableMap.of("status", "OK");
}
|
@Test
public void testCancelRestService() {
    // Arrange: a stored session whose id matches the incoming cancel request.
    RdaSession storedSession = new RdaSession();
    storedSession.setId("sessionId");
    mockSession(storedSession);
    AppRequest cancelRequest = new AppRequest();
    cancelRequest.setSessionId("sessionId");
    // Act: cancel through the controller.
    Map<String, String> response = controller.cancel(cancelRequest);
    // Assert: OK is reported and the (now cancelled) session is persisted exactly once.
    assertEquals("OK", response.get("status"));
    Mockito.verify(sessionRepo, Mockito.times(1)).save(Mockito.isA(RdaSession.class));
}
|
/**
 * Applies parameter mapping for the matched rule; when no rule handle has been
 * cached, the request passes through the chain unmodified.
 */
@Override
public Mono<Void> doExecute(final ServerWebExchange exchange, final ShenyuPluginChain chain, final SelectorData selector, final RuleData rule) {
    final ParamMappingRuleHandle ruleHandle =
            ParamMappingPluginDataHandler.CACHED_HANDLE.get().obtainHandle(CacheKeyUtils.INST.getKey(rule));
    if (Objects.isNull(ruleHandle)) {
        LOG.error("param mapping rule configuration is null :{}", rule.getId());
        return chain.execute(exchange);
    }
    // Dispatch to the strategy registered for the request's content type.
    final MediaType contentType = exchange.getRequest().getHeaders().getContentType();
    return match(contentType).apply(exchange, chain, ruleHandle);
}
|
// Verifies doExecute completes when a rule handle has been cached for the rule.
@Test
public void testDoExecute() {
SelectorData selectorData = mock(SelectorData.class);
// Downstream chain completes immediately.
when(this.chain.execute(any())).thenReturn(Mono.empty());
// Prime the handle cache so doExecute finds a non-null ParamMappingRuleHandle.
paramMappingPluginDataHandler.handlerRule(ruleData);
StepVerifier.create(paramMappingPlugin.doExecute(this.exchange, this.chain, selectorData, this.ruleData)).expectSubscription().verifyComplete();
}
|
/**
 * Resolves the jvm.options file and applies the JVM options it contains together with
 * options supplied through the environment.
 *
 * @param args       args[0] is the home directory; optional args[1] is an explicit
 *                   jvm.options path
 * @param lsJavaOpts JVM options supplied via the environment
 */
static void handleJvmOptions(String[] args, String lsJavaOpts) {
    final JvmOptionsParser parser = new JvmOptionsParser(args[0]);
    final String jvmOpts = args.length == 2 ? args[1] : null;
    try {
        Optional<Path> jvmOptions = parser.lookupJvmOptionsFile(jvmOpts);
        parser.handleJvmOptions(jvmOptions, lsJavaOpts);
    } catch (JvmOptionsFileParserException pex) {
        // Terminate each report with %n: without it the summary and the per-line
        // reports were printed as one long unreadable line on stderr.
        System.err.printf(Locale.ROOT,
            "encountered [%d] error%s parsing [%s]%n",
            pex.invalidLines().size(),
            pex.invalidLines().size() == 1 ? "" : "s",
            pex.jvmOptionsFile());
        int errorCounter = 0;
        for (final Map.Entry<Integer, String> entry : pex.invalidLines().entrySet()) {
            errorCounter++;
            System.err.printf(Locale.ROOT,
                "[%d]: encountered improperly formatted JVM option in [%s] on line number [%d]: [%s]%n",
                errorCounter,
                pex.jvmOptionsFile(),
                entry.getKey(),
                entry.getValue());
        }
    } catch (IOException ex) {
        // Include the underlying reason instead of discarding the exception detail.
        System.err.println("Error accessing jvm.options file: " + ex.getMessage());
        System.exit(1);
    }
}
|
// Regression test: an io.netty.allocator.maxOrder the user set explicitly in
// jvm.options must survive parsing (the default-maxOrder rule must not override it).
@Test
public void testNettyMaxOrderRuleDoNotAppliesIfAlreadyDefinedExplicitlyByUser() throws IOException {
File optionsFile = writeIntoTempOptionsFile(writer -> writer.println("-Dio.netty.allocator.maxOrder=10"));
JvmOptionsParser.handleJvmOptions(new String[] {"/path/to/ls_home", optionsFile.toString()}, "-Dcli.opts=something");
// Verify
final String output = outputStreamCaptor.toString();
assertTrue("Netty's maxOrder MUST be forcibly defined to the expected default", output.contains("-Dio.netty.allocator.maxOrder=10"));
}
|
/**
 * Parses the given schema file, delegating to the two-argument overload with a null
 * second argument.
 *
 * @param file the file to parse
 * @return the parse result
 * @throws IOException          if the file cannot be read
 * @throws SchemaParseException if the content is not a valid schema
 */
public ParseResult parse(File file) throws IOException, SchemaParseException {
return parse(file, null);
}
|
@Test
void testParseByCustomParser() {
    // Text recognised by the custom DummySchemaParser must yield its fixed schema.
    SchemaParser schemaParser = new SchemaParser();
    Schema mainSchema = schemaParser.parse(DummySchemaParser.SCHEMA_TEXT_ONE).mainSchema();
    assertEquals(DummySchemaParser.FIXED_SCHEMA, mainSchema);
}
|
/**
 * Creates the user-specified InjectorSource via its public zero-argument constructor.
 * Reflection failures and a failed cast are all reported uniformly, preserving the
 * original exception as the cause.
 */
static InjectorSource instantiateUserSpecifiedInjectorSource(Class<?> injectorSourceClass) {
    try {
        Object instance = injectorSourceClass.getConstructor().newInstance();
        return (InjectorSource) instance;
    } catch (Exception cause) {
        // Broad catch is deliberate: the cast above must also be funnelled into the
        // same domain exception.
        String message = format("Instantiation of '%s' failed. Check the caused by exception and ensure your " +
            "InjectorSource implementation is accessible and has a public zero args constructor.",
            injectorSourceClass.getName());
        throw new InjectorSourceInstantiationFailed(message, cause);
    }
}
|
// A class that instantiates fine but is not an InjectorSource must fail with the
// standard message and carry the ClassCastException as the cause.
@Test
void failsToInstantiateClassNotImplementingInjectorSource() {
Executable testMethod = () -> instantiateUserSpecifiedInjectorSource(String.class);
InjectorSourceInstantiationFailed actualThrown = assertThrows(InjectorSourceInstantiationFailed.class,
testMethod);
assertAll(
() -> assertThat("Unexpected exception message", actualThrown.getMessage(), is(equalTo(
"Instantiation of 'java.lang.String' failed. Check the caused by exception and ensure your InjectorSource implementation is accessible and has a public zero args constructor."))),
() -> assertThat("Unexpected exception cause class", actualThrown.getCause(),
isA(ClassCastException.class)));
}
|
/**
 * Returns the resolved value for this config property.
 *
 * <p>Bug fix: the original resolved the value twice and returned the result of the
 * second resolution, so the logged value could differ from the returned one and any
 * resolution cost was paid twice. Resolve once and return that result.
 */
public T getValue() {
    T resolvedValue = resolveValue();
    if (log.isDebugEnabled()) {
        log.debug("Resolved value for property {}={}", key, resolvedValue);
    }
    return resolvedValue;
}
|
/**
 * Without any override, getValue() must return the declared default for every
 * supported wrapper type and String. The repeated builder chains of the original are
 * factored into the newOption helper below.
 */
@Test
void testResolveValueWithoutOverride() {
    byte defaultByteValue = 9;
    short defaultShortValue = 9;
    long defaultLongValue = 9;
    int defaultIntValue = 9;
    float defaultFloatValue = 9.0f;
    double defaultDoubleValue = 9.0;
    boolean defaultBooleanValue = false;
    String defaultStringValue = "bar";
    ConfigOption<String> testStrConf = newOption("foo", String.class, defaultStringValue);
    ConfigOption<Integer> testIntConf = newOption("fooint", Integer.class, defaultIntValue);
    ConfigOption<Short> testShortConf = newOption("fooshort", Short.class, defaultShortValue);
    ConfigOption<Long> testLongConf = newOption("foolong", Long.class, defaultLongValue);
    ConfigOption<Boolean> testBooleanConf = newOption("foobool", Boolean.class, defaultBooleanValue);
    ConfigOption<Float> testFloatConf = newOption("foofloat", Float.class, defaultFloatValue);
    ConfigOption<Double> testDoubleConf = newOption("foodouble", Double.class, defaultDoubleValue);
    ConfigOption<Byte> testByteConf = newOption("foobyte", Byte.class, defaultByteValue);
    Assertions.assertEquals(defaultStringValue, testStrConf.getValue());
    Assertions.assertEquals(defaultIntValue, testIntConf.getValue());
    Assertions.assertEquals(defaultLongValue, testLongConf.getValue());
    Assertions.assertEquals(defaultBooleanValue, testBooleanConf.getValue());
    Assertions.assertEquals(defaultFloatValue, testFloatConf.getValue());
    Assertions.assertEquals(defaultByteValue, testByteConf.getValue());
    Assertions.assertEquals(defaultShortValue, testShortConf.getValue());
    Assertions.assertEquals(defaultDoubleValue, testDoubleConf.getValue());
}

/** Builds a ConfigOption with the given key, type and default, sharing the test description. */
private static <T> ConfigOption<T> newOption(String key, Class<T> type, T defaultValue) {
    return ConfigOption.<T>builder()
            .key(key)
            .typeParameterClass(type)
            .description("foo foo.")
            .defaultValue(defaultValue)
            .build();
}
|
/**
 * Orders this time stamp relative to another by delegating to the shared comparator;
 * the ordering semantics are whatever that comparator defines.
 */
@Override
public int compareTo(DateTimeStamp dateTimeStamp) {
return comparator.compare(this,dateTimeStamp);
}
|
// A stamp with a smaller time value must order before a larger one.
@Test
void testCompareSmallerTimeStampWithoutDateTime() {
    DateTimeStamp smaller = new DateTimeStamp(123);
    DateTimeStamp greater = new DateTimeStamp(124);
    // Assert the sign, not the raw value: compareTo only guarantees a negative
    // result for "smaller", not -1 exactly.
    assertEquals(-1, Integer.signum(smaller.compareTo(greater)));
}
|
/**
 * Decodes one WAL payload into an event, decoding it as UTF-8 text and tagging the
 * result with its log sequence number.
 */
@Override
public AbstractWALEvent decode(final ByteBuffer data, final BaseLogSequenceNumber logSequenceNumber) {
    byte[] payload = new byte[data.remaining()];
    data.get(payload);
    String dataText = new String(payload, StandardCharsets.UTF_8);
    // Transaction-aware or transaction-ignoring decoding, per configuration.
    AbstractWALEvent result = decodeWithTX ? decodeDataWithTX(dataText) : decodeDataIgnoreTX(dataText);
    result.setLogSequenceNumber(logSequenceNumber);
    return result;
}
|
// A bytea column encoded as a hex literal must decode into the raw byte[] value, and
// the event must carry the sequence number and the unqualified table name.
@Test
void assertDecodeWriteRowEventWithByteA() {
MppTableData tableData = new MppTableData();
tableData.setTableName("public.test");
tableData.setOpType("INSERT");
tableData.setColumnsName(new String[]{"data"});
tableData.setColumnsType(new String[]{"bytea"});
tableData.setColumnsVal(new String[]{"'\\xff00ab'"});
ByteBuffer data = ByteBuffer.wrap(JsonUtils.toJsonString(tableData).getBytes());
WriteRowEvent actual = (WriteRowEvent) new MppdbDecodingPlugin(null, false, false).decode(data, logSequenceNumber);
assertThat(actual.getLogSequenceNumber(), is(logSequenceNumber));
assertThat(actual.getTableName(), is("test"));
Object byteaObj = actual.getAfterRow().get(0);
assertThat(byteaObj, instanceOf(byte[].class));
assertThat(byteaObj, is(new byte[]{(byte) 0xff, (byte) 0, (byte) 0xab}));
}
|
/**
 * Completes SQL identifiers at the cursor and appends matches to candidates.
 * Returns the completion anchor position from the underlying completer for the
 * no-schema and schema.table branches, and -1 otherwise (see NOTE below).
 */
public int completeName(String buffer, int cursor, List<InterpreterCompletion> candidates,
Map<String, String> aliases) {
CursorArgument cursorArgument = parseCursorArgument(buffer, cursor);
// find schema and table name if they are
String schema;
String table;
String column;
if (cursorArgument.getSchema() == null) { // process all
// No schema before the cursor: offer keywords and schema names together.
List<CharSequence> keywordsCandidates = new ArrayList<>();
List<CharSequence> schemaCandidates = new ArrayList<>();
int keywordsRes = completeKeyword(buffer, cursor, keywordsCandidates);
int schemaRes = completeSchema(buffer, cursor, schemaCandidates);
addCompletions(candidates, keywordsCandidates, CompletionType.keyword.name());
addCompletions(candidates, schemaCandidates, CompletionType.schema.name());
return NumberUtils.max(keywordsRes, schemaRes);
} else {
schema = cursorArgument.getSchema();
if (aliases.containsKey(schema)) { // process alias case
// The "schema" token is actually an alias "schema.table"; split it apart.
String alias = aliases.get(schema);
int pointPos = alias.indexOf('.');
schema = alias.substring(0, pointPos);
table = alias.substring(pointPos + 1);
column = cursorArgument.getColumn();
List<CharSequence> columnCandidates = new ArrayList<>();
// NOTE(review): columnRes is computed but never returned — this branch falls
// through to the trailing -1, unlike the schema.table branch. Confirm intent.
int columnRes = completeColumn(schema, table, column, cursorArgument.getCursorPosition(),
columnCandidates);
addCompletions(candidates, columnCandidates, CompletionType.column.name());
// process schema.table case
} else if (cursorArgument.getTable() != null && cursorArgument.getColumn() == null) {
List<CharSequence> tableCandidates = new ArrayList<>();
table = cursorArgument.getTable();
int tableRes = completeTable(schema, table, cursorArgument.getCursorPosition(),
tableCandidates);
addCompletions(candidates, tableCandidates, CompletionType.table.name());
return tableRes;
} else {
// Fully qualified schema.table.column completion.
List<CharSequence> columnCandidates = new ArrayList<>();
table = cursorArgument.getTable();
column = cursorArgument.getColumn();
// NOTE(review): columnRes is also dropped here; callers receive -1. Confirm intent.
int columnRes = completeColumn(schema, table, column, cursorArgument.getCursorPosition(),
columnCandidates);
addCompletions(candidates, columnCandidates, CompletionType.column.name());
}
}
return -1;
}
|
// Completing after "a." where "a" aliases prod_dds.financial_account must offer that
// table's columns (fixture presumably defines the two columns below — see sqlCompleter setup).
@Test
void testCompleteName_WithAliasAndPoint() {
String buffer = "a.";
int cursor = 2;
List<InterpreterCompletion> candidates = new ArrayList<>();
Map<String, String> aliases = new HashMap<>();
aliases.put("a", "prod_dds.financial_account");
sqlCompleter.completeName(buffer, cursor, candidates, aliases);
assertEquals(2, candidates.size());
assertTrue(candidates.contains(new InterpreterCompletion("account_rk", "account_rk",
CompletionType.column.name())));
assertTrue(candidates.contains(new InterpreterCompletion("account_id", "account_id",
CompletionType.column.name())));
}
|
/**
 * Capacity-management advice around config deletion: when capacity management is
 * disabled, or the config does not exist, the join point proceeds untouched;
 * otherwise deletion plus usage accounting is delegated to do4Delete.
 */
@Around(DELETE_CONFIG)
public Object aroundDeleteConfig(ProceedingJoinPoint pjp, HttpServletRequest request, HttpServletResponse response,
String dataId, String group, String tenant) throws Throwable {
if (!PropertyUtil.isManageCapacity()) {
return pjp.proceed();
}
LOGGER.info("[capacityManagement] aroundDeleteConfig");
// Look the config up first: deleting a non-existent config needs no accounting.
ConfigInfo configInfo = configInfoPersistService.findConfigInfo(dataId, group, tenant);
if (configInfo == null) {
return pjp.proceed();
}
return do4Delete(pjp, response, group, tenant, configInfo);
}
|
// Exercises aroundDeleteConfig three times: (1) config missing — proceed only, no
// accounting; (2) config present — proceed plus DECREMENT usage accounting; (3) the
// join point throws — usage is decremented then rolled back with INCREMENT updates.
@Test
void testAroundDeleteConfigForTenant() throws Throwable {
when(PropertyUtil.isManageCapacity()).thenReturn(true);
when(configInfoPersistService.findConfigInfo(any(), any(), any())).thenReturn(null);
when(capacityService.insertAndUpdateClusterUsage(any(), anyBoolean())).thenReturn(true);
when(capacityService.insertAndUpdateTenantUsage(any(), eq(mockTenant), anyBoolean())).thenReturn(true);
when(capacityService.updateClusterUsage(any())).thenReturn(true);
when(capacityService.updateTenantUsage(any(), eq(mockTenant))).thenReturn(true);
MockHttpServletRequest mockHttpServletRequest = new MockHttpServletRequest();
MockHttpServletResponse mockHttpServletResponse = new MockHttpServletResponse();
// (1) config not found: the aspect must simply proceed.
String localMockResult = (String) capacityManagementAspect.aroundDeleteConfig(proceedingJoinPoint, mockHttpServletRequest,
mockHttpServletResponse, mockDataId, mockGroup, mockTenant);
assertEquals(localMockResult, mockProceedingJoinPointResult);
Mockito.verify(proceedingJoinPoint, Mockito.times(1)).proceed();
// (2) config found: accounting must run once with DECREMENT counters.
when(configInfoPersistService.findConfigInfo(any(), any(), any())).thenReturn(new ConfigInfoWrapper());
localMockResult = (String) capacityManagementAspect.aroundDeleteConfig(proceedingJoinPoint, mockHttpServletRequest,
mockHttpServletResponse, mockDataId, mockGroup, mockTenant);
assertEquals(localMockResult, mockProceedingJoinPointResult);
Mockito.verify(capacityService, Mockito.times(1)).insertAndUpdateClusterUsage(eq(CounterMode.DECREMENT), anyBoolean());
Mockito.verify(capacityService, Mockito.times(1))
.insertAndUpdateTenantUsage(eq(CounterMode.DECREMENT), eq(mockTenant), anyBoolean());
Mockito.verify(proceedingJoinPoint, Mockito.times(2)).proceed();
// (3) failing join point: decrement happens, then usage is restored via INCREMENT.
localMockResult = null;
try {
localMockResult = (String) capacityManagementAspect.aroundDeleteConfig(localMockProceedingJoinPoint, mockHttpServletRequest,
mockHttpServletResponse, mockDataId, mockGroup, mockTenant);
} catch (Throwable e) {
assertEquals(e.getMessage(), mockException.getMessage());
}
assertNull(localMockResult);
Mockito.verify(capacityService, Mockito.times(2)).insertAndUpdateClusterUsage(eq(CounterMode.DECREMENT), anyBoolean());
Mockito.verify(capacityService, Mockito.times(1)).updateClusterUsage(eq(CounterMode.INCREMENT));
Mockito.verify(capacityService, Mockito.times(2))
.insertAndUpdateTenantUsage(eq(CounterMode.DECREMENT), eq(mockTenant), anyBoolean());
Mockito.verify(capacityService, Mockito.times(1)).updateTenantUsage(eq(CounterMode.INCREMENT), eq(mockTenant));
Mockito.verify(localMockProceedingJoinPoint, Mockito.times(1)).proceed();
}
|
/**
 * Points INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY at the directory matching how this
 * transformation was loaded: the repository directory when attached to a repository,
 * else the filename directory when loaded from a file, else the variable keeps its
 * current value (it is re-assigned from itself).
 */
protected void setInternalEntryCurrentDirectory() {
  String sourceVariableName;
  if ( repository != null ) {
    sourceVariableName = Const.INTERNAL_VARIABLE_TRANSFORMATION_REPOSITORY_DIRECTORY;
  } else if ( filename != null ) {
    sourceVariableName = Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY;
  } else {
    sourceVariableName = Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY;
  }
  variables.setVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY,
    variables.getVariable( sourceVariableName ) );
}
|
// With a filename set and no repository, the entry current directory must be taken
// from the filename directory, overriding its previous value.
@Test
public void testSetInternalEntryCurrentDirectoryWithFilename( ) {
TransMeta transMetaTest = new TransMeta( );
transMetaTest.setFilename( "hasFilename" );
transMetaTest.setVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY, "Original value defined at run execution" );
transMetaTest.setVariable( Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, "file:///C:/SomeFilenameDirectory" );
transMetaTest.setVariable( Const.INTERNAL_VARIABLE_TRANSFORMATION_REPOSITORY_DIRECTORY, "/SomeRepDirectory" );
transMetaTest.setInternalEntryCurrentDirectory();
assertEquals( "file:///C:/SomeFilenameDirectory", transMetaTest.getVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY ) );
}
|
/**
 * Returns an anonymized rendering of the given parse tree.
 *
 * @param tree parse tree to anonymize
 * @return the anonymized text produced by build()
 */
public String anonymize(final ParseTree tree) {
return build(tree);
}
|
// TERMINATE statements: the query identifier is anonymized to the generic "query",
// while the ALL keyword is preserved verbatim.
@Test
public void terminateQueryShouldGetAnonymized() {
Assert.assertEquals("TERMINATE query;",
anon.anonymize("TERMINATE my_query;"));
Assert.assertEquals("TERMINATE ALL;",
anon.anonymize("TERMINATE ALL;"));
}
|
/**
 * Splits originalList into consecutive sublists of at most pageSize elements each.
 *
 * <p>The inner lists are {@link List#subList} views backed by originalList; the last
 * one may be shorter than pageSize.
 *
 * @param originalList list to partition; must be non-null and non-empty
 * @param pageSize     maximum size of each partition; must be positive
 * @return the partitions, in order
 * @throws IllegalArgumentException if originalList is null/empty or pageSize <= 0
 */
public static <T> List<List<T>> partition(List<T> originalList, int pageSize) {
    // Plain argument checks (same messages and exception type as the previous
    // Guava Preconditions calls, without the third-party dependency).
    if (originalList == null || originalList.isEmpty()) {
        throw new IllegalArgumentException("Invalid original list");
    }
    if (pageSize <= 0) {
        throw new IllegalArgumentException(
            "Page size should be greater than 0 for performing partition");
    }
    List<List<T>> result = new ArrayList<>();
    for (int start = 0; start < originalList.size(); start += pageSize) {
        result.add(originalList.subList(start,
            Math.min(start + pageSize, originalList.size())));
    }
    return result;
}
|
// Partition sizing: 5 elements split by page sizes 2, 1 and 6 must yield 3, 5 and 1
// partitions respectively, with the expected first/last partition sizes.
@Test
public void testListsPartition() {
List<String> list = new ArrayList<>();
list.add("a");
list.add("b");
list.add("c");
list.add("d");
list.add("e");
// pageSize 2 -> [2, 2, 1]
List<List<String>> res = Lists.
partition(list, 2);
Assertions.assertThat(res)
.describedAs("Number of partitions post partition")
.hasSize(3);
Assertions.assertThat(res.get(0))
.describedAs("Number of elements in first partition")
.hasSize(2);
Assertions.assertThat(res.get(2))
.describedAs("Number of elements in last partition")
.hasSize(1);
// pageSize 1 -> five singleton partitions
List<List<String>> res2 = Lists.
partition(list, 1);
Assertions.assertThat(res2)
.describedAs("Number of partitions post partition")
.hasSize(5);
Assertions.assertThat(res2.get(0))
.describedAs("Number of elements in first partition")
.hasSize(1);
Assertions.assertThat(res2.get(4))
.describedAs("Number of elements in last partition")
.hasSize(1);
// pageSize larger than the list -> one partition holding everything
List<List<String>> res3 = Lists.
partition(list, 6);
Assertions.assertThat(res3)
.describedAs("Number of partitions post partition")
.hasSize(1);
Assertions.assertThat(res3.get(0))
.describedAs("Number of elements in first partition")
.hasSize(5);
}
|
/**
 * Intercepts JVM exit attempts. Depending on userSystemExitMode the attempt is
 * ignored (DISABLED), logged with a synthetic stack trace (LOG), or rejected by
 * throwing UserSystemExitException (THROW). Any wrapped security manager is then
 * consulted, and when haltOnSystemExit is set the JVM halts immediately instead of
 * running shutdown hooks.
 */
@Override
public void checkExit(int status) {
if (userSystemExitMonitored()) {
switch (userSystemExitMode) {
case DISABLED:
break;
case LOG:
// Add exception trace log to help users to debug where exit came from.
LOG.warn(
"Exiting JVM with status {} is monitored: The system will exit due to this call.",
status,
new UserSystemExitException());
break;
case THROW:
throw new UserSystemExitException();
default:
// Must not happen if exhaustively handling all modes above. Logging as being
// already at exit path.
LOG.warn("No valid check exit mode configured: {}", userSystemExitMode);
}
}
// As this security manager is current at outer most of the chain and it has exit guard
// option, invoke inner security manager here after passing guard checking above, if any.
if (originalSecurityManager != null) {
originalSecurityManager.checkExit(status);
}
// At this point, exit is determined. Halt if defined, otherwise check ended, JVM will call
// System.exit
if (haltOnSystemExit) {
Runtime.getRuntime().halt(status);
}
}
|
@Test
void testExistingSecurityManagerRespected() {
    // Don't set the following security manager directly to system, which makes test hang.
    SecurityManager delegate =
            new SecurityManager() {
                @Override
                public void checkPermission(Permission perm) {
                    throw new SecurityException("not allowed");
                }
            };
    // Even with exit monitoring DISABLED, checkExit must still consult the wrapped manager.
    FlinkSecurityManager securityManager =
            new FlinkSecurityManager(
                    ClusterOptions.UserSystemExitMode.DISABLED, false, delegate);
    assertThatThrownBy(() -> securityManager.checkExit(TEST_EXIT_CODE))
            .isInstanceOf(SecurityException.class)
            .hasMessage("not allowed");
}
|
/**
 * Registers a new publication with the media driver and blocks until it is available.
 * Guarded by the client lock; must not be called reentrantly from the conductor.
 */
ConcurrentPublication addPublication(final String channel, final int streamId)
{
clientLock.lock();
try
{
ensureActive();
ensureNotReentrant();
// Stash the channel by registration id so it can be re-resolved later.
final long registrationId = driverProxy.addPublication(channel, streamId);
stashedChannelByRegistrationId.put(registrationId, channel);
// Blocks until the driver responds; resourceByRegIdMap is presumably populated
// while awaiting the response, since it is read immediately after.
awaitResponse(registrationId);
return (ConcurrentPublication)resourceByRegIdMap.get(registrationId);
}
finally
{
clientLock.unlock();
}
}
|
// After closing a publication, adding one for the same channel/stream must produce a
// fresh instance rather than the cached (closed) one.
@Test
void closingPublicationShouldPurgeCache()
{
// Driver responds "publication ready" for the first add.
whenReceiveBroadcastOnMessage(
ControlProtocolEvents.ON_PUBLICATION_READY, publicationReadyBuffer, (buffer) -> publicationReady.length());
final Publication firstPublication = conductor.addPublication(CHANNEL, STREAM_ID_1);
// Driver acknowledges the close operation.
whenReceiveBroadcastOnMessage(
ControlProtocolEvents.ON_OPERATION_SUCCESS,
operationSuccessBuffer,
(buffer) -> OperationSucceededFlyweight.LENGTH);
firstPublication.close();
// Second add for the same channel/stream must go back to the driver.
whenReceiveBroadcastOnMessage(
ControlProtocolEvents.ON_PUBLICATION_READY, publicationReadyBuffer, (buffer) -> publicationReady.length());
final Publication secondPublication = conductor.addPublication(CHANNEL, STREAM_ID_1);
assertThat(firstPublication, not(sameInstance(secondPublication)));
}
|
/**
 * Compares the two date-times on the instant time-line only, ignoring chronology
 * differences, by delegating to ChronoZonedDateTime.timeLineOrder().
 */
@Override
public int compare(ChronoZonedDateTime<?> date1, ChronoZonedDateTime<?> date2) {
return ChronoZonedDateTime.timeLineOrder().compare(date1, date2);
}
|
// The same instant expressed in ISO and Japanese chronologies differs under the
// built-in compareTo, but the time-line comparator must treat them as equal.
@Test
void should_disregard_chronology_difference() {
ZonedDateTime now = ZonedDateTime.now();
ZonedDateTime inTokyo = now.withZoneSameInstant(ZoneId.of("Asia/Tokyo"));
ChronoZonedDateTime<JapaneseDate> inTokyoJapanese = JapaneseChronology.INSTANCE.zonedDateTime(now);
assertThat(inTokyoJapanese.compareTo(inTokyo)).as("Built-in comparison should report that they differ").isNotZero();
assertThat(comparator.compare(inTokyoJapanese, inTokyo)).isZero();
}
|
/**
 * Builds a serializable Row-to-proto-bytes converter for the message described by the
 * given schema text.
 *
 * @param schemaString textual proto schema
 * @param messageName  name of the message within the schema
 * @return a function serializing each Row to the message's wire format
 */
public static SerializableFunction<Row, byte[]> getRowToProtoBytesFromSchema(
    String schemaString, String messageName) {
  Descriptors.Descriptor descriptor = getDescriptorFromProtoSchema(schemaString, messageName);
  ProtoDynamicMessageSchema<DynamicMessage> protoDynamicMessageSchema =
      ProtoDynamicMessageSchema.forDescriptor(ProtoDomain.buildFrom(descriptor), descriptor);
  // Hoisted out of apply(): the row-to-message function is the same for every element,
  // so resolve it once instead of once per row.
  SerializableFunction<Row, DynamicMessage> toMessage =
      protoDynamicMessageSchema.getFromRowFunction();
  return new SimpleFunction<Row, byte[]>() {
    @Override
    public byte[] apply(Row input) {
      return toMessage.apply(input).toByteArray();
    }
  };
}
|
// Smoke test: a row matching the proto schema (including nested address fields) must
// serialize to a non-null byte payload.
@Test
public void testRowToProtoSchemaFunction() {
Row row =
Row.withSchema(SCHEMA)
.withFieldValue("id", 1234)
.withFieldValue("name", "Doe")
.withFieldValue("active", false)
.withFieldValue("address.city", "seattle")
.withFieldValue("address.street", "fake street")
.withFieldValue("address.zip_code", "TO-1234")
.withFieldValue("address.state", "wa")
.build();
Assert.assertNotNull(
ProtoByteUtils.getRowToProtoBytesFromSchema(PROTO_STRING_SCHEMA, "MyMessage").apply(row));
}
|
/**
 * Crawls the report tree with a duplication visitor and records the visitor's
 * duplication count in the step statistics under "duplications".
 */
@Override
public void execute(ComputationStep.Context context) {
    DuplicationVisitor duplicationVisitor = new DuplicationVisitor();
    DepthTraversalTypeAwareCrawler crawler = new DepthTraversalTypeAwareCrawler(duplicationVisitor);
    crawler.visit(treeRootHolder.getReportTreeRoot());
    context.getStatistics().add("duplications", duplicationVisitor.count);
}
|
// Three report duplications on FILE_2 (mixing inner and in-project duplicates) must
// load into the repository with sequential block ids (1..3) and the statistic "3".
@Test
public void loads_multiple_duplications_with_multiple_duplicates() {
reportReader.putDuplications(
FILE_2_REF,
createDuplication(
singleLineTextRange(LINE),
createInnerDuplicate(LINE + 1), createInnerDuplicate(LINE + 2), createInProjectDuplicate(FILE_1_REF, LINE), createInProjectDuplicate(FILE_1_REF, LINE + 10)),
createDuplication(
singleLineTextRange(OTHER_LINE),
createInProjectDuplicate(FILE_1_REF, OTHER_LINE)),
createDuplication(
singleLineTextRange(OTHER_LINE + 80),
createInnerDuplicate(LINE), createInnerDuplicate(LINE + 10)));
TestComputationStepContext context = new TestComputationStepContext();
underTest.execute(context);
Component file1Component = treeRootHolder.getComponentByRef(FILE_1_REF);
// Each loaded duplication keeps its originals and duplicates, in report order.
assertThat(duplicationRepository.getDuplications(FILE_2_REF)).containsOnly(
duplication(
singleLineDetailedTextBlock(1, LINE),
new InnerDuplicate(singleLineTextBlock(LINE + 1)), new InnerDuplicate(singleLineTextBlock(LINE + 2)), new InProjectDuplicate(file1Component, singleLineTextBlock(LINE)),
new InProjectDuplicate(file1Component, singleLineTextBlock(LINE + 10))),
duplication(
singleLineDetailedTextBlock(2, OTHER_LINE),
new InProjectDuplicate(file1Component, singleLineTextBlock(OTHER_LINE))),
duplication(
singleLineDetailedTextBlock(3, OTHER_LINE + 80),
new InnerDuplicate(singleLineTextBlock(LINE)), new InnerDuplicate(singleLineTextBlock(LINE + 10))));
assertNbOfDuplications(context, 3);
}
|
/**
 * Parses a Lucene query string, collecting both the lexer tokens and the terms the
 * parsed query produces, and packages them into a ParsedQuery.
 */
public ParsedQuery parse(final String query) throws ParseException {
    final TokenCollectingQueryParser luceneParser = new TokenCollectingQueryParser(ParsedTerm.DEFAULT_FIELD, ANALYZER);
    luceneParser.setSplitOnWhitespace(true);
    luceneParser.setAllowLeadingWildcard(allowLeadingWildcard);
    final Query parsedQuery = luceneParser.parse(query);
    // Walk the parsed query to collect terms, correlated back to lexer tokens.
    final TermCollectingQueryVisitor termCollector = new TermCollectingQueryVisitor(ANALYZER, luceneParser.getTokenLookup());
    parsedQuery.visit(termCollector);
    final ParsedQuery.Builder resultBuilder = ParsedQuery.builder().query(query);
    resultBuilder.tokensBuilder().addAll(luceneParser.getTokens());
    resultBuilder.termsBuilder().addAll(termCollector.getParsedTerms());
    return resultBuilder.build();
}
|
// An _exists_:field query must report the field name and locate the key token's
// exact line/column span within the query string.
@Test
void getFieldExistPosition() throws ParseException {
final ParsedQuery fields = parser.parse("_exists_:lorem");
assertThat(fields.allFieldNames()).contains("lorem");
assertThat(fields.terms())
.hasSize(1)
.extracting(ParsedTerm::keyToken)
.hasOnlyOneElementSatisfying(term ->
assertThat(term).hasValueSatisfying(t -> {
assertThat(t.beginLine()).isEqualTo(1);
assertThat(t.beginColumn()).isEqualTo(9);
assertThat(t.endLine()).isEqualTo(1);
assertThat(t.endColumn()).isEqualTo(14);
}));
}
|
/**
 * Decides whether this read should park. It waits while the ring buffer container
 * does not exist yet, or while the requested sequence is exactly one past the tail
 * (nothing published there yet). Out-of-range sequences do not wait so the operation
 * can fail fast in beforeRun.
 */
@Override
public boolean shouldWait() {
RingbufferContainer ringbuffer = getRingBufferContainerOrNull();
if (ringbuffer == null) {
// Container not created yet: park until something is published.
return true;
}
if (ringbuffer.isTooLargeSequence(sequence) || ringbuffer.isStaleSequence(sequence)) {
//no need to wait, let the operation continue and fail in beforeRun
return false;
}
// the sequence is not readable
return sequence == ringbuffer.tailSequence() + 1;
}
|
// A read at a sequence that fell behind the head (head was advanced to the tail)
// must not park in shouldWait and must then fail in beforeRun with StaleSequenceException.
@Test
public void whenBeforeHead() {
ringbuffer.add("item1");
ringbuffer.add("item2");
ringbuffer.add("item3");
long oldhead = ringbuffer.headSequence();
// Advance the head so the old head sequence becomes stale.
ringbufferContainer.setHeadSequence(ringbufferContainer.tailSequence());
ReadOneOperation op = getReadOneOperation(oldhead);
op.shouldWait();
assertThrows(StaleSequenceException.class, op::beforeRun);
}
|
/**
 * Builds the serde features for a value schema: the only feature currently derived
 * is the optional wrapping feature, computed from whether the schema has a single
 * value column, the format, any explicitly requested features and the config.
 */
public static SerdeFeatures buildValueFeatures(
    final LogicalSchema schema,
    final Format valueFormat,
    final SerdeFeatures explicitFeatures,
    final KsqlConfig ksqlConfig
) {
    final boolean hasSingleValueColumn = schema.value().size() == 1;
    final ImmutableSet.Builder<SerdeFeature> features = ImmutableSet.builder();
    getValueWrapping(hasSingleValueColumn, valueFormat, explicitFeatures, ksqlConfig)
        .ifPresent(features::add);
    return SerdeFeatures.from(features.build());
}
|
// Explicitly requesting WRAP_SINGLES for a multi-field value schema is invalid and
// must be rejected with a descriptive KsqlException.
@Test
public void shouldThrowIfWrapSingleValuePresentForMultiField() {
// When:
final Exception e = assertThrows(
KsqlException.class,
() -> SerdeFeaturesFactory.buildValueFeatures(
MULTI_FIELD_SCHEMA,
JSON,
SerdeFeatures.of(SerdeFeature.WRAP_SINGLES),
ksqlConfig
)
);
// Then:
assertThat(e.getMessage(), containsString(
"'WRAP_SINGLE_VALUE' is only valid for single-field value schemas"));
}
|
/**
 * Looks up the mask table configuration registered under the given table name.
 *
 * @param tableName table name to look up
 * @return the mask table, or empty when no configuration is registered for the name
 */
public Optional<MaskTable> findMaskTable(final String tableName) {
return Optional.ofNullable(tables.get(tableName));
}
|
// A table configured in the mask rule fixture must be found by name.
@Test
void assertFindMaskTableWhenTableNameExists() {
assertTrue(maskRule.findMaskTable("t_mask").isPresent());
}
|
/**
 * Deletes a menu after verifying it has no children and actually exists, then removes
 * the role permission grants that referenced it. Runs in a transaction and evicts the
 * whole permission-menu cache.
 *
 * @param id id of the menu to delete
 */
@Override
@Transactional(rollbackFor = Exception.class)
@CacheEvict(value = RedisKeyConstants.PERMISSION_MENU_ID_LIST,
allEntries = true) // allEntries wipes the whole cache: at this point we cannot tell which permission the id maps to, so clearing everything is simple and effective
public void deleteMenu(Long id) {
// Refuse to delete while child menus still exist
if (menuMapper.selectCountByParentId(id) > 0) {
throw exception(MENU_EXISTS_CHILDREN);
}
// Verify the menu to delete actually exists
if (menuMapper.selectById(id) == null) {
throw exception(MENU_NOT_EXISTS);
}
// Mark the menu as deleted
menuMapper.deleteById(id);
// Remove the permissions granted to roles for this menu
permissionService.processMenuDeleted(id);
}
|
// Deleting a random (non-existent) menu id must fail with MENU_NOT_EXISTS.
@Test
public void testDeleteMenu_menuNotExist() {
assertServiceException(() -> menuService.deleteMenu(randomLongId()),
MENU_NOT_EXISTS);
}
|
/**
 * Compiles row-expression projections into page projection suppliers. With common
 * sub-expression (CSE) optimization enabled, distinct non-trivial expressions are
 * partitioned into groups that share sub-computations and compiled together; constant,
 * input-reference and duplicate expressions are compiled individually (duplicates
 * would break the expression-to-position map, see inline comment). The int[] output
 * arrays preserve each projection's original output channel.
 */
public List<Supplier<PageProjectionWithOutputs>> compileProjections(
SqlFunctionProperties sqlFunctionProperties,
Map<SqlFunctionId, SqlInvokedFunction> sessionFunctions,
List<? extends RowExpression> projections,
boolean isOptimizeCommonSubExpression,
Optional<String> classNameSuffix)
{
if (isOptimizeCommonSubExpression) {
ImmutableList.Builder<Supplier<PageProjectionWithOutputs>> pageProjections = ImmutableList.builder();
ImmutableMap.Builder<RowExpression, Integer> expressionsWithPositionBuilder = ImmutableMap.builder();
Set<RowExpression> expressionCandidates = new HashSet<>();
for (int i = 0; i < projections.size(); i++) {
RowExpression projection = projections.get(i);
// Duplicate expressions are not expected here in general due to duplicate assignments pruning in query optimization, hence we skip CSE for them to allow for a
// simpler implementation (and duplicate projections in expressionsWithPositionBuilder will throw exception when calling expressionsWithPositionBuilder.build())
if (projection instanceof ConstantExpression || projection instanceof InputReferenceExpression || expressionCandidates.contains(projection)) {
pageProjections.add(toPageProjectionWithOutputs(compileProjection(sqlFunctionProperties, sessionFunctions, projection, classNameSuffix), new int[] {i}));
}
else {
expressionsWithPositionBuilder.put(projection, i);
expressionCandidates.add(projection);
}
}
Map<RowExpression, Integer> expressionsWithPosition = expressionsWithPositionBuilder.build();
// Partition the remaining expressions: value=true marks groups that share common
// sub-expressions and are compiled as one cached projection.
Map<List<RowExpression>, Boolean> projectionsPartitionedByCSE = getExpressionsPartitionedByCSE(expressionsWithPosition.keySet(), MAX_PROJECTION_GROUP_SIZE);
for (Map.Entry<List<RowExpression>, Boolean> entry : projectionsPartitionedByCSE.entrySet()) {
if (entry.getValue()) {
pageProjections.add(toPageProjectionWithOutputs(
compileProjectionCached(sqlFunctionProperties, sessionFunctions, entry.getKey(), true, classNameSuffix),
toIntArray(entry.getKey().stream().map(expressionsWithPosition::get).collect(toImmutableList()))));
}
else {
verify(entry.getKey().size() == 1, "Expect non-cse expression list to only have one element");
RowExpression projection = entry.getKey().get(0);
pageProjections.add(toPageProjectionWithOutputs(
compileProjection(sqlFunctionProperties, sessionFunctions, projection, classNameSuffix),
new int[] {expressionsWithPosition.get(projection)}));
}
}
return pageProjections.build();
}
// CSE disabled: compile every projection independently, one output channel each.
return IntStream.range(0, projections.size())
.mapToObj(outputChannel -> toPageProjectionWithOutputs(
compileProjection(sqlFunctionProperties, sessionFunctions, projections.get(outputChannel), classNameSuffix),
new int[] {outputChannel}))
.collect(toImmutableList());
}
|
// Duplicate projections (ADD_X_Y twice) must compile into two separate page
// projections instead of being merged by the CSE optimization.
// NOTE(review): this call passes four arguments while the focal method above takes
// five (no sessionFunctions map) — presumably a test-facing overload; confirm it exists.
@Test
public void testCommonSubExpressionDuplicatesInProjection()
{
PageFunctionCompiler functionCompiler = new PageFunctionCompiler(createTestMetadataManager(), 0);
List<Supplier<PageProjectionWithOutputs>> pageProjections = functionCompiler.compileProjections(SESSION.getSqlFunctionProperties(), ImmutableList.of(ADD_X_Y, ADD_X_Y), true, Optional.empty());
assertEquals(pageProjections.size(), 2);
}
|
/**
 * @return the RPC type name this decorator handles (the Spring Cloud type)
 */
@Override
public String rpcType() {
return RpcTypeEnum.SPRING_CLOUD.getName();
}
|
@Test
public void testRpcType() {
    // assertEquals takes the expected value first; the original call had the
    // arguments reversed, which garbles the failure message on mismatch.
    Assertions.assertEquals("springCloud", springCloudShenyuContextDecorator.rpcType());
}
|
/**
 * Builds a Statement from the parse tree, first extracting the tree's data sources
 * so they are available during the build.
 */
public Statement buildStatement(final ParserRuleContext parseTree) {
return build(Optional.of(getSources(parseTree)), parseTree);
}
|
// INSERT INTO with an explicit EMIT CHANGES must build a push query whose output
// refinement is CHANGES.
@Test
public void shouldSupportExplicitEmitChangesForInsertInto() {
// Given:
final SingleStatementContext stmt =
givenQuery("INSERT INTO TEST1 SELECT * FROM TEST2 EMIT CHANGES;");
// When:
final Query result = ((QueryContainer) builder.buildStatement(stmt)).getQuery();
// Then:
assertThat("Should be push", result.isPullQuery(), is(false));
assertThat(result.getRefinement().get().getOutputRefinement(), is(OutputRefinement.CHANGES));
}
|
/**
 * Renders the key bytes as a bracketed hex string, two hex digits per byte,
 * e.g. {@code [00ff]} (digit characters come from the HEX table).
 */
@Override
public String toString() {
    StringBuilder hex = new StringBuilder(2 * value.size() + 2);
    hex.append('[');
    ByteIterator bytes = value.iterator();
    while (bytes.hasNext()) {
        byte b = bytes.nextByte();
        hex.append(HEX[(b & 0xF0) >>> 4]);
        hex.append(HEX[b & 0xF]);
    }
    hex.append(']');
    return hex.toString();
}
|
// toString renders two lowercase hex digits per byte inside brackets, with the empty
// key rendering as "[]".
@Test
public void testToString() {
assertEquals("[]", ByteKey.EMPTY.toString());
assertEquals("[00]", ByteKey.of(0).toString());
assertEquals("[0000]", ByteKey.of(0x00, 0x00).toString());
assertEquals(
"[0123456789abcdef]",
ByteKey.of(0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef).toString());
}
|
/**
 * Escapes (forwards) a message to another broker in the cluster.
 *
 * <p>Returns the remote {@code SendResult} on success, or {@code null} when the
 * message is not sent: the target is this broker itself, no route exists for the
 * topic, no suitable remote queue/address can be resolved, or the remote send
 * fails or throws.
 *
 * @param messageExt       message to forward; transactional half messages are
 *                         rebuilt into their real form before sending
 * @param brokerNameToSend target broker name; may be null/empty, in which case a
 *                         queue on a remote broker is selected from the route info
 */
public SendResult putMessageToRemoteBroker(MessageExtBrokerInner messageExt, String brokerNameToSend) {
    if (this.brokerController.getBrokerConfig().getBrokerName().equals(brokerNameToSend)) { // not remote broker
        return null;
    }
    // Half messages carry the real topic/queue in properties; unwrap before forwarding.
    final boolean isTransHalfMessage = TransactionalMessageUtil.buildHalfTopic().equals(messageExt.getTopic());
    MessageExtBrokerInner messageToPut = messageExt;
    if (isTransHalfMessage) {
        messageToPut = TransactionalMessageUtil.buildTransactionalMessageFromHalfMessage(messageExt);
    }
    final TopicPublishInfo topicPublishInfo = this.brokerController.getTopicRouteInfoManager().tryToFindTopicPublishInfo(messageToPut.getTopic());
    if (null == topicPublishInfo || !topicPublishInfo.ok()) {
        LOG.warn("putMessageToRemoteBroker: no route info of topic {} when escaping message, msgId={}",
            messageToPut.getTopic(), messageToPut.getMsgId());
        return null;
    }
    final MessageQueue mqSelected;
    if (StringUtils.isEmpty(brokerNameToSend)) {
        // No explicit target: pick a queue, excluding this broker's own queues.
        mqSelected = topicPublishInfo.selectOneMessageQueue(this.brokerController.getBrokerConfig().getBrokerName());
        messageToPut.setQueueId(mqSelected.getQueueId());
        brokerNameToSend = mqSelected.getBrokerName();
        // Selection may still land on this broker (e.g. single-broker route); bail out.
        if (this.brokerController.getBrokerConfig().getBrokerName().equals(brokerNameToSend)) {
            LOG.warn("putMessageToRemoteBroker failed, remote broker not found. Topic: {}, MsgId: {}, Broker: {}",
                messageExt.getTopic(), messageExt.getMsgId(), brokerNameToSend);
            return null;
        }
    } else {
        // Explicit target: keep the message's original queue id on that broker.
        mqSelected = new MessageQueue(messageExt.getTopic(), brokerNameToSend, messageExt.getQueueId());
    }
    final String brokerAddrToSend = this.brokerController.getTopicRouteInfoManager().findBrokerAddressInPublish(brokerNameToSend);
    if (null == brokerAddrToSend) {
        LOG.warn("putMessageToRemoteBroker failed, remote broker address not found. Topic: {}, MsgId: {}, Broker: {}",
            messageExt.getTopic(), messageExt.getMsgId(), brokerNameToSend);
        return null;
    }
    final long beginTimestamp = System.currentTimeMillis();
    try {
        final SendResult sendResult = this.brokerController.getBrokerOuterAPI().sendMessageToSpecificBroker(
            brokerAddrToSend, brokerNameToSend,
            messageToPut, this.getProducerGroup(messageToPut), SEND_TIMEOUT);
        if (null != sendResult && SendStatus.SEND_OK.equals(sendResult.getSendStatus())) {
            return sendResult;
        } else {
            LOG.error("Escaping failed! cost {}ms, Topic: {}, MsgId: {}, Broker: {}",
                System.currentTimeMillis() - beginTimestamp, messageExt.getTopic(),
                messageExt.getMsgId(), brokerNameToSend);
        }
    } catch (RemotingException | MQBrokerException e) {
        LOG.error(String.format("putMessageToRemoteBroker exception, MsgId: %s, RT: %sms, Broker: %s",
            messageToPut.getMsgId(), System.currentTimeMillis() - beginTimestamp, mqSelected), e);
    } catch (InterruptedException e) {
        LOG.error(String.format("putMessageToRemoteBroker interrupted, MsgId: %s, RT: %sms, Broker: %s",
            messageToPut.getMsgId(), System.currentTimeMillis() - beginTimestamp, mqSelected), e);
        // Restore the interrupt flag so callers can observe the interruption.
        Thread.currentThread().interrupt();
    }
    return null;
}
|
/**
 * With no explicit target broker, the message should be escaped to a remote
 * broker selected from the topic route info ("broker_b"), and the send must go
 * to that broker's publish address.
 */
@Test
public void testPutMessageToRemoteBroker_noSpecificBrokerName_hasRemoteBroker() throws Exception {
    MessageExtBrokerInner message = new MessageExtBrokerInner();
    message.setTopic(TEST_TOPIC);
    String anotherBrokerName = "broker_b";
    // Route contains this broker plus one remote broker.
    TopicPublishInfo publishInfo = mockTopicPublishInfo(BROKER_NAME, anotherBrokerName);
    when(topicRouteInfoManager.tryToFindTopicPublishInfo(anyString())).thenReturn(publishInfo);
    when(topicRouteInfoManager.findBrokerAddressInPublish(anotherBrokerName)).thenReturn("127.0.0.1");
    escapeBridge.putMessageToRemoteBroker(message, null);
    verify(brokerOuterAPI).sendMessageToSpecificBroker(eq("127.0.0.1"), eq(anotherBrokerName), any(MessageExtBrokerInner.class), anyString(), anyLong());
}
|
/**
 * Returns the value at {@code ordinal}: ordinals below the meta-field count are
 * served from the in-memory {@code metaFields} array; all others are delegated
 * to the wrapped source row after rebasing the ordinal past the meta columns.
 */
@Override
public Object get(int ordinal, DataType dataType) {
    if (ordinal < metaFields.length) {
        // Meta columns only accept specific data types; fail fast otherwise.
        validateMetaFieldDataType(dataType);
        return metaFields[ordinal];
    }
    return sourceRow.get(rebaseOrdinal(ordinal), dataType);
}
|
/**
 * Builds a {@code HoodieInternalRow} wrapping a random source row and verifies
 * that both the five meta fields and the delegated source values are readable.
 */
@Test
public void testGet() {
    Object[] values = getRandomValue(true);
    InternalRow row = new GenericInternalRow(values);
    HoodieInternalRow hoodieInternalRow = new HoodieInternalRow(UTF8String.fromString("commitTime"),
        UTF8String.fromString("commitSeqNo"),
        UTF8String.fromString("recordKey"),
        UTF8String.fromString("partitionPath"),
        UTF8String.fromString("fileName"),
        row,
        true);
    assertValues(hoodieInternalRow, "commitTime", "commitSeqNo", "recordKey", "partitionPath",
        "fileName", values, nullIndices);
}
|
/**
 * Validates a proposed offsets alteration for this connector.
 *
 * <p>Requires the file config to be set (stdin-based input tracks no offsets).
 * Each non-null offset must contain a non-negative {@code Long} position, and
 * each partition must name the file it refers to. Null offsets (tombstones) are
 * accepted unconditionally so stale entries can be cleaned up via the REST API.
 *
 * @return true when the offsets are acceptable; the task re-validates the actual
 *         position against the configured file on startup
 * @throws ConnectException if the config or any offset/partition is invalid
 */
@Override
public boolean alterOffsets(Map<String, String> connectorConfig, Map<Map<String, ?>, Map<String, ?>> offsets) {
    AbstractConfig config = new AbstractConfig(CONFIG_DEF, connectorConfig);
    String filename = config.getString(FILE_CONFIG);
    if (filename == null || filename.isEmpty()) {
        throw new ConnectException("Offsets cannot be modified if the '" + FILE_CONFIG + "' configuration is unspecified. " +
                "This is because stdin is used for input and offsets are not tracked.");
    }
    // This connector makes use of a single source partition at a time which represents the file that it is configured to read from.
    // However, there could also be source partitions from previous configurations of the connector.
    for (Map.Entry<Map<String, ?>, Map<String, ?>> partitionOffset : offsets.entrySet()) {
        Map<String, ?> offset = partitionOffset.getValue();
        if (offset == null) {
            // We allow tombstones for anything; if there's garbage in the offsets for the connector, we don't
            // want to prevent users from being able to clean it up using the REST API
            continue;
        }
        if (!offset.containsKey(POSITION_FIELD)) {
            throw new ConnectException("Offset objects should either be null or contain the key '" + POSITION_FIELD + "'");
        }
        // The 'position' in the offset represents the position in the file's byte stream and should be a non-negative long value
        if (!(offset.get(POSITION_FIELD) instanceof Long)) {
            throw new ConnectException("The value for the '" + POSITION_FIELD + "' key in the offset is expected to be a Long value");
        }
        long offsetPosition = (Long) offset.get(POSITION_FIELD);
        if (offsetPosition < 0) {
            throw new ConnectException("The value for the '" + POSITION_FIELD + "' key in the offset should be a non-negative value");
        }
        // Partition checks come after the tombstone short-circuit above, so a
        // malformed partition with a null offset can still be removed.
        Map<String, ?> partition = partitionOffset.getKey();
        if (partition == null) {
            throw new ConnectException("Partition objects cannot be null");
        }
        if (!partition.containsKey(FILENAME_FIELD)) {
            throw new ConnectException("Partition objects should contain the key '" + FILENAME_FIELD + "'");
        }
    }
    // Let the task check whether the actual value for the offset position is valid for the configured file on startup
    return true;
}
|
/**
 * A well-formed offsets map (filename partition + non-negative Long position)
 * and an empty offsets map must both be accepted.
 */
@Test
public void testSuccessfulAlterOffsets() {
    Map<Map<String, ?>, Map<String, ?>> offsets = Collections.singletonMap(
            Collections.singletonMap(FILENAME_FIELD, FILENAME),
            Collections.singletonMap(POSITION_FIELD, 0L)
    );

    // Expect no exception to be thrown when a valid offsets map is passed. An empty offsets map is treated as valid
    // since it could indicate that the offsets were reset previously or that no offsets have been committed yet
    // (for a reset operation)
    assertTrue(connector.alterOffsets(sourceProperties, offsets));
    assertTrue(connector.alterOffsets(sourceProperties, new HashMap<>()));
}
|
/**
 * @return the number of attempts performed so far
 */
public int attempts() {
    return attempts.intValue();
}
|
/**
 * A retry configured with zero max delay should record exactly one attempt
 * before the failing operation's exception surfaces.
 */
@Test
void attempts() {
    final var e = new BusinessException("unhandled");
    final var retry = new Retry<String>(
        () -> {
            throw e;
        },
        2,
        0
    );
    try {
        retry.perform();
    } catch (BusinessException ex) {
        // Expected: the operation always fails; only the attempt count matters here.
        //ignore
    }
    assertThat(retry.attempts(), is(1));
}
|
/**
 * Parses the given character sequence into a {@code DateTime} using the
 * supplied format.
 *
 * @param dateStr    date string to parse
 * @param dateFormat format describing {@code dateStr}
 * @return the parsed {@code DateTime}
 */
public static DateTime parse(CharSequence dateStr, DateFormat dateFormat) {
    return new DateTime(dateStr, dateFormat);
}
|
/**
 * ISO strings with sub-second precision and an explicit UTC offset (or none)
 * should parse, with the fraction truncated in the default string rendering.
 */
@Test
public void parseUTCTest4() {
    // +08:00 offset with 5-digit fraction
    final String dateStr = "2023-02-07T00:02:16.12345+08:00";
    final DateTime dateTime = DateUtil.parse(dateStr);
    assertNotNull(dateTime);
    assertEquals("2023-02-07 00:02:16", dateTime.toString());

    // -08:00 offset
    final String dateStr2 = "2023-02-07T00:02:16.12345-08:00";
    final DateTime dateTime2 = DateUtil.parse(dateStr2);
    assertNotNull(dateTime2);
    assertEquals("2023-02-07 00:02:16", dateTime2.toString());

    // No offset, 4-digit fraction
    final String dateStr3 = "2021-03-17T06:31:33.9999";
    final DateTime dateTime3 = DateUtil.parse(dateStr3);
    assertNotNull(dateTime3);
    assertEquals("2021-03-17 06:31:33", dateTime3.toString());
}
|
/**
 * Builds a {@link Locale} from a code such as "en", "en_US" or "en_US_WIN".
 *
 * @param localeCode underscore-separated locale code
 * @return the corresponding Locale, or {@code null} for an empty code
 */
public static Locale createLocale( String localeCode ) {
  if ( Utils.isEmpty( localeCode ) ) {
    return null;
  }
  StringTokenizer parser = new StringTokenizer( localeCode, "_" );
  switch ( parser.countTokens() ) {
    case 2:
      return new Locale( parser.nextToken(), parser.nextToken() );
    case 3:
      return new Locale( parser.nextToken(), parser.nextToken(), parser.nextToken() );
    default:
      // Single token (or unexpected shape): treat the whole code as the language.
      return new Locale( localeCode );
  }
}
|
/** A bare language code ("en") should map to the matching single-part Locale. */
@Test
public void createLocale_SingleCode() throws Exception {
    assertEquals( Locale.ENGLISH, EnvUtil.createLocale( "en" ) );
}
|
/**
 * Groups base and log file paths by the commit instant encoded in their file
 * names.
 *
 * @param basePath table base path used to relativize each file path
 * @param allPaths base and log file paths to tag
 * @return map of instant time to the partition-relative paths of the files
 *         written by that instant
 */
public static Map<String, List<String>> tagInstantsOfBaseAndLogFiles(
    String basePath, List<StoragePath> allPaths) {
  // Instant time -> base and log file paths (relative to the base path)
  Map<String, List<String>> instantToFilesMap = new HashMap<>();
  // Loop-invariant: build the base StoragePath once rather than per file.
  StoragePath base = new StoragePath(basePath);
  allPaths.forEach(path -> {
    String instantTime = FSUtils.getCommitTime(path.getName());
    // computeIfAbsent returns the (possibly freshly created) list, so the
    // previous separate get() call was redundant.
    instantToFilesMap.computeIfAbsent(instantTime, k -> new ArrayList<>())
        .add(FSUtils.getRelativePartitionPath(base, path));
  });
  return instantToFilesMap;
}
|
/**
 * Builds input paths from the base-file and log-file fixtures and checks that
 * tagging groups every file under the instant time encoded in its name, with
 * paths relativized against the table base path.
 */
@Test
public void testTagInstantsOfBaseAndLogFiles() {
    Map<String, List<String>> expectedResult = new HashMap<>();
    List<StoragePath> inputPathList = new ArrayList<>();

    // Base files: expected values are partition-relative, inputs are absolute.
    for (Map.Entry<String, List<Pair<String, String>>> entry : BASE_FILE_INFO.entrySet()) {
        String instantTime = entry.getKey();
        List<String> fileNameList = entry.getValue().stream()
            .map(e -> {
                String partitionPath = e.getKey();
                String fileId = e.getValue();
                return new StoragePath(
                    new StoragePath(partitionPath), getBaseFilename(instantTime, fileId)).toString();
            })
            .collect(Collectors.toList());
        List<String> expectedList = expectedResult.computeIfAbsent(
            instantTime, k -> new ArrayList<>());
        expectedList.addAll(fileNameList);
        inputPathList.addAll(fileNameList.stream()
            .map(path -> new StoragePath(basePath, path)).collect(Collectors.toList()));
    }

    // Log files: same construction, only the file-name pattern differs.
    for (Map.Entry<String, List<Pair<String, String>>> entry : LOG_FILE_INFO.entrySet()) {
        String instantTime = entry.getKey();
        List<String> fileNameList = entry.getValue().stream()
            .map(e -> {
                String partitionPath = e.getKey();
                String fileId = e.getValue();
                return new StoragePath(
                    new StoragePath(partitionPath), getLogFilename(instantTime, fileId)).toString();
            })
            .collect(Collectors.toList());
        List<String> expectedList = expectedResult.computeIfAbsent(
            instantTime, k -> new ArrayList<>());
        expectedList.addAll(fileNameList);
        inputPathList.addAll(fileNameList.stream()
            .map(path -> new StoragePath(basePath, path)).collect(Collectors.toList()));
    }

    assertEquals(expectedResult,
        RepairUtils.tagInstantsOfBaseAndLogFiles(basePath, inputPathList));
}
|
/**
 * Sets a 32-bit int at the given absolute {@code index}.
 *
 * <p>Validates that 4 bytes are writable at {@code index} (which also checks
 * the buffer's reference count) before delegating to the unchecked
 * {@code _setInt}.
 *
 * @return this buffer, for chaining
 */
@Override
public ByteBuf setInt(int index, int value) {
    checkIndex(index, 4);
    _setInt(index, value);
    return this;
}
|
/**
 * Writing an int to a released buffer must fail with
 * {@code IllegalReferenceCountException}.
 */
@Test
public void testSetIntAfterRelease() {
    // A lambda replaces the verbose anonymous Executable of the original.
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setInt(0, 1));
}
|
/**
 * Connects to the PUR repository as the given user and assembles a
 * {@code RepositoryConnectResult} with the unified repository, security
 * provider/manager and the registered repository services.
 *
 * <p>When running in-process inside an authenticated BI Platform session, the
 * repository is fetched directly from {@code PentahoSystem} and no web-service
 * authentication occurs. Otherwise four tasks are submitted concurrently:
 * authorization (admin check), the repository web service, the repository sync
 * service and the session service (resolves the effective user name).
 *
 * @param username login name
 * @param password possibly-encrypted password; decrypted then URL-encoded
 * @throws KettleException on login failure or any unexpected error
 */
public synchronized RepositoryConnectResult connect( final String username, final String password )
  throws KettleException {
  // Re-connecting: drop any previous connection first.
  if ( serviceManager != null ) {
    disconnect();
  }
  serviceManager = new WebServiceManager( repositoryMeta.getRepositoryLocation().getUrl(), username );
  RepositoryServiceRegistry purRepositoryServiceRegistry = new RepositoryServiceRegistry();
  IUser user1 = new EEUserInfo();
  final String decryptedPassword = Encr.decryptPasswordOptionallyEncrypted( password );
  final RepositoryConnectResult result = new RepositoryConnectResult( purRepositoryServiceRegistry );
  try {
    final String urlEncodedPassword = encodePassword( decryptedPassword );
    /*
     * Three scenarios: 1. Connect in process: username fetched using PentahoSessionHolder; no authentication occurs
     * 2. Connect externally with trust: username specified is assumed authenticated if IP of calling code is trusted
     * 3. Connect externally: authentication occurs normally (i.e. password is checked)
     */
    user1.setLogin( username );
    user1.setPassword( urlEncodedPassword );
    user1.setName( username );
    result.setUser( user1 );

    // We need to have the application context and the session available in order for us to skip authentication
    if ( PentahoSystem.getApplicationContext() != null && PentahoSessionHolder.getSession() != null
      && PentahoSessionHolder.getSession().isAuthenticated() ) {
      if ( inProcess() ) {
        // connect to the IUnifiedRepository through PentahoSystem
        // this assumes we're running in a BI Platform
        result.setUnifiedRepository( PentahoSystem.get( IUnifiedRepository.class ) );
        if ( result.getUnifiedRepository() != null ) {
          if ( log.isDebug() ) {
            log.logDebug( BaseMessages.getString( PKG, "PurRepositoryConnector.ConnectInProgress.Begin" ) );
          }
          // Use the session's user name rather than the caller-supplied one.
          String name = PentahoSessionHolder.getSession().getName();
          user1 = new EEUserInfo();
          user1.setLogin( name );
          user1.setName( name );
          user1.setPassword( urlEncodedPassword );
          result.setUser( user1 );
          result.setSuccess( true );
          result.getUser().setAdmin(
            PentahoSystem.get( IAuthorizationPolicy.class ).isAllowed(
              IAbsSecurityProvider.ADMINISTER_SECURITY_ACTION )
          );
          if ( log.isDebug() ) {
            log.logDebug( BaseMessages.getString(
              PKG, "PurRepositoryConnector.ConnectInProgress", name, result.getUnifiedRepository() ) );
          }
          // for now, there is no need to support the security manager
          // what about security provider?
          return result;
        }
      }
    }

    ExecutorService executor = getExecutor();

    // Task 1: create the security provider and, for admins, the security manager.
    Future<Boolean> authorizationWebserviceFuture = executor.submit( new Callable<Boolean>() {

      @Override
      public Boolean call() throws Exception {
        // We need to add the service class in the list in the order of dependencies
        // IRoleSupportSecurityManager depends RepositorySecurityManager to be present
        if ( log.isBasic() ) {
          log.logBasic( BaseMessages.getString( PKG, "PurRepositoryConnector.CreateServiceProvider.Start" ) );
        }
        result.setSecurityProvider( new AbsSecurityProvider( purRepository, repositoryMeta, result.getUser(),
          serviceManager ) );
        if ( log.isBasic() ) {
          log.logBasic( BaseMessages.getString( PKG, "PurRepositoryConnector.CreateServiceProvider.End" ) ); //$NON-NLS-1$
        }

        // If the user does not have access to administer security we do not
        // need to added them to the service list
        if ( allowedActionsContains( (AbsSecurityProvider) result.getSecurityProvider(),
          IAbsSecurityProvider.ADMINISTER_SECURITY_ACTION ) ) {
          result.setSecurityManager( new AbsSecurityManager( purRepository, repositoryMeta, result.getUser(),
            serviceManager ) );
          // Set the reference of the security manager to security provider for user role list change event
          ( (PurRepositorySecurityProvider) result.getSecurityProvider() )
            .setUserRoleDelegate( ( (PurRepositorySecurityManager) result.getSecurityManager() )
              .getUserRoleDelegate() );

          return true;
        }
        return false;
      }
    } );

    // Task 2: create the repository web service; returns the failure (if any)
    // instead of throwing so the caller can wrap it in a login error.
    Future<WebServiceException> repoWebServiceFuture = executor.submit( new Callable<WebServiceException>() {

      @Override
      public WebServiceException call() throws Exception {
        try {
          IUnifiedRepositoryJaxwsWebService repoWebService = null;
          if ( log.isBasic() ) {
            log.logBasic( BaseMessages.getString( PKG, "PurRepositoryConnector.CreateRepositoryWebService.Start" ) ); //$NON-NLS-1$
          }
          repoWebService =
            serviceManager.createService( username, urlEncodedPassword, IUnifiedRepositoryJaxwsWebService.class ); //$NON-NLS-1$
          if ( log.isBasic() ) {
            log.logBasic( BaseMessages.getString( PKG, "PurRepositoryConnector.CreateRepositoryWebService.End" ) ); //$NON-NLS-1$
          }
          if ( log.isBasic() ) {
            log.logBasic( BaseMessages.getString( PKG, "PurRepositoryConnector.CreateUnifiedRepositoryToWebServiceAdapter.Start" ) ); //$NON-NLS-1$
          }
          result.setUnifiedRepository( new UnifiedRepositoryToWebServiceAdapter( repoWebService ) );
        } catch ( WebServiceException wse ) {
          return wse;
        }
        return null;
      }
    } );

    // Task 3: sync the repository metadata; a sync failure is reported as a
    // connect message, a web-service failure is treated as a BA Server login.
    Future<Exception> syncWebserviceFuture = executor.submit( new Callable<Exception>() {

      @Override
      public Exception call() throws Exception {
        try {
          if ( log.isBasic() ) {
            log.logBasic( BaseMessages.getString( PKG, "PurRepositoryConnector.CreateRepositorySyncWebService.Start" ) );
          }
          IRepositorySyncWebService syncWebService =
            serviceManager.createService( username, urlEncodedPassword, IRepositorySyncWebService.class ); //$NON-NLS-1$
          if ( log.isBasic() ) {
            log.logBasic( BaseMessages.getString( PKG, "PurRepositoryConnector.CreateRepositorySyncWebService.Sync" ) ); //$NON-NLS-1$
          }
          syncWebService.sync( repositoryMeta.getName(), repositoryMeta.getRepositoryLocation().getUrl() );
        } catch ( RepositorySyncException e ) {
          log.logError( e.getMessage(), e );
          // this message will be presented to the user in spoon
          result.setConnectMessage( e.getMessage() );
          return null;
        } catch ( WebServiceException e ) {
          // if we can speak to the repository okay but not the sync service, assume we're talking to a BA Server
          log.logError( e.getMessage(), e );
          return new Exception( BaseMessages.getString( PKG, "PurRepository.BAServerLogin.Message" ), e );
        }
        return null;
      }
    } );

    // Task 4: ask the server's session service for the effective user name
    // (may differ from the supplied login, e.g. when trust headers are used).
    Future<String> sessionServiceFuture = executor.submit( new Callable<String>() {

      @Override
      public String call() throws Exception {
        try {
          if ( log.isBasic() ) {
            log.logBasic( BaseMessages.getString( PKG, "PurRepositoryConnector.SessionService.Start" ) );
          }
          CredentialsProvider provider = new BasicCredentialsProvider();
          UsernamePasswordCredentials credentials = new UsernamePasswordCredentials( username, urlEncodedPassword );
          provider.setCredentials( AuthScope.ANY, credentials );
          HttpClient client = HttpClientBuilder.create().setDefaultCredentialsProvider( provider ).build();
          HttpGet method = new HttpGet( repositoryMeta.getRepositoryLocation().getUrl() + "/api/session/userName" );
          if ( StringUtils.isNotBlank( System.getProperty( "pentaho.repository.client.attemptTrust" ) ) ) {
            method.addHeader( TRUST_USER, username );
          }
          HttpResponse response = client.execute( method );
          if ( log.isBasic() ) {
            log.logBasic( BaseMessages.getString( PKG, "PurRepositoryConnector.SessionService.Sync" ) );
          }
          return EntityUtils.toString( response.getEntity() );
        } catch ( Exception e ) {
          // Best effort: fall back to the supplied login when the call fails.
          if ( log.isError() ) {
            log.logError( BaseMessages.getString( PKG, "PurRepositoryConnector.Error.EnableToGetUser" ), e );
          }
          return null;
        }
      }
    } );

    // Join the futures; a repo web-service failure is a login failure.
    WebServiceException repoException = repoWebServiceFuture.get();
    if ( repoException != null ) {
      log.logError( repoException.getMessage() );
      throw new Exception( BaseMessages.getString( PKG, "PurRepository.FailedLogin.Message" ), repoException );
    }
    Exception syncException = syncWebserviceFuture.get();
    if ( syncException != null ) {
      throw syncException;
    }
    Boolean isAdmin = authorizationWebserviceFuture.get();
    result.getUser().setAdmin( isAdmin );
    String userName = sessionServiceFuture.get();
    if ( userName != null ) {
      result.getUser().setLogin( userName );
    }

    // Register the repository services; admin-only services are registered
    // only when the authorization task reported admin rights.
    if ( log.isBasic() ) {
      log.logBasic( BaseMessages.getString( PKG, "PurRepositoryConnector.RegisterSecurityProvider.Start" ) );
    }
    purRepositoryServiceRegistry.registerService( RepositorySecurityProvider.class, result.getSecurityProvider() );
    purRepositoryServiceRegistry.registerService( IAbsSecurityProvider.class, result.getSecurityProvider() );
    if ( isAdmin ) {
      purRepositoryServiceRegistry.registerService( RepositorySecurityManager.class, result.getSecurityManager() );
      purRepositoryServiceRegistry.registerService( IRoleSupportSecurityManager.class, result.getSecurityManager() );
      purRepositoryServiceRegistry.registerService( IAbsSecurityManager.class, result.getSecurityManager() );
    }
    purRepositoryServiceRegistry.registerService( PurRepositoryRestService.PurRepositoryPluginApiRevision.class,
      serviceManager.createService( username, urlEncodedPassword,
        PurRepositoryRestService.PurRepositoryPluginApiRevision.class ) );

    purRepositoryServiceRegistry.registerService( IRevisionService.class, new UnifiedRepositoryRevisionService(
      result.getUnifiedRepository(), rootRef ) );
    purRepositoryServiceRegistry.registerService( IAclService.class, new UnifiedRepositoryConnectionAclService(
      result.getUnifiedRepository() ) );
    purRepositoryServiceRegistry.registerService( IConnectionAclService.class,
      new UnifiedRepositoryConnectionAclService( result.getUnifiedRepository() ) );
    purRepositoryServiceRegistry.registerService( ITrashService.class, new UnifiedRepositoryTrashService( result
      .getUnifiedRepository(), rootRef ) );
    purRepositoryServiceRegistry.registerService( ILockService.class, new UnifiedRepositoryLockService( result
      .getUnifiedRepository() ) );

    if ( log.isBasic() ) {
      log.logBasic( BaseMessages.getString( PKG, "PurRepositoryConnector.RepositoryServicesRegistered.End" ) );
    }
    result.setSuccess( true );
  } catch ( NullPointerException | UnsupportedEncodingException e ) {
    result.setSuccess( false );
    throw new KettleException( BaseMessages.getString( PKG, "PurRepository.LoginException.Message" ) );
  } catch ( Throwable e ) {
    result.setSuccess( false );
    serviceManager.close();
    throw new KettleException( e );
  }
  return result;
}
|
/**
 * Mocks the executor so the four async connect tasks resolve to canned futures
 * and verifies that the login of the resulting user is overridden by the name
 * returned from the session service ("U1").
 */
@Test
public void testConnect() throws Exception {
  PurRepository mockPurRepository = mock( PurRepository.class );
  PurRepositoryMeta mockPurRepositoryMeta = mock( PurRepositoryMeta.class );
  PurRepositoryLocation location = mock( PurRepositoryLocation.class );
  RootRef mockRootRef = mock( RootRef.class );
  PurRepositoryConnector purRepositoryConnector =
    spy( new PurRepositoryConnector( mockPurRepository, mockPurRepositoryMeta, mockRootRef ) );
  doReturn( location ).when( mockPurRepositoryMeta ).getRepositoryLocation();
  doReturn( "" ).when( location ).getUrl();
  ExecutorService service = mock( ExecutorService.class );
  doReturn( service ).when( purRepositoryConnector ).getExecutor();

  // Futures in the connector's submission order: authorization (not admin),
  // repo web service (no error), sync web service (no error), session
  // service (resolved user name "U1").
  Future sessionFuture = mock( Future.class );
  doReturn( "U1" ).when( sessionFuture ).get();
  Future authFuture = mock( Future.class );
  doReturn( false ).when( authFuture ).get();
  Future nullResultFuture = mock( Future.class );
  doReturn( null ).when( nullResultFuture ).get();
  when( service.submit( any( Callable.class ) ) )
    .thenReturn( authFuture ).thenReturn( nullResultFuture ).thenReturn( nullResultFuture ).thenReturn( sessionFuture );

  // Any exception now fails the test instead of being printed and swallowed,
  // as the previous try/catch + printStackTrace blocks did.
  RepositoryConnectResult res = purRepositoryConnector.connect( "userNam", "password" );
  Assert.assertEquals( "U1", res.getUser().getLogin() );
}
|
/**
 * Extracts the application id from the request's mandatory 'from' parameter.
 *
 * @throws BadRequestException if the parameter is missing or empty
 */
static ApplicationId getFromApplicationId(HttpRequest request) {
    String from = request.getProperty("from");
    if (from == null || from.isEmpty())
        throw new BadRequestException("Parameter 'from' has illegal value '" + from + "'");
    return getAndValidateFromParameter(URI.create(from));
}
|
/**
 * A full application URL passed as the 'from' parameter should resolve to the
 * application id encoded in its tenant/application/instance path segments.
 */
@Test
public void require_that_application_urls_can_be_given_as_from_parameter() throws Exception {
    ApplicationId applicationId = ApplicationId.from(tenant.value(), "foo", "quux");
    HttpRequest request = post(Map.of(
            "from",
            "http://myhost:40555/application/v2/tenant/" + tenant + "/application/foo/environment/test/region/baz/instance/quux"));
    assertEquals(applicationId, SessionCreateHandler.getFromApplicationId(request));
}
|
/**
 * Adds a new beta configuration when none exists for the (dataId, group,
 * tenant) tuple, otherwise updates the existing one.
 */
@Override
public ConfigOperateResult insertOrUpdateBeta(final ConfigInfo configInfo, final String betaIps, final String srcIp,
        final String srcUser) {
    final boolean exists =
            findConfigInfo4BetaState(configInfo.getDataId(), configInfo.getGroup(), configInfo.getTenant()) != null;
    return exists
            ? updateConfigInfo4Beta(configInfo, betaIps, srcIp, srcUser)
            : addConfigInfo4Beta(configInfo, betaIps, srcIp, srcUser);
}
|
/**
 * When no beta entry exists yet (first state query returns null), the
 * insert-or-update must take the add path: verify the returned id/timestamp
 * come from the post-insert state query and that the insert SQL context was
 * populated once with the configuration's fields.
 */
@Test
void testInsertOrUpdateBetaOfAdd() {
    String dataId = "betaDataId113";
    String group = "group113";
    String tenant = "tenant113";

    //mock exist beta
    ConfigInfoStateWrapper mockedConfigInfoStateWrapper = new ConfigInfoStateWrapper();
    mockedConfigInfoStateWrapper.setDataId(dataId);
    mockedConfigInfoStateWrapper.setGroup(group);
    mockedConfigInfoStateWrapper.setTenant(tenant);
    mockedConfigInfoStateWrapper.setId(123456L);
    mockedConfigInfoStateWrapper.setLastModified(System.currentTimeMillis());
    // First call (pre-check) -> null, second call (after insert) -> the wrapper.
    Mockito.when(
                    databaseOperate.queryOne(anyString(), eq(new Object[] {dataId, group, tenant}), eq(CONFIG_INFO_STATE_WRAPPER_ROW_MAPPER)))
            .thenReturn(null).thenReturn(mockedConfigInfoStateWrapper);
    String betaIps = "betaips...";
    String srcIp = "srcUp...";
    String srcUser = "srcUser...";
    String appName = "appname";
    String content = "content111";
    ConfigInfo configInfo = new ConfigInfo(dataId, group, tenant, appName, content);
    configInfo.setEncryptedDataKey("key34567");
    //execute
    ConfigOperateResult configOperateResult = embeddedConfigInfoBetaPersistService.insertOrUpdateBeta(configInfo, betaIps, srcIp,
            srcUser);
    //expect return obj
    assertEquals(mockedConfigInfoStateWrapper.getId(), configOperateResult.getId());
    assertEquals(mockedConfigInfoStateWrapper.getLastModified(), configOperateResult.getLastModified());

    //verify add to be invoked
    embeddedStorageContextHolderMockedStatic.verify(
            () -> EmbeddedStorageContextHolder.addSqlContext(anyString(), eq(dataId), eq(group), eq(tenant),
                    eq(configInfo.getAppName()), eq(configInfo.getContent()), eq(configInfo.getMd5()),
                    eq(betaIps), eq(srcIp), eq(srcUser), eq(configInfo.getEncryptedDataKey())), times(1));
}
|
/**
 * Copies the type's allowed-values and type-constraint definitions (when
 * present and non-empty) onto the given schema as the corresponding
 * {@code x-dmn-*} extensions.
 */
static void populateSchemaWithConstraints(Schema toPopulate, SimpleTypeImpl t) {
    if (t.getAllowedValues() != null && !t.getAllowedValues().isEmpty()) {
        parseSimpleType(DMNOASConstants.X_DMN_ALLOWED_VALUES, toPopulate, t.getAllowedValuesFEEL(), t.getAllowedValues());
    }
    if (t.getTypeConstraint() != null && !t.getTypeConstraint().isEmpty()) {
        parseSimpleType(DMNOASConstants.X_DMN_TYPE_CONSTRAINTS, toPopulate, t.getTypeConstraintFEEL(), t.getTypeConstraint());
    }
}
|
/**
 * Range-style allowed values "(>1)" and "(<=10)" should populate the schema's
 * numeric bounds (exclusive minimum 1, inclusive maximum 10) and be recorded
 * under the x-dmn-allowed-values extension without the parentheses.
 */
@Test
void populateSchemaWithRangesForAllowedValues() {
    List<Object> toRange = Arrays.asList("(>1)", "(<=10)");
    String allowedValuesString = String.join(",",
            toRange.stream().map(toMap -> String.format("%s", toMap)).toList());
    SimpleTypeImpl toRead = getSimpleType(allowedValuesString, null, FEEL_STRING, BuiltInType.STRING);
    AtomicReference<Schema> toPopulate = new AtomicReference<>(getSchemaForSimpleType(toRead));
    DMNTypeSchemas.populateSchemaWithConstraints(toPopulate.get(), toRead);
    // Bounds derived from the ranges: >1 is exclusive, <=10 is inclusive.
    assertEquals(BigDecimal.ONE, toPopulate.get().getMinimum());
    assertTrue(toPopulate.get().getExclusiveMinimum());
    assertEquals(BigDecimal.TEN, toPopulate.get().getMaximum());
    assertFalse(toPopulate.get().getExclusiveMaximum());
    assertTrue(toPopulate.get().getExtensions().containsKey(DMNOASConstants.X_DMN_ALLOWED_VALUES));
    String retrieved =
            ((String) toPopulate.get().getExtensions().get(DMNOASConstants.X_DMN_ALLOWED_VALUES)).replace(" ", "");
    String expected = allowedValuesString.replace("(", "").replace(")", "");
    assertEquals(expected, retrieved);
}
|
/** Disposes the underlying subscription, stopping preference-change tracking. */
@Override
public void dispose() {
    mDisposable.dispose();
}
|
/**
 * After dispose(), the tracker must report disposed and stop reacting to
 * subsequent skin-tone preference changes (the last observed value sticks).
 */
@Test
public void testDispose() {
    DefaultSkinTonePrefTracker tracker =
        new DefaultSkinTonePrefTracker(AnyApplication.prefs(getApplicationContext()));
    Assert.assertFalse(tracker.isDisposed());
    Assert.assertNull(tracker.getDefaultSkinTone());

    SharedPrefsHelper.setPrefsValue(R.string.settings_key_default_emoji_skin_tone, "type_2");
    Assert.assertEquals(JavaEmojiUtils.SkinTone.Fitzpatrick_2, tracker.getDefaultSkinTone());

    tracker.dispose();
    Assert.assertTrue(tracker.isDisposed());

    SharedPrefsHelper.setPrefsValue(R.string.settings_key_default_emoji_skin_tone, "type_3");
    // does not change
    Assert.assertEquals(JavaEmojiUtils.SkinTone.Fitzpatrick_2, tracker.getDefaultSkinTone());
}
|
/**
 * Builds a {@code ZMsg} from the given "picture" format string and matching
 * arguments.
 *
 * <p>Each picture character consumes the argument at the same index:
 * '1'/'2'/'4'/'8' fixed-width numbers, 's' short string (1-byte length),
 * 'S' long string (4-byte length), 'b'/'c' length-prefixed byte block,
 * 'f' a {@code ZFrame} appended as its own frame, 'm' a {@code ZMsg} whose
 * frames are appended. All non-frame values are packed into a single data
 * frame which is prepended to the message, so it precedes any 'f'/'m' frames.
 *
 * @throws ZMQException with EPROTO if the picture does not match the expected
 *                      binary format (e.g. 'm' not in the final position)
 */
@Draft
public ZMsg msgBinaryPicture(String picture, Object... args)
{
    if (!BINARY_FORMAT.matcher(picture).matches()) {
        throw new ZMQException(picture + " is not in expected binary format " + BINARY_FORMAT.pattern(),
                               ZError.EPROTO);
    }
    ZMsg msg = new ZMsg();

    // Pass 1: calculate total size of data frame
    int frameSize = 0;
    for (int index = 0; index < picture.length(); index++) {
        char pattern = picture.charAt(index);
        switch (pattern) {
        case '1': {
            frameSize += 1;
            break;
        }
        case '2': {
            frameSize += 2;
            break;
        }
        case '4': {
            frameSize += 4;
            break;
        }
        case '8': {
            frameSize += 8;
            break;
        }
        case 's': {
            // 1-byte length prefix; a null string contributes only the prefix.
            String string = (String) args[index];
            frameSize += 1 + (string != null ? string.getBytes(ZMQ.CHARSET).length : 0);
            break;
        }
        case 'S': {
            // 4-byte length prefix; a null string contributes only the prefix.
            String string = (String) args[index];
            frameSize += 4 + (string != null ? string.getBytes(ZMQ.CHARSET).length : 0);
            break;
        }
        case 'b':
        case 'c': {
            byte[] block = (byte[]) args[index];
            frameSize += 4 + block.length;
            break;
        }
        case 'f': {
            // Frames are appended directly; they take no space in the data frame.
            ZFrame frame = (ZFrame) args[index];
            msg.add(frame);
            break;
        }
        case 'm': {
            ZMsg other = (ZMsg) args[index];
            if (other == null) {
                msg.add(new ZFrame((byte[]) null));
            }
            else {
                msg.addAll(other);
            }
            break;
        }
        default:
            assert (false) : "invalid picture element '" + pattern + "'";
        }
    }

    // Pass 2: encode data into data frame
    ZFrame frame = new ZFrame(new byte[frameSize]);
    ZNeedle needle = new ZNeedle(frame);
    for (int index = 0; index < picture.length(); index++) {
        char pattern = picture.charAt(index);
        switch (pattern) {
        case '1': {
            needle.putNumber1((int) args[index]);
            break;
        }
        case '2': {
            needle.putNumber2((int) args[index]);
            break;
        }
        case '4': {
            needle.putNumber4((int) args[index]);
            break;
        }
        case '8': {
            needle.putNumber8((long) args[index]);
            break;
        }
        case 's': {
            needle.putString((String) args[index]);
            break;
        }
        case 'S': {
            needle.putLongString((String) args[index]);
            break;
        }
        case 'b':
        case 'c': {
            byte[] block = (byte[]) args[index];
            needle.putNumber4(block.length);
            needle.putBlock(block, block.length);
            break;
        }
        case 'f':
        case 'm':
            // Already handled as whole frames in pass 1.
            break;
        default:
            assert (false) : "invalid picture element '" + pattern + "'";
        }
    }
    // The packed data frame goes first, ahead of any 'f'/'m' frames.
    msg.addFirst(frame);
    return msg;
}
|
/**
 * 'm' (embedded message) is only valid as the final picture element; "m1"
 * must be rejected with a ZMQException.
 */
@Test(expected = ZMQException.class)
public void testInvalidPictureMsgNotInTheEnd()
{
    String picture = "m1";
    ZMsg msg = new ZMsg().push("Hello");
    pic.msgBinaryPicture(picture, msg, 255);
}
|
/**
 * Returns a live {@code RList} view of the values mapped to {@code key}.
 *
 * <p>Mutations on the returned list are routed back through this multimap so
 * the key index stays consistent (e.g. the key is dropped from the index when
 * its value list becomes empty). TTL/rename operations are not supported on
 * the per-key view.
 */
@Override
public RList<V> get(K key) {
    String keyHash = keyHash(key);
    String setName = getValuesName(keyHash);

    return new RedissonList<V>(codec, commandExecutor, setName, null) {

        @Override
        public RFuture<Boolean> addAsync(V value) {
            return RedissonListMultimap.this.putAsync(key, value);
        }

        @Override
        public RFuture<Boolean> addAllAsync(Collection<? extends V> c) {
            return RedissonListMultimap.this.putAllAsync(key, c);
        }

        @Override
        public RFuture<Boolean> removeAsync(Object value) {
            return RedissonListMultimap.this.removeAsync(key, value);
        }

        @Override
        public RFuture<Boolean> removeAllAsync(Collection<?> c) {
            if (c.isEmpty()) {
                return new CompletableFutureWrapper<>(false);
            }

            List<Object> args = new ArrayList<>(c.size() + 1);
            args.add(encodeMapKey(key));
            encode(args, c);

            // Remove each value from the list; if anything was removed and the
            // list is now empty, drop the key from the multimap index as well.
            return commandExecutor.evalWriteAsync(RedissonListMultimap.this.getRawName(), codec, RedisCommands.EVAL_BOOLEAN,
                    "local v = 0 " +
                    "for i = 2, #ARGV, 1 do "
                        + "if redis.call('lrem', KEYS[2], 0, ARGV[i]) == 1 then "
                            + "v = 1; "
                        + "end "
                    +"end "
                    + "if v == 1 and redis.call('exists', KEYS[2]) == 0 then "
                        + "redis.call('hdel', KEYS[1], ARGV[1]); "
                    +"end "
                    + "return v",
                    Arrays.asList(RedissonListMultimap.this.getRawName(), setName),
                    args.toArray());
        }

        @Override
        public RFuture<Boolean> deleteAsync() {
            ByteBuf keyState = encodeMapKey(key);
            return RedissonListMultimap.this.fastRemoveAsync(Arrays.asList(keyState),
                    Arrays.asList(RedissonListMultimap.this.getRawName(), setName), RedisCommands.EVAL_BOOLEAN_AMOUNT);
        }

        // The messages below previously said "SetMultimap values Set" — a
        // copy-paste from RedissonSetMultimap; this is a ListMultimap view.
        @Override
        public RFuture<Boolean> clearExpireAsync() {
            throw new UnsupportedOperationException("This operation is not supported for ListMultimap values List");
        }

        @Override
        public RFuture<Boolean> expireAsync(long timeToLive, TimeUnit timeUnit, String param, String... keys) {
            throw new UnsupportedOperationException("This operation is not supported for ListMultimap values List");
        }

        @Override
        protected RFuture<Boolean> expireAtAsync(long timestamp, String param, String... keys) {
            throw new UnsupportedOperationException("This operation is not supported for ListMultimap values List");
        }

        @Override
        public RFuture<Long> remainTimeToLiveAsync() {
            throw new UnsupportedOperationException("This operation is not supported for ListMultimap values List");
        }

        @Override
        public RFuture<Void> renameAsync(String newName) {
            throw new UnsupportedOperationException("This operation is not supported for ListMultimap values List");
        }

        @Override
        public RFuture<Boolean> renamenxAsync(String newName) {
            throw new UnsupportedOperationException("This operation is not supported for ListMultimap values List");
        }
    };
}
|
// Verifies that two distributed iterators sharing the same iterator name resume the same
// server-side cursor: together they must yield every element exactly once.
@Test
public void testDistributedIterator() {
    RListMultimap<String, String> map = redisson.getListMultimap("set", StringCodec.INSTANCE);
    // populate set with elements
    List<String> stringsOne = IntStream.range(0, 64).mapToObj(i -> "" + i).collect(Collectors.toList());
    map.putAll("someKey", stringsOne);
    Iterator<String> stringIterator = map.get("someKey")
            .distributedIterator("iterator_{set}", 10);
    // read some elements using iterator
    List<String> strings = new ArrayList<>();
    for (int i = 0; i < 20; i++) {
        if (stringIterator.hasNext()) {
            strings.add(stringIterator.next());
        }
    }
    // create another iterator instance using the same name
    RListMultimap<String, String> map2 = redisson.getListMultimap("set", StringCodec.INSTANCE);
    Iterator<String> stringIterator2 = map2.get("someKey")
            .distributedIterator("iterator_{set}", 10);
    assertTrue(stringIterator2.hasNext());
    // read all remaining elements
    stringIterator2.forEachRemaining(strings::add);
    stringIterator.forEachRemaining(strings::add);
    // No element lost, no element duplicated across the two iterators.
    assertThat(strings).containsAll(stringsOne);
    assertThat(strings).hasSize(stringsOne.size());
}
|
/**
 * Maps a JSON-schema "type" declaration onto a Java type.
 * <p>
 * The order of the branches is significant: an explicit "object" type (or the presence of
 * non-empty "properties") wins, then "existingJavaType", then the primitive-ish schema
 * types, and anything unrecognised falls back to {@code Object}. Afterwards, "format" or
 * (for strings) "media" may refine the resolved type, but only when no explicit
 * javaType/existingJavaType was given.
 */
@Override
public JType apply(String nodeName, JsonNode node, JsonNode parent, JClassContainer jClassContainer, Schema schema) {
    String propertyTypeName = getTypeName(node);
    JType type;
    // Note: && binds tighter than ||, so this is: "object" OR (has non-empty properties).
    if (propertyTypeName.equals("object") || node.has("properties") && node.path("properties").size() > 0) {
        type = ruleFactory.getObjectRule().apply(nodeName, node, parent, jClassContainer.getPackage(), schema);
    } else if (node.has("existingJavaType")) {
        String typeName = node.path("existingJavaType").asText();
        if (isPrimitive(typeName, jClassContainer.owner())) {
            type = primitiveType(typeName, jClassContainer.owner());
        } else {
            type = resolveType(jClassContainer, typeName);
        }
    } else if (propertyTypeName.equals("string")) {
        type = jClassContainer.owner().ref(String.class);
    } else if (propertyTypeName.equals("number")) {
        type = getNumberType(jClassContainer.owner(), ruleFactory.getGenerationConfig());
    } else if (propertyTypeName.equals("integer")) {
        type = getIntegerType(jClassContainer.owner(), node, ruleFactory.getGenerationConfig());
    } else if (propertyTypeName.equals("boolean")) {
        type = unboxIfNecessary(jClassContainer.owner().ref(Boolean.class), ruleFactory.getGenerationConfig());
    } else if (propertyTypeName.equals("array")) {
        type = ruleFactory.getArrayRule().apply(nodeName, node, parent, jClassContainer.getPackage(), schema);
    } else {
        type = jClassContainer.owner().ref(Object.class);
    }
    // "format"/"media" refinements are skipped when the user pinned the type explicitly.
    if (!node.has("javaType") && !node.has("existingJavaType") && node.has("format")) {
        type = ruleFactory.getFormatRule().apply(nodeName, node.get("format"), node, type, schema);
    } else if (!node.has("javaType") && !node.has("existingJavaType") && propertyTypeName.equals("string") && node.has("media")) {
        type = ruleFactory.getMediaRule().apply(nodeName, node.get("media"), node, type, schema);
    }
    return type;
}
|
// When the schema declares type "array", TypeRule must delegate to the ArrayRule
// and return whatever type that rule produces, untouched.
@Test
public void applyGeneratesArray() {
    JPackage jpackage = new JCodeModel()._package(getClass().getPackage().getName());

    ObjectNode objectNode = new ObjectMapper().createObjectNode();
    objectNode.put("type", "array");

    // Stub the array rule so we can detect pure delegation.
    JClass expectedArrayType = mock(JClass.class);
    ArrayRule arrayRule = mock(ArrayRule.class);
    when(arrayRule.apply("fooBar", objectNode, null, jpackage, null)).thenReturn(expectedArrayType);
    when(ruleFactory.getArrayRule()).thenReturn(arrayRule);

    JType result = rule.apply("fooBar", objectNode, null, jpackage, null);

    assertThat(result, is(expectedArrayType));
}
|
// Builds a JTS polygon (shell only, no holes) from the way's node coordinates.
// The coordinates come from JTSUtils.toCoordinates(way); createLinearRing requires
// them to form a closed ring — presumably toCoordinates guarantees that (TODO confirm).
static Polygon buildPolygon(TDWay way) {
    Coordinate[] coordinates = JTSUtils.toCoordinates(way);
    return GEOMETRY_FACTORY.createPolygon(GEOMETRY_FACTORY.createLinearRing(coordinates), null);
}
|
// A polygon whose two inner rings make it topologically invalid should still be built
// verbatim (matching the WKT fixture) while reporting isValid() == false.
@Test
public void testBuildInValidPolygonWith2InnerRings() {
    String testfile = "invalid-polygon-2-inner-rings.wkt";
    // First way is the outer shell, the rest are the inner rings.
    List<TDWay> ways = MockingUtils.wktPolygonToWays(testfile);
    Polygon polygon = JTSUtils.buildPolygon(ways.get(0), ways.subList(1, ways.size()));
    Geometry expected = MockingUtils.readWKTFile(testfile);
    Assert.isTrue(!polygon.isValid());
    Assert.equals(expected, polygon);
}
|
@Override
public double get(int i, int j) {
    // Direct read from the backing row-major 2-D array; no bounds checks beyond
    // the array's own (an invalid index throws ArrayIndexOutOfBoundsException).
    final double[] row = values[i];
    return row[j];
}
|
// Backwards-compatibility check: a protobuf serialized by Tribuo 4.3.1 must still
// deserialize into a tensor equal to the freshly generated matrix.
@Test
public void serialization431Test() throws URISyntaxException, IOException {
    Path matrixPath = Paths.get(DenseMatrixTest.class.getResource("dense-matrix-431.tribuo").toURI());
    try (InputStream fis = Files.newInputStream(matrixPath)) {
        TensorProto proto = TensorProto.parseFrom(fis);
        Tensor matrix = Tensor.deserialize(proto);
        assertEquals(generateA(), matrix);
    }
}
|
// Test-only entry point exposing the private resourceUsageReport handler to unit tests.
public static void testHandleResourceUsageReport(long backendId, TResourceUsage usage) {
    resourceUsageReport(backendId, usage);
}
|
// Resource-usage reports must be applied to the matching backend or compute node and
// trigger exactly one resourceUsageMonitor notification each; reports for unknown node
// ids must be ignored (hence times = 2 despite three report calls).
@Test
public void testHandleResourceUsageReport() {
    ResourceUsageMonitor resourceUsageMonitor = GlobalStateMgr.getCurrentState().getResourceUsageMonitor();
    Backend backend = new Backend(0, "127.0.0.1", 80);
    ComputeNode computeNode = new ComputeNode(2, "127.0.0.1", 88);
    // Route node lookups to our two in-memory nodes; anything else is "unknown".
    new MockUp<SystemInfoService>() {
        @Mock
        public ComputeNode getBackendOrComputeNode(long id) {
            if (id == backend.getId()) {
                return backend;
            }
            if (id == computeNode.getId()) {
                return computeNode;
            }
            return null;
        }
    };
    new Expectations(resourceUsageMonitor) {
        {
            resourceUsageMonitor.notifyResourceUsageUpdate();
            times = 2;
        }
    };

    int numRunningQueries = 1;
    long memLimitBytes = 3;
    long memUsedBytes = 2;
    int cpuUsedPermille = 300;
    TResourceUsage resourceUsage = genResourceUsage(numRunningQueries, memLimitBytes, memUsedBytes, cpuUsedPermille);

    // For backend, sync to FE followers and notify pending queries.
    ReportHandler.testHandleResourceUsageReport(backend.getId(), resourceUsage);
    Assert.assertEquals(numRunningQueries, backend.getNumRunningQueries());
    // Assert.assertEquals(memLimitBytes, backend.getMemLimitBytes());
    Assert.assertEquals(memUsedBytes, backend.getMemUsedBytes());
    Assert.assertEquals(cpuUsedPermille, backend.getCpuUsedPermille());

    // For compute node, sync to FE followers and notify pending queries.
    numRunningQueries = 10;
    memLimitBytes = 30;
    memUsedBytes = 20;
    cpuUsedPermille = 310;
    resourceUsage = genResourceUsage(numRunningQueries, memLimitBytes, memUsedBytes, cpuUsedPermille);
    ReportHandler.testHandleResourceUsageReport(computeNode.getId(), resourceUsage);
    Assert.assertEquals(numRunningQueries, computeNode.getNumRunningQueries());
    // Assert.assertEquals(memLimitBytes, computeNode.getMemLimitBytes());
    Assert.assertEquals(memUsedBytes, computeNode.getMemUsedBytes());
    Assert.assertEquals(cpuUsedPermille, computeNode.getCpuUsedPermille());

    // Don't sync and notify, because this BE doesn't exist.
    ReportHandler.testHandleResourceUsageReport(/* Not Exist */ 1, resourceUsage);
}
|
/**
 * Class-loading strategy: already-loaded classes first, then component-first packages,
 * then owner-first packages, and finally component-only for everything else. On failure,
 * if the class name matches a known package-to-module mapping, the ClassNotFoundException
 * is enriched with a hint about the missing module.
 */
@Override
protected Class<?> loadClass(final String name, final boolean resolve)
        throws ClassNotFoundException {
    // Per-class lock (parallel-capable class loader contract).
    synchronized (getClassLoadingLock(name)) {
        try {
            final Class<?> loadedClass = findLoadedClass(name);
            if (loadedClass != null) {
                return resolveIfNeeded(resolve, loadedClass);
            }

            // Component-first takes precedence over owner-first when both match.
            if (isComponentFirstClass(name)) {
                return loadClassFromComponentFirst(name, resolve);
            }
            if (isOwnerFirstClass(name)) {
                return loadClassFromOwnerFirst(name, resolve);
            }

            // making this behavior configurable (component-only/component-first/owner-first)
            // would allow this class to subsume the FlinkUserCodeClassLoader (with an added
            // exception handler)
            return loadClassFromComponentOnly(name, resolve);
        } catch (ClassNotFoundException e) {
            // If we know the package of this class
            Optional<String> foundAssociatedModule =
                    knownPackagePrefixesModuleAssociation.entrySet().stream()
                            .filter(entry -> name.startsWith(entry.getKey()))
                            .map(Map.Entry::getValue)
                            .findFirst();
            if (foundAssociatedModule.isPresent()) {
                // Wrap with an actionable message, keeping the original as the cause.
                throw new ClassNotFoundException(
                        String.format(
                                "Class '%s' not found. Perhaps you forgot to add the module '%s' to the classpath?",
                                name, foundAssociatedModule.get()),
                        e);
            }
            throw e;
        }
    }
}
|
// An owner-first class missing from the owner loader must still be found via the
// component loader (here: the test's own loader hierarchy supplies CLASS_TO_LOAD).
@Test
void testOwnerFirstClassNotFoundFallsBackToComponent() throws Exception {
    TestUrlClassLoader owner = new TestUrlClassLoader();

    final ComponentClassLoader componentClassLoader =
            new ComponentClassLoader(
                    new URL[0],
                    owner,
                    new String[] {CLASS_TO_LOAD.getName()},
                    new String[0],
                    Collections.emptyMap());

    final Class<?> loadedClass = componentClassLoader.loadClass(CLASS_TO_LOAD.getName());
    assertThat(loadedClass).isSameAs(CLASS_TO_LOAD);
}
|
/**
 * Returns a reminder timestamp to preset: the given reminder if it is non-null and
 * still in the future, otherwise the start of the next minute (see getNextMinute()).
 *
 * @param currentReminder existing reminder in epoch millis, may be null or in the past
 * @return a reminder timestamp strictly usable in the future
 */
public static long getPresetReminder(Long currentReminder) {
    // System.currentTimeMillis() returns the same value Calendar.getInstance()
    // would, without allocating a Calendar.
    long now = System.currentTimeMillis();
    return currentReminder != null && currentReminder > now ? currentReminder : getNextMinute();
}
|
@Test
public void getPresetReminder() {
    // A future reminder is kept as-is.
    var nextHour = Calendar.getInstance().getTimeInMillis() + 60 * 60 * 1000;
    assertEquals(nextHour, DateUtils.getPresetReminder(nextHour));

    // A past reminder is replaced by "the next minute": strictly after now,
    // and before now + 61s (upper bound of the next whole minute).
    var previousMinute = Calendar.getInstance().getTimeInMillis() - 60 * 1000;
    var nextMinute = Calendar.getInstance().getTimeInMillis() + 61 * 1000;
    var presetReminder = DateUtils.getPresetReminder(previousMinute);
    assertTrue(Calendar.getInstance().getTimeInMillis() < presetReminder && presetReminder < nextMinute);
}
|
// Convenience wrapper: log the message at INFO level, associated with the given query text.
public static void info(final Object message, final String query) {
    log(Level.INFO, message, query);
}
|
// Every message logged through QueryLogger must carry a QueryGuid with both
// a structural GUID and a query GUID populated.
@Test
public void shouldContainAQueryID() {
    String message = "my message";
    String query = "DESCRIBE cat EXTENDED;";
    QueryLogger.info(message, query);
    testAppender
        .getLog()
        .forEach(
            (e) -> {
                final QueryLoggerMessage msg = (QueryLoggerMessage) e.getMessage();
                final QueryGuid queryGuid = msg.getQueryIdentifier();
                assertThat(queryGuid.getStructuralGuid(), not(isEmptyOrNullString()));
                assertThat(queryGuid.getQueryGuid(), not(isEmptyOrNullString()));
            });
}
|
/**
 * Builds a sorted Field -> Descriptor map for a bean class via reflection.
 * <p>
 * Collects all non-transient, non-static, non-@Ignore fields (optionally walking up the
 * superclass chain), pairs each with a matching getter/setter whose generic signature
 * exactly matches the field's generic type, and also considers default methods declared
 * on the class's direct interfaces. Getters for boolean primitives use the "isXxx"
 * convention; everything else uses "getXxx".
 */
static SortedMap<Field, Descriptor> buildBeanedDescriptorsMap(
    Class<?> clz, boolean searchParent) {
    List<Field> fieldList = new ArrayList<>();
    Class<?> clazz = clz;
    // Key methods by (declaring class, name) so overrides from subclasses win —
    // subclasses are visited first and later puts for the same key don't occur.
    Map<Tuple2<Class, String>, Method> methodMap = new HashMap<>();
    do {
        Field[] fields = clazz.getDeclaredFields();
        for (Field field : fields) {
            int modifiers = field.getModifiers();
            // final and non-private field validation left to {@link isBean(clz)}
            if (!Modifier.isTransient(modifiers)
                && !Modifier.isStatic(modifiers)
                && !field.isAnnotationPresent(Ignore.class)) {
                fieldList.add(field);
            }
        }
        Arrays.stream(clazz.getDeclaredMethods())
            .filter(m -> !Modifier.isPrivate(m.getModifiers()))
            // if override, use subClass method; getter/setter method won't overload
            .forEach(m -> methodMap.put(Tuple2.of(m.getDeclaringClass(), m.getName()), m));
        clazz = clazz.getSuperclass();
    } while (clazz != null && searchParent);
    // Default methods on direct interfaces can also act as accessors.
    for (Class<?> anInterface : clz.getInterfaces()) {
        Method[] methods = anInterface.getDeclaredMethods();
        for (Method method : methods) {
            if (method.isDefault()) {
                methodMap.put(Tuple2.of(method.getDeclaringClass(), method.getName()), method);
            }
        }
    }
    // use TreeMap to sort to fix field order
    TreeMap<Field, Descriptor> descriptorMap = new TreeMap<>(fieldComparator);
    for (Field field : fieldList) {
        Class<?> fieldDeclaringClass = field.getDeclaringClass();
        String fieldName = field.getName();
        String cap = StringUtils.capitalize(fieldName);
        Method getter;
        // boolean primitives follow the "isXxx" accessor convention.
        if ("boolean".equalsIgnoreCase(field.getType().getSimpleName())) {
            getter = methodMap.get(Tuple2.of(fieldDeclaringClass, "is" + cap));
        } else {
            getter = methodMap.get(Tuple2.of(fieldDeclaringClass, "get" + cap));
        }
        // Discard a getter whose arity or generic return type doesn't match the field.
        if (getter != null) {
            if (getter.getParameterCount() != 0
                || !getter
                    .getGenericReturnType()
                    .getTypeName()
                    .equals(field.getGenericType().getTypeName())) {
                getter = null;
            }
        }
        Method setter = methodMap.get(Tuple2.of(fieldDeclaringClass, "set" + cap));
        // Same strictness for setters: exactly one parameter of the field's generic type.
        if (setter != null) {
            if (setter.getParameterCount() != 1
                || !setter
                    .getGenericParameterTypes()[0]
                    .getTypeName()
                    .equals(field.getGenericType().getTypeName())) {
                setter = null;
            }
        }
        TypeRef fieldType = TypeRef.of(field.getGenericType());
        descriptorMap.put(field, new Descriptor(field, fieldType, getter, setter));
    }
    // Don't cache descriptors using a static `WeakHashMap<Class<?>, SortedMap<Field, Descriptor>>`,
    // otherwise classes can't be gc.
    return descriptorMap;
}
|
@Test
public void testBuildBeanedDescriptorsMap() throws Exception {
    // Sanity: reflective Field objects are equal but not identical between lookups,
    // so the descriptor map must rely on equals/compare, not identity.
    Assert.assertEquals(BeanA.class.getDeclaredField("f1"), BeanA.class.getDeclaredField("f1"));
    Assert.assertNotSame(BeanA.class.getDeclaredField("f1"), BeanA.class.getDeclaredField("f1"));
    SortedMap<Field, Descriptor> map = Descriptor.buildBeanedDescriptorsMap(BeanA.class, true);
    Assert.assertTrue(map.containsKey(BeanA.class.getDeclaredField("f1")));
    // Generic field types are captured exactly (List<Double>, not raw List).
    Assert.assertEquals(
        map.get(BeanA.class.getDeclaredField("doubleList")).getTypeRef(),
        new TypeRef<List<Double>>() {});
    Assert.assertNotNull(map.get(BeanA.class.getDeclaredField("longStringField")).getReadMethod());
    Assert.assertEquals(
        map.get(BeanA.class.getDeclaredField("longStringField")).getWriteMethod(),
        BeanA.class.getDeclaredMethod("setLongStringField", String.class));
    // searchParent=false must only see B's own declared field(s).
    SortedMap<Field, Descriptor> map2 = Descriptor.buildBeanedDescriptorsMap(B.class, false);
    Assert.assertEquals(map2.size(), 1);
}
|
/**
 * Convenience overload: builds a SourceDescription with no remote stats/error streams
 * and a placeholder local host, delegating to the full factory method.
 */
public static SourceDescription create(
    final DataSource dataSource,
    final boolean extended,
    final List<RunningQuery> readQueries,
    final List<RunningQuery> writeQueries,
    final Optional<TopicDescription> topicDescription,
    final List<QueryOffsetSummary> queryOffsetSummaries,
    final List<String> sourceConstraints,
    final MetricCollectors metricCollectors
) {
    return create(
        dataSource,
        extended,
        readQueries,
        writeQueries,
        topicDescription,
        queryOffsetSummaries,
        sourceConstraints,
        // No cluster-wide stats in this overload.
        Stream.empty(),
        Stream.empty(),
        // Placeholder host; callers needing the real host use the full overload.
        new KsqlHostInfo("", 0),
        metricCollectors
    );
}
|
// Remote per-host stats and error stats supplied to the factory must appear in the
// resulting description's cluster statistics / cluster error stats.
@Test
public void testShouldIncludeRemoteStatsIfProvided() {
    final List<QueryHostStat> remoteStats = IntStream.range(0, 5)
        .boxed()
        .map(x -> new QueryHostStat(
            new KsqlHostInfoEntity("otherhost:1090"),
            ConsumerCollector.CONSUMER_MESSAGES_PER_SEC,
            x,
            x)
        ).collect(Collectors.toList());
    final List<QueryHostStat> remoteErrors = IntStream.range(0, 5)
        .boxed()
        .map(x -> new QueryHostStat(
            new KsqlHostInfoEntity("otherhost:1090"),
            StreamsErrorCollector.CONSUMER_FAILED_MESSAGES_PER_SEC,
            x,
            x)
        ).collect(Collectors.toList());
    // Given:
    final String kafkaTopicName = "kafka";
    final DataSource dataSource = buildDataSource(kafkaTopicName, Optional.empty());
    final MetricCollectors mock = Mockito.mock(MetricCollectors.class);
    Mockito.when(mock.getAndFormatStatsFor(anyString(), anyBoolean())).thenReturn(mockStringStat);
    Mockito.when(mock.getStatsFor(dataSource.getKafkaTopicName(), true)).thenReturn(errorStats);
    Mockito.when(mock.getStatsFor(dataSource.getKafkaTopicName(), false)).thenReturn(stats);
    // When
    final SourceDescription sourceDescription = SourceDescriptionFactory.create(
        dataSource,
        true,
        Collections.emptyList(),
        Collections.emptyList(),
        Optional.empty(),
        Collections.emptyList(),
        Collections.emptyList(),
        remoteStats.stream(),
        remoteErrors.stream(),
        new KsqlHostInfo("myhost", 10),
        mock
    );
    // Then:
    assertThat(
        remoteStats,
        everyItem(isIn(sourceDescription.getClusterStatistics()))
    );
    assertThat(
        remoteErrors,
        everyItem(isIn(sourceDescription.getClusterErrorStats()))
    );
}
|
public static ScheduledTaskHandler of(UUID uuid, String schedulerName, String taskName) {
    // Member-addressed handler: partition id is fixed to -1 because the task
    // is located by member UUID, not by partition.
    final int noPartition = -1;
    return new ScheduledTaskHandlerImpl(uuid, noPartition, schedulerName, taskName);
}
|
// Parsing a handler URN with an unknown base prefix must be rejected.
@Test(expected = IllegalArgumentException.class)
public void of_withWrongBase() {
    ScheduledTaskHandler.of("wrongbase:-\u00000\u0000Scheduler\u0000Task");
}
|
// Instantiates the requested component root via its public no-arg constructor.
// Any reflective failure (no such constructor, inaccessible, ctor threw) is surfaced
// as IllegalArgumentException with the original exception as cause.
@Override
public <T extends ComponentRoot> T get(Class<T> providerId) {
    try {
        return providerId.getConstructor().newInstance();
    } catch (ReflectiveOperationException e) {
        throw new IllegalArgumentException(e);
    }
}
|
// A component root without a default constructor must fail with an
// IllegalArgumentException wrapping the underlying NoSuchMethodException.
@Test
void getNoDefaultConstructorImplementation() {
    IllegalArgumentException thrown = assertThrows(
        IllegalArgumentException.class,
        () -> appRoot.get(ComponentRootNoDefaultConstructor.class),
        "Expected constructor to throw, but it didn't"
    );
    // The wrapped cause's class name leads the message.
    String expectedMessage = "java.lang.NoSuchMethodException";
    assertThat(thrown.getMessage()).startsWith(expectedMessage);
}
|
// REST endpoint: returns the configured web-server URLs; delegates to the config service.
@Operation(summary = "get services", tags = { SwaggerConfig.SHARED }, operationId = "app_services",
    parameters = {@Parameter(ref = "API-V"), @Parameter(ref = "OS-T"), @Parameter(ref = "APP-V"), @Parameter(ref = "OS-V"), @Parameter(ref = "REL-T")})
@GetMapping(value = "services", produces = "application/json")
@ResponseBody
public WebServerResponse getWebserversUrls() {
    return configService.getWebserverUrls();
}
|
// The controller must delegate exactly once to the config service.
@Test
void validateIfCorrectProcessesAreCalledGetUrls() {
    configController.getWebserversUrls();
    verify(configService, times(1)).getWebserverUrls();
}
|
// Factory: parse the date string with the given format into a DateTime.
public static DateTime parse(CharSequence dateStr, DateFormat dateFormat) {
    return new DateTime(dateStr, dateFormat);
}
|
// Dates/times with single-digit month/day/hour/minute/second components must parse
// into zero-padded normalized values; sub-second parts are truncated.
@SuppressWarnings("ConstantConditions")
@Test
public void parseSingleNumberTest() {
    DateTime dateTime = DateUtil.parse("2020-5-08");
    assertEquals("2020-05-08 00:00:00", dateTime.toString());
    dateTime = DateUtil.parse("2020-5-8");
    assertEquals("2020-05-08 00:00:00", dateTime.toString());
    dateTime = DateUtil.parse("2020-05-8");
    assertEquals("2020-05-08 00:00:00", dateTime.toString());

    //datetime
    dateTime = DateUtil.parse("2020-5-8 3:12:3");
    assertEquals("2020-05-08 03:12:03", dateTime.toString());
    dateTime = DateUtil.parse("2020-5-8 3:2:3");
    assertEquals("2020-05-08 03:02:03", dateTime.toString());
    dateTime = DateUtil.parse("2020-5-8 3:12:13");
    assertEquals("2020-05-08 03:12:13", dateTime.toString());
    // Milliseconds are dropped in the default string form.
    dateTime = DateUtil.parse("2020-5-8 4:12:26.223");
    assertEquals("2020-05-08 04:12:26", dateTime.toString());
}
|
/**
 * Shifts the consumer's current position on each partition by {@code shiftBy}
 * (may be negative), clamping the result into the partition's valid
 * [beginning, end] offset range before seeking.
 */
public void shiftOffsetsBy(final Consumer<byte[], byte[]> client,
                           final Set<TopicPartition> inputTopicPartitions,
                           final long shiftBy) {
    // Snapshot the valid offset range for each partition.
    final Map<TopicPartition, Long> latestOffsets = client.endOffsets(inputTopicPartitions);
    final Map<TopicPartition, Long> earliestOffsets = client.beginningOffsets(inputTopicPartitions);

    // Requested target offset per partition: current position + delta.
    final Map<TopicPartition, Long> requestedOffsets = new HashMap<>(inputTopicPartitions.size());
    for (final TopicPartition partition : inputTopicPartitions) {
        requestedOffsets.put(partition, client.position(partition) + shiftBy);
    }

    // Clamp the requests into range, then seek each partition to its validated offset.
    final Map<TopicPartition, Long> clampedOffsets =
        checkOffsetRange(requestedOffsets, earliestOffsets, latestOffsets);
    for (final TopicPartition partition : inputTopicPartitions) {
        client.seek(partition, clampedOffsets.get(partition));
    }
}
|
// Shifting below the beginning offset must clamp to the beginning (0), so all 5
// records seeded by the fixture are re-consumed.
@Test
public void testShiftOffsetByWhenBeforeBeginningOffset() {
    final Map<TopicPartition, Long> endOffsets = new HashMap<>();
    endOffsets.put(topicPartition, 4L);
    consumer.updateEndOffsets(endOffsets);
    final Map<TopicPartition, Long> beginningOffsets = new HashMap<>();
    beginningOffsets.put(topicPartition, 0L);
    consumer.updateBeginningOffsets(beginningOffsets);
    streamsResetter.shiftOffsetsBy(consumer, inputTopicPartitions, -3L);

    final ConsumerRecords<byte[], byte[]> records = consumer.poll(Duration.ofMillis(500));
    assertEquals(5, records.count());
}
|
/**
 * Resolves the JDBC password from properties: a direct password wins; otherwise it is
 * fetched from a keystore (CONFIG_PWD_KEYSTORE + CONFIG_PWD_KEY) or from a password URI.
 * Setting more than one source logs a warning but is tolerated (see comment below).
 *
 * @param keyTransform maps the canonical config key to the property name actually used
 * @return the resolved password, or null if none of the sources is configured
 */
public static String getPasswordFromProperties(Properties properties, Function<String, String> keyTransform)
    throws HiveException, IOException {
    String passwd = properties.getProperty(keyTransform.apply(CONFIG_PWD));
    String keystore = properties.getProperty(keyTransform.apply(CONFIG_PWD_KEYSTORE));
    String uri = properties.getProperty(keyTransform.apply(CONFIG_PWD_URI));
    if (countNonNull(passwd, keystore, uri) > 1) {
        // In tez, when the job conf is copied there is a code path in HiveInputFormat where all the table properties
        // are copied and the password is copied from the job credentials, so its possible to have 2 of them set.
        // For now ignore this and print a warning message, we should fix so that the above code is used instead.
        LOGGER.warn("Only one of " + CONFIG_PWD + ", " + CONFIG_PWD_KEYSTORE + ", " + CONFIG_PWD_URI + " can be set");
        // throw new HiveException(
        //    "Only one of " + CONFIG_PWD + ", " + CONFIG_PWD_KEYSTORE + ", " + CONFIG_PWD_URI + " can be set");
    }
    // Direct password takes precedence; keystore lookup only if no direct password.
    if (passwd == null && keystore != null) {
        String key = properties.getProperty(keyTransform.apply(CONFIG_PWD_KEY));
        passwd = Utilities.getPasswdFromKeystore(keystore, key);
    }
    // URI lookup is the last resort.
    if (passwd == null && uri != null) {
        try {
            passwd = Utilities.getPasswdFromUri(uri);
        } catch (URISyntaxException e) {
            // Should I include the uri in the exception? Suppressing for now, since it may have sensitive info.
            throw new HiveException("Invalid password uri specified", e);
        }
    }
    return passwd;
}
|
// With both a direct password and a password URI set (the tolerated double-config
// case), the direct password must win and be returned unchanged.
@Test
public void testExtractPassword() throws Exception {
    String prefix = "test.";
    String password = "my-super-secret";
    Properties props = new Properties();
    props.put(prefix + JdbcStorageConfigManager.CONFIG_PWD, password);
    props.put(prefix + JdbcStorageConfigManager.CONFIG_PWD_URI, "test:///random-stuff");
    String passwd = JdbcStorageConfigManager.getPasswordFromProperties(props, s -> prefix + s);
    assertThat(passwd, is(equalTo(password)));
}
|
// Initial flow-control receiver window, derived from the congestion-control outcome.
public int initialWindowLength()
{
    return CongestionControl.receiverWindowLength(ccOutcome);
}
|
// When the channel URI carries no rcv-wnd parameter, the initial window length must
// come from the MediaDriver context's configured initialWindowLength.
@Test
void shouldSetWindowLengthFromContext()
{
    final UdpChannel channelWithoutWindow = UdpChannel.parse("aeron:udp?endpoint=127.0.0.1:9999");
    final MediaDriver.Context context = new MediaDriver.Context().initialWindowLength(16536);
    final int termLength = 1_000_000;

    final StaticWindowCongestionControl staticWindowCongestionControl = new StaticWindowCongestionControl(
        0, channelWithoutWindow, 0, 0, termLength, 0, null, null, null, context, null);

    assertEquals(16536, staticWindowCongestionControl.initialWindowLength());
}
|
// Audit-logs a successful login at DEBUG. All message arguments are lazy suppliers,
// so no request/source data is touched unless DEBUG logging is actually enabled.
@Override
public void loginSuccess(HttpRequest request, @Nullable String login, Source source) {
    checkRequest(request);
    requireNonNull(source, "source can't be null");
    LOGGER.atDebug().setMessage("login success [method|{}][provider|{}|{}][IP|{}|{}][login|{}]")
        .addArgument(source::getMethod)
        .addArgument(source::getProvider)
        .addArgument(source::getProviderName)
        .addArgument(request::getRemoteAddr)
        .addArgument(() -> getAllIps(request))
        // Sanitize and cap the login value to keep logs safe from injection/flooding.
        .addArgument(() -> preventLogFlood(sanitizeLog(emptyIfNull(login))))
        .log();
}
|
// At INFO level the lazy DEBUG arguments must never be evaluated: nothing is logged
// and the (unstubbed) mock request is never queried.
@Test
public void login_success_does_not_interact_with_request_if_log_level_is_above_DEBUG() {
    HttpRequest request = mock(HttpRequest.class);
    logTester.setLevel(Level.INFO);
    underTest.loginSuccess(request, "login", Source.sso());
    assertThat(logTester.logs()).isEmpty();
}
|
public String toJsonString(Object object) {
    // Obfuscate via toJson(...) and render the result; String.valueOf maps a
    // null result to the literal "null" rather than throwing.
    Object obfuscated = toJson(object);
    return String.valueOf(obfuscated);
}
|
// Obfuscation must redact the value under the secret-looking key while leaving
// ordinary values intact.
@Test
public void testJsonString() {
    String output = _obfuscator.toJsonString("{\"key\":\"VALUE\",\"my.secret\":\"SECRET\"}");
    Assert.assertTrue(output.contains(VALUE));
    Assert.assertFalse(output.contains(SECRET));
}
|
/**
 * Enqueues an upload task, applying byte-based backpressure.
 * <p>
 * Fails the task immediately if the uploader is already in a failed state. Otherwise,
 * under {@code lock}: waits until the throttle has capacity, seizes capacity for the
 * task's size, marks the uploader unavailable if capacity is now exhausted, and
 * schedules the task. An interrupt while waiting fails the task, restores the interrupt
 * flag, and is rethrown as IOException; any other exception fails the task and
 * propagates.
 */
@Override
public void upload(UploadTask uploadTask) throws IOException {
    Throwable error = getErrorSafe();
    if (error != null) {
        LOG.debug("don't persist {} changesets, already failed", uploadTask.changeSets.size());
        uploadTask.fail(error);
        return;
    }
    LOG.debug("persist {} changeSets", uploadTask.changeSets.size());
    try {
        long size = uploadTask.getSize();
        synchronized (lock) {
            // Block until the throttle admits more bytes; lock.notify presumably
            // happens when capacity is released elsewhere — TODO confirm.
            while (!uploadThrottle.hasCapacity()) {
                lock.wait();
            }
            uploadThrottle.seizeCapacity(size);
            if (!uploadThrottle.hasCapacity()) {
                availabilityHelper.resetUnavailable();
            }
            scheduledBytesCounter += size;
            // Wrap so the seized capacity/counter are released when the task completes.
            scheduled.add(wrapWithSizeUpdate(uploadTask, size));
            scheduleUploadIfNeeded();
        }
    } catch (InterruptedException e) {
        // Preserve the interrupt status per the standard contract.
        Thread.currentThread().interrupt();
        uploadTask.fail(e);
        throw new IOException(e);
    } catch (Exception e) {
        uploadTask.fail(e);
        throw e;
    }
}
|
// Interrupting a thread that is blocked on the byte-capacity backpressure must fail
// the pending upload with an (IOException-wrapped) InterruptedException and upload nothing.
@Test
void testInterruptedWhenBackPressured() throws Exception {
    int limit = MAX_BYTES_IN_FLIGHT;
    TestScenario test =
        (uploader, probe) -> {
            // Fill the throttle past its limit so the next upload blocks.
            List<StateChangeSet> changes = getChanges(limit + 1);
            upload(uploader, changes);
            assertSaved(probe, changes); // only sent for upload
            probe.reset(); // don't complete the upload - so capacity isn't released
            try {
                upload(uploader, getChanges(1)); // should block
                fail("upload shouldn't succeed after exceeding the limit");
            } catch (IOException e) {
                if (findThrowable(e, InterruptedException.class).isPresent()) {
                    assertThat(probe.getUploaded()).isEmpty();
                } else {
                    rethrow(e);
                }
            }
        };
    Tuple2<Thread, CompletableFuture<Void>> threadAndFuture = uploadAsync(limit, test);
    Thread.sleep(500); // allow to upload (i.e. fail)
    threadAndFuture.f0.interrupt();
    threadAndFuture.f1.join();
}
|
/**
 * Evaluates the compiled expression against the row. Any failure — including argument
 * resolution — is reported through the processing logger and the supplied default
 * value is returned instead of propagating the exception.
 */
public Object evaluate(
    final GenericRow row,
    final Object defaultValue,
    final ProcessingLogger logger,
    final Supplier<String> errorMsg
) {
    try {
        // Argument resolution stays inside the try: a resolution failure must also
        // fall back to the default value.
        final Object[] arguments = {spec.resolveArguments(row), defaultValue, logger, row};
        return expressionEvaluator.evaluate(arguments);
    } catch (final Exception e) {
        // InvocationTargetException wraps the user expression's own failure; unwrap
        // it so the logged error points at the root cause.
        final Throwable rootCause;
        if (e instanceof InvocationTargetException) {
            rootCause = e.getCause();
        } else {
            rootCause = e;
        }
        logger.error(RecordProcessingError.recordProcessingError(errorMsg.get(), rootCause, row));
        return defaultValue;
    }
}
|
// A throwing expression must not propagate: evaluate() returns the default value.
@Test
public void shouldReturnDefaultIfEvalThrows() throws Exception {
    // Given:
    spec.addParameter(
        ColumnName.of("foo1"),
        Integer.class,
        0
    );
    compiledExpression = new CompiledExpression(
        expressionEvaluator,
        spec.build(),
        EXPRESSION_TYPE,
        expression
    );
    // Wrapped in InvocationTargetException, as the codegen evaluator would throw it.
    final RuntimeException e = new RuntimeException("Boom");
    when(expressionEvaluator.evaluate(any())).thenThrow(new InvocationTargetException(e));

    // When:
    final Object result = compiledExpression
        .evaluate(genericRow(123), DEFAULT_VAL, processingLogger, errorMsgSupplier);

    // Then:
    assertThat(result, is(DEFAULT_VAL));
}
|
public String[] getPathComponents() {
    // Split the full dotted queue path (e.g. "root.a.b") on the delimiter pattern.
    final String fullPath = getFullPath();
    return fullPath.split(QUEUE_REGEX_DELIMITER);
}
|
// Path splitting: nested paths, the bare root, and the empty path (which yields
// a single empty component, per String.split semantics).
@Test
public void testGetPathComponents() {
    Assert.assertArrayEquals(TEST_QUEUE_PATH.getPathComponents(),
        new String[] {"root", "level_1", "level_2", "level_3"});
    Assert.assertArrayEquals(ROOT_PATH.getPathComponents(), new String[] {"root"});
    Assert.assertArrayEquals(EMPTY_PATH.getPathComponents(), new String[] {""});
}
|
// Accessor for the underlying Hikari connection pool.
public HikariDataSource getDataSource() {
    return ds;
}
|
// Smoke test (ignored by default, needs a live database): the singleton factory must
// yield a HikariDataSource that can hand out a working connection.
@Test
@Ignore
public void testGetDataSource() {
    DataSource ds = SingletonServiceFactory.getBean(DataSource.class);
    assertNotNull(ds);
    HikariDataSource hds = (HikariDataSource)ds;
    System.out.println(hds.getMaximumPoolSize());
    try(Connection connection = ds.getConnection()){
        assertNotNull(connection);
    } catch (SQLException e) {
        e.printStackTrace();
    }
    hds = (HikariDataSource)ds;
    System.out.println(hds.getMaximumPoolSize());
}
|
/**
 * Deletes a namespace only if nothing still references it.
 * <p>
 * Checks data sources, collectors, terms and non-deleted data entities in parallel;
 * if any reference exists the filter empties the stream and switchIfEmpty raises
 * CascadeDeleteException, otherwise the namespace row is deleted and its id emitted.
 */
@Override
public Mono<Long> delete(final long id) {
    return Mono.zip(
            dataSourceRepository.existsByNamespace(id),
            collectorRepository.existsByNamespace(id),
            termRepository.existsByNamespace(id),
            dataEntityRepository.existsNonDeletedByNamespaceId(id)
        )
        // true iff at least one dependent resource still exists.
        .map(t -> BooleanUtils.toBoolean(t.getT1())
            || BooleanUtils.toBoolean(t.getT2())
            || BooleanUtils.toBoolean(t.getT3())
            || BooleanUtils.toBoolean(t.getT4()))
        .filter(exists -> !exists)
        .switchIfEmpty(Mono.error(new CascadeDeleteException(
            "Namespace cannot be deleted: there are still resources attached")))
        .flatMap(ign -> namespaceRepository.delete(id))
        .map(NamespacePojo::getId);
}
|
@Test
@DisplayName("Tries to delete a namespace which is tied with existing data source and fails with an error")
public void testDeleteTiedNamespaceWithDataSource() {
    final long namespaceId = 1L;
    // Only the data-source check reports an attached resource.
    when(collectorRepository.existsByNamespace(eq(namespaceId))).thenReturn(Mono.just(false));
    when(dataSourceRepository.existsByNamespace(eq(namespaceId))).thenReturn(Mono.just(true));
    when(termRepository.existsByNamespace(eq(namespaceId))).thenReturn(Mono.just(false));
    when(dataEntityRepository.existsNonDeletedByNamespaceId(eq(namespaceId))).thenReturn(Mono.just(false));

    namespaceService.delete(namespaceId)
        .as(StepVerifier::create)
        .verifyError(CascadeDeleteException.class);

    // Every existence check ran, but the actual delete must not have been attempted.
    verify(namespaceRepository, never()).delete(eq(namespaceId));
    verify(dataSourceRepository, only()).existsByNamespace(eq(namespaceId));
    verify(termRepository, only()).existsByNamespace(eq(namespaceId));
    verify(dataEntityRepository, only()).existsNonDeletedByNamespaceId(eq(namespaceId));
    verify(collectorRepository, only()).existsByNamespace(eq(namespaceId));
}
|
@PostMapping("/change_two_factor")
@Operation(summary = "Set the two factor setting of an account")
public AccountResult changeTwoFactor(@RequestBody DTwoFactorChangeRequest deprecatedRequest) {
    // Reject requests that are missing required two-factor parameters up front.
    validateSettingTwoFactor(deprecatedRequest);
    // Resolve and validate the app session carried by the request.
    AppSession appSession = validate(deprecatedRequest);
    // Apply the change for the session's account and return the service result.
    return accountService.changeTwoFactor(appSession.getAccountId(), deprecatedRequest.getRequest());
}
|
// A request carrying only a session id (no two-factor payload) must be rejected
// with a BAD_REQUEST account error before any session lookup happens.
@Test
public void invalidSettingTwoFactor() {
    DTwoFactorChangeRequest request = new DTwoFactorChangeRequest();
    request.setAppSessionId("id");
    DAccountException exc = assertThrows(DAccountException.class, () -> {
        twoFactorController.changeTwoFactor(request);
    });
    assertEquals(HttpStatus.BAD_REQUEST, exc.getAccountErrorMessage().getHttpStatus());
    assertEquals("Missing parameters.", exc.getAccountErrorMessage().getMessage());
}
|
// Accessor for the time unit of the configured connection time-to-live.
public TimeUnit getConnTimeToLiveTimeUnit() {
    return connTimeToLiveTimeUnit;
}
|
// The builder must retain the time unit passed to setConnectionTimeToLive.
@Test
void testGetConnTimeToLiveTimeUnit() {
    HttpClientConfig config = HttpClientConfig.builder().setConnectionTimeToLive(4000, TimeUnit.SECONDS).build();
    assertEquals(TimeUnit.SECONDS, config.getConnTimeToLiveTimeUnit());
}
|
/**
 * Reads path attributes over FTP. Prefers a single MLST command (when enabled via
 * "ftp.command.mlsd" and advertised in FEAT), parsing the machine-readable reply;
 * otherwise falls back to the default attributes lookup (directory listing). The root
 * has no attributes of its own and returns EMPTY.
 */
@Override
public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException {
    if(file.isRoot()) {
        return PathAttributes.EMPTY;
    }
    try {
        try {
            final PreferencesReader preferences = new HostPreferences(session.getHost());
            if(preferences.getBoolean("ftp.command.mlsd")) {
                if(session.getClient().hasFeature(FTPCmd.MLST.getCommand())) {
                    if(!FTPReply.isPositiveCompletion(session.getClient().sendCommand(FTPCmd.MLST, file.getAbsolute()))) {
                        throw new FTPException(session.getClient().getReplyCode(), session.getClient().getReplyString());
                    }
                    // Parse the MLST reply lines relative to the parent directory.
                    final FTPDataResponseReader reader = new FTPMlsdListResponseReader();
                    final AttributedList<Path> attributes
                        = reader.read(file.getParent(), Arrays.asList(session.getClient().getReplyStrings()));
                    if(attributes.contains(file)) {
                        return attributes.get(attributes.indexOf(file)).attributes();
                    }
                    // Fall through to the default lookup when the file wasn't in the reply.
                }
                log.warn("No support for MLST in reply to FEAT");
            }
            return new DefaultAttributesFinderFeature(session).find(file, listener);
        }
        catch(IOException e) {
            throw new FTPExceptionMappingService().map("Failure to read attributes of {0}", e, file);
        }
    }
    catch(InteroperabilityException | AccessDeniedException | NotfoundException f) {
        // Server rejected or failed the MLST route; retry via the default listing-based lookup.
        log.warn(String.format("Failure reading attributes for %s. %s", file, f.getMessage()));
        return new DefaultAttributesFinderFeature(session).find(file, listener);
    }
}
|
// The root path must short-circuit to PathAttributes.EMPTY without any server round trip.
@Test
public void testFindRoot() throws Exception {
    final FTPAttributesFinderFeature f = new FTPAttributesFinderFeature(session);
    assertEquals(PathAttributes.EMPTY, f.find(new Path("/", EnumSet.of(Path.Type.directory))));
}
|
// Computes the fingerprint of a public key by serializing it into the SSH wire
// format (compact blob) and delegating to the stream-based overload.
public String fingerprint(final PublicKey key) throws BackgroundException {
    return this.fingerprint(new ByteArrayInputStream(
        new Buffer.PlainBuffer().putPublicKey(key).getCompactData()));
}
|
// A known RSA public key must produce its known colon-separated MD5-style fingerprint.
@Test
public void testFingerprint() throws Exception {
    final FileKeyProvider f = new OpenSSHKeyFile.Factory().create();
    f.init("", "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC/71hmi4R+CZqGvZ+aVdaKIt5yb2H87yNAAcdtPAQBJBqKw/vR0iYeU/tnwKWRfnTK/NcN2H6yG/wx0o9WiavUhUaSUPesJo3/PpZ7fZMUk/Va8I7WI0i25XlWJTE8SMFftIuJ8/AVPNSCmL46qy93BlQb8W70O9XQD/yj/Cy6aPb9wlHxdaswrmdoIzI4BS28Tu1F45TalqarqTLm3wY4RpghxHo8LxCgNbmd0cr6XnOmz1RM+rlbkiuSdNphW3Ah2iCHMif/KdRCFCPi5LyUrdheOtQYvQCmFREczb3kyuQPCElQac4DeL37F9ZLLBHnRVi7KxFqDbcbNLadfExx dkocher@osaka.local");
    assertEquals("87:60:23:a3:56:b5:1a:24:8b:63:43:ea:5a:d4:e1:9d",
        new SSHFingerprintGenerator().fingerprint(f.getPublic())
    );
}
|
/**
 * Best-effort delete that never throws.
 *
 * @param file file or directory to delete, may be null
 * @return false for null input or when an IOException/SecurityException occurred;
 *         true otherwise. NOTE(review): for a directory that deleteDirectory failed to
 *         fully remove without throwing, this still returns true (only a warning is
 *         logged) — confirm whether callers rely on that.
 */
public static boolean deleteQuietly(@Nullable File file) {
    if (file == null) {
        return false;
    }
    try {
        if (file.isDirectory()) {
            deleteDirectory(file);
            if (file.exists()) {
                LOG.warn("Unable to delete directory '{}'", file);
            }
        } else {
            Files.delete(file.toPath());
        }
        return true;
    } catch (IOException | SecurityException ignored) {
        // Quiet by contract: swallow and report failure via the return value.
        return false;
    }
}
|
@Test
public void deleteQuietly_deletes_directory_and_content() throws IOException {
    // Build a small tree: target/file1.txt, target/subDir1/file2.txt, target/subDir1/subDir2.
    Path target = temporaryFolder.newFolder().toPath();
    Path childFile1 = Files.createFile(target.resolve("file1.txt"));
    Path childDir1 = Files.createDirectory(target.resolve("subDir1"));
    Path childFile2 = Files.createFile(childDir1.resolve("file2.txt"));
    Path childDir2 = Files.createDirectory(childDir1.resolve("subDir2"));
    // Sanity-check the fixture before exercising the method under test.
    assertThat(target).isDirectory();
    assertThat(childFile1).isRegularFile();
    assertThat(childDir1).isDirectory();
    assertThat(childFile2).isRegularFile();
    assertThat(childDir2).isDirectory();
    FileUtils2.deleteQuietly(target.toFile());
    // The root directory and its entire contents must be gone.
    assertThat(target).doesNotExist();
    assertThat(childFile1).doesNotExist();
    assertThat(childDir1).doesNotExist();
    assertThat(childFile2).doesNotExist();
    assertThat(childDir2).doesNotExist();
}
|
/**
 * Plans splits for the enumerator. Refreshes the table first, then either
 * performs the initial discovery (no previous position) or discovers the
 * splits added since the given position.
 */
@Override
public ContinuousEnumerationResult planSplits(IcebergEnumeratorPosition lastPosition) {
    table.refresh();
    return lastPosition == null
        ? discoverInitialSplits()
        : discoverIncrementalSplits(lastPosition);
}
|
@Test
public void testIncrementalFromLatestSnapshotWithNonEmptyTable() throws Exception {
    // Seed the table with two snapshots (snapshot1 then snapshot2).
    appendTwoSnapshots();
    ScanContext scanContext =
        ScanContext.builder()
            .startingStrategy(StreamingStartingStrategy.INCREMENTAL_FROM_LATEST_SNAPSHOT)
            .build();
    ContinuousSplitPlannerImpl splitPlanner =
        new ContinuousSplitPlannerImpl(TABLE_RESOURCE.tableLoader().clone(), scanContext, null);
    // Initial plan: no starting position, so fromPosition must be null and no splits yet.
    ContinuousEnumerationResult initialResult = splitPlanner.planSplits(null);
    assertThat(initialResult.fromPosition()).isNull();
    // For inclusive behavior, the initial result should point to snapshot1
    // Then the next incremental scan shall discover files from latest snapshot2 (inclusive)
    assertThat(initialResult.toPosition().snapshotId().longValue())
        .isEqualTo(snapshot1.snapshotId());
    assertThat(initialResult.toPosition().snapshotTimestampMs().longValue())
        .isEqualTo(snapshot1.timestampMillis());
    assertThat(initialResult.splits()).isEmpty();
    // Second plan continues from snapshot1 and must advance to snapshot2.
    ContinuousEnumerationResult secondResult = splitPlanner.planSplits(initialResult.toPosition());
    assertThat(secondResult.fromPosition().snapshotId().longValue())
        .isEqualTo(snapshot1.snapshotId());
    assertThat(secondResult.fromPosition().snapshotTimestampMs().longValue())
        .isEqualTo(snapshot1.timestampMillis());
    assertThat(secondResult.toPosition().snapshotId().longValue())
        .isEqualTo(snapshot2.snapshotId());
    assertThat(secondResult.toPosition().snapshotTimestampMs().longValue())
        .isEqualTo(snapshot2.timestampMillis());
    IcebergSourceSplit split = Iterables.getOnlyElement(secondResult.splits());
    assertThat(split.task().files()).hasSize(1);
    Set<String> discoveredFiles =
        split.task().files().stream()
            .map(fileScanTask -> fileScanTask.file().path().toString())
            .collect(Collectors.toSet());
    // should discover dataFile2 appended in snapshot2
    Set<String> expectedFiles = ImmutableSet.of(dataFile2.path().toString());
    assertThat(discoveredFiles).containsExactlyElementsOf(expectedFiles);
    // Further planning cycles with no new snapshots must behave consistently.
    IcebergEnumeratorPosition lastPosition = secondResult.toPosition();
    for (int i = 0; i < 3; ++i) {
        lastPosition = verifyOneCycle(splitPlanner, lastPosition).lastPosition;
    }
}
|
/**
 * Renders the request parameter named {@code key} for an access log entry.
 * Returns a placeholder when the converter is not started, "-" when the
 * parameter is absent, the single value when there is exactly one, and the
 * bracketed list form for multiple values.
 */
@Override
public String convert(IAccessEvent accessEvent) {
    if (!isStarted()) {
        return "INACTIVE_REQUEST_PARAM_CONV";
    }
    // Safe to read here because the request parameter map is cached beforehand.
    final String[] values = accessEvent.getRequestParameterMap().get(key);
    if (values == null || values.length == 0) {
        return "-";
    }
    return values.length == 1 ? values[0] : Arrays.toString(values);
}
|
@Test
void testConvertSeveralParameters() throws Exception {
    // Two values for the same parameter name.
    Mockito.when(httpServletRequest.getParameterValues("name")).thenReturn(new String[]{"Alice", "Bob"});
    Mockito.when(httpServletRequest.getParameterNames())
        .thenReturn(Collections.enumeration(Collections.singleton("name")));
    // Invoked by AccessEvent#prepareForDeferredProcessing
    accessEvent.buildRequestParameterMap();
    // Jetty recycled the request
    Mockito.reset(httpServletRequest);
    // The converter must use the cached parameter map, not the recycled request.
    final String value = safeRequestParameterConverter.convert(accessEvent);
    assertThat(value).isEqualTo("[Alice, Bob]");
}
|
/**
 * Removes the legacy per-document {@code default} field from all index set
 * documents and, once, records the former default index set in the cluster
 * configuration. A {@code MigrationCompleted} marker in the cluster config
 * short-circuits subsequent invocations.
 */
@Override
public void upgrade() {
    if (clusterConfigService.get(MigrationCompleted.class) != null) {
        LOG.debug("Migration already done.");
        return;
    }
    // Do not overwrite an existing default index config
    boolean defaultDone = clusterConfigService.get(DefaultIndexSetConfig.class) != null;
    final ImmutableSet.Builder<String> builder = ImmutableSet.builder();
    // Oldest first, so the earliest index set flagged as default wins.
    final FindIterable<Document> documents = collection.find(exists(FIELD_DEFAULT)).sort(ascending(FIELD_CREATION_DATE));
    for (final Document document : documents) {
        final ObjectId id = document.getObjectId(FIELD_ID);
        final String idString = id.toHexString();
        final boolean isDefault = firstNonNull(document.getBoolean(FIELD_DEFAULT), false);
        if (!defaultDone && isDefault) {
            defaultDone = true;
            clusterConfigService.write(DefaultIndexSetConfig.create(idString));
        }
        // Renamed from "modifiedCount": the value comes from getMatchedCount(),
        // i.e. whether the document was found by the filter, not whether the
        // update actually changed it.
        final long matchedCount = collection.updateOne(eq(FIELD_ID, id), unset(FIELD_DEFAULT)).getMatchedCount();
        if (matchedCount > 0) {
            LOG.info("Removed <default> field from index set <{}> ({})", document.getString(FIELD_TITLE), idString);
            builder.add(idString);
        } else {
            LOG.error("Couldn't remove <default> field from index set <{}> ({})", document.getString(FIELD_TITLE), idString);
        }
    }
    // Record which index sets were migrated so the migration never reruns.
    clusterConfigService.write(MigrationCompleted.create(builder.build()));
}
|
@Test
@MongoDBFixtures("V20161215163900_MoveIndexSetDefaultConfigTest.json")
public void upgradeWhenMigrationCompleted() throws Exception {
    // Count how many documents with a "default" field are in the database.
    final long count = collection.countDocuments(Filters.exists("default"));
    assertThat(count)
        .withFailMessage("There should be at least one document with a \"default\" field in the database")
        .isGreaterThan(0L);
    // Pre-mark the migration as completed so upgrade() must short-circuit.
    clusterConfigService.write(MigrationCompleted.create(Collections.emptySet()));
    migration.upgrade();
    // If the MigrationCompleted object has been written to the cluster config, the migration shouldn't do anything
    // and shouldn't touch the database. That means we should still have all documents with the "default" field
    // from the seed file in the database.
    assertThat(collection.countDocuments(Filters.exists("default"))).isEqualTo(count);
}
|
/**
 * Validates a reservation submission request: the reservation id must be
 * non-null, the target queue must be managed by the reservation system, and
 * the reservation definition must be valid against the queue's plan.
 *
 * @return the plan backing the requested queue
 * @throws YarnException if any validation step fails
 */
public Plan validateReservationSubmissionRequest(
    ReservationSystem reservationSystem, ReservationSubmissionRequest request,
    ReservationId reservationId) throws YarnException {
  // Guard clause: a reservation id is mandatory.
  if (reservationId == null) {
    throw RPCUtil.getRemoteException(
        "Reservation id cannot be null. Please try again specifying "
            + " a valid reservation id by creating a new reservation id.");
  }
  // Check if it is a managed queue
  Plan plan = getPlanFromQueue(reservationSystem, request.getQueue(),
      AuditConstants.SUBMIT_RESERVATION_REQUEST);
  validateReservationDefinition(reservationId,
      request.getReservationDefinition(), plan,
      AuditConstants.SUBMIT_RESERVATION_REQUEST);
  return plan;
}
|
@Test
public void testSubmitReservationEmptyRR() {
    // Request built with no resources specified (second argument is 0) —
    // presumably numContainers; TODO confirm against the helper's signature.
    ReservationSubmissionRequest request =
        createSimpleReservationSubmissionRequest(1, 0, 1, 5, 3);
    Plan plan = null;
    try {
        plan =
            rrValidator.validateReservationSubmissionRequest(rSystem, request,
                ReservationSystemTestUtil.getNewReservationId());
        // Validation must reject a request without resources.
        Assert.fail();
    } catch (YarnException e) {
        // The plan must not have been assigned before the exception was raised.
        Assert.assertNull(plan);
        String message = e.getMessage();
        Assert.assertTrue(message
            .startsWith("No resources have been specified to reserve"));
        LOG.info(message);
    }
}
|
/**
 * Translates HTTP/1.x message headers into HTTP/2 pseudo-headers plus
 * regular headers. For requests, derives {@code :path}, {@code :scheme},
 * {@code :authority} and {@code :method}; for responses, sets {@code :status}.
 * Remaining HTTP/1.x headers are copied by the header-level overload.
 *
 * @param in the HTTP/1.x request or response
 * @param validateHeaders whether the resulting headers should be validated
 * @return the equivalent HTTP/2 headers
 */
public static Http2Headers toHttp2Headers(HttpMessage in, boolean validateHeaders) {
    HttpHeaders inHeaders = in.headers();
    final Http2Headers out = new DefaultHttp2Headers(validateHeaders, inHeaders.size());
    if (in instanceof HttpRequest) {
        HttpRequest request = (HttpRequest) in;
        String host = inHeaders.getAsString(HttpHeaderNames.HOST);
        if (isOriginForm(request.uri()) || isAsteriskForm(request.uri())) {
            // origin-form ("/path") and asterisk-form ("*") carry no authority,
            // so the URI is used as-is for :path.
            out.path(new AsciiString(request.uri()));
            setHttp2Scheme(inHeaders, out);
        } else {
            // absolute-form: authority and scheme can come from the URI itself.
            URI requestTargetUri = URI.create(request.uri());
            out.path(toHttp2Path(requestTargetUri));
            // Take from the request-line if HOST header was empty
            host = isNullOrEmpty(host) ? requestTargetUri.getAuthority() : host;
            setHttp2Scheme(inHeaders, requestTargetUri, out);
        }
        setHttp2Authority(host, out);
        out.method(request.method().asciiName());
    } else if (in instanceof HttpResponse) {
        HttpResponse response = (HttpResponse) in;
        out.status(response.status().codeAsText());
    }
    // Add the HTTP headers which have not been consumed above
    toHttp2Headers(inHeaders, out);
    return out;
}
|
@Test
public void cookieNoSpace() {
    // A cookie header without the usual "; " separator must be copied
    // verbatim rather than being split into crumbs.
    final HttpHeaders requestHeaders = new DefaultHttpHeaders();
    requestHeaders.add(COOKIE, "one=foo;two=bar");
    final Http2Headers http2Headers = new DefaultHttp2Headers();
    HttpConversionUtil.toHttp2Headers(requestHeaders, http2Headers);
    assertEquals("one=foo;two=bar", http2Headers.get(COOKIE)); // not split
}
|
/**
 * Sets whether full exception details should be sent in error responses
 * (as opposed to only a summary message).
 */
public void setSendFullErrorException(boolean sendFullErrorException) {
    this.sendFullErrorException = sendFullErrorException;
}
|
@Test
void handleFlowableIllegalStateExceptionWithoutSendFullErrorException() throws Exception {
    // Arrange the controller to throw and disable full-exception reporting.
    testController.exceptionSupplier = () -> new FlowableIllegalStateException("task not active");
    handlerAdvice.setSendFullErrorException(false);
    String body = mockMvc.perform(get("/"))
        .andExpect(status().isBadRequest())
        .andReturn()
        .getResponse()
        .getContentAsString();
    // Only the summary message and the exception message are exposed —
    // no stack trace or exception class details.
    assertThatJson(body)
        .isEqualTo("{"
            + "  message: 'Bad request',"
            + "  exception: 'task not active'"
            + "}");
}
|
/**
 * Returns the tag of this operation, or {@code null} if none has been set.
 */
public String getTag() {
    return tag;
}
|
@Test
void testGetTag() {
    // No tag was configured on the fixture, so the getter must return null.
    assertNull(metadataOperation.getTag());
}
|
/**
 * Infers the output {@link TypeInformation} of a {@link MapFunction} given
 * the type information of its input.
 * Delegates to the four-argument overload with a {@code null} function name
 * and {@code false} for the final flag — presumably "allow missing type
 * info"; TODO confirm against the overload's signature.
 */
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getMapReturnTypes(
        MapFunction<IN, OUT> mapInterface, TypeInformation<IN> inType) {
    return getMapReturnTypes(mapInterface, inType, null, false);
}
|
// Verifies that the type extractor maps a generic Tuple2-based input onto the
// user's Nested subtype, preserving the inner tuple's component types.
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
void testNestedTupleGenerics() {
    RichMapFunction<?, ?> function =
        new RichMapFunction<Nested<String, Integer>, Nested<String, Integer>>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Nested<String, Integer> map(Nested<String, Integer> value)
                    throws Exception {
                return null;
            }
        };
    TypeInformation<?> ti =
        TypeExtractor.getMapReturnTypes(
            function,
            (TypeInformation)
                TypeInformation.of(
                    new TypeHint<
                        Tuple2<String, Tuple2<Integer, Integer>>>() {}));
    // Outer type: a two-field tuple of the user's Nested class.
    assertThat(ti.isTupleType()).isTrue();
    assertThat(ti.getArity()).isEqualTo(2);
    TupleTypeInfo<?> tti = (TupleTypeInfo<?>) ti;
    assertThat(tti.getTypeClass()).isEqualTo(Nested.class);
    assertThat(tti.getTypeAt(0)).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
    assertThat(tti.getTypeAt(1).isTupleType()).isTrue();
    assertThat(tti.getTypeAt(1).getArity()).isEqualTo(2);
    // Nested
    TupleTypeInfo<?> tti2 = (TupleTypeInfo<?>) tti.getTypeAt(1);
    assertThat(tti2.getTypeClass()).isEqualTo(Tuple2.class);
    assertThat(tti2.getTypeAt(0)).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
    assertThat(tti2.getTypeAt(1)).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
}
|
/**
 * Recursively flattens a nested map into a single-level map whose keys are
 * the joined path segments, e.g. {@code {a: {b: 1}}} with separator "_"
 * becomes {@code {a_b: 1}}.
 *
 * @param originalMap the (possibly nested) map to flatten
 * @param parentKey   prefix for all keys; {@code null} or empty means no prefix
 * @param separator   string inserted between path segments
 * @return a new flat map; never {@code null}
 */
public static Map<String, Object> flatten(Map<String, Object> originalMap, String parentKey, String separator) {
    final Map<String, Object> result = new HashMap<>();
    // Robustness: treat a null prefix like an empty one instead of throwing an NPE.
    final String prefix = parentKey == null ? "" : parentKey;
    for (Map.Entry<String, Object> entry : originalMap.entrySet()) {
        final String key = prefix.isEmpty() ? entry.getKey() : prefix + separator + entry.getKey();
        final Object value = entry.getValue();
        if (value instanceof Map) {
            // Nested map: recurse with the joined key as the new prefix.
            @SuppressWarnings("unchecked")
            final Map<String, Object> valueMap = (Map<String, Object>) value;
            result.putAll(flatten(valueMap, key, separator));
        } else {
            result.put(key, value);
        }
    }
    return result;
}
|
@Test
public void flattenHandlesEmptyMap() throws Exception {
    // Flattening an empty map must yield an empty map regardless of prefix/separator.
    assertThat(MapUtils.flatten(Collections.emptyMap(), "", "_")).isEmpty();
}
|
/**
 * Converts a scalar operator tree into the equivalent Kudu predicates by
 * dispatching through the visitor. Returns {@code null} for a {@code null}
 * operator.
 */
public List<KuduPredicate> convert(ScalarOperator operator) {
    return operator == null ? null : operator.accept(this, null);
}
|
@Test
public void testIn() {
    // IN predicate over three integer constants on column f0.
    ConstantOperator value = ConstantOperator.createInt(5);
    ConstantOperator value1 = ConstantOperator.createInt(6);
    ConstantOperator value2 = ConstantOperator.createInt(7);
    ScalarOperator op = new InPredicateOperator(false, F0, value, value1, value2);
    List<KuduPredicate> result = CONVERTER.convert(op);
    // Fixed argument order: JUnit's assertEquals takes (expected, actual),
    // so failure messages report the values correctly.
    Assert.assertEquals("`f0` IN (5, 6, 7)", result.get(0).toString());
}
|
/**
 * Returns the telemetry configuration with the given name, encoded as JSON.
 * Responds with 404 (via nullIsNotFound) when no such configuration exists.
 */
@GET
@Path("{name}")
// NOTE(review): @Consumes on a GET endpoint is ineffective since GET requests
// carry no body — confirm whether this annotation can be removed.
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response getConfig(@PathParam("name") String configName) {
    log.trace(String.format(MESSAGE_CONFIG, QUERY));
    final TelemetryConfig config =
        nullIsNotFound(configService.getConfig(configName), CONFIG_NOT_FOUND);
    final ObjectNode root = codec(TelemetryConfig.class).encode(config, this);
    return ok(root).build();
}
|
@Test
public void testDeleteConfigWithModifyOperation() {
    // The admin service finds the config once and then removes it.
    expect(mockConfigAdminService.getConfig(anyString()))
        .andReturn(telemetryConfig).once();
    mockConfigAdminService.removeTelemetryConfig(anyString());
    replay(mockConfigAdminService);
    final WebTarget wt = target();
    Response response = wt.path(PATH + "/test1")
        .request(MediaType.APPLICATION_JSON_TYPE)
        .delete();
    final int status = response.getStatus();
    // 204 No Content signals a successful delete.
    assertEquals(204, status);
    verify(mockConfigAdminService);
}
|
/**
 * Returns all beans of the given type from the named child context, keyed by
 * bean name.
 * NOTE(review): the @Nullable annotation suggests a null return is possible,
 * but the delegated call's contract may guarantee an empty map instead —
 * confirm whether the annotation is still warranted.
 */
@Nullable
public <T> Map<String, T> getInstancesWithoutAncestors(String name, Class<T> type) {
    return getContext(name).getBeansOfType(type);
}
|
@Test
void getInstancesWithoutAncestors_verifyEmptyForMissing() {
    // Parent context with no beans of the requested type.
    AnnotationConfigApplicationContext parent = new AnnotationConfigApplicationContext();
    parent.refresh();
    FeignClientFactory feignClientFactory = new FeignClientFactory();
    feignClientFactory.setApplicationContext(parent);
    // Register a child context backed by an intentionally empty configuration.
    feignClientFactory.setConfigurations(Lists.newArrayList(getSpec("empty", null, EmptyConfiguration.class)));
    Collection<RequestInterceptor> interceptors = feignClientFactory
        .getInstancesWithoutAncestors("empty", RequestInterceptor.class)
        .values();
    // Missing beans must yield an empty collection, not null.
    assertThat(interceptors).as("Interceptors is not empty").isEmpty();
}
|
/**
 * Adds the given entry to the menu being built: a submenu when it contains
 * one, an action item otherwise. Entries whose "allowed" attribute is
 * explicitly {@code Boolean.FALSE} are skipped.
 */
@Override
public void visit(Entry entry) {
    // Consistent bracing and spacing; previously the guard had no braces and
    // mixed `if(` / `if (` styles.
    if (Boolean.FALSE.equals(entry.getAttribute("allowed"))) {
        return;
    }
    if (containsSubmenu(entry)) {
        addSubmenu(entry);
    } else {
        addActionItem(entry);
    }
}
|
@Test
public void createsSubmenuWithAction() {
    // Parent menu entry backed by a Swing JMenu component.
    Entry parentMenuEntry = new Entry();
    final JMenu parentMenu = new JMenu();
    new EntryAccessor().setComponent(parentMenuEntry, parentMenu);
    parentMenuEntry.addChild(menuEntry);
    // menuEntry contains a child, so visiting it must create a submenu.
    menuEntry.addChild(actionEntry);
    new EntryAccessor().setAction(menuEntry, action);
    menuActionGroupBuilder.visit(menuEntry);
    // The submenu's first item must carry the configured action.
    final JMenuItem menuItem = getFirstSubMenuItem(menuEntry);
    assertThatMenuItemHasCorrectAction(menuItem);
}
|
/**
 * Returns whether the elapsed time since the last update has reached
 * {@code EXPIRATION_MINUTES}, measured against the injected clock.
 */
public boolean isExpired() {
    // Whole minutes elapsed (truncated) compared against the expiry threshold.
    final Duration elapsed = Duration.between(lastUpdatedTime, clock.instant());
    return elapsed.toMinutes() >= EXPIRATION_MINUTES;
}
|
@Test
public void testIsExpired() {
    Instant now = Instant.now();
    // Last-updated timestamp exactly EXPIRATION_MINUTES in the past.
    Instant expiredTime = now.minus(JobUploadStatus.EXPIRATION_MINUTES, ChronoUnit.MINUTES);
    // First instant() call stamps lastUpdatedTime; second is "now" inside isExpired().
    when(clock.instant()).thenReturn(expiredTime, now);
    jobUploadStatus.changeLastUpdatedTime();
    boolean result = jobUploadStatus.isExpired();
    assertTrue(result);
}
|
/**
 * Maps a Calcite {@code RelDataType} to Pinot's {@code ColumnDataType}.
 * ARRAY types are unwrapped to their component type and mapped to the
 * corresponding *_ARRAY variant. NULL maps to UNKNOWN, OTHER/ANY to OBJECT,
 * and any unrecognised scalar type falls back to OBJECT with a warning.
 */
public static ColumnDataType convertToColumnDataType(RelDataType relDataType) {
    SqlTypeName sqlTypeName = relDataType.getSqlTypeName();
    if (sqlTypeName == SqlTypeName.NULL) {
        return ColumnDataType.UNKNOWN;
    }
    boolean isArray = (sqlTypeName == SqlTypeName.ARRAY);
    if (isArray) {
        // For arrays, the mapping is driven by the element type.
        assert relDataType.getComponentType() != null;
        sqlTypeName = relDataType.getComponentType().getSqlTypeName();
    }
    switch (sqlTypeName) {
        case BOOLEAN:
            return isArray ? ColumnDataType.BOOLEAN_ARRAY : ColumnDataType.BOOLEAN;
        case TINYINT:
        case SMALLINT:
        case INTEGER:
            // All small integer widths collapse to INT.
            return isArray ? ColumnDataType.INT_ARRAY : ColumnDataType.INT;
        case BIGINT:
            return isArray ? ColumnDataType.LONG_ARRAY : ColumnDataType.LONG;
        case DECIMAL:
            // Decimal mapping depends on precision/scale; delegated.
            return resolveDecimal(relDataType, isArray);
        case FLOAT:
        case REAL:
            return isArray ? ColumnDataType.FLOAT_ARRAY : ColumnDataType.FLOAT;
        case DOUBLE:
            return isArray ? ColumnDataType.DOUBLE_ARRAY : ColumnDataType.DOUBLE;
        case DATE:
        case TIME:
        case TIMESTAMP:
            // All temporal types are represented as TIMESTAMP.
            return isArray ? ColumnDataType.TIMESTAMP_ARRAY : ColumnDataType.TIMESTAMP;
        case CHAR:
        case VARCHAR:
            return isArray ? ColumnDataType.STRING_ARRAY : ColumnDataType.STRING;
        case BINARY:
        case VARBINARY:
            return isArray ? ColumnDataType.BYTES_ARRAY : ColumnDataType.BYTES;
        case OTHER:
        case ANY:
            return ColumnDataType.OBJECT;
        default:
            // Collections of unsupported element types cannot be represented at all.
            if (relDataType.getComponentType() != null) {
                throw new IllegalArgumentException("Unsupported collection type: " + relDataType);
            }
            LOGGER.warn("Unexpected SQL type: {}, use OBJECT instead", sqlTypeName);
            return ColumnDataType.OBJECT;
    }
}
|
// Verifies that each supported ARRAY element type maps to the matching
// *_ARRAY column data type (small integer widths all collapse to INT_ARRAY).
@Test
public void testConvertToColumnDataTypeForArray() {
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ArraySqlType(new ObjectSqlType(SqlTypeName.BOOLEAN, SqlIdentifier.STAR, true, null, null), true)),
        DataSchema.ColumnDataType.BOOLEAN_ARRAY);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ArraySqlType(new ObjectSqlType(SqlTypeName.TINYINT, SqlIdentifier.STAR, true, null, null), true)),
        DataSchema.ColumnDataType.INT_ARRAY);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ArraySqlType(new ObjectSqlType(SqlTypeName.SMALLINT, SqlIdentifier.STAR, true, null, null), true)),
        DataSchema.ColumnDataType.INT_ARRAY);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ArraySqlType(new ObjectSqlType(SqlTypeName.INTEGER, SqlIdentifier.STAR, true, null, null), true)),
        DataSchema.ColumnDataType.INT_ARRAY);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ArraySqlType(new ObjectSqlType(SqlTypeName.BIGINT, SqlIdentifier.STAR, true, null, null), true)),
        DataSchema.ColumnDataType.LONG_ARRAY);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ArraySqlType(new ObjectSqlType(SqlTypeName.FLOAT, SqlIdentifier.STAR, true, null, null), true)),
        DataSchema.ColumnDataType.FLOAT_ARRAY);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ArraySqlType(new ObjectSqlType(SqlTypeName.DOUBLE, SqlIdentifier.STAR, true, null, null), true)),
        DataSchema.ColumnDataType.DOUBLE_ARRAY);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ArraySqlType(new ObjectSqlType(SqlTypeName.TIMESTAMP, SqlIdentifier.STAR, true, null, null), true)),
        DataSchema.ColumnDataType.TIMESTAMP_ARRAY);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ArraySqlType(new ObjectSqlType(SqlTypeName.CHAR, SqlIdentifier.STAR, true, null, null), true)),
        DataSchema.ColumnDataType.STRING_ARRAY);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ArraySqlType(new ObjectSqlType(SqlTypeName.VARCHAR, SqlIdentifier.STAR, true, null, null), true)),
        DataSchema.ColumnDataType.STRING_ARRAY);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ArraySqlType(new ObjectSqlType(SqlTypeName.VARBINARY, SqlIdentifier.STAR, true, null, null), true)),
        DataSchema.ColumnDataType.BYTES_ARRAY);
}
|
/**
 * Creates encryption properties for per-column encryption: the table-level
 * component is intentionally absent (first argument is empty) and the given
 * per-column information, algorithm, and key provider are used.
 */
public static DwrfTableEncryptionProperties forPerColumn(ColumnEncryptionInformation columnEncryptionInformation, String encryptionAlgorithm, String encryptionProvider)
{
    return new DwrfTableEncryptionProperties(Optional.empty(), Optional.of(columnEncryptionInformation), encryptionAlgorithm, encryptionProvider);
}
|
@Test
public void testEncryptColumns()
{
    // Two encrypted columns with their key references.
    ColumnEncryptionInformation columnEncryptionInformation = fromMap(ImmutableMap.of("c1", "abcd", "c2", "defg"));
    DwrfTableEncryptionProperties properties = forPerColumn(columnEncryptionInformation, "test_algo", "test_prov");
    // Hive properties serialize the column information as a string property.
    assertEquals(properties.toHiveProperties(), ImmutableMap.of(
        ENCRYPT_COLUMNS_KEY, columnEncryptionInformation.toHiveProperty(),
        DWRF_ENCRYPTION_PROVIDER_KEY, "test_prov",
        DWRF_ENCRYPTION_ALGORITHM_KEY, "test_algo"));
    // Table properties carry the column information object itself.
    assertEquals(properties.toTableProperties(), ImmutableMap.of(
        ENCRYPT_COLUMNS, columnEncryptionInformation,
        DWRF_ENCRYPTION_PROVIDER, "test_prov",
        DWRF_ENCRYPTION_ALGORITHM, "test_algo"));
}
|
/**
 * Returns a combine function that computes the arithmetic mean of its
 * numeric inputs as a {@code Double}, accumulating via {@code CountSum}.
 */
public static <NumT extends Number>
    Combine.AccumulatingCombineFn<NumT, CountSum<NumT>, Double> of() {
    return new MeanFn<>();
}
|
@Test
public void testMeanFn() throws Exception {
    // mean(1, 2, 3, 4) == 2.5
    testCombineFn(Mean.of(), Lists.newArrayList(1, 2, 3, 4), 2.5);
}
|
/**
 * FEEL contains(): returns whether {@code string} contains {@code match}.
 * Either parameter being {@code null} yields an invalid-parameter error;
 * when both are null, the "string" parameter is reported (same precedence
 * as checking "string" first).
 */
public FEELFnResult<Boolean> invoke(@ParameterName("string") String string, @ParameterName("match") String match) {
    if (string == null || match == null) {
        final String nullParam = string == null ? "string" : "match";
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, nullParam, "cannot be null"));
    }
    return FEELFnResult.ofResult(string.contains(match));
}
|
@Test
void invokeContains() {
    // Substring, single character, and suffix matches must all return true.
    FunctionTestUtil.assertResult(containsFunction.invoke("test", "es"), true);
    FunctionTestUtil.assertResult(containsFunction.invoke("test", "t"), true);
    FunctionTestUtil.assertResult(containsFunction.invoke("testy", "y"), true);
}
|
/**
 * Applies the given migration rule: resolves the step and threshold for this
 * consumer URL, refreshes the wrapped invoker accordingly, and records the
 * rule on success. Synchronized so concurrent rule updates apply one at a time.
 */
public synchronized void doMigrate(MigrationRule rule) {
    // Pure service-discovery invokers can only run in FORCE_APPLICATION mode.
    if (migrationInvoker instanceof ServiceDiscoveryMigrationInvoker) {
        refreshInvoker(MigrationStep.FORCE_APPLICATION, 1.0f, rule);
        return;
    }
    // initial step : APPLICATION_FIRST
    MigrationStep step = MigrationStep.APPLICATION_FIRST;
    float threshold = -1f;
    try {
        step = rule.getStep(consumerURL);
        threshold = rule.getThreshold(consumerURL);
    } catch (Exception e) {
        // Fall back to the defaults above if the rule cannot be evaluated.
        logger.error(
            REGISTRY_NO_PARAMETERS_URL, "", "", "Failed to get step and threshold info from rule: " + rule, e);
    }
    if (refreshInvoker(step, threshold, rule)) {
        // refresh success, update rule
        setMigrationRule(rule);
    }
}
|
@Test
void test() {
    MigrationClusterInvoker<?> invoker = Mockito.mock(MigrationClusterInvoker.class);
    URL url = Mockito.mock(URL.class);
    Mockito.when(url.getDisplayServiceKey()).thenReturn("test");
    // Parameters resolve to their supplied defaults.
    Mockito.when(url.getParameter(Mockito.any(), (String) Mockito.any())).thenAnswer(i -> i.getArgument(1));
    Mockito.when(url.getOrDefaultApplicationModel()).thenReturn(ApplicationModel.defaultModel());
    MigrationRuleHandler<?> handler = new MigrationRuleHandler<>(invoker, url);
    // All migrations succeed in this scenario.
    Mockito.when(invoker.migrateToForceApplicationInvoker(Mockito.any())).thenReturn(true);
    Mockito.when(invoker.migrateToForceInterfaceInvoker(Mockito.any())).thenReturn(true);
    // Init rule defaults to APPLICATION_FIRST.
    MigrationRule initRule = MigrationRule.getInitRule();
    handler.doMigrate(initRule);
    Mockito.verify(invoker, Mockito.times(1)).migrateToApplicationFirstInvoker(initRule);
    // Each subsequent rule switches the step and must trigger exactly one matching migration.
    MigrationRule rule = Mockito.mock(MigrationRule.class);
    Mockito.when(rule.getStep(url)).thenReturn(MigrationStep.FORCE_APPLICATION);
    handler.doMigrate(rule);
    Mockito.verify(invoker, Mockito.times(1)).migrateToForceApplicationInvoker(rule);
    Mockito.when(rule.getStep(url)).thenReturn(MigrationStep.APPLICATION_FIRST);
    handler.doMigrate(rule);
    Mockito.verify(invoker, Mockito.times(1)).migrateToApplicationFirstInvoker(rule);
    Mockito.when(rule.getStep(url)).thenReturn(MigrationStep.FORCE_INTERFACE);
    handler.doMigrate(rule);
    Mockito.verify(invoker, Mockito.times(1)).migrateToForceInterfaceInvoker(rule);
    // migration failed, current rule not changed
    testMigrationFailed(rule, url, handler, invoker);
    // rule not changed, check migration not actually executed
    testMigrationWithStepUnchanged(rule, url, handler, invoker);
}
|
/**
 * Renames {@code src} to {@code dst}, using the native implementation when
 * it is loaded and falling back to {@link File#renameTo(File)} otherwise.
 *
 * @throws IOException if the Java fallback rename fails (the native path
 *         reports failures through {@code renameTo0})
 */
public static void renameTo(File src, File dst)
    throws IOException {
  if (nativeLoaded) {
    renameTo0(src.getAbsolutePath(), dst.getAbsolutePath());
    return;
  }
  // Java fallback: File#renameTo only signals failure via its return value.
  if (!src.renameTo(dst)) {
    throw new IOException("renameTo(src=" + src + ", dst=" +
        dst + ") failed.");
  }
}
|
/**
 * Opens a file on Windows with SHARE_DELETE and verifies that it can still
 * be read after being renamed while open.
 * Rewritten to let unexpected exceptions propagate: the previous
 * catch-Exception-and-fail(e.getMessage()) pattern discarded the stack
 * trace, making failures hard to diagnose.
 */
@Test (timeout = 30000)
public void testCreateFile() throws Exception {
    assumeWindows();
    LOG.info("Open a file on Windows with SHARE_DELETE shared mode");
    File testfile = new File(TEST_DIR, "testCreateFile");
    assertTrue("Create test subject",
        testfile.exists() || testfile.createNewFile());
    FileDescriptor fd = NativeIO.Windows.createFile(
        testfile.getCanonicalPath(),
        NativeIO.Windows.GENERIC_READ,
        NativeIO.Windows.FILE_SHARE_READ |
        NativeIO.Windows.FILE_SHARE_WRITE |
        NativeIO.Windows.FILE_SHARE_DELETE,
        NativeIO.Windows.OPEN_EXISTING);
    // try-with-resources guarantees the stream is closed on every path.
    try (FileInputStream fin = new FileInputStream(fd)) {
        fin.read();
        // Rename must succeed even while the file is open (SHARE_DELETE).
        File newfile = new File(TEST_DIR, "testRenamedFile");
        assertTrue("Rename failed.", testfile.renameTo(newfile));
        // Reading must still work after the rename.
        fin.read();
    }
}
|
/**
 * Issues a signed JWT for the currently authenticated Jenkins user.
 * Expiry defaults come from the system properties EXPIRY_TIME_IN_MINS and
 * MAX_EXPIRY_TIME_IN_MINS; an explicit expiryTimeInMins must not exceed the
 * maximum or a 400 is raised.
 */
@Override
public JwtToken getToken(@Nullable @QueryParameter("expiryTimeInMins") Integer expiryTimeInMins, @Nullable @QueryParameter("maxExpiryTimeInMins") Integer maxExpiryTimeInMins) {
    // NOTE(review): the default constant is named DEFAULT_EXPIRY_IN_SEC while
    // the property name says MINS; expiryTime is later used as seconds when
    // added to the "exp" claim — confirm the units are intentional.
    long expiryTime= Long.getLong("EXPIRY_TIME_IN_MINS",DEFAULT_EXPIRY_IN_SEC);
    int maxExpiryTime = Integer.getInteger("MAX_EXPIRY_TIME_IN_MINS",DEFAULT_MAX_EXPIRY_TIME_IN_MIN);
    if(maxExpiryTimeInMins != null){
        maxExpiryTime = maxExpiryTimeInMins;
    }
    if(expiryTimeInMins != null){
        // Requested expiry must not exceed the (possibly overridden) maximum.
        if(expiryTimeInMins > maxExpiryTime) {
            throw new ServiceException.BadRequestException(
                String.format("expiryTimeInMins %s can't be greater than %s", expiryTimeInMins, maxExpiryTime));
        }
        // Convert minutes to seconds for the JWT "exp" arithmetic below.
        expiryTime = expiryTimeInMins * 60;
    }
    // Resolve the current user; anonymous users have no User record.
    Authentication authentication = Jenkins.getAuthentication2();
    String userId = authentication.getName();
    User user = User.get(userId, false, Collections.emptyMap());
    String email = null;
    String fullName = null;
    if(user != null) {
        fullName = user.getFullName();
        userId = user.getId();
        Mailer.UserProperty p = user.getProperty(Mailer.UserProperty.class);
        if(p!=null)
            email = p.getAddress();
    }
    // Issuer embeds the plugin version when the plugin is installed.
    Plugin plugin = Jenkins.get().getPlugin("blueocean-jwt");
    String issuer = "blueocean-jwt:"+ ((plugin!=null) ? plugin.getWrapper().getVersion() : "");
    JwtToken jwtToken = new JwtToken();
    // Standard claims: unique id, issuer, subject, display name, timestamps.
    jwtToken.claim.put("jti", UUID.randomUUID().toString().replace("-",""));
    jwtToken.claim.put("iss", issuer);
    jwtToken.claim.put("sub", userId);
    jwtToken.claim.put("name", fullName);
    long currentTime = System.currentTimeMillis()/1000;
    jwtToken.claim.put("iat", currentTime);
    jwtToken.claim.put("exp", currentTime+expiryTime);
    jwtToken.claim.put("nbf", currentTime - DEFAULT_NOT_BEFORE_IN_SEC);
    //set claim
    JSONObject context = new JSONObject();
    JSONObject userObject = new JSONObject();
    userObject.put("id", userId);
    userObject.put("fullName", fullName);
    userObject.put("email", email);
    // Let the authentication store contribute to the context before adding the user.
    JwtAuthenticationStore authenticationStore = getJwtStore( authentication);
    authenticationStore.store(authentication, context);
    context.put("user", userObject);
    jwtToken.claim.put("context", context);
    return jwtToken;
}
|
@Test
public void anonymousUserToken() throws Exception{
    j.jenkins.setSecurityRealm(j.createDummySecurityRealm());
    JenkinsRule.WebClient webClient = j.createWebClient();
    // Fetch a token without logging in: the subject must be "anonymous".
    String token = getToken(webClient);
    Assert.assertNotNull(token);
    // The token must be a signed JWS with a key id header.
    JsonWebStructure jsonWebStructure = JsonWebStructure.fromCompactSerialization(token);
    Assert.assertTrue(jsonWebStructure instanceof JsonWebSignature);
    JsonWebSignature jsw = (JsonWebSignature) jsonWebStructure;
    String kid = jsw.getHeader("kid");
    Assert.assertNotNull(kid);
    // Retrieve the public key for that key id from the JWKS endpoint.
    Page page = webClient.goTo("jwt-auth/jwks/"+kid+"/", "application/json");
    JSONObject jsonObject = JSONObject.fromObject(page.getWebResponse().getContentAsString());
    RsaJsonWebKey rsaJsonWebKey = new RsaJsonWebKey(jsonObject,null);
    JwtConsumer jwtConsumer = new JwtConsumerBuilder()
        .setRequireExpirationTime() // the JWT must have an expiration time
        .setAllowedClockSkewInSeconds(30) // allow some leeway in validating time based claims to account for clock skew
        .setRequireSubject() // the JWT must have a subject claim
        .setVerificationKey(rsaJsonWebKey.getKey()) // verify the sign with the public key
        .build(); // create the JwtConsumer instance
    JwtClaims claims = jwtConsumer.processToClaims(token);
    Assert.assertEquals("anonymous",claims.getSubject());
    // The context claim must identify the anonymous user as well.
    Map<String,Object> claimMap = claims.getClaimsMap();
    Map<String,Object> context = (Map<String, Object>) claimMap.get("context");
    Map<String,String> userContext = (Map<String, String>) context.get("user");
    Assert.assertEquals("anonymous", userContext.get("id"));
}
|
/**
 * Deletes the message with the given GUID for the destination account and
 * device, trying both the current and the deprecated partition key layout,
 * and completes with the deleted envelope if one was found.
 */
public CompletableFuture<Optional<MessageProtos.Envelope>> deleteMessageByDestinationAndGuid(
    final UUID destinationAccountUuid, final Device destinationDevice, final UUID messageUuid) {
  return Stream.of(convertPartitionKey(destinationAccountUuid, destinationDevice),
          convertPartitionKeyDeprecated(destinationAccountUuid, destinationDevice))
      .distinct()
      .map(pk -> deleteMessageByDestinationAndGuid(pk, messageUuid))
      // this combines the futures by producing a future that returns an arbitrary nonempty
      // result if there is one, which should be OK because only one of the keys
      // should produce a nonempty result for any given message uuid
      .reduce((f, g) -> f.thenCombine(g, (a, b) -> a.or(() -> b)))
      // Stream.of(...) above always yields at least one element, so the
      // reduction can never be empty; orElseThrow makes that invariant
      // explicit instead of an unchecked Optional#get.
      .orElseThrow();
}
|
@Test
void testDeleteMessageByDestinationAndGuid() throws Exception {
    // Three messages across two accounts and two devices.
    final UUID destinationUuid = UUID.randomUUID();
    final UUID secondDestinationUuid = UUID.randomUUID();
    final Device primary = DevicesHelper.createDevice((byte) 1);
    final Device device2 = DevicesHelper.createDevice((byte) 2);
    messagesDynamoDb.store(List.of(MESSAGE1), destinationUuid, primary);
    messagesDynamoDb.store(List.of(MESSAGE2), secondDestinationUuid, primary);
    messagesDynamoDb.store(List.of(MESSAGE3), destinationUuid, device2);
    // Sanity check: each mailbox holds exactly its own message.
    assertThat(load(destinationUuid, primary, MessagesDynamoDb.RESULT_SET_CHUNK_SIZE)).isNotNull().hasSize(1)
        .element(0).isEqualTo(MESSAGE1);
    assertThat(load(destinationUuid, device2, MessagesDynamoDb.RESULT_SET_CHUNK_SIZE)).isNotNull()
        .hasSize(1)
        .element(0).isEqualTo(MESSAGE3);
    assertThat(load(secondDestinationUuid, primary, MessagesDynamoDb.RESULT_SET_CHUNK_SIZE)).isNotNull()
        .hasSize(1).element(0).isEqualTo(MESSAGE2);
    // Deleting MESSAGE2 returns the deleted envelope and leaves other mailboxes intact.
    final Optional<MessageProtos.Envelope> deletedMessage = messagesDynamoDb.deleteMessageByDestinationAndGuid(
        secondDestinationUuid, primary,
        UUID.fromString(MESSAGE2.getServerGuid())).get(5, TimeUnit.SECONDS);
    assertThat(deletedMessage).isPresent();
    assertThat(load(destinationUuid, primary, MessagesDynamoDb.RESULT_SET_CHUNK_SIZE)).isNotNull().hasSize(1)
        .element(0).isEqualTo(MESSAGE1);
    assertThat(load(destinationUuid, device2, MessagesDynamoDb.RESULT_SET_CHUNK_SIZE)).isNotNull()
        .hasSize(1)
        .element(0).isEqualTo(MESSAGE3);
    assertThat(load(secondDestinationUuid, primary, MessagesDynamoDb.RESULT_SET_CHUNK_SIZE)).isNotNull()
        .isEmpty();
    // Deleting the same message again yields an empty result.
    final Optional<MessageProtos.Envelope> alreadyDeletedMessage = messagesDynamoDb.deleteMessageByDestinationAndGuid(
        secondDestinationUuid, primary,
        UUID.fromString(MESSAGE2.getServerGuid())).get(5, TimeUnit.SECONDS);
    assertThat(alreadyDeletedMessage).isNotPresent();
}
|
/**
 * Removes every element contained in the given collection.
 * Deliberately iterates and removes through the iterator (rather than a
 * bulk operation) so that each removal goes through {@code remove} and the
 * associated change notification fires — mirroring the Android ArrayList
 * implementation, since the Java 1.8 ArrayList does not call through to
 * {@code remove}.
 *
 * @return {@code true} if at least one element was removed
 */
@Override
public boolean removeAll(Collection<?> collection) {
    boolean removedAny = false;
    for (Iterator<?> iterator = iterator(); iterator.hasNext(); ) {
        if (collection.contains(iterator.next())) {
            iterator.remove();
            removedAny = true;
        }
    }
    return removedAny;
}
|
@Test
public void testRemoveAll() {
    // Removing the first two models must fire one single-item range-removed
    // notification per element, each at index 0 (the list shifts after the
    // first removal).
    List<EpoxyModel<?>> modelsToRemove = new ArrayList<>();
    modelsToRemove.add(modelList.get(0));
    modelsToRemove.add(modelList.get(1));
    modelList.removeAll(modelsToRemove);
    verify(observer, times(2)).onItemRangeRemoved(0, 1);
}
|
/**
 * Creates a MEP on the target device and propagates a remote-MEP entry to
 * every other device that already hosts a MEP in the same MD/MA.
 *
 * @return {@code false} if the MEP already exists (in the store or after a
 *         failed device programming attempt), {@code true} on success
 * @throws CfmConfigException if the device is unknown or not programmable
 */
@Override
public boolean createMep(MdId mdName, MaIdShort maName, Mep newMep) throws CfmConfigException {
    MepKeyId key = new MepKeyId(mdName, maName, newMep.mepId());
    log.debug("Creating MEP " + newMep.mepId() + " on MD {}, MA {} on Device {}",
            mdName, maName, newMep.deviceId().toString());
    if (mepStore.getMep(key).isPresent()) {
        return false;
    }
    //Will throw IllegalArgumentException if ma does not exist
    cfmMdService.getMaintenanceAssociation(mdName, maName);
    DeviceId mepDeviceId = newMep.deviceId();
    if (deviceService.getDevice(mepDeviceId) == null) {
        throw new CfmConfigException("Device not found " + mepDeviceId);
    } else if (!deviceService.getDevice(mepDeviceId).is(CfmMepProgrammable.class)) {
        throw new CfmConfigException("Device " + mepDeviceId + " does not support CfmMepProgrammable behaviour.");
    }
    boolean deviceResult =
            deviceService.getDevice(mepDeviceId).as(CfmMepProgrammable.class).createMep(mdName, maName, newMep);
    log.debug("MEP created on {}", mepDeviceId);
    if (!deviceResult) {
        return deviceResult;
    }
    boolean alreadyExisted = mepStore.createUpdateMep(key, newMep);
    // BUG FIX: this list was previously declared inside the loop, so it was
    // recreated empty on every iteration and the contains() check below never
    // deduplicated devices. It must persist across iterations.
    List<DeviceId> alreadyHandledDevices = new ArrayList<>();
    //Add to other Remote Mep List on other devices
    for (Mep mep : mepStore.getMepsByMdMa(mdName, maName)) {
        if (mep.deviceId().equals(mepDeviceId) ||
                alreadyHandledDevices.contains(mep.deviceId())) {
            continue;
        }
        deviceService.getDevice(mep.deviceId())
                .as(CfmMepProgrammable.class)
                .createMaRemoteMepOnDevice(mdName, maName, newMep.mepId());
        alreadyHandledDevices.add(mep.deviceId());
        log.info("Created RMep entry on {} on device {}",
                mdName.mdName() + "/" + maName.maName(), mep.deviceId());
    }
    return !alreadyExisted;
}
|
@Test
public void testCreateMep() throws CfmConfigException {
    // Stub the MD service so MD1/MA1 resolves to an existing maintenance association.
    expect(mdService.getMaintenanceAssociation(MDNAME1, MANAME1))
            .andReturn(Optional.ofNullable(ma1))
            .anyTimes();
    replay(mdService);
    // Both devices are known and backed by the test driver, which provides
    // the CfmMepProgrammable behaviour the manager requires.
    expect(deviceService.getDevice(DEVICE_ID1)).andReturn(device1).anyTimes();
    expect(deviceService.getDevice(DEVICE_ID2)).andReturn(device2).anyTimes();
    replay(deviceService);
    expect(driverService.getDriver(DEVICE_ID1)).andReturn(testDriver).anyTimes();
    expect(driverService.getDriver(DEVICE_ID2)).andReturn(testDriver).anyTimes();
    replay(driverService);
    MepId mepId3 = MepId.valueOf((short) 3);
    Mep mep3 = DefaultMep.builder(mepId3, DEVICE_ID1, PortNumber.portNumber(1),
            Mep.MepDirection.UP_MEP, MDNAME1, MANAME1).build();
    // Expecting true - MEP 3 was not already in the store, so createMep
    // reports a successful new creation. (The previous comment claimed
    // "Expecting false", which contradicted the assertTrue below.)
    assertTrue(mepManager.createMep(MDNAME1, MANAME1, mep3));
}
|
/**
 * Converts the Bundle to a WritableMap and forwards the event to JS.
 *
 * @return true if a React context was available and the event was forwarded;
 *         false if there was no context to deliver to
 */
public boolean sendEventToJS(String eventName, Bundle data, ReactContext reactContext) {
    // Without a React context there is no JS runtime to deliver the event to.
    if (reactContext == null) {
        return false;
    }
    sendEventToJS(eventName, Arguments.fromBundle(data), reactContext);
    return true;
}
|
@Test
public void sendEventToJS_hasReactContext_emitsEventToJs() throws Exception {
    // With a live React context the call must report success and the payload
    // must reach the RCTDeviceEventEmitter under the given event name.
    WritableMap payload = mock(WritableMap.class);
    JsIOHelper uut = createUUT();
    assertTrue(uut.sendEventToJS("my-event", payload, mReactContext));
    verify(mRCTDeviceEventEmitter).emit("my-event", payload);
}
|
@Override
public void onUpdate(Extension oldExtension, Extension newExtension) {
    // A disposed watcher must ignore all further events.
    if (isDisposed()) {
        return;
    }
    // Only enqueue when the configured update matcher accepts the new revision.
    if (matchers.onUpdateMatcher().match(newExtension)) {
        // TODO filter the event
        queue.addImmediately(new Request(newExtension.getMetadata().getName()));
    }
}
|
@Test
void shouldUpdateExtensionWhenUpdatePredicateAlwaysTrue() {
    // An always-true matcher means every update event must be enqueued.
    when(matchers.onUpdateMatcher()).thenReturn(getEmptyMatcher());
    watcher.onUpdate(createFake("old-fake-name"), createFake("new-fake-name"));
    // The matcher is consulted exactly once and the request carries the NEW
    // extension's name; the delayed add(...) path must not be used.
    verify(matchers, times(1)).onUpdateMatcher();
    verify(queue, times(1)).addImmediately(
        argThat(request -> request.name().equals("new-fake-name")));
    verify(queue, times(0)).add(any());
}
|
/**
 * Creates a match on the IN_PHY_PORT field using the specified value.
 *
 * @param port the physical ingress port number to match on
 * @return a port criterion of type {@code IN_PHY_PORT}
 */
public static Criterion matchInPhyPort(PortNumber port) {
    return new PortCriterion(port, Type.IN_PHY_PORT);
}
|
@Test
public void testMatchInPhyPortMethod() {
    // Build an IN_PHY_PORT criterion for port 1 and verify the port value
    // survives the round trip through the factory method.
    PortNumber physicalPort = portNumber(1);
    Criterion criterion = Criteria.matchInPhyPort(physicalPort);
    PortCriterion portCriterion = checkAndConvert(criterion,
                                                  Criterion.Type.IN_PHY_PORT,
                                                  PortCriterion.class);
    assertThat(portCriterion.port(), is(equalTo(physicalPort)));
}
|
/**
 * Reads the SMT configuration, validates the target type / format pairing,
 * and prepares the schema cache and the (optional) string-timestamp format.
 *
 * @throws ConfigException if a string target type has no format pattern, or
 *         the pattern is not a valid SimpleDateFormat pattern
 */
@Override
public void configure(Map<String, ?> configs) {
    final SimpleConfig simpleConfig = new SimpleConfig(CONFIG_DEF, configs);
    final String fieldName = simpleConfig.getString(FIELD_CONFIG);
    final String targetType = simpleConfig.getString(TARGET_TYPE_CONFIG);
    final String formatPattern = simpleConfig.getString(FORMAT_CONFIG);
    final String unixPrecision = simpleConfig.getString(UNIX_PRECISION_CONFIG);
    schemaUpdateCache = new SynchronizedCache<>(new LRUCache<>(16));
    replaceNullWithDefault = simpleConfig.getBoolean(REPLACE_NULL_WITH_DEFAULT_CONFIG);
    // A string target type is unusable without a pattern to render it with.
    if (targetType.equals(TYPE_STRING) && Utils.isBlank(formatPattern)) {
        throw new ConfigException("TimestampConverter requires format option to be specified when using string timestamps");
    }
    // The pattern is parsed eagerly so a bad pattern fails at configure time
    // rather than on the first record.
    SimpleDateFormat format = null;
    if (!Utils.isBlank(formatPattern)) {
        try {
            format = new SimpleDateFormat(formatPattern);
            format.setTimeZone(UTC);
        } catch (IllegalArgumentException e) {
            throw new ConfigException("TimestampConverter requires a SimpleDateFormat-compatible pattern for string timestamps: "
                + formatPattern, e);
        }
    }
    config = new Config(fieldName, targetType, format, unixPrecision);
}
|
@Test
public void testConfigNoTargetType() {
    // target.type has no default, so configuring with an empty map must be
    // rejected with a ConfigException.
    assertThrows(ConfigException.class, () -> xformValue.configure(Collections.emptyMap()));
}
|
/**
 * Returns a {@link LazyValue} that instantiates the class named in {@code value}.
 * <p>
 * Syntax: {@code className} (no-args constructor) or
 * {@code className,param1,param2,...} (constructor or static method matching
 * the given parameters). Failures are logged via {@code LoggingFacade} and
 * resolve to {@code null} rather than throwing.
 *
 * @param value class name, optionally followed by comma-separated parameters
 * @param resolver resolves references inside parameter values
 * @param addonClassLoaders extra class loaders to search for the class
 */
private static Object parseInstance( String value, Function<String, String> resolver, List<ClassLoader> addonClassLoaders ) {
	return (LazyValue) t -> {
		try {
			if( value.indexOf( ',' ) >= 0 ) {
				// Syntax: className,param1,param2,...
				List<String> parts = splitFunctionParams( value, ',' );
				String className = parts.get( 0 );
				Class<?> cls = findClass( className, addonClassLoaders );
				Constructor<?>[] constructors = cls.getDeclaredConstructors();
				Object result = invokeConstructorOrStaticMethod( constructors, parts, resolver );
				if( result != null )
					return result;

				// BUG FIX: was 'value.indexOf( ',' + 1 )' - the '+ 1' was inside
				// the argument, so ',' (44) promoted to int 45 and indexOf searched
				// for '-'; when absent it returned -1 and substring(-1) threw.
				// The intent is "everything after the first comma".
				LoggingFacade.INSTANCE.logSevere( "FlatLaf: Failed to instantiate '" + className
					+ "': no constructor found for parameters '"
					+ value.substring( value.indexOf( ',' ) + 1 ) + "'.", null );
				return null;
			} else
				return findClass( value, addonClassLoaders ).getDeclaredConstructor().newInstance();
		} catch( Exception ex ) {
			LoggingFacade.INSTANCE.logSevere( "FlatLaf: Failed to instantiate '" + value + "'.", ex );
			return null;
		}
	};
}
|
@Test
void parseInstance() {
	String className = TestInstance.class.getName();
	// A bare class name resolves to a LazyValue that creates the instance
	// via its no-args constructor on first use.
	assertEquals( new TestInstance(), ((LazyValue)UIDefaultsLoader.parseValue( "dummyIcon", className, null )).createValue( null ) );
	assertInstanceEquals( new TestInstance(), null );
	// Comma-separated parameters after the class name are matched against
	// constructors by count and by parsed type (string, boolean, int, float,
	// color), in the order given.
	assertInstanceEquals( new TestInstance( "some string" ), "some string" );
	assertInstanceEquals( new TestInstance( false ), "false" );
	assertInstanceEquals( new TestInstance( true ), "true" );
	assertInstanceEquals( new TestInstance( 123 ), "123" );
	assertInstanceEquals( new TestInstance( 123.456f ), "123.456" );
	assertInstanceEquals( new TestInstance( Color.red ), "#f00" );
	// Multi-parameter constructors: parameter order must be preserved.
	assertInstanceEquals( new TestInstance( "some string", true ), "some string, true" );
	assertInstanceEquals( new TestInstance( "some string", true, 123 ), "some string, true, 123" );
	assertInstanceEquals( new TestInstance( "some string", 123, true ), "some string, 123, true" );
	assertInstanceEquals( new TestInstance( "some string", 123.456f, true ), "some string, 123.456, true" );
	assertInstanceEquals( new TestInstance( 123, "some string" ), "123, some string" );
}
|
/**
 * Returns all processes currently tracked by the process registry.
 *
 * @return collection of running processes
 */
@Override
public Collection<Process> getProcessList() {
    // Delegates to the singleton registry, which owns the live process table.
    ProcessRegistry registry = ProcessRegistry.getInstance();
    return registry.listAll();
}
|
@Test
void assertGetProcessList() {
    // Stubbing the static ProcessRegistry.getInstance() call - this relies on
    // a static mock of ProcessRegistry being active (presumably a MockedStatic
    // set up elsewhere in this test class; verify against the class fixture).
    ProcessRegistry processRegistry = mock(ProcessRegistry.class);
    when(ProcessRegistry.getInstance()).thenReturn(processRegistry);
    // The persist service must delegate straight through to the registry.
    processPersistService.getProcessList();
    verify(processRegistry).listAll();
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.