| focal_method (string, length 13–60.9k) | test_case (string, length 25–109k) |
|---|---|
/**
 * Computes the watermark throttling frame size for the given physical plan.
 *
 * <p>Walks {@code rel} with a {@link GcdCalculatorVisitor}; the visitor reports the
 * GCD of window sizes it found ({@code gcd == 0} when no window aggregation exists)
 * plus an upper bound derived from joins.
 *
 * @param rel         physical plan to inspect
 * @param evalContext evaluation context passed to the visitor
 * @return the GCD capped at the join maximum, or the join maximum alone when no
 *         window aggregation is present
 */
public static long calculate(PhysicalRel rel, ExpressionEvalContext evalContext) {
    GcdCalculatorVisitor visitor = new GcdCalculatorVisitor(evalContext);
    visitor.go(rel);
    long gcd = visitor.gcd;
    long joinCap = visitor.maximumIntervalForJoins;
    if (gcd == 0) {
        // No window aggregation in the plan: use the join value, which the
        // visitor already caps at a reasonable maximum.
        return joinCap;
    }
    // Window aggregation present: never exceed the join cap.
    return gcd <= joinCap ? gcd : joinCap;
}
|
@Test
public void when_twoConsecutiveSlidingWindowsAgg_then_returnGcdOfWindowsSize() {
    // Two nested HOP windows: inner (size 6, slide 2) and outer (size 15, slide 5).
    HazelcastTable table = streamGeneratorTable("s1", 100);
    HazelcastTable table2 = partitionedTable("map", asList(field(KEY, INT), field(VALUE, INT)), 1);
    List<QueryDataType> parameterTypes = asList(INT, INT);
    final String sql = "SELECT window_end, window_end_inner, v, COUNT(v) FROM " +
            "TABLE(HOP(" +
            "  (SELECT v, window_end AS window_end_inner FROM " +
            "     TABLE(HOP(" +
            "       (SELECT * FROM TABLE(IMPOSE_ORDER((SELECT * FROM TABLE(GENERATE_STREAM(10))), DESCRIPTOR(v), 1)))" +
            "       , DESCRIPTOR(v), 6, 2" +
            "     )) GROUP BY v, window_end_inner" +
            "  )" +
            "  , DESCRIPTOR(window_end_inner), 15, 5" +
            ")) " +
            "GROUP BY window_end, window_end_inner, v";
    PhysicalRel optimizedPhysicalRel = optimizePhysical(sql, parameterTypes, table, table2).getPhysical();
    // The optimized plan must contain both window aggregations stacked over the scan.
    assertPlan(optimizedPhysicalRel, plan(
            planRow(0, SlidingWindowAggregatePhysicalRel.class),
            planRow(1, CalcPhysicalRel.class),
            planRow(2, SlidingWindowAggregatePhysicalRel.class),
            planRow(3, CalcPhysicalRel.class),
            planRow(4, FullScanPhysicalRel.class)
    ));
    // The asserted value 1 matches GCD of the window *slides* (2 and 5), not the
    // window sizes. NOTE(review): the previous comment claimed "GCD(15, 6) = 3",
    // which contradicts the assertion below — confirm the calculator uses slides.
    assertThat(WatermarkThrottlingFrameSizeCalculator.calculate(optimizedPhysicalRel, MOCK_EEC))
            .isEqualTo(1L);
}
|
/**
 * Hashes the entry's key, value, and all numeric metadata fields.
 *
 * <p>Each {@code long} field is folded into 32 bits exactly as
 * {@link Long#hashCode(long)} does (high word XOR low word), and combined with
 * the usual 31-multiplier accumulation, so the result is bit-for-bit identical
 * to the hand-rolled {@code (int) (x ^ (x >>> 32))} formulation.
 */
@Override
public int hashCode() {
    int result = super.hashCode();
    result = 31 * result + getKey().hashCode();
    result = 31 * result + getValue().hashCode();
    result = 31 * result + Long.hashCode(getCost());
    result = 31 * result + Long.hashCode(getCreationTime());
    result = 31 * result + Long.hashCode(getExpirationTime());
    result = 31 * result + Long.hashCode(getHits());
    result = 31 * result + Long.hashCode(getLastAccessTime());
    result = 31 * result + Long.hashCode(getLastStoredTime());
    result = 31 * result + Long.hashCode(getLastUpdateTime());
    result = 31 * result + Long.hashCode(getVersion());
    result = 31 * result + Long.hashCode(getTtl());
    return result;
}
|
@Test
public void test_hashCode() {
    // A lazily-evictable view of the same entry must hash identically to the
    // test fixture 'view' (hashCode contract: equal objects, equal hashes).
    EntryView entryView = createLazyEvictableEntryView();
    assertEquals(entryView.hashCode(), view.hashCode());
}
|
/**
 * Builds a windowed stream-stream join topology from the two input stream holders.
 *
 * <p>For a RIGHT join, left/right schemas and formats are swapped and the join is
 * executed as a {@code leftJoin} with the operands reversed, so the rest of the
 * method can treat "left" uniformly as the driving side.
 *
 * @param left                the left input stream
 * @param right               the right input stream
 * @param join                the join plan node (type, formats, window bounds, grace)
 * @param buildContext        runtime context used to build serdes
 * @param streamJoinedFactory factory for the {@link StreamJoined} configuration
 * @return a holder wrapping the joined stream with the join output schema
 */
@SuppressWarnings("deprecation")
public static <K> KStreamHolder<K> build(
    final KStreamHolder<K> left,
    final KStreamHolder<K> right,
    final StreamStreamJoin<K> join,
    final RuntimeBuildContext buildContext,
    final StreamJoinedFactory streamJoinedFactory) {
  final QueryContext queryContext = join.getProperties().getQueryContext();
  final QueryContext.Stacker stacker = QueryContext.Stacker.of(queryContext);
  final LogicalSchema leftSchema;
  final LogicalSchema rightSchema;
  final Formats rightFormats;
  final Formats leftFormats;
  // RIGHT joins are implemented by swapping the sides and using leftJoin below.
  if (join.getJoinType().equals(RIGHT)) {
    leftFormats = join.getRightInternalFormats();
    rightFormats = join.getLeftInternalFormats();
    leftSchema = right.getSchema();
    rightSchema = left.getSchema();
  } else {
    leftFormats = join.getLeftInternalFormats();
    rightFormats = join.getRightInternalFormats();
    leftSchema = left.getSchema();
    rightSchema = right.getSchema();
  }
  final PhysicalSchema leftPhysicalSchema = PhysicalSchema.from(
      leftSchema,
      leftFormats.getKeyFeatures(),
      leftFormats.getValueFeatures()
  );
  final Serde<GenericRow> leftSerde = buildContext.buildValueSerde(
      leftFormats.getValueFormat(),
      leftPhysicalSchema,
      stacker.push(LEFT_SERDE_CTX).getQueryContext()
  );
  final PhysicalSchema rightPhysicalSchema = PhysicalSchema.from(
      rightSchema,
      rightFormats.getKeyFeatures(),
      rightFormats.getValueFeatures()
  );
  final Serde<GenericRow> rightSerde = buildContext.buildValueSerde(
      rightFormats.getValueFormat(),
      rightPhysicalSchema,
      stacker.push(RIGHT_SERDE_CTX).getQueryContext()
  );
  // Key serde comes from the (possibly swapped) left side's key format.
  final Serde<K> keySerde = left.getExecutionKeyFactory().buildKeySerde(
      leftFormats.getKeyFormat(),
      leftPhysicalSchema,
      queryContext
  );
  final StreamJoined<K, GenericRow, GenericRow> joined = streamJoinedFactory.create(
      keySerde,
      leftSerde,
      rightSerde,
      StreamsUtil.buildOpName(queryContext),
      StreamsUtil.buildOpName(queryContext)
  );
  final JoinParams joinParams = JoinParamsFactory
      .create(join.getKeyColName(), leftSchema, rightSchema);
  JoinWindows joinWindows;
  // Grace, as optional, helps to identify if a user specified the GRACE PERIOD syntax in the
  // join window. If specified, then we'll call the new KStreams API ofTimeDifferenceAndGrace()
  // which enables the "spurious" results bugfix with left/outer joins (see KAFKA-10847).
  if (join.getGraceMillis().isPresent()) {
    joinWindows = JoinWindows.ofTimeDifferenceAndGrace(
        join.getBeforeMillis(),
        join.getGraceMillis().get());
  } else {
    joinWindows = JoinWindows.of(join.getBeforeMillis());
  }
  joinWindows = joinWindows.after(join.getAfterMillis());
  final KStream<K, GenericRow> result;
  switch (join.getJoinType()) {
    case LEFT:
      result = left.getStream().leftJoin(
          right.getStream(), joinParams.getJoiner(), joinWindows, joined);
      break;
    case RIGHT:
      // Operands reversed: a RIGHT join is a leftJoin driven by the right stream.
      result = right.getStream().leftJoin(
          left.getStream(), joinParams.getJoiner(), joinWindows, joined);
      break;
    case OUTER:
      result = left.getStream().outerJoin(
          right.getStream(), joinParams.getJoiner(), joinWindows, joined);
      break;
    case INNER:
      result = left.getStream().join(
          right.getStream(), joinParams.getJoiner(), joinWindows, joined);
      break;
    default:
      throw new IllegalStateException("invalid join type");
  }
  return left.withStream(result, joinParams.getSchema());
}
|
@Test
public void shouldDoInnerJoin() {
    // Given:
    givenInnerJoin(L_KEY);
    // When:
    final KStreamHolder<Struct> result = join.build(planBuilder, planInfo);
    // Then: an INNER join must call KStream.join with the value joiner, the
    // grace-less windows, and the StreamJoined config — and nothing else.
    verify(leftKStream).join(
        same(rightKStream),
        eq(new KsqlValueJoiner(LEFT_SCHEMA.value().size(), RIGHT_SCHEMA.value().size(), 0)),
        eq(WINDOWS_NO_GRACE),
        same(joined)
    );
    verifyNoMoreInteractions(leftKStream, rightKStream, resultKStream);
    assertThat(result.getStream(), is(resultKStream));
    assertThat(result.getExecutionKeyFactory(), is(executionKeyFactory));
}
|
/**
 * Resolves the webapp's jar dependencies to Maven artifacts.
 *
 * <p>Keys are jar file names found in {@code WEB-INF/lib}; values are the matching
 * {@link MavenArtifact}s (possibly {@code null} when no pom.xml was found inside a
 * jar and no match could be made by file-name prefix).
 *
 * @return map from jar file name to its resolved artifact, or {@code null} value
 *         when unresolved
 * @throws IOException if reading the webapp resources or pom files fails
 */
public static Map<String, MavenArtifact> getWebappDependencies() throws IOException {
	// list dependencies in WEB-INF/lib
	// and read names, urls, licences in META-INF/maven/.../pom.xml from jar files when available
	final Map<String, MavenArtifact> webappDependencies = getWebappDependenciesFromWebInfLib();
	// when pom.xml not available in some jar files,
	// list all dependencies in webapp's pom.xml if it exists or in the other dependencies' pom.xml,
	// including transitive dependencies
	final List<MavenArtifact> allDependencies = new ArrayList<>(
			getWebappDependenciesFromPomXml());
	for (final MavenArtifact dependency : webappDependencies.values()) {
		if (dependency != null && !dependency.isContained(allDependencies)) {
			allDependencies.add(dependency);
			for (final MavenArtifact transitiveDependency : dependency.getAllDependencies()) {
				if (!transitiveDependency.isContained(allDependencies)) {
					allDependencies.add(transitiveDependency);
				}
			}
		}
	}
	// in order to complete names, urls, licences from all dependencies and parents
	// (fill unresolved entries by matching "artifactId-version" as a jar-name prefix)
	for (final Map.Entry<String, MavenArtifact> entry : webappDependencies.entrySet()) {
		if (entry.getValue() == null) {
			final String jarFileName = entry.getKey();
			for (final MavenArtifact dependency : allDependencies) {
				if (jarFileName.startsWith(
						dependency.getArtifactId() + '-' + dependency.getVersion())) {
					entry.setValue(dependency);
					break;
				}
			}
		}
	}
	return webappDependencies;
}
|
@Test
public void testGetWebappDependencies() throws IOException {
	// Mock a servlet context exposing a webapp pom.xml and two jars in WEB-INF/lib.
	final ServletContext context = createNiceMock(ServletContext.class);
	final String javamelodyDir = "/META-INF/maven/net.bull.javamelody/";
	final String webapp = javamelodyDir + "javamelody-test-webapp/";
	expect(context.getResourcePaths("/META-INF/maven/"))
			.andReturn(Collections.singleton(javamelodyDir)).anyTimes();
	expect(context.getResourcePaths(javamelodyDir)).andReturn(Collections.singleton(webapp))
			.anyTimes();
	final IAnswer<InputStream> answer = () -> getClass().getResourceAsStream("/pom.xml");
	expect(context.getResourceAsStream(webapp + "pom.xml")).andAnswer(answer).anyTimes();
	final Set<String> dependencies = new LinkedHashSet<>(List
			.of("/WEB-INF/lib/jrobin-1.5.9.jar", "/WEB-INF/lib/javamelody-core-1.65.0.jar"));
	expect(context.getResourcePaths("/WEB-INF/lib/")).andReturn(dependencies).anyTimes();
	// Point the jrobin jar entry at the real jar on the test classpath.
	final URL jrobinJar = RrdGraph.class.getProtectionDomain().getCodeSource().getLocation();
	expect(context.getResource("/WEB-INF/lib/jrobin-1.5.9.jar")).andReturn(jrobinJar)
			.anyTimes();
	expect(context.getMajorVersion()).andReturn(5).anyTimes();
	expect(context.getMinorVersion()).andReturn(0).anyTimes();
	replay(context);
	Parameters.initialize(context);
	final Map<String, MavenArtifact> webappDependencies = MavenArtifact.getWebappDependencies();
	assertFalse("getWebappDependencies", webappDependencies.isEmpty());
	verify(context);
	// Resolved artifacts must carry GAV coordinates; jrobin additionally has full metadata.
	for (final MavenArtifact dependency : webappDependencies.values()) {
		if (dependency != null) {
			assertNotNull("groupId", dependency.getGroupId());
			assertNotNull("artifactId", dependency.getArtifactId());
			assertNotNull("version", dependency.getVersion());
			if ("jrobin".equals(dependency.getArtifactId())) {
				assertNotNull("name", dependency.getName());
				assertNotNull("url", dependency.getUrl());
				assertNotNull("licenseUrlsByName", dependency.getLicenseUrlsByName());
				assertNotNull("allDependencies", dependency.getAllDependencies());
				assertNotNull("toString", dependency.toString());
			}
		}
	}
}
|
/**
 * Synchronizes one piece of Distro data to a target cluster member over RPC.
 *
 * @param data         the data to sync
 * @param targetServer address of the target member
 * @return {@code true} if the target no longer exists (nothing to do) or the RPC
 *         response indicates success; {@code false} on unhealthy target or failure
 */
@Override
public boolean syncData(DistroData data, String targetServer) {
    // A target that left the cluster is treated as success — no sync needed.
    if (isNoExistTarget(targetServer)) {
        return true;
    }
    DistroDataRequest request = new DistroDataRequest(data, data.getType());
    Member member = memberManager.find(targetServer);
    if (checkTargetServerStatusUnhealthy(member)) {
        Loggers.DISTRO
                .warn("[DISTRO] Cancel distro sync caused by target server {} unhealthy, key: {}", targetServer,
                        data.getDistroKey());
        return false;
    }
    try {
        Response response = clusterRpcClientProxy.sendRequest(member, request);
        return checkResponse(response);
    } catch (NacosException e) {
        // Log and fall through to false; callers are expected to retry via Distro.
        Loggers.DISTRO.error("[DISTRO-FAILED] Sync distro data failed! key: {}", data.getDistroKey(), e);
    }
    return false;
}
|
@Test
void testSyncDataForMemberDisconnect() throws NacosException {
    // Member exists and is UP, but no RPC connection is established for it
    // (presumably checkTargetServerStatusUnhealthy fails on the disconnected
    // client — TODO confirm), so syncData must bail out before sending.
    when(memberManager.hasMember(member.getAddress())).thenReturn(true);
    when(memberManager.find(member.getAddress())).thenReturn(member);
    member.setState(NodeState.UP);
    assertFalse(transportAgent.syncData(new DistroData(), member.getAddress()));
    verify(clusterRpcClientProxy, never()).sendRequest(any(Member.class), any());
}
|
/**
 * Deflate-compresses the message body into the given output stream.
 *
 * @param exchange the current exchange (used for type conversion)
 * @param graph    the payload; must be convertible to an {@link InputStream}
 * @param stream   the target stream receiving the compressed bytes
 * @throws Exception if the payload cannot be converted or copying fails
 */
@Override
public void marshal(final Exchange exchange, final Object graph, final OutputStream stream) throws Exception {
    // ask for a mandatory type conversion to avoid a possible NPE beforehand as we do copy from the InputStream
    final InputStream is = exchange.getContext().getTypeConverter().mandatoryConvertTo(InputStream.class, exchange, graph);
    final Deflater deflater = new Deflater(compressionLevel);
    final DeflaterOutputStream zipOutput = new DeflaterOutputStream(stream, deflater);
    try {
        IOHelper.copy(is, zipOutput);
    } finally {
        IOHelper.close(is, zipOutput);
        /*
         * As we create the Deflater our self and do not use the stream default
         * (see {@link java.util.zip.DeflaterOutputStream#usesDefaultDeflater})
         * we need to close the Deflater to not risk a OutOfMemoryException
         * in native code parts (see {@link java.util.zip.Deflater#end})
         */
        deflater.end();
    }
}
|
@Test
public void testMarshalTextToZipBestSpeed() throws Exception {
    // Route marshals with BEST_SPEED deflation; ZippedMessageProcessor is
    // expected to validate the compressed payload downstream.
    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("direct:start")
                    .marshal().zipDeflater(Deflater.BEST_SPEED)
                    .process(new ZippedMessageProcessor());
        }
    });
    context.start();
    sendText();
}
|
/**
 * Returns the term map for the given cluster, creating an empty thread-safe
 * map on first access.
 *
 * @param clusterName cluster to look up
 * @return the (never-null) mutable term map for that cluster
 */
public Map<String, Long> getClusterTerm(String clusterName) {
    return clusterTerm.computeIfAbsent(clusterName, k -> new ConcurrentHashMap<>());
}
|
@Test
public void testGetClusterTerm() {
    // Smoke test: looking up an unknown cluster must create an entry, not throw.
    Assertions.assertDoesNotThrow(() -> metadata.getClusterTerm("cluster"));
}
|
/**
 * Parses a single migration SQL statement after applying variable substitution.
 *
 * @param sql       the raw statement text (must be a supported statement type)
 * @param variables variable name/value pairs substituted into the statement
 * @return the substituted text plus, for non-STATEMENT types, the built AST
 * @throws MigrationException if the raw statement fails to parse
 */
public static ParsedCommand parse(
    // CHECKSTYLE_RULES.ON: CyclomaticComplexity
    final String sql, final Map<String, String> variables) {
  validateSupportedStatementType(sql);
  final String substituted;
  try {
    substituted = VariableSubstitutor.substitute(KSQL_PARSER.parse(sql).get(0), variables);
  } catch (ParseFailedException e) {
    throw new MigrationException(String.format(
        "Failed to parse the statement. Statement: %s. Reason: %s",
        sql, e.getMessage()));
  }
  // Re-parse the substituted text to classify and (optionally) build the AST.
  final SqlBaseParser.SingleStatementContext statementContext = KSQL_PARSER.parse(substituted)
      .get(0).getStatement();
  final boolean isStatement = StatementType.get(statementContext.statement().getClass())
      == StatementType.STATEMENT;
  return new ParsedCommand(substituted,
      isStatement ? Optional.empty() : Optional.of(new AstBuilder(TypeRegistry.EMPTY)
          .buildStatement(statementContext)));
}
|
@Test
public void shouldThrowOnMissingSemicolon() {
    // When: parsing a statement without a terminating semicolon
    final MigrationException e = assertThrows(MigrationException.class,
        () -> parse("create stream foo as select * from no_semicolon_after_this"));
    // Then: the parser error is surfaced as a MigrationException
    assertThat(e.getMessage(), containsString("Unmatched command at end of file; missing semicolon"));
}
|
/**
 * Entry point for reading from Cassandra: returns an empty {@link Read} transform
 * to be configured via its {@code withX} builder methods.
 */
public static <T> Read<T> read() {
  return new AutoValue_CassandraIO_Read.Builder<T>().build();
}
|
@Test
public void testReadWithUnfilteredQuery() throws Exception {
  // Query projects only person_id and the writetime of person_name (no WHERE clause).
  String query =
      String.format(
          "select person_id, writetime(person_name) from %s.%s",
          CASSANDRA_KEYSPACE, CASSANDRA_TABLE);
  PCollection<Scientist> output =
      pipeline.apply(
          CassandraIO.<Scientist>read()
              .withHosts(Collections.singletonList(CASSANDRA_HOST))
              .withPort(cassandraPort)
              .withKeyspace(CASSANDRA_KEYSPACE)
              .withTable(CASSANDRA_TABLE)
              .withMinNumberOfSplits(20)
              .withQuery(query)
              .withCoder(SerializableCoder.of(Scientist.class))
              .withEntity(Scientist.class));
  // All rows must be read; name was not projected so it stays null, while the
  // writetime timestamp must be present and positive.
  PAssert.thatSingleton(output.apply("Count", Count.globally())).isEqualTo(NUM_ROWS);
  PAssert.that(output)
      .satisfies(
          input -> {
            for (Scientist sci : input) {
              assertNull(sci.name);
              assertTrue(sci.nameTs != null && sci.nameTs > 0);
            }
            return null;
          });
  pipeline.run();
}
|
/** This dialect/metadata implementation does not support GROUP BY. */
@Override
public boolean supportsGroupBy() {
    return false;
}
|
@Test
void assertSupportsGroupBy() {
    // GROUP BY must be reported as unsupported by this metadata implementation.
    assertFalse(metaData.supportsGroupBy());
}
|
/** Returns this analyzer's display name (the {@code ANALYZER_NAME} constant). */
@Override
public String getName() {
    return ANALYZER_NAME;
}
|
@Test
public void testGetName() {
    // The analyzer must report its canonical display name.
    FalsePositiveAnalyzer analyzer = new FalsePositiveAnalyzer();
    assertEquals("False Positive Analyzer", analyzer.getName());
}
|
/**
 * Splits the storage object referenced by {@code from} and returns the URIs of
 * the resulting parts.
 *
 * @param runContext execution context used for rendering and storage access
 * @return output carrying the URIs produced by {@link StorageService#split}
 * @throws Exception if rendering, URI parsing, or splitting fails
 */
@Override
public Split.Output run(RunContext runContext) throws Exception {
    // 'from' may contain template expressions; render before parsing as a URI.
    URI from = new URI(runContext.render(this.from));
    return Split.Output.builder()
        .uris(StorageService.split(runContext, this, from))
        .build();
}
|
@Test
void rows() throws Exception {
    RunContext runContext = runContextFactory.of();
    // Upload 1000 lines, split into chunks of 10 rows → expect 100 part files.
    URI put = storageUpload(1000);
    Split result = Split.builder()
        .from(put.toString())
        .rows(10)
        .build();
    Split.Output run = result.run(runContext);
    assertThat(run.getUris().size(), is(100));
    // Concatenating all parts must reproduce the original content (plus trailing newline).
    assertThat(readAll(run.getUris()), is(String.join("\n", content(1000)) + "\n"));
}
|
/** Sets the parameter's type identifier. */
public void setType( String type ) {
  this.type = type;
}
|
@Test
public void setType() {
    // Use a mock with the real setter so only setType's effect on the backing
    // field is exercised, then read the private field via reflection.
    JobScheduleParam jobScheduleParam = mock( JobScheduleParam.class );
    doCallRealMethod().when( jobScheduleParam ).setType( any() );
    String type = "hitachi";
    jobScheduleParam.setType( type );
    Assert.assertEquals( type, ReflectionTestUtils.getField( jobScheduleParam, "type" ) );
}
|
/**
 * Parses a client listener-probe payload into a map of group key → MD5.
 *
 * <p>Each record ends with {@code LINE_SEPARATOR_CHAR}; fields within a record are
 * separated by {@code WORD_SEPARATOR_CHAR}. A record is either
 * {@code dataId|group|md5} (legacy, pre-multi-tenant) or
 * {@code dataId|group|md5|tenant} (multi-tenant, tenant in the last position).
 *
 * @param configKeysString raw payload; {@code null}/empty yields an empty map
 * @return map from interned group key to the client's reported MD5
 * @throws IllegalArgumentException on malformed records or more than 10000 entries
 */
public static Map<String, String> getClientMd5Map(String configKeysString) {
    Map<String, String> md5Map = new HashMap<>(5);
    if (null == configKeysString || "".equals(configKeysString)) {
        return md5Map;
    }
    int start = 0;           // start index of the field currently being scanned
    List<String> tmpList = new ArrayList<>(3);  // fields of the current record, except the last
    for (int i = start; i < configKeysString.length(); i++) {
        char c = configKeysString.charAt(i);
        if (c == WORD_SEPARATOR_CHAR) {
            tmpList.add(configKeysString.substring(start, i));
            start = i + 1;
            if (tmpList.size() > 3) {
                // Malformed message and return parameter error.
                throw new IllegalArgumentException("invalid protocol,too much key");
            }
        } else if (c == LINE_SEPARATOR_CHAR) {
            // endValue is the record's final field (may be empty).
            String endValue = "";
            if (start + 1 <= i) {
                endValue = configKeysString.substring(start, i);
            }
            start = i + 1;
            // If it is the old message, the last digit is MD5. The post-multi-tenant message is tenant
            if (tmpList.size() == 2) {
                String groupKey = GroupKey2.getKey(tmpList.get(0), tmpList.get(1));
                groupKey = StringPool.get(groupKey);
                md5Map.put(groupKey, endValue);
            } else {
                String groupKey = GroupKey2.getKey(tmpList.get(0), tmpList.get(1), endValue);
                groupKey = StringPool.get(groupKey);
                md5Map.put(groupKey, tmpList.get(2));
            }
            tmpList.clear();
            // Protect malformed messages
            if (md5Map.size() > 10000) {
                throw new IllegalArgumentException("invalid protocol, too much listener");
            }
        } else {
            sb: {
            }
            // NOTE(review): plain characters accumulate implicitly via the substring
            // bounds above; no StringBuilder is needed here.
        }
    }
    return md5Map;
}
|
@Test
void testGetClientMd5Map() {
    // Legacy 3-field record: dataId=test0, group=test1, md5=test2.
    String configKeysString =
            "test0" + MD5Util.WORD_SEPARATOR_CHAR + "test1" + MD5Util.WORD_SEPARATOR_CHAR + "test2" + MD5Util.LINE_SEPARATOR_CHAR;
    Map<String, String> actualValueMap = MD5Util.getClientMd5Map(configKeysString);
    assertEquals("test2", actualValueMap.get("test0+test1"));
}
|
/** Returns the live map of producer id → pending/completed producer future. */
public ConcurrentLongHashMap<CompletableFuture<Producer>> getProducers() {
    return producers;
}
|
@Test(timeOut = 30000)
public void testBrokerClosedProducerClientRecreatesProducerThenSendCommand() throws Exception {
    resetChannel();
    setChannelConnected();
    setConnectionVersion(ProtocolVersion.v5.getValue());
    serverCnx.cancelKeepAliveTask();
    String producerName = "my-producer";
    // Create a producer and expect success.
    ByteBuf clientCommand1 = Commands.newProducer(successTopicName, 1 /* producer id */, 1 /* request id */,
            producerName, Collections.emptyMap(), false);
    channel.writeInbound(clientCommand1);
    assertThat(getResponse()).isInstanceOf(CommandProducerSuccess.class);
    // Call disconnect method on producer to trigger activity similar to unloading
    Producer producer = serverCnx.getProducers().get(1).get();
    assertNotNull(producer);
    producer.disconnect();
    channel.runPendingTasks();
    assertThat(getResponse()).isInstanceOf(CommandCloseProducer.class);
    // Send message and expect no response (producer has been closed broker-side)
    sendMessage();
    assertTrue(channel.outboundMessages().isEmpty());
    // Re-create the producer with the same id, mimicking the client's recovery path.
    ByteBuf createProducer2 = Commands.newProducer(successTopicName, 1 /* producer id */, 1 /* request id */,
            producerName, Collections.emptyMap(), false);
    channel.writeInbound(createProducer2);
    assertTrue(getResponse() instanceof CommandProducerSuccess);
    // Send message and expect success
    sendMessage();
    assertTrue(getResponse() instanceof CommandSendReceipt);
    channel.finish();
}
|
/**
 * Resolves {@code @Alias} declarations among a synthesized annotation's attributes
 * and rewires each aliased attribute to its alias-chain root.
 *
 * @param synthesizedAnnotation the annotation whose attributes are processed in place
 * @param synthesizer           the owning synthesizer (unused here directly)
 */
@Override
public void process(SynthesizedAnnotation synthesizedAnnotation, AnnotationSynthesizer synthesizer) {
	final Map<String, AnnotationAttribute> attributeMap = synthesizedAnnotation.getAttributes();
	// Record the alias-to-attribute relationships as linked chains.
	final ForestMap<String, AnnotationAttribute> attributeAliasMappings = new LinkedForestMap<>(false);
	attributeMap.forEach((attributeName, attribute) -> {
		final String alias = Opt.ofNullable(attribute.getAnnotation(Alias.class))
			.map(Alias::value)
			.orElse(null);
		if (ObjectUtil.isNull(alias)) {
			return;
		}
		final AnnotationAttribute aliasAttribute = attributeMap.get(alias);
		Assert.notNull(aliasAttribute, "no method for alias: [{}]", alias);
		attributeAliasMappings.putLinkedNodes(alias, aliasAttribute, attributeName, attribute);
	});
	// Resolve aliases: each attribute is replaced by its chain root when one exists.
	attributeMap.forEach((attributeName, attribute) -> {
		final AnnotationAttribute resolvedAttribute = Opt.ofNullable(attributeName)
			.map(attributeAliasMappings::getRootNode)
			.map(TreeEntry::getValue)
			.orElse(attribute);
		// The root's type must be assignable from the aliased attribute's type.
		// NOTE(review): the message passes resolvedAttribute.getClass() where the
		// attribute/method name seems intended — confirm against the error output.
		Assert.isTrue(
			ObjectUtil.isNull(resolvedAttribute)
				|| ClassUtil.isAssignable(attribute.getAttributeType(), resolvedAttribute.getAttributeType()),
			"return type of the root alias method [{}] is inconsistent with the original [{}]",
			resolvedAttribute.getClass(), attribute.getAttributeType()
		);
		if (attribute != resolvedAttribute) {
			attributeMap.put(attributeName, new ForceAliasedAnnotationAttribute(attribute, resolvedAttribute));
		}
	});
	synthesizedAnnotation.setAttributes(attributeMap);
}
|
@Test
public void processTest() {
	AliasAnnotationPostProcessor processor = new AliasAnnotationPostProcessor();
	Map<Class<?>, SynthesizedAnnotation> annotationMap = new HashMap<>();
	SynthesizedAggregateAnnotation synthesizedAnnotationAggregator = new TestSynthesizedAggregateAnnotation(annotationMap);
	AnnotationForTest annotation = ClassForTest.class.getAnnotation(AnnotationForTest.class);
	SynthesizedAnnotation synthesizedAnnotation = new TestSynthesizedAnnotation(synthesizedAnnotationAggregator, annotation);
	annotationMap.put(annotation.annotationType(), synthesizedAnnotation);
	processor.process(synthesizedAnnotation, synthesizedAnnotationAggregator);
	// 'value' is aliased, so it must be wrapped into a forced-alias attribute...
	AnnotationAttribute valueAttribute = synthesizedAnnotation.getAttributes().get("value");
	assertEquals(ReflectUtil.getMethod(AnnotationForTest.class, "value"), valueAttribute.getAttribute());
	assertTrue(valueAttribute.isWrapped());
	assertEquals(ForceAliasedAnnotationAttribute.class, valueAttribute.getClass());
	// ...while 'name' (the alias target/root) stays unwrapped and linked to 'value'.
	AnnotationAttribute nameAttribute = synthesizedAnnotation.getAttributes().get("name");
	assertEquals(ReflectUtil.getMethod(AnnotationForTest.class, "name"), nameAttribute.getAttribute());
	assertFalse(nameAttribute.isWrapped());
	assertEquals(CacheableAnnotationAttribute.class, nameAttribute.getClass());
	assertEquals(nameAttribute, ((WrappedAnnotationAttribute)valueAttribute).getLinked());
}
|
/**
 * Splits a serialized group key of the form {@code dataId+group[+tenant]} into
 * its parts, un-escaping {@code %2B} → {@code +} and {@code %25} → {@code %}.
 *
 * @param groupKey serialized key; literal '+' separates at most three fields
 * @return {@code [dataId, group, tenant]}; {@code dataId}/{@code tenant} may be
 *         {@code null} when absent
 * @throws IllegalArgumentException on too many fields, an empty group, an
 *         unknown escape, or a truncated escape sequence
 */
public static String[] parseKey(String groupKey) {
    StringBuilder sb = new StringBuilder();
    String dataId = null;
    String group = null;
    String tenant = null;
    for (int i = 0; i < groupKey.length(); ++i) {
        char c = groupKey.charAt(i);
        if ('+' == c) {
            // A separator closes the current field: first dataId, then group;
            // a third '+' means too many fields.
            if (null == dataId) {
                dataId = sb.toString();
                sb.setLength(0);
            } else if (null == group) {
                group = sb.toString();
                sb.setLength(0);
            } else {
                throw new IllegalArgumentException("invalid groupkey:" + groupKey);
            }
        } else if ('%' == c) {
            // An escape needs two more characters; reject truncated input
            // explicitly instead of letting charAt throw
            // StringIndexOutOfBoundsException.
            if (i + 2 >= groupKey.length()) {
                throw new IllegalArgumentException("invalid groupkey:" + groupKey);
            }
            char next = groupKey.charAt(++i);
            char nextnext = groupKey.charAt(++i);
            if ('2' == next && 'B' == nextnext) {
                sb.append('+');
            } else if ('2' == next && '5' == nextnext) {
                sb.append('%');
            } else {
                throw new IllegalArgumentException("invalid groupkey:" + groupKey);
            }
        } else {
            sb.append(c);
        }
    }
    // The trailing field is the group when no (non-blank) group was seen,
    // otherwise it is the tenant. (Stdlib equivalent of StringUtils.isBlank.)
    if (group == null || group.trim().isEmpty()) {
        group = sb.toString();
    } else {
        tenant = sb.toString();
    }
    if (group.length() == 0) {
        throw new IllegalArgumentException("invalid groupkey:" + groupKey);
    }
    return new String[] {dataId, group, tenant};
}
|
@Test
void testParseInvalidGroupKey() {
    // Four fields → too many separators.
    String key = "11111+222+333333+444";
    try {
        GroupKey.parseKey(key);
        fail();
    } catch (IllegalArgumentException e) {
        System.out.println(e.toString());
    }
    // Trailing separator with no group → empty group is rejected.
    key = "11111+";
    try {
        GroupKey.parseKey(key);
        fail();
    } catch (IllegalArgumentException e) {
        System.out.println(e.toString());
    }
    // %29 is not a supported escape (only %2B and %25 are).
    key = "11111%29+222";
    try {
        GroupKey.parseKey(key);
        fail();
    } catch (IllegalArgumentException e) {
        System.out.println(e.toString());
    }
    // Escapes are case-sensitive: %2b is rejected, only %2B decodes to '+'.
    key = "11111%2b+222";
    try {
        GroupKey.parseKey(key);
        fail();
    } catch (IllegalArgumentException e) {
        System.out.println(e.toString());
    }
    // %25 decodes to a literal '%'.
    key = "11111%25+222";
    String[] pair = GroupKey.parseKey(key);
    assertEquals("11111%", pair[0]);
    assertEquals("222", pair[1]);
}
|
/**
 * Converts an Arrow schema to a Parquet message type named {@code "root"},
 * returning the mapping between the two along with the per-field type mappings.
 *
 * @param arrowSchema the Arrow schema to convert
 * @return the combined Arrow/Parquet schema mapping
 */
public SchemaMapping fromArrow(Schema arrowSchema) {
    List<Field> fields = arrowSchema.getFields();
    List<TypeMapping> parquetFields = fromArrow(fields);
    MessageType parquetType =
            addToBuilder(parquetFields, Types.buildMessage()).named("root");
    return new SchemaMapping(arrowSchema, parquetType, parquetFields);
}
|
@Test
public void testArrowTimeMillisecondToParquet() {
    // A 32-bit Arrow TIME(MILLISECOND) field must map to an optional Parquet
    // INT32 annotated with a non-UTC-adjusted TIME(MILLIS) logical type.
    // Fix: the converted schema is the *actual* value; it was previously named
    // 'expected' and passed as assertEquals' expected argument, inverting
    // failure messages.
    MessageType actual = converter
        .fromArrow(new Schema(asList(field("a", new ArrowType.Time(TimeUnit.MILLISECOND, 32)))))
        .getParquetSchema();
    Assert.assertEquals(
        Types.buildMessage()
            .addField(Types.optional(INT32)
                .as(LogicalTypeAnnotation.timeType(false, MILLIS))
                .named("a"))
            .named("root"),
        actual);
}
|
/**
 * Boxed-type overload: unboxes and delegates to the primitive {@code add(long)}.
 * Throws {@link NullPointerException} if {@code value} is {@code null}.
 */
@Override
public boolean add(final Long value) {
    return add(value.longValue());
}
|
@Test
public void setsWithTheDifferentValuesAreNotEqual() {
    // Same size, one shared element (1001), one differing element → not equal.
    final LongHashSet other = new LongHashSet(100, -1);
    set.add(1);
    set.add(1001);
    other.add(2);
    other.add(1001);
    assertNotEquals(set, other);
}
|
/**
 * Renders a partition collection for logging: the full listing at TRACE level,
 * otherwise just the count to keep log lines short.
 *
 * @param partitions   the partitions to describe
 * @param traceEnabled whether TRACE logging is on
 *                     (NOTE(review): boxed Boolean — a null unboxes to NPE here; confirm callers never pass null)
 * @return e.g. {@code "( [t1-0, t2-1] )"} when tracing, {@code "2 partition(s)"} otherwise
 */
public static String partitionsToLogString(Collection<TopicIdPartition> partitions, Boolean traceEnabled) {
    if (traceEnabled) {
        // Collection.toString already renders "[a, b, ...]"; the previous
        // single-argument String.join(", ", partitions.toString()) was a no-op
        // wrapper and has been removed (output is byte-identical).
        return String.format("( %s )", partitions);
    }
    return String.format("%s partition(s)", partitions.size());
}
|
@Test
public void testPartitionsToLogString() {
    Uuid uuid1 = Uuid.randomUuid();
    Uuid uuid2 = Uuid.randomUuid();
    List<TopicIdPartition> partitions = Arrays.asList(
            new TopicIdPartition(uuid1, 0, "foo"),
            new TopicIdPartition(uuid2, 1, "bar"));
    // Non-trace: only the count is rendered.
    String response = ShareSession.partitionsToLogString(partitions, false);
    assertEquals("2 partition(s)", response);
    // Trace: the full list toString is rendered inside "( ... )".
    response = ShareSession.partitionsToLogString(partitions, true);
    assertEquals(String.format("( [%s:foo-0, %s:bar-1] )", uuid1, uuid2), response);
}
|
/**
 * Verifies that {@code oldPassword} matches the stored password of user {@code id}.
 *
 * @param id          user id to check
 * @param oldPassword plaintext password supplied by the caller
 * @throws the service exception {@code USER_NOT_EXISTS} when the user is absent,
 *         or {@code USER_PASSWORD_FAILED} when the password does not match
 */
@VisibleForTesting
void validateOldPassword(Long id, String oldPassword) {
    AdminUserDO user = userMapper.selectById(id);
    if (user == null) {
        throw exception(USER_NOT_EXISTS);
    }
    if (!isPasswordMatch(oldPassword, user.getPassword())) {
        throw exception(USER_PASSWORD_FAILED);
    }
}
|
@Test
public void testValidateOldPassword_notExists() {
    // An unknown user id must raise USER_NOT_EXISTS before any password check.
    assertServiceException(() -> userService.validateOldPassword(randomLongId(), randomString()),
            USER_NOT_EXISTS);
}
|
/**
 * Generates a token: an identifiable type-specific prefix followed by the
 * random raw token part.
 */
@Override
public String generate(TokenType tokenType) {
  String rawToken = generateRawToken();
  return buildIdentifiablePartOfToken(tokenType) + rawToken;
}
|
@Test
public void token_does_not_contain_colon() {
  // ':' is reserved (presumably as a token-part delimiter — confirm in the
  // token format spec), so generated tokens must never contain it.
  assertThat(underTest.generate(TokenType.USER_TOKEN)).doesNotContain(":");
}
|
/**
 * Maps the user's role IDs to role names.
 *
 * <p>IDs that cannot be resolved are silently dropped; if the role ID map cannot
 * be loaded at all, an empty map is used and the result is empty.
 *
 * @param user the user whose roles to resolve
 * @return immutable set of resolved role names (possibly empty, never null)
 */
@Override
public Set<String> getRoleNames(User user) {
    final Set<String> roleIds = user.getRoleIds();
    if (roleIds.isEmpty()) {
        return Collections.emptySet();
    }
    Map<String, Role> idMap;
    try {
        idMap = roleService.loadAllIdMap();
    } catch (NotFoundException e) {
        // Degrade gracefully: log and resolve against an empty map.
        LOG.error("Unable to load role ID map. Using empty map.", e);
        idMap = Collections.emptyMap();
    }
    // Transform id → name, then filter out nulls for IDs missing from the map.
    return ImmutableSet.copyOf(
            Iterables.filter(
                    Collections2.transform(roleIds, Roles.roleIdToNameFunction(idMap)),
                    Predicates.notNull()
            )
    );
}
|
@Test
public void testGetRoleNames() throws Exception {
    final UserImplFactory factory = new UserImplFactory(new Configuration(), permissions);
    final UserImpl user = factory.create(new HashMap<>());
    final Role role = createRole("Foo");
    final ImmutableMap<String, Role> map = ImmutableMap.<String, Role>builder()
            .put(role.getId(), role)
            .build();
    when(roleService.loadAllIdMap()).thenReturn(map);
    // No role IDs assigned → empty result.
    assertThat(userService.getRoleNames(user)).isEmpty();
    // Assigned ID resolves to its name.
    user.setRoleIds(Sets.newHashSet(role.getId()));
    assertThat(userService.getRoleNames(user)).containsOnly("Foo");
    // Unresolvable IDs are silently dropped.
    when(roleService.loadAllIdMap()).thenReturn(new HashMap<>());
    assertThat(userService.getRoleNames(user)).isEmpty();
}
|
/**
 * Builds an insertion-ordered map of categorical predictor value → coefficient.
 * If two predictors share the same value string, the later one wins.
 *
 * @param categoricalPredictors predictors to index
 * @return LinkedHashMap preserving the input order
 */
static Map<String, Double> getGroupedCategoricalPredictorMap(final List<CategoricalPredictor> categoricalPredictors) {
    final Map<String, Double> toReturn = new LinkedHashMap<>();
    for (CategoricalPredictor categoricalPredictor : categoricalPredictors) {
        toReturn.put(categoricalPredictor.getValue().toString(),
                     categoricalPredictor.getCoefficient().doubleValue());
    }
    return toReturn;
}
|
@Test
void getGroupedCategoricalPredictorMap() {
    // Three predictors with distinct values (0, 1, 2) and coefficients 0, 1.23, 2.46.
    final List<CategoricalPredictor> categoricalPredictors = new ArrayList<>();
    for (int i = 0; i < 3; i++) {
        String predictorName = "predictorName-" + i;
        double coefficient = 1.23 * i;
        categoricalPredictors.add(PMMLModelTestUtils.getCategoricalPredictor(predictorName, i, coefficient));
    }
    Map<String, Double> retrieved =
            KiePMMLRegressionTableFactory.getGroupedCategoricalPredictorMap(categoricalPredictors);
    assertThat(retrieved).isNotNull();
    assertThat(retrieved).hasSameSizeAs(categoricalPredictors);
    // Every predictor's value must map to exactly its coefficient.
    categoricalPredictors.forEach(categoricalPredictor ->
                                          {
                                              String key = categoricalPredictor.getValue().toString();
                                              assertThat(retrieved).containsKey(key);
                                              assertThat(retrieved.get(key)).isCloseTo(categoricalPredictor.getCoefficient().doubleValue(), Offset.offset(0.0));
                                          });
}
|
/**
 * Looks up the parameter metric for a resource.
 *
 * @param resourceWrapper the resource; may be {@code null}
 * @return the metric, or {@code null} when the resource (or its name) is null
 *         or no metric has been registered
 */
public static ParameterMetric getParamMetric(ResourceWrapper resourceWrapper) {
    if (resourceWrapper == null || resourceWrapper.getName() == null) {
        return null;
    }
    return metricsMap.get(resourceWrapper.getName());
}
|
@Test
public void testGetNullParamMetric() {
    // A null resource must yield null, not a NullPointerException.
    assertNull(ParameterMetricStorage.getParamMetric(null));
}
|
/**
 * Returns whether out-of-band data is present either before (leading) or after
 * (trailing) the in-band payload. Synchronized with the writer methods.
 */
public synchronized boolean hasOutOfBandData() {
    return hasLeadingOutOfBandData() || hasTrailingOutOfBandData();
}
|
@Test
public void testhasOutOfBandData() throws Exception {
    assertFalse(instance.hasOutOfBandData(), "Unexpected initial value");
    // OOB bytes alone, with no in-band payload yet, do not count as leading/trailing.
    instance.write(buildTestBytes(true, true, true));
    assertFalse(instance.hasOutOfBandData());
    // Once in-band data follows, the earlier OOB bytes become "leading".
    instance.write("BLAH".getBytes());
    assertTrue(instance.hasOutOfBandData());
    instance.reset();
    assertFalse(instance.hasOutOfBandData());
    // OOB bytes after in-band data count as "trailing".
    instance.write("BLAH".getBytes());
    instance.write(buildTestBytes(true, true, true));
    assertTrue(instance.hasOutOfBandData());
}
|
/**
 * Ensures the URL's path ends with a trailing slash.
 *
 * <p>URLs whose path already ends in {@code '/'} are returned unchanged; all
 * others get a {@code '/'} appended to the file part (which, per
 * {@link URL#getFile()}, includes any query string).
 *
 * @param originalURL the URL to normalize
 * @return the original URL, or a copy with a trailing slash appended
 * @throws IllegalArgumentException if the rebuilt URL is malformed (shouldn't happen)
 */
public static URL appendTrailingSlash(URL originalURL) {
    if (originalURL.getPath().endsWith("/")) {
        return originalURL;
    }
    try {
        return new URL(originalURL.getProtocol(),
                       originalURL.getHost(),
                       originalURL.getPort(),
                       originalURL.getFile() + '/');
    } catch (MalformedURLException ignored) { // shouldn't happen
        throw new IllegalArgumentException("Invalid resource URL: " + originalURL);
    }
}
|
// NOTE(review): method name appears to be missing "Add" ("Doesnt[Add]ASlash");
// left unchanged to avoid churning test reports.
@Test
void appendTrailingSlashDoesntASlashWhenOneIsAlreadyPresent() {
    final URL url = getClass().getResource("/META-INF/");
    // Precondition: the resource URL already ends with a slash.
    assertThat(url.toExternalForm())
            .endsWith("/");
    // Appending must not create a double slash and must return an equal URL.
    assertThat(ResourceURL.appendTrailingSlash(url).toExternalForm())
            .doesNotMatch(".*//$");
    assertThat(url)
            .isEqualTo(ResourceURL.appendTrailingSlash(url));
}
|
/**
 * Creates the row collector appropriate for this manager's mode: a multi-table
 * collector (with per-table serializers) when {@code isMultiTable} is set,
 * otherwise a single-table collector.
 *
 * @param handover       handover buffer between reader and emitter
 * @param checkpointLock lock guarding checkpoint-consistent emission
 * @param envOptionsInfo environment options forwarded to the collector
 * @return the configured collector
 */
public InternalRowCollector getInternalRowCollector(
        Handover<InternalRow> handover,
        Object checkpointLock,
        Map<String, String> envOptionsInfo) {
    if (isMultiTable) {
        return new InternalMultiRowCollector(
                handover, checkpointLock, rowSerializationMap, envOptionsInfo);
    } else {
        return new InternalRowCollector(
                handover, checkpointLock, rowSerialization, envOptionsInfo);
    }
}
|
@Test
public void testMultiReaderConverter() throws IOException {
    initSchema();
    initData();
    MultiTableManager multiTableManager =
            new MultiTableManager(
                    new CatalogTable[] {catalogTable1, catalogTable2, catalogTable3});
    InternalMultiRowCollector internalMultiRowCollector =
            (InternalMultiRowCollector)
                    multiTableManager.getInternalRowCollector(null, null, null);
    Map<String, InternalRowConverter> rowSerializationMap =
            internalMultiRowCollector.getRowSerializationMap();
    // Row 1: converted fields must match the expected row field-by-field,
    // with maps and nested rows compared structurally.
    InternalRow internalRow =
            rowSerializationMap.get(seaTunnelRow1.getTableId()).convert(seaTunnelRow1);
    for (int v = 0; v < specificInternalRow2.numFields(); v++) {
        if (specificInternalRow2.genericGet(v) instanceof ArrayBasedMapData) {
            Assertions.assertEquals(
                    specificInternalRow2.getMap(v).keyArray(),
                    internalRow.getMap(v).keyArray());
            Assertions.assertEquals(
                    specificInternalRow2.getMap(v).valueArray(),
                    internalRow.getMap(v).valueArray());
        } else if (specificInternalRow2.genericGet(v) instanceof SpecificInternalRow) {
            SpecificInternalRow expected =
                    (SpecificInternalRow) specificInternalRow2.genericGet(v);
            SpecificInternalRow actual =
                    (SpecificInternalRow) ((SpecificInternalRow) internalRow).genericGet(v);
            // Fix: the inner loop previously declared 'o' but tested and
            // incremented the outer counter 'v' ("for (int o = 0; v < ...; v++)"),
            // corrupting the outer loop and comparing the wrong field indices.
            for (int o = 0; o < expected.numFields(); o++) {
                if (expected.genericGet(o) instanceof ArrayBasedMapData) {
                    Assertions.assertEquals(
                            expected.getMap(o).keyArray(), actual.getMap(o).keyArray());
                    Assertions.assertEquals(
                            expected.getMap(o).valueArray(), actual.getMap(o).valueArray());
                } else {
                    Assertions.assertEquals(
                            expected.genericGet(o),
                            actual.genericGet(o));
                }
            }
        } else {
            Assertions.assertEquals(
                    specificInternalRow2.genericGet(v),
                    ((SpecificInternalRow) internalRow).genericGet(v));
        }
    }
    // Row 3: whole-row equality plus a field-by-field sanity check.
    InternalRow internalRow3 =
            rowSerializationMap.get(seaTunnelRow3.getTableId()).convert(seaTunnelRow3);
    Assertions.assertEquals(specificInternalRow3, internalRow3);
    for (int v = 0; v < specificInternalRow3.numFields(); v++) {
        Assertions.assertEquals(
                specificInternalRow3.genericGet(v),
                ((SpecificInternalRow) internalRow3).genericGet(v));
    }
}
|
public static String getHttpMethod(Exchange exchange, Endpoint endpoint) {
        // Resolution order: explicit HTTP_METHOD header, then query string
        // (header or endpoint URI) implies GET, then a non-null body implies POST,
        // otherwise GET.
        Object headerMethod = exchange.getIn().getHeader(Exchange.HTTP_METHOD);
        if (headerMethod != null) {
            if (headerMethod instanceof String) {
                return (String) headerMethod;
            }
            if (headerMethod instanceof Enum) {
                // e.g. org.apache.camel HttpMethods enum — use its name.
                return ((Enum<?>) headerMethod).name();
            }
            // Any other type: best-effort conversion via the type converter.
            return exchange.getContext().getTypeConverter().tryConvertTo(String.class, exchange, headerMethod);
        }
        boolean hasQuery = exchange.getIn().getHeader(Exchange.HTTP_QUERY) != null
                || endpoint.getEndpointUri().indexOf('?') != -1;
        if (hasQuery) {
            return GET_METHOD;
        }
        // A body to send means POST; otherwise default to GET.
        return exchange.getIn().getBody() != null ? POST_METHOD : GET_METHOD;
    }
|
@Test
    public void testGetMethodFromMethodHeaderEnum() {
        // An Enum-typed HTTP_METHOD header must be resolved via Enum#name().
        Exchange exchange = Mockito.mock(Exchange.class);
        Message message = Mockito.mock(Message.class);
        Mockito.when(exchange.getIn()).thenReturn(message);
        Mockito.when(message.getHeader(Exchange.HTTP_METHOD)).thenReturn(HttpMethods.GET);
        // Endpoint may be null: the header check short-circuits before the endpoint is consulted.
        assertEquals("GET", AbstractHttpSpanDecorator.getHttpMethod(exchange, null));
    }
|
public List<String> keyNames() {
    // Accessor for the ordered list of key names backing this instance.
    return keyNames;
  }
|
@Test void oneFunction_keyNames() {
    // The single-key function should report exactly its declared key name.
    assertThat(oneFunction.keyNames()).containsExactly("one");
  }
|
public RuntimeOptionsBuilder parse(Map<String, String> properties) {
        // Adapts the map to a key-lookup function and delegates to the functional overload.
        return parse(properties::get);
    }
|
@Test
    void should_parse_features_and_preserve_existing_tag_filters() {
        // Supplying a features property must not discard tag filters already
        // configured on the RuntimeOptions the parsed options are built on.
        RuntimeOptions existing = RuntimeOptions.defaultOptions();
        existing.setTagExpressions(Collections.singletonList(TagExpressionParser.parse("@example")));
        properties.put(Constants.FEATURES_PROPERTY_NAME, "classpath:com/example.feature");
        RuntimeOptions options = cucumberPropertiesParser.parse(properties).build(existing);
        List<String> tagExpressions = options.getTagExpressions().stream()
            .map(Object::toString)
            .collect(toList());
        assertAll(
            () -> assertThat(options.getFeaturePaths(), contains(
                URI.create("classpath:com/example.feature"))),
            () -> assertThat(tagExpressions, contains("@example")));
    }
|
@Override
    public T build(ConfigurationSourceProvider provider, String path) throws IOException, ConfigurationException {
        // Reads the configuration at `path`, parses it into a JSON tree, and delegates
        // to build(JsonNode, String). Empty input is rejected explicitly because
        // readTree returns null for an empty document instead of throwing.
        try (InputStream input = provider.open(requireNonNull(path))) {
            final JsonNode node = mapper.readTree(createParser(input));
            if (node == null) {
                throw ConfigurationParsingException
                    .builder("Configuration at " + path + " must not be empty")
                    .build(path);
            }
            return build(node, path);
        } catch (JsonParseException e) {
            // Surface syntax errors with location details instead of a raw Jackson exception.
            throw ConfigurationParsingException
                .builder("Malformed " + formatName)
                .setCause(e)
                .setLocation(e.getLocation())
                .setDetail(e.getMessage())
                .build(path);
        }
    }
|
@Test
    void throwsAnExceptionOnEmptyFiles() {
        // An empty configuration file must be reported as a parsing error,
        // not silently produce a null configuration.
        assertThatExceptionOfType(ConfigurationParsingException.class)
            .isThrownBy(() -> factory.build(configurationSourceProvider, emptyFile))
            .withMessageContaining(" * Configuration at " + emptyFile + " must not be empty");
    }
|
@VisibleForTesting
  Path getJarArtifact() throws IOException {
    // Locates the JAR produced by the project build, honoring maven-jar-plugin's
    // <classifier>/<outputDirectory> configuration and Spring Boot repackaging
    // (which may leave the original JAR as "*.jar.original").
    Optional<String> classifier = Optional.empty();
    Path buildDirectory = Paths.get(project.getBuild().getDirectory());
    Path outputDirectory = buildDirectory;
    // Read <classifier> and <outputDirectory> from maven-jar-plugin.
    Plugin jarPlugin = project.getPlugin("org.apache.maven.plugins:maven-jar-plugin");
    if (jarPlugin != null) {
      for (PluginExecution execution : jarPlugin.getExecutions()) {
        if ("default-jar".equals(execution.getId())) {
          Xpp3Dom configuration = (Xpp3Dom) execution.getConfiguration();
          classifier = getChildValue(configuration, "classifier");
          Optional<String> directoryString = getChildValue(configuration, "outputDirectory");
          if (directoryString.isPresent()) {
            outputDirectory = project.getBasedir().toPath().resolve(directoryString.get());
          }
          break;
        }
      }
    }
    String finalName = project.getBuild().getFinalName();
    String suffix = ".jar";
    Optional<Xpp3Dom> bootConfiguration = getSpringBootRepackageConfiguration();
    if (bootConfiguration.isPresent()) {
      log(LogEvent.lifecycle("Spring Boot repackaging (fat JAR) detected; using the original JAR"));
      // Spring renames original JAR only when replacing it, so check if the paths are clashing.
      Optional<String> bootFinalName = getChildValue(bootConfiguration.get(), "finalName");
      Optional<String> bootClassifier = getChildValue(bootConfiguration.get(), "classifier");
      boolean sameDirectory = outputDirectory.equals(buildDirectory);
      // If Boot <finalName> is undefined, it uses the default project <finalName>.
      boolean sameFinalName = !bootFinalName.isPresent() || finalName.equals(bootFinalName.get());
      boolean sameClassifier = classifier.equals(bootClassifier);
      if (sameDirectory && sameFinalName && sameClassifier) {
        suffix = ".jar.original";
      }
    }
    String noSuffixJarName = finalName + (classifier.isPresent() ? '-' + classifier.get() : "");
    Path jarPath = outputDirectory.resolve(noSuffixJarName + suffix);
    log(LogEvent.debug("Using JAR: " + jarPath));
    if (".jar".equals(suffix)) {
      return jarPath;
    }
    // "*" in "java -cp *" doesn't work if JAR doesn't end with ".jar". Copy the JAR with a new name
    // ending with ".jar".
    Path tempDirectory = tempDirectoryProvider.newDirectory();
    Path newJarPath = tempDirectory.resolve(noSuffixJarName + ".original.jar");
    Files.copy(jarPath, newJarPath);
    return newJarPath;
  }
|
@Test
  public void testGetJarArtifact_classifier() throws IOException {
    // A <classifier> configured on maven-jar-plugin's default-jar execution must be
    // appended to the final name when resolving the JAR path.
    when(mockBuild.getDirectory()).thenReturn(Paths.get("/foo/bar").toString());
    when(mockBuild.getFinalName()).thenReturn("helloworld-1");
    when(mockMavenProject.getPlugin("org.apache.maven.plugins:maven-jar-plugin"))
        .thenReturn(mockPlugin);
    when(mockPlugin.getExecutions()).thenReturn(Arrays.asList(mockPluginExecution));
    when(mockPluginExecution.getId()).thenReturn("default-jar");
    when(mockPluginExecution.getConfiguration()).thenReturn(pluginConfiguration);
    addXpp3DomChild(pluginConfiguration, "classifier", "a-class");
    assertThat(mavenProjectProperties.getJarArtifact())
        .isEqualTo(Paths.get("/foo/bar/helloworld-1-a-class.jar"));
  }
|
public static InMemorySorter create(Options options) {
    // Static factory mirroring the creation API of the other sorter implementations.
    return new InMemorySorter(options);
  }
|
@Test
  public void testEmptyKeyValueElement() throws Exception {
    // The sorter must accept an empty key/value record without failing.
    SorterTestUtils.testEmptyKeyValueElement(InMemorySorter.create(new InMemorySorter.Options()));
  }
|
@Override
    public NSImage folderIcon(final Integer size) {
        // The previous implementation retried the identical iconNamed("NSFolder", size)
        // lookup when the first call returned null; a second call with the same
        // arguments cannot yield a different result, so the dead fallback branch
        // is removed. NOTE(review): if a distinct fallback icon was intended here,
        // it should be requested under a different icon name.
        return this.iconNamed("NSFolder", size);
    }
|
@Test
    public void testFolderIcon32() {
        // A 32px folder icon should be valid, non-template, sized exactly 32x32,
        // and carry at least one image representation.
        final NSImage icon = new NSImageIconCache().folderIcon(32);
        assertNotNull(icon);
        assertTrue(icon.isValid());
        assertFalse(icon.isTemplate());
        assertEquals(32, icon.size().width.intValue());
        assertEquals(32, icon.size().height.intValue());
        assertTrue(icon.representations().count().intValue() >= 1);
    }
|
protected static void checkMandatoryProperties(Map<String, String> props, String[] mandatoryProps) {
    // Collect every mandatory property that is absent, preserving declaration order.
    StringBuilder missing = new StringBuilder();
    String separator = "";
    for (String requiredKey : mandatoryProps) {
      if (props.containsKey(requiredKey)) {
        continue;
      }
      missing.append(separator).append(requiredKey);
      separator = ", ";
    }
    // Module key falls back to the project key so the error names the right module.
    String moduleKey = StringUtils.defaultIfBlank(props.get(MODULE_KEY_PROPERTY), props.get(CoreProperties.PROJECT_KEY_PROPERTY));
    if (missing.length() > 0) {
      throw MessageException.of("You must define the following mandatory properties for '" + (moduleKey == null ? "Unknown" : moduleKey) + "': " + missing);
    }
  }
|
@Test
  public void shouldFailIfMandatoryPropertiesAreNotPresentButWithProjectKey() {
    // When the project key is set, the error message must reference it and list
    // only the mandatory properties that are actually missing.
    Map<String, String> props = new HashMap<>();
    props.put("foo1", "bla");
    props.put("sonar.projectKey", "my-project");
    assertThatThrownBy(() -> ProjectReactorBuilder.checkMandatoryProperties(props, new String[] {"foo1", "foo2", "foo3"}))
      .isInstanceOf(MessageException.class)
      .hasMessage("You must define the following mandatory properties for 'my-project': foo2, foo3");
  }
|
@SuppressWarnings("unchecked")
    public static String encode(Type parameter) {
        // ABI-encodes a single parameter by dispatching on its runtime type.
        // NOTE: this instanceof chain is order-sensitive (e.g. StaticArray and
        // DynamicStruct must be tested before DynamicArray); do not reorder branches.
        if (parameter instanceof NumericType) {
            return encodeNumeric(((NumericType) parameter));
        } else if (parameter instanceof Address) {
            return encodeAddress((Address) parameter);
        } else if (parameter instanceof Bool) {
            return encodeBool((Bool) parameter);
        } else if (parameter instanceof Bytes) {
            return encodeBytes((Bytes) parameter);
        } else if (parameter instanceof DynamicBytes) {
            return encodeDynamicBytes((DynamicBytes) parameter);
        } else if (parameter instanceof Utf8String) {
            return encodeString((Utf8String) parameter);
        } else if (parameter instanceof StaticArray) {
            // Static arrays of dynamic structs need offset-based encoding.
            if (DynamicStruct.class.isAssignableFrom(
                    ((StaticArray) parameter).getComponentType())) {
                return encodeStaticArrayWithDynamicStruct((StaticArray) parameter);
            } else {
                return encodeArrayValues((StaticArray) parameter);
            }
        } else if (parameter instanceof DynamicStruct) {
            return encodeDynamicStruct((DynamicStruct) parameter);
        } else if (parameter instanceof DynamicArray) {
            return encodeDynamicArray((DynamicArray) parameter);
        } else if (parameter instanceof PrimitiveType) {
            // Primitive wrappers are converted to their Solidity type and re-encoded.
            return encode(((PrimitiveType) parameter).toSolidityType());
        } else {
            throw new UnsupportedOperationException(
                    "Type cannot be encoded: " + parameter.getClass());
        }
    }
|
@Test
    public void testPrimitiveDouble() {
        // Double has no Solidity counterpart, so encoding must be rejected.
        assertThrows(
                UnsupportedOperationException.class,
                () -> encode(new org.web3j.abi.datatypes.primitive.Double(0)));
    }
|
@Override
  public HostToKeyMapper<Integer> getAllPartitionsMultipleHosts(URI serviceUri, int numHostPerPartition)
      throws ServiceUnavailableException
  {
    // Delegates with null keys/hint: map every partition to up to numHostPerPartition hosts.
    return getHostToKeyMapper(serviceUri, null, numHostPerPartition, null);
  }
|
@Test(dataProvider = "ringFactories")
  public void testAllPartitionMultipleHosts(RingFactory<URI> ringFactory)
      throws URISyntaxException, ServiceUnavailableException
  {
    // Requests two hosts per partition and verifies the mapper covers all partitions.
    URI serviceURI = new URI("d2://articles");
    ConsistentHashKeyMapper mapper = getConsistentHashKeyMapper(ringFactory);
    HostToKeyMapper<Integer> result = mapper.getAllPartitionsMultipleHosts(serviceURI, 2);
    verifyHostToMapperWithoutKeys(result);
  }
|
public static <FROM, TO> MappedCondition<FROM, TO> mappedCondition(Function<FROM, TO> mapping, Condition<TO> condition,
                                                                     String mappingDescription, Object... args) {
    // Reject a null description eagerly so the failure points at this factory call;
    // the mapping function itself is validated by the MappedCondition constructor.
    requireNonNull(mappingDescription, "The given mappingDescription should not be null");
    String formattedDescription = format(mappingDescription, args);
    return new MappedCondition<>(mapping, condition, formattedDescription);
  }
|
@Test
  void mappedCondition_with_description_and_null_mapping_function_should_throw_NPE() {
    // The null-mapping check is enforced even when a description is supplied.
    thenNullPointerException().isThrownBy(() -> mappedCondition(null, isBarString, "::toString"))
        .withMessage("The given mapping function should not be null");
  }
|
public boolean isFound() {
        // True when the path calculation located a route between the requested nodes.
        return found;
    }
|
@Test
    public void testCalcInstructionsRoundaboutBegin() {
        // Routing from node 2 to 8 through the roundabout should produce a single
        // roundabout instruction (exit 3) followed by the arrival instruction.
        Weighting weighting = new SpeedWeighting(mixedCarSpeedEnc);
        Path p = new Dijkstra(roundaboutGraph.g, weighting, TraversalMode.NODE_BASED)
                .calcPath(2, 8);
        assertTrue(p.isFound());
        InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, mixedEncodingManager, tr);
        List<String> tmpList = getTurnDescriptions(wayList);
        assertEquals(List.of("At roundabout, take exit 3 onto 5-8",
                "arrive at destination"),
                tmpList);
    }
|
public static List<PropertyDefinition> all() {
    // Aggregates all core property definitions: those contributed by other core
    // modules first, then the inline definitions declared below.
    List<PropertyDefinition> defs = new ArrayList<>();
    defs.addAll(IssueExclusionProperties.all());
    defs.addAll(ExclusionProperties.all());
    defs.addAll(SecurityProperties.all());
    defs.addAll(DebtProperties.all());
    defs.addAll(PurgeProperties.all());
    defs.addAll(EmailSettings.definitions());
    defs.addAll(ScannerProperties.all());
    defs.addAll(asList(
      PropertyDefinition.builder(CoreProperties.MODULE_LEVEL_ARCHIVED_SETTINGS)
        .name("Archived Sub-Projects Settings")
        .description("DEPRECATED - List of the properties that were previously configured at sub-project / module level. " +
          "These properties are not used anymore and should now be configured at project level. When you've made the " +
          "necessary changes, clear this setting to prevent analysis from showing a warning about it.")
        .category(CoreProperties.CATEGORY_GENERAL)
        .subCategory(CoreProperties.SUBCATEGORY_MODULES)
        .onlyOnQualifiers(Qualifiers.PROJECT)
        .type(TEXT)
        .build(),
      PropertyDefinition.builder(CoreProperties.SERVER_BASE_URL)
        .name("Server base URL")
        .description(
          "HTTP(S) URL of this SonarQube server, such as <i>https://yourhost.yourdomain/sonar</i>. "
            + "This value is used outside SonarQube itself, e.g. for PR decoration, emails, etc.")
        .category(CoreProperties.CATEGORY_GENERAL)
        .build(),
      PropertyDefinition.builder(SONAR_PROJECTCREATION_MAINBRANCHNAME)
        .name("Default main branch name")
        .category(CoreProperties.CATEGORY_GENERAL)
        .subCategory(SUBCATEGORY_PROJECT_CREATION)
        .description("Each project has a main branch at creation. This setting defines the instance-wide default main branch name. "
          + " A user can override this when creating a project. This setting does not apply to projects imported from a DevOps platform.")
        .type(STRING)
        .defaultValue("main")
        .build(),
      PropertyDefinition.builder(CoreProperties.ENCRYPTION_SECRET_KEY_PATH)
        .name("Encryption secret key path")
        .description("Path to a file that contains encryption secret key that is used to encrypting other settings.")
        .type(STRING)
        .hidden()
        .build(),
      PropertyDefinition.builder("sonar.authenticator.downcase")
        .name("Downcase login")
        .description("Downcase login during user authentication, typically for Active Directory")
        .type(BOOLEAN)
        .defaultValue(String.valueOf(false))
        .hidden()
        .build(),
      PropertyDefinition.builder(DISABLE_NOTIFICATION_ON_BUILT_IN_QPROFILES)
        .name("Avoid quality profiles notification")
        .description("Avoid sending email notification on each update of built-in quality profiles to quality profile administrators.")
        .defaultValue(Boolean.toString(false))
        .category(CoreProperties.CATEGORY_GENERAL)
        .subCategory(SUBCATEGORY_QUALITY_PROFILE)
        .type(BOOLEAN)
        .build(),
      PropertyDefinition.builder(ALLOW_DISABLE_INHERITED_RULES)
        .name("Enable deactivation of inherited rules")
        .description("Set if users with 'Administer Quality Profiles' permission are allowed to deactivate inherited rules in quality profiles.")
        .defaultValue(Boolean.toString(true))
        .category(CoreProperties.CATEGORY_GENERAL)
        .subCategory(SUBCATEGORY_QUALITY_PROFILE)
        .type(BOOLEAN)
        .build(),
      PropertyDefinition.builder(PLUGINS_RISK_CONSENT)
        .name("State of user plugins risk consent")
        .description("Determine whether user is required to accept plugins risk consent")
        .defaultValue(NOT_ACCEPTED.name())
        .options(Arrays.stream(PluginRiskConsent.values()).map(Enum::name).toList())
        .hidden()
        .type(SINGLE_SELECT_LIST)
        .build(),
      PropertyDefinition.builder(DOCUMENTATION_BASE_URL)
        .name("Base URL of the documentation")
        .description("Base URL to be used in SonarQube documentation links, such as <i>https://docs.sonarsource.com/sonarqube/</i>")
        .defaultValue(DefaultDocumentationLinkGenerator.DOCUMENTATION_PUBLIC_URL)
        .hidden()
        .type(STRING)
        .build(),
      // WEB LOOK&FEEL
      PropertyDefinition.builder(WebConstants.SONAR_LF_LOGO_URL)
        .deprecatedKey("sonar.branding.image")
        .name("Logo URL")
        .description("URL to logo image. Any standard format is accepted.")
        .category(CoreProperties.CATEGORY_GENERAL)
        .subCategory(CoreProperties.SUBCATEGORY_LOOKNFEEL)
        .build(),
      PropertyDefinition.builder(WebConstants.SONAR_LF_LOGO_WIDTH_PX)
        .deprecatedKey("sonar.branding.image.width")
        .name("Width of image in pixels")
        .description("Width in pixels, constrained to 150px (the height of the image is constrained to 40px).")
        .category(CoreProperties.CATEGORY_GENERAL)
        .subCategory(CoreProperties.SUBCATEGORY_LOOKNFEEL)
        .build(),
      PropertyDefinition.builder(WebConstants.SONAR_LF_ENABLE_GRAVATAR)
        .name("Enable support of gravatars")
        .description("Gravatars are profile pictures of users based on their email.")
        .type(BOOLEAN)
        .defaultValue(String.valueOf(false))
        .category(CoreProperties.CATEGORY_GENERAL)
        .subCategory(CoreProperties.SUBCATEGORY_LOOKNFEEL)
        .build(),
      PropertyDefinition.builder(WebConstants.SONAR_LF_GRAVATAR_SERVER_URL)
        .name("Gravatar URL")
        .description("Optional URL of custom Gravatar service. Accepted variables are {EMAIL_MD5} for MD5 hash of email and {SIZE} for the picture size in pixels.")
        .defaultValue("https://secure.gravatar.com/avatar/{EMAIL_MD5}.jpg?s={SIZE}&d=identicon")
        .category(CoreProperties.CATEGORY_GENERAL)
        .subCategory(CoreProperties.SUBCATEGORY_LOOKNFEEL)
        .build(),
      // ISSUES
      PropertyDefinition.builder(CoreProperties.DEVELOPER_AGGREGATED_INFO_DISABLED)
        .name("Disable developer aggregated information")
        .description("Don't show issue facets aggregating information per developer")
        .category(CoreProperties.CATEGORY_GENERAL)
        .subCategory(CoreProperties.SUBCATEGORY_ISSUES)
        .onQualifiers(Qualifiers.PROJECT)
        .type(BOOLEAN)
        .defaultValue(Boolean.toString(false))
        .build(),
      PropertyDefinition.builder(CoreProperties.DEFAULT_ISSUE_ASSIGNEE)
        .name("Default Assignee")
        .description("New issues will be assigned to this user each time it is not possible to determine the user who is the author of the issue.")
        .category(CoreProperties.CATEGORY_GENERAL)
        .subCategory(CoreProperties.SUBCATEGORY_ISSUES)
        .onQualifiers(Qualifiers.PROJECT)
        .type(PropertyType.USER_LOGIN)
        .build(),
      // QUALITY GATE
      PropertyDefinition.builder(CoreProperties.QUALITY_GATE_IGNORE_SMALL_CHANGES)
        .name("Ignore duplication and coverage on small changes")
        .description("Quality Gate conditions about duplications in new code and coverage on new code are ignored until the number of new lines is at least 20.")
        .category(CoreProperties.CATEGORY_GENERAL)
        .subCategory(CoreProperties.SUBCATEGORY_QUALITY_GATE)
        .onQualifiers(Qualifiers.PROJECT)
        .type(BOOLEAN)
        .defaultValue(Boolean.toString(true))
        .build(),
      // CPD
      PropertyDefinition.builder(CoreProperties.CPD_CROSS_PROJECT)
        .defaultValue(Boolean.toString(false))
        .name("Cross project duplication detection")
        .description("DEPRECATED - By default, SonarQube detects duplications at project level. This means that a block "
          + "duplicated on two different projects won't be reported. Setting this parameter to \"true\" "
          + "allows to detect duplicates across projects. Note that activating "
          + "this property will significantly increase each SonarQube analysis time, "
          + "and therefore badly impact the performances of report processing as more and more projects "
          + "are getting involved in this cross project duplication mechanism.")
        .onQualifiers(Qualifiers.PROJECT)
        .category(CoreProperties.CATEGORY_GENERAL)
        .subCategory(CoreProperties.SUBCATEGORY_DUPLICATIONS)
        .type(BOOLEAN)
        .build(),
      PropertyDefinition.builder(CoreProperties.CPD_EXCLUSIONS)
        .defaultValue("")
        .name("Duplication Exclusions")
        .description("Patterns used to exclude some source files from the duplication detection mechanism. " +
          "See below to know how to use wildcards to specify this property.")
        .onQualifiers(Qualifiers.PROJECT)
        .category(CoreProperties.CATEGORY_EXCLUSIONS)
        .subCategory(CoreProperties.SUBCATEGORY_DUPLICATIONS_EXCLUSIONS)
        .multiValues(true)
        .build()));
    return defs;
  }
|
@Test
  public void all_includes_scanner_properties() {
    // The aggregated definitions must include scanner-contributed properties,
    // e.g. the branch name.
    List<PropertyDefinition> defs = CorePropertyDefinitions.all();
    assertThat(defs.stream()
      .filter(def -> def.key().equals(ScannerProperties.BRANCH_NAME))
      .findFirst()).isPresent();
  }
|
long parkTime(long n) {
        // Exponential backoff: the shift grows with how far n exceeds the park
        // threshold, and the resulting period never exceeds maxParkPeriodNs.
        final long shift = n - parkThreshold;
        if (shift > maxShift) {
            return maxParkPeriodNs;
        }
        final long cappedShift = min(maxShift, shift);
        if (shift < maxShift) {
            return minParkPeriodNs << cappedShift;
        }
        // shift == maxShift: the shifted value may still overshoot the cap.
        return min(minParkPeriodNs << cappedShift, maxParkPeriodNs);
    }
|
@Test
    public void when_maxShiftedGreaterThanMaxParkTime_thenParkMax() {
        // Once shifting the minimum park period would exceed the configured
        // maximum, parkTime must clamp to the maximum (4 ns here).
        final BackoffIdleStrategy strat = new BackoffIdleStrategy(0, 0, 3, 4);
        assertEquals(3, strat.parkTime(0));
        assertEquals(4, strat.parkTime(1));
        assertEquals(4, strat.parkTime(2));
    }
|
public void simulateTypedWord(CharSequence typedWord) {
    // Inserts `typedWord` at the current cursor position as if the user typed it,
    // keeping the parallel key-codes list, caps counter and cursor position in sync.
    final var typedCodes = new int[1];
    mTypedWord.insert(mCursorPosition, typedWord);
    int index = 0;
    while (index < typedWord.length()) {
      final int codePoint = Character.codePointAt(typedWord, index);
      typedCodes[0] = codePoint;
      final var codesFromPool = getReusableArray(typedCodes);
      // mCodes is indexed by code points, not chars — hence codePointCount.
      mCodes.add(mTypedWord.codePointCount(0, mCursorPosition), codesFromPool);
      if (Character.isUpperCase(codePoint)) mCapsCount++;
      // Advance by char count so surrogate pairs are handled as one code point.
      final var charCount = Character.charCount(codePoint);
      index += charCount;
      mCursorPosition += charCount;
    }
  }
|
@Test
  public void testSimulateTypedWord() {
    // Simulated typing must keep the typed word, char/code-point counts, cursor
    // position and per-position key codes consistent — including an insertion
    // at a moved cursor ("wr" inserted at position 2).
    final var underTest = new WordComposer();
    underTest.simulateTypedWord("hello");
    Assert.assertEquals("hello", underTest.getTypedWord());
    Assert.assertEquals(5, underTest.charCount());
    Assert.assertEquals(5, underTest.codePointCount());
    Assert.assertEquals(5, underTest.cursorPosition());
    Assert.assertArrayEquals(new int[] {'h'}, underTest.getCodesAt(0));
    Assert.assertArrayEquals(new int[] {'o'}, underTest.getCodesAt(4));
    underTest.simulateTypedWord("there");
    Assert.assertEquals("hellothere", underTest.getTypedWord());
    Assert.assertEquals(10, underTest.charCount());
    Assert.assertEquals(10, underTest.codePointCount());
    Assert.assertEquals(10, underTest.cursorPosition());
    Assert.assertArrayEquals(new int[] {'h'}, underTest.getCodesAt(0));
    Assert.assertArrayEquals(new int[] {'o'}, underTest.getCodesAt(4));
    Assert.assertArrayEquals(new int[] {'t'}, underTest.getCodesAt(5));
    Assert.assertArrayEquals(new int[] {'e'}, underTest.getCodesAt(9));
    underTest.setCursorPosition(2);
    underTest.simulateTypedWord("wr");
    Assert.assertEquals("hewrllothere", underTest.getTypedWord());
    Assert.assertEquals(12, underTest.charCount());
    Assert.assertEquals(12, underTest.codePointCount());
    Assert.assertEquals(4, underTest.cursorPosition());
    Assert.assertArrayEquals(new int[] {'h'}, underTest.getCodesAt(0));
    Assert.assertArrayEquals(new int[] {'o'}, underTest.getCodesAt(6));
    Assert.assertArrayEquals(new int[] {'t'}, underTest.getCodesAt(7));
    Assert.assertArrayEquals(new int[] {'e'}, underTest.getCodesAt(11));
    Assert.assertArrayEquals(new int[] {'w'}, underTest.getCodesAt(2));
    Assert.assertArrayEquals(new int[] {'r'}, underTest.getCodesAt(3));
  }
|
@Transactional(readOnly = true)
    public ArticleResponse readArticle(Long id) {
        // Read-only lookup of a single article by id, mapped to its response DTO.
        // NOTE(review): getById presumably fails when the article is absent — confirm.
        Article article = articleRepository.getById(id);
        return ArticleResponse.from(article);
    }
|
@DisplayName("아티클 조회 성공")
    @Test
    void readArticle() {
        // given: a persisted article
        articleRepository.save(ARTICLE);
        // when & then: reading it must not throw
        assertThatCode(() -> articleService.readArticle(ARTICLE.getId()))
                .doesNotThrowAnyException();
    }
|
void shutdown(@Observes ShutdownEvent event) {
        // Stop the optional servers first, then close the storage provider they use.
        if (jobRunrBuildTimeConfiguration.backgroundJobServer().enabled()) {
            backgroundJobServerInstance.get().stop();
        }
        if (jobRunrBuildTimeConfiguration.dashboard().enabled()) {
            dashboardWebServerInstance.get().stop();
        }
        storageProviderInstance.get().close();
    }
|
@Test
    void jobRunrStarterStopsBackgroundJobServerIfConfigured() {
        // With the background job server enabled, shutdown must stop it.
        when(backgroundJobServerConfiguration.enabled()).thenReturn(true);
        jobRunrStarter.shutdown(new ShutdownEvent());
        verify(backgroundJobServer).stop();
    }
|
public void setResultMessages(List<AbstractResultMessage> resultMessages) {
        // Replaces the batched result messages wholesale (no defensive copy is taken).
        this.resultMessages = resultMessages;
    }
|
@Test
    void setResultMessages() {
        // The setter must store the list so the getter yields the same elements.
        BatchResultMessage batchResultMessage = new BatchResultMessage();
        List<AbstractResultMessage> resultMessages = Arrays.asList(new RegisterTMResponse(), new RegisterRMResponse(false));
        batchResultMessage.setResultMessages(resultMessages);
        Assertions.assertIterableEquals(resultMessages, batchResultMessage.getResultMessages());
    }
|
@Override
    public Num calculate(BarSeries series, Position position) {
        // Open positions contribute nothing to this criterion.
        if (!position.isClosed()) {
            return series.zero();
        }
        // Gross profit ignores trading costs when excludeCosts is set.
        Num result = excludeCosts ? position.getGrossProfit() : position.getProfit();
        // Only realized losses are reported; profitable positions yield zero.
        return result.isNegative() ? result : series.zero();
    }
|
@Test
    public void calculateOnlyWithLossPositions() {
        // Both closed positions are losing (100->95 and 100->70); the criterion
        // must sum only the losses: -5 + -30 = -35.
        MockBarSeries series = new MockBarSeries(numFunction, 100, 95, 100, 80, 85, 70);
        TradingRecord tradingRecord = new BaseTradingRecord(Trade.buyAt(0, series), Trade.sellAt(1, series),
                Trade.buyAt(2, series), Trade.sellAt(5, series));
        AnalysisCriterion loss = getCriterion(true);
        assertNumEquals(-35, loss.calculate(series, tradingRecord));
    }
|
@VisibleForTesting
  static Map<String, FileSystem> verifySchemesAreUnique(
      PipelineOptions options, Set<FileSystemRegistrar> registrars) {
    // Builds the scheme -> FileSystem map across all registrars, failing fast
    // when two registrars claim the same scheme.
    Multimap<String, FileSystem> fileSystemsBySchemes =
        TreeMultimap.create(Ordering.<String>natural(), Ordering.arbitrary());
    for (FileSystemRegistrar registrar : registrars) {
      for (FileSystem fileSystem : registrar.fromOptions(options)) {
        fileSystemsBySchemes.put(fileSystem.getScheme(), fileSystem);
      }
    }
    for (Entry<String, Collection<FileSystem>> entry : fileSystemsBySchemes.asMap().entrySet()) {
      if (entry.getValue().size() > 1) {
        // Sorted class names keep the error message deterministic.
        String conflictingFileSystems =
            Joiner.on(", ")
                .join(
                    FluentIterable.from(entry.getValue())
                        .transform(input -> input.getClass().getName())
                        .toSortedList(Ordering.natural()));
        throw new IllegalStateException(
            String.format(
                "Scheme: [%s] has conflicting filesystems: [%s]",
                entry.getKey(), conflictingFileSystems));
      }
    }
    ImmutableMap.Builder<String, FileSystem> schemeToFileSystem = ImmutableMap.builder();
    for (Entry<String, FileSystem> entry : fileSystemsBySchemes.entries()) {
      schemeToFileSystem.put(entry.getKey(), entry.getValue());
    }
    return schemeToFileSystem.build();
  }
|
@Test
  public void testVerifySchemesAreUnique() throws Exception {
    // Registering the same scheme twice must be rejected with a descriptive error.
    thrown.expect(RuntimeException.class);
    thrown.expectMessage("Scheme: [file] has conflicting filesystems");
    FileSystems.verifySchemesAreUnique(
        PipelineOptionsFactory.create(),
        Sets.newHashSet(new LocalFileSystemRegistrar(), new LocalFileSystemRegistrar()));
  }
|
@Override
    public List<Intent> compile(PointToPointIntent intent, List<Intent> installable) {
        // Compiles a point-to-point intent, preferring (in this order): a valid
        // suggested path, a zero-hop intent when ingress == egress, an unprotected
        // path, and finally a protected (disjoint) path with single-path fallback.
        log.trace("compiling {} {}", intent, installable);
        ConnectPoint ingressPoint = intent.filteredIngressPoint().connectPoint();
        ConnectPoint egressPoint = intent.filteredEgressPoint().connectPoint();
        //TODO: handle protected path case with suggested path!!
        //Idea: use suggested path as primary and another path from path service as protection
        if (intent.suggestedPath() != null && intent.suggestedPath().size() > 0) {
            Path path = new DefaultPath(PID, intent.suggestedPath(), new ScalarWeight(1));
            //Check intent constraints against suggested path and suggested path availability
            if (checkPath(path, intent.constraints()) && pathAvailable(intent)) {
                allocateIntentBandwidth(intent, path);
                return asList(createLinkCollectionIntent(ImmutableSet.copyOf(intent.suggestedPath()),
                                                         DEFAULT_COST, intent));
            }
        }
        if (ingressPoint.deviceId().equals(egressPoint.deviceId())) {
            return createZeroHopLinkCollectionIntent(intent);
        }
        // proceed with no protected paths
        if (!ProtectionConstraint.requireProtectedPath(intent)) {
            return createUnprotectedLinkCollectionIntent(intent);
        }
        try {
            // attempt to compute and implement backup path
            return createProtectedIntent(ingressPoint, egressPoint, intent, installable);
        } catch (PathNotFoundException e) {
            log.warn("Could not find disjoint Path for {}", intent);
            // no disjoint path extant -- maximum one path exists between devices
            return createSinglePathIntent(ingressPoint, egressPoint, intent, installable);
        }
    }
|
@Test
    public void testSuggestedPath() {
        // A valid suggested path must be compiled into a LinkCollectionIntent that
        // follows the suggestion instead of the shorter alternative path.
        String[] suggestedPathHops = {S1, S3, S4, S5, S6, S8};
        List<Link> suggestedPath = NetTestTools.createPath(suggestedPathHops).links();
        PointToPointIntent intent = makeIntentSuggestedPath(new ConnectPoint(DID_1, PORT_1),
                                                            new ConnectPoint(DID_8, PORT_2),
                                                            suggestedPath);
        String[][] paths = {{S1, S2, S8}, suggestedPathHops};
        PointToPointIntentCompiler compiler = makeCompilerSuggestedPath(paths);
        List<Intent> result = compiler.compile(intent, null);
        assertThat(result, is(Matchers.notNullValue()));
        assertThat(result, hasSize(1));
        Intent resultIntent = result.get(0);
        assertThat(resultIntent instanceof LinkCollectionIntent, is(true));
        if (resultIntent instanceof LinkCollectionIntent) {
            LinkCollectionIntent resultLinkIntent = (LinkCollectionIntent) resultIntent;
            FilteredConnectPoint ingressPoint = new FilteredConnectPoint(new ConnectPoint(DID_1, PORT_1));
            FilteredConnectPoint egressPoint = new FilteredConnectPoint(new ConnectPoint(DID_8, PORT_2));
            // a suggested path over N devices yields N-1 links
            assertThat(resultLinkIntent.links(), hasSize(suggestedPathHops.length - 1));
            assertThat(resultLinkIntent.links(), linksHasPath(S1, S3));
            assertThat(resultLinkIntent.links(), linksHasPath(S3, S4));
            assertThat(resultLinkIntent.links(), linksHasPath(S4, S5));
            assertThat(resultLinkIntent.links(), linksHasPath(S5, S6));
            assertThat(resultLinkIntent.links(), linksHasPath(S6, S8));
            assertThat(resultLinkIntent.filteredIngressPoints(), is(ImmutableSet.of(ingressPoint)));
            assertThat(resultLinkIntent.filteredEgressPoints(), is(ImmutableSet.of(egressPoint)));
        }
        assertThat("key is inherited", resultIntent.key(), is(intent.key()));
    }
|
@CheckForNull
  public Charset detect(byte[] buf) {
    // Detection order: UTF-8 (high confidence), ASCII, UTF-16 (both endiannesses),
    // the user-configured charset, then Windows-1252; null when nothing matches.
    // Try UTF-8 first since we are very confident in it if it's a yes.
    // Fail if we see nulls to not have FPs if the text is ASCII encoded in UTF-16.
    Result utf8Result = validator.isUTF8(buf, true);
    if (utf8Result.valid() == Validation.YES) {
      return utf8Result.charset();
    } else if (utf8Result.valid() == Validation.MAYBE) {
      return detectAscii(buf);
    }
    // try UTF16 with both endiness. Fail if we see nulls to not have FPs if it's UTF-32.
    Result utf16 = validator.isUTF16(buf, true);
    if (utf16.valid() == Validation.YES && validator.isValidUTF16(buf, UTF_16LE.equals(utf16.charset()))) {
      return utf16.charset();
    }
    // at this point we know it can't be UTF-8
    Charset c = userConfiguration;
    if (!UTF_8.equals(c) && (!isUtf16(c) || utf16.valid() == Validation.MAYBE) && validator.tryDecode(buf, c)) {
      return c;
    }
    Result windows1252 = validator.isValidWindows1252(buf);
    if (windows1252.valid() == Validation.MAYBE) {
      return windows1252.charset();
    }
    return null;
  }
|
@Test
  public void tryUTF8First() {
    // A definite UTF-8 validation result must be returned without consulting
    // the other detectors.
    when(validation.isUTF8(any(byte[].class), anyBoolean())).thenReturn(Result.newValid(StandardCharsets.UTF_8));
    assertThat(charsets.detect(new byte[1])).isEqualTo(StandardCharsets.UTF_8);
  }
|
void parseFistline(Modification modification, String line, ConsoleResult result) throws P4OutputParseException {
        // Parses the first line of a "p4 describe" output into revision, user name
        // and modified time, failing with P4OutputParseException when the line does
        // not match the expected format.
        // NOTE(review): the Pattern is recompiled on every call; FIRST_LINE_PATTERN
        // could be compiled once into a static final Pattern.
        Pattern pattern = Pattern.compile(FIRST_LINE_PATTERN);
        Matcher matcher = pattern.matcher(line);
        if (matcher.find()) {
            modification.setRevision(matcher.group(1));
            modification.setUserName(matcher.group(2));
            try {
                modification.setModifiedTime(new SimpleDateFormat(P4_DATE_PATTERN).parse(matcher.group(3)));
            } catch (ParseException e) {
                // An unparseable date on a line that matched the pattern is unexpected.
                throw bomb(e);
            }
        } else {
            // Secrets are masked before the unparsed line reaches logs or the exception.
            LOG.warn("Could not parse P4 describe: {}", result.replaceSecretInfo(line));
            throw new P4OutputParseException("Could not parse P4 describe: " + result.replaceSecretInfo(line));
        }
    }
|
@Test
    void shouldThrowExceptionIfP4ReturnDifferentDateFormatWhenCannotParseFistLineOfP4Describe() {
        // The date "08/08/19" does not follow the expected P4 layout, so parsing
        // the first line must fail with P4OutputParseException.
        String output = "Change 2 on 08/08/19 by cceuser@connect4 'some modification message'";
        Modification modification = new Modification();
        try {
            parser.parseFistline(modification, output, new ConsoleResult(0, new ArrayList<>(), new ArrayList<>(), new ArrayList<>(), new ArrayList<>()));
            // BUG FIX: the test previously passed silently when no exception was
            // thrown; fail explicitly in that case.
            throw new AssertionError("Expected P4OutputParseException to be thrown");
        } catch (P4OutputParseException e) {
            assertThat(e.getMessage()).contains("Could not parse P4 describe:");
        }
    }
|
@Override
    protected int poll() throws Exception {
        // Polls SQS once and dispatches the received messages as a batch of exchanges.
        // must reset for each poll
        shutdownRunningTask = null;
        pendingExchanges = 0;
        List<software.amazon.awssdk.services.sqs.model.Message> messages = pollingTask.call();
        // okay we have some response from aws so lets mark the consumer as ready
        forceConsumerAsReady();
        Queue<Exchange> exchanges = createExchanges(messages);
        return processBatch(CastUtils.cast(exchanges));
    }
|
@Test
    void shouldIgnoreEmptyAttributeNames() throws Exception {
        // Blank attribute-name settings must translate to null (absent) attribute
        // lists on the SQS receive request rather than empty lists.
        // given
        configuration.setAttributeNames("");
        configuration.setMessageAttributeNames("");
        configuration.setSortAttributeName("");
        try (var tested = createConsumer(9)) {
            // when
            var polledMessagesCount = tested.poll();
            // then
            var expectedRequest = expectedReceiveRequestBuilder()
                    .messageSystemAttributeNames((List<MessageSystemAttributeName>) null)
                    .messageAttributeNames((List<String>) null)
                    .maxNumberOfMessages(9)
                    .build();
            assertThat(polledMessagesCount).isZero();
            assertThat(receivedExchanges).isEmpty();
            assertThat(sqsClientMock.getReceiveRequests()).containsExactlyInAnyOrder(expectedRequest);
            assertThat(sqsClientMock.getQueues()).isEmpty();
        }
    }
|
/**
 * OAuth2 authorization-code grant: consumes the authorization code, validates that
 * clientId, redirectUri and state all match the stored code, then issues an access token.
 *
 * @return the newly created access token
 */
@Override
public OAuth2AccessTokenDO grantAuthorizationCodeForAccessToken(String clientId, String code,
                                                                String redirectUri, String state) {
    OAuth2CodeDO codeDO = oauth2CodeService.consumeAuthorizationCode(code);
    Assert.notNull(codeDO, "授权码不能为空"); // defensive programming: consume should already have failed on a missing code
    // Verify the clientId matches the one the code was issued to
    if (!StrUtil.equals(clientId, codeDO.getClientId())) {
        throw exception(ErrorCodeConstants.OAUTH2_GRANT_CLIENT_ID_MISMATCH);
    }
    // Verify the redirectUri matches
    if (!StrUtil.equals(redirectUri, codeDO.getRedirectUri())) {
        throw exception(ErrorCodeConstants.OAUTH2_GRANT_REDIRECT_URI_MISMATCH);
    }
    // Verify the state matches
    state = StrUtil.nullToDefault(state, ""); // a null state is persisted as the empty string, so normalize before comparing
    if (!StrUtil.equals(state, codeDO.getState())) {
        throw exception(ErrorCodeConstants.OAUTH2_GRANT_STATE_MISMATCH);
    }
    // Create the access token for the code's user and scopes
    return oauth2TokenService.createAccessToken(codeDO.getUserId(), codeDO.getUserType(),
            codeDO.getClientId(), codeDO.getScopes());
}
|
/**
 * Happy-path test for the authorization-code grant: when clientId, redirectUri and
 * state all match the stored code, the token created by the token service is returned.
 */
@Test
public void testGrantAuthorizationCodeForAccessToken() {
    // Prepare parameters
    String clientId = randomString();
    String code = randomString();
    List<String> scopes = Lists.newArrayList("read", "write");
    String redirectUri = randomString();
    String state = randomString();
    // Mock the authorization-code lookup so all checks pass
    OAuth2CodeDO codeDO = randomPojo(OAuth2CodeDO.class, o -> {
        o.setClientId(clientId);
        o.setRedirectUri(redirectUri);
        o.setState(state);
        o.setScopes(scopes);
    });
    when(oauth2CodeService.consumeAuthorizationCode(eq(code))).thenReturn(codeDO);
    // Mock the token creation
    OAuth2AccessTokenDO accessTokenDO = randomPojo(OAuth2AccessTokenDO.class);
    when(oauth2TokenService.createAccessToken(eq(codeDO.getUserId()), eq(codeDO.getUserType()),
            eq(codeDO.getClientId()), eq(codeDO.getScopes()))).thenReturn(accessTokenDO);
    // Invoke and assert
    assertPojoEquals(accessTokenDO, oauth2GrantService.grantAuthorizationCodeForAccessToken(
            clientId, code, redirectUri, state));
}
|
/**
 * Looks up the scheduler's view of the given node.
 *
 * @param nodeId the node to look up
 * @return the scheduler node, or null if the node is not tracked
 */
@Override
public N getSchedulerNode(NodeId nodeId) {
    return nodeTracker.getNode(nodeId);
}
|
/**
 * Verifies allocation-tag bookkeeping across the container lifecycle: tags are
 * counted while containers are allocated/running, survive an AM release until the
 * NM confirms completion, and are cleaned up once the NM reports the container done.
 */
@Test(timeout = 30000l)
public void testContainerReleaseWithAllocationTags() throws Exception {
    // Currently only can be tested against capacity scheduler.
    if (getSchedulerType().equals(SchedulerType.CAPACITY)) {
        final String testTag1 = "some-tag";
        final String testTag2 = "some-other-tag";
        YarnConfiguration conf = getConf();
        conf.set(YarnConfiguration.RM_PLACEMENT_CONSTRAINTS_HANDLER, "scheduler");
        MockRM rm1 = new MockRM(conf);
        rm1.start();
        MockNM nm1 = new MockNM("127.0.0.1:1234",
            10240, rm1.getResourceTrackerService());
        nm1.registerNode();
        MockRMAppSubmissionData data =
            MockRMAppSubmissionData.Builder.createWithMemory(200, rm1)
                .withAppName("name")
                .withUser("user")
                .withAcls(new HashMap<>())
                .withUnmanagedAM(false)
                .withQueue("default")
                .withMaxAppAttempts(-1)
                .withCredentials(null)
                .withAppType("Test")
                .withWaitForAppAcceptedState(false)
                .withKeepContainers(true)
                .build();
        RMApp app1 =
            MockRMAppSubmitter.submit(rm1, data);
        MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
        // allocate 1 container with tag1
        SchedulingRequest sr = SchedulingRequest
            .newInstance(1l, Priority.newInstance(1),
                ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED),
                Sets.newHashSet(testTag1),
                ResourceSizing.newInstance(1, Resource.newInstance(1024, 1)),
                null);
        // allocate 3 containers with tag2
        SchedulingRequest sr1 = SchedulingRequest
            .newInstance(2l, Priority.newInstance(1),
                ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED),
                Sets.newHashSet(testTag2),
                ResourceSizing.newInstance(3, Resource.newInstance(1024, 1)),
                null);
        AllocateRequest ar = AllocateRequest.newBuilder()
            .schedulingRequests(Lists.newArrayList(sr, sr1)).build();
        am1.allocate(ar);
        nm1.nodeHeartbeat(true);
        List<Container> allocated = new ArrayList<>();
        // Poll (heartbeat + allocate) until all 4 containers arrive; bounded by the @Test timeout.
        while (allocated.size() < 4) {
          AllocateResponse rsp = am1
              .allocate(new ArrayList<>(), new ArrayList<>());
          allocated.addAll(rsp.getAllocatedContainers());
          nm1.nodeHeartbeat(true);
          Thread.sleep(1000);
        }
        Assert.assertEquals(4, allocated.size());
        // Pick out the single container allocated for request id 1 (tag1).
        Set<Container> containers = allocated.stream()
            .filter(container -> container.getAllocationRequestId() == 1l)
            .collect(Collectors.toSet());
        Assert.assertNotNull(containers);
        Assert.assertEquals(1, containers.size());
        ContainerId cid = containers.iterator().next().getId();
        // mock container start
        rm1.getRMContext().getScheduler()
            .getSchedulerNode(nm1.getNodeId()).containerStarted(cid);
        // verifies the allocation is made with correct number of tags
        Map<String, Long> nodeTags = rm1.getRMContext()
            .getAllocationTagsManager()
            .getAllocationTagsWithCount(nm1.getNodeId());
        Assert.assertNotNull(nodeTags.get(testTag1));
        Assert.assertEquals(1, nodeTags.get(testTag1).intValue());
        // release a container
        am1.allocate(new ArrayList<>(), Lists.newArrayList(cid));
        // before NM confirms, the tag should still exist
        nodeTags = rm1.getRMContext().getAllocationTagsManager()
            .getAllocationTagsWithCount(nm1.getNodeId());
        Assert.assertNotNull(nodeTags);
        Assert.assertNotNull(nodeTags.get(testTag1));
        Assert.assertEquals(1, nodeTags.get(testTag1).intValue());
        // NM reports back that container is released
        // RM should cleanup the tag
        ContainerStatus cs = ContainerStatus.newInstance(cid,
            ContainerState.COMPLETE, "", 0);
        nm1.nodeHeartbeat(Lists.newArrayList(cs), true);
        // Wait on condition
        // 1) tag1 doesn't exist anymore
        // 2) num of tag2 is still 3
        GenericTestUtils.waitFor(() -> {
          Map<String, Long> tags = rm1.getRMContext()
              .getAllocationTagsManager()
              .getAllocationTagsWithCount(nm1.getNodeId());
          return tags.get(testTag1) == null &&
              tags.get(testTag2).intValue() == 3;
        }, 500, 3000);
    }
}
|
/**
 * Computes the size of each downloadable item, keyed by its idempotent id.
 * Uses the size reported by the connection when positive; otherwise falls back
 * to measuring the stream contents via {@code computeSize}.
 *
 * @return an insertion-ordered map of idempotent id to item size in bytes
 */
@Override
public Map<String, Long> call() throws Exception {
    // LinkedHashMap preserves the iteration order of the input items.
    Map<String, Long> result = new LinkedHashMap<>();
    for (DownloadableItem item : items) {
        InputStreamWrapper stream = connectionProvider.getInputStreamForItem(jobId, item);
        long size = stream.getBytes();
        // A non-positive reported size means "unknown"; measure the stream instead.
        if (size <= 0) {
            size = computeSize(stream);
        }
        result.put(item.getIdempotentId(), size);
    }
    return result;
}
|
/**
 * Verifies that an IOException raised while fetching an item's stream propagates
 * out of {@code call()} rather than being swallowed.
 */
@Test
public void testExceptionIsThrown() throws Exception {
    when(connectionProvider.getInputStreamForItem(any(), any()))
        .thenThrow(new IOException("oh no!"));
    assertThrows(IOException.class, () -> {
      new CallableSizeCalculator(jobId, connectionProvider,
          Collections.singleton(createItem("1-3242"))).call();
    });
}
|
/**
 * Formats a byte count as a per-second bit rate, clipped at 100 Gbps.
 *
 * @param bytes number of bytes (converted to bits before formatting)
 * @return the formatted, clipped bit-rate label
 */
public static ValueLabel formatClippedBitRate(long bytes) {
    return new ValueLabel(bytes * 8, BITS_UNIT).perSec().clipG(100.0);
}
|
/**
 * 2004 bytes = 16032 bits, which formats as 15.66 Kbps (1024-based scaling);
 * well under the 100 Gbps clip threshold, so no clipping occurs.
 */
@Test
public void formatClippedBitsKilo() {
    vl = TopoUtils.formatClippedBitRate(2_004);
    assertEquals(AM_WL, "15.66 Kbps", vl.toString());
    assertFalse(AM_CL, vl.clipped());
}
|
/**
 * Maps the given string to a sort direction: the DESC token yields descending,
 * anything else (including null) defaults to ascending.
 */
public static SortDir sortDir(String s) {
    return DESC.equals(s) ? SortDir.DESC : SortDir.ASC;
}
|
/** The literal "desc" must map to the descending sort direction. */
@Test
public void sortDirDesc() {
    assertEquals("desc sort dir", SortDir.DESC, TableModel.sortDir("desc"));
}
|
/**
 * Builds the header line for the given CSV format by joining its header column
 * names with the format's delimiter character.
 */
static String headerLine(CSVFormat csvFormat) {
    String delimiter = String.valueOf(csvFormat.getDelimiter());
    return String.join(delimiter, csvFormat.getHeader());
}
|
/**
 * With ignoreEmptyLines disabled, empty input lines must still produce no records
 * and no parse errors — the transform effectively skips them either way.
 */
@Test
public void givenNoIgnoreEmptyLines_isNoop() {
    CSVFormat csvFormat = csvFormat().withIgnoreEmptyLines(false);
    // Input interleaves empty strings between valid CSV rows.
    PCollection<String> input =
        pipeline.apply(Create.of(headerLine(csvFormat), "a,1,1.1", "", "b,2,2.2", "", "c,3,3.3"));
    CsvIOStringToCsvRecord underTest = new CsvIOStringToCsvRecord(csvFormat);
    CsvIOParseResult<List<String>> result = input.apply(underTest);
    PAssert.that(result.getOutput())
        .containsInAnyOrder(
            Arrays.asList(
                Arrays.asList("a", "1", "1.1"),
                Arrays.asList("b", "2", "2.2"),
                Arrays.asList("c", "3", "3.3")));
    PAssert.that(result.getErrors()).empty();
    pipeline.run();
}
|
/**
 * Constructs an alarm identifier of the form {@code <deviceId>:<uniqueIdentifier>}.
 * <p>
 * BUG FIX: the null checks previously ran AFTER {@code id.toString()} was evaluated
 * inside the {@code super(...)} call, so a null id raised a bare NPE before the
 * intended message could be produced. Guava's {@code checkNotNull} returns its
 * argument, so the checks are folded into the super-constructor expression.
 *
 * @param id               device id; must not be null
 * @param uniqueIdentifier alarm-unique suffix; must not be null or empty
 */
private AlarmId(DeviceId id, String uniqueIdentifier) {
    super(checkNotNull(id, "device id must not be null").toString() + ":"
                  + checkNotNull(uniqueIdentifier, "unique identifier must not be null"));
    checkArgument(!uniqueIdentifier.isEmpty(), "unique identifier must not be empty");
}
|
/** Two AlarmIds built from the same device id and unique id must be equal. */
@Test
public void valueOf() {
    final AlarmId id = AlarmId.alarmId(DEVICE_ID, UNIQUE_ID_1);
    assertEquals("incorrect valueOf", id, ID_A);
}
|
/**
 * Convenience overload that coerces the expression list without any lambda
 * type mappings (delegates with an empty mapping).
 */
static Result coerceUserList(
    final Collection<Expression> expressions,
    final ExpressionTypeManager typeManager
) {
    return coerceUserList(expressions, typeManager, Collections.emptyMap());
}
|
/**
 * Coercing a list that mixes BIGINT, an INTEGER-typed lambda variable and an INT
 * expression must pick BIGINT as the common type and wrap the narrower operands
 * in casts.
 */
@Test
public void shouldCoerceLambdaVariables() {
    // Given:
    final ImmutableList<Expression> expressions = ImmutableList.of(
        BIGINT_EXPRESSION,
        new LambdaVariable("X"),
        INT_EXPRESSION
    );
    // When: "X" is declared as INTEGER via the lambda type mapping
    final Result result = CoercionUtil.coerceUserList(
        expressions,
        typeManager,
        Collections.singletonMap("X", SqlTypes.INTEGER)
    );
    // Then: common type is BIGINT; narrower expressions are cast up
    assertThat(result.commonType(), is(Optional.of(SqlTypes.BIGINT)));
    assertThat(result.expressions(), is(ImmutableList.of(
        BIGINT_EXPRESSION,
        cast(new LambdaVariable("X"), SqlTypes.BIGINT),
        cast(INT_EXPRESSION, SqlTypes.BIGINT)
    )));
}
|
/**
 * Sums the active-work budgets of every cached computation into a single
 * {@link GetWorkBudget}, starting from the zero budget.
 */
public GetWorkBudget totalCurrentActiveGetWorkBudget() {
    return computationCache.asMap().values().stream()
        .map(ComputationState::getActiveWorkBudget)
        .reduce(GetWorkBudget.noBudget(), GetWorkBudget::apply);
}
|
/**
 * Activates one work item on computationId and two on computationId2, then checks
 * the aggregated budget counts 3 items with the summed serialized byte size.
 */
@Test
public void testTotalCurrentActiveGetWorkBudget() {
  String computationId = "computationId";
  String computationId2 = "computationId2";
  MapTask mapTask = new MapTask().setStageName("stageName").setSystemName("systemName");
  Map<String, String> userTransformToStateFamilyName =
      ImmutableMap.of("userTransformName", "stateFamilyName");
  ComputationConfig computationConfig =
      ComputationConfig.create(mapTask, userTransformToStateFamilyName, ImmutableMap.of());
  when(configFetcher.fetchConfig(eq(computationId))).thenReturn(Optional.of(computationConfig));
  when(configFetcher.fetchConfig(eq(computationId2))).thenReturn(Optional.of(computationConfig));

  ShardedKey shardedKey = ShardedKey.create(ByteString.EMPTY, 1);
  ShardedKey shardedKey2 = ShardedKey.create(ByteString.EMPTY, 2);

  ExecutableWork work1 = createWork(shardedKey, 1, 1);
  ExecutableWork work2 = createWork(shardedKey2, 2, 2);
  ExecutableWork work3 = createWork(shardedKey2, 3, 3);

  // Activate 1 Work for computationId
  Optional<ComputationState> maybeComputationState = computationStateCache.get(computationId);
  assertTrue(maybeComputationState.isPresent());
  ComputationState computationState = maybeComputationState.get();
  computationState.activateWork(work1);

  // Activate 2 Work(s) for computationId2.
  // BUG FIX: previously looked up computationId again, so the second computation
  // (and its mocked config) was never exercised.
  Optional<ComputationState> maybeComputationState2 = computationStateCache.get(computationId2);
  assertTrue(maybeComputationState2.isPresent());
  ComputationState computationState2 = maybeComputationState2.get();
  computationState2.activateWork(work2);
  computationState2.activateWork(work3);

  // GetWorkBudget should have 3 items. 1 from computationId, 2 from computationId2.
  assertThat(computationStateCache.totalCurrentActiveGetWorkBudget())
      .isEqualTo(
          GetWorkBudget.builder()
              .setItems(3)
              .setBytes(
                  work1.getWorkItem().getSerializedSize()
                      + work2.getWorkItem().getSerializedSize()
                      + work3.getWorkItem().getSerializedSize())
              .build());
}
|
/**
 * Returns the path of {@code fullPath} relative to {@code basePath}, computed via
 * URI relativization (empty string when the two paths are equal).
 */
public static String getRelativePath(Path basePath,
    Path fullPath) {
  return basePath.toUri().relativize(fullPath.toUri()).getPath();
}
|
/** Relativizing a path against itself yields the empty string. */
@Test
public void testRelativizeSelf() {
  assertEquals("", getRelativePath(BASE, BASE));
}
|
/**
 * Parses a GraphQL HTTP POST body (JSON) into its operationName / query / variables
 * parts.
 *
 * @param postData        raw request body bytes
 * @param contentEncoding charset of the body; defaults to the URL-argument encoding when blank
 * @return the extracted GraphQL request parameters
 * @throws IllegalArgumentException if the body is not valid JSON, lacks a query,
 *         the query is not a query/mutation, or variables is not a JSON object
 */
public static GraphQLRequestParams toGraphQLRequestParams(byte[] postData, final String contentEncoding)
        throws JsonProcessingException, UnsupportedEncodingException {
    final String encoding = StringUtils.isNotEmpty(contentEncoding) ? contentEncoding
            : EncoderCache.URL_ARGUMENT_ENCODING;

    ObjectNode data;
    try (InputStreamReader reader = new InputStreamReader(new ByteArrayInputStream(postData), encoding)) {
        data = OBJECT_MAPPER.readValue(reader, ObjectNode.class);
    } catch (IOException e) {
        throw new IllegalArgumentException("Invalid json data: " + e.getLocalizedMessage(), e);
    }

    String operationName = null;
    String query;
    String variables = null;

    // operationName is optional; null node means absent.
    final JsonNode operationNameNode = data.has(OPERATION_NAME_FIELD) ? data.get(OPERATION_NAME_FIELD) : null;
    if (operationNameNode != null) {
        operationName = getJsonNodeTextContent(operationNameNode, true);
    }

    if (!data.has(QUERY_FIELD)) {
        throw new IllegalArgumentException("Not a valid GraphQL query.");
    }
    final JsonNode queryNode = data.get(QUERY_FIELD);
    query = getJsonNodeTextContent(queryNode, false);
    final String trimmedQuery = StringUtils.trim(query);
    // NOTE(review): only documents starting with "query" or "mutation" are accepted;
    // shorthand "{...}" queries and "subscription" operations are rejected — confirm intended.
    if (!StringUtils.startsWith(trimmedQuery, QUERY_FIELD) && !StringUtils.startsWith(trimmedQuery, "mutation")) {
        throw new IllegalArgumentException("Not a valid GraphQL query.");
    }

    // variables must be a JSON object (or null/absent); re-serialized to a string.
    final JsonNode variablesNode = data.has(VARIABLES_FIELD) ? data.get(VARIABLES_FIELD) : null;
    if (variablesNode != null) {
        final JsonNodeType nodeType = variablesNode.getNodeType();
        if (nodeType != JsonNodeType.NULL) {
            if (nodeType == JsonNodeType.OBJECT) {
                variables = OBJECT_MAPPER.writeValueAsString(variablesNode);
            } else {
                throw new IllegalArgumentException("Not a valid object node for GraphQL variables.");
            }
        }
    }

    return new GraphQLRequestParams(operationName, query, variables);
}
|
/**
 * Extracting GraphQL parameters from HTTP arguments: covers a bare query, a full
 * operationName/query/variables triple, and URL-encoded queries (both '+' and
 * '%20' space encodings must decode identically).
 */
@Test
void testToGraphQLRequestParamsWithHttpArguments() throws Exception {
    // query only
    Arguments args = new Arguments();
    args.addArgument(new HTTPArgument("query", "query { droid { id }}", "=", false));
    GraphQLRequestParams params = GraphQLRequestParamUtils.toGraphQLRequestParams(args, null);
    assertNull(params.getOperationName());
    assertEquals("query { droid { id }}", params.getQuery());
    assertNull(params.getVariables());

    // operationName + query + variables
    args = new Arguments();
    args.addArgument(new HTTPArgument("operationName", "op1", "=", false));
    args.addArgument(new HTTPArgument("query", "query { droid { id }}", "=", false));
    args.addArgument(new HTTPArgument("variables", "{\"id\":123}", "=", false));
    params = GraphQLRequestParamUtils.toGraphQLRequestParams(args, null);
    assertEquals("op1", params.getOperationName());
    assertEquals("query { droid { id }}", params.getQuery());
    assertEquals("{\"id\":123}", params.getVariables());

    // URL-encoded query using '+' for spaces
    args = new Arguments();
    args.addArgument(new HTTPArgument("query", "query+%7B+droid+%7B+id+%7D%7D", "=", true));
    params = GraphQLRequestParamUtils.toGraphQLRequestParams(args, null);
    assertNull(params.getOperationName());
    assertEquals("query { droid { id }}", params.getQuery());
    assertNull(params.getVariables());

    // URL-encoded query using '%20' for spaces
    args = new Arguments();
    args.addArgument(new HTTPArgument("query", "query%20%7B%20droid%20%7B%20id%20%7D%7D", "=", true));
    params = GraphQLRequestParamUtils.toGraphQLRequestParams(args, null);
    assertNull(params.getOperationName());
    assertEquals("query { droid { id }}", params.getQuery());
    assertNull(params.getVariables());
}
|
/**
 * Loads table metadata (columns and indexes) for the given table name pattern,
 * formatted per the dialect's conventions.
 *
 * @return the table metadata, or empty if the table does not exist
 * @throws SQLException on database access errors
 */
public static Optional<TableMetaData> load(final DataSource dataSource, final String tableNamePattern, final DatabaseType databaseType) throws SQLException {
    DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData();
    try (MetaDataLoaderConnection connection = new MetaDataLoaderConnection(databaseType, dataSource.getConnection())) {
        // Apply dialect-specific name formatting (e.g. case folding) before lookups.
        String formattedTableNamePattern = dialectDatabaseMetaData.formatTableNamePattern(tableNamePattern);
        return isTableExist(connection, formattedTableNamePattern)
                ? Optional.of(new TableMetaData(tableNamePattern, ColumnMetaDataLoader.load(
                        connection, formattedTableNamePattern, databaseType), IndexMetaDataLoader.load(connection, formattedTableNamePattern), Collections.emptyList()))
                : Optional.empty();
    }
}
|
/**
 * Loading metadata for a non-existent table must still yield the schema entry,
 * just with an empty table collection.
 */
@Test
void assertLoadWithNotExistedTable() throws SQLException {
    Map<String, SchemaMetaData> actual = MetaDataLoader.load(Collections.singleton(new MetaDataLoaderMaterial(Collections.singleton(NOT_EXISTED_TABLE), dataSource, databaseType, "sharding_db")));
    assertFalse(actual.isEmpty());
    assertTrue(actual.containsKey("sharding_db"));
    assertTrue(actual.get("sharding_db").getTables().isEmpty());
}
|
/**
 * No-op implementation: this manager tracks no plugins, so every lookup
 * returns empty regardless of the id.
 */
@Override
public <A extends ThreadPoolPlugin> Optional<A> getPlugin(String pluginId) {
    return Optional.empty();
}
|
/** The empty manager always returns the canonical Optional.empty() instance. */
@Test
public void testGetPlugin() {
    Assert.assertSame(Optional.empty(), manager.getPlugin(""));
}
|
/**
 * Finds the OP_TYPE of the first field whose name equals {@code fieldName} and
 * whose op-type is set.
 *
 * @param fields    fields to scan; may be null
 * @param fieldName name to match (null-safe comparison)
 * @return the matching op-type, or empty when fields is null or no match exists
 */
public static Optional<OP_TYPE> getOpTypeFromFields(final List<Field<?>> fields,
                                                    final String fieldName) {
    if (fields == null) {
        return Optional.empty();
    }
    for (Field<?> field : fields) {
        if (Objects.equals(fieldName, field.getName()) && field.getOpType() != null) {
            return Optional.of(OP_TYPE.byName(field.getOpType().value()));
        }
    }
    return Optional.empty();
}
|
/**
 * getOpTypeFromFields: null fields and unknown names yield empty; once random
 * data fields are added, each field's op-type must be resolved correctly.
 */
@Test
void getOpTypeFromFields() {
    // null field list -> empty result
    Optional<OP_TYPE> opType = org.kie.pmml.compiler.api.utils.ModelUtils.getOpTypeFromFields(null, "vsd");
    assertThat(opType).isNotNull();
    assertThat(opType.isPresent()).isFalse();
    // empty dictionary, unknown name -> empty result
    final DataDictionary dataDictionary = new DataDictionary();
    final List<Field<?>> fields = getFieldsFromDataDictionary(dataDictionary);
    opType = org.kie.pmml.compiler.api.utils.ModelUtils.getOpTypeFromFields(fields, "vsd");
    assertThat(opType).isNotNull();
    assertThat(opType.isPresent()).isFalse();
    // populate three random data fields, then verify each resolves to its op-type
    IntStream.range(0, 3).forEach(i -> {
        final DataField dataField = getRandomDataField();
        dataDictionary.addDataFields(dataField);
    });
    fields.clear();
    fields.addAll(getFieldsFromDataDictionary(dataDictionary));
    dataDictionary.getDataFields().forEach(dataField -> {
        Optional<OP_TYPE> retrieved = org.kie.pmml.compiler.api.utils.ModelUtils.getOpTypeFromFields(fields,dataField.getName());
        assertThat(retrieved).isNotNull();
        assertThat(retrieved).isPresent();
        OP_TYPE expected = OP_TYPE.byName(dataField.getOpType().value());
        assertThat(retrieved.get()).isEqualTo(expected);
    });
}
|
/**
 * JDBC DatabaseMetaData contract: this driver does not guarantee generated keys
 * are always returned.
 */
@Override
public boolean generatedKeyAlwaysReturned() {
    return false;
}
|
/** generatedKeyAlwaysReturned is hard-wired to false. */
@Test
void assertGeneratedKeyAlwaysReturned() {
    assertFalse(metaData.generatedKeyAlwaysReturned());
}
|
/**
 * Collects the distinct logic table names from all table mappers of this route unit.
 */
public Set<String> getLogicTableNames() {
    // Pre-size the set to the mapper count with load factor 1 to avoid rehashing.
    Set<String> result = new HashSet<>(tableMappers.size(), 1L);
    for (RouteMapper each : tableMappers) {
        result.add(each.getLogicName());
    }
    return result;
}
|
/** A route unit with one mapper exposes exactly that mapper's logic table name. */
@Test
void assertGetLogicTableNames() {
    Set<String> actual = routeUnit.getLogicTableNames();
    assertThat(actual.size(), is(1));
    assertTrue(actual.contains(LOGIC_TABLE));
}
|
/**
 * Creates a promise that is already resolved with the given failure.
 *
 * @param error the failure cause
 * @return a done, failed promise carrying {@code error}
 */
public static <T> Promise<T> error(final Throwable error) {
    return new ResolvedError<T>(error);
}
|
/** A promise built from an error is immediately done, failed, and carries that error. */
@Test
public void testError() {
    final Exception error = new Exception();
    final Promise<?> promise = Promises.error(error);
    assertTrue(promise.isDone());
    assertTrue(promise.isFailed());
    assertEquals(error, promise.getError());
}
|
/**
 * Deserializes the given JSON string into an instance of {@code valueTypeRef}.
 *
 * @param json         the JSON text to parse
 * @param valueTypeRef target class
 * @return the parsed object, or null (with a warning logged) when parsing fails
 */
public static <T> T jsonToObject(final String json, final Class<T> valueTypeRef) {
    try {
        return MAPPER.readValue(json, valueTypeRef);
    } catch (IOException e) {
        // BUG FIX: the previous message ("write to Object error") described the
        // opposite direction — this method READS json into an object.
        LOG.warn("read json to Object error: " + json, e);
        return null;
    }
}
|
/** Valid JSON round-trips into a populated TestObject. */
@Test
public void testJsonToObject() {
    TestObject testObject = JsonUtils.jsonToObject(EXPECTED_JSON, TestObject.class);
    assertNotNull(testObject);
    assertEquals(testObject.getName(), "test object");
}
|
/**
 * Renders this single-row result as a Zeppelin "%angular" paragraph, rewriting
 * each positional placeholder "{i}" in the template to the Angular binding
 * "{{value_i}}".
 */
public String toAngular() {
    String rendered = template;
    for (int i = 0; i < values.size(); ++i) {
        rendered = rendered.replace("{" + i + "}", "{{value_" + i + "}}");
    }
    return "%angular " + rendered;
}
|
/** Placeholders {0} and {1} must be rewritten to {{value_0}} / {{value_1}} bindings. */
@Test
void testAngular() {
    List<Object> list = Arrays.asList("2020-01-01", 10);
    String template = "Total count:{1} for {0}";
    InterpreterContext context = InterpreterContext.builder().build();
    SingleRowInterpreterResult singleRowInterpreterResult = new SingleRowInterpreterResult(list, template, context);
    String angularOutput = singleRowInterpreterResult.toAngular();
    assertEquals("%angular Total count:{{value_1}} for {{value_0}}", angularOutput);
}
|
/**
 * Lists every loaded module: used modules first (preserving their use order,
 * flagged used=true), followed by loaded-but-unused modules (flagged used=false).
 */
public List<ModuleEntry> listFullModules() {
    // Used modules first, in their established order.
    List<ModuleEntry> entries = usedModules.stream()
            .map(name -> new ModuleEntry(name, true))
            .collect(Collectors.toList());
    // Append the loaded modules that are not currently in use.
    entries.addAll(
            loadedModules.keySet().stream()
                    .filter(name -> !usedModules.contains(name))
                    .map(name -> new ModuleEntry(name, false))
                    .collect(Collectors.toList()));
    return entries;
}
|
/**
 * After using "z" and "y", the full listing puts them first (in use order),
 * followed by the unused loaded modules (core module and "x").
 */
@Test
void testListFullModules() {
    ModuleMock x = new ModuleMock("x");
    ModuleMock y = new ModuleMock("y");
    ModuleMock z = new ModuleMock("z");
    manager.loadModule("y", y);
    manager.loadModule("x", x);
    manager.loadModule("z", z);
    manager.useModules("z", "y");
    assertThat(manager.listFullModules())
            .isEqualTo(
                    getExpectedModuleEntries(2, "z", "y", CoreModuleFactory.IDENTIFIER, "x"));
}
|
/**
 * Loads the ordered SPI services for {@code serviceInterface} and maps each
 * requested type to the service whose type class is identical to it.
 * Iteration order of the result follows service order, then type order.
 */
public static <T extends OrderedSPI<?>> Map<Class<?>, T> getServicesByClass(final Class<T> serviceInterface, final Collection<Class<?>> types) {
    Collection<T> services = getServices(serviceInterface);
    Map<Class<?>, T> result = new LinkedHashMap<>(services.size(), 1F);
    for (T each : services) {
        for (Class<?> type : types) {
            // Reference equality is correct here: Class instances are canonical per loader.
            if (each.getTypeClass() == type) {
                result.put(type, each);
            }
        }
    }
    return result;
}
|
/** Requesting one fixture type yields exactly that type mapped to its SPI implementation. */
@SuppressWarnings("rawtypes")
@Test
void assertGetServicesByClass() {
    Map<Class<?>, OrderedSPIFixture> actual = OrderedSPILoader.getServicesByClass(OrderedSPIFixture.class, Collections.singleton(OrderedInterfaceFixtureImpl.class));
    assertThat(actual.size(), is(1));
    assertThat(actual.get(OrderedInterfaceFixtureImpl.class), instanceOf(OrderedSPIFixtureImpl.class));
}
|
/**
 * Binds the "resource-groups.config-file" property to this config.
 *
 * @return this instance, for fluent chaining
 */
@Config("resource-groups.config-file")
public FileResourceGroupConfig setConfigFile(String configFile)
{
    this.configFile = configFile;
    return this;
}
|
/** The config-file property must map onto FileResourceGroupConfig.setConfigFile. */
@Test
public void testExplicitPropertyMappings()
{
    Map<String, String> properties = new ImmutableMap.Builder<String, String>()
            .put("resource-groups.config-file", "/test.json")
            .build();

    FileResourceGroupConfig expected = new FileResourceGroupConfig()
            .setConfigFile("/test.json");

    assertFullMapping(properties, expected);
}
|
/**
 * Reads bytes from {@code data} starting at {@code index} up to (but excluding)
 * the first null-string delimiter, or to the end of the array if none is found.
 *
 * @param data  source bytes
 * @param index starting offset
 * @return the bytes before the delimiter (possibly empty)
 */
public static byte[] readNullTerminatedBytes(byte[] data, int index) {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    int i = index;
    while (i < data.length && data[i] != MSC.NULL_TERMINATED_STRING_DELIMITER) {
        buffer.write(data[i]);
        i++;
    }
    return buffer.toByteArray();
}
|
/**
 * A leading delimiter (0) yields an empty result; a non-delimiter byte with no
 * terminator is read through to the end of the array.
 */
@Test
public void testReadNullTerminatedBytes() {
    Assert.assertArrayEquals(new byte[] {},
        ByteHelper.readNullTerminatedBytes(new byte[] {0}, 0));
    Assert.assertArrayEquals(new byte[] {8},
        ByteHelper.readNullTerminatedBytes(new byte[] {8}, 0));
}
|
/**
 * Sets this logger's level, recomputing the effective level (inheriting from the
 * parent when the new level is null), propagating the change to child loggers,
 * and notifying context listeners. Synchronized to keep level/effectiveLevelInt
 * consistent under concurrent configuration.
 *
 * @param newLevel the new level; null means "inherit from parent" (illegal on the root logger)
 * @throws IllegalArgumentException if null is passed for the root logger
 */
public synchronized void setLevel(Level newLevel) {
    if (level == newLevel) {
        // nothing to do;
        return;
    }
    if (newLevel == null && isRootLogger()) {
        throw new IllegalArgumentException("The level of the root logger cannot be set to null");
    }

    level = newLevel;
    if (newLevel == null) {
        // Inherit: take the parent's effective level for both the int cache and the
        // value reported to listeners below.
        effectiveLevelInt = parent.effectiveLevelInt;
        newLevel = parent.getEffectiveLevel();
    } else {
        effectiveLevelInt = newLevel.levelInt;
    }

    if (childrenList != null) {
        int len = childrenList.size();
        for (int i = 0; i < len; i++) {
            Logger child = (Logger) childrenList.get(i);
            // tell child to handle parent levelInt change
            child.handleParentLevelChange(effectiveLevelInt);
        }
    }
    // inform listeners
    loggerContext.fireOnLevelChange(this, newLevel);
}
|
/** Setting root to DEBUG makes DEBUG the effective threshold for the child logger. */
@Test
public void testEnabled_Debug() throws Exception {
    root.setLevel(Level.DEBUG);
    checkLevelThreshold(loggerTest, Level.DEBUG);
}
|
/**
 * Renames {@code src} to {@code dst} over a fresh FTP connection, which is
 * always disconnected afterwards (even on failure).
 *
 * @return true if the rename succeeded
 * @throws IOException on connection or transfer errors
 */
@Override
public boolean rename(Path src, Path dst) throws IOException {
    FTPClient client = connect();
    try {
        return rename(client, src, dst);
    } finally {
        disconnect(client);
    }
}
|
/**
 * Renaming with fully-qualified (scheme-carrying) paths must succeed against
 * the embedded FTP server.
 */
@Test
public void testRenameFileWithFullQualifiedPath() throws Exception {
    BaseUser user = server.addUser("test", "password", new WritePermission());
    Configuration configuration = new Configuration();
    configuration.set("fs.defaultFS", "ftp:///");
    configuration.set("fs.ftp.host", "localhost");
    configuration.setInt("fs.ftp.host.port", server.getPort());
    configuration.set("fs.ftp.user.localhost", user.getName());
    configuration.set("fs.ftp.password.localhost", user.getPassword());
    // Disable FS caching so this test's configuration is actually used.
    configuration.setBoolean("fs.ftp.impl.disable.cache", true);

    FileSystem fs = FileSystem.get(configuration);
    // makeQualified adds the ftp:// scheme and authority to the paths.
    Path ftpDir = fs.makeQualified(new Path(testDir.toAbsolutePath().toString()));
    Path file1 = fs.makeQualified(new Path(ftpDir, "renamefile" + "1"));
    Path file2 = fs.makeQualified(new Path(ftpDir, "renamefile" + "2"));
    touch(fs, file1);
    assertTrue(fs.rename(file1, file2));
}
|
/**
 * Best-effort JSON rendering of an Avro record: falls back to the record's own
 * toString() if JSON conversion fails for any reason. The broad catch is
 * deliberate — this method must never throw.
 */
public static String safeAvroToJsonString(GenericRecord record) {
    try {
        return avroToJsonString(record, false);
    } catch (Exception e) {
        return record.toString();
    }
}
|
/**
 * When a required field (_row_key) is missing, safeAvroToJsonString must not
 * throw; it falls back to the record's toString(), which renders the field as null.
 */
@Test
void testSafeAvroToJsonStringMissingRequiredField() {
    Schema schema = new Schema.Parser().parse(EXAMPLE_SCHEMA);
    GenericRecord record = new GenericData.Record(schema);
    record.put("non_pii_col", "val1");
    record.put("pii_col", "val2");
    record.put("timestamp", 3.5);
    String jsonString = HoodieAvroUtils.safeAvroToJsonString(record);
    assertEquals("{\"timestamp\": 3.5, \"_row_key\": null, \"non_pii_col\": \"val1\", \"pii_col\": \"val2\"}", jsonString);
}
|
/**
 * Queries the broker for all consumer ids registered under the given group.
 *
 * @param addr          broker address
 * @param consumerGroup consumer group name
 * @param timeoutMillis RPC timeout
 * @return the consumer id list reported by the broker
 * @throws MQBrokerException on any non-success response, or a success response with an empty body
 */
public List<String> getConsumerIdListByGroup(
    final String addr,
    final String consumerGroup,
    final long timeoutMillis) throws RemotingConnectException, RemotingSendRequestException, RemotingTimeoutException,
    MQBrokerException, InterruptedException {
    GetConsumerListByGroupRequestHeader requestHeader = new GetConsumerListByGroupRequestHeader();
    requestHeader.setConsumerGroup(consumerGroup);
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.GET_CONSUMER_LIST_BY_GROUP, requestHeader);
    RemotingCommand response = this.remotingClient.invokeSync(MixAll.brokerVIPChannel(this.clientConfig.isVipChannelEnabled(), addr),
        request, timeoutMillis);
    assert response != null;
    switch (response.getCode()) {
        case ResponseCode.SUCCESS: {
            if (response.getBody() != null) {
                GetConsumerListByGroupResponseBody body =
                    GetConsumerListByGroupResponseBody.decode(response.getBody(), GetConsumerListByGroupResponseBody.class);
                return body.getConsumerIdList();
            }
        }
        // Intentional fall-through: a SUCCESS response with a null body is treated
        // as a broker error and reaches the throw below.
        default:
            break;
    }

    throw new MQBrokerException(response.getCode(), response.getRemark(), addr);
}
|
/**
 * With the remoting client mocked to return a SUCCESS response containing one
 * consumer id, the API must decode and return a non-empty id list.
 */
@Test
public void testGetConsumerIdListByGroup() throws Exception {
    doAnswer((Answer<RemotingCommand>) mock -> {
        RemotingCommand request = mock.getArgument(1);
        final RemotingCommand response =
            RemotingCommand.createResponseCommand(GetConsumerListByGroupResponseHeader.class);
        GetConsumerListByGroupResponseBody body = new GetConsumerListByGroupResponseBody();
        body.setConsumerIdList(Collections.singletonList("consumer1"));
        response.setBody(body.encode());
        response.makeCustomHeaderToNet();
        response.setCode(ResponseCode.SUCCESS);
        // Echo the request opaque so the response correlates with the request.
        response.setOpaque(request.getOpaque());
        return response;
    }).when(remotingClient).invokeSync(anyString(), any(RemotingCommand.class), anyLong());

    List<String> consumerIdList = mqClientAPI.getConsumerIdListByGroup(brokerAddr, group, 10000);
    assertThat(consumerIdList).size().isGreaterThan(0);
}
|
/**
 * Returns a copy of this stream with its rate distribution scaled by {@code v};
 * id and key-skew flag are carried over unchanged.
 */
public OutputStream scaleThroughput(double v) {
    return new OutputStream(id, rate.scaleBy(v), areKeysSkewed);
}
|
/**
 * Scaling by 2.0 must double mean/min/max, leave stddev as produced by scaleBy,
 * and preserve id and key-skew flag.
 */
@Test
public void scaleThroughput() {
    OutputStream orig = new OutputStream("ID", new NormalDistStats(100.0, 1.0, 99.0, 101.0), false);
    OutputStream scaled = orig.scaleThroughput(2.0);
    assertEquals(orig.id, scaled.id);
    assertEquals(orig.areKeysSkewed, scaled.areKeysSkewed);
    // BUG FIX: JUnit's assertEquals(double, double, double) is (expected, actual, delta);
    // the original passed the actual value first, which inverts failure messages.
    assertEquals(200.0, scaled.rate.mean, 0.0001);
    assertEquals(1.0, scaled.rate.stddev, 0.0001);
    assertEquals(199.0, scaled.rate.min, 0.0001);
    assertEquals(201.0, scaled.rate.max, 0.0001);
}
|
/**
 * Token-based request filter: authenticates the bearer token against the allowed
 * clients, authorizes the required permission, and attaches the matching client
 * principal to the request.
 *
 * @return empty when the request is allowed; otherwise a 401 (missing token) or
 *         403 (no permission / unknown token / expired / ambiguous match) response
 */
@Override
protected Optional<ErrorResponse> filter(DiscFilterRequest req) {
    var now = clock.instant();
    var bearerToken = requestBearerToken(req).orElse(null);
    if (bearerToken == null) {
        log.fine("Missing bearer token");
        return Optional.of(new ErrorResponse(Response.Status.UNAUTHORIZED, "Unauthorized"));
    }
    // No mappable permission for this request -> forbidden.
    var permission = Permission.getRequiredPermission(req).orElse(null);
    if (permission == null) return Optional.of(new ErrorResponse(Response.Status.FORBIDDEN, "Forbidden"));
    var requestTokenHash = requestTokenHash(bearerToken);
    var clientIds = new TreeSet<String>();
    var permissions = EnumSet.noneOf(Permission.class);
    var matchedTokens = new HashSet<TokenVersion>();
    // Collect every client whose token set contains the (unexpired) request token
    // and whose permissions cover the required one.
    for (Client c : allowedClients) {
        if (!c.permissions().contains(permission)) continue;
        var matchedToken = c.tokens().get(requestTokenHash);
        if (matchedToken == null) continue;
        var expiration = matchedToken.expiration().orElse(null);
        if (expiration != null && now.isAfter(expiration)) continue;
        matchedTokens.add(matchedToken);
        clientIds.add(c.id());
        permissions.addAll(c.permissions());
    }
    if (clientIds.isEmpty()) return Optional.of(new ErrorResponse(Response.Status.FORBIDDEN, "Forbidden"));
    // Reject ambiguous matches: the same hash resolving to distinct token versions.
    if (matchedTokens.size() > 1) {
        log.warning("Multiple tokens matched for request %s"
                .formatted(matchedTokens.stream().map(TokenVersion::id).toList()));
        return Optional.of(new ErrorResponse(Response.Status.FORBIDDEN, "Forbidden"));
    }
    var matchedToken = matchedTokens.stream().findAny().get();
    // Record token metadata in the access log for auditing.
    addAccessLogEntry(req, "token.id", matchedToken.id());
    addAccessLogEntry(req, "token.hash", matchedToken.fingerprint().toDelimitedHexString());
    addAccessLogEntry(req, "token.exp", matchedToken.expiration().map(Instant::toString).orElse("<none>"));
    ClientPrincipal.attachToRequest(req, clientIds, permissions);
    return Optional.empty();
}
|
/**
 * A handler spec that maps GET to an unknown custom action yields no required
 * permission, so the filter must respond 403 even with a valid read token.
 */
@Test
void fails_on_handler_with_custom_request_spec_with_invalid_action() {
    var spec = RequestHandlerSpec.builder()
            .withAclMapping(HttpMethodAclMapping.standard()
                                    .override(Method.GET, Action.custom("custom")).build())
            .build();
    var req = FilterTestUtils.newRequestBuilder()
            .withMethod(Method.GET)
            .withHeader("Authorization", "Bearer " + READ_TOKEN.secretTokenString())
            .withAttribute(RequestHandlerSpec.ATTRIBUTE_NAME, spec)
            .build();
    var responseHandler = new MockResponseHandler();
    newFilterWithClientsConfig().filter(req, responseHandler);
    assertNotNull(responseHandler.getResponse());
    assertEquals(FORBIDDEN, responseHandler.getResponse().getStatus());
}
|
/**
 * Looks up a DataSource via JNDI, temporarily switching the thread's context
 * class loader to this class's loader so JNDI resolution uses the plugin's
 * classes; the original loader is always restored.
 *
 * @throws DataSourceNamingException wrapping any JNDI naming failure
 */
@Override
public DataSource getNamedDataSource( String datasourceName ) throws DataSourceNamingException {
    ClassLoader original = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader( getClass().getClassLoader() );
        return DatabaseUtil.getDataSourceFromJndi( datasourceName, new InitialContext() );
    } catch ( NamingException ex ) {
        throw new DataSourceNamingException( ex );
    } finally {
        Thread.currentThread().setContextClassLoader( original );
    }
}
|
/**
 * Verifies the JNDI lookup never touches the caller's context class loader
 * (it must be swapped for the plugin's loader during the lookup). The empty
 * catch blocks are deliberate: only the class-loader interactions are under test.
 */
@Test
public void testCl() throws NamingException {
    DataSource dataSource = mock( DataSource.class );
    when( context.lookup( testName ) ).thenReturn( dataSource );
    DatabaseUtil util = new DatabaseUtil();
    ClassLoader orig = Thread.currentThread().getContextClassLoader();
    ClassLoader cl = mock( ClassLoader.class );
    try {
        Thread.currentThread().setContextClassLoader( cl );
        util.getNamedDataSource( testName );
    } catch ( Exception ex ) {
    } finally {
        try {
            // The mocked caller loader must never have been used for loading.
            // NOTE(review): Mockito verification failures are AssertionErrors and thus
            // not swallowed by this catch(Exception) — confirm that is relied upon.
            verify( cl, never() ).loadClass( anyString() );
            verify( cl, never() ).getResource( anyString() );
            verify( cl, never() ).getResourceAsStream( anyString() );
        } catch ( Exception ex ) {
        }
        Thread.currentThread().setContextClassLoader( orig );
    }
}
|
/**
 * Discovers Hazelcast members on Azure: one DiscoveryNode per (address, port)
 * combination across the configured port range. Any failure (missing credentials,
 * insufficient role, REST or other errors) is logged and an empty list is
 * returned so the member starts standalone instead of failing.
 */
@Override
public Iterable<DiscoveryNode> discoverNodes() {
    try {
        Collection<AzureAddress> azureAddresses = azureClient.getAddresses();
        logAzureAddresses(azureAddresses);
        List<DiscoveryNode> result = new ArrayList<>();
        for (AzureAddress azureAddress : azureAddresses) {
            for (int port = portRange.getFromPort(); port <= portRange.getToPort(); port++) {
                result.add(createDiscoveryNode(azureAddress, port));
            }
        }
        return result;
    } catch (NoCredentialsException e) {
        // Log configuration problems only once to avoid flooding the logs on retries.
        if (!isKnownExceptionAlreadyLogged) {
            LOGGER.warning("No Azure credentials found! Starting standalone. To use Hazelcast Azure discovery, configure"
                    + " properties (client-id, tenant-id, client-secret) or assign a managed identity to the Azure Compute"
                    + " instance");
            LOGGER.finest(e);
            isKnownExceptionAlreadyLogged = true;
        }
    } catch (RestClientException e) {
        if (e.getHttpErrorCode() == HTTP_FORBIDDEN) {
            if (!isKnownExceptionAlreadyLogged) {
                LOGGER.warning("Required role is not assigned to service principal! To use Hazelcast Azure discovery assign"
                        + " a role to service principal with correct 'Read' permissions. Starting standalone.");
                isKnownExceptionAlreadyLogged = true;
            }
            LOGGER.finest(e);
        } else {
            LOGGER.warning("Cannot discover nodes. Starting standalone.", e);
        }
    } catch (Exception e) {
        LOGGER.warning("Cannot discover nodes. Starting standalone.", e);
    }
    return Collections.emptyList();
}
|
@Test
public void discoverNodesException() {
    // given: the Azure client fails while listing compute instances
    given(azureClient.getAddresses()).willThrow(new RuntimeException("Error while checking Azure instances"));

    // when
    Iterable<DiscoveryNode> discovered = azureDiscoveryStrategy.discoverNodes();

    // then: the strategy swallows the failure and reports no nodes
    assertFalse(discovered.iterator().hasNext());
}
|
@Override
public InetSocketAddress getLocalAddress() {
    // Purely delegates to the backing channel; may be null when the
    // channel has no bound local address.
    final InetSocketAddress localAddress = channel.getLocalAddress();
    return localAddress;
}
|
@Test
void getLocalAddressTest() {
    // The header's backing channel has no bound local address in this fixture.
    final InetSocketAddress localAddress = header.getLocalAddress();
    Assertions.assertNull(localAddress);
}
|
@Override
public void deleteDictData(Long id) {
    // Fail fast if no dictionary data exists for the given id.
    validateDictDataExists(id);
    // Remove the dictionary data record.
    dictDataMapper.deleteById(id);
}
|
@Test
public void testDeleteDictData_success() {
    // Arrange: insert an existing dictionary data row to delete.
    DictDataDO dbDictData = randomDictDataDO();
    dictDataMapper.insert(dbDictData); // seed one existing record first
    // Prepare the argument.
    Long id = dbDictData.getId();
    // Act.
    dictDataService.deleteDictData(id);
    // Assert: the record no longer exists.
    assertNull(dictDataMapper.selectById(id));
}
|
/**
 * Converts a raw string value to an instance of the requested {@code type}.
 * <p>
 * {@code null} passes through untouched. An empty string maps to the
 * type's "zero" value for primitives, {@code ""} for String, and
 * {@code null} for everything else. Unknown types return the raw string.
 *
 * @param type  the target type (primitive, wrapper, String, BigDecimal,
 *              date/time types, or an enum class)
 * @param value the raw string value, may be {@code null}
 * @return the converted value, or the raw string for unsupported types
 */
public static Object convert(Type type, String value) {
    if (null == value) {
        return value;
    }
    if ("".equals(value)) {
        if (type.equals(String.class)) {
            return value;
        }
        // BUGFIX: return a zero of the matching wrapper type for each
        // primitive. The previous code returned an Integer 0 for every
        // numeric primitive, which breaks reflective assignment to
        // short/byte fields (Field.set performs widening only, never
        // narrowing, so Integer -> short throws IllegalArgumentException).
        if (type.equals(int.class)) {
            return 0;
        }
        if (type.equals(long.class)) {
            return 0L;
        }
        if (type.equals(double.class)) {
            return 0d;
        }
        if (type.equals(float.class)) {
            return 0f;
        }
        if (type.equals(short.class)) {
            return (short) 0;
        }
        if (type.equals(byte.class)) {
            return (byte) 0;
        }
        if (type.equals(boolean.class)) {
            return false;
        }
        return null;
    }
    if (type.equals(int.class) || type.equals(Integer.class)) {
        return Integer.parseInt(value);
    } else if (type.equals(String.class)) {
        return value;
    } else if (type.equals(Double.class) || type.equals(double.class)) {
        return Double.parseDouble(value);
    } else if (type.equals(Float.class) || type.equals(float.class)) {
        return Float.parseFloat(value);
    } else if (type.equals(Long.class) || type.equals(long.class)) {
        return Long.parseLong(value);
    } else if (type.equals(Boolean.class) || type.equals(boolean.class)) {
        return Boolean.parseBoolean(value);
    } else if (type.equals(Short.class) || type.equals(short.class)) {
        return Short.parseShort(value);
    } else if (type.equals(Byte.class) || type.equals(byte.class)) {
        return Byte.parseByte(value);
    } else if (type.equals(BigDecimal.class)) {
        return new BigDecimal(value);
    } else if (type.equals(Date.class)) {
        // A 10-char value is a bare date ("yyyy-MM-dd"); otherwise expect
        // a full date-time.
        if (value.length() == 10) {
            return DateKit.toDate(value, "yyyy-MM-dd");
        }
        return DateKit.toDateTime(value, "yyyy-MM-dd HH:mm:ss");
    } else if (type.equals(LocalDate.class)) {
        return DateKit.toLocalDate(value, "yyyy-MM-dd");
    } else if (type.equals(LocalDateTime.class)) {
        return DateKit.toLocalDateTime(value, "yyyy-MM-dd HH:mm:ss");
    } else if (type instanceof Class && ((Class<?>) type).isEnum()) {
        @SuppressWarnings({"unchecked", "rawtypes"})
        Object enumValue = Enum.valueOf((Class) type, value);
        return enumValue;
    }
    // Unknown target type: hand back the raw string unchanged.
    return value;
}
|
@Test
public void testConvert() {
    // Plain string passes through unchanged.
    Object asString = ReflectKit.convert(String.class, "hello");
    Assert.assertEquals("hello", asString);

    // Numeric conversions.
    Object asDecimal = ReflectKit.convert(BigDecimal.class, "20.1");
    Assert.assertEquals(new BigDecimal("20.1"), asDecimal);
    Object asFloat = ReflectKit.convert(Float.class, "2.2");
    Assert.assertEquals(Float.valueOf("2.2"), asFloat);

    // Date/time conversions pick the right target class per input shape.
    Object asDate = ReflectKit.convert(Date.class, "2017-09-09");
    Assert.assertEquals(Date.class, asDate.getClass());
    Object asLocalDate = ReflectKit.convert(LocalDate.class, "2017-09-09");
    Assert.assertEquals(LocalDate.class, asLocalDate.getClass());
    Object asLocalDateTime = ReflectKit.convert(LocalDateTime.class, "2017-09-09 21:22:33");
    Assert.assertEquals(LocalDateTime.class, asLocalDateTime.getClass());
}
|
public boolean isDeletionInProgress(String workflowId) {
return withRetryableQuery(
EXIST_IN_PROGRESS_DELETION_QUERY, stmt -> stmt.setString(1, workflowId), ResultSet::next);
}
|
@Test
public void testIsDeletionInProgress() throws Exception {
  // Register a workflow: no deletion should be in progress yet.
  WorkflowDefinition definition = loadWorkflow(TEST_WORKFLOW_ID1);
  workflowDao.addWorkflowDefinition(definition, definition.getPropertiesSnapshot().extractProperties());
  assertFalse(deletionDao.isDeletionInProgress(TEST_WORKFLOW_ID1));

  // Deleting the workflow must flip the in-progress flag.
  workflowDao.deleteWorkflow(TEST_WORKFLOW_ID1, User.create("tester"));
  assertTrue(deletionDao.isDeletionInProgress(TEST_WORKFLOW_ID1));
}
|
/**
 * Enqueues an asynchronous offset commit and returns a future that
 * completes when the commit request does. An empty offsets map resolves
 * immediately without queuing anything. Errors are translated through
 * {@code commitAsyncExceptionForError} before being surfaced.
 */
public CompletableFuture<Void> commitAsync(final Map<TopicPartition, OffsetAndMetadata> offsets) {
    if (offsets.isEmpty()) {
        log.debug("Skipping commit of empty offsets");
        return CompletableFuture.completedFuture(null);
    }

    final OffsetCommitRequestState request = createOffsetCommitRequest(offsets, Long.MAX_VALUE);
    pendingRequests.addOffsetCommitRequest(request);

    // Adapt the internal request future into the caller-facing one.
    final CompletableFuture<Void> result = new CompletableFuture<>();
    request.future.whenComplete((committedOffsets, error) -> {
        if (error == null) {
            result.complete(null);
        } else {
            result.completeExceptionally(commitAsyncExceptionForError(error));
        }
    });
    return result;
}
|
@Test
public void testPollEnsureManualCommitSent() {
    CommitRequestManager manager = create(false, 0);
    // No requests queued yet: poll must produce nothing.
    assertPoll(0, manager);

    Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
    offsets.put(new TopicPartition("t1", 0), new OffsetAndMetadata(0));
    manager.commitAsync(offsets);

    // The manual commit must surface on the next poll.
    assertPoll(1, manager);
}
|
@Override
public void execute(GraphModel graphModel) {
    isCanceled = false;
    UndirectedGraph graph = graphModel.getUndirectedGraphVisible();

    // Prepare the result columns up front; the strongly-connected column
    // is only relevant for directed graphs.
    Column weakColumn = initializeWeaklyConnectedColumn(graphModel);
    Column strongColumn = isDirected ? initializeStronglyConnectedColumn(graphModel) : null;

    graph.readLock();
    try {
        weaklyConnected(graph, weakColumn);
        if (isDirected) {
            stronglyConnected(graphModel.getDirectedGraphVisible(), graphModel, strongColumn);
        }
    } finally {
        // Always release the read lock, even if the computation fails.
        graph.readUnlock();
    }
}
|
@Test
public void testColumnReplace() {
    // A column with the reserved name but the wrong type (String) already
    // exists; executing the statistic must handle it without throwing.
    GraphModel graphModel = GraphGenerator.generateNullUndirectedGraph(1);
    graphModel.getNodeTable().addColumn(ConnectedComponents.WEAKLY, String.class);

    new ConnectedComponents().execute(graphModel);
}
|
/**
 * Builds a serde for the KAFKA format, which supports at most one column.
 * A column-less schema gets the void serde; more than one column is an
 * error because the format cannot encode multiple fields.
 */
static Serde<List<?>> createSerde(final PersistenceSchema schema) {
  final List<SimpleColumn> columns = schema.columns();
  switch (columns.size()) {
    case 0:
      // No columns: nothing to (de)serialize.
      return new KsqlVoidSerde<>();
    case 1:
      final SimpleColumn column = columns.get(0);
      final Class<?> javaType = SchemaConverters.sqlToJavaConverter()
          .toJavaType(column.type());
      return createSerde(column, javaType);
    default:
      throw new KsqlException("The '" + FormatFactory.KAFKA.name()
          + "' format only supports a single field. Got: " + columns);
  }
}
|
@Test
public void shouldThrowIfDecimal() {
  // Given: a schema whose single column is a DECIMAL, unsupported by KAFKA.
  final PersistenceSchema schema = schemaWithFieldOfType(SqlTypes.decimal(1, 1));

  // When:
  final Exception e = assertThrows(
      KsqlException.class,
      () -> KafkaSerdeFactory.createSerde(schema)
  );

  // Then:
  assertThat(e.getMessage(),
      containsString("The 'KAFKA' format does not support type 'DECIMAL'"));
}
|
/**
 * Exposes JVM thread gauges: one count gauge per {@link Thread.State}
 * (e.g. "runnable.count"), total/daemon/peak/total-started counts, and
 * deadlock information from the deadlock detector.
 *
 * @return an unmodifiable map of gauge name to gauge
 */
@Override
public Map<String, Metric> getMetrics() {
    final Map<String, Metric> gauges = new HashMap<>();
    for (final Thread.State state : Thread.State.values()) {
        // BUGFIX: use a locale-independent lower-casing. State names contain
        // 'I' (RUNNABLE, WAITING, TIMED_WAITING, TERMINATED), so the default
        // locale's toLowerCase() would produce dotless-i names under e.g. a
        // Turkish default locale, silently changing metric names.
        gauges.put(name(state.toString().toLowerCase(java.util.Locale.ROOT), "count"),
                (Gauge<Object>) () -> getThreadCount(state));
    }
    gauges.put("count", (Gauge<Integer>) threads::getThreadCount);
    gauges.put("daemon.count", (Gauge<Integer>) threads::getDaemonThreadCount);
    gauges.put("peak.count", (Gauge<Integer>) threads::getPeakThreadCount);
    gauges.put("total_started.count", (Gauge<Long>) threads::getTotalStartedThreadCount);
    gauges.put("deadlock.count", (Gauge<Integer>) () -> deadlockDetector.getDeadlockedThreads().size());
    gauges.put("deadlocks", (Gauge<Set<String>>) deadlockDetector::getDeadlockedThreads);
    return Collections.unmodifiableMap(gauges);
}
|
@Test
public void hasASetOfGauges() {
    // Exactly these gauge names must be registered: one "<state>.count"
    // per Thread.State plus the aggregate and deadlock gauges.
    assertThat(gauges.getMetrics().keySet())
            .containsOnly("terminated.count",
                          "new.count",
                          "count",
                          "timed_waiting.count",
                          "deadlocks",
                          "blocked.count",
                          "waiting.count",
                          "daemon.count",
                          "runnable.count",
                          "deadlock.count",
                          "total_started.count",
                          "peak.count");
}
|
/**
 * Determines whether delivery of a stanza can be deferred while a client
 * has signalled inactivity (Client State Indication). Only "unimportant"
 * stanzas may be delayed; IQs and anything suggesting user-visible
 * activity (MUC joins/leaves, invitations, encrypted payloads, call
 * signalling) must be delivered immediately.
 *
 * @param stanza the stanza to evaluate (never null)
 * @return true if delivery may be postponed, false if it must go out now
 */
static boolean canDelay(@Nonnull final Packet stanza)
{
    if (stanza instanceof IQ) {
        return false;
    }
    if (stanza instanceof Presence) {
        final Presence presence = (Presence) stanza;
        if (presence.getType() == null || presence.getType() == Presence.Type.unavailable) {
            // Presence updates are generally unimportant, unless it is a MUC self-presence stanza, as that suggests
            // that the user joined or left a room.
            final Element muc = presence.getChildElement("x", "http://jabber.org/protocol/muc#user");
            // Status code 110 marks a MUC self-presence (see XEP-0045).
            final boolean isSelfPresence = muc != null && muc.elements("status").stream().anyMatch(status -> "110".equals(status.attributeValue("code")));
            return !isSelfPresence;
        }
    }
    if (stanza instanceof Message)
    {
        final Message message = (Message) stanza;
        // Only body-less messages are candidates for delay; anything with a
        // body is user-visible content and falls through to 'false' below.
        if (message.getBody() == null) {
            if (message.getType() == Message.Type.groupchat && !message.getElement().elements("subject").isEmpty()) {
                // A subject (which can be empty) is sent to indicate that a room join has completed.
                return false;
            }
            final Element muc = message.getChildElement("x", "http://jabber.org/protocol/muc#user");
            if (muc != null && !muc.elements("invite").isEmpty()) {
                // Invitations to MUC rooms should be shown immediately.
                return false;
            }
            if (!message.getElement().elements("encrypted").isEmpty()) {
                // OMEMO messages never have a body element. We do not know what is being encrypted, but let's assume it's important to err on the side of caution.
                return false;
            }
            if (message.getElement().elements().stream().anyMatch(element -> element.getNamespaceURI().startsWith("urn:xmpp:jingle-message:"))) {
                // Typically, things that have to do with setting up an audio/video call. The user wants to see this as soon as possible, so do not delay. (OF-2750)
                return false;
            }
            // No message body, and none of the exemptions above? It can probably wait.
            return true;
        }
    }
    // Anything else (messages with a body, other presence types, etc.)
    // must be delivered immediately.
    return false;
}
|
@Test
public void testJinglePropose() throws Exception
{
    // Setup test fixture: a body-less Jingle Message Initiation 'propose'
    // (call setup), which must never be delayed.
    final Packet stanza = parse("<message type=\"chat\" id=\"jm-propose-LE3clSJQobTiFcrAoSD52\" to=\"user@example.com\">\n" +
        "  <propose xmlns=\"urn:xmpp:jingle-message:0\" id=\"LE3clSJQobTiFcrAoNLR2A\">\n" +
        "    <description xmlns=\"urn:xmpp:jingle:apps:rtp:1\" media=\"audio\" />\n" +
        "    <description xmlns=\"urn:xmpp:jingle:apps:rtp:1\" media=\"video\" />\n" +
        "  </propose>\n" +
        "  <request xmlns=\"urn:xmpp:receipts\" />\n" +
        "  <store xmlns=\"urn:xmpp:hints\" />\n" +
        "</message>");

    // Execute system under test.
    final boolean canDelay = CsiManager.canDelay(stanza);

    // Verify results.
    assertFalse(canDelay);
}
|
/**
 * Decides whether this task is the leader. By convention the leader is
 * the task assigned the smallest topic-partition (per
 * {@code TopicPartitionComparator}) across all group members.
 *
 * @throws ConnectException if no member has any partition assigned
 */
@VisibleForTesting
boolean isLeader(Collection<MemberDescription> members, Collection<TopicPartition> partitions) {
  final TopicPartition smallestAssigned =
      members.stream()
          .flatMap(m -> m.assignment().topicPartitions().stream())
          .min(new TopicPartitionComparator())
          .orElseThrow(
              () -> new ConnectException("No partitions assigned, cannot determine leader"));
  return partitions.contains(smallestAssigned);
}
|
@Test
public void testIsLeader() {
  CommitterImpl committer = new CommitterImpl();

  // Two members; the smallest assigned partition overall is topic1-0.
  MemberDescription member1 =
      new MemberDescription(
          null,
          Optional.empty(),
          null,
          null,
          new MemberAssignment(
              ImmutableSet.of(new TopicPartition("topic1", 0), new TopicPartition("topic2", 1))));
  MemberDescription member2 =
      new MemberDescription(
          null,
          Optional.empty(),
          null,
          null,
          new MemberAssignment(
              ImmutableSet.of(new TopicPartition("topic2", 0), new TopicPartition("topic1", 1))));
  List<MemberDescription> members = ImmutableList.of(member1, member2);

  // Holding topic1-0 makes this task the leader.
  List<TopicPartition> assignments =
      ImmutableList.of(new TopicPartition("topic2", 1), new TopicPartition("topic1", 0));
  assertThat(committer.isLeader(members, assignments)).isTrue();

  // Without topic1-0, it is not the leader.
  assignments =
      ImmutableList.of(new TopicPartition("topic2", 0), new TopicPartition("topic1", 1));
  assertThat(committer.isLeader(members, assignments)).isFalse();
}
|
/**
 * Builds a configured FEEL 1.1 parser for the given source.
 * Wires the ANTLR lexer/parser pipeline, installs the event-aware error
 * handling, registers extra built-in functions, and pre-loads the symbol
 * table with the caller-supplied variables and (optional) type registry.
 */
public static FEEL_1_1Parser parse(FEELEventListenersManager eventsManager, String source, Map<String, Type> inputVariableTypes, Map<String, Object> inputVariables, Collection<FEELFunction> additionalFunctions, List<FEELProfile> profiles, FEELTypeRegistry typeRegistry) {
    // ANTLR pipeline: source text -> lexer -> token stream -> parser.
    final FEEL_1_1Lexer lexer = new FEEL_1_1Lexer(CharStreams.fromString(source));
    final FEEL_1_1Parser parser = new FEEL_1_1Parser(new CommonTokenStream(lexer));

    // Register any extra built-in functions in the helper's symbol table.
    final ParserHelper helper = new ParserHelper(eventsManager);
    for (FEELFunction function : additionalFunctions) {
        helper.getSymbolTable().getBuiltInScope().define(function.getSymbol());
    }
    parser.setHelper(helper);

    parser.setErrorHandler(new FEELErrorHandler());
    parser.removeErrorListeners(); // drop the default console error listener
    parser.addErrorListener(new FEELParserErrorListener(eventsManager));

    // Pre-load the parser with the caller-supplied variable symbols.
    defineVariables(inputVariableTypes, inputVariables, parser);

    if (typeRegistry != null) {
        helper.setTypeRegistry(typeRegistry);
    }
    return parser;
}
|
@Test
void nestedContexts() {
    String inputExpression = "{ a value : 10,"
            + " an applicant : { "
            + "    first name : \"Edson\", "
            + "    last + name : \"Tirelli\", "
            + "    full name : first name + last + name, "
            + "    address : {"
            + "        street : \"55 broadway st\","
            + "        city : \"New York\" "
            + "    }, "
            + "    xxx: last + name"
            + " } "
            + "}";
    BaseNode ctxbase = parse(inputExpression);

    assertThat(ctxbase).isInstanceOf(ContextNode.class);
    assertThat(ctxbase.getText()).isEqualTo(inputExpression);

    ContextNode ctx = (ContextNode) ctxbase;
    assertThat(ctx.getEntries()).hasSize(2);

    // First entry: "a value : 10" — a numeric literal.
    ContextEntryNode entry = ctx.getEntries().get(0);
    assertThat(entry.getName()).isInstanceOf(NameDefNode.class);
    NameDefNode name = (NameDefNode) entry.getName();
    assertThat(name.getText()).isEqualTo("a value");
    assertThat(entry.getValue()).isInstanceOf(NumberNode.class);
    assertThat(entry.getResultType()).isEqualTo(BuiltInType.NUMBER);
    assertThat(entry.getValue().getText()).isEqualTo("10");

    // Second entry: "an applicant" — a nested context with 5 entries.
    entry = ctx.getEntries().get(1);
    assertThat(entry.getName()).isInstanceOf(NameDefNode.class);
    name = (NameDefNode) entry.getName();
    assertThat(name.getText()).isEqualTo("an applicant");
    assertThat(entry.getValue()).isInstanceOf(ContextNode.class);

    ContextNode applicant = (ContextNode) entry.getValue();
    assertThat(applicant.getEntries()).hasSize(5);
    assertThat(applicant.getEntries().get(0).getName().getText()).isEqualTo("first name");
    assertThat(applicant.getEntries().get(0).getResultType()).isEqualTo(BuiltInType.STRING);
    assertThat(applicant.getEntries().get(1).getName().getText()).isEqualTo("last + name");
    assertThat(applicant.getEntries().get(1).getResultType()).isEqualTo(BuiltInType.STRING);
    assertThat(applicant.getEntries().get(2).getName().getText()).isEqualTo("full name");
    assertThat(applicant.getEntries().get(2).getResultType()).isEqualTo(BuiltInType.STRING);
    assertThat(applicant.getEntries().get(3).getName().getText()).isEqualTo("address");
    assertThat(applicant.getEntries().get(3).getValue()).isInstanceOf(ContextNode.class);

    // The "address" entry is itself a nested context with 2 string entries.
    ContextNode address = (ContextNode) applicant.getEntries().get(3).getValue();
    assertThat(address.getEntries()).hasSize(2);
    assertThat(address.getEntries().get(0).getName().getText()).isEqualTo("street");
    assertThat(address.getEntries().get(0).getResultType()).isEqualTo(BuiltInType.STRING);
    assertThat(address.getEntries().get(1).getName().getText()).isEqualTo("city");
    // BUGFIX: previously re-checked index 0 ("street") instead of the
    // "city" entry at index 1 — the city entry's type was never asserted.
    assertThat(address.getEntries().get(1).getResultType()).isEqualTo(BuiltInType.STRING);
}
|
/**
 * Deep-copies {@code orig} into a new instance of {@code c} by
 * round-tripping through Gson JSON serialization. A temporary MetaContext
 * pinned to the current meta version is installed for the duration of the
 * copy and the previous one is restored afterwards.
 *
 * @return the copied object, or {@code null} if serialization fails
 */
public static <T> T copyWithGson(Object orig, Class<T> c) {
    // Remember the caller's MetaContext so it can be restored on exit.
    MetaContext previousContext = MetaContext.get();
    MetaContext copyContext = new MetaContext();
    copyContext.setStarRocksMetaVersion(FeConstants.STARROCKS_META_VERSION);
    copyContext.setThreadLocalInfo();
    try {
        return GsonUtils.GSON.fromJson(GsonUtils.GSON.toJson(orig), c);
    } catch (Exception e) {
        // Best-effort copy: log and signal failure with null.
        LOG.warn("failed to copy object.", e);
        return null;
    } finally {
        // Restore (or clear) the thread-local MetaContext.
        if (previousContext != null) {
            previousContext.setThreadLocalInfo();
        } else {
            MetaContext.remove();
        }
    }
}
|
@Test
public void testCopyWithJson() {
    GsonSerializationTest.OrigClassA classA = new GsonSerializationTest.OrigClassA(1);
    GsonSerializationTest.OrigClassA copied = DeepCopy.copyWithGson(classA, GsonSerializationTest.OrigClassA.class);
    // Use the dedicated assertion instead of assertTrue(x != null) for a
    // clearer failure message.
    Assert.assertNotNull(copied);
    // The copy must preserve both top-level and nested fields.
    Assert.assertEquals(classA.flag, copied.flag);
    Assert.assertEquals(classA.classA1.flag, copied.classA1.flag);
}
|
/**
 * Pages through roles, optionally filtered by username and/or role name.
 * Blank filters are ignored; both SQL fragments share the same WHERE
 * clause so the count matches the fetched page.
 */
@Override
public Page<RoleInfo> getRolesByUserNameAndRoleName(String username, String role, int pageNo, int pageSize) {
    AuthPaginationHelper<RoleInfo> helper = createPaginationHelper();

    String sqlCountRows = "SELECT count(*) FROM roles ";
    String sqlFetchRows = "SELECT role,username FROM roles ";

    // Build the dynamic WHERE clause with positional parameters.
    StringBuilder where = new StringBuilder(" WHERE 1 = 1 ");
    List<String> params = new ArrayList<>();
    if (StringUtils.isNotBlank(username)) {
        where.append(" AND username = ? ");
        params.add(username);
    }
    if (StringUtils.isNotBlank(role)) {
        where.append(" AND role = ? ");
        params.add(role);
    }

    try {
        return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize,
                ROLE_INFO_ROW_MAPPER);
    } catch (CannotGetJdbcConnectionException e) {
        // Connection-level failures are fatal for auth queries; log and rethrow.
        LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e);
        throw e;
    }
}
|
@Test
void testGetRolesByUserName() {
    // Paged lookup must always return a Page object, even with no matches.
    Page<RoleInfo> page = externalRolePersistService.getRolesByUserNameAndRoleName("userName", "roleName", 1, 10);
    assertNotNull(page);
}
|
/**
 * Looks up the binary protocol value handler for a PostgreSQL column type,
 * failing fast with a descriptive message for unmapped types.
 */
public static PostgreSQLBinaryProtocolValue getBinaryProtocolValue(final BinaryColumnType binaryColumnType) {
    boolean known = BINARY_PROTOCOL_VALUES.containsKey(binaryColumnType);
    Preconditions.checkArgument(known, "Cannot find PostgreSQL type '%s' in column type when process binary protocol value", binaryColumnType);
    return BINARY_PROTOCOL_VALUES.get(binaryColumnType);
}
|
@Test
void assertGetDateBinaryProtocolValue() {
    // The DATE column type must map to the date-specific handler.
    PostgreSQLBinaryProtocolValue binaryProtocolValue = PostgreSQLBinaryProtocolValueFactory.getBinaryProtocolValue(PostgreSQLColumnType.DATE);
    assertThat(binaryProtocolValue, instanceOf(PostgreSQLDateBinaryProtocolValue.class));
}
|
/**
 * Parses an XML document from a string by delegating to the stream-based
 * overload.
 *
 * @param xml the XML content
 * @return the parsed document, or null if parsing is suppressed
 * @throws SAXException on parse errors
 * @throws IOException on I/O (including encoding) errors
 */
public static Document loadXMLFrom( String xml ) throws SAXException, IOException {
  // BUGFIX: encode with an explicit UTF-8 charset. The no-arg getBytes()
  // uses the platform default charset, which can mangle non-ASCII content
  // and disagree with the XML prolog's declared encoding.
  // (UnsupportedEncodingException is an IOException, already declared.)
  return loadXMLFrom( new ByteArrayInputStream( xml.getBytes( "UTF-8" ) ) );
}
|
@Test( timeout = 2000 )
public void whenLoadingMaliciousXmlFromInputStreamParsingEndsWithNoErrorAndNullValueIsReturned() throws Exception {
    // XXE/entity-expansion payloads must be rejected quietly: parsing
    // returns null instead of throwing or hanging (hence the 2s timeout).
    assertNull( PDIImportUtil.loadXMLFrom( MALICIOUS_XML ) );
}
|
/**
 * Builds an Iceberg partition spec from a list of partition-by clauses.
 * Each clause is either a bare column name (identity partitioning) or a
 * transform call like {@code year(ts)}, {@code bucket(id, 16)} or
 * {@code truncate(name, 4)}.
 *
 * @throws UnsupportedOperationException for an unrecognized transform name
 */
static PartitionSpec createPartitionSpec(
    org.apache.iceberg.Schema schema, List<String> partitionBy) {
  if (partitionBy.isEmpty()) {
    return PartitionSpec.unpartitioned();
  }
  PartitionSpec.Builder builder = PartitionSpec.builderFor(schema);
  for (String partitionField : partitionBy) {
    Matcher matcher = TRANSFORM_REGEX.matcher(partitionField);
    if (!matcher.matches()) {
      // Bare column name: identity partitioning.
      builder.identity(partitionField);
      continue;
    }
    String transform = matcher.group(1);
    String arg = matcher.group(2);
    switch (transform) {
      case "year":
      case "years":
        builder.year(arg);
        break;
      case "month":
      case "months":
        builder.month(arg);
        break;
      case "day":
      case "days":
        builder.day(arg);
        break;
      case "hour":
      case "hours":
        builder.hour(arg);
        break;
      case "bucket":
        {
          // bucket(column, numBuckets)
          Pair<String, Integer> args = transformArgPair(arg);
          builder.bucket(args.first(), args.second());
          break;
        }
      case "truncate":
        {
          // truncate(column, width)
          Pair<String, Integer> args = transformArgPair(arg);
          builder.truncate(args.first(), args.second());
          break;
        }
      default:
        throw new UnsupportedOperationException("Unsupported transform: " + transform);
    }
  }
  return builder.build();
}
|
@Test
public void testCreatePartitionSpecUnpartitioned() {
  // An empty partition-by list must yield the unpartitioned spec.
  PartitionSpec actual = SchemaUtils.createPartitionSpec(SCHEMA_FOR_SPEC, ImmutableList.of());
  assertThat(actual.isPartitioned()).isFalse();
}
|
/**
 * Returns whether this builder has consumed any cell values so far.
 */
public boolean hasValues() {
    return hasValues;
}
|
@Test
public void testEmptyCells() {
    // A freshly constructed builder has consumed no cell values yet.
    builder = new LhsBuilder(9, 1, "Person");

    assertThat(builder.hasValues()).isFalse();
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.