focal_method stringlengths 13 60.9k | test_case stringlengths 25 109k |
|---|---|
public static void onFail(final ServerMemberManager manager, final Member member) {
// To avoid null pointer judgments, pass in one NONE_EXCEPTION
onFail(manager, member, ExceptionUtil.NONE_EXCEPTION);
} | @Test
void testMemberOnFailWhenReachMaxFailAccessCnt() {
final Member remote = buildMember();
mockMemberAddressInfos.add(remote.getAddress());
remote.setState(NodeState.SUSPICIOUS);
remote.setFailAccessCnt(2);
MemberUtil.onFail(memberManager, remote);
assertEquals(3, remote.getFailAccessCnt());
assertEquals(NodeState.SUSPICIOUS, remote.getState());
verify(memberManager, never()).notifyMemberChange(remote);
assertTrue(mockMemberAddressInfos.isEmpty());
MemberUtil.onFail(memberManager, remote);
assertEquals(4, remote.getFailAccessCnt());
assertEquals(NodeState.DOWN, remote.getState());
verify(memberManager).notifyMemberChange(remote);
} |
public static PathOutputCommitter createCommitter(Path outputPath,
TaskAttemptContext context) throws IOException {
return getCommitterFactory(outputPath,
context.getConfiguration())
.createOutputCommitter(outputPath, context);
} | @Test
public void testNamedCommitterNullPath() throws Throwable {
Configuration conf = new Configuration();
// set up for the schema factory
conf.set(COMMITTER_FACTORY_CLASS, NAMED_COMMITTER_FACTORY);
conf.set(NAMED_COMMITTER_CLASS, SimpleCommitter.class.getName());
SimpleCommitter sc = createCommitter(
SimpleCommitter.class,
null, taskAttempt(conf));
assertNull(sc.getOutputPath());
} |
public RuntimeOptionsBuilder parse(String... args) {
return parse(Arrays.asList(args));
} | @Test
void strips_line_filters_from_feature_paths_and_put_them_among_line_filters() {
RuntimeOptions options = parser
.parse("somewhere_else.feature:3")
.build();
assertAll(
() -> assertThat(options.getFeaturePaths(), contains(new File("somewhere_else.feature").toURI())),
() -> assertThat(options.getLineFilters(),
hasEntry(new File("somewhere_else.feature").toURI(), singleton(3))));
} |
@Override
public void validateDeleteGroup() throws ApiException {
switch (currentState()) {
case DEAD:
throw new GroupIdNotFoundException(String.format("Group %s is in dead state.", groupId));
case STABLE:
case PREPARING_REBALANCE:
case COMPLETING_REBALANCE:
throw Errors.NON_EMPTY_GROUP.exception();
default:
}
} | @Test
public void testValidateDeleteGroup() {
group.transitionTo(PREPARING_REBALANCE);
assertThrows(GroupNotEmptyException.class, group::validateDeleteGroup);
group.transitionTo(COMPLETING_REBALANCE);
assertThrows(GroupNotEmptyException.class, group::validateDeleteGroup);
group.transitionTo(STABLE);
assertThrows(GroupNotEmptyException.class, group::validateDeleteGroup);
group.transitionTo(PREPARING_REBALANCE);
group.transitionTo(EMPTY);
assertDoesNotThrow(group::validateDeleteGroup);
group.transitionTo(DEAD);
assertThrows(GroupIdNotFoundException.class, group::validateDeleteGroup);
} |
public static PostgreSQLCommandPacket newInstance(final PostgreSQLCommandPacketType commandPacketType, final PostgreSQLPacketPayload payload) {
if (!PostgreSQLCommandPacketType.isExtendedProtocolPacketType(commandPacketType)) {
payload.getByteBuf().skipBytes(1);
return getPostgreSQLCommandPacket(commandPacketType, payload);
}
List<PostgreSQLCommandPacket> result = new ArrayList<>();
while (payload.hasCompletePacket()) {
PostgreSQLCommandPacketType type = PostgreSQLCommandPacketType.valueOf(payload.readInt1());
int length = payload.getByteBuf().getInt(payload.getByteBuf().readerIndex());
PostgreSQLPacketPayload slicedPayload = new PostgreSQLPacketPayload(payload.getByteBuf().readSlice(length), payload.getCharset());
result.add(getPostgreSQLCommandPacket(type, slicedPayload));
}
return new PostgreSQLAggregatedCommandPacket(result);
} | @Test
void assertNewInstanceWithBindComPacket() {
assertThat(PostgreSQLCommandPacketFactory.newInstance(PostgreSQLCommandPacketType.BIND_COMMAND, payload), instanceOf(PostgreSQLAggregatedCommandPacket.class));
} |
public void generate() throws IOException {
Path currentWorkingDir = Paths.get("").toAbsolutePath();
final InputStream rawDoc = Files.newInputStream(currentWorkingDir.resolve(clientParameters.inputFile));
BufferedReader reader = new BufferedReader(new InputStreamReader(rawDoc));
long i = 1;
while (reader.ready()) {
String line = reader.readLine();
JsonReader jsonReader = new JsonReader(types, new ByteArrayInputStream(Utf8.toBytes(line)), parserFactory);
String wikimediaId = "id:wikimedia:" + languageTag.languageCode() + "::" + i;
ParsedDocumentOperation operation = jsonReader.readSingleDocumentStreaming(DocumentOperationType.PUT, wikimediaId);
DocumentPut put = (DocumentPut) operation.operation();
Document document = put.getDocument();
FieldValue fieldValue = document.getFieldValue(clientParameters.field);
this.handleTokenization(fieldValue.toString());
if (i % 50000 == 0) {
System.out.println("Documents processed: " + i + ", unique words: " + documentFrequency.size());
}
i++;
}
long pageCount = i - 1;
System.out.println("Total documents processed: " + pageCount + ", unique words: " + documentFrequency.size());
SignificanceModelFile modelFile;
File outputFile = Paths.get(clientParameters.outputFile).toFile();
String languagesKey = String.join(",", this.languages.stream().map(Language::languageCode).toList());
if (outputFile.exists()) {
InputStream in = outputFile.toString().endsWith(".zst") ?
new ZstdInputStream(new FileInputStream(outputFile)) :
new FileInputStream(outputFile);
modelFile = objectMapper.readValue(in, SignificanceModelFile.class);
modelFile.addLanguage(languagesKey, new DocumentFrequencyFile(DOC_FREQ_DESCRIPTION, pageCount, getFinalDocumentFrequency()));
} else {
HashMap<String, DocumentFrequencyFile> languages = new HashMap<>() {{
put(languagesKey, new DocumentFrequencyFile(DOC_FREQ_DESCRIPTION, pageCount, getFinalDocumentFrequency()));
}};
modelFile = new SignificanceModelFile(VERSION, ID, SIGNIFICANCE_DESCRIPTION + clientParameters.inputFile, languages);
}
try {
ObjectWriter writer = objectMapper.writerWithDefaultPrettyPrinter();
OutputStream out = useZstCompression ?
new ZstdOutputStream(new FileOutputStream(clientParameters.outputFile)) :
new FileOutputStream(clientParameters.outputFile);
writer.writeValue(out, modelFile);
} catch (IOException e) {
throw new IllegalStateException("Failed to write model to output file", e);
}
} | @Test
void testGenerateSimpleFileWithZST() throws IOException {
String inputPath = "no.jsonl";
ClientParameters params1 = createParameters(inputPath, "output.json", "text", "nb", "nb", "true").build();
// Throws exception when outputfile does not have .zst extension when using zst compression
assertThrows(IllegalArgumentException.class, () -> createSignificanceModelGenerator(params1));
String outputPath = "output.json.zst";
ClientParameters params = createParameters(inputPath, outputPath, "text", "nb", "nb", "true").build();
SignificanceModelGenerator generator = createSignificanceModelGenerator(params);
generator.generate();
File outputFile = new File(tempDir.resolve(outputPath ).toString());
assertTrue(outputFile.exists());
InputStream in = new ZstdInputStream(new FileInputStream(outputFile));
SignificanceModelFile modelFile = objectMapper.readValue(in, SignificanceModelFile.class);
HashMap<String, DocumentFrequencyFile> languages = modelFile.languages();
assertEquals(1, languages.size());
assertTrue(languages.containsKey("nb"));
DocumentFrequencyFile documentFrequencyFile = languages.get("nb");
assertEquals(3, documentFrequencyFile.frequencies().get("fra"));
assertEquals(3, documentFrequencyFile.frequencies().get("skriveform"));
assertEquals(3, documentFrequencyFile.frequencies().get("kategori"));
assertEquals(3, documentFrequencyFile.frequencies().get("eldr"));
} |
public ConsumerBuilderImpl(PulsarClientImpl client, Schema<T> schema) {
this(client, new ConsumerConfigurationData<T>(), schema);
} | @Test
public void testConsumerBuilderImpl() throws PulsarClientException {
Consumer consumer = mock(Consumer.class);
when(consumerBuilderImpl.subscribeAsync())
.thenReturn(CompletableFuture.completedFuture(consumer));
assertNotNull(consumerBuilderImpl.topic(TOPIC_NAME).subscribe());
} |
private Pair<LogicalSchema, List<SelectExpression>> build(
final MetaStore metaStore
) {
final LogicalSchema parentSchema = getSource().getSchema();
final Optional<LogicalSchema> targetSchema = getTargetSchema(metaStore);
final List<SelectExpression> selectExpressions = SelectionUtil
.buildSelectExpressions(getSource(), projection.selectItems(), targetSchema);
final LogicalSchema schema =
SelectionUtil.buildProjectionSchema(parentSchema, selectExpressions, metaStore);
if (into.isPresent()) {
// Persistent queries have key columns as value columns - final projection can exclude them:
final Map<ColumnName, Set<ColumnName>> seenKeyColumns = new HashMap<>();
selectExpressions.removeIf(se -> {
if (se.getExpression() instanceof UnqualifiedColumnReferenceExp) {
final ColumnName columnName = ((UnqualifiedColumnReferenceExp) se.getExpression())
.getColumnName();
// Window bounds columns are currently removed if not aliased:
if (SystemColumns.isWindowBound(columnName) && se.getAlias().equals(columnName)) {
return true;
}
if (parentSchema.isKeyColumn(columnName)) {
seenKeyColumns.computeIfAbsent(columnName, k -> new HashSet<>()).add(se.getAlias());
return true;
}
}
return false;
});
for (final Entry<ColumnName, Set<ColumnName>> seenKey : seenKeyColumns.entrySet()) {
if (seenKey.getValue().size() > 1) {
final String keys = GrammaticalJoiner.and().join(
seenKey.getValue().stream().map(Name::text).sorted());
throw new KsqlException("The projection contains a key column (" + seenKey.getKey()
+ ") more than once, aliased as: "
+ keys + "."
+ System.lineSeparator()
+ "Each key column must only be in the projection once. "
+ "If you intended to copy the key into the value, then consider using the "
+ AsValue.NAME + " function to indicate which key reference should be copied."
);
}
}
}
final LogicalSchema nodeSchema;
if (into.isPresent()) {
nodeSchema = schema.withoutPseudoAndKeyColsInValue();
} else {
// Transient queries return key columns in the value, so the projection includes them, and
// the schema needs to include them too:
final Builder builder = LogicalSchema.builder();
builder.keyColumns(parentSchema.key());
schema.columns()
.forEach(builder::valueColumn);
nodeSchema = builder.build();
}
return Pair.of(nodeSchema, selectExpressions);
} | @Test
public void shouldNotThrowOnSyntheticKeyColumnInProjection() {
// Given:
clearInvocations(source);
final UnqualifiedColumnReferenceExp syntheticKeyRef =
new UnqualifiedColumnReferenceExp(ColumnName.of("ROWKEY"));
selects = ImmutableList.of(new SingleColumn(syntheticKeyRef, Optional.of(ALIAS)));
// When:
new FinalProjectNode(
NODE_ID,
source,
selects,
Optional.of(into),
metaStore
);
// Then:
verify(source).validateColumns(RequiredColumns.builder().add(syntheticKeyRef).build());
} |
public Protocol forName(final String identifier) {
return this.forName(identifier, null);
} | @Test
public void testOverrideBundledProtocols() {
final TestProtocol baseProtocol = new TestProtocol(Scheme.http) {
@Override
public String getProvider() {
return "test-provider1";
}
@Override
public boolean isBundled() {
return false;
}
};
final TestProtocol overrideProtocol = new TestProtocol(Scheme.http) {
@Override
public String getProvider() {
return "test-provider2";
}
@Override
public boolean isBundled() {
return false;
}
};
final ProtocolFactory f = new ProtocolFactory(Stream.of(baseProtocol, overrideProtocol).collect(Collectors.toSet()));
assertEquals(overrideProtocol, f.forName("test", "test-provider2"));
assertEquals(baseProtocol, f.forName("test", "test-provider1"));
} |
String messageFromFile(Locale locale, String filename, String relatedProperty) {
String result = null;
String bundleBase = propertyToBundles.get(relatedProperty);
if (bundleBase == null) {
// this property has no translation
return null;
}
String filePath = bundleBase.replace('.', '/');
if (!"en".equals(locale.getLanguage())) {
filePath += "_" + locale.getLanguage();
}
filePath += "/" + filename;
InputStream input = classloader.getResourceAsStream(filePath);
if (input != null) {
result = readInputStream(filePath, input);
}
return result;
} | @Test
public void return_null_if_rule_not_internationalized() {
String html = underTest.messageFromFile(Locale.ENGLISH, "UnknownRule.html", "foo.rule1.name");
assertThat(html).isNull();
} |
@Override
public void createFunction(SqlInvokedFunction function, boolean replace)
{
checkCatalog(function);
checkFunctionLanguageSupported(function);
checkArgument(!function.hasVersion(), "function '%s' is already versioned", function);
QualifiedObjectName functionName = function.getFunctionId().getFunctionName();
checkFieldLength("Catalog name", functionName.getCatalogName(), MAX_CATALOG_NAME_LENGTH);
checkFieldLength("Schema name", functionName.getSchemaName(), MAX_SCHEMA_NAME_LENGTH);
if (!functionNamespaceDao.functionNamespaceExists(functionName.getCatalogName(), functionName.getSchemaName())) {
throw new PrestoException(NOT_FOUND, format("Function namespace not found: %s", functionName.getCatalogSchemaName()));
}
checkFieldLength("Function name", functionName.getObjectName(), MAX_FUNCTION_NAME_LENGTH);
if (function.getParameters().size() > MAX_PARAMETER_COUNT) {
throw new PrestoException(GENERIC_USER_ERROR, format("Function has more than %s parameters: %s", MAX_PARAMETER_COUNT, function.getParameters().size()));
}
for (Parameter parameter : function.getParameters()) {
checkFieldLength("Parameter name", parameter.getName(), MAX_PARAMETER_NAME_LENGTH);
}
checkFieldLength(
"Parameter type list",
function.getFunctionId().getArgumentTypes().stream()
.map(TypeSignature::toString)
.collect(joining(",")),
MAX_PARAMETER_TYPES_LENGTH);
checkFieldLength("Return type", function.getSignature().getReturnType().toString(), MAX_RETURN_TYPE_LENGTH);
jdbi.useTransaction(handle -> {
FunctionNamespaceDao transactionDao = handle.attach(functionNamespaceDaoClass);
Optional<SqlInvokedFunctionRecord> latestVersion = transactionDao.getLatestRecordForUpdate(hash(function.getFunctionId()), function.getFunctionId());
if (!replace && latestVersion.isPresent() && !latestVersion.get().isDeleted()) {
throw new PrestoException(ALREADY_EXISTS, "Function already exists: " + function.getFunctionId());
}
if (!latestVersion.isPresent() || !latestVersion.get().getFunction().hasSameDefinitionAs(function)) {
long newVersion = latestVersion.map(SqlInvokedFunctionRecord::getFunction).map(MySqlFunctionNamespaceManager::getLongVersion).orElse(0L) + 1;
insertSqlInvokedFunction(transactionDao, function, newVersion);
}
else if (latestVersion.get().isDeleted()) {
SqlInvokedFunction latest = latestVersion.get().getFunction();
checkState(latest.hasVersion(), "Function version missing: %s", latest.getFunctionId());
transactionDao.setDeletionStatus(hash(latest.getFunctionId()), latest.getFunctionId(), getLongVersion(latest), false);
}
});
refreshFunctionsCache(functionName);
} | @Test(expectedExceptions = PrestoException.class, expectedExceptionsMessageRegExp = "Function name exceeds max length of 256.*")
public void testFunctionNameTooLong()
{
QualifiedObjectName functionName = QualifiedObjectName.valueOf(TEST_CATALOG, TEST_SCHEMA, dummyString(257));
createFunction(createFunctionTangent(functionName), false);
} |
public static URI parse(String gluePath) {
requireNonNull(gluePath, "gluePath may not be null");
if (gluePath.isEmpty()) {
return rootPackageUri();
}
// Legacy from the Cucumber Eclipse plugin
// Older versions of Cucumber allowed it.
if (CLASSPATH_SCHEME_PREFIX.equals(gluePath)) {
return rootPackageUri();
}
if (nonStandardPathSeparatorInUse(gluePath)) {
String standardized = replaceNonStandardPathSeparator(gluePath);
return parseAssumeClasspathScheme(standardized);
}
if (isProbablyPackage(gluePath)) {
String path = resourceNameOfPackageName(gluePath);
return parseAssumeClasspathScheme(path);
}
return parseAssumeClasspathScheme(gluePath);
} | @Test
void can_parse_package_form() {
URI uri = GluePath.parse("com.example.app");
assertAll(
() -> assertThat(uri.getScheme(), is("classpath")),
() -> assertThat(uri.getSchemeSpecificPart(), is("/com/example/app")));
} |
@Override
public void doBeforeRequest(String remoteAddr, RemotingCommand request) {
if (StringUtils.isNotEmpty(clientConfig.getNamespaceV2())) {
request.addExtField(MixAll.RPC_REQUEST_HEADER_NAMESPACED_FIELD, "true");
request.addExtField(MixAll.RPC_REQUEST_HEADER_NAMESPACE_FIELD, clientConfig.getNamespaceV2());
}
} | @Test
public void testDoBeforeRequestWithNamespace() {
clientConfig = new ClientConfig();
clientConfig.setNamespaceV2(namespace);
namespaceRpcHook = new NamespaceRpcHook(clientConfig);
PullMessageRequestHeader pullMessageRequestHeader = new PullMessageRequestHeader();
RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.PULL_MESSAGE, pullMessageRequestHeader);
namespaceRpcHook.doBeforeRequest("", request);
assertThat(request.getExtFields().get(MixAll.RPC_REQUEST_HEADER_NAMESPACED_FIELD)).isEqualTo("true");
assertThat(request.getExtFields().get(MixAll.RPC_REQUEST_HEADER_NAMESPACE_FIELD)).isEqualTo(namespace);
} |
public static Map<String, Map<String, InetSocketAddress>>
getNNServiceRpcAddressesForCluster(Configuration conf) throws IOException {
// Use default address as fall back
String defaultAddress;
try {
defaultAddress = NetUtils.getHostPortString(
DFSUtilClient.getNNAddress(conf));
} catch (IllegalArgumentException e) {
defaultAddress = null;
}
Collection<String> parentNameServices = getParentNameServices(conf);
Map<String, Map<String, InetSocketAddress>> addressList =
getAddressesForNsIds(conf, parentNameServices,
defaultAddress,
DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
DFS_NAMENODE_RPC_ADDRESS_KEY);
if (addressList.isEmpty()) {
throw new IOException("Incorrect configuration: namenode address "
+ DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY + "." + parentNameServices
+ " or "
+ DFS_NAMENODE_RPC_ADDRESS_KEY + "." + parentNameServices
+ " is not configured.");
}
return addressList;
} | @Test
public void testErrorMessageForInvalidNameservice() throws Exception {
Configuration conf = new HdfsConfiguration();
conf.set(DFSConfigKeys.DFS_NAMESERVICES, "ns1, ns2");
String expectedErrorMessage = "Incorrect configuration: namenode address "
+ DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY + ".[ns1, ns2]"
+ " or "
+ DFS_NAMENODE_RPC_ADDRESS_KEY + ".[ns1, ns2]"
+ " is not configured.";
LambdaTestUtils.intercept(IOException.class, expectedErrorMessage,
()->DFSUtil.getNNServiceRpcAddressesForCluster(conf));
} |
@Override
public void write(byte[] b, int off, int len) throws IOException {
out.write(b, off, len);
} | @Test
public void testWrite() throws IOException {
CallCountOutputStream fsCount = new CallCountOutputStream();
FilterOutputStream fs = new FilterOutputStream(fsCount);
CallCountOutputStream osCount = new CallCountOutputStream();
UnownedOutputStream os = new UnownedOutputStream(osCount);
byte[] data = "Hello World!".getBytes(StandardCharsets.UTF_8);
fs.write(data, 0, data.length);
os.write(data, 0, data.length);
fs.write('\n');
os.write('\n');
assertEquals(13, fsCount.callCnt);
assertEquals(2, osCount.callCnt);
assertArrayEquals(fsCount.toByteArray(), osCount.toByteArray());
} |
public Map<String, List<TopicPartitionInfo>> getTopicPartitionInfo(final Set<String> topics) {
log.debug("Starting to describe topics {} in partition assignor.", topics);
long currentWallClockMs = time.milliseconds();
final long deadlineMs = currentWallClockMs + retryTimeoutMs;
final Set<String> topicsToDescribe = new HashSet<>(topics);
final Map<String, List<TopicPartitionInfo>> topicPartitionInfo = new HashMap<>();
while (!topicsToDescribe.isEmpty()) {
final Map<String, List<TopicPartitionInfo>> existed = getTopicPartitionInfo(topicsToDescribe, null);
topicPartitionInfo.putAll(existed);
topicsToDescribe.removeAll(topicPartitionInfo.keySet());
if (!topicsToDescribe.isEmpty()) {
currentWallClockMs = time.milliseconds();
if (currentWallClockMs >= deadlineMs) {
final String timeoutError = String.format(
"Could not create topics within %d milliseconds. " +
"This can happen if the Kafka cluster is temporarily not available.",
retryTimeoutMs);
log.error(timeoutError);
throw new TimeoutException(timeoutError);
}
log.info(
"Topics {} could not be describe fully. Will retry in {} milliseconds. Remaining time in milliseconds: {}",
topics,
retryBackOffMs,
deadlineMs - currentWallClockMs
);
Utils.sleep(retryBackOffMs);
}
}
log.debug("Completed describing topics");
return topicPartitionInfo;
} | @Test
public void shouldReturnCorrectPartitionInfo() {
final TopicPartitionInfo topicPartitionInfo = new TopicPartitionInfo(0, broker1, singleReplica, Collections.emptyList());
mockAdminClient.addTopic(
false,
topic1,
Collections.singletonList(topicPartitionInfo),
null);
final Map<String, List<TopicPartitionInfo>> ret = internalTopicManager.getTopicPartitionInfo(Collections.singleton(topic1));
assertEquals(Collections.singletonMap(topic1, Collections.singletonList(topicPartitionInfo)), ret);
} |
@Override
public Instant deserialize(JsonElement json, Type type, JsonDeserializationContext context) throws JsonParseException {
if (!(json instanceof JsonPrimitive))
throw new JsonParseException("The instant should be a string value");
else {
Instant time = deserializeToInstant(json.getAsString());
if (type == Instant.class)
return time;
else
throw new IllegalArgumentException(this.getClass() + " cannot be deserialized to " + type);
}
} | @Test
public void testDeserialize() {
assertEquals(
LocalDateTime.of(2017, 6, 8, 4, 26, 33)
.atOffset(ZoneOffset.UTC).toInstant(),
InstantTypeAdapter.deserializeToInstant("2017-06-08T04:26:33+0000"));
assertEquals(
LocalDateTime.of(2021, 1, 3, 0, 53, 34)
.atOffset(ZoneOffset.UTC).toInstant(),
InstantTypeAdapter.deserializeToInstant("2021-01-03T00:53:34+00:00"));
assertEquals(
LocalDateTime.of(2021, 1, 3, 0, 53, 34)
.atZone(ZoneId.systemDefault()).toInstant(),
InstantTypeAdapter.deserializeToInstant("2021-01-03T00:53:34"));
} |
@Override
public SchemaResult getKeySchema(
final Optional<String> topicName,
final Optional<Integer> schemaId,
final FormatInfo expectedFormat,
final SerdeFeatures serdeFeatures
) {
return getSchema(topicName, schemaId, expectedFormat, serdeFeatures, true);
} | @Test
public void shouldReturnErrorFromGetKeySchemaIfSchemaIsNotInExpectedFormat() {
// Given:
when(parsedSchema.schemaType()).thenReturn(ProtobufSchema.TYPE);
// When:
final SchemaResult result = supplier.getKeySchema(Optional.of(TOPIC_NAME),
Optional.empty(), expectedFormat, SerdeFeatures.of(SerdeFeature.UNWRAP_SINGLES));
// Then:
assertThat(result.schemaAndId, is(Optional.empty()));
assertThat(result.failureReason, is(not(Optional.empty())));
assertThat(result.failureReason.get().getMessage(), is(
"Key schema is not in the expected format. "
+ "You may want to set KEY_FORMAT to 'PROTOBUF'."
+ System.lineSeparator()
+ "topic: " + TOPIC_NAME
+ System.lineSeparator()
+ "expected format: AVRO"
+ System.lineSeparator()
+ "actual format from Schema Registry: PROTOBUF"
));
} |
public static String validateClaimNameOverride(String name, String value) throws ValidateException {
return validateString(name, value);
} | @Test
public void testValidateClaimNameOverride() {
String expected = "email";
String actual = ClaimValidationUtils.validateClaimNameOverride("sub", String.format(" %s ", expected));
assertEquals(expected, actual);
} |
public boolean isSystemInitiatedRun() {
return !initiator.getType().isRestartable();
} | @Test
public void testIsSystemInitiatedRun() {
RunRequest runRequest =
RunRequest.builder()
.currentPolicy(RunPolicy.START_FRESH_NEW_RUN)
.requester(User.create("tester"))
.build();
Assert.assertFalse(runRequest.isSystemInitiatedRun());
runRequest =
RunRequest.builder()
.initiator(UpstreamInitiator.withType(Initiator.Type.FOREACH))
.currentPolicy(RunPolicy.START_FRESH_NEW_RUN)
.build();
Assert.assertTrue(runRequest.isSystemInitiatedRun());
} |
@Override
public boolean isInfoEnabled() {
return logger.isInfoEnabled();
} | @Test
public void testIsInfoEnabled() {
Logger mockLogger = mock(Logger.class);
when(mockLogger.getName()).thenReturn("foo");
when(mockLogger.isInfoEnabled()).thenReturn(true);
InternalLogger logger = new Slf4JLogger(mockLogger);
assertTrue(logger.isInfoEnabled());
verify(mockLogger).getName();
verify(mockLogger).isInfoEnabled();
} |
public static <T> PrefetchableIterable<T> limit(Iterable<T> iterable, int limit) {
PrefetchableIterable<T> prefetchableIterable = maybePrefetchable(iterable);
return new Default<T>() {
@Override
public PrefetchableIterator<T> createIterator() {
return new PrefetchableIterator<T>() {
PrefetchableIterator<T> delegate = prefetchableIterable.iterator();
int currentPosition;
@Override
public boolean isReady() {
if (currentPosition < limit) {
return delegate.isReady();
}
return true;
}
@Override
public void prefetch() {
if (!isReady()) {
delegate.prefetch();
}
}
@Override
public boolean hasNext() {
if (currentPosition != limit) {
return delegate.hasNext();
}
return false;
}
@Override
public T next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
currentPosition += 1;
return delegate.next();
}
};
}
};
} | @Test
public void testLimit() {
verifyIterable(PrefetchableIterables.limit(PrefetchableIterables.fromArray(), 0));
verifyIterable(PrefetchableIterables.limit(PrefetchableIterables.fromArray(), 1));
verifyIterable(PrefetchableIterables.limit(PrefetchableIterables.fromArray("A", "B", "C"), 0));
verifyIterable(
PrefetchableIterables.limit(PrefetchableIterables.fromArray("A", "B", "C"), 2), "A", "B");
verifyIterable(
PrefetchableIterables.limit(PrefetchableIterables.fromArray("A", "B", "C"), 3),
"A",
"B",
"C");
verifyIterable(
PrefetchableIterables.limit(PrefetchableIterables.fromArray("A", "B", "C"), 4),
"A",
"B",
"C");
} |
@Override
public Column convert(BasicTypeDefine typeDefine) {
PhysicalColumn.PhysicalColumnBuilder builder =
PhysicalColumn.builder()
.name(typeDefine.getName())
.sourceType(typeDefine.getColumnType())
.nullable(typeDefine.isNullable())
.defaultValue(typeDefine.getDefaultValue())
.comment(typeDefine.getComment());
String pgDataType = typeDefine.getDataType().toLowerCase();
switch (pgDataType) {
case PG_BOOLEAN:
builder.dataType(BasicType.BOOLEAN_TYPE);
break;
case PG_BOOLEAN_ARRAY:
builder.dataType(ArrayType.BOOLEAN_ARRAY_TYPE);
break;
case PG_SMALLSERIAL:
case PG_SMALLINT:
builder.dataType(BasicType.SHORT_TYPE);
break;
case PG_SMALLINT_ARRAY:
builder.dataType(ArrayType.SHORT_ARRAY_TYPE);
break;
case PG_INTEGER:
case PG_SERIAL:
builder.dataType(BasicType.INT_TYPE);
break;
case PG_INTEGER_ARRAY:
builder.dataType(ArrayType.INT_ARRAY_TYPE);
break;
case PG_BIGINT:
case PG_BIGSERIAL:
builder.dataType(BasicType.LONG_TYPE);
break;
case PG_BIGINT_ARRAY:
builder.dataType(ArrayType.LONG_ARRAY_TYPE);
break;
case PG_REAL:
builder.dataType(BasicType.FLOAT_TYPE);
break;
case PG_REAL_ARRAY:
builder.dataType(ArrayType.FLOAT_ARRAY_TYPE);
break;
case PG_DOUBLE_PRECISION:
builder.dataType(BasicType.DOUBLE_TYPE);
break;
case PG_DOUBLE_PRECISION_ARRAY:
builder.dataType(ArrayType.DOUBLE_ARRAY_TYPE);
break;
case PG_NUMERIC:
DecimalType decimalType;
if (typeDefine.getPrecision() != null && typeDefine.getPrecision() > 0) {
decimalType =
new DecimalType(
typeDefine.getPrecision().intValue(), typeDefine.getScale());
} else {
decimalType = new DecimalType(DEFAULT_PRECISION, DEFAULT_SCALE);
}
builder.dataType(decimalType);
break;
case PG_MONEY:
// -92233720368547758.08 to +92233720368547758.07, With the sign bit it's 20, we use
// 30 precision to save it
DecimalType moneyDecimalType;
moneyDecimalType = new DecimalType(30, 2);
builder.dataType(moneyDecimalType);
builder.columnLength(30L);
builder.scale(2);
break;
case PG_CHAR:
case PG_CHARACTER:
builder.dataType(BasicType.STRING_TYPE);
if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) {
builder.columnLength(TypeDefineUtils.charTo4ByteLength(1L));
builder.sourceType(pgDataType);
} else {
builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength()));
builder.sourceType(String.format("%s(%s)", pgDataType, typeDefine.getLength()));
}
break;
case PG_VARCHAR:
case PG_CHARACTER_VARYING:
builder.dataType(BasicType.STRING_TYPE);
if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) {
builder.sourceType(pgDataType);
} else {
builder.sourceType(String.format("%s(%s)", pgDataType, typeDefine.getLength()));
builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength()));
}
break;
case PG_TEXT:
builder.dataType(BasicType.STRING_TYPE);
break;
case PG_UUID:
builder.dataType(BasicType.STRING_TYPE);
builder.sourceType(pgDataType);
builder.columnLength(128L);
break;
case PG_JSON:
case PG_JSONB:
case PG_XML:
case PG_GEOMETRY:
case PG_GEOGRAPHY:
builder.dataType(BasicType.STRING_TYPE);
break;
case PG_CHAR_ARRAY:
case PG_VARCHAR_ARRAY:
case PG_TEXT_ARRAY:
builder.dataType(ArrayType.STRING_ARRAY_TYPE);
break;
case PG_BYTEA:
builder.dataType(PrimitiveByteArrayType.INSTANCE);
break;
case PG_DATE:
builder.dataType(LocalTimeType.LOCAL_DATE_TYPE);
break;
case PG_TIME:
case PG_TIME_TZ:
builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
if (typeDefine.getScale() != null && typeDefine.getScale() > MAX_TIME_SCALE) {
builder.scale(MAX_TIME_SCALE);
log.warn(
"The scale of time type is larger than {}, it will be truncated to {}",
MAX_TIME_SCALE,
MAX_TIME_SCALE);
} else {
builder.scale(typeDefine.getScale());
}
break;
case PG_TIMESTAMP:
case PG_TIMESTAMP_TZ:
builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
if (typeDefine.getScale() != null && typeDefine.getScale() > MAX_TIMESTAMP_SCALE) {
builder.scale(MAX_TIMESTAMP_SCALE);
log.warn(
"The scale of timestamp type is larger than {}, it will be truncated to {}",
MAX_TIMESTAMP_SCALE,
MAX_TIMESTAMP_SCALE);
} else {
builder.scale(typeDefine.getScale());
}
break;
default:
throw CommonError.convertToSeaTunnelTypeError(
identifier(), typeDefine.getDataType(), typeDefine.getName());
}
return builder.build();
} | @Test
public void testConvertArray() {
BasicTypeDefine typeDefine =
BasicTypeDefine.builder()
.name("test")
.columnType("_bool")
.dataType("_bool")
.build();
Column column = PostgresTypeConverter.INSTANCE.convert(typeDefine);
Assertions.assertEquals(typeDefine.getName(), column.getName());
Assertions.assertEquals(ArrayType.BOOLEAN_ARRAY_TYPE, column.getDataType());
Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType().toLowerCase());
typeDefine =
BasicTypeDefine.builder()
.name("test")
.columnType("_int2")
.dataType("_int2")
.build();
column = PostgresTypeConverter.INSTANCE.convert(typeDefine);
Assertions.assertEquals(typeDefine.getName(), column.getName());
Assertions.assertEquals(ArrayType.SHORT_ARRAY_TYPE, column.getDataType());
Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType().toLowerCase());
typeDefine =
BasicTypeDefine.builder()
.name("test")
.columnType("_int4")
.dataType("_int4")
.build();
column = PostgresTypeConverter.INSTANCE.convert(typeDefine);
Assertions.assertEquals(typeDefine.getName(), column.getName());
Assertions.assertEquals(ArrayType.INT_ARRAY_TYPE, column.getDataType());
Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType().toLowerCase());
typeDefine =
BasicTypeDefine.builder()
.name("test")
.columnType("_int8")
.dataType("_int8")
.build();
column = PostgresTypeConverter.INSTANCE.convert(typeDefine);
Assertions.assertEquals(typeDefine.getName(), column.getName());
Assertions.assertEquals(ArrayType.LONG_ARRAY_TYPE, column.getDataType());
Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType().toLowerCase());
typeDefine =
BasicTypeDefine.builder()
.name("test")
.columnType("_float4")
.dataType("_float4")
.build();
column = PostgresTypeConverter.INSTANCE.convert(typeDefine);
Assertions.assertEquals(typeDefine.getName(), column.getName());
Assertions.assertEquals(ArrayType.FLOAT_ARRAY_TYPE, column.getDataType());
typeDefine =
BasicTypeDefine.builder()
.name("test")
.columnType("_float8")
.dataType("_float8")
.build();
column = PostgresTypeConverter.INSTANCE.convert(typeDefine);
Assertions.assertEquals(typeDefine.getName(), column.getName());
Assertions.assertEquals(ArrayType.DOUBLE_ARRAY_TYPE, column.getDataType());
typeDefine =
BasicTypeDefine.builder()
.name("test")
.columnType("_bpchar")
.dataType("_bpchar")
.build();
column = PostgresTypeConverter.INSTANCE.convert(typeDefine);
Assertions.assertEquals(typeDefine.getName(), column.getName());
Assertions.assertEquals(ArrayType.STRING_ARRAY_TYPE, column.getDataType());
Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
typeDefine =
BasicTypeDefine.builder()
.name("test")
.columnType("_varchar")
.dataType("_varchar")
.build();
column = PostgresTypeConverter.INSTANCE.convert(typeDefine);
Assertions.assertEquals(typeDefine.getName(), column.getName());
Assertions.assertEquals(ArrayType.STRING_ARRAY_TYPE, column.getDataType());
Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
typeDefine =
BasicTypeDefine.builder()
.name("test")
.columnType("_text")
.dataType("_text")
.build();
column = PostgresTypeConverter.INSTANCE.convert(typeDefine);
Assertions.assertEquals(typeDefine.getName(), column.getName());
Assertions.assertEquals(ArrayType.STRING_ARRAY_TYPE, column.getDataType());
Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
} |
@Override
public String getPort() {
final int port = Integer.parseInt(Optional.ofNullable(super.getPort()).orElseGet(() -> "-1"));
final int mergedPort = port <= 0 ? PortUtils.findPort(getContext().getAutowireCapableBeanFactory()) : port;
return String.valueOf(mergedPort);
} | @Test
public void testGetPort() {
String port = eventListener.getPort();
assertNotNull(port);
assertEquals(port, "8080");
} |
@Override
public String getWelcomeMessage(User user) {
if (UserGroup.isPaid(user)) {
return "You're amazing " + user + ". Thanks for paying for this awesome software.";
}
return "I suppose you can use this software.";
} | @Test
void testGetWelcomeMessageForPaidUser() {
final var welcomeMessage = service.getWelcomeMessage(paidUser);
final var expected = "You're amazing Jamie Coder. Thanks for paying for this awesome software.";
assertEquals(expected, welcomeMessage);
} |
@Override
public byte[] fromConnectData(String topic, Schema schema, Object value) {
if (schema == null && value == null) {
return null;
}
JsonNode jsonValue = config.schemasEnabled() ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value);
try {
return serializer.serialize(topic, jsonValue);
} catch (SerializationException e) {
throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e);
}
} | @Test
public void booleanToJson() {
JsonNode converted = parse(converter.fromConnectData(TOPIC, Schema.BOOLEAN_SCHEMA, true));
validateEnvelope(converted);
assertEquals(parse("{ \"type\": \"boolean\", \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
assertTrue(converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).booleanValue());
} |
@Override
public List<byte[]> clusterGetKeysInSlot(int slot, Integer count) {
RFuture<List<byte[]>> f = executorService.readAsync((String)null, ByteArrayCodec.INSTANCE, CLUSTER_GETKEYSINSLOT, slot, count);
return syncFuture(f);
} | @Test
public void testClusterGetKeysInSlot() {
List<byte[]> keys = connection.clusterGetKeysInSlot(12, 10);
assertThat(keys).isEmpty();
} |
public static Properties parseKeyValueArgs(List<String> args) {
return parseKeyValueArgs(args, true);
} | @Test
public void testParseEmptyArgWithNoDelimiter() {
List<String> argArray = Arrays.asList("my.empty.property");
assertThrows(IllegalArgumentException.class, () -> CommandLineUtils.parseKeyValueArgs(argArray, false));
} |
public synchronized V get(final K key, final Supplier<V> valueSupplier, final Consumer<V> expireCallback) {
final var value = cache.get(key);
if (value != null) {
value.updateDeadline();
return value.value;
}
final var newValue = new ExpirableValue<>(valueSupplier.get(), expireCallback);
newValue.updateDeadline();
cache.put(key, newValue);
return newValue.value;
} | @Test
public void testConcurrentUpdate() throws Exception {
final var cache = new SimpleCache<Integer, Integer>(executor, 10000L, 10000L);
final var pool = Executors.newFixedThreadPool(2);
final var latch = new CountDownLatch(2);
for (int i = 0; i < 2; i++) {
final var value = i + 100;
pool.execute(() -> {
try {
Thread.sleep(100);
} catch (InterruptedException ignored) {
}
cache.get(0, () -> value, __ -> {});
latch.countDown();
});
}
latch.await();
final var value = cache.get(0, () -> -1, __ -> {});
Assert.assertTrue(value == 100 || value == 101);
pool.shutdown();
} |
long count() {
return collection().count();
} | @Test
@MongoDBFixtures("singleDashboard.json")
public void testCountSingleDashboard() throws Exception {
assertEquals(1, this.dashboardService.count());
} |
@Override
public void beforeComponent(Component component) {
if (FILE.equals(component.getType())) {
anticipatedTransitions = anticipatedTransitionRepository.getAnticipatedTransitionByComponent(component);
} else {
anticipatedTransitions = Collections.emptyList();
}
} | @Test
public void givenAFileComponent_theRepositoryIsHitForFetchingAnticipatedTransitions() {
Component component = getComponent(Component.Type.FILE);
when(anticipatedTransitionRepository.getAnticipatedTransitionByComponent(component)).thenReturn(Collections.emptyList());
underTest.beforeComponent(component);
verify(anticipatedTransitionRepository).getAnticipatedTransitionByComponent(component);
} |
public int getMfgReserved() {
return mDataFields.get(0).intValue();
} | @Test
public void testRecognizeBeacon() {
byte[] bytes = hexStringToByteArray("02011a1bff1801beac2f234454cf6d4a0fadf2f4911ba9ffa600010002c509");
AltBeaconParser parser = new AltBeaconParser();
Beacon beacon = parser.fromScanData(bytes, -55, null, 123456L);
assertEquals("manData should be parsed", 9, ((AltBeacon) beacon).getMfgReserved() );
} |
@Override
public List<String> getBrokers() {
return _allBrokerListRef.get();
} | @Test
public void testGetBrokers() {
assertEquals(_dynamicBrokerSelectorUnderTest.getBrokers(), ImmutableList.of("broker1"));
} |
@Produces
@DefaultBean
@Singleton
public JobScheduler jobScheduler(StorageProvider storageProvider) {
if (jobRunrBuildTimeConfiguration.jobScheduler().enabled()) {
final JobDetailsGenerator jobDetailsGenerator = newInstance(jobRunrRuntimeConfiguration.jobScheduler().jobDetailsGenerator().orElse(CachingJobDetailsGenerator.class.getName()));
return new JobScheduler(storageProvider, jobDetailsGenerator, emptyList());
}
return null;
} | @Test
void jobSchedulerIsNotSetupWhenConfigured() {
when(jobSchedulerBuildTimeConfiguration.enabled()).thenReturn(false);
assertThat(jobRunrProducer.jobScheduler(storageProvider)).isNull();
} |
public int getNumber1()
{
checkAvailable(1);
int value = needle.get(needle.position()) & 0xff;
forward(1);
return value;
} | @Test(expected = IllegalArgumentException.class)
public void testGetIncorrectByte()
{
ZFrame frame = new ZFrame(new byte[0]);
ZNeedle needle = new ZNeedle(frame);
needle.getNumber1();
} |
@Override
public EntityExcerpt createExcerpt(DataAdapterDto dataAdapterDto) {
return EntityExcerpt.builder()
.id(ModelId.of(dataAdapterDto.id()))
.type(ModelTypes.LOOKUP_ADAPTER_V1)
.title(dataAdapterDto.title())
.build();
} | @Test
public void createExcerpt() {
final DataAdapterDto dataAdapterDto = DataAdapterDto.builder()
.id("1234567890")
.name("data-adapter-name")
.title("Data Adapter Title")
.description("Data Adapter Description")
.config(new FallbackAdapterConfig())
.build();
final EntityExcerpt excerpt = facade.createExcerpt(dataAdapterDto);
assertThat(excerpt.id()).isEqualTo(ModelId.of("1234567890"));
assertThat(excerpt.type()).isEqualTo(ModelTypes.LOOKUP_ADAPTER_V1);
assertThat(excerpt.title()).isEqualTo("Data Adapter Title");
} |
public static boolean needsQuoting(byte[] data, int off, int len) {
for(int i=off; i< off+len; ++i) {
switch(data[i]) {
case '&':
case '<':
case '>':
case '\'':
case '"':
return true;
default:
break;
}
}
return false;
} | @Test public void testNeedsQuoting() throws Exception {
assertTrue(HtmlQuoting.needsQuoting("abcde>"));
assertTrue(HtmlQuoting.needsQuoting("<abcde"));
assertTrue(HtmlQuoting.needsQuoting("abc'de"));
assertTrue(HtmlQuoting.needsQuoting("abcde\""));
assertTrue(HtmlQuoting.needsQuoting("&"));
assertFalse(HtmlQuoting.needsQuoting(""));
assertFalse(HtmlQuoting.needsQuoting("ab\ncdef"));
assertFalse(HtmlQuoting.needsQuoting(null));
} |
public static void validateCardSecurityVsCardAccess(SecurityInfos cardSecurity, int caKeyReference, int paceVersion,
int taVersion) {
Assert.notNull(cardSecurity, "cardSecurity may not be null");
if (caKeyReference != cardSecurity.getCaKeyId() || paceVersion != cardSecurity.getPaceVersion()
|| taVersion != cardSecurity.getTaVersion()) {
logger.error("the card info and the card security do not match.");
throw new ClientException("The card info and the card security do not match.");
}
} | @Test
public void validateCardSecurityVsCardAccessPaceFail() {
ClientException thrown = assertThrows(ClientException.class, () -> CardValidations.validateCardSecurityVsCardAccess(efCardSecurity, 1, 2, 1));
assertEquals("The card info and the card security do not match.", thrown.getMessage());
} |
public Result resolve(List<PluginDescriptor> plugins) {
// create graphs
dependenciesGraph = new DirectedGraph<>();
dependentsGraph = new DirectedGraph<>();
// populate graphs
Map<String, PluginDescriptor> pluginByIds = new HashMap<>();
for (PluginDescriptor plugin : plugins) {
addPlugin(plugin);
pluginByIds.put(plugin.getPluginId(), plugin);
}
log.debug("Graph: {}", dependenciesGraph);
// get a sorted list of dependencies
List<String> sortedPlugins = dependenciesGraph.reverseTopologicalSort();
log.debug("Plugins order: {}", sortedPlugins);
// create the result object
Result result = new Result(sortedPlugins);
resolved = true;
if (sortedPlugins != null) { // no cyclic dependency
// detect not found dependencies
for (String pluginId : sortedPlugins) {
if (!pluginByIds.containsKey(pluginId)) {
result.addNotFoundDependency(pluginId);
}
}
}
// check dependencies versions
for (PluginDescriptor plugin : plugins) {
String pluginId = plugin.getPluginId();
String existingVersion = plugin.getVersion();
List<String> dependents = getDependents(pluginId);
while (!dependents.isEmpty()) {
String dependentId = dependents.remove(0);
PluginDescriptor dependent = pluginByIds.get(dependentId);
String requiredVersion = getDependencyVersionSupport(dependent, pluginId);
boolean ok = checkDependencyVersion(requiredVersion, existingVersion);
if (!ok) {
result.addWrongDependencyVersion(new WrongDependencyVersion(pluginId, dependentId, existingVersion, requiredVersion));
}
}
}
return result;
} | @Test
void cyclicDependencies() {
PluginDescriptor pd1 = new DefaultPluginDescriptor()
.setPluginId("p1")
.setPluginVersion("0.0.0")
.setDependencies("p2");
PluginDescriptor pd2 = new DefaultPluginDescriptor()
.setPluginId("p2")
.setPluginVersion("0.0.0")
.setDependencies("p3");
PluginDescriptor pd3 = new DefaultPluginDescriptor()
.setPluginId("p3")
.setPluginVersion("0.0.0")
.setDependencies("p1");
List<PluginDescriptor> plugins = new ArrayList<>();
plugins.add(pd1);
plugins.add(pd2);
plugins.add(pd3);
DependencyResolver.Result result = resolver.resolve(plugins);
assertTrue(result.hasCyclicDependency());
} |
@Override
public ReleaseId getParentReleaseId() {
return parentReleaseId;
} | @Test
public void getParentReleaseIdNull() {
final MavenSession mavenSession = mockMavenSession(false);
final ProjectPomModel pomModel = new ProjectPomModel(mavenSession);
assertThat(pomModel.getParentReleaseId()).isNull();
} |
static String determineFullyQualifiedClassName(Path baseDir, String basePackageName, Path classFile) {
String subpackageName = determineSubpackageName(baseDir, classFile);
String simpleClassName = determineSimpleClassName(classFile);
return of(basePackageName, subpackageName, simpleClassName)
.filter(value -> !value.isEmpty()) // default package
.collect(joining(PACKAGE_SEPARATOR_STRING));
} | @Test
void determineFullyQualifiedClassNameFromComPackage() {
Path baseDir = Paths.get("path", "to", "com");
String basePackageName = "com";
Path classFile = Paths.get("path", "to", "com", "example", "app", "App.class");
String fqn = ClasspathSupport.determineFullyQualifiedClassName(baseDir, basePackageName, classFile);
assertEquals("com.example.app.App", fqn);
} |
@Override
public boolean updateTaskExecutionState(TaskExecutionStateTransition taskExecutionState) {
return state.tryCall(
StateWithExecutionGraph.class,
stateWithExecutionGraph ->
stateWithExecutionGraph.updateTaskExecutionState(
taskExecutionState, labelFailure(taskExecutionState)),
"updateTaskExecutionState")
.orElse(false);
} | @Test
void testExceptionHistoryWithTaskFailure() throws Exception {
final Exception expectedException = new Exception("Expected Local Exception");
BiConsumer<AdaptiveScheduler, List<ExecutionAttemptID>> testLogic =
(scheduler, attemptIds) -> {
final ExecutionAttemptID attemptId = attemptIds.get(1);
scheduler.updateTaskExecutionState(
new TaskExecutionStateTransition(
new TaskExecutionState(
attemptId, ExecutionState.FAILED, expectedException)));
};
final Iterable<RootExceptionHistoryEntry> actualExceptionHistory =
new ExceptionHistoryTester(singleThreadMainThreadExecutor)
.withTestLogic(testLogic)
.run();
assertThat(actualExceptionHistory).hasSize(1);
final RootExceptionHistoryEntry failure = actualExceptionHistory.iterator().next();
assertThat(failure.getException().deserializeError(classLoader))
.isEqualTo(expectedException);
} |
public static NetFlowV9Packet parsePacket(ByteBuf bb, NetFlowV9FieldTypeRegistry typeRegistry) {
return parsePacket(bb, typeRegistry, Maps.newHashMap(), null);
} | @Test
public void testParse() throws IOException {
final byte[] b1 = Resources.toByteArray(Resources.getResource("netflow-data/netflow-v9-2-1.dat"));
final byte[] b2 = Resources.toByteArray(Resources.getResource("netflow-data/netflow-v9-2-2.dat"));
final byte[] b3 = Resources.toByteArray(Resources.getResource("netflow-data/netflow-v9-2-3.dat"));
Map<Integer, NetFlowV9Template> cache = Maps.newHashMap();
// check header
NetFlowV9Packet p1 = NetFlowV9Parser.parsePacket(Unpooled.wrappedBuffer(b1), typeRegistry, cache, null);
assertEquals(9, p1.header().version());
assertEquals(3, p1.header().count());
assertEquals(0, p1.header().sequence());
assertEquals(42212, p1.header().sysUptime());
assertEquals(1369122709, p1.header().unixSecs());
assertEquals(106, p1.header().sourceId());
// check templates
assertEquals(2, p1.templates().size());
assertNotNull(p1.optionTemplate());
NetFlowV9Template t1 = p1.templates().get(0);
assertEquals(257, t1.templateId());
assertEquals(18, t1.fieldCount());
List<NetFlowV9FieldDef> d1 = t1.definitions();
assertEquals("in_bytes", name(d1.get(0)));
assertEquals("in_pkts", name(d1.get(1)));
assertEquals("protocol", name(d1.get(2)));
assertEquals("src_tos", name(d1.get(3)));
assertEquals("tcp_flags", name(d1.get(4)));
assertEquals("l4_src_port", name(d1.get(5)));
assertEquals("ipv4_src_addr", name(d1.get(6)));
assertEquals("src_mask", name(d1.get(7)));
assertEquals("input_snmp", name(d1.get(8)));
assertEquals("l4_dst_port", name(d1.get(9)));
assertEquals("ipv4_dst_addr", name(d1.get(10)));
assertEquals("dst_mask", name(d1.get(11)));
assertEquals("output_snmp", name(d1.get(12)));
assertEquals("ipv4_next_hop", name(d1.get(13)));
assertEquals("src_as", name(d1.get(14)));
assertEquals("dst_as", name(d1.get(15)));
assertEquals("last_switched", name(d1.get(16)));
assertEquals("first_switched", name(d1.get(17)));
NetFlowV9Template t2 = p1.templates().get(1);
assertEquals(258, t2.templateId());
assertEquals(18, t2.fieldCount());
NetFlowV9Packet p2 = NetFlowV9Parser.parsePacket(Unpooled.wrappedBuffer(b2), typeRegistry, cache, null);
NetFlowV9BaseRecord r2 = p2.records().get(0);
Map<String, Object> f2 = r2.fields();
assertEquals(2818L, f2.get("in_bytes"));
assertEquals(8L, f2.get("in_pkts"));
assertEquals("192.168.124.1", f2.get("ipv4_src_addr"));
assertEquals("239.255.255.250", f2.get("ipv4_dst_addr"));
assertEquals(3072, f2.get("l4_src_port"));
assertEquals(1900, f2.get("l4_dst_port"));
assertEquals((short) 17, f2.get("protocol"));
NetFlowV9Packet p3 = NetFlowV9Parser.parsePacket(Unpooled.wrappedBuffer(b3), typeRegistry, cache, null);
assertEquals(1, p3.records().size());
} |
private static double[] parseThresholds(String ns, Configuration conf,
int numLevels) {
int[] percentages = conf.getInts(ns + "." +
IPC_FCQ_DECAYSCHEDULER_THRESHOLDS_KEY);
if (percentages.length == 0) {
percentages = conf.getInts(ns + "." + IPC_DECAYSCHEDULER_THRESHOLDS_KEY);
if (percentages.length == 0) {
return getDefaultThresholds(numLevels);
}
} else {
LOG.warn(IPC_FCQ_DECAYSCHEDULER_THRESHOLDS_KEY +
" is deprecated. Please use " +
IPC_DECAYSCHEDULER_THRESHOLDS_KEY);
}
if (percentages.length != numLevels-1) {
throw new IllegalArgumentException("Number of thresholds should be " +
(numLevels-1) + ". Was: " + percentages.length);
}
// Convert integer percentages to decimals
double[] decimals = new double[percentages.length];
for (int i = 0; i < percentages.length; i++) {
decimals[i] = percentages[i] / 100.0;
}
return decimals;
} | @Test
@SuppressWarnings("deprecation")
public void testParseThresholds() {
// Defaults vary by number of queues
Configuration conf = new Configuration();
scheduler = new DecayRpcScheduler(1, "ipc.5", conf);
assertEqualDecimalArrays(new double[]{}, scheduler.getThresholds());
scheduler = new DecayRpcScheduler(2, "ipc.6", conf);
assertEqualDecimalArrays(new double[]{0.5}, scheduler.getThresholds());
scheduler = new DecayRpcScheduler(3, "ipc.7", conf);
assertEqualDecimalArrays(new double[]{0.25, 0.5}, scheduler.getThresholds());
scheduler = new DecayRpcScheduler(4, "ipc.8", conf);
assertEqualDecimalArrays(new double[]{0.125, 0.25, 0.5}, scheduler.getThresholds());
// Custom
conf = new Configuration();
conf.set("ipc.9." + DecayRpcScheduler.IPC_FCQ_DECAYSCHEDULER_THRESHOLDS_KEY,
"1, 10, 20, 50, 85");
scheduler = new DecayRpcScheduler(6, "ipc.9", conf);
assertEqualDecimalArrays(new double[]{0.01, 0.1, 0.2, 0.5, 0.85}, scheduler.getThresholds());
} |
@Udf(description = "Converts a string representation of a date in the given format"
+ " into the TIMESTAMP value."
+ " Single quotes in the timestamp format can be escaped with '',"
+ " for example: 'yyyy-MM-dd''T''HH:mm:ssX'.")
public Timestamp parseTimestamp(
@UdfParameter(
description = "The string representation of a date.") final String formattedTimestamp,
@UdfParameter(
description = "The format pattern should be in the format expected by"
+ " java.time.format.DateTimeFormatter.") final String formatPattern) {
return parseTimestamp(formattedTimestamp, formatPattern, ZoneId.of("GMT").getId());
} | @Test
public void shouldWorkWithManyDifferentFormatters() {
IntStream.range(0, 10_000)
.parallel()
.forEach(idx -> {
try {
final String sourceDate = "2018-12-01 10:12:13.456X" + idx;
final String pattern = "yyyy-MM-dd HH:mm:ss.SSS'X" + idx + "'";
final Timestamp result = udf.parseTimestamp(sourceDate, pattern);
final SimpleDateFormat sdf = new SimpleDateFormat(pattern);
sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
final Timestamp expectedResult = Timestamp.from(sdf.parse(sourceDate).toInstant());
assertThat(result, is(expectedResult));
} catch (final Exception e) {
fail(e.getMessage());
}
});
} |
protected String messageToString(Message message) {
switch (message.getMessageType()) {
case SYSTEM:
return message.getContent();
case USER:
return humanPrompt + message.getContent();
case ASSISTANT:
return assistantPrompt + message.getContent();
case TOOL:
throw new IllegalArgumentException(TOOL_EXECUTION_NOT_SUPPORTED_FOR_WAI_MODELS);
}
throw new IllegalArgumentException("Unknown message type: " + message.getMessageType());
} | @Test
public void testSingleUserMessage() {
Message userMessage = new UserMessage("User message");
String expected = "User message";
Assert.assertEquals(expected, converter.messageToString(userMessage));
} |
@Override
public void finish()
{
close();
} | @Test
public void testFinish()
throws Exception
{
SourceOperator operator = createExchangeOperator();
operator.addSplit(new ScheduledSplit(0, operator.getSourceId(), newRemoteSplit(TASK_1_ID)));
operator.addSplit(new ScheduledSplit(1, operator.getSourceId(), newRemoteSplit(TASK_2_ID)));
operator.addSplit(new ScheduledSplit(2, operator.getSourceId(), newRemoteSplit(TASK_3_ID)));
operator.noMoreSplits();
// add pages and leave buffers open
taskBuffers.getUnchecked(TASK_1_ID).addPages(1, false);
taskBuffers.getUnchecked(TASK_2_ID).addPages(1, false);
taskBuffers.getUnchecked(TASK_3_ID).addPages(1, false);
// read 3 pages
waitForPages(operator, 3);
// verify state
assertFalse(operator.isFinished());
assertFalse(operator.needsInput());
assertNull(operator.getOutput());
// finish without closing buffers
operator.finish();
// wait for finished
waitForFinished(operator);
} |
@Override
public BasicTypeDefine reconvert(Column column) {
BasicTypeDefine.BasicTypeDefineBuilder builder =
BasicTypeDefine.builder()
.name(column.getName())
.nullable(column.isNullable())
.comment(column.getComment())
.defaultValue(column.getDefaultValue());
switch (column.getDataType().getSqlType()) {
case BOOLEAN:
builder.columnType(XUGU_BOOLEAN);
builder.dataType(XUGU_BOOLEAN);
break;
case TINYINT:
builder.columnType(XUGU_TINYINT);
builder.dataType(XUGU_TINYINT);
break;
case SMALLINT:
builder.columnType(XUGU_SMALLINT);
builder.dataType(XUGU_SMALLINT);
break;
case INT:
builder.columnType(XUGU_INTEGER);
builder.dataType(XUGU_INTEGER);
break;
case BIGINT:
builder.columnType(XUGU_BIGINT);
builder.dataType(XUGU_BIGINT);
break;
case FLOAT:
builder.columnType(XUGU_FLOAT);
builder.dataType(XUGU_FLOAT);
break;
case DOUBLE:
builder.columnType(XUGU_DOUBLE);
builder.dataType(XUGU_DOUBLE);
break;
case DECIMAL:
DecimalType decimalType = (DecimalType) column.getDataType();
long precision = decimalType.getPrecision();
int scale = decimalType.getScale();
if (precision <= 0) {
precision = DEFAULT_PRECISION;
scale = DEFAULT_SCALE;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which is precision less than 0, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
precision,
scale);
} else if (precision > MAX_PRECISION) {
scale = (int) Math.max(0, scale - (precision - MAX_PRECISION));
precision = MAX_PRECISION;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which exceeds the maximum precision of {}, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
MAX_PRECISION,
precision,
scale);
}
if (scale < 0) {
scale = 0;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which is scale less than 0, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
precision,
scale);
} else if (scale > MAX_SCALE) {
scale = MAX_SCALE;
log.warn(
"The decimal column {} type decimal({},{}) is out of range, "
+ "which exceeds the maximum scale of {}, "
+ "it will be converted to decimal({},{})",
column.getName(),
decimalType.getPrecision(),
decimalType.getScale(),
MAX_SCALE,
precision,
scale);
}
builder.columnType(String.format("%s(%s,%s)", XUGU_NUMERIC, precision, scale));
builder.dataType(XUGU_NUMERIC);
builder.precision(precision);
builder.scale(scale);
break;
case BYTES:
if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
builder.columnType(XUGU_BLOB);
builder.dataType(XUGU_BLOB);
} else if (column.getColumnLength() <= MAX_BINARY_LENGTH) {
builder.columnType(XUGU_BINARY);
builder.dataType(XUGU_BINARY);
} else {
builder.columnType(XUGU_BLOB);
builder.dataType(XUGU_BLOB);
}
break;
case STRING:
if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
builder.columnType(String.format("%s(%s)", XUGU_VARCHAR, MAX_VARCHAR_LENGTH));
builder.dataType(XUGU_VARCHAR);
} else if (column.getColumnLength() <= MAX_VARCHAR_LENGTH) {
builder.columnType(
String.format("%s(%s)", XUGU_VARCHAR, column.getColumnLength()));
builder.dataType(XUGU_VARCHAR);
} else {
builder.columnType(XUGU_CLOB);
builder.dataType(XUGU_CLOB);
}
break;
case DATE:
builder.columnType(XUGU_DATE);
builder.dataType(XUGU_DATE);
break;
case TIME:
builder.dataType(XUGU_TIME);
if (column.getScale() != null && column.getScale() > 0) {
Integer timeScale = column.getScale();
if (timeScale > MAX_TIME_SCALE) {
timeScale = MAX_TIME_SCALE;
log.warn(
"The time column {} type time({}) is out of range, "
+ "which exceeds the maximum scale of {}, "
+ "it will be converted to time({})",
column.getName(),
column.getScale(),
MAX_SCALE,
timeScale);
}
builder.columnType(String.format("%s(%s)", XUGU_TIME, timeScale));
builder.scale(timeScale);
} else {
builder.columnType(XUGU_TIME);
}
break;
case TIMESTAMP:
if (column.getScale() == null || column.getScale() <= 0) {
builder.columnType(XUGU_TIMESTAMP);
} else {
int timestampScale = column.getScale();
if (column.getScale() > MAX_TIMESTAMP_SCALE) {
timestampScale = MAX_TIMESTAMP_SCALE;
log.warn(
"The timestamp column {} type timestamp({}) is out of range, "
+ "which exceeds the maximum scale of {}, "
+ "it will be converted to timestamp({})",
column.getName(),
column.getScale(),
MAX_TIMESTAMP_SCALE,
timestampScale);
}
builder.columnType(String.format("TIMESTAMP(%s)", timestampScale));
builder.scale(timestampScale);
}
builder.dataType(XUGU_TIMESTAMP);
break;
default:
throw CommonError.convertToConnectorTypeError(
DatabaseIdentifier.XUGU,
column.getDataType().getSqlType().name(),
column.getName());
}
return builder.build();
} | @Test
public void testReconvertDecimal() {
Column column =
PhysicalColumn.builder().name("test").dataType(new DecimalType(0, 0)).build();
BasicTypeDefine typeDefine = XuguTypeConverter.INSTANCE.reconvert(column);
Assertions.assertEquals(column.getName(), typeDefine.getName());
Assertions.assertEquals(
String.format(
"%s(%s,%s)",
XuguTypeConverter.XUGU_NUMERIC,
XuguTypeConverter.DEFAULT_PRECISION,
XuguTypeConverter.DEFAULT_SCALE),
typeDefine.getColumnType());
Assertions.assertEquals(XuguTypeConverter.XUGU_NUMERIC, typeDefine.getDataType());
column = PhysicalColumn.builder().name("test").dataType(new DecimalType(10, 2)).build();
typeDefine = XuguTypeConverter.INSTANCE.reconvert(column);
Assertions.assertEquals(column.getName(), typeDefine.getName());
Assertions.assertEquals(
String.format("%s(%s,%s)", XuguTypeConverter.XUGU_NUMERIC, 10, 2),
typeDefine.getColumnType());
Assertions.assertEquals(XuguTypeConverter.XUGU_NUMERIC, typeDefine.getDataType());
} |
@Override
public ParamCheckResponse checkParamInfoList(List<ParamInfo> paramInfos) {
ParamCheckResponse paramCheckResponse = new ParamCheckResponse();
if (paramInfos == null) {
paramCheckResponse.setSuccess(true);
return paramCheckResponse;
}
for (ParamInfo paramInfo : paramInfos) {
paramCheckResponse = checkParamInfoFormat(paramInfo);
if (!paramCheckResponse.isSuccess()) {
return paramCheckResponse;
}
}
paramCheckResponse.setSuccess(true);
return paramCheckResponse;
} | @Test
void testCheckParamInfoForDataId() {
ParamInfo paramInfo = new ParamInfo();
ArrayList<ParamInfo> paramInfos = new ArrayList<>();
paramInfos.add(paramInfo);
// Max Length
String dataId = buildStringLength(257);
paramInfo.setDataId(dataId);
ParamCheckResponse actual = paramChecker.checkParamInfoList(paramInfos);
assertFalse(actual.isSuccess());
assertEquals("Param 'dataId' is illegal, the param length should not exceed 256.", actual.getMessage());
// Pattern
paramInfo.setDataId("hsbfkj@$!#khdkad");
actual = paramChecker.checkParamInfoList(paramInfos);
assertFalse(actual.isSuccess());
assertEquals("Param 'dataId' is illegal, illegal characters should not appear in the param.", actual.getMessage());
// Success
paramInfo.setDataId("a-zA-Z0-9-_:.");
actual = paramChecker.checkParamInfoList(paramInfos);
assertTrue(actual.isSuccess());
} |
@Override
protected void refresh(final List<PluginData> data) {
pluginDataSubscriber.refreshPluginDataAll();
if (CollectionUtils.isEmpty(data)) {
LOG.info("clear all plugin data cache");
return;
}
data.forEach(pluginDataSubscriber::onSubscribe);
} | @Test
public void testRefreshCoverage() {
final PluginDataRefresh pluginDataRefresh = mockPluginDataRefresh;
PluginData selectorData = new PluginData();
List<PluginData> selectorDataList = new ArrayList<>();
pluginDataRefresh.refresh(selectorDataList);
selectorDataList.add(selectorData);
pluginDataRefresh.refresh(selectorDataList);
} |
public MediaType getContentType() {
Optional<MediaType> optionalType = toContentType(Files.getFileExtension(filename));
Optional<Charset> targetCharset = toCharset(optionalType.orElse(null));
MediaType type = optionalType.orElse(DEFAULT_CONTENT_TYPE_WITH_CHARSET);
if (targetCharset.isPresent() && !type.charset().toJavaUtil().equals(targetCharset)) {
return type.withCharset(targetCharset.get());
}
return type;
} | @Test
public void should_get_default_type_from_unknown_name() {
FileContentType contentType = new FileContentType("UNKNOWN_FILE");
assertThat(contentType.getContentType(), is(MediaType.PLAIN_TEXT_UTF_8));
} |
public static GroupByParams build(
final LogicalSchema sourceSchema,
final List<CompiledExpression> groupBys,
final ProcessingLogger logger
) {
if (groupBys.isEmpty()) {
throw new IllegalArgumentException("No GROUP BY groupBys");
}
final Grouper grouper = buildGrouper(sourceSchema, groupBys, logger);
return new GroupByParams(grouper.getSchema(), grouper::apply);
} | @Test(expected = IllegalArgumentException.class)
public void shouldThrowOnEmptyParam() {
GroupByParamsV1Factory
.build(SOURCE_SCHEMA, Collections.emptyList(), logger);
} |
public void appendDocument(PDDocument destination, PDDocument source) throws IOException
{
if (source.getDocument().isClosed())
{
throw new IOException("Error: source PDF is closed.");
}
if (destination.getDocument().isClosed())
{
throw new IOException("Error: destination PDF is closed.");
}
PDDocumentCatalog srcCatalog = source.getDocumentCatalog();
if (isDynamicXfa(srcCatalog.getAcroForm()))
{
throw new IOException("Error: can't merge source document containing dynamic XFA form content.");
}
PDDocumentInformation destInfo = destination.getDocumentInformation();
PDDocumentInformation srcInfo = source.getDocumentInformation();
mergeInto(srcInfo.getCOSObject(), destInfo.getCOSObject(), Collections.emptySet());
// use the highest version number for the resulting pdf
float destVersion = destination.getVersion();
float srcVersion = source.getVersion();
if (destVersion < srcVersion)
{
destination.setVersion(srcVersion);
}
int pageIndexOpenActionDest = -1;
PDDocumentCatalog destCatalog = destination.getDocumentCatalog();
if (destCatalog.getOpenAction() == null)
{
// PDFBOX-3972: get local dest page index, it must be reassigned after the page cloning
PDDestinationOrAction openAction = null;
try
{
openAction = srcCatalog.getOpenAction();
}
catch (IOException ex)
{
// PDFBOX-4223
LOG.error("Invalid OpenAction ignored", ex);
}
PDDestination openActionDestination = null;
if (openAction instanceof PDActionGoTo)
{
openActionDestination = ((PDActionGoTo) openAction).getDestination();
}
else if (openAction instanceof PDDestination)
{
openActionDestination = (PDDestination) openAction;
}
// note that it can also be something else, e.g. PDActionJavaScript, then do nothing
if (openActionDestination instanceof PDPageDestination)
{
PDPage page = ((PDPageDestination) openActionDestination).getPage();
if (page != null)
{
pageIndexOpenActionDest = srcCatalog.getPages().indexOf(page);
}
}
destCatalog.setOpenAction(openAction);
}
PDFCloneUtility cloner = new PDFCloneUtility(destination);
mergeAcroForm(cloner, destCatalog, srcCatalog);
COSArray destThreads = destCatalog.getCOSObject().getCOSArray(COSName.THREADS);
COSArray srcThreads = (COSArray) cloner.cloneForNewDocument(destCatalog.getCOSObject().getDictionaryObject(
COSName.THREADS));
if (destThreads == null)
{
destCatalog.getCOSObject().setItem(COSName.THREADS, srcThreads);
}
else
{
destThreads.addAll(srcThreads);
}
PDDocumentNameDictionary destNames = destCatalog.getNames();
PDDocumentNameDictionary srcNames = srcCatalog.getNames();
if (srcNames != null)
{
if (destNames == null)
{
destCatalog.getCOSObject().setItem(COSName.NAMES,
cloner.cloneForNewDocument(srcNames.getCOSObject()));
}
else
{
cloner.cloneMerge(srcNames, destNames);
}
}
if (destNames != null && destNames.getCOSObject().containsKey(COSName.ID_TREE))
{
// found in 001031.pdf from PDFBOX-4417 and doesn't belong there
destNames.getCOSObject().removeItem(COSName.ID_TREE);
LOG.warn("Removed /IDTree from /Names dictionary, doesn't belong there");
}
PDDocumentNameDestinationDictionary srcDests = srcCatalog.getDests();
if (srcDests != null)
{
PDDocumentNameDestinationDictionary destDests = destCatalog.getDests();
if (destDests == null)
{
destCatalog.getCOSObject().setItem(COSName.DESTS,
cloner.cloneForNewDocument(srcDests.getCOSObject()));
}
else
{
cloner.cloneMerge(srcDests, destDests);
}
}
PDDocumentOutline srcOutline = srcCatalog.getDocumentOutline();
if (srcOutline != null)
{
PDDocumentOutline destOutline = destCatalog.getDocumentOutline();
if (destOutline == null || destOutline.getFirstChild() == null)
{
PDDocumentOutline cloned = new PDDocumentOutline(
cloner.cloneForNewDocument(srcOutline.getCOSObject()));
destCatalog.setDocumentOutline(cloned);
}
else
{
// search last sibling for dest, because /Last entry is sometimes wrong
PDOutlineItem destLastOutlineItem = destOutline.getFirstChild();
while (true)
{
PDOutlineItem outlineItem = destLastOutlineItem.getNextSibling();
if (outlineItem == null)
{
break;
}
destLastOutlineItem = outlineItem;
}
for (PDOutlineItem item : srcOutline.children())
{
// get each child, clone its dictionary, remove siblings info,
// append outline item created from there
COSDictionary clonedDict = cloner.cloneForNewDocument(item.getCOSObject());
clonedDict.removeItem(COSName.PREV);
clonedDict.removeItem(COSName.NEXT);
PDOutlineItem clonedItem = new PDOutlineItem(clonedDict);
destLastOutlineItem.insertSiblingAfter(clonedItem);
destLastOutlineItem = destLastOutlineItem.getNextSibling();
}
}
}
PageMode destPageMode = destCatalog.getPageMode();
if (destPageMode == null)
{
PageMode srcPageMode = srcCatalog.getPageMode();
destCatalog.setPageMode(srcPageMode);
}
COSDictionary srcLabels = srcCatalog.getCOSObject().getCOSDictionary(COSName.PAGE_LABELS);
if (srcLabels != null)
{
int destPageCount = destination.getNumberOfPages();
COSArray destNums;
COSDictionary destLabels = destCatalog.getCOSObject().getCOSDictionary(COSName.PAGE_LABELS);
if (destLabels == null)
{
destLabels = new COSDictionary();
destNums = new COSArray();
destLabels.setItem(COSName.NUMS, destNums);
destCatalog.getCOSObject().setItem(COSName.PAGE_LABELS, destLabels);
}
else
{
destNums = (COSArray) destLabels.getDictionaryObject(COSName.NUMS);
}
COSArray srcNums = (COSArray) srcLabels.getDictionaryObject(COSName.NUMS);
if (srcNums != null)
{
int startSize = destNums.size();
for (int i = 0; i < srcNums.size(); i += 2)
{
COSBase base = srcNums.getObject(i);
if (!(base instanceof COSNumber))
{
LOG.error("page labels ignored, index {} should be a number, but is {}", i,
base);
// remove what we added
while (destNums.size() > startSize)
{
destNums.remove(startSize);
}
break;
}
COSNumber labelIndex = (COSNumber) base;
long labelIndexValue = labelIndex.intValue();
destNums.add(COSInteger.get(labelIndexValue + destPageCount));
destNums.add(cloner.cloneForNewDocument(srcNums.getObject(i + 1)));
}
}
}
COSStream destMetadata = destCatalog.getCOSObject().getCOSStream(COSName.METADATA);
COSStream srcMetadata = srcCatalog.getCOSObject().getCOSStream(COSName.METADATA);
if (destMetadata == null && srcMetadata != null)
{
try
{
PDStream newStream = new PDStream(destination, srcMetadata.createInputStream(), (COSName) null);
mergeInto(srcMetadata, newStream.getCOSObject(),
new HashSet<>(Arrays.asList(COSName.FILTER, COSName.LENGTH)));
destCatalog.getCOSObject().setItem(COSName.METADATA, newStream);
}
catch (IOException ex)
{
// PDFBOX-4227 cleartext XMP stream with /Flate
LOG.error("Metadata skipped because it could not be read", ex);
}
}
COSDictionary destOCP = destCatalog.getCOSObject().getCOSDictionary(COSName.OCPROPERTIES);
COSDictionary srcOCP = srcCatalog.getCOSObject().getCOSDictionary(COSName.OCPROPERTIES);
if (destOCP == null && srcOCP != null)
{
destCatalog.getCOSObject().setItem(COSName.OCPROPERTIES, cloner.cloneForNewDocument(srcOCP));
}
else if (destOCP != null && srcOCP != null)
{
cloner.cloneMerge(srcOCP, destOCP);
}
mergeOutputIntents(cloner, srcCatalog, destCatalog);
// merge logical structure hierarchy
boolean mergeStructTree = false;
int destParentTreeNextKey = -1;
Map<Integer, COSObjectable> srcNumberTreeAsMap = null;
Map<Integer, COSObjectable> destNumberTreeAsMap = null;
PDStructureTreeRoot srcStructTree = srcCatalog.getStructureTreeRoot();
PDStructureTreeRoot destStructTree = destCatalog.getStructureTreeRoot();
if (destStructTree == null && srcStructTree != null)
{
// create a dummy structure tree in the destination, so that the source
// tree is cloned. (We can't just copy the tree reference due to PDFBOX-3999)
destStructTree = new PDStructureTreeRoot();
destCatalog.setStructureTreeRoot(destStructTree);
destStructTree.setParentTree(new PDNumberTreeNode(PDParentTreeValue.class));
// PDFBOX-4429: remove bogus StructParent(s)
for (PDPage page : destCatalog.getPages())
{
page.getCOSObject().removeItem(COSName.STRUCT_PARENTS);
for (PDAnnotation ann : page.getAnnotations())
{
ann.getCOSObject().removeItem(COSName.STRUCT_PARENT);
}
}
}
if (destStructTree != null)
{
PDNumberTreeNode destParentTree = destStructTree.getParentTree();
destParentTreeNextKey = destStructTree.getParentTreeNextKey();
if (destParentTree != null)
{
destNumberTreeAsMap = getNumberTreeAsMap(destParentTree);
if (destParentTreeNextKey < 0)
{
if (destNumberTreeAsMap.isEmpty())
{
destParentTreeNextKey = 0;
}
else
{
destParentTreeNextKey = Collections.max(destNumberTreeAsMap.keySet()) + 1;
}
}
if (destParentTreeNextKey >= 0 && srcStructTree != null)
{
PDNumberTreeNode srcParentTree = srcStructTree.getParentTree();
if (srcParentTree != null)
{
srcNumberTreeAsMap = getNumberTreeAsMap(srcParentTree);
if (!srcNumberTreeAsMap.isEmpty())
{
mergeStructTree = true;
}
}
}
}
}
Map<COSDictionary, COSDictionary> objMapping = new HashMap<>();
int pageIndex = 0;
PDPageTree destinationPageTree = destination.getPages(); // cache PageTree
for (PDPage page : srcCatalog.getPages())
{
PDPage newPage = new PDPage(cloner.cloneForNewDocument(page.getCOSObject()));
if (!mergeStructTree)
{
// PDFBOX-4429: remove bogus StructParent(s)
newPage.getCOSObject().removeItem(COSName.STRUCT_PARENTS);
for (PDAnnotation ann : newPage.getAnnotations())
{
ann.getCOSObject().removeItem(COSName.STRUCT_PARENT);
}
}
newPage.setCropBox(page.getCropBox());
newPage.setMediaBox(page.getMediaBox());
newPage.setRotation(page.getRotation());
PDResources resources = page.getResources();
if (resources != null)
{
// this is smart enough to just create references for resources that are used on multiple pages
newPage.setResources(new PDResources(
cloner.cloneForNewDocument(resources.getCOSObject())));
}
else
{
newPage.setResources(new PDResources());
}
if (mergeStructTree)
{
// add the value of the destination ParentTreeNextKey to every source element
// StructParent(s) value so that these don't overlap with the existing values
updateStructParentEntries(newPage, destParentTreeNextKey);
objMapping.put(page.getCOSObject(), newPage.getCOSObject());
List<PDAnnotation> oldAnnots = page.getAnnotations();
List<PDAnnotation> newAnnots = newPage.getAnnotations();
for (int i = 0; i < oldAnnots.size(); i++)
{
objMapping.put(oldAnnots.get(i).getCOSObject(), newAnnots.get(i).getCOSObject());
}
// TODO update mapping for XObjects
}
destinationPageTree.add(newPage);
if (pageIndex == pageIndexOpenActionDest)
{
// PDFBOX-3972: reassign the page.
// The openAction is either a PDActionGoTo or a PDPageDestination
PDDestinationOrAction openAction = destCatalog.getOpenAction();
PDPageDestination pageDestination;
if (openAction instanceof PDActionGoTo)
{
pageDestination = (PDPageDestination) ((PDActionGoTo) openAction).getDestination();
}
else
{
pageDestination = (PDPageDestination) openAction;
}
pageDestination.setPage(newPage);
}
++pageIndex;
}
if (mergeStructTree)
{
updatePageReferences(cloner, srcNumberTreeAsMap, objMapping);
int maxSrcKey = -1;
for (Map.Entry<Integer, COSObjectable> entry : srcNumberTreeAsMap.entrySet())
{
int srcKey = entry.getKey();
maxSrcKey = Math.max(srcKey, maxSrcKey);
destNumberTreeAsMap.put(destParentTreeNextKey + srcKey,
cloner.cloneForNewDocument(entry.getValue().getCOSObject()));
}
destParentTreeNextKey += maxSrcKey + 1;
PDNumberTreeNode newParentTreeNode = new PDNumberTreeNode(PDParentTreeValue.class);
// Note that all elements are stored flatly. This could become a problem for large files
// when these are opened in a viewer that uses the tagging information.
// If this happens, then PDNumberTreeNode should be improved with a convenience method that
// stores the map into a B+Tree, see https://en.wikipedia.org/wiki/B+_tree
newParentTreeNode.setNumbers(destNumberTreeAsMap);
destStructTree.setParentTree(newParentTreeNode);
destStructTree.setParentTreeNextKey(destParentTreeNextKey);
mergeKEntries(cloner, srcStructTree, destStructTree);
mergeRoleMap(srcStructTree, destStructTree);
mergeIDTree(cloner, srcStructTree, destStructTree);
mergeMarkInfo(destCatalog, srcCatalog);
mergeLanguage(destCatalog, srcCatalog);
mergeViewerPreferences(destCatalog, srcCatalog);
}
} | @Test
void testMergeBogusStructParents1() throws IOException
{
    // Regression test for PDFBOX-4429/PDFBOX-4408: appendDocument() must cope with a
    // destination that has no structure tree root but carries bogus /StructParents
    // and /StructParent entries on its pages/annotations.
    PDFMergerUtility pdfMergerUtility = new PDFMergerUtility();
    try (PDDocument src = Loader.loadPDF(new File(TARGETPDFDIR, "PDFBOX-4408.pdf"));
        PDDocument dst = Loader.loadPDF(new File(TARGETPDFDIR, "PDFBOX-4408.pdf")))
    {
        // Remove the structure tree and plant invalid StructParent(s) values.
        dst.getDocumentCatalog().setStructureTreeRoot(null);
        dst.getPage(0).setStructParents(9999);
        dst.getPage(0).getAnnotations().get(0).setStructParent(9998);
        pdfMergerUtility.appendDocument(dst, src);
        // Verify parent-tree consistency and that no page became an orphan.
        checkWithNumberTree(dst);
        checkForPageOrphans(dst);
    }
}
/**
 * Returns a {@link TransformEvaluator} for the given applied transform.
 * The input bundle is not consulted here; the evaluator is built purely from
 * the application.
 */
@Override
public <T> TransformEvaluator<T> forApplication(
    AppliedPTransform<?, ?, ?> application, CommittedBundle<?> inputBundle) {
    // Raw/unchecked cast bridges the wildcard application to the generic
    // createEvaluator signature; suppression is scoped to this one local.
    @SuppressWarnings({"cast", "unchecked", "rawtypes"})
    TransformEvaluator<T> evaluator = createEvaluator((AppliedPTransform) application);
    return evaluator;
} | @Test
public void testInMemoryEvaluator() throws Exception {
    // The view writer must receive nothing until finishBundle(), then the
    // full contents of the processed element.
    PCollection<String> input = p.apply(Create.of("foo", "bar"));
    PCollectionView<Iterable<String>> pCollectionView = input.apply(View.asIterable());
    PCollection<Iterable<String>> concat =
        input
            .apply(WithKeys.of((Void) null))
            .setCoder(KvCoder.of(VoidCoder.of(), StringUtf8Coder.of()))
            .apply(GroupByKey.create())
            .apply(Values.create());
    PCollection<Iterable<String>> view =
        concat.apply(new DirectWriteViewVisitor.WriteView<>(pCollectionView));
    EvaluationContext context = mock(EvaluationContext.class);
    TestViewWriter<String, Iterable<String>> viewWriter = new TestViewWriter<>();
    when(context.createPCollectionViewWriter(concat, pCollectionView)).thenReturn(viewWriter);
    CommittedBundle<String> inputBundle = bundleFactory.createBundle(input).commit(Instant.now());
    AppliedPTransform<?, ?, ?> producer = DirectGraphs.getProducer(view);
    TransformEvaluator<Iterable<String>> evaluator =
        new ViewEvaluatorFactory(context).forApplication(producer, inputBundle);
    evaluator.processElement(WindowedValue.valueInGlobalWindow(ImmutableList.of("foo", "bar")));
    // Nothing is emitted before the bundle finishes.
    assertThat(viewWriter.latest, nullValue());
    evaluator.finishBundle();
    assertThat(
        viewWriter.latest,
        containsInAnyOrder(
            WindowedValue.valueInGlobalWindow("foo"), WindowedValue.valueInGlobalWindow("bar")));
}
/**
 * Validates that the given value is a well-formed http/https URL ending in
 * the GoCD context path {@code /go} (optionally with a trailing slash).
 *
 * @param name  the parameter name, used in error messages
 * @param value the candidate server URL
 * @throws ParameterException if the value is not a valid URL, uses a
 *                            protocol other than http/https, or does not end with '/go'
 */
@Override
public void validate(String name, String value) throws ParameterException {
    final URL serverUrl;
    try {
        serverUrl = new URL(value);
    } catch (MalformedURLException e) {
        throw new ParameterException(name + " is not a valid url");
    }
    // URL normalizes the protocol to lower case, so plain equals checks suffice.
    final String protocol = serverUrl.getProtocol();
    if (!"http".equals(protocol) && !"https".equals(protocol)) {
        throw new ParameterException(name + " must use http or https protocol");
    }
    // Normalize once rather than re-stringifying for each suffix check.
    final String normalized = serverUrl.toString();
    if (!normalized.endsWith("/go") && !normalized.endsWith("/go/")) {
        throw new ParameterException(name + " must end with '/go' (http://localhost:8153/go)");
    }
} | @Test
void shouldValidateByParsingUrl() {
    // A non-URL string must be rejected with a ParameterException.
    assertThatCode(() -> new ServerUrlValidator().validate("foo", "bad-url"))
        .isOfAnyClassIn(ParameterException.class)
        .hasMessageContaining("is not a valid url");
}
/**
 * Creates a {@link ParDoFn} by dispatching on the CloudObject's class name to
 * a registered factory.
 *
 * NOTE(review): throwing raw {@code Exception} for an unknown class name is
 * coarse; callers would benefit from a more specific type, but the signature
 * is part of the factory interface and is kept as-is.
 */
@Override
public ParDoFn create(
    PipelineOptions options,
    CloudObject cloudUserFn,
    List<SideInputInfo> sideInputInfos,
    TupleTag<?> mainOutputTag,
    Map<TupleTag<?>, Integer> outputTupleTagsToReceiverIndices,
    DataflowExecutionContext<?> executionContext,
    DataflowOperationContext operationContext)
    throws Exception {
    // Look up the delegate factory registered for this serialized-fn class name.
    String className = cloudUserFn.getClassName();
    ParDoFnFactory factory = defaultFactories.get(className);
    if (factory == null) {
        throw new Exception("No known ParDoFnFactory for " + className);
    }
    // Delegate the actual construction unchanged.
    return factory.create(
        options,
        cloudUserFn,
        sideInputInfos,
        mainOutputTag,
        outputTupleTagsToReceiverIndices,
        executionContext,
        operationContext);
} | @Test
public void testCreateSimpleParDoFn() throws Exception {
    // End-to-end check: a serialized DoFn round-trips through the factory and
    // deserializes back to an equivalent user fn with default windowing.
    // A serialized DoFn
    String stringFieldValue = "some state";
    long longFieldValue = 42L;
    TestDoFn fn = new TestDoFn(stringFieldValue, longFieldValue);
    String serializedFn =
        StringUtils.byteArrayToJsonString(
            SerializableUtils.serializeToByteArray(
                DoFnInfo.forFn(
                    fn,
                    WindowingStrategy.globalDefault(),
                    null /* side input views */,
                    null /* input coder */,
                    new TupleTag<>("output") /* main output */,
                    DoFnSchemaInformation.create(),
                    Collections.emptyMap())));
    CloudObject cloudUserFn = CloudObject.forClassName("DoFn");
    addString(cloudUserFn, "serialized_fn", serializedFn);
    // Create the ParDoFn from the serialized DoFn
    ParDoFn parDoFn =
        DEFAULT_FACTORY.create(
            DEFAULT_OPTIONS,
            cloudUserFn,
            null,
            MAIN_OUTPUT,
            ImmutableMap.<TupleTag<?>, Integer>of(MAIN_OUTPUT, 0),
            DEFAULT_EXECUTION_CONTEXT,
            TestOperationContext.create(counterSet));
    // Test that the factory created the correct class
    assertThat(parDoFn, instanceOf(SimpleParDoFn.class));
    // TODO: move the asserts below into new tests in UserParDoFnFactoryTest, and this test should
    // simply assert that DefaultParDoFnFactory.create() matches UserParDoFnFactory.create()
    // Test that the DoFnInfo reflects the one passed in
    SimpleParDoFn simpleParDoFn = (SimpleParDoFn) parDoFn;
    parDoFn.startBundle(new OutputReceiver());
    // DoFnInfo may not yet be initialized until an element is processed
    parDoFn.processElement(WindowedValue.valueInGlobalWindow("foo"));
    @SuppressWarnings("rawtypes")
    DoFnInfo doFnInfo = simpleParDoFn.getDoFnInfo();
    DoFn innerDoFn = (TestDoFn) doFnInfo.getDoFn();
    assertThat(innerDoFn, instanceOf(TestDoFn.class));
    assertThat(doFnInfo.getWindowingStrategy().getWindowFn(), instanceOf(GlobalWindows.class));
    assertThat(doFnInfo.getWindowingStrategy().getTrigger(), instanceOf(DefaultTrigger.class));
    // Test that the deserialized user DoFn is as expected
    TestDoFn actualTestDoFn = (TestDoFn) innerDoFn;
    assertEquals(stringFieldValue, actualTestDoFn.stringField);
    assertEquals(longFieldValue, actualTestDoFn.longField);
}
/**
 * Returns the shared Row-to-BigQuery-TableRow conversion function.
 * The converter instance is stateless, so a single cached instance is reused.
 */
public static SerializableFunction<Row, TableRow> toTableRow() {
    return ROW_TO_TABLE_ROW;
} | @Test
public void testToTableRow_array() {
    // An array field converts to a List value under its field name.
    TableRow row = toTableRow().apply(ARRAY_ROW);
    assertThat(row, hasEntry("ids", Arrays.asList("123", "124")));
    assertThat(row.size(), equalTo(1));
}
/**
 * Lazily materializes and returns the runnable for the current step.
 * Returns {@code null} when there is no current step left, which callers use
 * as the termination signal.
 */
@Override
public Runnable get() {
    // Create the runnable only once per step; subsequent calls return the
    // cached instance until the supplier advances.
    if (currentRunnable == null && currentStep != null) {
        currentRunnable = createRunnable(currentStep, state);
    }
    return currentRunnable;
} | @Test
public void step_supplier_finishes() throws Exception {
    // Drains the StepSupplier for a SetOperation until it returns null,
    // asserting that no step throws along the way.
    HazelcastInstance node = createHazelcastInstance(getConfig());
    Data data = Accessors.getSerializationService(node).toData("data");
    MapOperation operation = new SetOperation("map", data, data);
    operation.setNodeEngine(Accessors.getNodeEngineImpl(node));
    operation.setPartitionId(1);
    operation.beforeRun();
    try {
        StepSupplier stepSupplier = new StepSupplier(operation, false);
        Runnable step;
        // null from get() marks exhaustion of the step chain.
        while ((step = stepSupplier.get()) != null) {
            step.run();
        }
    } catch (Exception e) {
        fail();
    }
}
/**
 * Verifies that the data source can hand out a connection.
 * Any failure to obtain a connection is rethrown as a RuntimeException with
 * the original cause attached; failures while closing are only logged.
 */
public static void checkDataSourceConnection(HikariDataSource ds) {
    java.sql.Connection connection = null;
    try {
        connection = ds.getConnection();
    } catch (Exception e) {
        // Surface connection-acquisition problems to the caller.
        throw new RuntimeException(e);
    } finally {
        if (connection != null) {
            try {
                // Best-effort close: the health check already succeeded, so a
                // close failure is logged rather than propagated.
                connection.close();
            } catch (Exception e) {
                LOGGER.error(e.getMessage(), e);
            }
        }
    }
} | @Test
void testCheckConnectionNormal() throws SQLException {
    // Happy path: the connection is acquired and then closed.
    HikariDataSource ds = mock(HikariDataSource.class);
    Connection connection = mock(Connection.class);
    when(ds.getConnection()).thenReturn(connection);
    ConnectionCheckUtil.checkDataSourceConnection(ds);
    verify(ds).getConnection();
    verify(connection).close();
}
/**
 * Deletes the coupon template with the given id.
 * Fails (inside the validation helper) if no such template exists.
 */
@Override
public void deleteCouponTemplate(Long id) {
    // Validate that the template exists before deleting.
    validateCouponTemplateExists(id);
    // Delete the record.
    couponTemplateMapper.deleteById(id);
} | @Test
public void testDeleteCouponTemplate_success() {
    // Mock data: insert an existing record first.
    CouponTemplateDO dbCouponTemplate = randomPojo(CouponTemplateDO.class);
    couponTemplateMapper.insert(dbCouponTemplate);
    // Prepare the parameter.
    Long id = dbCouponTemplate.getId();
    // Invoke.
    couponTemplateService.deleteCouponTemplate(id);
    // Verify the record no longer exists.
    assertNull(couponTemplateMapper.selectById(id));
}
/**
 * SQL constant function milliseconds_sub: subtracts the given number of
 * milliseconds from a datetime value; returns null on out-of-range results.
 */
@ConstantFunction(name = "milliseconds_sub", argTypes = {DATETIME, INT}, returnType = DATETIME, isMonotonic = true)
public static ConstantOperator millisecondsSub(ConstantOperator date, ConstantOperator millisecond) {
    return ConstantOperator.createDatetimeOrNull(date.getDatetime().minus(millisecond.getInt(), ChronoUnit.MILLIS));
} | @Test
public void millisecondsSub() {
    // 09:23:55 minus 10 ms -> 09:23:54.990.
    assertEquals("2015-03-23T09:23:54.990",
        ScalarOperatorFunctions.millisecondsSub(O_DT_20150323_092355, O_INT_10).getDatetime().toString());
}
/**
 * Logs an error-level message after applying printf-style formatting to the
 * message and its arguments.
 */
@Override
public void e(String tag, String message, Object... args) {
    Log.e(tag, formatString(message, args));
} | @Test
public void errorLoggedCorrectly() {
    // Format args must be interpolated before the message reaches the log.
    String expectedMessage = "Hello World";
    logger.e(tag, "Hello %s", "World");
    assertLogged(ERROR, tag, expectedMessage, null);
}
/**
 * Builds the path to a job's job.xml inside the user's staging area:
 * {@code <staging>/<user>/.staging/<jobId>/job.xml} (layout per
 * MRApps.getStagingAreaDir and MRJobConfig.JOB_CONF_FILE).
 */
public static String getJobFile(Configuration conf, String user,
    org.apache.hadoop.mapreduce.JobID jobId) {
    Path jobFile = new Path(MRApps.getStagingAreaDir(conf, user),
        jobId.toString() + Path.SEPARATOR + MRJobConfig.JOB_CONF_FILE);
    return jobFile.toString();
} | @Test
@Timeout(120000)
public void testGetJobFileWithUser() {
    // The job file path must embed the staging dir, user, and job id.
    Configuration conf = new Configuration();
    conf.set(MRJobConfig.MR_AM_STAGING_DIR, "/my/path/to/staging");
    String jobFile = MRApps.getJobFile(conf, "dummy-user",
        new JobID("dummy-job", 12345));
    assertNotNull(jobFile, "getJobFile results in null.");
    assertEquals("/my/path/to/staging/dummy-user/.staging/job_dummy-job_12345/job.xml", jobFile,
        "jobFile with specified user is not as expected.");
}
/**
 * Convenience overload: reads query fields from database metadata without
 * restricting to a particular table (delegates with a null table name).
 */
public RowMetaInterface getQueryFieldsFromDatabaseMetaData() throws Exception {
    return this.getQueryFieldsFromDatabaseMetaData( null );
} | @Test
public void testGetQueryFieldsFromDatabaseMetaData() throws Exception {
    // One metadata column row must surface as one value meta with matching
    // name, original type name, and length.
    DatabaseMeta meta = mock( DatabaseMeta.class );
    DatabaseMetaData dbMetaData = mock( DatabaseMetaData.class );
    Connection conn = mockConnection( dbMetaData );
    ResultSet rs = mock( ResultSet.class );
    String columnName = "year";
    String columnType = "Integer";
    int columnSize = 15;
    when( dbMetaData.getColumns( anyString(), anyString(), or( anyString(), eq( null ) ), or( anyString(), eq( null ) ) ) ).thenReturn( rs );
    when( rs.next() ).thenReturn( true ).thenReturn( false );
    when( rs.getString( "COLUMN_NAME" ) ).thenReturn( columnName );
    when( rs.getString( "SOURCE_DATA_TYPE" ) ).thenReturn( columnType );
    when( rs.getInt( "COLUMN_SIZE" ) ).thenReturn( columnSize );
    Database db = new Database( log, meta );
    db.setConnection( conn );
    RowMetaInterface rowMetaInterface = db.getQueryFieldsFromDatabaseMetaData();
    assertEquals( rowMetaInterface.size(), 1 );
    assertEquals( rowMetaInterface.getValueMeta( 0 ).getName(), columnName );
    assertEquals( rowMetaInterface.getValueMeta( 0 ).getOriginalColumnTypeName(), columnType );
    assertEquals( rowMetaInterface.getValueMeta( 0 ).getLength(), columnSize );
}
/**
 * Returns the cached generic-service reference config for the given path,
 * building it on first access via the loading cache.
 *
 * @throws ShenyuException wrapping the underlying cause if cache loading fails
 */
public ReferenceConfig<GenericService> get(final String path) {
    try {
        return cache.get(path);
    } catch (ExecutionException e) {
        // Unwrap the loader failure so callers see the real cause.
        throw new ShenyuException(e.getCause());
    }
} | @Test
public void testGet() {
    // The cache must lazily build a non-null reference for a new path.
    assertNotNull(this.apacheDubboConfigCache.get("/test"));
}
/**
 * Returns the shared Java-type to SQL-type converter singleton.
 */
public static JavaToSqlTypeConverter javaToSqlConverter() {
    return JAVA_TO_SQL_CONVERTER;
} | @Test
public void shouldGetSqlTypeForAllJavaTypes() {
    // Every mapping in the inverse table must round-trip through the converter.
    SQL_TO_JAVA.inverse().forEach((java, sqlType) -> {
        assertThat(javaToSqlConverter().toSqlType(java), is(sqlType));
    });
}
/**
 * Computes the top-left point of the bounding area covered by all steps and
 * notes, pulled back by BORDER_INDENT where possible. When the minimum is at
 * or inside the border (or there are no elements at all), the corresponding
 * coordinate snaps to 0.
 */
public Point getMinimum() {
    int minX = Integer.MAX_VALUE;
    int minY = Integer.MAX_VALUE;
    // Fold in every step location.
    for ( int idx = 0; idx < nrSteps(); idx++ ) {
        Point location = getStep( idx ).getLocation();
        minX = Math.min( minX, location.x );
        minY = Math.min( minY, location.y );
    }
    // Fold in every note location as well.
    for ( int idx = 0; idx < nrNotes(); idx++ ) {
        Point location = getNote( idx ).getLocation();
        minX = Math.min( minX, location.x );
        minY = Math.min( minY, location.y );
    }
    // Apply the border indent; MAX_VALUE means "no elements", which maps to 0.
    minX = ( minX > BORDER_INDENT && minX != Integer.MAX_VALUE ) ? minX - BORDER_INDENT : 0;
    minY = ( minY > BORDER_INDENT && minY != Integer.MAX_VALUE ) ? minY - BORDER_INDENT : 0;
    return new Point( minX, minY );
} | @Test
public void testGetMinimum() {
    final Point minimalCanvasPoint = new Point( 0, 0 );
    //for test goal should content coordinate more than NotePadMetaPoint
    final Point stepPoint = new Point( 500, 500 );
    //empty Trans return 0 coordinate point
    Point point = transMeta.getMinimum();
    assertEquals( minimalCanvasPoint.x, point.x );
    assertEquals( minimalCanvasPoint.y, point.y );
    //when Trans content Step than trans should return minimal coordinate of step
    StepMeta stepMeta = mock( StepMeta.class );
    when( stepMeta.getLocation() ).thenReturn( stepPoint );
    transMeta.addStep( stepMeta );
    // With one step, the minimum is the step location pulled back by the border indent.
    Point actualStepPoint = transMeta.getMinimum();
    assertEquals( stepPoint.x - TransMeta.BORDER_INDENT, actualStepPoint.x );
    assertEquals( stepPoint.y - TransMeta.BORDER_INDENT, actualStepPoint.y );
}
/**
 * Combines the superclass hash with the entity's hash using the conventional
 * prime multiplier; a null entity contributes 0.
 */
@Override
public int hashCode()
{
    final int prime = 31;
    final int entityHash = (_entity == null) ? 0 : _entity.hashCode();
    return prime * super.hashCode() + entityHash;
} | @Test
public void testHashCode()
{
    // Equal key/entity pairs hash equally; null ids hash equally across key types.
    IdEntityResponse<Long, AnyRecord> longIdEntityResponse1 = new IdEntityResponse<>(1L, new AnyRecord());
    IdEntityResponse<Long, AnyRecord> longIdEntityResponse2 = new IdEntityResponse<>(1L, new AnyRecord());
    IdEntityResponse<Long, AnyRecord> nullLongResponse = new IdEntityResponse<>(null, new AnyRecord());
    IdEntityResponse<String, AnyRecord> nullStringResponse = new IdEntityResponse<>(null, new AnyRecord());
    Assert.assertEquals(longIdEntityResponse1.hashCode(), longIdEntityResponse2.hashCode());
    Assert.assertEquals(nullLongResponse.hashCode(), nullStringResponse.hashCode());
}
/**
 * Decides whether a class should be loaded by the plugin classloader rather
 * than the parent. De Morgan of the original exclude/include expression:
 * load in isolation unless the name is excluded and not explicitly included.
 */
public static boolean shouldLoadInIsolation(String name) {
    return !EXCLUDE.matcher(name).matches() || INCLUDE.matcher(name).matches();
} | @Test
public void testAllowedFileConnectors() {
    // File-connector classes must be plugin-isolated despite living under
    // the org.apache.kafka.connect package.
    List<String> jsonConverterClasses = Arrays.asList(
        "org.apache.kafka.connect.file.",
        "org.apache.kafka.connect.file.FileStreamSinkConnector",
        "org.apache.kafka.connect.file.FileStreamSinkTask",
        "org.apache.kafka.connect.file.FileStreamSourceConnector",
        "org.apache.kafka.connect.file.FileStreamSourceTask"
    );
    for (String clazz : jsonConverterClasses) {
        assertTrue(PluginUtils.shouldLoadInIsolation(clazz),
            clazz + " from 'file' is not loaded in isolation but should be");
    }
}
/**
 * Acquires the lock, blocking until it is available.
 * Implemented on top of the interruptible variant with an unbounded lease
 * (-1) and no time unit.
 *
 * @throws IllegalStateException if the thread is interrupted while waiting;
 *                               the interrupt status is restored first
 */
@Override
public void lock() {
    try {
        lockInterruptibly(-1, null);
    } catch (InterruptedException e) {
        // Fix: previously the interrupt status was swallowed and the cause
        // dropped. Restore the flag so callers can observe the interruption,
        // and keep the original exception as the cause for diagnostics.
        Thread.currentThread().interrupt();
        throw new IllegalStateException(e);
    }
} | @Test
public void testLockUnlock() {
    // The spin lock must be re-acquirable after an unlock.
    Lock lock = redisson.getSpinLock("lock1");
    lock.lock();
    lock.unlock();
    lock.lock();
    lock.unlock();
}
/**
 * Reports that DDL statements do not force a transaction commit for this
 * driver's metadata implementation.
 */
@Override
public boolean dataDefinitionCausesTransactionCommit() {
    return false;
} | @Test
void assertDataDefinitionCausesTransactionCommit() {
    // DDL must not be reported as committing the current transaction.
    assertFalse(metaData.dataDefinitionCausesTransactionCommit());
}
/**
 * Runs the configured action up to {@code attempts} times until
 * {@code checkResult} accepts the result, sleeping {@code delay} ms between
 * attempts.
 *
 * Exceptions listed in {@code duringActionExceptions} (or subclasses) count
 * as a failed attempt and are optionally mapped to a result by
 * {@code duringActionExceptionListener}; any other exception aborts and is
 * rethrown. An interrupt during the sleep re-flags the thread and stops
 * retrying. Returns the last result, which may be null.
 */
public @CheckForNull V start() throws Exception {
    V result = null;
    int currentAttempt = 0;
    boolean success = false;
    while (currentAttempt < attempts && !success) {
        currentAttempt++;
        try {
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.log(Level.INFO, Messages.Retrier_Attempt(currentAttempt, action));
            }
            result = callable.call();
        } catch (Exception e) {
            if (duringActionExceptions == null || Stream.of(duringActionExceptions).noneMatch(exception -> exception.isAssignableFrom(e.getClass()))) {
                // if the raised exception is not considered as a controlled exception doing the action, rethrow it
                LOGGER.log(Level.WARNING, Messages.Retrier_ExceptionThrown(currentAttempt, action), e);
                throw e;
            } else {
                // if the exception is considered as a failed action, notify it to the listener
                LOGGER.log(Level.INFO, Messages.Retrier_ExceptionFailed(currentAttempt, action), e);
                if (duringActionExceptionListener != null) {
                    LOGGER.log(Level.INFO, Messages.Retrier_CallingListener(e.getLocalizedMessage(), currentAttempt, action));
                    // The listener may convert the failure into a usable result.
                    result = duringActionExceptionListener.apply(currentAttempt, e);
                }
            }
        }
        // After the call and the call to the listener, which can change the result, test the result
        success = checkResult.test(currentAttempt, result);
        if (!success) {
            if (currentAttempt < attempts) {
                LOGGER.log(Level.WARNING, Messages.Retrier_AttemptFailed(currentAttempt, action));
                LOGGER.log(Level.FINE, Messages.Retrier_Sleeping(delay, action));
                try {
                    Thread.sleep(delay);
                } catch (InterruptedException ie) {
                    LOGGER.log(Level.FINE, Messages.Retrier_Interruption(action));
                    Thread.currentThread().interrupt(); // flag this thread as interrupted
                    currentAttempt = attempts; // finish
                }
            } else {
                // Failed to perform the action
                LOGGER.log(Level.INFO, Messages.Retrier_NoSuccess(action, attempts));
            }
        } else {
            LOGGER.log(Level.INFO, Messages.Retrier_Success(action, currentAttempt));
        }
    }
    return result;
} | @Test
public void failedActionWithAllowedExceptionByInheritanceTest() throws Exception {
    // A thrown subclass (ArrayIndexOutOfBoundsException) of an allowed
    // exception (IndexOutOfBoundsException) must be treated as a failed
    // attempt, handed to the listener, and converted into success.
    final int ATTEMPTS = 1;
    final String ACTION = "print";
    RingBufferLogHandler handler = new RingBufferLogHandler(20);
    Logger.getLogger(Retrier.class.getName()).addHandler(handler);
    // Set the required params
    Retrier<Boolean> r = new Retrier.Builder<>(
        // action to perform
        (Callable<Boolean>) () -> {
            // This one is allowed because we allow IndexOutOfBoundsException (parent exception)
            throw new ArrayIndexOutOfBoundsException("Unallowed exception breaks the process");
        },
        // check the result and return true if success
        (currentAttempt, result) -> result,
        //name of the action
        ACTION
    )
    // Set the optional parameters
    .withAttempts(ATTEMPTS)
    // Exceptions allowed (not the one raised)
    .withDuringActionExceptions(new Class[]{IndexOutOfBoundsException.class})
    // Listener to call. It change the result to success
    .withDuringActionExceptionListener((attempt, exception) -> true)
    // Construct the object
    .build();
    // Begin the process catching the allowed exception
    Boolean finalResult = r.start();
    Assert.assertTrue(finalResult != null && finalResult);
    // The action was a success
    String textSuccess = Messages.Retrier_Success(ACTION, ATTEMPTS);
    assertTrue(String.format("The log should contain '%s'", textSuccess), handler.getView().stream().anyMatch(m -> m.getMessage().contains(textSuccess)));
    // And the message talking about the allowed raised is also there
    String testException = Messages.Retrier_ExceptionFailed(ATTEMPTS, ACTION);
    assertTrue(String.format("The log should contain '%s'", testException), handler.getView().stream().anyMatch(m -> m.getMessage().startsWith(testException)));
}
/**
 * Serializes every value from the flux to the writer using the default
 * object mapper; the returned Mono emits the number of values written.
 */
public static <T> Mono<Long> writeAll(Writer writer, Flux<T> values) throws IOException {
    return writeAll(DEFAULT_OBJECT_MAPPER, writer, values);
} | @Test
void writeAll_fromSingleValuedSource() throws IOException {
    // A single-element flux yields a count of 1 and exactly one ION line.
    final Path outputTempFilePath = createTempFile();
    final List<SimpleEntry> inputValues = List.of(new SimpleEntry(1, "value1"));
    final Long outputCount = FileSerde.writeAll(Files.newBufferedWriter(outputTempFilePath), Flux.fromIterable(inputValues)).block();
    assertThat(outputCount, is(1L));
    final List<String> outputLines = Files.readAllLines(outputTempFilePath);
    assertThat(outputLines, hasSize(1));
    assertThat(outputLines.getFirst(), equalTo("{id:1,value:\"value1\"}"));
}
/**
 * Tests whether the whole content matches the given regex (DOTALL mode,
 * patterns served from the pool).
 *
 * @param regex   the pattern text; an empty or null pattern matches everything
 * @param content the text to test; null never matches
 * @return true if content matches regex
 */
public static boolean isMatch(String regex, CharSequence content) {
    if (null == content) {
        // A null content can never match anything.
        return false;
    }
    if (StrUtil.isEmpty(regex)) {
        // An absent pattern is treated as matching all input.
        return true;
    }
    // Reuse compiled patterns via the pool instead of recompiling each call.
    return isMatch(PatternPool.get(regex, Pattern.DOTALL), content);
} | @Test
public void matchTest(){
    // A grouped province/city/district pattern should match the full address.
    final boolean match = ReUtil.isMatch(
        "(.+?)省(.+?)市(.+?)区", "广东省深圳市南山区");
    Console.log(match);
}
/**
 * Returns the set of values that occur more than once across all supplied
 * collections (duplicates within a single collection count too). Each
 * duplicated value appears once in the result.
 *
 * @param collections the collections to scan; must not be null
 * @return a new mutable set of the duplicated values (possibly empty)
 */
@Nonnull
@SafeVarargs // the varargs array is only iterated, never written to or exposed
public static <V> Set<V> findDuplicates(@Nonnull final Collection<V>... collections)
{
    // 'merged' tracks everything seen so far; a failed add() means duplicate.
    final Set<V> merged = new HashSet<>();
    final Set<V> duplicates = new HashSet<>();
    for (Collection<V> collection : collections) {
        for (V o : collection) {
            if (!merged.add(o)) {
                duplicates.add(o);
            }
        }
    }
    return duplicates;
} | @Test
public void testSingleCollectionWithDuplicates() throws Exception
{
    // A value repeated inside one collection must be reported exactly once.
    // Setup test fixture.
    final List<String> input = Arrays.asList("a", "DUPLICATE", "c", "DUPLICATE");
    // Execute system under test.
    @SuppressWarnings("unchecked")
    final Set<String> result = CollectionUtils.findDuplicates(input);
    // Verify results.
    assertEquals(1, result.size());
    assertTrue(result.contains("DUPLICATE"));
}
/**
 * Returns a BundleProcessor for a descriptor that must not use the State API;
 * delegates to the stateful overload with a no-op state delegator.
 *
 * @throws IllegalStateException (via checkState) if the descriptor declares a
 *                               state service descriptor
 */
public BundleProcessor getProcessor(
    BeamFnApi.ProcessBundleDescriptor descriptor,
    List<RemoteInputDestination> remoteInputDesinations) {
    checkState(
        !descriptor.hasStateApiServiceDescriptor(),
        "The %s cannot support a %s containing a state %s.",
        BundleProcessor.class.getSimpleName(),
        BeamFnApi.ProcessBundleDescriptor.class.getSimpleName(),
        Endpoints.ApiServiceDescriptor.class.getSimpleName());
    return getProcessor(descriptor, remoteInputDesinations, NoOpStateDelegator.INSTANCE);
} | @Test
public void testBundleCheckpointCallback() throws Exception {
    // A ProcessBundleResponse with residual roots must invoke the checkpoint
    // handler (and progress handler) but not the split/finalization handlers.
    BeamFnDataOutboundAggregator mockInputSender = mock(BeamFnDataOutboundAggregator.class);
    CompletableFuture<InstructionResponse> processBundleResponseFuture = new CompletableFuture<>();
    when(fnApiControlClient.handle(any(BeamFnApi.InstructionRequest.class)))
        .thenReturn(processBundleResponseFuture);
    FullWindowedValueCoder<String> coder =
        FullWindowedValueCoder.of(StringUtf8Coder.of(), Coder.INSTANCE);
    BundleProcessor processor =
        sdkHarnessClient.getProcessor(
            descriptor,
            Collections.singletonList(
                RemoteInputDestination.of(
                    (FullWindowedValueCoder) coder, SDK_GRPC_READ_TRANSFORM)));
    when(dataService.createOutboundAggregator(any(), anyBoolean())).thenReturn(mockInputSender);
    BundleProgressHandler mockProgressHandler = mock(BundleProgressHandler.class);
    BundleSplitHandler mockSplitHandler = mock(BundleSplitHandler.class);
    BundleCheckpointHandler mockCheckpointHandler = mock(BundleCheckpointHandler.class);
    BundleFinalizationHandler mockFinalizationHandler = mock(BundleFinalizationHandler.class);
    ProcessBundleResponse response =
        ProcessBundleResponse.newBuilder()
            .addResidualRoots(DelayedBundleApplication.getDefaultInstance())
            .build();
    try (ActiveBundle activeBundle =
        processor.newBundle(
            Collections.emptyMap(),
            Collections.emptyMap(),
            (request) -> {
                throw new UnsupportedOperationException();
            },
            mockProgressHandler,
            mockSplitHandler,
            mockCheckpointHandler,
            mockFinalizationHandler)) {
        // Completing the future inside the bundle triggers the callbacks on close.
        processBundleResponseFuture.complete(
            InstructionResponse.newBuilder().setProcessBundle(response).build());
    }
    verify(mockProgressHandler).onCompleted(response);
    verify(mockCheckpointHandler).onCheckpoint(response);
    verifyNoMoreInteractions(mockFinalizationHandler, mockSplitHandler);
}
/**
 * Registers an exception handler for the code range [startAddress, endAddress),
 * splitting existing try blocks at the boundaries and filling any gaps so the
 * handler is attached to every sub-range of the requested span.
 */
public void addHandler(int startAddress, int endAddress, EH handler) {
    // Split existing blocks so some block starts exactly at startAddress and
    // some block ends exactly at endAddress.
    TryBounds<EH> bounds = getBoundingRanges(startAddress, endAddress);
    MutableTryBlock<EH> startBlock = bounds.start;
    MutableTryBlock<EH> endBlock = bounds.end;
    int previousEnd = startAddress;
    MutableTryBlock<EH> tryBlock = startBlock;
    /*Now we have the start and end ranges that exactly match the start and end
    of the range being added. We need to iterate over all the ranges from the start
    to end range inclusively, and append the handler to the end of each range's handler
    list. We also need to create a new range for any "holes" in the existing ranges*/
    do
    {
        //is there a hole? If so, add a new range to fill the hole
        if (tryBlock.startCodeAddress > previousEnd) {
            MutableTryBlock<EH> newBlock = new MutableTryBlock<EH>(previousEnd, tryBlock.startCodeAddress);
            tryBlock.prepend(newBlock);
            tryBlock = newBlock;
        }
        tryBlock.addHandler(handler);
        previousEnd = tryBlock.endCodeAddress;
        tryBlock = tryBlock.next;
    } while (tryBlock.prev != endBlock);
} | @Test
public void testHandlerMerge_DifferentAddress() {
    // Overlapping ranges with same exception type but different handler
    // addresses must still merge without throwing.
    TryListBuilder tlb = new TryListBuilder();
    tlb.addHandler(5, 10, new ImmutableExceptionHandler("LException1;", 5));
    tlb.addHandler(0, 15, new ImmutableExceptionHandler("LException1;", 6));
    // no exception should be thrown...
}
/**
 * Returns the per-request streams property overrides with values coerced to
 * their expected types (non-strict coercion).
 */
@JsonProperty("streamsProperties")
public Map<String, Object> getConfigOverrides() {
    return PropertiesUtil.coerceTypes(configOverrides, false);
} | @Test
public void shouldHandleNullPropertyValue() {
// Given:
final KsqlRequest request = new KsqlRequest(
"sql",
Collections.singletonMap(
ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"
),
SOME_REQUEST_PROPS,
null
);
// When:
final Map<String, Object> props = request.getConfigOverrides();
// Then:
assertThat(props.keySet(), hasItem(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG));
assertThat(props.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG), is("earliest"));
} |
// Returns a StringSubject for the value of the fact named `key`, delegating to
// doFactValue with a null index (the no-index form of the lookup).
public StringSubject factValue(String key) {
return doFactValue(key, null);
} | @Test
// Calling factValue on an AssertionError not produced by Truth's failure API
// should itself fail with the standard message keys.
public void nonTruthErrorFactValue() {
Object unused = expectFailureWhenTestingThat(new AssertionError()).factValue("foo");
assertFailureKeys("expected a failure thrown by Truth's failure API", "but was");
}
/**
 * Records a failed TLS handshake: logs the cause at FINE and bumps the metric
 * matching the failure type (or the "unknown" metric when the type cannot be
 * classified), tagged with the connector dimensions derived from the event.
 *
 * Fix: the listener contract only guarantees a Throwable, but the previous code
 * unconditionally downcast it to SSLHandshakeException, which would throw
 * ClassCastException for any other failure cause. Guard the cast and count
 * non-SSLHandshakeException causes as "unknown".
 */
@Override
public void handshakeFailed(Event event, Throwable throwable) {
    log.log(Level.FINE, throwable, () -> "Ssl handshake failed: " + throwable.getMessage());
    String metricName = throwable instanceof SSLHandshakeException
            ? SslHandshakeFailure.fromSslHandshakeException((SSLHandshakeException) throwable)
                    .map(SslHandshakeFailure::metricName)
                    .orElse(MetricDefinitions.SSL_HANDSHAKE_FAILURE_UNKNOWN)
            : MetricDefinitions.SSL_HANDSHAKE_FAILURE_UNKNOWN;
    metric.add(metricName, 1L, metric.createContext(createDimensions(event)));
} | @Test
// When the peer is unavailable (handshakeEvent(false)), only server-side
// dimensions may appear in the metric context — no client IP dimension.
void does_not_include_client_ip_dimension_present_when_peer_unavailable() {
listener.handshakeFailed(handshakeEvent(false), new SSLHandshakeException("Empty server certificate chain"));
verify(metrics).createContext(eq(Map.of("serverName", "connector", "serverPort", 1234)));
}
// Returns a page of (role, username) rows.
// NOTE(review): the count query counts DISTINCT roles while the fetch query
// returns one row per (role, username) pair, so totalCount may not match the
// number of page items — confirm this asymmetry is intended.
@Override
public Page<RoleInfo> getRoles(int pageNo, int pageSize) {
AuthPaginationHelper<RoleInfo> helper = createPaginationHelper();
String sqlCountRows = "SELECT count(*) FROM (SELECT DISTINCT role FROM roles) roles WHERE ";
String sqlFetchRows = "SELECT role,username FROM roles WHERE ";
// Constant predicate keeps the concatenated WHERE clause syntactically valid
// when no filters apply.
String where = " 1=1 ";
try {
Page<RoleInfo> pageInfo = helper.fetchPage(sqlCountRows + where, sqlFetchRows + where,
new ArrayList<String>().toArray(), pageNo, pageSize, ROLE_INFO_ROW_MAPPER);
// Normalize a null page into an empty one so callers never see null.
if (pageInfo == null) {
pageInfo = new Page<>();
pageInfo.setTotalCount(0);
pageInfo.setPageItems(new ArrayList<>());
}
return pageInfo;
} catch (CannotGetJdbcConnectionException e) {
// Connection failures are fatal for this persistence service; log and rethrow.
LogUtil.FATAL_LOG.error("[db-error] " + e.toString(), e);
throw e;
}
} | @Test
// Smoke test: paging over roles returns a non-null page (possibly empty).
void testGetRoles() {
Page<RoleInfo> roles = externalRolePersistService.getRoles(1, 10);
assertNotNull(roles);
}
// Static factory: a Read transform with no configuration, an empty table id and
// a full-table Scan; callers refine it via the with* methods.
public static Read read() {
return new Read(null, "", new Scan());
} | @Test
// Splitting a 4-region table by roughly one region's worth of bytes should
// yield 4 splits that together read the same data as the unsplit source.
public void testReadingWithSplits() throws Exception {
final String table = tmpTable.getName();
final int numRows = 1500;
final int numRegions = 4;
final long bytesPerRow = 100L;
createAndWriteData(table, numRows);
HBaseIO.Read read = HBaseIO.read().withConfiguration(conf).withTableId(table);
HBaseSource source = new HBaseSource(read, null /* estimatedSizeBytes */);
List<? extends BoundedSource<Result>> splits =
source.split(numRows * bytesPerRow / numRegions, null /* options */);
// Test num splits and split equality.
assertThat(splits, hasSize(4));
assertSourcesEqualReferenceSource(source, splits, null /* options */);
}
@Override
public boolean canHandleReturnType(Class<?> returnType) {
    // This aspect applies when the declared return type is (a subtype of) any
    // supported RxJava reactive type.
    for (final Class<?> supportedType : rxSupportedTypes) {
        if (supportedType.isAssignableFrom(returnType)) {
            return true;
        }
    }
    return false;
} | @Test
// Every RxJava 3 reactive type must be accepted by the time-limiter aspect.
public void testCheckTypes() {
assertThat(rxJava3TimeLimiterAspectExt.canHandleReturnType(Flowable.class)).isTrue();
assertThat(rxJava3TimeLimiterAspectExt.canHandleReturnType(Single.class)).isTrue();
assertThat(rxJava3TimeLimiterAspectExt.canHandleReturnType(Observable.class)).isTrue();
assertThat(rxJava3TimeLimiterAspectExt.canHandleReturnType(Completable.class)).isTrue();
assertThat(rxJava3TimeLimiterAspectExt.canHandleReturnType(Maybe.class)).isTrue();
}
public static MetricRegistry getDefault() {
    // Fail fast with a descriptive error when no default registry has been set.
    final MetricRegistry registry = tryGetDefault();
    if (registry != null) {
        return registry;
    }
    throw new IllegalStateException("Default registry name has not been set.");
} | @Test
// Asking for the default registry before one is configured must throw with the
// documented message.
public void errorsWhenDefaultUnset() {
exception.expect(IllegalStateException.class);
exception.expectMessage("Default registry name has not been set.");
SharedMetricRegistries.getDefault();
}
// Records that `eventType` just occurred: reuses the existing event entry if one
// is present (otherwise creates one timestamped now), optionally clears other
// clearable events, optionally restarts the event's elapsed timer, re-sorts the
// event list, and pushes the top event to the Discord presence.
void triggerEvent(final DiscordGameEventType eventType)
{
final Optional<EventWithTime> foundEvent = events.stream().filter(e -> e.type == eventType).findFirst();
final EventWithTime event;
if (foundEvent.isPresent())
{
event = foundEvent.get();
}
else
{
event = new EventWithTime(eventType);
event.setStart(Instant.now());
events.add(event);
}
event.setUpdated(Instant.now());
// Some event types evict every other event marked as clearable.
if (event.getType().isShouldClear())
{
events.removeIf(e -> e.getType() != eventType && e.getType().isShouldBeCleared());
}
// Some event types restart their elapsed-time clock on every trigger.
if (event.getType().isShouldRestart())
{
event.setStart(Instant.now());
}
// Highest priority first; ties broken by most recently updated.
events.sort((a, b) -> ComparisonChain.start()
.compare(b.getType().getPriority(), a.getType().getPriority())
.compare(b.getUpdated(), a.getUpdated())
.result());
log.debug("Events: {}", events);
updatePresenceWithLatestEvent();
} | @Test
// Presence must track the player moving IN_GAME -> city -> back to IN_GAME.
public void testAreaChange()
{
when(discordConfig.elapsedTimeType()).thenReturn(DiscordConfig.ElapsedTimeType.TOTAL);
// Start with state of IN_GAME
ArgumentCaptor<DiscordPresence> captor = ArgumentCaptor.forClass(DiscordPresence.class);
discordState.triggerEvent(DiscordGameEventType.IN_GAME);
verify(discordService, times(1)).updatePresence(captor.capture());
assertEquals(DiscordGameEventType.IN_GAME.getState(), captor.getValue().getState());
// IN_GAME -> CITY
discordState.triggerEvent(DiscordGameEventType.CITY_VARROCK);
verify(discordService, times(2)).updatePresence(captor.capture());
assertEquals(DiscordGameEventType.CITY_VARROCK.getState(), captor.getValue().getState());
// CITY -> IN_GAME
discordState.triggerEvent(DiscordGameEventType.IN_GAME);
verify(discordService, times(3)).updatePresence(captor.capture());
assertEquals(DiscordGameEventType.IN_GAME.getState(), captor.getValue().getState());
}
// Loads the given cloud objects as a single Spark Dataset in `fileFormat`.
// Applies an optional source schema, JSON-encoded datasource options (with a
// legacy-key fallback), optional path-derived partition columns, and finally
// coalesces/repartitions to `numPartitions`. Returns Option.empty() when there
// are no objects to read.
public Option<Dataset<Row>> loadAsDataset(SparkSession spark, List<CloudObjectMetadata> cloudObjectMetadata,
String fileFormat, Option<SchemaProvider> schemaProviderOption, int numPartitions) {
if (LOG.isDebugEnabled()) {
LOG.debug("Extracted distinct files " + cloudObjectMetadata.size()
+ " and some samples " + cloudObjectMetadata.stream().map(CloudObjectMetadata::getPath).limit(10).collect(Collectors.toList()));
}
if (isNullOrEmpty(cloudObjectMetadata)) {
return Option.empty();
}
DataFrameReader reader = spark.read().format(fileFormat);
String datasourceOpts = getStringWithAltKeys(properties, CloudSourceConfig.SPARK_DATASOURCE_OPTIONS, true);
// Only apply an explicit schema when the provider supplies a real one.
if (schemaProviderOption.isPresent()) {
Schema sourceSchema = schemaProviderOption.get().getSourceSchema();
if (sourceSchema != null && !sourceSchema.equals(InputBatch.NULL_SCHEMA)) {
reader = reader.schema(AvroConversionUtils.convertAvroSchemaToStructType(sourceSchema));
}
}
if (StringUtils.isNullOrEmpty(datasourceOpts)) {
// fall back to legacy config for BWC. TODO consolidate in HUDI-6020
datasourceOpts = getStringWithAltKeys(properties, S3EventsHoodieIncrSourceConfig.SPARK_DATASOURCE_OPTIONS, true);
}
// Datasource options are configured as a JSON map of string -> string.
if (StringUtils.nonEmpty(datasourceOpts)) {
final ObjectMapper mapper = new ObjectMapper();
Map<String, String> sparkOptionsMap = null;
try {
sparkOptionsMap = mapper.readValue(datasourceOpts, Map.class);
} catch (IOException e) {
throw new HoodieException(String.format("Failed to parse sparkOptions: %s", datasourceOpts), e);
}
LOG.info(String.format("sparkOptions loaded: %s", sparkOptionsMap));
reader = reader.options(sparkOptionsMap);
}
List<String> paths = new ArrayList<>();
for (CloudObjectMetadata o : cloudObjectMetadata) {
paths.add(o.getPath());
}
// Some reader implementations expect one comma-separated path string rather
// than a path array; this flag selects between the two forms.
boolean isCommaSeparatedPathFormat = properties.getBoolean(SPARK_DATASOURCE_READER_COMMA_SEPARATED_PATH_FORMAT.key(), false);
Dataset<Row> dataset;
if (isCommaSeparatedPathFormat) {
dataset = reader.load(String.join(",", paths));
} else {
dataset = reader.load(paths.toArray(new String[cloudObjectMetadata.size()]));
}
// add partition column from source path if configured
if (containsConfigProperty(properties, PATH_BASED_PARTITION_FIELDS)) {
String[] partitionKeysToAdd = getStringWithAltKeys(properties, PATH_BASED_PARTITION_FIELDS).split(",");
// Add partition column for all path-based partition keys. If key is not present in path, the value will be null.
for (String partitionKey : partitionKeysToAdd) {
String partitionPathPattern = String.format("%s=", partitionKey);
LOG.info(String.format("Adding column %s to dataset", partitionKey));
// Extracts the value between "<key>=" and the next "/" in the file path.
dataset = dataset.withColumn(partitionKey, split(split(input_file_name(), partitionPathPattern).getItem(1), "/").getItem(0));
}
}
dataset = coalesceOrRepartition(dataset, numPartitions);
return Option.of(dataset);
} | @Test
// A single JSON file listed in the metadata loads as a one-row dataset.
public void filesFromMetadataRead() {
CloudObjectsSelectorCommon cloudObjectsSelectorCommon = new CloudObjectsSelectorCommon(new TypedProperties());
List<CloudObjectMetadata> input = Collections.singletonList(new CloudObjectMetadata("src/test/resources/data/partitioned/country=US/state=CA/data.json", 1));
Option<Dataset<Row>> result = cloudObjectsSelectorCommon.loadAsDataset(sparkSession, input, "json", Option.empty(), 1);
Assertions.assertTrue(result.isPresent());
Assertions.assertEquals(1, result.get().count());
Row expected = RowFactory.create("some data");
Assertions.assertEquals(Collections.singletonList(expected), result.get().collectAsList());
}
@Override
public Serializable read(final MySQLBinlogColumnDef columnDef, final MySQLPacketPayload payload) {
    // A binlog DATE is 3 little-endian bytes encoding year * 512 + month * 32 + day.
    final int encoded = payload.getByteBuf().readUnsignedMediumLE();
    if (0 == encoded) {
        // All-zero bytes represent MySQL's special zero date.
        return MySQLTimeValueUtils.ZERO_OF_DATE;
    }
    final int day = encoded % 32;
    final int month = encoded / 32 % 16;
    final int year = encoded / 512;
    return Date.valueOf(LocalDate.of(year, month, day));
} | @Test
// A zero-encoded date must decode to the special zero-date sentinel, not a
// java.sql.Date.
void assertReadNullDate() {
when(payload.getByteBuf()).thenReturn(byteBuf);
when(byteBuf.readUnsignedMediumLE()).thenReturn(0);
assertThat(new MySQLDateBinlogProtocolValue().read(columnDef, payload), is(MySQLTimeValueUtils.ZERO_OF_DATE));
}
public boolean isAbsolute() {
    final String path = uri.getPath();
    // Skip a leading Windows drive spec (e.g. "C:/") before looking for the separator.
    final int offset = hasWindowsDrive(path, true) ? 3 : 0;
    return path.startsWith(SEPARATOR, offset);
} | @Test
// Covers absolute/relative detection for both UNIX-style and Windows-style
// paths, including drive-letter and backslash forms.
void testIsAbsolute() {
// UNIX
Path p = new Path("/my/abs/path");
assertThat(p.isAbsolute()).isTrue();
p = new Path("/");
assertThat(p.isAbsolute()).isTrue();
p = new Path("./my/rel/path");
assertThat(p.isAbsolute()).isFalse();
p = new Path("my/rel/path");
assertThat(p.isAbsolute()).isFalse();
// WINDOWS
p = new Path("C:/my/abs/windows/path");
assertThat(p.isAbsolute()).isTrue();
p = new Path("y:/my/abs/windows/path");
assertThat(p.isAbsolute()).isTrue();
p = new Path("/y:/my/abs/windows/path");
assertThat(p.isAbsolute()).isTrue();
p = new Path("b:\\my\\abs\\windows\\path");
assertThat(p.isAbsolute()).isTrue();
p = new Path("/c:/my/dir");
assertThat(p.isAbsolute()).isTrue();
p = new Path("/C:/");
assertThat(p.isAbsolute()).isTrue();
// A bare drive letter with no separator is drive-relative, i.e. not absolute.
p = new Path("C:");
assertThat(p.isAbsolute()).isFalse();
p = new Path("C:/");
assertThat(p.isAbsolute()).isTrue();
p = new Path("C:my\\relative\\path");
assertThat(p.isAbsolute()).isFalse();
p = new Path("\\my\\dir");
assertThat(p.isAbsolute()).isTrue();
p = new Path("\\");
assertThat(p.isAbsolute()).isTrue();
p = new Path(".\\my\\relative\\path");
assertThat(p.isAbsolute()).isFalse();
p = new Path("my\\relative\\path");
assertThat(p.isAbsolute()).isFalse();
// UNC paths are absolute.
p = new Path("\\\\myServer\\myDir");
assertThat(p.isAbsolute()).isTrue();
}
// Rewrites `path` by replacing the file system's entropy-injection key: with
// injectEntropy=true the key substring is substituted by freshly generated
// entropy, otherwise it is removed entirely. Paths without the key (or file
// systems that declare no key) are returned unchanged.
@VisibleForTesting
static Path resolveEntropy(Path path, EntropyInjectingFileSystem efs, boolean injectEntropy)
throws IOException {
final String entropyInjectionKey = efs.getEntropyInjectionKey();
if (entropyInjectionKey == null) {
return path;
} else {
final URI originalUri = path.toUri();
final String checkpointPath = originalUri.getPath();
final int indexOfKey = checkpointPath.indexOf(entropyInjectionKey);
if (indexOfKey == -1) {
return path;
} else {
// Rebuild the path with the key span either replaced by entropy or dropped.
final StringBuilder buffer = new StringBuilder(checkpointPath.length());
buffer.append(checkpointPath, 0, indexOfKey);
if (injectEntropy) {
buffer.append(efs.generateEntropy());
}
buffer.append(
checkpointPath,
indexOfKey + entropyInjectionKey.length(),
checkpointPath.length());
final String rewrittenPath = buffer.toString();
try {
// Preserve scheme/authority/query/fragment; only the path component changes.
return new Path(
new URI(
originalUri.getScheme(),
originalUri.getAuthority(),
rewrittenPath,
originalUri.getQuery(),
originalUri.getFragment())
.normalize());
} catch (URISyntaxException e) {
// this could only happen if the injected entropy string contains invalid
// characters
throw new IOException(
"URI format error while processing path for entropy injection", e);
}
}
}
} | @Test
// The full URI (scheme, userinfo, host, port) must survive the rewrite; only
// the entropy key segment is substituted or removed.
void testFullUriMatching() throws Exception {
EntropyInjectingFileSystem efs = new TestEntropyInjectingFs("s0mek3y", "12345678");
Path path = new Path("s3://hugo@myawesomehost:55522/path/s0mek3y/the/file");
assertThat(EntropyInjector.resolveEntropy(path, efs, true))
.isEqualTo(new Path("s3://hugo@myawesomehost:55522/path/12345678/the/file"));
assertThat(EntropyInjector.resolveEntropy(path, efs, false))
.isEqualTo(new Path("s3://hugo@myawesomehost:55522/path/the/file"));
}
public PullResult getHalfMessage(int queueId, long offset, int nums) {
    // Pull from the internal half-message (prepare) topic with the dedicated
    // transactional consumer group, matching every tag.
    final String halfTopic = TransactionalMessageUtil.buildHalfTopic();
    final String consumerGroup = TransactionalMessageUtil.buildConsumerGroup();
    final SubscriptionData subscription = new SubscriptionData(halfTopic, "*");
    return getMessage(consumerGroup, halfTopic, queueId, offset, nums, subscription);
} | @Test
// An empty half-message queue must surface as NO_NEW_MSG, not an error.
public void testGetHalfMessage() {
when(messageStore.getMessage(anyString(), anyString(), anyInt(), anyLong(), anyInt(), ArgumentMatchers.nullable(MessageFilter.class))).thenReturn(createGetMessageResult(GetMessageStatus.NO_MESSAGE_IN_QUEUE));
PullResult result = transactionBridge.getHalfMessage(0, 0, 1);
assertThat(result.getPullStatus()).isEqualTo(PullStatus.NO_NEW_MSG);
}
// Parses a GNPy connectivity reply: finds the response whose "response-id"
// equals `name`, extracts the device ids along its reversed path route, and
// fills the A->B and B->A per-device power maps. Returns the ordered device
// list (empty when the reply has no usable result).
// NOTE(review): both `elements` iterators are built from the same
// "reversed-path-route-objects" node — confirm the first was not meant to come
// from the forward path.
protected List<DeviceId> getDeviceAndPopulatePowerMap(JsonNode connectivityReply,
Map<DeviceId, Double> deviceAtoBPowerMap,
Map<DeviceId, Double> deviceBtoAPowerMap,
String name) {
List<DeviceId> deviceIds = new ArrayList<>();
if (connectivityReply.has("result")
&& connectivityReply.get("result").has("response")) {
JsonNode response = connectivityReply.get("result").get("response");
//getting the a-b path.
Iterator<JsonNode> paths = connectivityReply.get("result").get("response")
.elements();
while (paths.hasNext()) {
JsonNode path = paths.next();
if (path.get("response-id").asText().equals(name)) {
Iterator<JsonNode> elements = path.get("path-properties")
.get("reversed-path-route-objects").elements();
Iterable<JsonNode> iterable = () -> elements;
List<JsonNode> elementsList = StreamSupport
.stream(iterable.spliterator(), false)
.collect(Collectors.toList());
Iterator<JsonNode> reversePathRoute = path.get("path-properties")
.get("reversed-path-route-objects").elements();
Iterable<JsonNode> reversedIterable = () -> reversePathRoute;
List<JsonNode> reversedElementsList = StreamSupport
.stream(reversedIterable.spliterator(), false)
.collect(Collectors.toList());
for (int i = 0; i < elementsList.size() - 1; i++) {
if (elementsList.get(i).get("path-route-object").has("num-unnum-hop")) {
String elementId = elementsList.get(i).get("path-route-object")
.get("num-unnum-hop").get("node-id")
.asText();
//TODO this is a workaround until we understand better the
// topology mapping between ONOS and GNPy
if (elementId.startsWith("netconf:")) {
// -99 acts as the "no power available" sentinel.
double power = -99;
if (!elementsList.get(i).get("path-route-object")
.get("num-unnum-hop").get("gnpy-node-type")
.asText().equals("transceiver")) {
power = getPerHopPower(elementsList.get(i + 2));
}
deviceAtoBPowerMap.put(DeviceId.deviceId(elementId), power);
// Find the same device on the reversed route to record B->A power.
for (int j = 0; j < reversedElementsList.size() - 1; j++) {
if (reversedElementsList.get(j).get("path-route-object").has("num-unnum-hop")) {
String reversedElementId = reversedElementsList.get(j).get("path-route-object")
.get("num-unnum-hop").get("node-id")
.asText();
double reversePower = -99;
if (reversedElementId.equals(elementId)) {
reversePower = getPerHopPower(reversedElementsList.get(j + 2));
deviceBtoAPowerMap.put(DeviceId.deviceId(elementId), reversePower);
}
}
}
deviceIds.add(DeviceId.deviceId(elementId));
}
}
}
// Only the response matching `name` is processed.
break;
}
}
} else {
log.warn("Can't retrieve devices {}", connectivityReply);
}
return deviceIds;
} | @Test
// Both direction power maps must be populated from the "second" response.
public void testgetDevicePowerMap() throws IOException {
Map<DeviceId, Double> deviceAtoBPowerMap = new HashMap<>();
Map<DeviceId, Double> deviceBtoAPowerMap = new HashMap<>();
manager.getDeviceAndPopulatePowerMap(reply, deviceAtoBPowerMap, deviceBtoAPowerMap, "second");
assertEquals(-25.0, deviceAtoBPowerMap.get(DeviceId.deviceId("netconf:10.0.254.107:830")));
assertEquals(-12.0, deviceAtoBPowerMap.get(DeviceId.deviceId("netconf:10.0.254.225:830")));
assertEquals(-12.0, deviceBtoAPowerMap.get(DeviceId.deviceId("netconf:10.0.254.225:830")));
assertEquals(-25.0, deviceBtoAPowerMap.get(DeviceId.deviceId("netconf:10.0.254.107:830")));
}
/**
 * Splits a string on every (case-insensitive) occurrence of the separator.
 * Example: splitString("a;b;c;d", ";") returns { "a", "b", "c", "d" }.
 * A null or empty input yields an empty array; a trailing separator does not
 * produce a trailing empty element.
 *
 * Fixes two defects of the previous implementation for multi-character
 * separators: the scan advanced by separator-length steps, so separators at
 * offsets not a multiple of the separator length were missed, and the tail
 * condition (from + sepLen <= length) dropped a final segment shorter than
 * the separator.
 */
public static String[] splitString( String string, String separator ) {
    List<String> list = new ArrayList<>();
    if ( string == null || string.length() == 0 ) {
      return new String[] {};
    }
    int sepLen = separator.length();
    int from = 0;
    int end = string.length() - sepLen + 1;
    // Scan one character at a time so separators at any offset are found.
    for ( int i = from; i < end; i++ ) {
      // regionMatches with ignoreCase=true preserves the old case-insensitive
      // comparison without allocating a substring per position.
      if ( string.regionMatches( true, i, separator, 0, sepLen ) ) {
        // substring never returns null, so no null-to-empty conversion is needed.
        list.add( string.substring( from, i ) );
        from = i + sepLen;
        i += sepLen - 1; // resume scanning after the separator just consumed
      }
    }
    // Append whatever remains after the last separator (the "d" in the example).
    if ( from < string.length() ) {
      list.add( string.substring( from ) );
    }
    return list.toArray( new String[list.size()] );
} | @Test
// Multi-character delimiter, no enclosure, enclosure removal enabled: the
// string must split into exactly the two expected chunks.
public void testSplitStringWithDelimiterAndEnclosureNullMultiCharRemoveEnclosure() {
String mask = "Hello%s world";
String[] chunks = {"Hello", " world"};
String stringToSplit = String.format( mask, DELIMITER2 );
String[] result = Const.splitString( stringToSplit, DELIMITER2, null, true );
assertSplit( result, chunks );
}
// Retargets the named branch to the given snapshot id (staged until commit);
// returns this for call chaining.
@Override
public ManageSnapshots replaceBranch(String name, long snapshotId) {
updateSnapshotReferencesOperation().replaceBranch(name, snapshotId);
return this;
} | @TestTemplate
// After replacing branch1 with branch2, branch1 must resolve to branch2's snapshot.
public void testReplaceBranch() {
table.newAppend().appendFile(FILE_A).set("wap.id", "123").stageOnly().commit();
Snapshot firstSnapshot = Iterables.getOnlyElement(table.snapshots());
table.manageSnapshots().createBranch("branch1", firstSnapshot.snapshotId()).commit();
table.newAppend().appendFile(FILE_B).set("wap.id", "456").stageOnly().commit();
Snapshot secondSnapshot = Iterables.get(table.snapshots(), 1);
table.manageSnapshots().createBranch("branch2", secondSnapshot.snapshotId()).commit();
table.manageSnapshots().replaceBranch("branch1", "branch2").commit();
assertThat(secondSnapshot.snapshotId())
.isEqualTo(table.ops().refresh().ref("branch1").snapshotId());
}
// Builds the CometD BayeuxClient used for Salesforce Streaming API
// subscriptions: shares the component's Jetty HTTP client, configures the
// long-polling transport (including the >110s network delay required by
// Salesforce), performs an eager login unless lazy login is configured, wires
// OAuth headers and cookie storage into the transport, and registers the
// replay extension.
static BayeuxClient createClient(final SalesforceComponent component, final SalesforceSession session)
throws SalesforceException {
// use default Jetty client from SalesforceComponent, it's shared by all consumers
final SalesforceHttpClient httpClient = component.getConfig().getHttpClient();
Map<String, Object> options = new HashMap<>();
/*
The timeout should be greater than 110 sec as per https://github.com/cometd/cometd/issues/1142#issuecomment-1048256297
and https://developer.salesforce.com/docs/atlas.en-us.api_streaming.meta/api_streaming/using_streaming_api_timeouts.htm
*/
options.put(ClientTransport.MAX_NETWORK_DELAY_OPTION, 120000);
if (component.getLongPollingTransportProperties() != null) {
options.putAll(component.getLongPollingTransportProperties());
}
// check login access token
if (session.getAccessToken() == null && !component.getLoginConfig().isLazyLogin()) {
session.login(null);
}
CookieStore cookieStore = new CookieManager().getCookieStore();
HttpCookieStore httpCookieStore = new HttpCookieStore.Default();
ClientTransport transport = new JettyHttpClientTransport(options, httpClient) {
@Override
protected void customize(Request request) {
super.customize(request);
//accessToken might be null due to lazy login
String accessToken = session.getAccessToken();
if (accessToken == null) {
try {
accessToken = session.login(null);
} catch (SalesforceException e) {
throw new RuntimeException(e);
}
}
// NOTE(review): new String(accessToken) is a redundant copy; the value is
// only captured for the header lambda below.
String finalAccessToken = new String(accessToken);
request.headers(h -> h.add(HttpHeader.AUTHORIZATION, "OAuth " + finalAccessToken));
}
@Override
protected void storeCookies(URI uri, Map<String, List<String>> cookies) {
// Bridge java.net cookie parsing into Jetty's HttpCookieStore.
try {
CookieManager cookieManager = new CookieManager(cookieStore, CookiePolicy.ACCEPT_ALL);
cookieManager.put(uri, cookies);
for (java.net.HttpCookie httpCookie : cookieManager.getCookieStore().getCookies()) {
httpCookieStore.add(uri, HttpCookie.from(httpCookie));
}
} catch (IOException x) {
if (LOG.isDebugEnabled()) {
LOG.debug("Could not parse cookies", x);
}
}
}
@Override
protected HttpCookieStore getHttpCookieStore() {
return httpCookieStore;
}
};
BayeuxClient client = new BayeuxClient(getEndpointUrl(component), transport);
// added eagerly to check for support during handshake
client.addExtension(REPLAY_EXTENSION);
return client;
} | @Test
// With lazy login disabled and no access token cached, client creation must
// trigger an eager session.login(null).
public void shouldLoginWhenAccessTokenIsNullAndLazyLoginIsFalse() throws SalesforceException {
final SalesforceHttpClient httpClient = mock(SalesforceHttpClient.class);
httpClient.setTimeout(0L);
final SalesforceEndpointConfig endpointConfig = new SalesforceEndpointConfig();
endpointConfig.setHttpClient(httpClient);
final SalesforceLoginConfig loginConfig = new SalesforceLoginConfig();
loginConfig.setLazyLogin(false);
final SalesforceSession session = mock(SalesforceSession.class);
final SalesforceComponent component = mock(SalesforceComponent.class);
when(component.getLoginConfig()).thenReturn(loginConfig);
when(component.getConfig()).thenReturn(endpointConfig);
when(component.getSession()).thenReturn(session);
BayeuxClient bayeuxClient = SubscriptionHelper.createClient(component, session);
assertNotNull(bayeuxClient);
verify(session).login(null);
}
// Returns the request path by delegating to the wrapped servlet request's URI.
@Override public final String path() {
return delegate.getRequestURI();
} | @Test void path_doesntCrashOnNullUrl() {
// A null request URI must propagate as null rather than throwing.
assertThat(wrapper.path())
.isNull();
}
public byte[] loadContent() {
    // Load the raw configuration text, split it into per-line entries on the
    // platform line separator, then render URL-argument placeholders.
    final String rawContent = urlLoader.load(url.getConfigurationSubject(), url.getQueryProps());
    final Collection<String> lines = Arrays.asList(rawContent.split(System.lineSeparator()));
    return URLArgumentLineRender.render(lines, URLArgumentPlaceholderTypeFactory.valueOf(url.getQueryProps()));
} | @Test
// With no placeholders in the source, loadContent must return the loaded text
// unchanged as bytes.
void assertLoadContent() {
final String lineSeparator = System.lineSeparator();
String content = "foo_driver_fixture_db=2" + lineSeparator + "storage_unit_count=2" + lineSeparator;
ShardingSphereURLLoader urlLoader = mock(ShardingSphereURLLoader.class);
when(urlLoader.load(any(), any())).thenReturn(content);
try (MockedStatic<TypedSPILoader> typedSPILoaderMockedStatic = mockStatic(TypedSPILoader.class)) {
typedSPILoaderMockedStatic.when(() -> TypedSPILoader.getService(ShardingSphereURLLoader.class, "classpath:")).thenReturn(urlLoader);
ShardingSphereURLLoadEngine loadEngine = new ShardingSphereURLLoadEngine(ShardingSphereURL.parse("classpath:xxx"));
assertThat(loadEngine.loadContent(), is(content.getBytes()));
}
}
// Static factory: builds an IntrinsicMapTaskExecutor whose operations share
// the given counter set and execution-state tracker.
public static IntrinsicMapTaskExecutor withSharedCounterSet(
List<Operation> operations,
CounterSet counters,
ExecutionStateTracker executionStateTracker) {
return new IntrinsicMapTaskExecutor(operations, counters, executionStateTracker);
} | @Test
public void testNoOperation() throws Exception {
// Test MapTaskExecutor without a single operation.
ExecutionStateTracker stateTracker = ExecutionStateTracker.newForTest();
try (IntrinsicMapTaskExecutor executor =
IntrinsicMapTaskExecutor.withSharedCounterSet(
new ArrayList<Operation>(), counterSet, stateTracker)) {
// Asking an empty executor for its read operation must fail fast.
thrown.expect(IllegalStateException.class);
thrown.expectMessage("has no operation");
executor.getReadOperation();
}
}
public ChannelUriStringBuilder linger(final Long lingerNs)
{
    // A null value clears the option; a negative duration is rejected up front.
    if (null != lingerNs && lingerNs.longValue() < 0)
    {
        throw new IllegalArgumentException("linger value cannot be negative: " + lingerNs);
    }
    this.linger = lingerNs;
    return this;
} | @Test
// A human-readable linger value ("7200s") in a channel URI must be copied into
// the builder as nanoseconds.
void shouldCopyLingerTimeoutFromChannelUriHumanForm()
{
final ChannelUriStringBuilder builder = new ChannelUriStringBuilder();
builder.linger(ChannelUri.parse("aeron:ipc?linger=7200s"));
assertEquals(TimeUnit.HOURS.toNanos(2), builder.linger());
}
// Converts a JSON-schema string into a Flink TypeInformation. Lenient parsing
// is enabled (comments, unquoted field names, single quotes). Throws
// IllegalArgumentException when the input is not valid JSON.
@SuppressWarnings("unchecked")
public static <T> TypeInformation<T> convert(String jsonSchema) {
Preconditions.checkNotNull(jsonSchema, "JSON schema");
final ObjectMapper mapper = JacksonMapperFactory.createObjectMapper();
mapper.getFactory()
.enable(JsonParser.Feature.ALLOW_COMMENTS)
.enable(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES)
.enable(JsonParser.Feature.ALLOW_SINGLE_QUOTES);
final JsonNode node;
try {
node = mapper.readTree(jsonSchema);
} catch (IOException e) {
throw new IllegalArgumentException("Invalid JSON schema.", e);
}
// The root node is passed twice — presumably as both current node and
// resolution root for internal references; confirm against convertType.
return (TypeInformation<T>) convertType("<root>", node, node);
} | @Test
// An object schema without a "properties" section maps to an empty row type.
void testMissingProperties() {
final TypeInformation<?> result = JsonRowSchemaConverter.convert("{ type: 'object' }");
assertThat(result).isEqualTo(Types.ROW());
}
// Schedules a single page fetch (bounded by maxResponseSize) after the current
// backoff delay, unless the client is closed, a request is in flight, or one is
// already scheduled. Synchronized: guards closed/future/scheduled state.
public synchronized void scheduleRequest(DataSize maxResponseSize)
{
if (closed || (future != null) || scheduled) {
return;
}
scheduled = true;
// start before scheduling to include error delay
backoff.startRequest();
long delayNanos = backoff.getBackoffDelayNanos();
scheduler.schedule(() -> {
try {
initiateRequest(maxResponseSize);
}
catch (Throwable t) {
// should not happen, but be safe and fail the operator
clientCallback.clientFailed(PageBufferClient.this, t);
}
}, delayNanos, NANOSECONDS);
lastUpdate = DateTime.now();
requestsScheduled.incrementAndGet();
} | @Test
// Repeated handler exceptions must count as completed-but-failed requests; the
// client only fails once the error duration exceeds the 30s threshold.
public void testExceptionFromResponseHandler()
throws Exception
{
DataSize expectedMaxSize = new DataSize(10, Unit.MEGABYTE);
TestingTicker ticker = new TestingTicker();
AtomicReference<Duration> tickerIncrement = new AtomicReference<>(new Duration(0, TimeUnit.SECONDS));
TestingHttpClient.Processor processor = (input) -> {
Duration delta = tickerIncrement.get();
ticker.increment(delta.toMillis(), TimeUnit.MILLISECONDS);
throw new RuntimeException("Foo");
};
CyclicBarrier requestComplete = new CyclicBarrier(2);
TestingClientCallback callback = new TestingClientCallback(requestComplete);
URI location = URI.create("http://localhost:8080");
PageBufferClient client = new PageBufferClient(
new HttpRpcShuffleClient(new TestingHttpClient(processor, scheduler), location),
new Duration(30, TimeUnit.SECONDS),
true,
location,
callback,
scheduler,
ticker,
pageBufferClientCallbackExecutor);
assertStatus(client, location, "queued", 0, 0, 0, 0, "not scheduled");
// request processor will throw exception, verify the request is marked a completed
// this starts the error stopwatch
client.scheduleRequest(expectedMaxSize);
requestComplete.await(10, TimeUnit.SECONDS);
assertEquals(callback.getPages().size(), 0);
assertEquals(callback.getCompletedRequests(), 1);
assertEquals(callback.getFinishedBuffers(), 0);
assertEquals(callback.getFailedBuffers(), 0);
assertStatus(client, location, "queued", 0, 1, 1, 1, "not scheduled");
// advance time forward, but not enough to fail the client
tickerIncrement.set(new Duration(30, TimeUnit.SECONDS));
// verify that the client has not failed
client.scheduleRequest(expectedMaxSize);
requestComplete.await(10, TimeUnit.SECONDS);
assertEquals(callback.getPages().size(), 0);
assertEquals(callback.getCompletedRequests(), 2);
assertEquals(callback.getFinishedBuffers(), 0);
assertEquals(callback.getFailedBuffers(), 0);
assertStatus(client, location, "queued", 0, 2, 2, 2, "not scheduled");
// advance time forward beyond the minimum error duration
tickerIncrement.set(new Duration(31, TimeUnit.SECONDS));
// verify that the client has failed
client.scheduleRequest(expectedMaxSize);
requestComplete.await(10, TimeUnit.SECONDS);
assertEquals(callback.getPages().size(), 0);
assertEquals(callback.getCompletedRequests(), 3);
assertEquals(callback.getFinishedBuffers(), 0);
assertEquals(callback.getFailedBuffers(), 1);
assertInstanceOf(callback.getFailure(), PageTransportTimeoutException.class);
assertContains(callback.getFailure().getMessage(), WORKER_NODE_ERROR + " (http://localhost:8080/0 - 3 failures, failure duration 31.00s, total failed request time 31.00s)");
assertStatus(client, location, "queued", 0, 3, 3, 3, "not scheduled");
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.