focal_method stringlengths 13 60.9k | test_case stringlengths 25 109k |
|---|---|
// Focal: idempotent, thread-safe close — releases the underlying stream exactly once.
@Override
synchronized public void close() {
// Guard + null-out makes repeated close() calls safe no-ops.
if (stream != null) {
// cleanupWithLogger closes quietly, logging (not propagating) any IOException.
IOUtils.cleanupWithLogger(LOG, stream);
stream = null;
}
// NOTE(review): the " | " below is a dataset row delimiter, not code.
} | @Test(timeout=120000)
public void testRandomDouble() throws Exception {
OsSecureRandom random = getOsSecureRandom();
double rand1 = random.nextDouble();
double rand2 = random.nextDouble();
// Retry until two draws differ: asserts the RNG is not stuck on a constant value.
while (rand1 == rand2) {
rand2 = random.nextDouble();
}
random.close();
} |
// Focal: builds the ShareFetchContext for a share-fetch request. Full-epoch requests
// (INITIAL/FINAL) tear down any existing session; FINAL returns a FinalContext, INITIAL
// starts a fresh cached session; all other epochs update the existing cached session.
public ShareFetchContext newContext(String groupId, Map<TopicIdPartition, ShareFetchRequest.SharePartitionData> shareFetchData,
List<TopicIdPartition> toForget, ShareRequestMetadata reqMetadata, Boolean isAcknowledgeDataPresent) {
ShareFetchContext context;
// TopicPartition with maxBytes as 0 should not be added in the cachedPartitions
Map<TopicIdPartition, ShareFetchRequest.SharePartitionData> shareFetchDataWithMaxBytes = new HashMap<>();
shareFetchData.forEach((tp, sharePartitionData) -> {
if (sharePartitionData.maxBytes > 0) shareFetchDataWithMaxBytes.put(tp, sharePartitionData);
});
// If the request's epoch is FINAL_EPOCH or INITIAL_EPOCH, we should remove the existing sessions. Also, start a
// new session in case it is INITIAL_EPOCH. Hence, we need to treat them as special cases.
if (reqMetadata.isFull()) {
ShareSessionKey key = shareSessionKey(groupId, reqMetadata.memberId());
if (reqMetadata.epoch() == ShareRequestMetadata.FINAL_EPOCH) {
// If the epoch is FINAL_EPOCH, don't try to create a new session.
// A FINAL request must not carry fetch data — it only closes the session.
if (!shareFetchDataWithMaxBytes.isEmpty()) {
throw Errors.INVALID_REQUEST.exception();
}
if (cache.remove(key) == null) {
log.error("Share session error for {}: no such share session found", key);
throw Errors.SHARE_SESSION_NOT_FOUND.exception();
} else {
// NOTE(review): string concatenation here is inconsistent with the {} placeholder
// style used everywhere else in this method (e.g. the debug log a few lines below).
log.debug("Removed share session with key " + key);
}
context = new FinalContext();
} else {
// INITIAL_EPOCH branch: acknowledgements are illegal on the very first fetch.
if (isAcknowledgeDataPresent) {
log.error("Acknowledge data present in Initial Fetch Request for group {} member {}", groupId, reqMetadata.memberId());
throw Errors.INVALID_REQUEST.exception();
}
// Drop any stale session left over from a previous incarnation of this member.
if (cache.remove(key) != null) {
log.debug("Removed share session with key {}", key);
}
ImplicitLinkedHashCollection<CachedSharePartition> cachedSharePartitions = new
ImplicitLinkedHashCollection<>(shareFetchDataWithMaxBytes.size());
shareFetchDataWithMaxBytes.forEach((topicIdPartition, reqData) ->
cachedSharePartitions.mustAdd(new CachedSharePartition(topicIdPartition, reqData, false)));
// maybeCreateSession returns null when the cache refuses to admit a new session.
ShareSessionKey responseShareSessionKey = cache.maybeCreateSession(groupId, reqMetadata.memberId(),
time.milliseconds(), cachedSharePartitions);
if (responseShareSessionKey == null) {
log.error("Could not create a share session for group {} member {}", groupId, reqMetadata.memberId());
throw Errors.SHARE_SESSION_NOT_FOUND.exception();
}
context = new ShareSessionContext(reqMetadata, shareFetchDataWithMaxBytes);
log.debug("Created a new ShareSessionContext with key {} isSubsequent {} returning {}. A new share " +
"session will be started.", responseShareSessionKey, false,
partitionsToLogString(shareFetchDataWithMaxBytes.keySet()));
}
} else {
// We update the already existing share session.
// Synchronized on the cache so the epoch check + update + touch happen atomically
// with respect to other requests for the same session.
synchronized (cache) {
ShareSessionKey key = shareSessionKey(groupId, reqMetadata.memberId());
ShareSession shareSession = cache.get(key);
if (shareSession == null) {
log.error("Share session error for {}: no such share session found", key);
throw Errors.SHARE_SESSION_NOT_FOUND.exception();
}
// Reject out-of-order requests: the client must echo the session's current epoch.
if (shareSession.epoch != reqMetadata.epoch()) {
log.debug("Share session error for {}: expected epoch {}, but got {} instead", key,
shareSession.epoch, reqMetadata.epoch());
throw Errors.INVALID_SHARE_SESSION_EPOCH.exception();
}
Map<ShareSession.ModifiedTopicIdPartitionType, List<TopicIdPartition>> modifiedTopicIdPartitions = shareSession.update(
shareFetchDataWithMaxBytes, toForget);
cache.touch(shareSession, time.milliseconds());
shareSession.epoch = ShareRequestMetadata.nextEpoch(shareSession.epoch);
log.debug("Created a new ShareSessionContext for session key {}, epoch {}: " +
"added {}, updated {}, removed {}", shareSession.key(), shareSession.epoch,
partitionsToLogString(modifiedTopicIdPartitions.get(
ShareSession.ModifiedTopicIdPartitionType.ADDED)),
partitionsToLogString(modifiedTopicIdPartitions.get(ShareSession.ModifiedTopicIdPartitionType.UPDATED)),
partitionsToLogString(modifiedTopicIdPartitions.get(ShareSession.ModifiedTopicIdPartitionType.REMOVED))
);
context = new ShareSessionContext(reqMetadata, shareSession);
}
}
return context;
} | @Test
public void testZeroSizeShareSession() {
ShareSessionCache cache = new ShareSessionCache(10, 1000);
SharePartitionManager sharePartitionManager = SharePartitionManagerBuilder.builder()
.withCache(cache).build();
Map<Uuid, String> topicNames = new HashMap<>();
Uuid fooId = Uuid.randomUuid();
topicNames.put(fooId, "foo");
TopicIdPartition foo0 = new TopicIdPartition(fooId, new TopicPartition("foo", 0));
TopicIdPartition foo1 = new TopicIdPartition(fooId, new TopicPartition("foo", 1));
// Create a new share session with foo-0 and foo-1
Map<TopicIdPartition, ShareFetchRequest.SharePartitionData> reqData1 = new LinkedHashMap<>();
reqData1.put(foo0, new ShareFetchRequest.SharePartitionData(foo0.topicId(), 100));
reqData1.put(foo1, new ShareFetchRequest.SharePartitionData(foo1.topicId(), 100));
String groupId = "grp";
ShareRequestMetadata reqMetadata1 = new ShareRequestMetadata(Uuid.randomUuid(), ShareRequestMetadata.INITIAL_EPOCH);
ShareFetchContext context1 = sharePartitionManager.newContext(groupId, reqData1, EMPTY_PART_LIST, reqMetadata1, false);
assertEquals(ShareSessionContext.class, context1.getClass());
LinkedHashMap<TopicIdPartition, ShareFetchResponseData.PartitionData> respData1 = new LinkedHashMap<>();
respData1.put(foo0, new ShareFetchResponseData.PartitionData().setPartitionIndex(foo0.partition()));
respData1.put(foo1, new ShareFetchResponseData.PartitionData().setPartitionIndex(foo1.partition()));
ShareFetchResponse resp1 = context1.updateAndGenerateResponseData(groupId, reqMetadata1.memberId(), respData1);
assertEquals(Errors.NONE, resp1.error());
assertEquals(2, resp1.responseData(topicNames).size());
// Create a subsequent share request that removes foo-0 and foo-1
// Verify that the previous share session was closed.
List<TopicIdPartition> removed2 = new ArrayList<>();
removed2.add(foo0);
removed2.add(foo1);
ShareFetchContext context2 = sharePartitionManager.newContext(groupId, Collections.emptyMap(), removed2,
new ShareRequestMetadata(reqMetadata1.memberId(), 1), true);
assertEquals(ShareSessionContext.class, context2.getClass());
LinkedHashMap<TopicIdPartition, ShareFetchResponseData.PartitionData> respData2 = new LinkedHashMap<>();
ShareFetchResponse resp2 = context2.updateAndGenerateResponseData(groupId, reqMetadata1.memberId(), respData2);
assertTrue(resp2.responseData(topicNames).isEmpty());
assertEquals(1, cache.size());
} |
// Focal: materializes a Helium package into bundleDir (local copy or npm-pack download),
// then rewrites package.json/webpack.config.js from the given templates.
// Returns the package's "main" file name as read from its original package.json.
private String downloadPackage(HeliumPackage pkg, String[] nameAndVersion, File bundleDir,
String templateWebpackConfig, String templatePackageJson,
FrontendPluginFactory fpf) throws IOException, TaskRunnerException {
// Start from a clean bundle directory every time.
if (bundleDir.exists()) {
FileUtils.deleteQuietly(bundleDir);
}
FileUtils.forceMkdir(bundleDir);
// Skip editor/VCS droppings (dotfiles, emacs #locks#, ~backups) when copying.
FileFilter copyFilter = new FileFilter() {
@Override
public boolean accept(File pathname) {
String fileName = pathname.getName();
if (fileName.startsWith(".") || fileName.startsWith("#") || fileName.startsWith("~")) {
return false;
} else {
return true;
}
}
};
if (isLocalPackage(pkg)) {
FileUtils.copyDirectory(
new File(pkg.getArtifact()),
bundleDir,
copyFilter);
} else {
// if online package
String version = nameAndVersion[1];
File tgz = new File(heliumLocalRepoDirectory, pkg.getName() + "-" + version + ".tgz");
FileUtils.deleteQuietly(tgz);
// wget, extract and move dir to `bundles/${pkg.getName()}`, and remove tgz
npmCommand(fpf, "pack " + pkg.getArtifact());
// npm pack tarballs always extract to a top-level "package/" directory.
File extracted = new File(heliumBundleDirectory, "package");
FileUtils.deleteDirectory(extracted);
List<String> entries = unTgz(tgz, heliumBundleDirectory);
for (String entry : entries) {
LOGGER.debug("Extracted {}", entry);
}
FileUtils.deleteQuietly(tgz);
FileUtils.copyDirectory(extracted, bundleDir);
FileUtils.deleteDirectory(extracted);
}
// 1. setup package.json
File existingPackageJson = new File(bundleDir, "package.json");
// NOTE(review): this JsonReader (and the underlying FileReader) is never closed —
// resource leak; should be wrapped in try-with-resources. Also uses the platform
// default charset via FileReader.
JsonReader reader = new JsonReader(new FileReader(existingPackageJson));
Map<String, Object> packageJson = gson.fromJson(reader,
new TypeToken<Map<String, Object>>(){}.getType());
Map<String, String> existingDeps = (Map<String, String>) packageJson.get("dependencies");
String mainFileName = (String) packageJson.get("main");
// Re-serialize the dependency map by hand so zeppelin-* deps can be redirected
// to local "file:" modules instead of the registry versions.
StringBuilder dependencies = new StringBuilder();
int index = 0;
for (Map.Entry<String, String> e: existingDeps.entrySet()) {
dependencies.append(" \"").append(e.getKey()).append("\": ");
if (e.getKey().equals("zeppelin-vis") ||
e.getKey().equals("zeppelin-tabledata") ||
e.getKey().equals("zeppelin-spell")) {
dependencies.append("\"file:../../" + HELIUM_LOCAL_MODULE_DIR + "/")
.append(e.getKey()).append("\"");
} else {
dependencies.append("\"").append(e.getValue()).append("\"");
}
// No trailing comma after the last entry (JSON requirement).
if (index < existingDeps.size() - 1) {
dependencies.append(",\n");
}
index = index + 1;
}
FileUtils.deleteQuietly(new File(bundleDir, PACKAGE_JSON));
// replaceFirst treats the tokens as regex — safe here because the placeholders
// are plain words, but the replacement values could contain regex group refs.
templatePackageJson = templatePackageJson.replaceFirst("PACKAGE_NAME", pkg.getName());
templatePackageJson = templatePackageJson.replaceFirst("MAIN_FILE", mainFileName);
templatePackageJson = templatePackageJson.replaceFirst("DEPENDENCIES", dependencies.toString());
FileUtils.write(new File(bundleDir, PACKAGE_JSON), templatePackageJson, StandardCharsets.UTF_8);
// 2. setup webpack.config
FileUtils.write(new File(bundleDir, "webpack.config.js"), templateWebpackConfig, StandardCharsets.UTF_8);
return mainFileName;
} | @Test
void downloadPackage() throws TaskRunnerException {
HeliumPackage pkg =
newHeliumPackage(
HeliumType.VISUALIZATION,
"lodash",
"lodash",
"lodash@3.9.3",
"",
null,
"license",
"icon");
hbf.install(pkg);
System.out.println(new File(nodeInstallationDir, "/node_modules/lodash"));
assertTrue(new File(nodeInstallationDir, "/node_modules/lodash").isDirectory());
} |
// Focal: convenience overload — copies the whole src array starting at index,
// delegating to the (index, src, srcIndex, length) variant. Returns this for chaining.
@Override
public ByteBuf setBytes(int index, byte[] src) {
setBytes(index, src, 0, src.length);
return this;
} | @Test
public void testSetBytesAfterRelease6() {
// Any setBytes on a released buffer must fail with IllegalReferenceCountException.
assertThrows(IllegalReferenceCountException.class, new Executable() {
@Override
public void execute() {
releasedBuffer().setBytes(0, ByteBuffer.allocate(8));
}
});
} |
// Focal: fans the query out to every timeline store that may hold entities of this
// type/filter and merges the results into a single TimelineEntities response.
@Override
public TimelineEntities getEntities(String entityType, Long limit,
Long windowStart, Long windowEnd, String fromId, Long fromTs,
NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
EnumSet<Field> fieldsToRetrieve, CheckAcl checkAcl) throws IOException {
LOG.debug("getEntities type={} primary={}", entityType, primaryFilter);
// relatedCacheItems is populated as a side effect of store selection.
List<EntityCacheItem> relatedCacheItems = new ArrayList<>();
List<TimelineStore> stores = getTimelineStoresForRead(entityType,
primaryFilter, secondaryFilters, relatedCacheItems);
TimelineEntities returnEntities = new TimelineEntities();
for (TimelineStore store : stores) {
LOG.debug("Try timeline store {} for the request", store.getName());
TimelineEntities entities = store.getEntities(entityType, limit,
windowStart, windowEnd, fromId, fromTs, primaryFilter,
secondaryFilters, fieldsToRetrieve, checkAcl);
// A store may legitimately return null; only non-null batches are merged.
if (entities != null) {
returnEntities.addEntities(entities.getEntities());
}
}
return returnEntities;
} | @Test
void testSummaryRead() throws Exception {
// Load data
EntityGroupFSTimelineStore.AppLogs appLogs =
store.new AppLogs(mainTestAppId, mainTestAppDirPath,
AppState.COMPLETED);
MutableCounterLong summaryLogEntityRead
= store.metrics.getGetEntityToSummaryOps();
long numEntityReadBefore = summaryLogEntityRead.value();
TimelineDataManager tdm
= PluginStoreTestUtils.getTdmWithStore(config, store);
appLogs.scanForLogs();
appLogs.parseSummaryLogs(tdm);
// Verify single entity read
PluginStoreTestUtils.verifyTestEntities(tdm);
// Verify multiple entities read
TimelineEntities entities = tdm.getEntities("type_1", null, null, null,
null, null, null, null, EnumSet.allOf(TimelineReader.Field.class),
UserGroupInformation.getLoginUser());
assertThat(entities.getEntities()).hasSize(1);
for (TimelineEntity entity : entities.getEntities()) {
assertEquals((Long) 123L, entity.getStartTime());
}
// Verify metrics
assertEquals(numEntityReadBefore + 5L, summaryLogEntityRead.value());
} |
// Focal: looks up the per-service PermissionFactory and delegates construction.
// Throws IllegalArgumentException when no factory is registered for serviceName.
public static Permission getPermission(String name, String serviceName, String... actions) {
PermissionFactory permissionFactory = PERMISSION_FACTORY_MAP.get(serviceName);
if (permissionFactory == null) {
throw new IllegalArgumentException("No permissions found for service: " + serviceName);
}
return permissionFactory.create(name, actions);
} | @Test
public void getPermission_VectorCollection() {
Permission permission = ActionConstants.getPermission("foo", VectorCollectionServiceUtil.SERVICE_NAME);
assertNotNull(permission);
assertTrue(permission instanceof VectorCollectionPermission);
} |
// Focal: installs a worker-scoped connector context into the SLF4J MDC and returns a
// LoggingContext handle; closing the handle (try-with-resources) restores the MDC.
public static LoggingContext forConnector(String connectorName) {
Objects.requireNonNull(connectorName);
LoggingContext context = new LoggingContext();
// Context must be constructed BEFORE mutating MDC so close() can restore prior state.
MDC.put(CONNECTOR_CONTEXT, prefixFor(connectorName, Scope.WORKER, null));
return context;
} | @Test
public void shouldCreateConnectorLoggingContext() {
assertMdcExtrasUntouched();
assertMdc(null, null, null);
try (LoggingContext loggingContext = LoggingContext.forConnector(CONNECTOR_NAME)) {
assertMdc(CONNECTOR_NAME, null, Scope.WORKER);
log.info("Starting Connector");
}
// MDC must be fully restored after the context is closed.
assertMdcExtrasUntouched();
assertMdc(null, null, null);
} |
// Focal: server-side rename/move via the Brick (Files.com) file-actions API.
// If the target exists and is not merely a case-variant of the source, it is deleted
// first; long-running moves (file migrations) are polled until completion.
@Override
public Path move(final Path file, final Path target, final TransferStatus status, final Delete.Callback delete, final ConnectionCallback callback) throws BackgroundException {
try {
final BrickApiClient client = new BrickApiClient(session);
if(status.isExists()) {
// Case-only renames must NOT delete the target: on a case-insensitive backend
// the "target" is the same file as the source.
if(!new CaseInsensitivePathPredicate(file).test(target)) {
if(log.isWarnEnabled()) {
log.warn(String.format("Delete file %s to be replaced with %s", target, file));
}
new BrickDeleteFeature(session).delete(Collections.singletonList(target), callback, delete);
}
}
// API paths are relative — strip the leading delimiter from absolute paths.
final FileActionEntity entity = new FileActionsApi(client)
.move(new MovePathBody().destination(StringUtils.removeStart(target.getAbsolute(), String.valueOf(Path.DELIMITER))),
StringUtils.removeStart(file.getAbsolute(), String.valueOf(Path.DELIMITER)));
// A non-null migration id means the move runs asynchronously server-side.
if(entity.getFileMigrationId() != null) {
this.poll(client, entity);
}
// Carry the source attributes over to the returned target path.
return target.withAttributes(file.attributes());
}
catch(ApiException e) {
throw new BrickExceptionMappingService().map("Cannot rename {0}", e, file);
}
} | @Test
public void testRenameCaseOnly() throws Exception {
final String name = new AlphanumericRandomStringService().random();
final Path test = new BrickTouchFeature(session).touch(new Path(new DefaultHomeFinderService(session).find(),
StringUtils.capitalize(name), EnumSet.of(Path.Type.file)), new TransferStatus());
final Path target = new BrickMoveFeature(session).move(test, new Path(new DefaultHomeFinderService(session).find(),
StringUtils.lowerCase(name), EnumSet.of(Path.Type.file)), new TransferStatus().exists(true), new Delete.DisabledCallback(), new DisabledConnectionCallback());
assertTrue(new BrickFindFeature(session).find(test));
assertTrue(new BrickFindFeature(session).find(target));
assertEquals(test.attributes(), target.attributes());
new BrickDeleteFeature(session).delete(Collections.singletonList(target), new DisabledLoginCallback(), new Delete.DisabledCallback());
} |
// Focal: on cancellation, after the default interruption handling, additionally
// interrupts the legacy source thread so a blocking source can observe the cancel.
@Override
public void maybeInterruptOnCancel(
Thread toInterrupt, @Nullable String taskName, @Nullable Long timeout) {
super.maybeInterruptOnCancel(toInterrupt, taskName, timeout);
interruptSourceThread();
} | @Test
void testWaitsForSourceThreadOnCancel() throws Exception {
StreamTaskTestHarness<String> harness =
new StreamTaskTestHarness<>(SourceStreamTask::new, STRING_TYPE_INFO);
harness.setupOutputForSingletonOperatorChain();
harness.getStreamConfig().setStreamOperator(new StreamSource<>(new NonStoppingSource()));
harness.invoke();
NonStoppingSource.waitForStart();
// SourceStreamTask should be still waiting for NonStoppingSource after cancellation
harness.getTask().cancel();
harness.waitForTaskCompletion(50, true); // allow task to exit prematurely
assertThat(harness.taskThread.isAlive()).isTrue();
// SourceStreamTask should be still waiting for NonStoppingSource after interruptions
for (int i = 0; i < 10; i++) {
harness.getTask().maybeInterruptOnCancel(harness.getTaskThread(), null, null);
harness.waitForTaskCompletion(50, true); // allow task to exit prematurely
assertThat(harness.taskThread.isAlive()).isTrue();
}
// It should only exit once NonStoppingSource allows for it
NonStoppingSource.forceCancel();
harness.waitForTaskCompletion(Long.MAX_VALUE, true);
} |
// Focal: pretty-prints an XML string using StAX. Re-indents elements one STEP_PREFIX
// per nesting level, preserves CDATA and comments, and collapses empty elements.
// The writer is buffered one START_ELEMENT behind (startElementBuffer) so that
// <a></a> can be emitted as a self-closing/compact element.
public static String format( String xml ) {
XMLStreamReader rd = null;
XMLStreamWriter wr = null;
StringWriter result = new StringWriter();
try {
rd = INPUT_FACTORY.createXMLStreamReader( new StringReader( xml ) );
synchronized ( OUTPUT_FACTORY ) {
// BACKLOG-18743: This object was not thread safe in some scenarios
// causing the `result` variable to have data from other concurrent executions
// and making the final output invalid.
wr = OUTPUT_FACTORY.createXMLStreamWriter( result );
}
// Pending (unwritten) start element — lets END_ELEMENT collapse empty elements.
StartElementBuffer startElementBuffer = null;
// str: accumulated character data; prefix: current indentation; cdata: CDATA run.
StringBuilder str = new StringBuilder();
StringBuilder prefix = new StringBuilder();
StringBuilder cdata = new StringBuilder();
boolean wasStart = false;
boolean wasSomething = false;
while ( rd.hasNext() ) {
int event = rd.next();
if ( event != XMLStreamConstants.CDATA && cdata.length() > 0 ) {
// was CDATA
// Flush consecutive CDATA sections as a single writeCData call.
wr.writeCData( cdata.toString() );
cdata.setLength( 0 );
}
if ( startElementBuffer != null ) {
if ( event == XMLStreamConstants.END_ELEMENT ) {
// Empty element: write start+end together and skip the normal END handling.
startElementBuffer.writeTo( wr, true );
startElementBuffer = null;
prefix.setLength( prefix.length() - STEP_PREFIX.length() );
wasStart = false;
continue;
} else {
startElementBuffer.writeTo( wr, false );
startElementBuffer = null;
}
}
switch ( event ) {
case XMLStreamConstants.START_ELEMENT:
// Meaningful text is preserved; whitespace-only text becomes fresh indentation.
if ( !whitespacesOnly( str ) ) {
wr.writeCharacters( str.toString() );
} else if ( wasSomething ) {
wr.writeCharacters( "\n" + prefix );
}
str.setLength( 0 );
prefix.append( STEP_PREFIX );
startElementBuffer = new StartElementBuffer( rd );
wasStart = true;
wasSomething = true;
break;
case XMLStreamConstants.END_ELEMENT:
prefix.setLength( prefix.length() - STEP_PREFIX.length() );
if ( wasStart ) {
// <a>text</a> on one line: keep text exactly as collected.
wr.writeCharacters( str.toString() );
} else {
if ( !whitespacesOnly( str ) ) {
wr.writeCharacters( str.toString() );
} else {
// Closing tag of a multi-line element goes on its own indented line.
wr.writeCharacters( "\n" + prefix );
}
}
str.setLength( 0 );
wr.writeEndElement();
wasStart = false;
break;
case XMLStreamConstants.SPACE:
case XMLStreamConstants.CHARACTERS:
str.append( rd.getText() );
break;
case XMLStreamConstants.CDATA:
if ( !whitespacesOnly( str ) ) {
wr.writeCharacters( str.toString() );
}
str.setLength( 0 );
// Accumulate; flushed on the next non-CDATA event (see top of loop).
cdata.append( rd.getText() );
wasSomething = true;
break;
case XMLStreamConstants.COMMENT:
if ( !whitespacesOnly( str ) ) {
wr.writeCharacters( str.toString() );
} else if ( wasSomething ) {
wr.writeCharacters( "\n" + prefix );
}
str.setLength( 0 );
wr.writeComment( rd.getText() );
wasSomething = true;
break;
case XMLStreamConstants.END_DOCUMENT:
wr.writeCharacters( "\n" );
wr.writeEndDocument();
break;
default:
throw new RuntimeException( "Unknown XML event: " + event );
}
}
wr.flush();
return result.toString();
} catch ( XMLStreamException ex ) {
throw new RuntimeException( ex );
} finally {
// Best-effort close of both ends; failures here must not mask the real result/error.
try {
if ( wr != null ) {
wr.close();
}
} catch ( Exception ex ) {
}
try {
if ( rd != null ) {
rd.close();
}
} catch ( Exception ex ) {
}
}
} | @Test
public void test4() throws Exception {
String inXml, expectedXml;
try ( InputStream in = XMLFormatterTest.class.getResourceAsStream( "XMLFormatterIn4multilinecdata.xml" ) ) {
inXml = IOUtils.toString( in );
}
try ( InputStream in = XMLFormatterTest.class.getResourceAsStream( "XMLFormatterExpected4multilinecdata.xml" ) ) {
expectedXml = IOUtils.toString( in );
}
String result = XMLFormatter.format( inXml );
assertXMLEqual( expectedXml, result );
} |
// Focal: adapter method — downcasts the sensor-API Measure to the internal
// DefaultMeasure and forwards it together with its input component.
@Override
public void store(Measure newMeasure) {
saveMeasure(newMeasure.inputComponent(), (DefaultMeasure<?>) newMeasure);
} | @Test
public void should_save_external_issue() {
InputFile file = new TestInputFileBuilder("foo", "src/Foo.php").build();
DefaultExternalIssue externalIssue = new DefaultExternalIssue(project).at(new DefaultIssueLocation().on(file));
underTest.store(externalIssue);
ArgumentCaptor<ExternalIssue> argumentCaptor = ArgumentCaptor.forClass(ExternalIssue.class);
verify(moduleIssues).initAndAddExternalIssue(argumentCaptor.capture());
assertThat(argumentCaptor.getValue()).isEqualTo(externalIssue);
} |
// Focal: parses a JSON array of metadata objects. Returns null for a null reader.
// If the last element is the container document (no embedded-resource path) while the
// first element has one — as produced by the streaming serializer — it is moved to
// the front so the main document always comes first.
public static List<Metadata> fromJson(Reader reader) throws IOException {
List<Metadata> ms = null;
if (reader == null) {
return ms;
}
ms = new ArrayList<>();
// CloseShieldReader keeps the caller's reader open when the parser is closed;
// the string-length constraint guards against pathological/malicious input.
try (JsonParser jParser = new JsonFactory()
.setStreamReadConstraints(StreamReadConstraints
.builder()
.maxStringLength(TikaConfig.getMaxJsonStringFieldLength())
.build())
.createParser(CloseShieldReader.wrap(reader))) {
JsonToken token = jParser.nextToken();
if (token != JsonToken.START_ARRAY) {
throw new IOException("metadata list must start with an array, but I see: " + token.name());
}
token = jParser.nextToken();
while (token != JsonToken.END_ARRAY) {
Metadata m = JsonMetadata.readMetadataObject(jParser);
ms.add(m);
token = jParser.nextToken();
}
}
// NOTE(review): dead code — ms was assigned a new ArrayList above and can no
// longer be null at this point.
if (ms == null) {
return null;
}
//if the last object is the main document,
//as happens with the streaming serializer,
//flip it to be the first element.
if (ms.size() > 1) {
Metadata last = ms.get(ms.size() - 1);
String embResourcePath = last.get(TikaCoreProperties.EMBEDDED_RESOURCE_PATH);
if (embResourcePath == null && ms
.get(0)
.get(TikaCoreProperties.EMBEDDED_RESOURCE_PATH) != null) {
ms.add(0, ms.remove(ms.size() - 1));
}
}
return ms;
} | @Test
public void testListCorrupted() throws Exception {
String json = "[{\"k1\":[\"v1\",\"v2\",\"v3\",\"v4\",\"v4\"],\"k2\":\"v1\"}," + "\"k3\":[\"v1\",\"v2\",\"v3\",\"v4\",\"v4\"],\"k4\":\"v1\"}]";
List<Metadata> m = JsonMetadataList.fromJson(null);
assertNull(m);
} |
// Focal: runs every registered pre-measures-computation check; a failing check does
// not abort the step — its message is surfaced as a CE task warning instead.
@Override
public void execute(Context context) {
PreMeasuresComputationCheck.Context extensionContext = new ContextImpl();
for (PreMeasuresComputationCheck extension : extensions) {
try {
extension.onCheck(extensionContext);
} catch (PreMeasuresComputationCheckException pmcce) {
// Convert the check failure into a user-visible task message and continue.
ceTaskMessages.add(new CeTaskMessages.Message(pmcce.getMessage(), System2.INSTANCE.now(), MessageType.GENERIC));
}
}
} | @Test
public void whenCheckThrows_thenLogCeMessage() throws PreMeasuresComputationCheckException {
PreMeasuresComputationCheck check = mock(PreMeasuresComputationCheck.class);
doThrow(new PreMeasuresComputationCheckException("error"))
.when(check).onCheck(any());
newStep(check).execute(new TestComputationStepContext());
var messageCaptor = ArgumentCaptor.forClass(CeTaskMessages.Message.class);
verify(ceTaskMessages).add(messageCaptor.capture());
assertThat(messageCaptor.getValue().getText()).isEqualTo("error");
} |
// Focal: trivial accessor — returns the task's signature, used as part of the
// (backend, task type, signature) identity in the agent task queue.
public long getSignature() {
return this.signature;
} | @Test
public void agentTaskQueueTest() {
AgentTaskQueue.clearAllTasks();
Assert.assertEquals(0, AgentTaskQueue.getTaskNum());
// add
AgentTaskQueue.addTask(createReplicaTask);
Assert.assertEquals(1, AgentTaskQueue.getTaskNum());
// Re-adding the same task (same signature) must be rejected.
Assert.assertFalse(AgentTaskQueue.addTask(createReplicaTask));
// get
AgentTask task = AgentTaskQueue.getTask(backendId1, TTaskType.CREATE, createReplicaTask.getSignature());
Assert.assertEquals(createReplicaTask, task);
Map<TTaskType, Set<Long>> runningTasks = new HashMap<TTaskType, Set<Long>>();
List<AgentTask> diffTasks = AgentTaskQueue.getDiffTasks(backendId1, runningTasks);
Assert.assertEquals(1, diffTasks.size());
// remove
AgentTaskQueue.removeTask(backendId1, TTaskType.CREATE, createReplicaTask.getSignature());
Assert.assertEquals(0, AgentTaskQueue.getTaskNum());
} |
// Focal: hash for REAL values. Canonicalizes so that equal floats hash equally:
// +0.0/-0.0 collapse to one value, and all NaN bit patterns collapse via
// floatToIntBits (which maps every NaN to the canonical NaN representation).
public static long realHashCode(float value)
{
// canonicalize +0 and -0 to a single value
// (the comparison `value == -0` is true for both +0.0f and -0.0f)
value = value == -0 ? 0 : value;
// floatToIntBits converts all NaNs to the same representation
return AbstractIntType.hash(floatToIntBits(value));
} | @Test
public void testRealHashCode()
{
assertEquals(realHashCode(0), realHashCode(Float.parseFloat("-0")));
// 0x7fc01234 is a different representation of NaN
assertEquals(realHashCode(Float.NaN), realHashCode(intBitsToFloat(0x7fc01234)));
} |
// Focal: private constructor — QuerySchedulerFactory is a static utility/factory
// class and must never be instantiated.
private QuerySchedulerFactory() {
} | @Test
public void testQuerySchedulerFactory() {
QueryExecutor queryExecutor = mock(QueryExecutor.class);
ServerMetrics serverMetrics = mock(ServerMetrics.class);
LongAccumulator latestQueryTime = mock(LongAccumulator.class);
PinotConfiguration config = new PinotConfiguration();
config.setProperty(QuerySchedulerFactory.ALGORITHM_NAME_CONFIG_KEY, QuerySchedulerFactory.FCFS_ALGORITHM);
QueryScheduler queryScheduler = QuerySchedulerFactory.create(config, queryExecutor, serverMetrics, latestQueryTime);
assertTrue(queryScheduler instanceof FCFSQueryScheduler);
config.setProperty(QuerySchedulerFactory.ALGORITHM_NAME_CONFIG_KEY, QuerySchedulerFactory.TOKEN_BUCKET_ALGORITHM);
queryScheduler = QuerySchedulerFactory.create(config, queryExecutor, serverMetrics, latestQueryTime);
assertTrue(queryScheduler instanceof TokenPriorityScheduler);
config.setProperty(QuerySchedulerFactory.ALGORITHM_NAME_CONFIG_KEY, QuerySchedulerFactory.BOUNDED_FCFS_ALGORITHM);
queryScheduler = QuerySchedulerFactory.create(config, queryExecutor, serverMetrics, latestQueryTime);
assertTrue(queryScheduler instanceof BoundedFCFSScheduler);
// A fully-qualified class name selects a custom scheduler implementation.
config.setProperty(QuerySchedulerFactory.ALGORITHM_NAME_CONFIG_KEY, TestQueryScheduler.class.getName());
queryScheduler = QuerySchedulerFactory.create(config, queryExecutor, serverMetrics, latestQueryTime);
assertTrue(queryScheduler instanceof TestQueryScheduler);
} |
// Focal: creates a positional reader for an OBS object; the UFS path prefix is
// stripped so the reader addresses the bare object key within the bucket.
@Override
public PositionReader openPositionRead(String path, long fileLength) {
return new OBSPositionReader(mClient, mBucketName, stripPrefixIfPresent(path), fileLength);
} | @Test
public void testOpenPositionRead() {
PositionReader result = mOBSUnderFileSystem.openPositionRead(KEY, 1L);
Assert.assertTrue(result instanceof OBSPositionReader);
} |
// Focal: convenience overload — analyzes the statement with description mode off.
public Analysis analyze(Statement statement)
{
return analyze(statement, false);
} | @Test
public void testGroupByWithQualifiedName()
{
// TODO: verify output
analyze("SELECT a FROM t1 GROUP BY t1.a");
} |
// Focal: sends a single SMS to a member user. When no mobile number is supplied it is
// resolved from the member's user id, then delegates to the generic single-send path
// with the MEMBER user type. Returns the SMS log id of the send.
@Override
public Long sendSingleSmsToMember(String mobile, Long userId, String templateCode, Map<String, Object> templateParams) {
// 如果 mobile 为空,则加载用户编号对应的手机号
// (If mobile is empty, load the phone number associated with the user id.)
if (StrUtil.isEmpty(mobile)) {
mobile = memberService.getMemberUserMobile(userId);
}
// 执行发送 (perform the send)
return sendSingleSms(mobile, userId, UserTypeEnum.MEMBER.getValue(), templateCode, templateParams);
} | @Test
public void testSendSingleSmsToUser() {
// 准备参数 (prepare parameters)
Long userId = randomLongId();
String templateCode = randomString();
Map<String, Object> templateParams = MapUtil.<String, Object>builder().put("code", "1234")
.put("op", "login").build();
// mock memberService 的方法
String mobile = "15601691300";
when(memberService.getMemberUserMobile(eq(userId))).thenReturn(mobile);
// mock SmsTemplateService 的方法
SmsTemplateDO template = randomPojo(SmsTemplateDO.class, o -> {
o.setStatus(CommonStatusEnum.ENABLE.getStatus());
o.setContent("验证码为{code}, 操作为{op}");
o.setParams(Lists.newArrayList("code", "op"));
});
when(smsTemplateService.getSmsTemplateByCodeFromCache(eq(templateCode))).thenReturn(template);
String content = randomString();
when(smsTemplateService.formatSmsTemplateContent(eq(template.getContent()), eq(templateParams)))
.thenReturn(content);
// mock SmsChannelService 的方法
SmsChannelDO smsChannel = randomPojo(SmsChannelDO.class, o -> o.setStatus(CommonStatusEnum.ENABLE.getStatus()));
when(smsChannelService.getSmsChannel(eq(template.getChannelId()))).thenReturn(smsChannel);
// mock SmsLogService 的方法
Long smsLogId = randomLongId();
when(smsLogService.createSmsLog(eq(mobile), eq(userId), eq(UserTypeEnum.MEMBER.getValue()), eq(Boolean.TRUE), eq(template),
eq(content), eq(templateParams))).thenReturn(smsLogId);
// 调用 (invoke: passing mobile=null exercises the lookup-by-userId branch)
Long resultSmsLogId = smsSendService.sendSingleSmsToMember(null, userId, templateCode, templateParams);
// 断言 (assert)
assertEquals(smsLogId, resultSmsLogId);
// 断言调用 (verify the producer was called with the resolved mobile and params)
verify(smsProducer).sendSmsSendMessage(eq(smsLogId), eq(mobile),
eq(template.getChannelId()), eq(template.getApiTemplateId()),
eq(Lists.newArrayList(new KeyValue<>("code", "1234"), new KeyValue<>("op", "login"))));
} |
// Focal: resolves the ObjectFactory via ServiceLoader. With an explicitly configured
// factory class, that one is selected from the loaded services; otherwise the single
// discovered factory (or the default) is used.
ObjectFactory loadObjectFactory() {
Class<? extends ObjectFactory> objectFactoryClass = options.getObjectFactoryClass();
ClassLoader classLoader = classLoaderSupplier.get();
ServiceLoader<ObjectFactory> loader = ServiceLoader.load(ObjectFactory.class, classLoader);
if (objectFactoryClass == null) {
return loadSingleObjectFactoryOrDefault(loader);
}
return loadSelectedObjectFactory(loader, objectFactoryClass);
} | @Test
void test_case_5() {
// Two factories are on the classpath; the options explicitly pick the default one.
io.cucumber.core.backend.Options options = () -> DefaultObjectFactory.class;
ObjectFactoryServiceLoader loader = new ObjectFactoryServiceLoader(
() -> new ServiceLoaderTestClassLoader(ObjectFactory.class,
DefaultObjectFactory.class,
OtherFactory.class),
options);
assertThat(loader.loadObjectFactory(), instanceOf(DefaultObjectFactory.class));
} |
// Focal: delegates service discovery to whichever ZooKeeper service implementation
// the manager currently selects (e.g. per client version).
@Override
public Collection<String> getServices() {
return zkServiceManager.chooseService().getServices();
} | @Test
public void getServices() {
Mockito.when(zkService34.getServices()).thenReturn(Collections.singletonList("serviceName"));
Assert.assertEquals(zkDiscoveryClient.getServices().size(), 1);
} |
// Focal: FEEL time() builtin for string input. Parses the string and returns the most
// specific temporal type: OffsetTime when an offset is present, LocalTime when there
// is no zone information at all, and a ZoneTime wrapper for zone-id times.
public FEELFnResult<TemporalAccessor> invoke(@ParameterName("from") String val) {
if ( val == null ) {
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "from", "cannot be null"));
}
try {
TemporalAccessor parsed = FEEL_TIME.parse(val);
if (parsed.query(TemporalQueries.offset()) != null) {
// it is an offset-zoned time, so I can know for certain an OffsetTime
OffsetTime asOffSetTime = parsed.query(OffsetTime::from);
return FEELFnResult.ofResult(asOffSetTime);
} else if (parsed.query(TemporalQueries.zone()) == null) {
// if it does not contain any zone information at all, then I know for certain is a local time.
LocalTime asLocalTime = parsed.query(LocalTime::from);
return FEELFnResult.ofResult(asLocalTime);
} else if (parsed.query(TemporalQueries.zone()) != null) {
// Zone-id (region) time: remember whether seconds were written so the value
// round-trips with the same precision it was given.
boolean hasSeconds = timeStringWithSeconds(val);
LocalTime asLocalTime = parsed.query(LocalTime::from);
ZoneId zoneId = parsed.query(TemporalQueries.zone());
ZoneTime zoneTime = ZoneTime.of(asLocalTime, zoneId, hasSeconds);
return FEELFnResult.ofResult(zoneTime);
}
// Unreachable in practice (the branches above are exhaustive) — kept as a fallback.
return FEELFnResult.ofResult(parsed);
} catch (DateTimeException e) {
return manageDateTimeException(e, val);
}
} | @Test
void invokeTimeUnitsParamsNoOffset() {
FunctionTestUtil.assertResult(timeFunction.invoke(10, 43, 15, null), LocalTime.of(10, 43, 15));
} |
// Focal: records audit data-influences for a batch of entities. For each entity it
// reads the persistence-id field plus every @ApolloAuditLogDataInfluenceTableField
// field reflectively and appends one influence record per annotated field.
// No-op when the bean definition yields no audit table name.
@Override
public void appendDataInfluences(List<Object> entities, Class<?> beanDefinition) {
String tableName = ApolloAuditUtil.getApolloAuditLogTableName(beanDefinition);
if (Objects.isNull(tableName) || tableName.equals("")) {
return;
}
List<Field> dataInfluenceFields = ApolloAuditUtil.getAnnotatedFields(
ApolloAuditLogDataInfluenceTableField.class, beanDefinition);
Field idField = ApolloAuditUtil.getPersistenceIdFieldByAnnotation(beanDefinition);
entities.forEach(e -> {
try {
// Reflection access requires the fields to be made accessible first.
idField.setAccessible(true);
String tableId = idField.get(e).toString();
for (Field f : dataInfluenceFields) {
f.setAccessible(true);
String val = String.valueOf(f.get(e));
String fieldName = f.getAnnotation(ApolloAuditLogDataInfluenceTableField.class).fieldName();
appendDataInfluence(tableName, tableId, fieldName, val);
}
} catch (IllegalAccessException ex) {
// An entity that does not match beanDefinition surfaces as IllegalArgumentException.
throw new IllegalArgumentException("failed append data influence, "
+ "might due to wrong beanDefinition for entity audited", ex);
}
});
} | @Test
public void testAppendDataInfluencesCaseWrongBeanDefinition() {
List<Object> entities = new ArrayList<>();
entities.add(new Object());
assertThrows(IllegalArgumentException.class, () -> {
api.appendDataInfluences(entities, MockDataInfluenceEntity.class);
});
} |
/**
 * Returns version/build information about the Timeline service.
 *
 * @param req the incoming request (injected by JAX-RS, not read here)
 * @param res the response, passed to {@code init(res)} to set common headers
 * @return a {@code TimelineAbout} payload describing this API
 */
@GET
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8
    /* , MediaType.APPLICATION_XML */})
public TimelineAbout about(
    @Context HttpServletRequest req,
    @Context HttpServletResponse res) {
  init(res);
  return TimelineUtils.createTimelineAbout("Timeline API");
} | @Test
void testAbout() throws Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("timeline")
.accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
response.getType().toString());
TimelineAbout actualAbout = response.getEntity(TimelineAbout.class);
TimelineAbout expectedAbout =
TimelineUtils.createTimelineAbout("Timeline API");
assertNotNull(
actualAbout, "Timeline service about response is null");
assertEquals(expectedAbout.getAbout(), actualAbout.getAbout());
assertEquals(expectedAbout.getTimelineServiceVersion(),
actualAbout.getTimelineServiceVersion());
assertEquals(expectedAbout.getTimelineServiceBuildVersion(),
actualAbout.getTimelineServiceBuildVersion());
assertEquals(expectedAbout.getTimelineServiceVersionBuiltOn(),
actualAbout.getTimelineServiceVersionBuiltOn());
assertEquals(expectedAbout.getHadoopVersion(),
actualAbout.getHadoopVersion());
assertEquals(expectedAbout.getHadoopBuildVersion(),
actualAbout.getHadoopBuildVersion());
assertEquals(expectedAbout.getHadoopVersionBuiltOn(),
actualAbout.getHadoopVersionBuiltOn());
} |
/**
 * Deep-clones the given value, preferring Java serialization and falling back
 * to JSON (de)serialization for structures that are not fully Serializable.
 *
 * Strategy: strings are returned as-is (immutable); collections/maps whose
 * elements are not Serializable are round-tripped through JSON with their
 * runtime parameter types; JsonNode uses its own deepCopy; everything else
 * tries Java serialization first, then JSON as a last resort.
 */
@Override
public <T> T clone(T object) {
    if (object instanceof String) {
        return object;
    } else if (object instanceof Collection) {
        // Inspect the first non-null element to decide whether Java
        // serialization can handle the collection contents.
        Object firstElement = findFirstNonNullElement((Collection) object);
        if (firstElement != null && !(firstElement instanceof Serializable)) {
            // NOTE(review): parametrizes on the first element's class only;
            // assumes a homogeneous collection — confirm for mixed-type inputs.
            JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), firstElement.getClass());
            return objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type);
        }
    } else if (object instanceof Map) {
        Map.Entry firstEntry = this.findFirstNonNullEntry((Map) object);
        if (firstEntry != null) {
            Object key = firstEntry.getKey();
            Object value = firstEntry.getValue();
            if (!(key instanceof Serializable) || !(value instanceof Serializable)) {
                JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), key.getClass(), value.getClass());
                return (T) objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type);
            }
        }
    } else if (object instanceof JsonNode) {
        // Jackson trees provide their own deep-copy mechanism.
        return (T) ((JsonNode) object).deepCopy();
    }
    if (object instanceof Serializable) {
        try {
            return (T) SerializationHelper.clone((Serializable) object);
        } catch (SerializationException e) {
            //it is possible that object itself implements java.io.Serializable, but underlying structure does not
            //in this case we switch to the other JSON marshaling strategy which doesn't use the Java serialization
        }
    }
    // Last resort: JSON round-trip clone.
    return jsonClone(object);
} | @Test
public void should_clone_map_of_non_serializable_key() {
Map<NonSerializableObject, String> original = new HashMap<>();
original.put(new NonSerializableObject("key"), "value");
Object cloned = serializer.clone(original);
assertEquals(original, cloned);
assertNotSame(original, cloned);
} |
/**
 * Validates a GitLab personal access token by calling the {@code /user}
 * endpoint of the given GitLab instance.
 *
 * @param gitlabUrl base API URL of the GitLab instance
 * @param personalAccessToken token sent in the private-token header
 * @throws IllegalArgumentException if the call fails, the response is not
 *         successful, or the payload is not the expected JSON document
 */
public void checkToken(String gitlabUrl, String personalAccessToken) {
    String url = format("%s/user", gitlabUrl);
    LOG.debug("get current user : [{}]", url);
    Request.Builder builder = new Request.Builder()
      .addHeader(PRIVATE_TOKEN, personalAccessToken)
      .url(url)
      .get();
    Request request = builder.build();
    String errorMessage = "Could not validate GitLab token. Got an unexpected answer.";
    // try-with-resources closes the response body even on failure paths.
    try (Response response = client.newCall(request).execute()) {
      checkResponseIsSuccessful(response, errorMessage);
      // Parsing the body verifies the payload is the expected JSON user document.
      GsonId.parseOne(response.body().string());
    } catch (JsonSyntaxException e) {
      // Chain the cause so the underlying parse failure stays diagnosable.
      throw new IllegalArgumentException("Could not parse GitLab answer to verify token. Got a non-json payload as result.", e);
    } catch (IOException e) {
      logException(url, e);
      // Message is part of the observable contract; keep it, but chain the cause.
      throw new IllegalArgumentException(errorMessage, e);
    }
} | @Test
public void fail_check_token_with_unexpected_io_exception_with_detailed_log() throws IOException {
server.shutdown();
assertThatThrownBy(() -> underTest.checkToken(gitlabUrl, "token"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Could not validate GitLab token. Got an unexpected answer.");
assertThat(logTester.logs(Level.INFO).get(0))
.contains("Gitlab API call to [" + server.url("user") + "] " +
"failed with error message : [Failed to connect to " + server.getHostName());
} |
/**
 * Executes an SMPP "replace short message" command for the given exchange:
 * builds a ReplaceSm template from headers, attaches the message body, sends
 * it through the session, and records the message id on the result message.
 *
 * @throws SmppException wrapping any failure reported by the SMPP session
 */
@Override
public void execute(Exchange exchange) throws SmppException {
    byte[] message = getShortMessage(exchange.getIn());
    ReplaceSm replaceSm = createReplaceSmTempate(exchange);
    replaceSm.setShortMessage(message);
    if (log.isDebugEnabled()) {
        log.debug("Sending replacement command for a short message for exchange id '{}' and message id '{}'",
                exchange.getExchangeId(), replaceSm.getMessageId());
    }
    try {
        // Forward all template fields to the session; the wire-level call
        // expects each field individually rather than the ReplaceSm object.
        session.replaceShortMessage(
                replaceSm.getMessageId(),
                TypeOfNumber.valueOf(replaceSm.getSourceAddrTon()),
                NumberingPlanIndicator.valueOf(replaceSm.getSourceAddrNpi()),
                replaceSm.getSourceAddr(),
                replaceSm.getScheduleDeliveryTime(),
                replaceSm.getValidityPeriod(),
                new RegisteredDelivery(replaceSm.getRegisteredDelivery()),
                replaceSm.getSmDefaultMsgId(),
                replaceSm.getShortMessage());
    } catch (Exception e) {
        // Normalize any session failure into the component's exception type.
        throw new SmppException(e);
    }
    if (log.isDebugEnabled()) {
        log.debug("Sent replacement command for a short message for exchange id '{}' and message id '{}'",
                exchange.getExchangeId(), replaceSm.getMessageId());
    }
    Message rspMsg = ExchangeHelper.getResultMessage(exchange);
    rspMsg.setHeader(SmppConstants.ID, replaceSm.getMessageId());
} | @Test
public void eightBitDataCodingOverridesDefaultAlphabet() throws Exception {
final int binDataCoding = 0xF7; /* GSM 8-bit class 3 */
byte[] body = { (byte) 0xFF, 'A', 'B', (byte) 0x00, (byte) 0xFF, (byte) 0x7F, 'C', (byte) 0xFF };
Exchange exchange = new DefaultExchange(new DefaultCamelContext(), ExchangePattern.InOut);
exchange.getIn().setHeader(SmppConstants.COMMAND, "ReplaceSm");
exchange.getIn().setHeader(SmppConstants.ALPHABET, Alphabet.ALPHA_DEFAULT.value());
exchange.getIn().setHeader(SmppConstants.DATA_CODING, binDataCoding);
exchange.getIn().setBody(body);
command.execute(exchange);
verify(session).replaceShortMessage((String) isNull(),
eq(TypeOfNumber.UNKNOWN),
eq(NumberingPlanIndicator.UNKNOWN),
eq("1616"),
(String) isNull(),
(String) isNull(),
eq(new RegisteredDelivery(SMSCDeliveryReceipt.SUCCESS_FAILURE)),
eq((byte) 0),
eq(body));
} |
/**
 * Returns the plugin name used to identify this plugin: "hystrix".
 */
@Override
public String named() {
    return PluginEnum.HYSTRIX.getName();
} | @Test
public void testNamed() {
assertEquals(hystrixPlugin.named(), PluginEnum.HYSTRIX.getName());
} |
/**
 * Builds an in-memory KeyStore from a PEM certificate chain and a PKCS#8
 * private key file, wrapping every low-level failure in a single
 * GeneralSecurityException for callers.
 *
 * @param certChainFile path to the PEM certificate chain
 * @param keyFile path to the (possibly encrypted) private key
 * @param keyPasswordChars password for the key, or null/empty if unencrypted
 * @throws GeneralSecurityException on any parsing, crypto, or I/O failure
 */
public static KeyStore buildKeyStore(Path certChainFile, Path keyFile, char[] keyPasswordChars) throws GeneralSecurityException {
    try {
        return doBuildKeyStore(certChainFile, keyFile, keyPasswordChars);
    } catch (KeyStoreException | NoSuchAlgorithmException | InvalidKeySpecException | CertificateException |
             KeyException | IOException | PKCSException | OperatorCreationException e) {
        // Collapse the wide checked-exception surface into one type, keeping the cause.
        throw new GeneralSecurityException(e);
    }
} | @Test
public void testBuildKeyStoreWithSecuredPrivateKey() throws Exception {
final Path certChainFile = Paths.get(Resources.getResource("org/graylog2/shared/security/tls/chain.crt").toURI());
final Path keyFile = Paths.get(Resources.getResource("org/graylog2/shared/security/tls/key-enc-pbe1.p8").toURI());
final KeyStore keyStore = PemKeyStore.buildKeyStore(certChainFile, keyFile, "password".toCharArray());
final Certificate[] keys = keyStore.getCertificateChain("key");
assertThat(keys).hasSize(2);
final Key key = keyStore.getKey("key", "password".toCharArray());
assertThat(key.getFormat()).isEqualTo("PKCS#8");
assertThat(key.getEncoded()).isNotEmpty();
} |
// KSQL UDF: substring replacement. The @Udf description is user-facing
// documentation surfaced by `DESCRIBE FUNCTION`; typo fixed ("occurences"
// and the missing "replaced").
@Udf(description = "Returns a new string with all occurrences of oldStr in str replaced with newStr")
public String replace(
    @UdfParameter(
        description = "The source string. If null, then function returns null.") final String str,
    @UdfParameter(
        description = "The substring to replace."
            + " If null, then function returns null.") final String oldStr,
    @UdfParameter(
        description = "The string to replace the old substrings with."
            + " If null, then function returns null.") final String newStr) {
  // Null-propagating semantics: any null argument yields a null result.
  if (str == null || oldStr == null || newStr == null) {
    return null;
  }
  return str.replace(oldStr, newStr);
} | @Test
public void shouldReplace() {
assertThat(udf.replace("foobar", "foo", "bar"), is("barbar"));
assertThat(udf.replace("foobar", "fooo", "bar"), is("foobar"));
assertThat(udf.replace("foobar", "o", ""), is("fbar"));
assertThat(udf.replace("abc", "", "n"), is("nanbncn"));
} |
/**
 * Rewrites the expression tree by applying the OperatorPlugin's
 * transformation to every node; returns the rewritten expression.
 */
public Expression rewrite(final Expression expression) {
    return new ExpressionTreeRewriter<>(new OperatorPlugin()::process)
        .rewrite(expression, null);
} | @Test
public void shouldReplaceQualifiedColumns() {
// Given:
final Expression predicate = getPredicate(
"SELECT * FROM orders where ORDERS.ROWTIME > '2017-01-01T00:00:00.000';");
// When:
final Expression rewritten = rewriter.rewrite(predicate);
// Then:
assertThat(rewritten.toString(), is(String.format("(ORDERS.ROWTIME > %d)", A_TIMESTAMP)));
} |
/**
 * Decodes an ABI-encoded function result into typed values, delegating to
 * the shared decoder instance.
 *
 * @param rawInput hex-encoded return data
 * @param outputParameters expected output types, in declaration order
 */
public static List<Type> decode(String rawInput, List<TypeReference<Type>> outputParameters) {
    return decoder.decodeFunctionResult(rawInput, outputParameters);
} | @Test
public void testDecodeDynamicNested3() {
String rawInput =
"0x0000000000000000000000000000000000000000000000000000000000000020"
+ "0000000000000000000000000000000000000000000000000000000000000020"
+ "0000000000000000000000000000000000000000000000000000000000000020"
+ "0000000000000000000000000000000000000000000000000000000000000040"
+ "0000000000000000000000000000000000000000000000000000000000000080"
+ "0000000000000000000000000000000000000000000000000000000000000001"
+ "3400000000000000000000000000000000000000000000000000000000000000"
+ "0000000000000000000000000000000000000000000000000000000000000009"
+ "6e6573746564466f6f0000000000000000000000000000000000000000000000";
assertEquals(
FunctionReturnDecoder.decode(
rawInput, AbiV2TestFixture.getNarFunction.getOutputParameters()),
Arrays.asList(
new AbiV2TestFixture.Nar(
new AbiV2TestFixture.Nuu(
new AbiV2TestFixture.Foo("4", "nestedFoo")))));
} |
/**
 * Sequence is immutable on this implementation; always throws
 * {@link UnsupportedOperationException}.
 */
@Override
public void setSequence(long sequence) {
    throw new UnsupportedOperationException();
} | @Test(expected = UnsupportedOperationException.class)
public void testSetSequence() {
batchEventData.setSequence(1);
} |
/**
 * Loads a file configuration by primary key; returns null when absent
 * (MyBatis selectById semantics).
 */
@Override
public FileConfigDO getFileConfig(Long id) {
    return fileConfigMapper.selectById(id);
} | @Test
public void testGetFileConfig() {
// mock 数据
FileConfigDO dbFileConfig = randomFileConfigDO().setMaster(false);
fileConfigMapper.insert(dbFileConfig);// @Sql: 先插入出一条存在的数据
// 准备参数
Long id = dbFileConfig.getId();
// 调用,并断言
assertPojoEquals(dbFileConfig, fileConfigService.getFileConfig(id));
} |
/**
 * Invokes a REST handler and converts both its result and any failure into
 * an HTTP Response. When security is enabled and no client user is bound to
 * the current thread, binds the server user first.
 *
 * @param callable the handler to invoke
 * @param alluxioConf configuration used for security checks and error output
 * @param headers optional extra headers for the success response
 */
public static <T> Response call(RestUtils.RestCallable<T> callable,
    AlluxioConfiguration alluxioConf, @Nullable Map<String, Object> headers) {
  try {
    // TODO(cc): reconsider how to enable authentication
    if (SecurityUtils.isSecurityEnabled(alluxioConf)
        && AuthenticatedClientUser.get(alluxioConf) == null) {
      AuthenticatedClientUser.set(ServerUserState.global().getUser().getName());
    }
  } catch (IOException e) {
    // Authentication setup failure is reported to the client, not rethrown.
    LOG.warn("Failed to set AuthenticatedClientUser in REST service handler: {}", e.toString());
    return createErrorResponse(e, alluxioConf);
  }
  try {
    return createResponse(callable.call(), alluxioConf, headers);
  } catch (Exception e) {
    // Any handler failure becomes a structured error response.
    LOG.warn("Unexpected error invoking rest endpoint: {}", e.toString());
    return createErrorResponse(e, alluxioConf);
  }
} | @Test
public void stringOkResponse() throws Exception {
final String message = "ok";
Response response = RestUtils.call(new RestUtils.RestCallable<String>() {
@Override
public String call() throws Exception {
return message;
}
}, Configuration.global());
Assert.assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
ObjectMapper mapper = new ObjectMapper();
String jsonMessage = mapper.writeValueAsString(message);
Assert.assertEquals(jsonMessage, response.getEntity());
} |
// Test hook: allows injecting a mock LevelDB factory (e.g. to simulate
// open failures and verify repair behavior).
@VisibleForTesting
void setFactory(JniDBFactory fact) {
    this.factory = fact;
} | @Test
void testLevelDbRepair() throws IOException {
LeveldbTimelineStore store = new LeveldbTimelineStore();
JniDBFactory factory = Mockito.mock(JniDBFactory.class);
Mockito.when(
factory.open(Mockito.any(File.class), Mockito.any(Options.class)))
.thenThrow(new IOException()).thenCallRealMethod();
store.setFactory(factory);
//Create the LevelDb in a different location
File path = new File("target", this.getClass().getSimpleName() +
"-tmpDir1").getAbsoluteFile();
Configuration conf = new Configuration(this.config);
conf.set(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH,
path.getAbsolutePath());
try {
store.init(conf);
Mockito.verify(factory, Mockito.times(1))
.repair(Mockito.any(File.class), Mockito.any(Options.class));
FileFilter fileFilter = WildcardFileFilter.builder()
.setWildcards("*" + LeveldbTimelineStore.BACKUP_EXT +"*")
.get();
assertTrue(path.listFiles(fileFilter).length > 0);
} finally {
store.close();
fsContext.delete(new Path(path.getAbsolutePath()), true);
}
} |
/**
 * Runs every registered post-measures-computation check against a shared
 * context built from the current analysis.
 */
@Override
public void execute(ComputationStep.Context context) {
    PostMeasuresComputationCheck.Context extensionContext = new ContextImpl();
    for (PostMeasuresComputationCheck extension : extensions) {
        extension.onCheck(extensionContext);
    }
} | @Test
public void context_contains_ncloc_when_available() {
PostMeasuresComputationCheck check = mock(PostMeasuresComputationCheck.class);
measureRepository.addRawMeasure(DUMB_PROJECT.getReportAttributes().getRef(), CoreMetrics.NCLOC_KEY, Measure.newMeasureBuilder().create(10));
newStep(check).execute(new TestComputationStepContext());
ArgumentCaptor<Context> contextArgumentCaptor = ArgumentCaptor.forClass(Context.class);
verify(check).onCheck(contextArgumentCaptor.capture());
assertThat(contextArgumentCaptor.getValue().getNcloc()).isEqualTo(10);
} |
// Tika config setter: when true, parsing an encrypted payload raises an
// EncryptedDocumentException instead of being skipped silently.
@Field
public void setThrowOnEncryptedPayload(boolean throwOnEncryptedPayload) {
    defaultConfig.setThrowOnEncryptedPayload(throwOnEncryptedPayload);
} | @Test
public void testThrowOnEncryptedPayload() throws Exception {
PDFParserConfig pdfParserConfig = new PDFParserConfig();
pdfParserConfig.setThrowOnEncryptedPayload(true);
ParseContext parseContext = new ParseContext();
parseContext.set(PDFParserConfig.class, pdfParserConfig);
assertThrows(EncryptedDocumentException.class, () -> {
getRecursiveMetadata("testMicrosoftIRMServices.pdf", parseContext);
});
} |
/**
 * Renders the first {@code count} bytes of the buffer as a string using the
 * configured default charset; bytes past the current position are excluded.
 */
public String toString() {
    return new String(buffer, 0, count, ConfigConstants.DEFAULT_CHARSET);
} | @Test
void testToString() throws IOException {
byte[] data = "1234567890".getBytes(ConfigConstants.DEFAULT_CHARSET);
try (ByteArrayOutputStreamWithPos stream = new ByteArrayOutputStreamWithPos(data.length)) {
stream.write(data);
assertThat(stream.toString().getBytes(ConfigConstants.DEFAULT_CHARSET))
.containsExactly(data);
for (int i = 0; i < data.length; i++) {
stream.setPosition(i);
assertThat(stream.toString().getBytes(ConfigConstants.DEFAULT_CHARSET))
.containsExactly(Arrays.copyOf(data, i));
}
// validate that the stored bytes are still tracked properly even when expanding array
stream.setPosition(data.length + 1);
assertThat(stream.toString().getBytes(ConfigConstants.DEFAULT_CHARSET))
.containsExactly(Arrays.copyOf(data, data.length + 1));
}
} |
/**
 * Destroys the buffer: drops all pages, marks it destroyed, and completes
 * any pending read with an empty result.
 *
 * The page dereference and pending-read completion deliberately happen
 * OUTSIDE the synchronized block to avoid invoking listener/callback code
 * while holding this buffer's lock.
 */
public void destroy()
{
    List<SerializedPageReference> removedPages;
    PendingRead pendingRead;
    synchronized (this) {
        // Snapshot and clear state under the lock.
        removedPages = ImmutableList.copyOf(pages);
        pages.clear();
        bufferedBytes.getAndSet(0);
        noMorePages = true;
        destroyed.set(true);
        pendingRead = this.pendingRead;
        this.pendingRead = null;
    }
    // Release references and notify listeners outside the lock.
    dereferencePages(removedPages, onPagesReleased);
    if (pendingRead != null) {
        pendingRead.completeResultFutureWithEmpty();
    }
} | @Test
public void testReferenceCount()
{
AtomicInteger releasedPages = new AtomicInteger(0);
PagesReleasedListener onPagesReleased = (lifespan, releasedPagesCount, releasedSizeInBytes) -> {
releasedPages.addAndGet(releasedPagesCount);
};
ClientBuffer buffer = new ClientBuffer(TASK_INSTANCE_ID, BUFFER_ID, onPagesReleased);
// add 2 pages and verify they are referenced
addPage(buffer, createPage(0));
addPage(buffer, createPage(1));
assertEquals(releasedPages.get(), 0);
assertBufferInfo(buffer, 2, 0);
// read one page
assertBufferResultEquals(TYPES, getBufferResult(buffer, 0, sizeOfPages(0), NO_WAIT), bufferResult(0, createPage(0)));
assertEquals(releasedPages.get(), 0);
assertBufferInfo(buffer, 2, 0);
// acknowledge first page
assertBufferResultEquals(TYPES, getBufferResult(buffer, 1, sizeOfPages(1), NO_WAIT), bufferResult(1, createPage(1)));
assertEquals(releasedPages.get(), 1);
assertBufferInfo(buffer, 1, 1);
// destroy the buffer
buffer.destroy();
assertEquals(releasedPages.get(), 2);
assertBufferDestroyed(buffer, 1);
} |
/**
 * Validates an OAuth2 redirect URI: it must be non-empty and start with the
 * client's registered redirect URL (prefix match, so query strings and
 * sub-paths under the registered URL are accepted).
 *
 * @throws OAuth2Exception with ILLEGAL_REDIRECT_URI when validation fails
 */
public void validateRedirectUri(String redirectUri) {
    if (ObjectUtils.isEmpty(redirectUri) || (!redirectUri.startsWith(this.redirectUrl))) {
        throw new OAuth2Exception(ErrorType.ILLEGAL_REDIRECT_URI);
    }
} | @Test
public void test(){
OAuth2Client client=new OAuth2Client();
client.setRedirectUrl("http://hsweb.me/callback");
client.validateRedirectUri("http://hsweb.me/callback");
client.validateRedirectUri("http://hsweb.me/callback?a=1&n=1");
} |
/**
 * Classifies a streams error: a MissingSourceTopicException is a USER error
 * (the source topic was deleted/never existed); anything else is UNKNOWN.
 */
@Override
public Type classify(final Throwable e) {
    final Type type = e instanceof MissingSourceTopicException ? Type.USER : Type.UNKNOWN;
    if (type == Type.USER) {
        // NOTE(review): passing the exception as a {} argument logs its
        // toString(), not the stack trace — appears intentional here.
        LOG.info(
            "Classified error as USER error based on missing topic. Query ID: {} Exception: {}",
            queryId,
            e);
    }
    return type;
} | @Test
public void shouldClassifyNoMissingTopicAsUnknownError() {
// Given:
final Exception e = new Exception("foo");
// When:
final Type type = new MissingTopicClassifier("").classify(e);
// Then:
assertThat(type, is(Type.UNKNOWN));
} |
/**
 * Looks up all OWN-capability grants for the given targets and groups the
 * grantees by target. Targets without an OWN grant are absent from the
 * returned map (no empty-set entries).
 *
 * @param targets entity GRNs to resolve owners for
 * @return map of target GRN to the set of owner (grantee) GRNs
 */
public Map<GRN, Set<GRN>> getOwnersForTargets(Collection<GRN> targets) {
    return db.find(DBQuery.and(
            DBQuery.in(GrantDTO.FIELD_TARGET, targets),
            DBQuery.is(GrantDTO.FIELD_CAPABILITY, Capability.OWN)
    )).toArray()
            .stream()
            .collect(Collectors.groupingBy(
                    GrantDTO::target,
                    Collectors.mapping(GrantDTO::grantee, Collectors.toSet())
            ));
} | @Test
@MongoDBFixtures("grants.json")
public void getOwnersForTargets() {
final GRN jane = grnRegistry.parse("grn::::user:jane");
final GRN john = grnRegistry.parse("grn::::user:john");
final GRN dashboard1 = grnRegistry.parse("grn::::dashboard:54e3deadbeefdeadbeef0000");
final GRN dashboard2 = grnRegistry.parse("grn::::dashboard:54e3deadbeefdeadbeef0001");
final GRN stream1 = grnRegistry.parse("grn::::stream:54e3deadbeefdeadbeef0001");
assertThat(dbService.getOwnersForTargets(ImmutableSet.of(dashboard1, dashboard2, stream1))).satisfies(result -> {
assertThat(result.get(dashboard1)).containsExactlyInAnyOrder(jane);
assertThat(result.get(dashboard2)).containsExactlyInAnyOrder(john);
assertThat(result).doesNotContainKey(stream1);
});
} |
/**
 * Single-argument overload: rounds {@code n} using a scale of zero digits
 * by delegating to the two-argument variant.
 */
public FEELFnResult<BigDecimal> invoke(@ParameterName( "n" ) BigDecimal n) {
    return invoke(n, BigDecimal.ZERO);
} | @Test
void invokeRoundingUp() {
FunctionTestUtil.assertResult(roundHalfDownFunction.invoke(BigDecimal.valueOf(10.27)), BigDecimal.valueOf(10));
FunctionTestUtil.assertResult(roundHalfDownFunction.invoke(BigDecimal.valueOf(10.27), BigDecimal.ONE),
BigDecimal.valueOf(10.3));
} |
/**
 * Appends the dialect-quoted form of a dotted external name to {@code sb}.
 * Each dot-separated part is unescaped, stripped of any existing quotes,
 * then re-quoted by the dialect, so mixed quoted/unquoted input is
 * normalized (e.g. {@code a."b"} becomes {@code "a"."b"}).
 */
static void quoteExternalName(StringBuilder sb, String externalName) {
    // Split only on dots that are outside quoted sections.
    List<String> parts = splitByNonQuotedDots(externalName);
    for (int i = 0; i < parts.size(); i++) {
        String unescaped = unescapeQuotes(parts.get(i));
        String unquoted = unquoteIfQuoted(unescaped);
        DIALECT.quoteIdentifier(sb, unquoted);
        if (i < parts.size() - 1) {
            sb.append(".");
        }
    }
} | @Test
public void quoteExternalName_withQuotes() {
String externalName = "custom_schema.\"my_table\"";
StringBuilder sb = new StringBuilder();
MappingHelper.quoteExternalName(sb, externalName);
assertThat(sb.toString()).isEqualTo("\"custom_schema\".\"my_table\"");
} |
/**
 * Semantic analysis for ALTER SYSTEM MODIFY FRONTEND HOST: validates the
 * source/destination hosts (throws SemanticException on invalid input).
 */
@Override
public Void visitModifyFrontendHostClause(ModifyFrontendAddressClause clause, ConnectContext context) {
    checkModifyHostClause(clause.getSrcHost(), clause.getDestHost());
    return null;
} | @Test(expected = SemanticException.class)
public void testVisitModifyFrontendHostClauseException() {
AlterSystemStmtAnalyzer visitor = new AlterSystemStmtAnalyzer();
ModifyFrontendAddressClause clause = new ModifyFrontendAddressClause("127.0.0.2", "127.0.0.1");
visitor.visitModifyFrontendHostClause(clause, null);
} |
// Records one enumeration result in the fixed-size ring buffer, overwriting
// the oldest entry once full. Synchronized so writers and snapshot readers
// observe a consistent (history, count) pair.
synchronized void add(int splitCount) {
    history[count % history.length] = splitCount;
    count++;
} | @Test
public void testTwoMoreThanFullHistory() {
EnumerationHistory history = new EnumerationHistory(3);
history.add(1);
history.add(2);
history.add(3);
history.add(4);
history.add(5);
int[] expectedHistorySnapshot = {3, 4, 5};
testHistory(history, expectedHistorySnapshot);
} |
/**
 * This manager registers no execute-aware plugins; always returns an
 * immutable empty list (never null).
 */
@Override
public Collection<ExecuteAwarePlugin> getExecuteAwarePluginList() {
    return Collections.emptyList();
} | @Test
public void testGetExecuteAwarePluginList() {
Assert.assertEquals(Collections.emptyList(), manager.getExecuteAwarePluginList());
} |
/**
 * Log4j2 StrLookup hook: resolves {@code key} from the Nacos logging
 * properties holder; the log event itself is not consulted.
 */
@Override
public String lookup(LogEvent event, String key) {
    return Log4j2NacosLoggingPropertiesHolder.getValue(key);
} | @Test
void testLookUp() {
System.setProperty("test.nacos.logging.lookup", "true");
NacosLoggingProperties properties = new NacosLoggingProperties("", System.getProperties());
Log4j2NacosLoggingPropertiesHolder.setProperties(properties);
NacosClientPropertiesLookup nacosClientPropertiesLookup = new NacosClientPropertiesLookup();
final String actual = nacosClientPropertiesLookup.lookup("test.nacos.logging.lookup");
assertEquals("true", actual);
} |
/**
 * Returns the registry of built-in FEEL functions.
 *
 * NOTE(review): this returns the internal FUNCTIONS array directly, so a
 * caller could mutate it — presumably callers treat it as read-only;
 * confirm before defensively copying.
 */
public static FEELFunction[] getFunctions() {
    return FUNCTIONS;
} | @Test
void getFunctions() {
// This test is aimed at verify that all the "INSTANCE" fields are correctly populated, referring to the same class they are defined in
Set< Class<? extends FEELFunction>> verifiedClasses = Stream.of(FUNCTIONS).map(this::validateFunction)
.filter(Objects::nonNull)
.collect(Collectors.toUnmodifiableSet());
assertEquals(FUNCTIONS.length, verifiedClasses.size());
} |
/**
 * Creates a Netty ProxyHandler for the configured proxy type (HTTP, SOCKS4
 * or SOCKS5), with or without credentials, and applies the configured
 * connect timeout.
 *
 * @throws IllegalArgumentException for an unsupported proxy type
 */
public final ProxyHandler newProxyHandler() {
    SocketAddress proxyAddr = this.address.get();
    // b == true when full credentials (username + password function) are configured.
    final boolean b = Objects.nonNull(username) && Objects.nonNull(password);
    // Local 'username' deliberately shadows the field; 'password' below is the
    // resolved string (the field is a function applied to the username).
    String username = this.username;
    String password = b ? this.password.apply(username) : null;
    final ProxyHandler proxyHandler;
    switch (this.type) {
        case HTTP:
            proxyHandler = b ?
                    new HttpProxyHandler(proxyAddr, username, password, this.httpHeaders.get()) :
                    new HttpProxyHandler(proxyAddr, this.httpHeaders.get());
            break;
        case SOCKS4:
            // SOCKS4 supports only a user id, no password.
            proxyHandler = Objects.nonNull(username) ? new Socks4ProxyHandler(proxyAddr, username) :
                    new Socks4ProxyHandler(proxyAddr);
            break;
        case SOCKS5:
            proxyHandler = b ?
                    new Socks5ProxyHandler(proxyAddr, username, password) :
                    new Socks5ProxyHandler(proxyAddr);
            break;
        default:
            throw new IllegalArgumentException("Proxy type unsupported : " + this.type);
    }
    proxyHandler.setConnectTimeoutMillis(connectTimeoutMillis);
    return proxyHandler;
} | @Test
void connectTimeoutWithNonPositiveValue() {
assertThat(createConnectTimeoutProxy(0).newProxyHandler().connectTimeoutMillis()).isEqualTo(0);
assertThat(createConnectTimeoutProxy(-1).newProxyHandler().connectTimeoutMillis()).isEqualTo(0);
} |
/**
 * Gets or creates the segment for {@code segmentId} if it is still within
 * the retention window, then evicts any segments that expired as stream
 * time advanced.
 */
@Override
public TimestampedSegment getOrCreateSegmentIfLive(final long segmentId,
                                                   final ProcessorContext context,
                                                   final long streamTime) {
    final TimestampedSegment segment = super.getOrCreateSegmentIfLive(segmentId, context, streamTime);
    // Piggyback expiry cleanup on every access so stale segments are dropped.
    cleanupExpiredSegments(streamTime);
    return segment;
} | @Test
public void shouldClearSegmentsOnClose() {
segments.getOrCreateSegmentIfLive(0, context, -1L);
segments.close();
assertThat(segments.getSegmentForTimestamp(0), is(nullValue()));
} |
/**
 * Returns a snapshot of all computation states currently present in the
 * cache (does not trigger loading of absent entries).
 */
public ImmutableList<ComputationState> getAllPresentComputations() {
    return computationCache.asMap().values().stream().collect(toImmutableList());
} | @Test
public void testGetAllPresentComputations() {
String computationId1 = "computationId1";
String computationId2 = "computationId2";
MapTask mapTask = new MapTask().setStageName("stageName").setSystemName("systemName");
Map<String, String> userTransformToStateFamilyName =
ImmutableMap.of("userTransformName", "stateFamilyName");
ComputationConfig computationConfig =
ComputationConfig.create(mapTask, userTransformToStateFamilyName, ImmutableMap.of());
when(configFetcher.fetchConfig(eq(computationId1))).thenReturn(Optional.of(computationConfig));
when(configFetcher.fetchConfig(eq(computationId2))).thenReturn(Optional.of(computationConfig));
computationStateCache.get(computationId1);
computationStateCache.get(computationId2);
Set<String> expectedComputationIds = ImmutableSet.of(computationId1, computationId2);
Set<String> actualComputationIds =
computationStateCache.getAllPresentComputations().stream()
.map(ComputationState::getComputationId)
.collect(Collectors.toSet());
assertThat(actualComputationIds).containsExactlyElementsIn(expectedComputationIds);
computationStateCache
.getAllPresentComputations()
.forEach(
computationState -> {
assertThat(expectedComputationIds).contains(computationState.getComputationId());
assertThat(computationState.getMapTask()).isEqualTo(mapTask);
assertThat(computationState.getTransformUserNameToStateFamily())
.isEqualTo(userTransformToStateFamilyName);
});
} |
public static boolean isEmoji(char c) {
//noinspection ConstantConditions
return false == ((c == 0x0) || //
(c == 0x9) || //
(c == 0xA) || //
(c == 0xD) || //
((c >= 0x20) && (c <= 0xD7FF)) || //
((c >= 0xE000) && (c <= 0xFFFD)) || //
((c >= 0x100000) && (c <= 0x10FFFF)));
} | @Test
public void isEmojiTest() {
final String a = "莉🌹";
assertFalse(CharUtil.isEmoji(a.charAt(0)));
assertTrue(CharUtil.isEmoji(a.charAt(1)));
} |
/**
 * Builds a ResourceModel from the Rest.li annotations on the given resource
 * class, with no parent resource context.
 *
 * @throws ResourceConfigException when the annotations are invalid
 */
public static ResourceModel processResource(final Class<?> resourceClass)
{
    return processResource(resourceClass, null);
} | @Test(expectedExceptions = ResourceConfigException.class)
public void failsOnInvalidActionParamAnnotationTypeRef() {
@RestLiCollection(name = "brokenParam")
class LocalClass extends CollectionResourceTemplate<Long, EmptyRecord> {
@Action(name = "brokenParam")
public void brokenParam(@ActionParam(value = "someId", typeref = BrokenTypeRef.class) BrokenTypeRef typeRef) {
}
}
RestLiAnnotationReader.processResource(LocalClass.class);
Assert.fail("#buildActionParam should fail throwing a ResourceConfigException");
} |
/**
 * Sets a custom Finder icon on the given file and notifies the filesystem
 * of the change. Synchronized on NSWorkspace because the shared workspace
 * instance is not safe for concurrent icon updates.
 *
 * @return true if the icon was applied, false otherwise
 */
protected boolean update(final Local file, final NSImage icon) {
    synchronized(NSWorkspace.class) {
        // Specify 0 if you want to generate icons in all available icon representation formats
        if(workspace.setIcon_forFile_options(icon, file.getAbsolute(), new NSUInteger(0))) {
            // Path must be NFD-normalized for the filesystem-changed notification.
            workspace.noteFileSystemChanged(new NFDNormalizer().normalize(file.getAbsolute()).toString());
            return true;
        }
        return false;
    }
} | @Test
public void testSetProgress() throws Exception {
final WorkspaceIconService s = new WorkspaceIconService();
final Local file = new Local(PreferencesFactory.get().getProperty("tmp.dir"),
UUID.randomUUID().toString());
LocalTouchFactory.get().touch(file);
assertTrue(s.update(file, NSImage.imageWithContentsOfFile("../../img/download0.icns")));
file.delete();
} |
/**
 * Formats a logging event by running it through the converter chain; yields
 * the empty string if the layout has not been started yet.
 */
public String doLayout(ILoggingEvent event) {
    if (!isStarted()) {
        return CoreConstants.EMPTY_STRING;
    }
    return writeLoopOnConverters(event);
} | @Test
public void testCompositePattern() {
pl.setPattern("%-56(%d %lo{20}) - %m%n");
pl.start();
String val = pl.doLayout(getEventObject());
// 2008-03-18 21:55:54,250 c.q.l.c.pattern.ConverterTest - Some message
String regex = ISO_REGEX
+ " c.q.l.c.p.ConverterTest - Some message\\s*";
assertThat(val, matchesPattern(regex));
} |
/**
 * Delegates to the underlying record's creation timestamp.
 */
@Override
public long getCreationTime() {
    return record.getCreationTime();
} | @Test
public void test_getCreationTime() {
assertEquals(0, view.getCreationTime());
} |
// Setter for the TLS configuration used by this gRPC client.
public void setTlsConfig(RpcClientTlsConfig tlsConfig) {
    this.tlsConfig = tlsConfig;
} | @Test
void testSetTlsConfig() {
RpcClientTlsConfig tlsConfig = new RpcClientTlsConfig();
DefaultGrpcClientConfig.Builder builder = DefaultGrpcClientConfig.newBuilder();
builder.setTlsConfig(tlsConfig);
DefaultGrpcClientConfig config = (DefaultGrpcClientConfig) builder.build();
assertEquals(tlsConfig, config.tlsConfig());
} |
/**
 * Round-robin partition assignment: walks all sorted partitions and hands
 * each to the next member (in sorted member order) that subscribes to the
 * partition's topic. Every subscribed member gets an entry in the result,
 * possibly an empty list.
 *
 * @param partitionsPerTopic partition counts per topic
 * @param subscriptions member id to subscription
 * @return member id to the list of partitions assigned to it
 */
@Override
public Map<String, List<TopicPartition>> assign(Map<String, Integer> partitionsPerTopic,
                                                Map<String, Subscription> subscriptions) {
    Map<String, List<TopicPartition>> assignment = new HashMap<>();
    List<MemberInfo> memberInfoList = new ArrayList<>();
    for (Map.Entry<String, Subscription> memberSubscription : subscriptions.entrySet()) {
        // Pre-seed each member with an empty list so unassigned members still appear.
        assignment.put(memberSubscription.getKey(), new ArrayList<>());
        memberInfoList.add(new MemberInfo(memberSubscription.getKey(),
                                          memberSubscription.getValue().groupInstanceId()));
    }
    // Sorted circular iteration gives a deterministic, balanced distribution.
    CircularIterator<MemberInfo> assigner = new CircularIterator<>(Utils.sorted(memberInfoList));
    for (TopicPartition partition : allPartitionsSorted(partitionsPerTopic, subscriptions)) {
        final String topic = partition.topic();
        // Skip members not subscribed to this topic; loop terminates because
        // only partitions of subscribed topics are produced above.
        while (!subscriptions.get(assigner.peek().memberId).topics().contains(topic))
            assigner.next();
        assignment.get(assigner.next().memberId).add(partition);
    }
    return assignment;
} | @Test
public void testOneConsumerNonexistentTopic() {
Map<String, Integer> partitionsPerTopic = new HashMap<>();
Map<String, List<TopicPartition>> assignment = assignor.assign(partitionsPerTopic,
Collections.singletonMap(consumerId, new Subscription(topics(topic))));
assertEquals(Collections.singleton(consumerId), assignment.keySet());
assertTrue(assignment.get(consumerId).isEmpty());
} |
/**
 * Positions the underlying stream at the stored offset {@code fp} by
 * skipping that many bytes.
 *
 * NOTE(review): skip() may skip fewer bytes than requested; assigning its
 * return value back to fp records the bytes actually skipped — confirm
 * callers expect this rather than a guaranteed full seek.
 */
public void open() throws IOException {
    if (fp > 0) {
        fp = inputStream.skip(fp);
    }
} | @Test
public void open() throws IOException {
cs.open();
byte[] buff = new byte[1];
assertEquals(buff.length, cs.read(buff));
assertEquals(text[0], buff[0]);
} |
/**
 * Finds the model with the given id inside a list of change payloads, or
 * null if no payload carries it. Payloads either hold a single model or an
 * id-to-model map, and both forms are checked.
 *
 * @param payloads RecyclerView change payloads (each a DiffPayload)
 * @param modelId id of the model to locate
 */
@Nullable
public static EpoxyModel<?> getModelFromPayload(List<Object> payloads, long modelId) {
    if (payloads.isEmpty()) {
        return null;
    }
    for (Object payload : payloads) {
        DiffPayload diffPayload = (DiffPayload) payload;
        if (diffPayload.singleModel != null) {
            // Payload carries exactly one model; match on id.
            if (diffPayload.singleModel.id() == modelId) {
                return diffPayload.singleModel;
            }
        } else {
            // Payload carries a map of models keyed by id.
            EpoxyModel<?> modelForId = diffPayload.modelsById.get(modelId);
            if (modelForId != null) {
                return modelForId;
            }
        }
    }
    return null;
} | @Test
public void returnsNullForEmptyPayload() {
List<Object> payloads = new ArrayList<>();
EpoxyModel<?> modelFromPayload = getModelFromPayload(payloads, 2);
assertNull(modelFromPayload);
} |
/**
 * Deletes everything inside {@code directory} without removing the
 * directory itself. Attempts every entry even if some fail, then rethrows
 * the last failure.
 *
 * @throws IllegalArgumentException if the path does not exist or is not a directory
 * @throws IOException if listing fails or any entry could not be deleted
 */
public static void cleanDirectory(File directory) throws IOException {
    if (!directory.exists()) {
        String message = directory + " does not exist";
        throw new IllegalArgumentException(message);
    }
    if (!directory.isDirectory()) {
        String message = directory + " is not a directory";
        throw new IllegalArgumentException(message);
    }
    File[] files = directory.listFiles();
    // null if security restricted
    if (files == null) {
        throw new IOException("Failed to list contents of " + directory);
    }
    IOException exception = null;
    for (File file : files) {
        try {
            delete(file);
        } catch (IOException ioe) {
            // Remember the failure but keep deleting the remaining entries;
            // only the last exception is propagated.
            exception = ioe;
        }
    }
    if (null != exception) {
        throw exception;
    }
} | @Test
void testCleanDirectoryWithEmptyDirectory() throws IOException {
assertThrows(IOException.class, () -> {
File mockFile = mock(File.class);
when(mockFile.exists()).thenReturn(true);
when(mockFile.isDirectory()).thenReturn(true);
IoUtils.cleanDirectory(mockFile);
});
} |
/**
 * Picks one record uniformly at random from the list.
 *
 * NOTE(review): creates a new Random per call; ThreadLocalRandom.current()
 * would avoid the per-call seed cost — confirm no seeding requirement first.
 *
 * @throws IllegalArgumentException if the list is null or empty
 */
Record selectRandomRecord(List<Record> recordsList) {
    Preconditions.checkArgument(CollectionUtils.isNotEmpty(recordsList), "Records list can not be empty.");
    LOG.debug("Selecting a random Record from the sample list.");
    return recordsList.get(new Random().nextInt(recordsList.size()));
} | @Test
public void testSelectRandomRecord() {
// Test empty list
List<Record> fakeRecordList = new ArrayList<>();
AssertionsForClassTypes.assertThatThrownBy(() -> kinesisService.selectRandomRecord(fakeRecordList))
.isExactlyInstanceOf(IllegalArgumentException.class)
.hasMessageContaining("Records list can not be empty.");
// Test list with records
fakeRecordList.add(Record.builder().build());
fakeRecordList.add(Record.builder().build());
fakeRecordList.add(Record.builder().build());
Record record = kinesisService.selectRandomRecord(fakeRecordList);
// Test a record returns
assertNotNull(record);
} |
/**
 * Converts the given Protobuf {@code ParsedSchema} into a Connect {@code Schema},
 * first adjusting the schema's full name via {@code withSchemaFullName}.
 */
@Override
public Schema toConnectSchema(final ParsedSchema schema) {
    return protobufData.toConnectSchema(withSchemaFullName((ProtobufSchema) schema));
} | @Test
public void shouldUnwrapPrimitives() {
// Given:
givenUnwrapPrimitives();
// When:
final Schema schema = schemaTranslator.toConnectSchema(SCHEMA_WITH_WRAPPED_PRIMITIVES);
// Then:
assertThat(schema.field("c1").schema().type(), is(Type.BOOLEAN));
assertThat(schema.field("c2").schema().type(), is(Type.INT32));
assertThat(schema.field("c3").schema().type(), is(Type.INT64));
assertThat(schema.field("c4").schema().type(), is(Type.FLOAT64));
assertThat(schema.field("c5").schema().type(), is(Type.STRING));
} |
/**
 * Ensures the dictionary type exists and is enabled; throws a service
 * exception otherwise.
 */
@VisibleForTesting
public void validateDictTypeExists(String type) {
    DictTypeDO dictType = dictTypeService.getDictType(type);
    // Unknown type -> "not exists" error.
    if (dictType == null) {
        throw exception(DICT_TYPE_NOT_EXISTS);
    }
    // Known but disabled type -> "not enabled" error.
    boolean enabled = CommonStatusEnum.ENABLE.getStatus().equals(dictType.getStatus());
    if (!enabled) {
        throw exception(DICT_TYPE_NOT_ENABLE);
    }
} | @Test
public void testValidateDictTypeExists_notEnable() {
// mock 方法,数据类型被禁用
String dictType = randomString();
when(dictTypeService.getDictType(eq(dictType))).thenReturn(
randomPojo(DictTypeDO.class, o -> o.setStatus(CommonStatusEnum.DISABLE.getStatus())));
// 调用, 并断言异常
assertServiceException(() -> dictDataService.validateDictTypeExists(dictType), DICT_TYPE_NOT_ENABLE);
} |
/**
 * Extracts an annotation of the given type from {@code targetClass}.
 * <p>Tries the inherited lookup first ({@code getAnnotation}, which also finds
 * {@code @Inherited} annotations on superclasses), then falls back to
 * {@code getDeclaredAnnotation}.
 *
 * @param targetClass     the class to inspect
 * @param annotationClass the annotation type to look for
 * @return the annotation instance, or {@code null} if absent
 */
@Nullable
public static <T extends Annotation> T extract(Class<?> targetClass, Class<T> annotationClass) {
    T annotation = targetClass.getAnnotation(annotationClass);
    if (annotation == null) {
        if (logger.isDebugEnabled()) {
            logger.debug("TargetClass has no annotation '{}'", annotationClass.getSimpleName());
        }
        // BUG FIX: the declared-annotation fallback used to execute only when
        // debug logging was enabled; lookup must not depend on logger config.
        annotation = targetClass.getDeclaredAnnotation(annotationClass);
        if (annotation == null && logger.isDebugEnabled()) {
            logger.debug("TargetClass has no declared annotation '{}'",
                    annotationClass.getSimpleName());
        }
    }
    return annotation;
} | @Test
public void testExtract() {
CircuitBreaker circuitBreaker = AnnotationExtractor
.extract(AnnotatedClass.class, CircuitBreaker.class);
assertThat(circuitBreaker).isNotNull();
assertThat(circuitBreaker.name()).isEqualTo("test");
} |
/**
 * Provides the server's temp folder, backed by a "tmp" sub-directory of the
 * server file system's temp dir. The directory is created if missing.
 */
@Bean("TempFolder")
public TempFolder provide(ServerFileSystem fs) {
    final File tmp = new File(fs.getTempDir(), "tmp");
    try {
        // Creates the directory and any missing parents; no-op if it exists.
        FileUtils.forceMkdir(tmp);
        return new DefaultTempFolder(tmp);
    } catch (IOException e) {
        throw new IllegalStateException("Unable to create temp directory " + tmp, e);
    }
} | @Test
public void create_temp_dir_if_missing() throws Exception {
ServerFileSystem fs = mock(ServerFileSystem.class);
File tmpDir = temp.newFolder();
when(fs.getTempDir()).thenReturn(tmpDir);
FileUtils.forceDelete(tmpDir);
TempFolder folder = underTest.provide(fs);
assertThat(folder).isNotNull();
File newDir = folder.newDir();
assertThat(newDir).exists().isDirectory();
assertThat(newDir.getParentFile().getCanonicalPath()).startsWith(tmpDir.getCanonicalPath());
} |
/**
 * Parses the CLI arguments, creates an Admin client from the resulting options,
 * and delegates to the two-arg {@code execute}. try-with-resources guarantees
 * the client is closed.
 */
private static void execute(String... args) throws Exception {
    LogDirsCommandOptions options = new LogDirsCommandOptions(args);
    try (Admin adminClient = createAdminClient(options)) {
        execute(options, adminClient);
    }
} | @Test
@SuppressWarnings("unchecked")
public void shouldQueryAllBrokersIfNonSpecified() throws JsonProcessingException {
Node brokerOne = new Node(1, "hostname", 9092);
Node brokerTwo = new Node(2, "hostname", 9092);
try (MockAdminClient adminClient = new MockAdminClient(Arrays.asList(brokerTwo, brokerOne), brokerOne)) {
String standardOutput = execute(fromArgsToOptions("--bootstrap-server", "EMPTY", "--describe"), adminClient);
String[] standardOutputLines = standardOutput.split("\n");
assertEquals(3, standardOutputLines.length);
Map<String, Object> information = new ObjectMapper().readValue(standardOutputLines[2], HashMap.class);
List<Object> brokersInformation = (List<Object>) information.get("brokers");
Set<Integer> brokerIds = new HashSet<Integer>() {{
add((Integer) ((HashMap<String, Object>) brokersInformation.get(0)).get("broker"));
add((Integer) ((HashMap<String, Object>) brokersInformation.get(1)).get("broker"));
}};
assertEquals(2, brokersInformation.size());
assertEquals(new HashSet<>(Arrays.asList(2, 1)), brokerIds);
}
} |
/**
 * Concatenates {@code a} and {@code b}, truncating {@code a} when necessary so the
 * UTF-8 encoded result stays within {@code threshold} bytes. {@code b} is never
 * truncated; it must fit on its own.
 *
 * @param a         prefix string, truncated if the combined size exceeds the threshold
 * @param b         suffix string, kept intact
 * @param threshold maximum allowed size of the result, in UTF-8 bytes
 * @return the (possibly truncated) concatenation
 * @throws IllegalArgumentException if {@code b} alone exceeds {@code threshold} bytes
 */
public static String concatenateWithThreshold(String a, String b, int threshold) {
    // Convert both strings to byte arrays in UTF-8 encoding
    byte[] bytesA = getUTF8Bytes(a);
    byte[] bytesB = getUTF8Bytes(b);
    if (bytesB.length > threshold) {
        throw new IllegalArgumentException(String.format(
            "Length of the Second string to concatenate exceeds the threshold (%d > %d)",
            bytesB.length, threshold));
    }
    // Calculate total bytes
    int totalBytes = bytesA.length + bytesB.length;
    // If total bytes is within the threshold, return concatenated string
    if (totalBytes <= threshold) {
        return a + b;
    }
    // Calculate the maximum bytes 'a' can take.
    // getBestLength presumably returns a char count that keeps 'a' within the
    // byte budget without splitting a multi-byte code point — confirm there.
    int bestLength = getBestLength(a, threshold - bytesB.length);
    // Concatenate the valid substring of 'a' with 'b'
    return a.substring(0, bestLength) + b;
} | @Test
void testConcatenateInvalidInput() {
// Test case when b alone exceeds the threshold
String a = generateRandomString(900);
String b = generateRandomString(3000); // 3000 bytes in UTF-8
Exception exception = assertThrows(IllegalArgumentException.class, () -> {
concatenateWithThreshold(a, b, 2048);
});
String expectedMessage = "Length of the Second string to concatenate exceeds the threshold (3000 > 2048)";
String actualMessage = exception.getMessage();
assertTrue(actualMessage.contains(expectedMessage));
} |
/**
 * Returns the index of the last element matching {@code matcher}, or -1 if none.
 * A null matcher matches every element (so the last index is returned).
 */
public static <T> int lastIndexOf(Collection<T> collection, Matcher<T> matcher) {
    // Lists support an optimized reverse search.
    if (collection instanceof List) {
        return ListUtil.lastIndexOf((List<T>) collection, matcher);
    }
    int result = -1;
    if (isNotEmpty(collection)) {
        // Generic collections can only be walked forward; remember the last hit.
        int position = 0;
        for (T element : collection) {
            if (matcher == null || matcher.match(element)) {
                result = position;
            }
            position++;
        }
    }
    return result;
} | @Test
public void lastIndexOfTest() {
// List有优化
final ArrayList<String> list = CollUtil.newArrayList("a", "b", "c", "c", "a", "b", "d");
final int i = CollUtil.lastIndexOf(list, (str) -> str.charAt(0) == 'c');
assertEquals(3, i);
} |
/**
 * Removes the given HTML tags from {@code content}, delegating to the three-arg
 * overload with its flag fixed to {@code true} — presumably "also remove the
 * tags' inner content"; confirm against the three-arg overload's contract.
 */
public static String removeHtmlTag(String content, String... tagNames) {
    return removeHtmlTag(content, true, tagNames);
} | @Test
public void removeHtmlTagTest() {
//非闭合标签
String str = "pre<img src=\"xxx/dfdsfds/test.jpg\">";
String result = HtmlUtil.removeHtmlTag(str, "img");
assertEquals("pre", result);
//闭合标签
str = "pre<img>";
result = HtmlUtil.removeHtmlTag(str, "img");
assertEquals("pre", result);
//闭合标签
str = "pre<img src=\"xxx/dfdsfds/test.jpg\" />";
result = HtmlUtil.removeHtmlTag(str, "img");
assertEquals("pre", result);
//闭合标签
str = "pre<img />";
result = HtmlUtil.removeHtmlTag(str, "img");
assertEquals("pre", result);
//包含内容标签
str = "pre<div class=\"test_div\">dfdsfdsfdsf</div>";
result = HtmlUtil.removeHtmlTag(str, "div");
assertEquals("pre", result);
//带换行
str = "pre<div class=\"test_div\">\r\n\t\tdfdsfdsfdsf\r\n</div>";
result = HtmlUtil.removeHtmlTag(str, "div");
assertEquals("pre", result);
} |
/**
 * Resolves this variable-reference expression against the evaluation context.
 * <p>Resolution order: if only a variable name was supplied, look it up in the
 * context and return its string form. Otherwise split {@code pathExpression}
 * into a property name and an optional path suffix, read the property off the
 * bound variable, and apply the suffix: JSON Pointer / XPath for String values,
 * an index expression for arrays, a key expression for Maps.
 *
 * @return the resolved value rendered via {@code String.valueOf} ("null" when absent)
 */
@Override
public String getValue(EvaluationContext context) {
    // Use variable name if we just provide this.
    if (variableName != null && variable == null) {
        variable = context.lookupVariable(variableName);
        return (variable != null ? variable.toString() : "");
    }
    String propertyName = pathExpression;
    String propertyPath = null;
    int delimiterIndex = -1;
    // Search for a delimiter to isolate property name.
    for (String delimiter : PROPERTY_NAME_DELIMITERS) {
        delimiterIndex = pathExpression.indexOf(delimiter);
        if (delimiterIndex != -1) {
            propertyName = pathExpression.substring(0, delimiterIndex);
            propertyPath = pathExpression.substring(delimiterIndex);
            break;
        }
    }
    Object variableValue = getProperty(variable, propertyName);
    if (log.isDebugEnabled()) {
        log.debug("propertyName: {}", propertyName);
        log.debug("propertyPath: {}", propertyPath);
        log.debug("variableValue: {}", variableValue);
    }
    // BUG FIX: guard against a null property value before dereferencing its class;
    // previously a missing property with a path suffix threw NullPointerException.
    if (propertyPath != null && variableValue != null) {
        if (variableValue.getClass().equals(String.class)) {
            if (propertyPath.startsWith("/")) {
                // This is a JSON Pointer or XPath expression to apply.
                String variableString = String.valueOf(variableValue);
                if (variableString.trim().startsWith("{") || variableString.trim().startsWith("[")) {
                    variableValue = getJsonPointerValue(variableString, propertyPath);
                } else if (variableString.trim().startsWith("<")) {
                    variableValue = getXPathValue(variableString, propertyPath);
                } else {
                    log.warn("Got a path query expression but content seems not to be JSON nor XML...");
                    variableValue = null;
                }
            }
        } else if (variableValue.getClass().isArray()) {
            if (propertyPath.matches(ARRAY_INDEX_REGEXP)) {
                Matcher m = ARRAY_INDEX_PATTERN.matcher(propertyPath);
                if (m.matches()) {
                    String arrayIndex = m.group(1);
                    Object[] variableValues = (Object[]) variableValue;
                    try {
                        variableValue = variableValues[Integer.parseInt(arrayIndex)];
                    } catch (ArrayIndexOutOfBoundsException ae) {
                        log.warn("Expression asked for " + arrayIndex + " but array is smaller (" + variableValues.length
                                + "). Returning null.");
                        variableValue = null;
                    }
                }
            }
        } else if (Map.class.isAssignableFrom(variableValue.getClass())) {
            if (propertyPath.matches(MAP_INDEX_REGEXP)) {
                Matcher m = MAP_INDEX_PATTERN.matcher(propertyPath);
                if (m.matches()) {
                    String mapKey = m.group(1);
                    Map variableValues = (Map) variableValue;
                    variableValue = variableValues.get(mapKey);
                }
            }
        }
    }
    return String.valueOf(variableValue);
} | @Test
void testJSONPointerValue() {
String jsonString = "{\n" + " \"library\": \"My Personal Library\",\n" + " \"books\": [\n"
+ " { \"title\":\"Title 1\", \"author\":\"Jane Doe\" },\n"
+ " { \"title\":\"Title 2\", \"author\":\"John Doe\" }\n" + " ]\n" + "}";
EvaluableRequest request = new EvaluableRequest(jsonString, null);
// Create new expression evaluating JSON Pointer path.
VariableReferenceExpression exp = new VariableReferenceExpression(request, "body/books/1/author");
String result = exp.getValue(new EvaluationContext());
assertEquals("John Doe", result);
// Test extraction of Array by JSON Pointer path
VariableReferenceExpression expArray = new VariableReferenceExpression(request, "body/books");
String resultArray = expArray.getValue(new EvaluationContext());
assertEquals("[{\"title\":\"Title 1\",\"author\":\"Jane Doe\"},{\"title\":\"Title 2\",\"author\":\"John Doe\"}]",
resultArray);
// Test extraction of Object by JSON Pointer path
VariableReferenceExpression expObj = new VariableReferenceExpression(request, "body/books/1");
String resultObj = expObj.getValue(new EvaluationContext());
assertEquals("{\"title\":\"Title 2\",\"author\":\"John Doe\"}", resultObj);
} |
/**
 * Picks the best filter among the candidates for a single index component.
 * Equality filters win over range filters on the assumption that equality is
 * more selective; only when no equality candidate exists is a range filter
 * resolved (which is where the index {@code type} matters).
 *
 * @param type          the index type, used by the range search
 * @param candidates    candidate filters for this component
 * @param converterType the component's converter type
 * @return the chosen filter — possibly null if searchForRange finds none
 *         (depends on searchForRange, not visible here)
 */
static IndexComponentFilter findBestComponentFilter(
    IndexType type,
    List<IndexComponentCandidate> candidates,
    QueryDataType converterType
) {
    // First look for equality filters, assuming that they are more selective than ranges
    IndexComponentFilter equalityComponentFilter = searchForEquality(candidates, converterType);
    if (equalityComponentFilter != null) {
        return equalityComponentFilter;
    }
    // Look for ranges filters
    return searchForRange(type, candidates, converterType);
} | @Test
public void when_singleEqualityFilterPresent_then_itIsUsed() {
IndexComponentFilter bestFilter = IndexComponentFilterResolver.findBestComponentFilter(
indexType, WITH_SINGLE_EQUALITY_CANDIDATES, QUERY_DATA_TYPE
);
assertEquals(SINGLE_EQUALITY_CANDIDATE.getFilter(), bestFilter.getFilter());
} |
/**
 * Converts a data table into a list of maps, one map per data row, using the
 * first row as the shared key set.
 * <p>Missing cell converters for the key and/or value type are collected and
 * reported together in a single exception. An empty table, or a table with only
 * a header row, converts to an empty list.
 *
 * @param dataTable the table to convert (first row = keys)
 * @param keyType   cell type of the keys
 * @param valueType cell type of the values
 * @return an unmodifiable list of maps
 */
@Override
@SuppressWarnings("unchecked")
public <K, V> List<Map<K, V>> toMaps(DataTable dataTable, Type keyType, Type valueType) {
    requireNonNull(dataTable, "dataTable may not be null");
    requireNonNull(keyType, "keyType may not be null");
    requireNonNull(valueType, "valueType may not be null");
    if (dataTable.isEmpty()) {
        return emptyList();
    }
    // Collect ALL converter-lookup failures before throwing, so the user sees
    // every problem at once.
    DataTableType keyConverter = registry.lookupCellTypeByType(keyType);
    DataTableType valueConverter = registry.lookupCellTypeByType(valueType);
    List<String> problems = new ArrayList<>();
    if (keyConverter == null) {
        problems.add(problemNoTableCellTransformer(keyType));
    }
    if (valueConverter == null) {
        problems.add(problemNoTableCellTransformer(valueType));
    }
    if (!problems.isEmpty()) {
        throw mapsNoConverterDefined(keyType, valueType, problems);
    }
    // First row supplies the keys; remaining rows supply one map of values each.
    DataTable header = dataTable.rows(0, 1);
    List<Map<K, V>> result = new ArrayList<>();
    List<K> keys = unpack((List<List<K>>) keyConverter.transform(header.cells()));
    DataTable rows = dataTable.rows(1);
    if (rows.isEmpty()) {
        return emptyList();
    }
    List<List<V>> transform = (List<List<V>>) valueConverter.transform(rows.cells());
    for (List<V> values : transform) {
        result.add(createMap(keyType, keys, valueType, values));
    }
    return unmodifiableList(result);
} | @Test
void to_maps_cant_convert_table_with_duplicate_null_keys() {
DataTable table = parse("",
"| | |",
"| 1 | 2 |");
CucumberDataTableException exception = assertThrows(
CucumberDataTableException.class,
() -> converter.toMaps(table, Integer.class, Integer.class));
assertThat(exception.getMessage(), is(format("" +
"Can't convert DataTable to Map<%s, %s>.\n" +
"Encountered duplicate key null with values 1 and 2",
typeName(Integer.class), typeName(Integer.class))));
} |
/**
 * Instantiates one ValueExtractor per attribute config, keyed by attribute name.
 *
 * @throws IllegalArgumentException if two configs share the same attribute name
 */
static Map<String, ValueExtractor> instantiateExtractors(List<AttributeConfig> attributeConfigs,
                                                         ClassLoader classLoader) {
    Map<String, ValueExtractor> extractorsByName = createHashMap(attributeConfigs.size());
    for (AttributeConfig attributeConfig : attributeConfigs) {
        String attributeName = attributeConfig.getName();
        // Reject duplicate attribute names up front.
        if (extractorsByName.containsKey(attributeName)) {
            throw new IllegalArgumentException("Could not add " + attributeConfig
                    + ". Extractor for this attribute name already added.");
        }
        extractorsByName.put(attributeName, instantiateExtractor(attributeConfig, classLoader));
    }
    return extractorsByName;
} | @Test
public void instantiate_extractors_oneClassNotExisting() {
// GIVEN
AttributeConfig iqExtractor
= new AttributeConfig("iq", "com.hazelcast.query.impl.getters.ExtractorHelperTest$IqExtractor");
AttributeConfig nameExtractor = new AttributeConfig("name", "not.existing.class");
// WHEN
assertThatThrownBy(() -> instantiateExtractors(asList(iqExtractor, nameExtractor)))
.isInstanceOf(IllegalArgumentException.class)
.hasCauseInstanceOf(ClassNotFoundException.class);
} |
/**
 * Coerces a JSON node to a SQL {@code Time}.
 * <p>Numeric nodes are passed through as a long; text nodes must contain a
 * parseable long. Non-parseable text raises a string-coercion failure, and any
 * other node type raises an invalid-conversion failure. Range/validity checks
 * live in {@code returnTimeOrThrow}.
 */
static Time toTime(final JsonNode object) {
    if (object instanceof NumericNode) {
        return returnTimeOrThrow(object.asLong());
    }
    if (object instanceof TextNode) {
        try {
            return returnTimeOrThrow(Long.parseLong(object.textValue()));
        } catch (final NumberFormatException e) {
            throw failedStringCoercionException(SqlBaseType.TIME);
        }
    }
    throw invalidConversionException(object, SqlBaseType.TIME);
} | @Test
public void shouldConvertStringToTimeCorrectly() {
final Time d = JsonSerdeUtils.toTime(JsonNodeFactory.instance.textNode("100"));
assertThat(d.getTime(), equalTo(100L));
} |
/**
 * Reads a date-ish column, special-casing MySQL's YEAR type which must be
 * fetched as a plain object rather than a {@code java.sql.Date}.
 *
 * @param resultSet   the result set positioned on the current row
 * @param columnIndex 1-based column index
 * @return the column value, or {@code null} for SQL NULL in a YEAR column
 */
@Override
public Object getDateValue(final ResultSet resultSet, final int columnIndex) throws SQLException {
    if (isYearDataType(resultSet.getMetaData().getColumnTypeName(columnIndex))) {
        // BUG FIX: JDBC's wasNull() only reflects the LAST column read; it must
        // be called after a getter, not before. Read the value first.
        Object value = resultSet.getObject(columnIndex);
        return resultSet.wasNull() ? null : value;
    }
    return resultSet.getDate(columnIndex);
} | @Test
void assertGetDateValueWithYearDataTypeAndNullValue() throws SQLException {
when(resultSet.getMetaData().getColumnTypeName(1)).thenReturn("YEAR");
when(resultSet.wasNull()).thenReturn(true);
assertNull(dialectResultSetMapper.getDateValue(resultSet, 1));
} |
/**
 * Returns the dimensionality of this distribution, which is always 1.
 */
@Override
public int length() {
    return 1;
} | @Test
public void testLength() {
System.out.println("length");
BernoulliDistribution instance = new BernoulliDistribution(0.3);
instance.rand();
assertEquals(1, instance.length());
} |
/**
 * Returns the configured {@code DslStoreSuppliers}, or {@code Optional.empty()}
 * when none was specified.
 */
public Optional<DslStoreSuppliers> dslStoreSuppliers() {
    return Optional.ofNullable(dslStoreSuppliers);
} | @Test
public void shouldReturnEmptyWhenOriginalsAndOverridesDontHaveSuppliersSpecified() {
final Properties topologyOverrides = new Properties();
final StreamsConfig config = new StreamsConfig(StreamsTestUtils.getStreamsConfig());
final InternalTopologyBuilder topologyBuilder = new InternalTopologyBuilder(
new TopologyConfig("my-topology", config, topologyOverrides));
final InternalStreamsBuilder internalStreamsBuilder = new InternalStreamsBuilder(topologyBuilder);
final MaterializedInternal<Object, Object, KeyValueStore<Bytes, byte[]>> materialized =
new MaterializedInternal<>(Materialized.as(supplier), internalStreamsBuilder, prefix);
assertThat(materialized.dslStoreSuppliers().isPresent(), is(false));
} |
/**
 * Returns a KafkaProducer for this data connection.
 * <p>For a shared connection the pooled producer is retained and returned;
 * transactions are not supported in that mode. For a non-shared connection a
 * brand-new producer is created per call, optionally configured with the given
 * {@code transactional.id}.
 *
 * @param transactionalId transactional id to configure, or null for none
 * @throws IllegalArgumentException if a transactional id is requested on a shared connection
 */
@Nonnull
public <K, V> KafkaProducer<K, V> getProducer(@Nullable String transactionalId) {
    if (getConfig().isShared()) {
        if (transactionalId != null) {
            // BUG FIX: message previously concatenated the connection name without
            // a separating space ("...DataConnection<name>").
            throw new IllegalArgumentException("Cannot use transactions with shared "
                    + "KafkaProducer for DataConnection " + getConfig().getName());
        }
        retain();
        //noinspection unchecked
        return (KafkaProducer<K, V>) producerSupplier.get();
    }
    if (transactionalId != null) {
        @SuppressWarnings({"rawtypes", "unchecked"})
        Map<String, Object> castProperties = (Map) getConfig().getProperties();
        // Copy before mutating: the config's properties must not be modified.
        Map<String, Object> copy = new HashMap<>(castProperties);
        copy.put("transactional.id", transactionalId);
        return new KafkaProducer<>(copy);
    }
    return new KafkaProducer<>(getConfig().getProperties());
} | @Test
public void non_shared_data_connection_should_return_new_producer() {
kafkaDataConnection = createNonSharedKafkaDataConnection();
try (Producer<Object, Object> p1 = kafkaDataConnection.getProducer(null);
Producer<Object, Object> p2 = kafkaDataConnection.getProducer(null)) {
assertThat(p1).isNotSameAs(p2);
}
} |
/**
 * Enriches raw job stats into extended stats. Updates of the internal snapshots
 * are guarded by a non-blocking tryLock: if another thread is already updating,
 * this call simply returns the current extended stats without recomputing.
 */
public JobStatsExtended enrich(JobStats jobStats) {
    JobStats latestJobStats = getLatestJobStats(jobStats, previousJobStats);
    if (lock.tryLock()) {
        try {
            setFirstRelevantJobStats(latestJobStats);
            setJobStatsExtended(latestJobStats);
            setPreviousJobStats(latestJobStats);
        } finally {
            // BUG FIX: unlock in finally — if a setter throws, the lock would
            // otherwise be held forever, silently disabling all future updates.
            lock.unlock();
        }
    }
    return jobStatsExtended;
} | @Test
void estimatedTimeProcessingIsCalculated2() {
JobStats jobStats0 = getJobStats(now().minusSeconds(60), 100L, 0L, 0L, 100L);
JobStats jobStats1 = getJobStats(now().minusSeconds(50), 85L, 5L, 0L, 110L);
JobStats jobStats2 = getJobStats(now().minusSeconds(40), 75L, 5L, 0L, 120L);
JobStats jobStats3 = getJobStats(now().minusSeconds(30), 65L, 5L, 0L, 130L);
JobStats jobStats4 = getJobStats(now().minusSeconds(20), 55L, 5L, 0L, 140L);
JobStats jobStats5 = getJobStats(now().minusSeconds(10), 45L, 5L, 0L, 150L);
JobStats jobStats6 = getJobStats(now(), 35L, 5L, 0L, 160L);
JobStatsExtended jobStatsExtended = enrich(jobStats0, jobStats1, jobStats2, jobStats3, jobStats4, jobStats5, jobStats6);
assertThat(jobStatsExtended.getEstimation().isProcessingDone()).isFalse();
assertThat(Duration.between(now(), jobStatsExtended.getEstimation().getEstimatedProcessingFinishedAt()).toSeconds()).isCloseTo(40L, Offset.offset(1L));
} |
/**
 * Returns the comparator used for the given (0-based) column.
 */
public Comparator<?> getValueComparator(int column) {
    return valueComparators[column];
} | @Test
public void getDefaultComparatorForStringClass() {
ObjectTableSorter sorter = new ObjectTableSorter(createTableModel("string", String.class));
assertThat(sorter.getValueComparator(0), is(CoreMatchers.notNullValue()));
} |
/**
 * Validates that messages may be sent to the given topic.
 *
 * @param topic the topic name to check
 * @throws MQClientException if the topic is on the not-allowed-send list
 */
public static void isNotAllowedSendTopic(String topic) throws MQClientException {
    if (TopicValidator.isNotAllowedSendTopic(topic)) {
        throw new MQClientException(
            String.format("Sending message to topic[%s] is forbidden.", topic), null);
    }
} | @Test
public void testIsNotAllowedSendTopic() {
for (String topic : TopicValidator.getNotAllowedSendTopicSet()) {
try {
Validators.isNotAllowedSendTopic(topic);
fail("excepted MQClientException for blacklist topic");
} catch (MQClientException e) {
assertThat(e.getResponseCode()).isEqualTo(-1);
assertThat(e.getErrorMessage()).isEqualTo(String.format("Sending message to topic[%s] is forbidden.", topic));
}
}
} |
/**
 * Blocks for the delegate future's result and resolves it.
 * A HazelcastSerializationException thrown during resolution is wrapped in an
 * ExecutionException so callers see the same failure mode as for an
 * exceptionally-completed future.
 */
@Override
public V get() throws InterruptedException, ExecutionException {
    try {
        return resolve(future.get());
    } catch (HazelcastSerializationException e) {
        throw new ExecutionException(e);
    }
} | @Test
public void test_get_Object_withTimeout() throws Exception {
Object value = "value";
Future<Object> future = new DelegatingCompletableFuture<>(serializationService, newCompletedFuture(value));
assertEquals(value, future.get(1, TimeUnit.MILLISECONDS));
} |
/**
 * Removes all occurrences of the given action.
 *
 * @param a the action to remove; null is tolerated and removes nothing
 * @return true if at least one occurrence was removed
 */
public boolean removeAction(@Nullable Action a) {
    // CopyOnWriteArrayList does not support Iterator.remove, so removal goes
    // through removeAll with a singleton set instead.
    return a != null && getActions().removeAll(Set.of(a));
} | @Test
public void removeAction_null() {
assertFalse(thing.removeAction(null));
} |
/**
 * Translates an HTTP error response into a BackgroundException.
 * <p>The response body (if any) becomes the failure detail; the entity is first
 * replaced with a BufferedHttpEntity so it can be read here and remain readable.
 * MinIO-specific error headers, when present, override the error code/message.
 */
public BackgroundException map(HttpResponse response) throws IOException {
    final S3ServiceException failure;
    if(null == response.getEntity()) {
        // No body: only the status reason phrase is available.
        failure = new S3ServiceException(response.getStatusLine().getReasonPhrase());
    }
    else {
        // Buffer the entity so it can be consumed as the XML error detail.
        EntityUtils.updateEntity(response, new BufferedHttpEntity(response.getEntity()));
        failure = new S3ServiceException(response.getStatusLine().getReasonPhrase(),
            EntityUtils.toString(response.getEntity()));
    }
    failure.setResponseCode(response.getStatusLine().getStatusCode());
    if(response.containsHeader(MINIO_ERROR_CODE)) {
        failure.setErrorCode(response.getFirstHeader(MINIO_ERROR_CODE).getValue());
    }
    if(response.containsHeader(MINIO_ERROR_DESCRIPTION)) {
        failure.setErrorMessage(response.getFirstHeader(MINIO_ERROR_DESCRIPTION).getValue());
    }
    return this.map(failure);
} | @Test
public void testLoginFailure403() {
final ServiceException f = new ServiceException("m", "<null/>");
f.setResponseCode(403);
f.setErrorMessage("m");
f.setErrorCode("AccessDenied");
assertTrue(new S3ExceptionMappingService().map(f) instanceof AccessDeniedException);
f.setErrorCode("InvalidAccessKeyId");
assertTrue(new S3ExceptionMappingService().map(f) instanceof LoginFailureException);
f.setErrorCode("SignatureDoesNotMatch");
assertTrue(new S3ExceptionMappingService().map(f) instanceof LoginFailureException);
} |
/**
 * Returns true when {@code checkVariableName} reports no problems for the
 * given name.
 */
public static boolean isVariableNameValid( String source ) {
    return checkVariableName( source ).isEmpty();
} | @Test
void variableNameWithValidCharacters() {
String var = "?_873./-'+*valid";
assertThat( FEELParser.isVariableNameValid( var )).isEqualTo(true);
} |
/**
 * Static factory: starts a find-files builder scoped to the given table.
 */
public static Builder in(Table table) {
    return new Builder(table);
} | @TestTemplate
public void testIncludeColumnStats() {
table.newAppend().appendFile(FILE_WITH_STATS).commit();
Iterable<DataFile> files = FindFiles.in(table).includeColumnStats().collect();
final DataFile file = files.iterator().next();
assertThat(file.columnSizes()).isEqualTo(FILE_WITH_STATS.columnSizes());
assertThat(file.valueCounts()).isEqualTo(FILE_WITH_STATS.valueCounts());
assertThat(file.nullValueCounts()).isEqualTo(FILE_WITH_STATS.nullValueCounts());
assertThat(file.nanValueCounts()).isEqualTo(FILE_WITH_STATS.nanValueCounts());
assertThat(file.lowerBounds()).isEqualTo(FILE_WITH_STATS.lowerBounds());
assertThat(file.upperBounds()).isEqualTo(FILE_WITH_STATS.upperBounds());
} |
/**
 * Asynchronously loads the originator entity and converts it to EntityFieldsData.
 * <p>Supported originator types: TENANT, CUSTOMER, USER, ASSET, DEVICE, ALARM,
 * RULE_CHAIN, ENTITY_VIEW, EDGE. Any other type completes the returned future
 * exceptionally with a TbNodeException. Note that DEVICE is fetched
 * synchronously and wrapped in an immediate future.
 */
public static ListenableFuture<EntityFieldsData> findAsync(TbContext ctx, EntityId originatorId) {
    switch (originatorId.getEntityType()) { // TODO: use EntityServiceRegistry
        case TENANT:
            return toEntityFieldsDataAsync(ctx.getTenantService().findTenantByIdAsync(ctx.getTenantId(), (TenantId) originatorId),
                EntityFieldsData::new, ctx);
        case CUSTOMER:
            return toEntityFieldsDataAsync(ctx.getCustomerService().findCustomerByIdAsync(ctx.getTenantId(), (CustomerId) originatorId),
                EntityFieldsData::new, ctx);
        case USER:
            return toEntityFieldsDataAsync(ctx.getUserService().findUserByIdAsync(ctx.getTenantId(), (UserId) originatorId),
                EntityFieldsData::new, ctx);
        case ASSET:
            return toEntityFieldsDataAsync(ctx.getAssetService().findAssetByIdAsync(ctx.getTenantId(), (AssetId) originatorId),
                EntityFieldsData::new, ctx);
        case DEVICE:
            // Device lookup is synchronous; wrap in an immediate future.
            return toEntityFieldsDataAsync(Futures.immediateFuture(ctx.getDeviceService().findDeviceById(ctx.getTenantId(), (DeviceId) originatorId)),
                EntityFieldsData::new, ctx);
        case ALARM:
            return toEntityFieldsDataAsync(ctx.getAlarmService().findAlarmByIdAsync(ctx.getTenantId(), (AlarmId) originatorId),
                EntityFieldsData::new, ctx);
        case RULE_CHAIN:
            return toEntityFieldsDataAsync(ctx.getRuleChainService().findRuleChainByIdAsync(ctx.getTenantId(), (RuleChainId) originatorId),
                EntityFieldsData::new, ctx);
        case ENTITY_VIEW:
            return toEntityFieldsDataAsync(ctx.getEntityViewService().findEntityViewByIdAsync(ctx.getTenantId(), (EntityViewId) originatorId),
                EntityFieldsData::new, ctx);
        case EDGE:
            return toEntityFieldsDataAsync(ctx.getEdgeService().findEdgeByIdAsync(ctx.getTenantId(), (EdgeId) originatorId),
                EntityFieldsData::new, ctx);
        default:
            return Futures.immediateFailedFuture(new TbNodeException("Unexpected originator EntityType: " + originatorId.getEntityType()));
    }
} | @Test
public void givenSupportedEntityTypes_whenFindAsync_thenOK() throws ExecutionException, InterruptedException {
for (var entityType : SUPPORTED_ENTITY_TYPES) {
var entityId = EntityIdFactory.getByTypeAndUuid(entityType, RANDOM_UUID);
initMocks(entityType, false);
when(ctxMock.getTenantId()).thenReturn(TENANT_ID);
var actualEntityFieldsData = EntitiesFieldsAsyncLoader.findAsync(ctxMock, entityId).get();
var expectedEntityFieldsData = new EntityFieldsData(getEntityFromEntityId(entityId));
Assertions.assertEquals(expectedEntityFieldsData, actualEntityFieldsData);
}
} |
/**
 * Executes the portal referenced by the Execute packet, bounded by the packet's
 * max-rows limit, and returns the resulting response packets.
 */
@Override
public List<DatabasePacket> execute() throws SQLException {
    return portalContext.get(packet.getPortal()).execute(packet.getMaxRows());
} | @Test
void assertExecute() throws SQLException {
PostgreSQLPacket expectedPacket = mock(PostgreSQLPacket.class);
when(portal.execute(anyInt())).thenReturn(Collections.singletonList(expectedPacket));
List<DatabasePacket> actualPackets = executor.execute();
assertThat(actualPackets.size(), is(1));
assertThat(actualPackets.iterator().next(), is(expectedPacket));
} |
/**
 * Drops an Iceberg table from the Hive metastore.
 * <p>When {@code purge} is requested, the current table metadata is captured
 * BEFORE the metastore entry is removed (it is needed to locate the data
 * files), and data/metadata files are deleted only after the metastore drop
 * succeeds. The Hive-side drop itself never deletes data ({@code deleteData=false}).
 *
 * @return true if the table was dropped; false for an invalid identifier or a
 *         table that does not exist
 */
@Override
public boolean dropTable(TableIdentifier identifier, boolean purge) {
    if (!isValidIdentifier(identifier)) {
        return false;
    }
    String database = identifier.namespace().level(0);
    TableOperations ops = newTableOps(identifier);
    TableMetadata lastMetadata = null;
    if (purge) {
        try {
            // Capture metadata before the metastore entry disappears.
            lastMetadata = ops.current();
        } catch (NotFoundException e) {
            LOG.warn(
                "Failed to load table metadata for table: {}, continuing drop without purge",
                identifier,
                e);
        }
    }
    try {
        clients.run(client -> {
            client.dropTable(database, identifier.name(),
                false /* do not delete data */,
                false /* throw NoSuchObjectException if the table doesn't exist */);
            return null;
        });
        if (purge && lastMetadata != null) {
            // Only purge files once the metastore drop has committed.
            CatalogUtil.dropTableData(ops.io(), lastMetadata);
        }
        LOG.info("Dropped table: {}", identifier);
        return true;
    } catch (NoSuchTableException | NoSuchObjectException e) {
        LOG.info("Skipping drop, table does not exist: {}", identifier, e);
        return false;
    } catch (TException e) {
        throw new RuntimeException("Failed to drop " + identifier, e);
    } catch (InterruptedException e) {
        // Restore the interrupt status before surfacing the failure.
        Thread.currentThread().interrupt();
        throw new RuntimeException("Interrupted in call to dropTable", e);
    }
} | @Test
public void testSetDefaultPartitionSpec() throws Exception {
Schema schema = getTestSchema();
TableIdentifier tableIdent = TableIdentifier.of(DB_NAME, "tbl");
try {
Table table = catalog.buildTable(tableIdent, schema).create();
assertThat(hmsTableParameters())
.as("Must not have default partition spec")
.doesNotContainKey(TableProperties.DEFAULT_PARTITION_SPEC);
table.updateSpec().addField(bucket("data", 16)).commit();
assertThat(hmsTableParameters())
.containsEntry(
TableProperties.DEFAULT_PARTITION_SPEC, PartitionSpecParser.toJson(table.spec()));
} finally {
catalog.dropTable(tableIdent);
}
} |
/**
 * This implementation is unconditionally supported.
 */
@Override
public boolean isSupported() {
    return true;
} | @Test
public void isSupported() {
LenovoImpl lenovo = new LenovoImpl(mApplication);
Assert.assertTrue(lenovo.isSupported());
} |
/**
 * Stops the server (if it was started) and closes the Vert.x instance.
 */
@Override
public void shutdown() {
    try {
        if (server.started()) {
            server.stop();
        }
    } finally {
        // FIX: always release Vert.x, even if stopping the server throws;
        // previously a failing stop() leaked the Vert.x instance.
        vertx.close();
    }
} | @Test
public void shouldNotStopServerIfNotStarted() {
// Given:
when(server.started()).thenReturn(false);
// When:
checker.shutdown();
// Then:
verify(server, never()).stop();
} |
/**
 * Fails unless the subject is the very same instance as {@code expected}
 * (reference equality via {@code ==}; null matches only null).
 */
public final void isSameInstanceAs(@Nullable Object expected) {
    if (actual != expected) {
        failEqualityCheck(
            SAME_INSTANCE,
            expected,
            /*
             * Pass through *whether* the values are equal so that failEqualityCheck() can print that
             * information. But remove the description of the difference, which is always about
             * content, since people calling isSameInstanceAs() are explicitly not interested in
             * content, only object identity.
             */
            compareForEquality(expected).withoutDescription());
    }
} | @Test
public void isSameInstanceAsWithNulls() {
Object o = null;
assertThat(o).isSameInstanceAs(null);
} |
/**
 * Returns the run strategy from the workflow's current properties snapshot,
 * falling back to {@code Defaults.DEFAULT_RUN_STRATEGY} when none is set.
 */
public RunStrategy getRunStrategy(String workflowId) {
    PropertiesSnapshot properties = getCurrentPropertiesSnapshot(workflowId);
    LOG.debug("Properties for workflow [{}] are {}", workflowId, properties);
    return ObjectHelper.valueOrDefault(properties.getRunStrategy(), Defaults.DEFAULT_RUN_STRATEGY);
} | @Test
public void testGetRunStrategy() throws Exception {
WorkflowDefinition wfd = loadWorkflow(TEST_WORKFLOW_ID1);
workflowDao.addWorkflowDefinition(wfd, wfd.getPropertiesSnapshot().extractProperties());
assertNotNull(wfd.getInternalId());
RunStrategy runStrategy = workflowDao.getRunStrategy(TEST_WORKFLOW_ID1);
assertEquals(RunStrategy.Rule.PARALLEL, runStrategy.getRule());
assertEquals(20L, runStrategy.getWorkflowConcurrency());
wfd = loadWorkflow(TEST_WORKFLOW_ID2);
workflowDao.addWorkflowDefinition(wfd, wfd.getPropertiesSnapshot().extractProperties());
assertNotNull(wfd.getInternalId());
assertEquals(Defaults.DEFAULT_RUN_STRATEGY, workflowDao.getRunStrategy(TEST_WORKFLOW_ID2));
} |
/**
 * Returns the cached active flag.
 */
@Override
public boolean isActive() {
    return isActive;
} | @Test(timeOut = 30000)
public void testCreateProducerTimeout() throws Exception {
resetChannel();
setChannelConnected();
// Delay the topic creation in a deterministic way
CompletableFuture<Runnable> openTopicFuture = new CompletableFuture<>();
doAnswer(invocationOnMock -> {
openTopicFuture.complete(
() -> ((OpenLedgerCallback) invocationOnMock.getArguments()[2]).openLedgerComplete(ledgerMock,
null));
return null;
}).when(pulsarTestContext.getManagedLedgerFactory())
.asyncOpen(matches(".*success.*"), any(ManagedLedgerConfig.class),
any(OpenLedgerCallback.class), any(Supplier.class), any());
// In a create producer timeout from client side we expect to see this sequence of commands :
// 1. create producer
// 2. close producer (when the timeout is triggered, which may be before the producer was created on the broker
// 3. create producer (triggered by reconnection logic)
// These operations need to be serialized, to allow the last create producer to finally succeed
// (There can be more create/close pairs in the sequence, depending on the client timeout
String producerName = "my-producer";
ByteBuf createProducer1 = Commands.newProducer(successTopicName, 1 /* producer id */, 1 /* request id */,
producerName, Collections.emptyMap(), false);
channel.writeInbound(createProducer1);
ByteBuf closeProducer = Commands.newCloseProducer(1 /* producer id */, 2 /* request id */);
channel.writeInbound(closeProducer);
ByteBuf createProducer2 = Commands.newProducer(successTopicName, 1 /* producer id */, 3 /* request id */,
producerName, Collections.emptyMap(), false);
channel.writeInbound(createProducer2);
// Complete the topic opening: It will make 2nd producer creation successful
openTopicFuture.get().run();
// Close succeeds
Object response = getResponse();
assertEquals(response.getClass(), CommandSuccess.class);
assertEquals(((CommandSuccess) response).getRequestId(), 2);
// 2nd producer will be successfully created as topic is open by then
response = getResponse();
assertEquals(response.getClass(), CommandProducerSuccess.class);
assertEquals(((CommandProducerSuccess) response).getRequestId(), 3);
assertTrue(channel.isActive());
channel.finish();
} |
@Override
public Double getDouble(final int columnIndex) {
return values.getDouble(columnIndex - 1);
} | @Test
public void shouldGetDouble() {
assertThat(row.getDouble("f_double"), is(34.43));
} |
int[][] assignPartitionsToProcessors(int localParallelism, boolean isEdgeDistributed) {
final int[] ptions = isEdgeDistributed ? localPartitions : allPartitions;
final int[][] ptionsPerProcessor = createPtionArrays(ptions.length, localParallelism);
for (int i = 0; i < localParallelism; i++) {
for (int j = 0, ptionIndex = i; ptionIndex < ptions.length; ptionIndex += localParallelism, j++) {
ptionsPerProcessor[i][j] = ptions[ptionIndex];
}
}
return ptionsPerProcessor;
} | @Test
public void testAssignPartitionsToProcessors() {
assertArrayEquals(
new int[][]{new int[]{0, 1, 2, 3, 4, 5, 6}},
a.assignPartitionsToProcessors(1, false));
assertArrayEquals(
new int[][]{
new int[]{0, 2, 4, 6},
new int[]{1, 3, 5}},
a.assignPartitionsToProcessors(2, false));
assertArrayEquals(
new int[][]{
new int[]{0, 3, 6},
new int[]{1, 4},
new int[]{2, 5}
},
a.assignPartitionsToProcessors(3, false));
assertArrayEquals(
new int[][]{new int[]{0, 3, 6}},
a.assignPartitionsToProcessors(1, true));
assertArrayEquals(
new int[][]{
new int[]{0, 6},
new int[]{3}},
a.assignPartitionsToProcessors(2, true));
assertArrayEquals(
new int[][]{
new int[]{0},
new int[]{3},
new int[]{6}},
a.assignPartitionsToProcessors(3, true));
} |
@Override
public List<String> goSupportedVersions() {
return SUPPORTED_VERSIONS;
} | @Test
public void shouldVerifyGoSupportedVersion() {
assertTrue(analyticsExtension.goSupportedVersions().contains("1.0"));
assertTrue(analyticsExtension.goSupportedVersions().contains("2.0"));
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.