focal_method — string column (lengths 13 to 60.9k characters)
test_case — string column (lengths 25 to 109k characters)
/**
 * Coerces a JSON node to a SQL DATE value.
 * Numeric nodes are interpreted directly as epoch days; text nodes are first
 * parsed as a long and then interpreted the same way.
 *
 * @param object the JSON node to convert
 * @throws the exception built by failedStringCoercionException when the text is not a number,
 *         or by invalidConversionException for any other node type
 */
static Date toDate(final JsonNode object) {
  if (object instanceof NumericNode) {
    return getDateFromEpochDays(object.asLong());
  }
  if (!(object instanceof TextNode)) {
    throw invalidConversionException(object, SqlBaseType.DATE);
  }
  try {
    final long epochDays = Long.parseLong(object.textValue());
    return getDateFromEpochDays(epochDays);
  } catch (final NumberFormatException e) {
    throw failedStringCoercionException(SqlBaseType.DATE);
  }
}
/** A boolean JSON node cannot be coerced to DATE; the conversion must throw. */
@Test(expected = IllegalArgumentException.class)
public void shouldFailWhenConvertingIncompatibleDate() {
  JsonSerdeUtils.toDate(JsonNodeFactory.instance.booleanNode(false));
}
/**
 * Parses the input consumed by this parser and returns the root of the
 * resulting node chain.
 *
 * @return the head node produced by the grammar's start production E()
 * @throws ScanException propagated from the underlying scanner
 */
public Node parse() throws ScanException {
  final Node root = E();
  return root;
}
/**
 * Verifies that the pattern parser extracts FormatInfo (min/max widths plus two
 * boolean flags — presumably pad/truncate direction, confirm against FormatInfo docs)
 * from conversion words such as "%-4.5x".
 */
@Test
public void testFormattingInfo() throws Exception {
    {
        // "%45x": minimum width 45, no maximum.
        Parser<Object> p = new Parser<>("%45x");
        Node t = p.parse();
        FormattingNode witness = new SimpleKeywordNode("x");
        witness.setFormatInfo(new FormatInfo(45, Integer.MAX_VALUE));
        Assertions.assertEquals(witness, t);
    }
    {
        // "%4.5x": minimum width 4, maximum width 5.
        Parser<Object> p = new Parser<>("%4.5x");
        Node t = p.parse();
        FormattingNode witness = new SimpleKeywordNode("x");
        witness.setFormatInfo(new FormatInfo(4, 5));
        Assertions.assertEquals(witness, t);
    }
    {
        // "%-4.5x": leading '-' flips the first boolean flag to false.
        Parser<Object> p = new Parser<>("%-4.5x");
        Node t = p.parse();
        FormattingNode witness = new SimpleKeywordNode("x");
        witness.setFormatInfo(new FormatInfo(4, 5, false, true));
        Assertions.assertEquals(witness, t);
    }
    {
        // "%-4.-5x": '-' before the max width flips the second flag as well.
        Parser<Object> p = new Parser<>("%-4.-5x");
        Node t = p.parse();
        FormattingNode witness = new SimpleKeywordNode("x");
        witness.setFormatInfo(new FormatInfo(4, 5, false, false));
        Assertions.assertEquals(witness, t);
    }
    {
        // Two formatted keywords separated by a literal space node.
        Parser<Object> p = new Parser<>("%-4.5x %12y");
        Node t = p.parse();
        FormattingNode witness = new SimpleKeywordNode("x");
        witness.setFormatInfo(new FormatInfo(4, 5, false, true));
        Node n = witness.next = new Node(Node.LITERAL, " ");
        n = n.next = new SimpleKeywordNode("y");
        ((FormattingNode) n).setFormatInfo(new FormatInfo(12, Integer.MAX_VALUE));
        Assertions.assertEquals(witness, t);
    }
}
/**
 * Queries a node for its reconfigurable properties and prints them.
 *
 * @param nodeType the kind of node being queried (e.g. "datanode")
 * @param address  host:port of the node
 * @param out      stream for the property listing
 * @param err      stream for error messages
 * @return 0 when the listing was printed, 1 when the query failed
 */
int getReconfigurableProperties(final String nodeType, final String address,
    final PrintStream out, final PrintStream err) throws IOException {
  List<String> properties;
  try {
    properties = getReconfigurablePropertiesDispatch(nodeType, address, out, err);
  } catch (IOException e) {
    err.println(String.format("Node [%s] reconfiguration: %s.", address, e.toString()));
    return 1;
  }
  // The dispatch may report its own error and return null; treat that as failure too.
  if (properties == null) {
    return 1;
  }
  out.println(String.format("Node [%s] Reconfigurable properties:", address));
  for (String name : properties) {
    out.println(name);
  }
  return 0;
}
/** The datanode must report its full set of reconfigurable properties. */
@Test(timeout = 30000)
public void testDataNodeGetReconfigurableProperties() throws IOException, InterruptedException {
  final String address = "localhost:" + datanode.getIpcPort();
  final List<String> outs = Lists.newArrayList();
  final List<String> errs = Lists.newArrayList();
  getReconfigurableProperties("datanode", address, outs, errs);
  // One header line plus the property names; the data-dir key comes first.
  assertEquals(26, outs.size());
  assertEquals(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, outs.get(1));
}
/**
 * Returns the HeaderExchangeChannel wrapper bound to the given channel,
 * creating one when absent. The wrapper is only cached on the channel while
 * it is connected, so a stale wrapper is not kept for a dead connection.
 *
 * @param ch the transport channel, may be null
 * @return the wrapper, or null when {@code ch} is null
 */
static HeaderExchangeChannel getOrAddChannel(Channel ch) {
    if (ch == null) {
        return null;
    }
    HeaderExchangeChannel wrapper = (HeaderExchangeChannel) ch.getAttribute(CHANNEL_KEY);
    if (wrapper != null) {
        return wrapper;
    }
    wrapper = new HeaderExchangeChannel(ch);
    if (ch.isConnected()) {
        ch.setAttribute(CHANNEL_KEY, wrapper);
    }
    return wrapper;
}
/** A null channel must map to a null exchange channel rather than throwing. */
@Test
void getOrAddChannelTest02() {
    channel = null;
    Assertions.assertNull(HeaderExchangeChannel.getOrAddChannel(channel));
}
/**
 * Index statistics are unavailable here: replicated maps have no query engine,
 * so this call always fails.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public Map<String, LocalIndexStats> getIndexStats() {
    final String reason = "Queries on replicated maps are not supported.";
    throw new UnsupportedOperationException(reason);
}
/** getIndexStats() on replicated-map stats must always be rejected. */
@Test(expected = UnsupportedOperationException.class)
public void testGetIndexStats() {
    localReplicatedMapStats.getIndexStats();
}
/**
 * Fills the generated class's GET_CREATED_TRANSFORMATION_DICTIONARY method with a body
 * that declares the transformation dictionary and returns it. Does nothing when the
 * dictionary is null.
 *
 * @param toPopulate               class declaration containing the target method
 * @param transformationDictionary PMML dictionary to code-generate, may be null
 */
public static void populateGetCreatedTransformationDictionaryMethod(final ClassOrInterfaceDeclaration toPopulate,
                                                                    final TransformationDictionary transformationDictionary) {
    if (transformationDictionary == null) {
        return; // nothing to generate
    }
    final BlockStmt body =
            KiePMMLTransformationDictionaryFactory.getKiePMMLTransformationDictionaryVariableDeclaration(transformationDictionary);
    body.addStatement(getReturnStmt(TRANSFORMATION_DICTIONARY));
    toPopulate.getMethodsByName(GET_CREATED_TRANSFORMATION_DICTIONARY).get(0).setBody(body);
}
/** The generated dictionary method must match the expected source in TEST_09_SOURCE. */
@Test
void populateGetCreatedTransformationDictionaryMethod() throws IOException {
    org.kie.pmml.compiler.commons.codegenfactories.KiePMMLModelFactoryUtils.populateGetCreatedTransformationDictionaryMethod(classOrInterfaceDeclaration, pmmlModel.getTransformationDictionary());
    final MethodDeclaration generated =
            classOrInterfaceDeclaration.getMethodsByName(GET_CREATED_TRANSFORMATION_DICTIONARY).get(0);
    final String expectedSource = getFileContent(TEST_09_SOURCE);
    final MethodDeclaration expected = JavaParserUtils.parseMethod(expectedSource);
    assertThat(JavaParserUtils.equalsNode(expected, generated)).isTrue();
}
/**
 * Parses a signed decimal long from {@code length} ASCII characters of {@code cs}
 * starting at {@code index}.
 *
 * @param cs     sequence containing the digits
 * @param index  position of the first character of the number
 * @param length number of characters to parse
 * @return the parsed value
 * @throws AsciiNumberFormatException for an empty range or a lone minus sign
 */
public static long parseLongAscii(final CharSequence cs, final int index, final int length) {
    if (length <= 0) {
        throw new AsciiNumberFormatException("empty string: index=" + index + " length=" + length);
    }
    final boolean negative = MINUS_SIGN == cs.charAt(index);
    int i = index;
    if (negative) {
        i++;
        // "-" alone is not a number.
        if (1 == length) {
            throwParseLongError(cs, index, length);
        }
    }
    final int end = index + length;
    if (end - i < LONG_MAX_DIGITS) {
        // Fewer digits than Long.MAX_VALUE has: overflow is impossible, take the fast path.
        final long tally = parsePositiveLongAscii(cs, index, length, i, end);
        return negative ? -tally : tally;
    } else if (negative) {
        // Max-length negative value: parse with an overflow check against Long.MIN_VALUE's digits.
        return -parseLongAsciiOverflowCheck(cs, index, length, LONG_MIN_VALUE_DIGITS, i, end);
    } else {
        // Max-length positive value: parse with an overflow check against Long.MAX_VALUE's digits.
        return parseLongAsciiOverflowCheck(cs, index, length, LONG_MAX_VALUE_DIGITS, i, end);
    }
}
/** A lone "-" is not a number and must be rejected. */
@Test
void shouldThrowExceptionWhenParsingLongContainingLoneMinusSign() {
    assertThrows(AsciiNumberFormatException.class, () -> parseLongAscii("-", 0, 1));
}
/**
 * Runs one rebalancing pass: updates the skew metric and, when the zone is
 * stable, finds and executes the single best node move.
 *
 * @return 1.0 when the pass completed or was deliberately skipped, 0.0 when the
 *         node repository is not working
 */
@Override
protected double maintain() {
    if ( ! nodeRepository().nodes().isWorking()) return 0.0;

    if ( ! nodeRepository().zone().cloud().allowHostSharing()) return 1.0; // Re-balancing not necessary
    if (nodeRepository().zone().environment().isTest()) return 1.0; // Short-lived deployments; no need to rebalance
    if (nodeRepository().zone().system().isCd()) return 1.0; // CD tests assert on # of nodes, avoid re-balancing as it make tests unstable

    // Work with an unlocked snapshot as this can take a long time and full consistency is not needed
    NodeList allNodes = nodeRepository().nodes().list();
    updateSkewMetric(allNodes);
    if ( ! zoneIsStable(allNodes)) return 1.0;
    findBestMove(allNodes).execute(true, Agent.Rebalancer, deployer, metric, nodeRepository());
    return 1.0;
}
/**
 * End-to-end rebalancer scenario: deploy skewed apps, add better hosts, and
 * verify retire/replace decisions plus the exact skew metric after each step.
 */
@Test
public void testRebalancing() {
    RebalancerTester tester = new RebalancerTester();
    // --- Deploying a cpu heavy application - causing 1 of these nodes to be skewed
    tester.deployApp(cpuApp);
    Node cpuSkewedNode = tester.getNode(cpuApp);
    tester.maintain();
    assertFalse("No better place to move the skewed node, so no action is taken",
                tester.getNode(cpuSkewedNode.hostname()).get().status().wantToRetire());
    assertEquals(0.00325, tester.metric().values.get("hostedVespa.docker.skew").doubleValue(), 0.00001);
    // --- Making a more suitable node configuration available causes rebalancing
    Node newCpuHost = tester.makeReadyNode("cpu");
    tester.activateTenantHosts();
    tester.maintain();
    assertTrue("Rebalancer retired the node we wanted to move away from", tester.isNodeRetired(cpuSkewedNode));
    assertTrue("... and added a node on the new host instead",
               tester.getNodes(cpuApp, Node.State.active).stream().anyMatch(node -> node.hasParent(newCpuHost.hostname())));
    assertEquals("Skew is reduced", 0.00244, tester.metric().values.get("hostedVespa.docker.skew").doubleValue(), 0.00001);
    // --- Deploying a mem heavy application - allocated to the best option and causing increased skew
    tester.deployApp(memoryApp);
    assertEquals("Assigned to a flat node as that causes least skew", "flat",
                 tester.nodeRepository().nodes().list().parentOf(tester.getNode(memoryApp)).get().flavor().name());
    tester.maintain();
    assertEquals("Deploying the mem skewed app increased skew",
                 0.00734, tester.metric().values.get("hostedVespa.docker.skew").doubleValue(), 0.00001);
    // --- Adding a more suitable node reconfiguration causes no action as the system is not stable
    Node memSkewedNode = tester.getNode(memoryApp);
    Node newMemHost = tester.makeReadyNode("mem");
    tester.activateTenantHosts();
    tester.maintain();
    assertFalse("No rebalancing happens because cpuSkewedNode is still retired", tester.isNodeRetired(memSkewedNode));
    // --- Making the system stable enables rebalancing
    NestedTransaction tx = new NestedTransaction();
    tester.nodeRepository().nodes().deactivate(List.of(cpuSkewedNode),
                                               new ApplicationTransaction(new ApplicationMutex(cpuApp, () -> {}), tx));
    tx.commit();
    assertEquals(1, tester.getNodes(Node.State.dirty).size());
    // ... if activation fails when trying, we clean up the state
    tester.deployer().setFailActivate(true);
    tester.maintain();
    assertTrue("Want to retire is reset",
               tester.getNodes(Node.State.active).stream().noneMatch(node -> node.status().wantToRetire()));
    assertEquals("Reserved node was moved to dirty", 2, tester.getNodes(Node.State.dirty).size());
    String reservedHostname = tester.getNodes(Node.State.dirty).owner(memoryApp).first().get().hostname();
    tester.tester.move(Node.State.ready, reservedHostname);
    tester.nodeRepository().nodes().removeRecursively(reservedHostname);
    // ... otherwise we successfully rebalance, again reducing skew
    tester.deployer().setFailActivate(false);
    tester.maintain();
    assertTrue("Rebalancer retired the node we wanted to move away from", tester.isNodeRetired(memSkewedNode));
    assertTrue("... and added a node on the new host instead",
               tester.getNodes(memoryApp, Node.State.active).stream().anyMatch(node -> node.hasParent(newMemHost.hostname())));
    assertEquals("Skew is reduced", 0.00587, tester.metric().values.get("hostedVespa.docker.skew").doubleValue(), 0.00001);
}
/**
 * Build step registering the JobRunr CDI beans: the producer, the starter, the
 * JSON mapper matching the detected capability, and the storage provider(s)
 * matching the capabilities and build-time configuration. All beans are marked
 * unremovable so ArC does not prune them.
 */
@BuildStep
AdditionalBeanBuildItem produce(Capabilities capabilities, JobRunrBuildTimeConfiguration jobRunrBuildTimeConfiguration) {
    Set<Class<?>> beans = new HashSet<>();
    beans.add(JobRunrProducer.class);
    beans.add(JobRunrStarter.class);
    beans.add(jsonMapper(capabilities));
    beans.addAll(storageProvider(capabilities, jobRunrBuildTimeConfiguration));
    return AdditionalBeanBuildItem.builder()
            .setUnremovable()
            .addBeanClasses(beans.toArray(new Class[0]))
            .build();
}
/** With the AGROAL capability present, the SQL storage provider producer must be registered. */
@Test
void jobRunrProducerUsesSqlStorageProviderIfAgroalCapabilityIsPresent() {
    lenient().when(capabilities.isPresent(Capability.AGROAL)).thenReturn(true);

    AdditionalBeanBuildItem beanBuildItem = jobRunrExtensionProcessor.produce(capabilities, jobRunrBuildTimeConfiguration);

    assertThat(beanBuildItem.getBeanClasses()).contains(JobRunrSqlStorageProviderProducer.class.getName());
}
/**
 * Initializes the mail input step: resolves configuration (server, credentials,
 * row limit, received-date search terms), opens the mail connection and, for a
 * static folder, resolves the folder list. Search/connection failures set the
 * error count and stop the transformation but still return true; metadata
 * failures return false.
 */
public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
    meta = (MailInputMeta) smi;
    data = (MailInputData) sdi;
    if ( !super.init( smi, sdi ) ) {
      return false;
    }
    if ( !meta.isDynamicFolder() ) {
      try {
        // Create the output row meta-data
        data.outputRowMeta = new RowMeta();
        // get the metadata populated
        meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );
      } catch ( Exception e ) {
        logError( BaseMessages.getString( PKG, "MailInput.ErrorInit", e.toString() ) );
        logError( Const.getStackTracker( e ) );
        return false;
      }
    }
    data.usePOP = meta.getProtocol().equals( MailConnectionMeta.PROTOCOL_STRING_POP3 );
    String realserver = environmentSubstitute( meta.getServerName() );
    // MBOX connections address a local file; drop the URI scheme prefix.
    if ( meta.getProtocol().equals( MailConnectionMeta.PROTOCOL_STRING_MBOX )
        && StringUtils.startsWith( realserver, "file://" ) ) {
      realserver = StringUtils.remove( realserver, "file://" );
    }
    String realusername = environmentSubstitute( meta.getUserName() );
    String realpassword = Utils.resolvePassword( variables, meta.getPassword() );
    int realport = Const.toInt( environmentSubstitute( meta.getPort() ), -1 );
    String realProxyUsername = environmentSubstitute( meta.getProxyUsername() );
    if ( !meta.isDynamicFolder() ) {
      //Limit field has absolute priority
      String reallimitrow = environmentSubstitute( meta.getRowLimit() );
      int limit = Const.toInt( reallimitrow, 0 );
      // Fall back to the protocol-specific "retrieve first N" setting when no limit is set.
      if ( limit == 0 ) {
        limit = getReadFirst( meta.getProtocol() );
      }
      data.rowlimit = limit;
    }
    Date beginDate = null;
    Date endDate = null;
    SimpleDateFormat df = new SimpleDateFormat( MailInputMeta.DATE_PATTERN );
    // check search terms
    // Received Date
    try {
      switch ( meta.getConditionOnReceivedDate() ) {
        case MailConnectionMeta.CONDITION_DATE_EQUAL:
        case MailConnectionMeta.CONDITION_DATE_GREATER:
        case MailConnectionMeta.CONDITION_DATE_SMALLER:
          // Single-date conditions only need the first date field.
          String realBeginDate = environmentSubstitute( meta.getReceivedDate1() );
          if ( Utils.isEmpty( realBeginDate ) ) {
            throw new KettleException( BaseMessages.getString( PKG, "MailInput.Error.ReceivedDateSearchTermEmpty" ) );
          }
          beginDate = df.parse( realBeginDate );
          break;
        case MailConnectionMeta.CONDITION_DATE_BETWEEN:
          // Range condition requires both dates.
          realBeginDate = environmentSubstitute( meta.getReceivedDate1() );
          if ( Utils.isEmpty( realBeginDate ) ) {
            throw new KettleException( BaseMessages.getString( PKG, "MailInput.Error.ReceivedDatesSearchTermEmpty" ) );
          }
          beginDate = df.parse( realBeginDate );
          String realEndDate = environmentSubstitute( meta.getReceivedDate2() );
          if ( Utils.isEmpty( realEndDate ) ) {
            throw new KettleException( BaseMessages.getString( PKG, "MailInput.Error.ReceivedDatesSearchTermEmpty" ) );
          }
          endDate = df.parse( realEndDate );
          break;
        default:
          break;
      }
    } catch ( Exception e ) {
      logError( BaseMessages.getString( PKG, "MailInput.Error.SettingSearchTerms", e.getMessage() ) );
      setErrors( 1 );
      stopAll();
    }
    try {
      // create a mail connection object
      data.mailConn =
        new MailConnection(
          log, MailConnectionMeta.getProtocolFromString( meta.getProtocol(), MailConnectionMeta.PROTOCOL_IMAP ),
          realserver, realport, realusername, realpassword, meta.isUseSSL(), meta.isUseProxy(),
          realProxyUsername );
      // connect
      data.mailConn.connect();
      // Need to apply search filters?
      applySearch( beginDate, endDate );
      if ( !meta.isDynamicFolder() ) {
        // pass static folder name
        String realIMAPFolder = environmentSubstitute( meta.getIMAPFolder() );
        // return folders list including sub folders if necessary
        data.folders = getFolders( realIMAPFolder );
      }
    } catch ( Exception e ) {
      logError( BaseMessages.getString( PKG, "MailInput.Error.OpeningConnection", e.getMessage() ) );
      setErrors( 1 );
      stopAll();
    }
    data.nrFields = meta.getInputFields() != null ? meta.getInputFields().length : 0;
    return true;
}
/** For POP3 with no row limit, init() must fall back to the POP3 "first mails" count (3), not the IMAP one (2). */
@Test
public void testInitSetGetFirstForPOP3() {
    MailInput mailInput = new MailInput(
        mockHelper.stepMeta, mockHelper.stepDataInterface, 0, mockHelper.transMeta, mockHelper.trans);
    MailInputData stepData = new MailInputData();
    MailInputMeta metaMock = mock(MailInputMeta.class);
    when(metaMock.isDynamicFolder()).thenReturn(false);
    when(metaMock.getProtocol()).thenReturn(MailConnectionMeta.PROTOCOL_STRING_POP3);
    when(metaMock.getFirstIMAPMails()).thenReturn("2");
    when(metaMock.getFirstMails()).thenReturn("3");

    mailInput.init(metaMock, stepData);

    Assert.assertEquals("Row Limit is set up to 3 rows.", 3, stepData.rowlimit);
}
/**
 * Drives one fetch cycle: builds fetch requests for the fetchable partitions
 * and hands them to the shared send loop, wiring success/failure handling back
 * into this instance.
 *
 * @param currentTimeMs current time in milliseconds (not used in this body;
 *                      part of the poller contract)
 * @return the poll result produced by pollInternal
 */
@Override
public PollResult poll(long currentTimeMs) {
    return pollInternal(
            prepareFetchRequests(),
            this::handleFetchSuccess,
            this::handleFetchFailure
    );
}
/**
 * READ_COMMITTED consumption over interleaved transactions from two producers:
 * only records from committed transactions (3 of them) may be returned;
 * records covered by the aborted-transaction markers must be filtered out.
 */
@Test
public void testReadCommittedWithCommittedAndAbortedTransactions() {
    buildFetcher(OffsetResetStrategy.EARLIEST, new ByteArrayDeserializer(), new ByteArrayDeserializer(),
        Integer.MAX_VALUE, IsolationLevel.READ_COMMITTED);
    ByteBuffer buffer = ByteBuffer.allocate(1024);
    List<FetchResponseData.AbortedTransaction> abortedTransactions = new ArrayList<>();
    long pid1 = 1L;
    long pid2 = 2L;
    // Appends for producer 1 (eventually committed)
    appendTransactionalRecords(buffer, pid1, 0L,
        new SimpleRecord("commit1-1".getBytes(), "value".getBytes()),
        new SimpleRecord("commit1-2".getBytes(), "value".getBytes()));
    // Appends for producer 2 (eventually aborted)
    appendTransactionalRecords(buffer, pid2, 2L,
        new SimpleRecord("abort2-1".getBytes(), "value".getBytes()));
    // commit producer 1
    commitTransaction(buffer, pid1, 3L);
    // append more for producer 2 (eventually aborted)
    appendTransactionalRecords(buffer, pid2, 4L,
        new SimpleRecord("abort2-2".getBytes(), "value".getBytes()));
    // abort producer 2
    abortTransaction(buffer, pid2, 5L);
    abortedTransactions.add(new FetchResponseData.AbortedTransaction().setProducerId(pid2).setFirstOffset(2L));
    // New transaction for producer 1 (eventually aborted)
    appendTransactionalRecords(buffer, pid1, 6L,
        new SimpleRecord("abort1-1".getBytes(), "value".getBytes()));
    // New transaction for producer 2 (eventually committed)
    appendTransactionalRecords(buffer, pid2, 7L,
        new SimpleRecord("commit2-1".getBytes(), "value".getBytes()));
    // Add messages for producer 1 (eventually aborted)
    appendTransactionalRecords(buffer, pid1, 8L,
        new SimpleRecord("abort1-2".getBytes(), "value".getBytes()));
    // abort producer 1
    abortTransaction(buffer, pid1, 9L);
    abortedTransactions.add(new FetchResponseData.AbortedTransaction().setProducerId(1).setFirstOffset(6));
    // commit producer 2
    commitTransaction(buffer, pid2, 10L);
    buffer.flip();
    MemoryRecords records = MemoryRecords.readableRecords(buffer);
    assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    // normal fetch
    assertEquals(1, sendFetches());
    assertFalse(fetcher.hasCompletedFetches());
    client.prepareResponse(fullFetchResponseWithAbortedTransactions(records, abortedTransactions, Errors.NONE, 100L, 100L, 0));
    networkClientDelegate.poll(time.timer(0));
    assertTrue(fetcher.hasCompletedFetches());
    Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> fetchedRecords = fetchRecords();
    assertTrue(fetchedRecords.containsKey(tp0));
    // There are only 3 committed records
    List<ConsumerRecord<byte[], byte[]>> fetchedConsumerRecords = fetchedRecords.get(tp0);
    Set<String> fetchedKeys = new HashSet<>();
    for (ConsumerRecord<byte[], byte[]> consumerRecord : fetchedConsumerRecords) {
        fetchedKeys.add(new String(consumerRecord.key(), StandardCharsets.UTF_8));
    }
    assertEquals(mkSet("commit1-1", "commit1-2", "commit2-1"), fetchedKeys);
}
/**
 * Loads the column metadata of a SQL mapping by querying the information schema.
 * The mapping name is passed as a bound parameter, so it is quoted/escaped by the
 * SQL engine rather than interpolated into the query text.
 *
 * @param mapping name of the mapping to describe
 * @return one {@link SqlColumnMetadata} per column, in ordinal order
 */
public List<SqlColumnMetadata> loadColumnMetadataFromMapping(String mapping) {
    String query = "SELECT * FROM information_schema.columns WHERE table_name = ? ORDER BY ordinal_position ASC";
    try (SqlResult result = sqlService.execute(query, mapping)) {
        return StreamSupport
                .stream(result.spliterator(), false)
                .map(row -> {
                    String name = row.getObject("column_name");
                    // data_type values contain spaces (e.g. "TIMESTAMP WITH TIME ZONE") while the
                    // SqlColumnType enum constants use underscores. replace() performs the literal
                    // substitution without replaceAll()'s needless regex compilation.
                    String typeString = ((String) row.getObject("data_type")).replace(" ", "_");
                    SqlColumnType type = SqlColumnType.valueOf(typeString);
                    boolean isNullable = Boolean.parseBoolean(row.getObject("is_nullable"));
                    return new SqlColumnMetadata(name, type, isNullable);
                })
                .collect(Collectors.toList());
    }
}
/** The mapping name must be passed as a bound query parameter, never concatenated into the SQL text. */
@Test
public void when_loadColumnMetadataFromMapping_then_quoteMappingName() {
    final String expectedQuery =
        "SELECT * FROM information_schema.columns WHERE table_name = ? ORDER BY ordinal_position ASC";

    mappingHelper.loadColumnMetadataFromMapping("myMapping");

    verify(sqlService).execute(expectedQuery, "myMapping");
}
/**
 * Imports the given photos by delegating to the upload session's batching
 * helper, with {@code importPhotoBatch} handling each batch.
 *
 * @param photos        photos to import
 * @param gPhotosUpload upload session that performs the batching
 * @return the value produced by uploadItemsViaBatching (presumably the number
 *         of imported items — confirm against GPhotosUpload)
 * @throws Exception propagated from the upload
 */
long importPhotos( Collection<PhotoModel> photos, GPhotosUpload gPhotosUpload) throws Exception {
    return gPhotosUpload.uploadItemsViaBatching( photos, this::importPhotoBatch);
}
/**
 * Importing a photo whose bytes live in the temp job store must stream the data
 * from the store, cache the resulting media key, and clean the temp data up
 * exactly once.
 */
@Test
public void importPhotoInTempStore() throws Exception {
    PhotoModel photoModel = new PhotoModel(
        PHOTO_TITLE, IMG_URI, PHOTO_DESCRIPTION, JPEG_MEDIA_TYPE, "oldPhotoID1", OLD_ALBUM_ID, true);
    Mockito.when(googlePhotosInterface.uploadMediaContent(any(), eq(null))).thenReturn("token1");
    PhotosLibraryClient photosLibraryClient = mock(PhotosLibraryClient.class);
    // Job store serves the photo bytes and accepts the cleanup call.
    JobStore jobStore = mock(LocalJobStore.class);
    Mockito.when(jobStore.getStream(any(), any()))
        .thenReturn(
            new TemporaryPerJobDataStore.InputStreamWrapper(
                new ByteArrayInputStream("TestingBytes".getBytes())));
    Mockito.doNothing().when(jobStore).removeData(any(), anyString());
    ConnectionProvider connectionProvider = new ConnectionProvider(jobStore);
    GoogleMediaImporter googleMediaImporter =
        new GoogleMediaImporter(
            null, /*credentialFactory*/
            jobStore,
            null, /*jsonFactory*/
            new HashMap<>(), /*photosInterfacesMap*/
            new HashMap<>(), /*photosLibraryClientMap*/
            appCredentials,
            googlePhotosInterface,
            connectionProvider,
            monitor,
            1.0 /*writesPerSecond*/);
    // Batch-create responds OK for the uploaded token.
    BatchMediaItemResponse batchMediaItemResponse =
        new BatchMediaItemResponse(
            new NewMediaItemResult[]{buildMediaItemResult("token1", Code.OK_VALUE)});
    Mockito.when(googlePhotosInterface.createPhotos(any(NewMediaItemUpload.class)))
        .thenReturn(batchMediaItemResponse);
    UUID jobId = UUID.randomUUID();

    googleMediaImporter.importPhotos(
        Lists.newArrayList(photoModel),
        new GPhotosUpload(jobId, executor, mock(TokensAndUrlAuthData.class)));

    assertTrue(executor.isKeyCached(String.format("%s-%s", OLD_ALBUM_ID, "oldPhotoID1")));
    Mockito.verify(jobStore, Mockito.times(1)).removeData(any(), anyString());
    Mockito.verify(jobStore, Mockito.times(1)).getStream(any(), anyString());
}
/**
 * Parses the expression and returns the referenced variables that are flagged
 * by the validator but absent from the whitelist.
 *
 * @param expr          expression source to validate
 * @param whitelistVars variable names that are always allowed
 * @return the offending variable names (empty when the expression is clean)
 * @throws Exception on length violation or parse failure
 */
public Set<String> validate(String expr, Set<String> whitelistVars) throws Exception {
    checkExprLength(expr);
    // Decode with an explicit charset: the no-arg getBytes() depends on the
    // platform default and can corrupt non-ASCII expressions on some systems.
    selParser.ReInit(new ByteArrayInputStream(expr.getBytes(java.nio.charset.StandardCharsets.UTF_8)));
    ASTExecute n = selParser.Execute();
    Map<String, Boolean> vars = new HashMap<>();
    n.jjtAccept(validator, vars);
    Set<String> res = new HashSet<>();
    for (Map.Entry<String, Boolean> entry : vars.entrySet()) {
        // Keep only variables the validator marked true and the whitelist doesn't cover.
        if (entry.getValue() && !whitelistVars.contains(entry.getKey())) {
            res.add(entry.getKey());
        }
    }
    return res;
}
/**
 * An over-long expression must be rejected by checkExprLength before parsing.
 * NOTE(review): presumably t1 is configured with a max length shorter than this
 * expression — confirm against the fixture setup.
 */
@Test(expected = IllegalArgumentException.class)
public void testValidateExpressionTooLong() throws Exception {
    t1.validate("x.IsInvalidExpression();", new HashSet<>());
}
/**
 * Derives the mutable-memory limit from the total buffer size: seven eighths
 * of {@code bufferSize}, using integer arithmetic (rounds toward zero).
 *
 * @param bufferSize total buffer size in bytes
 * @return the mutable limit in bytes
 */
static long calculateForStMutableLimit(long bufferSize) {
    final long sevenEighths = (bufferSize * 7) / 8;
    return sevenEighths;
}
/** The mutable limit must be exactly 7/8 of the buffer size. */
@Test
public void testCalculateForStMutableLimit() {
    final long bufferSize = 64 * 1024 * 1024;
    // 64 MiB is divisible by 8, so 7/8 of it is exact.
    final long expectedLimit = bufferSize / 8 * 7;
    assertThat(ForStMemoryControllerUtils.calculateForStMutableLimit(bufferSize), is(expectedLimit));
}
/**
 * Asynchronously pulls a single message from a remote broker.
 * Resolves the broker address (refreshing the route from the name server once
 * on a miss) and issues a one-message pull. The returned triple carries:
 * left = the message (or null), middle = an error description, right = whether
 * the caller should retry.
 *
 * @param topic      topic to read from
 * @param offset     queue offset of the message
 * @param queueId    queue id within the topic
 * @param brokerName logical broker name to resolve
 */
protected CompletableFuture<Triple<MessageExt, String, Boolean>> getMessageFromRemoteAsync(String topic, long offset, int queueId, String brokerName) {
    try {
        String brokerAddr = this.brokerController.getTopicRouteInfoManager().findBrokerAddressInSubscribe(brokerName, MixAll.MASTER_ID, false);
        if (null == brokerAddr) {
            // Address miss: refresh the route once from the name server and retry the lookup.
            this.brokerController.getTopicRouteInfoManager().updateTopicRouteInfoFromNameServer(topic, true, false);
            brokerAddr = this.brokerController.getTopicRouteInfoManager().findBrokerAddressInSubscribe(brokerName, MixAll.MASTER_ID, false);
            if (null == brokerAddr) {
                LOG.warn("can't find broker address for topic {}, {}", topic, brokerName);
                return CompletableFuture.completedFuture(Triple.of(null, "brokerAddress not found", true)); // maybe offline temporarily, so need retry
            }
        }
        return this.brokerController.getBrokerOuterAPI()
            .pullMessageFromSpecificBrokerAsync(brokerName, brokerAddr,
                this.innerConsumerGroupName, topic, queueId, offset, 1, DEFAULT_PULL_TIMEOUT_MILLIS)
            .thenApply(pullResult -> {
                // Success only when the pull found at least one message; otherwise
                // propagate the middle/right diagnostics from the pull result.
                if (pullResult.getLeft() != null
                    && PullStatus.FOUND.equals(pullResult.getLeft().getPullStatus())
                    && CollectionUtils.isNotEmpty(pullResult.getLeft().getMsgFoundList())) {
                    return Triple.of(pullResult.getLeft().getMsgFoundList().get(0), "", false);
                }
                return Triple.of(null, pullResult.getMiddle(), pullResult.getRight());
            });
    } catch (Exception e) {
        LOG.error("Get message from remote failed. {}, {}, {}, {}", topic, offset, queueId, brokerName, e);
    }
    return CompletableFuture.completedFuture(Triple.of(null, "Get message from remote failed", true)); // need retry
}
@Test public void getMessageFromRemoteAsyncTest_exception_caught() throws Exception { when(brokerOuterAPI.pullMessageFromSpecificBrokerAsync(anyString(), anyString(), anyString(), anyString(), anyInt(), anyLong(), anyInt(), anyLong())) .thenThrow(new RemotingException("mock remoting exception")); Triple<MessageExt, String, Boolean> rst = escapeBridge.getMessageFromRemoteAsync(TEST_TOPIC, 1, DEFAULT_QUEUE_ID, BROKER_NAME).join(); Assert.assertNull(rst.getLeft()); Assert.assertEquals("Get message from remote failed", rst.getMiddle()); Assert.assertTrue(rst.getRight()); // need retry }
/**
 * Parses the return-entity query parameter, accepting "true"/"false" in any case.
 *
 * @param value raw parameter value
 * @return the parsed boolean
 * @throws RestLiServiceException with HTTP 400 for any other value
 */
public static boolean parseReturnEntityParameter(final String value) {
    // Locale.ROOT keeps the lower-casing locale-independent; the default locale
    // mis-handles e.g. the Turkish dotted/dotless 'i', so "TRUE" could fail to match.
    switch (value.toLowerCase(java.util.Locale.ROOT)) {
        case "true":
            return true;
        case "false":
            return false;
        default:
            throw new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST,
                String.format("Invalid \"%s\" parameter: %s", RestConstants.RETURN_ENTITY_PARAM, value));
    }
}
/**
 * Data-driven check of parseReturnEntityParameter: valid inputs must yield the
 * expected boolean; invalid inputs must raise a 400 RestLiServiceException whose
 * message names the parameter and the offending value.
 */
@Test(dataProvider = "parseReturnEntityParameterData")
public void testParseReturnEntityParameter(String paramValue, Boolean expectedValue, boolean expectException) {
    try {
        boolean value = ArgumentUtils.parseReturnEntityParameter(paramValue);
        if (expectException) {
            Assert.fail("Expected \"" + RestConstants.RETURN_ENTITY_PARAM + "\" parameter parse to fail for value: " + paramValue);
        }
        Assert.assertEquals(value, (boolean) expectedValue);
    } catch (RestLiServiceException e) {
        if (!expectException) {
            Assert.fail("Expected \"" + RestConstants.RETURN_ENTITY_PARAM + "\" parameter parse to succeed for value: " + paramValue);
        }
        // Invalid values must surface as HTTP 400 with a descriptive message.
        Assert.assertEquals(e.getStatus(), HttpStatus.S_400_BAD_REQUEST);
        Assert.assertTrue(e.getMessage().contains(String.format("Invalid \"%s\" parameter: %s", RestConstants.RETURN_ENTITY_PARAM, paramValue)));
    }
}
/**
 * Decides whether an identifier must be quoted: quotes are required unless the
 * identifier is valid AND already entirely upper case.
 *
 * @param identifier the identifier to inspect
 * @return true when the identifier needs quoting
 */
public static boolean needsQuotes(final String identifier) {
    // De Morgan form of !(valid && upperCase).
    return !isValid(identifier) || !upperCase(identifier);
}
@Test public void shouldNeedBackQuotes() { // Given: final String[] identifiers = new String[]{ "SELECT", // reserved word "@ID", // invalid character "FOO.BAR", // with a dot "foo" // lower case }; // Then: for (final String identifier : identifiers) { assertThat("Expected quotes for " + identifier, IdentifierUtil.needsQuotes(identifier)); } }
/**
 * Reads the markdown resource, walks its AST with a DocumentVisitor configured
 * by {@code config}, and returns the extracted documents.
 *
 * @return the documents produced by the visitor
 * @throws RuntimeException wrapping any IOException from the resource
 */
@Override
public List<Document> get() {
    try (var input = markdownResource.getInputStream()) {
        // Decode explicitly as UTF-8: the single-arg InputStreamReader uses the
        // platform default charset, which breaks non-ASCII markdown on some systems.
        Node node = parser.parseReader(
            new InputStreamReader(input, java.nio.charset.StandardCharsets.UTF_8));
        DocumentVisitor documentVisitor = new DocumentVisitor(config);
        node.accept(documentVisitor);
        return documentVisitor.getDocuments();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
/** Metadata supplied via the reader config must be attached verbatim to every produced document. */
@Test
void testWithAdditionalMetadata() {
    MarkdownDocumentReaderConfig readerConfig = MarkdownDocumentReaderConfig.builder()
        .withAdditionalMetadata("service", "some-service-name")
        .withAdditionalMetadata("env", "prod")
        .build();
    MarkdownDocumentReader markdownReader = new MarkdownDocumentReader("classpath:/simple.md", readerConfig);

    List<Document> documents = markdownReader.get();

    assertThat(documents).hasSize(1);
    Document first = documents.get(0);
    assertThat(first.getMetadata()).isEqualTo(Map.of("service", "some-service-name", "env", "prod"));
    assertThat(first.getContent()).startsWith("Lorem ipsum dolor sit amet, consectetur adipiscing elit.");
}
/**
 * Determines the accumulator coder by asking the registry to infer it from the
 * wrapped AggregateFn's concrete class, binding the input type variable to the
 * supplied input coder.
 *
 * @throws CannotProvideCoderException when the registry cannot infer a coder
 */
@Override
public Coder<AccumT> getAccumulatorCoder(CoderRegistry registry, Coder<InputT> inputCoder)
    throws CannotProvideCoderException {
  // Infer coder based on underlying AggregateFn instance.
  return registry.getCoder(
      getAggregateFn().getClass(),
      AggregateFn.class,
      ImmutableMap.<Type, Coder<?>>of(getInputTVariable(), inputCoder),
      getAccumTVariable());
}
/** Even with wildcard type parameters, the accumulator coder must be inferred as VarLongCoder for Sum over Long. */
@Test
public void getAccumulatorCoderInfersCoderForWildcardTypeParameter() throws CannotProvideCoderException {
    LazyAggregateCombineFn<Long, ?, ?> sumCombiner = new LazyAggregateCombineFn<>(new Sum());

    Coder<?> accumCoder = sumCombiner.getAccumulatorCoder(CoderRegistry.createDefault(), VarLongCoder.of());

    assertThat(accumCoder, instanceOf(VarLongCoder.class));
}
/**
 * Applies a batch of changelog records to the given registered store and
 * advances its snapshot offset to the batch's last record. When the changelog
 * end lag is known, also updates the store's end offset.
 *
 * @param storeMetadata  metadata of the store being restored; must be registered here
 * @param restoreRecords changelog records to replay (may be empty, then no-op)
 * @param optionalLag    lag to the changelog end, empty when not yet known
 * @throws IllegalStateException   when the store is not registered with this manager
 * @throws ProcessorStateException when the store's restore callback fails
 */
void restore(final StateStoreMetadata storeMetadata,
             final List<ConsumerRecord<byte[], byte[]>> restoreRecords,
             final OptionalLong optionalLag) {
    if (!stores.containsValue(storeMetadata)) {
        throw new IllegalStateException("Restoring " + storeMetadata + " which is not registered in this state manager, "
            + "this should not happen.");
    }
    if (!restoreRecords.isEmpty()) {
        // restore states from changelog records and update the snapshot offset as the batch end record's offset
        final Long batchEndOffset = restoreRecords.get(restoreRecords.size() - 1).offset();
        final RecordBatchingStateRestoreCallback restoreCallback = adapt(storeMetadata.restoreCallback);
        // Convert each record (e.g. timestamp/format adaptation) before handing the batch to the store.
        final List<ConsumerRecord<byte[], byte[]>> convertedRecords = restoreRecords.stream()
            .map(storeMetadata.recordConverter::convert)
            .collect(Collectors.toList());
        try {
            restoreCallback.restoreBatch(convertedRecords);
        } catch (final RuntimeException e) {
            throw new ProcessorStateException(
                format("%sException caught while trying to restore state from %s", logPrefix, storeMetadata.changelogPartition),
                e
            );
        }
        storeMetadata.setOffset(batchEndOffset);
        // If null means the lag for this partition is not known yet
        if (optionalLag.isPresent()) {
            storeMetadata.setEndOffset(optionalLag.getAsLong() + batchEndOffset);
        }
    }
}
/** Restoring a store that was never registered with the manager must be rejected. */
@Test
public void shouldThrowIfRestoringUnregisteredStore() {
    final ProcessorStateManager manager = getStateManager(Task.TaskType.ACTIVE);

    assertThrows(
        IllegalStateException.class,
        () -> manager.restore(storeMetadata, Collections.emptyList(), OptionalLong.of(2L)));
}
/**
 * Parses "key=value" pairs separated by commas and/or whitespace into an
 * unmodifiable map. Keys and values are trimmed; only the first '=' in a token
 * splits key from value, so values may themselves contain '='.
 *
 * @param str the string to parse; {@code null} yields an empty map
 * @return an unmodifiable map of the parsed entries
 * @throws RuntimeException when a token contains no '='
 */
public static Map<String, String> parseMap(String str) {
    if (str == null) {
        return Collections.emptyMap();
    }
    StringTokenizer tok = new StringTokenizer(str, ", \t\n\r");
    Map<String, String> map = new HashMap<>();
    while (tok.hasMoreTokens()) {
        String record = tok.nextToken();
        int sep = record.indexOf('=');
        if (sep == -1) {
            // Name the offending token so the failure is diagnosable.
            throw new RuntimeException(
                "Failed to parse Map from String: missing '=' in token \"" + record + "\"");
        }
        map.put(record.substring(0, sep).trim(), record.substring(sep + 1).trim());
    }
    return Collections.unmodifiableMap(map);
}
/** parseMap(null) must return an empty map, never throw or return null. */
@Test
public void testParseMapNull() {
    Map<String, String> m = parseMap(null);
    assertThat(m, aMapWithSize(0));
}
/**
 * Routes clicks on the About screen's links: legal stuff navigates to the
 * licenses fragment; privacy/website/rating open external VIEW intents;
 * share delegates to shareAppDetails(). Unknown view ids are a programming
 * error and throw.
 */
@Override
public void onClick(View v) {
    final int viewId = v.getId();
    if (viewId == R.id.about_legal_stuff_link) {
        Navigation.findNavController(requireView())
            .navigate(
                AboutAnySoftKeyboardFragmentDirections
                    .actionAboutAnySoftKeyboardFragmentToAdditionalSoftwareLicensesFragment());
    } else if (viewId == R.id.about_privacy_link) {
        startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse(getString(R.string.privacy_policy))));
    } else if (viewId == R.id.about_web_site_link) {
        startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse(getString(R.string.main_site_url))));
    } else if (viewId == R.id.share_app_details) {
        shareAppDetails();
    } else if (viewId == R.id.rate_app_in_store) {
        startActivity(
            new Intent(
                Intent.ACTION_VIEW,
                Uri.parse(getString(R.string.rate_app_in_store_url, BuildConfig.APPLICATION_ID))));
    } else {
        throw new IllegalArgumentException(
            "Failed to handle " + v.getId() + " in AboutAnySoftKeyboardFragment");
    }
}
/** Clicking the legal-stuff link must navigate to the additional-licenses fragment. */
@Test
public void testAdditionalLicenses() {
    AboutAnySoftKeyboardFragment aboutFragment = startFragment();
    TextView legalLink = aboutFragment.getView().findViewById(R.id.about_legal_stuff_link);
    Assert.assertNotNull(legalLink);

    Shadows.shadowOf(legalLink).getOnClickListener().onClick(legalLink);
    ensureAllScheduledJobsAreDone();

    Fragment shownFragment = getCurrentFragment();
    Assert.assertNotNull(shownFragment);
    Assert.assertTrue(
        shownFragment instanceof AboutAnySoftKeyboardFragment.AdditionalSoftwareLicensesFragment);
}
/**
 * Submits the task to the delegate executor, first counting the submission and
 * wrapping the runnable so its execution is instrumented. The submitted meter
 * is marked before wrapping, matching the original instrumentation order.
 *
 * @param runnable the task to run
 * @return the delegate's future for the task
 */
@Override
public Future<?> submit(Runnable runnable) {
    submitted.mark();
    final Runnable instrumented = new InstrumentedRunnable(runnable);
    return delegate.submit(instrumented);
}
/**
 * Submitting a callable must drive the executor metrics through their full
 * lifecycle: submitted/running/idle while the task executes, then
 * completed/duration once it finishes.
 */
@Test
public void reportsTasksInformationForCallable() throws Exception {
    // All metrics start at zero before any submission.
    assertThat(submitted.getCount()).isEqualTo(0);
    assertThat(running.getCount()).isEqualTo(0);
    assertThat(completed.getCount()).isEqualTo(0);
    assertThat(duration.getCount()).isEqualTo(0);
    assertThat(idle.getCount()).isEqualTo(0);
    Callable<Void> callable = () -> {
        // Observed from inside the task: submitted/running/idle have ticked,
        // but completion metrics have not.
        assertThat(submitted.getCount()).isEqualTo(1);
        assertThat(running.getCount()).isEqualTo(1);
        assertThat(completed.getCount()).isEqualTo(0);
        assertThat(duration.getCount()).isEqualTo(0);
        assertThat(idle.getCount()).isEqualTo(1);
        return null;
    };
    Future<?> theFuture = instrumentedExecutorService.submit(callable);
    theFuture.get();
    // After completion: running has drained, completed/duration recorded once.
    assertThat(submitted.getCount()).isEqualTo(1);
    assertThat(running.getCount()).isEqualTo(0);
    assertThat(completed.getCount()).isEqualTo(1);
    assertThat(duration.getCount()).isEqualTo(1);
    assertThat(duration.getSnapshot().size()).isEqualTo(1);
    assertThat(idle.getCount()).isEqualTo(1);
    assertThat(idle.getSnapshot().size()).isEqualTo(1);
}
public static int utf8Length(String string) { CharacterIterator iter = new StringCharacterIterator(string); char ch = iter.first(); int size = 0; while (ch != CharacterIterator.DONE) { if ((ch >= 0xD800) && (ch < 0xDC00)) { // surrogate pair? char trail = iter.next(); if ((trail > 0xDBFF) && (trail < 0xE000)) { // valid pair size += 4; } else { // invalid pair size += 3; iter.previous(); // rewind one } } else if (ch < 0x80) { size++; } else if (ch < 0x800) { size += 2; } else { // ch < 0x10000, that is, the largest char value size += 3; } ch = iter.next(); } return size; }
// Boundary checks for utf8Length(): 1 byte through 0x7F, 2 bytes from 0x80
// up to at least 0xFE.
@Test
public void testUtf8Length() {
  assertEquals("testUtf8Length1 error !!!",
      1, Text.utf8Length(new String(new char[]{(char) 1})));
  assertEquals("testUtf8Length127 error !!!",
      1, Text.utf8Length(new String(new char[]{(char) 127})));
  assertEquals("testUtf8Length128 error !!!",
      2, Text.utf8Length(new String(new char[]{(char) 128})));
  assertEquals("testUtf8Length193 error !!!",
      2, Text.utf8Length(new String(new char[]{(char) 193})));
  assertEquals("testUtf8Length225 error !!!",
      2, Text.utf8Length(new String(new char[]{(char) 225})));
  assertEquals("testUtf8Length254 error !!!",
      2, Text.utf8Length(new String(new char[]{(char)254})));
}
/**
 * Returns an immutable snapshot of every registered rule.
 * Empty (never null) when nothing has been registered yet.
 */
@Override
public Collection<Rule> getAll() {
  return List.copyOf(rulesByUuid.values());
}
// getAll() must be safe to call before any registration: empty result, and
// immutable (mutation attempts must throw UnsupportedOperationException).
@Test
public void getAll_returns_immutable_empty_collection_when_register_was_never_called() {
  Collection<Rule> all = underTest.getAll();
  assertThat(all).isEmpty();
  assertThatThrownBy(() -> all.add(SOME_RULE))
      .isInstanceOf(UnsupportedOperationException.class);
}
/**
 * Returns the values of all entries whose key starts with the given prefix,
 * querying the radix tree that matches the prefix's IP version.
 *
 * <p>NOTE(review): returns {@code null} (not an empty list) when the prefix is
 * neither IPv4 nor IPv6 — callers must null-check; consider returning an
 * empty list instead.
 */
@Override
public List<V> getValuesForAddressesStartingWith(IpPrefix prefix) {
  String prefixString = getPrefixString(prefix);
  if (prefix.isIp4()) {
    return Lists.newArrayList(ipv4Tree.getValuesForKeysStartingWith(prefixString));
  }
  if (prefix.isIp6()) {
    return Lists.newArrayList(ipv6Tree.getValuesForKeysStartingWith(prefixString));
  }
  return null;
}
// Exercises getValuesForAddressesStartingWith() for both IPv4 and IPv6:
// broader prefixes must report the values of every nested prefix.
@Test
public void testGetValuesForAddressesStartingWith() {
  // IPv4 lookups.
  assertTrue("IPv4 prefix has not been inserted correctly",
      radixTree.getValuesForAddressesStartingWith(ipv4PrefixKey1).contains(1));
  assertTrue("IPv4 prefix has not been inserted correctly",
      radixTree.getValuesForAddressesStartingWith(ipv4PrefixKey3).contains(1));
  assertTrue("IPv4 prefix has not been inserted correctly",
      radixTree.getValuesForAddressesStartingWith(ipv4PrefixKey3).contains(2));
  assertTrue("IPv4 prefix has not been inserted correctly",
      radixTree.getValuesForAddressesStartingWith(ipv4PrefixKey3).contains(3));
  assertTrue("IPv4 prefix has not been inserted correctly",
      radixTree.getValuesForAddressesStartingWith(ipv4PrefixKey4).contains(1));
  assertTrue("IPv4 prefix has not been inserted correctly",
      radixTree.getValuesForAddressesStartingWith(ipv4PrefixKey4).contains(2));
  assertTrue("IPv4 prefix has not been inserted correctly",
      radixTree.getValuesForAddressesStartingWith(ipv4PrefixKey4).contains(3));
  assertTrue("IPv4 prefix has not been inserted correctly",
      radixTree.getValuesForAddressesStartingWith(ipv4PrefixKey4).contains(4));
  // IPv6 lookups.
  assertTrue("IPv6 prefix has not been inserted correctly",
      radixTree.getValuesForAddressesStartingWith(ipv6PrefixKey1).contains(11));
  assertTrue("IPv6 prefix has not been inserted correctly",
      radixTree.getValuesForAddressesStartingWith(ipv6PrefixKey3).contains(11));
  assertTrue("IPv6 prefix has not been inserted correctly",
      radixTree.getValuesForAddressesStartingWith(ipv6PrefixKey3).contains(12));
  assertTrue("IPv6 prefix has not been inserted correctly",
      radixTree.getValuesForAddressesStartingWith(ipv6PrefixKey3).contains(13));
  assertTrue("IPv6 prefix has not been inserted correctly",
      radixTree.getValuesForAddressesStartingWith(ipv6PrefixKey6).contains(11));
  assertTrue("IPv6 prefix has not been inserted correctly",
      radixTree.getValuesForAddressesStartingWith(ipv6PrefixKey6).contains(12));
  assertTrue("IPv6 prefix has not been inserted correctly",
      radixTree.getValuesForAddressesStartingWith(ipv6PrefixKey6).contains(13));
  assertTrue("IPv6 prefix has not been inserted correctly",
      radixTree.getValuesForAddressesStartingWith(ipv6PrefixKey6).contains(14));
  assertTrue("IPv6 prefix has not been inserted correctly",
      radixTree.getValuesForAddressesStartingWith(ipv6PrefixKey6).contains(15));
  assertTrue("IPv6 prefix has not been inserted correctly",
      radixTree.getValuesForAddressesStartingWith(ipv6PrefixKey6).contains(16));
}
/**
 * Convenience overload: resolves the step by name and delegates to the
 * StepMeta-based getPrevInfoFields().
 */
public RowMetaInterface getPrevInfoFields( String stepname ) throws KettleStepException {
  return getPrevInfoFields( findStep( stepname ) );
}
// Regression test (PDI-14910): getPrevInfoFields() must return only the fields
// of the step wired as an *info* step (dg2), not those of the main data path.
@Test
public void testGetPrevInfoFields() throws KettleStepException {
  // Main-path data grid with two fields.
  DataGridMeta dgm1 = new DataGridMeta();
  dgm1.setFieldName( new String[] { "id", "colA" } );
  dgm1.allocate( 2 );
  dgm1.setFieldType( new String[] {
      ValueMetaFactory.getValueMetaName( ValueMetaInterface.TYPE_INTEGER ),
      ValueMetaFactory.getValueMetaName( ValueMetaInterface.TYPE_STRING ) } );
  List<List<String>> dgm1Data = new ArrayList<>();
  dgm1Data.add( asList( "1", "A" ) );
  dgm1Data.add( asList( "2", "B" ) );
  dgm1.setDataLines( dgm1Data );
  // Info-path data grid with a single field.
  DataGridMeta dgm2 = new DataGridMeta();
  dgm2.allocate( 1 );
  dgm2.setFieldName( new String[] { "moreData" } );
  dgm2.setFieldType( new String[] {
      ValueMetaFactory.getValueMetaName( ValueMetaInterface.TYPE_STRING ) } );
  List<List<String>> dgm2Data = new ArrayList<>();
  dgm2Data.add( List.of( "Some Informational Data" ) );
  dgm2.setDataLines( dgm2Data );
  StepMeta dg1 = new StepMeta( "input1", dgm1 );
  StepMeta dg2 = new StepMeta( "input2", dgm2 );
  // Minimal UDJC that declares dg2 as its info step.
  final String UDJC_METHOD =
      "public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { return "
      + "false; }";
  UserDefinedJavaClassMeta udjcMeta = new UserDefinedJavaClassMeta();
  udjcMeta.getInfoStepDefinitions().add(
      new InfoStepDefinition( dg2.getName(), dg2.getName(), dg2, "info_data" ) );
  udjcMeta.replaceDefinitions( singletonList(
      new UserDefinedJavaClassDef(
          UserDefinedJavaClassDef.ClassType.TRANSFORM_CLASS, "MainClass", UDJC_METHOD ) ) );
  StepMeta udjc = new StepMeta( "PDI-14910", udjcMeta );
  // Wire both grids into the UDJC step.
  TransHopMeta hop1 = new TransHopMeta( dg1, udjc, true );
  TransHopMeta hop2 = new TransHopMeta( dg2, udjc, true );
  transMeta.addStep( dg1 );
  transMeta.addStep( dg2 );
  transMeta.addStep( udjc );
  transMeta.addTransHop( hop1 );
  transMeta.addTransHop( hop2 );
  RowMetaInterface row;
  row = transMeta.getPrevInfoFields( udjc );
  // Only the info step's single field must be visible.
  assertNotNull( row );
  assertEquals( 1, row.size() );
  assertEquals( "moreData", row.getValueMeta( 0 ).getName() );
  assertEquals( ValueMetaInterface.TYPE_STRING, row.getValueMeta( 0 ).getType() );
}
/**
 * Static factory: wraps an already base64-encoded string.
 * Any validation (presumably including length limits — the constructor is not
 * visible here) is delegated to the Base64String constructor.
 */
public static Base64String wrap(final String base64String) {
  return new Base64String(base64String);
}
// A base64 payload one character over the allowed length must be rejected
// by wrap() with a RuntimeException.
@Test
public void testTooLongStringThrows() {
  assertThrows(RuntimeException.class, () -> Base64String.wrap(BASE64_1 + "m"));
}
/**
 * Creates a sandboxed copy of this registry; the sandbox wraps this instance
 * so speculative changes do not affect the live registry.
 */
@Override
public QueryRegistry createSandbox() {
  return new QueryRegistryImpl(this);
}
// With shared runtimes, stream-level overrides such as commit.interval.ms must
// be rejected when creating a query in a sandbox; the same create succeeds on
// the real registry.
@Test
public void shouldOnlyAllowServerLevelConfigsForDedicatedRuntimesSandbox() {
  // Given:
  when(config.getOverrides()).thenReturn(ImmutableMap.of("commit.interval.ms", 9));
  if (sharedRuntimes) {
    final Exception e = assertThrows(IllegalArgumentException.class,
        () -> givenCreate(registry.createSandbox(), "q1", "source", Optional.of("sink1"), CREATE_AS));
    // The offending config key must be named in the error.
    assertThat(e.getMessage(), containsString("commit.interval.ms"));
  }
  givenCreate(registry, "q1", "source", Optional.of("sink1"), CREATE_AS);
}
/**
 * Authenticates client requests by app id + HMAC signature.
 *
 * <p>Rejects with 400 when no app id can be extracted. When the app has
 * configured secrets, the request's timestamp must be within the allowed skew
 * (per checkTimestamp, stated as 1 minute) and the Authorization header must
 * match one of the available secrets; otherwise 401. Apps without secrets
 * pass through unauthenticated.
 */
@Override
public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain)
    throws IOException, ServletException {
  HttpServletRequest request = (HttpServletRequest) req;
  HttpServletResponse response = (HttpServletResponse) resp;
  String appId = accessKeyUtil.extractAppIdFromRequest(request);
  if (StringUtils.isBlank(appId)) {
    response.sendError(HttpServletResponse.SC_BAD_REQUEST, "InvalidAppId");
    return;
  }
  List<String> availableSecrets = accessKeyUtil.findAvailableSecret(appId);
  if (!CollectionUtils.isEmpty(availableSecrets)) {
    String timestamp = request.getHeader(Signature.HTTP_HEADER_TIMESTAMP);
    String authorization = request.getHeader(HttpHeaders.AUTHORIZATION);
    // check timestamp, valid within 1 minute
    if (!checkTimestamp(timestamp)) {
      logger.warn("Invalid timestamp. appId={},timestamp={}", appId, timestamp);
      response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "RequestTimeTooSkewed");
      return;
    }
    // check signature
    String uri = request.getRequestURI();
    String query = request.getQueryString();
    if (!checkAuthorization(authorization, availableSecrets, timestamp, uri, query)) {
      logger.warn("Invalid authorization. appId={},authorization={}", appId, authorization);
      response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Unauthorized");
      return;
    }
  }
  chain.doFilter(request, response);
}
// A timestamp more than one minute ahead of the server clock must be rejected
// with 401 RequestTimeTooSkewed and must never reach the filter chain.
@Test
public void testRequestTimeOneMinFasterThenCurrentTime() throws Exception {
  String appId = "someAppId";
  List<String> secrets = Lists.newArrayList("someSecret");
  // 61 seconds in the future: just past the allowed skew.
  String oneMinAfterTimestamp = Long.toString(System.currentTimeMillis() + 61 * 1000);
  when(accessKeyUtil.extractAppIdFromRequest(any())).thenReturn(appId);
  when(accessKeyUtil.findAvailableSecret(appId)).thenReturn(secrets);
  when(request.getHeader(Signature.HTTP_HEADER_TIMESTAMP)).thenReturn(oneMinAfterTimestamp);
  clientAuthenticationFilter.doFilter(request, response, filterChain);
  verify(response).sendError(HttpServletResponse.SC_UNAUTHORIZED, "RequestTimeTooSkewed");
  verify(filterChain, never()).doFilter(request, response);
}
/**
 * Aggregates cluster health from the application nodes only: each sub-check
 * runs against the filtered node set and the results are merged, starting
 * from GREEN.
 */
@Override
public Health check(Set<NodeHealth> nodeHealths) {
  // Only application nodes are relevant to this checker.
  Set<NodeHealth> appNodes = nodeHealths.stream()
    .filter(nodeHealth -> nodeHealth.getDetails().getType() == NodeDetails.Type.APPLICATION)
    .collect(Collectors.toSet());
  // Fold every sub-check result into a single health, GREEN being the identity.
  Health aggregated = Health.GREEN;
  for (AppNodeClusterHealthSubChecks subCheck : AppNodeClusterHealthSubChecks.values()) {
    aggregated = HealthReducer.merge(aggregated, subCheck.check(appNodes));
  }
  return aggregated;
}
// One YELLOW application node among GREEN ones must yield an overall YELLOW
// status with the matching cause message.
@Test
public void status_YELLOW_when_one_YELLOW_node_and_one_GREEN_application_node() {
  Set<NodeHealth> nodeHealths = nodeHealths(YELLOW, GREEN).collect(toSet());
  Health check = underTest.check(nodeHealths);
  assertThat(check)
      .forInput(nodeHealths)
      .hasStatus(Health.Status.YELLOW)
      .andCauses("At least one application node is YELLOW");
}
/**
 * Writes the buffer to the underlying channel unless this writer is closed.
 * An IOException flips the writer to closed and is logged, not rethrown, so
 * subsequent writes silently become no-ops.
 */
public void write(final ByteBuffer buffer) {
  if (isClosed.get()) {
    // Already closed: drop the buffer.
    return;
  }
  try {
    channel.write(buffer);
  } catch (IOException e) {
    // Flip to closed exactly once.
    isClosed.compareAndSet(false, true);
    LOG.error("write buffer Failed.", e);
  }
}
// The bytes passed to write() must reach the writer's output verbatim.
@Test
public void testWrite() {
  ByteBuffer byteBuffer = ByteBuffer.wrap(sendString.getBytes(StandardCharsets.UTF_8));
  // A read-only view proves write() does not need to mutate the buffer.
  writer.write(byteBuffer.asReadOnlyBuffer());
  String res = writer.output();
  Assertions.assertEquals(res, "hello, shenyu");
}
/**
 * Lists all Iceberg tables visible under the given scope by issuing
 * Snowflake's "SHOW ICEBERG TABLES" command at ACCOUNT, DATABASE or SCHEMA
 * level. SQLExceptions are translated to Iceberg exceptions and interruption
 * is surfaced as UncheckedInterruptedException. Every returned identifier is
 * verified to be of type TABLE.
 *
 * @throws IllegalArgumentException for any other scope type
 */
@Override
public List<SnowflakeIdentifier> listIcebergTables(SnowflakeIdentifier scope) {
  StringBuilder baseQuery = new StringBuilder("SHOW ICEBERG TABLES");
  String[] queryParams = null;
  switch (scope.type()) {
    case ROOT:
      // account-level listing
      baseQuery.append(" IN ACCOUNT");
      break;
    case DATABASE:
      // database-level listing
      baseQuery.append(" IN DATABASE IDENTIFIER(?)");
      queryParams = new String[] {scope.toIdentifierString()};
      break;
    case SCHEMA:
      // schema-level listing
      baseQuery.append(" IN SCHEMA IDENTIFIER(?)");
      queryParams = new String[] {scope.toIdentifierString()};
      break;
    default:
      throw new IllegalArgumentException(
          String.format("Unsupported scope type for listIcebergTables: %s", scope));
  }
  // Effectively-final copies for use inside the connection-pool lambda.
  final String finalQuery = baseQuery.toString();
  final String[] finalQueryParams = queryParams;
  List<SnowflakeIdentifier> tables;
  try {
    tables =
        connectionPool.run(
            conn ->
                queryHarness.query(conn, finalQuery, TABLE_RESULT_SET_HANDLER, finalQueryParams));
  } catch (SQLException e) {
    throw snowflakeExceptionToIcebergException(
        scope, e, String.format("Failed to list tables for scope '%s'", scope));
  } catch (InterruptedException e) {
    throw new UncheckedInterruptedException(
        e, "Interrupted while listing tables for scope '%s'", scope);
  }
  // Sanity check: the result-set handler must only produce TABLE identifiers.
  tables.forEach(
      table ->
          Preconditions.checkState(
              table.type() == SnowflakeIdentifier.Type.TABLE,
              "Expected TABLE, got identifier '%s' for scope '%s'",
              table,
              scope));
  return tables;
}
// Every "database not found" SQL error code must surface as a
// NoSuchNamespaceException that names the identifier and carries the original
// SQLException as both message fragment and cause.
@SuppressWarnings("unchecked")
@Test
public void testListIcebergTablesSQLExceptionAtDatabaseLevel()
    throws SQLException, InterruptedException {
  for (Integer errorCode : DATABASE_NOT_FOUND_ERROR_CODES) {
    Exception injectedException =
        new SQLException(
            String.format("SQL exception with Error Code %d", errorCode), "2000", errorCode, null);
    when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
    assertThatExceptionOfType(NoSuchNamespaceException.class)
        .isThrownBy(
            () -> snowflakeClient.listIcebergTables(SnowflakeIdentifier.ofDatabase("DB_1")))
        .withMessageContaining(
            String.format(
                "Identifier not found: 'DATABASE: 'DB_1''. Underlying exception: 'SQL exception with Error Code %d'",
                errorCode))
        .withCause(injectedException);
  }
}
/**
 * Builds an IPv4 prefix from a 32-bit integer address and a prefix length;
 * address parsing/validation is delegated to IpAddress.valueOf.
 */
public static IpPrefix valueOf(int address, int prefixLength) {
  return new IpPrefix(IpAddress.valueOf(address), prefixLength);
}
// A 3-byte array is too short for an IPv4 address, so valueOf must throw
// IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void testInvalidValueOfShortArrayIPv4() {
  IpPrefix ipPrefix;
  byte[] value;
  value = new byte[] {1, 2, 3};
  ipPrefix = IpPrefix.valueOf(IpAddress.Version.INET, value, 24);
}
/**
 * Deletes all stored versions of the given repository element older than
 * {@code beforeDate}, delegating to the file-id based overload.
 * Any failure is routed through processDeleteException.
 */
@Override
public void deleteVersionsBeforeDate( RepositoryElementInterface element, Date beforeDate )
    throws KettleException {
  try {
    Serializable fileId = element.getObjectId().getId();
    deleteVersionsBeforeDate( fileId, beforeDate );
  } catch ( Exception e ) {
    processDeleteException( e );
  }
}
// Only versions strictly older than the cutoff date may be deleted; version
// "2" (at/after the cutoff) must be left untouched.
@Test
public void deleteVersionsBeforeDate() throws KettleException {
  IUnifiedRepository mockRepo = mock( IUnifiedRepository.class );
  final HashMap<String, List<VersionSummary>> versionListMap = processVersionMap( mockRepo );
  UnifiedRepositoryPurgeService purgeService = new UnifiedRepositoryPurgeService( mockRepo );
  String fileId = "1";
  Date beforeDate = getDate( "01/02/2006" );
  purgeService.deleteVersionsBeforeDate( element1, beforeDate );
  verifyDateBeforeDeletion( versionListMap, mockRepo, fileId, beforeDate );
  verify( mockRepo, never() ).deleteFileAtVersion( eq( "2" ), anyString() );
}
/**
 * Parses a duration string into a {@link Duration}.
 *
 * <p>Accepted forms: the project's simple unit suffixes ("8ms", "8s", "8m",
 * "8h", "8d" — matched by the SIMPLE pattern), ISO-8601 strings accepted by
 * {@link Duration#parse} ("PT15M", "P2DT3H4M", ...), and a bare integer
 * interpreted as milliseconds. Blank input returns DEFAULT_DURATION.
 *
 * @throws UnsupportedOperationException when the string matches none of the
 *     supported forms
 */
public static Duration parse(String str) {
  if (StringUtils.isBlank(str)) {
    return DEFAULT_DURATION;
  }
  if (SIMPLE.matcher(str).matches()) {
    // "ms" must be checked before the single-letter units so that "8ms" is
    // not misread as minutes or seconds.
    if (str.contains(MILLIS_SECOND_UNIT)) {
      long value = doParse(MILLIS_SECOND_UNIT, str);
      return Duration.ofMillis(value);
    } else if (str.contains(DAY_UNIT)) {
      long value = doParse(DAY_UNIT, str);
      return Duration.ofDays(value);
    } else if (str.contains(HOUR_UNIT)) {
      long value = doParse(HOUR_UNIT, str);
      return Duration.ofHours(value);
    } else if (str.contains(MINUTE_UNIT)) {
      long value = doParse(MINUTE_UNIT, str);
      return Duration.ofMinutes(value);
    } else if (str.contains(SECOND_UNIT)) {
      long value = doParse(SECOND_UNIT, str);
      return Duration.ofSeconds(value);
    } else {
      throw new UnsupportedOperationException("\"" + str + "\" can't parse to Duration");
    }
  }
  // ISO-8601 form, e.g. "PT20.345S", "P2DT3H4M", "-PT6H3M".
  try {
    if (ISO8601.matcher(str).matches()) {
      return Duration.parse(str);
    }
  } catch (DateTimeParseException e) {
    throw new UnsupportedOperationException("\"" + str + "\" can't parse to Duration", e);
  }
  // Last resort: a bare integer is treated as milliseconds.
  try {
    int millis = Integer.parseInt(str);
    return Duration.ofMillis(millis);
  } catch (Exception e) {
    throw new UnsupportedOperationException("\"" + str + "\" can't parse to Duration", e);
  }
}
// Covers all three accepted input families: blank (default), bare integers
// (milliseconds), simple unit suffixes, and ISO-8601 forms including
// negative/signed components.
@Test
public void testParse() {
  // Blank input falls back to the default duration (-1 second here).
  Assertions.assertEquals(-1L, DurationUtil.parse("").getSeconds());
  // Bare integer is milliseconds.
  Assertions.assertEquals(0L, DurationUtil.parse("8").getSeconds());
  Assertions.assertEquals(8L, DurationUtil.parse("8").toMillis());
  // Simple unit suffixes.
  Assertions.assertEquals(0L, DurationUtil.parse("8ms").getSeconds());
  Assertions.assertEquals(8L, DurationUtil.parse("8ms").toMillis());
  Assertions.assertEquals(8L, DurationUtil.parse("8s").getSeconds());
  Assertions.assertEquals(480L, DurationUtil.parse("8m").getSeconds());
  Assertions.assertEquals(28800L, DurationUtil.parse("8h").getSeconds());
  Assertions.assertEquals(691200L, DurationUtil.parse("8d").getSeconds());
  // ISO-8601 forms.
  Assertions.assertEquals(172800L,DurationUtil.parse("P2D").getSeconds());
  Assertions.assertEquals(20L,DurationUtil.parse("PT20.345S").getSeconds());
  Assertions.assertEquals(20345L,DurationUtil.parse("PT20.345S").toMillis());
  Assertions.assertEquals(900L,DurationUtil.parse("PT15M").getSeconds());
  Assertions.assertEquals(36000L,DurationUtil.parse("PT10H").getSeconds());
  Assertions.assertEquals(8L,DurationUtil.parse("PT8S").getSeconds());
  Assertions.assertEquals(86460L,DurationUtil.parse("P1DT1M").getSeconds());
  Assertions.assertEquals(183840L,DurationUtil.parse("P2DT3H4M").getSeconds());
  // Signs may appear on the whole duration or individual components.
  Assertions.assertEquals(-21420L,DurationUtil.parse("PT-6H3M").getSeconds());
  Assertions.assertEquals(-21780L,DurationUtil.parse("-PT6H3M").getSeconds());
  Assertions.assertEquals(21420L,DurationUtil.parse("-PT-6H+3M").getSeconds());
}
/**
 * Builds a sharding condition value from a binary predicate on a column.
 *
 * <p>The non-column side of the predicate is taken as the value expression.
 * A literal or bound parameter yields a condition directly; a NOW()-style
 * expression is resolved via the timestamp service. Unsupported operators or
 * unresolvable values yield {@link Optional#empty()}.
 */
@Override
public Optional<ShardingConditionValue> generate(final BinaryOperationExpression predicate,
    final Column column, final List<Object> params, final TimestampServiceRule timestampServiceRule) {
  String operator = predicate.getOperator().toUpperCase();
  if (!isSupportedOperator(operator)) {
    return Optional.empty();
  }
  // The value sits on whichever side is not the column reference.
  ExpressionSegment valueExpression =
      predicate.getLeft() instanceof ColumnSegment ? predicate.getRight() : predicate.getLeft();
  ConditionValue conditionValue = new ConditionValue(valueExpression, params);
  if (conditionValue.isNull()) {
    // Explicit NULL comparison; -1 marks "no parameter marker".
    return generate(null, column, operator, conditionValue.getParameterMarkerIndex().orElse(-1));
  }
  Optional<Comparable<?>> value = conditionValue.getValue();
  if (value.isPresent()) {
    return generate(value.get(), column, operator, conditionValue.getParameterMarkerIndex().orElse(-1));
  }
  if (ExpressionConditionUtils.isNowExpression(valueExpression)) {
    // NOW()-style expression: substitute the service-provided timestamp.
    return generate(timestampServiceRule.getTimestamp(), column, operator, -1);
  }
  return Optional.empty();
}
// A parameter marker ("?") with no bound parameter value must produce no
// sharding condition value.
@Test
void assertGenerateConditionValueWithoutParameter() {
  ColumnSegment left = new ColumnSegment(0, 0, new IdentifierValue("order_id"));
  ParameterMarkerExpressionSegment right = new ParameterMarkerExpressionSegment(0, 0, 0);
  BinaryOperationExpression predicate =
      new BinaryOperationExpression(0, 0, left, right, "=", "order_id = ?");
  // Empty parameter list: the marker cannot be resolved.
  Optional<ShardingConditionValue> actual =
      generator.generate(predicate, column, new LinkedList<>(), mock(TimestampServiceRule.class));
  assertFalse(actual.isPresent());
}
/**
 * Builds and configures the JibContainerBuilder shared by all Jib plugin
 * goals: resolves the base image, app root and containerizing mode, then
 * applies format, platforms, entrypoint, args, env, ports, volumes, labels,
 * user, creation time and working directory from the raw configuration, and
 * finally layers in the configured extra directories.
 *
 * @throws ExtraDirectoryNotFoundException when a non-default extra directory
 *     does not exist on disk
 */
@VisibleForTesting
static JibContainerBuilder processCommonConfiguration(
    RawConfiguration rawConfiguration,
    InferredAuthProvider inferredAuthProvider,
    ProjectProperties projectProperties)
    throws InvalidFilesModificationTimeException, InvalidAppRootException,
        IncompatibleBaseImageJavaVersionException, IOException, InvalidImageReferenceException,
        InvalidContainerizingModeException, MainClassInferenceException, InvalidPlatformException,
        InvalidContainerVolumeException, InvalidWorkingDirectoryException,
        InvalidCreationTimeException, ExtraDirectoryNotFoundException {
  // Create and configure JibContainerBuilder
  ModificationTimeProvider modificationTimeProvider =
      createModificationTimeProvider(rawConfiguration.getFilesModificationTime());
  JavaContainerBuilder javaContainerBuilder =
      getJavaContainerBuilderWithBaseImage(rawConfiguration, projectProperties, inferredAuthProvider)
          .setAppRoot(getAppRootChecked(rawConfiguration, projectProperties))
          .setModificationTimeProvider(modificationTimeProvider);
  JibContainerBuilder jibContainerBuilder =
      projectProperties.createJibContainerBuilder(
          javaContainerBuilder, getContainerizingModeChecked(rawConfiguration, projectProperties));
  jibContainerBuilder
      .setFormat(rawConfiguration.getImageFormat())
      .setPlatforms(getPlatformsSet(rawConfiguration))
      .setEntrypoint(computeEntrypoint(rawConfiguration, projectProperties, jibContainerBuilder))
      .setProgramArguments(rawConfiguration.getProgramArguments().orElse(null))
      .setEnvironment(rawConfiguration.getEnvironment())
      .setExposedPorts(Ports.parse(rawConfiguration.getPorts()))
      .setVolumes(getVolumesSet(rawConfiguration))
      .setLabels(rawConfiguration.getLabels())
      .setUser(rawConfiguration.getUser().orElse(null))
      .setCreationTime(getCreationTime(rawConfiguration.getCreationTime(), projectProperties));
  getWorkingDirectoryChecked(rawConfiguration)
      .ifPresent(jibContainerBuilder::setWorkingDirectory);
  // Adds all the extra files.
  for (ExtraDirectoriesConfiguration extraDirectory : rawConfiguration.getExtraDirectories()) {
    Path from = extraDirectory.getFrom();
    if (Files.exists(from)) {
      jibContainerBuilder.addFileEntriesLayer(
          JavaContainerBuilderHelper.extraDirectoryLayerConfiguration(
              from,
              AbsoluteUnixPath.get(extraDirectory.getInto()),
              extraDirectory.getIncludesList(),
              extraDirectory.getExcludesList(),
              rawConfiguration.getExtraDirectoryPermissions(),
              modificationTimeProvider));
    } else if (!from.endsWith(DEFAULT_JIB_DIR)) {
      // The implicit default "jib" directory may be absent; anything the user
      // configured explicitly must exist.
      throw new ExtraDirectoryNotFoundException(from.toString(), from.toString());
    }
  }
  return jibContainerBuilder;
}
// For WAR projects the servlet container's entrypoint must be used and a
// warning logged that mainClass (and related options) are ignored.
@Test
public void testEntrypoint_warningOnMainclassForWar()
    throws IOException, InvalidCreationTimeException, InvalidImageReferenceException,
        IncompatibleBaseImageJavaVersionException, InvalidPlatformException,
        InvalidContainerVolumeException, MainClassInferenceException, InvalidAppRootException,
        InvalidWorkingDirectoryException, InvalidFilesModificationTimeException,
        InvalidContainerizingModeException, ExtraDirectoryNotFoundException {
  when(rawConfiguration.getMainClass()).thenReturn(Optional.of("java.util.Object"));
  when(projectProperties.isWarProject()).thenReturn(true);
  ContainerBuildPlan buildPlan = processCommonConfiguration();
  // Jetty launcher, not the configured main class.
  assertThat(buildPlan.getEntrypoint())
      .containsExactly("java", "-jar", "/usr/local/jetty/start.jar", "--module=ee10-deploy")
      .inOrder();
  verify(projectProperties)
      .log(
          LogEvent.warn(
              "mainClass, extraClasspath, jvmFlags, and expandClasspathDependencies "
                  + "are ignored for WAR projects"));
}
@Override public String probeContentType(Path path) throws IOException { // Try to detect based on the file name only for efficiency String fileNameDetect = tika.detect(path.toString()); if (!fileNameDetect.equals(MimeTypes.OCTET_STREAM)) { return fileNameDetect; } // Then check the file content if necessary String fileContentDetect = tika.detect(path); if (!fileContentDetect.equals(MimeTypes.OCTET_STREAM)) { return fileContentDetect; } // Specification says to return null if we could not // conclusively determine the file type return null; }
// Detection by file-name extension alone: an .html path must resolve to
// text/html through Files.probeContentType.
@Test
public final void testFilesProbeContentTypePathExtension() throws Exception {
  String contentType = Files.probeContentType(testDirectory.resolve(TEST_HTML));
  assertNotNull(contentType);
  assertEquals("text/html", contentType);
}
/**
 * Merges a list of (typically half-open) FEEL range nodes into a single Range.
 *
 * <p>Each node may contribute a low endpoint, a high endpoint, or both;
 * endpoints come from number literals or @-literals (e.g. dates). The merge is
 * only consistent when at most one node supplies each endpoint — a second low
 * or high endpoint makes the result {@code null}.
 */
static Range consolidateRanges(List<RangeNode> ranges) {
  boolean consistent = true;
  Range result = new RangeImpl();
  for (RangeNode r : ranges) {
    // Resolve this node's low endpoint, if it has one.
    Comparable lowValue = null;
    if (r.getStart() instanceof NumberNode startNode) {
      lowValue = startNode.getValue();
    } else if (r.getStart() instanceof AtLiteralNode atLiteralNode) {
      // @-literals (dates, times, ...) are evaluated; non-Comparable results
      // are ignored.
      Object evaluated = MapperHelper.evaluateAtLiteralNode(atLiteralNode);
      lowValue = evaluated instanceof Comparable<?> ? (Comparable) evaluated : null;
    }
    if (lowValue != null) {
      if (result.getLowEndPoint() == null) {
        result = new RangeImpl(Range.RangeBoundary.valueOf(r.getLowerBound().name()), lowValue,
            result.getHighEndPoint(), result.getHighBoundary());
      } else {
        // Two nodes both define a low endpoint: inconsistent input.
        consistent = false;
      }
    }
    // Resolve this node's high endpoint, if it has one.
    Comparable highValue = null;
    if (r.getEnd() instanceof NumberNode endNode) {
      highValue = endNode.getValue();
    } else if (r.getEnd() instanceof AtLiteralNode atLiteralNode) {
      Object evaluated = MapperHelper.evaluateAtLiteralNode(atLiteralNode);
      highValue = evaluated instanceof Comparable<?> ? (Comparable) evaluated : null;
    }
    if (highValue != null) {
      if (result.getHighEndPoint() == null) {
        result = new RangeImpl(result.getLowBoundary(), result.getLowEndPoint(), highValue,
            Range.RangeBoundary.valueOf(r.getUpperBound().name()));
      } else {
        consistent = false;
      }
    }
  }
  return consistent ? result : null;
}
// Two complementary half-open date ranges expressed with @-literals must
// consolidate into one range carrying the low end of the first and the high
// end (and boundary) of the second.
@Test
void consolidateRangesForDateRange() {
  List<LocalDate> expectedDates = Arrays.asList(LocalDate.of(2022, 1, 1), LocalDate.of(2024, 1, 1));
  Range lowRange = new RangeImpl(Range.RangeBoundary.OPEN, expectedDates.get(0), null, Range.RangeBoundary.OPEN);
  Range highRange = new RangeImpl(Range.RangeBoundary.OPEN, null, expectedDates.get(1), Range.RangeBoundary.CLOSED);
  // Render the dates as FEEL @-literals, e.g. @"2022-01-01".
  List<String> formattedDates = expectedDates.stream()
      .map(toFormat -> String.format("@\"%s-0%s-0%s\"", toFormat.getYear(), toFormat.getMonthValue(),
          toFormat.getDayOfMonth()))
      .toList();
  List<String> toRange = Arrays.asList(String.format("(%s .. null)", formattedDates.get(0)),
      String.format("(null .. %s]", formattedDates.get(1)));
  List<RangeNode> ranges = getBaseNodes(toRange, RangeNode.class);
  Range retrieved = RangeNodeSchemaMapper.consolidateRanges(ranges);
  assertThat(retrieved).isNotNull().isEqualTo(new RangeImpl(lowRange.getLowBoundary(),
      lowRange.getLowEndPoint(), highRange.getHighEndPoint(), highRange.getHighBoundary()));
}
/**
 * Validates and normalizes a path: collapses redundant separators and "."/".."
 * segments, strips any trailing separator, and converts separators to unix
 * style.
 *
 * @throws InvalidPathException if validatePath rejects the input
 */
public static String cleanPath(String path) throws InvalidPathException {
  validatePath(path);
  return FilenameUtils.separatorsToUnix(FilenameUtils.normalizeNoEndSeparator(path));
}
// An empty string is not a valid path; cleanPath must throw
// InvalidPathException.
@Test
public void cleanPathException() throws InvalidPathException {
  mException.expect(InvalidPathException.class);
  PathUtils.cleanPath("");
}
/**
 * Allocates OPPORTUNISTIC containers for the outstanding resource requests of
 * the given application attempt.
 *
 * <p>Updates the blacklist, queues the new requests, then repeatedly walks the
 * outstanding scheduler keys (highest priority first) allocating per key until
 * no key produces anything, honoring the per-AM-heartbeat allocation cap when
 * one is configured.
 */
@Override
public List<Container> allocateContainers(ResourceBlacklistRequest blackList,
    List<ResourceRequest> oppResourceReqs,
    ApplicationAttemptId applicationAttemptId,
    OpportunisticContainerContext opportContext, long rmIdentifier,
    String appSubmitter) throws YarnException {
  // Update black list.
  updateBlacklist(blackList, opportContext);
  // Add OPPORTUNISTIC requests to the outstanding ones.
  opportContext.addToOutstandingReqs(oppResourceReqs);
  Set<String> nodeBlackList = new HashSet<>(opportContext.getBlacklist());
  Set<String> allocatedNodes = new HashSet<>();
  List<Container> allocatedContainers = new ArrayList<>();
  // Satisfy the outstanding OPPORTUNISTIC requests.
  boolean continueLoop = true;
  while (continueLoop) {
    continueLoop = false;
    List<Map<Resource, List<Allocation>>> allocations = new ArrayList<>();
    for (SchedulerRequestKey schedulerKey :
        opportContext.getOutstandingOpReqs().descendingKeySet()) {
      // Allocated containers :
      //  Key = Requested Capability,
      //  Value = List of Containers of given cap (the actual container size
      //          might be different than what is requested, which is why
      //          we need the requested capability (key) to match against
      //          the outstanding reqs)
      int remAllocs = -1;
      int maxAllocationsPerAMHeartbeat = getMaxAllocationsPerAMHeartbeat();
      if (maxAllocationsPerAMHeartbeat > 0) {
        // Remaining budget under the per-heartbeat cap.
        remAllocs =
            maxAllocationsPerAMHeartbeat - allocatedContainers.size()
                - getTotalAllocations(allocations);
        if (remAllocs <= 0) {
          LOG.info("Not allocating more containers as we have reached max "
              + "allocations per AM heartbeat {}",
              maxAllocationsPerAMHeartbeat);
          break;
        }
      }
      Map<Resource, List<Allocation>> allocation = allocate(
          rmIdentifier, opportContext, schedulerKey, applicationAttemptId,
          appSubmitter, nodeBlackList, allocatedNodes, remAllocs);
      if (allocation.size() > 0) {
        allocations.add(allocation);
        // Something was allocated this round: try another pass.
        continueLoop = true;
      }
    }
    matchAllocation(allocations, allocatedContainers, opportContext);
  }
  return allocatedContainers;
}
// When the requested node (h6) and rack (/r3) are not in the cluster, only the
// off-switch ("*") request can be satisfied: exactly 2 containers.
@Test
public void testOffSwitchAllocationWhenNoNodeOrRack() throws Exception {
  ResourceBlacklistRequest blacklistRequest =
      ResourceBlacklistRequest.newInstance(
          new ArrayList<>(), new ArrayList<>());
  List<ResourceRequest> reqs =
      Arrays.asList(
          ResourceRequest.newInstance(PRIORITY_NORMAL,
              "*", CAPABILITY_1GB, 2, true, null, OPPORTUNISTIC_REQ),
          ResourceRequest.newInstance(PRIORITY_NORMAL,
              "h6", CAPABILITY_1GB, 2, true, null, OPPORTUNISTIC_REQ),
          ResourceRequest.newInstance(PRIORITY_NORMAL,
              "/r3", CAPABILITY_1GB, 2, true, null, OPPORTUNISTIC_REQ));
  ApplicationAttemptId appAttId = ApplicationAttemptId.newInstance(
      ApplicationId.newInstance(0L, 1), 1);
  // Cluster has hosts h2..h5 on racks /r1 and /r2 only.
  oppCntxt.updateNodeList(
      Arrays.asList(
          RemoteNode.newInstance(
              NodeId.newInstance("h3", 1234), "h3:1234", "/r2"),
          RemoteNode.newInstance(
              NodeId.newInstance("h2", 1234), "h2:1234", "/r1"),
          RemoteNode.newInstance(
              NodeId.newInstance("h5", 1234), "h5:1234", "/r1"),
          RemoteNode.newInstance(
              NodeId.newInstance("h4", 1234), "h4:1234", "/r2")));
  List<Container> containers = allocator.allocateContainers(
      blacklistRequest, reqs, appAttId, oppCntxt, 1L, "luser");
  LOG.info("Containers: {}", containers);
  Assert.assertEquals(2, containers.size());
}
/**
 * Maps a failure (exception / HTTP status / storage error message) to a retry
 * reason abbreviation.
 *
 * <p>All ranked categories are consulted in order and the last matching
 * category wins, so later entries in {@code rankedReasonCategories} take
 * precedence. Returns {@code null} when no category matches.
 */
static String getAbbreviation(Exception ex,
    Integer statusCode,
    String storageErrorMessage) {
  String latestMatch = null;
  for (RetryReasonCategory retryReasonCategory : rankedReasonCategories) {
    String abbreviation =
        retryReasonCategory.captureAndGetAbbreviation(ex, statusCode, storageErrorMessage);
    // Keep overwriting: the highest-ranked (last) match is returned.
    latestMatch = (abbreviation != null) ? abbreviation : latestMatch;
  }
  return latestMatch;
}
// A plain IOException with no status code and no storage message must map to
// the generic IO-exception abbreviation.
@Test
public void testUnknownIOExceptionRetryReason() {
  Assertions.assertThat(RetryReason.getAbbreviation(new IOException(), null, null)).isEqualTo(
      IO_EXCEPTION_ABBREVIATION
  );
}
/** Injects the Cache instance this component operates on. */
public void setCache(Cache cache) {
  this.cache = cache;
}
// The cache configured via setCache() must actually back the route: the value
// computed for the key has to land in the "setCache" cache and be returned.
@Test
public void testSetCache() {
  final String key = randomString();
  //Send exchange
  Object responseBody = this.template().requestBody("direct:policy-cache", key);
  //Verify the set cache was used
  assertEquals(generateValue(key), lookupCache("setCache").get(key));
  assertEquals(generateValue(key), responseBody);
  // The downstream mock must have been reached exactly once (cache miss path).
  assertEquals(1, getMockEndpoint("mock:value").getExchanges().size());
}
/**
 * Builds the "Plugins" system-info section: one attribute per installed
 * external plugin, keyed by plugin key, valued "&lt;version&gt; [&lt;name&gt;]"
 * (or just "[&lt;name&gt;]" when the plugin has no version).
 */
@Override
public ProtobufSystemInfo.Section toProtobuf() {
  ProtobufSystemInfo.Section.Builder protobuf = ProtobufSystemInfo.Section.newBuilder();
  protobuf.setName("Plugins");
  for (PluginInfo plugin : repository.getPluginsInfoByType(PluginType.EXTERNAL)) {
    Version version = plugin.getVersion();
    String versionPrefix = (version == null) ? "" : version.getName() + " ";
    String label = versionPrefix + String.format("[%s]", plugin.getName());
    setAttribute(protobuf, plugin.getKey(), label);
  }
  return protobuf.build();
}
// Labels must read "<version> [<name>]", and plugins without a version must
// fall back to "[<name>]" alone.
@Test
public void plugin_name_and_version() {
  when(repo.getPluginsInfoByType(PluginType.EXTERNAL)).thenReturn(Arrays.asList(
    new PluginInfo("key-1")
      .setName("Plugin 1")
      .setVersion(Version.create("1.1")),
    new PluginInfo("key-2")
      .setName("Plugin 2")
      .setVersion(Version.create("2.2")),
    new PluginInfo("no-version")
      .setName("No Version")));
  ProtobufSystemInfo.Section section = underTest.toProtobuf();
  assertThatAttributeIs(section, "key-1", "1.1 [Plugin 1]");
  assertThatAttributeIs(section, "key-2", "2.2 [Plugin 2]");
  assertThatAttributeIs(section, "no-version", "[No Version]");
}
/**
 * Returns the Kafka Streams config map for the given application id: the base
 * stream props plus the application id registered under the metrics
 * resource-label prefix, with Confluent telemetry adjustments applied.
 * The returned map is unmodifiable.
 */
public Map<String, Object> getKsqlStreamConfigProps(final String applicationId) {
  final Map<String, Object> map = new HashMap<>(getKsqlStreamConfigProps());
  map.put(
      MetricCollectors.RESOURCE_LABEL_PREFIX + StreamsConfig.APPLICATION_ID_CONFIG,
      applicationId
  );
  // Streams client metrics aren't used in Confluent deployment
  possiblyConfigureConfluentTelemetry(map);
  return Collections.unmodifiableMap(map);
}
// With FAIL_ON_PRODUCTION_ERROR_CONFIG=true, the streams config must use the
// log-and-fail production exception handler.
@Test
public void shouldNotSetDeserializationExceptionHandlerWhenFailOnProductionErrorTrue() {
  final KsqlConfig ksqlConfig =
      new KsqlConfig(Collections.singletonMap(KsqlConfig.FAIL_ON_PRODUCTION_ERROR_CONFIG, true));
  final Object result = ksqlConfig.getKsqlStreamConfigProps()
      .get(StreamsConfig.DEFAULT_PRODUCTION_EXCEPTION_HANDLER_CLASS_CONFIG);
  assertThat(result, equalTo(LogAndFailProductionExceptionHandler.class));
}
/**
 * Loads the server-side settings of the project identified by the scanner's
 * configured project key.
 */
@Override
public Map<String, String> loadProjectSettings() {
  return load(scannerProperties.getProjectKey());
}
// loadProjectSettings() must hit the settings WS with the project key and
// decode the protobuf response into a plain key->value map.
@Test
public void loadProjectSettings() throws IOException {
  WsResponse response = mock(WsResponse.class);
  // Serialize a two-setting protobuf payload through a pipe so the client
  // reads it as a real content stream.
  PipedOutputStream out = new PipedOutputStream();
  PipedInputStream in = new PipedInputStream(out);
  Settings.ValuesWsResponse.newBuilder()
      .addSettings(Settings.Setting.newBuilder()
          .setKey("abc").setValue("def")
          .build())
      .addSettings(Settings.Setting.newBuilder()
          .setKey("123").setValue("456")
          .build())
      .build()
      .writeTo(out);
  out.close();
  when(response.contentStream()).thenReturn(in);
  when(wsClient.call(any())).thenReturn(response);
  when(properties.getProjectKey()).thenReturn("project_key");
  Map<String, String> result = underTest.loadProjectSettings();
  // Exactly one WS call, with the project key in the query string.
  ArgumentCaptor<GetRequest> argumentCaptor = ArgumentCaptor.forClass(GetRequest.class);
  verify(wsClient, times(1)).call(argumentCaptor.capture());
  assertThat(argumentCaptor.getValue().getPath())
      .isEqualTo("api/settings/values.protobuf?component=project_key");
  assertThat(result)
      .isNotNull()
      .hasSize(2)
      .containsEntry("abc", "def")
      .containsEntry("123", "456");
}
/**
 * REST endpoint: deletes one version of a process definition and wraps the
 * service-layer result map in a {@code Result}. Errors surface via the
 * {@code @ApiException} mapping.
 */
@Operation(summary = "deleteVersion", description = "DELETE_PROCESS_DEFINITION_VERSION_NOTES")
@Parameters({
        @Parameter(name = "code", description = "PROCESS_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "1")),
        @Parameter(name = "version", description = "VERSION", required = true, schema = @Schema(implementation = int.class, example = "100"))
})
@DeleteMapping(value = "/{code}/versions/{version}")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_PROCESS_DEFINITION_VERSION_ERROR)
public Result deleteProcessDefinitionVersion(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                             @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
                                             @PathVariable(value = "code") long code,
                                             @PathVariable(value = "version") int version) {
    Map<String, Object> result =
            processDefinitionService.deleteProcessDefinitionVersion(loginUser, projectCode, code, version);
    return returnDataList(result);
}
@Test public void testDeleteProcessDefinitionVersion() { long projectCode = 1L; Map<String, Object> resultMap = new HashMap<>(); putMsg(resultMap, Status.SUCCESS); Mockito.when(processDefinitionService.deleteProcessDefinitionVersion( user, projectCode, 1, 10)).thenReturn(resultMap); Result result = processDefinitionController.deleteProcessDefinitionVersion( user, projectCode, 1, 10); Assertions.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode()); }
/**
 * Builds commit requirements for a table-metadata update: always asserts the
 * table UUID matches {@code base}, then derives additional requirements from
 * each metadata update.
 *
 * @throws IllegalArgumentException if {@code base} or {@code metadataUpdates} is null
 */
public static List<UpdateRequirement> forUpdateTable(
        TableMetadata base, List<MetadataUpdate> metadataUpdates) {
    Preconditions.checkArgument(null != base, "Invalid table metadata: null");
    Preconditions.checkArgument(null != metadataUpdates, "Invalid metadata updates: null");
    Builder builder = new Builder(base, false);
    // UUID assertion is unconditional; the updates contribute the rest.
    builder.require(new UpdateRequirement.AssertTableUUID(base.uuid()));
    metadataUpdates.forEach(builder::update);
    return builder.build();
}
@Test public void assignUUIDFailure() { List<UpdateRequirement> requirements = UpdateRequirements.forUpdateTable( metadata, ImmutableList.of(new MetadataUpdate.AssignUUID(metadata.uuid()))); when(updated.uuid()).thenReturn(UUID.randomUUID().toString()); assertThatThrownBy(() -> requirements.forEach(req -> req.validate(updated))) .isInstanceOf(CommitFailedException.class) .hasMessage( String.format( "Requirement failed: UUID does not match: expected %s != %s", updated.uuid(), metadata.uuid())); }
/**
 * Begins a new load transaction in the given database.
 *
 * Rejects the request up front when load jobs are globally disabled, the
 * cluster is in metadata recovery mode, or the cluster is in safe mode.
 * Timeout bounds are enforced per source type: stream loads use the
 * stream-load limits; lake compaction and replication skip the range check.
 *
 * @return the id of the newly created transaction
 */
public long beginTransaction(long dbId, List<Long> tableIdList, String label, TUniqueId requestId,
                             TxnCoordinator coordinator, LoadJobSourceType sourceType, long listenerId,
                             long timeoutSecond, long warehouseId)
        throws LabelAlreadyUsedException, RunningTxnExceedException, DuplicatedRequestException, AnalysisException {
    if (Config.disable_load_job) {
        throw ErrorReportException.report(ErrorCode.ERR_BEGIN_TXN_FAILED,
                "disable_load_job is set to true, all load jobs are rejected");
    }
    if (Config.metadata_enable_recovery_mode) {
        throw ErrorReportException.report(ErrorCode.ERR_BEGIN_TXN_FAILED,
                "The cluster is under recovery mode, all load jobs are rejected");
    }
    if (GlobalStateMgr.getCurrentState().isSafeMode()) {
        throw ErrorReportException.report(ErrorCode.ERR_BEGIN_TXN_FAILED,
                "The cluster is under safe mode state, all load jobs are rejected.");
    }
    switch (sourceType) {
        case BACKEND_STREAMING:
            checkValidTimeoutSecond(timeoutSecond, Config.max_stream_load_timeout_second,
                    Config.min_load_timeout_second);
            break;
        case LAKE_COMPACTION:
        case REPLICATION:
            // skip transaction timeout range check for lake compaction and replication
            break;
        default:
            checkValidTimeoutSecond(timeoutSecond, Config.max_load_timeout_second, Config.min_load_timeout_second);
    }
    // Per-database manager actually creates and tracks the transaction.
    DatabaseTransactionMgr dbTransactionMgr = getDatabaseTransactionMgr(dbId);
    return dbTransactionMgr.beginTransaction(tableIdList, label, requestId, coordinator, sourceType, listenerId,
            timeoutSecond, warehouseId);
}
@Test public void testBeginTransactionFailed() { Config.disable_load_job = true; boolean exceptionThrown = false; try { masterTransMgr.beginTransaction(1L, null, "xxx", null, null, LoadJobSourceType.FRONTEND, 1L, 1000L); } catch (Exception e) { Assert.assertTrue(e instanceof ErrorReportException); Assert.assertEquals(ErrorCode.ERR_BEGIN_TXN_FAILED, ((ErrorReportException) e).getErrorCode()); exceptionThrown = true; } finally { Config.disable_load_job = false; } Assert.assertTrue(exceptionThrown); Config.metadata_enable_recovery_mode = true; exceptionThrown = false; try { masterTransMgr.beginTransaction(1L, null, "xxx", null, null, LoadJobSourceType.FRONTEND, 1L, 1000L); } catch (Exception e) { Assert.assertTrue(e instanceof ErrorReportException); Assert.assertEquals(ErrorCode.ERR_BEGIN_TXN_FAILED, ((ErrorReportException) e).getErrorCode()); exceptionThrown = true; } finally { Config.metadata_enable_recovery_mode = false; } Assert.assertTrue(exceptionThrown); GlobalStateMgr.getCurrentState().setSafeMode(true); exceptionThrown = false; try { masterTransMgr.beginTransaction(1L, null, "xxx", null, null, LoadJobSourceType.FRONTEND, 1L, 1000L); } catch (Exception e) { Assert.assertTrue(e instanceof ErrorReportException); Assert.assertEquals(ErrorCode.ERR_BEGIN_TXN_FAILED, ((ErrorReportException) e).getErrorCode()); exceptionThrown = true; } finally { GlobalStateMgr.getCurrentState().setSafeMode(false); } Assert.assertTrue(exceptionThrown); }
/**
 * Looks up the key, falling back to the configured default when absent.
 * Uses containsKey (rather than a null check on the lookup result) so that
 * an explicitly stored null value is returned as-is, not replaced by the
 * default.
 */
@Override
public V get(Object k) {
    if (containsKey(k)) {
        return super.get(k);
    }
    return defaultValue;
}
// A null default makes DefaultHashMap behave exactly like a plain HashMap:
// missing keys resolve to null.
@Test public void nullDefaultIsAllowed() { // but makes this class behave no different than HashMap map = new DefaultHashMap<>(null); loadMap(); assertEquals("missing 1", 1, (int) map.get(ONE)); assertEquals("missing 2", 2, (int) map.get(TWO)); assertEquals("three?", null, map.get(THREE)); assertEquals("four?", null, map.get(FOUR)); }
/**
 * Copies the message: the session context and headers are cloned/copied,
 * while body chunks are shared with an extra refcount so both messages can
 * release them independently.
 */
@Override public ZuulMessage clone() {
    final ZuulMessageImpl copy = new ZuulMessageImpl(context.clone(), Headers.copyOf(headers));
    this.bodyChunks.forEach(chunk -> {
        // retain so releasing one message does not free the shared buffer
        chunk.retain();
        copy.bufferBodyContents(chunk);
    });
    return copy;
}
// Clones a message and checks body/headers/context are equal, then mutates
// the original to prove the clone's headers and context are decoupled.
@Test void testClone() { SessionContext ctx1 = new SessionContext(); ctx1.set("k1", "v1"); Headers headers1 = new Headers(); headers1.set("k1", "v1"); ZuulMessage msg1 = new ZuulMessageImpl(ctx1, headers1); ZuulMessage msg2 = msg1.clone(); assertEquals(msg1.getBodyAsText(), msg2.getBodyAsText()); assertEquals(msg1.getHeaders(), msg2.getHeaders()); assertEquals(msg1.getContext(), msg2.getContext()); // Verify that values of the 2 messages are decoupled. msg1.getHeaders().set("k1", "v_new"); msg1.getContext().set("k1", "v_new"); assertEquals("v1", msg2.getHeaders().getFirst("k1")); assertEquals("v1", msg2.getContext().get("k1")); }
// Returns the provenance object captured at construction time.
@Override public ConfiguredDataSourceProvenance getProvenance() { return provenance; }
// Loads a CSV through a regex row processor and checks row count, feature
// count, and that dataset provenance round-trips through marshalling.
@Test public void testRegexExpand() { CSVDataSource<MockOutput> dataSource = new CSVDataSource<>(regexDataFile, regexRowProcessor, true); MutableDataset<MockOutput> dataset = new MutableDataset<>(dataSource); assertEquals(6,dataset.size(),"Found an incorrect number of rows when loading the csv."); DatasetProvenance prov = dataset.getProvenance(); List<ObjectMarshalledProvenance> datasetProvenance = ProvenanceUtil.marshalProvenance(prov); assertFalse(datasetProvenance.isEmpty()); ObjectProvenance unmarshalledProvenance = ProvenanceUtil.unmarshalProvenance(datasetProvenance); assertEquals(prov,unmarshalledProvenance); assertEquals(13, dataset.getFeatureMap().size()); }
/**
 * SQL function ST_Length: deserializes the geometry, rejects anything that is
 * not a (multi)linestring, and returns its planar length.
 */
@Description("Returns the length of a LineString or Multi-LineString using Euclidean measurement on a 2D plane (based on spatial ref) in projected units")
@ScalarFunction("ST_Length")
@SqlType(DOUBLE)
public static double stLength(@SqlType(GEOMETRY_TYPE_NAME) Slice input) {
    Geometry geometry = deserialize(input);
    // Type check throws for non-line geometries (e.g. polygons).
    validateType("ST_Length", geometry, EnumSet.of(LINE_STRING, MULTI_LINE_STRING));
    return geometry.getLength();
}
// Covers empty linestring (length 0), a diagonal segment, a multi-linestring
// sum, and the type-validation error for a polygon input.
@Test public void testSTLength() { assertFunction("ST_Length(ST_GeometryFromText('LINESTRING EMPTY'))", DOUBLE, 0.0); assertFunction("ST_Length(ST_GeometryFromText('LINESTRING (0 0, 2 2)'))", DOUBLE, 2.8284271247461903); assertFunction("ST_Length(ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))'))", DOUBLE, 6.0); assertInvalidFunction("ST_Length(ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))'))", "ST_Length only applies to LINE_STRING or MULTI_LINE_STRING. Input type is: POLYGON"); }
/**
 * Resolves a user identity from a Kerberos-style principal by reducing
 * "user/host@REALM" to its short name "user" before delegating to the
 * parent lookup. The host part (after '/') is stripped first, then the
 * realm part (after '@').
 */
@Override
public UserIdentity getUserIdentity(HttpServletRequest request, String name) {
    String shortName = name;
    final int slash = shortName.indexOf('/');
    if (slash > 0) {
        shortName = shortName.substring(0, slash);
    }
    final int at = shortName.indexOf('@');
    if (at > 0) {
        shortName = shortName.substring(0, at);
    }
    return super.getUserIdentity(request, shortName);
}
// Exercises all principal spellings — user/host@REALM, user@REALM, user/host
// and bare user — and checks each resolves to the same short user name.
@Test public void testPrincipalNames() { UserStore users = new UserStore(); users.addUser(TEST_USER, SecurityUtils.NO_CREDENTIAL, new String[] { DefaultRoleSecurityProvider.ADMIN }); UserStoreAuthorizationService usas = new SpnegoUserStoreAuthorizationService(users); UserIdentity result = usas.getUserIdentity(null, TEST_USER + "/host@REALM"); assertNotNull(result); assertEquals(TEST_USER, result.getUserPrincipal().getName()); result = usas.getUserIdentity(null, TEST_USER + "@REALM"); assertNotNull(result); assertEquals(TEST_USER, result.getUserPrincipal().getName()); result = usas.getUserIdentity(null, TEST_USER + "/host"); assertNotNull(result); assertEquals(TEST_USER, result.getUserPrincipal().getName()); result = usas.getUserIdentity(null, TEST_USER); assertNotNull(result); assertEquals(TEST_USER, result.getUserPrincipal().getName()); }
// Package-private accessor for the rebalance protocol selected at construction.
RebalanceProtocol getProtocol() { return protocol; }
@Test public void testSelectRebalanceProtocol() { List<ConsumerPartitionAssignor> assignors = new ArrayList<>(); assignors.add(new MockPartitionAssignor(Collections.singletonList(ConsumerPartitionAssignor.RebalanceProtocol.EAGER))); assignors.add(new MockPartitionAssignor(Collections.singletonList(COOPERATIVE))); // no commonly supported protocols assertThrows(IllegalArgumentException.class, () -> buildCoordinator(rebalanceConfig, new Metrics(), assignors, false, subscriptions)); assignors.clear(); assignors.add(new MockPartitionAssignor(Arrays.asList(ConsumerPartitionAssignor.RebalanceProtocol.EAGER, COOPERATIVE))); assignors.add(new MockPartitionAssignor(Arrays.asList(ConsumerPartitionAssignor.RebalanceProtocol.EAGER, COOPERATIVE))); // select higher indexed (more advanced) protocols try (ConsumerCoordinator coordinator = buildCoordinator(rebalanceConfig, new Metrics(), assignors, false, subscriptions)) { assertEquals(COOPERATIVE, coordinator.getProtocol()); } }
/**
 * Updates a data-source config: verifies the record exists, verifies the new
 * settings can actually connect, then persists the update.
 */
@Override
public void updateDataSourceConfig(DataSourceConfigSaveReqVO updateReqVO) {
    // 校验存在 (validate the config exists)
    validateDataSourceConfigExists(updateReqVO.getId());
    DataSourceConfigDO updateObj = BeanUtils.toBean(updateReqVO, DataSourceConfigDO.class);
    // Reject configs that cannot establish a connection.
    validateConnectionOK(updateObj);
    // 更新 (persist)
    dataSourceConfigMapper.updateById(updateObj);
}
// Updating a non-existent config id must raise DATA_SOURCE_CONFIG_NOT_EXISTS.
@Test public void testUpdateDataSourceConfig_notExists() { // 准备参数 DataSourceConfigSaveReqVO reqVO = randomPojo(DataSourceConfigSaveReqVO.class); // 调用, 并断言异常 assertServiceException(() -> dataSourceConfigService.updateDataSourceConfig(reqVO), DATA_SOURCE_CONFIG_NOT_EXISTS); }
// Convenience overload: reads table metadata using the connection's default
// catalog and schema (both passed as null).
public static Table getTableMeta(DataSource ds, String tableName) { return getTableMeta(ds, null, null, tableName); }
// Expects DbRuntimeException when querying metadata for a missing table.
// NOTE(review): the assertions inside the lambda only execute if getTableMeta
// returns normally; since the exception is expected from that call, they
// appear unreachable — confirm whether they were meant for a separate test.
@Test public void getTableNotExistTest() { assertThrows(DbRuntimeException.class, () -> { final Table table = MetaUtil.getTableMeta(ds, "user_not_exist"); assertEquals(table.getIndexInfoList().size(), 2); }); }
@Override public AppResponse process(Flow flow, AppSessionRequest request) { if (appSession.getRegistrationId() == null) { return new NokResponse(); } Map<String, String> result = digidClient.getExistingApplication(appSession.getRegistrationId()); if (result.get(lowerUnderscore(STATUS)).equals("OK")) { return new OkResponse(); } else if (result.get(lowerUnderscore(STATUS)).equals("PENDING")) { // switch state to require replace action appSession.setState(State.EXISTING_APPLICATION_FOUND.name()); return new StatusResponse("PENDING"); } else { return new NokResponse(); } }
// An unrecognized status string must yield a NOK response with status "NOK".
// NOTE(review): the cast of a NokResponse to StatusResponse implies
// NokResponse extends StatusResponse — verify that hierarchy holds.
@Test void processNOKResponseTest(){ when(digidClientMock.getExistingApplication(1337L)).thenReturn(Map.of( lowerUnderscore(STATUS), "QUESTION MARK" )); AppResponse appResponse = checkExistingApplication.process(flowMock, null); assertTrue(appResponse instanceof NokResponse); assertEquals("NOK", ((StatusResponse) appResponse).getStatus()); }
// Opens a new Flink JDBC connection from the configured URL and properties.
@Override public Connection getConnection() throws SQLException { return new FlinkConnection(DriverUri.create(url, properties)); }
// A fresh connection from the data source starts in the default catalog and
// default database.
@Test public void testDataSource() throws Exception { FlinkDataSource dataSource = new FlinkDataSource(getDriverUri().getURL(), new Properties()); try (Connection connection = dataSource.getConnection()) { assertEquals("default_catalog", connection.getCatalog()); assertEquals("default_database", connection.getSchema()); } }
/**
 * Resolves parameter references on the given object tree. If the object
 * defines its own ParamScope, that scope is layered over this resolver
 * before string leaves, non-string leaves and nested nodes are substituted.
 */
public <T> void resolve(T resolvable) {
    ParamResolver resolver = this;
    if (ParamScope.class.isAssignableFrom(resolvable.getClass())) {
        ParamScope newScope = (ParamScope) resolvable;
        // The object's own scope shadows this resolver's parameters.
        resolver = newScope.applyOver(resolver);
    }
    resolveStringLeaves(resolvable, resolver);
    resolveNonStringLeaves(resolvable, resolver);
    resolveNodes(resolvable, resolver);
}
// Substitution is single-pass: a param value that itself looks like a param
// reference (or like an escape sequence) must be inserted literally.
@Test public void shouldNotRecursivelySubstituteParams() { PipelineConfig pipelineConfig = PipelineConfigMother.createPipelineConfig("cruise", "dev", "ant"); pipelineConfig.setLabelTemplate("#{foo}"); new ParamResolver(new ParamSubstitutionHandlerFactory(params(param("foo", "#{bar}"), param("bar", "baz"))), fieldCache).resolve(pipelineConfig); assertThat(pipelineConfig.getLabelTemplate(), is("#{bar}")); pipelineConfig.setLabelTemplate("#{foo}"); new ParamResolver(new ParamSubstitutionHandlerFactory(params(param("foo", "###"))), fieldCache).resolve(pipelineConfig); assertThat(pipelineConfig.getLabelTemplate(), is("###")); }
/**
 * Records an instance registration, keyed by grouped service name, so it can
 * be replayed (redone) later — e.g. after a reconnect.
 */
public void cacheInstanceForRedo(String serviceName, String groupName, Instance instance) {
    String key = NamingUtils.getGroupedName(serviceName, groupName);
    InstanceRedoData redoData = InstanceRedoData.build(serviceName, groupName, instance);
    // Synchronized on the map to keep this write atomic with other redo
    // operations that lock the same monitor.
    synchronized (registeredInstances) {
        registeredInstances.put(key, redoData);
    }
}
// Caching an instance stores a redo entry with the right service/group/
// instance and the expected initial flags (not registered, not unregistering,
// expected-registered).
@Test void testCacheInstanceForRedo() { ConcurrentMap<String, InstanceRedoData> registeredInstances = getInstanceRedoDataMap(); assertTrue(registeredInstances.isEmpty()); Instance instance = new Instance(); redoService.cacheInstanceForRedo(SERVICE, GROUP, instance); assertFalse(registeredInstances.isEmpty()); InstanceRedoData actual = registeredInstances.entrySet().iterator().next().getValue(); assertEquals(SERVICE, actual.getServiceName()); assertEquals(GROUP, actual.getGroupName()); assertEquals(instance, actual.get()); assertFalse(actual.isRegistered()); assertFalse(actual.isUnregistering()); assertTrue(actual.isExpectedRegistered()); }
/**
 * Routes a multi-table statement. Each logic table is routed with the
 * standard engine unless it belongs to a binding-table group that was already
 * routed (binding tables share the result of the first member). A single
 * per-table result is returned directly; multiple results are combined via
 * the cartesian routing engine.
 *
 * @throws ShardingTableRuleNotFoundException if no logic table had a sharding rule
 */
@Override
public RouteContext route(final ShardingRule shardingRule) {
    // Case-insensitive set of tables already covered by a binding group.
    Collection<String> bindingTableNames = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
    Collection<RouteContext> routeContexts = new LinkedList<>();
    for (String each : logicTables) {
        Optional<ShardingTable> shardingTable = shardingRule.findShardingTable(each);
        if (shardingTable.isPresent()) {
            if (!bindingTableNames.contains(each)) {
                routeContexts.add(new ShardingStandardRoutingEngine(shardingTable.get().getLogicTable(),
                        shardingConditions, sqlStatementContext, hintValueContext, props).route(shardingRule));
            }
            // Mark all members of this table's binding group as handled.
            shardingRule.findBindingTableRule(each).ifPresent(optional -> bindingTableNames.addAll(optional.getShardingTables().keySet()));
        }
    }
    if (routeContexts.isEmpty()) {
        throw new ShardingTableRuleNotFoundException(logicTables);
    }
    RouteContext result = new RouteContext();
    if (1 == routeContexts.size()) {
        RouteContext newRouteContext = routeContexts.iterator().next();
        result.getOriginalDataNodes().addAll(newRouteContext.getOriginalDataNodes());
        result.getRouteUnits().addAll(newRouteContext.getRouteUnits());
    } else {
        // Multiple independent route results: take their cartesian product.
        RouteContext routeContext = new ShardingCartesianRoutingEngine(routeContexts).route(shardingRule);
        result.getOriginalDataNodes().addAll(routeContext.getOriginalDataNodes());
        result.getRouteUnits().addAll(routeContext.getRouteUnits());
    }
    return result;
}
@Test void assertRoutingForBindingTables() { ShardingComplexRoutingEngine complexRoutingEngine = new ShardingComplexRoutingEngine(ShardingRoutingEngineFixtureBuilder.createShardingConditions("t_order"), mock(SQLStatementContext.class), new HintValueContext(), new ConfigurationProperties(new Properties()), Arrays.asList("t_order", "t_order_item")); RouteContext routeContext = complexRoutingEngine.route(ShardingRoutingEngineFixtureBuilder.createBindingShardingRule()); List<RouteUnit> routeUnits = new ArrayList<>(routeContext.getRouteUnits()); assertThat(routeContext.getRouteUnits().size(), is(1)); assertThat(routeUnits.get(0).getDataSourceMapper().getActualName(), is("ds_1")); assertThat(routeUnits.get(0).getTableMappers().size(), is(1)); assertThat(routeUnits.get(0).getTableMappers().iterator().next().getActualName(), is("t_order_1")); assertThat(routeUnits.get(0).getTableMappers().iterator().next().getLogicName(), is("t_order")); }
/**
 * Parses a URL query string into a String-to-String map using the given
 * charset; returns an empty map when there are no parameters.
 */
public static Map<String, String> decodeParamMap(String paramsStr, Charset charset) {
    final Map<CharSequence, CharSequence> queryMap = UrlQuery.of(paramsStr, charset).getQueryMap();
    if (MapUtil.isEmpty(queryMap)) {
        return MapUtil.empty();
    }
    // Narrow CharSequence keys/values to String for the public API.
    return Convert.toMap(String.class, String.class, queryMap);
}
// A parameter value that itself contains '=' (base64 padding) must be kept
// intact rather than treated as a new key/value separator.
@Test public void decodeParamMapTest() { // 参数值存在分界标记等号时 final Map<String, String> paramMap = HttpUtil.decodeParamMap("https://www.xxx.com/api.action?aa=123&f_token=NzBkMjQxNDM1MDVlMDliZTk1OTU3ZDI1OTI0NTBiOWQ=", CharsetUtil.CHARSET_UTF_8); assertEquals("123",paramMap.get("aa")); assertEquals("NzBkMjQxNDM1MDVlMDliZTk1OTU3ZDI1OTI0NTBiOWQ=",paramMap.get("f_token")); }
/**
 * Validates an external-issue report. Reports with both rules and issues use
 * the new (CCT) format; issues without rules fall back to the deprecated
 * format and log a deprecation warning with a documentation link; anything
 * else is rejected.
 *
 * @throws IllegalStateException when the report matches neither format
 */
public void validate(ExternalIssueReport report, Path reportPath) {
    if (report.rules != null && report.issues != null) {
        // New format: issues must reference rule ids declared in the report.
        Set<String> ruleIds = validateRules(report.rules, reportPath);
        validateIssuesCctFormat(report.issues, ruleIds, reportPath);
    } else if (report.rules == null && report.issues != null) {
        String documentationLink = documentationLinkGenerator.getDocumentationLink(DOCUMENTATION_SUFFIX);
        LOGGER.warn("External issues were imported with a deprecated format which will be removed soon. " +
                "Please switch to the newest format to fully benefit from Clean Code: {}", documentationLink);
        validateIssuesDeprecatedFormat(report.issues, reportPath);
    } else {
        throw new IllegalStateException(String.format("Failed to parse report '%s': invalid report detected.", reportPath));
    }
}
// A secondary location missing its text-range startLine must fail validation
// with a precise error message.
@Test public void validate_whenReportMissingTextRangeStartLineForSecondaryLocation_shouldThrowException() throws IOException { ExternalIssueReport report = read(REPORTS_LOCATION); report.issues[3].secondaryLocations[0].textRange.startLine = null; assertThatThrownBy(() -> validator.validate(report, reportPath)) .isInstanceOf(IllegalStateException.class) .hasMessage("Failed to parse report 'report-path': missing mandatory field 'startLine of the text range' in a secondary location of the issue."); }
/**
 * Splits a SQL script into individual semicolon-terminated commands while
 * respecting single-quoted strings (kept verbatim, including semicolons) and
 * skipping short (--) and long comments entirely.
 *
 * @throws MigrationException via validateToken on an unterminated quote or
 *     comment, or directly when trailing text lacks a closing semicolon
 */
public static List<String> splitSql(final String sql) {
    final List<String> commands = new ArrayList<>();
    StringBuilder current = new StringBuilder();
    int index = 0;
    while (index < sql.length()) {
        if (sql.charAt(index) == SINGLE_QUOTE) {
            // Copy the whole quoted literal through; semicolons inside it
            // must not split the command.
            final int closingToken = sql.indexOf(SINGLE_QUOTE, index + 1);
            validateToken(String.valueOf(SINGLE_QUOTE), closingToken);
            current.append(sql, index, closingToken + 1);
            index = closingToken + 1;
        } else if (index < sql.length() - 1 && sql.startsWith(SHORT_COMMENT_OPENER, index)) {
            // Skip a line comment up to and including its terminator.
            index = sql.indexOf(SHORT_COMMENT_CLOSER, index + 1) + 1;
            validateToken(SHORT_COMMENT_CLOSER, index - 1);
        } else if (index < sql.length() - 1 && sql.startsWith(LONG_COMMENT_OPENER, index)) {
            // Skip a block comment; +2 moves past the two-char closer.
            index = sql.indexOf(LONG_COMMENT_CLOSER, index + 1) + 2;
            validateToken(LONG_COMMENT_CLOSER, index - 2);
        } else if (sql.charAt(index) == SEMICOLON) {
            // Command boundary: emit the accumulated command (with ';').
            current.append(';');
            commands.add(current.toString());
            current = new StringBuilder();
            index++;
        } else {
            current.append(sql.charAt(index));
            index++;
        }
    }
    if (!current.toString().trim().isEmpty()) {
        throw new MigrationException(String.format(
            "Unmatched command at end of file; missing semicolon: '%s'", current
        ));
    }
    return commands;
}
// An unterminated single-quoted literal must raise a MigrationException
// naming the missing closing token.
@Test public void shouldThrowOnMalformedQuote() { // When: final MigrationException e = assertThrows(MigrationException.class, () -> CommandParser.splitSql("select 'unclosed quote;")); // Then: assertThat(e.getMessage(), is("Invalid sql - failed to find closing token '''")); }
/**
 * Stops the watch, freezing the elapsed tick count.
 * Stopping a watch that is not running is a programming error and throws
 * IllegalStateException.
 */
public void stop() {
    if (!running) {
        throw new IllegalStateException();
    }
    running = false;
    elapsed = ticks.ticks() - start;
}
// Calling stop() on a stopwatch that was never started must throw.
@Test void notStarted2() { assertThrows(IllegalStateException.class, () -> { FakeTicks f = new FakeTicks(); Stopwatch s = new Stopwatch(f); s.stop(); }); }
/**
 * Inserts an arbitrary object at the given index: CharSequences are inserted
 * directly, anything else is converted to a String first via Convert.toStr.
 */
public StrBuilder insert(int index, Object obj) {
    if (obj instanceof CharSequence) {
        return insert(index, (CharSequence) obj);
    }
    return insert(index, Convert.toStr(obj));
}
// Inserting mid-buffer (including multi-byte characters) must shift the tail
// rather than overwrite it, even when the builder started with capacity 1.
@Test public void insertTest() { StrBuilder builder = StrBuilder.create(1); builder.append("aaa").append("你好").append('r'); builder.insert(3, "数据插入"); assertEquals("aaa数据插入你好r", builder.toString()); }
/**
 * Writes a human-readable dump of all metrics to the output stream: a
 * timestamp banner followed by one '-'-delimited section per non-empty
 * metric category, then a trailing blank line and a flush.
 */
@Override
@SuppressWarnings("rawtypes")
public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters, SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters, SortedMap<String, Timer> timers) {
    // Header: formatted wall-clock time padded to a '=' banner.
    final String dateTime = dateFormat.format(new Date(clock.getTime()));
    printWithBanner(dateTime, '=');
    output.println();
    if (!gauges.isEmpty()) {
        printWithBanner("-- Gauges", '-');
        for (Map.Entry<String, Gauge> entry : gauges.entrySet()) {
            output.println(entry.getKey());
            printGauge(entry.getValue());
        }
        output.println();
    }
    if (!counters.isEmpty()) {
        printWithBanner("-- Counters", '-');
        for (Map.Entry<String, Counter> entry : counters.entrySet()) {
            output.println(entry.getKey());
            // Note: printCounter takes the whole entry, unlike the others.
            printCounter(entry);
        }
        output.println();
    }
    if (!histograms.isEmpty()) {
        printWithBanner("-- Histograms", '-');
        for (Map.Entry<String, Histogram> entry : histograms.entrySet()) {
            output.println(entry.getKey());
            printHistogram(entry.getValue());
        }
        output.println();
    }
    if (!meters.isEmpty()) {
        printWithBanner("-- Meters", '-');
        for (Map.Entry<String, Meter> entry : meters.entrySet()) {
            output.println(entry.getKey());
            printMeter(entry.getValue());
        }
        output.println();
    }
    if (!timers.isEmpty()) {
        printWithBanner("-- Timers", '-');
        for (Map.Entry<String, Timer> entry : timers.entrySet()) {
            output.println(entry.getKey());
            printTimer(entry.getValue());
        }
        output.println();
    }
    output.println();
    output.flush();
}
@Test public void reportsTimerValues() throws Exception { final Timer timer = mock(Timer.class); when(timer.getCount()).thenReturn(1L); when(timer.getMeanRate()).thenReturn(2.0); when(timer.getOneMinuteRate()).thenReturn(3.0); when(timer.getFiveMinuteRate()).thenReturn(4.0); when(timer.getFifteenMinuteRate()).thenReturn(5.0); final Snapshot snapshot = mock(Snapshot.class); when(snapshot.getMax()).thenReturn(TimeUnit.MILLISECONDS.toNanos(100)); when(snapshot.getMean()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(200)); when(snapshot.getMin()).thenReturn(TimeUnit.MILLISECONDS.toNanos(300)); when(snapshot.getStdDev()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(400)); when(snapshot.getMedian()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(500)); when(snapshot.get75thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(600)); when(snapshot.get95thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(700)); when(snapshot.get98thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(800)); when(snapshot.get99thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(900)); when(snapshot.get999thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS .toNanos(1000)); when(timer.getSnapshot()).thenReturn(snapshot); reporter.report(map(), map(), map(), map(), map("test.another.timer", timer)); assertThat(consoleOutput()) .isEqualTo(lines( dateHeader, "", "-- Timers ----------------------------------------------------------------------", "test.another.timer", " count = 1", " mean rate = 2.00 calls/second", " 1-minute rate = 3.00 calls/second", " 5-minute rate = 4.00 calls/second", " 15-minute rate = 5.00 calls/second", " min = 300.00 milliseconds", " max = 100.00 milliseconds", " mean = 200.00 milliseconds", " stddev = 400.00 milliseconds", " median = 500.00 milliseconds", " 75% <= 600.00 milliseconds", " 95% <= 700.00 milliseconds", " 98% <= 800.00 milliseconds", " 99% <= 900.00 milliseconds", " 99.9% <= 1000.00 
milliseconds", "", "" )); }
/**
 * Lists a directory. For a container (bucket) the prefix is empty; otherwise
 * the object-key prefix of the directory (with a trailing delimiter) scopes
 * the listing. Delegates to the prefix-aware overload.
 */
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
    final String prefix = containerService.isContainer(directory) ? StringUtils.EMPTY :
        containerService.getKey(directory) + Path.DELIMITER;
    return this.list(directory, listener, prefix);
}
@Test public void testListLexicographicSortOrderAssumption() throws Exception { final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume)); container.attributes().setRegion("IAD"); final SwiftRegionService regionService = new SwiftRegionService(session); final Path directory = new SwiftDirectoryFeature(session, regionService).mkdir( new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus()); final List<String> files = Arrays.asList( "Z", "aa", "0a", "a", "AAA", "B", "~$a", ".c" ); files.sort(session.getHost().getProtocol().getListComparator()); assertTrue(new SwiftObjectListService(session, regionService).list(directory, new IndexedListProgressListener() { @Override public void message(final String message) { // } @Override public void visit(final AttributedList<Path> list, final int index, final Path file) { assertEquals(files.get(index), file.getName()); } }).isEmpty()); for(String f : files) { new SwiftTouchFeature(session, regionService).touch(new Path(directory, f, EnumSet.of(Path.Type.file)), new TransferStatus()); } final AttributedList<Path> list = new SwiftObjectListService(session, regionService).list(directory, new DisabledListProgressListener()); for(int i = 0; i < list.size(); i++) { assertEquals(files.get(i), list.get(i).getName()); new SwiftDeleteFeature(session, regionService).delete(Collections.singletonList(list.get(i)), new DisabledLoginCallback(), new Delete.DisabledCallback()); } new SwiftDeleteFeature(session, regionService).delete(Collections.singletonList(directory), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
/**
 * Returns the cached DataSource for the URL, creating and caching one on
 * first use. The leading containsKey/get pair is a fast path that skips the
 * computeIfAbsent mapping call for already-cached URLs.
 */
public DataSource get(final String url, final String urlPrefix) {
    if (dataSourceMap.containsKey(url)) {
        return dataSourceMap.get(url);
    }
    // Strip the driver prefix, then parse the remainder as a ShardingSphere URL.
    return dataSourceMap.computeIfAbsent(url, driverUrl -> createDataSource(ShardingSphereURL.parse(driverUrl.substring(urlPrefix.length()))));
}
// Two different driver URLs must resolve to two distinct cached data sources.
@Test void assertGetNewDataSource() { DriverDataSourceCache dataSourceCache = new DriverDataSourceCache(); DataSource fooDataSource = dataSourceCache.get("jdbc:shardingsphere:classpath:config/driver/foo-driver-fixture.yaml", DRIVER_URL_PREFIX); DataSource barDataSource = dataSourceCache.get("jdbc:shardingsphere:classpath:config/driver/bar-driver-fixture.yaml", DRIVER_URL_PREFIX); assertThat(fooDataSource, not(barDataSource)); }
/**
 * Builds the container entrypoint for a packaged jar:
 * {@code java <jvmFlags...> -jar /app/<jar-name>}. Requires the jar manifest
 * to declare a Main-Class.
 *
 * NOTE(review): JarFile.getManifest() can return null for a jar without a
 * manifest, which would NPE before the friendly IllegalArgumentException is
 * reached — confirm whether inputs are pre-validated.
 *
 * @throws IllegalArgumentException if the manifest has no Main-Class attribute
 */
@Override
public ImmutableList<String> computeEntrypoint(List<String> jvmFlags) throws IOException {
    try (JarFile jarFile = new JarFile(jarPath.toFile())) {
        String mainClass = jarFile.getManifest().getMainAttributes().getValue(Attributes.Name.MAIN_CLASS);
        if (mainClass == null) {
            throw new IllegalArgumentException(
                "`Main-Class:` attribute for an application main class not defined in the input JAR's " +
                "manifest (`META-INF/MANIFEST.MF` in the JAR).");
        }
        ImmutableList.Builder<String> entrypoint = ImmutableList.builder();
        entrypoint.add("java");
        entrypoint.addAll(jvmFlags);
        entrypoint.add("-jar");
        entrypoint.add(JarLayers.APP_ROOT + "/" + jarPath.getFileName().toString());
        return entrypoint.build();
    }
}
// JVM flags must be placed between "java" and "-jar" in the entrypoint.
@Test public void testComputeEntrypoint_withMainClass_jvmFlags() throws IOException, URISyntaxException { Path standardJar = Paths.get(Resources.getResource(STANDARD_JAR_WITH_CLASS_PATH_MANIFEST).toURI()); StandardPackagedProcessor standardPackagedModeProcessor = new StandardPackagedProcessor(standardJar, JAR_JAVA_VERSION); ImmutableList<String> actualEntrypoint = standardPackagedModeProcessor.computeEntrypoint(ImmutableList.of("-jvm-flag")); assertThat(actualEntrypoint) .isEqualTo( ImmutableList.of("java", "-jvm-flag", "-jar", "/app/standardJarWithClassPath.jar")); }
// Convenience overload: wraps the task list in a no-op-close iterable and
// materializes the planned groups into a list.
public static <T extends ScanTask> List<ScanTaskGroup<T>> planTaskGroups( List<T> tasks, long splitSize, int lookback, long openFileCost) { return Lists.newArrayList( planTaskGroups(CloseableIterable.withNoopClose(tasks), splitSize, lookback, openFileCost)); }
// Mixed-size tasks totaling 384 units with a 128-unit split size must pack
// into exactly 3 task groups.
@Test public void testTaskGroupPlanning() { List<ParentTask> tasks = ImmutableList.of( new ChildTask1(64), new ChildTask1(32), new ChildTask3(64), new ChildTask3(32), new ChildTask2(128), new ChildTask3(32), new ChildTask3(32)); CloseableIterable<ScanTaskGroup<ParentTask>> taskGroups = TableScanUtil.planTaskGroups(CloseableIterable.withNoopClose(tasks), 128, 10, 4); assertThat(taskGroups).as("Must have 3 task groups").hasSize(3); }
/**
 * ORs {@code value} into the packed 64-bit-word array starting at bit
 * {@code index}. When the element crosses a word boundary, the high bits
 * spill into the next word. OR-based write: assumes the target bits are
 * currently zero.
 */
@Override
public void setElementValue(long index, int bitsPerElement, long value) {
    long whichLong = index >>> 6;        // word containing the first bit
    int whichBit = (int) (index & 0x3F); // bit offset within that word
    set(whichLong, get(whichLong) | (value << whichBit));
    int bitsRemaining = 64 - whichBit;
    // Element straddles a word boundary: write the remaining high bits.
    if (bitsRemaining < bitsPerElement)
        set(whichLong + 1, get(whichLong + 1) | (value >>> bitsRemaining));
}
// Writing past the logical end must fail with a catchable Java exception
// (ArrayIndexOutOfBoundsException), never a native crash.
@Test public void doesNotThrowSIGSEGV() { FixedLengthElementArray arr = new FixedLengthElementArray(new WastefulRecycler(2, 2), 1793); for(int i=0;i<2500;i++) { try { arr.setElementValue(i, 2, 3); } catch(ArrayIndexOutOfBoundsException acceptable) { } } }
// Delegates hashing to the wrapped map so equals/hashCode stay consistent.
@Override public final int hashCode() { return delegate.hashCode(); }
// Two empty lazy maps must hash equally (hashCode is delegated, not identity).
@Test public void requireThatHashCodeIsImplemented() { assertEquals(newLazyMap(null).hashCode(), newLazyMap(null).hashCode()); }
// Relaxed variant carries no extra ordering guarantees here; it simply
// forwards to offer().
public boolean relaxedOffer(E e) { return offer(e); }
// Offering to a full queue must fail without changing its size.
@Test(dataProvider = "full") public void relaxedOffer_whenFull(MpscGrowableArrayQueue<Integer> queue) { assertThat(queue.relaxedOffer(1)).isFalse(); assertThat(queue).hasSize(FULL_SIZE); }
/**
 * Decodes a quantity that is either a plain decimal long or a 0x-prefixed
 * hex string into a BigInteger.
 *
 * @throws MessageDecodingException if the value is neither a decimal long
 *     nor a valid hex quantity, or if hex parsing fails
 */
public static BigInteger decodeQuantity(String value) {
    if (isLongValue(value)) {
        return BigInteger.valueOf(Long.parseLong(value));
    }
    if (!isValidHexQuantity(value)) {
        throw new MessageDecodingException("Value must be in format 0x[0-9a-fA-F]+");
    }
    try {
        return parsePaddedNumberHex(value);
    } catch (NumberFormatException e) {
        throw new MessageDecodingException("Negative ", e);
    }
}
// Hex quantities with leading zeros must decode to the same values as their
// canonical forms.
@Test public void testQuantityDecodeLeadingZero() { assertEquals(Numeric.decodeQuantity("0x0400"), (BigInteger.valueOf(1024L))); assertEquals(Numeric.decodeQuantity("0x001"), (BigInteger.valueOf(1L))); assertEquals(Numeric.decodeQuantity("0x000"), (BigInteger.ZERO)); assertEquals(Numeric.decodeQuantity("0x00f"), (BigInteger.valueOf(15L))); }
/**
 * Verifies that the JDK modules this process needs are opened/exported
 * (java.base, jdk.management, java.management), logging via the given
 * logger through checkPackageRequirements for anything missing.
 */
public static void checkJavaInternalAccess(ILogger logger) {
    // Sorted map keeps diagnostics in a deterministic module order.
    Map<String, PackageAccessRequirement[]> moduleRequirements = new TreeMap<>();
    moduleRequirements.put("java.base", packages(
            export("jdk.internal.ref"),
            open("java.lang"),
            open("sun.nio.ch")
        )
    );
    moduleRequirements.put("jdk.management", getJdkManagementRequirements());
    moduleRequirements.put("java.management", packages(open("sun.management")));
    checkPackageRequirements(logger, moduleRequirements);
}
// In the test JVM itself, the required JDK packages must all be accessible,
// so no warning may be logged.
@Test public void testNoMissingPackageAccessInTestsuite() { ILogger logger = mock(ILogger.class); checkJavaInternalAccess(logger); verify(logger, never()).warning(anyString()); }
/**
 * Creates an Avro deserializer for the wrapper class: AvroKey uses the
 * key writer/reader schemas from the config, AvroValue the value schemas.
 * When a reader schema is configured the datum reader performs schema
 * resolution; otherwise it reads with the writer schema alone.
 *
 * @throws IllegalStateException for any class other than AvroKey/AvroValue
 */
@Override
public Deserializer<AvroWrapper<T>> getDeserializer(Class<AvroWrapper<T>> c) {
    Configuration conf = getConf();
    GenericData dataModel = createDataModel(conf);
    if (AvroKey.class.isAssignableFrom(c)) {
        Schema writerSchema = getKeyWriterSchema(conf);
        Schema readerSchema = getKeyReaderSchema(conf);
        DatumReader<T> datumReader = (readerSchema != null)
            ? dataModel.createDatumReader(writerSchema, readerSchema)
            : dataModel.createDatumReader(writerSchema);
        return new AvroKeyDeserializer<>(writerSchema, readerSchema, datumReader);
    } else if (AvroValue.class.isAssignableFrom(c)) {
        Schema writerSchema = getValueWriterSchema(conf);
        Schema readerSchema = getValueReaderSchema(conf);
        DatumReader<T> datumReader = (readerSchema != null)
            ? dataModel.createDatumReader(writerSchema, readerSchema)
            : dataModel.createDatumReader(writerSchema);
        return new AvroValueDeserializer<>(writerSchema, readerSchema, datumReader);
    } else {
        throw new IllegalStateException("Only AvroKey and AvroValue are supported.");
    }
}
@Test
void getDeserializerForKey() throws IOException {
    // Set the reader schema in the job configuration.
    Schema readerSchema = Schema.create(Schema.Type.STRING);
    Job job = Job.getInstance();
    AvroJob.setMapOutputKeySchema(job, readerSchema);

    // Get a deserializer from the configuration.
    AvroSerialization serialization = ReflectionUtils.newInstance(AvroSerialization.class, job.getConfiguration());
    @SuppressWarnings("unchecked")
    Deserializer<AvroWrapper> deserializer = serialization.getDeserializer(AvroKey.class);
    assertTrue(deserializer instanceof AvroKeyDeserializer);
    AvroKeyDeserializer avroDeserializer = (AvroKeyDeserializer) deserializer;

    // Check that the reader schema is set correctly on the deserializer.
    assertEquals(readerSchema, avroDeserializer.getReaderSchema());
}
/**
 * Returns the next parsed element from the underlying JSON stream.
 *
 * @return the next {@link JsonElement}
 * @throws NoSuchElementException if no further element is available
 * @throws JsonParseException if parsing exhausts the stack or the heap
 */
@Override
public JsonElement next() throws JsonParseException {
    if (!hasNext()) {
        throw new NoSuchElementException();
    }

    try {
        return Streams.parse(parser);
    } catch (StackOverflowError | OutOfMemoryError e) {
        // Deeply nested or pathologically large documents can exhaust the
        // stack or heap; multi-catch replaces two byte-identical catch blocks.
        throw new JsonParseException("Failed parsing JSON source to Json", e);
    }
}
@Test
public void testCallingNextBeyondAvailableInput() {
    // Consume the two elements present in the input.
    JsonElement unused1 = parser.next();
    JsonElement unused2 = parser.next();
    // Parser should not go beyond available input
    assertThrows(NoSuchElementException.class, parser::next);
}
/**
 * Authenticates the incoming request via HTTP Basic credentials through the
 * configured JAAS login module; internal (node-to-node) requests bypass
 * authentication, and failures abort the request with 401.
 */
@Override
public void filter(ContainerRequestContext requestContext) {
    if (isInternalRequest(requestContext)) {
        log.trace("Skipping authentication for internal request");
        return;
    }

    try {
        log.debug("Authenticating request");
        // Parse "Authorization: Basic ..." into user/password credentials.
        BasicAuthCredentials credentials = new BasicAuthCredentials(requestContext.getHeaderString(AUTHORIZATION));
        LoginContext loginContext = new LoginContext(
            CONNECT_LOGIN_MODULE,
            null,
            new BasicAuthCallBackHandler(credentials),
            configuration);
        loginContext.login();
        setSecurityContextForRequest(requestContext, credentials);
    } catch (LoginException | ConfigException e) {
        // Log at debug here in order to avoid polluting log files whenever someone mistypes their credentials
        log.debug("Request failed authentication", e);
        requestContext.abortWith(
            Response.status(Response.Status.UNAUTHORIZED)
                .entity("User cannot access the resource.")
                .build());
    }
}
@Test
public void testUnknownLoginModule() throws IOException {
    // A JAAS config referencing a non-existent login module must reject the
    // request by aborting with a response rather than throwing.
    File credentialFile = setupPropertyLoginFile(true);
    JaasBasicAuthFilter jaasBasicAuthFilter = setupJaasFilter("KafkaConnect1", credentialFile.getPath());
    ContainerRequestContext requestContext = setMock("Basic", "user", "password");
    jaasBasicAuthFilter.filter(requestContext);

    verify(requestContext).abortWith(any(Response.class));
    verify(requestContext, atLeastOnce()).getMethod();
    verify(requestContext).getHeaderString(JaasBasicAuthFilter.AUTHORIZATION);
}
/**
 * Appends environment descriptors (OS, container, cloud) to the user-agent
 * info list. The OS entry always comes first; Docker/Kubernetes entries are
 * added when detected; GCE and EC2 are treated as mutually exclusive.
 *
 * @param info list to append environment descriptors to
 */
@VisibleForTesting
public static void addUserAgentEnvironments(List<String> info) {
    info.add(String.format(OS_FORMAT, OSUtils.OS_NAME));
    boolean onDocker = EnvironmentUtils.isDocker();
    if (onDocker) {
        info.add(DOCKER_KEY);
    }
    boolean onKubernetes = EnvironmentUtils.isKubernetes();
    if (onKubernetes) {
        info.add(KUBERNETES_KEY);
    }
    // Only probe EC2 metadata when we are not on Google Compute Engine.
    if (EnvironmentUtils.isGoogleComputeEngine()) {
        info.add(GCE_KEY);
    } else {
        addEC2Info(info);
    }
}
@Test
public void userAgentEnvironmentStringK8s() {
    // Docker + Kubernetes detected, EC2 metadata unreachable: expect exactly
    // [os, docker, kubernetes] in that order.
    Mockito.when(EnvironmentUtils.isDocker()).thenReturn(true);
    Mockito.when(EnvironmentUtils.isKubernetes()).thenReturn(true);
    Mockito.when(EC2MetadataUtils.getUserData())
        .thenThrow(new SdkClientException("Unable to contact EC2 metadata service."));
    List<String> info = new ArrayList<>();
    UpdateCheckUtils.addUserAgentEnvironments(info);
    Assert.assertEquals(3, info.size());
    Assert.assertEquals(String.format(UpdateCheckUtils.OS_FORMAT, OSUtils.OS_NAME), info.get(0));
    Assert.assertEquals(UpdateCheckUtils.DOCKER_KEY, info.get(1));
    Assert.assertEquals(UpdateCheckUtils.KUBERNETES_KEY, info.get(2));
}
/**
 * Returns the names of all identity providers that are currently enabled,
 * in the repository's sorted order.
 */
public List<String> getEnabledIdentityProviders() {
    List<IdentityProvider> providers = identityProviderRepository.getAllEnabledAndSorted();
    return providers.stream()
            .filter(provider -> provider.isEnabled())
            .map(provider -> provider.getName())
            .toList();
}
@Test
public void getEnabledIdentityProviders_whenNonDefined_shouldReturnEmpty() {
    // With no identity providers registered, the result must be empty.
    mockIdentityProviders(List.of());

    assertThat(commonSystemInformation.getEnabledIdentityProviders())
        .isEmpty();
}
/**
 * Removes and returns the tail element of the deque, blocking on the
 * asynchronous removal. Per the {@code Deque} contract, an empty deque
 * (signalled by a {@code null} result) raises {@link NoSuchElementException}.
 */
@Override
public V removeLast() {
    V tail = get(removeLastAsync());
    if (tail != null) {
        return tail;
    }
    throw new NoSuchElementException();
}
@Test
public void testRemoveLast() {
    // Elements are prepended with addFirst, so removeLast pops them in
    // insertion order: 1, 2, 3.
    RDeque<Integer> queue1 = redisson.getDeque("deque1");
    queue1.addFirst(1);
    queue1.addFirst(2);
    queue1.addFirst(3);

    Assertions.assertEquals(1, (int)queue1.removeLast());
    Assertions.assertEquals(2, (int)queue1.removeLast());
    Assertions.assertEquals(3, (int)queue1.removeLast());
}
/**
 * Scans a serialized delta-commit's write stats for the given file group and
 * returns its (base file, log files) pair, or empty when the group is absent.
 *
 * @param bytes serialized Avro commit metadata of the delta commit
 * @param fileGroupId the partition/file-id pair to look up
 * @return base-file path ("" for log-only slices) and log-file list, if found
 * @throws HoodieException if the metadata cannot be deserialized/parsed
 */
public static Option<Pair<String, List<String>>> getFileSliceForFileGroupFromDeltaCommit(byte[] bytes, HoodieFileGroupId fileGroupId) {
    try {
        org.apache.hudi.avro.model.HoodieCommitMetadata commitMetadata = deserializeCommitMetadata(bytes);
        Map<String,List<org.apache.hudi.avro.model.HoodieWriteStat>> partitionToWriteStatsMap = commitMetadata.getPartitionToWriteStats();
        for (Map.Entry<String, List<org.apache.hudi.avro.model.HoodieWriteStat>> partitionToWriteStat: partitionToWriteStatsMap.entrySet()) {
            for (org.apache.hudi.avro.model.HoodieWriteStat writeStat: partitionToWriteStat.getValue()) {
                // File group identity = (partition path, file id).
                HoodieFileGroupId fgId = new HoodieFileGroupId(partitionToWriteStat.getKey(), writeStat.getFileId());
                if (fgId.equals(fileGroupId)) {
                    // A log-only slice has no base file; represent that as "".
                    return Option.of(Pair.of(writeStat.getBaseFile() == null ? "" : writeStat.getBaseFile(), writeStat.getLogFiles()));
                }
            }
        }
        return Option.empty();
    } catch (Exception e) {
        throw new HoodieException("Fail to parse the base file and log files from DeltaCommit", e);
    }
}
@Test
public void testGetFileSliceForFileGroupFromDeltaCommit() throws IOException {
    // Build commit metadata with two partitions; file id "111" appears in
    // both, so lookup must match on (partition, fileId), not fileId alone.
    org.apache.hudi.avro.model.HoodieCommitMetadata commitMetadata = new org.apache.hudi.avro.model.HoodieCommitMetadata();
    org.apache.hudi.avro.model.HoodieWriteStat writeStat1 = createWriteStat("111", "111base", Arrays.asList("1.log", "2.log"));
    org.apache.hudi.avro.model.HoodieWriteStat writeStat2 = createWriteStat("111", "111base", Arrays.asList("3.log", "4.log"));
    org.apache.hudi.avro.model.HoodieWriteStat writeStat3 = createWriteStat("222", null, Collections.singletonList("5.log"));
    Map<String,List<org.apache.hudi.avro.model.HoodieWriteStat>> partitionToWriteStatsMap = new HashMap<>();
    partitionToWriteStatsMap.put("partition1", Arrays.asList(writeStat2, writeStat3));
    partitionToWriteStatsMap.put("partition2", Collections.singletonList(writeStat1));
    commitMetadata.setPartitionToWriteStats(partitionToWriteStatsMap);
    byte[] serializedCommitMetadata = TimelineMetadataUtils.serializeAvroMetadata(
        commitMetadata, org.apache.hudi.avro.model.HoodieCommitMetadata.class).get();

    // (partition1, 111) resolves to writeStat2's base file and log files.
    Option<Pair<String, List<String>>> result = HoodieCommitMetadata.getFileSliceForFileGroupFromDeltaCommit(
        serializedCommitMetadata, new HoodieFileGroupId("partition1","111"));
    assertTrue(result.isPresent());
    assertEquals("111base", result.get().getKey());
    assertEquals(2, result.get().getValue().size());
    assertEquals("3.log", result.get().getValue().get(0));
    assertEquals("4.log", result.get().getValue().get(1));

    // (partition1, 222) is a log-only slice: empty base file, one log file.
    result = HoodieCommitMetadata.getFileSliceForFileGroupFromDeltaCommit(
        serializedCommitMetadata, new HoodieFileGroupId("partition1","222"));
    assertTrue(result.isPresent());
    assertTrue(result.get().getKey().isEmpty());
    assertEquals(1, result.get().getValue().size());
    assertEquals("5.log", result.get().getValue().get(0));
}
@Override public void importData(JsonReader reader) throws IOException { logger.info("Reading configuration for 1.1"); // this *HAS* to start as an object reader.beginObject(); while (reader.hasNext()) { JsonToken tok = reader.peek(); switch (tok) { case NAME: String name = reader.nextName(); // find out which member it is if (name.equals(CLIENTS)) { readClients(reader); } else if (name.equals(GRANTS)) { readGrants(reader); } else if (name.equals(WHITELISTEDSITES)) { readWhitelistedSites(reader); } else if (name.equals(BLACKLISTEDSITES)) { readBlacklistedSites(reader); } else if (name.equals(AUTHENTICATIONHOLDERS)) { readAuthenticationHolders(reader); } else if (name.equals(ACCESSTOKENS)) { readAccessTokens(reader); } else if (name.equals(REFRESHTOKENS)) { readRefreshTokens(reader); } else if (name.equals(SYSTEMSCOPES)) { readSystemScopes(reader); } else { for (MITREidDataServiceExtension extension : extensions) { if (extension.supportsVersion(THIS_VERSION)) { if (extension.supportsVersion(THIS_VERSION)) { extension.importExtensionData(name, reader); break; } } } // unknown token, skip it reader.skipValue(); } break; case END_OBJECT: // the object ended, we're done here reader.endObject(); continue; default: logger.debug("Found unexpected entry"); reader.skipValue(); continue; } } fixObjectReferences(); for (MITREidDataServiceExtension extension : extensions) { if (extension.supportsVersion(THIS_VERSION)) { extension.fixExtensionObjectReferences(maps); break; } } maps.clearAll(); }
@Test
public void testImportClients() throws IOException {
    // Expected client entities, mirrored field-for-field by the JSON below.
    ClientDetailsEntity client1 = new ClientDetailsEntity();
    client1.setId(1L);
    client1.setAccessTokenValiditySeconds(3600);
    client1.setClientId("client1");
    client1.setClientSecret("clientsecret1");
    client1.setRedirectUris(ImmutableSet.of("http://foo.com/"));
    client1.setScope(ImmutableSet.of("foo", "bar", "baz", "dolphin"));
    client1.setGrantTypes(ImmutableSet.of("implicit", "authorization_code", "urn:ietf:params:oauth:grant_type:redelegate", "refresh_token"));
    client1.setAllowIntrospection(true);

    ClientDetailsEntity client2 = new ClientDetailsEntity();
    client2.setId(2L);
    client2.setAccessTokenValiditySeconds(3600);
    client2.setClientId("client2");
    client2.setClientSecret("clientsecret2");
    client2.setRedirectUris(ImmutableSet.of("http://bar.baz.com/"));
    client2.setScope(ImmutableSet.of("foo", "dolphin", "electric-wombat"));
    client2.setGrantTypes(ImmutableSet.of("client_credentials", "urn:ietf:params:oauth:grant_type:redelegate"));
    client2.setAllowIntrospection(false);

    // Minimal 1.1 export document: every collection empty except clients.
    String configJson = "{" +
        "\"" + MITREidDataService.SYSTEMSCOPES + "\": [], " +
        "\"" + MITREidDataService.ACCESSTOKENS + "\": [], " +
        "\"" + MITREidDataService.REFRESHTOKENS + "\": [], " +
        "\"" + MITREidDataService.GRANTS + "\": [], " +
        "\"" + MITREidDataService.WHITELISTEDSITES + "\": [], " +
        "\"" + MITREidDataService.BLACKLISTEDSITES + "\": [], " +
        "\"" + MITREidDataService.AUTHENTICATIONHOLDERS + "\": [], " +
        "\"" + MITREidDataService.CLIENTS + "\": [" +
        "{\"id\":1,\"accessTokenValiditySeconds\":3600,\"clientId\":\"client1\",\"secret\":\"clientsecret1\"," +
        "\"redirectUris\":[\"http://foo.com/\"]," +
        "\"scope\":[\"foo\",\"bar\",\"baz\",\"dolphin\"]," +
        "\"grantTypes\":[\"implicit\",\"authorization_code\",\"urn:ietf:params:oauth:grant_type:redelegate\",\"refresh_token\"]," +
        "\"allowIntrospection\":true}," +
        "{\"id\":2,\"accessTokenValiditySeconds\":3600,\"clientId\":\"client2\",\"secret\":\"clientsecret2\"," +
        "\"redirectUris\":[\"http://bar.baz.com/\"]," +
        "\"scope\":[\"foo\",\"dolphin\",\"electric-wombat\"]," +
        "\"grantTypes\":[\"client_credentials\",\"urn:ietf:params:oauth:grant_type:redelegate\"]," +
        "\"allowIntrospection\":false}" +
        "  ]" +
        "}";

    System.err.println(configJson);
    JsonReader reader = new JsonReader(new StringReader(configJson));

    dataService.importData(reader);
    // Both clients must have been persisted via the repository.
    verify(clientRepository, times(2)).saveClient(capturedClients.capture());

    List<ClientDetailsEntity> savedClients = capturedClients.getAllValues();

    assertThat(savedClients.size(), is(2));

    assertThat(savedClients.get(0).getAccessTokenValiditySeconds(), equalTo(client1.getAccessTokenValiditySeconds()));
    assertThat(savedClients.get(0).getClientId(), equalTo(client1.getClientId()));
    assertThat(savedClients.get(0).getClientSecret(), equalTo(client1.getClientSecret()));
    assertThat(savedClients.get(0).getRedirectUris(), equalTo(client1.getRedirectUris()));
    assertThat(savedClients.get(0).getScope(), equalTo(client1.getScope()));
    assertThat(savedClients.get(0).getGrantTypes(), equalTo(client1.getGrantTypes()));
    assertThat(savedClients.get(0).isAllowIntrospection(), equalTo(client1.isAllowIntrospection()));

    assertThat(savedClients.get(1).getAccessTokenValiditySeconds(), equalTo(client2.getAccessTokenValiditySeconds()));
    assertThat(savedClients.get(1).getClientId(), equalTo(client2.getClientId()));
    assertThat(savedClients.get(1).getClientSecret(), equalTo(client2.getClientSecret()));
    assertThat(savedClients.get(1).getRedirectUris(), equalTo(client2.getRedirectUris()));
    assertThat(savedClients.get(1).getScope(), equalTo(client2.getScope()));
    assertThat(savedClients.get(1).getGrantTypes(), equalTo(client2.getGrantTypes()));
    assertThat(savedClients.get(1).isAllowIntrospection(), equalTo(client2.isAllowIntrospection()));
}
/**
 * Returns whether any of the given pids is alive and owned by the given user.
 *
 * @param pids process ids to check; null or empty yields {@code false}
 * @param user expected owning user name
 * @return true when at least one pid is a live process owned by {@code user}
 * @throws IOException if querying the OS process table fails
 */
public static boolean isAnyProcessAlive(Collection<Long> pids, String user) throws IOException {
    boolean nothingToCheck = (pids == null) || pids.isEmpty();
    if (nothingToCheck) {
        return false;
    }
    // Process probing differs completely between Windows and POSIX.
    return ServerUtils.IS_ON_WINDOWS
            ? isAnyWindowsProcessAlive(pids, user)
            : isAnyPosixProcessAlive(pids, user);
}
@Test
public void testIsAnyProcessAlive() throws Exception {
    // no process should be alive for a randomly generated user
    String randomUser = RandomStringUtils.randomAlphanumeric(12);
    Collection<Long> pids = getRunningProcessIds(null);
    assertFalse(pids.isEmpty());
    boolean status = ServerUtils.isAnyProcessAlive(pids, randomUser);
    assertFalse(status, "Random user " + randomUser + " is not expected to own any process");

    // at least one pid will be owned by the current user (doing the testing)
    String currentUser = System.getProperty("user.name");
    status = ServerUtils.isAnyProcessAlive(pids, currentUser);
    assertTrue(status, "Expecting user " + currentUser + " to own at least one process");

    if (!ServerUtils.IS_ON_WINDOWS) {
        // userid test is valid only on Posix platforms
        int inValidUserId = -1;
        status = ServerUtils.isAnyProcessAlive(pids, inValidUserId);
        assertFalse(status, "Invalid userId " + randomUser + " is not expected to own any process");

        int currentUid = ServerUtils.getUserId(null);
        status = ServerUtils.isAnyProcessAlive(pids, currentUid);
        assertTrue(status, "Expecting uid " + currentUid + " to own at least one process");
    }
}
/**
 * Views the float array as an iterable whose elements are compared with
 * exact float equality (no tolerance).
 */
public FloatArrayAsIterable usingExactEquality() {
    FloatArrayAsIterable exactView =
        new FloatArrayAsIterable(EXACT_EQUALITY_CORRESPONDENCE, iterableSubject());
    return exactView;
}
@Test
public void usingExactEquality_contains_otherTypes_intOutOfRange() {
    // 2^24 + 1 has no exact float representation, so exact-equality
    // comparison must fail with an explanatory exception.
    int expected = (1 << 24) + 1;
    float[] actual = array(1.0f, 2.0f, 3.0f);
    expectFailureWhenTestingThat(actual).usingExactEquality().contains(expected);
    assertFailureKeys(
        "value of",
        "expected to contain",
        "testing whether",
        "but was",
        "additionally, one or more exceptions were thrown while comparing elements",
        "first exception");
    assertThatFailure()
        .factValue("first exception")
        .startsWith(
            "compare(" + actual[0] + ", " + expected + ") threw java.lang.IllegalArgumentException");
    assertThatFailure()
        .factValue("first exception")
        .contains(
            "Expected value "
                + expected
                + " in assertion using exact float equality was an int with an absolute value "
                + "greater than 2^24 which has no exact float representation");
}
// Wraps the given XA connection so sessions created from it are traced.
public XAConnection xaConnection(XAConnection xaConnection) {
    return TracingXAConnection.create(xaConnection, this);
}
@Test
void xaConnection_wrapsInput() {
    // The returned connection must be the tracing decorator.
    assertThat(jmsTracing.xaConnection(mock(XAConnection.class)))
        .isInstanceOf(TracingXAConnection.class);
}
/**
 * Compares two possibly-null values for ORDER BY evaluation.
 *
 * <p>Null handling follows {@code nullsOrderType} (NULLS FIRST sorts nulls
 * before any value); strings may be compared case-insensitively; the result
 * sign is inverted for descending order.
 *
 * @param thisValue left operand, may be null
 * @param otherValue right operand, may be null
 * @param orderDirection ASC or DESC
 * @param nullsOrderType where nulls sort relative to non-null values
 * @param caseSensitive whether String comparison respects case
 * @return negative/zero/positive per the ordering
 */
@SuppressWarnings({"unchecked", "rawtypes"})
public static int compareTo(final Comparable thisValue, final Comparable otherValue, final OrderDirection orderDirection, final NullsOrderType nullsOrderType, final boolean caseSensitive) {
    if (null == thisValue && null == otherValue) {
        return 0;
    }
    if (null == thisValue) {
        return NullsOrderType.FIRST == nullsOrderType ? -1 : 1;
    }
    if (null == otherValue) {
        return NullsOrderType.FIRST == nullsOrderType ? 1 : -1;
    }
    if (!caseSensitive && thisValue instanceof String && otherValue instanceof String) {
        return compareToCaseInsensitiveString((String) thisValue, (String) otherValue, orderDirection);
    }
    int result = thisValue.compareTo(otherValue);
    // BUG FIX: unary negation overflows when compareTo returns
    // Integer.MIN_VALUE; invert the sign safely instead of computing -result.
    return OrderDirection.ASC == orderDirection ? result : Integer.compare(0, result);
}
@Test
void assertCompareToWhenSecondValueIsNullForOrderByDescAndNullsLast() {
    // NULLS LAST: a non-null value sorts before null, hence -1.
    assertThat(CompareUtils.compareTo(1, null, OrderDirection.DESC, NullsOrderType.LAST, caseSensitive), is(-1));
}
// Sets the service scope and returns this builder for chaining.
public B scope(String scope) {
    this.scope = scope;
    return getThis();
}
@Test
void scope() {
    // The scope set on the builder must be carried into the built config.
    InterfaceBuilder builder = new InterfaceBuilder();
    builder.scope("scope");
    Assertions.assertEquals("scope", builder.build().getScope());
}
// Runs the step against the root component of the branch under analysis.
@Override
public void execute(ComputationStep.Context context) {
    executeForBranch(treeRootHolder.getRoot());
}
@Test
public void added_event_if_qp_is_added() {
    // A profile registered as ADDED (absent in the base measures) must
    // produce exactly one "profile used" event.
    QualityProfile qp = qp(QP_NAME_1, LANGUAGE_KEY_1, new Date());
    qProfileStatusRepository.register(qp.getQpKey(), ADDED);
    Language language = mockLanguageInRepository(LANGUAGE_KEY_1);
    mockQualityProfileMeasures(treeRootHolder.getRoot(), null, arrayOf(qp));

    underTest.execute(new TestComputationStepContext());

    verify(eventRepository).add(eventArgumentCaptor.capture());
    verifyNoMoreInteractions(eventRepository);
    verifyEvent(eventArgumentCaptor.getValue(), "Use \"" + qp.getQpName() + "\" (" + language.getName() + ")", null, null);
}
// Removes the link from the store and publishes the resulting event (if any).
@Override
public void removeLink(ConnectPoint src, ConnectPoint dst) {
    post(store.removeLink(src, dst));
}
@Test
public void removeLink() {
    // Two directed links between DID1 and DID2; removing one direction must
    // leave the reverse direction intact.
    addLink(DID1, P1, DID2, P2, DIRECT);
    addLink(DID2, P2, DID1, P1, DIRECT);
    assertEquals("incorrect link count", 2, service.getLinkCount());

    providerService.linkVanished(new DefaultLinkDescription(cp(DID1, P1), cp(DID2, P2), DIRECT));
    validateEvents(LINK_REMOVED);
    assertEquals("incorrect link count", 1, service.getLinkCount());
    assertNull("link should not be found", service.getLink(cp(DID1, P1), cp(DID2, P2)));
    assertNotNull("link should be found", service.getLink(cp(DID2, P2), cp(DID1, P1)));

    // Removing an already-removed link must be a no-op with no events.
    providerService.linkVanished(new DefaultLinkDescription(cp(DID1, P1), cp(DID2, P2), DIRECT));
    assertEquals("no events expected", 0, listener.events.size());
}
/**
 * Builds a documentation URL by appending the suffix to the configured base
 * URL; a {@code null} suffix yields the base URL alone.
 */
@Override
public String getDocumentationLink(@Nullable String suffix) {
    if (suffix == null) {
        return documentationBaseUrl;
    }
    return documentationBaseUrl + suffix;
}
@Test
public void getDocumentationLink_whenSuffixProvided_concatenatesIt() {
    // Base URL and suffix are concatenated verbatim.
    String generatedLink = documentationLinkGenerator.getDocumentationLink(TEST_SUFFIX);

    assertThat(generatedLink).isEqualTo(DOCUMENTATION_PUBLIC_URL + "100.1000/documentation/analyzing-source-code/scm-integration/");
}
/**
 * Converts a scanner-report measure into a core {@code Measure} typed
 * according to the metric's value type.
 *
 * @param batchMeasure report measure, may be null (yields empty)
 * @param metric the metric describing the expected value type, never null
 * @return the converted measure, or empty when {@code batchMeasure} is null
 * @throws IllegalArgumentException for unsupported value types
 */
public Optional<Measure> toMeasure(@Nullable ScannerReport.Measure batchMeasure, Metric metric) {
    Objects.requireNonNull(metric);
    if (batchMeasure == null) {
        return Optional.empty();
    }
    Measure.NewMeasureBuilder builder = Measure.newMeasureBuilder();
    // Dispatch on the metric's declared value type.
    switch (metric.getType().getValueType()) {
        case INT:
            return toIntegerMeasure(builder, batchMeasure);
        case LONG:
            return toLongMeasure(builder, batchMeasure);
        case DOUBLE:
            return toDoubleMeasure(builder, batchMeasure);
        case BOOLEAN:
            return toBooleanMeasure(builder, batchMeasure);
        case STRING:
            return toStringMeasure(builder, batchMeasure);
        case LEVEL:
            return toLevelMeasure(builder, batchMeasure);
        case NO_VALUE:
            return toNoValueMeasure(builder);
        default:
            throw new IllegalArgumentException("Unsupported Measure.ValueType " + metric.getType().getValueType());
    }
}
@Test
public void toMeasure_returns_no_value_if_dto_has_no_value_for_Double_Metric() {
    // An empty report measure maps to a present Measure with NO_VALUE.
    Optional<Measure> measure = underTest.toMeasure(EMPTY_BATCH_MEASURE, SOME_DOUBLE_METRIC);

    assertThat(measure).isPresent();
    assertThat(measure.get().getValueType()).isEqualTo(Measure.ValueType.NO_VALUE);
}
/**
 * Looks up a cached row matching the given lookup values.
 *
 * <p>Index-backed conditions first narrow the candidate set; the remaining
 * (non-indexed) conditions are then checked candidate by candidate. Returns
 * {@code null} when nothing matches, or when an unsupported DB-only
 * condition forces a fallback to the database.
 */
@Override
public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException {
  if ( stepData.hasDBCondition ) {
    // actually, there was no sense in executing SELECT from db in this case,
    // should be reported as improvement
    return null;
  }
  SearchingContext context = new SearchingContext();
  context.init( keys.length );

  // Narrow candidates via each index; an empty context means no match.
  for ( Index index : indexes ) {
    int column = index.getColumn();
    // IS (NOT) NULL operation does not require second argument
    // hence, lookupValue can be absent
    // basically, the index ignores both meta and value, so we can pass everything there
    Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null;

    index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue );
    if ( context.isEmpty() ) {
      // if nothing matches, break the search
      return null;
    }
  }

  // iterate through all elements survived after filtering stage
  // and find the first matching
  BitSet candidates = context.getCandidates();
  int candidate = candidates.nextSetBit( 0 );
  while ( candidate != -1 ) {
    Object[] dataKeys = keys[ candidate ];

    boolean matches = true;
    // BETWEEN consumes two lookup values; shift tracks that extra offset.
    int lookupShift = 0;
    for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) {
      int[] columnConditionPair = otherConditions[ i ];
      final int column = columnConditionPair[ 0 ];
      Object keyData = dataKeys[ column ];
      ValueMetaInterface keyMeta = keysMeta.getValueMeta( column );

      int lookupIndex = column + lookupShift;
      Object cmpData = lookupRow[ lookupIndex ];
      ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex );

      int condition = columnConditionPair[ 1 ];
      if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) {
        // BETWEEN is a special condition demanding two arguments
        // technically there are no obstacles to implement it,
        // as it is just a short form of: (a <= b) && (b <= c)
        // however, let it be so for now
        matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 );
        if ( matches ) {
          lookupShift++;
          lookupIndex++;
          ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex );
          Object cmpData2 = lookupRow[ lookupIndex ];
          matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 );
        }
      } else {
        // if not BETWEEN, than it is LIKE (or some new operator)
        // for now, LIKE is not supported here
        matches = false;
        stepData.hasDBCondition = true;
      }
    }

    if ( matches ) {
      return data[ candidate ];
    } else {
      candidate = candidates.nextSetBit( candidate + 1 );
    }
  }
  return null;
}
@Test
public void lookup_DoesNotFind_FilteredByIndex() throws Exception {
  // The IS NOT NULL index on keys[3] filters out rows whose fourth key is
  // null, so the lookup must return nothing.
  ReadAllCache cache = buildCache( "=,IS NOT NULL,>=,IS NOT NULL" );
  Object[] found = cache.getRowFromCache( keysMeta.clone(), new Object[] { 1L, null, new Date( 0 ), null } );
  assertNull( "(keys[3] != NULL) --> none", found );
}
/**
 * Estimates the cardinality of the intersection of two KHyperLogLog sketches.
 *
 * <p>When both sketches are exact, the true intersection count is returned;
 * otherwise the Jaccard index of the pair is scaled by the cardinality of
 * their union, capped by the smaller sketch's cardinality.
 */
@ScalarFunction
@SqlType(StandardTypes.BIGINT)
public static long intersectionCardinality(@SqlType(KHyperLogLogType.NAME) Slice slice1, @SqlType(KHyperLogLogType.NAME) Slice slice2)
{
    KHyperLogLog khll1 = KHyperLogLog.newInstance(slice1);
    KHyperLogLog khll2 = KHyperLogLog.newInstance(slice2);

    if (khll1.isExact() && khll2.isExact()) {
        return KHyperLogLog.exactIntersectionCardinality(khll1, khll2);
    }

    long lowestCardinality = Math.min(khll1.cardinality(), khll2.cardinality());
    double jaccard = KHyperLogLog.jaccardIndex(khll1, khll2);
    KHyperLogLog setUnion = KHyperLogLog.merge(khll1, khll2);
    long result = Math.round(jaccard * setUnion.cardinality());

    // When one of the sets is much smaller than the other and approaches being a true
    // subset of the other, the computed cardinality may exceed the cardinality estimate
    // of the smaller set. When this happens the cardinality of the smaller set is obviously
    // a better estimate of the one computed with the Jaccard Index.
    return Math.min(result, lowestCardinality);
}
@Test
public void testIntersectionCardinality()
{
    // Two sketch sets sharing `uniqueElements` common values; the estimate
    // must land within 5% of the true intersection size.
    int blockSize = 10;
    long uniqueElements = 10000 * blockSize;
    double error = uniqueElements * 0.05;
    List<KHyperLogLog> list1 = buildKHyperLogLogs(blockSize, uniqueElements, threshold, potential);
    List<KHyperLogLog> list2 = buildKHyperLogLogs(15, (uniqueElements * 15) / blockSize, threshold, potential);
    String projection = getIntersectionCardinalityProjection(list1, list2);

    functionAssertions.assertFunctionWithError(projection, BIGINT, uniqueElements, error);
}
/**
 * Runs this stage's task and the other stage's task in parallel, then
 * combines their results with {@code fn} once both complete.
 */
@Override
public <U, V> ParSeqBasedCompletionStage<V> thenCombine(CompletionStage<? extends U> other, BiFunction<? super T, ? super U, ? extends V> fn) {
    Task<U> that = getOrGenerateTaskFromStage(other);
    return nextStageByComposingTask(Task.par(_task, that).map("thenCombine", fn::apply));
}
@Test
public void testThenCombine() throws Exception {
    // The combiner must be invoked with both stage results, and its output
    // must flow into the downstream consumer.
    CompletionStage<String> completionStage1 = createTestStage(TESTVALUE1);
    CompletionStage<String> completionStage2 = createTestStage(TESTVALUE2);
    BiFunction<String, String, Integer> combiner = mock(BiFunction.class);
    when(combiner.apply(TESTVALUE1, TESTVALUE2)).thenReturn(0);
    Consumer<Integer> intConsumer = mock(Consumer.class);
    finish(completionStage1.thenCombine(completionStage2, combiner).thenAccept(intConsumer));
    verify(combiner).apply(TESTVALUE1, TESTVALUE2);
    verify(intConsumer).accept(0);
}
// Delegates the per-service serving-status update to the health service.
public void setStatus(String service, HealthCheckResponse.ServingStatus status) {
    healthService.setStatus(service, status);
}
@Test
void setStatus() {
    // A status set through the manager must be observable via a health check
    // on the underlying service.
    String service = "serv0";
    manager.setStatus(service, ServingStatus.SERVING);
    ServingStatus stored = manager.getHealthService()
        .check(HealthCheckRequest.newBuilder().setService(service).build())
        .getStatus();
    Assertions.assertEquals(ServingStatus.SERVING, stored);
}