Dataset columns:
  focal_method — string, length 13 to 60.9k characters
  test_case — string, length 25 to 109k characters
// Enforces a case-sensitive default collation on the connection, then repairs
// column collations — but only in the UPGRADE and STARTUP states (other states
// skip the repair).
@Override void handle(Connection connection, DatabaseCharsetChecker.State state) throws SQLException { expectCaseSensitiveDefaultCollation(connection); if (state == DatabaseCharsetChecker.State.UPGRADE || state == DatabaseCharsetChecker.State.STARTUP) { repairColumns(connection); } }
// A column size of -1 is SQL Server's encoding of nvarchar(max); verifies the
// repair DDL uses "nvarchar(max)" rather than a bogus "nvarchar(-1)".
@Test public void support_the_max_size_of_varchar_column() throws Exception { answerDefaultCollation("Latin1_General_CS_AS"); // returned size is -1 answerColumnDefs(new ColumnDef(TABLE_PROJECTS, COLUMN_NAME, "Latin1_General", "Latin1_General_CI_AI", "nvarchar", -1, false)); answerIndices(); underTest.handle(connection, DatabaseCharsetChecker.State.UPGRADE); verify(sqlExecutor).executeDdl(connection, "ALTER TABLE projects ALTER COLUMN name nvarchar(max) COLLATE Latin1_General_CS_AS NOT NULL"); }
// This metadata implementation does not guarantee that all procedures returned
// by getProcedures() are callable, so it reports a constant false.
@Override public boolean allProceduresAreCallable() { return false; }
// allProceduresAreCallable() is hard-wired to false; assert exactly that.
@Test void assertAllProceduresAreCallable() { assertFalse(metaData.allProceduresAreCallable()); }
// Maps the key to its Redis Cluster hash slot, then delegates to the
// slot-based node lookup.
@Override public RedisClusterNode clusterGetNodeForKey(byte[] key) { int slot = executorService.getConnectionManager().calcSlot(key); return clusterGetNodeForSlot(slot); }
// Smoke test: any key must resolve to some (non-null) cluster node.
@Test public void testClusterGetNodeForKey() { RedisClusterNode node = connection.clusterGetNodeForKey("123".getBytes()); assertThat(node).isNotNull(); }
// Maps a JSON-schema "type" declaration to a codemodel JType:
//  - "object", or any node with a non-empty "properties" (note the operator
//    precedence: equals("object") || (has("properties") && size > 0)) -> object rule
//  - "existingJavaType" -> primitive or resolved class reference
//  - "string"/"number"/"integer"/"boolean" -> corresponding Java types
//  - "array" -> array rule; anything else -> Object.
// Afterwards "format" (or "media", for strings only) may refine the type, but
// only when no explicit javaType/existingJavaType was requested.
@Override public JType apply(String nodeName, JsonNode node, JsonNode parent, JClassContainer jClassContainer, Schema schema) { String propertyTypeName = getTypeName(node); JType type; if (propertyTypeName.equals("object") || node.has("properties") && node.path("properties").size() > 0) { type = ruleFactory.getObjectRule().apply(nodeName, node, parent, jClassContainer.getPackage(), schema); } else if (node.has("existingJavaType")) { String typeName = node.path("existingJavaType").asText(); if (isPrimitive(typeName, jClassContainer.owner())) { type = primitiveType(typeName, jClassContainer.owner()); } else { type = resolveType(jClassContainer, typeName); } } else if (propertyTypeName.equals("string")) { type = jClassContainer.owner().ref(String.class); } else if (propertyTypeName.equals("number")) { type = getNumberType(jClassContainer.owner(), ruleFactory.getGenerationConfig()); } else if (propertyTypeName.equals("integer")) { type = getIntegerType(jClassContainer.owner(), node, ruleFactory.getGenerationConfig()); } else if (propertyTypeName.equals("boolean")) { type = unboxIfNecessary(jClassContainer.owner().ref(Boolean.class), ruleFactory.getGenerationConfig()); } else if (propertyTypeName.equals("array")) { type = ruleFactory.getArrayRule().apply(nodeName, node, parent, jClassContainer.getPackage(), schema); } else { type = jClassContainer.owner().ref(Object.class); } if (!node.has("javaType") && !node.has("existingJavaType") && node.has("format")) { type = ruleFactory.getFormatRule().apply(nodeName, node.get("format"), node, type, schema); } else if (!node.has("javaType") && !node.has("existingJavaType") && propertyTypeName.equals("string") && node.has("media")) { type = ruleFactory.getMediaRule().apply(nodeName, node.get("media"), node, type, schema); } return type; }
// With usePrimitives enabled, schema type "integer" must map to primitive int.
@Test public void applyGeneratesIntegerPrimitive() { JPackage jpackage = new JCodeModel()._package(getClass().getPackage().getName()); ObjectNode objectNode = new ObjectMapper().createObjectNode(); objectNode.put("type", "integer"); when(config.isUsePrimitives()).thenReturn(true); JType result = rule.apply("fooBar", objectNode, null, jpackage, null); assertThat(result.fullName(), is("int")); }
// Picks the key or value writer schema from the configuration based on the
// wrapper class being serialized (AvroKey vs AvroValue), and builds an
// AvroSerializer around a datum writer from the configured data model.
// Any other wrapper type is a programming error -> IllegalStateException.
@Override public Serializer<AvroWrapper<T>> getSerializer(Class<AvroWrapper<T>> c) { Configuration conf = getConf(); Schema schema; if (AvroKey.class.isAssignableFrom(c)) { schema = getKeyWriterSchema(conf); } else if (AvroValue.class.isAssignableFrom(c)) { schema = getValueWriterSchema(conf); } else { throw new IllegalStateException("Only AvroKey and AvroValue are supported."); } GenericData dataModel = createDataModel(conf); DatumWriter<T> datumWriter = dataModel.createDatumWriter(schema); return new AvroSerializer<>(schema, datumWriter); }
// Round-trip check: the map-output key schema set on the job must surface as
// the writer schema of the serializer returned for AvroKey.
@Test void getSerializerForKey() throws IOException { // Set the writer schema in the job configuration. Schema writerSchema = Schema.create(Schema.Type.STRING); Job job = Job.getInstance(); AvroJob.setMapOutputKeySchema(job, writerSchema); // Get a serializer from the configuration. AvroSerialization serialization = ReflectionUtils.newInstance(AvroSerialization.class, job.getConfiguration()); @SuppressWarnings("unchecked") Serializer<AvroWrapper> serializer = serialization.getSerializer(AvroKey.class); assertTrue(serializer instanceof AvroSerializer); AvroSerializer avroSerializer = (AvroSerializer) serializer; // Check that the writer schema is set correctly on the serializer. assertEquals(writerSchema, avroSerializer.getWriterSchema()); }
// Static factory for the PeriodicSequence transform.
public static PeriodicSequence create() { return new PeriodicSequence(); }
// Runs a 250 ms periodic sequence over ~3 intervals and checks (a) the exact
// set of element timestamps produced and (b) that emission is actually spread
// over processing time (max-min processing-time gap > half an interval).
@Test @Category({ NeedsRunner.class, UsesImpulse.class, UsesStatefulParDo.class, UsesUnboundedPCollections.class, UsesUnboundedSplittableParDo.class }) public void testOutputsProperElements() { Instant startTime = Instant.now().plus(Duration.standardSeconds(2)); Duration interval = Duration.millis(250); long intervalMillis = interval.getMillis(); long duration = 3 * intervalMillis; Instant stopTime = startTime.plus(Duration.millis(duration)); PCollection<TimestampedValue<KV<Instant, Instant>>> result = p.apply(Create.of(new PeriodicSequence.SequenceDefinition(startTime, stopTime, interval))) .apply(PeriodicSequence.create()) .apply( Window.<Instant>into(FixedWindows.of(interval)) .withTimestampCombiner(TimestampCombiner.EARLIEST)) .apply(WithKeys.of("dummy")) .apply(GroupByKey.create()) .apply( MapElements.into(TypeDescriptor.of(Instant.class)) .via(e -> Iterables.getOnlyElement(e.getValue()))) .apply(ParDo.of(new ExtractTsDoFn<>())); // validate timestamps ArrayList<Instant> expectedResults = new ArrayList<>(); for (long i = 0; i <= duration; i += intervalMillis) { Instant el = startTime.plus(Duration.millis(i)); expectedResults.add(el); } PAssert.that(result) .satisfies( values -> { List<TimestampedValue<KV<Instant, Instant>>> sortedValues = Streams.stream(values) .sorted(Comparator.comparing(e -> e.getValue().getValue())) .collect(Collectors.toList()); assertEquals( expectedResults, sortedValues.stream() .map(e -> e.getValue().getValue()) .collect(Collectors.toList())); Instant minTs = sortedValues.stream() .min(Comparator.comparing(TimestampedValue::getTimestamp)) .get() .getTimestamp(); Instant maxTs = sortedValues.stream() .max(Comparator.comparing(TimestampedValue::getTimestamp)) .get() .getTimestamp(); final long expectedDiff = intervalMillis / 2; assertTrue( String.format( "Expected processing-time diff at least %d, got %d", expectedDiff, maxTs.getMillis() - minTs.getMillis()), maxTs.getMillis() - minTs.getMillis() > expectedDiff); return null; });
p.run().waitUntilFinish(); }
// Shared singleton empty Choice. The raw-typed cast silences the generic
// mismatch; presumably safe because NONE carries no values of T — confirm
// against the NONE declaration.
@SuppressWarnings("unchecked") public static <T> Choice<T> none() { return (Choice) NONE; }
// The empty Choice must stay absent and be returned as the same singleton
// instance from condition()/thenChoose().
@Test public void none() { assertThat(Choice.none().first()).isAbsent(); assertThat(Choice.none().condition(true)).isSameInstanceAs(Choice.none()); assertThat(Choice.none().condition(Predicates.alwaysTrue())).isSameInstanceAs(Choice.none()); assertThat(Choice.none().thenChoose(Functions.constant(Choice.of("foo")))) .isSameInstanceAs(Choice.none()); }
// Builds a Sink backed by the writeListP processor, targeting the IList with
// the given name; the sink's name embeds the list name for diagnostics.
@Nonnull public static <T> Sink<T> list(@Nonnull String listName) { return fromProcessor("listSink(" + listName + ')', writeListP(listName)); }
// setName on a sink stage must be reflected by name().
@Test public void when_setName_then_sinkHasIt() { //Given String sinkName = randomName(); String stageName = randomName(); SinkStage stage = p .readFrom(Sources.list(sinkName)) .writeTo(Sinks.list(sinkName)); //When stage = stage.setName(stageName); //Then assertEquals(stageName, stage.name()); }
/**
 * Returns this path's parent, i.e. the same path with its last component
 * removed. Paths with zero or one component have the empty path as parent.
 */
public PathSpec getParent()
{
  final int size = _path.size();
  return size > 1 ? new PathSpec(_path.subList(0, size - 1)) : emptyPath();
}
// Data-driven check: each path spec must report the expected parent.
@Test(dataProvider = "pathSpecsWithParent") public void testGetParent(PathSpec testPathSpec, PathSpec expectedParent) { Assert.assertEquals(testPathSpec.getParent(), expectedParent); }
// Builds a pool that scales between min and max threads over a ScalingQueue;
// the ForceQueuePolicy rejection handler presumably forces rejected tasks back
// onto the queue so submit never fails — confirm against its implementation.
public static ExecutorService newScalingThreadPool(int min, int max, long keepAliveTime) { ScalingQueue<Runnable> queue = new ScalingQueue<>(); ThreadPoolExecutor executor = new ScalingThreadPoolExecutor(min, max, keepAliveTime, TimeUnit.MILLISECONDS, queue); executor.setRejectedExecutionHandler(new ForceQueuePolicy()); return executor; }
// Submitting 10 blocking tasks against max=5 must grow the pool exactly to 5.
@Test public void testCreateThreadsUpToMax() { ThreadPoolExecutor executorService = (ThreadPoolExecutor) ScalingThreadPoolExecutor.newScalingThreadPool(0, 5, 500); for (int i = 0; i < 10; i++) { executorService.submit(getSleepingRunnable()); } assertEquals(executorService.getLargestPoolSize(), 5); }
/**
 * Aborts the stream: closes the underlying file channel (close failures are
 * logged, not thrown) and clears the reference. Idempotent — a no-op once
 * {@code fp} is null.
 */
@Override
public void abort() throws IOException {
  if (fp != null) {
    // Close quietly; cleanupWithLogger logs rather than propagates failures.
    IOUtils.cleanupWithLogger(LOG, fp);
    fp = null;
  }
}
// abort() must be idempotent: a second abort after the first is ignored.
@Test public void testEditLogFileOutputStreamAbortAbort() throws IOException { // abort after a close should just ignore EditLogFileOutputStream editLogStream = null; try { editLogStream = new EditLogFileOutputStream(conf, TEST_EDITS, 0); editLogStream.abort(); editLogStream.abort(); } finally { IOUtils.cleanupWithLogger(null, editLogStream); } }
// Reflectively instantiates clazz via its public no-arg constructor.
// If the class declares public constructors but none of them is no-arg, fails
// fast with a clear message; any reflective failure is wrapped in the
// project's InstantiationException (presumably unchecked — confirm).
public static <T> T instantiateClassDefConstructor(Class<T> clazz) { //if constructor present then it should have a no arg constructor //if not present then default constructor is already their Objects.requireNonNull(clazz, "class to instantiate should not be null"); if (clazz.getConstructors().length > 0 && Arrays.stream(clazz.getConstructors()).noneMatch(c -> c.getParameterCount() == 0)) { throw new InstantiationException( "Default constructor is required to create instance of public class: " + clazz .getName()); } try { return clazz.getConstructor().newInstance(); } catch (Exception e) { throw new InstantiationException(INSTANTIATION_ERROR_PREFIX + clazz.getName(), e); } }
// Happy path: a class with a public no-arg constructor instantiates fine.
@Test public void shouldInstantiateClassWithDefaultConstructor() { assertThat(ClassUtils.instantiateClassDefConstructor(DefaultConstructor.class)).isNotNull(); }
// Expands a select-star: rejects a star qualified with a different source
// alias, otherwise returns the schema's value columns in order (at this stage
// the value columns already include the key columns).
@Override public Stream<ColumnName> resolveSelectStar( final Optional<SourceName> sourceName ) { if (sourceName.isPresent() && !sourceName.equals(getSourceName())) { throw new IllegalArgumentException("Expected alias of " + getAlias() + ", but was " + sourceName.get()); } // Note: the 'value' columns include the key columns at this point: return orderColumns(getSchema().value(), getSchema()); }
// A star qualified with a non-matching alias must be rejected.
@Test public void shouldThrowOnResolveSelectStarIfWrongSourceName() { assertThrows( IllegalArgumentException.class, () -> node.resolveSelectStar(Optional.of(SourceName.of("wrong"))) ); }
// Tracks a Meizu push-notification open. If the extras carry JPush's
// "JMessageExtra" envelope (JPush delivering through the Meizu vendor
// channel), the real payload is unwrapped from m_content.n_extras and the
// event is attributed to "JPush" instead. All failures are logged, never
// propagated — tracking must not crash the host app.
public static void trackMeizuAppOpenNotification(String extras, String title, String content, String appPushServiceName) { if (!isTrackPushEnabled()) return; SALog.i(TAG, String.format("trackMeizuAppOpenNotification is called, title is %s, content is %s," + " extras is %s, appPushChannel is %s, appPushServiceName is %s", title, content, extras, "Meizu", appPushServiceName)); try { String nExtras = extras; try { JSONObject extrasJson = null; try { extrasJson = new JSONObject(extras); } catch (Exception e) { SALog.i(TAG, "Failed to construct JSON"); } //极光的魅族厂商通道 if (extrasJson != null && extrasJson.has("JMessageExtra")) { JSONObject jMessageJson = extrasJson.optJSONObject("JMessageExtra"); if (jMessageJson != null) { JSONObject contentJson = jMessageJson.optJSONObject("m_content"); if (contentJson != null) { nExtras = contentJson.optString("n_extras"); } } appPushServiceName = "JPush"; } } catch (Exception e) { SALog.printStackTrace(e); } String sfData = getSFData(nExtras); trackNotificationOpenedEvent(sfData, title, content, appPushServiceName, "Meizu"); } catch (Exception e) { SALog.printStackTrace(e); } }
// Smoke test: empty extras plus JPush service name must not throw.
@Test public void trackMeizuAppOpenNotification() throws InterruptedException { PushAutoTrackHelper.trackMeizuAppOpenNotification("", "mock_meizu", "mock_content", "JPush"); }
/**
 * Returns the elapsed time in milliseconds since the watch was started,
 * or 0 when the watch was never started (start <= 0).
 */
public long taken() {
    if (start <= 0) {
        return 0;
    }
    final long elapsedNanos = System.nanoTime() - start;
    return Duration.ofNanos(elapsedNanos).toMillis();
}
// After sleeping ~1 s, taken() must report at least 1000 ms and (allowing
// scheduling slack) less than 1500 ms.
@Test void testDurationMatchesExpect() throws InterruptedException { StopWatch stopWatch = new StopWatch(); Thread.sleep(Duration.ofSeconds(1).toMillis()); long taken = stopWatch.taken(); assertTrue(taken >= 1000, "Elapsed time should be equal to or greater than 1000 ms but was " + taken); assertTrue(taken < 1500, "Elapsed time should be smaller than 1500 ms but was " + taken); }
/**
 * Visits every child in order, short-circuiting as soon as a child yields a
 * control-flow result (BREAK, CONTINUE or RETURN). Otherwise returns the last
 * child's result, or NONE when there are no children.
 */
@Override
public SelResult childrenAccept(SelParserVisitor visitor, Object data) {
  SelResult result = SelResult.NONE;
  if (children == null) {
    return result;
  }
  for (int idx = 0; idx < children.length; ++idx) {
    result = (SelResult) children[idx].jjtAccept(visitor, data);
    switch (result) {
      case BREAK:
      case CONTINUE:
      case RETURN:
        // Control-flow results propagate immediately to the caller.
        return result;
      default:
        break;
    }
  }
  return result;
}
// Children are visited in index order; the break node at index 1 must stop
// traversal, so only children 0 and 3 (and the break node) are visited.
@Test public void testVisitedSomeDataNodeAndBreak() { root.jjtAddChild(dataNode, 2); root.jjtAddChild(breakNode, 1); root.jjtAddChild(dataNode, 0); SelResult res = root.childrenAccept(null, null); assertEquals(SelResult.BREAK, res); assertArrayEquals(new int[] {1, 0, 0, 1, 0}, visited); }
// Decodes a MepLbCreate from {"loopback": {...}}. Exactly one of remoteMepId
// or remoteMepMac selects the builder (id wins when both are present);
// optional numberMessages, vlanDropEligible, vlanPriority (used as an index
// into Priority.values()) and dataTlvHex are then applied.
// NOTE(review): json.get(LOOPBACK) is not null-checked — a payload without a
// "loopback" object would NPE on loopbackNode.get(...); confirm callers
// guarantee its presence. Also dataTlvHexNode is fetched but the value is
// re-read via loopbackNode.get(DATA_TLV_HEX) — redundant, though harmless.
@Override public MepLbCreate decode(ObjectNode json, CodecContext context) { if (json == null || !json.isObject()) { return null; } JsonNode loopbackNode = json.get(LOOPBACK); JsonNode remoteMepIdNode = loopbackNode.get(REMOTE_MEP_ID); JsonNode remoteMepMacNode = loopbackNode.get(REMOTE_MEP_MAC); MepLbCreate.MepLbCreateBuilder lbCreateBuilder; if (remoteMepIdNode != null) { MepId remoteMepId = MepId.valueOf((short) remoteMepIdNode.asInt()); lbCreateBuilder = DefaultMepLbCreate.builder(remoteMepId); } else if (remoteMepMacNode != null) { MacAddress remoteMepMac = MacAddress.valueOf( remoteMepMacNode.asText()); lbCreateBuilder = DefaultMepLbCreate.builder(remoteMepMac); } else { throw new IllegalArgumentException( "Either a remoteMepId or a remoteMepMac"); } JsonNode numMessagesNode = loopbackNode.get(NUMBER_MESSAGES); if (numMessagesNode != null) { int numMessages = numMessagesNode.asInt(); lbCreateBuilder.numberMessages(numMessages); } JsonNode vlanDropEligibleNode = loopbackNode.get(VLAN_DROP_ELIGIBLE); if (vlanDropEligibleNode != null) { boolean vlanDropEligible = vlanDropEligibleNode.asBoolean(); lbCreateBuilder.vlanDropEligible(vlanDropEligible); } JsonNode vlanPriorityNode = loopbackNode.get(VLAN_PRIORITY); if (vlanPriorityNode != null) { short vlanPriority = (short) vlanPriorityNode.asInt(); lbCreateBuilder.vlanPriority(Priority.values()[vlanPriority]); } JsonNode dataTlvHexNode = loopbackNode.get(DATA_TLV_HEX); if (dataTlvHexNode != null) { String dataTlvHex = loopbackNode.get(DATA_TLV_HEX).asText(); if (!dataTlvHex.isEmpty()) { lbCreateBuilder.dataTlv(HexString.fromHexString(dataTlvHex)); } } return lbCreateBuilder.build(); }
// Full decode by remoteMepId: all optional fields must round-trip and the MAC
// address must stay unset.
@Test public void testDecodeMepLbCreateMepId() throws JsonProcessingException, IOException { String loopbackString = "{\"loopback\": { \"remoteMepId\": 20," + "\"numberMessages\": 10, \"vlanDropEligible\": true," + "\"vlanPriority\": 6, \"dataTlvHex\": \"0A:BB:CC\" }}"; InputStream input = new ByteArrayInputStream( loopbackString.getBytes(StandardCharsets.UTF_8)); JsonNode cfg = mapper.readTree(input); MepLbCreate mepLbCreate = context .codec(MepLbCreate.class).decode((ObjectNode) cfg, context); assertNull(mepLbCreate.remoteMepAddress()); assertEquals(20, mepLbCreate.remoteMepId().id().shortValue()); assertEquals(10, mepLbCreate.numberMessages().intValue()); assertEquals(6, mepLbCreate.vlanPriority().ordinal()); assertEquals(true, mepLbCreate.vlanDropEligible()); assertEquals("0A:BB:CC".toLowerCase(), mepLbCreate.dataTlvHex()); }
/**
 * Persists the given customer's money amount, matching the row by name.
 *
 * @param customer customer whose MONEY column should be written
 * @throws SQLException if obtaining the connection or executing the update fails
 */
@Override
public void update(Customer customer) throws SQLException {
  final var sql = "update CUSTOMERS set money = ? where name = ?;";
  try (var connection = dataSource.getConnection();
       var statement = connection.prepareStatement(sql)) {
    statement.setBigDecimal(1, customer.getMoney().getAmount());
    statement.setString(2, customer.getName());
    statement.executeUpdate();
  }
}
// Updates an existing row and verifies exactly one row remains, with the new
// money value read back from the database.
@Test void shouldUpdateCustomer() throws SQLException { TestUtils.executeSQL(INSERT_CUSTOMER_SQL, dataSource); customer.setMoney(Money.of(CurrencyUnit.USD, 99)); customerDao.update(customer); try (var connection = dataSource.getConnection(); var statement = connection.createStatement(); ResultSet rs = statement.executeQuery(SELECT_CUSTOMERS_SQL)) { assertTrue(rs.next()); assertEquals(customer.getName(), rs.getString("name")); assertEquals(customer.getMoney(), Money.of(USD, rs.getBigDecimal("money"))); assertFalse(rs.next()); } }
// Imports service definitions from a repository URL: http(s) URLs are
// downloaded (honoring the secret and SSL-validation flag, and keeping the
// response headers for relative-reference resolution), anything else is
// treated as a local file path. Delegates to the file-based overload with a
// ReferenceResolver rooted at the repository URL.
public List<Service> importServiceDefinition(String repositoryUrl, Secret repositorySecret, boolean disableSSLValidation, boolean mainArtifact) throws MockRepositoryImportException { log.info("Importing service definitions from {}", repositoryUrl); File localFile = null; Map<String, List<String>> fileProperties = null; if (repositoryUrl.startsWith("http")) { try { HTTPDownloader.FileAndHeaders fileAndHeaders = HTTPDownloader .handleHTTPDownloadToFileAndHeaders(repositoryUrl, repositorySecret, disableSSLValidation); localFile = fileAndHeaders.getLocalFile(); fileProperties = fileAndHeaders.getResponseHeaders(); } catch (IOException ioe) { throw new MockRepositoryImportException(repositoryUrl + " cannot be downloaded", ioe); } } else { // Simply build localFile from repository url. localFile = new File(repositoryUrl); } RelativeReferenceURLBuilder referenceURLBuilder = RelativeReferenceURLBuilderFactory .getRelativeReferenceURLBuilder(fileProperties); String artifactName = referenceURLBuilder.getFileName(repositoryUrl, fileProperties); // Initialize a reference resolver to the folder of this repositoryUrl. ReferenceResolver referenceResolver = new ReferenceResolver(repositoryUrl, repositorySecret, disableSSLValidation, referenceURLBuilder); return importServiceDefinition(localFile, referenceResolver, new ArtifactInfo(artifactName, mainArtifact)); }
// End-to-end import of a local OpenAPI artifact: verifies the service's
// attributes, its single operation, and that every stored resource, request
// and response is tagged with the source artifact name.
@Test void testImportServiceDefinition() { List<Service> services = null; try { File artifactFile = new File("target/test-classes/io/github/microcks/service/weather-forecast-openapi.yaml"); services = service.importServiceDefinition(artifactFile, null, new ArtifactInfo("weather-forecast-openapi.yaml", true)); } catch (MockRepositoryImportException mrie) { mrie.printStackTrace(); fail("No MockRepositoryImportException should have be thrown"); } assertNotNull(services); assertEquals(1, services.size()); // Inspect Service own attributes. Service importedSvc = services.get(0); assertEquals("WeatherForecast API", importedSvc.getName()); assertEquals("1.0.0", importedSvc.getVersion()); assertEquals("weather-forecast-openapi.yaml", importedSvc.getSourceArtifact()); assertNotNull(importedSvc.getMetadata()); assertEquals(1, importedSvc.getOperations().size()); assertEquals("GET /forecast/{region}", importedSvc.getOperations().get(0).getName()); assertEquals(5, importedSvc.getOperations().get(0).getResourcePaths().size()); // Inspect and check resources. List<Resource> resources = resourceRepository.findByServiceId(importedSvc.getId()); assertEquals(1, resources.size()); Resource resource = resources.get(0); assertEquals("WeatherForecast API-1.0.0.yaml", resource.getName()); assertEquals("weather-forecast-openapi.yaml", resource.getSourceArtifact()); // Inspect and check requests. List<Request> requests = requestRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(5, requests.size()); for (Request request : requests) { assertEquals("weather-forecast-openapi.yaml", request.getSourceArtifact()); } // Inspect and check responses.
List<Response> responses = responseRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(5, responses.size()); for (Response response : responses) { assertEquals("weather-forecast-openapi.yaml", response.getSourceArtifact()); } }
// Parses the SQL statement into (sink table, RelNode tree), then derives
// column-level lineage from the relational tree via RelMetadataQuery.
public List<LineageRel> analyzeLineage(String statement) { // 1. Generate original relNode tree Tuple2<String, RelNode> parsed = parseStatement(statement); String sinkTable = parsed.getField(0); RelNode oriRelNode = parsed.getField(1); // 2. Build lineage based from RelMetadataQuery return buildFiledLineageResult(sinkTable, oriRelNode); }
// Columns built from concatenations must yield one lineage row per source
// column feeding each target column, including the transform expression.
@Test public void testAnalyzeLineage() { String sql = "INSERT INTO TT SELECT a||c A ,b||c B FROM ST"; String[][] expectedArray = { {"ST", "a", "TT", "A", "||(a, c)"}, {"ST", "c", "TT", "A", "||(a, c)"}, {"ST", "b", "TT", "B", "||(b, c)"}, {"ST", "c", "TT", "B", "||(b, c)"} }; analyzeLineage(sql, expectedArray); }
// Publishes the boolean via releaseShared using the int encodings TRUE/FALSE,
// presumably waking threads blocked on the underlying synchronizer — confirm
// against the enclosing class's AQS implementation.
public void set(boolean value) { releaseShared(value ? TRUE : FALSE); }
// Ignored micro-benchmark comparing BlockingBoolean.set against
// MutableBoolean.setValue; prints raw timings, asserts nothing.
@Test @Ignore public void samePerf() { int iters = 10_000; BlockingBoolean b1 = new BlockingBoolean(false); long t1 = System.nanoTime(); for (int i = 0; i < iters; i++) { b1.set(false); } long t2 = System.nanoTime(); MutableBoolean b2 = new MutableBoolean(false); for (int i = 0; i < iters; i++) { b2.setValue(false); } long t3 = System.nanoTime(); System.out.println((t2 - t1) + " " + (t3 - t2) + " " + ((t2 - t1) <= (t3 - t2))); }
// Routes an application carrying the 3-part (node, rack, ANY) request shape
// to the SubCluster owning the named node; requests with no locality (or any
// validation failure) fail forward to the WeightedRandomRouterPolicy parent,
// in the failure case after overwriting the AM requests with a synthesized
// relaxed-locality ANY request.
// NOTE(review): validSubClusters is the live keySet() of the map returned by
// getActiveSubclusters(); removeAll(blackList) mutates that map through the
// view — confirm the map is a defensive copy, otherwise copy the key set
// (new HashSet<>(...)) before removing.
// NOTE(review): the trailing 'continue' in the resolve loop is a no-op, and a
// request matching both node and rack resolution keeps the last assignment.
@Override public SubClusterId getHomeSubcluster( ApplicationSubmissionContext appSubmissionContext, List<SubClusterId> blackListSubClusters) throws YarnException { // null checks and default-queue behavior validate(appSubmissionContext); List<ResourceRequest> rrList = appSubmissionContext.getAMContainerResourceRequests(); // Fast path for FailForward to WeightedRandomRouterPolicy if (rrList == null || rrList.isEmpty() || (rrList.size() == 1 && ResourceRequest.isAnyLocation(rrList.get(0).getResourceName()))) { return super.getHomeSubcluster(appSubmissionContext, blackListSubClusters); } if (rrList.size() != 3) { throw new FederationPolicyException( "Invalid number of resource requests: " + rrList.size()); } Map<SubClusterId, SubClusterInfo> activeSubClusters = getActiveSubclusters(); Set<SubClusterId> validSubClusters = activeSubClusters.keySet(); FederationPolicyUtils.validateSubClusterAvailability(activeSubClusters.keySet(), blackListSubClusters); if (blackListSubClusters != null) { // Remove from the active SubClusters from StateStore the blacklisted ones validSubClusters.removeAll(blackListSubClusters); } try { // With three requests, this has been processed by the // ResourceRequestInterceptorREST, and should have // node, rack, and any SubClusterId targetId = null; ResourceRequest nodeRequest = null; ResourceRequest rackRequest = null; ResourceRequest anyRequest = null; for (ResourceRequest rr : rrList) { // Handle "node" requests try { targetId = resolver.getSubClusterForNode(rr.getResourceName()); nodeRequest = rr; } catch (YarnException e) { LOG.error("Cannot resolve node : {}.", e.getMessage()); } // Handle "rack" requests try { resolver.getSubClustersForRack(rr.getResourceName()); rackRequest = rr; } catch (YarnException e) { LOG.error("Cannot resolve rack : {}.", e.getMessage()); } // Handle "ANY" requests if (ResourceRequest.isAnyLocation(rr.getResourceName())) { anyRequest = rr; continue; } } if (nodeRequest == null) { throw new YarnException("Missing 
node request."); } if (rackRequest == null) { throw new YarnException("Missing rack request."); } if (anyRequest == null) { throw new YarnException("Missing any request."); } LOG.info("Node request: {} , Rack request: {} , Any request: {}.", nodeRequest.getResourceName(), rackRequest.getResourceName(), anyRequest.getResourceName()); // Handle "node" requests if (validSubClusters.contains(targetId) && enabledSCs .contains(targetId)) { LOG.info("Node {} is in SubCluster: {}.", nodeRequest.getResourceName(), targetId); return targetId; } else { throw new YarnException("The node " + nodeRequest.getResourceName() + " is in a blacklist SubCluster or not active. "); } } catch (YarnException e) { LOG.error("Validating resource requests failed, " + "Falling back to WeightedRandomRouterPolicy placement : {}.", e.getMessage()); // FailForward to WeightedRandomRouterPolicy // Overwrite request to use a default ANY ResourceRequest amReq = Records.newRecord(ResourceRequest.class); amReq.setPriority(appSubmissionContext.getPriority()); amReq.setResourceName(ResourceRequest.ANY); amReq.setCapability(appSubmissionContext.getResource()); amReq.setNumContainers(1); amReq.setRelaxLocality(true); amReq.setNodeLabelExpression(appSubmissionContext.getNodeLabelExpression()); amReq.setExecutionTypeRequest(ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED)); appSubmissionContext.setAMContainerResourceRequests(Collections.singletonList(amReq)); return super.getHomeSubcluster(appSubmissionContext, blackListSubClusters); } }
// With a well-formed (node, rack, ANY) triple, routing must land on node1's
// SubCluster when it is active, and on some active SubCluster regardless.
@Test public void testNodeInActiveSubCluster() throws YarnException { List<ResourceRequest> requests = new ArrayList<ResourceRequest>(); requests.add(ResourceRequest .newInstance(Priority.UNDEFINED, "node1", Resource.newInstance(10, 1), 1)); requests.add(ResourceRequest .newInstance(Priority.UNDEFINED, "rack1", Resource.newInstance(10, 1), 1)); requests.add(ResourceRequest .newInstance(Priority.UNDEFINED, ResourceRequest.ANY, Resource.newInstance(10, 1), 1)); ApplicationSubmissionContext asc = ApplicationSubmissionContext .newInstance(null, null, null, null, null, false, false, 0, Resources.none(), null, false, null, null); asc.setAMContainerResourceRequests(requests); SubClusterId chosen = ((FederationRouterPolicy) getPolicy()).getHomeSubcluster(asc, null); // If node1 is active, we should choose the sub cluster with node1 if (getActiveSubclusters().containsKey( getFederationPolicyContext().getFederationSubclusterResolver() .getSubClusterForNode("node1").getId())) { Assert.assertEquals( getFederationPolicyContext().getFederationSubclusterResolver() .getSubClusterForNode("node1"), chosen); } // Regardless, we should choose an active SubCluster Assert.assertTrue(getActiveSubclusters().containsKey(chosen)); }
// Rejects URIs whose remaining part contains a path separator (the remaining
// part is expected to be a bare Splunk host:port), then builds the endpoint,
// applies URI parameters, and records the remaining part as the Splunk URL.
@Override protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception { if (remaining.split("/").length > 1) { throw new IllegalArgumentException("Invalid URI: " + URISupport.sanitizeUri(uri)); } SplunkHECEndpoint answer = new SplunkHECEndpoint(uri, this, new SplunkHECConfiguration()); setProperties(answer, parameters); answer.setSplunkURL(remaining); return answer; }
// An empty URI must be rejected with some exception during endpoint creation.
@Test public void testInvalidEndpoint() { assertThrows(Exception.class, () -> component.createEndpoint("")); }
// Exposes the internal KsqlConfig reference; the SpotBugs EI_EXPOSE_REP
// (internal representation exposure) warning is deliberately suppressed.
@Override @SuppressFBWarnings(value = "EI_EXPOSE_REP") public KsqlConfig getKsqlConfig() { return ksqlConfig; }
// Write-then-read flow: the config log is drained before and after the
// produce, and the merged config must reflect the written properties.
@Test public void shouldReadConfigAfterWrite() { // Given: expectRead(consumerBefore); addPollResult(KafkaConfigStore.CONFIG_MSG_KEY, savedProperties, properties); expectRead(consumerAfter); // When: final KsqlConfig mergedConfig = getKsqlConfig(); // Then: verifyDrainLog(consumerBefore, 0); verifyProduce(); verifyDrainLog(consumerAfter, 1); verifyMergedConfig(mergedConfig); }
// Logs a logout failure (error message plus client IPs) at DEBUG only; the
// early return avoids touching the request at higher log levels. The error
// message is validated eagerly even when nothing will be logged.
@Override public void logoutFailure(HttpRequest request, String errorMessage) { checkRequest(request); requireNonNull(errorMessage, "error message can't be null"); if (!LOGGER.isDebugEnabled()) { return; } LOGGER.debug("logout failure [error|{}][IP|{}|{}]", emptyIfNull(errorMessage), request.getRemoteAddr(), getAllIps(request)); }
// At INFO level the method must return before reading anything off the
// request (beyond the initial null check on the mock itself).
@Test public void logout_does_not_interact_with_request_if_log_level_is_above_DEBUG() { HttpRequest request = mock(HttpRequest.class); logTester.setLevel(Level.INFO); underTest.logoutFailure(request, "bad csrf"); verifyNoInteractions(request); }
// Loads the configured broker-set mapping (I/O failure is wrapped in
// BrokerSetResolutionException) and lets the assignment policy place any
// brokers in the cluster model that the data left unmapped.
@Override public Map<String, Set<Integer>> brokerIdsByBrokerSetId(ClusterModel clusterModel) throws BrokerSetResolutionException { Map<String, Set<Integer>> brokerIdsByBrokerSetId; try { brokerIdsByBrokerSetId = loadBrokerSetData(); } catch (IOException e) { throw new BrokerSetResolutionException(e.getMessage()); } return _brokerSetAssignmentPolicy.assignBrokerSetsForUnresolvedBrokers(clusterModel, brokerIdsByBrokerSetId); }
// The no-op policy must leave unmapped broker 6 under the dedicated
// UNMAPPED_BROKER_SET_ID bucket while Blue/Green keep their configured sets.
@Test public void testBrokerSetResolutionWithNoOpDefaultAssignmentPolicy() throws BrokerSetResolutionException { BrokerSetResolver brokerSetResolver = getBrokerSetResolver("testBrokerSets.json", this.getClass()); final Map<String, Set<Integer>> brokerSets = brokerSetResolver.brokerIdsByBrokerSetId( BrokerSetResolutionHelper.getRackIdByBrokerIdMapping(DeterministicCluster.brokerSetUnSatisfiable3())); assertNotNull(brokerSets); assertTrue(brokerSets.containsKey("Blue")); assertTrue(brokerSets.containsKey("Green")); assertTrue(brokerSets.containsKey(NoOpBrokerSetAssignmentPolicy.UNMAPPED_BROKER_SET_ID)); assertEquals(Set.of(0, 1, 2), brokerSets.get("Blue")); assertEquals(Set.of(3, 4, 5), brokerSets.get("Green")); assertEquals(Set.of(6), brokerSets.get(NoOpBrokerSetAssignmentPolicy.UNMAPPED_BROKER_SET_ID)); }
// Compares the value stored at 'index' against 'valueToCompare' without
// materializing the stored bytes when avoidable. End offsets live in
// fixed-size offset buffers (one int each, hence the << 2); equal consecutive
// offsets encode an empty value. A value whose start and end fall in
// different value buffers must begin at offset 0 of the end buffer, which is
// why its length is recovered from (valueEndOffset & VALUE_BUFFER_MASK)
// rather than from the offset difference. Length is checked first as a cheap
// reject; then small values are compared byte-by-byte straight from the
// buffer and larger ones are bulk-copied out before comparing.
@SuppressWarnings("Duplicates") public boolean equalsValueAt(int index, byte[] valueToCompare) { assert index < _numValues; int offsetBufferIndex = index >>> OFFSET_BUFFER_SHIFT_OFFSET; PinotDataBuffer offsetBuffer = _offsetBuffers.get(offsetBufferIndex); int offsetIndex = index & OFFSET_BUFFER_MASK; int previousValueEndOffset = offsetBuffer.getInt(offsetIndex << 2); int valueEndOffset = offsetBuffer.getInt((offsetIndex + 1) << 2); int inputValueLength = valueToCompare.length; if (previousValueEndOffset == valueEndOffset) { return inputValueLength == 0; } // Check value length first int valueBufferIndex = (valueEndOffset - 1) >>> VALUE_BUFFER_SHIFT_OFFSET; int startOffsetInValueBuffer; if ((previousValueEndOffset - 1) >>> VALUE_BUFFER_SHIFT_OFFSET != valueBufferIndex) { // The first value in the value buffer if ((valueEndOffset & VALUE_BUFFER_MASK) != inputValueLength) { return false; } startOffsetInValueBuffer = 0; } else { // Not the first value in the value buffer if (valueEndOffset - previousValueEndOffset != inputValueLength) { return false; } startOffsetInValueBuffer = previousValueEndOffset & VALUE_BUFFER_MASK; } // Value length matches, check value PinotDataBuffer valueBuffer = _valueBuffers.get(valueBufferIndex); if (inputValueLength <= PinotDataBuffer.BULK_BYTES_PROCESSING_THRESHOLD) { for (int i = 0; i < inputValueLength; i++) { if (valueToCompare[i] != valueBuffer.getByte(startOffsetInValueBuffer + i)) { return false; } } return true; } else { byte[] value = new byte[inputValueLength]; valueBuffer.copyTo(startOffsetInValueBuffer, value); for (int i = 0; i < inputValueLength; i++) { if (valueToCompare[i] != value[i]) { return false; } } return true; } }
// Randomized positive/negative checks: each stored value must equal itself
// and differ from a distinct value at another index.
@Test public void testEqualsValueAt() throws Exception { try (OffHeapMutableBytesStore offHeapMutableBytesStore = new OffHeapMutableBytesStore(_memoryManager, null)) { for (int i = 0; i < NUM_VALUES; i++) { offHeapMutableBytesStore.add(_values[i]); } for (int i = 0; i < NUM_VALUES; i++) { int index = RANDOM.nextInt(NUM_VALUES); assertTrue(offHeapMutableBytesStore.equalsValueAt(index, _values[index])); if (!Arrays.equals(_values[index], _values[0])) { assertFalse(offHeapMutableBytesStore.equalsValueAt(0, _values[index])); assertFalse(offHeapMutableBytesStore.equalsValueAt(index, _values[0])); } } } }
// FEEL number(from, grouping separator, decimal separator): validates the
// separators (group may be space/dot/comma, decimal dot/comma, and the two
// must differ), strips grouping characters, normalizes the decimal character
// to '.', then parses to BigDecimal; a failed parse yields an error result.
// The "\\" + group regex relies on Java allowing a backslash-escape of any
// non-alphanumeric character (including space).
// NOTE(review): the 'from != null' guard before the final error is dead code —
// from was null-checked on entry and only reassigned via replaceAll.
public FEELFnResult<BigDecimal> invoke(@ParameterName("from") String from, @ParameterName("grouping separator") String group, @ParameterName("decimal separator") String decimal) { if ( from == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "from", "cannot be null")); } if ( group != null && !group.equals( " " ) && !group.equals( "." ) && !group.equals( "," ) ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "group", "not a valid one, can only be one of: dot ('.'), comma (','), space (' ') ")); } if ( decimal != null ) { if (!decimal.equals( "." ) && !decimal.equals( "," )) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "decimal", "not a valid one, can only be one of: dot ('.'), comma (',') ")); } else if (group != null && decimal.equals( group )) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "decimal", "cannot be the same as parameter 'group' ")); } } if ( group != null ) { from = from.replaceAll( "\\" + group, "" ); } if ( decimal != null ) { from = from.replaceAll( "\\" + decimal, "." ); } BigDecimal result = NumberEvalHelper.getBigDecimalOrNull(from ); if( from != null && result == null ) { // conversion failed return FEELFnResult.ofError( new InvalidParametersEvent(Severity.ERROR, "unable to calculate final number result" ) ); } else { return FEELFnResult.ofResult( result ); } }
// Leading zeros must be ignored: "009876" parses to the numeric value 9876.
@Test
void invokeNumberWithLeadingZeros() {
    FunctionTestUtil.assertResult(numberFunction.invoke("009876", null, null), BigDecimal.valueOf(9876));
}
/**
 * Builds a fully wired FEEL 1.1 ANTLR parser for the given source expression.
 * Registers the additional functions in the built-in scope, installs the FEEL
 * error handler/listener, pre-loads the input variables as symbols and, when
 * provided, attaches the type registry.
 */
public static FEEL_1_1Parser parse(FEELEventListenersManager eventsManager, String source, Map<String, Type> inputVariableTypes, Map<String, Object> inputVariables, Collection<FEELFunction> additionalFunctions, List<FEELProfile> profiles, FEELTypeRegistry typeRegistry) {
    CharStream input = CharStreams.fromString(source);
    FEEL_1_1Lexer lexer = new FEEL_1_1Lexer( input );
    CommonTokenStream tokens = new CommonTokenStream( lexer );
    FEEL_1_1Parser parser = new FEEL_1_1Parser( tokens );
    ParserHelper parserHelper = new ParserHelper(eventsManager);
    // Additional functions become symbols in the built-in scope so they resolve by name.
    additionalFunctions.forEach(f -> parserHelper.getSymbolTable().getBuiltInScope().define(f.getSymbol()));
    parser.setHelper(parserHelper);
    parser.setErrorHandler( new FEELErrorHandler() );
    parser.removeErrorListeners(); // removes the error listener that prints to the console
    parser.addErrorListener( new FEELParserErrorListener( eventsManager ) );
    // pre-loads the parser with symbols
    defineVariables( inputVariableTypes, inputVariables, parser );
    if (typeRegistry != null) {
        parserHelper.setTypeRegistry(typeRegistry);
    }
    return parser;
}
// Parses a qualified-name function invocation with positional parameters and
// verifies the resulting AST node types: the name is a QualifiedNameNode and the
// two params are an infix expression (x+10) and a string literal ("foo").
@Test
void functionInvocationPositionalParams() {
    String inputExpression = "my.test.Function( x+10, \"foo\" )";
    BaseNode functionBase = parse( inputExpression );
    assertThat( functionBase).isInstanceOf(FunctionInvocationNode.class);
    assertThat( functionBase.getText()).isEqualTo(inputExpression);
    FunctionInvocationNode function = (FunctionInvocationNode) functionBase;
    assertThat( function.getName()).isInstanceOf(QualifiedNameNode.class);
    assertThat( function.getName().getText()).isEqualTo("my.test.Function");
    assertThat( function.getParams()).isInstanceOf(ListNode.class);
    assertThat( function.getParams().getElements()).hasSize(2);
    assertThat( function.getParams().getElements().get( 0 )).isInstanceOf(InfixOpNode.class);
    assertThat( function.getParams().getElements().get( 1 )).isInstanceOf(StringNode.class);
}
// Pure delegation to the superclass implementation. NOTE(review): the override
// adds no behavior here — presumably kept to surface the method on this subject
// type (e.g. for docs/visibility); confirm against the enclosing class.
@Override
public void isEqualTo(@Nullable Object expected) {
    super.isEqualTo(expected);
}
// Cross-checks the hand-picked double constants against Math.nextAfter: each
// TOLERABLE_*/INTOLERABLE_* constant must be exactly one ULP away from the
// corresponding value +/- DEFAULT_TOLERANCE in the expected direction.
@Test
@GwtIncompatible("Math.nextAfter")
public void testDoubleConstants_matchNextAfter() {
    assertThat(nextAfter(2.0 + DEFAULT_TOLERANCE, NEGATIVE_INFINITY)).isEqualTo(TOLERABLE_2);
    assertThat(nextAfter(2.2 + DEFAULT_TOLERANCE, NEGATIVE_INFINITY)).isEqualTo(TOLERABLE_2POINT2);
    assertThat(nextAfter(2.2 + DEFAULT_TOLERANCE, POSITIVE_INFINITY))
        .isEqualTo(INTOLERABLE_2POINT2);
    assertThat(nextAfter(2.2, POSITIVE_INFINITY)).isEqualTo(OVER_2POINT2);
    assertThat(nextAfter(3.3 + DEFAULT_TOLERANCE, NEGATIVE_INFINITY)).isEqualTo(TOLERABLE_3POINT3);
    assertThat(nextAfter(3.3 + DEFAULT_TOLERANCE, POSITIVE_INFINITY))
        .isEqualTo(INTOLERABLE_3POINT3);
    assertThat(nextAfter((double) Long.MIN_VALUE, NEGATIVE_INFINITY)).isEqualTo(UNDER_MIN_OF_LONG);
}
/**
 * Reacts to a change in the server list: if the currently connected server is no
 * longer present in the latest list, asynchronously switches to another server.
 * Does nothing when there is no current connection or no server info on it.
 */
public void onServerListChange() {
    if (currentConnection != null && currentConnection.serverInfo != null) {
        ServerInfo serverInfo = currentConnection.serverInfo;
        boolean found = false;
        // Address comparison is case-insensitive; each list entry is resolved first.
        for (String serverAddress : serverListFactory.getServerList()) {
            if (resolveServerInfo(serverAddress).getAddress().equalsIgnoreCase(serverInfo.getAddress())) {
                found = true;
                break;
            }
        }
        if (!found) {
            LoggerUtils.printIfInfoEnabled(LOGGER,
                    "Current connected server {} is not in latest server list, switch switchServerAsync",
                    serverInfo.getAddress());
            switchServerAsync();
        }
    }
}
// When the current connection's serverInfo is null (the mocked Connection returns
// null), onServerListChange must not enqueue a reconnection signal: the queue
// size is unchanged.
@Test
void testOnServerListChangeWhenServiceInfoIsNullThenDoNothing() throws IllegalAccessException {
    int beforeSize = ((Queue<?>) reconnectionSignalField.get(rpcClient)).size();
    rpcClient.currentConnection = mock(Connection.class);
    rpcClient.onServerListChange();
    int afterSize = ((Queue<?>) reconnectionSignalField.get(rpcClient)).size();
    assertEquals(beforeSize, afterSize);
}
/**
 * Derives the default CPU resource for Yarn from the configuration.
 * Falls back to {@code yarn.containers.vcores} when no explicit CPU cores are
 * configured, rounds fractional core counts up to the nearest positive integer
 * (Yarn only supports integer vcores), and rejects values beyond Integer.MAX_VALUE.
 */
@VisibleForTesting
static CPUResource getDefaultCpus(final Configuration configuration) {
    int fallback = configuration.get(YarnConfigOptions.VCORES);
    double cpuCoresDouble = TaskExecutorProcessUtils.getCpuCoresWithFallback(configuration, fallback)
            .getValue()
            .doubleValue();
    @SuppressWarnings("NumericCastThatLosesPrecision")
    // ceil + max(…, 1) guarantees a positive integer vcore count.
    long cpuCoresLong = Math.max((long) Math.ceil(cpuCoresDouble), 1L);
    //noinspection FloatingPointEquality
    if (cpuCoresLong != cpuCoresDouble) {
        LOG.info(
                "The amount of cpu cores must be a positive integer on Yarn. Rounding {} up to the closest positive integer {}.",
                cpuCoresDouble,
                cpuCoresLong);
    }
    if (cpuCoresLong > Integer.MAX_VALUE) {
        throw new IllegalConfigurationException(
                String.format(
                        "The amount of cpu cores %d cannot exceed Integer.MAX_VALUE: %d",
                        cpuCoresLong, Integer.MAX_VALUE));
    }
    //noinspection NumericCastThatLosesPrecision
    return new CPUResource(cpuCoresLong);
}
// With only the number of task slots configured (3), the default CPU resource
// must fall back to one core per slot, i.e. 3.0 cores.
@Test
void testGetCpuCoresNumSlots() {
    final Configuration configuration = new Configuration();
    configuration.set(TaskManagerOptions.NUM_TASK_SLOTS, 3);
    assertThat(YarnWorkerResourceSpecFactory.getDefaultCpus(configuration))
            .isEqualTo(new CPUResource(3.0));
}
// Always returns null — by the interface convention this presumably signals that
// this transform produces no null values for the block; TODO confirm against the
// TransformFunction contract.
@Nullable
@Override
public RoaringBitmap getNullBitmap(ValueBlock valueBlock) {
    return null;
}
// The IS DISTINCT FROM transform never produces nulls, so getNullBitmap on a
// projection block must return null even when one input column is nullable.
@Test
public void testGetNullBitmapReturnsNull() {
    ExpressionContext isDistinctFromExpression =
        RequestContextUtils.getExpression(String.format(_expression, INT_SV_NULL_COLUMN, INT_SV_COLUMN));
    TransformFunction isDistinctFromTransformFunction =
        TransformFunctionFactory.get(isDistinctFromExpression, _dataSourceMap);
    Assert.assertNull(isDistinctFromTransformFunction.getNullBitmap(_projectionBlock));
}
/**
 * Returns a live {@link RSet} view over the values mapped to {@code key}.
 * Mutating operations on the returned set are redirected to this multimap so the
 * multimap's bookkeeping (the key hash entry) stays consistent; TTL/rename
 * operations are unsupported on the view.
 */
@Override
public RSet<V> get(final K key) {
    String keyHash = keyHash(key);
    final String setName = getValuesName(keyHash);
    return new RedissonSet<V>(codec, commandExecutor, setName, null) {
        @Override
        public RFuture<Boolean> addAsync(V value) {
            // Route through the multimap so the key registry is updated too.
            return RedissonSetMultimap.this.putAsync(key, value);
        }
        @Override
        public RFuture<Boolean> addAllAsync(Collection<? extends V> c) {
            return RedissonSetMultimap.this.putAllAsync(key, c);
        }
        @Override
        public RFuture<Boolean> removeAsync(Object value) {
            return RedissonSetMultimap.this.removeAsync(key, value);
        }
        @Override
        public RFuture<Boolean> removeAllAsync(Collection<?> c) {
            if (c.isEmpty()) {
                return new CompletableFutureWrapper<>(false);
            }
            // ARGV[1] is the encoded map key; the values to remove follow it.
            List<Object> args = new ArrayList<Object>(c.size() + 1);
            args.add(encodeMapKey(key));
            encode(args, c);
            // Lua: srem in chunks of 5000 (unpack has an argument limit); if the
            // value set becomes empty, drop the key from the multimap hash.
            return commandExecutor.evalWriteAsync(RedissonSetMultimap.this.getRawName(), codec, RedisCommands.EVAL_BOOLEAN_AMOUNT,
                    "local count = 0;" +
                    "for i=2, #ARGV, 5000 do " +
                        "count = count + redis.call('srem', KEYS[2], unpack(ARGV, i, math.min(i+4999, table.getn(ARGV)))) " +
                    "end; " +
                    "if count > 0 then " +
                        "if redis.call('scard', KEYS[2]) == 0 then " +
                            "redis.call('hdel', KEYS[1], ARGV[1]); " +
                        "end; " +
                        "return 1;" +
                    "end;" +
                    "return 0; ",
                    Arrays.<Object>asList(RedissonSetMultimap.this.getRawName(), setName), args.toArray());
        }
        @Override
        public RFuture<Boolean> deleteAsync() {
            // Deleting the view removes the key (and its value set) from the multimap.
            ByteBuf keyState = encodeMapKey(key);
            return RedissonSetMultimap.this.fastRemoveAsync(Arrays.asList(keyState),
                    Arrays.asList(RedissonSetMultimap.this.getRawName(), setName), RedisCommands.EVAL_BOOLEAN_AMOUNT);
        }
        // Expiration and rename are deliberately unsupported: the view's lifetime
        // is owned by the multimap, not by the caller.
        @Override
        public RFuture<Boolean> clearExpireAsync() {
            throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
        }
        @Override
        public RFuture<Boolean> expireAsync(long timeToLive, TimeUnit timeUnit, String param, String... keys) {
            throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
        }
        @Override
        protected RFuture<Boolean> expireAtAsync(long timestamp, String param, String... keys) {
            throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
        }
        @Override
        public RFuture<Long> remainTimeToLiveAsync() {
            throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
        }
        @Override
        public RFuture<Void> renameAsync(String newName) {
            throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
        }
        @Override
        public RFuture<Boolean> renamenxAsync(String newName) {
            throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
        }
    };
}
// renamenx semantics: renaming onto an existing multimap must fail and leave both
// maps untouched; renaming onto a fresh name must succeed, emptying the old name
// and carrying all entries to the new one.
@Test
public void testRenamenx() {
    RSetMultimap<String, String> map = redisson.getSetMultimap("simple");
    map.put("1", "2");
    map.put("2", "3");
    RSetMultimap<String, String> map2 = redisson.getSetMultimap("simple2");
    map2.put("4", "5");
    // Target name already exists -> rename refused, nothing changes.
    assertThat(map.renamenx("simple2")).isFalse();
    assertThat(map.size()).isEqualTo(2);
    assertThat(map.get("1")).containsOnly("2");
    assertThat(map.get("2")).containsOnly("3");
    assertThat(map2.get("4")).containsOnly("5");
    // Target name is free -> rename succeeds.
    assertThat(map.renamenx("simple3")).isTrue();
    RSetMultimap<String, String> map3 = redisson.getSetMultimap("simple");
    assertThat(map3.isExists()).isFalse();
    assertThat(map3.isEmpty()).isTrue();
    RSetMultimap<String, String> map4 = redisson.getSetMultimap("simple3");
    assertThat(map4.size()).isEqualTo(2);
    assertThat(map4.get("1")).containsOnly("2");
    assertThat(map4.get("2")).containsOnly("3");
}
/**
 * Looks up an available worker by id, preferring backends over compute nodes.
 */
@Override
public ComputeNode getWorkerById(long workerId) {
    // Backends take precedence; fall back to the compute-node map when absent.
    ComputeNode backend = availableID2Backend.get(workerId);
    return backend != null ? backend : availableID2ComputeNode.get(workerId);
}
// Exercises captureAvailableWorkers with compute-nodes-only scheduling: dead
// nodes and blocklisted nodes must never be returned, and when numUsedComputeNodes
// is positive the provider must cap the number of usable compute nodes at that limit.
@Test
public void testCaptureAvailableWorkers() {
    long deadBEId = 1L;
    long deadCNId = 11L;
    long inBlacklistBEId = 3L;
    long inBlacklistCNId = 13L;
    Set<Long> nonAvailableWorkerId = ImmutableSet.of(deadBEId, deadCNId, inBlacklistBEId, inBlacklistCNId);
    id2Backend.get(deadBEId).setAlive(false);
    id2ComputeNode.get(deadCNId).setAlive(false);
    // Blocklist check is mocked to flag exactly the two "blacklisted" ids.
    new MockUp<SimpleScheduler>() {
        @Mock
        public boolean isInBlocklist(long backendId) {
            return backendId == inBlacklistBEId || backendId == inBlacklistCNId;
        }
    };
    // Deterministic round-robin index so node selection is reproducible per iteration.
    Reference<Integer> nextComputeNodeIndex = new Reference<>(0);
    new MockUp<DefaultWorkerProvider>() {
        @Mock
        int getNextComputeNodeIndex() {
            int next = nextComputeNodeIndex.getRef();
            nextComputeNodeIndex.setRef(next + 1);
            return next;
        }
    };
    new MockUp<SystemInfoService>() {
        @Mock
        public ImmutableMap<Long, ComputeNode> getIdToBackend() {
            return id2Backend;
        }
        @Mock
        public ImmutableMap<Long, ComputeNode> getIdComputeNode() {
            return id2ComputeNode;
        }
    };
    DefaultWorkerProvider.Factory workerProviderFactory = new DefaultWorkerProvider.Factory();
    DefaultWorkerProvider workerProvider;
    // Covers unlimited (<=0), over-capacity (100) and small explicit limits.
    List<Integer> numUsedComputeNodesList = ImmutableList.of(100, 0, -1, 1, 2, 3, 4, 5, 6);
    for (Integer numUsedComputeNodes : numUsedComputeNodesList) {
        // Reset nextComputeNodeIndex.
        nextComputeNodeIndex.setRef(0);
        workerProvider =
            workerProviderFactory.captureAvailableWorkers(GlobalStateMgr.getCurrentState().getNodeMgr().getClusterInfo(),
                    true, numUsedComputeNodes, ComputationFragmentSchedulingPolicy.COMPUTE_NODES_ONLY,
                    WarehouseManager.DEFAULT_WAREHOUSE_ID);
        int numAvailableComputeNodes = 0;
        for (long id = 0; id < 15; id++) {
            ComputeNode worker = workerProvider.getWorkerById(id);
            if (nonAvailableWorkerId.contains(id)
                    // Exceed the limitation of numUsedComputeNodes.
                    || (numUsedComputeNodes > 0 && numAvailableComputeNodes >= numUsedComputeNodes)) {
                Assert.assertNull(worker);
            } else {
                Assert.assertNotNull("numUsedComputeNodes=" + numUsedComputeNodes + ",id=" + id, worker);
                Assert.assertEquals(id, worker.getId());
                if (id2ComputeNode.containsKey(id)) {
                    numAvailableComputeNodes++;
                }
            }
        }
    }
}
/**
 * Updates the detail of an app auth record; requires the
 * {@code system:authen:edit} permission. Delegates entirely to the service layer.
 */
@PostMapping("/updateDetail")
@RequiresPermissions("system:authen:edit")
public ShenyuAdminResult updateDetail(@RequestBody @Valid final AppAuthDTO appAuthDTO) {
    return appAuthService.updateDetail(appAuthDTO);
}
// POSTs a valid AppAuthDTO to /appAuth/updateDetail and expects the mocked
// service's success message in the JSON response. The mapper mock is registered
// via SpringBeanUtils so the DTO's @Valid existence check passes.
@Test
public void testUpdateDetail() throws Exception {
    AppAuthDTO appAuthDTO = new AppAuthDTO();
    appAuthDTO.setId("0001");
    appAuthDTO.setAppKey("app key");
    appAuthDTO.setAppSecret("app secret");
    appAuthDTO.setPhone("1234567");
    given(this.appAuthService.updateDetail(appAuthDTO)).willReturn(
        ShenyuAdminResult.success(ShenyuResultMessage.UPDATE_SUCCESS));
    ConfigurableApplicationContext context = mock(ConfigurableApplicationContext.class);
    SpringBeanUtils.getInstance().setApplicationContext(context);
    when(SpringBeanUtils.getInstance().getBean(AppAuthMapper.class)).thenReturn(appAuthMapper);
    when(appAuthMapper.existed(appAuthDTO.getId())).thenReturn(true);
    this.mockMvc.perform(MockMvcRequestBuilders.post("/appAuth/updateDetail")
            .contentType(MediaType.APPLICATION_JSON)
            .content(GsonUtils.getInstance().toJson(appAuthDTO)))
        .andExpect(status().isOk())
        .andExpect(jsonPath("$.message", is(ShenyuResultMessage.UPDATE_SUCCESS)))
        .andReturn();
}
/**
 * Asserts that the given expression is true.
 *
 * @param expression the condition to check
 * @param message    the exception message used when the condition is false
 * @throws IllegalArgumentException if {@code expression} is false
 */
public static void isTrue(boolean expression, String message) {
    if (expression) {
        return;
    }
    throw new IllegalArgumentException(message);
}
// isTrue(false) with no message must still throw IllegalArgumentException.
// NOTE(review): this calls a single-argument overload of Assert.isTrue, not the
// two-argument variant — confirm that overload exists in the class under test.
@Test(expected = IllegalArgumentException.class)
public void assertIsTrueAndMessageIsNull() {
    Assert.isTrue(false);
}
/**
 * Periodic task: loads all GitLab ALM settings and validates them.
 * When no GitLab integration is configured at all, the metric is set to red
 * and validation is skipped.
 */
@Override
public void run() {
    try (DbSession dbSession = dbClient.openSession(false)) {
        List<AlmSettingDto> gitlabSettingsDtos = dbClient.almSettingDao().selectByAlm(dbSession, ALM.GITLAB);
        if (gitlabSettingsDtos.isEmpty()) {
            // No configuration to validate -> report red and bail out early.
            metrics.setGitlabStatusToRed();
            return;
        }
        validateGitlab(gitlabSettingsDtos);
    }
}
// When every configured GitLab setting validates without throwing, the green
// status must be set exactly once overall (not once per setting) and red never.
@Test
public void run_gitlabValidatorDoesntThrowException_setGreenStatusInMetricsOnce() {
    List<AlmSettingDto> dtos = generateDtos(5, ALM.GITLAB);
    when(almSettingsDao.selectByAlm(any(), any())).thenReturn(dtos);
    underTest.run();
    verify(metrics, times(1)).setGitlabStatusToGreen();
    verify(metrics, times(0)).setGitlabStatusToRed();
}
/**
 * Parses the transform configuration: the cast spec (per-field or whole-value),
 * the null-replacement flag, and a small LRU cache for converted schemas.
 */
@Override
public void configure(Map<String, ?> props) {
    final SimpleConfig config = new SimpleConfig(CONFIG_DEF, props);
    casts = parseFieldTypes(config.getList(SPEC_CONFIG));
    // WHOLE_VALUE_CAST is only present when the spec targets the entire value.
    wholeValueCastType = casts.get(WHOLE_VALUE_CAST);
    // Synchronized: configure/apply may run on different threads.
    schemaUpdateCache = new SynchronizedCache<>(new LRUCache<>(16));
    replaceNullWithDefault = config.getBoolean(REPLACE_NULL_WITH_DEFAULT_CONFIG);
}
// An empty cast spec is invalid and must be rejected at configure() time.
@Test
public void testConfigEmpty() {
    assertThrows(ConfigException.class, () -> xformKey.configure(Collections.singletonMap(Cast.SPEC_CONFIG, "")));
}
/**
 * Validates an argument condition.
 *
 * @param isValid result of the validity check
 * @param message exception message used when the check failed
 * @throws IllegalArgumentException if {@code isValid} is false
 */
public static void checkArgument(boolean isValid, String message) throws IllegalArgumentException {
    if (isValid) {
        return;
    }
    throw new IllegalArgumentException(message);
}
// checkArgument(true, …) must be silent; checkArgument(false, …) must throw an
// IllegalArgumentException carrying exactly the supplied message.
@Test
public void testCheckArgumentWithoutParams() {
    try {
        Preconditions.checkArgument(true, "Test message");
    } catch (IllegalArgumentException e) {
        Assert.fail("Should not throw exception when isValid is true");
    }
    try {
        Preconditions.checkArgument(false, "Test message");
        Assert.fail("Should throw exception when isValid is false");
    } catch (IllegalArgumentException e) {
        Assert.assertEquals("Should format message", "Test message", e.getMessage());
    }
}
/**
 * Builds a Kubernetes environment variable with the given name and literal value.
 *
 * @param name  environment variable name
 * @param value environment variable value
 * @return the constructed {@code EnvVar}
 */
public static EnvVar createEnvVar(String name, String value) {
    return new EnvVarBuilder()
            .withName(name)
            .withValue(value)
            .build();
}
// createEnvVar must carry both the name and the literal value through to the EnvVar.
@Test
public void testCreateEnvVar() {
    EnvVar var = ContainerUtils.createEnvVar("VAR_1", "value1");
    assertThat(var.getName(), is("VAR_1"));
    assertThat(var.getValue(), is("value1"));
}
/**
 * Sets whether the metrics service should be exported.
 *
 * @param exportMetricsService the flag to apply (may be null, left unset)
 * @return this builder, for chaining
 */
public MetricsBuilder exportMetricsService(Boolean exportMetricsService) {
    this.exportMetricsService = exportMetricsService;
    return getThis();
}
// The builder must propagate exportMetricsService=false into the built config.
@Test
void exportMetricsService() {
    MetricsBuilder builder = MetricsBuilder.newBuilder();
    builder.exportMetricsService(false);
    Assertions.assertFalse(builder.build().getExportMetricsService());
}
/**
 * Validates a reservation submission request: the reservation id must be set,
 * the target queue must map to a managed plan, and the reservation definition
 * must pass plan-level validation.
 *
 * @return the plan backing the requested queue
 * @throws YarnException if the id is missing or any validation step fails
 */
public Plan validateReservationSubmissionRequest(
    ReservationSystem reservationSystem,
    ReservationSubmissionRequest request, ReservationId reservationId)
    throws YarnException {
    String message;
    if (reservationId == null) {
        message = "Reservation id cannot be null. Please try again specifying "
            + " a valid reservation id by creating a new reservation id.";
        throw RPCUtil.getRemoteException(message);
    }
    // Check if it is a managed queue
    String queue = request.getQueue();
    Plan plan = getPlanFromQueue(reservationSystem, queue,
        AuditConstants.SUBMIT_RESERVATION_REQUEST);
    validateReservationDefinition(reservationId,
        request.getReservationDefinition(), plan,
        AuditConstants.SUBMIT_RESERVATION_REQUEST);
    return plan;
}
// An empty submission request (no queue set) must be rejected with the
// "queue is not specified" message and no plan may be returned.
@Test
public void testSubmitReservationDoesNotExist() {
    ReservationSubmissionRequest request =
        new ReservationSubmissionRequestPBImpl();
    Plan plan = null;
    try {
        plan = rrValidator.validateReservationSubmissionRequest(rSystem, request,
            ReservationSystemTestUtil.getNewReservationId());
        Assert.fail();
    } catch (YarnException e) {
        Assert.assertNull(plan);
        String message = e.getMessage();
        Assert.assertEquals("The queue is not specified. Please try again with a "
            + "valid reservable queue.", message);
        LOG.info(message);
    }
}
/**
 * Persists gRPC metadata: looks up any existing record by path and saves or
 * updates accordingly (null lookup result means insert).
 */
@Override
protected void registerMetadata(final MetaDataRegisterDTO metaDataDTO) {
    MetaDataService metaDataService = getMetaDataService();
    if (LOG.isDebugEnabled()) {
        LOG.debug("grpc register metadata:{}", GsonUtils.getInstance().toJson(metaDataDTO));
    }
    MetaDataDO exist = metaDataService.findByPath(metaDataDTO.getPath());
    metaDataService.saveOrUpdateMetaData(exist, metaDataDTO);
}
// registerMetadata must look up the existing record by the DTO's path and pass
// both the found record and the DTO to saveOrUpdateMetaData.
@Test
public void testRegisterMetadata() {
    MetaDataDO metaDataDO = MetaDataDO.builder().build();
    when(metaDataService.findByPath(any())).thenReturn(metaDataDO);
    MetaDataRegisterDTO metaDataDTO = MetaDataRegisterDTO.builder().path("/test").build();
    shenyuClientRegisterGrpcService.registerMetadata(metaDataDTO);
    verify(metaDataService).findByPath("/test");
    verify(metaDataService).saveOrUpdateMetaData(metaDataDO, metaDataDTO);
}
/**
 * Joins an incoming trace context, producing a span that shares the same ids.
 * When join is unsupported, a child span is created instead. Otherwise the
 * shared flag is toggled so that joined/unjoined variants of the same context
 * resolve to the same span (see the argument-order swap below).
 *
 * @throws NullPointerException if {@code context} is null
 */
public final Span joinSpan(TraceContext context) {
    if (context == null) throw new NullPointerException("context == null");
    if (!supportsJoin) return newChild(context);
    // set shared flag if not already done
    int flags = InternalPropagation.instance.flags(context);
    if (!context.shared()) {
        flags |= FLAG_SHARED;
        // NOTE(review): toSpan appears to take (input context, canonical context);
        // the two branches swap which variant is the canonical one — confirm
        // against toSpan's contract before restructuring.
        return toSpan(context, InternalPropagation.instance.withFlags(context, flags));
    } else {
        flags &= ~FLAG_SHARED;
        return toSpan(InternalPropagation.instance.withFlags(context, flags), context);
    }
}
// Joining the same shared incoming context twice must yield the same context
// instance (idempotent join), distinct from the incoming one.
@Test
void join_idempotent() {
    TraceContext incoming = TraceContext.newBuilder().traceId(1L).spanId(2L).sampled(true)
        .shared(true).build();
    TraceContext joined = tracer.joinSpan(incoming).context();
    assertThat(joined).isNotSameAs(incoming);
    assertThat(tracer.joinSpan(incoming).context()).isSameAs(joined);
}
/**
 * Creates the endpoint provider that uploads the blob to the given location,
 * reporting written byte counts to the listener.
 */
RegistryEndpointProvider<URL> writer(URL location, Consumer<Long> writtenByteCountListener) {
    return new Writer(location, writtenByteCountListener);
}
// The writer's HTTP body must be octet-stream, reproduce the blob content exactly,
// and report the full byte count to the listener while writing.
@Test
public void testWriter_getContent() throws IOException {
    LongAdder byteCount = new LongAdder();
    BlobHttpContent body = testBlobPusher.writer(mockUrl, byteCount::add).getContent();
    Assert.assertNotNull(body);
    Assert.assertEquals("application/octet-stream", body.getType());
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    body.writeTo(byteArrayOutputStream);
    Assert.assertEquals(
        TEST_BLOB_CONTENT, new String(byteArrayOutputStream.toByteArray(), StandardCharsets.UTF_8));
    Assert.assertEquals(TEST_BLOB_CONTENT.length(), byteCount.sum());
}
/**
 * Wraps a raw batch-update response DataMap into a typed BatchKVResponse of
 * UpdateStatus. Returns null for a null input map (e.g. an empty response body).
 */
@Override
public BatchKVResponse<K, UpdateStatus> wrapResponse(DataMap dataMap, Map<String, String> headers, ProtocolVersion version) {
    if (dataMap == null) {
        return null;
    }
    // Merge the "results" and "errors" sections into a single status map first.
    DataMap responseData = ResponseDecoderUtil.mergeUpdateStatusResponseData(dataMap);
    return new BatchKVResponse<>(responseData, _keyType, new TypeSpec<>(UpdateStatus.class),
        _keyParts, _complexKeyType, version);
}
// A null DataMap (no response body) must decode to a null response object,
// across all protocol versions supplied by the data provider.
@Test(dataProvider = TestConstants.RESTLI_PROTOCOL_1_2_PREFIX + "batchEntityResponseDataProvider")
public void testDecodingWithEmptyDataMap(ProtocolVersion protocolVersion)
    throws InstantiationException, IllegalAccessException, InvocationTargetException, NoSuchMethodException, IOException {
    final BatchUpdateResponseDecoder<String> decoder =
        new BatchUpdateResponseDecoder<>(new TypeSpec<>(String.class),
            Collections.<String, CompoundKey.TypeInfo>emptyMap(), null);
    final BatchKVResponse<String, UpdateStatus> response =
        decoder.wrapResponse(null, Collections.<String, String>emptyMap(), protocolVersion);
    Assert.assertNull(response);
}
/**
 * Determines the distribution methods available for a file's bucket.
 * Website hosting requires a DNS-compatible bucket name; on AWS endpoints the
 * CloudFront methods from the superclass are offered in addition to website
 * configuration, while non-AWS endpoints only support website configuration.
 */
@Override
public List<Distribution.Method> getMethods(final Path file) {
    if(!ServiceUtils.isBucketNameValidDNSName(containerService.getContainer(file).getName())) {
        // Disable website configuration if bucket name is not DNS compatible
        return super.getMethods(file);
    }
    final List<Distribution.Method> methods = new ArrayList<>();
    if(S3Session.isAwsHostname(session.getHost().getHostname())) {
        methods.addAll(super.getMethods(file));
        methods.addAll(Arrays.asList(Distribution.WEBSITE, Distribution.WEBSITE_CDN));
    }
    else {
        // Only allow website configuration for non AWS endpoints.
        methods.add(Distribution.WEBSITE);
    }
    return methods;
}
// For an AWS endpoint: a DNS-compatible bucket ("bbb") offers download, streaming
// and both website methods (but not custom), while a bucket with an underscore
// ("bbb_b") is not DNS-compatible and must not offer website hosting.
@Test
public void testGetMethodsAWS() {
    final S3Session session = new S3Session(new Host(new S3Protocol()));
    final WebsiteCloudFrontDistributionConfiguration configuration = new WebsiteCloudFrontDistributionConfiguration(session,
        new S3LocationFeature(session), new DisabledX509TrustManager(), new DefaultX509KeyManager()
    );
    assertTrue(configuration.getMethods(
        new Path(new Path("/", EnumSet.of(Path.Type.directory, Path.Type.volume)), "bbb", EnumSet.of(Path.Type.directory, Path.Type.volume))).contains(Distribution.DOWNLOAD));
    assertTrue(configuration.getMethods(
        new Path(new Path("/", EnumSet.of(Path.Type.directory, Path.Type.volume)), "bbb", EnumSet.of(Path.Type.directory, Path.Type.volume))).contains(Distribution.STREAMING));
    assertFalse(configuration.getMethods(
        new Path(new Path("/", EnumSet.of(Path.Type.directory, Path.Type.volume)), "bbb", EnumSet.of(Path.Type.directory, Path.Type.volume))).contains(Distribution.CUSTOM));
    assertTrue(configuration.getMethods(
        new Path(new Path("/", EnumSet.of(Path.Type.directory, Path.Type.volume)), "bbb", EnumSet.of(Path.Type.directory, Path.Type.volume))).contains(Distribution.WEBSITE_CDN));
    assertTrue(configuration.getMethods(
        new Path(new Path("/", EnumSet.of(Path.Type.directory, Path.Type.volume)), "bbb", EnumSet.of(Path.Type.directory, Path.Type.volume))).contains(Distribution.WEBSITE));
    assertFalse(configuration.getMethods(
        new Path(new Path("/", EnumSet.of(Path.Type.directory, Path.Type.volume)), "bbb_b", EnumSet.of(Path.Type.directory, Path.Type.volume))).contains(Distribution.WEBSITE));
}
/**
 * Injects configuration params into a bean's {@code @Field}-annotated members.
 * Field metadata per class is computed once and cached (double-checked locking
 * on PARAM_INFO). A supplied param must be assignment-compatible with the
 * declared field type; a missing param is an error only for required fields.
 *
 * @throws TikaConfigException on type mismatch, assignment failure, or a missing required param
 */
public static void assignFieldParams(Object bean, Map<String, Param> params) throws TikaConfigException {
    Class<?> beanClass = bean.getClass();
    // Double-checked locking: build the field metadata for this class at most once.
    if (!PARAM_INFO.containsKey(beanClass)) {
        synchronized (TikaConfig.class) {
            if (!PARAM_INFO.containsKey(beanClass)) {
                List<AccessibleObject> aObjs = collectInfo(beanClass, org.apache.tika.config.Field.class);
                List<ParamField> fields = new ArrayList<>(aObjs.size());
                for (AccessibleObject aObj : aObjs) {
                    fields.add(new ParamField(aObj));
                }
                PARAM_INFO.put(beanClass, fields);
            }
        }
    }
    List<ParamField> fields = PARAM_INFO.get(beanClass);
    for (ParamField field : fields) {
        Param<?> param = params.get(field.getName());
        if (param != null) {
            if (field.getType().isAssignableFrom(param.getType())) {
                try {
                    field.assignValue(bean, param.getValue());
                } catch (InvocationTargetException e) {
                    LOG.error("Error assigning value '{}' to '{}'", param.getValue(), param.getName());
                    // Prefer the underlying cause of the reflective call when present.
                    final Throwable cause = e.getCause() == null ? e : e.getCause();
                    throw new TikaConfigException(cause.getMessage(), cause);
                } catch (IllegalAccessException e) {
                    LOG.error("Error assigning value '{}' to '{}'", param.getValue(), param.getName());
                    throw new TikaConfigException(e.getMessage(), e);
                }
            } else {
                String msg = String.format(Locale.ROOT, "Value '%s' of type '%s' can't be" +
                        " assigned to field '%s' of defined type '%s'",
                        param.getValue(), param.getValue().getClass(), field.getName(), field.getType());
                throw new TikaConfigException(msg);
            }
        } else if (field.isRequired()) {
            //param not supplied but field is declared as required?
            String msg = String.format(Locale.ROOT, "Param %s is required for %s," +
                    " but it is not given in config.", field.getName(), bean.getClass().getName());
            throw new TikaConfigException(msg);
        } else {
            LOG.debug("Param not supplied, field is not mandatory");
        }
    }
}
// The same "config" param must be assignable to both a primitive int field and a
// boxed Integer field (annotated with an explicit name), ending up equal.
@Test
public void testPrimitiveAndBoxedTypes() {
    class MyParser extends Configurable {
        @Field(required = true)
        int config;
        @Field(required = true, name = "config")
        Integer config2;
    }
    Map<String, Param> params = new HashMap<>();
    try {
        MyParser bean = new MyParser();
        int val = 100;
        params.put("config", new Param<>("config", val));
        AnnotationUtils.assignFieldParams(bean, params);
        assertTrue(bean.config == bean.config2);
        assertTrue(bean.config == val);
    } catch (TikaConfigException e) {
        e.printStackTrace();
        fail("Exception Not expected");
    }
}
/**
 * Starts fetching the configured keys through the event bus at the configured
 * concurrency. Completes the callback immediately when there is nothing to fetch;
 * otherwise the callback is completed when all requests finish.
 */
public void sendRequests(Callback<None> callback) {
    LOG.info("Event Bus Requests throttler started for {} keys at a {} load rate",
        _keysToFetch.size(), _maxConcurrentRequests);
    if (_keysToFetch.size() == 0) {
        callback.onSuccess(None.none());
        return;
    }
    _callback = callback;
    // Kick off the first batch; completion handlers keep the pipeline full.
    makeRequests(_maxConcurrentRequests);
}
// Drives 100 requests through the throttler and polls concurrency while it runs:
// in-flight requests must never exceed DEFAULT_MAX_CONCURRENT_REQUESTS, at least
// one request must have been observed in flight, and all requests must complete.
@Test(timeOut = 10000)
public void testThrottling() throws InterruptedException, ExecutionException, TimeoutException {
    TestSubscriber testSubscriber = new TestSubscriber();
    TestEventBus testZkEventBus = new TestEventBus(testSubscriber, 50);
    final int nRequests = 100;
    PropertyEventBusRequestsThrottler<String> propertyEventBusRequestsThrottler =
        new PropertyEventBusRequestsThrottler<>(testZkEventBus, testSubscriber, generateNKeys(nRequests),
            PropertyEventBusRequestsThrottler.DEFAULT_MAX_CONCURRENT_REQUESTS, false);
    FutureCallback<None> callback = new FutureCallback<>();
    propertyEventBusRequestsThrottler.sendRequests(callback);
    boolean triggeredAtLeastOnce = false;
    while (!callback.isDone()) {
        // In-flight = issued minus completed.
        int currentConcurrentRequests =
            testZkEventBus.getRequestCount().get() - testSubscriber.getCompletedRequestCount().get();
        if (currentConcurrentRequests > 0) {
            triggeredAtLeastOnce = true;
        }
        if (currentConcurrentRequests > PropertyEventBusRequestsThrottler.DEFAULT_MAX_CONCURRENT_REQUESTS) {
            Assert.fail("The concurrent requests (" + currentConcurrentRequests
                + ") are greater than the allowed ("
                + PropertyEventBusRequestsThrottler.DEFAULT_MAX_CONCURRENT_REQUESTS + ")");
        }
        Thread.sleep(50);
    }
    callback.get(1000, TimeUnit.MILLISECONDS);
    Assert.assertTrue(triggeredAtLeastOnce);
    Assert.assertEquals(nRequests, testZkEventBus.getRequestCount().get());
    Assert.assertEquals(nRequests, testSubscriber.getCompletedRequestCount().get());
}
/**
 * Returns the post with the given id as a reactive {@code Mono}; empty when
 * no post exists for the id.
 */
@GetMapping(value = "/{id}")
public Mono<Post> get(@PathVariable(value = "id") Long id) {
    return this.posts.findById(id);
}
// GET /posts/{id} must return the matching post: ids 1 and 2 map to titles
// "post one" and "post two" respectively.
@Test
public void getPostById() throws Exception {
    this.client
        .get()
        .uri("/posts/1")
        .accept(APPLICATION_JSON)
        .exchange()
        .expectBody()
        .jsonPath("$.title")
        .isEqualTo("post one");
    this.client
        .get()
        .uri("/posts/2")
        .accept(APPLICATION_JSON)
        .exchange()
        .expectBody()
        .jsonPath("$.title")
        .isEqualTo("post two");
}
/**
 * Renames/moves a file or folder in Box. When the target already exists it is
 * deleted first (Box has no overwrite on rename). Uses the folders API for
 * directories and the files API otherwise, then updates the file-id cache so the
 * moved item keeps its Box id under the new path.
 *
 * @throws BackgroundException mapped from the Box API failure
 */
@Override
public Path move(final Path file, final Path renamed, final TransferStatus status, final Delete.Callback delete, final ConnectionCallback callback) throws BackgroundException {
    try {
        if(status.isExists()) {
            if(log.isWarnEnabled()) {
                log.warn(String.format("Delete file %s to be replaced with %s", renamed, file));
            }
            new BoxDeleteFeature(session, fileid).delete(Collections.singletonList(renamed), callback, delete);
        }
        final String id = fileid.getFileId(file);
        if(file.isDirectory()) {
            final Folder result = new FoldersApi(new BoxApiClient(session.getClient())).putFoldersId(
                id, new FoldersFolderIdBody()
                    .name(renamed.getName())
                    .parent(new FoldersfolderIdParent()
                        .id(fileid.getFileId(renamed.getParent()))), null, BoxAttributesFinderFeature.DEFAULT_FIELDS);
            // Evict the old path and bind the unchanged Box id to the new path.
            fileid.cache(file, null);
            fileid.cache(renamed, id);
            return renamed.withAttributes(new BoxAttributesFinderFeature(session, fileid).toAttributes(result));
        }
        final File result = new FilesApi(new BoxApiClient(session.getClient())).putFilesId(
            id, new FilesFileIdBody()
                .name(renamed.getName())
                .parent(new FilesfileIdParent()
                    .id(fileid.getFileId(renamed.getParent()))), null, BoxAttributesFinderFeature.DEFAULT_FIELDS);
        fileid.cache(file, null);
        fileid.cache(renamed, id);
        return renamed.withAttributes(new BoxAttributesFinderFeature(session, fileid).toAttributes(result));
    }
    catch(ApiException e) {
        throw new BoxExceptionMappingService(fileid).map("Cannot rename {0}", e, file);
    }
}
// Moving onto an existing file with exists=true must replace the target: the
// source disappears, the moved file exists, and its timestamp/checksum carry over.
@Test
public void testMoveOverride() throws Exception {
    final BoxFileidProvider fileid = new BoxFileidProvider(session);
    final Path test = new BoxTouchFeature(session, fileid).touch(
        new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus());
    final Path target = new BoxTouchFeature(session, fileid).touch(
        new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus());
    final Path override = new BoxMoveFeature(session, fileid).move(test, target,
        new TransferStatus().exists(true).withRemote(target.attributes()), new Delete.DisabledCallback(), new DisabledConnectionCallback());
    assertFalse(new BoxFindFeature(session, fileid).find(test));
    assertTrue(new BoxFindFeature(session, fileid).find(override));
    assertEquals(test.attributes().getModificationDate(), target.attributes().getModificationDate());
    assertEquals(test.attributes().getChecksum(), target.attributes().getChecksum());
    new BoxDeleteFeature(session, fileid).delete(Collections.<Path>singletonList(target), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Writes a single byte into the internal input buffer, compressing and flushing
 * downstream when the buffer fills.
 *
 * @throws IOException if the stream is already closed or the flush fails
 */
@Override
public void write(int b) throws IOException {
    throwIfClosed();
    inputBuffer[inputPosition++] = (byte) b;
    flushIfFull();
}
// Round-trip: bytes written through ZstdOutputStream (both single-byte and bulk
// write paths) must decompress back to the original input.
@Test
void output_stream_compresses_input() throws IOException {
    byte[] inputData = "The quick brown fox jumps over the lazy dog".getBytes();
    ByteArrayOutputStream arrayOut = new ByteArrayOutputStream();
    try (ZstdOutputStream zstdOut = new ZstdOutputStream(arrayOut, 12)) {
        // Exercise both write(int) and write(byte[], off, len).
        zstdOut.write(inputData[0]);
        zstdOut.write(inputData, 1, inputData.length - 1);
    }
    byte[] compressedData = arrayOut.toByteArray();
    ZstdCompressor compressor = new ZstdCompressor();
    byte[] decompressedData = new byte[inputData.length];
    compressor.decompress(compressedData, 0, compressedData.length, decompressedData, 0, decompressedData.length);
    assertArrayEquals(inputData, decompressedData);
}
/**
 * Creates a PipelineOptions proxy implementing the given interface, using a
 * fresh builder with default settings.
 */
public static <T extends PipelineOptions> T as(Class<T> klass) {
    return new Builder().as(klass);
}
// An options interface whose getters and setters disagree on property types must
// be rejected, and the error must enumerate every mismatched property.
@Test
public void testMultiGetterSetterTypeMismatchThrows() {
    expectedException.expect(IllegalArgumentException.class);
    expectedException.expectMessage("Type mismatches between getters and setters detected:");
    expectedException.expectMessage(
        "Property [value]: Getter is of type "
            + "[boolean] whereas setter is of type [int].");
    expectedException.expectMessage(
        "Property [other]: Getter is of type [long] "
            + "whereas setter is of type [class java.lang.String].");
    PipelineOptionsFactory.as(MultiGetterSetterTypeMismatch.class);
}
/**
 * Updates a health-check task's response-time statistics with a new measurement:
 * records the last value, tracks best/worst, and folds the measurement into an
 * exponentially weighted normalized RT, clamped to the configured [min, max].
 */
public void reEvaluateCheckRT(long checkRT, HealthCheckTaskV2 task, SwitchDomain.HealthParams params) {
    task.setCheckRtLast(checkRT);
    if (checkRT > task.getCheckRtWorst()) {
        task.setCheckRtWorst(checkRT);
    }
    if (checkRT < task.getCheckRtBest()) {
        task.setCheckRtBest(checkRT);
    }
    // EWMA: factor weights the historical normalized value, (1 - factor) the new sample.
    checkRT = (long) ((params.getFactor() * task.getCheckRtNormalized()) + (1 - params.getFactor()) * checkRT);
    if (checkRT > params.getMax()) {
        checkRT = params.getMax();
    }
    if (checkRT < params.getMin()) {
        checkRT = params.getMin();
    }
    task.setCheckRtNormalized(checkRT);
}
// Verifies the exact interaction pattern of reEvaluateCheckRT with its
// collaborators: max/factor are read twice (clamp + EWMA), min once, and each
// task statistic getter exactly once.
@Test
void testReEvaluateCheckRT() {
    healthCheckCommonV2.reEvaluateCheckRT(1, healthCheckTaskV2, healthParams);
    verify(healthParams, times(2)).getMax();
    verify(healthParams, times(1)).getMin();
    verify(healthParams, times(2)).getFactor();
    verify(healthCheckTaskV2).getCheckRtWorst();
    verify(healthCheckTaskV2).getCheckRtBest();
    verify(healthCheckTaskV2).getCheckRtNormalized();
}
/**
 * OAuth2 token endpoint: validates the grant type and client credentials,
 * then issues an access token via the grant service matching the requested mode
 * (authorization_code / password / client_credentials / refresh_token).
 * The implicit flow is rejected here because it never goes through this endpoint.
 */
@PostMapping("/token")
@PermitAll
@Operation(summary = "获得访问令牌", description = "适合 code 授权码模式,或者 implicit 简化模式;在 sso.vue 单点登录界面被【获取】调用")
@Parameters({
        @Parameter(name = "grant_type", required = true, description = "授权类型", example = "code"),
        @Parameter(name = "code", description = "授权范围", example = "userinfo.read"),
        @Parameter(name = "redirect_uri", description = "重定向 URI", example = "https://www.iocoder.cn"),
        @Parameter(name = "state", description = "状态", example = "1"),
        @Parameter(name = "username", example = "tudou"),
        @Parameter(name = "password", example = "cai"), // multiple scopes are separated by spaces
        @Parameter(name = "scope", example = "user_info"),
        @Parameter(name = "refresh_token", example = "123424233"),
})
public CommonResult<OAuth2OpenAccessTokenRespVO> postAccessToken(HttpServletRequest request,
                                                                 @RequestParam("grant_type") String grantType,
                                                                 @RequestParam(value = "code", required = false) String code, // authorization-code mode
                                                                 @RequestParam(value = "redirect_uri", required = false) String redirectUri, // authorization-code mode
                                                                 @RequestParam(value = "state", required = false) String state, // authorization-code mode
                                                                 @RequestParam(value = "username", required = false) String username, // password mode
                                                                 @RequestParam(value = "password", required = false) String password, // password mode
                                                                 @RequestParam(value = "scope", required = false) String scope, // password mode
                                                                 @RequestParam(value = "refresh_token", required = false) String refreshToken) { // refresh mode
    List<String> scopes = OAuth2Utils.buildScopes(scope);
    // 1.1 Validate the grant type; implicit must not use the token endpoint.
    OAuth2GrantTypeEnum grantTypeEnum = OAuth2GrantTypeEnum.getByGrantType(grantType);
    if (grantTypeEnum == null) {
        throw exception0(BAD_REQUEST.getCode(), StrUtil.format("未知授权类型({})", grantType));
    }
    if (grantTypeEnum == OAuth2GrantTypeEnum.IMPLICIT) {
        throw exception0(BAD_REQUEST.getCode(), "Token 接口不支持 implicit 授权模式");
    }
    // 1.2 Validate the client using HTTP Basic credentials.
    String[] clientIdAndSecret = obtainBasicAuthorization(request);
    OAuth2ClientDO client = oauth2ClientService.validOAuthClientFromCache(clientIdAndSecret[0], clientIdAndSecret[1],
            grantType, scopes, redirectUri);
    // 2. Obtain the access token according to the grant mode.
    OAuth2AccessTokenDO accessTokenDO;
    switch (grantTypeEnum) {
        case AUTHORIZATION_CODE:
            accessTokenDO = oauth2GrantService.grantAuthorizationCodeForAccessToken(client.getClientId(), code, redirectUri, state);
            break;
        case PASSWORD:
            accessTokenDO = oauth2GrantService.grantPassword(username, password, client.getClientId(), scopes);
            break;
        case CLIENT_CREDENTIALS:
            accessTokenDO = oauth2GrantService.grantClientCredentials(client.getClientId(), scopes);
            break;
        case REFRESH_TOKEN:
            accessTokenDO = oauth2GrantService.grantRefreshToken(refreshToken, client.getClientId());
            break;
        default:
            throw new IllegalArgumentException("未知授权类型:" + grantType);
    }
    Assert.notNull(accessTokenDO, "访问令牌不能为空"); // defensive check
    return success(OAuth2OpenConvert.INSTANCE.convert(accessTokenDO));
}
// Happy path of the authorization_code grant through the token endpoint.
@Test
public void testPostAccessToken_authorizationCode() {
    // Prepare parameters
    String granType = OAuth2GrantTypeEnum.AUTHORIZATION_CODE.getGrantType();
    String code = randomString();
    String redirectUri = randomString();
    String state = randomString();
    HttpServletRequest request = mockRequest("test_client_id", "test_client_secret");
    // Mock the client validation
    OAuth2ClientDO client = randomPojo(OAuth2ClientDO.class).setClientId("test_client_id");
    when(oauth2ClientService.validOAuthClientFromCache(eq("test_client_id"), eq("test_client_secret"),
        eq(granType), eq(new ArrayList<>()), eq(redirectUri))).thenReturn(client);
    // Mock the access-token grant
    OAuth2AccessTokenDO accessTokenDO = randomPojo(OAuth2AccessTokenDO.class)
        .setExpiresTime(LocalDateTimeUtil.offset(LocalDateTime.now(), 30000L, ChronoUnit.MILLIS));
    when(oauth2GrantService.grantAuthorizationCodeForAccessToken(eq("test_client_id"), eq(code),
        eq(redirectUri), eq(state))).thenReturn(accessTokenDO);
    // Invoke
    CommonResult<OAuth2OpenAccessTokenRespVO> result = oauth2OpenController.postAccessToken(request, granType,
        code, redirectUri, state, null, null, null, null);
    // Assert
    assertEquals(0, result.getCode());
    assertPojoEquals(accessTokenDO, result.getData());
    assertTrue(ObjectUtils.equalsAny(result.getData().getExpiresIn(), 29L, 30L)); // a few milliseconds may elapse during the call
}
/**
 * Stores a message for the destination device and, for offline sends, fires a
 * push notification when the client is not connected. Online sends are only
 * persisted (flagged ephemeral) when the client is currently present.
 * A delivery-channel counter is incremented in all cases.
 */
public void sendMessage(final Account account, final Device device, final Envelope message, final boolean online) {
    // Classify the delivery channel purely for metrics tagging.
    final String channel;
    if (device.getGcmId() != null) {
        channel = "gcm";
    } else if (device.getApnId() != null) {
        channel = "apn";
    } else if (device.getFetchesMessages()) {
        channel = "websocket";
    } else {
        channel = "none";
    }
    final boolean clientPresent;
    if (online) {
        clientPresent = clientPresenceManager.isPresent(account.getUuid(), device.getId());
        if (clientPresent) {
            messagesManager.insert(account.getUuid(), device.getId(), message.toBuilder().setEphemeral(true).build());
        }
    } else {
        messagesManager.insert(account.getUuid(), device.getId(), message);
        // We check for client presence after inserting the message to take a conservative view of notifications. If the
        // client wasn't present at the time of insertion but is now, they'll retrieve the message. If they were present
        // but disconnected before the message was delivered, we should send a notification.
        clientPresent = clientPresenceManager.isPresent(account.getUuid(), device.getId());
        if (!clientPresent) {
            try {
                pushNotificationManager.sendNewMessageNotification(account, device.getId(), message.getUrgent());
            } catch (final NotPushRegisteredException ignored) {
                // Device has no push token; it will pick the message up on its next fetch.
            }
        }
    }
    Metrics.counter(SEND_COUNTER_NAME,
            CHANNEL_TAG_NAME, channel,
            EPHEMERAL_TAG_NAME, String.valueOf(online),
            CLIENT_ONLINE_TAG_NAME, String.valueOf(clientPresent),
            URGENT_TAG_NAME, String.valueOf(message.getUrgent()),
            STORY_TAG_NAME, String.valueOf(message.getStory()),
            SEALED_SENDER_TAG_NAME, String.valueOf(!message.hasSourceUuid()))
        .increment();
}
// Offline send to a fetches-messages device: the message must be stored even
// though the push-notification attempt throws NotPushRegisteredException.
@Test
void testSendMessageFetchClientNotPresent() throws Exception {
    when(clientPresenceManager.isPresent(ACCOUNT_UUID, DEVICE_ID)).thenReturn(false);
    when(device.getFetchesMessages()).thenReturn(true);
    doThrow(NotPushRegisteredException.class)
        .when(pushNotificationManager).sendNewMessageNotification(account, DEVICE_ID, message.getUrgent());
    assertDoesNotThrow(() -> messageSender.sendMessage(account, device, message, false));
    verify(messagesManager).insert(ACCOUNT_UUID, DEVICE_ID, message);
}
/**
 * Adapts a checked-exception-throwing function into a plain {@link Function}
 * usable inside stream pipelines. Any exception thrown by {@code function} is
 * rethrown unchecked via {@code throwAsUnchecked}.
 */
public static <T, R, E extends Exception> Function<T, R> rethrowFunction(FunctionWithExceptions<T, R, E> function) throws E {
    return input -> {
        try {
            return function.apply(input);
        } catch (Exception e) {
            throwAsUnchecked(e);
            // Unreachable: throwAsUnchecked always throws, but the compiler
            // still requires a return on this path.
            return null;
        }
    };
}
// rethrowFunction must let Class.forName (which throws the checked
// ClassNotFoundException) be used directly inside Stream.map.
// Fixes: raw type List<Class> -> List<Class<?>>, and the test previously
// made no assertions at all.
@Test
public void test_Function_with_checked_exceptions() throws ClassNotFoundException {
    List<Class<?>> classes1 = Stream.of("Object", "Integer", "String")
        .map(rethrowFunction(className -> Class.forName("java.lang." + className)))
        .collect(Collectors.toList());

    List<Class<?>> classes2 = Stream.of("java.lang.Object", "java.lang.Integer", "java.lang.String")
        .map(rethrowFunction(Class::forName))
        .collect(Collectors.toList());

    // Framework-free assertion so no extra static imports are needed: both
    // pipelines must resolve the same three classes.
    if (classes1.size() != 3 || !classes1.equals(classes2)) {
        throw new AssertionError("expected both pipelines to resolve the same 3 classes, got "
            + classes1 + " and " + classes2);
    }
}
/** Returns the registered name of this transform function. */
@Override
public String getName() {
    return FUNCTION_NAME;
}
// Constructor-time validation of the lookup() transform: one valid invocation
// plus each malformed-argument shape, which must raise BadQueryRequestException.
@Test
public void instantiationTests() throws Exception {
    // Success case
    ExpressionContext expression = RequestContextUtils
        .getExpression(String.format("lookup('baseballTeams','teamName','teamID',%s)", STRING_SV_COLUMN));
    TransformFunction transformFunction = TransformFunctionFactory.get(expression, _dataSourceMap);
    Assert.assertTrue(transformFunction instanceof LookupTransformFunction);
    Assert.assertEquals(transformFunction.getName(), LookupTransformFunction.FUNCTION_NAME);
    // Wrong number of arguments
    Assert.assertThrows(BadQueryRequestException.class, () -> {
        TransformFunctionFactory
            .get(RequestContextUtils.getExpression(String.format("lookup('baseballTeams','teamName','teamID')")),
                _dataSourceMap);
    });
    // Wrong number of join keys
    Assert.assertThrows(BadQueryRequestException.class, () -> {
        TransformFunctionFactory.get(RequestContextUtils.getExpression(
            String.format("lookup('baseballTeams','teamName','teamID', %s, 'danglingKey')", STRING_SV_COLUMN)),
            _dataSourceMap);
    });
    // Non literal tableName argument
    Assert.assertThrows(BadQueryRequestException.class, () -> {
        TransformFunctionFactory.get(RequestContextUtils
            .getExpression(String.format("lookup(%s,'teamName','teamID', %s)", STRING_SV_COLUMN, INT_SV_COLUMN)),
            _dataSourceMap);
    });
    // Non literal lookup columnName argument
    Assert.assertThrows(BadQueryRequestException.class, () -> {
        TransformFunctionFactory.get(RequestContextUtils.getExpression(
            String.format("lookup('baseballTeams',%s,'teamID',%s)", STRING_SV_COLUMN, INT_SV_COLUMN)),
            _dataSourceMap);
    });
    // Non literal join-key columnName argument (third position)
    Assert.assertThrows(BadQueryRequestException.class, () -> {
        TransformFunctionFactory.get(RequestContextUtils.getExpression(
            String.format("lookup('baseballTeams','teamName',%s,%s)", STRING_SV_COLUMN, INT_SV_COLUMN)),
            _dataSourceMap);
    });
}
/** Converts a source value into an Iceberg {@link Record} via the configured field deserializer. */
Record deserialize(Object data) {
    return (Record) fieldDeserializer.value(data);
}
// Deserializing a (LongWritable, Text) struct must yield the equivalent Iceberg record.
@Test
public void testStructDeserialize() {
    Deserializer deserializer = new Deserializer.Builder()
        .schema(CUSTOMER_SCHEMA)
        .writerInspector((StructObjectInspector) IcebergObjectInspector.create(CUSTOMER_SCHEMA))
        .sourceInspector(CUSTOMER_OBJECT_INSPECTOR)
        .build();
    Record expected = GenericRecord.create(CUSTOMER_SCHEMA);
    expected.set(0, 1L);
    expected.set(1, "Bob");
    Record actual = deserializer.deserialize(new Object[] {new LongWritable(1L), new Text("Bob")});
    assertThat(actual).isEqualTo(expected);
}
/**
 * Creates a temporary file named {@code <prefix>...<suffix>} inside {@code dir}.
 *
 * @param dir    directory that will contain the temporary file; must exist
 * @param prefix file-name prefix
 * @param suffix file-name suffix (including any leading dot)
 * @return the newly created file
 * @throws IOException if the directory is missing or the file cannot be created
 */
public static File createTmpFile(String dir, String prefix, String suffix) throws IOException {
    Path parent = Paths.get(dir);
    Path tmp = Files.createTempFile(parent, prefix, suffix);
    return tmp.toFile();
}
// DiskUtils.createTmpFile must honor the requested name prefix and suffix.
@Test
void testCreateTmpFile() throws IOException {
    File tmpFile = null;
    try {
        tmpFile = DiskUtils.createTmpFile("nacos1", ".ut");
        assertTrue(tmpFile.getName().startsWith("nacos1"));
        assertTrue(tmpFile.getName().endsWith(".ut"));
    } finally {
        // Best-effort cleanup of the created file.
        if (tmpFile != null) {
            tmpFile.deleteOnExit();
        }
    }
}
/**
 * Convenience overload: delegates to {@code shouldShow(int)} with the key's
 * primary code. Returns false for a null key.
 */
public boolean shouldShow(@Nullable Keyboard.Key pressedKey) {
    return pressedKey != null && shouldShow(pressedKey.getPrimaryCode());
}
// Types 't','t','e','s','t' against the watched word "test": per the test
// name, the repeated starting 't' resets the match path, and shouldShow must
// fire only once the full word has been entered.
@Test
public void testPathResetWithSameStart() {
    final OnKeyWordHelper helper = new OnKeyWordHelper("test".toCharArray());
    Keyboard.Key key = Mockito.mock(Keyboard.Key.class);
    Mockito.doReturn((int) 't').when(key).getPrimaryCode();
    Assert.assertFalse(helper.shouldShow(key));
    Mockito.doReturn((int) 't').when(key).getPrimaryCode();
    Assert.assertFalse(helper.shouldShow(key));
    Mockito.doReturn((int) 'e').when(key).getPrimaryCode();
    Assert.assertFalse(helper.shouldShow(key));
    Mockito.doReturn((int) 's').when(key).getPrimaryCode();
    Assert.assertFalse(helper.shouldShow(key));
    Mockito.doReturn((int) 't').when(key).getPrimaryCode();
    Assert.assertTrue(helper.shouldShow(key));
}
/**
 * Returns the server-relative path to the analytics JS, if configured and safe.
 * Rejects absolute paths, path traversal ("..") and full URLs ("://") so the
 * value can only reference a resource inside the web context; accepted paths
 * are normalized with a leading slash.
 */
@Override
public Optional<String> getUrlPathToJs() {
    return Optional.ofNullable(analytics)
        .map(WebAnalytics::getUrlPathToJs)
        .filter(path -> !path.startsWith("/") && !path.contains("..") && !path.contains("://"))
        .map(path -> "/" + path);
}
// Paths containing ".." must be rejected (path-traversal protection).
@Test
public void return_empty_if_path_has_up_operation() {
    WebAnalytics analytics = newWebAnalytics("foo/../bar");
    WebAnalyticsLoaderImpl underTest = new WebAnalyticsLoaderImpl(new WebAnalytics[] {analytics});
    assertThat(underTest.getUrlPathToJs()).isEmpty();
}
/**
 * FEEL all(): logical conjunction over a list. A null list is vacuously true,
 * null elements are ignored, and any non-Boolean element is an error.
 */
public FEELFnResult<Boolean> invoke(@ParameterName("list") List list) {
    if (list == null) {
        return FEELFnResult.ofResult(true);
    }
    boolean conjunction = true;
    for (final Object item : list) {
        if (item == null) {
            continue; // null elements do not affect the result
        }
        if (!(item instanceof Boolean)) {
            return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "an element in the list is not" + " a Boolean"));
        }
        conjunction = conjunction && (Boolean) item;
    }
    return FEELFnResult.ofResult(conjunction);
}
// Invoking with a null argument must yield true (vacuous conjunction).
@Test
void invokeArrayParamNull() {
    FunctionTestUtil.assertResult(nnAllFunction.invoke((Object[]) null), true);
}
/** Returns the raw (unparameterized) type backing this key. */
@Override
public Type getRawType() {
    return rawType;
}
// getRawType must return exactly the raw type supplied at construction,
// independent of the type arguments.
@Test
void getRawType() {
    Type rawType = List.class;
    List<Type> typeArguments = Arrays.asList(String.class, Boolean.class, Long.class);
    EfestoClassKey efestoClassKey = new EfestoClassKey(rawType, typeArguments.toArray(new Type[0]));
    Type retrieved = efestoClassKey.getRawType();
    assertThat(retrieved).isNotNull();
    assertThat(retrieved).isEqualTo(rawType);
}
/**
 * Accepts a new point, delegating to {@code doAccept} through a guard that
 * detects (and fails on) concurrent invocation.
 */
@Override
public void accept(Point newPoint) {
    // Ensure this method is never called by multiple threads at the same time.
    parallelismDetector.run(
        () -> doAccept(newPoint)
    );
}
// Points within a track must be time-ordered: feeding a point earlier than the
// previous one must throw IllegalArgumentException.
@Test
public void testTimeRegression_instantaneous() {
    TrackMaker maker = new TrackMaker(Duration.ofMinutes(5), new TestConsumer());
    maker.accept(newPoint("track1", Instant.EPOCH));
    maker.accept(newPoint("track1", Instant.EPOCH.plusSeconds(60L * 1)));
    maker.accept(newPoint("track1", Instant.EPOCH.plusSeconds(60L * 2)));
    maker.accept(newPoint("track1", Instant.EPOCH.plusSeconds(60L * 3)));
    maker.accept(newPoint("track1", Instant.EPOCH.plusSeconds(60L * 4)));
    maker.accept(newPoint("track1", Instant.EPOCH.plusSeconds(60L * 5)));
    Point badInputPoint = newPoint("track1", Instant.EPOCH);
    /*
     * The points supplied to a TrackMaker cannot go backwards in time. Some form of input
     * filter must guarantee this.
     */
    assertThrows(
        IllegalArgumentException.class,
        () -> maker.accept(badInputPoint)
    );
}
/** Delegates element iteration to the underlying collection. */
@Override
public void forEach(Consumer<? super E> action) {
    underlying().forEach(action);
}
// forEach must delegate to the wrapped underlying set.
@Test
public void testDelegationOfForEach() {
    final Consumer<Object> mockConsumer = mock(Consumer.class);
    new PCollectionsTreeSetWrapperDelegationChecker<>()
        .defineMockConfigurationForVoidMethodInvocation(mock -> mock.forEach(eq(mockConsumer)))
        .defineWrapperVoidMethodInvocation(wrapper -> wrapper.forEach(mockConsumer))
        .doVoidMethodDelegationCheck();
}
/**
 * Resolves a batch of non-glob GCS paths into MatchResults with a single
 * GcsUtil.getObjects call; per-path IOExceptions become error-status results.
 */
@VisibleForTesting
List<MatchResult> matchNonGlobs(List<GcsPath> gcsPaths) throws IOException {
    List<StorageObjectOrIOException> results = options.getGcsUtil().getObjects(gcsPaths);
    ImmutableList.Builder<MatchResult> ret = ImmutableList.builder();
    for (StorageObjectOrIOException result : results) {
        ret.add(toMatchResult(result));
    }
    return ret.build();
}
// matchNonGlobs must map each storage result positionally: metadata -> OK,
// FileNotFoundException -> NOT_FOUND, other IOException -> ERROR.
@Test
public void testMatchNonGlobs() throws Exception {
    List<StorageObjectOrIOException> items = new ArrayList<>();
    // Files within the directory
    items.add(
        StorageObjectOrIOException.create(
            createStorageObject("gs://testbucket/testdirectory/file1name", 1L /* fileSize */)));
    items.add(
        StorageObjectOrIOException.create(
            createStorageObject("gs://testbucket/testdirectory/dir2name/", 0L /* fileSize */)));
    items.add(StorageObjectOrIOException.create(new FileNotFoundException()));
    items.add(StorageObjectOrIOException.create(new IOException()));
    items.add(
        StorageObjectOrIOException.create(
            createStorageObject("gs://testbucket/testdirectory/file4name", 4L /* fileSize */)));
    List<GcsPath> gcsPaths =
        ImmutableList.of(
            GcsPath.fromUri("gs://testbucket/testdirectory/file1name"),
            GcsPath.fromUri("gs://testbucket/testdirectory/dir2name/"),
            GcsPath.fromUri("gs://testbucket/testdirectory/file2name"),
            GcsPath.fromUri("gs://testbucket/testdirectory/file3name"),
            GcsPath.fromUri("gs://testbucket/testdirectory/file4name"));
    when(mockGcsUtil.getObjects(eq(gcsPaths))).thenReturn(items);
    List<MatchResult> matchResults = gcsFileSystem.matchNonGlobs(gcsPaths);
    assertEquals(5, matchResults.size());
    assertThat(
        ImmutableList.of("gs://testbucket/testdirectory/file1name"),
        contains(toFilenames(matchResults.get(0)).toArray()));
    assertThat(
        ImmutableList.of("gs://testbucket/testdirectory/dir2name/"),
        contains(toFilenames(matchResults.get(1)).toArray()));
    assertEquals(Status.NOT_FOUND, matchResults.get(2).status());
    assertEquals(Status.ERROR, matchResults.get(3).status());
    assertThat(
        ImmutableList.of("gs://testbucket/testdirectory/file4name"),
        contains(toFilenames(matchResults.get(4)).toArray()));
}
/**
 * Registers an OS-bean probe under the given metric name. When reading free
 * physical memory is globally disabled, a constant -1 probe is registered for
 * "getFreePhysicalMemorySize" instead of invoking the bean method.
 */
static void registerMethod(MetricsRegistry metricsRegistry, Object osBean, String methodName, String name) {
    if (OperatingSystemMXBeanSupport.GET_FREE_PHYSICAL_MEMORY_SIZE_DISABLED
            && methodName.equals("getFreePhysicalMemorySize")) {
        metricsRegistry.registerStaticProbe(osBean, name, MANDATORY,
            (LongProbeFunction<Object>) source -> -1);
    } else {
        registerMethod(metricsRegistry, osBean, methodName, name, 1);
    }
}
// Registering a bean method that does not exist must not register any metric
// (silently skipped rather than failing).
@Test
public void registerMethod_whenNotExist() {
    metricsRegistry = new MetricsRegistryImpl(getLogger(MetricsRegistryImpl.class), INFO);
    FakeOperatingSystemBean fakeOperatingSystemBean = new FakeOperatingSystemBean();
    registerMethod(metricsRegistry, fakeOperatingSystemBean, "notExist", "notExist");
    boolean parameterExist = metricsRegistry.getNames().contains("notExist");
    assertFalse(parameterExist);
}
/**
 * Buffers a change record under its serialized key. The earliest prior value
 * is preserved across repeated puts for the same key; with logging enabled the
 * key is marked dirty for the changelog, and buffer metrics are refreshed.
 *
 * @return always true (the record is always accepted)
 */
@Override
public boolean put(final long time, final Record<K, Change<V>> record, final ProcessorRecordContext recordContext) {
    requireNonNull(record.value(), "value cannot be null");
    requireNonNull(recordContext, "recordContext cannot be null");
    final Bytes serializedKey = Bytes.wrap(keySerde.serializer().serialize(changelogTopic, record.key()));
    final Change<byte[]> serialChange = valueSerde.serializeParts(changelogTopic, record.value());
    final BufferValue buffered = getBuffered(serializedKey);
    final byte[] serializedPriorValue;
    if (buffered == null) {
        // First sighting of this key: the change's old value is the prior value.
        serializedPriorValue = serialChange.oldValue;
    } else {
        // Key already buffered: keep the previously captured prior value.
        serializedPriorValue = buffered.priorValue();
    }
    cleanPut(
        time,
        serializedKey,
        new BufferValue(serializedPriorValue, serialChange.oldValue, serialChange.newValue, recordContext)
    );
    if (loggingEnabled) {
        dirtyKeys.add(serializedKey);
    }
    updateBufferMetrics();
    return true;
}
// Builder.withLoggingEnabled must retain the supplied changelog topic config
// and flip the logging flag on.
@Test
public void bufferShouldAllowLoggingEnablement() {
    final String expect = "3";
    final Map<String, String> logConfig = new HashMap<>();
    logConfig.put("min.insync.replicas", expect);
    final StoreBuilder<InMemoryTimeOrderedKeyValueChangeBuffer<Object, Object, Change<Object>>> builder =
        new InMemoryTimeOrderedKeyValueChangeBuffer.Builder<>(null, null, null)
            .withLoggingEnabled(logConfig);
    assertThat(builder.logConfig(), is(singletonMap("min.insync.replicas", expect)));
    assertThat(builder.loggingEnabled(), is(true));
}
/** Schedules the periodic push-event broadcast task (period and delay in seconds). */
@Override
public void start() {
    this.executorService.scheduleAtFixedRate(this::tryBroadcastEvents, getInitialDelay(), getPeriod(), TimeUnit.SECONDS);
}
// With a connected client but no buffered push events, the poll task must not
// broadcast anything.
@Test
public void nothing_to_broadcast_when_no_push_events() {
    var project = db.components().insertPrivateProject().getMainBranchComponent();
    var sonarLintClient = mock(SonarLintClient.class);
    when(sonarLintClient.getClientProjectUuids()).thenReturn(Set.of(project.uuid()));
    when(clientsRegistry.getClients()).thenReturn(List.of(sonarLintClient));
    var underTest = new PushEventPollScheduler(executorService, clientsRegistry, db.getDbClient(), system2, config);
    underTest.start();
    executorService.runCommand();
    verify(clientsRegistry, times(0)).broadcastMessage(any(SonarLintPushEvent.class));
}
/**
 * Serializes topology highlights into the JSON payload shape the UI expects:
 * device/host/link arrays, plus optional "subdue" and "delay" fields that are
 * emitted only for non-default values.
 */
public static ObjectNode json(Highlights highlights) {
    ObjectNode payload = objectNode();

    ArrayNode devices = arrayNode();
    ArrayNode hosts = arrayNode();
    ArrayNode links = arrayNode();

    highlights.devices().forEach(device -> devices.add(json(device)));
    highlights.hosts().forEach(host -> hosts.add(json(host)));
    highlights.links().forEach(link -> links.add(json(link)));

    payload.set(DEVICES, devices);
    payload.set(HOSTS, hosts);
    payload.set(LINKS, links);

    Highlights.Amount subdueLevel = highlights.subdueLevel();
    if (!Highlights.Amount.ZERO.equals(subdueLevel)) {
        payload.put(SUBDUE, subdueLevel.toString());
    }

    int delayMs = highlights.delayMs();
    if (delayMs > 0) {
        payload.put(DELAY, delayMs);
    }
    return payload;
}
// subdueAllElse(MINIMALLY) must serialize as "subdue": "min" alongside empty
// device/host/link arrays.
@Test
public void subdueMinimalHighlights() {
    Highlights h = new Highlights().subdueAllElse(Amount.MINIMALLY);
    payload = TopoJson.json(h);
    checkEmptyArrays();
    String subdue = JsonUtils.string(payload, TopoJson.SUBDUE);
    assertEquals("not min", "min", subdue);
}
/**
 * Maps a protobuf-cached issue back into a DefaultIssue: every optional proto
 * field ("hasX") becomes a nullable typed value, and comments, tags, code
 * variants, impacts and field-diff changes are replayed onto the new instance.
 * Code left byte-identical (field-by-field mapping; order matters for review).
 */
@VisibleForTesting static DefaultIssue toDefaultIssue(IssueCache.Issue next) { DefaultIssue defaultIssue = new DefaultIssue(); defaultIssue.setKey(next.getKey()); defaultIssue.setType(RuleType.valueOf(next.getRuleType())); defaultIssue.setComponentUuid(next.hasComponentUuid() ? next.getComponentUuid() : null); defaultIssue.setComponentKey(next.getComponentKey()); defaultIssue.setProjectUuid(next.getProjectUuid()); defaultIssue.setProjectKey(next.getProjectKey()); defaultIssue.setRuleKey(RuleKey.parse(next.getRuleKey())); defaultIssue.setLanguage(next.hasLanguage() ? next.getLanguage() : null); defaultIssue.setSeverity(next.hasSeverity() ? next.getSeverity() : null); defaultIssue.setManualSeverity(next.getManualSeverity()); defaultIssue.setMessage(next.hasMessage() ? next.getMessage() : null); defaultIssue.setMessageFormattings(next.hasMessageFormattings() ? next.getMessageFormattings() : null); defaultIssue.setLine(next.hasLine() ? next.getLine() : null); defaultIssue.setGap(next.hasGap() ? next.getGap() : null); defaultIssue.setEffort(next.hasEffort() ? Duration.create(next.getEffort()) : null); defaultIssue.setStatus(next.getStatus()); defaultIssue.setResolution(next.hasResolution() ? next.getResolution() : null); defaultIssue.setAssigneeUuid(next.hasAssigneeUuid() ? next.getAssigneeUuid() : null); defaultIssue.setAssigneeLogin(next.hasAssigneeLogin() ? next.getAssigneeLogin() : null); defaultIssue.setChecksum(next.hasChecksum() ? next.getChecksum() : null); defaultIssue.setAuthorLogin(next.hasAuthorLogin() ? next.getAuthorLogin() : null); next.getCommentsList().forEach(c -> defaultIssue.addComment(toDefaultIssueComment(c))); defaultIssue.setTags(ImmutableSet.copyOf(STRING_LIST_SPLITTER.split(next.getTags()))); defaultIssue.setCodeVariants(ImmutableSet.copyOf(STRING_LIST_SPLITTER.split(next.getCodeVariants()))); defaultIssue.setRuleDescriptionContextKey(next.hasRuleDescriptionContextKey() ? 
next.getRuleDescriptionContextKey() : null); defaultIssue.setLocations(next.hasLocations() ? next.getLocations() : null); defaultIssue.setIsFromExternalRuleEngine(next.getIsFromExternalRuleEngine()); defaultIssue.setCreationDate(new Date(next.getCreationDate())); defaultIssue.setUpdateDate(next.hasUpdateDate() ? new Date(next.getUpdateDate()) : null); defaultIssue.setCloseDate(next.hasCloseDate() ? new Date(next.getCloseDate()) : null); defaultIssue.setCurrentChangeWithoutAddChange(next.hasCurrentChanges() ? toDefaultIssueChanges(next.getCurrentChanges()) : null); defaultIssue.setNew(next.getIsNew()); defaultIssue.setIsOnChangedLine(next.getIsOnChangedLine()); defaultIssue.setIsNewCodeReferenceIssue(next.getIsNewCodeReferenceIssue()); defaultIssue.setCopied(next.getIsCopied()); defaultIssue.setBeingClosed(next.getBeingClosed()); defaultIssue.setOnDisabledRule(next.getOnDisabledRule()); defaultIssue.setChanged(next.getIsChanged()); defaultIssue.setSendNotifications(next.getSendNotifications()); defaultIssue.setSelectedAt(next.hasSelectedAt() ? next.getSelectedAt() : null); defaultIssue.setQuickFixAvailable(next.getQuickFixAvailable()); defaultIssue.setPrioritizedRule(next.getIsPrioritizedRule()); defaultIssue.setIsNoLongerNewCodeReferenceIssue(next.getIsNoLongerNewCodeReferenceIssue()); defaultIssue.setCleanCodeAttribute(next.hasCleanCodeAttribute() ? CleanCodeAttribute.valueOf(next.getCleanCodeAttribute()) : null); if (next.hasAnticipatedTransitionUuid()) { defaultIssue.setAnticipatedTransitionUuid(next.getAnticipatedTransitionUuid()); } for (IssueCache.Impact impact : next.getImpactsList()) { defaultIssue.addImpact(SoftwareQuality.valueOf(impact.getSoftwareQuality()), Severity.valueOf(impact.getSeverity())); } for (IssueCache.FieldDiffs protoFieldDiffs : next.getChangesList()) { defaultIssue.addChange(toDefaultIssueChanges(protoFieldDiffs)); } return defaultIssue; }
// Impacts stored in the proto issue must round-trip into DefaultIssue.impacts().
@Test
public void toDefaultIssue_whenImpactIsSet_shouldSetItInDefaultIssue() {
    IssueCache.Issue issue = prepareIssueWithCompulsoryFields()
        .addImpacts(toImpact(SoftwareQuality.MAINTAINABILITY, Severity.HIGH))
        .addImpacts(toImpact(SoftwareQuality.RELIABILITY, Severity.LOW))
        .build();
    DefaultIssue defaultIssue = ProtobufIssueDiskCache.toDefaultIssue(issue);
    assertThat(defaultIssue.impacts()).containsExactlyInAnyOrderEntriesOf(Map.of(SoftwareQuality.MAINTAINABILITY, Severity.HIGH,
        SoftwareQuality.RELIABILITY, Severity.LOW));
}
/**
 * Sends a single in-app notification to a MEMBER-type user.
 *
 * @return the id of the created notification message
 */
@Override
public Long sendSingleNotifyToMember(Long userId, String templateCode, Map<String, Object> templateParams) {
    return sendSingleNotify(userId, UserTypeEnum.MEMBER.getValue(), templateCode, templateParams);
}
// Happy path of sending a notification to a member: template is resolved from
// cache, content formatted, and the created message id returned.
@Test
public void testSendSingleNotifyToMember() {
    // Prepare parameters
    Long userId = randomLongId();
    String templateCode = randomString();
    Map<String, Object> templateParams = MapUtil.<String, Object>builder().put("code", "1234")
        .put("op", "login").build();
    // Mock the NotifyTemplateService methods
    NotifyTemplateDO template = randomPojo(NotifyTemplateDO.class, o -> {
        o.setStatus(CommonStatusEnum.ENABLE.getStatus());
        o.setContent("验证码为{code}, 操作为{op}");
        o.setParams(Lists.newArrayList("code", "op"));
    });
    when(notifyTemplateService.getNotifyTemplateByCodeFromCache(eq(templateCode))).thenReturn(template);
    String content = randomString();
    when(notifyTemplateService.formatNotifyTemplateContent(eq(template.getContent()), eq(templateParams)))
        .thenReturn(content);
    // Mock the NotifyMessageService methods
    Long messageId = randomLongId();
    when(notifyMessageService.createNotifyMessage(eq(userId), eq(UserTypeEnum.MEMBER.getValue()),
        eq(template), eq(content), eq(templateParams))).thenReturn(messageId);
    // Invoke
    Long resultMessageId = notifySendService.sendSingleNotifyToMember(userId, templateCode, templateParams);
    // Assert
    assertEquals(messageId, resultMessageId);
}
/**
 * Builds the header row for the given CSV format by joining its configured
 * header column names with the format's delimiter character.
 */
static String headerLine(CSVFormat csvFormat) {
    char delimiter = csvFormat.getDelimiter();
    return String.join(Character.toString(delimiter), csvFormat.getHeader());
}
// With trailing delimiters enabled, the empty trailing field of each row must
// be dropped from the parsed record.
@Test
public void givenTrailingDelimiter_skipsEndingDelimiter() {
    CSVFormat csvFormat = csvFormat().withTrailingDelimiter(true);
    PCollection<String> input =
        pipeline.apply(Create.of(headerLine(csvFormat), "a,1,1.1,", "b,2,2.2,", "c,3,3.3,"));
    CsvIOStringToCsvRecord underTest = new CsvIOStringToCsvRecord(csvFormat);
    CsvIOParseResult<List<String>> result = input.apply(underTest);
    PAssert.that(result.getOutput())
        .containsInAnyOrder(
            Arrays.asList(
                Arrays.asList("a", "1", "1.1"),
                Arrays.asList("b", "2", "2.2"),
                Arrays.asList("c", "3", "3.3")));
    PAssert.that(result.getErrors()).empty();
    pipeline.run();
}
/**
 * Parses the stream in a forked JVM. A TikaException raised locally keeps the
 * client alive for reuse; an IOException means the forked process is broken
 * and is discarded. A Throwable returned from the child call is rethrown
 * here under its original type.
 */
public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context)
        throws IOException, SAXException, TikaException {
    if (stream == null) {
        throw new NullPointerException("null stream");
    }
    Throwable t;
    boolean alive = false;
    ForkClient client = acquireClient();
    try {
        // Recursive wrapper handlers already capture metadata themselves;
        // otherwise tee the content into a metadata-capturing handler too.
        ContentHandler tee = (handler instanceof AbstractRecursiveParserWrapperHandler)
                ? handler : new TeeContentHandler(handler, new MetadataContentHandler(metadata));
        t = client.call("parse", stream, tee, metadata, context);
        alive = true;
    } catch (TikaException te) {
        // Problem occurred on our side
        alive = true;
        throw te;
    } catch (IOException e) {
        // Problem occurred on the other side
        throw new TikaException("Failed to communicate with a forked parser process." +
                " The process has most likely crashed due to some error" +
                " like running out of memory. A new process will be" +
                " started for the next parsing request.", e);
    } finally {
        releaseClient(client, alive);
    }
    // Re-throw whatever the forked server reported, preserving the type.
    if (t instanceof IOException) {
        throw (IOException) t;
    } else if (t instanceof SAXException) {
        throw (SAXException) t;
    } else if (t instanceof TikaException) {
        throw (TikaException) t;
    } else if (t != null) {
        throw new TikaException("Unexpected error in forked server process", t);
    }
}
// End-to-end fork parse through ForkTestParser: must yield its "Hello, World!"
// content and a text/plain content type.
@Test
public void testHelloWorld() throws Exception {
    try (ForkParser parser = new ForkParser(ForkParserTest.class.getClassLoader(), new ForkTestParser())) {
        Metadata metadata = new Metadata();
        ContentHandler output = new BodyContentHandler();
        InputStream stream = new ByteArrayInputStream(new byte[0]);
        ParseContext context = new ParseContext();
        parser.parse(stream, output, metadata, context);
        assertEquals("Hello, World!", output.toString().trim());
        assertEquals("text/plain", metadata.get(Metadata.CONTENT_TYPE));
    }
}
/**
 * Rejects ACL binding filters containing an UNKNOWN pattern or entry component.
 *
 * @throws InvalidRequestException if either component filter is unknown
 */
static void validateFilter(AclBindingFilter filter) {
    if (filter.patternFilter().isUnknown()) {
        throw new InvalidRequestException("Unknown patternFilter.");
    }
    if (filter.entryFilter().isUnknown()) {
        throw new InvalidRequestException("Unknown entryFilter.");
    }
}
// Fully wildcarded filters are accepted; an UNKNOWN pattern type or UNKNOWN
// entry permission must be rejected with the matching message.
@Test
public void testValidateFilter() {
    AclControlManager.validateFilter(new AclBindingFilter(
        new ResourcePatternFilter(ResourceType.ANY, "*", LITERAL),
        new AccessControlEntryFilter("User:*", "*", AclOperation.ANY, AclPermissionType.ANY)));
    assertEquals("Unknown patternFilter.",
        assertThrows(InvalidRequestException.class, () ->
            AclControlManager.validateFilter(new AclBindingFilter(
                new ResourcePatternFilter(ResourceType.ANY, "*", PatternType.UNKNOWN),
                new AccessControlEntryFilter("User:*", "*", AclOperation.ANY, AclPermissionType.ANY)))).
            getMessage());
    assertEquals("Unknown entryFilter.",
        assertThrows(InvalidRequestException.class, () ->
            AclControlManager.validateFilter(new AclBindingFilter(
                new ResourcePatternFilter(ResourceType.ANY, "*", MATCH),
                new AccessControlEntryFilter("User:*", "*", AclOperation.ANY, AclPermissionType.UNKNOWN)))).
            getMessage());
}
/**
 * Submits the task unless a task with the same signature is already running.
 *
 * @return true if accepted; false on duplicate signature or executor rejection
 */
public boolean submit(PriorityLeaderTask task) {
    long signature = task.getSignature();
    // Guard the check-then-put on runningTasks against concurrent submitters.
    synchronized (runningTasks) {
        if (runningTasks.containsKey(signature)) {
            return false;
        }
        try {
            PriorityFutureTask<?> future = executor.submit(task);
            runningTasks.put(signature, future);
            return true;
        } catch (RejectedExecutionException e) {
            LOG.warn("submit task {} failed.", task.getSignature(), e);
            return false;
        }
    }
}
// Exercises duplicate rejection, priority ordering and runtime priority
// updates; the SEQ assertions at the end pin the completion order 1, 4, 3, 2.
@Test
public void testSubmit() {
    // submit task
    PriorityLeaderTask task1 = new TestLeaderTask(1L);
    Assert.assertTrue(executor.submit(task1));
    Assert.assertEquals(1, executor.getTaskNum());
    // submit same running task error
    Assert.assertFalse(executor.submit(task1));
    Assert.assertEquals(1, executor.getTaskNum());
    // submit another task
    PriorityLeaderTask task2 = new TestLeaderTask(2L);
    Assert.assertTrue(executor.submit(task2));
    Assert.assertEquals(2, executor.getTaskNum());
    // submit priority task
    PriorityLeaderTask task3 = new TestLeaderTask(3L, 1);
    Assert.assertTrue(executor.submit(task3));
    Assert.assertEquals(3, executor.getTaskNum());
    // submit priority task
    PriorityLeaderTask task4 = new TestLeaderTask(4L);
    Assert.assertTrue(executor.submit(task4));
    Assert.assertEquals(4, executor.getTaskNum());
    Assert.assertTrue(executor.updatePriority(4L, 5));
    // wait for tasks run to end
    // NOTE(review): the fixed 2s sleep makes this timing-sensitive on slow
    // machines; consider awaiting task completion instead.
    try {
        Thread.sleep(2000);
        Assert.assertEquals(0, executor.getTaskNum());
    } catch (InterruptedException e) {
        LOG.error("error", e);
    }
    Assert.assertEquals(4, SEQ.size());
    Assert.assertEquals(1L, SEQ.get(0).longValue());
    Assert.assertEquals(4L, SEQ.get(1).longValue());
    Assert.assertEquals(3L, SEQ.get(2).longValue());
    Assert.assertEquals(2L, SEQ.get(3).longValue());
}
/**
 * Instantiates the configured ValueExtractor, first via the supplied class
 * loader and, if that fails (or no loader is given), falling back to
 * Class.forName-based instantiation.
 */
static ValueExtractor instantiateExtractor(AttributeConfig config, ClassLoader classLoader) {
    ValueExtractor extractor = null;
    if (classLoader != null) {
        try {
            extractor = instantiateExtractorWithConfigClassLoader(config, classLoader);
        } catch (IllegalArgumentException ex) {
            // cached back-stage, initialised lazily since it's not a common case
            Logger.getLogger(ExtractorHelper.class)
                .warning("Could not instantiate extractor with the config class loader", ex);
        }
    }
    if (extractor == null) {
        extractor = instantiateExtractorWithClassForName(config, classLoader);
    }
    return extractor;
}
// A non-loadable extractor class must surface as IllegalArgumentException
// caused by ClassNotFoundException.
@Test
public void instantiate_extractor_notExistingClass() {
    // GIVEN
    AttributeConfig config = new AttributeConfig("iq", "not.existing.class");
    // WHEN
    assertThatThrownBy(() -> instantiateExtractor(config))
        .isInstanceOf(IllegalArgumentException.class)
        .hasCauseInstanceOf(ClassNotFoundException.class);
}
/**
 * Builds a Hive table sink for an INSERT. Only PARQUET, ORC and TEXTFILE
 * formats are supported; TEXTFILE always writes uncompressed while other
 * formats use the session's connector sink compression codec. The catalog's
 * connector must be registered and expose a cloud configuration.
 */
public HiveTableSink(HiveTable hiveTable, TupleDescriptor desc, boolean isStaticPartitionSink,
                     SessionVariable sessionVariable) {
    this.desc = desc;
    this.stagingDir = HiveWriteUtils.getStagingDir(hiveTable, sessionVariable.getHiveTempStagingDir());
    this.partitionColNames = hiveTable.getPartitionColumnNames();
    this.dataColNames = hiveTable.getDataColumnNames();
    this.tableIdentifier = hiveTable.getUUID();
    this.isStaticPartitionSink = isStaticPartitionSink;
    HiveStorageFormat format = hiveTable.getStorageFormat();
    if (format != HiveStorageFormat.PARQUET && format != HiveStorageFormat.ORC &&
            format != HiveStorageFormat.TEXTFILE) {
        throw new StarRocksConnectorException("Writing to hive table in [%s] format is not supported.", format.name());
    }
    this.fileFormat = hiveTable.getStorageFormat().name().toLowerCase();
    if (format == HiveStorageFormat.TEXTFILE) {
        // Text files carry their serde properties and are never compressed here.
        this.textFileFormatDesc = Optional.of(toTextFileFormatDesc(hiveTable.getSerdeProperties()));
        this.compressionType = String.valueOf(TCompressionType.NO_COMPRESSION);
    } else {
        this.compressionType = sessionVariable.getConnectorSinkCompressionCodec();
    }
    this.targetMaxFileSize = sessionVariable.getConnectorSinkTargetMaxFileSize();
    String catalogName = hiveTable.getCatalogName();
    Connector connector = GlobalStateMgr.getCurrentState().getConnectorMgr().getConnector(catalogName);
    Preconditions.checkState(connector != null,
            String.format("connector of catalog %s should not be null", catalogName));
    this.cloudConfiguration = connector.getMetadata().getCloudConfiguration();
    Preconditions.checkState(cloudConfiguration != null,
            String.format("cloudConfiguration of catalog %s should not be null", catalogName));
}
@Test
public void testHiveTableSink(@Mocked CatalogConnector hiveConnector) {
    // Build a minimal partitioned Hive table: data column c1, partition column p1.
    HiveTable.Builder builder = HiveTable.builder()
            .setId(ConnectorTableId.CONNECTOR_ID_GENERATOR.getNextId().asInt())
            .setTableName("hive_table")
            .setCatalogName("hive_catalog")
            .setResourceName(toResourceName("hive_catalog", "hive"))
            .setHiveDbName("hive_db")
            .setHiveTableName("hive_table")
            .setPartitionColumnNames(Lists.newArrayList("p1"))
            .setDataColumnNames(Lists.newArrayList("c1"))
            .setFullSchema(Lists.newArrayList(new Column("c1", Type.INT), new Column("p1", Type.INT)))
            .setTableLocation("hdfs://hadoop01:9000/tableLocation")
            .setProperties(new HashMap<>())
            .setStorageFormat(HiveStorageFormat.PARQUET)
            .setCreateTime(System.currentTimeMillis());
    // Mock the connector so the sink constructor can resolve a cloud configuration.
    new Expectations() {
        {
            hiveConnector.getMetadata().getCloudConfiguration();
            result = CloudConfigurationFactory.buildCloudConfigurationForStorage(new HashMap<>());
            minTimes = 1;
        }
    };
    ConnectorMgr connectorMgr = AnalyzeTestUtil.getConnectContext().getGlobalStateMgr().getConnectorMgr();
    new Expectations(connectorMgr) {
        {
            connectorMgr.getConnector("hive_catalog");
            result = hiveConnector;
            minTimes = 1;
        }
    };
    TupleDescriptor desc = new TupleDescriptor(new TupleId(0));
    HiveTableSink sink = new HiveTableSink(builder.build(), desc, true, new SessionVariable());
    Assert.assertNull(sink.getExchNodeId());
    Assert.assertNull(sink.getOutputPartition());
    Assert.assertNull(sink.getOutputPartition());
    Assert.assertTrue(sink.canUsePipeLine());
    Assert.assertTrue(sink.canUseRuntimeAdaptiveDop());
    Assert.assertTrue(sink.getStagingDir().contains("/tmp/starrocks"));
    Assert.assertTrue(sink.getExplainString("SINK", TExplainLevel.NORMAL).contains(
            "SINKHive TABLE SINK\n" +
            "SINK TABLE: hive_catalog.hive_db.hive_table"));
    // Verify the thrift representation carries all sink properties.
    TDataSink tDataSink = sink.toThrift();
    Assert.assertEquals(TDataSinkType.HIVE_TABLE_SINK, tDataSink.getType());
    THiveTableSink tHiveTableSink = tDataSink.getHive_table_sink();
    Assert.assertTrue(tHiveTableSink.getStaging_dir().startsWith("hdfs://hadoop01:9000/tmp/starrocks"));
    Assert.assertEquals("parquet", tHiveTableSink.getFile_format());
    Assert.assertEquals("c1", tHiveTableSink.getData_column_names().get(0));
    Assert.assertEquals("p1", tHiveTableSink.getPartition_column_names().get(0));
    Assert.assertEquals(TCompressionType.NO_COMPRESSION, tHiveTableSink.getCompression_type());
    Assert.assertTrue(tHiveTableSink.is_static_partition_sink);
    Assert.assertEquals(TCloudType.DEFAULT, tHiveTableSink.getCloud_configuration().cloud_type);
    // Unsupported formats (e.g. AVRO) must be rejected at construction time.
    builder.setStorageFormat(HiveStorageFormat.AVRO);
    ExceptionChecker.expectThrowsWithMsg(StarRocksConnectorException.class,
            "Writing to hive table in [AVRO] format is not supported",
            () -> new HiveTableSink(builder.build(), desc, true, new SessionVariable()));
}
public CreateTableBuilder withPkConstraintName(String pkConstraintName) {
    // Validation rejects illegal constraint names before the value is stored.
    String validated = validateConstraintName(pkConstraintName);
    this.pkConstraintName = validated;
    return this;
}
@Test
public void withPkConstraintName_throws_IAE_if_name_starts_with_underscore() {
    // Constraint names must not begin with '_' (nor a digit).
    assertThatThrownBy(() -> underTest.withPkConstraintName("_a"))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessageContaining("Constraint name must not start by a number or '_', got '_a'");
}
/**
 * Reads attributes for the given path over SMB.
 * Containers map to SMB shares and report size/quota from the share information;
 * other paths are resolved via the file information of the share. The requested
 * path type (file vs. directory) must match the remote entry type.
 *
 * @throws NotfoundException   when the remote type does not match the requested type
 * @throws BackgroundException on any mapped SMB failure
 */
@Override
public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException {
    if(file.isRoot()) {
        return PathAttributes.EMPTY;
    }
    final SMBSession.DiskShareWrapper share = session.openShare(file);
    try {
        if(new SMBPathContainerService(session).isContainer(file)) {
            // Container: derive used size and remaining quota from the share info.
            final ShareInfo shareInformation = share.get().getShareInformation();
            final PathAttributes attributes = new PathAttributes();
            final long used = shareInformation.getTotalSpace() - shareInformation.getFreeSpace();
            attributes.setSize(used);
            attributes.setQuota(new Quota.Space(used, shareInformation.getFreeSpace()));
            return attributes;
        }
        else {
            final FileAllInformation fileInformation = share.get().getFileInformation(new SMBPathContainerService(session).getKey(file));
            // Fail when the requested path type does not match the remote entry type.
            if(file.isDirectory() && !fileInformation.getStandardInformation().isDirectory()) {
                throw new NotfoundException(String.format("File %s found but type is not directory", file.getName()));
            }
            else if(file.isFile() && fileInformation.getStandardInformation().isDirectory()) {
                throw new NotfoundException(String.format("File %s found but type is not file", file.getName()));
            }
            return this.toAttributes(fileInformation);
        }
    }
    catch(SMBRuntimeException e) {
        throw new SMBExceptionMappingService().map("Failure to read attributes of {0}", e, file);
    }
    finally {
        // Always hand the share back to the session.
        session.releaseShare(share);
    }
}
@Test
public void testFindDirectory() throws Exception {
    // Create a remote directory, then verify its attributes and the type mismatch check.
    final Path test = new SMBDirectoryFeature(session).mkdir(new Path(new DefaultHomeFinderService(session).find(),
            new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus());
    final SMBAttributesFinderFeature f = new SMBAttributesFinderFeature(session);
    final PathAttributes attributes = f.find(test);
    assertNotEquals(-1L, attributes.getModificationDate());
    // Test wrong type
    assertThrows(NotfoundException.class, () -> f.find(new Path(test.getAbsolute(), EnumSet.of(Path.Type.file))));
    new SMBDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Copies properties from {@code source} to {@code target} using the default converter.
 * Delegates to the converter-aware overload; {@code ignore} presumably lists property
 * names to skip — see that overload for exact semantics.
 *
 * @return the populated {@code target} instance
 */
public static <T, S> T copy(S source, T target, String... ignore) {
    return copy(source, target, DEFAULT_CONVERT, ignore);
}
@Test
public void testMapList() {
    Map<String, Object> data = new HashMap<>();
    // Build the nested map explicitly instead of via double-brace initialization,
    // which creates an anonymous inner class (raw-typed, holds an outer reference).
    Map<String, Object> templates = new HashMap<>();
    templates.put("0", Collections.singletonMap("name", "test"));
    templates.put("1", Collections.singletonMap("name", "test"));
    data.put("templates", templates);

    // Copy the map onto the Config bean and verify the templates collection arrived.
    Config config = FastBeanCopier.copy(data, new Config());
    Assert.assertNotNull(config);
    Assert.assertNotNull(config.templates);
    System.out.println(config.templates);
    Assert.assertEquals(2, config.templates.size());
}
/**
 * Computes the binomial coefficient C(n, m) — the number of ways to choose
 * {@code m} elements out of {@code n}.
 *
 * @param n total number of elements
 * @param m number of elements to select
 * @return C(n, m); {@code 1} when m == 0 or m == n, {@code 0} when n &lt; m
 */
public static long count(int n, int m) {
    if (0 == m || n == m) {
        return 1;
    }
    if (n < m) {
        return 0;
    }
    // Use the symmetry C(n, m) == C(n, n - m) so the factorial operands stay as
    // small as possible, reducing the risk of long overflow in the intermediates.
    int k = Math.min(m, n - m);
    return NumberUtil.factorial(n, n - k) / NumberUtil.factorial(k);
}
@Test
public void countTest() {
    // C(5, 2) == 10
    long result = Combination.count(5, 2);
    assertEquals(10, result);
    // C(n, n) == 1
    result = Combination.count(5, 5);
    assertEquals(1, result);
    // C(n, 0) == 1
    result = Combination.count(5, 0);
    assertEquals(1, result);
    // Sum of C(5, k) over all non-empty selections is 2^5 - 1 == 31
    long resultAll = Combination.countAll(5);
    assertEquals(31, resultAll);
}
/**
 * Checks whether the given string parses (after trimming) to a strictly
 * positive {@code int}.
 *
 * @param strNum the candidate string; may be {@code null}
 * @return {@code true} only for values &gt; 0; {@code false} for null,
 *         non-numeric input, zero and negatives
 */
public static boolean isPositiveInteger( String strNum ) {
    if ( strNum == null ) {
        return false;
    }
    try {
        return Integer.parseInt( strNum.trim() ) > 0;
    } catch ( NumberFormatException nfe ) {
        // Not a parseable integer at all.
        return false;
    }
}
@Test
public void test_isPositiveNumber_ForValueOtherThanPositiveIntegers() {
    // Every fixture value in notPosInt (non-positive / non-numeric) must be rejected.
    for ( String value : notPosInt) {
        assertFalse( JobEntryPing.isPositiveInteger( value ) );
    }
}
/**
 * Lists the local node ids of all workers belonging to the given worker group.
 *
 * @param workerGroupId id of the worker group to query
 * @return node ids of every worker in the group that maps to a known node
 * @throws UserException when the star client call fails
 */
public List<Long> getWorkersByWorkerGroup(long workerGroupId) throws UserException {
    List<Long> nodeIds = new ArrayList<>();
    prepare();
    try {
        List<WorkerGroupDetailInfo> workerGroupDetailInfos = client.
                listWorkerGroup(serviceId, Collections.singletonList(workerGroupId), true);
        for (WorkerGroupDetailInfo detailInfo : workerGroupDetailInfos) {
            // Map each worker to its node id, silently skipping workers with no mapping.
            detailInfo.getWorkersInfoList()
                    .forEach(x -> getOrUpdateNodeIdByWorkerInfo(x).ifPresent(nodeIds::add));
        }
        return nodeIds;
    } catch (StarClientException e) {
        // NOTE(review): the original cause is dropped here; consider chaining `e`
        // if UserException supports a cause — confirm against its constructors.
        throw new UserException("Failed to get workers by group id. error: " + e.getMessage());
    }
}
@Test
public void testGetWorkers() throws StarClientException, UserException {
    String serviceId = "1";
    Deencapsulation.setField(starosAgent, "serviceId", serviceId);
    // Fixture: two workers in group 10, one worker in group 11.
    long workerId0 = 10000L;
    WorkerInfo worker0 = newWorkerInfo(workerId0, "127.0.0.1:8090", 9050, 9060, 8040, 8060, 10);
    long workerId1 = 10001L;
    WorkerInfo worker1 = newWorkerInfo(workerId1, "127.0.0.2:8091", 9051, 9061, 8041, 8061, 10);
    long groupId0 = 10L;
    WorkerGroupDetailInfo group0 = WorkerGroupDetailInfo.newBuilder().setGroupId(groupId0).addWorkersInfo(worker0)
            .addWorkersInfo(worker1).build();
    long workerId2 = 10002L;
    WorkerInfo worker2 = newWorkerInfo(workerId2, "127.0.0.3:8092", 9052, 9062, 8042, 8062, 10);
    long groupId1 = 11L;
    WorkerGroupDetailInfo group1 = WorkerGroupDetailInfo.newBuilder().setGroupId(groupId1).addWorkersInfo(worker2)
            .build();
    // Stub the star client calls used by the agent.
    new Expectations() {
        {
            client.getWorkerInfo(serviceId, workerId0);
            minTimes = 0;
            result = worker0;
            client.listWorkerGroup(serviceId, Lists.newArrayList(groupId0), true);
            minTimes = 0;
            result = Lists.newArrayList(group0);
            client.listWorkerGroup(serviceId, Lists.newArrayList(), true);
            minTimes = 0;
            result = Lists.newArrayList(group0, group1);
        }
    };
    // Only the two workers of group 10 must be returned.
    List<Long> nodes = starosAgent.getWorkersByWorkerGroup(groupId0);
    Assert.assertEquals(2, nodes.size());
}
/**
 * Static factory: parses the given cron expression text into a {@link CronPattern}.
 *
 * @param pattern cron expression text
 * @return a new pattern instance; construction fails for invalid expressions
 */
public static CronPattern of(String pattern) {
    return new CronPattern(pattern);
}
@Test
public void rangeYearTest() {
    assertThrows(CronException.class, () -> {
        // The year field range is 1970-2099; values outside it raise an error
        CronPattern.of("0/1 * * * 1/1 ? 2020-2120");
    });
}
/**
 * Attempts to acquire the in-process lock on the task's state directory.
 * Re-entrant for the owning thread; creates the task directory on first acquisition.
 *
 * @param taskId the task whose directory should be locked
 * @return {@code true} if the lock is held by the current thread after this call,
 *         {@code false} if another thread owns it
 * @throws IllegalStateException if the state directory itself no longer exists
 */
synchronized boolean lock(final TaskId taskId) {
    if (!hasPersistentStores) {
        // Nothing on disk to protect, so locking is a no-op success.
        return true;
    }
    final Thread lockOwner = lockedTasksToOwner.get(taskId);
    if (lockOwner != null) {
        if (lockOwner.equals(Thread.currentThread())) {
            log.trace("{} Found cached state dir lock for task {}", logPrefix(), taskId);
            // we already own the lock
            return true;
        } else {
            // another thread owns the lock
            return false;
        }
    } else if (!stateDir.exists()) {
        log.error("Tried to lock task directory for {} but the state directory does not exist", taskId);
        throw new IllegalStateException("The state directory has been deleted");
    } else {
        lockedTasksToOwner.put(taskId, Thread.currentThread());
        // make sure the task directory actually exists, and create it if not
        getOrCreateDirectoryForTask(taskId);
        return true;
    }
}
@Test
public void shouldNotLockStateDirLockedByAnotherThread() throws Exception {
    final TaskId taskId = new TaskId(0, 0);
    // Acquire the lock on a different thread; the join has a 30s safety timeout.
    final Thread thread = new Thread(() -> directory.lock(taskId));
    thread.start();
    thread.join(30000);
    // The current thread must not be able to take over the other thread's lock.
    assertFalse(directory.lock(taskId));
}
/**
 * Compiles the given Java source entirely in memory and loads the resulting class.
 *
 * @param name   fully-qualified name of the class declared in {@code source}
 * @param source Java source text to compile
 * @return the loaded class, cast to the caller-requested type parameter
 * @throws CompileException       when compilation fails (carries the diagnostics)
 * @throws ClassNotFoundException when the compiled class cannot be loaded by name
 */
public static <T> Class<T> getClass(String name, String source) throws CompileException, ClassNotFoundException {
    JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
    InMemoryFileManager fileManager = new InMemoryFileManager(compiler.getStandardFileManager(null, null, null));
    DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<>();
    JavaCompiler.CompilationTask task = compiler.getTask(
            null,
            fileManager,
            diagnostics,
            ImmutableList.of("-classpath", classpathSupplier.get()),
            null,
            Collections.singletonList(new InMemoryFileManager.InputJavaFileObject(name, source)));
    boolean result = task.call();
    if (!result) {
        // Surface all collected compiler diagnostics to the caller.
        throw new CompileException(diagnostics);
    } else {
        // Unchecked: the caller asserts the expected type parameter T.
        return (Class<T>) fileManager.getClassLoader().loadClass(name);
    }
}
@Test
public void testGetClass() throws Exception {
    // Compile the fixture source in memory and verify the loaded class identity.
    Class<?> clazz = StringCompiler.getClass("Square", SQUARE_SOURCE);
    assertTrue(Function.class.isAssignableFrom(clazz));
    assertEquals("Square", clazz.getSimpleName());
}
/**
 * Computes the anchor point for a key-preview popup: horizontally centered over
 * the key, vertically adjusted by the preview background's bottom padding minus
 * the theme's vertical offset, all shifted by the window offset.
 */
@Override
public Point calculatePositionForPreview(
        Keyboard.Key key, PreviewPopupTheme theme, int[] windowOffset) {
    Point point = new Point(key.x + windowOffset[0], windowOffset[1]);
    Rect padding = new Rect();
    theme.getPreviewKeyBackground().getPadding(padding);
    // Center over the key; shift vertically by padding minus the theme offset.
    point.offset((key.width / 2), padding.bottom - theme.getVerticalOffset());
    return point;
}
@Test
public void testCalculatePositionForPreviewWithExtendAnimation() throws Exception {
    mTheme.setPreviewAnimationType(PreviewPopupTheme.ANIMATION_STYLE_EXTEND);
    int[] offsets = new int[] {50, 60};
    Point result = mUnderTest.calculatePositionForPreview(mTestKey, mTheme, offsets);
    // x is centered over the key; y equals the raw window offset with this theme setup.
    Assert.assertEquals(mTestKey.x + mTestKey.width / 2 + offsets[0], result.x);
    Assert.assertEquals(offsets[1], result.y);
}
public File getLogsDir() {
    // NOTE(review): Optional#get() assumes PATH_LOGS always has a configured value —
    // presumably guaranteed by defaults elsewhere; confirm before relying on it.
    String logsPath = config.get(PATH_LOGS.getKey()).get();
    return new File(logsPath);
}
@Test
public void getLogsDir() throws IOException {
    File dir = temp.newFolder();
    settings.setProperty(PATH_LOGS.getKey(), dir.getAbsolutePath());
    // The configured logs path must be returned verbatim as a File.
    assertThat(underTest.getLogsDir()).isEqualTo(dir);
}
public void promoteLeader() {
    // The leader is always the first registered worker; with no workers there is no leader.
    this.leader = workers.isEmpty() ? null : workers.get(0);
}
@Test
void testNullLeader() {
    // With no registered workers, promoting must leave the leader null.
    var workCenter = new WorkCenter();
    workCenter.promoteLeader();
    assertNull(workCenter.getLeader());
}
/**
 * Resolves {@code other} against this directory resource id.
 * Preconditions: this id must be a directory; only RESOLVE_FILE / RESOLVE_DIRECTORY
 * are supported; a RESOLVE_FILE target must not end with '/'.
 */
@Override
public LocalResourceId resolve(String other, ResolveOptions resolveOptions) {
    checkState(isDirectory(), "Expected the path is a directory, but had [%s].", pathString);
    checkArgument(
        resolveOptions.equals(StandardResolveOptions.RESOLVE_FILE)
            || resolveOptions.equals(StandardResolveOptions.RESOLVE_DIRECTORY),
        "ResolveOptions: [%s] is not supported.",
        resolveOptions);
    checkArgument(
        !(resolveOptions.equals(StandardResolveOptions.RESOLVE_FILE) && other.endsWith("/")),
        "The resolved file: [%s] should not end with '/'.",
        other);
    // Windows paths need dedicated handling (drive letters, separators).
    if (SystemUtils.IS_OS_WINDOWS) {
        return resolveLocalPathWindowsOS(other, resolveOptions);
    } else {
        return resolveLocalPath(other, resolveOptions);
    }
}
@Test
public void testResolveInvalidInputs() {
    // TODO: Java core test failing on windows, https://github.com/apache/beam/issues/20465
    assumeFalse(SystemUtils.IS_OS_WINDOWS);
    // Resolving a trailing-slash name as a FILE must be rejected with an IAE.
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("The resolved file: [tmp/] should not end with '/'.");
    toResourceIdentifier("/root/").resolve("tmp/", StandardResolveOptions.RESOLVE_FILE);
}
/**
 * Validates the captcha carried by the login request. Skipped entirely when the
 * captcha feature is disabled; otherwise bean-validates the request, verifies the
 * captcha with the captcha service, and on failure records a failed-login log
 * entry before raising the captcha error.
 */
@VisibleForTesting
void validateCaptcha(AuthLoginReqVO reqVO) {
    // Skip validation entirely when the captcha feature is disabled.
    if (!captchaEnable) {
        return;
    }
    // Bean validation: captcha fields are mandatory when captcha is enabled.
    ValidationUtils.validate(validator, reqVO, AuthLoginReqVO.CodeEnableGroup.class);
    CaptchaVO captchaVO = new CaptchaVO();
    captchaVO.setCaptchaVerification(reqVO.getCaptchaVerification());
    ResponseModel response = captchaService.verification(captchaVO);
    // Verification failed
    if (!response.isSuccess()) {
        // Record a failed-login log entry (invalid captcha)
        createLoginLog(null, reqVO.getUsername(), LoginLogTypeEnum.LOGIN_USERNAME, LoginResultEnum.CAPTCHA_CODE_ERROR);
        throw exception(AUTH_LOGIN_CAPTCHA_CODE_ERROR, response.getRepMsg());
    }
}
@Test
public void testValidateCaptcha_constraintViolationException() {
    // Prepare arguments: a login request with no captcha verification value
    AuthLoginReqVO reqVO = randomPojo(AuthLoginReqVO.class).setCaptchaVerification(null);
    // Mock: captcha feature enabled
    ReflectUtil.setFieldValue(authService, "captchaEnable", true);
    // Invoke and assert the expected bean-validation exception
    assertThrows(ConstraintViolationException.class, () -> authService.validateCaptcha(reqVO),
            "验证码不能为空");
}
/**
 * Describes the given consumer groups via the admin API driver.
 * Results are re-keyed from the internal {@code CoordinatorKey} to the plain
 * group id string that callers expect.
 */
@Override
public DescribeConsumerGroupsResult describeConsumerGroups(final Collection<String> groupIds,
                                                           final DescribeConsumerGroupsOptions options) {
    SimpleAdminApiFuture<CoordinatorKey, ConsumerGroupDescription> future =
        DescribeConsumerGroupsHandler.newFuture(groupIds);
    DescribeConsumerGroupsHandler handler =
        new DescribeConsumerGroupsHandler(options.includeAuthorizedOperations(), logContext);
    invokeDriver(handler, future, options.timeoutMs);
    // Re-key the per-group futures from CoordinatorKey to the group id string.
    return new DescribeConsumerGroupsResult(future.all().entrySet().stream()
        .collect(Collectors.toMap(entry -> entry.getKey().idValue, Map.Entry::getValue)));
}
@Test
public void testDescribeGroupsWithBothUnsupportedApis() throws InterruptedException {
    try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(mockCluster(1, 0))) {
        env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
        env.kafkaClient().prepareResponse(prepareFindCoordinatorResponse(Errors.NONE, env.cluster().controller()));
        // The first request sent will be a ConsumerGroupDescribe request. Let's
        // fail it in order to fail back to using the classic version.
        env.kafkaClient().prepareUnsupportedVersionResponse(
            request -> request instanceof ConsumerGroupDescribeRequest);
        // Let's also fail the second one.
        env.kafkaClient().prepareUnsupportedVersionResponse(
            request -> request instanceof DescribeGroupsRequest);
        DescribeConsumerGroupsResult result = env.adminClient().describeConsumerGroups(singletonList(GROUP_ID));
        // With neither API supported, the per-group future must fail with UnsupportedVersionException.
        TestUtils.assertFutureError(result.describedGroups().get(GROUP_ID), UnsupportedVersionException.class);
    }
}
@Override
public Long time(RedisClusterNode node) {
    // Resolve the client for the target node, then issue a blocking TIME read.
    RedisClient client = getEntry(node);
    RFuture<Long> future = executorService.readAsync(client, LongCodec.INSTANCE, RedisCommands.TIME_LONG);
    return syncFuture(future);
}
@Test
public void testTime() {
    RedisClusterNode master = getFirstMaster();
    Long time = connection.time(master);
    // Server time is epoch-based, so any live server returns a value well above 1000.
    assertThat(time).isGreaterThan(1000);
}
public MethodSpec getMethodSpec() {
    // Methods needing injection become static @BeforeAll methods taking the
    // default per-test parameters; all others keep a plain signature.
    if (methodNeedsInjection()) {
        return new MethodSpecGenerator(
                uniqueMethodName,
                BeforeAll.class,
                Modifier.STATIC,
                defaultParameterSpecsForEachUnitTest(),
                generateStatementBody())
            .generate();
    }
    return new MethodSpecGenerator(uniqueMethodName, generateStatementBody()).generate();
}
@Test
public void testThatDeployMethodWasGenerated() {
    Optional<Method> deployMethod = filteredMethods.stream().filter(m -> m.getName().equals("deploy")).findAny();
    MethodSpec deployMethodSpec = new MethodParser(deployMethod.get(), greeterContractClass, "deploy")
            .getMethodSpec();
    // The generated method must be a static @BeforeAll deployer wiring web3j,
    // the transaction manager and the gas provider.
    assertEquals(
            "@org.junit.jupiter.api.BeforeAll\n" +
            "static void deploy(org.web3j.protocol.Web3j web3j,\n" +
            " org.web3j.tx.TransactionManager transactionManager,\n" +
            " org.web3j.tx.gas.ContractGasProvider contractGasProvider) throws java.lang.Exception {\n" +
            " greeter = org.web3j.test.contract.Greeter.deploy(web3j, transactionManager, contractGasProvider, \"REPLACE_ME\").send();\n" +
            "}\n",
            deployMethodSpec.toString());
}
/**
 * Resolves the given host from the cached hosts-file entries according to the
 * requested address-type policy. The cache is refreshed first if it is stale.
 *
 * @return the matching addresses, or {@code null} when the host has no entry
 *         of an acceptable type
 */
public List<InetAddress> addresses(String inetHost, ResolvedAddressTypes resolvedAddressTypes) {
    String normalized = normalize(inetHost);
    // Re-parse the hosts file if the cached entries are past their TTL.
    ensureHostsFileEntriesAreFresh();
    switch (resolvedAddressTypes) {
        case IPV4_ONLY:
            return inet4Entries.get(normalized);
        case IPV6_ONLY:
            return inet6Entries.get(normalized);
        case IPV4_PREFERRED:
            // IPv4 entries first, then IPv6; fall back to IPv6 when no IPv4 entry exists.
            List<InetAddress> allInet4Addresses = inet4Entries.get(normalized);
            return allInet4Addresses != null ? allAddresses(allInet4Addresses, inet6Entries.get(normalized)) :
                    inet6Entries.get(normalized);
        case IPV6_PREFERRED:
            // Mirror of the above with the address families swapped.
            List<InetAddress> allInet6Addresses = inet6Entries.get(normalized);
            return allInet6Addresses != null ? allAddresses(allInet6Addresses, inet4Entries.get(normalized)) :
                    inet4Entries.get(normalized);
        default:
            throw new IllegalArgumentException("Unknown ResolvedAddressTypes " + resolvedAddressTypes);
    }
}
@Test
public void shouldPickIpv4FirstWhenBothAreDefinedButIpv4IsPreferred() {
    HostsFileEntriesProvider.Parser parser = givenHostsParserWith(
            LOCALHOST_V4_ADDRESSES,
            LOCALHOST_V6_ADDRESSES
    );
    DefaultHostsFileEntriesResolver resolver = new DefaultHostsFileEntriesResolver(parser, ENTRIES_TTL);
    List<InetAddress> addresses = resolver.addresses("localhost", ResolvedAddressTypes.IPV4_PREFERRED);
    // IPV4_PREFERRED must yield the IPv4 entry first, followed by the IPv6 entry.
    assertNotNull(addresses);
    assertEquals(2, addresses.size());
    assertThat("Should pick an IPv4 address", addresses.get(0), instanceOf(Inet4Address.class));
    assertThat("Should pick an IPv6 address", addresses.get(1), instanceOf(Inet6Address.class));
}
/**
 * Formats the given expression into its string representation.
 * Thin delegate to {@code formatExpression} — see that method for the
 * formatting rules.
 */
public String process(final Expression expression) {
    return formatExpression(expression);
}
@Test
public void shouldGenerateCorrectCodeForTimeStringEQ() {
    // Given: a TIME column compared for equality against a time-formatted string literal
    final ComparisonExpression compExp = new ComparisonExpression(
        Type.EQUAL,
        TIMECOL,
        new StringLiteral("01:23:45")
    );

    // When:
    final String java = sqlToJavaVisitor.process(compExp);

    // Then: the literal is parsed into a java.sql.Time and compared via compareTo
    assertThat(java, containsString("(((java.sql.Time) arguments.get(\"COL12\")).compareTo(SqlTimeTypes.parseTime(\"01:23:45\")) == 0)"));
}
/**
 * Hides every model in the given iterable. Convenience wrapper around
 * {@code showModels(models, false)}.
 */
protected void hideModels(Iterable<EpoxyModel<?>> models) {
    showModels(models, false);
}
@Test
public void testHideModels() {
    TestModel testModel1 = new TestModel();
    TestModel testModel2 = new TestModel();
    testAdapter.addModels(testModel1, testModel2);
    testAdapter.hideModels(testAdapter.models);
    // Hiding dispatches one change notification per model position.
    verify(observer).onItemRangeChanged(0, 1, null);
    verify(observer).onItemRangeChanged(1, 1, null);
    assertFalse(testModel1.isShown());
    assertFalse(testModel2.isShown());
    checkDifferState();
}