focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
@Override public String evaluate(EvaluationContext evaluationContext, String... args) { if (args != null) { switch (args.length) { case 1: int maxValue = Integer.MAX_VALUE; try { maxValue = Integer.parseInt(args[0]); } catch (NumberFormatException nfe) { // Ignore, we'll stick to integer max value. } return String.valueOf(getRandom().nextInt(maxValue)); case 2: int minValue = 0; maxValue = Integer.MAX_VALUE; try { minValue = Integer.parseInt(args[0]); maxValue = Integer.parseInt(args[1]); } catch (NumberFormatException nfe) { // Ignore, we'll stick to the defaults. } return String.valueOf(getRandom().nextInt(maxValue - minValue) + minValue); default: return String.valueOf(getRandom().nextInt()); } } return String.valueOf(getRandom().nextInt()); }
@Test void testBoundedEvaluation() { // Compute evaluation. RandomIntELFunction function = new RandomIntELFunction(); String randomIntString = function.evaluate(null, "50"); int randomInt = Integer.parseInt(randomIntString); assertTrue(randomInt >= 0); assertTrue(randomInt <= 50); }
/**
 * Returns a page of OAuth2 clients matching the paging/filter request.
 * Delegates directly to the mapper's paged query.
 */
@Override
public PageResult<OAuth2ClientDO> getOAuth2ClientPage(OAuth2ClientPageReqVO pageReqVO) {
    return oauth2ClientMapper.selectPage(pageReqVO);
}
@Test
public void testGetOAuth2ClientPage() {
    // mock data
    OAuth2ClientDO dbOAuth2Client = randomPojo(OAuth2ClientDO.class, o -> { // matched by the query below
        o.setName("潜龙");
        o.setStatus(CommonStatusEnum.ENABLE.getStatus());
    });
    oauth2ClientMapper.insert(dbOAuth2Client);
    // test: name does not match
    oauth2ClientMapper.insert(cloneIgnoreId(dbOAuth2Client, o -> o.setName("凤凰")));
    // test: status does not match
    oauth2ClientMapper.insert(cloneIgnoreId(dbOAuth2Client, o -> o.setStatus(CommonStatusEnum.DISABLE.getStatus())));
    // prepare parameters ("龙" is a substring of the matching record's name)
    OAuth2ClientPageReqVO reqVO = new OAuth2ClientPageReqVO();
    reqVO.setName("龙");
    reqVO.setStatus(CommonStatusEnum.ENABLE.getStatus());
    // invoke
    PageResult<OAuth2ClientDO> pageResult = oauth2ClientService.getOAuth2ClientPage(reqVO);
    // assert: only the first record matches both name and status
    assertEquals(1, pageResult.getTotal());
    assertEquals(1, pageResult.getList().size());
    assertPojoEquals(dbOAuth2Client, pageResult.getList().get(0));
}
/**
 * Asserts that the given method/url was invoked exactly {@code times} times
 * and returns the recorded invocations. {@code times == 0} delegates to
 * {@code verifyNever} and yields an empty list.
 */
public List<Request> verifyTimes(final HttpMethod method, final String url, final int times) {
    if (times < 0) {
        throw new IllegalArgumentException("times must be a non negative number");
    }
    if (times == 0) {
        verifyNever(method, url);
        return Collections.emptyList();
    }
    final RequestKey key = RequestKey.builder(method, url).build();
    if (!requests.containsKey(key)) {
        // Never seen at all: report what was recorded instead.
        throw new VerificationAssertionError("Wanted: '%s' but never invoked! Got: %s", key, requests.keySet());
    }
    final List<Request> invocations = requests.get(key);
    final int actual = invocations.size();
    if (actual != times) {
        throw new VerificationAssertionError("Wanted: '%s' to be invoked: '%s' times but got: '%s'!", key, times, actual);
    }
    return invocations;
}
@Test
void verifyNegative() {
    // A negative invocation count is meaningless and must be rejected.
    try {
        mockClient.verifyTimes(HttpMethod.POST, "/repos/netflix/feign/contributors", -1);
        // The original fail("") carried no message; name the missing exception.
        fail("expected IllegalArgumentException for a negative times argument");
    } catch (IllegalArgumentException e) {
        assertThat(e.getMessage()).contains("non negative");
    }
}
/**
 * Returns all virtual networks of every tenant as a JSON array
 * under the "vnets" key.
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
public Response getVirtualNetworks() {
    Set<TenantId> tenantIds = vnetAdminService.getTenantIds();
    // Flatten each tenant's networks into one combined list.
    List<VirtualNetwork> allVnets = tenantIds.stream()
            .flatMap(tenantId -> vnetService.getVirtualNetworks(tenantId).stream())
            .collect(Collectors.toList());
    return ok(encodeArray(VirtualNetwork.class, "vnets", allVnets)).build();
}
@Test
public void testGetVirtualNetworksArray() {
    // All four virtual networks are registered under a single tenant.
    final Set<VirtualNetwork> vnetSet = ImmutableSet.of(vnet1, vnet2, vnet3, vnet4);
    expect(mockVnetAdminService.getTenantIds()).andReturn(ImmutableSet.of(tenantId3)).anyTimes();
    replay(mockVnetAdminService);
    expect(mockVnetService.getVirtualNetworks(tenantId3)).andReturn(vnetSet).anyTimes();
    replay(mockVnetService);
    WebTarget wt = target();
    String response = wt.path("vnets").request().get(String.class);
    // The payload must be a single top-level "vnets" JSON array.
    assertThat(response, containsString("{\"vnets\":["));
    final JsonObject result = Json.parse(response).asObject();
    assertThat(result, notNullValue());
    assertThat(result.names(), hasSize(1));
    assertThat(result.names().get(0), is("vnets"));
    final JsonArray vnetJsonArray = result.get("vnets").asArray();
    assertThat(vnetJsonArray, notNullValue());
    assertEquals("Virtual networks array is not the correct size.", vnetSet.size(), vnetJsonArray.size());
    // Every mocked network must appear in the encoded array.
    vnetSet.forEach(vnet -> assertThat(vnetJsonArray, hasVnet(vnet)));
    verify(mockVnetService);
    verify(mockVnetAdminService);
}
/**
 * Returns true when the queue has no elements.
 * Implemented via peek(): a null head means the queue is empty.
 */
@Override
public boolean isEmpty() {
    return peek() == null;
}
// A freshly constructed (untouched) queue must report empty.
@Test
public void testIsEmpty_whenEmpty() {
    assertTrue(queue.isEmpty());
}
@Override public CRFModel train(SequenceDataset<Label> sequenceExamples, Map<String, Provenance> runProvenance) { if (sequenceExamples.getOutputInfo().getUnknownCount() > 0) { throw new IllegalArgumentException("The supplied Dataset contained unknown Outputs, and this Trainer is supervised."); } // Creates a new RNG, adds one to the invocation count, generates a local optimiser. SplittableRandom localRNG; TrainerProvenance trainerProvenance; StochasticGradientOptimiser localOptimiser; synchronized(this) { localRNG = rng.split(); localOptimiser = optimiser.copy(); trainerProvenance = getProvenance(); trainInvocationCounter++; } ImmutableOutputInfo<Label> labelIDMap = sequenceExamples.getOutputIDInfo(); ImmutableFeatureMap featureIDMap = sequenceExamples.getFeatureIDMap(); SGDVector[][] sgdFeatures = new SGDVector[sequenceExamples.size()][]; int[][] sgdLabels = new int[sequenceExamples.size()][]; double[] weights = new double[sequenceExamples.size()]; int n = 0; for (SequenceExample<Label> example : sequenceExamples) { weights[n] = example.getWeight(); Pair<int[],SGDVector[]> pair = CRFModel.convertToVector(example,featureIDMap,labelIDMap); sgdFeatures[n] = pair.getB(); sgdLabels[n] = pair.getA(); n++; } logger.info(String.format("Training SGD CRF with %d examples", n)); CRFParameters crfParameters = new CRFParameters(featureIDMap.size(),labelIDMap.size()); localOptimiser.initialise(crfParameters); double loss = 0.0; int iteration = 0; for (int i = 0; i < epochs; i++) { if (shuffle) { Util.shuffleInPlace(sgdFeatures, sgdLabels, weights, localRNG); } if (minibatchSize == 1) { /* * Special case a minibatch of size 1. Directly updates the parameters after each * example rather than aggregating. 
*/ for (int j = 0; j < sgdFeatures.length; j++) { Pair<Double,Tensor[]> output = crfParameters.valueAndGradient(sgdFeatures[j],sgdLabels[j]); loss += output.getA()*weights[j]; //Update the gradient with the current learning rates Tensor[] updates = localOptimiser.step(output.getB(),weights[j]); //Apply the update to the current parameters. crfParameters.update(updates); iteration++; if ((iteration % loggingInterval == 0) && (loggingInterval != -1)) { logger.info("At iteration " + iteration + ", average loss = " + loss/loggingInterval); loss = 0.0; } } } else { Tensor[][] gradients = new Tensor[minibatchSize][]; for (int j = 0; j < sgdFeatures.length; j += minibatchSize) { double tempWeight = 0.0; int curSize = 0; //Aggregate the gradient updates for each example in the minibatch for (int k = j; k < j+minibatchSize && k < sgdFeatures.length; k++) { Pair<Double,Tensor[]> output = crfParameters.valueAndGradient(sgdFeatures[j],sgdLabels[j]); loss += output.getA()*weights[k]; tempWeight += weights[k]; gradients[k-j] = output.getB(); curSize++; } //Merge the values into a single gradient update Tensor[] updates = crfParameters.merge(gradients,curSize); for (Tensor update : updates) { update.scaleInPlace(minibatchSize); } tempWeight /= minibatchSize; //Update the gradient with the current learning rates updates = localOptimiser.step(updates,tempWeight); //Apply the gradient. 
crfParameters.update(updates); iteration++; if ((loggingInterval != -1) && (iteration % loggingInterval == 0)) { logger.info("At iteration " + iteration + ", average loss = " + loss/loggingInterval); loss = 0.0; } } } } localOptimiser.finalise(); //public CRFModel(String name, String description, ImmutableInfoMap featureIDMap, ImmutableInfoMap outputIDInfo, CRFParameters parameters) { ModelProvenance provenance = new ModelProvenance(CRFModel.class.getName(),OffsetDateTime.now(),sequenceExamples.getProvenance(),trainerProvenance,runProvenance); CRFModel model = new CRFModel("crf-sgd-model",provenance,featureIDMap,labelIDMap,crfParameters); localOptimiser.reset(); return model; }
// Predicting on an example that is invalid for the trained model
// must raise IllegalArgumentException.
@Test
public void testInvalidExample() {
    assertThrows(IllegalArgumentException.class, () -> {
        SequenceDataset<Label> p = SequenceDataGenerator.generateGorillaDataset(5);
        SequenceModel<Label> m = t.train(p);
        m.predict(SequenceDataGenerator.generateInvalidExample());
    });
}
public static long nextStartTimestamp(TimeUnit timeUnit, long start) { long nextTimestamp = start; switch (timeUnit) { case MILLISECONDS: break; case SECONDS: nextTimestamp = 1000 + 1000 * (start / 1000); // the next second is the start timestamp. break; case MINUTES: nextTimestamp = 60000 + 60000 * (start / 60000); // next minute is the start timestamp break; case HOURS: nextTimestamp = 3600000 + 3600000 * (start / 3600000); // next hour is the start timestamp break; case DAYS: nextTimestamp = 86400000 + 86400000 * (start / 86400000); // next day is the start timestamp break; } return nextTimestamp; }
@Test
public void testNextSecond() {
    long start = System.currentTimeMillis();
    long nextTimestamp = TimeUtil.nextStartTimestamp(TimeUnit.SECONDS, start);
    // The original test only printed values and asserted nothing; pin the contract:
    // the result is second-aligned, strictly after start, and at most one second away.
    org.junit.Assert.assertEquals(0, nextTimestamp % 1000);
    org.junit.Assert.assertTrue(nextTimestamp > start);
    org.junit.Assert.assertTrue(nextTimestamp - start <= 1000);
}
/**
 * Parses DistCp command-line arguments into a {@link DistCpOptions}.
 * Boolean switches map directly onto builder flags; valued switches are
 * validated (numbers parsed, snapshot pairs checked) before being applied.
 *
 * @param args raw command-line arguments
 * @return the fully built options
 * @throws IllegalArgumentException on unparseable or invalid arguments
 */
public static DistCpOptions parse(String[] args) throws IllegalArgumentException {
    CommandLineParser parser = new CustomParser();
    CommandLine command;
    try {
        command = parser.parse(cliOptions, args, true);
    } catch (ParseException e) {
        throw new IllegalArgumentException("Unable to parse arguments. " + Arrays.toString(args), e);
    }
    DistCpOptions.Builder builder = parseSourceAndTargetPaths(command);
    builder
        .withAtomicCommit(
            command.hasOption(DistCpOptionSwitch.ATOMIC_COMMIT.getSwitch()))
        .withSyncFolder(
            command.hasOption(DistCpOptionSwitch.SYNC_FOLDERS.getSwitch()))
        .withDeleteMissing(
            command.hasOption(DistCpOptionSwitch.DELETE_MISSING.getSwitch()))
        .withIgnoreFailures(
            command.hasOption(DistCpOptionSwitch.IGNORE_FAILURES.getSwitch()))
        .withOverwrite(
            command.hasOption(DistCpOptionSwitch.OVERWRITE.getSwitch()))
        .withAppend(
            command.hasOption(DistCpOptionSwitch.APPEND.getSwitch()))
        .withSkipCRC(
            command.hasOption(DistCpOptionSwitch.SKIP_CRC.getSwitch()))
        // Note the inversion: the BLOCKING switch requests non-blocking mode.
        .withBlocking(
            !command.hasOption(DistCpOptionSwitch.BLOCKING.getSwitch()))
        .withVerboseLog(
            command.hasOption(DistCpOptionSwitch.VERBOSE_LOG.getSwitch()))
        .withDirectWrite(
            command.hasOption(DistCpOptionSwitch.DIRECT_WRITE.getSwitch()))
        .withUseIterator(
            command.hasOption(DistCpOptionSwitch.USE_ITERATOR.getSwitch()))
        .withUpdateRoot(
            command.hasOption(DistCpOptionSwitch.UPDATE_ROOT.getSwitch()));
    // DIFF / RDIFF take a (fromSnapshot, toSnapshot) pair.
    if (command.hasOption(DistCpOptionSwitch.DIFF.getSwitch())) {
        String[] snapshots = getVals(command, DistCpOptionSwitch.DIFF.getSwitch());
        checkSnapshotsArgs(snapshots);
        builder.withUseDiff(snapshots[0], snapshots[1]);
    }
    if (command.hasOption(DistCpOptionSwitch.RDIFF.getSwitch())) {
        String[] snapshots = getVals(command, DistCpOptionSwitch.RDIFF.getSwitch());
        checkSnapshotsArgs(snapshots);
        builder.withUseRdiff(snapshots[0], snapshots[1]);
    }
    if (command.hasOption(DistCpOptionSwitch.FILTERS.getSwitch())) {
        builder.withFiltersFile(
            getVal(command, DistCpOptionSwitch.FILTERS.getSwitch()));
    }
    if (command.hasOption(DistCpOptionSwitch.LOG_PATH.getSwitch())) {
        builder.withLogPath(
            new Path(getVal(command, DistCpOptionSwitch.LOG_PATH.getSwitch())));
    }
    if (command.hasOption(DistCpOptionSwitch.WORK_PATH.getSwitch())) {
        final String workPath = getVal(command, DistCpOptionSwitch.WORK_PATH.getSwitch());
        if (workPath != null && !workPath.isEmpty()) {
            builder.withAtomicWorkPath(new Path(workPath));
        }
    }
    if (command.hasOption(DistCpOptionSwitch.TRACK_MISSING.getSwitch())) {
        builder.withTrackMissing(
            new Path(getVal(
                command,
                DistCpOptionSwitch.TRACK_MISSING.getSwitch())));
    }
    // Numeric options: parse failures are rethrown with the offending value.
    if (command.hasOption(DistCpOptionSwitch.BANDWIDTH.getSwitch())) {
        try {
            final Float mapBandwidth = Float.parseFloat(
                getVal(command, DistCpOptionSwitch.BANDWIDTH.getSwitch()));
            builder.withMapBandwidth(mapBandwidth);
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("Bandwidth specified is invalid: " +
                getVal(command, DistCpOptionSwitch.BANDWIDTH.getSwitch()), e);
        }
    }
    if (command.hasOption(
        DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch())) {
        try {
            final Integer numThreads = Integer.parseInt(getVal(command,
                DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch()));
            builder.withNumListstatusThreads(numThreads);
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException(
                "Number of liststatus threads is invalid: " + getVal(command,
                    DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch()), e);
        }
    }
    if (command.hasOption(DistCpOptionSwitch.MAX_MAPS.getSwitch())) {
        try {
            final Integer maps = Integer.parseInt(
                getVal(command, DistCpOptionSwitch.MAX_MAPS.getSwitch()));
            builder.maxMaps(maps);
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("Number of maps is invalid: " +
                getVal(command, DistCpOptionSwitch.MAX_MAPS.getSwitch()), e);
        }
    }
    if (command.hasOption(DistCpOptionSwitch.COPY_STRATEGY.getSwitch())) {
        builder.withCopyStrategy(
            getVal(command, DistCpOptionSwitch.COPY_STRATEGY.getSwitch()));
    }
    if (command.hasOption(DistCpOptionSwitch.PRESERVE_STATUS.getSwitch())) {
        builder.preserve(
            getVal(command, DistCpOptionSwitch.PRESERVE_STATUS.getSwitch()));
    }
    // Deprecated options are accepted but ignored with a warning.
    if (command.hasOption(DistCpOptionSwitch.FILE_LIMIT.getSwitch())) {
        LOG.warn(DistCpOptionSwitch.FILE_LIMIT.getSwitch() + " is a deprecated" +
            " option. Ignoring.");
    }
    if (command.hasOption(DistCpOptionSwitch.SIZE_LIMIT.getSwitch())) {
        LOG.warn(DistCpOptionSwitch.SIZE_LIMIT.getSwitch() + " is a deprecated" +
            " option. Ignoring.");
    }
    if (command.hasOption(DistCpOptionSwitch.BLOCKS_PER_CHUNK.getSwitch())) {
        // NOTE(review): trim() is applied to the switch name, not to the parsed
        // value — looks misplaced (harmless since switch names carry no
        // whitespace), but presumably the value was meant to be trimmed; confirm.
        final String chunkSizeStr = getVal(command,
            DistCpOptionSwitch.BLOCKS_PER_CHUNK.getSwitch().trim());
        try {
            int csize = Integer.parseInt(chunkSizeStr);
            // Negative chunk sizes are clamped to 0 (i.e. chunking disabled).
            csize = csize > 0 ? csize : 0;
            LOG.info("Set distcp blocksPerChunk to " + csize);
            builder.withBlocksPerChunk(csize);
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("blocksPerChunk is invalid: " +
                chunkSizeStr, e);
        }
    }
    if (command.hasOption(DistCpOptionSwitch.COPY_BUFFER_SIZE.getSwitch())) {
        // NOTE(review): same misplaced trim() on the switch name as above.
        final String copyBufferSizeStr = getVal(command,
            DistCpOptionSwitch.COPY_BUFFER_SIZE.getSwitch().trim());
        try {
            int copyBufferSize = Integer.parseInt(copyBufferSizeStr);
            builder.withCopyBufferSize(copyBufferSize);
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("copyBufferSize is invalid: " +
                copyBufferSizeStr, e);
        }
    }
    return builder.build();
}
// A negative bandwidth value must be rejected while building the options.
@Test(expected=IllegalArgumentException.class)
public void testParseNonPositiveBandwidth() {
    OptionsParser.parse(new String[] {
        "-bandwidth",
        "-11",
        "hdfs://localhost:8020/source/first",
        "hdfs://localhost:8020/target/"});
}
/**
 * REAL subtraction. REAL values travel as the float's raw bits widened to
 * long, so both operands are decoded to float, subtracted with Java float
 * semantics, and the result re-encoded as raw bits.
 */
@ScalarOperator(SUBTRACT)
@SqlType(StandardTypes.REAL)
public static long subtract(@SqlType(StandardTypes.REAL) long left, @SqlType(StandardTypes.REAL) long right)
{
    float minuend = intBitsToFloat((int) left);
    float subtrahend = intBitsToFloat((int) right);
    return floatToRawIntBits(minuend - subtrahend);
}
// REAL subtraction must match Java float arithmetic exactly,
// including the signed-zero case in the last assertion.
@Test
public void testSubtract() {
    assertFunction("REAL'12.34' - REAL'56.78'", REAL, 12.34f - 56.78f);
    assertFunction("REAL'-17.34' - REAL'-22.891'", REAL, -17.34f - -22.891f);
    assertFunction("REAL'-89.123' - REAL'754.0'", REAL, -89.123f - 754.0f);
    assertFunction("REAL'-0.0' - REAL'0.0'", REAL, -0.0f - 0.0f);
}
/**
 * Continues the given future with {@code applyFun}, running the continuation
 * synchronously when the future is already complete (skipping the executor
 * hand-off) and on {@code executor} otherwise.
 *
 * @param completableFuture the future to continue
 * @param executor executor for the asynchronous case
 * @param applyFun the continuation function
 * @return a future holding the mapped result
 */
public static <IN, OUT> CompletableFuture<OUT> thenApplyAsyncIfNotDone(
        CompletableFuture<IN> completableFuture,
        Executor executor,
        Function<? super IN, ? extends OUT> applyFun) {
    if (completableFuture.isDone()) {
        return completableFuture.thenApply(applyFun);
    }
    return completableFuture.thenApplyAsync(applyFun, executor);
}
// Runs the shared continuation test harness against thenApplyAsyncIfNotDone.
@Test
void testApplyAsyncIfNotDone() {
    testFutureContinuation(
        (CompletableFuture<?> future, Executor executor) ->
            FutureUtils.thenApplyAsyncIfNotDone(future, executor, o -> null));
}
/** Returns the rollover periodicity derived from the configured date pattern. */
public PeriodicityType getPeriodicityType() {
    return periodicityType;
}
// Each date pattern must map to the expected rollover periodicity;
// the finest time component present in the pattern decides the period.
@Test
public void testPeriodicity() {
    assertPatternPeriodicity("yyyy-MM-dd_HH_mm_ss", PeriodicityType.TOP_OF_SECOND);
    assertPatternPeriodicity("yyyy-MM-dd_HH_mm", PeriodicityType.TOP_OF_MINUTE);
    assertPatternPeriodicity("yyyy-MM-dd_HH", PeriodicityType.TOP_OF_HOUR);
    // 12-hour clock ("hh") still rolls at the top of the hour.
    assertPatternPeriodicity("yyyy-MM-dd_hh", PeriodicityType.TOP_OF_HOUR);
    assertPatternPeriodicity("yyyy-MM-dd", PeriodicityType.TOP_OF_DAY);
    assertPatternPeriodicity("yyyy-MM", PeriodicityType.TOP_OF_MONTH);
    assertPatternPeriodicity("yyyy-ww", PeriodicityType.TOP_OF_WEEK);
    assertPatternPeriodicity("yyyy-W", PeriodicityType.TOP_OF_WEEK);
}

// Builds a RollingCalendar for the pattern and checks its periodicity.
private void assertPatternPeriodicity(String datePattern, PeriodicityType expected) {
    RollingCalendar rc = new RollingCalendar(datePattern);
    assertEquals(expected, rc.getPeriodicityType());
}
/**
 * Looks up the singleton registered for a parameterized interface; entries
 * are keyed as "InterfaceName&lt;TypeArgName&gt;". Returns null when nothing
 * is registered under that key.
 */
public static <T> T getBean(Class<T> interfaceClass, Class typeClass) {
    final String key = interfaceClass.getName() + "<" + typeClass.getName() + ">";
    final Object bean = serviceMap.get(key);
    if (bean == null) {
        return null;
    }
    // An array registration holds multiple implementations; the first wins.
    if (bean instanceof Object[]) {
        return (T) Array.get(bean, 0);
    }
    return (T) bean;
}
// The implementation bound to L must come back with its configured
// protocol, port and parameter map injected.
@Test
public void testMap() {
    LImpl l = (LImpl)SingletonServiceFactory.getBean(L.class);
    Assert.assertEquals("https", l.getProtocol());
    Assert.assertEquals(8080, l.getPort());
    Assert.assertEquals(2, l.getParameters().size());
}
/**
 * Executes this sub-command.
 *
 * @param admin the Kafka admin client to operate through
 * @param ns the parsed command-line namespace
 * @param out stream for user-facing output
 * @throws Exception if the command fails
 */
abstract void execute(Admin admin, Namespace ns, PrintStream out) throws Exception;
@Test
public void testFindHangingLookupTopicAndBrokerId() throws Exception {
    // "find-hanging" scoped to broker 5 and topic "foo": only partitions whose
    // replica set includes broker 5 should be queried for open producers.
    int brokerId = 5;
    String topic = "foo";
    String[] args = new String[]{
        "--bootstrap-server",
        "localhost:9092",
        "find-hanging",
        "--broker-id",
        String.valueOf(brokerId),
        "--topic",
        topic
    };
    Node node0 = new Node(0, "localhost", 9092);
    Node node1 = new Node(1, "localhost", 9093);
    Node node5 = new Node(5, "localhost", 9097);
    // Partition 0 is not replicated on broker 5; partition 1 is.
    TopicPartitionInfo partition0 = new TopicPartitionInfo(
        0,
        node0,
        Arrays.asList(node0, node1),
        Arrays.asList(node0, node1)
    );
    TopicPartitionInfo partition1 = new TopicPartitionInfo(
        1,
        node1,
        Arrays.asList(node1, node5),
        Arrays.asList(node1, node5)
    );
    TopicDescription description = new TopicDescription(
        topic,
        false,
        Arrays.asList(partition0, partition1)
    );
    expectDescribeTopics(singletonMap(topic, description));
    // Only partition 1 must reach describeProducers, filtered by broker id.
    DescribeProducersResult result = Mockito.mock(DescribeProducersResult.class);
    Mockito.when(result.all()).thenReturn(completedFuture(emptyMap()));
    Mockito.when(admin.describeProducers(
        Collections.singletonList(new TopicPartition(topic, 1)),
        new DescribeProducersOptions().brokerId(brokerId)
    )).thenReturn(result);
    execute(args);
    assertNormalExit();
    assertNoHangingTransactions();
}
/**
 * Parses the named property as a boolean (Boolean.parseBoolean semantics:
 * only case-insensitive "true" yields true).
 *
 * @param name the property key
 * @return the parsed boolean value
 * @throws IllegalArgumentException if the property does not exist
 */
public boolean parseBoolean(String name) {
    String property = getProperties().getProperty(name);
    if (property == null) {
        // Name the missing key so the caller can tell which lookup failed;
        // the original threw a message-less IllegalArgumentException.
        throw new IllegalArgumentException("Property not found: " + name);
    }
    return Boolean.parseBoolean(property);
}
@Test
public void testParseBoolean() {
    Properties props = new Properties();
    props.put("value1", "true");
    props.put("value2", "false");
    props.put("empty", "");
    props.put("str", "abc");
    props.put("boolean", "true");
    props.put("float", "24.98");
    props.put("int", "12");
    props.put("char", "a");
    PropertyParser instance = new PropertyParser(props);
    // Boolean.parseBoolean semantics: only (case-insensitive) "true" parses to true;
    // everything else — empty, arbitrary strings, numbers — is false.
    assertEquals(true, instance.parseBoolean("value1"));
    assertEquals(false, instance.parseBoolean("value2"));
    assertEquals(false, instance.parseBoolean("empty"));
    assertEquals(false, instance.parseBoolean("str"));
    assertEquals(true, instance.parseBoolean("boolean"));
    assertEquals(false, instance.parseBoolean("float"));
    assertEquals(false, instance.parseBoolean("int"));
    assertEquals(false, instance.parseBoolean("char"));
    // A missing key must raise; the original had an unreachable assertEquals
    // after fail() — removed.
    try {
        instance.parseBoolean("nonexistent");
        fail("no exception");
    } catch (IllegalArgumentException e) {
        // expected: unknown property names are rejected
    }
}
/**
 * Fetches the config for the given config id via a short-lived subscriber.
 * Blocks until the first config generation arrives (nextConfig(true)),
 * then returns it; the subscriber is closed on exit by try-with-resources.
 */
public synchronized T getConfig(String configId) {
    try (ConfigSubscriber subscriber = new ConfigSubscriber()) {
        ConfigHandle<T> handle = subscriber.subscribe(clazz, configId);
        subscriber.nextConfig(true);
        return handle.getConfig();
    }
}
// A "dir:" config id resolves the config payload from files
// in the given directory.
@Test
public void testGetFromDir() {
    ConfigGetter<AppConfig> getter = new ConfigGetter<>(AppConfig.class);
    AppConfig config = getter.getConfig("dir:src/test/resources/configs/foo/");
    verifyFooValues(config);
}
/**
 * Serializes a JSONObject/JSONArray body into the request template.
 * Null bodies are silently skipped; any other type is rejected.
 *
 * @throws EncodeException when the body is neither JSONObject nor JSONArray
 */
@Override
public void encode(Object object, Type bodyType, RequestTemplate template) throws EncodeException {
    if (object == null) {
        return;
    }
    final boolean isJson = object instanceof JSONArray || object instanceof JSONObject;
    if (!isJson) {
        // The message reports the declared body type, not the runtime class.
        throw new EncodeException(format("%s is not a type supported by this encoder.", bodyType));
    }
    template.body(object.toString());
}
@Test
void unknownTypeThrowsEncodeException() {
    // A plain String body is neither JSONObject nor JSONArray, so encoding must fail.
    Exception exception = assertThrows(EncodeException.class,
        () -> new JsonEncoder().encode("qwerty", Clock.class, new RequestTemplate()));
    // The message reports the declared body type, not the runtime value's type.
    assertThat(exception.getMessage())
        .isEqualTo("class java.time.Clock is not a type supported by this encoder.");
}
/**
 * Fetches the gateway flow rules currently active on the given app machine
 * (app/ip/port), caches them in the in-memory repository, and returns them.
 * Invalid coordinates yield a failure Result rather than an exception.
 */
@GetMapping("/list.json")
@AuthAction(AuthService.PrivilegeType.READ_RULE)
public Result<List<GatewayFlowRuleEntity>> queryFlowRules(String app, String ip, Integer port) {
    if (StringUtil.isEmpty(app)) {
        return Result.ofFail(-1, "app can't be null or empty");
    }
    if (StringUtil.isEmpty(ip)) {
        return Result.ofFail(-1, "ip can't be null or empty");
    }
    if (port == null) {
        return Result.ofFail(-1, "port can't be null");
    }
    try {
        // Blocking get on the async fetch; failures fall through to the catch.
        List<GatewayFlowRuleEntity> rules = sentinelApiClient.fetchGatewayFlowRules(app, ip, port).get();
        repository.saveAll(rules);
        return Result.ofSuccess(rules);
    } catch (Throwable throwable) {
        logger.error("query gateway flow rules error:", throwable);
        return Result.ofThrowable(-1, throwable);
    }
}
@Test
public void testQueryFlowRules() throws Exception {
    String path = "/gateway/flow/list.json";
    List<GatewayFlowRuleEntity> entities = new ArrayList<>();
    // Mock two entities
    GatewayFlowRuleEntity entity = new GatewayFlowRuleEntity();
    entity.setId(1L);
    entity.setApp(TEST_APP);
    entity.setIp(TEST_IP);
    entity.setPort(TEST_PORT);
    entity.setResource("httpbin_route");
    entity.setResourceMode(RESOURCE_MODE_ROUTE_ID);
    entity.setGrade(FLOW_GRADE_QPS);
    entity.setCount(5D);
    entity.setInterval(30L);
    entity.setIntervalUnit(GatewayFlowRuleEntity.INTERVAL_UNIT_SECOND);
    entity.setControlBehavior(CONTROL_BEHAVIOR_DEFAULT);
    entity.setBurst(0);
    entity.setMaxQueueingTimeoutMs(0);
    GatewayParamFlowItemEntity itemEntity = new GatewayParamFlowItemEntity();
    entity.setParamItem(itemEntity);
    itemEntity.setParseStrategy(PARAM_PARSE_STRATEGY_CLIENT_IP);
    entities.add(entity);
    // Second rule targets a custom API name rather than a route id.
    GatewayFlowRuleEntity entity2 = new GatewayFlowRuleEntity();
    entity2.setId(2L);
    entity2.setApp(TEST_APP);
    entity2.setIp(TEST_IP);
    entity2.setPort(TEST_PORT);
    entity2.setResource("some_customized_api");
    entity2.setResourceMode(RESOURCE_MODE_CUSTOM_API_NAME);
    entity2.setCount(30D);
    entity2.setInterval(2L);
    entity2.setIntervalUnit(GatewayFlowRuleEntity.INTERVAL_UNIT_MINUTE);
    entity2.setControlBehavior(CONTROL_BEHAVIOR_DEFAULT);
    entity2.setBurst(0);
    entity2.setMaxQueueingTimeoutMs(0);
    GatewayParamFlowItemEntity itemEntity2 = new GatewayParamFlowItemEntity();
    entity2.setParamItem(itemEntity2);
    itemEntity2.setParseStrategy(PARAM_PARSE_STRATEGY_CLIENT_IP);
    entities.add(entity2);
    // Stub the API client so the controller's fetch resolves to the two rules.
    CompletableFuture<List<GatewayFlowRuleEntity>> completableFuture = mock(CompletableFuture.class);
    given(completableFuture.get()).willReturn(entities);
    given(sentinelApiClient.fetchGatewayFlowRules(TEST_APP, TEST_IP, TEST_PORT)).willReturn(completableFuture);
    MockHttpServletRequestBuilder requestBuilder = MockMvcRequestBuilders.get(path);
    requestBuilder.param("app", TEST_APP);
    requestBuilder.param("ip", TEST_IP);
    requestBuilder.param("port", String.valueOf(TEST_PORT));
    // Do controller logic
    MvcResult mvcResult = mockMvc.perform(requestBuilder)
        .andExpect(MockMvcResultMatchers.status().isOk()).andDo(MockMvcResultHandlers.print()).andReturn();
    // Verify the fetchGatewayFlowRules method has been called
    verify(sentinelApiClient).fetchGatewayFlowRules(TEST_APP, TEST_IP, TEST_PORT);
    // Verify if two same entities are got
    Result<List<GatewayFlowRuleEntity>> result = JSONObject.parseObject(mvcResult.getResponse().getContentAsString(), new TypeReference<Result<List<GatewayFlowRuleEntity>>>(){});
    assertTrue(result.isSuccess());
    List<GatewayFlowRuleEntity> data = result.getData();
    assertEquals(2, data.size());
    assertEquals(entities, data);
    // Verify the entities are add into memory repository
    List<GatewayFlowRuleEntity> entitiesInMem = repository.findAllByApp(TEST_APP);
    assertEquals(2, entitiesInMem.size());
    assertEquals(entities, entitiesInMem);
}
/**
 * Applies the summary transform to every imported field of the schema.
 * Note: the validate/documentsOnly flags are not consulted here.
 */
@Override
public void process(boolean validate, boolean documentsOnly) {
    schema.allImportedFields()
        .forEach(this::setTransform);
}
// After processing, a summary field backed by an imported field must carry
// the ATTRIBUTE summary transform.
@Test
void attribute_summary_transform_applied_to_summary_field_of_imported_field() {
    Schema schema = createSearchWithDocument(DOCUMENT_NAME);
    schema.setImportedFields(createSingleImportedField(IMPORTED_FIELD_NAME));
    schema.addSummary(createDocumentSummary(IMPORTED_FIELD_NAME, schema));
    AddDataTypeAndTransformToSummaryOfImportedFields processor = new AddDataTypeAndTransformToSummaryOfImportedFields(
        schema, null, null, null);
    processor.process(true, false);
    SummaryField summaryField = schema.getSummaries().get(SUMMARY_NAME).getSummaryField(IMPORTED_FIELD_NAME);
    SummaryTransform actualTransform = summaryField.getTransform();
    assertEquals(SummaryTransform.ATTRIBUTE, actualTransform);
}
/** Asserts that the subject is exactly {@link Double#NEGATIVE_INFINITY}. */
public final void isNegativeInfinity() {
    isEqualTo(Double.NEGATIVE_INFINITY);
}
// Only NEGATIVE_INFINITY passes; finite values, POSITIVE_INFINITY,
// NaN and null must all fail the assertion.
@Test
public void isNegativeInfinity() {
    assertThat(Double.NEGATIVE_INFINITY).isNegativeInfinity();
    assertThatIsNegativeInfinityFails(1.23);
    assertThatIsNegativeInfinityFails(Double.POSITIVE_INFINITY);
    assertThatIsNegativeInfinityFails(Double.NaN);
    assertThatIsNegativeInfinityFails(null);
}
public static <T> T get(Collection<T> collection, int index) { if (null == collection) { return null; } final int size = collection.size(); if (0 == size) { return null; } if (index < 0) { index += size; } // 检查越界 if (index >= size || index < 0) { return null; } if (collection instanceof List) { final List<T> list = ((List<T>) collection); return list.get(index); } else { return IterUtil.get(collection.iterator(), index); } }
@Test
public void getTest() {
    // NOTE(review): index-based assertions on a set presume insertion order is
    // preserved — the boolean flag to CollUtil.set presumably selects an
    // ordered (linked) set; confirm against CollUtil.set's contract.
    final HashSet<String> set = CollUtil.set(true, "A", "B", "C", "D");
    String str = CollUtil.get(set, 2);
    assertEquals("C", str);
    // Negative indices count back from the end.
    str = CollUtil.get(set, -1);
    assertEquals("D", str);
}
// Reassigns an existing device to another tenant. The caller must be a tenant
// admin with assign rights on the device; the target tenant must exist.
@ApiOperation(value = "Assign device to tenant (assignDeviceToTenant)",
        notes = "Creates assignment of the device to tenant. Thereafter tenant will be able to reassign the device to a customer." + TENANT_AUTHORITY_PARAGRAPH)
@PreAuthorize("hasAuthority('TENANT_ADMIN')")
@RequestMapping(value = "/tenant/{tenantId}/device/{deviceId}", method = RequestMethod.POST)
@ResponseBody
public Device assignDeviceToTenant(@Parameter(description = TENANT_ID_PARAM_DESCRIPTION) @PathVariable(TENANT_ID) String strTenantId,
                                   @Parameter(description = DEVICE_ID_PARAM_DESCRIPTION) @PathVariable(DEVICE_ID) String strDeviceId) throws ThingsboardException {
    checkParameter(TENANT_ID, strTenantId);
    checkParameter(DEVICE_ID, strDeviceId);
    DeviceId deviceId = new DeviceId(toUUID(strDeviceId));
    // Verifies the device exists and the caller is allowed to assign it.
    Device device = checkDeviceId(deviceId, Operation.ASSIGN_TO_TENANT);
    TenantId newTenantId = TenantId.fromUUID(toUUID(strTenantId));
    Tenant newTenant = tenantService.findTenantById(newTenantId);
    if (newTenant == null) {
        throw new ThingsboardException("Could not find the specified Tenant!", ThingsboardErrorCode.BAD_REQUEST_PARAMS);
    }
    return tbDeviceService.assignDeviceToTenant(device, newTenant, getCurrentUser());
}
@Test
public void testAssignDeviceToTenant() throws Exception {
    // Create two devices owned by the current tenant and relate them.
    Device device = new Device();
    device.setName("My device");
    device.setType("default");
    Device savedDevice = doPost("/api/device", device, Device.class);
    Device anotherDevice = new Device();
    anotherDevice.setName("My device1");
    anotherDevice.setType("default");
    Device savedAnotherDevice = doPost("/api/device", anotherDevice, Device.class);
    EntityRelation relation = new EntityRelation();
    relation.setFrom(savedDevice.getId());
    relation.setTo(savedAnotherDevice.getId());
    relation.setTypeGroup(RelationTypeGroup.COMMON);
    relation.setType("Contains");
    doPost("/api/relation", relation).andExpect(status().isOk());
    // Create a second tenant (with its own admin) to receive the device.
    loginSysAdmin();
    Tenant tenant = new Tenant();
    tenant.setTitle("Different tenant");
    Tenant savedDifferentTenant = saveTenant(tenant);
    Assert.assertNotNull(savedDifferentTenant);
    User user = new User();
    user.setAuthority(Authority.TENANT_ADMIN);
    user.setTenantId(savedDifferentTenant.getId());
    user.setEmail("tenant9@thingsboard.org");
    user.setFirstName("Sam");
    user.setLastName("Downs");
    createUserAndLogin(user, "testPassword1");
    login("tenant2@thingsboard.org", "testPassword1");
    Mockito.reset(tbClusterService, auditLogService, gatewayNotificationsService);
    // Reassign the device; it must become invisible to the original tenant.
    Device assignedDevice = doPost("/api/tenant/" + savedDifferentTenant.getId().getId() + "/device/" + savedDevice.getId().getId(), Device.class);
    doGet("/api/device/" + assignedDevice.getId().getId())
        .andExpect(status().isNotFound())
        .andExpect(statusReason(containsString(msgErrorNoFound("Device", assignedDevice.getId().getId().toString()))));
    testNotifyEntityOneTimeMsgToEdgeServiceNever(assignedDevice, assignedDevice.getId(), assignedDevice.getId(), savedTenant.getId(), tenantAdmin.getCustomerId(), tenantAdmin.getId(), tenantAdmin.getEmail(), ActionType.ASSIGNED_TO_TENANT, savedDifferentTenant.getId().getId().toString(), savedDifferentTenant.getTitle());
    testNotificationUpdateGatewayNever();
    // The device-state service must see a delete under the old tenant ...
    ArgumentCaptor<TransportProtos.DeviceStateServiceMsgProto> protoCaptor = ArgumentCaptor.forClass(TransportProtos.DeviceStateServiceMsgProto.class);
    Awaitility.await().atMost(5, TimeUnit.SECONDS).until(() -> {
        Mockito.verify(deviceStateService, Mockito.atLeastOnce()).onQueueMsg(protoCaptor.capture(), any());
        return protoCaptor.getAllValues().stream().anyMatch(proto -> proto.getTenantIdMSB() == savedTenant.getUuidId().getMostSignificantBits()
            && proto.getTenantIdLSB() == savedTenant.getUuidId().getLeastSignificantBits()
            && proto.getDeviceIdMSB() == savedDevice.getUuidId().getMostSignificantBits()
            && proto.getDeviceIdLSB() == savedDevice.getUuidId().getLeastSignificantBits()
            && proto.getDeleted());
    });
    // ... and an add under the new tenant.
    Awaitility.await().atMost(5, TimeUnit.SECONDS).until(() -> {
        Mockito.verify(deviceStateService, Mockito.atLeastOnce()).onQueueMsg(protoCaptor.capture(), any());
        return protoCaptor.getAllValues().stream().anyMatch(proto -> proto.getTenantIdMSB() == savedDifferentTenant.getUuidId().getMostSignificantBits()
            && proto.getTenantIdLSB() == savedDifferentTenant.getUuidId().getLeastSignificantBits()
            && proto.getDeviceIdMSB() == savedDevice.getUuidId().getMostSignificantBits()
            && proto.getDeviceIdLSB() == savedDevice.getUuidId().getLeastSignificantBits()
            && proto.getAdded());
    });
    // The new tenant's admin can see the device; the old relation is gone.
    login("tenant9@thingsboard.org", "testPassword1");
    Device foundDevice1 = doGet("/api/device/" + assignedDevice.getId().getId(), Device.class);
    Assert.assertNotNull(foundDevice1);
    doGet("/api/relation?fromId=" + savedDevice.getId().getId() + "&fromType=DEVICE&relationType=Contains&toId=" + savedAnotherDevice.getId().getId() + "&toType=DEVICE")
        .andExpect(status().isNotFound())
        .andExpect(statusReason(containsString(msgErrorNoFound("Device", savedAnotherDevice.getId().getId().toString()))));
    loginSysAdmin();
    deleteTenant(savedDifferentTenant.getId());
}
/**
 * Parses the JSON array of operating-system end-of-life definitions and updates
 * this monitor's state ({@code operatingSystemName}, {@code documentationUrl},
 * {@code endOfLifeDate}, {@code afterEndOfLifeDate}, {@code warningsStartDate})
 * for entries whose pattern matches the local system and whose warning window
 * has already started. NOTE(review): when several entries match, later entries
 * overwrite the fields set by earlier ones — appears intentional, confirm.
 *
 * @param initialOperatingSystemJson JSON array; each element needs "pattern"
 *                                   and "endOfLife", optionally "start"
 * @throws IOException on an empty array or a malformed element
 */
void readOperatingSystemList(String initialOperatingSystemJson) throws IOException {
    JSONArray systems = JSONArray.fromObject(initialOperatingSystemJson);
    if (systems.isEmpty()) {
        throw new IOException("Empty data set");
    }
    for (Object systemObj : systems) {
        if (!(systemObj instanceof JSONObject)) {
            throw new IOException("Wrong object type in data file");
        }
        JSONObject system = (JSONObject) systemObj;
        if (!system.has("pattern")) {
            throw new IOException("Missing pattern in definition file");
        }
        String pattern = system.getString("pattern");
        if (!system.has("endOfLife")) {
            throw new IOException("No end of life date for " + pattern);
        }
        LocalDate endOfLife = LocalDate.parse(system.getString("endOfLife"));
        /* Start date defaults to 6 months before end of life */
        LocalDate startDate = system.has("start") ? LocalDate.parse(system.getString("start")) : endOfLife.minusMonths(6);
        File dataFile = getDataFile(system);
        LOGGER.log(Level.FINEST, "Pattern {0} starts {1} and reaches end of life {2} from file {3}",
                new Object[]{pattern, startDate, endOfLife, dataFile});
        // Empty name means the pattern did not match this system's data file.
        String name = readOperatingSystemName(dataFile, pattern);
        if (name.isEmpty()) {
            LOGGER.log(Level.FINE, "Pattern {0} did not match from file {1}", new Object[]{pattern, dataFile});
            continue;
        }
        // Track the earliest warning start date across all matching entries.
        if (startDate.isBefore(warningsStartDate)) {
            warningsStartDate = startDate;
            LOGGER.log(Level.FINE, "Warnings start date is now {0}", warningsStartDate);
        }
        LOGGER.log(Level.FINE, "Matched operating system {0}", name);
        // Only entries whose warning window has begun affect the monitor state.
        if (startDate.isBefore(LocalDate.now())) {
            this.operatingSystemName = name;
            this.documentationUrl = buildDocumentationUrl(this.operatingSystemName);
            this.endOfLifeDate = endOfLife.toString();
            if (endOfLife.isBefore(LocalDate.now())) {
                LOGGER.log(Level.FINE, "Operating system {0} is after end of life {1}", new Object[]{name, endOfLife});
                afterEndOfLifeDate = true;
            } else {
                LOGGER.log(Level.FINE, "Operating system {0} started warnings {1} and reaches end of life {2}", new Object[]{name, startDate, endOfLife});
            }
        }
    }
    if (lastLines != null) {
        // Discard the cached contents of the last read file
        lastLines.clear();
    }
}
/** A definition with a pattern but no "endOfLife" entry must be rejected. */
@Test
public void testReadOperatingSystemListNoEndOfLife() {
    final String definitionMissingEol = "[{\"pattern\": \"Alpine\"}]";
    final IOException thrown = assertThrows(IOException.class,
            () -> monitor.readOperatingSystemList(definitionMissingEol));
    assertThat(thrown.getMessage(), is("No end of life date for Alpine"));
}
/**
 * Resolves the cluster-wide default index set. Fails loudly if no default
 * is registered or the registered id does not resolve to a configuration.
 */
@Override
public IndexSetConfig getDefault() {
    final DefaultIndexSetConfig defaultConfig = clusterConfigService.get(DefaultIndexSetConfig.class);
    // A missing default registration indicates broken cluster state, not user error.
    checkState(defaultConfig != null, "No default index set configured. This is a bug!");

    final String defaultId = defaultConfig.defaultIndexSetId();
    return get(defaultId).orElseThrow(
            () -> new IllegalStateException("Couldn't find default index set <" + defaultId + ">. This is a bug!"));
}
@Test
@MongoDBFixtures("MongoIndexSetServiceTest.json")
public void getDefault() throws Exception {
    // Register which index set id is the default, then expect getDefault() to resolve exactly it.
    clusterConfigService.write(DefaultIndexSetConfig.create("57f3d721a43c2d59cb750002"));
    final IndexSetConfig indexSetConfig = indexSetService.getDefault();
    assertThat(indexSetConfig).isNotNull();
    assertThat(indexSetConfig.id()).isEqualTo("57f3d721a43c2d59cb750002");
}
/** Display name of this (no-op) compression provider. */
@Override
public String getName() {
    final String providerName = "None";
    return providerName;
}
/** The factory must resolve the provider by name, and the provider must report that same name. */
@Test
public void testGetName() {
    final NoneCompressionProvider noneProvider =
            (NoneCompressionProvider) factory.getCompressionProviderByName( PROVIDER_NAME );
    assertNotNull( noneProvider );
    assertEquals( PROVIDER_NAME, noneProvider.getName() );
}
/**
 * Copies every resolved storage-handler property into the job configuration,
 * except the password-bearing keys, which must never leak into job properties.
 *
 * @param props   source properties; validated and metadata-resolved first
 * @param jobProps destination job configuration map
 * @throws HiveException if required properties are missing
 * @throws IOException   if metadata resolution fails
 */
public static void copyConfigurationToJob(Properties props, Map<String, String> jobProps)
    throws HiveException, IOException {
  checkRequiredPropertiesAreDefined(props);
  resolveMetadata(props);
  for (Entry<Object, Object> configEntry : props.entrySet()) {
    String configKey = String.valueOf(configEntry.getKey());
    // Secrets stay out of the job configuration.
    boolean isSecretKey = configKey.equals(CONFIG_PWD)
        || configKey.equals(CONFIG_PWD_KEYSTORE)
        || configKey.equals(CONFIG_PWD_KEY)
        || configKey.equals(CONFIG_PWD_URI);
    if (!isSecretKey) {
      jobProps.put(configKey, String.valueOf(configEntry.getValue()));
    }
  }
}
/** Omitting the database type must be rejected with IllegalArgumentException. */
@Test(expected = IllegalArgumentException.class)
public void testWithDatabaseTypeMissing() throws Exception {
    final Properties configProps = new Properties();
    configProps.put(JdbcStorageConfig.JDBC_URL.getPropertyName(), "jdbc://localhost:3306/hive");
    configProps.put(JdbcStorageConfig.QUERY.getPropertyName(), "SELECT col1,col2,col3 FROM sometable");

    final Map<String, String> jobProperties = new HashMap<>();
    JdbcStorageConfigManager.copyConfigurationToJob(configProps, jobProperties);
}
// Response payload. The schema example documents the project convention:
// a singular domain name maps to an object, a plural one to an array.
@Schema(description = "응답 메시지", example = """
        data: {
            "aDomain": { // 단수명사는 object 형태로 반환
                ...
            },`
            "bDomains": [ // 복수명사는 array 형태로 반환
                ...
            ]
        }
        """)
private T data;

// Private: instances are created via the Lombok-generated builder or the static factories.
@Builder
private SuccessResponse(T data) {
    this.data = data;
}

/**
 * Returns a success response of the form {@code data : { "key" : data }}.
 * <br/>
 * Used to explicitly name the key under which the payload is exposed.
 */
public static <V> SuccessResponse<Map<String, V>> from(String key, V data) {
    return SuccessResponse.<Map<String, V>>builder()
        .data(Map.of(key, data))
        .build();
}
@Test @DisplayName("SuccessResponse.from() - key와 DTO를 통한 성공 응답") public void successResponseFromDtoWithKey() { // Given String key = "test"; // When SuccessResponse<Map<String, TestDto>> response = SuccessResponse.from(key, dto); // Then assertEquals("2000", response.getCode()); assertEquals(Map.of(key, dto), response.getData()); System.out.println(response); }
/**
 * Discovers cluster members from GCP addresses, emitting one node per
 * (address, port) combination within the configured port range.
 * Any failure is logged and reported as an empty discovery result so that
 * discovery errors never abort the caller.
 */
@Override
public Iterable<DiscoveryNode> discoverNodes() {
    try {
        final List<GcpAddress> addresses = gcpClient.getAddresses();
        logGcpAddresses(addresses);

        final List<DiscoveryNode> discovered = new ArrayList<>();
        for (GcpAddress address : addresses) {
            for (int candidatePort = portRange.getFromPort(); candidatePort <= portRange.getToPort(); candidatePort++) {
                discovered.add(createDiscoveryNode(address, candidatePort));
            }
        }
        return discovered;
    } catch (Exception e) {
        LOGGER.warning("Cannot discover nodes, returning empty list", e);
        return Collections.emptyList();
    }
}
@Test public void discoverNodesException() { // given given(gcpClient.getAddresses()).willThrow(new RuntimeException("Error while checking GCP instances")); // when Iterable<DiscoveryNode> nodes = gcpDiscoveryStrategy.discoverNodes(); // then assertFalse(nodes.iterator().hasNext()); }
static <T extends CompoundPredicate> T flattenCompound(Predicate predicateLeft, Predicate predicateRight, Class<T> klass) { // The following could have been achieved with {@link com.hazelcast.query.impl.predicates.FlatteningVisitor}, // however since we only care for 2-argument flattening, we can avoid constructing a visitor and its internals // for each token pass at the cost of the following explicit code. Predicate[] predicates; if (klass.isInstance(predicateLeft) || klass.isInstance(predicateRight)) { Predicate[] left = getSubPredicatesIfClass(predicateLeft, klass); Predicate[] right = getSubPredicatesIfClass(predicateRight, klass); predicates = new Predicate[left.length + right.length]; ArrayUtils.concat(left, right, predicates); } else { predicates = new Predicate[]{predicateLeft, predicateRight}; } try { T compoundPredicate = klass.getDeclaredConstructor().newInstance(); compoundPredicate.setPredicates(predicates); return compoundPredicate; } catch (ReflectiveOperationException e) { throw new IllegalArgumentException(String.format("%s must have a public default constructor", klass.getName())); } }
/**
 * Flattening an OR with an AND into an OR must splice the OR's children in
 * and keep the AND as a single opaque child.
 */
@Test
public void testFlattenOr_withOrAndPredicates() {
    final OrPredicate orPredicate = new OrPredicate(leftOfOr, rightOfOr);
    final AndPredicate andPredicate = new AndPredicate(leftOfAnd, rightOfAnd);

    final OrPredicate flattened = SqlPredicate.flattenCompound(orPredicate, andPredicate, OrPredicate.class);

    assertSame(leftOfOr, flattened.getPredicates()[0]);
    assertSame(rightOfOr, flattened.getPredicates()[1]);
    assertSame(andPredicate, flattened.getPredicates()[2]);
}
@Override protected Upstream doSelect(final List<Upstream> upstreamList, final String ip) { long start = System.currentTimeMillis(); Upstream[] upstreams = pickTwoUpstreams(upstreamList); Upstream picked; Upstream unpicked; if (load(upstreams[0]) > load(upstreams[1])) { picked = upstreams[1]; unpicked = upstreams[0]; } else { picked = upstreams[0]; unpicked = upstreams[1]; } // If the failed node is not selected once in the forceGap period, it is forced to be selected once. long pick = unpicked.getLastPicked(); if ((start - pick) > FORCE_GAP) { unpicked.setLastPicked(start); picked = unpicked; } if (picked != unpicked) { picked.setLastPicked(start); } picked.getInflight().incrementAndGet(); return picked; }
/** With equal lag, two selections must cover both upstreams (in either order). */
@Test
public void testResponseTimeBalancerSameLag() {
    buildUpstreamList();
    final P2cLoadBalancer balancer = new P2cLoadBalancer();

    final Upstream first = balancer.doSelect(upstreamList, "localhost");
    final Upstream second = balancer.doSelect(upstreamList, "localhost");

    final boolean baiduThenJd = first.getUrl().equals("baidu.com") && second.getUrl().equals("pro.jd.com");
    final boolean jdThenBaidu = second.getUrl().equals("baidu.com") && first.getUrl().equals("pro.jd.com");
    Assertions.assertTrue(baiduThenJd || jdThenBaidu);
}
/**
 * Returns the longitude of this bounding box's center, delegating to the
 * static overload with the box's west and east edges.
 * NOTE(review): the static overload appears to wrap across the antimeridian
 * when west > east (see testGetCenterLongitude) — behavior defined elsewhere.
 */
public double getCenterLongitude() {
    return getCenterLongitude(mLonWest, mLonEast);
}
@Test
public void testGetCenterLongitude() {
    // Plain midpoint when west <= east...
    assertEquals(1.5, BoundingBox.getCenterLongitude(1, 2), TOLERANCE);
    // ...and antimeridian wrapping when the box crosses the date line (west > east).
    assertEquals(-178.5, BoundingBox.getCenterLongitude(2, 1), TOLERANCE);
}
/**
 * Resolves the fallback logger stream from the system property:
 * "stdout" -> System.out, "no_op" -> a discarding stream, anything else
 * (including the default "stderr") -> System.err.
 */
@Config
public static PrintStream fallbackLogger() {
    final String loggerName = getProperty(FALLBACK_LOGGER_PROP_NAME, "stderr");
    if ("stdout".equals(loggerName)) {
        return System.out;
    }
    if ("no_op".equals(loggerName)) {
        return NO_OP_LOGGER;
    }
    // "stderr" and any unrecognised value fall back to standard error.
    return System.err;
}
@Test
void fallbackLoggerReturnsSystemErrorIfNothingSpecified() {
    // With the property unset, the default ("stderr") must apply.
    System.clearProperty(FALLBACK_LOGGER_PROP_NAME);
    assertSame(System.err, CommonContext.fallbackLogger());
}
@SuppressWarnings("UnstableApiUsage") @Override public Stream<ColumnName> resolveSelectStar( final Optional<SourceName> sourceName ) { final Stream<ColumnName> names = Stream.of(left, right) .flatMap(JoinNode::getPreJoinProjectDataSources) .filter(s -> !sourceName.isPresent() || sourceName.equals(s.getSourceName())) .flatMap(s -> s.resolveSelectStar(sourceName)); if (sourceName.isPresent() || !joinKey.isSynthetic() || !finalJoin) { return names; } // if we use a synthetic key, we know there's only a single key element final Column syntheticKey = getOnlyElement(getSchema().key()); return Streams.concat(Stream.of(syntheticKey.name()), names); }
@Test
public void shouldResolveNestedAliasedSelectStarByCallingOnlyCorrectParentWithMultiJoins() {
    // Given: a join tree where the aliased source ("right") lives inside a nested join.
    final JoinNode inner = new JoinNode(new PlanNodeId("foo"), LEFT, joinKey, true, right, right2, empty(), "KAFKA");
    final JoinNode joinNode = new JoinNode(nodeId, LEFT, joinKey, true, left, inner, empty(), "KAFKA");
    when(right.resolveSelectStar(any())).thenReturn(Stream.of(ColumnName.of("r")));

    // When:
    final Stream<ColumnName> result = joinNode
        .resolveSelectStar(Optional.of(RIGHT_ALIAS));

    // Then: only the aliased source contributes columns...
    final List<ColumnName> columns = result.collect(Collectors.toList());
    assertThat(columns, contains(ColumnName.of("r")));

    // ...and none of its siblings are ever consulted.
    verify(left, never()).resolveSelectStar(any());
    verify(right2, never()).resolveSelectStar(any());
    verify(right).resolveSelectStar(Optional.of(RIGHT_ALIAS));
}
/**
 * Reports whether clone group {@code first} is contained in {@code second}:
 * a shorter (or equal) clone unit whose parts are all covered by the second
 * group's parts, with both groups spanning the same resources.
 */
static boolean containsIn(CloneGroup first, CloneGroup second) {
    final int firstLength = first.getCloneUnitLength();
    final int secondLength = second.getCloneUnitLength();
    // A longer clone unit can never be contained in a shorter one.
    if (firstLength > secondLength) {
        return false;
    }
    final List<ClonePart> firstParts = first.getCloneParts();
    final List<ClonePart> secondParts = second.getCloneParts();
    if (!SortedListsUtils.contains(secondParts, firstParts,
            new ContainsInComparator(secondLength, firstLength))) {
        return false;
    }
    return SortedListsUtils.contains(firstParts, secondParts, ContainsInComparator.RESOURCE_ID_COMPARATOR);
}
/** Groups spanning different resources must not contain one another, in either direction. */
@Test
public void different_resources() {
    final CloneGroup groupInA = newCloneGroup(1, newClonePart("a", 0), newClonePart("a", 2));
    final CloneGroup groupAcrossAB = newCloneGroup(3, newClonePart("a", 0), newClonePart("b", 0));

    assertThat(Filter.containsIn(groupInA, groupAcrossAB), is(false));
    assertThat(Filter.containsIn(groupAcrossAB, groupInA), is(false));
}
/** Advances the frame counter; every {@code delay} frames a lightning shot fires and the counter resets. */
@Override
public void update() {
    frames++;
    if (frames == delay) {
        shootLightning();
        frames = 0;
    }
}
/** Below the shoot threshold, update() only increments the frame counter. */
@Test
void testUpdateForPendingShoot() {
    final int framesBefore = 10;
    statue.frames = framesBefore;
    statue.update();
    assertEquals(framesBefore + 1, statue.frames);
}
/**
 * Creates a SequenceFile writer whose concrete type matches the requested
 * compression: RECORD and BLOCK get their compressing writers, anything else
 * (including NONE) the plain writer. When no compression option is supplied,
 * the configured default is used and prepended to the options.
 *
 * @param conf configuration to read defaults from
 * @param opts writer options, possibly including a compression option
 * @throws IOException if the writer cannot be created
 */
public static Writer createWriter(Configuration conf, Writer.Option... opts
                                  ) throws IOException {
  Writer.CompressionOption compressionOption =
    Options.getOption(Writer.CompressionOption.class, opts);
  CompressionType kind;
  if (compressionOption == null) {
    kind = getDefaultCompressionType(conf);
    opts = Options.prependOptions(opts, Writer.compression(kind));
  } else {
    kind = compressionOption.getValue();
  }
  if (kind == CompressionType.RECORD) {
    return new RecordCompressWriter(conf, opts);
  }
  if (kind == CompressionType.BLOCK) {
    return new BlockCompressWriter(conf, opts);
  }
  // NONE and any other value -> uncompressed writer.
  return new Writer(conf, opts);
}
@SuppressWarnings("deprecation") @Test public void testCreateUsesFsArg() throws Exception { FileSystem fs = FileSystem.getLocal(conf); FileSystem spyFs = Mockito.spy(fs); Path p = new Path(GenericTestUtils.getTempPath("testCreateUsesFSArg.seq")); SequenceFile.Writer writer = SequenceFile.createWriter( spyFs, conf, p, NullWritable.class, NullWritable.class); writer.close(); Mockito.verify(spyFs).getDefaultReplication(p); }
/**
 * Tokenizes key=value pairs out of the message. A null/empty message is
 * returned unchanged; a message without '=' yields an empty map; otherwise
 * every PATTERN match is added with surrounding quotes stripped.
 */
@Override
public Object convert(String value) {
    if (isNullOrEmpty(value)) {
        return value;
    }
    if (!value.contains("=")) {
        // No '=' means there cannot be any key=value pair at all.
        return Collections.emptyMap();
    }
    final Map<String, String> extractedFields = new HashMap<>();
    final Matcher matcher = PATTERN.matcher(value);
    while (matcher.find()) {
        if (matcher.groupCount() != 2) {
            continue;
        }
        extractedFields.put(removeQuotes(matcher.group(1)), removeQuotes(matcher.group(2)));
    }
    return extractedFields;
}
/** A lone '=' inside a URL must not be tokenized into a key/value pair. */
@Test
public void testFilterWithOneInvalidKVPair() {
    final TokenizerConverter converter = new TokenizerConverter(new HashMap<String, Object>());

    @SuppressWarnings("unchecked")
    final Map<String, String> extracted =
            (Map<String, String>) converter.convert("Ohai I am a message and this is a URL: index.php?foo=bar");

    assertEquals(0, extracted.size());
}
/**
 * Persists an ephemeral znode at {@code key} with {@code value} (UTF-8 bytes),
 * bound to the current ZooKeeper session. Any existing node is deleted first
 * (including its children) so it can be re-created in EPHEMERAL mode.
 * Exceptions are routed to the shared ZooKeeper exception handler.
 */
@Override
public void persistEphemeral(final String key, final String value) {
    try {
        if (isExisted(key)) {
            // An existing (possibly persistent) node cannot be switched to ephemeral in place.
            client.delete().deletingChildrenIfNeeded().forPath(key);
        }
        client.create().creatingParentsIfNeeded().withMode(CreateMode.EPHEMERAL).forPath(key, value.getBytes(StandardCharsets.UTF_8));
        // CHECKSTYLE:OFF
    } catch (final Exception ex) {
        // CHECKSTYLE:ON
        ZookeeperExceptionHandler.handleException(ex);
    }
}
@Test
void assertPersistEphemeralNotExist() throws Exception {
    // For a key that does not exist yet, persistEphemeral must go straight to an
    // EPHEMERAL create carrying the UTF-8 bytes of the value (no delete first).
    when(protect.withMode(CreateMode.EPHEMERAL)).thenReturn(protect);
    REPOSITORY.persistEphemeral("/test/ephemeral", "value3");
    verify(protect).forPath("/test/ephemeral", "value3".getBytes(StandardCharsets.UTF_8));
}
/**
 * Parses {@code str} against the given {@link SimpleDateFormat} pattern.
 * On a malformed string or a null argument the problem is logged and the
 * returned Calendar is left at the current date/time ("now" is the fallback).
 *
 * @param str    the datetime string to parse (may be null)
 * @param format a SimpleDateFormat pattern such as "yyyy/MM/dd HH:mm:ss"
 * @return a Calendar set to the parsed instant, or to "now" on failure
 */
public static Calendar getDateFromString(String str, String format) {
    Calendar cal = Calendar.getInstance();
    SimpleDateFormat sdf = new SimpleDateFormat(format);
    try {
        cal.setTime(sdf.parse(str));
    } catch (ParseException e) {
        // Fixed: a separator was missing between the message and the exception detail.
        LogDelegate.e("Malformed datetime string: " + e.getMessage());
    } catch (NullPointerException e) {
        LogDelegate.e("Date or time not set");
    }
    return cal;
}
/** A well-formed datetime string must parse into the matching Calendar fields (MONTH is 0-based). */
@Test
public void getDateFromStringTest() {
    final String input = "2020/03/12 15:15:15";
    final String pattern = "yyyy/MM/dd HH:mm:ss";

    final Calendar parsed = DateUtils.getDateFromString(input, pattern);

    assertEquals(parsed.get(Calendar.YEAR), 2020);
    assertEquals(parsed.get(Calendar.MONTH), 2);
    assertEquals(parsed.get(Calendar.DATE), 12);
    assertEquals(parsed.get(Calendar.HOUR_OF_DAY), 15);
    assertEquals(parsed.get(Calendar.MINUTE), 15);
    assertEquals(parsed.get(Calendar.SECOND), 15);
}
/**
 * Factory for a table UDAF that sums all int values of a list column into a
 * single int total, with undo support for table (retractable) aggregation.
 * NOTE(review): sums are plain int additions — large inputs can overflow silently.
 */
@UdafFactory(description = "sum int values in a list into a single int")
public static TableUdaf<List<Integer>, Integer, Integer> sumIntList() {
    return new TableUdaf<List<Integer>, Integer, Integer>() {
        @Override
        public Integer initialize() {
            // The running total starts at zero.
            return 0;
        }

        @Override
        public Integer aggregate(final List<Integer> valueToAdd, final Integer aggregateValue) {
            // Null rows contribute nothing to the total.
            if (valueToAdd == null) {
                return aggregateValue;
            }
            return aggregateValue + sumList(valueToAdd);
        }

        @Override
        public Integer merge(final Integer aggOne, final Integer aggTwo) {
            // Partial totals combine by simple addition.
            return aggOne + aggTwo;
        }

        @Override
        public Integer map(final Integer agg) {
            // The aggregate is already the final result.
            return agg;
        }

        @Override
        public Integer undo(final List<Integer> valueToUndo, final Integer aggregateValue) {
            // Retracting a null row leaves the total unchanged.
            if (valueToUndo == null) {
                return aggregateValue;
            }
            return aggregateValue - sumList(valueToUndo);
        }

        private int sumList(final List<Integer> list) {
            // Fold the list with integer addition, starting from initialize()'s zero.
            return sum(list, initialize(), Integer::sum);
        }
    };
}
/** Undoing three of four aggregated 1s must leave a total of 1. */
@Test
public void shouldUndoSum() {
    final TableUdaf<List<Integer>, Integer, Integer> udaf = ListSumUdaf.sumIntList();
    final Integer[] values = new Integer[] {1, 1, 1, 1};
    final List<Integer> list = Arrays.asList(values);
    final Integer sum = udaf.aggregate(list, 0);

    final Integer[] undoValues = new Integer[] {1, 1, 1};
    final List<Integer> undoList = Arrays.asList(undoValues);
    final int undo = udaf.undo(undoList, sum);

    // Fixed: assertThat takes (actual, matcher) — the arguments were reversed.
    assertThat(undo, equalTo(1));
}
/**
 * Returns the number of counters currently in this set. The backing
 * collection reports an {@code int}, widened to {@code long} by the return type.
 */
public long size() {
    return counters.size();
}
/** Counters registered under distinct names must both be retained. */
@Test
public void testAddWithDifferentNamesAddsAll() {
    counterSet.longSum(name1);
    counterSet.intMax(name2);

    final long counterCount = counterSet.size();
    assertThat(counterCount, equalTo(2L));
}
/**
 * Builds the Pipe resource and dumps it, returning the dump's exit code.
 * An empty construction result is reported as a failure (-1).
 */
@Override
public Integer doCall() throws Exception {
    final String pipe = constructPipe();
    if (!pipe.isEmpty()) {
        return dumpPipe(pipe);
    }
    printer().println("Failed to construct Pipe resource");
    return -1;
}
@Test public void shouldBindKameletSourceToKameletSink() throws Exception { Bind command = createCommand("timer", "log"); command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: "hello world" sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-sink #properties: #key: "value" """.trim(), output); } @Test public void shouldBindNamespacedKamelets() throws Exception { Bind command = createCommand("timer", "log"); command.source = "my-namespace/timer-source"; command.sink = "my-namespace/log-sink"; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source namespace: my-namespace properties: message: "hello world" sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-sink namespace: my-namespace #properties: #key: "value" """.trim(), output); } @Test public void shouldBindKameletsExplicitPrefix() throws Exception { Bind command = createCommand("timer", "log"); command.source = "kamelet:timer-source"; command.sink = "kamelet:log-sink"; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: "hello world" sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-sink #properties: #key: "value" """.trim(), output); } @Test public void shouldBindKameletSourceToKameletSinkWithProperties() throws Exception { Bind command = createCommand("timer", "log"); command.properties = new String[] { "source.message=Hello", "source.period=5000", "sink.showHeaders=true", }; 
command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: Hello period: 5000 sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-sink properties: showHeaders: true """.trim(), output); } @Test public void shouldBindKameletsWithUriProperties() throws Exception { Bind command = createCommand("timer", "log"); command.source = "timer-source?message=Hi"; command.sink = "log-sink?showHeaders=true"; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: Hi sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-sink properties: showHeaders: true """.trim(), output); } @Test public void shouldBindWithSteps() throws Exception { Bind command = createCommand("timer", "http"); command.steps = new String[] { "set-body-action", "log-action" }; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-http spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: "hello world" steps: - ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: set-body-action properties: value: "value" - ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-action #properties: #key: "value" sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: http-sink properties: url: "https://my-service/path" """.trim(), output); } @Test public void shouldBindWithUriSteps() throws Exception { Bind command = createCommand("timer", "http"); command.steps = new String[] { "set-body-action", "log:info" }; command.doCall(); String output = 
printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-http spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: "hello world" steps: - ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: set-body-action properties: value: "value" - uri: log:info #properties: #key: "value" sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: http-sink properties: url: "https://my-service/path" """.trim(), output); } @Test public void shouldBindWithStepsAndProperties() throws Exception { Bind command = createCommand("timer", "http"); command.steps = new String[] { "set-body-action", "log-action" }; command.properties = new String[] { "step-1.value=\"Camel rocks!\"", "step-2.showHeaders=true", "step-2.showExchangePattern=false" }; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-http spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: "hello world" steps: - ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: set-body-action properties: value: "Camel rocks!" 
- ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-action properties: showHeaders: true showExchangePattern: false sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: http-sink properties: url: "https://my-service/path" """.trim(), output); } @Test public void shouldBindWithUriStepsAndProperties() throws Exception { Bind command = createCommand("timer", "http"); command.steps = new String[] { "set-body-action", "log:info" }; command.properties = new String[] { "step-1.value=\"Camel rocks!\"", "step-2.showHeaders=true" }; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-http spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: "hello world" steps: - ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: set-body-action properties: value: "Camel rocks!" - uri: log:info properties: showHeaders: true sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: http-sink properties: url: "https://my-service/path" """.trim(), output); } @Test public void shouldBindWithUriStepsAndUriProperties() throws Exception { Bind command = createCommand("timer", "http"); command.steps = new String[] { "set-body-action", "log:info?showExchangePattern=false&showStreams=true" }; command.properties = new String[] { "step-1.value=\"Camel rocks!\"", "step-2.showHeaders=true" }; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-http spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: "hello world" steps: - ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: set-body-action properties: value: "Camel rocks!" 
- uri: log:info properties: showStreams: true showHeaders: true showExchangePattern: false sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: http-sink properties: url: "https://my-service/path" """.trim(), output); } @Test public void shouldBindKameletSourceToUri() throws Exception { Bind command = createCommand("timer", "log:info"); command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: "hello world" sink: uri: log:info #properties: #key: "value" """.trim(), output); } @Test public void shouldBindKameletSourceToUriWithProperties() throws Exception { Bind command = createCommand("timer", "log:info"); command.properties = new String[] { "source.message=Hello", "sink.showHeaders=true", }; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: Hello sink: uri: log:info properties: showHeaders: true """.trim(), output); } @Test public void shouldBindKameletSourceToUriWithUriProperties() throws Exception { Bind command = createCommand("timer", "log:info?showStreams=false"); command.properties = new String[] { "source.message=Hello", "sink.showHeaders=true", }; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: Hello sink: uri: log:info properties: showStreams: false showHeaders: true """.trim(), output); } @Test public void shouldBindUriToUri() throws Exception { Bind command = createCommand("timer:tick", "log:info"); command.doCall(); String output 
= printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: uri: timer:tick #properties: #key: "value" sink: uri: log:info #properties: #key: "value" """.trim(), output); } @Test public void shouldBindUriToUriWithProperties() throws Exception { Bind command = createCommand("timer:tick", "log:info"); command.properties = new String[] { "source.message=Hello", "sink.showHeaders=true", }; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: uri: timer:tick properties: message: Hello sink: uri: log:info properties: showHeaders: true """.trim(), output); } @Test public void shouldBindUriToUriWithUriProperties() throws Exception { Bind command = createCommand("timer:tick?period=10000", "log:info?showStreams=false"); command.properties = new String[] { "source.message=Hello", "sink.showHeaders=true", }; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: uri: timer:tick properties: period: 10000 message: Hello sink: uri: log:info properties: showStreams: false showHeaders: true """.trim(), output); } @Test public void shouldBindKameletSinkErrorHandler() throws Exception { Bind command = createCommand("timer", "log"); command.errorHandler = "sink:log-sink"; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: "hello world" sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-sink #properties: #key: "value" errorHandler: sink: endpoint: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-sink #properties: #key: "value" parameters: {} 
""".trim(), output); } @Test public void shouldBindKameletSinkErrorHandlerWithParameters() throws Exception { Bind command = createCommand("timer", "log"); command.errorHandler = "sink:log-sink"; command.properties = new String[] { "error-handler.maximumRedeliveries=3", "error-handler.redeliveryDelay=2000" }; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: "hello world" sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-sink #properties: #key: "value" errorHandler: sink: endpoint: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-sink #properties: #key: "value" parameters: redeliveryDelay: 2000 maximumRedeliveries: 3 """.trim(), output); } @Test public void shouldBindKameletSinkErrorHandlerAndSinkProperties() throws Exception { Bind command = createCommand("timer", "log"); command.errorHandler = "sink:log-sink"; command.properties = new String[] { "error-handler.sink.showHeaders=true", "error-handler.maximumRedeliveries=3", "error-handler.redeliveryDelay=2000" }; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: "hello world" sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-sink #properties: #key: "value" errorHandler: sink: endpoint: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-sink properties: showHeaders: true parameters: redeliveryDelay: 2000 maximumRedeliveries: 3 """.trim(), output); } @Test public void shouldBindEndpointUriSinkErrorHandler() throws Exception { Bind command = createCommand("timer", "log"); command.errorHandler = "sink:log:error"; command.doCall(); String output = 
printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: "hello world" sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-sink #properties: #key: "value" errorHandler: sink: endpoint: uri: log:error #properties: #key: "value" parameters: {} """.trim(), output); } @Test public void shouldBindEndpointUriSinkErrorHandlerWithParameters() throws Exception { Bind command = createCommand("timer", "log"); command.errorHandler = "sink:log:error"; command.properties = new String[] { "error-handler.maximumRedeliveries=3", "error-handler.redeliveryDelay=2000" }; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: "hello world" sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-sink #properties: #key: "value" errorHandler: sink: endpoint: uri: log:error #properties: #key: "value" parameters: redeliveryDelay: 2000 maximumRedeliveries: 3 """.trim(), output); } @Test public void shouldBindEndpointUriSinkErrorHandlerAndSinkProperties() throws Exception { Bind command = createCommand("timer", "log"); command.errorHandler = "sink:log:error"; command.properties = new String[] { "error-handler.sink.showHeaders=true", "error-handler.maximumRedeliveries=3", "error-handler.redeliveryDelay=2000" }; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: "hello world" sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-sink #properties: #key: "value" errorHandler: sink: 
endpoint: uri: log:error properties: showHeaders: true parameters: redeliveryDelay: 2000 maximumRedeliveries: 3 """.trim(), output); } @Test public void shouldBindEndpointUriSinkErrorHandlerAndUriProperties() throws Exception { Bind command = createCommand("timer", "log"); command.errorHandler = "sink:log:error?showStreams=false"; command.properties = new String[] { "error-handler.sink.showHeaders=true", "error-handler.maximumRedeliveries=3", "error-handler.redeliveryDelay=2000" }; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: "hello world" sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-sink #properties: #key: "value" errorHandler: sink: endpoint: uri: log:error properties: showStreams: false showHeaders: true parameters: redeliveryDelay: 2000 maximumRedeliveries: 3 """.trim(), output); } @Test public void shouldBindWithLogErrorHandler() throws Exception { Bind command = createCommand("timer", "log"); command.errorHandler = "log"; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: "hello world" sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-sink #properties: #key: "value" errorHandler: log: parameters: {} """.trim(), output); } @Test public void shouldBindWithLogErrorHandlerWithParameters() throws Exception { Bind command = createCommand("timer", "log"); command.errorHandler = "log"; command.properties = new String[] { "error-handler.maximumRedeliveries=3", "error-handler.redeliveryDelay=2000" }; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 
kind: Pipe metadata: name: timer-to-log spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: "hello world" sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-sink #properties: #key: "value" errorHandler: log: parameters: redeliveryDelay: 2000 maximumRedeliveries: 3 """.trim(), output); } @Test public void shouldBindWithNoErrorHandler() throws Exception { Bind command = createCommand("timer", "log"); command.errorHandler = "none"; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" apiVersion: camel.apache.org/v1 kind: Pipe metadata: name: timer-to-log spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: timer-source properties: message: "hello world" sink: ref: kind: Kamelet apiVersion: camel.apache.org/v1 name: log-sink #properties: #key: "value" errorHandler: none: {} """.trim(), output); } @Test public void shouldSupportJsonOutput() throws Exception { Bind command = createCommand("timer", "log"); command.output = "json"; command.doCall(); String output = printer.getOutput(); Assertions.assertEquals(""" { "apiVersion": "camel.apache.org/v1", "kind": "Pipe", "metadata": { "name": "timer-to-log" }, "spec": { "source": { "ref": { "kind": "Kamelet", "apiVersion": "camel.apache.org/v1", "name": "timer-source" }, "properties": { "message": "hello world" } }, "sink": { "ref": { "kind": "Kamelet", "apiVersion": "camel.apache.org/v1", "name": "log-sink" } } } } """.trim(), output); } @Test public void shouldHandleUnsupportedOutputFormat() throws Exception { Bind command = createCommand("timer", "log"); command.output = "wrong"; Assertions.assertEquals(-1, command.doCall()); Assertions.assertEquals("Unsupported output format 'wrong' (supported: file, yaml, json)", printer.getOutput()); } private Bind createCommand(String source, String sink) { Bind command = new Bind(new CamelJBangMain().withPrinter(printer)); String sourceName; String sourceUri; if 
(source.contains(":")) { sourceName = StringHelper.before(source, ":"); sourceUri = source; } else { sourceName = source; sourceUri = source + "-source"; } String sinkName; String sinkUri; if (sink.contains(":")) { sinkName = StringHelper.before(sink, ":"); sinkUri = sink; } else { sinkName = sink; sinkUri = sink + "-sink"; } command.file = sourceName + "-to-" + sinkName + ".yaml"; command.source = sourceUri; command.sink = sinkUri; command.output = "yaml"; return command; } }
/**
 * Wraps the given file-based source in a {@link CompressedSource} using
 * {@code CompressionMode.AUTO}, which selects decompression handling
 * automatically (per-file detection — see CompressionMode docs).
 *
 * @param sourceDelegate the underlying file-based source to wrap
 * @return a new compressed source delegating to {@code sourceDelegate}
 */
public static <T> CompressedSource<T> from(FileBasedSource<T> sourceDelegate) {
    CompressedSource<T> autoModeSource = new CompressedSource<>(sourceDelegate, CompressionMode.AUTO);
    return autoModeSource;
}
/**
 * Verifies progress and split-point accounting when an uncompressed file is
 * read through {@code CompressedSource}: the delegate {@code FileBasedReader}
 * (not the compressed wrapper) must be used, and consumed/remaining split
 * points must be reported correctly across a two-record file.
 */
@Test
public void testSplittableProgress() throws IOException {
    File tmpFile = tmpFolder.newFile("nonempty.txt");
    String filename = tmpFile.toPath().toString();
    // Two bytes written; with ByteSource(filename, 1) this presumably yields two
    // one-byte records — TODO confirm against ByteSource's constructor contract.
    Files.write(new byte[2], tmpFile);
    PipelineOptions options = PipelineOptionsFactory.create();
    CompressedSource<Byte> source = CompressedSource.from(new ByteSource(filename, 1));
    try (BoundedReader<Byte> readerOrig = source.createReader(options)) {
        // Uncompressed input: the plain file-based reader must be used directly.
        assertThat(readerOrig, not(instanceOf(CompressedReader.class)));
        assertThat(readerOrig, instanceOf(FileBasedReader.class));
        FileBasedReader<Byte> reader = (FileBasedReader<Byte>) readerOrig;
        // Check preconditions before starting
        assertEquals(0.0, reader.getFractionConsumed(), delta);
        assertEquals(0, reader.getSplitPointsConsumed());
        assertEquals(BoundedReader.SPLIT_POINTS_UNKNOWN, reader.getSplitPointsRemaining());
        // First record: none consumed, unknown remaining.
        assertTrue(reader.start());
        assertEquals(0, reader.getSplitPointsConsumed());
        assertEquals(BoundedReader.SPLIT_POINTS_UNKNOWN, reader.getSplitPointsRemaining());
        // Second record: 1 consumed, know that we're on the last record.
        assertTrue(reader.advance());
        assertEquals(1, reader.getSplitPointsConsumed());
        assertEquals(1, reader.getSplitPointsRemaining());
        // Confirm empty and check post-conditions
        assertFalse(reader.advance());
        assertEquals(1.0, reader.getFractionConsumed(), delta);
        assertEquals(2, reader.getSplitPointsConsumed());
        assertEquals(0, reader.getSplitPointsRemaining());
    }
}
/**
 * Asynchronously checks whether {@code role} may perform {@code operation} on
 * {@code topicName}. Returns an already-completed {@code true} when
 * authorization is disabled; otherwise delegates to the provider. When debug
 * logging is on, the outcome (or failure) is logged as the future completes.
 */
@Override
public CompletableFuture<Boolean> allowTopicOperationAsync(TopicName topicName, TopicOperation operation,
                                                           String role, AuthenticationDataSource authData) {
    if (log.isDebugEnabled()) {
        log.debug("Check if role {} is allowed to execute topic operation {} on topic {}",
                role, operation, topicName);
    }
    // Authorization disabled: everything is permitted.
    if (!this.conf.isAuthorizationEnabled()) {
        return CompletableFuture.completedFuture(true);
    }
    CompletableFuture<Boolean> allowFuture =
            provider.allowTopicOperationAsync(topicName, role, operation, authData);
    // Without debug logging there is nothing to observe — return the future as-is.
    if (!log.isDebugEnabled()) {
        return allowFuture;
    }
    return allowFuture.whenComplete((allowed, exception) -> {
        if (exception != null) {
            log.debug("Failed to check if topic operation {} on topic {} is allowed:"
                    + " role = {}", operation, topicName, role, exception);
        } else if (allowed) {
            log.debug("Topic operation {} on topic {} is allowed: role = {}",
                    operation, topicName, role);
        } else {
            log.debug("Topic operation {} on topic {} is NOT allowed: role = {}",
                    operation, topicName, role);
        }
    });
}
/** Data-driven check: PRODUCE on a topic is allowed or denied per role combination. */
@Test(dataProvider = "roles")
public void testTopicOperationAsync(String role, String originalRole, boolean shouldPass) throws Exception {
    // Blocks on the async authorization result for the PRODUCE operation.
    boolean isAuthorized = authorizationService.allowTopicOperationAsync(TopicName.get("topic"),
            TopicOperation.PRODUCE, originalRole, role, null).get();
    checkResult(shouldPass, isAuthorized);
}
/**
 * Creates a {@link Builder} for a transform result with no watermark hold:
 * the hold is set to {@code BoundedWindow.TIMESTAMP_MAX_VALUE} so it never
 * restrains the output watermark.
 *
 * @param transform the applied transform this result belongs to
 * @return a builder with the maximal (no-op) watermark hold
 */
public static <InputT> Builder<InputT> withoutHold(AppliedPTransform<?, ?, ?> transform) {
    // Fix: use the diamond operator instead of a raw Builder, which triggered an
    // unchecked-conversion warning and erased the InputT type argument.
    return new Builder<>(transform, BoundedWindow.TIMESTAMP_MAX_VALUE);
}
/** A bundle added via addOutput must appear in the built result's output bundles. */
@Test
public void producedBundlesProducedOutputs() {
    UncommittedBundle<Integer> bundle = bundleFactory.createBundle(pc);
    TransformResult<Integer> result =
            StepTransformResult.<Integer>withoutHold(transform).addOutput(bundle).build();
    assertThat(result.getOutputBundles(), containsInAnyOrder(bundle));
}
/**
 * Tabulates multi-label confusion statistics over a set of predictions.
 * <p>
 * Produces: (a) a per-label array of 2x2 "multilabel confusion matrices" (MCM)
 * where, for label i, the row index is the predicted indicator (0/1) and the
 * column index is the ground-truth indicator (0/1) — so (1,1)=TP, (1,0)=FP,
 * (0,1)=FN, (0,0)=TN; (b) a label-vs-label confusion matrix counting how often
 * each predicted label co-occurred with each true label; and (c) the set of
 * labels observed in either the predictions or the ground truth.
 *
 * @param domain      the label domain mapping each label to an id
 * @param predictions the predictions to tabulate
 * @return the MCM array, the confusion matrix, and the observed label set
 * @throws IllegalArgumentException if the UNKNOWN sentinel appears on either
 *         side, or a ground-truth label is not present in the domain
 */
static ConfusionMatrixTuple tabulate(ImmutableOutputInfo<MultiLabel> domain, List<Prediction<MultiLabel>> predictions) {
    // this just keeps track of how many times [class x] was predicted to be [class y]
    DenseMatrix confusion = new DenseMatrix(domain.size(), domain.size());
    Set<MultiLabel> observed = new HashSet<>();
    // One 2x2 matrix per label; see the add(...) calls below for cell meaning.
    DenseMatrix[] mcm = new DenseMatrix[domain.size()];
    for (int i = 0; i < domain.size(); i++) {
        mcm[i] = new DenseMatrix(2, 2);
    }
    int predIndex = 0;
    for (Prediction<MultiLabel> prediction : predictions) {
        MultiLabel predictedOutput = prediction.getOutput();
        MultiLabel trueOutput = prediction.getExample().getOutput();
        // The UNKNOWN sentinel is invalid on both the ground-truth and prediction side.
        if (trueOutput.equals(MultiLabelFactory.UNKNOWN_MULTILABEL)) {
            throw new IllegalArgumentException("The sentinel Unknown MultiLabel was used as a ground truth label at prediction number " + predIndex);
        } else if (predictedOutput.equals(MultiLabelFactory.UNKNOWN_MULTILABEL)) {
            throw new IllegalArgumentException("The sentinel Unknown MultiLabel was predicted by the model at prediction number " + predIndex);
        }
        Set<Label> trueSet = trueOutput.getLabelSet();
        Set<Label> predSet = predictedOutput.getLabelSet();
        //
        // Count true positives and false positives
        for (Label pred : predSet) {
            int idx = domain.getID(new MultiLabel(pred.getLabel()));
            if (trueSet.contains(pred)) {
                //
                // true positive: mcm[i, 1, 1]++
                mcm[idx].add(1, 1, 1d);
            } else {
                //
                // false positive: mcm[i, 1, 0]++
                mcm[idx].add(1, 0, 1d);
            }
            observed.add(new MultiLabel(pred));
        }
        //
        // Count false negatives and populate the confusion table
        for (Label trueLabel : trueSet) {
            int idx = domain.getID(new MultiLabel(trueLabel.getLabel()));
            // getID returns a negative id for labels outside the domain (ground truth only
            // is checked here; predicted labels are assumed to come from the model's domain).
            if (idx < 0) {
                throw new IllegalArgumentException("Unknown label '" + trueLabel.getLabel() + "' found in the ground truth labels at prediction number " + predIndex + ", this label is not known by the model which made the predictions.");
            }
            //
            // Doing two things in this loop:
            // 1) Checking if predSet contains trueLabel
            // 2) Counting the # of times [trueLabel] was predicted to be [predLabel] to populate the confusion table
            boolean found = false;
            for (Label predLabel : predSet) {
                int jdx = domain.getID(new MultiLabel(predLabel.getLabel()));
                confusion.add(jdx, idx, 1d);
                if (predLabel.equals(trueLabel)) {
                    found = true;
                }
            }
            if (!found) {
                //
                // false negative: mcm[i, 0, 1]++
                mcm[idx].add(0, 1, 1d);
            }
            // else { true positive: already counted }
            observed.add(new MultiLabel(trueLabel));
        }
        //
        // True negatives everywhere else
        for (MultiLabel multilabel : domain.getDomain()) {
            Set<Label> labels = multilabel.getLabelSet();
            for (Label label : labels) {
                if (!trueSet.contains(label) && !predSet.contains(label)) {
                    int ix = domain.getID(new MultiLabel(label));
                    mcm[ix].add(0, 0, 1d);
                }
            }
        }
        predIndex++;
    }
    return new ConfusionMatrixTuple(mcm, confusion, observed);
}
/** Hand-computed MCM counts for four multi-label predictions over the domain {a, b, c}. */
@Test
public void testTabulateMultiLabel() {
    MultiLabel a = label("a");
    MultiLabel b = label("b");
    MultiLabel c = label("c");
    List<Prediction<MultiLabel>> predictions = Arrays.asList(
            mkPrediction(label("a"), label("a", "b")),
            mkPrediction(label("c", "b"), label("b")),
            mkPrediction(label("b"), label("b")),
            mkPrediction(label("b"), label("c"))
    );
    ImmutableOutputInfo<MultiLabel> domain = mkDomain(predictions);
    assertEquals(3, domain.size());
    DenseMatrix[] mcm = MultiLabelConfusionMatrix
            .tabulate(domain, predictions)
            .getMCM();
    int aIndex = domain.getID(a);
    int bIndex = domain.getID(b);
    int cIndex = domain.getID(c);
    assertEquals(domain.size(), mcm.length);
    // Expected cell values derived by hand from the four predictions above
    // (row = predicted indicator, column = ground-truth indicator).
    assertEquals(3d, mcm[aIndex].get(0, 0));
    assertEquals(1d, mcm[aIndex].get(1, 1));
    assertEquals(1d, mcm[bIndex].get(0, 1));
    assertEquals(1d, mcm[bIndex].get(1, 0));
    assertEquals(2d, mcm[bIndex].get(1, 1));
    assertEquals(2d, mcm[cIndex].get(0, 0));
    assertEquals(1d, mcm[cIndex].get(0, 1));
    assertEquals(1d, mcm[cIndex].get(1, 0));
}
/**
 * Returns the nested {@code from} block holding the base image parameters.
 *
 * @return the base image configuration (optional in the build script)
 */
@Nested
@Optional
public BaseImageParameters getFrom() {
    return from;
}
/** A malformed platform value ("linux /amd64") must be rejected when the property is read. */
@Test
public void testSystemPropertiesWithInvalidPlatform() {
    System.setProperty("jib.from.platforms", "linux /amd64");
    // The failure surfaces lazily, when getPlatforms() parses the system property.
    assertThrows(IllegalArgumentException.class, testJibExtension.getFrom()::getPlatforms);
}
/**
 * Fetches the profiling information list for the task manager identified in
 * the request path. An unknown task executor is translated into a REST 404;
 * every other failure is rethrown unchanged inside a CompletionException.
 */
@Override
protected CompletableFuture<ProfilingInfoList> handleRequest(
        @Nonnull HandlerRequest<EmptyRequestBody> request,
        @Nonnull ResourceManagerGateway gateway)
        throws RestHandlerException {
    final ResourceID taskManagerId = request.getPathParameter(TaskManagerIdPathParameter.class);
    final ResourceManagerGateway resourceManagerGateway =
            getResourceManagerGateway(resourceManagerGatewayRetriever);
    return resourceManagerGateway
            .requestTaskManagerProfilingList(taskManagerId, getTimeout())
            .thenApply(ProfilingInfoList::new)
            .exceptionally(
                    throwable -> {
                        final Throwable stripped =
                                ExceptionUtils.stripCompletionException(throwable);
                        // Only an unknown task executor maps to a 404; anything else
                        // propagates with its original (unstripped) throwable.
                        if (!(stripped instanceof UnknownTaskExecutorException)) {
                            throw new CompletionException(throwable);
                        }
                        throw new CompletionException(
                                new RestHandlerException(
                                        "Could not find TaskExecutor " + taskManagerId,
                                        HttpResponseStatus.NOT_FOUND,
                                        stripped));
                    });
}
/**
 * An UnknownTaskExecutorException from the gateway must surface as a REST 404
 * whose message names the missing task executor.
 */
@Test
void testGetTaskManagerProfilingListForUnknownTaskExecutorException() throws Exception {
    resourceManagerGateway.setRequestProfilingListFunction(
            EXPECTED_TASK_MANAGER_ID ->
                    FutureUtils.completedExceptionally(
                            new UnknownTaskExecutorException(EXPECTED_TASK_MANAGER_ID)));
    try {
        taskManagerProfilingListHandler
                .handleRequest(handlerRequest, resourceManagerGateway)
                .get();
        // Fix: the original test passed vacuously when no exception was thrown.
        // AssertionError is not an ExecutionException, so it propagates past the catch.
        throw new AssertionError("handleRequest was expected to fail with a RestHandlerException");
    } catch (ExecutionException e) {
        final Throwable cause = e.getCause();
        assertThat(cause).isInstanceOf(RestHandlerException.class);
        final RestHandlerException restHandlerException = (RestHandlerException) cause;
        assertThat(restHandlerException.getHttpResponseStatus())
                .isEqualTo(HttpResponseStatus.NOT_FOUND);
        assertThat(restHandlerException.getMessage())
                .contains("Could not find TaskExecutor " + EXPECTED_TASK_MANAGER_ID);
    }
}
/**
 * Two allocations are equal when both their resource and their consumer id
 * match (null-safe comparison via {@link Objects#equals}).
 */
@Override
public boolean equals(Object obj) {
    if (obj == this) {
        return true;
    }
    if (obj instanceof ResourceAllocation) {
        final ResourceAllocation other = (ResourceAllocation) obj;
        return Objects.equals(resource, other.resource)
                && Objects.equals(consumerId, other.consumerId);
    }
    return false;
}
/** Equality groups: allocations are equal iff both resource and consumer id match. */
@Test
public void testEquals() {
    ResourceAllocation alloc1 = new ResourceAllocation(Resources.discrete(D1, P1, VLAN1).resource(), RCID1);
    ResourceAllocation sameAsAlloc1 = new ResourceAllocation(Resources.discrete(D1, P1, VLAN1).resource(), RCID1);
    ResourceAllocation alloc2 = new ResourceAllocation(Resources.discrete(D2, P2, VLAN2).resource(), RCID2);
    ResourceAllocation sameAsAlloc2 = new ResourceAllocation(Resources.discrete(D2, P2, VLAN2).resource(), RCID2);
    // EqualsTester also checks hashCode consistency and cross-group inequality.
    new EqualsTester()
            .addEqualityGroup(alloc1, sameAsAlloc1)
            .addEqualityGroup(alloc2, sameAsAlloc2)
            .testEquals();
}
/**
 * FEEL max(list): returns the greatest element of the list, or an error event
 * when the list is null/empty or its items are not mutually comparable.
 *
 * @param list the candidate values
 * @return the maximum, or an InvalidParametersEvent error result
 */
public FEELFnResult<Object> invoke(@ParameterName("list") List list) {
    // Guard clause: an absent or empty list has no maximum.
    if (list == null || list.isEmpty()) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null or empty"));
    }
    try {
        return FEELFnResult.ofResult(Collections.max(list, new InterceptNotComparableComparator()));
    } catch (ClassCastException cce) {
        // Raised (via the intercepting comparator) when items cannot be ordered.
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "contains items that are not comparable"));
    }
}
/** max() over integer lists: element order must not affect the result. */
@Test
void invokeListOfIntegers() {
    FunctionTestUtil.assertResult(maxFunction.invoke(Collections.singletonList(1)), 1);
    FunctionTestUtil.assertResult(maxFunction.invoke(Arrays.asList(1, 2, 3)), 3);
    FunctionTestUtil.assertResult(maxFunction.invoke(Arrays.asList(1, 3, 2)), 3);
    FunctionTestUtil.assertResult(maxFunction.invoke(Arrays.asList(3, 1, 2)), 3);
}
/**
 * Imports an uploaded CA into a fresh PKCS#12 keystore and persists it.
 * <p>
 * Each part is either a PEM bundle (detected by a BEGIN CERTIFICATE marker,
 * read via {@code PemCaReader}) or, failing that, treated as a binary PKCS#12
 * keystore loaded directly. The assembled keystore is adapted and written to
 * the database, then a CA-changed event is fired.
 *
 * @param password password protecting the uploaded key material; may be null
 * @param parts    multipart uploads containing the CA material
 * @throws CACreationException if the upload cannot be parsed or stored
 */
public void upload(@Nullable String password, List<FormDataBodyPart> parts) throws CACreationException {
    final var providedPassword = password == null ? null : password.toCharArray();
    // TODO: if the upload consists of more than one file, handle accordingly
    // or: decide that it's always only one file containing all certificates
    try {
        // Empty in-memory PKCS#12 keystore (BouncyCastle provider) collecting the upload.
        KeyStore keyStore = KeyStore.getInstance(PKCS12, "BC");
        keyStore.load(null, null);
        for (BodyPart part : parts) {
            InputStream is = part.getEntityAs(InputStream.class);
            byte[] bytes = is.readAllBytes();
            String pem = new String(bytes, StandardCharsets.UTF_8);
            // Test, if upload is PEM file, must contain at least a certificate
            if (pem.contains("-----BEGIN CERTIFICATE")) {
                var ca = PemCaReader.readCA(pem, password);
                keyStore.setKeyEntry(CA_KEY_ALIAS, ca.privateKey(), providedPassword, ca.certificates().toArray(new Certificate[0]));
            } else {
                // Not PEM: assume the raw bytes are an (optionally protected) PKCS#12 keystore.
                ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
                keyStore.load(bais, providedPassword);
            }
        }
        writeToDatabase(adaptUploadedKeystore(keyStore, providedPassword));
        triggerCaChangedEvent();
    } catch (IOException | KeyStoreStorageException | GeneralSecurityException ex) {
        LOG.error("Could not write CA: " + ex.getMessage(), ex);
        throw new CACreationException("Could not write CA: " + ex.getMessage(), ex);
    }
}
/**
 * Uploading a PEM bundle with an encrypted key must persist a CA whose
 * keystore is protected with the service password and holds the certificate
 * under the "ca" alias.
 */
@Test
void testUpload() throws Exception {
    final Configuration configuration = Mockito.spy(new Configuration());
    final String password = RandomStringUtils.randomAlphanumeric(20);
    final EncryptedValueService encryptionService = new EncryptedValueService(password);
    final InMemoryClusterConfigService clusterConfigService = new InMemoryClusterConfigService();
    final CaPersistenceService service = new CaPersistenceService(configuration, password,
            Mockito.mock(ClusterEventBus.class), clusterConfigService, encryptionService);
    final String existingCaPassword = "foobar";
    // TODO: it would be nice to generate the PemCaReaderTest.PEM_CERT_WITH_ENCRYPTED_KEY here instead of using hardcoded
    // values. But so far haven't found any way to PEM-encode private key correctly, so the upload accepts it.
    service.upload(existingCaPassword, List.of(
            getBodyPart(PemCaReaderTest.PEM_CERT_WITH_ENCRYPTED_KEY)
    ));
    // The stored CA metadata must be retrievable.
    Assertions.assertThat(service.get())
            .isPresent()
            .hasValueSatisfying(info -> {
                Assertions.assertThat(info.id()).isEqualTo("GRAYLOG CA");
                Assertions.assertThat(info.type()).isEqualTo(CAType.GENERATED);
            });
    // The keystore must be re-protected with the service password, not the upload's.
    Assertions.assertThat(service.loadKeyStore())
            .isPresent()
            .hasValueSatisfying(k -> {
                Assertions.assertThat(k.password()).isEqualTo(password);
                try {
                    Assertions.assertThat(k.keyStore().getCertificate("ca")).isNotNull();
                } catch (KeyStoreException e) {
                    throw new RuntimeException(e);
                }
            });
}
/**
 * Left-pads {@code input} with repetitions of {@code padding} up to
 * {@code targetLen} characters; when the input is longer than the target it
 * is truncated to the first {@code targetLen} characters.
 *
 * @param input     string to be padded; null propagates to null
 * @param targetLen target length; null or negative yields null
 * @param padding   padding string; null or empty yields null
 * @return the padded (or truncated) string, or null for invalid arguments
 */
@Udf
public String lpad(
    @UdfParameter(description = "String to be padded") final String input,
    @UdfParameter(description = "Target length") final Integer targetLen,
    @UdfParameter(description = "Padding string") final String padding) {
    if (input == null) {
        return null;
    }
    if (padding == null || padding.isEmpty() || targetLen == null || targetLen < 0) {
        return null;
    }
    // Number of pad characters needed in front of the input (zero if input is long enough).
    final int padLength = Math.max(targetLen - input.length(), 0);
    final StringBuilder padded = new StringBuilder(targetLen + padding.length());
    while (padded.length() < padLength) {
        padded.append(padding);
    }
    // Trim any overshoot from a multi-character padding string.
    padded.setLength(padLength);
    padded.append(input);
    // Truncate when the input alone exceeds the target length.
    padded.setLength(targetLen);
    return padded.toString();
}
/** An empty padding string cannot pad anything, so the UDF must return null. */
@Test
public void shouldReturnNullForEmptyPaddingString() {
    final String result = udf.lpad("foo", 4, "");
    assertThat(result, is(nullValue()));
}
@Override public Ftp reconnectIfTimeout() { String pwd = null; try { pwd = pwd(); } catch (IORuntimeException fex) { // ignore } if (pwd == null) { return this.init(); } return this; }
/**
 * Manual check (disabled; needs a live FTP host): after an idle period the
 * connection should time out, and reconnectIfTimeout must restore it so a
 * subsequent pwd succeeds.
 */
@Test
@Disabled
public void reconnectIfTimeoutTest() throws InterruptedException {
    final Ftp ftp = new Ftp("looly.centos");
    Console.log("打印pwd: " + ftp.pwd());
    Console.log("休眠一段时间,然后再次发送pwd命令,抛出异常表明连接超时");
    // Sleep past the server's idle timeout so the next command fails.
    Thread.sleep(35 * 1000);
    try {
        Console.log("打印pwd: " + ftp.pwd());
    } catch (final FtpException e) {
        // Expected: the stale connection throws here.
        e.printStackTrace();
    }
    Console.log("判断是否超时并重连...");
    ftp.reconnectIfTimeout();
    Console.log("打印pwd: " + ftp.pwd());
    IoUtil.close(ftp);
}
/**
 * Materializes this scan as a Spark {@link Batch}, carrying over the planned
 * task groups, grouping key type and expected read schema. The scan's hash
 * code is passed through as an identifier for the batch.
 */
@Override
public Batch toBatch() {
    SparkBatch batch =
        new SparkBatch(
            sparkContext,
            table,
            readConf,
            groupingKeyType(),
            taskGroups(),
            expectedSchema,
            hashCode());
    return batch;
}
/**
 * On an unpartitioned table a truncate-UDF predicate (and its negation) cannot
 * prune anything: all 10 input partitions must remain planned.
 */
@Test
public void testUnpartitionedTruncateString() throws Exception {
    createUnpartitionedTable(spark, tableName);
    SparkScanBuilder builder = scanBuilder();
    TruncateFunction.TruncateString function = new TruncateFunction.TruncateString();
    UserDefinedScalarFunc udf = toUDF(function, expressions(intLit(4), fieldRef("data")));
    Predicate predicate = new Predicate("<>", expressions(udf, stringLit("data")));
    pushFilters(builder, predicate);
    Batch scan = builder.build().toBatch();
    assertThat(scan.planInputPartitions().length).isEqualTo(10);
    // NOT NotEqual
    builder = scanBuilder();
    predicate = new Not(predicate);
    pushFilters(builder, predicate);
    scan = builder.build().toBatch();
    assertThat(scan.planInputPartitions().length).isEqualTo(10);
}
/**
 * Asynchronously sends the exchange to the STOMP endpoint.
 * <p>
 * Returns {@code false} when the send was handed off (the endpoint will invoke
 * the callback later, per the Camel async-processor contract). On a synchronous
 * failure the exception is stored on the exchange, the callback is completed
 * synchronously, and {@code true} is returned.
 */
@Override
public boolean process(Exchange exchange, AsyncCallback callback) {
    try {
        stompEndpoint.send(exchange, callback);
        // Hand-off succeeded: completion is asynchronous.
        return false;
    } catch (Exception e) {
        exchange.setException(e);
        // Failed before hand-off: complete synchronously.
        callback.done(true);
        return true;
    }
}
/**
 * End-to-end produce test: messages pushed through the direct endpoint must
 * arrive on the STOMP subscription with both the body prefix and the custom
 * header intact, within the 20-second latch window.
 */
@Test
public void testProduce() throws Exception {
    context.addRoutes(createRouteBuilder());
    context.start();
    Stomp stomp = createStompClient();
    final BlockingConnection subscribeConnection = stomp.connectBlocking();
    // Subscribe to the "test" destination before producing anything.
    StompFrame frame = new StompFrame(SUBSCRIBE);
    frame.addHeader(DESTINATION, StompFrame.encodeHeader("test"));
    frame.addHeader(ID, subscribeConnection.nextId());
    subscribeConnection.request(frame);
    final CountDownLatch latch = new CountDownLatch(numberOfMessages);
    // Consumer thread: verifies each frame and counts down the latch.
    Thread thread = new Thread(new Runnable() {
        public void run() {
            for (int i = 0; i < numberOfMessages; i++) {
                try {
                    StompFrame frame = subscribeConnection.receive();
                    assertTrue(frame.contentAsString().startsWith("test message "));
                    assertTrue(frame.getHeader(new AsciiBuffer(HEADER)).ascii().toString().startsWith(HEADER_VALUE));
                    latch.countDown();
                } catch (Exception e) {
                    // A receive failure aborts the loop; the latch then times out below.
                    LOG.warn("Unhandled exception receiving STOMP data: {}", e.getMessage(), e);
                    break;
                }
            }
        }
    });
    thread.start();
    Endpoint endpoint = context.getEndpoint("direct:foo");
    Producer producer = endpoint.createProducer();
    for (int i = 0; i < numberOfMessages; i++) {
        Exchange exchange = endpoint.createExchange();
        exchange.getIn().setBody(("test message " + i).getBytes("UTF-8"));
        exchange.getIn().setHeader(HEADER, HEADER_VALUE);
        producer.process(exchange);
    }
    latch.await(20, TimeUnit.SECONDS);
    assertEquals(0, latch.getCount(), "Messages not consumed = " + latch.getCount());
}
/**
 * Whether start monitoring is enabled in the flow-control plugin config.
 *
 * @return the configured enableStartMonitor flag
 */
public static boolean isStartMonitor() {
    return CONFIG.isEnableStartMonitor();
}
/** isStartMonitor must mirror the enableStartMonitor flag read from the plugin config. */
@Test
public void testSwitch() {
    FlowControlConfig metricConfig = new FlowControlConfig();
    metricConfig.setEnableStartMonitor(true);
    // Static-mock the config manager so MonitorUtils observes our flag.
    try (MockedStatic<PluginConfigManager> pluginConfigManagerMockedStatic = Mockito.mockStatic(PluginConfigManager.class)) {
        pluginConfigManagerMockedStatic.when(() -> PluginConfigManager.getPluginConfig(FlowControlConfig.class))
                .thenReturn(metricConfig);
        Assert.assertTrue(MonitorUtils.isStartMonitor());
    }
}
/**
 * Computes the list of events describing the difference captured in
 * {@code params}: per-node diffs and the whole-cluster diff are computed
 * against baseline-adjusted params, while derived bucket-space state diffs
 * use the params as given.
 *
 * @param params the before/after state to diff
 * @return the accumulated events, in emission order
 */
public static List<Event> computeEventDiff(final Params params) {
    final List<Event> emitted = new ArrayList<>();
    // Baseline params are recomputed per call site, matching each emitter's expectations.
    emitPerNodeDiffEvents(createBaselineParams(params), emitted);
    emitWholeClusterDiffEvent(createBaselineParams(params), emitted);
    emitDerivedBucketSpaceStatesDiffEvents(params, emitted);
    return emitted;
}
/** Dropping distribution config (present before, null after) must emit exactly one cluster-level event. */
@Test
void disabling_distribution_config_in_state_bundle_emits_cluster_level_event() {
    var fixture = EventFixture.createForNodes(3)
            .clusterStateBefore("distributor:3 storage:3")
            .clusterStateAfter("distributor:3 storage:3")
            .distributionConfigBefore(flatClusterDistribution(3))
            .distributionConfigAfter(null);
    var events = fixture.computeEventDiff();
    assertThat(events.size(), equalTo(1));
    assertThat(events, hasItem(clusterEventWithDescription(
            "Cluster controller is no longer the authoritative source for distribution config")));
}
/**
 * Whether the broker may assume an identity for this connection without
 * authenticating it: always when anonymous access is allowed, otherwise only
 * for system (VM) connections that are not configured to require auth.
 */
protected boolean isAssumeIdentity(ConnectionReference conn) {
    if (isAnonymousAccessAllowed()) {
        // Short-circuit: the connection (possibly null) is never inspected.
        return true;
    }
    return isSystemConnection(conn) && !isVmConnectionAuthenticationRequired();
}
/** With anonymous access allowed, identity is assumed regardless of the connection. */
@Test
public void testIsAssumeIdentity() {
    policy.setAnonymousAccessAllowed(true);
    // isAnonymousAccessAllowed() short-circuits, so the null connection is never dereferenced.
    assertTrue(policy.isAssumeIdentity(null));
}
/**
 * Parses a single migration statement, applying variable substitution.
 * <p>
 * The statement is parsed once so {@code ${...}} variables can be substituted,
 * then the substituted text is re-parsed to classify it. Plain statements
 * yield no AST; any other command is built into an AST for the caller.
 *
 * @param sql       a single SQL statement of a supported statement type
 * @param variables variable name/value pairs used for substitution
 * @return the substituted text plus, for non-plain statements, the parsed AST
 * @throws MigrationException if the statement fails to parse
 */
public static ParsedCommand parse( // CHECKSTYLE_RULES.ON: CyclomaticComplexity
    final String sql, final Map<String, String> variables) {
    // Rejects statement types migrations do not support (e.g. PRINT).
    validateSupportedStatementType(sql);
    final String substituted;
    try {
        substituted = VariableSubstitutor.substitute(KSQL_PARSER.parse(sql).get(0), variables);
    } catch (ParseFailedException e) {
        throw new MigrationException(String.format(
            "Failed to parse the statement. Statement: %s. Reason: %s",
            sql, e.getMessage()));
    }
    // Second parse runs on the fully substituted text.
    final SqlBaseParser.SingleStatementContext statementContext = KSQL_PARSER.parse(substituted)
        .get(0).getStatement();
    final boolean isStatement = StatementType.get(statementContext.statement().getClass())
        == StatementType.STATEMENT;
    return new ParsedCommand(substituted,
        isStatement ? Optional.empty() : Optional.of(new AstBuilder(TypeRegistry.EMPTY)
            .buildStatement(statementContext)));
}
/** PRINT is not a supported migration statement type and must be rejected. */
@Test
public void shouldThrowOnPrintStatement() {
    // When:
    final MigrationException e = assertThrows(MigrationException.class, () -> parse("print 'my_topic';"));
    // Then:
    assertThat(e.getMessage(), is("'PRINT' statements are not supported."));
}
/**
 * Weighted luminance of an ARGB color using the 0.2126/0.7152/0.0722
 * red/green/blue coefficients, rounded up to the nearest integer.
 *
 * @param color the packed color value
 * @return luminance in [0, 255]
 */
@VisibleForTesting
static int luminance(@ColorInt int color) {
    final double weighted =
            Color.red(color) * 0.2126
                    + Color.green(color) * 0.7152
                    + Color.blue(color) * 0.0722;
    return (int) Math.ceil(weighted);
}
/** Expected luminance values for the standard Android colors (weighted sum, rounded up). */
@Test
public void testLuminance() {
    Assert.assertEquals(255, OverlayDataNormalizer.luminance(Color.WHITE));
    Assert.assertEquals(0, OverlayDataNormalizer.luminance(Color.BLACK));
    Assert.assertEquals(136, OverlayDataNormalizer.luminance(Color.GRAY));
    Assert.assertEquals(19, OverlayDataNormalizer.luminance(Color.BLUE));
    Assert.assertEquals(183, OverlayDataNormalizer.luminance(Color.GREEN));
    Assert.assertEquals(55, OverlayDataNormalizer.luminance(Color.RED));
    Assert.assertEquals(73, OverlayDataNormalizer.luminance(Color.MAGENTA));
}
/**
 * Starts a new case (process instance) for the given process definition.
 *
 * @param processDefinition the process to instantiate; must not be null
 * @param rawInputs         the contract inputs; must not be null
 * @return the instantiation endpoint's response
 * @throws IllegalArgumentException if either argument is null
 * @throws Exception                on input preparation or HTTP failures
 */
public CaseCreationResponse startCase(
    ProcessDefinitionResponse processDefinition,
    Map<String, Serializable> rawInputs) throws Exception {
    if (processDefinition == null) {
        throw new IllegalArgumentException("ProcessDefinition is null");
    }
    if (rawInputs == null) {
        throw new IllegalArgumentException("The contract input is null");
    }
    // Converts raw values into the shape the contract expects (e.g. file uploads).
    Map<String, Serializable> inputs = BonitaAPIUtil.getInstance(bonitaApiConfig)
        .prepareInputs(processDefinition, rawInputs);
    WebTarget resource = getBaseResource().path("process/{processId}/instantiation")
        .resolveTemplate("processId", processDefinition.getId());
    return resource.request().accept(MediaType.APPLICATION_JSON)
        .post(entity(inputs, MediaType.APPLICATION_JSON), CaseCreationResponse.class);
}
/** startCase must reject a null contract input before any HTTP call is attempted. */
@Test
public void testStartCaseNUllContractInput() {
    BonitaAPI bonitaApi = BonitaAPIBuilder
            .build(new BonitaAPIConfig("hostname", "port", "username", "password"));
    ProcessDefinitionResponse processDefinition = new ProcessDefinitionResponse();
    assertThrows(IllegalArgumentException.class, () -> bonitaApi.startCase(processDefinition, null));
}
/** Returns the schema of the underlying data frame. */
@Override
public StructType schema() {
    return df.schema();
}
/** The data frame's schema must expose the expected column names, types and nominal scale. */
@Test
public void testSchema() {
    // Diagnostic output only; the actual check is the assertEquals below.
    System.out.println("schema");
    System.out.println(df.schema());
    System.out.println(df.structure());
    System.out.println(df);
    smile.data.type.StructType schema = DataTypes.struct(
            new StructField("age", DataTypes.IntegerType),
            new StructField("birthday", DataTypes.DateType),
            new StructField("gender", DataTypes.ByteType, new NominalScale("Male", "Female")),
            new StructField("name", DataTypes.StringType),
            new StructField("salary", DataTypes.object(Double.class))
    );
    assertEquals(schema, df.schema());
}
/** Resets the local aggregate to the identity element for max (negative infinity). */
@Override
public void resetLocal() {
    this.max = Double.NEGATIVE_INFINITY;
}
/** After resetLocal the accumulator must return to Double.NEGATIVE_INFINITY. */
@Test
void testResetLocal() {
    DoubleMaximum max = new DoubleMaximum();
    double value = 13.57902468;
    max.add(value);
    assertThat(max.getLocalValue()).isCloseTo(value, within(0.0));
    max.resetLocal();
    assertThat(max.getLocalValue()).isCloseTo(Double.NEGATIVE_INFINITY, within(0.0));
}
/**
 * Creates a feature set from the given features.
 *
 * @param features the features to include (may be empty)
 * @return a SerdeFeatures wrapping an immutable copy of the features
 */
public static SerdeFeatures of(final SerdeFeature... features) {
    return new SerdeFeatures(ImmutableSet.copyOf(features));
}
/** An empty feature set must be omitted from JSON serialization entirely. */
@Test
public void shouldExcludeFeaturesIfEmpty() throws Exception {
    // Given:
    final Pojo formats = new Pojo(SerdeFeatures.of());
    // When:
    final String json = MAPPER.writeValueAsString(formats);
    // Then:
    assertThat(json, is("{}"));
}
/**
 * Resolves the output schema produced by the given execution step for the
 * given input schema, dispatching on the step's concrete class.
 *
 * @param step   the step whose output schema is required
 * @param schema the schema of the step's input
 * @return the resolved output schema
 * @throws IllegalStateException if no handler is registered for the step's class
 */
public LogicalSchema resolve(final ExecutionStep<?> step, final LogicalSchema schema) {
    // A missing handler is a programming error (new step type not wired up).
    return Optional.ofNullable(HANDLERS.get(step.getClass()))
        .map(h -> h.handle(this, schema, step))
        .orElseThrow(() -> new IllegalStateException("Unhandled step class: " + step.getClass()));
}
/** StreamFlatMap must append one synthesised value column per table function to the schema. */
@Test
public void shouldResolveSchemaForStreamFlatMap() {
    // Given:
    givenTableFunction("EXPLODE", SqlTypes.DOUBLE);
    final StreamFlatMap<?> step = new StreamFlatMap<>(
            PROPERTIES,
            streamSource,
            ImmutableList.of(functionCall("EXPLODE", "BANANA"))
    );
    // When:
    final LogicalSchema result = resolver.resolve(step, SCHEMA);
    // Then: original columns preserved, plus the EXPLODE result as a synthesised column.
    assertThat(result, is(
            LogicalSchema.builder()
                    .keyColumn(ColumnName.of("K0"), SqlTypes.INTEGER)
                    .valueColumn(ColumnName.of("ORANGE"), SqlTypes.INTEGER)
                    .valueColumn(ColumnName.of("APPLE"), SqlTypes.BIGINT)
                    .valueColumn(ColumnName.of("BANANA"), SqlTypes.STRING)
                    .valueColumn(ColumnNames.synthesisedSchemaColumn(0), SqlTypes.DOUBLE)
                    .build())
    );
}
/**
 * Looks up a storage volume by name under the read lock.
 *
 * @param svName the storage volume name
 * @return the storage volume, or null when no file store with that name exists
 * @throws SemanticException if the underlying lookup fails
 *         (NOTE(review): only the message is kept — the DdlException cause is
 *         dropped; confirm whether SemanticException can carry a cause)
 */
@Override
public StorageVolume getStorageVolumeByName(String svName) {
    try (LockCloseable lock = new LockCloseable(rwLock.readLock())) {
        try {
            FileStoreInfo fsInfo =
                    GlobalStateMgr.getCurrentState().getStarOSAgent().getFileStoreByName(svName);
            return fsInfo == null ? null : StorageVolume.fromFileStoreInfo(fsInfo);
        } catch (DdlException e) {
            throw new SemanticException(e.getMessage());
        }
    }
}
/** Creating an HDFS HA storage volume, then disabling it via update, must round-trip. */
@Test
public void testCreateHDFS() throws DdlException, AlreadyExistsException {
    String svName = "test";
    // create
    StorageVolumeMgr svm = new SharedDataStorageVolumeMgr();
    List<String> locations = Arrays.asList("hdfs://abc");
    // HDFS nameservice (HA) connection parameters.
    Map<String, String> storageParams = new HashMap<>();
    storageParams.put("dfs.nameservices", "ha_cluster");
    storageParams.put("dfs.ha.namenodes.ha_cluster", "ha_n1,ha_n2");
    storageParams.put("dfs.namenode.rpc-address.ha_cluster.ha_n1", "<hdfs_host>:<hdfs_port>");
    storageParams.put("dfs.namenode.rpc-address.ha_cluster.ha_n2", "<hdfs_host>:<hdfs_port>");
    String svKey = svm.createStorageVolume(svName, "hdfs", locations, storageParams, Optional.empty(), "");
    Assert.assertEquals(true, svm.exists(svName));
    // Update with an additional param and enabled=false; the flag must be persisted.
    storageParams.put("dfs.client.failover.proxy.provider",
            "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
    svm.updateStorageVolume("test", storageParams, Optional.of(false), "");
    Assert.assertEquals(false, svm.getStorageVolumeByName(svName).getEnabled());
}
/**
 * Convenience overload: compiles the regex and delegates to the
 * {@code Pattern}-based factory.
 *
 * @param regex       the pattern to match within each element
 * @param replacement the replacement for the first match
 * @return the configured ReplaceFirst transform
 */
public static ReplaceFirst replaceFirst(String regex, String replacement) {
    return replaceFirst(Pattern.compile(regex), replacement);
}
/** Only the first match per element is replaced; non-matching elements pass through unchanged. */
@Test
@Category(NeedsRunner.class)
public void testReplaceFirstMixed() {
    PCollection<String> output =
            p.apply(Create.of("abc", "xjx", "yjy", "zjz", "def"))
                    .apply(Regex.replaceFirst("[xyz]", "new"));
    PAssert.that(output).containsInAnyOrder("abc", "newjx", "newjy", "newjz", "def");
    p.run();
}
/**
 * Splits a peer id string of the form {@code host:port[:idx[:priority]]}.
 * <p>
 * IPv6 hosts are written in brackets, e.g. {@code [::1]:8080}; the bracketed
 * address is kept intact as the first element and the remainder is split on
 * ':' (empty tokens preserved).
 *
 * @param s the peer id string
 * @return the components, host first
 * @throws IllegalArgumentException if a bracketed IPv6 address is malformed
 */
public static String[] parsePeerId(final String s) {
    if (s.startsWith(IPV6_START_MARK) && StringUtils.containsIgnoreCase(s, IPV6_END_MARK)) {
        String ipv6Addr;
        if (s.endsWith(IPV6_END_MARK)) {
            // The whole string is just the bracketed address (no port/extras).
            ipv6Addr = s;
        } else {
            // Cut the address at the closing bracket, inclusive.
            ipv6Addr = s.substring(0, (s.indexOf(IPV6_END_MARK) + 1));
        }
        if (!isIPv6(ipv6Addr)) {
            throw new IllegalArgumentException("The IPv6 address(\"" + ipv6Addr + "\") is incorrect.");
        }
        // Everything after the bracketed address, minus a leading ':' separator.
        String tempString = s.substring((s.indexOf(ipv6Addr) + ipv6Addr.length()));
        if (tempString.startsWith(":")) {
            tempString = tempString.substring(1);
        }
        String[] tempArr = StringUtils.splitPreserveAllTokens(tempString, ':');
        // Prepend the intact IPv6 address to the remaining tokens.
        String[] result = new String[1 + tempArr.length];
        result[0] = ipv6Addr;
        System.arraycopy(tempArr, 0, result, 1, tempArr.length);
        return result;
    } else {
        // IPv4 / hostname: a plain split, keeping empty tokens.
        return StringUtils.splitPreserveAllTokens(s, ':');
    }
}
/**
 * parsePeerId must split host:port[:idx[:priority]] for IPv4 and bracketed
 * IPv6 hosts, and reject malformed bracketed addresses.
 */
@Test
public void testParsePeerId() {
    String pid = "192.168.1.88:5566";
    String[] result = Utils.parsePeerId(pid);
    String[] expecteds = { "192.168.1.88", "5566" };
    Assert.assertTrue(result.length == 2);
    Assert.assertArrayEquals(expecteds, result);
    pid = "[fe80:0:0:0:6450:aa3c:cd98:ed0f]:8847";
    result = Utils.parsePeerId(pid);
    expecteds = new String[] { "[fe80:0:0:0:6450:aa3c:cd98:ed0f]", "8847" };
    Assert.assertTrue(result.length == 2);
    Assert.assertArrayEquals(expecteds, result);
    pid = "192.168.1.88:5566:9";
    result = Utils.parsePeerId(pid);
    expecteds = new String[] { "192.168.1.88", "5566", "9" };
    Assert.assertTrue(result.length == 3);
    Assert.assertArrayEquals(expecteds, result);
    pid = "[fe80:0:0:0:6450:aa3c:cd98:ed0f]:8847:9";
    result = Utils.parsePeerId(pid);
    expecteds = new String[] { "[fe80:0:0:0:6450:aa3c:cd98:ed0f]", "8847", "9" };
    Assert.assertTrue(result.length == 3);
    Assert.assertArrayEquals(expecteds, result);
    pid = "192.168.1.88:5566:0:6";
    result = Utils.parsePeerId(pid);
    expecteds = new String[] { "192.168.1.88", "5566", "0", "6" };
    Assert.assertTrue(result.length == 4);
    Assert.assertArrayEquals(expecteds, result);
    pid = "[fe80:0:0:0:6450:aa3c:cd98:ed0f]:8847:0:6";
    result = Utils.parsePeerId(pid);
    expecteds = new String[] { "[fe80:0:0:0:6450:aa3c:cd98:ed0f]", "8847", "0", "6" };
    Assert.assertTrue(result.length == 4);
    Assert.assertArrayEquals(expecteds, result);
    // Malformed bracketed hosts must throw.
    boolean ex1 = false;
    try {
        pid = "[192.168.1].88:eee:x:b:j";
        Utils.parsePeerId(pid);
    } catch (Exception e) {
        ex1 = true;
    }
    Assert.assertTrue(ex1);
    boolean ex2 = false;
    try {
        pid = "[dsfsadf]:eee:x:b:j";
        Utils.parsePeerId(pid);
    } catch (Exception e) {
        ex2 = true;
    }
    Assert.assertTrue(ex2);
}
/**
 * Handles a method-call command: reads the target object id, the method name and the
 * arguments from the protocol stream (in that fixed order), invokes the method, then
 * writes the encoded return value back. The read order mirrors the wire format and
 * must not be changed.
 *
 * @throws Py4JException if invocation fails
 * @throws IOException on stream errors
 */
@Override
public void execute(String commandName, BufferedReader reader, BufferedWriter writer)
        throws Py4JException, IOException {
    String targetObjectId = reader.readLine();
    String methodName = reader.readLine();
    List<Object> arguments = getArguments(reader);
    ReturnObject returnObject = invokeMethod(methodName, targetObjectId, arguments);
    String returnCommand = Protocol.getOutputCommand(returnObject);
    logger.finest("Returning command: " + returnCommand);
    writer.write(returnCommand);
    // Flush so the client does not block waiting for the reply.
    writer.flush();
}
/** Sends a call command with a single char argument ("sc") and expects an object reference reply. */
@Test
public void testCharMethod() {
    // target id, method name, char arg 'c' encoded as "sc", end marker
    String inputCommand = target + "\nmethod4\nsc\ne\n";
    try {
        command.execute("c", new BufferedReader(new StringReader(inputCommand)), writer);
        // "!yro1\n" = success, return object with id o1
        assertEquals("!yro1\n", sWriter.toString());
    } catch (Exception e) {
        e.printStackTrace();
        fail();
    }
}
@Override @CacheEvict(cacheNames = RedisKeyConstants.OAUTH_CLIENT, allEntries = true) // allEntries 清空所有缓存,因为可能修改到 clientId 字段,不好清理 public void updateOAuth2Client(OAuth2ClientSaveReqVO updateReqVO) { // 校验存在 validateOAuth2ClientExists(updateReqVO.getId()); // 校验 Client 未被占用 validateClientIdExists(updateReqVO.getId(), updateReqVO.getClientId()); // 更新 OAuth2ClientDO updateObj = BeanUtils.toBean(updateReqVO, OAuth2ClientDO.class); oauth2ClientMapper.updateById(updateObj); }
/** Updating a non-existent client must fail with OAUTH2_CLIENT_NOT_EXISTS. */
@Test
public void testUpdateOAuth2Client_notExists() {
    // Prepare a request whose id matches no stored client
    OAuth2ClientSaveReqVO reqVO = randomPojo(OAuth2ClientSaveReqVO.class);
    // Call and assert the service exception
    assertServiceException(() -> oauth2ClientService.updateOAuth2Client(reqVO),
            OAUTH2_CLIENT_NOT_EXISTS);
}
/** Returns the configured output file path (may be {@code null} if not set). */
public String getOutput() {
    return output;
}
/** The "output" URI option must be propagated into the endpoint configuration. */
@Test
public void testOutputParamInRobotFrameworkCamelConfigurations() throws Exception {
    RobotFrameworkEndpoint robotFrameworkEndpoint = createEndpointWithOption("output=customOutput.log");
    assertEquals("customOutput.log", robotFrameworkEndpoint.getConfiguration().getOutput());
}
/**
 * Convenience overload: parses the semantic-property annotation strings for a
 * dual-input function, delegating to the full variant with skipIncompatibleTypes=false.
 */
public static <T> void getSemanticPropsDualFromString(
        DualInputSemanticProperties result,
        String[] forwardedFirst,
        String[] forwardedSecond,
        String[] nonForwardedFirst,
        String[] nonForwardedSecond,
        String[] readFieldsFirst,
        String[] readFieldsSecond,
        TypeInformation<?> inType1,
        TypeInformation<?> inType2,
        TypeInformation<?> outType) {
    getSemanticPropsDualFromString(
            result,
            forwardedFirst,
            forwardedSecond,
            nonForwardedFirst,
            nonForwardedSecond,
            readFieldsFirst,
            readFieldsSecond,
            inType1,
            inType2,
            outType,
            false); // do not skip incompatible types
}
/** Declaring the same field as both forwarded and non-forwarded (first input) must be rejected. */
@Test
void testForwardedNonForwardedFirstCheck() {
    String[] forwarded = {"1"};
    String[] nonForwarded = {"1"};
    assertThatThrownBy(
            () ->
                    SemanticPropUtil.getSemanticPropsDualFromString(
                            new DualInputSemanticProperties(),
                            forwarded,
                            null,
                            nonForwarded,
                            null,
                            null,
                            null,
                            threeIntTupleType,
                            threeIntTupleType,
                            threeIntTupleType))
            .isInstanceOf(InvalidSemanticAnnotationException.class);
}
/**
 * Creates an object id wrapping the given string.
 *
 * @param id the id value to wrap (not validated here)
 */
public StringObjectId( String id ) {
    this.id = id;
}
/** Exercises the StringObjectId contract: accessors, equals/hashCode, compareTo and the copy constructor. */
@Test
public void testStringObjectId() {
    String expectedId = UUID.randomUUID().toString();
    StringObjectId obj = new StringObjectId( expectedId );
    assertEquals( expectedId, obj.getId() );
    assertEquals( expectedId, obj.toString() );
    assertEquals( expectedId.hashCode(), obj.hashCode() );
    assertFalse( obj.equals( null ) );
    assertTrue( obj.equals( obj ) );
    assertEquals( 0, obj.compareTo( obj ) );
    // Copy constructor yields a distinct but equal-id instance
    StringObjectId clone = new StringObjectId( obj );
    assertNotSame( obj, clone );
    assertEquals( obj.getId(), clone.getId() );
}
/**
 * Produces the next value: a uniformly chosen element of the configured domain
 * when the domain is limited, otherwise the unconstrained generator output.
 *
 * @param ignored unused
 * @return the next generated value
 */
@Override
public Long next(Object ignored) {
    if (limitedDomain) {
        // Pick a uniform random position, then walk the domain to it.
        final int index = random.nextInt(domain.size());
        return domain
            .stream()
            .skip(index)
            .findFirst()
            .orElseThrow(() -> new RuntimeException("Should never happen"));
    }
    return next0();
}
/** With fluctuation 1 each generated value must stay within [i, 2*i] of the step counter. */
@Test
public void testFluctuation() {
    SequenceGenerator generator = new SequenceGenerator.Builder()
            .setMin(1L)
            .setMax(100L)
            .setFluctuation(1)
            .build();
    for (int i = 1; i < 10; i++) {
        Long next = generator.next(null);
        assertTrue(i <= next.intValue());
        assertTrue(i * 2 >= next.intValue());
    }
}
/** Registers the plugin-data handler bean for the modify-response plugin. */
@Bean
public PluginDataHandler modifyResponsePluginDataHandler() {
    return new ModifyResponsePluginDataHandler();
}
/** The auto-configuration must expose exactly one ModifyResponsePluginDataHandler bean. */
@Test
public void testModifyResponsePluginDataHandler() {
    new ApplicationContextRunner()
        .withConfiguration(AutoConfigurations.of(ModifyResponsePluginConfiguration.class))
        .withBean(ModifyResponsePluginConfigurationTest.class)
        .withPropertyValues("debug=true")
        .run(context -> {
            assertThat(context).hasSingleBean(ModifyResponsePluginDataHandler.class);
            PluginDataHandler handler =
                context.getBean("modifyResponsePluginDataHandler", PluginDataHandler.class);
            assertNotNull(handler);
        });
}
/** Computation-step entry point: processes the root (branch) component of the tree. */
@Override
public void execute(Context context) {
    executeForBranch(treeRootHolder.getRoot());
}
/**
 * When the scanner plugins of both languages changed since the base analysis,
 * one "capabilities updated" event must be raised per language.
 */
@Test
public void execute_whenAnalyzerChanged_shouldRaiseEventForAllLanguages() {
    QualityProfile qp1 = qp(QP_NAME_1, LANGUAGE_KEY_1, new Date());
    QualityProfile qp2 = qp(QP_NAME_2, LANGUAGE_KEY_2, new Date());
    mockLanguageInRepository(LANGUAGE_KEY_1);
    mockLanguageInRepository(LANGUAGE_KEY_2);
    // Base measure has no profiles; raw measure has both -> both languages are "new"
    when(measureRepository.getBaseMeasure(treeRootHolder.getRoot(), qualityProfileMetric)).thenReturn(Optional.of(newMeasure()));
    when(measureRepository.getRawMeasure(treeRootHolder.getRoot(), qualityProfileMetric)).thenReturn(Optional.of(newMeasure(qp1, qp2)));
    // Plugins updated after the base analysis timestamp (1L)
    ScannerPlugin scannerPluginLanguage1 = mockScannerPlugin(LANGUAGE_KEY_1, 3L);
    ScannerPlugin scannerPluginLanguage2 = mockScannerPlugin(LANGUAGE_KEY_2, 2L);
    when(analysisMetadataHolder.getScannerPluginsByKey()).thenReturn(Map.of(LANGUAGE_KEY_1, scannerPluginLanguage1, LANGUAGE_KEY_2,
        scannerPluginLanguage2));
    when(analysisMetadataHolder.getBaseAnalysis()).thenReturn(new Analysis.Builder().setUuid("uuid").setCreatedAt(1L).build());
    underTest.execute(new TestComputationStepContext());
    verify(eventRepository, times(2)).add(eventArgumentCaptor.capture());
    verifyNoMoreInteractions(eventRepository);
    assertThat(eventArgumentCaptor.getAllValues())
        .extracting(Event::getCategory, Event::getName, Event::getDescription)
        .containsExactlyInAnyOrder(tuple(Event.Category.ISSUE_DETECTION, "Capabilities have been updated (language_key1_name)", null),
            tuple(Event.Category.ISSUE_DETECTION, "Capabilities have been updated (language_key2_name)", null));
}
/** Returns the currently configured client version string. */
public static String getClientVersion() {
    return clientVersion;
}
/** getClientVersion returns the default until overridden via setClientVersion. */
@Test
void testGetClientVersion() {
    String defaultVal = ParamUtil.getClientVersion();
    assertEquals(defaultVersion, defaultVal);
    String expect = "test";
    ParamUtil.setClientVersion(expect);
    assertEquals(expect, ParamUtil.getClientVersion());
}
/**
 * Not supported on the client side.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public PublisherContext getPublisherContext() {
    // no need to implement this for client part
    throw new UnsupportedOperationException();
}
/** The client-side context must reject getPublisherContext with UnsupportedOperationException. */
@Test(expected = UnsupportedOperationException.class)
public void testGetPublisherContext() {
    context.getPublisherContext();
}
/**
 * Federated lookup of a single application: routes to the sub-cluster owning the
 * application id, records success/failure metrics, and returns null (not an error)
 * when the app or its sub-cluster cannot be resolved.
 *
 * @param hsr the incoming HTTP request (may be null in tests)
 * @param appId the application id to look up
 * @param unselectedFields response fields to omit
 * @return the app info, or null if it could not be retrieved
 */
@Override
public AppInfo getApp(HttpServletRequest hsr, String appId, Set<String> unselectedFields) {
    try {
        long startTime = clock.getTime();
        // Get SubClusterInfo according to applicationId
        DefaultRequestInterceptorREST interceptor = getOrCreateInterceptorByAppId(appId);
        if (interceptor == null) {
            // No home sub-cluster found: count a failure and return null, matching REST semantics
            routerMetrics.incrAppsFailedRetrieved();
            return null;
        }
        AppInfo response = interceptor.getApp(hsr, appId, unselectedFields);
        long stopTime = clock.getTime();
        routerMetrics.succeededAppsRetrieved(stopTime - startTime);
        return response;
    } catch (YarnException e) {
        routerMetrics.incrAppsFailedRetrieved();
        LOG.error("getApp Error, applicationId = {}.", appId, e);
        return null;
    } catch (IllegalArgumentException e) {
        // Malformed appId: still counted, but propagated to the caller
        routerMetrics.incrAppsFailedRetrieved();
        throw e;
    }
}
/** Looking up an application id that no sub-cluster knows must return null, not throw. */
@Test
public void testGetApplicationNotExists() {
    ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
    AppInfo response = interceptor.getApp(null, appId.toString(), null);
    Assert.assertNull(response);
}
/**
 * Returns a defensive copy of the payload map so callers cannot mutate
 * the request's internal state.
 */
@Override
public Map<String, Object> payload() {
    return new HashMap<>(payloadAsMap);
}
/** Adding a payload entry with a null value must be rejected with NullPointerException. */
@Test
public void nullPayloadValuesShouldNotBeAllowed() {
    assertThrows(
        NullPointerException.class,
        () -> new HttpRequestImpl("GET", "/", client).payload("", null)
    );
}
/**
 * Imputes missing fields of a tuple from its k nearest neighbors: mode for
 * boolean/char/nominal fields, mean for numeric fields. Non-missing fields are
 * returned unchanged. The returned tuple is a lazy view; values are computed
 * per field access.
 *
 * @param x the tuple, possibly containing missing values
 * @return a tuple view with missing values filled in (or null where no neighbor had a value)
 */
@Override
public Tuple apply(Tuple x) {
    StructType schema = x.schema();
    Neighbor<Tuple, Tuple>[] neighbors = knn.search(x, k);
    return new smile.data.AbstractTuple() {
        @Override
        public Object get(int i) {
            Object xi = x.get(i);
            if (!SimpleImputer.isMissing(xi)) {
                return xi;
            } else {
                StructField field = schema.field(i);
                if (field.type.isBoolean()) {
                    // Vote among neighbors; Integer.MIN_VALUE marks a missing neighbor value
                    int[] vector = MathEx.omit(
                            Arrays.stream(neighbors)
                                    .mapToInt(neighbor -> neighbor.key.getInt(i)).toArray(),
                            Integer.MIN_VALUE);
                    return vector.length == 0 ? null : MathEx.mode(vector) != 0;
                } else if (field.type.isChar()) {
                    int[] vector = MathEx.omit(
                            Arrays.stream(neighbors)
                                    .mapToInt(neighbor -> neighbor.key.getInt(i)).toArray(),
                            Integer.MIN_VALUE);
                    return vector.length == 0 ? null : (char) MathEx.mode(vector);
                } else if (field.measure instanceof NominalScale) {
                    int[] vector = MathEx.omit(
                            Arrays.stream(neighbors)
                                    .mapToInt(neighbor -> neighbor.key.getInt(i)).toArray(),
                            Integer.MIN_VALUE);
                    return vector.length == 0 ? null : MathEx.mode(vector);
                } else if (field.type.isNumeric()) {
                    // Average the non-missing neighbor values
                    double[] vector = MathEx.omit(
                            Arrays.stream(neighbors)
                                    .mapToDouble(neighbor -> neighbor.key.getDouble(i)).toArray(),
                            Integer.MIN_VALUE);
                    return vector.length == 0 ? null : MathEx.mean(vector);
                } else {
                    // Unsupported field type: cannot impute
                    return null;
                }
            }
        }

        @Override
        public StructType schema() {
            return schema;
        }
    };
}
/** Checks KNNImputer reconstruction error on SyntheticControl at 1%, 5% and 10% missing rates. */
@Test
public void test() throws Exception {
    System.out.println("KNNImputer");
    double[][] data = SyntheticControl.x;
    DataFrame df = DataFrame.of(data);
    KNNImputer knnImputer = new KNNImputer(df, 5);
    Function<double[][], double[][]> imputer = x -> knnImputer.apply(DataFrame.of(x)).toArray();
    // (missing rate, expected error bound) per call
    impute(imputer, data, 0.01, 11.08);
    impute(imputer, data, 0.05, 12.22);
    impute(imputer, data, 0.10, 11.63);
}
/**
 * Routes the request URI to a resource method descriptor. The first path segment
 * selects the root resource; the remaining segments are matched recursively.
 *
 * @param context the request context carrying the URI
 * @return the resolved resource method descriptor
 * @throws RoutingException with 404 when the path is too short or no root resource matches
 */
public ResourceMethodDescriptor process(final ServerResourceContext context) {
    String path = context.getRequestURI().getRawPath();
    // A valid path needs at least "/x"
    if (path.length() < 2) {
        throw new RoutingException(HttpStatus.S_404_NOT_FOUND.getCode());
    }
    if (path.charAt(0) == '/') {
        path = path.substring(1);
    }
    Queue<String> remainingPath = new LinkedList<>(Arrays.asList(SLASH_PATTERN.split(path)));
    String rootPath = "/" + remainingPath.poll();
    ResourceModel currentResource;
    try {
        // Root resources are registered under their decoded path
        currentResource = _pathRootResourceMap.get(URLDecoder.decode(rootPath, RestConstants.DEFAULT_CHARSET_NAME));
    } catch (UnsupportedEncodingException e) {
        throw new RestLiInternalException("UnsupportedEncodingException while trying to decode the root path", e);
    }
    if (currentResource == null) {
        throw new RoutingException(String.format("No root resource defined for path '%s'", rootPath),
            HttpStatus.S_404_NOT_FOUND.getCode());
    }
    return processResourceTree(currentResource, context, remainingPath);
}
/** A bare "/" URI must be rejected with a 404 RoutingException. */
@Test
public void failsOnVeryShortUriPath() throws URISyntaxException {
    final TestSetup setup = new TestSetup();
    final RestLiRouter router = setup._router;
    final ServerResourceContext context = setup._context;
    doReturn(new URI("/")).when(context).getRequestURI();
    final RoutingException e = runAndCatch(() -> router.process(context), RoutingException.class);
    Assert.assertEquals(e.getStatus(), HttpStatus.S_404_NOT_FOUND.getCode());
}
/**
 * Stores the entry with the given time-to-live, blocking until the async
 * operation completes.
 *
 * @return the previous value, or null if there was none
 */
@Override
public V put(K key, V value, Duration ttl) {
    return get(putAsync(key, value, ttl));
}
/** An entry written with a 1s TTL must be visible before and gone after expiry. */
@Test
public void testContainsKeyTTL() throws InterruptedException {
    RMapCacheNative<SimpleKey, SimpleValue> map = redisson.getMapCacheNative("simple30");
    map.put(new SimpleKey("33"), new SimpleValue("44"), Duration.ofSeconds(1));
    Assertions.assertTrue(map.containsKey(new SimpleKey("33")));
    Assertions.assertFalse(map.containsKey(new SimpleKey("34")));
    // Wait for the TTL to elapse
    Thread.sleep(1000);
    Assertions.assertFalse(map.containsKey(new SimpleKey("33")));
    map.destroy();
}
@Override public synchronized List<HeliumPackage> getAll() throws IOException { HttpClient client = HttpClientBuilder.create() .setUserAgent("ApacheZeppelin/" + Util.getVersion()) .setProxy(getProxy(uri())) .build(); HttpGet get = new HttpGet(uri()); HttpResponse response; try { if ((get.getURI().getHost().equals(zConf.getS3Endpoint()))) { if (zConf.getS3Timeout() != null) { int timeout = Integer.parseInt(zConf.getS3Timeout()); RequestConfig requestCfg = RequestConfig.custom() .setConnectTimeout(timeout) .setSocketTimeout(timeout) .build(); get.setConfig(requestCfg); } } response = client.execute(get); } catch (Exception e) { logger.error(e.getMessage()); return readFromCache(); } if (response.getStatusLine().getStatusCode() != 200) { // try read from cache if (logger.isErrorEnabled()) { logger.error("{} returned {}", uri(), response.getStatusLine()); } return readFromCache(); } else { List<HeliumPackage> packageList = new LinkedList<>(); BufferedReader reader; reader = new BufferedReader( new InputStreamReader(response.getEntity().getContent())); List<Map<String, Map<String, HeliumPackage>>> packages = gson.fromJson( reader, new TypeToken<List<Map<String, Map<String, HeliumPackage>>>>() { }.getType()); reader.close(); for (Map<String, Map<String, HeliumPackage>> pkg : packages) { for (Map<String, HeliumPackage> versions : pkg.values()) { packageList.addAll(versions.values()); } } writeToCache(packageList); return packageList; } }
/** The configured S3 timeout must bound the registry call duration against an unreachable endpoint. */
@Test
void zeppelinNotebookS3TimeoutPropertyTest() throws IOException {
    ZeppelinConfiguration zConf = ZeppelinConfiguration.load();
    zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_S3_TIMEOUT.getVarName(),
            TIMEOUT);
    zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_S3_ENDPOINT.getVarName(),
            IP);
    HeliumOnlineRegistry heliumOnlineRegistry = new HeliumOnlineRegistry(
            "https://" + IP,
            "https://" + IP,
            tmpDir,
            zConf
    );

    long start = System.currentTimeMillis();
    heliumOnlineRegistry.getAll();
    long processTime = System.currentTimeMillis() - start;

    // The call must give up before the (much larger) default timeout
    long basicTimeout = Long.valueOf(
            ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_S3_TIMEOUT.getStringValue()
    );
    assertTrue(
            basicTimeout > processTime,
            String.format(
                    "Wrong timeout during connection: expected %s, actual is about %d",
                    TIMEOUT,
                    processTime
            )
    );
}
/**
 * Writes a message to the ring buffer.
 *
 * <p>Concurrency note: the record length is first published negated (marking the
 * slot as "in progress" for readers), the payload and type are written, then the
 * positive length is published with ordered semantics to make the record visible.
 * The statement order here is load-bearing; do not reorder.
 *
 * @param msgTypeId positive application-defined message type
 * @param srcBuffer source of the message payload
 * @param offset offset of the payload in srcBuffer
 * @param length payload length in bytes
 * @return true if written, false if the buffer lacked capacity
 */
public boolean write(final int msgTypeId, final DirectBuffer srcBuffer, final int offset, final int length) {
    checkTypeId(msgTypeId);
    checkMsgLength(length);

    final AtomicBuffer buffer = this.buffer;
    final int recordLength = length + HEADER_LENGTH;
    final int recordIndex = claimCapacity(buffer, recordLength);

    if (INSUFFICIENT_CAPACITY == recordIndex) {
        return false;
    }

    // Negative length marks the record as not-yet-complete for concurrent readers
    buffer.putIntOrdered(lengthOffset(recordIndex), -recordLength);
    MemoryAccess.releaseFence();
    buffer.putBytes(encodedMsgOffset(recordIndex), srcBuffer, offset, length);
    buffer.putInt(typeOffset(recordIndex), msgTypeId);
    // Publishing the positive length makes the record visible to consumers
    buffer.putIntOrdered(lengthOffset(recordIndex), recordLength);

    return true;
}
/** When head/tail indicate a full buffer the write must fail without touching the buffer. */
@Test
void shouldRejectWriteWhenBufferFull() {
    final int length = 8;
    final long head = 0L;
    // tail a full capacity ahead of head -> no space
    final long tail = head + CAPACITY;
    when(buffer.getLongVolatile(HEAD_COUNTER_INDEX)).thenReturn(head);
    when(buffer.getLongVolatile(TAIL_COUNTER_INDEX)).thenReturn(tail);

    final UnsafeBuffer srcBuffer = new UnsafeBuffer(allocateDirect(1024));
    final int srcIndex = 0;
    assertFalse(ringBuffer.write(MSG_TYPE_ID, srcBuffer, srcIndex, length));

    // No record header, no CAS, no ordered publish
    verify(buffer, never()).putInt(anyInt(), anyInt());
    verify(buffer, never()).compareAndSetLong(anyInt(), anyLong(), anyLong());
    verify(buffer, never()).putIntOrdered(anyInt(), anyInt());
}
/**
 * Writes a single mutation by delegating to the batch write.
 *
 * @throws IllegalStateException if the manager is not in a writable state
 */
public synchronized void write(Mutation tableRecord) throws IllegalStateException {
    write(ImmutableList.of(tableRecord));
}
/** A SpannerException during write must surface as a SpannerResourceManagerException. */
@Test
public void testWriteSingleRecordShouldThrowExceptionWhenSpannerWriteFails()
        throws ExecutionException, InterruptedException {
    // arrange
    prepareTable();
    when(spanner.getDatabaseClient(any()).write(any())).thenThrow(SpannerException.class);
    Mutation testMutation =
        Mutation.newInsertOrUpdateBuilder("SingerId")
            .set("SingerId")
            .to(1)
            .set("FirstName")
            .to("Marc")
            .set("LastName")
            .to("Richards")
            .build();

    // act & assert
    assertThrows(SpannerResourceManagerException.class, () -> testManager.write(testMutation));
}
/**
 * Atomically adds the key to the idempotent repository.
 *
 * <p>The key is locked for the duration of the put so that concurrent adds of
 * the same key cannot both report success.
 *
 * @return true if the key was not present before, false otherwise
 */
@Override
public boolean add(String key) {
    repo.lock(key);
    try {
        return repo.putIfAbsent(key, false) == null;
    } finally {
        // Always release the per-key lock, even if the put fails
        repo.unlock(key);
    }
}
/** add() returns true only for first-time keys and the cache grows accordingly. */
@Test
public void testAdd() throws Exception {
    // ADD first key
    assertTrue(repo.add(key01));
    assertTrue(cache.containsKey(key01));

    // try to ADD the same key again
    assertFalse(repo.add(key01));
    assertEquals(1, cache.size());

    // try to ADD an other one
    assertTrue(repo.add(key02));
    assertEquals(2, cache.size());
}
/**
 * Returns a TikaConfig built from defaults, wrapping checked construction
 * failures in unchecked exceptions (callers cannot recover from a broken
 * default configuration).
 */
public static TikaConfig getDefaultConfig() {
    try {
        return new TikaConfig();
    } catch (IOException e) {
        throw new RuntimeException("Unable to read default configuration", e);
    } catch (TikaException e) {
        throw new RuntimeException("Unable to access default configuration", e);
    }
}
/** XML reader pool size and entity-expansion limits from config must take effect, with global state restored after. */
@Test
public void testXMLReaderUtils() throws Exception {
    //pool size may have been reset already by an
    //earlier test.  Can't test for default here.
    assertEquals(XMLReaderUtils.DEFAULT_MAX_ENTITY_EXPANSIONS,
            XMLReaderUtils.getMaxEntityExpansions());
    //make sure that detection on this file actually works with
    //default expansions
    assertEquals("application/rdf+xml",
            detect("test-difficult-rdf1.xml", TikaConfig.getDefaultConfig()).toString());

    TikaConfig tikaConfig = getConfig("TIKA-2732-xmlreaderutils.xml");
    try {
        assertEquals(33, XMLReaderUtils.getPoolSize());
        assertEquals(5, XMLReaderUtils.getMaxEntityExpansions());
        //make sure that there's actually a change in behavior
        assertEquals("text/plain",
                detect("test-difficult-rdf1.xml", tikaConfig).toString());
    } finally {
        // Restore static XMLReaderUtils state so later tests see defaults
        XMLReaderUtils.setMaxEntityExpansions(XMLReaderUtils.DEFAULT_MAX_ENTITY_EXPANSIONS);
        XMLReaderUtils.setPoolSize(XMLReaderUtils.DEFAULT_POOL_SIZE);
    }
}
/**
 * Classifies a track's flight rules at a moment in time by majority vote over
 * the k points nearest to that time. Ties resolve to VFR (strict-majority check).
 *
 * @param track the track to classify; must exist at {@code time}
 * @param time the moment of interest
 * @return IFR if a strict majority of the sampled points are IFR, else VFR
 * @throws IllegalArgumentException if the track does not span {@code time}
 */
public <T> IfrVfrStatus statusOf(Track<T> track, Instant time) {
    checkNotNull(track);
    checkNotNull(time);
    checkArgument(
        track.asTimeWindow().contains(time),
        "This track does not exist at this moment in time"
    );

    EnumMultiset<IfrVfrStatus> counts = EnumMultiset.create(IfrVfrStatus.class);

    Collection<Point<T>> localPoints = track.kNearestPoints(time, numPointsToConsider);

    for (Point<T> point : localPoints) {
        counts.add(statusOf(point));
    }

    // Strict majority required for IFR; ties fall through to VFR
    return (counts.count(IFR) > counts.count(VFR)) ? IFR : VFR;
}
/** A 1200-beacon point (VFR squawk) with no valid callsign must classify as VFR even though the raw record says IFR. */
@Test
public void testStatusOfPoint_1200_beacon() {
    String rawNop = "[RH],STARS,D21_B,03/24/2018,15:09:14.157,,,,1200,015,059,062,042.20658,-083.77467,2643,0000,-17.6355,-0.3504,,,,D21,,,,,,ACT,IFR,,00000,,,,,,,1,,0,{RH}";
    Point<NopHit> point = NopHit.from(rawNop);

    assertTrue(point.rawData().hasFlightRules());
    assertFalse(point.hasValidCallsign());

    IfrVfrAssigner assigner = new IfrVfrAssigner();
    assertEquals(VFR, assigner.statusOf(point));
}
/**
 * Compares the eventual result of an experimental computation against the
 * expected (control) value, recording timing into the appropriate metric:
 * the error timer when the stage fails, otherwise the match/mismatch result.
 *
 * @param expected the control value
 * @param experimentStage the asynchronous experimental computation
 */
public <T> void compareFutureResult(final T expected, final CompletionStage<T> experimentStage) {
    // Start timing now; the sample is stopped inside the completion callback
    final Timer.Sample sample = Timer.start();

    experimentStage.whenComplete((actual, cause) -> {
        if (cause != null) {
            recordError(cause, sample);
        } else {
            recordResult(expected, actual, sample);
        }
    });
}
/** A failed experimental future must be recorded on the error timer. */
@Test
void compareFutureResultError() {
    experiment.compareFutureResult(12, CompletableFuture.failedFuture(new RuntimeException("OH NO")));
    verify(errorTimer).record(anyLong(), eq(TimeUnit.NANOSECONDS));
}
/** Delegates statement interpretation to the shared helper using this interpreter's session. */
@Override
public InterpreterResult interpret(String st, InterpreterContext context) {
    return helper.interpret(session, st, context);
}
/** "DESCRIBE TYPES;" must render the expected HTML listing of all UDTs. */
@Test
void should_describe_all_udts() {
    // Given
    String query = "DESCRIBE TYPES;";
    // Expected output comes from a pre-rendered template resource
    final String expected = reformatHtml(readTestResource(
            "/scalate/DescribeTypes.html"));

    // When
    final InterpreterResult actual = interpreter.interpret(query, intrContext);

    // Then
    assertEquals(Code.SUCCESS, actual.code());
    assertEquals(expected, reformatHtml(actual.message().get(0).getData()));
}
/** Returns the current migration status. */
@Override
public Status getStatus() {
    return status;
}
/** A freshly created component reports status NONE. */
@Test
void getStatus_whenComponentIsCreated_shouldReturnNONE() {
    assertThat(underTest.getStatus()).isEqualTo(DatabaseMigrationState.Status.NONE);
}
/** Static factory for a new, empty {@code Inner} transform. */
public static <T> Inner<T> create() {
    return new Inner<>();
}
/** AddFields must add nullable top-level and nested fields, defaulting new values to null. */
@Test
@Category(NeedsRunner.class)
public void addSimpleAndNestedField() {
    // Input: a row containing one nested row with a single string field
    Schema nested = Schema.builder().addStringField("field1").build();
    Schema schema = Schema.builder().addRowField("nested", nested).build();
    Row subRow = Row.withSchema(nested).addValue("value").build();
    Row row = Row.withSchema(schema).addValue(subRow).build();

    PCollection<Row> added =
        pipeline
            .apply(Create.of(row).withRowSchema(schema))
            .apply(
                AddFields.<Row>create()
                    .field("field2", Schema.FieldType.INT32)
                    .field("nested.field2", Schema.FieldType.INT32)
                    .field("nested.field3", Schema.FieldType.array(Schema.FieldType.STRING))
                    .field("nested.field4", Schema.FieldType.iterable(Schema.FieldType.STRING)));

    // All added fields become nullable in the output schema
    Schema expectedNestedSchema =
        Schema.builder()
            .addStringField("field1")
            .addNullableField("field2", Schema.FieldType.INT32)
            .addNullableField("field3", Schema.FieldType.array(Schema.FieldType.STRING))
            .addNullableField("field4", Schema.FieldType.iterable(Schema.FieldType.STRING))
            .build();
    Schema expectedSchema =
        Schema.builder()
            .addRowField("nested", expectedNestedSchema)
            .addNullableField("field2", Schema.FieldType.INT32)
            .build();
    assertEquals(expectedSchema, added.getSchema());

    // Added values default to null
    Row expectedNested =
        Row.withSchema(expectedNestedSchema).addValues("value", null, null, null).build();
    Row expected = Row.withSchema(expectedSchema).addValues(expectedNested, null).build();

    PAssert.that(added).containsInAnyOrder(expected);
    pipeline.run();
}
/**
 * Returns a new mutable {@link ArrayList} containing the given elements in order.
 *
 * @param elements the elements to copy; individual elements may be {@code null}
 * @return a mutable list pre-sized to the number of elements
 * @throws IllegalArgumentException if the varargs array itself is {@code null}
 */
// @SafeVarargs: the varargs array is only read (Collections.addAll), never stored,
// so callers do not get an unchecked heap-pollution warning
@SafeVarargs
public static <T> List<T> list(T... elements) {
    if (elements == null) {
        throw new IllegalArgumentException("Expected an array of elements (or empty array) but received a null.");
    }
    ArrayList<T> list = new ArrayList<>(elements.length);
    Collections.addAll(list, elements);
    return list;
}
/** list() handles null elements, normal elements and the empty case. */
@Test
void testList() {
    // Three explicit null elements (not a null array)
    assertEquals(Arrays.asList(null, null, null), CollectionUtils.list(null, null, null));
    assertEquals(Arrays.asList("", "a", "b"), CollectionUtils.list("", "a", "b"));
    assertEquals(new ArrayList(), CollectionUtils.list());
}
/** Enables or disables server-side parameter checking. */
public void setParamCheckEnabled(boolean paramCheckEnabled) {
    this.paramCheckEnabled = paramCheckEnabled;
}
/** Disabling param checking on the singleton must be reflected by the getter. */
@Test
void setParamCheckEnabled() {
    ServerParamCheckConfig paramCheckConfig = ServerParamCheckConfig.getInstance();
    paramCheckConfig.setParamCheckEnabled(false);
    assertFalse(paramCheckConfig.isParamCheckEnabled());
}
/**
 * Creates a mask result decorator only for SELECT statements; other statement
 * types need no result masking and get an empty Optional.
 */
@Override
public Optional<ResultDecorator<MaskRule>> newInstance(final RuleMetaData globalRuleMetaData, final ShardingSphereDatabase database,
                                                       final MaskRule maskRule, final ConfigurationProperties props, final SQLStatementContext sqlStatementContext) {
    return sqlStatementContext instanceof SelectStatementContext
            ? Optional.of(new MaskDQLResultDecorator(maskRule, (SelectStatementContext) sqlStatementContext))
            : Optional.empty();
}
/** A SELECT statement context must yield a MaskDQLResultDecorator from the engine. */
@Test
void assertNewInstanceWithSelectStatement() {
    MaskResultDecoratorEngine engine = (MaskResultDecoratorEngine) OrderedSPILoader
            .getServices(ResultProcessEngine.class, Collections.singleton(rule)).get(rule);
    Optional<ResultDecorator<MaskRule>> actual = engine.newInstance(mock(RuleMetaData.class), database, rule,
            mock(ConfigurationProperties.class), mock(SelectStatementContext.class, RETURNS_DEEP_STUBS));
    assertTrue(actual.isPresent());
    assertThat(actual.get(), instanceOf(MaskDQLResultDecorator.class));
}
/**
 * UDF: extracts the path component of a URL (e.g. "/a/b" from "http://host/a/b"),
 * delegating parsing to UrlParser.
 */
@Udf
public String extractPath(
        @UdfParameter(description = "a valid URL") final String input) {
    return UrlParser.extract(input, URI::getPath);
}
/** A URL whose path is just the root must extract to "/". */
@Test
public void shouldReturnSlashIfRootPath() {
    assertThat(extractUdf.extractPath("https://docs.confluent.io/"), equalTo("/"));
}
/**
 * Parses the VPLS entries from the JSON configuration.
 *
 * @return the configured VPLSs (empty set if the "vpls" node is absent)
 */
public Set<VplsConfig> vplss() {
    Set<VplsConfig> vplss = Sets.newHashSet();
    JsonNode vplsNode = object.get(VPLS);

    if (vplsNode == null) {
        return vplss;
    }

    vplsNode.forEach(jsonNode -> {
        String name = jsonNode.get(NAME).asText();

        Set<String> ifaces = Sets.newHashSet();
        JsonNode vplsIfaces = jsonNode.path(INTERFACE);
        if (vplsIfaces.toString().isEmpty()) {
            // NOTE: this mutates the config node in place, normalizing a missing
            // interface list to an empty array for subsequent reads
            vplsIfaces = ((ObjectNode) jsonNode).putArray(INTERFACE);
        }
        vplsIfaces.forEach(ifacesNode -> ifaces.add(ifacesNode.asText()));

        // Encapsulation is optional; enumFromString handles a null value
        String encap = null;
        if (jsonNode.hasNonNull(ENCAPSULATION)) {
            encap = jsonNode.get(ENCAPSULATION).asText();
        }
        vplss.add(new VplsConfig(name,
                                 ifaces,
                                 EncapsulationType.enumFromString(encap)));
    });
    return vplss;
}
/** An empty configuration must yield an empty VPLS set. */
@Test
public void emptyVplss() {
    assertTrue("Configuration retrieved from JSON was not empty",
               emptyVplsAppConfig.vplss().isEmpty());
}
/**
 * Builds a human-readable context string for a batch meta-data change event:
 * the comma-joined app names of all changed records plus the lower-cased event type.
 */
@Override
public String buildContext() {
    final String metadata = ((Collection<?>) getSource())
            .stream()
            .map(s -> ((MetaDataDO) s).getAppName())
            .collect(Collectors.joining(","));
    return String.format("the meta data [%s] is %s", metadata,
            StringUtils.lowerCase(getType().getType().toString()));
}
/** The batch event context must join all app names and lower-case the event type. */
@Test
public void batchMetaDataChangedContextTest() {
    BatchMetaDataChangedEvent batchMetaDataChangedEvent =
            new BatchMetaDataChangedEvent(Arrays.asList(one, two), null, EventTypeEnum.META_DATA_UPDATE, "test-operator");
    String context = String.format("the meta data [%s] is %s",
            "testAppNameOne,testAppNameTwo", StringUtils.lowerCase(EventTypeEnum.META_DATA_UPDATE.getType().toString()));
    assertEquals(context, batchMetaDataChangedEvent.buildContext());
}
/**
 * Sets the merkle tree configuration for this cache.
 *
 * @param merkleTreeConfig the config to set, must not be null
 * @return this config instance for chaining
 * @throws NullPointerException if merkleTreeConfig is null
 */
public CacheSimpleConfig setMerkleTreeConfig(@Nonnull MerkleTreeConfig merkleTreeConfig) {
    this.merkleTreeConfig = checkNotNull(merkleTreeConfig, "MerkleTreeConfig cannot be null");
    return this;
}
/** Passing a null merkle tree config must fail fast with NullPointerException. */
@Test
public void givenNullMerkleTreeConfig_throws_NPE() {
    CacheSimpleConfig cacheSimpleConfig = new CacheSimpleConfig();
    assertThrows(NullPointerException.class, () -> cacheSimpleConfig.setMerkleTreeConfig(null));
}
/**
 * Lazily builds and caches the JUnit suite description for this feature,
 * with one child description per scenario.
 *
 * <p>NOTE(review): the lazy initialization is not synchronized — presumably
 * JUnit only calls this from a single thread; confirm before relying on it
 * concurrently.
 */
@Override
public Description getDescription() {
    if (description == null) {
        // FeatureId gives the description a stable, unique identity across runs
        description = Description.createSuiteDescription(getName(), new FeatureId(feature));
        getChildren().forEach(child -> description.addChild(describeChild(child)));
    }
    return description;
}
/** Descriptions must be unique within a run and identical across two runners built from the same feature. */
@Test
void should_populate_descriptions_with_stable_unique_ids() {
    Feature feature = TestPickleBuilder.parseFeature("path/test.feature", "" +
            "Feature: feature name\n" +
            "  Background:\n" +
            "    Given background step\n" +
            "  Scenario: A\n" +
            "    Then scenario name\n" +
            "  Scenario: B\n" +
            "    Then scenario name\n" +
            "  Scenario Outline: C\n" +
            "    Then scenario <name>\n" +
            "  Examples:\n" +
            "    | name |\n" +
            "    | C    |\n" +
            "    | D    |\n" +
            "    | E    |\n"
    );

    FeatureRunner runner = createFeatureRunner(feature, new JUnitOptions());
    FeatureRunner rerunner = createFeatureRunner(feature, new JUnitOptions());

    Set<Description> descriptions = new HashSet<>();
    // Uniqueness within one runner, then stability across a second runner
    assertDescriptionIsUnique(runner.getDescription(), descriptions);
    assertDescriptionIsPredictable(runner.getDescription(), descriptions);
    assertDescriptionIsPredictable(rerunner.getDescription(), descriptions);
}