focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/** Returns the dictionary data record with the given id, or {@code null} if none exists. */
@Override
public DictDataDO getDictData(Long id) {
    return dictDataMapper.selectById(id);
}
/** getDictData must return the record previously inserted under the same id. */
@Test
public void testGetDictData() {
    // mock data
    DictDataDO dbDictData = randomDictDataDO();
    dictDataMapper.insert(dbDictData);
    // prepare parameters
    Long id = dbDictData.getId();
    // invoke
    DictDataDO dictData = dictDataService.getDictData(id);
    // assert
    assertPojoEquals(dbDictData, dictData);
}
/**
 * Disables generation for the given template types by clearing their configured templates.
 *
 * @param templateTypes template types to disable; a {@code null} array is ignored
 * @return this config, for chaining
 */
@Deprecated
public TemplateConfig disable(@NotNull TemplateType... templateTypes) {
    if (templateTypes != null) {
        for (TemplateType templateType : templateTypes) {
            switch (templateType) {
                case ENTITY:
                    this.entity = null;
                    this.entityKt = null;
                    // No further requirements for now; track the disabled state with a dedicated boolean flag.
                    this.disableEntity = true;
                    break;
                case CONTROLLER:
                    this.controller = null;
                    break;
                case MAPPER:
                    this.mapper = null;
                    break;
                case XML:
                    this.xml = null;
                    break;
                case SERVICE:
                    this.service = null;
                    break;
                case SERVICE_IMPL:
                    this.serviceImpl = null;
                    break;
                default:
            }
        }
    }
    return this;
}
/** Covers disable() (all templates) and disable(TemplateType...) for individual types. */
@Test
void disableTest() {
    TemplateConfig templateConfig;
    // Disabling with no argument clears every template.
    templateConfig = GeneratorBuilder.templateConfig().disable();
    Assertions.assertNull(templateConfig.getController());
    Assertions.assertNull(templateConfig.getService());
    Assertions.assertNull(templateConfig.getServiceImpl());
    Assertions.assertNull(templateConfig.getMapper());
    Assertions.assertNull(templateConfig.getXml());
    Assertions.assertNull(templateConfig.getEntity(true));
    Assertions.assertNull(templateConfig.getEntity(false));
    // Disabling SERVICE leaves the entity templates intact.
    templateConfig = GeneratorBuilder.templateConfig().disable(TemplateType.SERVICE);
    Assertions.assertNull(templateConfig.getService());
    Assertions.assertNotNull(templateConfig.getEntity(true));
    Assertions.assertNotNull(templateConfig.getEntity(false));
    // Disabling SERVICE_IMPL likewise.
    templateConfig = GeneratorBuilder.templateConfig().disable(TemplateType.SERVICE_IMPL);
    Assertions.assertNull(templateConfig.getServiceImpl());
    Assertions.assertNotNull(templateConfig.getEntity(true));
    Assertions.assertNotNull(templateConfig.getEntity(false));
    // Disabling ENTITY clears both Java and Kotlin entity templates only.
    templateConfig = GeneratorBuilder.templateConfig().disable(TemplateType.ENTITY);
    Assertions.assertNotNull(templateConfig.getServiceImpl());
    Assertions.assertNotNull(templateConfig.getService());
    Assertions.assertNull(templateConfig.getEntity(true));
    Assertions.assertNull(templateConfig.getEntity(false));
    // NOTE(review): this block duplicates the previous disable(TemplateType.ENTITY) case
    // verbatim — possibly a copy-paste leftover; confirm whether another type was intended.
    templateConfig = GeneratorBuilder.templateConfig().disable(TemplateType.ENTITY);
    Assertions.assertNotNull(templateConfig.getServiceImpl());
    Assertions.assertNotNull(templateConfig.getService());
    Assertions.assertNull(templateConfig.getEntity(true));
    Assertions.assertNull(templateConfig.getEntity(false));
}
/** Whether the underlying server's instance info reports its SECURE port as enabled. */
public boolean isSecurePortEnabled() {
    return server.getInstanceInfo().isPortEnabled(PortType.SECURE);
}
/** isSecurePortEnabled must reflect the SECURE-port flag of the wrapped InstanceInfo. */
@Test
void securePortMustCheckInstanceInfo() {
    // One instance with the secure port disabled, one with it enabled.
    final InstanceInfo instanceInfo = Builder.newBuilder()
            .setAppName("secure-port")
            .setHostName("secure-port")
            .setPort(7777)
            .enablePort(PortType.SECURE, false)
            .build();
    final InstanceInfo secureEnabled = Builder.newBuilder()
            .setAppName("secure-port")
            .setHostName("secure-port")
            .setPort(7777)
            .enablePort(PortType.SECURE, true)
            .build();
    final DiscoveryEnabledServer server = new DiscoveryEnabledServer(instanceInfo, true);
    final DiscoveryEnabledServer secureServer = new DiscoveryEnabledServer(secureEnabled, true);
    final DynamicServerListLoadBalancer<Server> lb =
            new DynamicServerListLoadBalancer<>(new DefaultClientConfigImpl());
    final DiscoveryResult result = new DiscoveryResult(server, lb.getLoadBalancerStats());
    final DiscoveryResult secure = new DiscoveryResult(secureServer, lb.getLoadBalancerStats());
    Truth.assertThat(result.isSecurePortEnabled()).isFalse();
    Truth.assertThat(secure.isSecurePortEnabled()).isTrue();
}
/**
 * Selects instances of the given service with no cluster filter.
 *
 * @param serviceName name of the service to query
 * @param healthy whether to restrict the result to healthy instances
 * @throws NacosException if the lookup fails
 */
@Override
public List<Instance> selectInstances(String serviceName, boolean healthy) throws NacosException {
    // An empty cluster list means "all clusters".
    return selectInstances(serviceName, new ArrayList<>(), healthy);
}
/** Cluster names must be joined with commas when querying the naming proxy. */
@Test
void testSelectInstances7() throws NacosException {
    // given
    String serviceName = "service1";
    List<String> clusterList = Arrays.asList("cluster1", "cluster2");
    // when
    client.selectInstances(serviceName, clusterList, true, false);
    // then
    verify(proxy, times(1)).queryInstancesOfService(serviceName, Constants.DEFAULT_GROUP, "cluster1,cluster2", false);
}
/** Builds a header row by joining the format's header column names with its delimiter character. */
static String headerLine(CSVFormat csvFormat) {
    final String delimiter = String.valueOf(csvFormat.getDelimiter());
    final String[] columns = csvFormat.getHeader();
    return String.join(delimiter, columns);
}
/** Rows without the configured null string parse cleanly into field lists, with no errors. */
@Test
public void givenNoNullString_isNoop() {
    CSVFormat csvFormat = csvFormat();
    PCollection<String> input =
            pipeline.apply(Create.of(headerLine(csvFormat), "a,1,🐼", "b,🐼,2.2", "🐼,3,3.3"));
    CsvIOStringToCsvRecord underTest = new CsvIOStringToCsvRecord(csvFormat);
    CsvIOParseResult<List<String>> result = input.apply(underTest);
    PAssert.that(result.getOutput())
            .containsInAnyOrder(
                    Arrays.asList(
                            Arrays.asList("a", "1", "🐼"),
                            Arrays.asList("b", "🐼", "2.2"),
                            Arrays.asList("🐼", "3", "3.3")));
    PAssert.that(result.getErrors()).empty();
    pipeline.run();
}
/**
 * Looks up the deletion error for a group in this delete-groups response.
 *
 * @param group the group id to look up
 * @return the error reported for that group
 * @throws IllegalArgumentException if the response contains no result for {@code group}
 */
public Errors get(String group) throws IllegalArgumentException {
    final DeletableGroupResult result = data.results().find(group);
    if (result != null) {
        return Errors.forCode(result.errorCode());
    }
    throw new IllegalArgumentException("could not find group " + group + " in the delete group response");
}
/** get() must throw IllegalArgumentException for a group id absent from the response. */
@Test
public void testGetErrorWithInvalidGroupId() {
    assertThrows(IllegalArgumentException.class, () -> DELETE_GROUPS_RESPONSE.get("invalid-group-id"));
}
/**
 * Decodes an ABI-encoded dynamic array from a hex string.
 *
 * @param input ABI-encoded hex data
 * @param offset position in {@code input} where the array encoding starts
 * @param typeReference reference describing the array's element type
 * @return the decoded dynamic array
 */
@SuppressWarnings("unchecked")
public static <T extends Type> T decodeDynamicArray(
        String input, int offset, TypeReference<T> typeReference) {
    // The first word at the offset is the element count.
    int length = decodeUintAsInt(input, offset);
    BiFunction<List<T>, String, T> function =
            (elements, typeName) -> (T) new DynamicArray(AbiTypes.getType(typeName), elements);
    // Element data begins immediately after the length word.
    int valueOffset = offset + MAX_BYTE_LENGTH_FOR_HEX_STRING;
    return decodeArrayElements(input, valueOffset, typeReference, length, function);
}
/** Nested dynamic arrays (array-of-arrays of Bar structs) must decode element-wise. */
@Test
public void testDynamicArrayOfDynamicArrays() throws Exception {
    assertEquals(
            TypeDecoder.decodeDynamicArray(
                    // length 2, then two offsets, then each inner array (length 1 + one Bar each)
                    "0000000000000000000000000000000000000000000000000000000000000002"
                            + "0000000000000000000000000000000000000000000000000000000000000040"
                            + "00000000000000000000000000000000000000000000000000000000000000a0"
                            + "0000000000000000000000000000000000000000000000000000000000000001"
                            + "0000000000000000000000000000000000000000000000000000000000000000"
                            + "0000000000000000000000000000000000000000000000000000000000000000"
                            + "0000000000000000000000000000000000000000000000000000000000000001"
                            + "0000000000000000000000000000000000000000000000000000000000000001"
                            + "0000000000000000000000000000000000000000000000000000000000000000",
                    0,
                    new TypeReference<DynamicArray<DynamicArray<AbiV2TestFixture.Bar>>>() {}),
            new DynamicArray(
                    DynamicArray.class,
                    Arrays.asList(
                            new DynamicArray(
                                    AbiV2TestFixture.Bar.class,
                                    new AbiV2TestFixture.Bar(
                                            new Uint256(BigInteger.ZERO), new Uint256(BigInteger.ZERO))),
                            new DynamicArray(
                                    AbiV2TestFixture.Bar.class,
                                    new AbiV2TestFixture.Bar(
                                            new Uint256(BigInteger.ONE), new Uint256(BigInteger.ZERO))))));
}
/**
 * Derives a Java package name from the document's host: the hostname labels in
 * reverse order, dot-separated (e.g. {@code api.example.org} → {@code org.example.api}).
 * Falls back to the default package name when no host is set or the host is localhost.
 */
static String generatePackageName(final OpenAPI document) {
    final String host = RestDslGenerator.determineHostFrom(document);
    if (!ObjectHelper.isNotEmpty(host)) {
        return DEFAULT_PACKAGE_NAME;
    }
    // Strip any ":port" suffix before inspecting the hostname.
    final String hostWithoutPort = host.replaceFirst(":.*", "");
    if ("localhost".equalsIgnoreCase(hostWithoutPort)) {
        return DEFAULT_PACKAGE_NAME;
    }
    final String[] labels = hostWithoutPort.split("\\.");
    final StringBuilder reversed = new StringBuilder();
    for (int idx = labels.length - 1; idx >= 0; idx--) {
        if (reversed.length() > 0) {
            reversed.append('.');
        }
        reversed.append(labels[idx]);
    }
    return reversed.toString();
}
/** Package name is the first server URL's hostname with its labels reversed. */
@Test
public void shouldCreatePackageNamesFromHostnames() {
    final OpenAPI openapi = new OpenAPI();
    Server server = new Server();
    server.url("http://api.example.org");
    openapi.addServersItem(server);
    assertThat(RestDslSourceCodeGenerator.generatePackageName(openapi)).isEqualTo("org.example.api");
}
/** Returns the service configuration backing this instance. */
public ServiceConfiguration getConfiguration() {
    return this.config;
}
/** Advertised listeners must drive the broker service URLs; getConfiguration returns the conf used. */
@Test
public void testAdvertisedListeners() throws Exception {
    cleanup();
    // don't use dynamic ports when using advertised listeners (#12079)
    useStaticPorts = true;
    conf.setAdvertisedListeners("internal:pulsar://gateway:6650, internal:pulsar+ssl://gateway:6651");
    conf.setInternalListenerName("internal");
    setup();
    assertEquals(pulsar.getAdvertisedAddress(), "localhost");
    assertEquals(pulsar.getBrokerServiceUrlTls(), "pulsar+ssl://gateway:6651");
    assertEquals(pulsar.getBrokerServiceUrl(), "pulsar://gateway:6650");
    assertEquals(pulsar.getWebServiceAddress(), "http://localhost:8081");
    assertEquals(pulsar.getWebServiceAddressTls(), "https://localhost:8082");
    assertEquals(conf, pulsar.getConfiguration());
}
/** Returns the port this KDC is bound to. */
public int getPort() {
    return port;
}
/** A started MiniKdc must report a non-zero port. */
@Test
public void testMiniKdcStart() {
    MiniKdc kdc = getKdc();
    // assertNotSame compares boxed-Integer identity, not value, which is not a
    // reliable numeric check; assertNotEquals compares the port value itself.
    Assert.assertNotEquals(0, kdc.getPort());
}
/**
 * Returns the encryptor used for queries: the assisted-query column's encryptor
 * when one is configured, otherwise the cipher column's encryptor.
 */
public EncryptAlgorithm getQueryEncryptor() {
    if (null == assistedQuery) {
        return cipher.getEncryptor();
    }
    return assistedQuery.getEncryptor();
}
/** Without an assisted-query column, the cipher column's encryptor serves queries. */
@Test
void assertGetQueryEncryptorWithoutAssistedQuery() {
    EncryptAlgorithm cipherAlgorithm = mock(EncryptAlgorithm.class);
    assertThat(new EncryptColumn("foo_tbl", new CipherColumnItem("foo_col", cipherAlgorithm)).getQueryEncryptor(), is(cipherAlgorithm));
}
/**
 * Runs the experiment supplier asynchronously on the given executor and compares
 * its result against the expected value.
 *
 * @param expected the control value to compare against
 * @param experimentSupplier produces the experimental value
 * @param executor executor on which the supplier runs
 */
public <T> void compareSupplierResultAsync(final T expected, final Supplier<T> experimentSupplier, final Executor executor) {
    final Timer.Sample sample = Timer.start();
    try {
        compareFutureResult(expected, CompletableFuture.supplyAsync(experimentSupplier, executor));
    } catch (final Exception e) {
        // Failures to even launch the comparison are recorded as experiment errors.
        recordError(e, sample);
    }
}
/** A matching async supplier result must be recorded on the match timer. */
@Test
void compareSupplierResultAsyncMatch() throws InterruptedException {
    final ExecutorService experimentExecutor = Executors.newSingleThreadExecutor();
    experiment.compareSupplierResultAsync(12, () -> 12, experimentExecutor);
    // Wait for the async comparison to finish before verifying the timer.
    experimentExecutor.shutdown();
    experimentExecutor.awaitTermination(1, TimeUnit.SECONDS);
    verify(matchTimer).record(anyLong(), eq(TimeUnit.NANOSECONDS));
}
public void checkCoinBaseHeight(final int height) throws VerificationException { checkArgument(height >= Block.BLOCK_HEIGHT_GENESIS); checkState(isCoinBase()); // Check block height is in coinbase input script final TransactionInput in = this.getInput(0); final ScriptBuilder builder = new ScriptBuilder(); builder.number(height); final byte[] expected = builder.build().program(); final byte[] actual = in.getScriptBytes(); if (actual.length < expected.length) { throw new VerificationException.CoinbaseHeightMismatch("Block height mismatch in coinbase."); } for (int scriptIdx = 0; scriptIdx < expected.length; scriptIdx++) { if (actual[scriptIdx] != expected[scriptIdx]) { throw new VerificationException.CoinbaseHeightMismatch("Block height mismatch in coinbase."); } } }
/** A real coinbase whose script is longer than the height prefix must still pass the check. */
@Test
public void testCoinbaseHeightCheckWithDamagedScript() {
    // Coinbase transaction from block 224,430
    ByteBuffer transactionBytes = ByteBuffer.wrap(ByteUtils.parseHex(
            "01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff3b03ae6c0300044bd7031a0400000000522cfabe6d6d00000000000000b7b8bf0100000068692066726f6d20706f6f6c7365727665726aac1eeeed88ffffffff01e0587597000000001976a91421c0d001728b3feaf115515b7c135e779e9f442f88ac00000000"));
    final int height = 224430;
    final Transaction transaction = TESTNET.getDefaultSerializer().makeTransaction(transactionBytes);
    transaction.checkCoinBaseHeight(height);
}
@Override public V get(Object o) { if (o == null) return null; // null keys are not allowed int i = arrayIndexOfKey(o); return i != -1 ? value(i + 1) : null; }
/** Filtered keys must disappear from the map entirely, including via get(). */
@Test
void someFiltered() {
    array[0] = "1";
    array[1] = "one";
    array[2] = "2";
    array[3] = "two";
    array[4] = "3";
    array[5] = "three";
    Map<String, String> map = builder.filterKeys("1", "3").build(array);
    assertSize(map, 1);
    assertBaseCase(map);
    assertThat(map).containsOnly(
            entry("2", "two")
    );
    assertThat(map).hasToString(
            "UnsafeArrayMap{2=two}"
    );
    assertThat(map.get("1")).isNull();
    assertThat(map.get("2")).isEqualTo("two");
    assertThat(map.get("3")).isNull();
}
/**
 * Transforms every element of the source list into an instance of the target class.
 *
 * @param clazz target type for each transformed element
 * @param srcList source objects; an empty or null list yields an empty list
 * @return a new list of transformed objects, in source order
 */
public static <T> List<T> batchTransform(final Class<T> clazz, List<?> srcList) {
    if (CollectionUtils.isEmpty(srcList)) {
        return Collections.emptyList();
    }
    final List<T> transformed = new ArrayList<>(srcList.size());
    srcList.forEach(src -> transformed.add(transform(clazz, src)));
    return transformed;
}
/** batchTransform with a null target class must raise BeanUtilsException. */
@Test(expected = BeanUtilsException.class)
public void testBatchTransformBeanUtilsException() {
    someList.add(77);
    assertNotNull(BeanUtils.batchTransform(null, someList));
}
/** Returns the first configured origin, or {@code "*"} when no origins are configured. */
public String origin() {
    if (origins.isEmpty()) {
        return "*";
    }
    return origins.iterator().next();
}
/** A single configured origin must be returned verbatim and not count as any-origin. */
@Test
public void origin() {
    final CorsConfig cors = forOrigin("http://localhost:7888").build();
    assertThat(cors.origin(), is(equalTo("http://localhost:7888")));
    assertThat(cors.isAnyOriginSupported(), is(false));
}
/**
 * Encodes an external peer router as a JSON object with its IP address,
 * MAC address, and VLAN id rendered as strings.
 */
@Override
public ObjectNode encode(ExternalPeerRouter router, CodecContext context) {
    checkNotNull(router, "External peer router cannot be null");
    final ObjectNode result = context.mapper().createObjectNode();
    result.put(IP_ADDRESS, router.ipAddress().toString());
    result.put(MAC_ADDRESS, router.macAddress().toString());
    result.put(VLAN_ID, router.vlanId().toString());
    return result;
}
/** Encoded JSON must round-trip all router fields (checked by the custom matcher). */
@Test
public void testExternalPeerRouterEncode() {
    ExternalPeerRouter router = DefaultExternalPeerRouter.builder()
            .ipAddress(IpAddress.valueOf("10.10.10.1"))
            .macAddress(MacAddress.valueOf("11:22:33:44:55:66"))
            .vlanId(VlanId.vlanId("1"))
            .build();
    ObjectNode routerJson = externalPeerRouterCodec.encode(router, context);
    assertThat(routerJson, matchesExternalPeerRouter(router));
}
/**
 * Applies a batch of feature-level updates, collecting a per-feature ApiError.
 *
 * @param updates feature name to requested level
 * @param upgradeTypes per-feature upgrade type; defaults to UPGRADE when absent
 * @param validateOnly when true, no records are emitted — only the validation results
 * @return the records to apply (empty when validating) plus per-feature errors
 */
ControllerResult<Map<String, ApiError>> updateFeatures(
        Map<String, Short> updates,
        Map<String, FeatureUpdate.UpgradeType> upgradeTypes,
        boolean validateOnly
) {
    TreeMap<String, ApiError> results = new TreeMap<>();
    List<ApiMessageAndVersion> records =
            BoundedList.newArrayBacked(MAX_RECORDS_PER_USER_OP);
    for (Entry<String, Short> entry : updates.entrySet()) {
        // Each feature is validated (and possibly recorded) independently.
        results.put(entry.getKey(), updateFeature(entry.getKey(), entry.getValue(),
                upgradeTypes.getOrDefault(entry.getKey(), FeatureUpdate.UpgradeType.UPGRADE), records));
    }
    if (validateOnly) {
        return ControllerResult.of(Collections.emptyList(), results);
    } else {
        // Atomic: either all records are applied or none are.
        return ControllerResult.atomicOf(records, results);
    }
}
/** SAFE_DOWNGRADE of metadata.version that would lose metadata must be rejected. */
@Test
public void testCannotUseSafeDowngradeIfMetadataChanged() {
    FeatureControlManager manager = TEST_MANAGER_BUILDER1.build();
    assertEquals(ControllerResult.of(Collections.emptyList(),
            singletonMap(MetadataVersion.FEATURE_NAME, new ApiError(Errors.INVALID_UPDATE_VERSION,
                    "Invalid metadata.version 4. Refusing to perform the requested downgrade because " +
                            "it might delete metadata information."))),
            manager.updateFeatures(
                    singletonMap(MetadataVersion.FEATURE_NAME, MetadataVersion.IBP_3_3_IV0.featureLevel()),
                    singletonMap(MetadataVersion.FEATURE_NAME, FeatureUpdate.UpgradeType.SAFE_DOWNGRADE),
                    true));
}
/**
 * Creates a mutation detector for the given value using its coder; a null value
 * gets a no-op detector since there is nothing to snapshot.
 */
public static <T> MutationDetector forValueWithCoder(T value, Coder<T> coder) throws CoderException {
    if (value != null) {
        return new CodedValueMutationDetector<>(value, coder);
    }
    return noopMutationDetector();
}
/** Mutating the array after the detector snapshots it must trigger IllegalMutationException. */
@Test
public void testMutatingArray() throws Exception {
    byte[] value = new byte[] {0x1, 0x2, 0x3, 0x4};
    MutationDetector detector = MutationDetectors.forValueWithCoder(value, ByteArrayCoder.of());
    value[0] = 0xa;
    thrown.expect(IllegalMutationException.class);
    detector.verifyUnmodified();
}
/**
 * Copies every JMeter property whose key starts with {@code "mail."} into the
 * given Properties, so custom JavaMail settings reach the mail session.
 *
 * @param props destination properties, mutated in place
 */
protected void addCustomProperties(Properties props) {
    Properties jMeterProperties = JMeterUtils.getJMeterProperties();
    // stringPropertyNames() covers default properties too (like propertyNames() did)
    // but avoids the obsolete Enumeration API and the unchecked cast, and only
    // yields keys whose values are Strings — so getProperty can never return null here.
    for (String key : jMeterProperties.stringPropertyNames()) {
        if (key.startsWith("mail.")) {
            props.put(key, jMeterProperties.getProperty(key));
        }
    }
}
/** Only "mail."-prefixed JMeter properties must be copied into the sampler's properties. */
@Test
public void testPassCustomProperties() {
    createJMeterEnv();
    Properties jMeterProperties = JMeterUtils.getJMeterProperties();
    jMeterProperties.put("prop1.name", "prop1.value");
    jMeterProperties.put("mail.prop2.name", "mail.prop2.value");
    MailReaderSampler sampler = new MailReaderSampler();
    Properties properties = new Properties();
    sampler.addCustomProperties(properties);
    Assertions.assertEquals(1, properties.size());
    Assertions.assertEquals("mail.prop2.value", properties.getProperty("mail.prop2.name"));
}
/**
 * Allocates a collection-typed field on {@code object} sized to the largest
 * collection in the field's injection group, so constants can be injected into
 * every slot. No-op when the field has no group or the group size is unknown.
 */
void allocateCollectionField( Object object, BeanInjectionInfo beanInjectionInfo, String fieldName ) {
  BeanInjectionInfo.Property property = getProperty( beanInjectionInfo, fieldName );
  String groupName = ( property != null ) ? property.getGroupName() : null;
  if ( groupName == null ) {
    return;
  }
  List<BeanInjectionInfo.Property> groupProperties;
  groupProperties = getGroupProperties( beanInjectionInfo, groupName );
  Integer maxGroupSize = getMaxSize( groupProperties, object );
  // not able to get numeric size
  if ( maxGroupSize == null ) {
    return;
  }
  // guaranteed to get at least one field for constant
  allocateCollectionField( property, object, Math.max( 1, maxGroupSize ) );
}
/** Allocating an initially-null array property must create an array of the requested size. */
@Test
public void allocateCollectionField_Property_Array_IntiallyNull() {
  BeanInjector bi = new BeanInjector(null );
  BeanInjectionInfo bii = new BeanInjectionInfo( MetaBeanLevel1.class );
  MetaBeanLevel1 mbl1 = new MetaBeanLevel1();
  mbl1.setSub( new MetaBeanLevel2() );
  BeanInjectionInfo.Property arrayProperty = bii.getProperties().values().stream()
      .filter( p -> p.getName().equals( "FILENAME_ARRAY" ) ).findFirst().orElse( null );
  assertNull( mbl1.getSub().getFilenames() );
  bi.allocateCollectionField( arrayProperty, mbl1.getSub(), 7);
  assertEquals( 7, mbl1.getSub().getFilenames().length );
}
/** Returns the fully qualified domain name of the host, as provided by the host name supplier. */
public String getFQDNHostname() {
    return hostNameSupplier.getFqdnHostName();
}
@Test void testSerialization() { try { // without resolved hostname { TaskManagerLocation original = new TaskManagerLocation( ResourceID.generate(), InetAddress.getByName("1.2.3.4"), 8888); TaskManagerLocation serCopy = InstantiationUtil.clone(original); assertThat(original).isEqualTo(serCopy); } // with resolved hostname { TaskManagerLocation original = new TaskManagerLocation( ResourceID.generate(), InetAddress.getByName("127.0.0.1"), 19871); original.getFQDNHostname(); TaskManagerLocation serCopy = InstantiationUtil.clone(original); assertThat(original).isEqualTo(serCopy); } } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } }
/**
 * Returns the stored exception, reconstructing it reflectively from the recorded
 * type/message/cause metadata when the live instance is absent.
 *
 * @throws IllegalStateException if the exception class cannot be re-instantiated
 */
public Exception getException() {
    if (exception != null) return exception;

    try {
        final Class<? extends Exception> exceptionClass = ReflectionUtils.toClass(getExceptionType());
        if (getExceptionCauseType() != null) {
            // Rebuild the cause first, choosing the constructor based on whether a message was recorded.
            final Class<? extends Exception> exceptionCauseClass = ReflectionUtils.toClass(getExceptionCauseType());
            final Exception exceptionCause = getExceptionCauseMessage() != null
                    ? ReflectionUtils.newInstanceCE(exceptionCauseClass, getExceptionCauseMessage())
                    : ReflectionUtils.newInstanceCE(exceptionCauseClass);
            // The original cause's stack trace was not persisted, so clear the fresh one.
            exceptionCause.setStackTrace(new StackTraceElement[]{});
            return getExceptionMessage() != null
                    ? ReflectionUtils.newInstanceCE(exceptionClass, getExceptionMessage(), exceptionCause)
                    : ReflectionUtils.newInstanceCE(exceptionClass, exceptionCause);
        } else {
            return getExceptionMessage() != null
                    ? ReflectionUtils.newInstanceCE(exceptionClass, getExceptionMessage())
                    : ReflectionUtils.newInstanceCE(exceptionClass);
        }
    } catch (ReflectiveOperationException e) {
        throw new IllegalStateException("Could not reconstruct exception for class " + getExceptionType() + " and message " + getExceptionMessage(), e);
    }
}
/** With the live exception cleared, getException must reconstruct a JobClassNotFoundException. */
@Test
void getExceptionForJobClassNotFoundException() {
    final FailedState failedState = new FailedState("JobRunr message", new JobClassNotFoundException(jobDetails().build()));
    // Force the reflective reconstruction path.
    setInternalState(failedState, "exception", null);

    assertThat(failedState.getException())
            .isInstanceOf(JobClassNotFoundException.class);
}
/**
 * Handles a new-comment event asynchronously, routing it to the post- or
 * page-comment reason publisher; comments matching neither are ignored.
 */
@Async
@EventListener(CommentCreatedEvent.class)
public void onNewComment(CommentCreatedEvent event) {
    Comment comment = event.getComment();
    if (isPostComment(comment)) {
        newCommentOnPostReasonPublisher.publishReasonBy(comment);
    } else if (isPageComment(comment)) {
        newCommentOnPageReasonPublisher.publishReasonBy(comment);
    }
}
/** Post comments must route to the post publisher, page comments to the page publisher. */
@Test
void onNewCommentTest() {
    var comment = mock(Comment.class);
    var spyReasonPublisher = spy(reasonPublisher);
    // post-comment branch
    doReturn(true).when(spyReasonPublisher).isPostComment(eq(comment));
    var event = new CommentCreatedEvent(this, comment);
    spyReasonPublisher.onNewComment(event);
    verify(newCommentOnPostReasonPublisher).publishReasonBy(eq(comment));
    // page-comment branch
    doReturn(false).when(spyReasonPublisher).isPostComment(eq(comment));
    doReturn(true).when(spyReasonPublisher).isPageComment(eq(comment));
    spyReasonPublisher.onNewComment(event);
    verify(newCommentOnPageReasonPublisher).publishReasonBy(eq(comment));
}
/** Forwards a device-state message to the state service, logging it to stats first when enabled. */
void forwardToStateService(DeviceStateServiceMsgProto deviceStateServiceMsg, TbCallback callback) {
    if (statsEnabled) {
        stats.log(deviceStateServiceMsg);
    }
    stateService.onQueueMsg(deviceStateServiceMsg, callback);
}
/** With statsEnabled=false, forwarding must not record the message in stats. */
@Test
public void givenStatsDisabled_whenForwardingActivityMsgToStateService_thenStatsAreNotRecorded() {
    // GIVEN
    ReflectionTestUtils.setField(defaultTbCoreConsumerServiceMock, "stats", statsMock);
    ReflectionTestUtils.setField(defaultTbCoreConsumerServiceMock, "statsEnabled", false);

    var activityMsg = TransportProtos.DeviceActivityProto.newBuilder()
            .setTenantIdMSB(tenantId.getId().getMostSignificantBits())
            .setTenantIdLSB(tenantId.getId().getLeastSignificantBits())
            .setDeviceIdMSB(deviceId.getId().getMostSignificantBits())
            .setDeviceIdLSB(deviceId.getId().getLeastSignificantBits())
            .setLastActivityTime(time)
            .build();

    doCallRealMethod().when(defaultTbCoreConsumerServiceMock).forwardToStateService(activityMsg, tbCallbackMock);

    // WHEN
    defaultTbCoreConsumerServiceMock.forwardToStateService(activityMsg, tbCallbackMock);

    // THEN
    then(statsMock).should(never()).log(activityMsg);
}
/**
 * Updates the poll timer with the latest poll time and restarts it. If the timer
 * had already expired (poll exceeded max.poll.interval.ms), the member is flagged
 * to rejoin the group as a stale member.
 *
 * @param pollMs timestamp of the application poll, in milliseconds
 */
public void resetPollTimer(final long pollMs) {
    pollTimer.update(pollMs);
    if (pollTimer.isExpired()) {
        logger.warn("Time between subsequent calls to poll() was longer than the configured " +
                        "max.poll.interval.ms, exceeded approximately by {} ms. Member {} will rejoin the group now.",
                pollTimer.isExpiredBy(), membershipManager.memberId());
        membershipManager.maybeRejoinStaleMember();
    }
    pollTimer.reset(maxPollIntervalMs);
}
/** Poll-timer expiration must trigger a leave heartbeat; resetPollTimer must rearm the timer. */
@Test
public void testPollTimerExpiration() {
    heartbeatRequestManager = createHeartbeatRequestManager(
            coordinatorRequestManager,
            membershipManager,
            heartbeatState,
            heartbeatRequestState,
            backgroundEventHandler);
    when(membershipManager.shouldSkipHeartbeat()).thenReturn(false);

    // On poll timer expiration, the member should send a last heartbeat to leave the group
    // and notify the membership manager
    time.sleep(DEFAULT_MAX_POLL_INTERVAL_MS);
    assertHeartbeat(heartbeatRequestManager, DEFAULT_HEARTBEAT_INTERVAL_MS);
    verify(membershipManager).transitionToSendingLeaveGroup(true);
    verify(heartbeatState).reset();
    verify(heartbeatRequestState).reset();
    verify(membershipManager).onHeartbeatRequestGenerated();

    when(membershipManager.shouldSkipHeartbeat()).thenReturn(true);
    assertNoHeartbeat(heartbeatRequestManager);
    // Resetting the poll timer rearms it and flags the stale member for rejoin.
    heartbeatRequestManager.resetPollTimer(time.milliseconds());
    assertTrue(pollTimer.notExpired());
    verify(membershipManager).maybeRejoinStaleMember();
    when(membershipManager.shouldSkipHeartbeat()).thenReturn(false);
    assertHeartbeat(heartbeatRequestManager, DEFAULT_HEARTBEAT_INTERVAL_MS);
}
/** Adds a replacement using the REJECT coalesce policy. */
@CanIgnoreReturnValue
public Replacements add(Replacement replacement) {
    return add(replacement, CoalescePolicy.REJECT);
}
/** Adding the exact same replacement twice must be accepted (no exception). */
@Test
public void identicalDuplicatesOK() {
    Replacements replacements = new Replacements();
    replacements.add(Replacement.create(42, 43, "hello"));
    replacements.add(Replacement.create(42, 43, "hello"));
}
/**
 * Fetches the shards of the given indices from the Elasticsearch cat API,
 * resolving node ids to HTTP addresses and retrying the request on failure.
 *
 * @param indices index names to query shards for
 * @return the parsed shard list (entries that fail to convert are skipped)
 * @throws JetException if the request ultimately fails with an IOException
 */
@SuppressFBWarnings("RCN_REDUNDANT_NULLCHECK_WOULD_HAVE_BEEN_A_NPE")
public List<Shard> shards(String... indices) {
    // Map node id -> HTTP address so each shard row can be annotated with its node address.
    Map<String, String> idToAddress = nodes().stream().collect(toMap(Node::getId, Node::getHttpAddress));
    try {
        Request r = new Request("GET", "/_cat/shards/" + String.join(",", indices));
        r.addParameter("format", "json");
        r.addParameter("h", "id,index,shard,prirep,docs,state,ip,node");
        Response res = withRetry(() -> client.performRequest(r), retries);

        try (InputStreamReader reader = new InputStreamReader(res.getEntity().getContent(), UTF_8)) {
            JsonArray array = Json.parse(reader).asArray();
            List<Shard> shards = new ArrayList<>(array.size());
            for (JsonValue value : array) {
                Optional<Shard> shard = convertToShard(value, idToAddress);
                shard.ifPresent(shards::add);
            }
            LOG.log(FINE, "Shards " + shards);
            return shards;
        }
    } catch (IOException e) {
        throw new JetException("Could not get ES shards", e);
    }
}
/** A transient IOException must be retried; the retried call returns the shard list. */
@Test
public void shouldRetryOnShards() throws IOException {
    ElasticCatClient catClient = new ElasticCatClient(restClient, 5);

    Response nodesResponse = response("es2node_nodes.json");
    Response shardsResponse = response("es2node_shards.json");
    // First call fails; subsequent calls return the nodes then the shards payload.
    when(restClient.performRequest(any()))
            .thenThrow(new IOException("Could not connect"))
            .thenReturn(nodesResponse, shardsResponse);

    List<Shard> shards = catClient.shards("my-index");
    assertThat(shards).extracting(Shard::getHttpAddress)
            .containsOnly("127.0.0.1:9200", "127.0.0.1:9201");
}
/**
 * Validates that the given old password matches the stored password of the user.
 * Throws USER_NOT_EXISTS when the user is absent, USER_PASSWORD_FAILED on mismatch.
 */
@VisibleForTesting
void validateOldPassword(Long id, String oldPassword) {
    AdminUserDO user = userMapper.selectById(id);
    if (user == null) {
        throw exception(USER_NOT_EXISTS);
    }
    if (!isPasswordMatch(oldPassword, user.getPassword())) {
        throw exception(USER_PASSWORD_FAILED);
    }
}
/** A non-matching old password must raise USER_PASSWORD_FAILED after one matches() call. */
@Test
public void testValidateOldPassword_passwordFailed() {
    // mock data
    AdminUserDO user = randomAdminUserDO();
    userMapper.insert(user);
    // prepare parameters
    Long id = user.getId();
    String oldPassword = user.getPassword();
    // invoke and expect the service exception
    assertServiceException(() -> userService.validateOldPassword(id, oldPassword), USER_PASSWORD_FAILED);
    // verify the invocation
    verify(passwordEncoder, times(1)).matches(eq(oldPassword), eq(user.getPassword()));
}
/**
 * Converts HTTP/1.x message headers to HTTP/2 headers, filling the pseudo-headers
 * (:path, :scheme, :authority, :method or :status) from the request/response line.
 *
 * @param in the HTTP/1.x message whose headers are converted
 * @param validateHeaders whether the resulting HTTP/2 headers validate entries
 * @return the converted HTTP/2 headers
 */
public static Http2Headers toHttp2Headers(HttpMessage in, boolean validateHeaders) {
    HttpHeaders inHeaders = in.headers();
    final Http2Headers out = new DefaultHttp2Headers(validateHeaders, inHeaders.size());
    if (in instanceof HttpRequest) {
        HttpRequest request = (HttpRequest) in;
        String host = inHeaders.getAsString(HttpHeaderNames.HOST);
        if (isOriginForm(request.uri()) || isAsteriskForm(request.uri())) {
            // Origin-form/asterisk-form URIs carry no authority; use the URI as the path directly.
            out.path(new AsciiString(request.uri()));
            setHttp2Scheme(inHeaders, out);
        } else {
            URI requestTargetUri = URI.create(request.uri());
            out.path(toHttp2Path(requestTargetUri));
            // Take from the request-line if HOST header was empty
            host = isNullOrEmpty(host) ? requestTargetUri.getAuthority() : host;
            setHttp2Scheme(inHeaders, requestTargetUri, out);
        }
        setHttp2Authority(host, out);
        out.method(request.method().asciiName());
    } else if (in instanceof HttpResponse) {
        HttpResponse response = (HttpResponse) in;
        out.status(response.status().codeAsText());
    }

    // Add the HTTP headers which have not been consumed above
    toHttp2Headers(inHeaders, out);
    return out;
}
/** Conversion must drop all TE header values except "trailers". */
@Test
public void stripTEHeadersExcludingTrailers() {
    HttpHeaders inHeaders = new DefaultHttpHeaders();
    inHeaders.add(TE, GZIP);
    inHeaders.add(TE, TRAILERS);
    Http2Headers out = new DefaultHttp2Headers();
    HttpConversionUtil.toHttp2Headers(inHeaders, out);
    assertSame(TRAILERS, out.get(TE));
}
/**
 * Formats a "changes on my issues" notification into an email, or returns
 * {@code null} for notification types this formatter does not handle.
 * Analysis-triggered changes use the single-project layout; user changes may
 * span multiple projects.
 */
@Override
@CheckForNull
public EmailMessage format(Notification notif) {
    if (!(notif instanceof ChangesOnMyIssuesNotification)) {
        return null;
    }
    ChangesOnMyIssuesNotification notification = (ChangesOnMyIssuesNotification) notif;

    if (notification.getChange() instanceof AnalysisChange) {
        // An analysis change always concerns exactly one project.
        checkState(!notification.getChangedIssues().isEmpty(), "changedIssues can't be empty");
        return formatAnalysisNotification(notification.getChangedIssues().keySet().iterator().next(), notification);
    }
    return formatMultiProject(notification);
}
/** User-change emails group same-rule issues on a branch under one "See all N issues" link. */
@Test
public void formats_returns_html_message_for_multiple_issues_of_same_rule_on_same_project_on_branch_when_user_change() {
    String branchName = randomAlphabetic(19);
    Project project = newBranch("1", branchName);
    String ruleName = randomAlphabetic(8);
    String host = randomAlphabetic(15);
    Rule rule = newRandomNotAHotspotRule(ruleName);
    // 2..6 issues of the same rule on the same branch
    List<ChangedIssue> changedIssues = IntStream.range(0, 2 + new Random().nextInt(5))
            .mapToObj(i -> newChangedIssue("issue_" + i, randomValidStatus(), project, rule))
            .collect(toList());
    UserChange userChange = newUserChange();
    when(emailSettings.getServerBaseURL()).thenReturn(host);

    EmailMessage emailMessage = underTest.format(new ChangesOnMyIssuesNotification(userChange, ImmutableSet.copyOf(changedIssues)));

    String expectedHref = host + "/project/issues?id=" + project.getKey()
            + "&branch=" + branchName
            + "&issues=" + changedIssues.stream().map(ChangedIssue::getKey).collect(joining("%2C"));
    String expectedLinkText = "See all " + changedIssues.size() + " issues";
    HtmlFragmentAssert.assertThat(emailMessage.getMessage())
            .hasParagraph().hasParagraph() // skip header
            .hasParagraph(project.getProjectName() + ", " + branchName)
            .hasList("Rule " + ruleName + " - " + expectedLinkText)
            .withLink(expectedLinkText, expectedHref)
            .hasParagraph().hasParagraph() // skip footer
            .noMoreBlock();
}
/**
 * Serializes a Connect value (optionally wrapped in a schema envelope) to JSON bytes.
 *
 * @return the serialized bytes, or {@code null} when both schema and value are null
 * @throws DataException if JSON serialization fails
 */
@Override
public byte[] fromConnectData(String topic, Schema schema, Object value) {
    if (schema == null && value == null) {
        return null;
    }

    // With schemas enabled the payload is wrapped in a {schema, payload} envelope.
    JsonNode jsonValue = config.schemasEnabled() ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value);
    try {
        return serializer.serialize(topic, jsonValue);
    } catch (SerializationException e) {
        throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e);
    }
}
/** A short value must serialize to an int16 envelope with the numeric payload. */
@Test
public void shortToJson() {
    JsonNode converted = parse(converter.fromConnectData(TOPIC, Schema.INT16_SCHEMA, (short) 12));
    validateEnvelope(converted);
    assertEquals(parse("{ \"type\": \"int16\", \"optional\": false }"),
            converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
    assertEquals(12, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).intValue());
}
/**
 * Loads a secret key from the key store and returns its encoded bytes as a String.
 *
 * @param pwdKeyAlias alias of the key entry
 * @param pwdKeyPassword password protecting the key entry
 * @throws RuntimeException if the key cannot be loaded from the key store
 */
public String getPasswordKey(String pwdKeyAlias, char[] pwdKeyPassword) {
    SecretKey passwordKey;
    try {
        passwordKey = (SecretKey) pwdKeyStore.getKey(pwdKeyAlias, pwdKeyPassword);
    } catch (Exception e) {
        // Chain the original exception as the cause instead of flattening it to
        // getCause() text, so callers keep the full stack trace.
        throw new RuntimeException("Unable to load a key from Key Store. Source " + e.getCause(), e);
    }
    // NOTE(review): uses the platform default charset for the byte->String conversion —
    // confirm this matches how the stored key material is interpreted by callers.
    return new String(passwordKey.getEncoded());
}
/** The key loaded via getPasswordKey must match the key stored in the key store file. */
@Test
public void testLoadPassword() throws CertificateException, NoSuchAlgorithmException, IOException, KeyStoreException, InvalidKeyException, InvalidKeySpecException {
    final SecretKey storedSecretKey = storeKeyIntoKeyStoreFile(KEY_PHRASE);
    // Set properties to simulate the server
    final URL serverKeyStoreURL = getClass().getResource(KEYSTORE_JCEKS_RESOURCE_NAME);
    System.setProperty(KeyStoreConstants.PROP_PWD_KS_URL, serverKeyStoreURL.toExternalForm());
    System.setProperty(KeyStoreConstants.PROP_PWD_KS_PWD, KEYSTORE_SERVER_PASSWORD);
    try {
        final KeyStoreHelper serverHelper = new KeyStoreHelper();
        final String passwordKey = serverHelper.getPasswordKey(KEY_ALIAS, KEY_PASSWORD.toCharArray());
        assertThat(passwordKey).isEqualTo(new String(storedSecretKey.getEncoded()));
    } catch (final RuntimeException re) {
        re.printStackTrace();
        fail(re.getMessage());
    }
}
/**
 * Executes a TERMINATE QUERY statement. Persistent queries and TERMINATE ALL /
 * TERMINATE CLUSTER fall through to the default (distributed) handling; transient
 * push queries are terminated locally when running on this node, otherwise the
 * terminate is propagated to remote hosts.
 *
 * @throws KsqlException if the query is not found locally or on any remote host
 */
public static StatementExecutorResponse execute(
        final ConfiguredStatement<TerminateQuery> statement,
        final SessionProperties sessionProperties,
        final KsqlExecutionContext executionContext,
        final ServiceContext serviceContext
) {
    final TerminateQuery terminateQuery = statement.getStatement();

    // do default behaviour for TERMINATE ALL
    if (!terminateQuery.getQueryId().isPresent()) {
        return StatementExecutorResponse.notHandled();
    }

    final QueryId queryId = terminateQuery.getQueryId().get();
    final RemoteHostExecutor remoteHostExecutor = RemoteHostExecutor.create(
            statement,
            sessionProperties,
            executionContext,
            serviceContext.getKsqlClient()
    );

    if (executionContext.getPersistentQuery(queryId).isPresent()
            || statement.getUnMaskedStatementText().equals(
            TerminateCluster.TERMINATE_CLUSTER_STATEMENT_TEXT)) {
        // do default behaviour for terminating persistent queries
        return StatementExecutorResponse.notHandled();
    } else {
        // Check are we running this push query locally, if yes then terminate, otherwise
        // propagate terminate query to other nodes
        if (executionContext.getQuery(queryId).isPresent()) {
            executionContext.getQuery(queryId).get().close();
        } else {
            final boolean wasTerminatedRemotely = remoteHostExecutor.fetchAllRemoteResults().getLeft()
                    .values()
                    .stream()
                    .map(TerminateQueryEntity.class::cast)
                    .map(TerminateQueryEntity::getWasTerminated)
                    .anyMatch(b -> b.equals(true));
            if (!wasTerminatedRemotely) {
                throw new KsqlException(String.format(
                        "Failed to terminate query with query ID: '%s'", queryId));
            }
        }
        return StatementExecutorResponse.handled(Optional.of(
                new TerminateQueryEntity(statement.getMaskedStatementText(), queryId.toString(), true)
        ));
    }
}
// TERMINATE ALL carries no query id, so the custom executor must return an
// empty entity and defer to the default (distributor) handling.
@Test
public void shouldDefaultToDistributorForTerminateAll() {
    // Given:
    final ConfiguredStatement<?> terminatePersistent = engine.configure("TERMINATE ALL;");
    // NOTE(review): this local mock shadows the test's engine field from here
    // on; the field is still reachable below via this.engine.
    final KsqlEngine engine = mock(KsqlEngine.class);

    // When:
    final Optional<KsqlEntity> ksqlEntity = CustomExecutors.TERMINATE_QUERY.execute(
        terminatePersistent,
        mock(SessionProperties.class),
        engine,
        this.engine.getServiceContext()
    ).getEntity();

    // Then:
    assertThat(ksqlEntity, is(Optional.empty()));
}
/** Returns the JobManager CPU request configured via {@code KubernetesConfigOptions.JOB_MANAGER_CPU}. */
public double getJobManagerCPU() {
    final double configuredCpu = flinkConfig.get(KubernetesConfigOptions.JOB_MANAGER_CPU);
    return configuredCpu;
}
// getJobManagerCPU() must surface the value set via
// KubernetesConfigOptions.JOB_MANAGER_CPU; within(...) absorbs double noise.
@Test
void testGetJobManagerCPU() {
    flinkConfig.set(KubernetesConfigOptions.JOB_MANAGER_CPU, JOB_MANAGER_CPU);
    assertThat(kubernetesJobManagerParameters.getJobManagerCPU())
            .isEqualTo(JOB_MANAGER_CPU, within(0.00001));
}
/**
 * Returns an iterator positioned at the current bookmark and makes that fresh
 * iterator the new stored bookmark, so a later call resumes from wherever the
 * returned iterator has been advanced to.
 */
public Iterator<T> getBookmark() {
    LinkedSetIterator toRet = new LinkedSetIterator();
    // Start the new iterator at the element the previous bookmark pointed to.
    toRet.next = this.bookmark.next;
    // Future bookmark requests track the iterator handed out here.
    this.bookmark = toRet;
    return toRet;
}
// A bookmark iterator starts at the head; a second bookmark taken after
// advancing the first must resume where the first one stopped.
@Test(timeout=60000)
public void testGetBookmarkReturnsBookmarkIterator() {
    LOG.info("Test getBookmark returns proper iterator");
    assertTrue(set.addAll(list));
    Iterator<Integer> bookmark = set.getBookmark();
    assertEquals(bookmark.next(), list.get(0));
    // Advance the first bookmark iterator half-way through the list.
    final int numAdvance = list.size()/2;
    for(int i=1; i<numAdvance; i++) {
        bookmark.next();
    }
    // The second bookmark picks up at the midpoint.
    Iterator<Integer> bookmark2 = set.getBookmark();
    assertEquals(bookmark2.next(), list.get(numAdvance));
}
/**
 * Initializes the FileSystemAccess service: configures Hadoop authentication
 * (kerberos keytab login or simple/pseudo), resolves and loads the Hadoop
 * configuration directory, and records the name-node whitelist.
 *
 * @throws ServiceException H01 for a blank keytab/principal, H02 for a failed
 *         keytab login, H09 for an unknown authentication type, H10 when no
 *         configuration directory exists, H11 when the Hadoop configuration
 *         cannot be loaded
 */
@Override
protected void init() throws ServiceException {
    LOG.info("Using FileSystemAccess JARs version [{}]", VersionInfo.getVersion());
    String security = getServiceConfig().get(AUTHENTICATION_TYPE, "simple").trim();
    if (security.equals("kerberos")) {
        // Default keytab is ~/<server-name>.keytab, overridable via config.
        String defaultName = getServer().getName();
        String keytab = System.getProperty("user.home") + "/" + defaultName + ".keytab";
        keytab = getServiceConfig().get(KERBEROS_KEYTAB, keytab).trim();
        if (keytab.length() == 0) {
            throw new ServiceException(FileSystemAccessException.ERROR.H01, KERBEROS_KEYTAB);
        }
        String principal = defaultName + "/localhost@LOCALHOST";
        principal = getServiceConfig().get(KERBEROS_PRINCIPAL, principal).trim();
        if (principal.length() == 0) {
            throw new ServiceException(FileSystemAccessException.ERROR.H01, KERBEROS_PRINCIPAL);
        }
        Configuration conf = new Configuration();
        conf.set(HADOOP_SECURITY_AUTHENTICATION, "kerberos");
        UserGroupInformation.setConfiguration(conf);
        try {
            UserGroupInformation.loginUserFromKeytab(principal, keytab);
        } catch (IOException ex) {
            throw new ServiceException(FileSystemAccessException.ERROR.H02, ex.getMessage(), ex);
        }
        LOG.info("Using FileSystemAccess Kerberos authentication, principal [{}] keytab [{}]", principal, keytab);
    } else if (security.equals("simple")) {
        Configuration conf = new Configuration();
        conf.set(HADOOP_SECURITY_AUTHENTICATION, "simple");
        UserGroupInformation.setConfiguration(conf);
        LOG.info("Using FileSystemAccess simple/pseudo authentication, principal [{}]", System.getProperty("user.name"));
    } else {
        throw new ServiceException(FileSystemAccessException.ERROR.H09, security);
    }
    // Resolve the Hadoop conf dir, falling back to the server config dir.
    String hadoopConfDirProp = getServiceConfig().get(HADOOP_CONF_DIR, getServer().getConfigDir());
    File hadoopConfDir = new File(hadoopConfDirProp).getAbsoluteFile();
    if (!hadoopConfDir.exists()) {
        hadoopConfDir = new File(getServer().getConfigDir()).getAbsoluteFile();
    }
    if (!hadoopConfDir.exists()) {
        throw new ServiceException(FileSystemAccessException.ERROR.H10, hadoopConfDir);
    }
    try {
        serviceHadoopConf = loadHadoopConf(hadoopConfDir);
        fileSystemConf = getNewFileSystemConfiguration();
    } catch (IOException ex) {
        throw new ServiceException(FileSystemAccessException.ERROR.H11, ex.toString(), ex);
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("FileSystemAccess FileSystem configuration:");
        for (Map.Entry entry : serviceHadoopConf) {
            LOG.debug(" {} = {}", entry.getKey(), entry.getValue());
        }
    }
    setRequiredServiceHadoopConf(serviceHadoopConf);
    nameNodeWhitelist = toLowerCase(getServiceConfig().getTrimmedStringCollection(NAME_NODE_WHITELIST));
}
// A whitespace-only kerberos keytab property trims to empty and must fail
// service init with error H01.
@Test
@TestException(exception = ServiceException.class, msgRegExp = "H01.*")
@TestDir
public void noKerberosKeytabProperty() throws Exception {
    String dir = TestDirHelper.getTestDir().getAbsolutePath();
    String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(), SchedulerService.class.getName(), FileSystemAccessService.class.getName()));
    Configuration conf = new Configuration(false);
    conf.set("server.services", services);
    conf.set("server.hadoop.authentication.type", "kerberos");
    conf.set("server.hadoop.authentication.kerberos.keytab", " ");
    Server server = new Server("server", dir, dir, dir, dir, conf);
    server.init();
}
/**
 * Stores the intersection of the named sets into this set.
 *
 * @param names names of the source sets
 * @return the size of the resulting intersection
 */
@Override
public int intersection(String... names) {
    // Blocks on the async implementation.
    return get(intersectionAsync(names));
}
// Intersection must honor per-entry TTLs: entries 2 and 5 carry a 1-second
// TTL and must stop counting towards countIntersection/intersection after expiry.
@Test
public void testIntersection() throws InterruptedException {
    redisson.getKeys().flushall();
    RSetCache<Integer> cache1 = redisson.getSetCache("cache1");
    cache1.add(1);
    cache1.add(2, 1, TimeUnit.SECONDS);
    cache1.add(5, 1, TimeUnit.SECONDS);
    cache1.add(3);

    RSetCache<Integer> cache2 = redisson.getSetCache("cache2");
    cache2.add(4);
    cache2.add(2, 1, TimeUnit.SECONDS);
    cache2.add(5, 1, TimeUnit.SECONDS);
    cache2.add(7);

    assertThat(cache1.countIntersection("cache2")).isEqualTo(2);

    RSetCache<Integer> cache3 = redisson.getSetCache("cache3");
    assertThat(cache3.intersection("cache1", "cache2")).isEqualTo(2);
    assertThat(cache3).containsExactlyInAnyOrder(2, 5);
    cache3.clear();

    // Wait for the TTL'd entries to expire.
    Thread.sleep(1500);

    assertThat(cache1.countIntersection("cache2")).isEqualTo(0);
    assertThat(cache3.intersection("cache1", "cache2")).isEqualTo(0);
    assertThat(cache3).isEmpty();
    assertThat(redisson.getKeys().count()).isEqualTo(2);
    assertThat(redisson.getKeys().getKeys()).containsExactlyInAnyOrder("cache1", "cache2");
}
/**
 * Records a typed key event as a type-ahead prediction and applies it to the
 * terminal, all under the model lock. Does nothing when type-ahead is
 * disabled; resets state while the alternate screen buffer is active; and
 * drops events while out-of-sync has been detected and the user is still
 * typing (suppression lifts after a typing pause).
 */
public void onKeyEvent(@NotNull TypeAheadEvent keyEvent) {
  if (!myTerminalModel.isTypeAheadEnabled()) return;
  myTerminalModel.lock();
  try {
    if (myTerminalModel.isUsingAlternateBuffer()) {
      // Predictions are meaningless in the alternate buffer (full-screen apps).
      resetState();
      return;
    }

    TypeAheadTerminalModel.LineWithCursorX lineWithCursorX = myTerminalModel.getCurrentLineWithCursor();

    long prevTypedTime = myLastTypedTime;
    myLastTypedTime = System.nanoTime();

    // The auto-sync delay adapts to the observed max latency once enough
    // samples have been collected; otherwise use the fixed maximum.
    long autoSyncDelay;
    if (myLatencyStatistics.getSampleSize() >= LATENCY_MIN_SAMPLES_TO_TURN_ON) {
      autoSyncDelay = Math.min(myLatencyStatistics.getMaxLatency(), MAX_TERMINAL_DELAY);
    } else {
      autoSyncDelay = MAX_TERMINAL_DELAY;
    }

    boolean hasTypedRecently = System.nanoTime() - prevTypedTime < autoSyncDelay;
    if (hasTypedRecently) {
      if (myOutOfSyncDetected) {
        // Still typing while out of sync: drop the event until a pause.
        return;
      }
    } else {
      myOutOfSyncDetected = false;
    }
    reevaluatePredictorState(hasTypedRecently);

    updateLeftMostCursorPosition(lineWithCursorX.myCursorX);

    if (myPredictions.isEmpty() && myClearPredictionsDebouncer != null) {
      myClearPredictionsDebouncer.call(); // start a timer that will clear predictions
    }

    TypeAheadPrediction prediction = createPrediction(lineWithCursorX, keyEvent);
    myPredictions.add(prediction);
    applyPredictions();

    logger.debug("Created " + keyEvent.myEventType + " prediction");
  } finally {
    myTerminalModel.unlock();
  }
}
// A Backspace event after typing 'a' must produce exactly one
// Action.RemoveCharacters prediction with from=0 and count=1.
@Test
public void testBackspacePrediction() throws Exception {
    new TestRunner() {
        @Override
        void run() {
            manager.onKeyEvent(TypeAheadEvent.fromChar('a'));
            model.insertString("a");
            actions.clear();

            manager.onKeyEvent(new TypeAheadEvent(TypeAheadEvent.EventType.Backspace));
            boolean found = false;
            for (Action action : actions) {
                if (action instanceof Action.RemoveCharacters) {
                    assertFalse(found); // at most one removal action expected
                    assertEquals(0, ((Action.RemoveCharacters) action).from);
                    assertEquals(1, ((Action.RemoveCharacters) action).count);
                    found = true;
                }
            }
            assertTrue(found);
        }
    }.fillLatencyStats().setIsNotPasswordPrompt().run();
}
/**
 * Convenience overload that builds the aggregated table using a default
 * {@link AggregateParamsFactory}.
 */
public static KTableHolder<GenericKey> build(
    final KGroupedTableHolder groupedTable,
    final TableAggregate aggregate,
    final RuntimeBuildContext buildContext,
    final MaterializedFactory materializedFactory) {
  final AggregateParamsFactory defaultParamsFactory = new AggregateParamsFactory();
  return build(
      groupedTable,
      aggregate,
      buildContext,
      materializedFactory,
      defaultParamsFactory);
}
// Building the aggregate step must create its key serde with the physical
// aggregate schema under the materialization query context.
@Test
public void shouldBuildKeySerdeCorrectlyForAggregate() {
    // When:
    aggregate.build(planBuilder, planInfo);

    // Then:
    verify(buildContext).buildKeySerde(KEY_FORMAT, PHYSICAL_AGGREGATE_SCHEMA, MATERIALIZE_CTX);
}
/** Returns the number of elements. */
@Override
public int size() {
    return size;
}
// A RangeSet constructed with 4 must report size 4.
@Test
public void testSize() {
    RangeSet rs = new RangeSet(4);
    assertEquals(4, rs.size());
}
/** Renders this instance as a compact comma-separated list: id, locType, latOrY, longOrX. */
public String toCompactListString() {
    StringBuilder compact = new StringBuilder();
    compact.append(id).append(COMMA)
           .append(locType).append(COMMA)
           .append(latOrY).append(COMMA)
           .append(longOrX);
    return compact.toString();
}
// An empty input list must render as the empty string (no separators).
@Test
public void toCompactListStringEmptyList() {
    String s = toCompactListString(new ArrayList<>());
    assertEquals("not empty string", "", s);
}
/**
 * Reads truncation/padding configuration from a tokenizer definition file by
 * instantiating a native tokenizer from its contents and querying it via JNI.
 * The native handle is always released afterwards.
 */
public static ModelInfo getModelInfo(Path path) {
    return withContextClassloader(() -> {
        // Hackish solution to read padding/truncation configuration through JNI wrapper directly
        LibUtils.checkStatus();
        var handle = TokenizersLibrary.LIB.createTokenizerFromString(uncheck(() -> Files.readString(path)));
        try {
            return new ModelInfo(
                    TruncationStrategy.fromString(TokenizersLibrary.LIB.getTruncationStrategy(handle)),
                    PaddingStrategy.fromString(TokenizersLibrary.LIB.getPaddingStrategy(handle)),
                    TokenizersLibrary.LIB.getMaxLength(handle),
                    TokenizersLibrary.LIB.getStride(handle),
                    TokenizersLibrary.LIB.getPadToMultipleOf(handle));
        } finally {
            // Always free the native tokenizer handle.
            TokenizersLibrary.LIB.deleteTokenizer(handle);
        }
    });
}
// Model info parsed from the bundled tokenizer must match the known
// truncation/padding settings of that model.
@Test
void provides_model_info() throws IOException {
    var expected = new ModelInfo(ModelInfo.TruncationStrategy.LONGEST_FIRST, ModelInfo.PaddingStrategy.LONGEST, 128, 0, 0);
    var actual = HuggingFaceTokenizer.getModelInfo(decompressModelFile(tmp, "paraphrase-multilingual-mpnet-base-v2"));
    assertEquals(expected, actual);
}
/** Joins the base remoting URL and the given sub-path with a single slash. */
@Override
public String serverUrlFor(String subPath) {
    return baseRemotingURL + "/" + subPath;
}
// serverUrlFor must append the sub-path to the configured service URL.
// NOTE(review): the property is set with a trailing slash while the expected
// URL has a single slash — presumably URLService strips it; confirm there.
@Test
public void shouldReturnServerUrlWithSubpath() {
    new SystemEnvironment().setProperty(SystemEnvironment.SERVICE_URL, BASE_URL + "/");
    assertThat(new URLService().serverUrlFor("someSubPath/xyz"), is(BASE_URL + "/someSubPath/xyz"));
}
/**
 * Builds the output stream for the file under construction: collects
 * fs.s3a.create.header.* options (mandatory and optional) into object
 * headers, validates the remaining mandatory keys, rejects APPEND, requires
 * CREATE or OVERWRITE, and reads the create-performance flag.
 *
 * @throws UnsupportedOperationException if the APPEND flag is set
 * @throws PathIOException if neither CREATE nor OVERWRITE is requested
 */
@Override
public FSDataOutputStream build() throws IOException {
    Path path = getPath();
    final Configuration options = getOptions();
    final Map<String, String> headers = new HashMap<>();
    final Set<String> mandatoryKeys = getMandatoryKeys();
    final Set<String> keysToValidate = new HashSet<>();
    // pick up all headers from the mandatory list and strip them before
    // validating the keys
    String headerPrefix = FS_S3A_CREATE_HEADER + ".";
    final int prefixLen = headerPrefix.length();
    mandatoryKeys.stream().forEach(key -> {
        if (key.startsWith(headerPrefix) && key.length() > prefixLen) {
            headers.put(key.substring(prefixLen), options.get(key));
        } else {
            keysToValidate.add(key);
        }
    });
    rejectUnknownMandatoryKeys(keysToValidate, CREATE_FILE_KEYS, "for " + path);
    // and add any optional headers
    getOptionalKeys().stream()
        .filter(key -> key.startsWith(headerPrefix) && key.length() > prefixLen)
        .forEach(key -> headers.put(key.substring(prefixLen), options.get(key)));
    EnumSet<CreateFlag> flags = getFlags();
    if (flags.contains(CreateFlag.APPEND)) {
        throw new UnsupportedOperationException("Append is not supported");
    }
    if (!flags.contains(CreateFlag.CREATE) && !flags.contains(CreateFlag.OVERWRITE)) {
        throw new PathIOException(path.toString(), "Must specify either create or overwrite");
    }
    final boolean performance = options.getBoolean(Constants.FS_S3A_CREATE_PERFORMANCE, false);
    return callbacks.createFileFromBuilder(
        path,
        getProgress(),
        new CreateFileOptions(flags, isRecursive(), performance, headers));
}
// The create-file builder must reject append() with UnsupportedOperationException.
@Test
public void testAppendForbidden() throws Throwable {
    intercept(UnsupportedOperationException.class,
        () -> build(mkBuilder().append()));
}
/**
 * Refreshes the CPU-allocation-irregularity problem when the metadata set
 * changes size: clears the existing problem, re-adds it when metadata is
 * present, and in that case also removes the poll-interval-timebox problem
 * and deletes its stored notification metadata.
 */
@Override
public void onChange(List<JobRunrMetadata> metadataList) {
    // React only on the first call or when the metadata set changed size.
    if (this.serversWithLongGCCyclesMetadataList == null || this.serversWithLongGCCyclesMetadataList.size() != metadataList.size()) {
        problems.removeProblemsOfType(CpuAllocationIrregularityProblem.PROBLEM_TYPE);
        if (!metadataList.isEmpty()) {
            problems.addProblem(new CpuAllocationIrregularityProblem(metadataList));
            problems.removeProblemsOfType(PollIntervalInSecondsTimeBoxIsTooSmallProblem.PROBLEM_TYPE);
            storageProvider.deleteMetadata(PollIntervalInSecondsTimeBoxIsTooSmallNotification.class.getSimpleName());
        }
        // NOTE(review): the field name refers to "long GC cycles" but it holds
        // CPU-allocation-irregularity metadata here — consider renaming it at
        // its declaration.
        this.serversWithLongGCCyclesMetadataList = metadataList;
    }
}
// New CPU-allocation-irregularity metadata must create the corresponding
// problem and clear the poll-interval-timebox problem plus its stored notification.
@Test
void ifChangesOnCpuAllocationIrregularitiesDetectedThenProblemCreated() {
    final JobRunrMetadata jobRunrMetadata = new JobRunrMetadata(CpuAllocationIrregularityNotification.class.getSimpleName(), "BackgroundJobServer " + UUID.randomUUID(), "23");

    cpuAllocationIrregularityProblemHandler.onChange(asList(jobRunrMetadata));

    verify(problems).addProblem(problemArgumentCaptor.capture());
    assertThat(problemArgumentCaptor.getValue())
            .isInstanceOf(CpuAllocationIrregularityProblem.class)
            .hasFieldOrPropertyWithValue("cpuAllocationIrregularityMetadataSet", asList(jobRunrMetadata));
    verify(problems).removeProblemsOfType(PollIntervalInSecondsTimeBoxIsTooSmallProblem.PROBLEM_TYPE);
    verify(storageProvider).deleteMetadata(PollIntervalInSecondsTimeBoxIsTooSmallNotification.class.getSimpleName());
}
/**
 * Converts a bean to a map containing only the given properties.
 *
 * @param bean       the source bean
 * @param properties names of the properties to copy; when empty, all are copied
 * @return a LinkedHashMap of property name to value
 */
public static Map<String, Object> beanToMap(Object bean, String... properties) {
    int mapSize = 16;
    Editor<String> keyEditor = null;
    if (ArrayUtil.isNotEmpty(properties)) {
        mapSize = properties.length;
        final Set<String> propertiesSet = CollUtil.set(false, properties);
        // Keep only keys listed in properties; the editor maps others to null,
        // which excludes them.
        keyEditor = property -> propertiesSet.contains(property) ? property : null;
    }
    // The properties to copy were given explicitly, so null values are kept.
    return beanToMap(bean, new LinkedHashMap<>(mapSize, 1), false, keyEditor);
}
// LocalDateTime/LocalDate property values must be carried into the map
// unchanged (no conversion to string or Date).
@Test
public void beanToMapWithLocalDateTimeTest() {
    final LocalDateTime now = LocalDateTime.now();
    final SubPerson person = new SubPerson();
    person.setAge(14);
    person.setOpenid("11213232");
    person.setName("测试A11");
    person.setSubName("sub名字");
    person.setDate(now);
    person.setDate2(now.toLocalDate());

    final Map<String, Object> map = BeanUtil.beanToMap(person, false, true);
    assertEquals(now, map.get("date"));
    assertEquals(now.toLocalDate(), map.get("date2"));
}
/**
 * Parses a short relative range such as "5m" or "2h" into a keyword time
 * range ("last 5 minutes" in UTC). The trailing "s" of the unit name is
 * dropped for a quantity of exactly 1. Returns empty for null or
 * unrecognized input.
 */
Optional<TimeRange> parse(final String shortTimerange) {
    if (shortTimerange != null && SHORT_FORMAT_PATTERN.matcher(shortTimerange).matches()) {
        // Split "<number><unit-letter>" into its two parts.
        final String numberPart = shortTimerange.substring(0, shortTimerange.length() - 1);
        final String periodPart = shortTimerange.substring(shortTimerange.length() - 1);
        String longPeriodPart = SHORT_TO_LONG_PERIOD_MAPPING.get(periodPart);
        if (longPeriodPart != null) {
            if ("1".equals(numberPart)) {
                longPeriodPart = longPeriodPart.substring(0, longPeriodPart.length() - 1); //removing last "s"
            }
            return Optional.of(
                    KeywordRange.create(
                            "last " + numberPart + " " + longPeriodPart,
                            "UTC")
            );
        }
    }
    return Optional.empty();
}
// Every supported unit letter maps to the expected keyword range; a quantity
// of 1 drops the plural "s" from the unit name.
@Test
void returnsProperTimeRangeOnGoodInput() {
    assertThat(toTest.parse("12s"))
            .isPresent()
            .contains(KeywordRange.create("last 12 seconds", "UTC"));
    assertThat(toTest.parse("42m"))
            .isPresent()
            .contains(KeywordRange.create("last 42 minutes", "UTC"));
    assertThat(toTest.parse("1h"))
            .isPresent()
            .contains(KeywordRange.create("last 1 hour", "UTC"));
    assertThat(toTest.parse("1d"))
            .isPresent()
            .contains(KeywordRange.create("last 1 day", "UTC"));
    assertThat(toTest.parse("2w"))
            .isPresent()
            .contains(KeywordRange.create("last 2 weeks", "UTC"));
    assertThat(toTest.parse("3M"))
            .isPresent()
            .contains(KeywordRange.create("last 3 months", "UTC"));
    assertThat(toTest.parse("1000y"))
            .isPresent()
            .contains(KeywordRange.create("last 1000 years", "UTC"));
}
/**
 * Maps the current app-session state to an authentication status response:
 * pending (with or without a received session), confirmed, authenticated
 * (OK), cancelled, or aborted (NOK). An abort caused by an invalid
 * verification code is additionally logged remotely with an app-type
 * specific log code.
 */
@Override
public AppResponse process(Flow flow, CheckAuthenticationStatusRequest request){
    switch(appSession.getState()) {
        case "AUTHENTICATION_REQUIRED", "AWAITING_QR_SCAN":
            return new CheckAuthenticationStatusResponse("PENDING", false);
        case "RETRIEVED", "AWAITING_CONFIRMATION":
            return new CheckAuthenticationStatusResponse("PENDING", true);
        case "CONFIRMED":
            return new StatusResponse("PENDING_CONFIRMED");
        case "AUTHENTICATED":
            return new OkResponse();
        case "CANCELLED":
            return new StatusResponse("CANCELLED");
        case "ABORTED":
            if (appSession.getAbortCode().equals("verification_code_invalid")) {
                // Log code differs between the wid_checker app and other app types.
                String logCode = "wid_checker".equals(request.getAppType()) ? "1320" : "1368";
                digidClient.remoteLog(logCode, Map.of(HIDDEN, true));
            }
            return new NokResponse();
        default:
            // Unknown states are treated as still pending.
            return new CheckAuthenticationStatusResponse("PENDING", false);
    }
}
// AUTHENTICATION_REQUIRED must map to PENDING with sessionReceived == false.
@Test
void processAuthenticationRequired(){
    appSession.setState("AUTHENTICATION_REQUIRED");
    AppResponse response = checkAuthenticationStatus.process(flow, request);
    assertTrue(response instanceof CheckAuthenticationStatusResponse);
    assertEquals("PENDING", ((CheckAuthenticationStatusResponse) response).getStatus());
    assertEquals(false, ((CheckAuthenticationStatusResponse) response).isSessionReceived());
}
/**
 * Merges an int value into the stat for the given key. Keys typed LONG are
 * widened and delegated to the long overload. A merged result of 0 removes
 * the entry so the map stays sparse.
 *
 * @return this map, for chaining
 */
public StatMap<K> merge(K key, int value) {
    if (key.getType() == Type.LONG) {
        merge(key, (long) value);
        return this;
    }
    int oldValue = getInt(key);
    int newValue = key.merge(oldValue, value);
    if (newValue == 0) {
        // Zero means "no stat": drop the entry instead of storing it.
        _map.remove(key);
    } else {
        _map.put(key, newValue);
    }
    return this;
}
// Merging a long into a stat that is not typed LONG must be rejected with
// IllegalArgumentException; LONG-typed stats are skipped.
@Test(dataProvider = "allTypeStats", expectedExceptions = IllegalArgumentException.class)
public void dynamicTypeCheckWhenAddLong(MyStats stat) {
    if (stat.getType() == StatMap.Type.LONG) {
        throw new SkipException("Skipping LONG test");
    }
    StatMap<MyStats> statMap = new StatMap<>(MyStats.class);
    statMap.merge(stat, 1L);
}
/**
 * Binds a guard expression AST node to a GuardedByExpression, rejecting a
 * failed (null) binding and raw type literals.
 */
private static GuardedByExpression bind(JCTree.JCExpression exp, BinderContext context) {
    GuardedByExpression expr = BINDER.visit(exp, context);
    checkGuardedBy(expr != null, String.valueOf(exp));
    checkGuardedBy(expr.kind() != Kind.TYPE_LITERAL, "Raw type literal: %s", exp);
    return expr;
}
// Binding "mu" from inside an anonymous class in an instance initializer must
// resolve through the enclosing Test instance (SELECT ... outer$... ... mu).
@Test
public void enclosingBlockScope() {
    assertThat(
            bind(
                "",
                "mu",
                forSourceLines(
                    "threadsafety/Test.java",
                    "package threadsafety;",
                    "import javax.annotation.concurrent.GuardedBy;",
                    "public class Test {",
                    " public final Object mu = new Object();",
                    " @GuardedBy(\"mu\") int x = 1;",
                    " {",
                    " new Object() {",
                    " void f() {",
                    " synchronized (mu) {",
                    " x++;",
                    " }",
                    " }",
                    " };",
                    " }",
                    "}")))
        .isEqualTo("(SELECT (SELECT (THIS) outer$threadsafety.Test) mu)");
}
/**
 * Cancels the periodic execution. When no run is in progress, the runlet is
 * closed and the periodic-execution cancellation callback is invoked
 * immediately; otherwise only the cancelled flag is set.
 *
 * @throws IllegalStateException if no periodic-execution cancellation
 *         callback has been registered yet
 */
@Override
public void cancel() {
    synchronized (monitor) {
        if (!periodicExecutionCancellation.isPresent()) {
            throw new IllegalStateException("setPeriodicExecutionCancellationCallback has not been called before cancel");
        }
        cancelled = true;
        // A run is active: defer cleanup — presumably the running thread
        // performs close/cancellation when it finishes (confirm in the runner).
        if (running) return;
    }
    runlet.close();
    periodicExecutionCancellation.get().run();
}
// Cancelling while a run is in progress must defer runlet.close() and the
// scheduler cancellation until that run completes; a later spurious run is ignored.
@Test
public void testCancelWhileRunning() {
    // halt execution in runlet
    runlet.shouldWaitInRun(true);
    executor.runAsync();
    runlet.waitUntilInRun();
    assertEquals(3, runlet.getRunsStarted());
    assertEquals(2, runlet.getRunsCompleted());
    assertTrue(executor.isExecutionRunning());
    assertFalse(runlet.isClosed());

    // Cancel now
    cancellable.cancel();
    assertTrue(executor.isExecutionRunning());
    assertFalse(runlet.isClosed());

    // Complete the runlet.run(), and verify the close and executor cancellation takes effect
    runlet.shouldWaitInRun(false);
    executor.waitUntilRunCompleted(3);
    assertFalse(executor.isExecutionRunning());
    assertTrue(runlet.isClosed());

    // Ensure a spurious run is ignored.
    executor.runToCompletion(4);
    assertEquals(3, runlet.getRunsStarted());
}
/**
 * Generates the static step parameter definitions by merging four layers in
 * order: global default step params, params injected by the step runtime
 * (template schema), defaults for the step's type, and finally the step
 * definition's own params.
 */
public Map<String, ParamDefinition> generateStaticStepParamDefs(
    WorkflowSummary workflowSummary, Step stepDefinition, StepRuntime stepRuntime) {
  Map<String, ParamDefinition> allParamDefs = new LinkedHashMap<>();

  // Start with default step level params if present
  Map<String, ParamDefinition> globalDefault = defaultParamManager.getDefaultStepParams();
  if (globalDefault != null) {
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        globalDefault,
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.SYSTEM_DEFAULT));
  }

  // Merge in injected params returned by step if present (template schema)
  Map<String, ParamDefinition> injectedParams =
      stepRuntime.injectRuntimeParams(workflowSummary, stepDefinition);
  if (injectedParams != null) {
    maybeOverrideParamType(injectedParams);
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        injectedParams,
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.TEMPLATE_SCHEMA));
  }

  // Merge in params applicable to step type
  Optional<Map<String, ParamDefinition>> defaultStepTypeParams =
      defaultParamManager.getDefaultParamsForType(stepDefinition.getType());
  defaultStepTypeParams.ifPresent(
      stepTypeParams ->
          ParamsMergeHelper.mergeParams(
              allParamDefs,
              stepTypeParams,
              ParamsMergeHelper.MergeContext.stepCreate(ParamSource.SYSTEM_DEFAULT)));

  // Final merge from step definition
  if (stepDefinition.getParams() != null) {
    maybeOverrideParamType(stepDefinition.getParams());
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        stepDefinition.getParams(),
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.DEFINITION));
  }

  return allParamDefs;
}
// Each merge layer (global default, runtime-injected, step-type default, step
// definition) must contribute its distinct parameter to the merged result.
@Test
public void testStaticStepParamMerge() {
    Map<String, ParamDefinition> stepParams = new LinkedHashMap<>();
    when(defaultParamManager.getDefaultStepParams())
        .thenReturn(singletonMap("p1", ParamDefinition.buildParamDefinition("p1", "d1")));
    when(stepRuntime.injectRuntimeParams(any(), any()))
        .thenReturn(singletonMap("p2", ParamDefinition.buildParamDefinition("p2", "d2")));
    when(defaultParamManager.getDefaultParamsForType(any()))
        .thenReturn(
            Optional.of(singletonMap("p3", ParamDefinition.buildParamDefinition("p3", "d3"))));
    ((TypedStep) step).setParams(stepParams);
    stepParams.put("p4", ParamDefinition.buildParamDefinition("p4", "d4"));

    Map<String, ParamDefinition> mergedStepParamDefs =
        paramsManager.generateStaticStepParamDefs(workflowSummary, step, stepRuntime);

    Assert.assertEquals("d1", mergedStepParamDefs.get("p1").asStringParamDef().getValue());
    Assert.assertEquals("d2", mergedStepParamDefs.get("p2").asStringParamDef().getValue());
    Assert.assertEquals("d3", mergedStepParamDefs.get("p3").asStringParamDef().getValue());
    Assert.assertEquals("d4", mergedStepParamDefs.get("p4").asStringParamDef().getValue());
}
/**
 * Parses cruise-config XML content into a CruiseConfig for editing, stamping
 * it with the content's MD5 and marking its origin as the config file.
 */
public CruiseConfig deserializeConfig(String content) throws Exception {
    String md5 = md5Hex(content);
    // NOTE(review): content.getBytes() uses the platform default charset —
    // confirm the XML is expected in the platform encoding here.
    Element element = parseInputStream(new ByteArrayInputStream(content.getBytes()));
    LOGGER.debug("[Config Save] Updating config cache with new XML");
    CruiseConfig configForEdit = classParser(element, BasicCruiseConfig.class, configCache, new GoCipher(), registry, new ConfigReferenceElements()).parse();
    setMd5(configForEdit, md5);
    configForEdit.setOrigins(new FileConfigOrigin());
    return configForEdit;
}
// Pipeline groups must be loaded with their configured group names, in order.
@Test
void shouldLoadPipelinesWithGroupName() throws Exception {
    CruiseConfig config = xmlLoader.deserializeConfig(PIPELINE_GROUPS);
    assertThat(config.getGroups().first().getGroup()).isEqualTo("studios");
    assertThat(config.getGroups().get(1).getGroup()).isEqualTo("perfessionalservice");
}
/**
 * Creates a new Hazelcast client using configuration resolved via the default
 * lookup (a null explicit config).
 */
public static HazelcastInstance newHazelcastClient() {
    return newHazelcastClientInternal(resolveClientConfig(null), null, null, null);
}
// Creating a second client with the same instance name must fail with
// InvalidConfigurationException.
@Test(expected = InvalidConfigurationException.class)
public void testNewHazelcastClient_withSameConfig() {
    String instanceName = randomString();
    ClientConfig config = new ClientConfig();
    config.setInstanceName(instanceName);
    HazelcastClient.newHazelcastClient(config);
    HazelcastClient.newHazelcastClient(config);
}
/** Queries a page of operate logs matching the filters in the request VO. */
@Override
public PageResult<OperateLogDO> getOperateLogPage(OperateLogPageReqVO pageReqVO) {
    return operateLogMapper.selectPage(pageReqVO);
}
// Seeds one fully matching operate log plus six variants that each break one
// filter field, then verifies the page query returns only the matching record.
@Test
public void testGetOperateLogPage_vo() {
    // build an operate log that matches every filter below
    OperateLogDO operateLogDO = RandomUtils.randomPojo(OperateLogDO.class, o -> {
        o.setUserId(2048L);
        o.setBizId(999L);
        o.setType("订单");
        o.setSubType("创建订单");
        o.setAction("修改编号为 1 的用户信息");
        o.setCreateTime(buildTime(2021, 3, 6));
    });
    operateLogMapper.insert(operateLogDO);
    // userId mismatch
    operateLogMapper.insert(cloneIgnoreId(operateLogDO, o -> o.setUserId(1024L)));
    // bizId mismatch
    operateLogMapper.insert(cloneIgnoreId(operateLogDO, o -> o.setBizId(888L)));
    // type mismatch
    operateLogMapper.insert(cloneIgnoreId(operateLogDO, o -> o.setType("退款")));
    // subType mismatch
    operateLogMapper.insert(cloneIgnoreId(operateLogDO, o -> o.setSubType("创建退款")));
    // action mismatch
    operateLogMapper.insert(cloneIgnoreId(operateLogDO, o -> o.setAction("修改编号为 1 退款信息")));
    // createTime mismatch
    operateLogMapper.insert(cloneIgnoreId(operateLogDO, o -> o.setCreateTime(buildTime(2021, 2, 6))));

    // build the query parameters
    OperateLogPageReqVO reqVO = new OperateLogPageReqVO();
    reqVO.setUserId(2048L);
    reqVO.setBizId(999L);
    reqVO.setType("订");
    reqVO.setSubType("订单");
    reqVO.setAction("用户信息");
    reqVO.setCreateTime(buildBetweenTime(2021, 3, 5, 2021, 3, 7));

    // invoke
    PageResult<OperateLogDO> pageResult = operateLogServiceImpl.getOperateLogPage(reqVO);
    // assert: exactly one matching record is returned
    assertEquals(1, pageResult.getTotal());
    assertEquals(1, pageResult.getList().size());
    assertPojoEquals(operateLogDO, pageResult.getList().get(0));
}
/**
 * Registers websocket client metadata for the given bean. When the bean's
 * class carries {@code @ShenyuSpringWebSocketClient} the whole class is
 * handled at once (backwards-compatible path); otherwise each declared
 * method is inspected individually.
 */
@Override
protected void handle(final String beanName, final Object bean) {
    Class<?> clazz = getCorrectedClass(bean);
    final ShenyuSpringWebSocketClient beanShenyuClient = AnnotatedElementUtils.findMergedAnnotation(clazz, getAnnotationType());
    final String superPath = buildApiSuperPath(clazz, beanShenyuClient);
    // Compatible with previous versions
    if (Objects.nonNull(beanShenyuClient)) {
        handleClass(clazz, bean, beanShenyuClient, superPath);
        return;
    }
    final Method[] methods = ReflectionUtils.getUniqueDeclaredMethods(clazz);
    for (Method method : methods) {
        // beanShenyuClient is null on this path; per-method annotations apply.
        handleMethod(bean, clazz, beanShenyuClient, method, superPath);
    }
}
// Smoke test: handling a mock bean name/class must not throw.
@Test
public void testHandle() {
    eventListener.handle("mock", mockClass);
}
/** Sends the record without a completion callback; delegates to the two-arg overload. */
@Override
public Future<RecordMetadata> send(ProducerRecord<K, V> record) {
    return this.send(record, null);
}
// The span created for send() must have kind PRODUCER and the name "send".
@Test
void send_should_set_name() {
    tracingProducer.send(new ProducerRecord<>(TEST_TOPIC, TEST_KEY, TEST_VALUE));
    mockProducer.completeNext();

    MutableSpan producerSpan = spans.get(0);
    assertThat(producerSpan.kind()).isEqualTo(PRODUCER);
    assertThat(producerSpan.name()).isEqualTo("send");
}
/**
 * Performs the job on this worker thread: marks the thread occupied, restores
 * the job's MDC context, runs the job, and maps failures to job-state
 * updates. The thread is always marked idle and the MDC cleared afterwards.
 */
@Override
public void run() {
    try {
        backgroundJobServer.getJobSteward().notifyThreadOccupied();
        MDCMapper.loadMDCContextFromJob(job);
        performJob();
    } catch (Exception e) {
        if (isJobDeletedWhileProcessing(e)) {
            // nothing to do anymore as Job is deleted
            return;
        } else if (isJobServerStopped(e)) {
            updateJobStateToFailedAndRunJobFilters("Job processing was stopped as background job server has stopped", e);
            // re-assert the interrupt status after handling the stop
            Thread.currentThread().interrupt();
        } else if (isJobNotFoundException(e)) {
            updateJobStateToFailedAndRunJobFilters("Job method not found", e);
        } else {
            updateJobStateToFailedAndRunJobFilters("An exception occurred during the performance of the job", e);
        }
    } finally {
        backgroundJobServer.getJobSteward().notifyThreadIdle();
        MDC.clear();
    }
}
// When the job deletes itself while processing, the resulting illegal
// DELETED -> SUCCEEDED state change must be swallowed and logged at info level.
@Test
void onSuccessAfterDeleteTheIllegalJobStateChangeIsCatchedAndLogged() throws Exception {
    Job job = anEnqueuedJob().build();
    mockBackgroundJobRunner(job, jobFromStorage -> jobFromStorage.delete("for testing"));
    BackgroundJobPerformer backgroundJobPerformer = new BackgroundJobPerformer(backgroundJobServer, job);
    final ListAppender<ILoggingEvent> logger = LoggerAssert.initFor(backgroundJobPerformer);

    backgroundJobPerformer.run();

    assertThat(logAllStateChangesFilter.getStateChanges(job)).containsExactly("ENQUEUED->PROCESSING");
    assertThat(logAllStateChangesFilter.onProcessingIsCalled(job)).isTrue();
    assertThat(logAllStateChangesFilter.onProcessingSucceededIsCalled(job)).isTrue();
    assertThat(logger)
            .hasNoErrorLogMessages()
            .hasInfoMessage("Job finished successfully but it was already deleted - ignoring illegal state change from DELETED to SUCCEEDED");
}
/** Test hook: returns the reporters registered on the singleton instance. */
@VisibleForTesting
static List<Reporter> getReporters() {
    return self.reporters;
}
// Default metrics initialization must register exactly two reporters.
@Test
public void defaults() throws Exception {
    String jsonFile = System.getProperty("java.io.tmpdir") + System.getProperty("file.separator") + "TestMetricsOutput.json";
    Configuration conf = MetastoreConf.newMetastoreConf();
    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METRICS_JSON_FILE_LOCATION, jsonFile);
    initializeMetrics(conf);
    Assert.assertEquals(2, Metrics.getReporters().size());
}
/**
 * Scans a term buffer for a contiguous block of complete frames starting at
 * termOffset. Scanning stops at the first uncommitted frame (length &lt;= 0),
 * at a padding frame (which is only consumed when it is the first frame), or
 * when the next aligned frame would cross limitOffset.
 *
 * @return the offset one past the last frame included in the block
 */
public static int scan(final UnsafeBuffer termBuffer, final int termOffset, final int limitOffset) {
    int offset = termOffset;

    while (offset < limitOffset) {
        final int frameLength = frameLengthVolatile(termBuffer, offset);
        if (frameLength <= 0) {
            // Frame not yet committed by the producer.
            break;
        }

        final int alignedFrameLength = align(frameLength, FRAME_ALIGNMENT);

        if (isPaddingFrame(termBuffer, offset)) {
            // Padding is consumed only when it is the very first frame scanned.
            if (termOffset == offset) {
                offset += alignedFrameLength;
            }
            break;
        }

        if (offset + alignedFrameLength > limitOffset) {
            break;
        }

        offset += alignedFrameLength;
    }

    return offset;
}
// A single complete message whose aligned length equals the limit must be
// consumed up to exactly that limit.
@Test
void shouldReadOneMessageOnLimit() {
    final int offset = 0;
    final int messageLength = 50;
    final int alignedMessageLength = BitUtil.align(messageLength, FRAME_ALIGNMENT);
    when(termBuffer.getIntVolatile(lengthOffset(offset))).thenReturn(messageLength);
    when(termBuffer.getShort(typeOffset(offset))).thenReturn((short)HDR_TYPE_DATA);

    final int newOffset = TermBlockScanner.scan(termBuffer, offset, alignedMessageLength);
    assertEquals(alignedMessageLength, newOffset);
}
/**
 * Sets POSIX permissions on a local file, translating failures into the
 * corresponding Alluxio runtime exceptions.
 *
 * @param filePath path of the local file
 * @param perms    permissions in rwx string form, e.g. "rwxr--r--"
 */
public static void changeLocalFilePermission(String filePath, String perms) {
    try {
        Files.setPosixFilePermissions(Paths.get(filePath), PosixFilePermissions.fromString(perms));
    } catch (UnsupportedOperationException e) {
        // The filesystem does not support POSIX permissions (e.g. Windows).
        throw new UnimplementedRuntimeException(e, ErrorType.External);
    } catch (ClassCastException e) {
        throw new InvalidArgumentRuntimeException(e);
    } catch (SecurityException e) {
        throw new PermissionDeniedRuntimeException(e);
    } catch (IOException e) {
        throw new UnknownRuntimeException(e);
    }
}
// Permission changes must be reflected by canRead/canWrite/canExecute, and a
// read-only file must reject being opened for writing.
@Test
public void changeLocalFilePermission() throws IOException {
    // This test only works with normal users - superusers can operate on files whether or not they
    // have the proper permission bits set.
    assumeFalse(System.getProperty("user.name").equals("root"));
    File tempFile = mTestFolder.newFile("perm.txt");
    FileUtils.changeLocalFilePermission(tempFile.getAbsolutePath(), "---------");
    assertFalse(tempFile.canRead() || tempFile.canWrite() || tempFile.canExecute());
    FileUtils.changeLocalFilePermission(tempFile.getAbsolutePath(), "rwxrwxrwx");
    assertTrue(tempFile.canRead() && tempFile.canWrite() && tempFile.canExecute());
    // File deletion should fail, because we don't have write permissions
    FileUtils.changeLocalFilePermission(tempFile.getAbsolutePath(), "r--r--r--");
    assertTrue(tempFile.canRead());
    assertFalse(tempFile.canWrite());
    assertFalse(tempFile.canExecute());
    // expect a file permission error when we open it for writing
    mException.expect(IOException.class);
    @SuppressWarnings({"unused", "resource"})
    FileWriter fw = new FileWriter(tempFile);
    fail("opening a read-only file for writing should have failed");
}
/**
 * Builds the HTTP request for an ENS offchain gateway. The URL template must
 * contain {sender}; when it also contains {data} both are substituted and a
 * GET request is issued, otherwise the data is POSTed as a JSON body.
 *
 * @throws EnsResolutionException for a null/invalid sender, null data, or a
 *         template missing the {sender} parameter
 * @throws JsonProcessingException if the POST body cannot be serialized
 */
protected Request buildRequest(String url, String sender, String data) throws JsonProcessingException {
    if (sender == null || !WalletUtils.isValidAddress(sender)) {
        throw new EnsResolutionException("Sender address is null or not valid");
    }
    if (data == null) {
        throw new EnsResolutionException("Data is null");
    }
    if (!url.contains("{sender}")) {
        throw new EnsResolutionException("Url is not valid, sender parameter is not exist");
    }

    // URL expansion
    String href = url.replace("{sender}", sender).replace("{data}", data);
    Request.Builder builder = new Request.Builder().url(href);

    if (url.contains("{data}")) {
        // {data} was embedded into the URL: a plain GET suffices.
        return builder.get().build();
    } else {
        EnsGatewayRequestDTO requestDTO = new EnsGatewayRequestDTO(data);
        ObjectMapper om = ObjectMapperFactory.getObjectMapper();
        return builder.post(RequestBody.create(om.writeValueAsString(requestDTO), JSON))
                .addHeader("Content-Type", "application/json")
                .build();
    }
}
// A null data argument must be rejected with EnsResolutionException.
@Test
void buildRequestWhenWithoutDataTest() throws IOException {
    String url = "https://example.com/gateway/{sender}.json";
    String sender = "0x226159d592E2b063810a10Ebf6dcbADA94Ed68b8";

    assertThrows(
            EnsResolutionException.class,
            () -> ensResolver.buildRequest(url, sender, null));
}
/**
 * Picks a DateTimeFormatter matching the shape of the given datetime string:
 * length 19 strings are matched against the second-precision pattern set,
 * longer strings against the fractional-second pattern set, and length 14
 * strings use the compact formatter.
 *
 * @return the matching formatter, or null if none matches
 */
public static DateTimeFormatter matchDateTimeFormatter(String dateTime) {
    if (dateTime.length() == 19) {
        for (Map.Entry<Pattern, DateTimeFormatter> entry : YYYY_MM_DD_HH_MM_SS_19_FORMATTER_MAP_ENTRY_SET) {
            if (entry.getKey().matcher(dateTime).matches()) {
                return entry.getValue();
            }
        }
    } else if (dateTime.length() > 19) {
        for (Map.Entry<Pattern, DateTimeFormatter> entry : YYYY_MM_DD_HH_MM_SS_M19_FORMATTER_MAP_ENTRY_SET) {
            if (entry.getKey().matcher(dateTime).matches()) {
                return entry.getValue();
            }
        }
    } else if (dateTime.length() == 14) {
        return YYYY_MM_DD_HH_MM_SS_14_FORMATTER;
    }
    return null;
}
// Test: matchDateTimeFormatter resolves formatters for several 19-char separator styles, the 14-char
// compact form, and fractional-second variants (milli/micro/nano).
@Test public void testMatchDateTimeFormatter() { String datetimeStr = "2020-10-10 10:10:10"; Assertions.assertEquals( "2020-10-10T10:10:10", DateTimeUtils.parse(datetimeStr, DateTimeUtils.matchDateTimeFormatter(datetimeStr)) .toString()); datetimeStr = "2020-10-10T10:10:10"; Assertions.assertEquals( "2020-10-10T10:10:10", DateTimeUtils.parse(datetimeStr, DateTimeUtils.matchDateTimeFormatter(datetimeStr)) .toString()); datetimeStr = "2020/10/10 10:10:10"; Assertions.assertEquals( "2020-10-10T10:10:10", DateTimeUtils.parse(datetimeStr, DateTimeUtils.matchDateTimeFormatter(datetimeStr)) .toString()); datetimeStr = "2020年10月10日 10时10分10秒"; Assertions.assertEquals( "2020-10-10T10:10:10", DateTimeUtils.parse(datetimeStr, DateTimeUtils.matchDateTimeFormatter(datetimeStr)) .toString()); datetimeStr = "2020.10.10 10:10:10"; Assertions.assertEquals( "2020-10-10T10:10:10", DateTimeUtils.parse(datetimeStr, DateTimeUtils.matchDateTimeFormatter(datetimeStr)) .toString()); datetimeStr = "20201010101010"; Assertions.assertEquals( "2020-10-10T10:10:10", DateTimeUtils.parse(datetimeStr, DateTimeUtils.matchDateTimeFormatter(datetimeStr)) .toString()); datetimeStr = "2020-10-10 10:10:10.201"; Assertions.assertEquals( "2020-10-10T10:10:10.201", DateTimeUtils.parse(datetimeStr, DateTimeUtils.matchDateTimeFormatter(datetimeStr)) .toString()); datetimeStr = "2020-10-10 10:10:10.201111"; Assertions.assertEquals( "2020-10-10T10:10:10.201111", DateTimeUtils.parse(datetimeStr, DateTimeUtils.matchDateTimeFormatter(datetimeStr)) .toString()); datetimeStr = "2020-10-10 10:10:10.201111001"; Assertions.assertEquals( "2020-10-10T10:10:10.201111001", DateTimeUtils.parse(datetimeStr, DateTimeUtils.matchDateTimeFormatter(datetimeStr)) .toString()); }
/**
 * Merges the distribution of a base (source) table with that of a derived table.
 *
 * <p>Having a distribution on both sides is only legal under {@code EXCLUDING}; in every
 * other strategy it is rejected. When the derived table defines a distribution it wins,
 * otherwise the source table's distribution (possibly empty) is used.
 *
 * @param mergingStrategy how the derived definition relates to the base one
 * @param sourceTableDistribution distribution of the base table, if any
 * @param derivedTableDistribution distribution of the derived table, if any
 * @return the distribution to use for the merged table
 * @throws ValidationException if both tables define a distribution and the strategy is not EXCLUDING
 */
public Optional<TableDistribution> mergeDistribution(
        MergingStrategy mergingStrategy,
        Optional<TableDistribution> sourceTableDistribution,
        Optional<TableDistribution> derivedTableDistribution) {
    final boolean bothDefined =
            sourceTableDistribution.isPresent() && derivedTableDistribution.isPresent();
    if (bothDefined && mergingStrategy != MergingStrategy.EXCLUDING) {
        throw new ValidationException(
                "The base table already has a distribution defined. You might want to specify "
                        + "EXCLUDING DISTRIBUTION.");
    }
    // Derived (new) definition takes precedence; fall back to the source otherwise.
    return derivedTableDistribution.isPresent()
            ? derivedTableDistribution
            : sourceTableDistribution;
}
// Test: INCLUDING strategy with distributions defined on both base and derived table must fail validation.
@Test void mergeIncludingDistributionFailsOnDuplicate() { Optional<TableDistribution> sourceDistribution = Optional.of(TableDistribution.ofHash(Collections.singletonList("a"), 3)); Optional<TableDistribution> derivedDistribution = Optional.of(TableDistribution.ofHash(Collections.singletonList("b"), 3)); assertThatThrownBy( () -> util.mergeDistribution( MergingStrategy.INCLUDING, sourceDistribution, derivedDistribution)) .isInstanceOf(ValidationException.class) .hasMessage( "The base table already has a distribution defined. You might want " + "to specify EXCLUDING DISTRIBUTION."); }
// Wraps the given futures in a ConjunctFuture that completes (possibly exceptionally) once every input
// future has completed.
public static ConjunctFuture<Void> completeAll( Collection<? extends CompletableFuture<?>> futuresToComplete) { return new CompletionConjunctFuture(futuresToComplete); }
// Test: completeAll tracks per-future completion counts and, after both inputs fail, completes
// exceptionally with one failure as the cause and the other attached as a suppressed exception.
@Test void testCompleteAllExceptional() { final CompletableFuture<String> inputFuture1 = new CompletableFuture<>(); final CompletableFuture<Integer> inputFuture2 = new CompletableFuture<>(); final List<CompletableFuture<?>> futuresToComplete = Arrays.asList(inputFuture1, inputFuture2); final FutureUtils.ConjunctFuture<Void> completeFuture = FutureUtils.completeAll(futuresToComplete); assertThat(completeFuture).isNotDone(); assertThat(completeFuture.getNumFuturesCompleted()).isZero(); assertThat(completeFuture.getNumFuturesTotal()).isEqualTo(futuresToComplete.size()); final FlinkException testException1 = new FlinkException("Test exception 1"); inputFuture1.completeExceptionally(testException1); assertThat(completeFuture).isNotDone(); assertThat(completeFuture.getNumFuturesCompleted()).isOne(); final FlinkException testException2 = new FlinkException("Test exception 2"); inputFuture2.completeExceptionally(testException2); assertThat(completeFuture.getNumFuturesCompleted()).isEqualTo(2); assertThatFuture(completeFuture) .eventuallyFailsWith(ExecutionException.class) .withCauseInstanceOf(FlinkException.class) .extracting(Throwable::getCause) .satisfies( e -> { final Throwable[] actualSuppressedExceptions = e.getSuppressed(); final FlinkException expectedSuppressedException = e.equals(testException1) ? testException2 : testException1; assertThat(actualSuppressedExceptions) .containsExactly(expectedSuppressedException); }); }
// Static factory: creates a Materialized backed by the given DslStoreSuppliers; rejects null.
public static <K, V, S extends StateStore> Materialized<K, V, S> as(final DslStoreSuppliers storeSuppliers) { Objects.requireNonNull(storeSuppliers, "store type can't be null"); return new Materialized<>(storeSuppliers); }
// Test: a store name longer than the 249-character limit must be rejected by Materialized.as
// with a TopologyException carrying the exact error message.
@Test
public void shouldThrowTopologyExceptionIfStoreNameExceedsMaxAllowedLength() {
    // Build a name exactly one character over the limit.
    final int maxNameLength = 249;
    final StringBuilder invalidStoreNameBuilder = new StringBuilder(maxNameLength + 1);
    for (int i = 0; i < maxNameLength + 1; i++) {
        invalidStoreNameBuilder.append('a');
    }
    final String invalidStoreName = invalidStoreNameBuilder.toString();

    final TopologyException e = assertThrows(TopologyException.class,
        () -> Materialized.as(invalidStoreName));

    // assertEquals takes (expected, actual); the original call had the arguments reversed,
    // which produces a misleading "expected X but was Y" message on failure.
    assertEquals("Invalid topology: Name is illegal, it can't be longer than " + maxNameLength +
            " characters, name: " + invalidStoreName,
        e.getMessage());
}
public static <T> List<T> subtractToList(Collection<T> coll1, Collection<T> coll2) { if (isEmpty(coll1)) { return ListUtil.empty(); } if (isEmpty(coll2)) { return ListUtil.list(true, coll1); } //将被交数用链表储存,防止因为频繁扩容影响性能 final List<T> result = new LinkedList<>(); Set<T> set = new HashSet<>(coll2); for (T t : coll1) { if (false == set.contains(t)) { result.add(t); } } return result; }
// Test: subtractToList removes coll2's elements from coll1, leaving only 1L.
@Test public void subtractToListTest() { final List<Long> list1 = Arrays.asList(1L, 2L, 3L); final List<Long> list2 = Arrays.asList(2L, 3L); final List<Long> result = CollUtil.subtractToList(list1, list2); assertEquals(1, result.size()); assertEquals(1L, (long) result.get(0)); }
// Dispatches a ReplicaMigrationEvent to the wrapped MigrationListener: sentinel partition ids signal
// process start/finish; any other id reports per-replica success or failure.
@Override public void onEvent(ReplicaMigrationEvent event) { switch (event.getPartitionId()) { case MIGRATION_STARTED_PARTITION_ID: migrationListener.migrationStarted(event.getMigrationState()); break; case MIGRATION_FINISHED_PARTITION_ID: migrationListener.migrationFinished(event.getMigrationState()); break; default: if (event.isSuccess()) { migrationListener.replicaMigrationCompleted(event); } else { migrationListener.replicaMigrationFailed(event); } } }
// Test: a successful replica event with a regular partition id triggers only replicaMigrationCompleted.
@Test public void test_migrationCompleted() { MigrationState migrationSchedule = new MigrationStateImpl(); ReplicaMigrationEvent event = new ReplicaMigrationEventImpl(migrationSchedule, 0, 0, null, null, true, 0L); adapter.onEvent(event); verify(listener, never()).migrationStarted(any(MigrationState.class)); verify(listener, never()).migrationFinished(any(MigrationState.class)); verify(listener, never()).replicaMigrationFailed(any(ReplicaMigrationEvent.class)); verify(listener).replicaMigrationCompleted(event); }
// Static factory: builds a VLAN priority code point (PCP) match criterion for the given value.
public static Criterion matchVlanPcp(byte vlanPcp) { return new VlanPcpCriterion(vlanPcp); }
// Test: Criteria.matchVlanPcp yields a VLAN_PCP criterion carrying the given priority.
@Test public void testMatchVlanPcpMethod() { Criterion matchVlanPcp = Criteria.matchVlanPcp(vlanPcp1); VlanPcpCriterion vlanPcpCriterion = checkAndConvert(matchVlanPcp, Criterion.Type.VLAN_PCP, VlanPcpCriterion.class); assertThat(vlanPcpCriterion.priority(), is(equalTo(vlanPcp1))); }
// Returns a lazy Iterable over the children's measures for the given metric, skipping children without a
// value (nulls filtered out); the metric is validated as a declared input/output metric first.
@Override public Iterable<Measure> getChildrenMeasures(String metric) { validateInputMetric(metric); return () -> internalComponent.getChildren().stream() .map(new ComponentToMeasure(metricRepository.getByKey(metric))) .map(ToMeasureAPI.INSTANCE) .filter(Objects::nonNull) .iterator(); }
// Test: reading children measures of a metric declared as the component's output metric must not fail.
@Test public void not_fail_to_get_children_measures_on_output_metric() { measureRepository.addRawMeasure(FILE_1_REF, INT_METRIC_KEY, newMeasureBuilder().create(10)); MeasureComputerContextImpl underTest = newContext(PROJECT_REF, NCLOC_KEY, INT_METRIC_KEY); assertThat(underTest.getChildrenMeasures(INT_METRIC_KEY)).hasSize(1); assertThat(underTest.getChildrenMeasures(INT_METRIC_KEY)).extracting("intValue").containsOnly(10); }
static BigInteger toUnsignedBigInteger(long input) { // if the value is less than the max unsigned, avoid doing conversion to avoid performance impact if (input >= 0L) { return BigInteger.valueOf(input); } else { int upper = (int) (input >>> 32); int lower = (int) input; return BigInteger.valueOf(Integer.toUnsignedLong(upper)).shiftLeft(32).add(BigInteger.valueOf(Integer.toUnsignedLong(lower))); } }
// Test: toUnsignedBigInteger maps 0, small positives, -1 (max unsigned 64-bit) and a high-bit pattern
// to their unsigned decimal representations.
@Test public void longToUnsignedBigIntegerConversion() { Assertions.assertEquals("0", toUnsignedBigInteger(0).toString()); Assertions.assertEquals(MAX_UNSIGNED_LONG, toUnsignedBigInteger(-1).toString()); Assertions.assertEquals(String.valueOf(Long.MAX_VALUE), toUnsignedBigInteger(Long.MAX_VALUE).toString()); Assertions.assertEquals("10", toUnsignedBigInteger(10L).toString()); // equivalent of lower 32 bits all set to 0 and upper 32 bits alternating 10 Assertions.assertEquals("12297829379609722880", toUnsignedBigInteger(-6148914694099828736L).toString()); }
// Returns a description of this topology; synchronized so concurrent builder mutation cannot race the snapshot.
public synchronized TopologyDescription describe() { return internalTopologyBuilder.describe(); }
// Test: an anonymously-materialized count on a grouped table keeps the expected two-subtopology
// description and creates two persistent (RocksDB) state stores.
@Test public void tableAnonymousMaterializedCountShouldPreserveTopologyStructure() { final StreamsBuilder builder = new StreamsBuilder(); builder.table("input-topic") .groupBy((key, value) -> null) .count(Materialized.<Object, Long, KeyValueStore<Bytes, byte[]>>with(null, Serdes.Long()) .withStoreType(Materialized.StoreType.ROCKS_DB)); final Topology topology = builder.build(); final TopologyDescription describe = topology.describe(); assertEquals( "Topologies:\n" + " Sub-topology: 0\n" + " Source: KSTREAM-SOURCE-0000000001 (topics: [input-topic])\n" + " --> KTABLE-SOURCE-0000000002\n" + " Processor: KTABLE-SOURCE-0000000002 (stores: [input-topic-STATE-STORE-0000000000])\n" + " --> KTABLE-SELECT-0000000003\n" + " <-- KSTREAM-SOURCE-0000000001\n" + " Processor: KTABLE-SELECT-0000000003 (stores: [])\n" + " --> KSTREAM-SINK-0000000005\n" + " <-- KTABLE-SOURCE-0000000002\n" + " Sink: KSTREAM-SINK-0000000005 (topic: KTABLE-AGGREGATE-STATE-STORE-0000000004-repartition)\n" + " <-- KTABLE-SELECT-0000000003\n" + "\n" + " Sub-topology: 1\n" + " Source: KSTREAM-SOURCE-0000000006 (topics: [KTABLE-AGGREGATE-STATE-STORE-0000000004-repartition])\n" + " --> KTABLE-AGGREGATE-0000000007\n" + " Processor: KTABLE-AGGREGATE-0000000007 (stores: [KTABLE-AGGREGATE-STATE-STORE-0000000004])\n" + " --> none\n" + " <-- KSTREAM-SOURCE-0000000006\n" + "\n", describe.toString() ); topology.internalTopologyBuilder.setStreamsConfig(streamsConfig); final ProcessorTopology processorTopology = topology.internalTopologyBuilder.setApplicationId("test").buildTopology(); // one for ktable, and one for count operation assertThat(processorTopology.stateStores().size(), is(2)); // ktable store is rocksDB (default) assertThat(processorTopology.stateStores().get(0).persistent(), is(true)); // count store is rocksDB assertThat(processorTopology.stateStores().get(1).persistent(), is(true)); }
// Merges properties parsed from the paragraph text over this interpreter's own properties, then pushes the
// result onto every RemoteInterpreter of the session. Fails with ERROR if a remote interpreter is already
// opened (its properties can no longer change) or if parsing the text as properties throws IOException.
@Override public InterpreterResult interpret(String st, InterpreterContext context) throws InterpreterException { try { Properties finalProperties = new Properties(); finalProperties.putAll(this.properties); Properties updatedProperties = new Properties(); updatedProperties.load(new StringReader(st)); finalProperties.putAll(updatedProperties); LOGGER.debug("Properties for Session: {}:{}", sessionId, finalProperties); List<Interpreter> interpreters = interpreterSetting.getInterpreterGroup(interpreterGroupId).get(sessionId); for (Interpreter intp : interpreters) { // only check the RemoteInterpreter, ConfInterpreter itself will be ignored here. if (intp instanceof RemoteInterpreter) { RemoteInterpreter remoteInterpreter = (RemoteInterpreter) intp; if (remoteInterpreter.isOpened()) { return new InterpreterResult(InterpreterResult.Code.ERROR, "Can not change interpreter session properties after this session is started"); } remoteInterpreter.setProperties(finalProperties); } } return new InterpreterResult(InterpreterResult.Code.SUCCESS); } catch (IOException e) { LOGGER.error("Fail to update interpreter setting", e); return new InterpreterResult(InterpreterResult.Code.ERROR, ExceptionUtils.getStackTrace(e)); } }
// Test: session conf properties are merged into the sibling RemoteInterpreter, and a second update after
// the interpreter is opened is rejected with ERROR.
@Test void testUserSessionConfInterpreter() throws InterpreterException { InterpreterSetting mockInterpreterSetting = mock(InterpreterSetting.class); ManagedInterpreterGroup mockInterpreterGroup = mock(ManagedInterpreterGroup.class); when(mockInterpreterSetting.getInterpreterGroup("group_1")).thenReturn(mockInterpreterGroup); Properties properties = new Properties(); properties.setProperty("property_1", "value_1"); properties.setProperty("property_2", "value_2"); SessionConfInterpreter confInterpreter = new SessionConfInterpreter( properties, "session_1", "group_1", mockInterpreterSetting); RemoteInterpreter remoteInterpreter = new RemoteInterpreter(properties, "session_1", "clasName", "user1", ZeppelinConfiguration.load()); List<Interpreter> interpreters = new ArrayList<>(); interpreters.add(confInterpreter); interpreters.add(remoteInterpreter); when(mockInterpreterGroup.get("session_1")).thenReturn(interpreters); InterpreterResult result = confInterpreter.interpret("property_1\tupdated_value_1\nproperty_3\tvalue_3", mock(InterpreterContext.class)); assertEquals(InterpreterResult.Code.SUCCESS, result.code); assertEquals(3, remoteInterpreter.getProperties().size()); assertEquals("updated_value_1", remoteInterpreter.getProperty("property_1")); assertEquals("value_2", remoteInterpreter.getProperty("property_2")); assertEquals("value_3", remoteInterpreter.getProperty("property_3")); remoteInterpreter.setOpened(true); result = confInterpreter.interpret("property_1\tupdated_value_1\nproperty_3\tvalue_3", mock(InterpreterContext.class)); assertEquals(InterpreterResult.Code.ERROR, result.code); }
// Frontend command execution task: decodes the inbound packet and executes the command; Exceptions are
// processed in place and Errors are wrapped as RuntimeException. The finally block (order matters) clears
// the query context, closes execution resources (collecting SQLExceptions from the close), flushes the
// channel if the command produced output, reports closed-connection exceptions, fires WriteCompleteEvent,
// and releases the (possibly composite) ByteBuf. SQL-show MDC logging is set up/torn down around the call.
@Override public void run() { boolean isNeedFlush = false; boolean sqlShowEnabled = ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getProps().getValue(ConfigurationPropertyKey.SQL_SHOW); try { if (sqlShowEnabled) { fillLogMDC(); } isNeedFlush = executeCommand(context, databaseProtocolFrontendEngine.getCodecEngine().createPacketPayload((ByteBuf) message, context.channel().attr(CommonConstants.CHARSET_ATTRIBUTE_KEY).get())); // CHECKSTYLE:OFF } catch (final Exception ex) { // CHECKSTYLE:ON processException(ex); // CHECKSTYLE:OFF } catch (final Error error) { // CHECKSTYLE:ON processException(new RuntimeException(error)); } finally { connectionSession.clearQueryContext(); Collection<SQLException> exceptions = Collections.emptyList(); try { connectionSession.getDatabaseConnectionManager().closeExecutionResources(); } catch (final BackendConnectionException ex) { exceptions = ex.getExceptions().stream().filter(SQLException.class::isInstance).map(SQLException.class::cast).collect(Collectors.toList()); } if (isNeedFlush) { context.flush(); } processClosedExceptions(exceptions); context.pipeline().fireUserEventTriggered(new WriteCompleteEvent()); if (sqlShowEnabled) { clearLogMDC(); } if (message instanceof CompositeByteBuf) { releaseCompositeByteBuf((CompositeByteBuf) message); } ((ByteBuf) message).release(); } }
// Test: a query command executor result forces a channel flush, query data write-out, executor close and
// connection resource cleanup.
@Test void assertRunNeedFlushByTrue() throws SQLException, BackendConnectionException { when(queryCommandExecutor.execute()).thenReturn(Collections.singleton(databasePacket)); when(engine.getCommandExecuteEngine().getCommandPacket(payload, commandPacketType, connectionSession)).thenReturn(commandPacket); when(engine.getCommandExecuteEngine().getCommandExecutor(commandPacketType, commandPacket, connectionSession)).thenReturn(queryCommandExecutor); when(engine.getCommandExecuteEngine().getCommandPacketType(payload)).thenReturn(commandPacketType); when(engine.getCodecEngine().createPacketPayload(message, StandardCharsets.UTF_8)).thenReturn(payload); CommandExecutorTask actual = new CommandExecutorTask(engine, connectionSession, handlerContext, message); actual.run(); verify(handlerContext).flush(); verify(engine.getCommandExecuteEngine()).writeQueryData(handlerContext, databaseConnectionManager, queryCommandExecutor, 1); verify(queryCommandExecutor).close(); verify(databaseConnectionManager).closeExecutionResources(); }
// Rewrites the request path using the rule's regex/replace, honoring the rollout percentage (default 100
// for backward compatibility). Placeholder-style replacements ("{...}") go through PathMatchUtils; plain
// ones through String.replaceAll. When rewriteMetaData is set, metadata is re-resolved for the rewritten
// URI (the previous metadata is kept under OLD_CONTEXT_PATH_META_DATA) and the context's RPC type is set
// from the new metadata when enabled, falling back to HTTP otherwise. The rewritten path is stored under
// REWRITE_URI before the chain continues; with no configured handle the chain just proceeds.
@Override protected Mono<Void> doExecute(final ServerWebExchange exchange, final ShenyuPluginChain chain, final SelectorData selector, final RuleData rule) { String handle = rule.getHandle(); RewriteHandle rewriteHandle = RewritePluginDataHandler.CACHED_HANDLE.get().obtainHandle(CacheKeyUtils.INST.getKey(rule)); if (Objects.isNull(rewriteHandle)) { LOG.error("uri rewrite rule can not configuration:{}", handle); return chain.execute(exchange); } String rewriteUri = this.getRawPath(exchange); // the default percentage compatible with older versions is 100 final Integer percentage = Optional.ofNullable(rewriteHandle.getPercentage()).orElse(100); if (StringUtils.isNoneBlank(rewriteHandle.getRegex(), rewriteHandle.getReplace()) && ThreadLocalRandom.current().nextInt(100) < percentage) { rewriteUri = rewriteHandle.getReplace().contains("{") ? PathMatchUtils.replaceAll(rewriteHandle.getReplace(), rewriteHandle.getRegex().substring(rewriteHandle.getRegex().indexOf("{")), rewriteUri.substring(rewriteHandle.getRegex().indexOf("{") + 1)) : rewriteUri.replaceAll(rewriteHandle.getRegex(), rewriteHandle.getReplace()); Map<String, Object> attributes = exchange.getAttributes(); if (Optional.ofNullable(rewriteHandle.getRewriteMetaData()).orElse(false)) { // when the rewritten uri crosses plugins, this is necessary final String contextPath = Optional.ofNullable((String) exchange.getAttribute(Constants.REWRITE_CONTEXT_PATH)) .orElseGet(() -> exchange.getAttribute(Constants.CONTEXT_PATH)); MetaData metaData = MetaDataCache.getInstance().obtain(contextPath + rewriteUri); Optional.ofNullable(exchange.getAttribute(Constants.META_DATA)) .ifPresent(metadata -> attributes.put(Constants.OLD_CONTEXT_PATH_META_DATA, metadata)); if (Objects.nonNull(metaData)) { attributes.put(Constants.META_DATA, metaData); } ShenyuContext context = exchange.getAttribute(Constants.CONTEXT); assert context != null; if (Objects.nonNull(metaData) && Boolean.TRUE.equals(metaData.getEnabled())) { 
context.setRpcType(metaData.getRpcType()); } else { context.setRpcType(RpcTypeEnum.HTTP.getName()); } } attributes.put(Constants.REWRITE_URI, rewriteUri); } return chain.execute(exchange); }
// Test: a regex "test" -> "rewrite" rule rewrites the path and exposes it via the REWRITE_URI attribute.
@Test public void shouldReturnNewURIForRewritePlugin() { RuleData data = new RuleData(); data.setHandle("{\"regex\":\"test\",\"replace\":\"rewrite\"}"); RewriteHandle rewriteHandle = GsonUtils.getGson().fromJson(data.getHandle(), RewriteHandle.class); RewritePluginDataHandler.CACHED_HANDLE.get().cachedHandle(CacheKeyUtils.INST.getKey(data), rewriteHandle); when(chain.execute(exchange)).thenReturn(Mono.empty()); SelectorData selectorData = mock(SelectorData.class); StepVerifier.create(rewritePlugin.doExecute(exchange, chain, selectorData, data)).expectSubscription().verifyComplete(); assertEquals("/shenyu/rewrite", exchange.getAttributes().get(Constants.REWRITE_URI)); }
// Applies the configured namespace to the message topic, then delegates the oneway send to the producer impl.
@Override public void sendOneway(Message msg) throws MQClientException, RemotingException, InterruptedException { msg.setTopic(withNamespace(msg.getTopic())); this.defaultMQProducerImpl.sendOneway(msg); }
// Test: sendOneway variants (plain, with MessageQueue, with selector) delegate without error on a mocked
// producer impl.
@Test public void assertSendOneway() throws RemotingException, InterruptedException, MQClientException, NoSuchFieldException, IllegalAccessException { setDefaultMQProducerImpl(); producer.sendOneway(message); MessageQueue mq = mock(MessageQueue.class); producer.sendOneway(message, mq); MessageQueueSelector selector = mock(MessageQueueSelector.class); producer.sendOneway(message, selector, 1); }
// Runs every registered PostMeasuresComputationCheck extension against a single shared context.
@Override public void execute(ComputationStep.Context context) { PostMeasuresComputationCheck.Context extensionContext = new ContextImpl(); for (PostMeasuresComputationCheck extension : extensions) { extension.onCheck(extensionContext); } }
// Test: all registered checks are invoked, in registration order.
@Test public void execute_extensions() { PostMeasuresComputationCheck check1 = mock(PostMeasuresComputationCheck.class); PostMeasuresComputationCheck check2 = mock(PostMeasuresComputationCheck.class); newStep(check1, check2).execute(new TestComputationStepContext()); InOrder inOrder = inOrder(check1, check2); inOrder.verify(check1).onCheck(any(Context.class)); inOrder.verify(check2).onCheck(any(Context.class)); }
// Returns a copy of this backoff configuration with the given exponent; rejects non-positive values.
public FluentBackoff withExponent(double exponent) { checkArgument(exponent > 0, "exponent %s must be greater than 0", exponent); return new FluentBackoff( exponent, initialBackoff, maxBackoff, maxCumulativeBackoff, maxRetries, throttledTimeCounter); }
// Test: a negative exponent is rejected with a descriptive IllegalArgumentException.
@Test public void testInvalidExponent() { thrown.expect(IllegalArgumentException.class); thrown.expectMessage("exponent -2.0 must be greater than 0"); defaultBackoff.withExponent(-2.0); }
// Creates a topic with exactly the given (non-empty) name in this manager's project, bypassing the usual
// name prefixing; the manager must still be usable.
public TopicName createTopicWithoutPrefix(String topicName) { checkArgument(!topicName.isEmpty(), "topicName can not be empty"); checkIsUsable(); TopicName name = TopicName.of(projectId, topicName); return createTopicInternal(name); }
// Test: an empty topic name is rejected with "topicName can not be empty".
@Test public void testCreateTopicWithoutPrefixWithInvalidNameShouldFail() { IllegalArgumentException exception = assertThrows( IllegalArgumentException.class, () -> testManager.createTopicWithoutPrefix("")); assertThat(exception).hasMessageThat().contains("topicName can not be empty"); }
// True when the compilation context's source level is at least Java 15, the release where text blocks
// became a standard language feature.
public static boolean supportsTextBlocks(Context context) { return sourceIsAtLeast(context, 15); }
// Test: with source level 15, text-block support matches whether the runtime itself is on JDK 15+.
@Test public void supportsTextBlocks_conditionallySupported() { Context context = contextWithSourceVersion("15"); assertThat(SourceVersion.supportsTextBlocks(context)) .isEqualTo(Runtime.version().feature() >= 15); }
static <T> T getWildcardMappedObject(final Map<String, T> mapping, final String query) { T value = mapping.get(query); if (value == null) { for (String key : mapping.keySet()) { // Turn the search key into a regex, using all characters but the * as a literal. String regex = Arrays.stream(key.split("\\*")) // split in parts that do not have a wildcard in them .map(Pattern::quote) // each part should be used as a literal (not as a regex or partial regex) .collect(Collectors.joining(".*")); // join all literal parts with a regex representation on the wildcard. if (key.endsWith("*")) { // the 'split' will have removed any trailing wildcard characters. Correct for that. regex += ".*"; } if (query.matches(regex)) { value = mapping.get(key); break; } } } return value; }
// Test: a trailing "/*" wildcard key matches a deeper path lookup.
@Test public void testSubdirWildcard() throws Exception { // Setup test fixture. final Map<String, Object> haystack = Map.of("myplugin/baz/*", new Object()); // Execute system under test. final Object result = PluginServlet.getWildcardMappedObject(haystack, "myplugin/baz/foo.jsp"); // Verify results. assertNotNull(result); }
// Walks the report tree depth-first with a DuplicationVisitor and records the number of loaded
// duplications in the step statistics under "duplications".
@Override public void execute(ComputationStep.Context context) { DuplicationVisitor visitor = new DuplicationVisitor(); new DepthTraversalTypeAwareCrawler(visitor).visit(treeRootHolder.getReportTreeRoot()); context.getStatistics().add("duplications", visitor.count); }
// Test: a duplication without otherFileRef is loaded as an inner duplicate of the same file.
@Test public void loads_duplication_without_otherFileRef_as_inner_duplication() { reportReader.putDuplications(FILE_2_REF, createDuplication(singleLineTextRange(LINE), createInnerDuplicate(LINE + 1))); TestComputationStepContext context = new TestComputationStepContext(); underTest.execute(context); assertNoDuplication(FILE_1_REF); assertDuplications(FILE_2_REF, singleLineDetailedTextBlock(1, LINE), new InnerDuplicate(singleLineTextBlock(LINE + 1))); assertNbOfDuplications(context, 1); }
// Computes staged predictions for the boosted ensemble: for each sample, accumulates the base value b plus
// shrinkage-weighted tree outputs, so prediction[i][j] is the model's output for sample j using the first
// i+1 trees.
public double[][] test(DataFrame data) { DataFrame x = formula.x(data); int n = x.nrow(); int ntrees = trees.length; double[][] prediction = new double[ntrees][n]; for (int j = 0; j < n; j++) { Tuple xj = x.get(j); double base = b; for (int i = 0; i < ntrees; i++) { base += shrinkage * trees[i].predict(xj); prediction[i][j] = base; } } return prediction; }
// Test: least-squares gradient boosting on the cal_housing dataset reproduces the expected fit error.
@Test public void testCalHousingLS() { test(Loss.ls(), "cal_housing", CalHousing.formula, CalHousing.data, 60581.4183); }
// Indexes rows by their primary-key values: the outer key concatenates the pk column values joined with
// "_" (the `firstUnderline` flag only suppresses the separator before the first pk value), and each entry
// maps UPPERCASE trimmed column name to its Field. Columns are sorted by name first so key order is
// deterministic regardless of the row's field order.
public static Map<String, Map<String, Field>> rowListToMap(List<Row> rowList, List<String> primaryKeyList) { // {value of primaryKey, value of all columns} Map<String, Map<String, Field>> rowMap = new HashMap<>(); for (Row row : rowList) { //ensure the order of column List<Field> rowFieldList = row.getFields().stream() .sorted(Comparator.comparing(Field::getName)) .collect(Collectors.toList()); // {uppercase fieldName : field} Map<String, Field> colsMap = new HashMap<>(); StringBuilder rowKey = new StringBuilder(); boolean firstUnderline = false; for (int j = 0; j < rowFieldList.size(); j++) { Field field = rowFieldList.get(j); if (primaryKeyList.stream().anyMatch(e -> field.getName().equals(e))) { if (firstUnderline && j > 0) { rowKey.append("_"); } rowKey.append(String.valueOf(field.getValue())); firstUnderline = true; } colsMap.put(field.getName().trim().toUpperCase(), field); } rowMap.put(rowKey.toString(), colsMap); } return rowMap; }
// Test: with a single pk column the row key is just that column's value.
@Test public void testRowListToMapWithSinglePk(){ List<String> primaryKeyList = new ArrayList<>(); primaryKeyList.add("id"); List<Row> rows = new ArrayList<>(); Field field = new Field("id", 1, "1"); Row row = new Row(); row.add(field); rows.add(row); Field field2 = new Field("id", 1, "2"); Row row2 = new Row(); row2.add(field2); rows.add(row2); Field field3 = new Field("id", 1, "3"); Row row3 = new Row(); row3.add(field3); rows.add(row3); Map<String, Map<String, Field>> result =DataCompareUtils.rowListToMap(rows,primaryKeyList); Assertions.assertEquals(3, result.size()); Assertions.assertEquals(result.keySet().iterator().next(),"1"); }
// Builds a JSON-string -> Row transform for the given schema (verified as supported by RowJson first),
// tolerating missing or null fields in the input JSON.
public static PTransform<PCollection<String>, PCollection<Row>> withSchema(Schema rowSchema) { RowJson.verifySchemaSupported(rowSchema); return new JsonToRowFn(rowSchema, NullBehavior.ACCEPT_MISSING_OR_NULL); }
// Test: JsonToRow.withSchema parses JSON person records into schema-conformant Rows.
@Test @Category(NeedsRunner.class) public void testParsesRows() throws Exception { Schema personSchema = Schema.builder() .addStringField("name") .addInt32Field("height") .addBooleanField("knowsJavascript") .build(); PCollection<String> jsonPersons = pipeline.apply( "jsonPersons", Create.of( jsonPerson("person1", "80", "true"), jsonPerson("person2", "70", "false"), jsonPerson("person3", "60", "true"), jsonPerson("person4", "50", "false"), jsonPerson("person5", "40", "true"))); PCollection<Row> personRows = jsonPersons.apply(JsonToRow.withSchema(personSchema)).setRowSchema(personSchema); PAssert.that(personRows) .containsInAnyOrder( row(personSchema, "person1", 80, true), row(personSchema, "person2", 70, false), row(personSchema, "person3", 60, true), row(personSchema, "person4", 50, false), row(personSchema, "person5", 40, true)); pipeline.run(); }
// Builds the per-broker ListTransactions request, carrying over producer-id, state and duration filters
// from the configured options.
@Override public ListTransactionsRequest.Builder buildBatchedRequest( int brokerId, Set<AllBrokersStrategy.BrokerKey> keys ) { ListTransactionsRequestData request = new ListTransactionsRequestData(); request.setProducerIdFilters(new ArrayList<>(options.filteredProducerIds())); request.setStateFilters(options.filteredStates().stream() .map(TransactionState::toString) .collect(Collectors.toList())); request.setDurationFilter(options.filteredDuration()); return new ListTransactionsRequest.Builder(request); }
// Test: with no filters configured, producer-id and state filter lists are empty in the built request.
@Test public void testBuildRequestWithoutFilters() { int brokerId = 1; BrokerKey brokerKey = new BrokerKey(OptionalInt.of(brokerId)); ListTransactionsOptions options = new ListTransactionsOptions(); ListTransactionsHandler handler = new ListTransactionsHandler(options, logContext); ListTransactionsRequest request = handler.buildBatchedRequest(brokerId, singleton(brokerKey)).build(); assertEquals(Collections.emptyList(), request.data().producerIdFilters()); assertEquals(Collections.emptyList(), request.data().stateFilters()); }
// Varint encoded length of an int, computed by zero-extending it to long (no sign extension) and
// delegating to the long overload.
public static int getLength(int v) { return getLength(convertIntToLongNoSignExtend(v)); }
// Test: VarInt encodings match the fixtures and getLength agrees with the encoded byte count, for both
// long and int inputs.
@Test public void encodeValuesAndGetLength() throws IOException { assertEquals(LONG_VALUES.length, LONG_ENCODED.length); for (int i = 0; i < LONG_VALUES.length; ++i) { byte[] encoded = encodeLong(LONG_VALUES[i]); assertThat(encoded, equalTo(LONG_ENCODED[i])); assertEquals(LONG_ENCODED[i].length, VarInt.getLength(LONG_VALUES[i])); } assertEquals(INT_VALUES.length, INT_ENCODED.length); for (int i = 0; i < INT_VALUES.length; ++i) { byte[] encoded = encodeInt(INT_VALUES[i]); assertThat(encoded, equalTo(INT_ENCODED[i])); assertEquals(INT_ENCODED[i].length, VarInt.getLength(INT_VALUES[i])); } }
// Asks the plugin, via the check-repository-connection message, whether the repository configuration is
// reachable; the request body and response are converted through the version-resolved message converter.
public Result checkConnectionToRepository(String pluginId, final RepositoryConfiguration repositoryConfiguration) { return pluginRequestHelper.submitRequest(pluginId, REQUEST_CHECK_REPOSITORY_CONNECTION, new DefaultPluginInteractionCallback<>() { @Override public String requestBody(String resolvedExtensionVersion) { return messageConverter(resolvedExtensionVersion).requestMessageForCheckConnectionToRepository(repositoryConfiguration); } @Override public Result onSuccess(String responseBody, Map<String, String> responseHeaders, String resolvedExtensionVersion) { return messageConverter(resolvedExtensionVersion).responseMessageForCheckConnectionToRepository(responseBody); } }); }
// Test: the check-connection request body/response round-trips through the plugin extension correctly.
@Test public void shouldTalkToPluginToCheckRepositoryConnectionSuccessful() throws Exception { String expectedRequestBody = "{\"repository-configuration\":{\"key-one\":{\"value\":\"value-one\"},\"key-two\":{\"value\":\"value-two\"}}}"; String expectedResponseBody = "{\"status\":\"success\",messages=[\"message-one\",\"message-two\"]}"; when(pluginManager.isPluginOfType(PACKAGE_MATERIAL_EXTENSION, PLUGIN_ID)).thenReturn(true); when(pluginManager.submitTo(eq(PLUGIN_ID), eq(PACKAGE_MATERIAL_EXTENSION), requestArgumentCaptor.capture())).thenReturn(DefaultGoPluginApiResponse.success(expectedResponseBody)); Result result = extension.checkConnectionToRepository(PLUGIN_ID, repositoryConfiguration); assertRequest(requestArgumentCaptor.getValue(), PACKAGE_MATERIAL_EXTENSION, "1.0", PackageRepositoryExtension.REQUEST_CHECK_REPOSITORY_CONNECTION, expectedRequestBody); assertSuccessResult(result, List.of("message-one", "message-two")); }
// Returns the pages of the given table belonging to this part (every totalParts-th page starting at
// partNumber), projected onto columnIndexes. Fails with MISSING_DATA when the table is absent or holds
// fewer rows than expected.
public synchronized List<Page> getPages( Long tableId, int partNumber, int totalParts, List<Integer> columnIndexes, long expectedRows) { if (!contains(tableId)) { throw new PrestoException(MISSING_DATA, "Failed to find table on a worker."); } TableData tableData = tables.get(tableId); if (tableData.getRows() < expectedRows) { throw new PrestoException(MISSING_DATA, format("Expected to find [%s] rows on a worker, but found [%s].", expectedRows, tableData.getRows())); } ImmutableList.Builder<Page> partitionedPages = ImmutableList.builder(); for (int i = partNumber; i < tableData.getPages().size(); i += totalParts) { partitionedPages.add(getColumns(tableData.getPages().get(i), columnIndexes)); } return partitionedPages.build(); }
// Test: reading an unknown table id raises PrestoException (MISSING_DATA path).
@Test(expectedExceptions = PrestoException.class) public void testReadFromUnknownTable() { pagesStore.getPages(0L, 0, 1, ImmutableList.of(0), 0); }
// REST endpoint deleting a deployment by id (404/FlowableObjectNotFoundException if unknown); gives the
// optional API interceptor a veto before deletion and honors the optional cascade flag.
@ApiOperation(value = "Delete a deployment", tags = { "Deployment" }, code = 204) @ApiResponses(value = { @ApiResponse(code = 204, message = "Indicates the deployment was found and has been deleted. Response-body is intentionally empty."), @ApiResponse(code = 404, message = "Indicates the requested deployment was not found.") }) @DeleteMapping(value = "/repository/deployments/{deploymentId}", produces = "application/json") @ResponseStatus(HttpStatus.NO_CONTENT) public void deleteDeployment(@ApiParam(name = "deploymentId") @PathVariable String deploymentId, @RequestParam(value = "cascade", required = false, defaultValue = "false") Boolean cascade) { Deployment deployment = repositoryService.createDeploymentQuery().deploymentId(deploymentId).singleResult(); if (deployment == null) { throw new FlowableObjectNotFoundException("Could not find a deployment with id '" + deploymentId + "'.", Deployment.class); } if (restApiInterceptor != null) { restApiInterceptor.deleteDeployment(deployment); } if (cascade) { repositoryService.deleteDeployment(deploymentId, true); } else { repositoryService.deleteDeployment(deploymentId); } }
@Test
@org.flowable.engine.test.Deployment(resources = { "org/flowable/rest/service/api/repository/oneTaskProcess.bpmn20.xml" })
public void testDeleteDeployment() throws Exception {
    // Given: the deployment created by the @Deployment annotation exists.
    Deployment existingDeployment = repositoryService.createDeploymentQuery().singleResult();
    assertThat(existingDeployment).isNotNull();

    // Delete the deployment
    HttpDelete httpDelete = new HttpDelete(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_DEPLOYMENT, existingDeployment.getId()));
    CloseableHttpResponse response = executeRequest(httpDelete, HttpStatus.SC_NO_CONTENT);
    closeResponse(response);

    // Then: the deployment is gone from the repository.
    existingDeployment = repositoryService.createDeploymentQuery().singleResult();
    assertThat(existingDeployment).isNull();
}
/**
 * Parses an HTTP query string (or post body) into a name-to-values map.
 *
 * <p>Special cases where the input is not a well-formed {@code key=value} list
 * fall back to a single entry keyed by a blank name (" ") whose sole value is
 * the whole input:
 * <ul>
 *   <li>the input looks like an XML/SOAP payload (first name starts with "&lt;?")</li>
 *   <li>a parameter has no key ("=value") or more than one "=" separator</li>
 * </ul>
 * Repeated names accumulate all of their values in the array.
 *
 * @param query raw query string; parameters separated by {@code PARAM_CONCATENATE}
 * @return map from decoded parameter name to its decoded values
 */
public static Map<String, String[]> getQueryMap(String query) {

    Map<String, String[]> map = new HashMap<>();
    String[] params = query.split(PARAM_CONCATENATE);
    for (String param : params) {
        String[] paramSplit = param.split("=");
        if (paramSplit.length == 0) {
            continue; // We found no key-/value-pair, so continue on the next param
        }
        String name = decodeQuery(paramSplit[0]);

        // hack for SOAP request (generally)
        if (name.trim().startsWith("<?")) { // $NON-NLS-1$
            map.put(" ", new String[] {query}); //blank name // $NON-NLS-1$
            return map;
        }

        // the post payload is not key=value
        if((param.startsWith("=") && paramSplit.length == 1) || paramSplit.length > 2) {
            map.put(" ", new String[] {query}); //blank name // $NON-NLS-1$
            return map;
        }

        String value = "";
        if(paramSplit.length>1) {
            value = decodeQuery(paramSplit[1]);
        }

        // Append to any values already collected for this name.
        String[] known = map.get(name);
        if(known == null) {
            known = new String[] {value};
        }
        else {
            String[] tmp = new String[known.length+1];
            tmp[tmp.length-1] = value; // new value goes last; arraycopy below fills the rest
            System.arraycopy(known, 0, tmp, 0, known.length);
            known = tmp;
        }
        map.put(name, known);
    }

    return map;
}
// A SOAP/XML body must NOT be split on '&' / '='; the parser's "SOAP hack" should
// return a single blank-named entry whose only value is the entire payload.
@Test
void testGetQueryMapSoapHack() {
    String query = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
            + "<SOAP-ENV:Envelope xmlns:SOAP-ENV=\"http://schemas.xmlsoap.org/soap/envelope/\"\n"
            + "xmlns:SOAP-ENC=\"http://schemas.xmlsoap.org/soap/encoding/\"\n"
            + "xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n"
            + "xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\">\n"
            + " <SOAP-ENV:Header>\n"
            + " <m:Security\n"
            + "xmlns:m=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd\">\n"
            + " <UsernameToken>\n"
            + " <Username>hello</Username>\n"
            + " <Password>world</Password>\n"
            + " </UsernameToken>\n"
            + " </m:Security>\n"
            + " </SOAP-ENV:Header>\n"
            + " <SOAP-ENV:Body> \n"
            + " <m:GeefPersoon xmlns:m=\"http://webservice.namespace\">\n"
            + " <Vraag>\n"
            + " <Context>\n"
            + " <Naam>GeefPersoon</Naam>\n"
            + " <Versie>01.00.0000</Versie>\n"
            + " </Context>\n"
            + " <Inhoud>\n"
            + " <INSZ>650602505589</INSZ>\n"
            + " </Inhoud>\n"
            + " </Vraag>\n"
            + " </m:GeefPersoon>\n"
            + " </SOAP-ENV:Body>\n"
            + "</SOAP-ENV:Envelope>";
    Map<String, String[]> params = RequestViewHTTP.getQueryMap(query);
    Assertions.assertNotNull(params);
    Assertions.assertEquals(1, params.size());
    // Single entry: blank key, value is the untouched payload.
    Map.Entry<String, String[]> param1 = params.entrySet().iterator().next();
    Assertions.assertNotNull(param1);
    Assertions.assertEquals(1, param1.getValue().length);
    Assertions.assertEquals(query, param1.getValue()[0]);
    Assertions.assertTrue(StringUtils.isBlank(param1.getKey()));
}
/**
 * Builds the output stream for this create-file request.
 *
 * <p>Steps: (1) strip {@code fs.s3a.create.header.*} entries out of the mandatory
 * keys and collect them as object headers before validating the remaining keys;
 * (2) collect header entries from the optional keys too; (3) validate the create
 * flags; (4) delegate actual stream creation to {@code callbacks} with the
 * assembled {@code CreateFileOptions}.
 *
 * @return the created output stream
 * @throws UnsupportedOperationException if the APPEND flag is set
 * @throws PathIOException if neither CREATE nor OVERWRITE is set
 * @throws IOException on validation or creation failure
 */
@Override
public FSDataOutputStream build() throws IOException {
    Path path = getPath();

    final Configuration options = getOptions();
    final Map<String, String> headers = new HashMap<>();
    final Set<String> mandatoryKeys = getMandatoryKeys();
    final Set<String> keysToValidate = new HashSet<>();

    // pick up all headers from the mandatory list and strip them before
    // validating the keys
    String headerPrefix = FS_S3A_CREATE_HEADER + ".";
    final int prefixLen = headerPrefix.length();
    mandatoryKeys.stream().forEach(key -> {
        if (key.startsWith(headerPrefix) && key.length() > prefixLen) {
            headers.put(key.substring(prefixLen), options.get(key));
        } else {
            keysToValidate.add(key);
        }
    });

    rejectUnknownMandatoryKeys(keysToValidate, CREATE_FILE_KEYS, "for " + path);

    // and add any optional headers
    getOptionalKeys().stream()
        .filter(key -> key.startsWith(headerPrefix) && key.length() > prefixLen)
        .forEach(key -> headers.put(key.substring(prefixLen), options.get(key)));

    EnumSet<CreateFlag> flags = getFlags();
    if (flags.contains(CreateFlag.APPEND)) {
        throw new UnsupportedOperationException("Append is not supported");
    }

    if (!flags.contains(CreateFlag.CREATE) &&
        !flags.contains(CreateFlag.OVERWRITE)) {
        throw new PathIOException(path.toString(),
            "Must specify either create or overwrite");
    }

    final boolean performance =
        options.getBoolean(Constants.FS_S3A_CREATE_PERFORMANCE, false);
    return callbacks.createFileFromBuilder(
        path,
        getProgress(),
        new CreateFileOptions(flags, isRecursive(), performance, headers));
}
// Setting fs.s3a.create.performance as a mandatory builder option must be
// propagated into the CreateFileOptions handed to the callbacks.
@Test
public void testPerformanceSupport() throws Throwable {
    CreateFileBuilder builder = mkBuilder().create();
    builder.must(FS_S3A_CREATE_PERFORMANCE, true);
    Assertions.assertThat(build(builder))
        .matches(p -> p.isPerformance());
}
/**
 * Parses an infix logical expression of {@link Predicate} class names combined
 * with the single-character operators in {@code OPERATORS} (including
 * parentheses) into a single composed {@link Predicate}.
 *
 * <p>Precedence-based infix evaluation with two stacks: operand classes are
 * instantiated reflectively and pushed on {@code predicateStack}; operators go
 * on {@code operatorStack} and are reduced via {@code evaluate(...)} according
 * to {@code OPERATOR_PRECEDENCE}. {@code isTokenMode} distinguishes "read the
 * next token" from "re-process an operator popped during a reduction".
 *
 * @param expression the expression; whitespace is stripped via TRIMMER_PATTERN
 * @return the composed predicate
 * @throws RuntimeException if a named class does not implement Predicate, cannot
 *         be instantiated, or the expression is structurally invalid
 */
public static Predicate parse(String expression) {
    final Stack<Predicate> predicateStack = new Stack<>();
    final Stack<Character> operatorStack = new Stack<>();

    final String trimmedExpression = TRIMMER_PATTERN.matcher(expression).replaceAll("");
    final StringTokenizer tokenizer = new StringTokenizer(trimmedExpression, OPERATORS, true);
    boolean isTokenMode = true;

    while (true) {
        final Character operator;
        final String token;

        if (isTokenMode) {
            if (tokenizer.hasMoreTokens()) {
                token = tokenizer.nextToken();
            } else {
                break;
            }

            if (OPERATORS.contains(token)) {
                operator = token.charAt(0);
            } else {
                operator = null;
            }
        } else {
            // Re-process an operator that was popped while reducing higher-precedence ops.
            operator = operatorStack.pop();
            token = null;
        }
        isTokenMode = true;

        if (operator == null) {
            // Operand: instantiate the named Predicate class via its no-arg constructor.
            try {
                predicateStack.push(Class.forName(token).asSubclass(Predicate.class).getDeclaredConstructor().newInstance());
            } catch (ClassCastException e) {
                throw new RuntimeException(token + " must implement " + Predicate.class.getName(), e);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        } else {
            if (operatorStack.empty() || operator == '(') {
                operatorStack.push(operator);
            } else if (operator == ')') {
                // Reduce back to the matching '(' and discard it.
                while (operatorStack.peek() != '(') {
                    evaluate(predicateStack, operatorStack);
                }

                operatorStack.pop();
            } else {
                // Lower-precedence operator: reduce the top of the stack first,
                // then fall back into operator mode to reconsider this operator.
                if (OPERATOR_PRECEDENCE.get(operator) < OPERATOR_PRECEDENCE.get(operatorStack.peek())) {
                    evaluate(predicateStack, operatorStack);
                    isTokenMode = false;
                }

                operatorStack.push(operator);
            }
        }
    }

    while (!operatorStack.empty()) {
        evaluate(predicateStack, operatorStack);
    }

    if (predicateStack.size() > 1) {
        throw new RuntimeException("Invalid logical expression");
    }

    return predicateStack.pop();
}
// NOTE(review): the expression contains "!" followed directly by "|", i.e. the
// negation has no operand. No assertion or expected-exception annotation is
// visible here, so presumably the parser is expected either to tolerate this or
// to throw — confirm against the full test class before relying on this test.
@Test
public void testOperandNotMissingOperandOr() {
    PredicateExpressionParser.parse("com.linkedin.data.it.AlwaysTruePredicate ! | com.linkedin.data.it.AlwaysFalsePredicate");
}
/**
 * Runs every {@link Rule}'s pattern against the given log text and collects one
 * {@link Result} per rule whose pattern matches at least once.
 *
 * <p>(Method name keeps the historical "anaylze" spelling; it is public API.)
 *
 * @param log the raw crash-report / log text to scan
 * @return the set of matched results; empty when no rule matched
 */
public static Set<Result> anaylze(String log) {
    final Set<Result> matched = new HashSet<>();
    for (Rule candidate : Rule.values()) {
        Matcher m = candidate.pattern.matcher(log);
        if (!m.find()) {
            continue;
        }
        matched.add(new Result(candidate, log, m));
    }
    return matched;
}
// The analyzer must flag this log as an incomplete Forge installation.
// Fix: the previous version assigned findResultByRule(...) to a local `result`
// that was never read; the lookup itself performs the check, so the dead local
// is dropped.
@Test
public void incompleteForgeInstallation4() throws IOException {
    findResultByRule(
            CrashReportAnalyzer.anaylze(loadLog("/logs/incomplete_forge_installation4.txt")),
            CrashReportAnalyzer.Rule.INCOMPLETE_FORGE_INSTALLATION);
}
/**
 * Handles one coordinator message.
 *
 * <p>A {@link CompactionUnit} is processed only if it is addressed to this
 * subtask (checked via task parallelism/index): the unit's files are compacted
 * into a freshly created target file, the partition is remembered, and the
 * source files are queued for deletion — deferred until after a successful
 * checkpoint so the originals survive a failure/replay. An {@link EndCompaction}
 * message finalizes the round for its checkpoint id.
 */
@Override
public void processElement(StreamRecord<CoordinatorOutput> element) throws Exception {
    CoordinatorOutput value = element.getValue();
    if (value instanceof CompactionUnit) {
        CompactionUnit unit = (CompactionUnit) value;
        // Units are broadcast; only act on those assigned to this subtask.
        if (unit.isTaskMessage(
                getRuntimeContext().getTaskInfo().getNumberOfParallelSubtasks(),
                getRuntimeContext().getTaskInfo().getIndexOfThisSubtask())) {
            String partition = unit.getPartition();
            List<Path> paths = unit.getPaths();

            // create a target file to compact to
            Path targetPath = createCompactedFile(paths);

            // do compaction
            CompactFileUtils.doCompact(
                    fileSystem,
                    partition,
                    paths,
                    targetPath,
                    getContainingTask()
                            .getEnvironment()
                            .getTaskManagerInfo()
                            .getConfiguration(),
                    readerFactory,
                    writerFactory);

            this.partitions.add(partition);

            // Only after the current checkpoint is successfully executed can delete
            // the expired files, so as to ensure the existence of the files.
            this.currentExpiredFiles.addAll(paths);
        }
    } else if (value instanceof EndCompaction) {
        endCompaction(((EndCompaction) value).getCheckpointId());
    }
}
// Compaction units are broadcast to all subtasks; each subtask must process only
// the units addressed to it (units 0 and 2 -> subtask 0, units 1 and 3 -> subtask 1).
@Test
void testUnitSelection() throws Exception {
    OneInputStreamOperatorTestHarness<CoordinatorOutput, PartitionCommitInfo> harness0 =
            create(2, 0);
    harness0.setup();
    harness0.open();

    OneInputStreamOperatorTestHarness<CoordinatorOutput, PartitionCommitInfo> harness1 =
            create(2, 1);
    harness1.setup();
    harness1.open();

    Path f0 = newFile(".uncompacted-f0", 3);
    Path f1 = newFile(".uncompacted-f1", 2);
    Path f2 = newFile(".uncompacted-f2", 2);

    Path f3 = newFile(".uncompacted-f3", 5);
    Path f4 = newFile(".uncompacted-f4", 1);

    Path f5 = newFile(".uncompacted-f5", 5);
    Path f6 = newFile(".uncompacted-f6", 4);

    FileSystem fs = f0.getFileSystem();

    // broadcast
    harness0.processElement(new CompactionUnit(0, "p0", Arrays.asList(f0, f1, f4)), 0);
    harness0.processElement(new CompactionUnit(1, "p0", Collections.singletonList(f3)), 0);
    harness0.processElement(new CompactionUnit(2, "p0", Arrays.asList(f2, f5)), 0);
    harness0.processElement(new CompactionUnit(3, "p0", Collections.singletonList(f6)), 0);

    harness0.processElement(new EndCompaction(1), 0);

    // check compacted file generated
    assertThat(fs.exists(new Path(folder, "compacted-f0"))).isTrue();
    assertThat(fs.exists(new Path(folder, "compacted-f2"))).isTrue();
    // f3 and f6 are in the charge of another task
    assertThat(fs.exists(new Path(folder, "compacted-f3"))).isFalse();
    assertThat(fs.exists(new Path(folder, "compacted-f6"))).isFalse();

    harness1.processElement(new CompactionUnit(0, "p0", Arrays.asList(f0, f1, f4)), 0);
    harness1.processElement(new CompactionUnit(1, "p0", Collections.singletonList(f3)), 0);
    harness1.processElement(new CompactionUnit(2, "p0", Arrays.asList(f2, f5)), 0);
    harness1.processElement(new CompactionUnit(3, "p0", Collections.singletonList(f6)), 0);

    harness1.processElement(new EndCompaction(1), 0);

    // check compacted file generated
    assertThat(fs.exists(new Path(folder, "compacted-f3"))).isTrue();
    assertThat(fs.exists(new Path(folder, "compacted-f6"))).isTrue();

    harness0.close();
    harness1.close();
}
/**
 * Returns the configured {@code guiAvailable} flag.
 *
 * @return {@code true} if GUI availability has been enabled on this configuration
 */
public boolean isGuiAvailable() {
    return this.guiAvailable;
}
// The guiAvailable flag must default to false on a freshly created configuration.
@Test
public void testDefaultGuiAvailable() {
    assertEquals(false, jt400Configuration.isGuiAvailable());
}
// Private constructor: this class is used statically and must not be instantiated.
private HoodieSparkQuickstart() {
}
// Smoke-test: run the full quickstart flow against a temp table path, and always
// clean the on-disk table up afterwards — even when the run fails.
@Test
public void testHoodieSparkQuickstart() {
    String tableName = "spark_quick_start";
    String tablePath = tablePath(tableName);

    try {
        runQuickstart(jsc, spark, tableName, tablePath);
    } finally {
        Utils.deleteRecursively(new File(tablePath));
    }
}
/**
 * Creates a {@link CsvIOParse} transform that parses CSV records into instances
 * of {@code klass}.
 *
 * <p>A Beam {@link Schema} plus row conversion functions are derived from the
 * class via the default {@link SchemaProvider}; the given {@link CSVFormat} is
 * validated both on its own and against that schema before the parse
 * configuration is assembled.
 *
 * @param klass target element type; must be schema-inferable by the default provider
 * @param csvFormat the Apache Commons CSV format describing the input records
 * @return the configured parse transform
 * @throws IllegalStateException if a schema or row-conversion function cannot be
 *         generated for {@code klass}
 */
public static <T> CsvIOParse<T> parse(Class<T> klass, CSVFormat csvFormat) {
    CsvIOParseHelpers.validateCsvFormat(csvFormat);
    SchemaProvider provider = new DefaultSchema.DefaultSchemaProvider();
    TypeDescriptor<T> type = TypeDescriptor.of(klass);
    Schema schema =
        checkStateNotNull(
            provider.schemaFor(type),
            "Illegal %s: Schema could not be generated from given %s class",
            Schema.class,
            klass);
    CsvIOParseHelpers.validateCsvFormatWithSchema(csvFormat, schema);
    SerializableFunction<Row, T> fromRowFn =
        checkStateNotNull(
            provider.fromRowFunction(type),
            "FromRowFn could not be generated from the given %s class",
            klass);
    SerializableFunction<T, Row> toRowFn =
        checkStateNotNull(
            provider.toRowFunction(type),
            "ToRowFn could not be generated from the given %s class",
            klass);
    SchemaCoder<T> coder = SchemaCoder.of(schema, type, toRowFn, fromRowFn);
    CsvIOParseConfiguration.Builder<T> builder = CsvIOParseConfiguration.builder();
    builder.setCsvFormat(csvFormat).setSchema(schema).setCoder(coder).setFromRowFn(fromRowFn);
    return CsvIOParse.<T>builder().setConfigBuilder(builder).build();
}
// End-to-end parse into POJOs: covers comments, a null marker ("N/A"), an embedded
// newline in a quoted field, and an escaped delimiter ("$,") — with no errors emitted.
@Test
public void parsesPOJOs() {
    Pipeline pipeline = Pipeline.create();
    PCollection<String> input =
        csvRecords(
            pipeline,
            "# This is a comment",
            "aBoolean,aDouble,aFloat,anInteger,aLong,aString",
            "true,1.0,2.0,3,4,foo",
            "N/A,6.0,7.0,8,9,bar",
            "false,12.0,14.0,8,24,\"foo\nbar\"",
            "true,1.0,2.0,3,4,foo$,bar");
    List<SchemaAwareJavaBeans.NullableAllPrimitiveDataTypes> want =
        Arrays.asList(
            nullableAllPrimitiveDataTypes(true, 1.0d, 2.0f, 3, 4L, "foo"),
            nullableAllPrimitiveDataTypes(null, 6.0d, 7.0f, 8, 9L, "bar"),
            nullableAllPrimitiveDataTypes(false, 12.0d, 14.0f, 8, 24L, "foo\nbar"),
            nullableAllPrimitiveDataTypes(true, 1.0d, 2.0f, 3, 4L, "foo,bar"));

    CsvIOParse<NullableAllPrimitiveDataTypes> underTest =
        CsvIO.parse(NullableAllPrimitiveDataTypes.class, csvFormat());
    CsvIOParseResult<NullableAllPrimitiveDataTypes> result = input.apply(underTest);
    PAssert.that(result.getOutput()).containsInAnyOrder(want);
    PAssert.that(result.getErrors()).empty();

    pipeline.run();
}
/**
 * Infers the output {@link TypeInformation} of a {@link MapFunction} from its
 * generic signature and the given input type.
 *
 * <p>Convenience overload that delegates to the four-argument variant with
 * {@code null} and {@code false} for the extra arguments — see that overload
 * for their exact semantics.
 */
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getMapReturnTypes(
        MapFunction<IN, OUT> mapInterface, TypeInformation<IN> inType) {
    return getMapReturnTypes(mapInterface, inType, null, false);
}
// Type extraction on a MapFunction over Either<String, Boolean> must yield the
// matching EitherTypeInfo; the input type info itself doubles as the expectation
// since input and output types are identical here.
@Test
void testEither() {
    MapFunction<?, ?> function =
            new MapFunction<Either<String, Boolean>, Either<String, Boolean>>() {
                @Override
                public Either<String, Boolean> map(Either<String, Boolean> value)
                        throws Exception {
                    return null;
                }
            };
    TypeInformation<?> expected =
            new EitherTypeInfo(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.BOOLEAN_TYPE_INFO);
    TypeInformation<?> ti =
            TypeExtractor.getMapReturnTypes((MapFunction) function, expected);
    assertThat(ti).isEqualTo(expected);
}