focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Deregisters this party from the phaser and returns the shared termination future.
 * The same {@code terminationFuture} instance is returned on every call, so callers
 * may compare references.
 * NOTE(review): repeated calls also repeat arriveAndDeregister() — presumably the
 * phaser tolerates that in this tracker; confirm against the enclosing class.
 */
public CompletableFuture<Void> awaitAsync() {
    phaser.arriveAndDeregister();
    return terminationFuture;
}
// Verifies awaitAsync() is idempotent from the caller's view: the returned future
// is already done, and a second call yields the very same future instance.
@Test
void testAwaitAsyncIsIdempotent() {
    final CompletableFuture<Void> awaitFuture = inFlightRequestTracker.awaitAsync();
    assertThat(awaitFuture).isDone();
    assertThat(awaitFuture)
        .as("The reference to the future must not change")
        .isSameAs(inFlightRequestTracker.awaitAsync());
}
/**
 * Derives ignoreFailureMode from a step action: it is set when the action is a
 * KILL that is either workflow-level, or targets a different workflow id /
 * instance / run / step than this summary. A KILL precisely matching this step
 * (and any non-KILL action) leaves ignoreFailureMode false.
 */
@JsonIgnore
public void configIgnoreFailureMode(StepAction action, WorkflowSummary summary) {
    ignoreFailureMode =
        action.getAction() == Actions.StepInstanceAction.KILL
            && (action.isWorkflowAction()
                || !(action.getWorkflowId().equals(summary.getWorkflowId())
                    && action.getWorkflowInstanceId() == summary.getWorkflowInstanceId()
                    && action.getWorkflowRunId() == summary.getWorkflowRunId()
                    && action.getStepId().equals(getStepId())));
}
// Exercises configIgnoreFailureMode across: workflow-level KILL (true), KILL on a
// mismatched workflow id (true), KILL exactly matching this step (false), and
// non-KILL actions STOP/SKIP (false).
@Test public void testShouldIgnoreFailureMode() throws Exception { StepRuntimeSummary summary = loadObject( "fixtures/execution/sample-step-runtime-summary-1.json", StepRuntimeSummary.class); assertFalse(summary.isIgnoreFailureMode()); StepAction stepAction = mock(StepAction.class); WorkflowSummary workflowSummary = mock(WorkflowSummary.class); when(stepAction.getAction()).thenReturn(Actions.StepInstanceAction.KILL); when(stepAction.isWorkflowAction()).thenReturn(true); summary.configIgnoreFailureMode(stepAction, workflowSummary); assertTrue(summary.isIgnoreFailureMode()); when(stepAction.getAction()).thenReturn(Actions.StepInstanceAction.KILL); when(stepAction.isWorkflowAction()).thenReturn(false); when(stepAction.getWorkflowId()).thenReturn("wf1"); when(workflowSummary.getWorkflowId()).thenReturn("wf2"); summary.configIgnoreFailureMode(stepAction, workflowSummary); assertTrue(summary.isIgnoreFailureMode()); when(stepAction.getAction()).thenReturn(Actions.StepInstanceAction.KILL); when(stepAction.isWorkflowAction()).thenReturn(false); when(stepAction.getWorkflowId()).thenReturn("wf1"); when(stepAction.getWorkflowInstanceId()).thenReturn(123L); when(stepAction.getWorkflowRunId()).thenReturn(2L); when(stepAction.getStepId()).thenReturn("foo"); when(workflowSummary.getWorkflowId()).thenReturn("wf1"); when(workflowSummary.getWorkflowInstanceId()).thenReturn(123L); when(workflowSummary.getWorkflowRunId()).thenReturn(2L); summary.configIgnoreFailureMode(stepAction, workflowSummary); assertFalse(summary.isIgnoreFailureMode()); when(stepAction.getAction()).thenReturn(Actions.StepInstanceAction.STOP); summary.configIgnoreFailureMode(stepAction, workflowSummary); assertFalse(summary.isIgnoreFailureMode()); when(stepAction.getAction()).thenReturn(Actions.StepInstanceAction.SKIP); summary.configIgnoreFailureMode(stepAction, workflowSummary); assertFalse(summary.isIgnoreFailureMode()); }
/**
 * SQL scalar: percent-encodes a varchar for use in URL query parameter names and
 * values (form-parameter escaping, so e.g. space becomes '+'). The declared
 * output length bound y = x * 12 covers the worst-case expansion per input char.
 */
@Description("escape a string for use in URL query parameter names and values")
@ScalarFunction
@LiteralParameters({"x", "y"})
@Constraint(variable = "y", expression = "min(2147483647, x * 12)")
@SqlType("varchar(y)")
public static Slice urlEncode(@SqlType("varchar(x)") Slice value) {
    Escaper escaper = UrlEscapers.urlFormParameterEscaper();
    return slice(escaper.escape(value.toStringUtf8()));
}
// Checks url_encode over ASCII, reserved URL characters, multi-byte UTF-8 and a
// surrogate pair; expected values use form-parameter escaping (space -> '+').
@Test
public void testUrlEncode() {
    final String[][] outputInputPairs = {
        {"http%3A%2F%2Ftest", "http://test"},
        {"http%3A%2F%2Ftest%3Fa%3Db%26c%3Dd", "http://test?a=b&c=d"},
        {"http%3A%2F%2F%E3%83%86%E3%82%B9%E3%83%88", "http://\u30c6\u30b9\u30c8"},
        {"%7E%40%3A.-*_%2B+%E2%98%83", "~@:.-*_+ \u2603"},
        {"test", "test"},
    };
    for (String[] outputInputPair : outputInputPairs) {
        String input = outputInputPair[1];
        String output = outputInputPair[0];
        assertFunction("url_encode('" + input + "')", createVarcharType(input.length() * 12), output);
    }
    assertFunction("url_encode('\uD867\uDE3D')", createVarcharType(12), "%F0%A9%B8%BD");
}
/**
 * Decodes the MySQL negotiation phase. The first inbound packet is treated as
 * the server handshake; every later packet as an auth response. Once an OK
 * response arrives, negotiation is finished and this decoder removes itself
 * from the channel pipeline.
 */
@Override
protected void decode(final ChannelHandlerContext ctx, final ByteBuf in, final List<Object> out) {
    MySQLPacketPayload payload =
        new MySQLPacketPayload(in, ctx.channel().attr(CommonConstants.CHARSET_ATTRIBUTE_KEY).get());
    if (handshakeReceived) {
        MySQLPacket responsePacket = decodeResponsePacket(payload);
        if (responsePacket instanceof MySQLOKPacket) {
            // Negotiation done — stop intercepting further packets.
            ctx.channel().pipeline().remove(this);
        }
        out.add(responsePacket);
    } else {
        out.add(decodeHandshakePacket(payload));
        handshakeReceived = true;
    }
}
// The byteBuf fixture presumably carries an unsupported protocol version; a fresh
// decoder (handshake not yet received) must reject it with IllegalArgumentException.
@Test
void assertDecodeUnsupportedProtocolVersion() {
    MySQLNegotiatePackageDecoder commandPacketDecoder = new MySQLNegotiatePackageDecoder();
    assertThrows(IllegalArgumentException.class,
        () -> commandPacketDecoder.decode(channelHandlerContext, byteBuf, null));
}
/**
 * Human-readable dump of the fragment header fields; numeric fields are
 * rendered in decimal via the boxed toString helpers.
 */
@Override
public String toString() {
    return toStringHelper(getClass())
        .add("nextHeader", Byte.toString(nextHeader))
        .add("fragmentOffset", Short.toString(fragmentOffset))
        .add("moreFragment", Byte.toString(moreFragment))
        .add("identification", Integer.toString(identification))
        .toString();
}
// Deserializes a fragment packet and checks each header field appears in
// toString() in decimal form (e.g. 0x11 -> "17").
@Test
public void testToStringFragment() throws Exception {
    Fragment frag = deserializer.deserialize(bytePacket, 0, bytePacket.length);
    String str = frag.toString();
    assertTrue(StringUtils.contains(str, "nextHeader=" + (byte) 0x11));
    assertTrue(StringUtils.contains(str, "fragmentOffset=" + (short) 0x1f));
    assertTrue(StringUtils.contains(str, "moreFragment=" + (byte) 1));
    assertTrue(StringUtils.contains(str, "identification=" + 0x1357));
}
/**
 * A color overlay is valid only when the primary text color differs from both
 * the primary and the primary-dark background colors.
 */
public boolean isValid() {
    if (mPrimaryColor == mPrimaryTextColor) {
        return false;
    }
    return mPrimaryDarkColor != mPrimaryTextColor;
}
// Overlays where the text color equals either background color must be invalid.
@Test
public void isNotValidIfTextIsSame() {
    Assert.assertFalse(overlay(Color.GRAY, Color.GRAY, Color.GRAY).isValid());
    Assert.assertFalse(overlay(Color.BLACK, Color.BLUE, Color.BLACK).isValid());
    Assert.assertFalse(overlay(Color.MAGENTA, Color.WHITE, Color.WHITE).isValid());
}
/**
 * Initial great-circle bearing from this point to {@code other}, in degrees,
 * measured clockwise from north and normalized to [0, 360).
 */
public double bearingTo(final IGeoPoint other) {
    final double fromLat = Math.toRadians(this.mLatitude);
    final double fromLon = Math.toRadians(this.mLongitude);
    final double toLat = Math.toRadians(other.getLatitude());
    final double toLon = Math.toRadians(other.getLongitude());
    final double dLon = toLon - fromLon;
    // Standard atan2 form of the initial-bearing formula.
    final double y = Math.sin(dLon) * Math.cos(toLat);
    final double x = Math.cos(fromLat) * Math.sin(toLat)
            - Math.sin(fromLat) * Math.cos(toLat) * Math.cos(dLon);
    return (Math.toDegrees(Math.atan2(y, x)) + 360) % 360;
}
// A point due north (same longitude, larger latitude) must give bearing 0.
@Test
public void test_bearingTo_north() {
    final GeoPoint target = new GeoPoint(0.0, 0.0);
    final GeoPoint other = new GeoPoint(10.0, 0.0);
    assertEquals("directly north", 0, Math.round(target.bearingTo(other)));
}
/**
 * Health-state scope for a material. Identity is the string form of the
 * material's scope attributes, so materials with equal attributes map to
 * equal scopes.
 */
public static HealthStateScope forMaterial(Material material) {
    return new HealthStateScope(ScopeType.MATERIAL, material.getAttributesForScope().toString());
}
// Two scopes built from the same material must be equal (value semantics).
@Test
public void shouldHaveAUniqueScopeForEachMaterial() {
    HealthStateScope scope1 = HealthStateScope.forMaterial(MATERIAL1);
    HealthStateScope scope2 = HealthStateScope.forMaterial(MATERIAL1);
    assertThat(scope1, is(scope2));
}
/**
 * Applies the confusion step to the given target.
 * NOTE(review): implementations appear to be expected to log exactly one
 * message describing the action — contract inferred from the test; confirm.
 *
 * @param target the target to confuse
 */
protected abstract void confuseTarget(String target);
// confuseTarget must emit exactly one log message matching the expected text.
@Test
void testConfuseTarget() {
    assertEquals(0, appender.getLogSize());
    this.method.confuseTarget(this.expectedTarget);
    assertEquals(this.expectedConfuseMethod, appender.getLastMessage());
    assertEquals(1, appender.getLogSize());
}
@Override // Camel calls this method if the endpoint isSynchronous(), as the // KafkaEndpoint creates a SynchronousDelegateProducer for it public void process(Exchange exchange) throws Exception { // is the message body a list or something that contains multiple values Message message = exchange.getIn(); if (transactionId != null) { startKafkaTransaction(exchange); } if (endpoint.getConfiguration().isUseIterator() && isIterable(message.getBody())) { processIterableSync(exchange, message); } else { processSingleMessageSync(exchange, message); } }
// The KEY header on the inbound message must be propagated as the Kafka record key.
@Test
public void processSendsMessageWithMessageKeyHeader() throws Exception {
    endpoint.getConfiguration().setTopic("someTopic");
    Mockito.when(exchange.getIn()).thenReturn(in);
    Mockito.when(exchange.getMessage()).thenReturn(in);
    in.setHeader(KafkaConstants.KEY, "someKey");
    producer.process(exchange);
    verifySendMessage("someTopic", "someKey");
    assertRecordMetadataExists();
}
/**
 * Parses a colon-separated MAC address string ("aa:bb:cc:dd:ee:ff") into a
 * MacAddress.
 *
 * @throws IllegalArgumentException if the string fails isValid()
 */
public static MacAddress valueOf(final String address) {
    if (!isValid(address)) {
        throw new IllegalArgumentException(
            "Specified MAC Address must contain 12 hex digits" + " separated pairwise by :'s.");
    }
    final String[] elements = address.split(":");
    final byte[] addressInBytes = new byte[MacAddress.MAC_ADDRESS_LENGTH];
    for (int i = 0; i < MacAddress.MAC_ADDRESS_LENGTH; i++) {
        final String element = elements[i];
        // parseInt (not parseByte) so values >= 0x80 don't overflow before the cast.
        addressInBytes[i] = (byte) Integer.parseInt(element, 16);
    }
    return new MacAddress(addressInBytes);
}
// A MAC string with a too-short octet must be rejected during validation.
@Test(expected = IllegalArgumentException.class)
public void testValueOfInvalidStringWithTooShortOctet() throws Exception {
    MacAddress.valueOf(INVALID_MAC_OCTET_TOO_SHORT);
}
/**
 * Domain admitting exactly the given value of the given type (null not
 * included). ValueSet.of presumably rejects types without comparison support.
 */
public static Domain singleValue(Type type, Object value) {
    return new Domain(ValueSet.of(type, value), false);
}
// HyperLogLog values are not comparable, so a single-value domain must be rejected.
@Test(expectedExceptions = IllegalArgumentException.class)
public void testUncomparableSingleValue() {
    Domain.singleValue(HYPER_LOG_LOG, Slices.EMPTY_SLICE);
}
/** Identity parser: returns the input unchanged (pass-through). */
@Override
public Object parse(Object input) {
    return input;
}
// The pass-through parser must return its argument unchanged.
@Test
public void passThrough_correctArgument() {
    // WHEN
    Object arguments = parser.parse("123");
    // THEN
    assertThat((String) arguments).isEqualTo("123");
}
/**
 * Flushes the wrapped stream.
 * requireOpened() presumably throws IOException once the file is closed
 * (matches the flush-after-close test) — confirm against its implementation.
 */
void flush() throws IOException {
    requireOpened();
    stream.flush();
}
// Flushing an already-closed ref-counted file must fail with IOException.
@Test
void flushAfterCloseShouldThrowException() {
    assertThatExceptionOfType(IOException.class)
        .isThrownBy(
            () -> {
                final RefCountedFileWithStream fileUnderTest =
                    getClosedRefCountedFileWithContent(
                        "hello world", TempDirUtils.newFolder(tempFolder).toPath());
                fileUnderTest.flush();
            });
}
/**
 * Builds the CREATE TABLE DDL plus one COMMENT statement per commented column.
 * The primary key constraint is inlined into the CREATE TABLE only when
 * createIndex is enabled and the primary key has at least one column.
 *
 * @return the CREATE TABLE sql first, followed by the column COMMENT sqls
 */
public List<String> build(TablePath tablePath) {
    List<String> sqls = new ArrayList<>();
    StringBuilder createTableSql = new StringBuilder();
    createTableSql
        .append("CREATE TABLE ")
        .append(tablePath.getSchemaAndTableName("\""))
        .append(" (\n");
    List<String> columnSqls =
        columns.stream()
            .map(column -> CatalogUtils.getFieldIde(buildColumnSql(column), fieldIde))
            .collect(Collectors.toList());
    // Add primary key directly in the create table statement
    if (createIndex
        && primaryKey != null
        && primaryKey.getColumnNames() != null
        && primaryKey.getColumnNames().size() > 0) {
        columnSqls.add(buildPrimaryKeySql(primaryKey));
    }
    createTableSql.append(String.join(",\n", columnSqls));
    createTableSql.append("\n)");
    sqls.add(createTableSql.toString());
    List<String> commentSqls =
        columns.stream()
            .filter(column -> StringUtils.isNotBlank(column.getComment()))
            .map(
                column ->
                    buildColumnCommentSql(
                        column, tablePath.getSchemaAndTableName("\"")))
            .collect(Collectors.toList());
    sqls.addAll(commentSqls);
    return sqls;
}
// Builds an Oracle CREATE TABLE for a six-column schema and compares against the
// expected DDL, neutralizing the randomly generated primary-key constraint name;
// then rebuilds with createIndex=false and expects no PK constraint at all.
@Test public void testBuild() { String dataBaseName = "test_database"; String tableName = "test_table"; TablePath tablePath = TablePath.of(dataBaseName, tableName); TableSchema tableSchema = TableSchema.builder() .column(PhysicalColumn.of("id", BasicType.LONG_TYPE, 22, false, null, "id")) .column( PhysicalColumn.of( "name", BasicType.STRING_TYPE, 128, false, null, "name")) .column( PhysicalColumn.of( "age", BasicType.INT_TYPE, (Long) null, true, null, "age")) .column( PhysicalColumn.of( "blob_v", PrimitiveByteArrayType.INSTANCE, Long.MAX_VALUE, true, null, "blob_v")) .column( PhysicalColumn.of( "createTime", LocalTimeType.LOCAL_DATE_TIME_TYPE, 3, true, null, "createTime")) .column( PhysicalColumn.of( "lastUpdateTime", LocalTimeType.LOCAL_DATE_TIME_TYPE, 3, true, null, "lastUpdateTime")) .primaryKey(PrimaryKey.of("id", Lists.newArrayList("id"))) .constraintKey( Arrays.asList( ConstraintKey.of( ConstraintKey.ConstraintType.INDEX_KEY, "name", Lists.newArrayList( ConstraintKey.ConstraintKeyColumn.of( "name", null))), ConstraintKey.of( ConstraintKey.ConstraintType.INDEX_KEY, "blob_v", Lists.newArrayList( ConstraintKey.ConstraintKeyColumn.of( "blob_v", null))))) .build(); CatalogTable catalogTable = CatalogTable.of( TableIdentifier.of("test_catalog", dataBaseName, tableName), tableSchema, new HashMap<>(), new ArrayList<>(), "User table"); OracleCreateTableSqlBuilder oracleCreateTableSqlBuilder = new OracleCreateTableSqlBuilder(catalogTable, true); String createTableSql = oracleCreateTableSqlBuilder.build(tablePath).get(0); // create table sql is change; The old unit tests are no longer applicable String expect = "CREATE TABLE \"test_table\" (\n" + "\"id\" INTEGER NOT NULL,\n" + "\"name\" VARCHAR2(128) NOT NULL,\n" + "\"age\" INTEGER,\n" + "\"blob_v\" BLOB,\n" + "\"createTime\" TIMESTAMP WITH LOCAL TIME ZONE,\n" + "\"lastUpdateTime\" TIMESTAMP WITH LOCAL TIME ZONE,\n" + "CONSTRAINT id_9a8b PRIMARY KEY (\"id\")\n" + ")"; // replace "CONSTRAINT id_xxxx" because it's dynamically generated(random)
String regex = "id_\\w+"; String replacedStr1 = createTableSql.replaceAll(regex, "id_"); String replacedStr2 = expect.replaceAll(regex, "id_"); CONSOLE.println(replacedStr2); Assertions.assertEquals(replacedStr2, replacedStr1); // skip index OracleCreateTableSqlBuilder oracleCreateTableSqlBuilderSkipIndex = new OracleCreateTableSqlBuilder(catalogTable, false); String createTableSqlSkipIndex = oracleCreateTableSqlBuilderSkipIndex.build(tablePath).get(0); String expectSkipIndex = "CREATE TABLE \"test_table\" (\n" + "\"id\" INTEGER NOT NULL,\n" + "\"name\" VARCHAR2(128) NOT NULL,\n" + "\"age\" INTEGER,\n" + "\"blob_v\" BLOB,\n" + "\"createTime\" TIMESTAMP WITH LOCAL TIME ZONE,\n" + "\"lastUpdateTime\" TIMESTAMP WITH LOCAL TIME ZONE\n" + ")"; CONSOLE.println(expectSkipIndex); Assertions.assertEquals(expectSkipIndex, createTableSqlSkipIndex); }
/**
 * Issues an access/refresh token pair signed with the configured private key.
 * Both tokens share the same issued-at instant; the access token carries all
 * supplied claims, while the refresh token carries only the USER_ID claim.
 */
public Token generateToken(final Map<String, Object> claims) {
    final long currentTimeMillis = System.currentTimeMillis();
    final Date tokenIssuedAt = new Date(currentTimeMillis);
    final Date accessTokenExpiresAt = DateUtils.addMinutes(
        new Date(currentTimeMillis),
        tokenConfigurationParameter.getAccessTokenExpireMinute()
    );
    final String accessToken = Jwts.builder()
        .header()
        .type(TokenType.BEARER.getValue())
        .and()
        .id(UUID.randomUUID().toString())
        .issuedAt(tokenIssuedAt)
        .expiration(accessTokenExpiresAt)
        .signWith(tokenConfigurationParameter.getPrivateKey())
        .claims(claims)
        .compact();
    final Date refreshTokenExpiresAt = DateUtils.addDays(
        new Date(currentTimeMillis),
        tokenConfigurationParameter.getRefreshTokenExpireDay()
    );
    final String refreshToken = Jwts.builder()
        .header()
        .type(TokenType.BEARER.getValue())
        .and()
        .id(UUID.randomUUID().toString())
        .issuedAt(tokenIssuedAt)
        .expiration(refreshTokenExpiresAt)
        .signWith(tokenConfigurationParameter.getPrivateKey())
        .claim(TokenClaims.USER_ID.getValue(), claims.get(TokenClaims.USER_ID.getValue()))
        .compact();
    // NOTE(review): accessTokenExpiresAt on the Token is stored as epoch seconds.
    return Token.builder()
        .accessToken(accessToken)
        .accessTokenExpiresAt(accessTokenExpiresAt.toInstant().getEpochSecond())
        .refreshToken(refreshToken)
        .build();
}
// Smoke test: with configured expirations, generateToken must produce a token
// object with non-empty access and refresh tokens.
@Test
void givenClaims_whenGenerateToken_thenReturnValidToken() {
    // Given
    Map<String, Object> claims = Map.of("user_id", "12345");
    when(tokenConfigurationParameter.getAccessTokenExpireMinute()).thenReturn(15);
    when(tokenConfigurationParameter.getRefreshTokenExpireDay()).thenReturn(30);
    // When
    Token token = tokenService.generateToken(claims);
    // Then
    assertThat(token).isNotNull();
    assertThat(token.getAccessToken()).isNotEmpty();
    assertThat(token.getRefreshToken()).isNotEmpty();
}
/**
 * Converts a metric into a serializable map carrying "full_name", the short
 * "name" (text after the last dot), a "type" tag and the "metric" payload.
 * Timers/meters/histograms are flattened via the build*Map helpers; counters
 * and gauges are embedded as-is.
 * NOTE(review): the instanceof order matters if a metric class implements
 * several of these interfaces — keep the chain order as-is.
 *
 * @throws IllegalArgumentException for unrecognized metric implementations
 */
public static Map<String, Object> map(String metricName, Metric metric) {
    final Map<String, Object> metricMap = Maps.newHashMap();
    metricMap.put("full_name", metricName);
    metricMap.put("name", metricName.substring(metricName.lastIndexOf(".") + 1));
    if (metric instanceof Timer) {
        metricMap.put("metric", buildTimerMap((Timer) metric));
        metricMap.put("type", "timer");
    } else if(metric instanceof Meter) {
        metricMap.put("metric", buildMeterMap((Meter) metric));
        metricMap.put("type", "meter");
    } else if(metric instanceof Histogram) {
        metricMap.put("metric", buildHistogramMap((Histogram) metric));
        metricMap.put("type", "histogram");
    } else if(metric instanceof Counter) {
        metricMap.put("metric", metric);
        metricMap.put("type", "counter");
    } else if(metric instanceof Gauge) {
        metricMap.put("metric", metric);
        metricMap.put("type", "gauge");
    } else {
        throw new IllegalArgumentException("Unknown metric type " + metric.getClass());
    }
    return metricMap;
}
// A histogram with one update must map to type "histogram" with count 1.
@Test
public void mapSupportsHistogram() {
    final Histogram histogram = new Histogram(new UniformReservoir());
    histogram.update(23);
    final Map<String, Object> map = MetricUtils.map("metric", histogram);
    assertThat(map)
        .containsEntry("type", "histogram")
        .extracting("metric")
        .extracting("count")
        .isEqualTo(1L);
}
/**
 * Matches the current user against EDS certificate metadata by looking up the
 * user's {@code TAX_CODE} and {@code BIN} attributes and comparing them with
 * the certificate's taxCode and bin.
 *
 * @return true only when BOTH the bin and the taxCode are present in the
 *         user's attribute values; false otherwise (including when both
 *         metadata values are null, or the user has no attributes)
 */
public static boolean verify(@Nonnull UserModel currentUser, @Nonnull CertificateMetadata edsMetadata) {
    logger.info("Trying to match via user attributes and bin & taxCode...");
    Map<String, List<String>> attrs = currentUser.getAttributes();
    String bin = edsMetadata.getBin(), taxCode = edsMetadata.getTaxCode();
    if (null == bin && null == taxCode) {
        // Fixed log text: this guard fires only when BOTH values are null,
        // but the old message claimed "bin ... or taxCode ... is null".
        logger.warn("Both bin {} and taxCode {} are null", bin, taxCode);
        return false;
    }
    if (attrs != null && !attrs.isEmpty()) {
        List<String> taxCodeValues = attrs.get(KeycloakAttributes.TAX_CODE);
        List<String> binValues = attrs.get(KeycloakAttributes.BIN);
        logger.info("Trying to match taxCode {} in values {}," + " bin {} in values {}",
            taxCode, taxCodeValues, bin, binValues);
        // Simplified from "null == x ? false : x.contains(y)".
        boolean isValidTaxCode = taxCodeValues != null && taxCodeValues.contains(taxCode);
        boolean isValidBin = binValues != null && binValues.contains(bin);
        if (isValidBin && isValidTaxCode) {
            return true;
        }
        logger.info("Not matched by this verifier.");
    }
    return false;
}
// A user with an empty attribute map has nothing to match against, so verify
// must return false even though the certificate carries bin and taxCode.
@Test
public void testVerifyEmptyData() {
    CertificateMetadata metadata = new CertificateMetadata();
    metadata.withBin("BIN_VALUE");
    metadata.withTaxCode("TAX_CODE_VALUE");
    UserModel user = Mockito.mock(UserModel.class);
    HashMap<String, List<String>> attributes = new HashMap<>();
    Mockito.when(user.getAttributes()).thenReturn(attributes);
    var res = CorporateUserVerifierImpl.verify(user, metadata);
    assertThat(res).isFalse();
}
/**
 * Creates a MultiStepCombine wrapping the given combine fn and the coder for
 * the keyed output. Private — construction goes through the enclosing class.
 */
private MultiStepCombine(
    CombineFn<InputT, AccumT, OutputT> combineFn, Coder<KV<K, OutputT>> outputCoder) {
    this.combineFn = combineFn;
    this.outputCoder = outputCoder;
}
// End-to-end: per-key combine over five elements must sum values within each key.
@Test
public void testMultiStepCombine() {
    PCollection<KV<String, Long>> combined =
        pipeline
            .apply(
                Create.of(
                    KV.of("foo", 1L),
                    KV.of("bar", 2L),
                    KV.of("bizzle", 3L),
                    KV.of("bar", 4L),
                    KV.of("bizzle", 11L)))
            .apply(Combine.perKey(new MultiStepCombineFn()));
    PAssert.that(combined)
        .containsInAnyOrder(KV.of("foo", 1L), KV.of("bar", 6L), KV.of("bizzle", 14L));
    pipeline.run();
}
// Resolves the node's address in FQDN mode. With no priority_networks configured,
// the local host's FQDN is resolved and must match one of the bound interface
// addresses; with priority_networks set, the first bound address inside a priority
// CIDR wins. Any failure (unresolvable FQDN, no matching interface, or a reverse
// lookup that disagrees with the chosen FQDN) calls System.exit(-1) — this is
// startup-critical code and deliberately fail-fast.
@VisibleForTesting static void initAddrUseFqdn(List<InetAddress> addrs) { useFqdn = true; analyzePriorityCidrs(); String fqdn = null; if (PRIORITY_CIDRS.isEmpty()) { // Get FQDN from local host by default. try { InetAddress localHost = InetAddress.getLocalHost(); fqdn = localHost.getCanonicalHostName(); String ip = localHost.getHostAddress(); LOG.info("Get FQDN from local host by default, FQDN: {}, ip: {}, v6: {}", fqdn, ip, localHost instanceof Inet6Address); } catch (UnknownHostException e) { LOG.error("failed to get FQDN from local host, will exit", e); System.exit(-1); } if (fqdn == null) { LOG.error("priority_networks is not set and we cannot get FQDN from local host"); System.exit(-1); } // Try to resolve addr from FQDN InetAddress uncheckedInetAddress = null; try { uncheckedInetAddress = InetAddress.getByName(fqdn); } catch (UnknownHostException e) { LOG.error("failed to parse FQDN: {}, message: {}", fqdn, e.getMessage(), e); System.exit(-1); } if (null == uncheckedInetAddress) { LOG.error("failed to parse FQDN: {}", fqdn); System.exit(-1); } // Check whether the InetAddress obtained via FQDN is bound to some network interface boolean hasInetAddr = false; for (InetAddress addr : addrs) { LOG.info("Try to match addr in fqdn mode, ip: {}, FQDN: {}", addr.getHostAddress(), addr.getCanonicalHostName()); if (addr.getCanonicalHostName() .equals(uncheckedInetAddress.getCanonicalHostName())) { hasInetAddr = true; break; } } if (hasInetAddr) { localAddr = uncheckedInetAddress; LOG.info("Using FQDN from local host by default, FQDN: {}, ip: {}, v6: {}", localAddr.getCanonicalHostName(), localAddr.getHostAddress(), localAddr instanceof Inet6Address); } else { LOG.error("Cannot find a network interface matching FQDN: {}", fqdn); System.exit(-1); } } else { LOG.info("using priority_networks in fqdn mode to decide whether ipv6 or ipv4 is preferred"); for (InetAddress addr : addrs) { String hostAddr = addr.getHostAddress(); String canonicalHostName = addr.getCanonicalHostName(); LOG.info("Try to match addr in fqdn mode, ip: {}, FQDN: {}", hostAddr, canonicalHostName); if (isInPriorNetwork(hostAddr)) { localAddr = addr; fqdn = canonicalHostName; LOG.info("Using FQDN from matched addr, FQDN: {}, ip: {}, v6: {}", fqdn, hostAddr, addr instanceof Inet6Address); break; } LOG.info("skip addr {} not belonged to priority networks in FQDN mode", addr); } if (fqdn == null) { LOG.error("priority_networks has been set and we cannot find matched addr, will exit"); System.exit(-1); } } // double-check the reverse resolve String canonicalHostName = localAddr.getCanonicalHostName(); if (!canonicalHostName.equals(fqdn)) { LOG.error("The FQDN of the parsed address [{}] is not the same as " + "the FQDN obtained from the host [{}]", canonicalHostName, fqdn); System.exit(-1); } }
// With no bound interface addresses, FQDN init cannot find a matching addr and
// must exit; System.exit is mocked to throw IllegalAccessException so the exit
// path is observable as the expected exception.
@Test(expected = IllegalAccessException.class) public void testGetStartWithFQDNNotFindAddr() { new MockUp<System>() { @Mock public void exit(int status) throws IllegalAccessException { throw new IllegalAccessException(); } }; new MockUp<NetUtils>() { @Mock public List<InetAddress> getHosts() { List<InetAddress> hosts = new ArrayList<>(); return hosts; } }; List<InetAddress> hosts = NetUtils.getHosts(); new MockUp<InetAddress>() { @Mock public InetAddress getLocalHost() throws UnknownHostException { return addr; } @Mock public String getHostAddress() { return "127.0.0.10"; } @Mock public String getCanonicalHostName() { return "sandbox"; } @Mock public InetAddress getByName(String host) throws UnknownHostException { return addr; } }; FrontendOptions.initAddrUseFqdn(hosts); }
/**
 * Builds the OAuth2/OIDC authorization-code request URL from the server's
 * authorization endpoint, adding the standard parameters, any extra options,
 * and an optional login_hint.
 *
 * @throws AuthenticationServiceException if the endpoint URI is malformed
 */
@Override
public String buildAuthRequestUrl(ServerConfiguration serverConfig, RegisteredClient clientConfig,
        String redirectUri, String nonce, String state, Map<String, String> options, String loginHint) {
    try {
        URIBuilder uriBuilder = new URIBuilder(serverConfig.getAuthorizationEndpointUri());
        uriBuilder.addParameter("response_type", "code");
        uriBuilder.addParameter("client_id", clientConfig.getClientId());
        // Scopes are space-delimited per the OAuth2 spec.
        uriBuilder.addParameter("scope", Joiner.on(" ").join(clientConfig.getScope()));
        uriBuilder.addParameter("redirect_uri", redirectUri);
        uriBuilder.addParameter("nonce", nonce);
        uriBuilder.addParameter("state", state);
        // Optional parameters:
        for (Entry<String, String> option : options.entrySet()) {
            uriBuilder.addParameter(option.getKey(), option.getValue());
        }
        // if there's a login hint, send it
        if (!Strings.isNullOrEmpty(loginHint)) {
            uriBuilder.addParameter("login_hint", loginHint);
        }
        return uriBuilder.build().toString();
    } catch (URISyntaxException e) {
        throw new AuthenticationServiceException("Malformed Authorization Endpoint Uri", e);
    }
}
// A syntactically invalid authorization endpoint must surface as
// AuthenticationServiceException (wrapping the URISyntaxException).
@Test(expected = AuthenticationServiceException.class)
public void buildAuthRequestUrl_badUri() {
    Mockito.when(serverConfig.getAuthorizationEndpointUri()).thenReturn("e=mc^2");
    Map<String, String> options = ImmutableMap.of("foo", "bar");
    urlBuilder.buildAuthRequestUrl(serverConfig, clientConfig, "example.com", "", "", options, null);
}
/** Returns the gRPC status code name, or {@code null} when the call succeeded. */
@Override
@Nullable
public String errorCode() {
    return status.isOk() ? null : status.getCode().name();
}
// The response fixture presumably carries a CANCELLED status; errorCode must
// surface the code name.
@Test
void errorCode() {
    assertThat(response.errorCode()).isEqualTo("CANCELLED");
}
/**
 * Around-advice for the RPC config-publish entry point. When config-change
 * plugins are registered for PUBLISH_BY_RPC, copies the request fields and
 * addition params into a ConfigChangeRequest and routes the join point through
 * the plugin pipeline; otherwise proceeds untouched.
 */
@Around(CLIENT_INTERFACE_PUBLISH_CONFIG_RPC)
Object publishConfigAroundRpc(ProceedingJoinPoint pjp, ConfigPublishRequest request, RequestMeta meta)
        throws Throwable {
    final ConfigChangePointCutTypes configChangePointCutType = ConfigChangePointCutTypes.PUBLISH_BY_RPC;
    final List<ConfigChangePluginService> pluginServices = getPluginServices(
        configChangePointCutType);
    // didn't enabled or add relative plugin
    if (pluginServices.isEmpty()) {
        return pjp.proceed();
    }
    ConfigChangeRequest configChangeRequest = new ConfigChangeRequest(configChangePointCutType);
    configChangeRequest.setArg("dataId", request.getDataId());
    configChangeRequest.setArg("group", request.getGroup());
    configChangeRequest.setArg("tenant", request.getTenant());
    configChangeRequest.setArg("content", request.getContent());
    configChangeRequest.setArg("type", request.getAdditionParam("type"));
    configChangeRequest.setArg("tag", request.getAdditionParam("tag"));
    configChangeRequest.setArg("configTags", request.getAdditionParam("config_tags"));
    configChangeRequest.setArg("desc", request.getAdditionParam("desc"));
    configChangeRequest.setArg("effect", request.getAdditionParam("effect"));
    configChangeRequest.setArg("appName", request.getAdditionParam("appName"));
    configChangeRequest.setArg("srcIp", meta.getClientIp());
    configChangeRequest.setArg("requestIpApp", request.getAdditionParam("requestIpApp"));
    configChangeRequest.setArg("srcUser", request.getAdditionParam("src_user"));
    configChangeRequest.setArg("use", request.getAdditionParam("use"));
    return configChangeServiceHandle(pjp, pluginServices, configChangeRequest);
}
// When the underlying proceed() throws, the before-type plugin must still run
// once and the aspect must return an error response rather than propagating.
@Test
void testPublishConfigAroundRpcException() throws Throwable {
    Mockito.when(configChangePluginService.executeType()).thenReturn(ConfigChangeExecuteTypes.EXECUTE_BEFORE_TYPE);
    ProceedingJoinPoint proceedingJoinPoint = Mockito.mock(ProceedingJoinPoint.class);
    ConfigPublishRequest request = new ConfigPublishRequest();
    RequestMeta requestMeta = new RequestMeta();
    Mockito.when(proceedingJoinPoint.proceed(any())).thenThrow(new NacosRuntimeException(503));
    //execute
    Object o = configChangeAspect.publishConfigAroundRpc(proceedingJoinPoint, request, requestMeta);
    //expect
    Mockito.verify(configChangePluginService, Mockito.times(1))
        .execute(any(ConfigChangeRequest.class), any(ConfigChangeResponse.class));
    assertTrue(((ConfigPublishResponse) o).getMessage().contains("config change join point fail"));
}
/**
 * Picks the best state handle for initial restore of the target key-group
 * range, or {@code null} when no candidate satisfies the overlap-fraction
 * threshold. Selection is delegated to findTheBestStateHandleForInitial.
 */
@Nullable
public static <T extends KeyedStateHandle> T chooseTheBestStateHandleForInitial(
        @Nonnull List<T> restoreStateHandles,
        @Nonnull KeyGroupRange targetKeyGroupRange,
        double overlapFractionThreshold) {
    final int bestIndex =
            findTheBestStateHandleForInitial(
                    restoreStateHandles, targetKeyGroupRange, overlapFractionThreshold);
    if (bestIndex < 0) {
        return null;
    }
    return restoreStateHandles.get(bestIndex);
}
// Covers selection among three handles (key-group ranges [0,3], [4,7], [8,12])
// for several target ranges: overlap-fraction satisfaction, tie-breaking on
// larger overlap, and tie-breaking on larger handle range.
@Test public void testChooseTheBestStateHandleForInitial() { List<KeyedStateHandle> keyedStateHandles = new ArrayList<>(3); KeyedStateHandle keyedStateHandle1 = mock(KeyedStateHandle.class); when(keyedStateHandle1.getKeyGroupRange()).thenReturn(new KeyGroupRange(0, 3)); keyedStateHandles.add(keyedStateHandle1); KeyedStateHandle keyedStateHandle2 = mock(KeyedStateHandle.class); when(keyedStateHandle2.getKeyGroupRange()).thenReturn(new KeyGroupRange(4, 7)); keyedStateHandles.add(keyedStateHandle2); KeyedStateHandle keyedStateHandle3 = mock(KeyedStateHandle.class); when(keyedStateHandle3.getKeyGroupRange()).thenReturn(new KeyGroupRange(8, 12)); keyedStateHandles.add(keyedStateHandle3); // this should choose keyedStateHandle2, because keyedStateHandle2's key-group range // satisfies the overlap fraction demand. Assert.assertEquals( keyedStateHandle2, RocksDBIncrementalCheckpointUtils.chooseTheBestStateHandleForInitial( keyedStateHandles, new KeyGroupRange(3, 6), RESTORE_OVERLAP_FRACTION_THRESHOLD.defaultValue())); // both keyedStateHandle2 & keyedStateHandle3's key-group range satisfies the overlap // fraction, but keyedStateHandle3's key group range is better. Assert.assertEquals( keyedStateHandle3, RocksDBIncrementalCheckpointUtils.chooseTheBestStateHandleForInitial( keyedStateHandles, new KeyGroupRange(5, 12), RESTORE_OVERLAP_FRACTION_THRESHOLD.defaultValue())); // The intersect key group number of keyedStateHandle2 & keyedStateHandle3's with [4, 11] // are 4. But the over fraction of keyedStateHandle2 is better. Assert.assertEquals( keyedStateHandle2, RocksDBIncrementalCheckpointUtils.chooseTheBestStateHandleForInitial( keyedStateHandles, new KeyGroupRange(4, 11), RESTORE_OVERLAP_FRACTION_THRESHOLD.defaultValue())); // both keyedStateHandle2 & keyedStateHandle3's key-group range are covered by [3, 12], // but this should choose the keyedStateHandle3, because keyedStateHandle3's key-group is // bigger than keyedStateHandle2.
Assert.assertEquals( keyedStateHandle3, RocksDBIncrementalCheckpointUtils.chooseTheBestStateHandleForInitial( keyedStateHandles, new KeyGroupRange(3, 12), RESTORE_OVERLAP_FRACTION_THRESHOLD.defaultValue())); }
/**
 * Wraps the given indexing errors in a failure batch and submits it (blocking)
 * to the failure-handling queue. Errors whose message does not support failure
 * handling are dropped with a warning. If submission is interrupted, logs and
 * re-interrupts the current thread.
 */
public void submitIndexingErrors(Collection<IndexingError> indexingErrors) {
    try {
        final FailureBatch fb = FailureBatch.indexingFailureBatch(
            indexingErrors.stream()
                .filter(ie -> {
                    if (!ie.message().supportsFailureHandling()) {
                        logger.warn("Submitted a message with indexing errors, which doesn't support failure handling!");
                        return false;
                    } else {
                        return true;
                    }
                })
                .map(this::fromIndexingError)
                .collect(Collectors.toList()));
        // Skip submission entirely when everything was filtered out.
        if (fb.size() > 0) {
            failureSubmissionQueue.submitBlocking(fb);
        }
    } catch (InterruptedException ignored) {
        logger.warn("Failed to submit {} indexing errors for failure handling. The thread has been interrupted!",
            indexingErrors.size());
        Thread.currentThread().interrupt();
    }
}
// When no message in the batch supports failure handling, nothing may reach the
// failure submission queue (empty batches are filtered out before submit).
@Test public void submitIndexingErrors_messageNotSupportingFailureHandlingNotSubmittedToQueue() throws Exception { // given final Message msg1 = Mockito.mock(Message.class); when(msg1.getMessageId()).thenReturn("msg-1"); when(msg1.supportsFailureHandling()).thenReturn(false); final Message msg2 = Mockito.mock(Message.class); when(msg2.getMessageId()).thenReturn("msg-2"); when(msg2.supportsFailureHandling()).thenReturn(false); final List<IndexingError> indexingErrors = List.of( IndexingError.create(msg1, "index-1", MappingError, "Error"), IndexingError.create(msg2, "index-2", Unknown, "Error2") ); // when underTest.submitIndexingErrors(indexingErrors); // then verifyNoInteractions(failureSubmissionQueue); }
/**
 * Streams the hits produced by the predicate search; yields an empty stream
 * when no posting lists are present.
 */
public Stream<Hit> stream() {
    return nPostingLists == 0
            ? Stream.<Hit>empty()
            : StreamSupport.stream(new PredicateSpliterator(), false);
}
// A single posting entry carrying many intervals must still produce exactly one hit.
@Test
void requireThatThereCanBeManyIntervals() {
    PredicateSearch search = createPredicateSearch(
        new byte[]{1},
        postingList(SubqueryBitmap.ALL_SUBQUERIES,
            entry(0, 0x00010001, 0x00020002, 0x00030003, 0x000100ff, 0x00040004, 0x00050005, 0x00060006)));
    assertEquals(List.of(new Hit(0)).toString(), search.stream().toList().toString());
}
/**
 * Broadcasts a LEADER message carrying {@code leaderId} to every instance
 * except the sender. Always returns false (no further election rounds needed).
 */
@Override
public boolean sendLeaderMessage(int currentId, int leaderId) {
    final var leaderMessage = new Message(MessageType.LEADER, String.valueOf(leaderId));
    for (final var entry : instanceMap.entrySet()) {
        if (entry.getKey() != currentId) {
            entry.getValue().onMessage(leaderMessage);
        }
    }
    return false;
}
// Instance 2 announces itself leader; instances 3 and 4 must each receive the
// LEADER message (queues are inspected reflectively via the private field).
@Test void testSendLeaderMessage() { try { var instance1 = new BullyInstance(null, 1, 1); var instance2 = new BullyInstance(null, 1, 2); var instance3 = new BullyInstance(null, 1, 3); var instance4 = new BullyInstance(null, 1, 4); Map<Integer, Instance> instanceMap = Map.of(1, instance1, 2, instance2, 3, instance3, 4, instance4); instance1.setAlive(false); var messageManager = new BullyMessageManager(instanceMap); messageManager.sendLeaderMessage(2, 2); var instanceClass = AbstractInstance.class; var messageQueueField = instanceClass.getDeclaredField("messageQueue"); messageQueueField.setAccessible(true); var message3 = ((Queue<Message>) messageQueueField.get(instance3)).poll(); var message4 = ((Queue<Message>) messageQueueField.get(instance4)).poll(); var expectedMessage = new Message(MessageType.LEADER, "2"); assertEquals(message3, expectedMessage); assertEquals(message4, expectedMessage); } catch (IllegalAccessException | NoSuchFieldException e) { fail("Error to access private field."); } }
// Builds the deferred-deeplink request payload (device ids, optional gaid/oaid
// overrides from params, user agent, preset device/app properties and project)
// and hands it to DeferredDeepLinkHelper. No-op once a deeplink has already been
// handled (mIsDeepLink). All failures are swallowed and logged.
public static void requestDeferredDeepLink(Context context, JSONObject params, String androidId, String oaid, JSONObject presetProperties, String url, boolean isSaveDeepLinkInfo) { if (mIsDeepLink) return; try { JSONObject jsonObject = new JSONObject(); String ids; String reflectionOAID = SAOaidHelper.getOpenAdIdentifierByReflection(context); if (params != null) { if (params.has("$oaid")) { oaid = params.optString("$oaid"); reflectionOAID = ""; params.remove("$oaid"); } ids = ChannelUtils.getDeviceInfo(context, androidId, oaid, reflectionOAID); if (params.has("$gaid")) { String gaid = params.optString("$gaid"); ids = String.format("%s##gaid=%s", ids, gaid); params.remove("$gaid"); } if (params.has("$user_agent")) { jsonObject.put("ua", params.optString("$user_agent")); params.remove("$user_agent"); } jsonObject.put("app_parameter", params.toString()); } else { ids = ChannelUtils.getDeviceInfo(context, androidId, oaid, reflectionOAID); } jsonObject.put("ids", Base64Coder.encodeString(ids)); jsonObject.put("model", presetProperties.optString("$model")); jsonObject.put("os", presetProperties.optString("$os")); jsonObject.put("os_version", presetProperties.optString("$os_version")); jsonObject.put("network", presetProperties.optString("$network_type")); jsonObject.put("app_id", presetProperties.optString("$app_id")); jsonObject.put("app_version", presetProperties.optString("$app_version")); jsonObject.put("timestamp", String.valueOf(System.currentTimeMillis())); jsonObject.put("project", new ServerUrl(SensorsDataAPI.sharedInstance().getServerUrl()).getProject()); DeferredDeepLinkHelper.request(jsonObject, mDeferredDeepLinkCallback, url, isSaveDeepLinkInfo); } catch (Exception e) { SALog.printStackTrace(e); } }
// Verifies that requestDeferredDeepLink flips the persisted "already requested" flag:
// before the call the flag is absent and a request is allowed; after the (async) call
// completes, the flag exists and further requests are suppressed.
// NOTE(review): relies on a 500ms sleep for the async request — potentially flaky.
@Test public void requestDeferredDeepLink() { SAStoreManager.getInstance().remove(DbParams.PersistentName.REQUEST_DEFERRER_DEEPLINK); Assert.assertFalse(ChannelUtils.isExistRequestDeferredDeeplink()); Assert.assertTrue(ChannelUtils.isRequestDeferredDeeplink()); SensorsDataAPI.sharedInstance().requestDeferredDeepLink(null); try { Thread.sleep(500); } catch (InterruptedException e) { e.printStackTrace(); } Assert.assertTrue(ChannelUtils.isExistRequestDeferredDeeplink()); Assert.assertFalse(ChannelUtils.isRequestDeferredDeeplink()); }
/**
 * Masks sensitive literals in a KSQL statement by parsing it and re-rendering via the
 * masking Visitor. If the statement cannot be parsed (including parser stack overflow
 * on pathological input), falls back to a best-effort string-based masking.
 */
public static String getMaskedStatement(final String query) {
    try {
        final ParseTree tree = DefaultKsqlParser.getParseTree(query);
        return new Visitor().visit(tree);
    } catch (final Exception | StackOverflowError e) {
        // Deliberately broad: masking must never fail, even on unparseable input.
        return fallbackMasking(query);
    }
}
// Verifies the fallback path: a statement with a typo ("WIT" instead of "WITH") cannot be
// parsed, and since it contains no sensitive literals the fallback returns it unchanged.
@Test public void shouldNotMaskInvalidCreateStreamWithoutQuote() { // Given: // Typo in "WITH" => "WIT" final String query = "CREATE STREAM `stream` WIT (" + " format = 'avro', \n" + " kafka_topic = 'test_topic'," + "\"partitions\"= 3,\n" + ");"; // When final String maskedQuery = QueryMask.getMaskedStatement(query); // Then assertThat(maskedQuery, is(query)); }
/**
 * Convenience overload: returns the dataset's schemas ordered so that every schema
 * appears after the schemas it depends on. Delegates to the List-based overload.
 */
public static List<HollowSchema> dependencyOrderedSchemaList(HollowDataset dataset) {
    return dependencyOrderedSchemaList(dataset.getSchemas());
}
// Verifies topological ordering: schemas declared out of order in the input text come back
// sorted so dependencies precede dependents (String -> ListOfString -> TypeB -> TypeA).
@Test public void schemasAreSortedBasedOnDependencies() throws IOException { String schemasText = "TypeB {" + " ListOfString str;" + "}" + "" + "String {" + " string value;" + "}" + "" + "ListOfString List<String>;" + "" + "TypeA {" + " TypeB b;" + " String str;" + "}"; List<HollowSchema> schemas = HollowSchemaParser.parseCollectionOfSchemas(schemasText); List<HollowSchema> sortedSchemas = HollowSchemaSorter.dependencyOrderedSchemaList(schemas); Assert.assertEquals(4, sortedSchemas.size()); Assert.assertEquals("String", sortedSchemas.get(0).getName()); Assert.assertEquals("ListOfString", sortedSchemas.get(1).getName()); Assert.assertEquals("TypeB", sortedSchemas.get(2).getName()); Assert.assertEquals("TypeA", sortedSchemas.get(3).getName()); }
/**
 * Returns the Vespa hostname from the default configuration.
 */
public static String getHostName () {
    return getDefaults().vespaHostname();
}
// Smoke test: the resolved hostname must be non-null and non-empty.
@Test public void testSimple () { String name = LogUtils.getHostName(); assertNotNull(name); assertFalse(name.equals("")); }
public String getDiscriminatingValue(ILoggingEvent event) { // http://jira.qos.ch/browse/LBCLASSIC-213 Map<String, String> mdcMap = event.getMDCPropertyMap(); if (mdcMap == null) { return defaultValue; } String mdcValue = mdcMap.get(key); if (mdcValue == null) { return defaultValue; } else { return mdcValue; } }
// An event created without MDC data exposes an empty MDC map, so the discriminator
// must fall back to its configured default value.
@Test public void nullMDC() { event = new LoggingEvent("a", logger, Level.DEBUG, "", null, null); assertEquals(new HashMap<String, String>(), event.getMDCPropertyMap()); String discriminatorValue = discriminator.getDiscriminatingValue(event); assertEquals(DEFAULT_VAL, discriminatorValue); }
/**
 * Registers the given console status listener with the context's status manager
 * and starts it — but only if the manager actually accepted it (a duplicate
 * console listener is rejected by the manager and must not be started).
 */
public static void addOnConsoleListenerInstance(Context context, OnConsoleStatusListener onConsoleStatusListener) {
    onConsoleStatusListener.setContext(context);
    if (context.getStatusManager().add(onConsoleStatusListener)) {
        onConsoleStatusListener.start();
    }
}
// The status manager only accepts one console listener; the second add is rejected,
// so the second listener must remain un-started.
@Test public void addOnConsoleListenerInstanceShouldNotStartSecondListener() { OnConsoleStatusListener ocl0 = new OnConsoleStatusListener(); OnConsoleStatusListener ocl1 = new OnConsoleStatusListener(); StatusListenerConfigHelper.addOnConsoleListenerInstance(context, ocl0); { List<StatusListener> listeners = sm.getCopyOfStatusListenerList(); assertEquals(1, listeners.size()); assertTrue(ocl0.isStarted()); } // second listener should not have been started StatusListenerConfigHelper.addOnConsoleListenerInstance(context, ocl1); { List<StatusListener> listeners = sm.getCopyOfStatusListenerList(); assertEquals(1, listeners.size()); assertFalse(ocl1.isStarted()); } }
/**
 * Returns the Kafka Streams config map augmented with the resource label for the
 * given application id. The returned map is unmodifiable.
 */
public Map<String, Object> getKsqlStreamConfigProps(final String applicationId) {
    final Map<String, Object> map = new HashMap<>(getKsqlStreamConfigProps());
    map.put(
        MetricCollectors.RESOURCE_LABEL_PREFIX
            + StreamsConfig.APPLICATION_ID_CONFIG,
        applicationId
    );
    // Streams client metrics aren't used in Confluent deployment
    possiblyConfigureConfluentTelemetry(map);
    return Collections.unmodifiableMap(map);
}
// With an empty config, no auto.offset.reset default should be injected into the
// streams properties — the broker/client default must win.
@Test public void shouldNotSetAutoOffsetResetByDefault() { final KsqlConfig ksqlConfig = new KsqlConfig(Collections.emptyMap()); final Object result = ksqlConfig.getKsqlStreamConfigProps().get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG); assertThat(result, is(nullValue())); }
/**
 * Deletes the given files and buckets from S3. Regular objects (and in-progress
 * multipart uploads) are deleted first; containers (buckets) are collected and
 * deleted last, after their contents, since a bucket delete fails while non-empty.
 *
 * @param files    files/buckets to delete, mapped to their transfer status
 * @param prompt   password callback (unused here but part of the interface)
 * @param callback notified before each delete attempt
 * @throws BackgroundException when a service error occurs deleting an object or bucket
 */
public void delete(final Map<Path, TransferStatus> files, final PasswordCallback prompt, final Callback callback) throws BackgroundException {
    final List<Path> containers = new ArrayList<>();
    for(Path file : files.keySet()) {
        if(containerService.isContainer(file)) {
            // Defer bucket deletion until all contained objects are gone.
            containers.add(file);
        }
        else {
            callback.delete(file);
            final Path bucket = containerService.getContainer(file);
            if(file.getType().contains(Path.Type.upload)) {
                // In-progress multipart upload
                try {
                    multipartService.delete(new MultipartUpload(file.attributes().getVersionId(),
                        bucket.isRoot() ? StringUtils.EMPTY : bucket.getName(), containerService.getKey(file)));
                }
                catch(NotfoundException ignored) {
                    // Upload may have completed or been aborted concurrently.
                    log.warn(String.format("Ignore failure deleting multipart upload %s", file));
                }
            }
            else {
                try {
                    // Always returning 204 even if the key does not exist. Does not return 404 for non-existing keys
                    session.getClient().deleteVersionedObject(
                        file.attributes().getVersionId(),
                        bucket.isRoot() ? StringUtils.EMPTY : bucket.getName(), containerService.getKey(file));
                }
                catch(ServiceException e) {
                    throw new S3ExceptionMappingService().map("Cannot delete {0}", e, file);
                }
            }
        }
    }
    for(Path file : containers) {
        callback.delete(file);
        try {
            final String bucket = containerService.getContainer(file).getName();
            session.getClient().deleteBucket(bucket);
            // Drop the cached region mapping for the now-deleted bucket.
            session.getClient().getRegionEndpointCache().removeRegionForBucketName(bucket);
        }
        catch(ServiceException e) {
            throw new S3ExceptionMappingService().map("Cannot delete {0}", e, file);
        }
    }
}
// Verifies that an object whose key contains a backslash can be created, found and deleted.
@Test public void testDeleteFileBackslash() throws Exception { final Path container = new Path("test-eu-central-1-cyberduck", EnumSet.of(Path.Type.volume, Path.Type.directory)); final Path test = new Path(container, String.format("%s\\%s", new AlphanumericRandomStringService().random(), new AlphanumericRandomStringService().random()), EnumSet.of(Path.Type.file)); new S3TouchFeature(session, new S3AccessControlListFeature(session)).touch(test, new TransferStatus()); assertTrue(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(test)); new S3DefaultDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback()); assertFalse(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(test)); }
/**
 * Diagnostic summary of this scheduler: total scheduled entries, distinct keys
 * and distinct second-groups.
 */
@Override
public String toString() {
    return String.format("EntryTaskScheduler{numberOfEntries=%d, numberOfKeys=%d, numberOfGroups=%d}",
            size(), keys.size(), groups.size());
}
// Smoke test: toString() on a freshly constructed scheduler must not be null (and must not throw).
@Test public void test_toString() { scheduler = new SecondsBasedEntryTaskScheduler<>(taskScheduler, entryProcessor, FOR_EACH); assertNotNull(scheduler.toString()); }
/**
 * After-interception hook: initializes the router config service with the Spring
 * cache name and the application name cached at startup, then passes the context through.
 */
@Override
public ExecuteContext after(ExecuteContext context) {
    configService.init(RouterConstant.SPRING_CACHE_NAME, AppCache.INSTANCE.getAppName());
    return context;
}
// after() must initialize the config service with the Spring cache name and the cached app name.
@Test public void testAfter() { AppCache.INSTANCE.setAppName("foo"); interceptor.after(context); Assert.assertEquals(RouterConstant.SPRING_CACHE_NAME, configService.getCacheName()); Assert.assertEquals("foo", configService.getServiceName()); }
/**
 * Accumulates StAX {@link XMLEvent}s into complete top-level stanzas.
 * Depth 0 is the stream root: its start/end elements are forwarded as raw events.
 * Everything below is written into an internal writer; when the depth returns to 1
 * the buffered fragment is transformed into a dom4j Element and emitted.
 *
 * Bug fix: the catch block previously logged {@code e.getCause().getMessage()},
 * which throws NullPointerException when the exception has no cause, masking the
 * original error. It now logs the exception itself (with stack trace) before rethrowing.
 */
@Override
protected void decode(ChannelHandlerContext ctx, Object object, List out) throws Exception {
    try {
        if (object instanceof XMLEvent) {
            final XMLEvent event = (XMLEvent) object;
            // Document-level markers carry no stanza content.
            if (event.isStartDocument() || event.isEndDocument()) {
                return;
            }
            // Whitespace/characters between top-level stanzas are noise.
            if (event.isCharacters() && depth <= 1) {
                return;
            }
            // Stream-root open tag: forward as-is.
            if (depth < 1 && event.isStartElement()) {
                out.add(object);
                depth++;
                return;
            }
            // Stream-root close tag: forward as-is.
            if (depth <= 1 && event.isEndElement()) {
                out.add(object);
                depth--;
                return;
            }
            writer.add(event);
            if (event.isStartElement()) {
                depth++;
            } else if (event.isEndElement()) {
                depth--;
                if (depth == 1) {
                    // A complete stanza has been buffered; materialize and emit it.
                    writer.flush();
                    org.dom4j.Element xmlElement = transform().getRootElement();
                    out.add(xmlElement);
                    writer.close();
                    resetWriter();
                }
            }
        }
    } catch (Exception e) {
        // Log the exception itself: getCause() may be null and the stack trace matters.
        logger.info("Failed to merge XML event stream", e);
        throw e;
    }
}
// Feeds a pre-built pubsub publish event stream (with depth primed to 1, i.e. inside the
// stream root) through the merger and verifies the emitted dom4j IQ element structure.
@Test public void testMergePublishMsg() throws Exception { List<Object> list = Lists.newArrayList(); xmlMerger.depth = 1; publishMsgEventList.forEach(xmlEvent -> { try { xmlMerger.decode(new ChannelHandlerContextAdapter(), xmlEvent, list); } catch (Exception e) { fail(); } }); Element root = (Element) list.get(0); assertThat("Top level element should be of type IQ", root.getQName().getName(), Matchers.is("iq")); assertThat(root.attributes().size(), Matchers.is(4)); assertNotNull("<pubsub> element should be accessible", root.element("pubsub")); assertNotNull("<publish> element should be accessible", root.element("pubsub").element("publish")); assertThat(root.element("pubsub").getNamespaceURI(), Matchers.is("http://jabber.org/protocol/pubsub")); assertThat(root.element("pubsub").element("publish").attribute("node").getValue(), Matchers.is("test")); }
/**
 * Validates the value with the superclass first; when that passes, additionally
 * enforces that the string's length lies within [minLength, maxLength].
 */
@Override
public ValidationResult validate(Object value) {
    final ValidationResult base = super.validate(value);
    if (!(base instanceof ValidationResult.ValidationPassed)) {
        // Superclass already rejected the value; propagate its failure untouched.
        return base;
    }
    final int length = ((String) value).length();
    if (length < minLength || length > maxLength) {
        return new ValidationResult.ValidationFailed("Value is not between " + minLength + " and " + maxLength + " in length!");
    }
    return base;
}
// A one-character string fails a validator requiring length exactly 2.
@Test public void testValidateShortString() { assertThat(new LimitedStringValidator(2, 2).validate("1")) .isInstanceOf(ValidationResult.ValidationFailed.class); }
static String formatAuthorizationHeader(String clientId, String clientSecret, boolean urlencode) throws UnsupportedEncodingException { clientId = sanitizeString("the token endpoint request client ID parameter", clientId); clientSecret = sanitizeString("the token endpoint request client secret parameter", clientSecret); // according to RFC-6749 clientId & clientSecret must be urlencoded, see https://tools.ietf.org/html/rfc6749#section-2.3.1 if (urlencode) { clientId = URLEncoder.encode(clientId, StandardCharsets.UTF_8.name()); clientSecret = URLEncoder.encode(clientSecret, StandardCharsets.UTF_8.name()); } String s = String.format("%s:%s", clientId, clientSecret); // Per RFC-7617, we need to use the *non-URL safe* base64 encoder. See KAFKA-14496. String encoded = Base64.getEncoder().encodeToString(Utils.utf8(s)); return String.format("Basic %s", encoded); }
// Null, empty and whitespace-only client id/secret must all be rejected with IllegalArgumentException.
@Test public void testFormatAuthorizationHeaderMissingValues() { assertThrows(IllegalArgumentException.class, () -> HttpAccessTokenRetriever.formatAuthorizationHeader(null, "secret", false)); assertThrows(IllegalArgumentException.class, () -> HttpAccessTokenRetriever.formatAuthorizationHeader("id", null, false)); assertThrows(IllegalArgumentException.class, () -> HttpAccessTokenRetriever.formatAuthorizationHeader(null, null, false)); assertThrows(IllegalArgumentException.class, () -> HttpAccessTokenRetriever.formatAuthorizationHeader("", "secret", false)); assertThrows(IllegalArgumentException.class, () -> HttpAccessTokenRetriever.formatAuthorizationHeader("id", "", false)); assertThrows(IllegalArgumentException.class, () -> HttpAccessTokenRetriever.formatAuthorizationHeader("", "", false)); assertThrows(IllegalArgumentException.class, () -> HttpAccessTokenRetriever.formatAuthorizationHeader("  ", "secret", false)); assertThrows(IllegalArgumentException.class, () -> HttpAccessTokenRetriever.formatAuthorizationHeader("id", "  ", false)); assertThrows(IllegalArgumentException.class, () -> HttpAccessTokenRetriever.formatAuthorizationHeader("  ", "  ", false)); }
/**
 * Creates EnvironmentSettings from the given configuration using the current thread's
 * context class loader.
 *
 * @deprecated use {@code EnvironmentSettings.newInstance().withConfiguration(...)} instead.
 */
@Deprecated
public static EnvironmentSettings fromConfiguration(ReadableConfig configuration) {
    return new EnvironmentSettings(
            (Configuration) configuration, Thread.currentThread().getContextClassLoader());
}
// Setting execution.runtime-mode=batch via configuration must yield non-streaming settings.
@Test void testFromConfiguration() { Configuration configuration = new Configuration(); configuration.setString("execution.runtime-mode", "batch"); EnvironmentSettings settings = EnvironmentSettings.newInstance().withConfiguration(configuration).build(); assertThat(settings.isStreamingMode()).as("Expect batch mode.").isFalse(); }
/**
 * Raises a TooLongFrameException for an over-long frame. When the frame length is
 * already known (> 0) it is included in the message ("discarded"); otherwise the
 * frame is still being discarded and no length is reported ("discarding").
 */
private void fail(long frameLength) {
    final String detail = frameLength > 0
            ? ": " + frameLength + " - discarded"
            : " - discarding";
    throw new TooLongFrameException("frame length exceeds " + maxFrameLength + detail);
}
// With failFast=false the decoder raises TooLongFrameException only once the delimiter
// arrives, then recovers and decodes the next frame normally — exercised twice in a row.
@Test public void testFailSlowTooLongFrameRecovery() throws Exception { EmbeddedChannel ch = new EmbeddedChannel( new LenientDelimiterBasedFrameDecoder(1, true, false, false, Delimiters.nulDelimiter())); for (int i = 0; i < 2; i++) { ch.writeInbound(Unpooled.wrappedBuffer(new byte[]{1, 2})); try { assertTrue(ch.writeInbound(Unpooled.wrappedBuffer(new byte[]{0}))); fail(DecoderException.class.getSimpleName() + " must be raised."); } catch (TooLongFrameException e) { // Expected } ch.writeInbound(Unpooled.wrappedBuffer(new byte[]{'A', 0})); ByteBuf buf = ch.readInbound(); assertEquals("A", buf.toString(CharsetUtil.ISO_8859_1)); buf.release(); } }
/**
 * Shallow-parses one complete IPFIX message: reads the message header, validates that
 * the buffer holds exactly the declared message length, then walks each contained set
 * and dispatches on the set id (2 = template set, 3 = options template set, &gt;=256 = data set).
 *
 * @param packet buffer positioned at the start of a complete IPFIX message
 * @return description holding the header plus shallow-parsed sets
 * @throws IllegalArgumentException when the buffer does not contain the whole message
 * @throws IpfixException when a reserved/invalid set id (0 or 1) is encountered
 */
public MessageDescription shallowParseMessage(ByteBuf packet) {
    final ByteBuf buffer = packet.readSlice(MessageHeader.LENGTH);
    LOG.debug("Shallow parse header\n{}", ByteBufUtil.prettyHexDump(buffer));
    final MessageHeader header = parseMessageHeader(buffer);
    final MessageDescription messageDescription = new MessageDescription(header);
    // sanity check: we need the complete packet in the buffer
    if (header.length() != packet.readableBytes() + MessageHeader.LENGTH) {
        throw new IllegalArgumentException("Buffer does not contain the complete IPFIX message");
    }
    // loop over all the contained sets in the message
    while (packet.isReadable()) {
        final int setId = packet.readUnsignedShort();
        final int setLength = packet.readUnsignedShort();
        // the buffer limited to the declared length of the set.
        // setLength includes the 4 bytes of set id + length already consumed above.
        final ByteBuf setContent = packet.readSlice(setLength - 4);
        switch (setId) {
            case 0:
            case 1:
                throw new IpfixException("Invalid set id in IPFIX message: " + setId);
            case 2:
                final ShallowTemplateSet templateSet = shallowParseTemplateSet(setContent);
                messageDescription.addTemplateSet(templateSet);
                break;
            case 3:
                final ShallowOptionsTemplateSet optionsTemplateSet = shallowParseOptionsTemplateSet(setContent);
                messageDescription.addOptionsTemplateSet(optionsTemplateSet);
                break;
            default:
                // Any other set id identifies a data set keyed by its template id.
                final ShallowDataSet dataSet = shallowParseDataSet(setId, setLength, setContent, header.exportTime());
                messageDescription.addDataSet(dataSet);
                break;
        }
    }
    return messageDescription;
}
// A capture containing only a data set yields no template records and exactly one data set
// referencing template id 256.
@Test public void onlyDataSets() throws IOException { final ByteBuf packet = Utils.readPacket("dataset-only.ipfix"); final IpfixParser.MessageDescription description = new IpfixParser(definitions).shallowParseMessage(packet); assertThat(description.templateRecords()).isEmpty(); assertThat(description.optionsTemplateRecords()).isEmpty(); assertThat(description.dataSets()) .hasSize(1) .extracting(ShallowDataSet::templateId).containsExactly(256); }
public FEELFnResult<BigDecimal> invoke(@ParameterName("from") String from, @ParameterName("grouping separator") String group, @ParameterName("decimal separator") String decimal) { if ( from == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "from", "cannot be null")); } if ( group != null && !group.equals( " " ) && !group.equals( "." ) && !group.equals( "," ) ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "group", "not a valid one, can only be one of: dot ('.'), comma (','), space (' ') ")); } if ( decimal != null ) { if (!decimal.equals( "." ) && !decimal.equals( "," )) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "decimal", "not a valid one, can only be one of: dot ('.'), comma (',') ")); } else if (group != null && decimal.equals( group )) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "decimal", "cannot be the same as parameter 'group' ")); } } if ( group != null ) { from = from.replaceAll( "\\" + group, "" ); } if ( decimal != null ) { from = from.replaceAll( "\\" + decimal, "." ); } BigDecimal result = NumberEvalHelper.getBigDecimalOrNull(from ); if( from != null && result == null ) { // conversion failed return FEELFnResult.ofError( new InvalidParametersEvent(Severity.ERROR, "unable to calculate final number result" ) ); } else { return FEELFnResult.ofResult( result ); } }
// Comma as the grouping separator: "9,876" -> 9876 and "9,876,000" -> 9876000.
@Test void invokeNumberWithGroupCharComma() { FunctionTestUtil.assertResult(numberFunction.invoke("9,876", ",", null), BigDecimal.valueOf(9876)); FunctionTestUtil.assertResult(numberFunction.invoke("9,876,000", ",", null), BigDecimal.valueOf(9876000)); }
@Override public void addAt(long offset, T value) { // we consider -1 a legal offset to account for loading values from the 0-0.checkpoint if (offset < -1) { throw new IllegalArgumentException( String.format("Next offset %d must be greater than or equal to -1", offset) ); } Map.Entry<Long, ?> lastEntry = history.lastEntry(); if (lastEntry != null && offset <= lastEntry.getKey()) { throw new IllegalArgumentException( String.format("Next offset %d must be greater than the last offset %d", offset, lastEntry.getKey()) ); } history.put(offset, value); }
// Exercises the offset invariants: below -1 rejected, -1 accepted, non-increasing offsets
// rejected, and valueAtOrBefore/lastEntry reflect the recorded history.
@Test void testAddAt() { TreeMapLogHistory<String> history = new TreeMapLogHistory<>(); assertThrows(IllegalArgumentException.class, () -> history.addAt(-2, "")); assertEquals(Optional.empty(), history.lastEntry()); history.addAt(-1, "-1"); assertEquals(Optional.of("-1"), history.valueAtOrBefore(-1)); assertEquals(Optional.of("-1"), history.valueAtOrBefore(0)); history.addAt(100, "100"); assertThrows(IllegalArgumentException.class, () -> history.addAt(99, "")); assertThrows(IllegalArgumentException.class, () -> history.addAt(100, "")); assertEquals(Optional.of("100"), history.valueAtOrBefore(100)); assertEquals(Optional.of("100"), history.valueAtOrBefore(201)); history.addAt(200, "200"); assertEquals(Optional.empty(), history.valueAtOrBefore(-2)); assertEquals(Optional.of("-1"), history.valueAtOrBefore(-1)); assertEquals(Optional.of("100"), history.valueAtOrBefore(100)); assertEquals(Optional.of("100"), history.valueAtOrBefore(101)); assertEquals(Optional.of("100"), history.valueAtOrBefore(199)); assertEquals(Optional.of("200"), history.valueAtOrBefore(200)); assertEquals(Optional.of("200"), history.valueAtOrBefore(201)); assertEquals(Optional.of(new LogHistory.Entry<>(200, "200")), history.lastEntry()); }
/**
 * Serializes the given AST node to JSON via the generator. The generator is always
 * closed, even when serialization throws.
 *
 * @throws NullPointerException when node is null
 */
public void serialize(Node node, JsonGenerator generator) {
    requireNonNull(node);
    Log.info("Serializing Node to JSON.");
    try {
        serialize(null, node, generator);
    } finally {
        generator.close();
    }
}
// Golden-output test: parsing a minimal class and serializing it (without pretty printing)
// must produce exactly this JSON, including ranges, token ranges and node metadata.
@Test void test() { CompilationUnit cu = parse("class X{java.util.Y y;}"); String serialized = serialize(cu, false); assertEquals( "{\"!\":\"com.github.javaparser.ast.CompilationUnit\",\"range\":{\"beginLine\":1,\"beginColumn\":1,\"endLine\":1,\"endColumn\":23},\"tokenRange\":{\"beginToken\":{\"kind\":19,\"text\":\"class\"},\"endToken\":{\"kind\":0,\"text\":\"\"}},\"imports\":[],\"types\":[{\"!\":\"com.github.javaparser.ast.body.ClassOrInterfaceDeclaration\",\"range\":{\"beginLine\":1,\"beginColumn\":1,\"endLine\":1,\"endColumn\":23},\"tokenRange\":{\"beginToken\":{\"kind\":19,\"text\":\"class\"},\"endToken\":{\"kind\":104,\"text\":\"}\"}},\"extendedTypes\":[],\"implementedTypes\":[],\"isInterface\":\"false\",\"permittedTypes\":[],\"typeParameters\":[],\"members\":[{\"!\":\"com.github.javaparser.ast.body.FieldDeclaration\",\"range\":{\"beginLine\":1,\"beginColumn\":9,\"endLine\":1,\"endColumn\":22},\"tokenRange\":{\"beginToken\":{\"kind\":98,\"text\":\"java\"},\"endToken\":{\"kind\":107,\"text\":\";\"}},\"modifiers\":[],\"variables\":[{\"!\":\"com.github.javaparser.ast.body.VariableDeclarator\",\"range\":{\"beginLine\":1,\"beginColumn\":21,\"endLine\":1,\"endColumn\":21},\"tokenRange\":{\"beginToken\":{\"kind\":98,\"text\":\"y\"},\"endToken\":{\"kind\":98,\"text\":\"y\"}},\"name\":{\"!\":\"com.github.javaparser.ast.expr.SimpleName\",\"range\":{\"beginLine\":1,\"beginColumn\":21,\"endLine\":1,\"endColumn\":21},\"tokenRange\":{\"beginToken\":{\"kind\":98,\"text\":\"y\"},\"endToken\":{\"kind\":98,\"text\":\"y\"}},\"identifier\":\"y\"},\"type\":{\"!\":\"com.github.javaparser.ast.type.ClassOrInterfaceType\",\"range\":{\"beginLine\":1,\"beginColumn\":9,\"endLine\":1,\"endColumn\":19},\"tokenRange\":{\"beginToken\":{\"kind\":98,\"text\":\"java\"},\"endToken\":{\"kind\":98,\"text\":\"Y\"}},\"name\":{\"!\":\"com.github.javaparser.ast.expr.SimpleName\",\"range\":{\"beginLine\":1,\"beginColumn\":19,\"endLine\":1,\"endColumn\":19},\"tokenRange\":{\"beginToken\":{\"kind\":98,\"text\":\"Y\"},\"endToken\":{\"kind\":98,\"text\":\"Y\"}},\"identifier\":\"Y\"},\"scope\":{\"!\":\"com.github.javaparser.ast.type.ClassOrInterfaceType\",\"range\":{\"beginLine\":1,\"beginColumn\":9,\"endLine\":1,\"endColumn\":17},\"tokenRange\":{\"beginToken\":{\"kind\":98,\"text\":\"java\"},\"endToken\":{\"kind\":98,\"text\":\"util\"}},\"name\":{\"!\":\"com.github.javaparser.ast.expr.SimpleName\",\"range\":{\"beginLine\":1,\"beginColumn\":14,\"endLine\":1,\"endColumn\":17},\"tokenRange\":{\"beginToken\":{\"kind\":98,\"text\":\"util\"},\"endToken\":{\"kind\":98,\"text\":\"util\"}},\"identifier\":\"util\"},\"scope\":{\"!\":\"com.github.javaparser.ast.type.ClassOrInterfaceType\",\"range\":{\"beginLine\":1,\"beginColumn\":9,\"endLine\":1,\"endColumn\":12},\"tokenRange\":{\"beginToken\":{\"kind\":98,\"text\":\"java\"},\"endToken\":{\"kind\":98,\"text\":\"java\"}},\"name\":{\"!\":\"com.github.javaparser.ast.expr.SimpleName\",\"range\":{\"beginLine\":1,\"beginColumn\":9,\"endLine\":1,\"endColumn\":12},\"tokenRange\":{\"beginToken\":{\"kind\":98,\"text\":\"java\"},\"endToken\":{\"kind\":98,\"text\":\"java\"}},\"identifier\":\"java\"},\"annotations\":[]},\"annotations\":[]},\"annotations\":[]}}],\"annotations\":[]}],\"modifiers\":[],\"name\":{\"!\":\"com.github.javaparser.ast.expr.SimpleName\",\"range\":{\"beginLine\":1,\"beginColumn\":7,\"endLine\":1,\"endColumn\":7},\"tokenRange\":{\"beginToken\":{\"kind\":98,\"text\":\"X\"},\"endToken\":{\"kind\":98,\"text\":\"X\"}},\"identifier\":\"X\"},\"annotations\":[]}]}", serialized); }
/**
 * Delegates to the superclass to get/create the segment if it is still within the
 * retention period, then evicts any segments that expired at the current stream time.
 */
@Override
public TimestampedSegment getOrCreateSegmentIfLive(final long segmentId,
                                                   final ProcessorContext context,
                                                   final long streamTime) {
    final TimestampedSegment segment = super.getOrCreateSegmentIfLive(segmentId, context, streamTime);
    // Piggyback expired-segment cleanup on every live-segment access.
    cleanupExpiredSegments(streamTime);
    return segment;
}
// The segment's toString must report its id and derived name.
@Test public void shouldGetCorrectSegmentString() { final TimestampedSegment segment = segments.getOrCreateSegmentIfLive(0, context, -1L); assertEquals("TimestampedSegment(id=0, name=test.0)", segment.toString()); }
/**
 * One duty-cycle iteration of the driver conductor. Processes timers, client commands
 * (unless an async command is already in flight), the internal command queue, stream
 * position tracking, name resolution, and end-of-life resource cleanup.
 *
 * @return total amount of work performed this cycle (0 means idle)
 */
public int doWork() {
    final long nowNs = nanoClock.nanoTime();
    trackTime(nowNs);

    int workCount = 0;
    workCount += processTimers(nowNs);

    // Do not consume new client commands while an async command is still being processed.
    if (!asyncClientCommandInFlight) {
        workCount += clientCommandAdapter.receive();
    }

    workCount += drainCommandQueue();
    workCount += trackStreamPositions(workCount, nowNs);
    workCount += nameResolver.doWork(cachedEpochClock.time());
    workCount += freeEndOfLifeResources(ctx.resourceFreeLimit());

    return workCount;
}
// Adding a subscription must register a receive channel endpoint, forward the subscription
// to the receiver, and notify the client that the subscription is ready.
@Test void shouldBeAbleToAddSingleSubscription() { final long id = driverProxy.addSubscription(CHANNEL_4000, STREAM_ID_1); driverConductor.doWork(); final ArgumentCaptor<ReceiveChannelEndpoint> captor = ArgumentCaptor.forClass(ReceiveChannelEndpoint.class); verify(receiverProxy).registerReceiveChannelEndpoint(captor.capture()); receiveChannelEndpoint = captor.getValue(); verify(receiverProxy).addSubscription(any(), eq(STREAM_ID_1)); verify(mockClientProxy).onSubscriptionReady(eq(id), anyInt()); }
/**
 * Resolves service instances for the given service id from business configuration.
 * Unknown service ids (no mapped config name) yield an empty list.
 */
@Override
public List<ServiceDTO> getServiceInstances(String serviceId) {
    final String configName = SERVICE_ID_TO_CONFIG_NAME.get(serviceId);
    return configName == null
            ? Collections.emptyList()
            : assembleServiceDTO(serviceId, bizConfig.getValue(configName));
}
// A comma-separated config value must be split into one ServiceDTO per URL, each carrying
// the service name, a "name:url" instance id, and the URL as homepage.
@Test public void testGetAdminServiceInstances() { String someUrl = "http://some-host/some-path"; String anotherUrl = "http://another-host/another-path"; when(bizConfig.getValue(adminServiceConfigName)) .thenReturn(String.format("%s,%s", someUrl, anotherUrl)); List<ServiceDTO> serviceDTOList = kubernetesDiscoveryService .getServiceInstances(ServiceNameConsts.APOLLO_ADMINSERVICE); assertEquals(2, serviceDTOList.size()); ServiceDTO serviceDTO = serviceDTOList.get(0); assertEquals(ServiceNameConsts.APOLLO_ADMINSERVICE, serviceDTO.getAppName()); assertEquals(String.format("%s:%s", ServiceNameConsts.APOLLO_ADMINSERVICE, someUrl), serviceDTO.getInstanceId()); assertEquals(someUrl, serviceDTO.getHomepageUrl()); ServiceDTO anotherServiceDTO = serviceDTOList.get(1); assertEquals(ServiceNameConsts.APOLLO_ADMINSERVICE, anotherServiceDTO.getAppName()); assertEquals(String.format("%s:%s", ServiceNameConsts.APOLLO_ADMINSERVICE, anotherUrl), anotherServiceDTO.getInstanceId()); assertEquals(anotherUrl, anotherServiceDTO.getHomepageUrl()); }
/**
 * Extracts column {@code i} as a sparse vector of length dim1, keeping only non-zero entries.
 *
 * @param i column index, must be in [0, dim2)
 * @throws IllegalArgumentException when i is out of range
 */
@Override
public SparseVector getColumn(int i) {
    // Bug fix: the original check used (i > dim2), incorrectly accepting i == dim2
    // even though valid indices are [0, dim2) as its own error message states.
    if (i < 0 || i >= dim2) {
        throw new IllegalArgumentException("Invalid column index, must be [0,"+dim2+"), received " + i);
    }
    List<Integer> indexList = new ArrayList<>();
    List<Double> valueList = new ArrayList<>();
    // Collect the non-zero entries of the column.
    for (int j = 0; j < dim1; j++) {
        double tmp = values[j].get(i);
        if (tmp != 0) {
            indexList.add(j);
            valueList.add(tmp);
        }
    }
    int[] indicesArr = new int[valueList.size()];
    double[] valuesArr = new double[valueList.size()];
    for (int j = 0; j < valueList.size(); j++) {
        indicesArr[j] = indexList.get(j);
        valuesArr[j] = valueList.get(j);
    }
    return new SparseVector(dim1, indicesArr, valuesArr);
}
// Column 1 of a 3x3 diagonal matrix must contain e at index 1 and zeros elsewhere.
@Test public void testGetColumn() { DenseSparseMatrix diagonal = DenseSparseMatrix.createDiagonal(new DenseVector(new double[] {1.618033988749894, Math.E, Math.PI})); SparseVector column = diagonal.getColumn(1); assertEquals(3, column.size()); assertEquals(0, column.get(0)); assertEquals(Math.E, column.get(1)); assertEquals(0, column.get(2)); }
/**
 * Annotation-processor round: collects all methods annotated with any of the supported
 * scheduling trigger annotations, groups them by enclosing type, and generates one
 * scheduling module per type. Processing errors are reported; IO errors are fatal.
 *
 * @return false so other processors may also claim these annotations
 */
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
    // Keep only the annotations that match one of the known scheduling triggers.
    var triggers = annotations.stream()
        .filter(te -> {
            for (var trigger : KoraSchedulingAnnotationProcessor.triggers) {
                if (te.getQualifiedName().contentEquals(trigger.canonicalName())) {
                    return true;
                }
            }
            return false;
        })
        .toArray(TypeElement[]::new);
    var scheduledMethods = roundEnv.getElementsAnnotatedWithAny(triggers);
    // Group annotated methods by the fully-qualified name of their enclosing type.
    var scheduledTypes = scheduledMethods.stream().collect(Collectors.groupingBy(e -> {
        var type = (TypeElement) e.getEnclosingElement();
        return type.getQualifiedName().toString();
    }));
    for (var entry : scheduledTypes.entrySet()) {
        var methods = entry.getValue();
        var type = (TypeElement) entry.getValue().get(0).getEnclosingElement();
        try {
            this.generateModule(type, methods);
        } catch (ProcessingErrorException e) {
            e.printError(this.processingEnv);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        // todo exceptions
    }
    return false;
}
// Smoke test: processing a class annotated with the JDK at-fixed-rate trigger must not throw.
@Test void testScheduledJdkAtFixedRateTest() throws Exception { process(ScheduledJdkAtFixedRateTest.class); }
/**
 * Updates the named histogram with a value taken from the message header
 * (HEADER_HISTOGRAM_VALUE) when present, otherwise from the endpoint's configured
 * value. Logs a warning and skips the update when neither yields a value.
 */
@Override
protected void doProcess(Exchange exchange, MetricsEndpoint endpoint, MetricRegistry registry, String metricsName) throws Exception {
    Message in = exchange.getIn();
    Histogram histogram = registry.histogram(metricsName);
    Long value = endpoint.getValue();
    // Header value takes precedence over the endpoint-configured default.
    Long finalValue = getLongHeader(in, HEADER_HISTOGRAM_VALUE, value);
    if (finalValue != null) {
        histogram.update(finalValue);
    } else {
        LOG.warn("Cannot update histogram \"{}\" with null value", metricsName);
    }
}
// A histogram value carried in the message header must override the endpoint's configured value.
@Test public void testProcessOverrideValue() throws Exception { when(endpoint.getValue()).thenReturn(VALUE); when(in.getHeader(HEADER_HISTOGRAM_VALUE, VALUE, Long.class)).thenReturn(VALUE + 3); producer.doProcess(exchange, endpoint, registry, METRICS_NAME); inOrder.verify(exchange, times(1)).getIn(); inOrder.verify(registry, times(1)).histogram(METRICS_NAME); inOrder.verify(endpoint, times(1)).getValue(); inOrder.verify(in, times(1)).getHeader(HEADER_HISTOGRAM_VALUE, VALUE, Long.class); inOrder.verify(histogram, times(1)).update(VALUE + 3); inOrder.verifyNoMoreInteractions(); }
/**
 * Writes the low 16 bits of {@code v} at the current position using direct memory
 * access, growing the buffer first if needed, then advances the position.
 */
@Override
public void writeShort(final int v) throws IOException {
    ensureAvailable(SHORT_SIZE_IN_BYTES);
    MEM.putShort(buffer, ARRAY_BYTE_BASE_OFFSET + pos, (short) v);
    pos += SHORT_SIZE_IN_BYTES;
}
// Writing shorts at explicit positions with LITTLE_ENDIAN and BIG_ENDIAN byte order must
// round-trip through the corresponding Bits.readShort calls.
@Test public void testWriteShortForPositionVByteOrder() throws Exception { short expected = 100; out.writeShort(1, expected, ByteOrder.LITTLE_ENDIAN); out.writeShort(3, expected, ByteOrder.BIG_ENDIAN); short actual1 = Bits.readShort(out.buffer, 1, false); short actual2 = Bits.readShort(out.buffer, 3, true); assertEquals(expected, actual1); assertEquals(expected, actual2); }
/**
 * Returns the number of tenants using the given tenant package. Pure delegation
 * to the mapper's count query.
 */
@Override
public Long getTenantCountByPackageId(Long packageId) {
    return tenantMapper.selectCountByPackageId(packageId);
}
// With one tenant on package 1 and one on package 2, counting package 1 must return exactly 1.
@Test public void testGetTenantCountByPackageId() { // mock 数据 TenantDO dbTenant1 = randomPojo(TenantDO.class, o -> o.setPackageId(1L)); tenantMapper.insert(dbTenant1);// @Sql: 先插入出一条存在的数据 TenantDO dbTenant2 = randomPojo(TenantDO.class, o -> o.setPackageId(2L)); tenantMapper.insert(dbTenant2);// @Sql: 先插入出一条存在的数据 // 调用 Long count = tenantService.getTenantCountByPackageId(1L); assertEquals(1, count); }
/**
 * Validates an index name: non-empty, at most MAX_INDEX_NAME_LENGTH characters,
 * free of illegal characters, and not starting with '-', '_' or '+'.
 *
 * @throws IllegalArgumentException describing the first violated rule
 */
static void checkValidIndexName(String indexName) {
    // Bug fix: an empty name previously fell through to charAt(0) and threw
    // StringIndexOutOfBoundsException instead of a descriptive IllegalArgumentException.
    if (indexName.isEmpty()) {
        throw new IllegalArgumentException("Index name must not be empty.");
    }
    if (indexName.length() > MAX_INDEX_NAME_LENGTH) {
        throw new IllegalArgumentException(
            "Index name "
                + indexName
                + " cannot be longer than "
                + MAX_INDEX_NAME_LENGTH
                + " characters.");
    }
    Matcher matcher = ILLEGAL_INDEX_NAME_CHARS.matcher(indexName);
    if (matcher.find()) {
        throw new IllegalArgumentException(
            "Index name "
                + indexName
                + " is not a valid name. Character \""
                + matcher.group()
                + "\" is not allowed.");
    }
    char first = indexName.charAt(0);
    if (first == '-' || first == '_' || first == '+') {
        throw new IllegalArgumentException(
            "Index name " + indexName + " can not start with -, _ or +.");
    }
}
// An index name starting with '_' must be rejected with IllegalArgumentException.
@Test public void testCheckValidIndexNameThrowsErrorWhenNameBeginsWithUnderscore() { assertThrows(IllegalArgumentException.class, () -> checkValidIndexName("_test-index")); }
// Dispatches an incoming egress fragment by SBE template id. Messages from a foreign schema
// are handed to the optional listener extension (or rejected); for the cluster schema, the
// session id embedded in each message is compared against this adapter's clusterSessionId and
// fragments for other sessions are silently dropped. NOTE(review): the decoder wrap offsets and
// the AdminResponse payload-offset arithmetic are position-sensitive — do not reorder.
@SuppressWarnings("MethodLength") public void onFragment(final DirectBuffer buffer, final int offset, final int length, final Header header) { messageHeaderDecoder.wrap(buffer, offset); final int templateId = messageHeaderDecoder.templateId(); final int schemaId = messageHeaderDecoder.schemaId(); if (schemaId != MessageHeaderDecoder.SCHEMA_ID) { if (listenerExtension != null) { listenerExtension.onExtensionMessage( messageHeaderDecoder.blockLength(), templateId, schemaId, messageHeaderDecoder.version(), buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH, length - MessageHeaderDecoder.ENCODED_LENGTH); return; } throw new ClusterException("expected schemaId=" + MessageHeaderDecoder.SCHEMA_ID + ", actual=" + schemaId); } switch (templateId) { case SessionMessageHeaderDecoder.TEMPLATE_ID: { sessionMessageHeaderDecoder.wrap( buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH, messageHeaderDecoder.blockLength(), messageHeaderDecoder.version()); final long sessionId = sessionMessageHeaderDecoder.clusterSessionId(); if (sessionId == clusterSessionId) { listener.onMessage( sessionId, sessionMessageHeaderDecoder.timestamp(), buffer, offset + SESSION_HEADER_LENGTH, length - SESSION_HEADER_LENGTH, header); } break; } case SessionEventDecoder.TEMPLATE_ID: { sessionEventDecoder.wrap( buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH, messageHeaderDecoder.blockLength(), messageHeaderDecoder.version()); final long sessionId = sessionEventDecoder.clusterSessionId(); if (sessionId == clusterSessionId) { listener.onSessionEvent( sessionEventDecoder.correlationId(), sessionId, sessionEventDecoder.leadershipTermId(), sessionEventDecoder.leaderMemberId(), sessionEventDecoder.code(), sessionEventDecoder.detail()); } break; } case NewLeaderEventDecoder.TEMPLATE_ID: { newLeaderEventDecoder.wrap( buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH, messageHeaderDecoder.blockLength(), messageHeaderDecoder.version()); final long sessionId = newLeaderEventDecoder.clusterSessionId(); if (sessionId == clusterSessionId) { listener.onNewLeader( sessionId, newLeaderEventDecoder.leadershipTermId(), newLeaderEventDecoder.leaderMemberId(), newLeaderEventDecoder.ingressEndpoints()); } break; } case AdminResponseDecoder.TEMPLATE_ID: { adminResponseDecoder.wrap( buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH, messageHeaderDecoder.blockLength(), messageHeaderDecoder.version()); final long sessionId = adminResponseDecoder.clusterSessionId(); if (sessionId == clusterSessionId) { final long correlationId = adminResponseDecoder.correlationId(); final AdminRequestType requestType = adminResponseDecoder.requestType(); final AdminResponseCode responseCode = adminResponseDecoder.responseCode(); final String message = adminResponseDecoder.message(); final int payloadOffset = adminResponseDecoder.offset() + AdminResponseDecoder.BLOCK_LENGTH + AdminResponseDecoder.messageHeaderLength() + message.length() + AdminResponseDecoder.payloadHeaderLength(); final int payloadLength = adminResponseDecoder.payloadLength(); listener.onAdminResponse( sessionId, correlationId, requestType, responseCode, message, buffer, payloadOffset, payloadLength); } break; } default: break; } }
// A session message carrying a session id (21) different from the adapter's (-19)
// must be dropped without any callback to the EgressListener.
@Test void onFragmentIsANoOpIfSessionIdDoesNotMatchOnSessionMessage() { final int offset = 18; final long sessionId = 21; final long timestamp = 1000; sessionMessageHeaderEncoder .wrapAndApplyHeader(buffer, offset, messageHeaderEncoder) .clusterSessionId(sessionId) .timestamp(timestamp); final EgressListener egressListener = mock(EgressListener.class); final Header header = new Header(0, 0); final EgressAdapter adapter = new EgressAdapter(egressListener, -19, mock(Subscription.class), 3); adapter.onFragment(buffer, offset, sessionMessageHeaderEncoder.encodedLength(), header); verifyNoInteractions(egressListener); }
// Exposes the singleton's failed-push counter so callers can read or increment it.
public static AtomicInteger getFailedPushMonitor() { return INSTANCE.failedPush; }
// The failed-push counter starts at 0 and incrementAndGet returns 1.
// NOTE(review): assumes no other test has touched the shared singleton counter first.
@Test void testGetFailedPush() { assertEquals(0, MetricsMonitor.getFailedPushMonitor().get()); assertEquals(1, MetricsMonitor.getFailedPushMonitor().incrementAndGet()); }
/**
 * Raises a push event for the given issue when it is a taint vulnerability or a
 * security hotspot. Issues without a component UUID produce no event.
 *
 * @param projectUuid  UUID of the project the issue belongs to
 * @param currentIssue the issue to evaluate
 * @return the created event, or an empty Optional when no event applies
 */
public Optional<PushEventDto> raiseEventOnIssue(String projectUuid, DefaultIssue currentIssue) {
    final var issueComponentUuid = currentIssue.componentUuid();
    // No component attached -> nothing to raise an event against.
    if (issueComponentUuid == null) {
        return Optional.empty();
    }
    final var issueComponent = treeRootHolder.getComponentByUuid(issueComponentUuid);
    if (isTaintVulnerability(currentIssue)) {
        return raiseTaintVulnerabilityEvent(projectUuid, issueComponent, currentIssue);
    }
    return isSecurityHotspot(currentIssue)
        ? raiseSecurityHotspotEvent(projectUuid, issueComponent, currentIssue)
        : Optional.empty();
}
// A security hotspot that is being closed (status CLOSED, resolution FIXED) must produce a
// SecurityHotspotClosed push event carrying the issue payload, language and project uuid.
@Test public void raiseEventOnIssue_whenClosedHotspot_shouldCreateClosedEvent() { DefaultIssue defaultIssue = createDefaultIssue() .setType(RuleType.SECURITY_HOTSPOT) .setNew(false) .setCopied(false) .setBeingClosed(true) .setStatus(Issue.STATUS_CLOSED) .setResolution(Issue.RESOLUTION_FIXED); assertThat(underTest.raiseEventOnIssue("some-project-uuid", defaultIssue)) .isNotEmpty() .hasValueSatisfying(pushEventDto -> { assertThat(pushEventDto.getName()).isEqualTo(SecurityHotspotClosed.EVENT_NAME); verifyHotspotClosedEventPayload(pushEventDto.getPayload(), defaultIssue); assertThat(pushEventDto.getLanguage()).isEqualTo("java"); assertThat(pushEventDto.getProjectUuid()).isEqualTo("some-project-uuid"); }); }
/**
 * Demonstrates function composition: composes {@code timesTwo} and {@code square},
 * applies the composition to 3, and logs the result.
 *
 * @param args unused command-line arguments
 */
public static void main(String[] args) {
    final var logger = LoggerFactory.getLogger(App.class);
    Function<Integer, Integer> timesTwo = x -> x * 2;
    Function<Integer, Integer> square = x -> x * x;
    Function<Integer, Integer> composedFunction =
        FunctionComposer.composeFunctions(timesTwo, square);
    int result = composedFunction.apply(3);
    // Parameterized logging (SLF4J) instead of eager string concatenation.
    logger.info("Result of composing 'timesTwo' and 'square' functions applied to 3 is: {}", result);
}
// Smoke test: App.main must complete without throwing.
@Test void shouldExecuteApplicationWithoutException() { assertDoesNotThrow(() -> App.main(new String[]{})); }
// Substitutes {placeholder} tokens in the template content with values from params.
// Thin delegate to Hutool's StrUtil.format.
@Override public String formatSmsTemplateContent(String content, Map<String, Object> params) { return StrUtil.format(content, params); }
// Formats a template containing two placeholders and asserts the substituted result.
// (Surrounding comments translated to English; template/expected strings are runtime data
// and are left untouched.)
@Test public void testFormatSmsTemplateContent() { // prepare arguments String content = "正在进行登录操作{operation},您的验证码是{code}"; Map<String, Object> params = MapUtil.<String, Object>builder("operation", "登录") .put("code", "1234").build(); // invoke String result = smsTemplateService.formatSmsTemplateContent(content, params); // assert assertEquals("正在进行登录操作登录,您的验证码是1234", result); }
// Removes the named master from sentinel monitoring via a synchronous SENTINEL REMOVE command.
@Override public void remove(NamedNode master) { connection.sync(RedisCommands.SENTINEL_REMOVE, master.getName()); }
// Removes the first monitored master; passes if no exception is thrown.
// NOTE(review): assumes at least one master is configured on the test sentinel.
@Test public void testRemove() { Collection<RedisServer> masters = connection.masters(); connection.remove(masters.iterator().next()); }
// Moves/renames a file or folder via the Box API. If the target already exists it is deleted
// first. After the server-side rename the fileid cache is invalidated for the source and primed
// for the target, and the returned Path carries the attributes reported by Box.
// NOTE(review): folders and files use separate Box endpoints (FoldersApi vs FilesApi) with
// parallel logic — keep the two branches in sync when editing.
@Override public Path move(final Path file, final Path renamed, final TransferStatus status, final Delete.Callback delete, final ConnectionCallback callback) throws BackgroundException { try { if(status.isExists()) { if(log.isWarnEnabled()) { log.warn(String.format("Delete file %s to be replaced with %s", renamed, file)); } new BoxDeleteFeature(session, fileid).delete(Collections.singletonList(renamed), callback, delete); } final String id = fileid.getFileId(file); if(file.isDirectory()) { final Folder result = new FoldersApi(new BoxApiClient(session.getClient())).putFoldersId( id, new FoldersFolderIdBody() .name(renamed.getName()) .parent(new FoldersfolderIdParent() .id(fileid.getFileId(renamed.getParent()))), null, BoxAttributesFinderFeature.DEFAULT_FIELDS); fileid.cache(file, null); fileid.cache(renamed, id); return renamed.withAttributes(new BoxAttributesFinderFeature(session, fileid).toAttributes(result)); } final File result = new FilesApi(new BoxApiClient(session.getClient())).putFilesId( id, new FilesFileIdBody() .name(renamed.getName()) .parent(new FilesfileIdParent() .id(fileid.getFileId(renamed.getParent()))), null, BoxAttributesFinderFeature.DEFAULT_FIELDS); fileid.cache(file, null); fileid.cache(renamed, id); return renamed.withAttributes(new BoxAttributesFinderFeature(session, fileid).toAttributes(result)); } catch(ApiException e) { throw new BoxExceptionMappingService(fileid).map("Cannot rename {0}", e, file); } }
// End-to-end move: creates a file, moves it, asserts the source is gone, the target exists,
// and that modification date/checksum survive the move; cleans up the target afterwards.
@Test public void testMove() throws Exception { final BoxFileidProvider fileid = new BoxFileidProvider(session); final Path test = new BoxTouchFeature(session, fileid).touch(new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); assertNotNull(test.attributes().getFileId()); assertEquals(0L, test.attributes().getSize()); assertNotEquals(-1L, test.attributes().getModificationDate()); final Path target = new BoxMoveFeature(session, fileid).move(test, new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus(), new Delete.DisabledCallback(), new DisabledConnectionCallback()); assertFalse(new BoxFindFeature(session, fileid).find(new Path(test).withAttributes(PathAttributes.EMPTY))); assertTrue(new BoxFindFeature(session, fileid).find(target)); assertEquals(test.attributes().getModificationDate(), target.attributes().getModificationDate()); assertEquals(test.attributes().getChecksum(), target.attributes().getChecksum()); assertEquals(Comparison.equal, session.getHost().getProtocol().getFeature(ComparisonService.class).compare(Path.Type.file, target.attributes(), new BoxAttributesFinderFeature(session, fileid).find(target))); new BoxDeleteFeature(session, fileid).delete(Collections.singletonList(target), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
// Returns an iterable view of the double array that compares elements with exact
// (bitwise-style) equality rather than a tolerance.
public DoubleArrayAsIterable usingExactEquality() { return new DoubleArrayAsIterable(EXACT_EQUALITY_CORRESPONDENCE, iterableSubject()); }
// Exact equality must treat POSITIVE_INFINITY as equal to itself.
@Test public void usingExactEquality_contains_successWithInfinity() { assertThat(array(1.1, POSITIVE_INFINITY, 3.3)).usingExactEquality().contains(POSITIVE_INFINITY); }
// Registers the single ClickHouse connector factory under the name "clickhouse".
@Override public Iterable<ConnectorFactory> getConnectorFactories() { return ImmutableList.of(new ClickHouseConnectorFactory("clickhouse", getClassLoader())); }
// The plugin must expose exactly one factory that can create a connector
// from a minimal configuration (connection URL only).
@Test public void testCreateConnector() { Plugin plugin = new ClickHousePlugin(); ConnectorFactory factory = getOnlyElement(plugin.getConnectorFactories()); factory.create("test", ImmutableMap.of("clickhouse.connection-url", "jdbc:clickhouse://test"), new TestingConnectorContext()); }
/**
 * Divides this memory size by the given (non-negative) divisor, truncating to whole bytes.
 *
 * @param by the divisor; must be non-negative
 * @return a new MemorySize of {@code bytes / by}
 * @throws IllegalArgumentException if {@code by} is negative
 * @throws ArithmeticException if {@code by} is zero (integer division by zero)
 */
public MemorySize divide(long by) {
    if (by < 0) {
        // Message now matches the actual guard; the old text claimed "!= 0" while
        // zero passed the check and failed later with ArithmeticException.
        throw new IllegalArgumentException("divisor must be >= 0");
    }
    return new MemorySize(bytes / by);
}
// Dividing by a negative value must raise IllegalArgumentException.
@Test void testDivideByNegativeLong() { assertThatExceptionOfType(IllegalArgumentException.class) .isThrownBy( () -> { final MemorySize memory = new MemorySize(100L); memory.divide(-23L); }); }
/**
 * Reinterprets the low 32 bits of {@code x} as a signed int, so an unsigned
 * value such as 4294967295 maps back to -1.
 *
 * @param x a long whose low 32 bits carry the value (typically produced by
 *          {@link Integer#toUnsignedLong(int)})
 * @return the low 32 bits of {@code x} as a signed int
 */
public static int toSignedInt(long x) {
    // Masking first makes the "keep only the low 32 bits" intent explicit;
    // the narrowing cast then reinterprets them as signed.
    return (int) (x & 0xFFFF_FFFFL);
}
// Round-trips values through Integer.toUnsignedLong and BitUtil.toSignedInt,
// including the MAX_VALUE overflow boundary, and sanity-checks the 0xFFFFffff mask.
@Test public void testUnsignedConversions() { long l = Integer.toUnsignedLong(-1); assertEquals(4294967295L, l); assertEquals(-1, BitUtil.toSignedInt(l)); int intVal = Integer.MAX_VALUE; long maxInt = intVal; assertEquals(intVal, BitUtil.toSignedInt(maxInt)); intVal++; maxInt = Integer.toUnsignedLong(intVal); assertEquals(intVal, BitUtil.toSignedInt(maxInt)); intVal++; maxInt = Integer.toUnsignedLong(intVal); assertEquals(intVal, BitUtil.toSignedInt(maxInt)); assertEquals(0xFFFFffffL, (1L << 32) - 1); assertTrue(0xFFFFffffL > 0L); }
/**
 * Sets a JavaBean property on the wrapped object by name. Null values are ignored;
 * unknown properties and conversion failures are reported as warnings rather than thrown.
 *
 * @param name  property name (decapitalized per JavaBeans conventions before lookup)
 * @param value string value to convert and assign; ignored when {@code null}
 */
public void setProperty(String name, String value) {
    if (value == null) {
        return;
    }
    final String propertyName = Introspector.decapitalize(name);
    final PropertyDescriptor descriptor = getPropertyDescriptor(propertyName);
    if (descriptor == null) {
        addWarn("No such property [" + propertyName + "] in " + objClass.getName() + ".");
        return;
    }
    try {
        setProperty(descriptor, propertyName, value);
    } catch (PropertySetterException ex) {
        addWarn("Failed to set property [" + propertyName + "] to value \"" + value + "\". ", ex);
    }
}
// Setting the "duration" property from the string "1.4 seconds" must convert to 1400 ms.
@Test public void testDuration() { setter.setProperty("duration", "1.4 seconds"); assertEquals(1400, house.getDuration().getMilliseconds()); }
/**
 * Deserializes the rule-handle JSON into a {@link DefaultBasicAuthRuleHandle}.
 *
 * @param handleJson the JSON payload to parse
 * @return the parsed handle, or {@code null} when the input cannot be parsed
 *         (the failure is logged, not propagated)
 */
@Override
public DefaultBasicAuthRuleHandle parseHandleJson(final String handleJson) {
    DefaultBasicAuthRuleHandle handle;
    try {
        handle = GsonUtils.getInstance().fromJson(handleJson, DefaultBasicAuthRuleHandle.class);
    } catch (Exception exception) {
        LOG.error("Failed to parse json , please check json format", exception);
        handle = null;
    }
    return handle;
}
// Valid JSON yields a handle; null input yields null (Gson returns null for null JSON).
@Test public void testParseHandleJson() { String handleJson = "{\"authorization\":\"test:test123\"}"; MatcherAssert.assertThat(defaultBasicAuthAuthenticationStrategy.parseHandleJson(handleJson), notNullValue(DefaultBasicAuthRuleHandle.class)); MatcherAssert.assertThat(defaultBasicAuthAuthenticationStrategy.parseHandleJson(null), nullValue()); }
// Computes the first flush time: snaps to the start of the current roll interval within the
// current hour, then (optionally) adds random jitter bounded by rollOffsetIntervalMillis,
// stepping back whole intervals so the result never lands in the future. The inline comments
// below describe each step; do not reorder — nextFlush is consumed by the rolling logic as
// "the previous interval's flush" on the first round.
@VisibleForTesting protected void setInitialFlushTime(Date now) { // Start with the beginning of the current hour nextFlush = Calendar.getInstance(); nextFlush.setTime(now); nextFlush.set(Calendar.MILLISECOND, 0); nextFlush.set(Calendar.SECOND, 0); nextFlush.set(Calendar.MINUTE, 0); // In the first round, calculate the first flush as the largest number of // intervals from the beginning of the current hour that's not in the // future by: // 1. Subtract the beginning of the hour from the current time // 2. Divide by the roll interval and round down to get the number of whole // intervals that have passed since the beginning of the hour // 3. Multiply by the roll interval to get the number of millis between // the beginning of the current hour and the beginning of the current // interval. int millis = (int) (((now.getTime() - nextFlush.getTimeInMillis()) / rollIntervalMillis) * rollIntervalMillis); // Then add some noise to help prevent all the nodes from // closing their files at the same time. if (rollOffsetIntervalMillis > 0) { millis += ThreadLocalRandom.current().nextLong(rollOffsetIntervalMillis); // If the added time puts us into the future, step back one roll interval // because the code to increment nextFlush to the next flush expects that // nextFlush is the next flush from the previous interval. There wasn't // a previous interval, so we just fake it with the time in the past that // would have been the previous interval if there had been one. // // It's OK if millis comes out negative. while (nextFlush.getTimeInMillis() + millis > now.getTime()) { millis -= rollIntervalMillis; } } // Adjust the next flush time by millis to get the time of our ficticious // previous next flush nextFlush.add(Calendar.MILLISECOND, millis); }
// Exercises setInitialFlushTime across three configurations: no jitter (exact snapping),
// a 100 ms jitter window (result bounded by the interval), and a pathological jitter window
// larger than the interval (result still within one interval in the past).
@Test public void testSetInitialFlushTime() { RollingFileSystemSink rfsSink = new RollingFileSystemSink(1000, 0); Calendar calendar = Calendar.getInstance(); calendar.set(Calendar.MILLISECOND, 0); calendar.set(Calendar.SECOND, 0); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.HOUR, 0); calendar.set(Calendar.DAY_OF_YEAR, 1); calendar.set(Calendar.YEAR, 2016); assertNull("Last flush time should have been null prior to calling init()", rfsSink.nextFlush); rfsSink.setInitialFlushTime(calendar.getTime()); long diff = rfsSink.nextFlush.getTimeInMillis() - calendar.getTimeInMillis(); assertEquals("The initial flush time was calculated incorrectly", 0L, diff); calendar.set(Calendar.MILLISECOND, 10); rfsSink.setInitialFlushTime(calendar.getTime()); diff = rfsSink.nextFlush.getTimeInMillis() - calendar.getTimeInMillis(); assertEquals("The initial flush time was calculated incorrectly", -10L, diff); calendar.set(Calendar.SECOND, 1); calendar.set(Calendar.MILLISECOND, 10); rfsSink.setInitialFlushTime(calendar.getTime()); diff = rfsSink.nextFlush.getTimeInMillis() - calendar.getTimeInMillis(); assertEquals("The initial flush time was calculated incorrectly", -10L, diff); // Try again with a random offset rfsSink = new RollingFileSystemSink(1000, 100); assertNull("Last flush time should have been null prior to calling init()", rfsSink.nextFlush); calendar.set(Calendar.MILLISECOND, 0); calendar.set(Calendar.SECOND, 0); rfsSink.setInitialFlushTime(calendar.getTime()); diff = rfsSink.nextFlush.getTimeInMillis() - calendar.getTimeInMillis(); assertTrue("The initial flush time was calculated incorrectly: " + diff, (diff == 0L) || ((diff > -1000L) && (diff < -900L))); calendar.set(Calendar.MILLISECOND, 10); rfsSink.setInitialFlushTime(calendar.getTime()); diff = rfsSink.nextFlush.getTimeInMillis() - calendar.getTimeInMillis(); assertTrue("The initial flush time was calculated incorrectly: " + diff, (diff >= -10L) && (diff <= 0L) || ((diff > -1000L) && (diff < -910L))); calendar.set(Calendar.SECOND, 1); calendar.set(Calendar.MILLISECOND, 10); rfsSink.setInitialFlushTime(calendar.getTime()); diff = rfsSink.nextFlush.getTimeInMillis() - calendar.getTimeInMillis(); assertTrue("The initial flush time was calculated incorrectly: " + diff, (diff >= -10L) && (diff <= 0L) || ((diff > -1000L) && (diff < -910L))); // Now try pathological settings rfsSink = new RollingFileSystemSink(1000, 1000000); assertNull("Last flush time should have been null prior to calling init()", rfsSink.nextFlush); calendar.set(Calendar.MILLISECOND, 1); calendar.set(Calendar.SECOND, 0); rfsSink.setInitialFlushTime(calendar.getTime()); diff = rfsSink.nextFlush.getTimeInMillis() - calendar.getTimeInMillis(); assertTrue("The initial flush time was calculated incorrectly: " + diff, (diff > -1000L) && (diff <= 0L)); }
// Resolves secret placeholders in the scalar value, then converts the resolved string
// to the target type via Stapler's converter registry.
@NonNull @Override public Object configure(CNode config, ConfigurationContext context) throws ConfiguratorException { return Stapler.lookupConverter(target) .convert( target, context.getSecretSourceResolver() .resolve(config.asScalar().toString())); }
// A plain scalar with no secret placeholders must configure to the same String.
@Test public void _string() throws Exception { Configurator c = registry.lookupOrFail(String.class); final Object value = c.configure(new Scalar("abc"), context); assertEquals("abc", value); }
/**
 * Sets the progress value, clamping illegal inputs into [0, 1]:
 * NaN and negative values (including NEGATIVE_INFINITY) become 0;
 * values above 1 (including POSITIVE_INFINITY) become 1. Each case is logged at debug.
 *
 * @param progress the raw progress value to sanitize and store
 */
public synchronized void set(float progress) {
    if (Float.isNaN(progress)) {
        progress = 0;
        LOG.debug("Illegal progress value found, progress is Float.NaN. "
            + "Progress will be changed to 0");
    } else if (progress == Float.NEGATIVE_INFINITY) {
        progress = 0;
        LOG.debug("Illegal progress value found, progress is "
            + "Float.NEGATIVE_INFINITY. Progress will be changed to 0");
    } else if (progress == Float.POSITIVE_INFINITY) {
        // Must be checked before the (progress > 1) branch: POSITIVE_INFINITY > 1 is true,
        // so the original ordering made this branch (and its log message) unreachable.
        progress = 1;
        LOG.debug("Illegal progress value found, progress is "
            + "Float.POSITIVE_INFINITY. Progress will be changed to 1");
    } else if (progress < 0) {
        progress = 0;
        LOG.debug("Illegal progress value found, progress is less than 0."
            + " Progress will be changed to 0");
    } else if (progress > 1) {
        progress = 1;
        LOG.debug("Illegal progress value found, progress is larger than 1."
            + " Progress will be changed to 1");
    }
    this.progress = progress;
}
// Every illegal input (NaN, -inf, negative, >1, +inf) must be clamped into [0, 1].
@Test public void testSet(){ Progress progress = new Progress(); progress.set(Float.NaN); Assert.assertEquals(0, progress.getProgress(), 0.0); progress.set(Float.NEGATIVE_INFINITY); Assert.assertEquals(0,progress.getProgress(),0.0); progress.set(-1); Assert.assertEquals(0,progress.getProgress(),0.0); progress.set((float) 1.1); Assert.assertEquals(1,progress.getProgress(),0.0); progress.set(Float.POSITIVE_INFINITY); Assert.assertEquals(1,progress.getProgress(),0.0); }
// Decodes an ABI-encoded function result into typed values; thin delegate to the
// configured FunctionReturnDecoder instance.
public static List<Type> decode(String rawInput, List<TypeReference<Type>> outputParameters) { return decoder.decodeFunctionResult(rawInput, outputParameters); }
// Decodes a large ABI-v2 payload containing a mix of static arrays of dynamic structs,
// a dynamic array of structs, and nested struct types, and asserts the fully-typed result.
// NOTE(review): the hex fixture is offset-sensitive — any change to the expected types must
// regenerate the entire encoded blob.
@Test public void testDecodeMultipleDynamicStructStaticDynamicArrays() { String rawInput = "0x0000000000000000000000000000000000000000000000000000000000000140" + "0000000000000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000000" + "000000000000000000000000000000000000000000000000000000000000007b" + "000000000000000000000000000000000000000000000000000000000000007b" + "0000000000000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000460" + "0000000000000000000000000000000000000000000000000000000000000560" + "00000000000000000000000000000000000000000000000000000000000008a0" + "0000000000000000000000000000000000000000000000000000000000000060" + "0000000000000000000000000000000000000000000000000000000000000160" + "0000000000000000000000000000000000000000000000000000000000000220" + "0000000000000000000000000000000000000000000000000000000000000020" + "0000000000000000000000000000000000000000000000000000000000000020" + "0000000000000000000000000000000000000000000000000000000000000040" + "0000000000000000000000000000000000000000000000000000000000000080" + "0000000000000000000000000000000000000000000000000000000000000001" + "3400000000000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000009" + "6e6573746564466f6f0000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000020" + "0000000000000000000000000000000000000000000000000000000000000020" + "0000000000000000000000000000000000000000000000000000000000000040" + "0000000000000000000000000000000000000000000000000000000000000060" + "0000000000000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000020" + "0000000000000000000000000000000000000000000000000000000000000020" + "0000000000000000000000000000000000000000000000000000000000000040" + "0000000000000000000000000000000000000000000000000000000000000080" + "0000000000000000000000000000000000000000000000000000000000000001" + "3400000000000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000009" + "6e6573746564466f6f0000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000001" + "0000000000000000000000000000000000000000000000000000000000000020" + "0000000000000000000000000000000000000000000000000000000000000040" + "0000000000000000000000000000000000000000000000000000000000000080" + "0000000000000000000000000000000000000000000000000000000000000002" + "6964000000000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000004" + "6e616d6500000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000003" + "0000000000000000000000000000000000000000000000000000000000000060" + "0000000000000000000000000000000000000000000000000000000000000160" + "0000000000000000000000000000000000000000000000000000000000000260" + "0000000000000000000000000000000000000000000000000000000000000020" + "0000000000000000000000000000000000000000000000000000000000000020" + "0000000000000000000000000000000000000000000000000000000000000040" + "0000000000000000000000000000000000000000000000000000000000000080" + "0000000000000000000000000000000000000000000000000000000000000001" + "3400000000000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000009" + "6e6573746564466f6f0000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000020" + "0000000000000000000000000000000000000000000000000000000000000020" + "0000000000000000000000000000000000000000000000000000000000000040" + "0000000000000000000000000000000000000000000000000000000000000080" + "0000000000000000000000000000000000000000000000000000000000000001" + "3400000000000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000009" + "6e6573746564466f6f0000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000020" + "0000000000000000000000000000000000000000000000000000000000000020" + "0000000000000000000000000000000000000000000000000000000000000040" + "0000000000000000000000000000000000000000000000000000000000000060" + "0000000000000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000060" + "0000000000000000000000000000000000000000000000000000000000000120" + "00000000000000000000000000000000000000000000000000000000000001e0" + "0000000000000000000000000000000000000000000000000000000000000040" + "0000000000000000000000000000000000000000000000000000000000000080" + "0000000000000000000000000000000000000000000000000000000000000002" + "6964000000000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000004" + "6e616d6500000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000040" + "0000000000000000000000000000000000000000000000000000000000000080" + "0000000000000000000000000000000000000000000000000000000000000002" + "6964000000000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000004" + "6e616d6500000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000040" + "0000000000000000000000000000000000000000000000000000000000000080" + "0000000000000000000000000000000000000000000000000000000000000002" + "6964000000000000000000000000000000000000000000000000000000000000" + "0000000000000000000000000000000000000000000000000000000000000004" + "6e616d6500000000000000000000000000000000000000000000000000000000"; assertEquals( FunctionReturnDecoder.decode( rawInput, AbiV2TestFixture.getNarBarFooNarFooDynamicArrayFunction .getOutputParameters()), Arrays.asList( new StaticArray3<>( AbiV2TestFixture.Nar.class, new AbiV2TestFixture.Nar( new AbiV2TestFixture.Nuu( new AbiV2TestFixture.Foo("4", "nestedFoo"))), new AbiV2TestFixture.Nar( new AbiV2TestFixture.Nuu(new AbiV2TestFixture.Foo("", ""))), new AbiV2TestFixture.Nar( new AbiV2TestFixture.Nuu( new AbiV2TestFixture.Foo("4", "nestedFoo")))), new StaticArray3<>( AbiV2TestFixture.Bar.class, new AbiV2TestFixture.Bar(BigInteger.ZERO, BigInteger.ZERO), new AbiV2TestFixture.Bar( BigInteger.valueOf(123), BigInteger.valueOf(123)), new AbiV2TestFixture.Bar(BigInteger.ZERO, BigInteger.ZERO)), new DynamicArray<>( AbiV2TestFixture.Foo.class, new AbiV2TestFixture.Foo("id", "name")), new DynamicArray<>( AbiV2TestFixture.Nar.class, new AbiV2TestFixture.Nar( new AbiV2TestFixture.Nuu( new AbiV2TestFixture.Foo("4", "nestedFoo"))), new AbiV2TestFixture.Nar( new AbiV2TestFixture.Nuu( new AbiV2TestFixture.Foo("4", "nestedFoo"))), new AbiV2TestFixture.Nar( new AbiV2TestFixture.Nuu( new AbiV2TestFixture.Foo("", "")))), new StaticArray3<>( AbiV2TestFixture.Foo.class, new AbiV2TestFixture.Foo("id", "name"), new AbiV2TestFixture.Foo("id", "name"), new AbiV2TestFixture.Foo("id", "name")))); }
/**
 * Collects the ids of all nodes this provider may schedule on. Compute-node ids are
 * included when compute nodes are in use or no backends are available; backend ids are
 * appended unless compute nodes are preferred exclusively.
 *
 * @return compute-node ids (when applicable) followed by backend ids
 */
@Override
public List<Long> getAllAvailableNodes() {
    final List<Long> ids = Lists.newArrayList();
    final boolean includeComputeNodes = usedComputeNode || availableID2Backend.isEmpty();
    if (includeComputeNodes) {
        ids.addAll(availableID2ComputeNode.keySet());
    }
    // When compute nodes are preferred, backends are excluded entirely.
    if (!preferComputeNode) {
        ids.addAll(availableID2Backend.keySet());
    }
    return ids;
}
// Exercises DefaultWorkerProvider node selection across three modes: compute nodes only,
// backends only, and mixed (ALL_NODES) — in the mixed case verifying that compute-node ids
// precede backend ids after the reversal.
@Test public void testSelectBackendAndComputeNode() { new MockUp<SystemInfoService>() { @Mock public ImmutableMap<Long, ComputeNode> getIdToBackend() { return availableId2Backend; } @Mock public ImmutableMap<Long, ComputeNode> getIdComputeNode() { return availableId2ComputeNode; } }; DefaultWorkerProvider.Factory workerProviderFactory = new DefaultWorkerProvider.Factory(); DefaultWorkerProvider workerProvider; List<Integer> numUsedComputeNodesList = ImmutableList.of(-1, 0, 2, 3, 5, 8, 10); // test ComputeNode only for (Integer numUsedComputeNodes : numUsedComputeNodesList) { workerProvider = workerProviderFactory.captureAvailableWorkers(GlobalStateMgr.getCurrentState().getNodeMgr().getClusterInfo(), true, numUsedComputeNodes, ComputationFragmentSchedulingPolicy.COMPUTE_NODES_ONLY, WarehouseManager.DEFAULT_WAREHOUSE_ID); List<Long> selectedWorkerIdsList = workerProvider.getAllAvailableNodes(); for (Long selectedWorkerId : selectedWorkerIdsList) { Assert.assertTrue("selectedWorkerId:" + selectedWorkerId, availableId2ComputeNode.containsKey(selectedWorkerId)); } } // test Backend only for (Integer numUsedComputeNodes : numUsedComputeNodesList) { workerProvider = workerProviderFactory.captureAvailableWorkers(GlobalStateMgr.getCurrentState().getNodeMgr().getClusterInfo(), false, numUsedComputeNodes, ComputationFragmentSchedulingPolicy.COMPUTE_NODES_ONLY, WarehouseManager.DEFAULT_WAREHOUSE_ID); List<Long> selectedWorkerIdsList = workerProvider.getAllAvailableNodes(); Assert.assertEquals(availableId2Backend.size(), selectedWorkerIdsList.size()); for (Long selectedWorkerId : selectedWorkerIdsList) { Assert.assertTrue("selectedWorkerId:" + selectedWorkerId, availableId2Backend.containsKey(selectedWorkerId)); } } // test Backend and ComputeNode for (Integer numUsedComputeNodes : numUsedComputeNodesList) { workerProvider = workerProviderFactory.captureAvailableWorkers(GlobalStateMgr.getCurrentState().getNodeMgr().getClusterInfo(), true, numUsedComputeNodes, ComputationFragmentSchedulingPolicy.ALL_NODES, WarehouseManager.DEFAULT_WAREHOUSE_ID); List<Long> selectedWorkerIdsList = workerProvider.getAllAvailableNodes(); Collections.reverse(selectedWorkerIdsList); //put ComputeNode id to the front,Backend id to the back //test ComputeNode for (int i = 0; i < availableId2ComputeNode.size() && i < selectedWorkerIdsList.size(); i++) { Assert.assertTrue("selectedWorkerId:" + selectedWorkerIdsList.get(i), availableId2ComputeNode.containsKey(selectedWorkerIdsList.get(i))); } //test Backend for (int i = availableId2ComputeNode.size(); i < selectedWorkerIdsList.size(); i++) { Assert.assertTrue("selectedWorkerId:" + selectedWorkerIdsList.get(i), availableId2Backend.containsKey(selectedWorkerIdsList.get(i))); } } }
/**
 * Resolves a resource by consulting each source (application / plugin / dependencies)
 * in the order dictated by the class-loading strategy for the given name.
 *
 * @param name the resource name
 * @return the first matching URL, or {@code null} when no source can provide it
 */
@Override
public URL getResource(String name) {
    ClassLoadingStrategy loadingStrategy = getClassLoadingStrategy(name);
    log.trace("Received request to load resource '{}'", name);
    for (ClassLoadingStrategy.Source classLoadingSource : loadingStrategy.getSources()) {
        final URL url;
        if (classLoadingSource == ClassLoadingStrategy.Source.APPLICATION) {
            url = super.getResource(name);
        } else if (classLoadingSource == ClassLoadingStrategy.Source.PLUGIN) {
            url = findResource(name);
        } else if (classLoadingSource == ClassLoadingStrategy.Source.DEPENDENCIES) {
            url = findResourceFromDependencies(name);
        } else {
            url = null;
        }
        if (url == null) {
            log.trace("Couldn't find resource '{}' in {}", name, classLoadingSource);
        } else {
            log.trace("Found resource '{}' in {} classpath", name, classLoadingSource);
            return url;
        }
    }
    return null;
}
// With parent-first strategy, a resource present in the parent classpath must resolve
// to the parent's copy.
@Test void parentFirstGetResourceExistsInParent() throws IOException, URISyntaxException { URL resource = parentFirstPluginClassLoader.getResource("META-INF/file-only-in-parent"); assertFirstLine("parent", resource); }
/**
 * Wraps a runnable so that each execution is recorded by the given observation.
 *
 * @param observation the observation used to observe each run
 * @param runnable    the task to decorate
 * @return a runnable that delegates to {@code observation.observe(runnable)}
 */
static Runnable decorateRunnable(Observation observation, Runnable runnable) {
    return new Runnable() {
        @Override
        public void run() {
            observation.observe(runnable);
        }
    };
}
// The decorated runnable must invoke the underlying service exactly once and the
// observation must be started and finished without errors.
@Test public void shouldDecorateRunnable() throws Throwable { Runnable timedRunnable = Observations.decorateRunnable(observation, helloWorldService::sayHelloWorld); timedRunnable.run(); assertThatObservationWasStartedAndFinishedWithoutErrors(); then(helloWorldService).should(times(1)).sayHelloWorld(); }
// Synchronously releases the permit with the given id; blocks on the async variant.
@Override public void release(String permitId) { get(releaseAsync(permitId)); }
// Releasing a permit id on a semaphore with no acquired permits must raise RedisException.
@Test public void testReleaseWithoutPermits() { Assertions.assertThrows(RedisException.class, () -> { RPermitExpirableSemaphore s = redisson.getPermitExpirableSemaphore("test"); s.release("1234"); }); }
/**
 * Walks up the parent chain and returns the topmost entry.
 * An entry with no parent is its own root.
 *
 * @return the root of this entry's hierarchy (possibly {@code this})
 */
public Entry getRoot() {
    Entry current = this;
    // Iterative ascent instead of recursion; terminates at the parentless entry.
    while (current.parent != null) {
        current = current.parent;
    }
    return current;
}
// An entry with no parent must report itself as root.
@Test public void selfIsRoot() { final Entry entry = new Entry(); assertThat(entry.getRoot(), equalTo(entry)); }
/**
 * Converts BD-09 (Baidu) coordinates to GCJ-02 (Mars) coordinates using the
 * standard inverse-offset approximation.
 *
 * @param lng BD-09 longitude
 * @param lat BD-09 latitude
 * @return the corresponding GCJ-02 coordinate
 */
public static Coordinate bd09ToGcj02(double lng, double lat) {
    final double adjustedLng = lng - 0.0065;
    final double adjustedLat = lat - 0.006;
    // Polar form of the adjusted point, with the small sinusoidal corrections
    // that characterise the BD-09 encoding.
    final double radius = Math.sqrt(adjustedLng * adjustedLng + adjustedLat * adjustedLat)
            - 0.00002 * Math.sin(adjustedLat * X_PI);
    final double theta = Math.atan2(adjustedLat, adjustedLng)
            - 0.000003 * Math.cos(adjustedLng * X_PI);
    return new Coordinate(radius * Math.cos(theta), radius * Math.sin(theta));
}
// Converts a known BD-09 point (Beijing area) and asserts the exact GCJ-02 result.
@Test public void bd09toGcj02Test() { final CoordinateUtil.Coordinate coordinate = CoordinateUtil.bd09ToGcj02(116.404, 39.915); assertEquals(116.39762729119315D, coordinate.getLng(), 0); assertEquals(39.90865673957631D, coordinate.getLat(), 0); }
// Builds the localizer JVM options: admin opts first, then user opts; on JDK 17+ the extra
// --add-exports options are appended to the user opts when the config flag enables them.
// Empty tokens from splitting on whitespace are filtered out.
public static List<String> getJavaOpts(Configuration conf) { String adminOpts = conf.get(YarnConfiguration.NM_CONTAINER_LOCALIZER_ADMIN_JAVA_OPTS_KEY, YarnConfiguration.NM_CONTAINER_LOCALIZER_ADMIN_JAVA_OPTS_DEFAULT); String userOpts = conf.get(YarnConfiguration.NM_CONTAINER_LOCALIZER_JAVA_OPTS_KEY, YarnConfiguration.NM_CONTAINER_LOCALIZER_JAVA_OPTS_DEFAULT); boolean isExtraJDK17OptionsConfigured = conf.getBoolean(YarnConfiguration.NM_CONTAINER_LOCALIZER_JAVA_OPTS_ADD_EXPORTS_KEY, YarnConfiguration.NM_CONTAINER_LOCALIZER_JAVA_OPTS_ADD_EXPORTS_DEFAULT); if (Shell.isJavaVersionAtLeast(17) && isExtraJDK17OptionsConfigured) { userOpts = userOpts.trim().concat(" " + ADDITIONAL_JDK17_PLUS_OPTIONS); } List<String> adminOptionList = Arrays.asList(adminOpts.split("\\s+")); List<String> userOptionList = Arrays.asList(userOpts.split("\\s+")); return Stream.concat(adminOptionList.stream(), userOptionList.stream()) .filter(s -> !s.isEmpty()) .collect(Collectors.toList()); }
// With the add-exports flag disabled, the JDK17-only options must be absent while defaults (-Xmx256m) remain.
@Test public void testDefaultJavaOptionsWhenExtraJDK17OptionsAreNotConfigured() throws Exception { ContainerLocalizerWrapper wrapper = new ContainerLocalizerWrapper(); ContainerLocalizer localizer = wrapper.setupContainerLocalizerForTest(); Configuration conf = new Configuration(); conf.setBoolean(YarnConfiguration.NM_CONTAINER_LOCALIZER_JAVA_OPTS_ADD_EXPORTS_KEY, false); List<String> javaOpts = localizer.getJavaOpts(conf); if (Shell.isJavaVersionAtLeast(17)) { Assert.assertFalse(javaOpts.contains("--add-exports=java.base/sun.net.dns=ALL-UNNAMED")); Assert.assertFalse(javaOpts.contains("--add-exports=java.base/sun.net.util=ALL-UNNAMED")); } Assert.assertTrue(javaOpts.contains("-Xmx256m")); }
/**
 * Authenticates client requests by app id and HMAC signature.
 * Blank app ids are rejected with 400; when secrets are configured for the app,
 * stale timestamps and invalid signatures are rejected with 401; requests for
 * apps without configured secrets pass through unauthenticated.
 */
@Override
public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain) throws IOException, ServletException {
    HttpServletRequest request = (HttpServletRequest) req;
    HttpServletResponse response = (HttpServletResponse) resp;
    String appId = accessKeyUtil.extractAppIdFromRequest(request);
    if (StringUtils.isBlank(appId)) {
        response.sendError(HttpServletResponse.SC_BAD_REQUEST, "InvalidAppId");
        return;
    }
    List<String> availableSecrets = accessKeyUtil.findAvailableSecret(appId);
    if (!CollectionUtils.isEmpty(availableSecrets)) {
        String timestamp = request.getHeader(Signature.HTTP_HEADER_TIMESTAMP);
        String authorization = request.getHeader(HttpHeaders.AUTHORIZATION);
        // check timestamp, valid within 1 minute
        if (!checkTimestamp(timestamp)) {
            logger.warn("Invalid timestamp. appId={},timestamp={}", appId, timestamp);
            response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "RequestTimeTooSkewed");
            return;
        }
        // check signature against every secret currently active for this app
        String uri = request.getRequestURI();
        String query = request.getQueryString();
        if (!checkAuthorization(authorization, availableSecrets, timestamp, uri, query)) {
            logger.warn("Invalid authorization. appId={},authorization={}", appId, authorization);
            response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Unauthorized");
            return;
        }
    }
    chain.doFilter(request, response);
}
// A timestamp older than one minute must be rejected with 401 RequestTimeTooSkewed and never reach the chain.
@Test public void testRequestTimeTooSkewed() throws Exception { String appId = "someAppId"; List<String> secrets = Lists.newArrayList("someSecret"); String oneMinAgoTimestamp = Long.toString(System.currentTimeMillis() - 61 * 1000); when(accessKeyUtil.extractAppIdFromRequest(any())).thenReturn(appId); when(accessKeyUtil.findAvailableSecret(appId)).thenReturn(secrets); when(request.getHeader(Signature.HTTP_HEADER_TIMESTAMP)).thenReturn(oneMinAgoTimestamp); clientAuthenticationFilter.doFilter(request, response, filterChain); verify(response).sendError(HttpServletResponse.SC_UNAUTHORIZED, "RequestTimeTooSkewed"); verify(filterChain, never()).doFilter(request, response); }
/**
 * Returns the bind addresses parsed from the third CLI argument (comma separated),
 * keeping only syntactically valid IP addresses. Falls back to the default bind
 * address when fewer than three arguments were supplied.
 */
public List<String> getAddresses() {
    if (args.length < 3) {
        return Collections.singletonList(DEFAULT_BIND_ADDRESS);
    }
    return Arrays.stream(args[2].split(","))
            .filter(InetAddresses::isInetAddress)
            .collect(Collectors.toList());
}
// Both a single address and a comma-separated pair must parse into the expected lists.
@Test void assertGetAddressesWithThreeArguments() { assertThat(new BootstrapArguments(new String[]{"3306", "test_conf", "127.0.0.1"}).getAddresses(), is(Collections.singletonList("127.0.0.1"))); assertThat(new BootstrapArguments(new String[]{"3306", "test_conf", "1.1.1.1,127.0.0.1"}).getAddresses(), is(Arrays.asList("1.1.1.1", "127.0.0.1"))); }
/**
 * Creates an eagerly materialized, writable copy of this lazy record.
 *
 * @return a {@code DefaultHCatRecord} holding a copy of this record's data
 * @throws HCatException if copying the record fails
 */
public HCatRecord getWritable() throws HCatException {
    DefaultHCatRecord writable = new DefaultHCatRecord();
    writable.copy(this);
    return writable;
}
// The writable copy must preserve all field values and be a DefaultHCatRecord instance.
@Test public void testGetWritable() throws Exception { HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector()).getWritable(); Assert.assertEquals(INT_CONST, ((Integer) r.get(0)).intValue()); Assert.assertEquals(LONG_CONST, ((Long) r.get(1)).longValue()); Assert.assertEquals(DOUBLE_CONST, ((Double) r.get(2)).doubleValue(), 0); Assert.assertEquals(STRING_CONST, r.get(3)); Assert.assertEquals("org.apache.hive.hcatalog.data.DefaultHCatRecord", r.getClass().getName()); }
// Creates a fresh cursor over this hash-slot array's 12-byte (int + long) keys.
@Override public HashSlotCursor12byteKey cursor() { return new CursorIntKey2(); }
// Using a cursor after the backing array was disposed must trip an assertion.
@Test(expected = AssertionError.class) public void testCursor_key2_whenDisposed() { HashSlotCursor12byteKey cursor = hsa.cursor(); hsa.dispose(); cursor.key2(); }
/**
 * Selects {@code count} preferred workers for a file via the consistent-hash provider.
 * Fails fast when the cluster has fewer workers than requested or the hash ring
 * yields fewer than requested; workers picked by the ring but missing from the
 * caller's cluster view are silently skipped (stale-view tolerance), so the
 * returned list may be shorter than {@code count} in that case.
 *
 * @param workerClusterView the caller's current view of the cluster
 * @param fileId the hash key identifying the file
 * @param count number of workers requested
 * @return worker infos for the selected workers, in ring order
 * @throws ResourceExhaustedException if the cluster or the ring cannot supply {@code count} workers
 */
@Override
public List<BlockWorkerInfo> getPreferredWorkers(WorkerClusterView workerClusterView, String fileId, int count) throws ResourceExhaustedException {
    if (workerClusterView.size() < count) {
        throw new ResourceExhaustedException(String.format(
            "Not enough workers in the cluster %d workers in the cluster but %d required",
            workerClusterView.size(), count));
    }
    Set<WorkerIdentity> workerIdentities = workerClusterView.workerIds();
    // refresh the ring so membership changes are reflected before hashing
    mHashProvider.refresh(workerIdentities);
    List<WorkerIdentity> workers = mHashProvider.getMultiple(fileId, count);
    if (workers.size() != count) {
        throw new ResourceExhaustedException(String.format(
            "Found %d workers from the hash ring but %d required", workers.size(), count));
    }
    ImmutableList.Builder<BlockWorkerInfo> builder = ImmutableList.builder();
    for (WorkerIdentity worker : workers) {
        Optional<WorkerInfo> optionalWorkerInfo = workerClusterView.getWorkerById(worker);
        final WorkerInfo workerInfo;
        if (optionalWorkerInfo.isPresent()) {
            workerInfo = optionalWorkerInfo.get();
        } else {
            // the worker returned by the policy does not exist in the cluster view
            // supplied by the client.
            // this can happen when the membership changes and some callers fail to update
            // to the latest worker cluster view.
            // in this case, just skip this worker
            LOG.debug("Inconsistency between caller's view of cluster and that of "
                + "the consistent hash policy's: worker {} selected by policy does not exist in "
                + "caller's view {}. Skipping this worker.",
                worker, workerClusterView);
            continue;
        }
        BlockWorkerInfo blockWorkerInfo = new BlockWorkerInfo(
            worker, workerInfo.getAddress(), workerInfo.getCapacityBytes(),
            workerInfo.getUsedBytes(), workerInfo.getState() == WorkerState.LIVE
        );
        builder.add(blockWorkerInfo);
    }
    List<BlockWorkerInfo> infos = builder.build();
    return infos;
}
// A worker that keeps its identity but changes address must stay selected (same identities)
// while the returned address reflects the new host.
@Test
public void workerAddrUpdateWithIdUnchanged() throws Exception {
    JumpHashPolicy policy = new JumpHashPolicy(mConf);
    List<WorkerInfo> workers = new ArrayList<>();
    workers.add(new WorkerInfo().setIdentity(WorkerIdentityTestUtils.ofLegacyId(1L))
        .setAddress(new WorkerNetAddress().setHost("host1"))
        .setCapacityBytes(0)
        .setUsedBytes(0)
        .setState(WorkerState.LIVE));
    workers.add(new WorkerInfo().setIdentity(WorkerIdentityTestUtils.ofLegacyId(2L))
        .setAddress(new WorkerNetAddress().setHost("host2"))
        .setCapacityBytes(0)
        .setUsedBytes(0)
        .setState(WorkerState.LIVE));
    List<BlockWorkerInfo> selectedWorkers =
        policy.getPreferredWorkers(new WorkerClusterView(workers), "fileId", 2);
    assertEquals("host1", selectedWorkers.stream()
        .filter(w -> w.getIdentity().equals(WorkerIdentityTestUtils.ofLegacyId(1L)))
        .findFirst()
        .get()
        .getNetAddress()
        .getHost());
    // now the worker 1 has migrated to host 3
    workers.set(0, new WorkerInfo().setIdentity(WorkerIdentityTestUtils.ofLegacyId(1L))
        .setAddress(new WorkerNetAddress().setHost("host3"))
        .setCapacityBytes(0)
        .setUsedBytes(0)
        .setState(WorkerState.LIVE));
    List<BlockWorkerInfo> updatedWorkers =
        policy.getPreferredWorkers(new WorkerClusterView(workers), "fileId", 2);
    assertEquals(
        selectedWorkers.stream().map(BlockWorkerInfo::getIdentity).collect(Collectors.toList()),
        updatedWorkers.stream().map(BlockWorkerInfo::getIdentity).collect(Collectors.toList()));
    assertEquals("host3", updatedWorkers.stream()
        .filter(w -> w.getIdentity().equals(WorkerIdentityTestUtils.ofLegacyId(1L)))
        .findFirst()
        .get()
        .getNetAddress()
        .getHost());
}
// Returns the metric for the key, lazily creating it on first access.
// The putIfAbsent + firstNonNull dance handles the race where two threads create
// concurrently: the loser discards its instance and adopts the winner's, so all
// callers always see the same instance for a key. Do not reorder these statements.
public T get(K key) { T metric = metrics.get(key); if (metric == null) { metric = factory.createInstance(key); metric = MoreObjects.firstNonNull(metrics.putIfAbsent(key, metric), metric); } return metric; }
// tryGet returns null before creation; after get() creates the metric, tryGet returns that same instance.
@Test public void testGet() { assertThat(metricsMap.tryGet("foo"), nullValue(AtomicLong.class)); AtomicLong foo = metricsMap.get("foo"); assertThat(metricsMap.tryGet("foo"), sameInstance(foo)); }
/**
 * Validates that the given SMS template code is not already used by a different template.
 * No-op when the code is unused.
 *
 * @param id   id of the template being saved; {@code null} on create
 * @param code template code to check for uniqueness
 */
@VisibleForTesting
public void validateSmsTemplateCodeDuplicate(Long id, String code) {
    SmsTemplateDO template = smsTemplateMapper.selectByCode(code);
    if (template == null) {
        return;
    }
    // A null id means this is a create, so there is no existing record of our own to
    // exclude: any template already holding this code is a duplicate.
    if (id == null) {
        throw exception(SMS_TEMPLATE_CODE_DUPLICATE, code);
    }
    // On update, the code may belong to the record being updated itself; only a
    // different record holding it is a duplicate.
    if (!template.getId().equals(id)) {
        throw exception(SMS_TEMPLATE_CODE_DUPLICATE, code);
    }
}
// Updating a record with a code already owned by another template must raise SMS_TEMPLATE_CODE_DUPLICATE.
@Test
public void testValidateDictDataValueUnique_valueDuplicateForUpdate() {
    // Prepare parameters
    Long id = randomLongId();
    String code = randomString();
    // Mock data
    smsTemplateMapper.insert(randomSmsTemplateDO(o -> o.setCode(code)));
    // Invoke and assert the expected exception
    assertServiceException(() -> smsTemplateService.validateSmsTemplateCodeDuplicate(id, code), SMS_TEMPLATE_CODE_DUPLICATE, code);
}
/**
 * Prunes selection-query segments. Empty input and upsert tables are returned
 * unchanged; LIMIT 0 keeps a single segment (needed to build the result schema);
 * otherwise delegates to the selection-only or order-by pruner.
 */
@Override
public List<IndexSegment> prune(List<IndexSegment> segments, QueryContext query) {
    if (segments.isEmpty()) {
        return segments;
    }
    // For LIMIT 0 case, keep one segment to create the schema
    int limit = query.getLimit();
    if (limit == 0) {
        return Collections.singletonList(segments.get(0));
    }
    // Skip pruning segments for upsert table because valid doc index is equivalent to a filter
    if (segments.get(0).getValidDocIds() != null) {
        return segments;
    }
    if (query.getOrderByExpressions() == null) {
        return pruneSelectionOnly(segments, query);
    } else {
        return pruneSelectionOrderBy(segments, query);
    }
}
// Segments with a valid-doc-ids index (upsert tables) must never be pruned, with or without ORDER BY.
@Test
public void testUpsertTable() {
    List<IndexSegment> indexSegments = Arrays
        .asList(getIndexSegment(0L, 10L, 10, true), getIndexSegment(20L, 30L, 10, true), getIndexSegment(40L, 50L, 10, true));
    // Should not prune any segment for upsert table
    QueryContext queryContext = QueryContextConverterUtils.getQueryContext("SELECT * FROM testTable LIMIT 5");
    List<IndexSegment> result = _segmentPruner.prune(indexSegments, queryContext);
    assertEquals(result.size(), 3);
    queryContext = QueryContextConverterUtils.getQueryContext("SELECT * FROM testTable ORDER BY testColumn LIMIT 5");
    result = _segmentPruner.prune(indexSegments, queryContext);
    assertEquals(result.size(), 3);
}
// Builds the top-level "Load" runtime profile with a "Summary" child carrying query id,
// start/end/total time, state (Finished/Running), version, user/db/SQL, timeout,
// strict/partial-update flags, and — when a session is present — the effective
// load-related session variables plus non-default session variables as JSON.
public RuntimeProfile buildTopLevelProfile(boolean isFinished) { RuntimeProfile profile = new RuntimeProfile("Load"); RuntimeProfile summaryProfile = new RuntimeProfile("Summary"); summaryProfile.addInfoString(ProfileManager.QUERY_ID, DebugUtil.printId(getLoadId())); summaryProfile.addInfoString(ProfileManager.START_TIME, TimeUtils.longToTimeString(createTimestamp)); long currentTimestamp = System.currentTimeMillis(); long totalTimeMs = currentTimestamp - createTimestamp; summaryProfile.addInfoString(ProfileManager.END_TIME, TimeUtils.longToTimeString(currentTimestamp)); summaryProfile.addInfoString(ProfileManager.TOTAL_TIME, DebugUtil.getPrettyStringMs(totalTimeMs)); summaryProfile.addInfoString(ProfileManager.QUERY_TYPE, "Load"); summaryProfile.addInfoString(ProfileManager.QUERY_STATE, isFinished ? "Finished" : "Running"); summaryProfile.addInfoString("StarRocks Version", String.format("%s-%s", Version.STARROCKS_VERSION, Version.STARROCKS_COMMIT_HASH)); summaryProfile.addInfoString(ProfileManager.USER, context.getQualifiedUser()); summaryProfile.addInfoString(ProfileManager.DEFAULT_DB, context.getDatabase()); summaryProfile.addInfoString(ProfileManager.SQL_STATEMENT, originStmt.originStmt); summaryProfile.addInfoString("Timeout", DebugUtil.getPrettyStringMs(timeoutS * 1000)); summaryProfile.addInfoString("Strict Mode", String.valueOf(strictMode)); summaryProfile.addInfoString("Partial Update", String.valueOf(partialUpdate)); SessionVariable variables = context.getSessionVariable(); if (variables != null) { StringBuilder sb = new StringBuilder(); sb.append("load_parallel_instance_num=").append(Config.load_parallel_instance_num).append(","); sb.append(SessionVariable.PARALLEL_FRAGMENT_EXEC_INSTANCE_NUM).append("=") .append(variables.getParallelExecInstanceNum()).append(","); sb.append(SessionVariable.MAX_PARALLEL_SCAN_INSTANCE_NUM).append("=") .append(variables.getMaxParallelScanInstanceNum()).append(","); 
sb.append(SessionVariable.PIPELINE_DOP).append("=").append(variables.getPipelineDop()).append(","); sb.append(SessionVariable.ENABLE_ADAPTIVE_SINK_DOP).append("=") .append(variables.getEnableAdaptiveSinkDop()) .append(","); if (context.getResourceGroup() != null) { sb.append(SessionVariable.RESOURCE_GROUP).append("=") .append(context.getResourceGroup().getName()) .append(","); } sb.deleteCharAt(sb.length() - 1); summaryProfile.addInfoString(ProfileManager.VARIABLES, sb.toString()); summaryProfile.addInfoString("NonDefaultSessionVariables", variables.getNonDefaultVariablesJson()); } profile.addChild(summaryProfile); return profile; }
// Smoke-tests both the running and finished top-level profile builders against a mocked global state.
@Test
public void testBuildTopLevelProfile(@Mocked GlobalStateMgr globalStateMgr,
    @Mocked GlobalTransactionMgr globalTransactionMgr, @Mocked TransactionState transactionState) {
    new Expectations() {
        {
            GlobalStateMgr.getCurrentState().getNextId();
            result = 1;
            connectContext.getSessionVariable();
            result = new SessionVariable();
            connectContext.getQualifiedUser();
            result = "test_user";
            connectContext.getDatabase();
            result = "test_db";
            GlobalStateMgr.getCurrentState();
            result = globalStateMgr;
        }
    };
    // Call the method under test
    Database database = new Database(10000L, "test");
    OlapTable olapTable = new OlapTable(10001L, "tbl", null, KeysType.AGG_KEYS, null, null);
    LoadLoadingTask loadLoadingTask = new LoadLoadingTask.Builder().setDb(database)
        .setTable(olapTable).setContext(connectContext).setOriginStmt(new OriginStatement("")).build();
    RuntimeProfile profile = loadLoadingTask.buildRunningTopLevelProfile();
    // Perform assertions to verify the behavior
    assertNotNull("Profile should not be null", profile);
    profile = loadLoadingTask.buildFinishedTopLevelProfile();
    // Perform assertions to verify the behavior
    assertNotNull("Profile should not be null", profile);
}
// Factory for the Integer sample-standard-deviation UDAF backed by a SUM/COUNT/M2 struct
// (Welford-style accumulator). The lambdas supply, in order: the map-to-new-value term,
// the undo term, the merge cross term, the sum-combine, and the sum-undo.
// NOTE(review): agg1.getInt32(SUM) / agg1.getInt64(COUNT) in the merge lambda performs
// integer/long division before widening to double — presumably intended by the shared
// getStdDevImplementation contract; verify against the other type variants.
@UdafFactory(description = "Compute sample standard deviation of column with type Integer.", aggregateSchema = "STRUCT<SUM integer, COUNT bigint, M2 double>") public static TableUdaf<Integer, Struct, Double> stdDevInt() { return getStdDevImplementation( 0, STRUCT_INT, (agg, newValue) -> newValue + agg.getInt32(SUM), (agg, newValue) -> Double.valueOf(newValue * (agg.getInt64(COUNT) + 1) - (agg.getInt32(SUM) + newValue)), (agg1, agg2) -> Double.valueOf( agg1.getInt32(SUM) / agg1.getInt64(COUNT) - agg2.getInt32(SUM) / agg2.getInt64(COUNT)), (agg1, agg2) -> agg1.getInt32(SUM) + agg2.getInt32(SUM), (agg, valueToRemove) -> agg.getInt32(SUM) - valueToRemove); }
// Mapping a freshly initialized (empty) accumulator must yield a standard deviation of 0.0.
@Test public void shouldAverageEmpty() { final TableUdaf<Integer, Struct, Double> udaf = stdDevInt(); final Struct agg = udaf.initialize(); final double standardDev = udaf.map(agg); assertThat(standardDev, equalTo(0.0)); }
/**
 * Dispatches a bound Java {@code MethodHandle} with 0, 1, or 2 SEL arguments.
 * {@code IllegalStateException} from the target propagates unchanged; any other
 * throwable is wrapped in an {@code IllegalArgumentException}. Arities above 2
 * are rejected with {@code UnsupportedOperationException}.
 *
 * @param javaObj receiver object, or {@code null} for static targets
 * @param args SEL-typed arguments (at most 2 supported)
 * @param m method handle to invoke
 * @param methodName name used in error messages
 * @return the SEL-typed result of the invocation
 */
static SelType callJavaMethod(Object javaObj, SelType[] args, MethodHandle m, String methodName) {
    try {
        switch (args.length) {
            case 0:
                return callJavaMethod0(javaObj, m);
            case 1:
                return callJavaMethod1(javaObj, args[0], m);
            case 2:
                return callJavaMethod2(javaObj, args[0], args[1], m);
            default:
                // fall through to the unsupported-arity error below
                break;
        }
    } catch (IllegalStateException e) {
        throw e;
    } catch (Throwable t) {
        throw new IllegalArgumentException("Failed calling method " + methodName, t);
    }
    throw new UnsupportedOperationException(
        "DO NOT support calling method: " + methodName + " with args: " + Arrays.toString(args));
}
// Zero-arg dispatch: a static void target yields SelType.NULL; an instance String target yields a SEL string.
@Test public void testCallJavaMethodWithoutArg() throws Throwable { m1 = MethodHandles.lookup() .findStatic(MockType.class, "staticNoArg", MethodType.methodType(void.class)); m2 = MethodHandles.lookup() .findVirtual(MockType.class, "noArg", MethodType.methodType(String.class)); SelType res = SelTypeUtil.callJavaMethod(null, new SelType[0], m1, "staticNoArg"); assertEquals(SelType.NULL, res); res = SelTypeUtil.callJavaMethod(new MockType(), new SelType[0], m2, "noArg"); assertEquals(SelTypes.STRING, res.type()); assertEquals("noArg", res.toString()); }
// Applies "+key"/"-key" prefixed attribute mutations on top of the current attributes.
// Create mode only accepts "+" (adds); update mode classifies "+" as add-or-update and
// "-" as delete (deleting a missing key is an error). Keys are validated, de-duplicated,
// and each bucket is validated via validateAlter before the merged map is returned.
public static Map<String, String> alterCurrentAttributes(boolean create, Map<String, Attribute> all, ImmutableMap<String, String> currentAttributes, ImmutableMap<String, String> newAttributes) { Map<String, String> init = new HashMap<>(); Map<String, String> add = new HashMap<>(); Map<String, String> update = new HashMap<>(); Map<String, String> delete = new HashMap<>(); Set<String> keys = new HashSet<>(); for (Map.Entry<String, String> attribute : newAttributes.entrySet()) { String key = attribute.getKey(); String realKey = realKey(key); String value = attribute.getValue(); validate(realKey); duplicationCheck(keys, realKey); if (create) { if (key.startsWith("+")) { init.put(realKey, value); } else { throw new RuntimeException("only add attribute is supported while creating topic. key: " + realKey); } } else { if (key.startsWith("+")) { if (!currentAttributes.containsKey(realKey)) { add.put(realKey, value); } else { update.put(realKey, value); } } else if (key.startsWith("-")) { if (!currentAttributes.containsKey(realKey)) { throw new RuntimeException("attempt to delete a nonexistent key: " + realKey); } delete.put(realKey, value); } else { throw new RuntimeException("wrong format key: " + realKey); } } } validateAlter(all, init, true, false); validateAlter(all, add, false, false); validateAlter(all, update, false, false); validateAlter(all, delete, false, true); log.info("add: {}, update: {}, delete: {}", add, update, delete); HashMap<String, String> finalAttributes = new HashMap<>(currentAttributes); finalAttributes.putAll(init); finalAttributes.putAll(add); finalAttributes.putAll(update); for (String s : delete.keySet()) { finalAttributes.remove(s); } return finalAttributes; }
// Update mode: deleting ("-" prefix) an attribute that does not exist must throw.
@Test(expected = RuntimeException.class) public void alterCurrentAttributes_UpdateMode_DeleteNonExistentAttribute_ShouldThrowException() { ImmutableMap<String, String> newAttributes = ImmutableMap.of("-attr4", "value4"); AttributeUtil.alterCurrentAttributes(false, allAttributes, currentAttributes, newAttributes); }
/**
 * Initializes the listener-notification warn timeout (milliseconds) from the
 * system property {@code nacos.listener.notify.warn.timeout}. Falls back to
 * {@code DEFAULT_NOTIF_WARN_TIMEOUTS} when the property is absent, blank, or
 * not a plain digit string.
 *
 * @return the resolved timeout in milliseconds (also stored in {@code notifyWarnTimeout})
 */
static long initNotifyWarnTimeout() {
    String notifyTimeouts = System.getProperty("nacos.listener.notify.warn.timeout");
    if (StringUtils.isNotBlank(notifyTimeouts) && NumberUtils.isDigits(notifyTimeouts)) {
        // parseLong avoids the needless boxing of Long.valueOf followed by auto-unboxing;
        // isDigits has already guaranteed the string is parseable.
        notifyWarnTimeout = Long.parseLong(notifyTimeouts);
        LOGGER.info("config listener notify warn timeout millis is set to {}", notifyWarnTimeout);
    } else {
        LOGGER.info("config listener notify warn timeout millis use default {} millis ", DEFAULT_NOTIF_WARN_TIMEOUTS);
        notifyWarnTimeout = DEFAULT_NOTIF_WARN_TIMEOUTS;
    }
    return notifyWarnTimeout;
}
// A digit-only property is honored; a non-numeric property falls back to the 60000 ms default.
@Test void testNotifyWarnTimeout() { System.setProperty("nacos.listener.notify.warn.timeout", "5000"); long notifyWarnTimeout = CacheData.initNotifyWarnTimeout(); assertEquals(5000, notifyWarnTimeout); System.setProperty("nacos.listener.notify.warn.timeout", "1bf000abc"); long notifyWarnTimeout2 = CacheData.initNotifyWarnTimeout(); assertEquals(60000, notifyWarnTimeout2); }
// Returns the wrapped float value.
@Override public float floatValue() { return value; }
// A doubled minus sign must still parse as a negative float.
@Test
void testDoubleNegative() throws IOException {
    // PDFBOX-4289
    COSFloat cosFloat = new COSFloat("--16.33");
    assertEquals(-16.33f, cosFloat.floatValue());
}
// Drives full SBE codegen: package-info, type and message-header stubs, then for each IR
// message partitions its body tokens into fields/groups/var-data and emits a decoder and an
// encoder, each with a field-precedence model keyed to its "<Class>#CodecStates" name.
public void generate() throws IOException { packageNameByTypes.clear(); generatePackageInfo(); generateTypeStubs(); generateMessageHeaderStub(); for (final List<Token> tokens : ir.messages()) { final Token msgToken = tokens.get(0); final List<Token> messageBody = getMessageBody(tokens); final boolean hasVarData = -1 != findSignal(messageBody, Signal.BEGIN_VAR_DATA); int i = 0; final List<Token> fields = new ArrayList<>(); i = collectFields(messageBody, i, fields); final List<Token> groups = new ArrayList<>(); i = collectGroups(messageBody, i, groups); final List<Token> varData = new ArrayList<>(); collectVarData(messageBody, i, varData); final String decoderClassName = formatClassName(decoderName(msgToken.name())); final String decoderStateClassName = decoderClassName + "#CodecStates"; final FieldPrecedenceModel decoderPrecedenceModel = precedenceChecks.createDecoderModel( decoderStateClassName, tokens); generateDecoder(decoderClassName, msgToken, fields, groups, varData, hasVarData, decoderPrecedenceModel); final String encoderClassName = formatClassName(encoderName(msgToken.name())); final String encoderStateClassName = encoderClassName + "#CodecStates"; final FieldPrecedenceModel encoderPrecedenceModel = precedenceChecks.createEncoderModel( encoderStateClassName, tokens); generateEncoder(encoderClassName, msgToken, fields, groups, varData, hasVarData, encoderPrecedenceModel); } }
// When precedence checks are enabled, the generated encoder must expose the precedence-checks flag field.
@Test void shouldGeneratePrecedenceChecksWhenEnabled() throws Exception { final PrecedenceChecks.Context context = new PrecedenceChecks.Context() .shouldGeneratePrecedenceChecks(true); final PrecedenceChecks precedenceChecks = PrecedenceChecks.newInstance(context); generator(precedenceChecks).generate(); final Field field = Arrays.stream(compileCarEncoder().getDeclaredFields()) .filter(f -> f.getName().equals(context.precedenceChecksFlagName())) .findFirst() .orElse(null); assertNotNull(field); }
/**
 * Converts a {@code ShardingSphereUser} into its YAML configuration form,
 * rendering the grantee as {@code user@host}.
 *
 * @param data the user to convert; may be {@code null}
 * @return the YAML configuration, or {@code null} when {@code data} is {@code null}
 */
@Override
public YamlUserConfiguration swapToYamlConfiguration(final ShardingSphereUser data) {
    if (null == data) {
        return null;
    }
    YamlUserConfiguration yamlUserConfig = new YamlUserConfiguration();
    yamlUserConfig.setUser(data.getGrantee().toString());
    yamlUserConfig.setPassword(data.getPassword());
    yamlUserConfig.setAuthenticationMethodName(data.getAuthenticationMethodName());
    yamlUserConfig.setAdmin(data.isAdmin());
    return yamlUserConfig;
}
// The YAML form must render the grantee as "user@host" and carry the password through.
@Test void assertSwapToYamlConfiguration() { YamlUserConfiguration actual = new YamlUserSwapper().swapToYamlConfiguration(new ShardingSphereUser("foo_user", "foo_pwd", "127.0.0.1")); assertNotNull(actual); assertThat(actual.getUser(), is("foo_user@127.0.0.1")); assertThat(actual.getPassword(), is("foo_pwd")); }
// Lowercase Roman numeral for i (e.g. 26 -> "xxvi"); Locale.ROOT keeps the casing locale-independent.
public static String asRomanNumeralsLower(int i) { return asRomanNumerals(i).toLowerCase(Locale.ROOT); }
// Spot-checks lowercase Roman numerals at 1, 26, and 27.
@Test public void testRomanLower() { assertEquals("i", AutoPageNumberUtils.asRomanNumeralsLower(1)); assertEquals("xxvi", AutoPageNumberUtils.asRomanNumeralsLower(26)); assertEquals("xxvii", AutoPageNumberUtils.asRomanNumeralsLower(27)); }
// Executes the feature: delegates to a processor when one is set; otherwise runs the
// before-feature hook (a false return skips all scenarios), processes each remaining
// scenario, and always runs afterFeature() — even when the hook aborted.
@Override public void run() { if (processor != null) { processor.execute(); } else { if (!beforeHook()) { logger.info("before-feature hook returned [false], aborting: {}", this); } else { scenarios.forEachRemaining(this::processScenario); } afterFeature(); } }
// A feature calling itself must complete and leave the expected result variable.
@Test void testCallSelf() { run("call-self.feature"); matchContains(fr.result.getVariables(), "{ result: 'second' }"); }
/**
 * Estimates how long the player can keep running at the current energy level,
 * formatted either as whole seconds ("300s") or as "m:ss".
 * Note: the compound assignments {@code energyUnitsLost *= 0.3} / {@code *= 0.85}
 * intentionally truncate back to int, matching the game's integer energy units.
 */
String getEstimatedRunTimeRemaining(boolean inSeconds)
{
    // Calculate the amount of energy lost every tick.
    // Negative weight has the same depletion effect as 0 kg. >64kg counts as 64kg.
    final int effectiveWeight = Math.min(Math.max(client.getWeight(), 0), 64);
    // 100% energy is 10000 energy units
    int energyUnitsLost = effectiveWeight * 67 / 64 + 67;
    if (client.getVarbitValue(Varbits.RUN_SLOWED_DEPLETION_ACTIVE) != 0)
    {
        energyUnitsLost *= 0.3; // Stamina effect reduces energy depletion to 30%
    }
    else if (isRingOfEnduranceEquipped()) // Ring of Endurance passive effect does not stack with stamina potion
    {
        Integer charges = getRingOfEnduranceCharges();
        if (charges == null)
        {
            return "?";
        }
        if (charges >= RING_OF_ENDURANCE_PASSIVE_EFFECT)
        {
            energyUnitsLost *= 0.85; // Ring of Endurance passive effect reduces energy depletion to 85%
        }
    }
    // Math.ceil is correct here - only need 1 energy unit to run
    final double ticksLeft = Math.ceil(client.getEnergy() / (double) energyUnitsLost);
    final double secondsLeft = ticksLeft * Constants.GAME_TICK_LENGTH / 1000.0;
    // Return the text
    if (inSeconds)
    {
        return (int) Math.floor(secondsLeft) + "s";
    }
    else
    {
        final int minutes = (int) Math.floor(secondsLeft / 60.0);
        final int seconds = (int) Math.floor(secondsLeft - (minutes * 60.0));
        return minutes + ":" + StringUtils.leftPad(Integer.toString(seconds), 2, "0");
    }
}
// With stamina active the estimate is 300s; with only the ring's passive effect it is 1:47.
@Test public void testEstimatedRuntimeRemaining() { ItemContainer equipment = mock(ItemContainer.class); when(client.getItemContainer(InventoryID.EQUIPMENT)).thenReturn(equipment); when(equipment.count(RING_OF_ENDURANCE)).thenReturn(1); when(client.getVarbitValue(Varbits.RUN_SLOWED_DEPLETION_ACTIVE)).thenReturn(1); when(client.getEnergy()).thenReturn(10000); assertEquals("300s", runEnergyPlugin.getEstimatedRunTimeRemaining(true)); when(client.getVarbitValue(Varbits.RUN_SLOWED_DEPLETION_ACTIVE)).thenReturn(0); when(configManager.getRSProfileConfiguration(RunEnergyConfig.GROUP_NAME, "ringOfEnduranceCharges", Integer.class)).thenReturn(512); assertEquals("1:47", runEnergyPlugin.getEstimatedRunTimeRemaining(false)); }
// Renders the anonymized text for a parsed statement tree by delegating to build().
public String anonymize(final ParseTree tree) { return build(tree); }
// Approval test: anonymized ALTER STREAM ... ADD COLUMN output is checked against the approved snapshot.
@Test public void shouldAnonymizeAlterOptionCorrectly() { final String output = anon.anonymize( "ALTER STREAM my_stream ADD COLUMN c3 INT, ADD COLUMN c4 INT;"); Approvals.verify(output); }
// Collects the elements of the iterator that pass the filter into a new list
// (composes the sibling filtered() and toList() helpers).
public static <E> List<E> filterToList(Iterator<E> iter, Filter<E> filter) { return toList(filtered(iter, filter)); }
// Only elements contained in the filter set ("3") survive; order and size are asserted.
@Test public void filterToListTest(){ final List<String> obj2 = ListUtil.toList("3"); final List<String> obj = ListUtil.toList("1", "3"); final List<String> filtered = IterUtil.filterToList(obj.iterator(), obj2::contains); assertEquals(1, filtered.size()); assertEquals("3", filtered.get(0)); }