focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Starts a SAML login: builds the Auth context for the given callback URL and
 * HTTP exchange, then triggers the login redirect with the supplied relay state.
 */
public void initLogin(String callbackUrl, String relayState, HttpRequest request, HttpResponse response) {
    login(this.initSamlAuth(callbackUrl, request, response), relayState);
}
// Enabling SAML signatures while providing only the private key (no SP certificate)
// must fail authentication initialization with a descriptive cause.
@Test
public void givenMissingSpCertificate_whenInitializingTheAuthentication_thenExceptionIsThrown() {
    initBasicSamlSettings();
    settings.setProperty("sonar.auth.saml.signature.enabled", true);
    settings.setProperty("sonar.auth.saml.sp.privateKey.secured", "PRIVATE_KEY");
    assertThatIllegalStateException()
        .isThrownBy(() -> underTest.initLogin("","", mock(JavaxHttpRequest.class), mock(JavaxHttpResponse.class)))
        .withMessage("Failed to create a SAML Auth")
        .havingCause()
        .withMessage("Service provider certificate is missing");
}
/**
 * Renders a config map as a "{k1=v1, k2=v2}" string with keys sorted, redacting
 * values that are sensitive or unknown to the given {@link ConfigDef}.
 * String-typed values are quoted; nulls print as "null".
 */
public static String configMapToRedactedString(Map<String, Object> map, ConfigDef configDef) {
    List<String> sortedKeys = new ArrayList<>(map.keySet());
    Collections.sort(sortedKeys);
    StringJoiner joiner = new StringJoiner(", ", "{", "}");
    for (String key : sortedKeys) {
        StringBuilder entry = new StringBuilder(key).append('=');
        ConfigKey configKey = configDef.configKeys().get(key);
        if (configKey == null || configKey.type().isSensitive()) {
            // Unknown keys are conservatively treated as sensitive.
            entry.append("(redacted)");
        } else {
            Object value = map.get(key);
            if (value == null) {
                entry.append("null");
            } else if (configKey.type() == Type.STRING) {
                entry.append('"').append(value).append('"');
            } else {
                entry.append(value);
            }
        }
        joiner.add(entry);
    }
    return joiner.toString();
}
// An empty configuration map renders as an empty braces pair.
@Test
public void testConfigMapToRedactedStringForEmptyMap() {
    final String rendered = ConfigUtils.configMapToRedactedString(Collections.emptyMap(), CONFIG);
    assertEquals("{}", rendered);
}
// Test hook: the tracker is fully drained only when every bookkeeping
// structure is empty.
@VisibleForTesting
boolean areAllMapsEmpty() {
    if (!taskExecutorToDataSets.isEmpty()) {
        return false;
    }
    if (!dataSetToTaskExecutors.isEmpty()) {
        return false;
    }
    if (!dataSetMetaInfo.isEmpty()) {
        return false;
    }
    return partitionReleaseCompletionFutures.isEmpty();
}
// An empty cluster-partition report should neither trigger any partition
// releases nor leave state behind in the tracker's internal maps.
@Test
void testProcessEmptyClusterPartitionReport() {
    TestClusterPartitionReleaser partitionReleaser = new TestClusterPartitionReleaser();
    final ResourceManagerPartitionTrackerImpl tracker = new ResourceManagerPartitionTrackerImpl(partitionReleaser);
    reportEmpty(tracker, TASK_EXECUTOR_ID_1);
    assertThat(partitionReleaser.releaseCalls).isEmpty();
    assertThat(tracker.areAllMapsEmpty()).isTrue();
}
// Expands a (key, value) pair into hashed range features (one per level) plus a
// single edge feature. Values outside [lowerBound, upperBound] expand to nothing.
// Level intervals are arity-aligned and grow by a factor of arity per iteration;
// the loop stops early when the interval arithmetic would overflow a long.
// NOTE(review): value == 0 takes the negative branch for maxLevels/sign —
// presumably intentional; confirm against the range-feature spec.
public void expand(String key, long value, RangeHandler rangeHandler, EdgeHandler edgeHandler) {
    if (value < lowerBound || value > upperBound) {
        // Value outside bounds -> expand to nothing.
        return;
    }
    int maxLevels = value > 0 ? maxPositiveLevels : maxNegativeLevels;
    int sign = value > 0 ? 1 : -1;
    // Append key to feature string builder
    StringBuilder builder = new StringBuilder(128);
    builder.append(key).append('=');
    long levelSize = arity;
    // Edge feature: the arity-aligned interval containing the value, plus the
    // value's offset within that interval.
    long edgeInterval = (value / arity) * arity;
    edgeHandler.handleEdge(createEdgeFeatureHash(builder, edgeInterval), (int) Math.abs(value - edgeInterval));
    for (int i = 0; i < maxLevels; ++i) {
        // Start of the arity-aligned interval containing the value at this level.
        long start = (value / levelSize) * levelSize;
        if (Math.abs(start) + levelSize - 1 < 0) { // overflow
            break;
        }
        rangeHandler.handleRange(createRangeFeatureHash(builder, start, start + sign * (levelSize - 1)));
        levelSize *= arity;
        if (levelSize <= 0 && levelSize != Long.MIN_VALUE) { //overflow
            break;
        }
    }
}
// Expanding -42 with arity 2 must emit the hashed labels of every enclosing
// power-of-two interval (doubling each level down to Long.MIN_VALUE+1), plus an
// edge feature for the exact value with offset 0. The iterator is drained to
// prove no extra ranges are produced.
@Test
void requireThatSmallNegativeRangeIsExpandedInArity2() {
    PredicateRangeTermExpander expander = new PredicateRangeTermExpander(2);
    Iterator<String> expectedLabels = List.of(
        "key=-43-42", "key=-43-40", "key=-47-40", "key=-47-32", "key=-63-32", "key=-63-0",
        "key=-127-0", "key=-255-0", "key=-511-0", "key=-1023-0", "key=-2047-0", "key=-4095-0",
        "key=-8191-0", "key=-16383-0", "key=-32767-0", "key=-65535-0", "key=-131071-0",
        "key=-262143-0", "key=-524287-0", "key=-1048575-0", "key=-2097151-0", "key=-4194303-0",
        "key=-8388607-0", "key=-16777215-0", "key=-33554431-0", "key=-67108863-0",
        "key=-134217727-0", "key=-268435455-0", "key=-536870911-0", "key=-1073741823-0",
        "key=-2147483647-0", "key=-4294967295-0", "key=-8589934591-0", "key=-17179869183-0",
        "key=-34359738367-0", "key=-68719476735-0", "key=-137438953471-0", "key=-274877906943-0",
        "key=-549755813887-0", "key=-1099511627775-0", "key=-2199023255551-0",
        "key=-4398046511103-0", "key=-8796093022207-0", "key=-17592186044415-0",
        "key=-35184372088831-0", "key=-70368744177663-0", "key=-140737488355327-0",
        "key=-281474976710655-0", "key=-562949953421311-0", "key=-1125899906842623-0",
        "key=-2251799813685247-0", "key=-4503599627370495-0", "key=-9007199254740991-0",
        "key=-18014398509481983-0", "key=-36028797018963967-0", "key=-72057594037927935-0",
        "key=-144115188075855871-0", "key=-288230376151711743-0", "key=-576460752303423487-0",
        "key=-1152921504606846975-0", "key=-2305843009213693951-0", "key=-4611686018427387903-0",
        "key=-9223372036854775807-0").iterator();
    expander.expand("key", -42,
        range -> assertEquals(PredicateHash.hash64(expectedLabels.next()), range),
        (edge, value) -> {
            assertEquals(PredicateHash.hash64("key=-42"), edge);
            assertEquals(0, value);
        });
    assertFalse(expectedLabels.hasNext());
}
/**
 * Not yet implemented: always throws {@link UnsupportedOperationException}.
 */
@Override
public LossMeasurementEntry getLm(MdId mdName, MaIdShort maName, MepId mepId, SoamId lmId) throws CfmConfigException {
    throw new UnsupportedOperationException("Not yet implemented");
}
// Pins the current "not implemented" contract of getLm.
@Test
public void testGetLm() throws CfmConfigException {
    //TODO: Implement underlying method
    try {
        soamManager.getLm(MDNAME1, MANAME1, MEPID1, LMID101);
        fail("Expecting UnsupportedOperationException");
    } catch (UnsupportedOperationException e) {
        // expected: the stub always throws until the method is implemented
    }
}
/**
 * Deprecated key-less transformValues variant: validates the supplier, adapts it
 * to the key-aware form, and delegates to the shared implementation.
 */
@Override
@Deprecated
public <VR> KStream<K, VR> transformValues(final org.apache.kafka.streams.kstream.ValueTransformerSupplier<? super V, ? extends VR> valueTransformerSupplier,
                                           final String... stateStoreNames) {
    return doTransformValues(
        toValueTransformerWithKeySupplier(
            Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null")),
        NamedInternal.empty(),
        stateStoreNames);
}
// transformValues must reject a null transformer supplier with a descriptive NPE.
@Test
@SuppressWarnings("deprecation")
public void shouldNotAllowNullValueTransformerWithKeySupplierOnTransformValuesWithStores() {
    final NullPointerException exception = assertThrows(
        NullPointerException.class,
        () -> testStream.transformValues(
            (ValueTransformerWithKeySupplier<Object, Object, Object>) null,
            "storeName"));
    assertThat(exception.getMessage(), equalTo("valueTransformerSupplier can't be null"));
}
/**
 * Resolves the client's textual IP and canonical host name, then delegates to
 * the string-based overload for the actual export matching.
 */
public AccessPrivilege getAccessPrivilege(InetAddress addr) {
    final String ip = addr.getHostAddress();
    final String hostName = addr.getCanonicalHostName();
    return getAccessPrivilege(ip, hostName);
}
// A bare "*" export with the "ro" option grants read-only access to any client.
@Test
public void testWildcardRO() {
    final NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod, "* ro");
    final AccessPrivilege privilege = matcher.getAccessPrivilege(address1, hostname1);
    Assert.assertEquals(AccessPrivilege.READ_ONLY, privilege);
}
/**
 * Returns the message JSON without any size limit.
 *
 * @deprecated use {@code getJSON(long)} with an explicit size limit instead.
 */
@Deprecated
public String getJSON() {
    return getJSON(Long.MAX_VALUE);
}
// A GZIP-compressed GELF payload must decompress back to the original JSON
// when read within the size limit.
@Test
public void testGetJSONFromGZIPCompressedMessage() throws Exception {
    GELFMessage msg = new GELFMessage(TestHelper.gzipCompress(GELF_JSON));
    assertEquals(GELF_JSON, msg.getJSON(1024));
}
// Coerces the left/right typed expressions of a comparison to compatible types.
// Returns the (possibly rewritten) pair plus a flag indicating whether the right
// side must be materialized as a static field (date/time parsing cases).
// Branch order matters: literal-number coercion is tried before string/narrowing
// casts, and the default leaves the right side untouched.
public CoercedExpressionResult coerce() {
    final Class<?> leftClass = left.getRawClass();
    final Class<?> nonPrimitiveLeftClass = toNonPrimitiveType(leftClass);
    final Class<?> rightClass = right.getRawClass();
    final Class<?> nonPrimitiveRightClass = toNonPrimitiveType(rightClass);
    boolean sameClass = leftClass == rightClass;
    boolean isUnificationExpression = left instanceof UnificationTypedExpression || right instanceof UnificationTypedExpression;
    if (sameClass || isUnificationExpression) {
        // Already compatible (or unification handles typing): nothing to do.
        return new CoercedExpressionResult(left, right);
    }
    if (!canCoerce()) {
        throw new CoercedExpressionException(new InvalidExpressionErrorResult("Comparison operation requires compatible types. Found " + leftClass + " and " + rightClass));
    }
    // int/long vs double: widen the LEFT side to double instead of touching the right.
    if ((nonPrimitiveLeftClass == Integer.class || nonPrimitiveLeftClass == Long.class) && nonPrimitiveRightClass == Double.class) {
        CastExpr castExpression = new CastExpr(PrimitiveType.doubleType(), this.left.getExpression());
        return new CoercedExpressionResult(
            new TypedExpression(castExpression, double.class, left.getType()),
            right,
            false);
    }
    final boolean leftIsPrimitive = leftClass.isPrimitive() || Number.class.isAssignableFrom( leftClass );
    final boolean canCoerceLiteralNumberExpr = canCoerceLiteralNumberExpr(leftClass);
    boolean rightAsStaticField = false;
    final Expression rightExpression = right.getExpression();
    final TypedExpression coercedRight;
    if (leftIsPrimitive && canCoerceLiteralNumberExpr && rightExpression instanceof LiteralStringValueExpr) {
        // Numeric literal on the right: rewrite it to a literal of the left's type.
        final Expression coercedLiteralNumberExprToType = coerceLiteralNumberExprToType((LiteralStringValueExpr) right.getExpression(), leftClass);
        coercedRight = right.cloneWithNewExpression(coercedLiteralNumberExprToType);
        coercedRight.setType( leftClass );
    } else if (shouldCoerceBToString(left, right)) {
        coercedRight = coerceToString(right);
    } else if (isNotBinaryExpression(right) && canBeNarrowed(leftClass, rightClass) && right.isNumberLiteral()) {
        coercedRight = castToClass(leftClass);
    } else if (leftClass == long.class && rightClass == int.class) {
        coercedRight = right.cloneWithNewExpression(new CastExpr(PrimitiveType.longType(), right.getExpression()));
    } else if (leftClass == Date.class && rightClass == String.class) {
        // Parsed date constants live in static fields.
        coercedRight = coerceToDate(right);
        rightAsStaticField = true;
    } else if (leftClass == LocalDate.class && rightClass == String.class) {
        coercedRight = coerceToLocalDate(right);
        rightAsStaticField = true;
    } else if (leftClass == LocalDateTime.class && rightClass == String.class) {
        coercedRight = coerceToLocalDateTime(right);
        rightAsStaticField = true;
    } else if (shouldCoerceBToMap()) {
        coercedRight = castToClass(toNonPrimitiveType(leftClass));
    } else if (isBoolean(leftClass) && !isBoolean(rightClass)) {
        coercedRight = coerceBoolean(right);
    } else {
        coercedRight = right;
    }
    // The left side only needs coercion in the Character-vs-String case.
    final TypedExpression coercedLeft;
    if (nonPrimitiveLeftClass == Character.class && shouldCoerceBToString(right, left)) {
        coercedLeft = coerceToString(left);
    } else {
        coercedLeft = left;
    }
    return new CoercedExpressionResult(coercedLeft, coercedRight, rightAsStaticField);
}
// Comparing an Integer getter with a Map lookup should cast the right side to Integer.
@Test
public void castMaps() {
    final TypedExpression left = expr(THIS_PLACEHOLDER + ".getAge()", Integer.class);
    final TypedExpression right = expr("$m.get(\"age\")", java.util.Map.class);
    final CoercedExpression.CoercedExpressionResult coerce = new CoercedExpression(left, right, false).coerce();
    assertThat(coerce.getCoercedRight()).isEqualTo(expr("(java.lang.Integer)$m.get(\"age\")", Map.class));
}
/**
 * Replaces the host set registered under {@code key}: hosts no longer present
 * are deregistered, and all hosts in {@code newHosts} are (re)registered.
 * Synchronized so concurrent updates see a consistent registry.
 */
public synchronized void update(ApplicationId key, Collection<String> newHosts) {
    verifyHosts(key, newHosts);
    final Collection<String> previousHosts = getHosts(key);
    log.log(Level.FINE, () -> "Setting hosts for key '" + key + "', " +
            "newHosts: " + newHosts + ", " + "currentHosts: " + previousHosts);
    removeHosts(findRemovedHosts(newHosts, previousHosts));
    addHosts(key, newHosts);
}
// Hosts registered under different application keys must resolve back to their
// own key, with no cross-talk between applications.
@Test
public void multiple_keys_are_handled() {
    HostRegistry reg = new HostRegistry();
    reg.update(foo, List.of("foo.com", "bar.com"));
    reg.update(bar, List.of("baz.com", "quux.com"));
    assertGetKey(reg, "foo.com", foo);
    assertGetKey(reg, "bar.com", foo);
    assertGetKey(reg, "baz.com", bar);
    assertGetKey(reg, "quux.com", bar);
}
// CLI entry point: validates that the config file exists, loads the migration
// config, and delegates to the testable overload with production collaborators
// (real ksql client factory, migrations dir, system clock). Returns 1 on any
// configuration error.
@Override
protected int command() {
    if (!validateConfigFilePresent()) {
        return 1;
    }
    final MigrationConfig config;
    try {
        config = MigrationConfig.load(getConfigFile());
    } catch (KsqlException | MigrationException e) {
        LOGGER.error(e.getMessage());
        return 1;
    }
    return command(
        config,
        MigrationsUtil::getKsqlClient,
        getMigrationsDir(getConfigFile(), config),
        Clock.systemDefaultZone()
    );
}
// Migrating from version 1 to 3 must execute every ASSERT TOPIC variant of the
// version-3 migration file, in order, before closing the client.
@Test
public void shouldApplyAssertTopicCommands() throws Exception {
    command = PARSER.parse("-v", "3");
    createMigrationFile(1, NAME, migrationsDir, COMMAND);
    createMigrationFile(3, NAME, migrationsDir, ASSERT_TOPIC_COMMANDS);
    givenCurrentMigrationVersion("1");
    givenAppliedMigration(1, NAME, MigrationState.MIGRATED);
    // When:
    final int result = command.command(config, (cfg, headers) -> ksqlClient, migrationsDir,
        Clock.fixed(Instant.ofEpochMilli(1000), ZoneId.systemDefault()));
    // Then:
    assertThat(result, is(0));
    final InOrder inOrder = inOrder(ksqlClient);
    // Verify each assertTopic overload was invoked in file order.
    verifyMigratedVersion(inOrder, 3, "1", MigrationState.MIGRATED, () -> {
        inOrder.verify(ksqlClient).assertTopic("abc", ImmutableMap.of(), true);
        inOrder.verify(ksqlClient).assertTopic("abc", ImmutableMap.of(), false, Duration.ofSeconds(10));
        inOrder.verify(ksqlClient).assertTopic("abc", ImmutableMap.of("FOO", 4, "BAR", 6), false);
        inOrder.verify(ksqlClient).assertTopic("abc", ImmutableMap.of("FOO", 4, "BAR", 6), true, Duration.ofSeconds(10));
    });
    inOrder.verify(ksqlClient).close();
    inOrder.verifyNoMoreInteractions();
}
// Checks whether the requested scopes are already approved for this user/client.
// Step 1: if the client's auto-approve scopes cover every requested scope, the
// approvals are persisted and the check passes. Step 2: otherwise, fall back to
// the approvals the user has granted previously.
@Override
@Transactional
public boolean checkForPreApproval(Long userId, Integer userType, String clientId, Collection<String> requestedScopes) {
    // Step 1: auto-approval from the client configuration.
    OAuth2ClientDO clientDO = oauth2ClientService.validOAuthClientFromCache(clientId);
    Assert.notNull(clientDO, "客户端不能为空"); // defensive programming
    if (CollUtil.containsAll(clientDO.getAutoApproveScopes(), requestedScopes)) {
        // gh-877 - if all scopes are auto approved, approvals still need to be added to the approval store.
        LocalDateTime expireTime = LocalDateTime.now().plusSeconds(TIMEOUT);
        for (String scope : requestedScopes) {
            saveApprove(userId, userType, clientId, scope, true, expireTime);
        }
        return true;
    }
    // Step 2: count previously granted approvals; pass only if they cover every
    // requested scope.
    List<OAuth2ApproveDO> approveDOs = getApproveList(userId, userType, clientId);
    Set<String> scopes = convertSet(approveDOs, OAuth2ApproveDO::getScope,
        OAuth2ApproveDO::getApproved); // keeps only unexpired + approved entries
    return CollUtil.containsAll(scopes, requestedScopes);
}
// A previously REJECTED approval for the requested scope must make the
// pre-approval check fail.
@Test
public void checkForPreApproval_reject() {
    // Prepare parameters
    Long userId = randomLongId();
    Integer userType = randomEle(UserTypeEnum.values()).getValue();
    String clientId = randomString();
    List<String> requestedScopes = Lists.newArrayList("read");
    // Mock: the client has no auto-approve scopes, forcing the fallback path.
    when(oauth2ClientService.validOAuthClientFromCache(eq(clientId)))
        .thenReturn(randomPojo(OAuth2ClientDO.class).setAutoApproveScopes(null));
    // Seed an unexpired approval record that was rejected by the user.
    OAuth2ApproveDO approve = randomPojo(OAuth2ApproveDO.class).setUserId(userId)
        .setUserType(userType).setClientId(clientId).setScope("read")
        .setExpiresTime(LocalDateTimeUtil.offset(LocalDateTime.now(), 1L, ChronoUnit.DAYS)).setApproved(false); // rejected
    oauth2ApproveMapper.insert(approve);
    // Invoke
    boolean success = oauth2ApproveService.checkForPreApproval(userId, userType, clientId, requestedScopes);
    // Assert
    assertFalse(success);
}
public static boolean webSocketHostPathMatches(String hostPath, String targetPath) { boolean exactPathMatch = true; if (ObjectHelper.isEmpty(hostPath) || ObjectHelper.isEmpty(targetPath)) { // This scenario should not really be possible as the input args come from the vertx-websocket consumer / producer URI return false; } // Paths ending with '*' are Vert.x wildcard routes so match on the path prefix if (hostPath.endsWith("*")) { exactPathMatch = false; hostPath = hostPath.substring(0, hostPath.lastIndexOf('*')); } String normalizedHostPath = HttpUtils.normalizePath(hostPath + "/"); String normalizedTargetPath = HttpUtils.normalizePath(targetPath + "/"); String[] hostPathElements = normalizedHostPath.split("/"); String[] targetPathElements = normalizedTargetPath.split("/"); if (exactPathMatch && hostPathElements.length != targetPathElements.length) { return false; } if (exactPathMatch) { return normalizedHostPath.equals(normalizedTargetPath); } else { return normalizedTargetPath.startsWith(normalizedHostPath); } }
// A target path differing from the host path only by a trailing slash still matches.
@Test
void webSocketHostWithTrailingSlashPathMatches() {
    final String hostPath = "/foo/bar/cheese/wine";
    final String targetPath = hostPath + "/";
    assertTrue(VertxWebsocketHelper.webSocketHostPathMatches(hostPath, targetPath));
}
// Registers this node's remote-communication endpoint with the service
// discovery backend, and reports health accordingly.
// NOTE(review): only e.getMessage() is propagated on failure — the original
// stack trace is lost; consider passing the cause if ServiceRegisterException
// supports it.
@Override
public void registerRemote(RemoteInstance remoteInstance) throws ServiceRegisterException {
    try {
        // When an internal communication address is configured, advertise it
        // instead of the caller-supplied address.
        if (needUsingInternalAddr()) {
            remoteInstance = new RemoteInstance(new Address(config.getInternalComHost(), config.getInternalComPort(), true));
        }
        this.selfAddress = remoteInstance.getAddress();
        ServiceInstance<RemoteInstance> thisInstance = ServiceInstance.<RemoteInstance>builder().name(REMOTE_NAME_PATH)
            .id(UUID.randomUUID().toString())
            .address(remoteInstance.getAddress().getHost())
            .port(remoteInstance.getAddress().getPort())
            .payload(remoteInstance)
            .build();
        serviceDiscovery.registerService(thisInstance);
        this.healthChecker.health();
    } catch (Throwable e) {
        this.healthChecker.unHealth(e);
        throw new ServiceRegisterException(e.getMessage());
    }
}
// Without internal-address configuration, registration must advertise the
// caller-supplied address unchanged.
@Test
public void registerRemoteNoNeedInternal() throws Exception {
    RemoteInstance instance = new RemoteInstance(address);
    coordinator.registerRemote(instance);
    validateServiceInstance(address, instance);
}
/**
 * Marks the given worker as selected.
 *
 * @throws NonRecoverableException if no worker with {@code workerId} exists
 */
@Override
public void selectWorker(long workerId) throws NonRecoverableException {
    if (getWorkerById(workerId) == null) {
        // presumably throws NonRecoverableException so execution never reaches
        // selectWorkerUnchecked for an unknown id — TODO confirm
        reportWorkerNotFoundException();
    }
    selectWorkerUnchecked(workerId);
}
// Sweeps worker ids around the known range: available ids must be selectable,
// every other id must raise NonRecoverableException.
@Test
public void testSelectWorker() throws UserException {
    DefaultWorkerProvider workerProvider = new DefaultWorkerProvider(id2Backend, id2ComputeNode,
        availableId2Backend, availableId2ComputeNode, true);
    for (long id = -1; id < 20; id++) {
        if (availableId2Worker.containsKey(id)) {
            workerProvider.selectWorker(id);
            testUsingWorkerHelper(workerProvider, id);
        } else {
            long finalId = id;
            Assert.assertThrows(NonRecoverableException.class, () -> workerProvider.selectWorker(finalId));
        }
    }
}
public BrokerFileSystem getFileSystem(String path, Map<String, String> properties) { WildcardURI pathUri = new WildcardURI(path); String scheme = pathUri.getUri().getScheme(); if (Strings.isNullOrEmpty(scheme)) { throw new BrokerException(TBrokerOperationStatusCode.INVALID_INPUT_FILE_PATH, "invalid path. scheme is null"); } BrokerFileSystem brokerFileSystem = null; if (scheme.equals(HDFS_SCHEME) || scheme.equals(VIEWFS_SCHEME)) { brokerFileSystem = getDistributedFileSystem(scheme, path, properties); } else if (scheme.equals(S3A_SCHEME)) { brokerFileSystem = getS3AFileSystem(path, properties); } else if (scheme.equals(OSS_SCHEME)) { brokerFileSystem = getOSSFileSystem(path, properties); } else if (scheme.equals(COS_SCHEME)) { brokerFileSystem = getCOSFileSystem(path, properties); } else if (scheme.equals(KS3_SCHEME)) { brokerFileSystem = getKS3FileSystem(path, properties); } else if (scheme.equals(OBS_SCHEME)) { brokerFileSystem = getOBSFileSystem(path, properties); } else if (scheme.equals(TOS_SCHEME)) { brokerFileSystem = getTOSFileSystem(path, properties); } else { // If all above match fails, then we will read the settings from hdfs-site.xml, core-site.xml of FE, // and try to create a universal file system. The reason why we can do this is because hadoop/s3 // SDK is compatible with nearly all file/object storage system brokerFileSystem = getUniversalFileSystem(path, properties); } return brokerFileSystem; }
// HDFS HA configuration (nameservices + namenode rpc addresses + failover
// provider) must yield a usable DFS-backed file system.
@Test
public void testGetFileSystemForhHA() throws IOException {
    Map<String, String> properties = new HashMap<String, String>();
    properties.put("username", "user");
    properties.put("password", "passwd");
    properties.put("fs.defaultFS", "hdfs://starrocks");
    properties.put("dfs.nameservices", "starrocks");
    properties.put("dfs.ha.namenodes.starrocks", "nn1,nn2");
    properties.put("dfs.namenode.rpc-address.starrocks.nn1", "host1:port1");
    properties.put("dfs.namenode.rpc-address.starrocks.nn2", "host2:port2");
    properties.put("dfs.client.failover.proxy.provider.bdos",
        "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
    BrokerFileSystem fs = fileSystemManager.getFileSystem(testHdfsHost + "/data/abc/logs", properties);
    assertNotNull(fs);
    fs.getDFSFileSystem().close();
}
/** Convenience overload: evaluates the globally registered status callback. */
public static Status getServiceStatus() {
    return getServiceStatus(SERVICE_STATUS_CALLBACK);
}
// Exercises the ideal-state vs external-view matching callback across many
// scenarios: missing state, partial segment onlining, disabled resources,
// offline/error segments, and foreign instances. Also checks that once GOOD is
// reported the status is sticky.
@Test
public void testIdealStateMatch() {
    TestIdealStateAndExternalViewMatchServiceStatusCallback callback;
    // No ideal state = GOOD
    callback = buildTestISEVCallback();
    callback.setExternalView(new ExternalView(TABLE_NAME));
    assertEquals(callback.getServiceStatus(), ServiceStatus.Status.GOOD);
    // No external view, and ideal state shows this instance is assigned a segment of the resource = STARTING
    callback = buildTestISEVCallback();
    ZNRecord znRecord = new ZNRecord(TABLE_NAME);
    znRecord.setSimpleField("REBALANCE_MODE", "CUSTOMIZED");
    znRecord.setMapField("segment1", Map.of(INSTANCE_NAME, "ONLINE"));
    callback.setIdealState(new IdealState(znRecord));
    assertEquals(callback.getServiceStatus(), ServiceStatus.Status.STARTING);
    // No external view, and ideal state shows this instance is not assigned a segment of the resource = GOOD
    callback = buildTestISEVCallback();
    znRecord = new ZNRecord(TABLE_NAME);
    znRecord.setSimpleField("REBALANCE_MODE", "CUSTOMIZED");
    znRecord.setMapField("segment1", Map.of("otherServerInstance", "ONLINE"));
    callback.setIdealState(new IdealState(znRecord));
    assertEquals(callback.getServiceStatus(), ServiceStatus.Status.GOOD);
    // Online ideal state, and external view shows second segment is still offline = STARTING
    callback = buildTestISEVCallback();
    znRecord = new ZNRecord(TABLE_NAME);
    znRecord.setSimpleField("REBALANCE_MODE", "CUSTOMIZED");
    znRecord.setMapField("segment1", Map.of(INSTANCE_NAME, "ONLINE"));
    znRecord.setMapField("segment2", Map.of(INSTANCE_NAME, "ONLINE"));
    callback.setIdealState(new IdealState(znRecord));
    ExternalView externalView = new ExternalView(TABLE_NAME);
    externalView.setState("segment1", INSTANCE_NAME, "ONLINE");
    externalView.setState("segment2", INSTANCE_NAME, "OFFLINE");
    callback.setExternalView(externalView);
    assertEquals(callback.getServiceStatus(), ServiceStatus.Status.STARTING);
    // Empty ideal state + empty external view = GOOD
    callback = buildTestISEVCallback();
    callback.setIdealState(new IdealState(TABLE_NAME));
    callback.setExternalView(new ExternalView(TABLE_NAME));
    assertEquals(callback.getServiceStatus(), ServiceStatus.Status.GOOD);
    // Once the status is GOOD, it should keep on reporting GOOD no matter what
    callback.setIdealState(null);
    callback.setExternalView(null);
    assertEquals(callback.getServiceStatus(), ServiceStatus.Status.GOOD);
    // Non empty ideal state + empty external view = STARTING
    callback = buildTestISEVCallback();
    IdealState idealState = new IdealState(TABLE_NAME);
    idealState.setRebalanceMode(IdealState.RebalanceMode.CUSTOMIZED);
    idealState.setPartitionState("mySegment", INSTANCE_NAME, "ONLINE");
    callback.setIdealState(idealState);
    callback.setExternalView(new ExternalView(TABLE_NAME));
    assertEquals(callback.getServiceStatus(), ServiceStatus.Status.STARTING);
    // Should be good if the only ideal state is disabled
    callback.getResourceIdealState(TABLE_NAME).enable(false);
    assertEquals(callback.getServiceStatus(), ServiceStatus.Status.GOOD);
    // Should ignore offline segments in ideal state
    callback = buildTestISEVCallback();
    idealState = new IdealState(TABLE_NAME);
    idealState.setRebalanceMode(IdealState.RebalanceMode.CUSTOMIZED);
    idealState.setPartitionState("mySegment_1", INSTANCE_NAME, "ONLINE");
    idealState.setPartitionState("mySegment_2", INSTANCE_NAME, "OFFLINE");
    callback.setIdealState(idealState);
    externalView = new ExternalView(TABLE_NAME);
    externalView.setState("mySegment_1", INSTANCE_NAME, "ONLINE");
    callback.setExternalView(externalView);
    assertEquals(callback.getServiceStatus(), ServiceStatus.Status.GOOD);
    // Should ignore segments in error state in external view
    callback = buildTestISEVCallback();
    idealState = new IdealState(TABLE_NAME);
    idealState.setRebalanceMode(IdealState.RebalanceMode.CUSTOMIZED);
    idealState.setPartitionState("mySegment_1", INSTANCE_NAME, "ONLINE");
    idealState.setPartitionState("mySegment_2", INSTANCE_NAME, "OFFLINE");
    callback.setIdealState(idealState);
    externalView = new ExternalView(TABLE_NAME);
    externalView.setState("mySegment_1", INSTANCE_NAME, "ERROR");
    callback.setExternalView(externalView);
    assertEquals(callback.getServiceStatus(), ServiceStatus.Status.GOOD);
    // Should ignore other instances
    callback = buildTestISEVCallback();
    idealState = new IdealState(TABLE_NAME);
    idealState.setRebalanceMode(IdealState.RebalanceMode.CUSTOMIZED);
    idealState.setPartitionState("mySegment_1", INSTANCE_NAME, "ONLINE");
    idealState.setPartitionState("mySegment_2", INSTANCE_NAME + "2", "ONLINE");
    callback.setIdealState(idealState);
    externalView = new ExternalView(TABLE_NAME);
    externalView.setState("mySegment_1", INSTANCE_NAME, "ONLINE");
    externalView.setState("mySegment_2", INSTANCE_NAME + "2", "OFFLINE");
    callback.setExternalView(externalView);
    assertEquals(callback.getServiceStatus(), ServiceStatus.Status.GOOD);
}
/**
 * FEEL all()-style function: a null list is vacuously true; null elements are
 * skipped; any non-Boolean element is an error; otherwise returns the
 * conjunction of all Boolean elements.
 */
public FEELFnResult<Boolean> invoke(@ParameterName("list") List list) {
    if (list == null) {
        return FEELFnResult.ofResult(true);
    }
    boolean conjunction = true;
    for (final Object element : list) {
        if (element == null) {
            // null entries are ignored rather than treated as false
            continue;
        }
        if (!(element instanceof Boolean)) {
            return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "an element in the list is not" +
                    " a Boolean"));
        }
        conjunction &= (Boolean) element;
    }
    return FEELFnResult.ofResult(conjunction);
}
// With no elements to contradict the conjunction, all([]) is true.
@Test
void invokeListParamEmptyList() {
    final FEELFnResult<Boolean> result = nnAllFunction.invoke(Collections.emptyList());
    FunctionTestUtil.assertResult(result, true);
}
public static LabelSelector clusterOperatorNamespaceSelector(String operandNamespace, String operatorNamespace, Labels operatorNamespaceLabels) { if (!operandNamespace.equals(operatorNamespace)) { // If CO and the operand do not run in the same namespace, we need to handle cross namespace access if (operatorNamespaceLabels != null && !operatorNamespaceLabels.toMap().isEmpty()) { // If user specified the namespace labels, we can use them to make the network policy as tight as possible return new LabelSelectorBuilder().withMatchLabels(operatorNamespaceLabels.toMap()).build(); } else { // If no namespace labels were specified, we open the network policy to COs in all namespaces by returning empty map => selector which match everything return new LabelSelector(); } } else { // They are in the dame namespace => we do not want to set any namespace selector and allow communication only within the same namespace return null; } }
// Covers all selector cases: same namespace (null), cross-namespace with
// null/empty labels (match-everything), and cross-namespace with labels.
@Test
public void testClusterOperatorNamespaceSelector() {
    assertThat(NetworkPolicyUtils.clusterOperatorNamespaceSelector("my-ns", "my-ns", null), is(nullValue()));
    assertThat(NetworkPolicyUtils.clusterOperatorNamespaceSelector("my-ns", "my-operator-ns", null).getMatchLabels(), is(Map.of()));
    assertThat(NetworkPolicyUtils.clusterOperatorNamespaceSelector("my-ns", "my-operator-ns", Labels.EMPTY).getMatchLabels(), is(Map.of()));
    assertThat(NetworkPolicyUtils.clusterOperatorNamespaceSelector("my-ns", "my-operator-ns", Labels.fromMap(Map.of("labelKey", "labelValue"))).getMatchLabels(), is(Map.of("labelKey", "labelValue")));
}
// Dispatches JSON deserialization to the concrete CRMaterial subtype based on
// the TYPE / ARTIFACT_ORIGIN discriminator fields in the payload.
@Override
public CRMaterial deserialize(JsonElement json, Type type, JsonDeserializationContext context) throws JsonParseException {
    return determineJsonElementForDistinguishingImplementers(json, context, TYPE, ARTIFACT_ORIGIN);
}
// A "type":"svn" payload must be routed to CRSvnMaterial deserialization.
@Test
public void shouldDeserializeSvnMaterialType() {
    JsonObject jsonObject = new JsonObject();
    jsonObject.addProperty("type", "svn");
    materialTypeAdapter.deserialize(jsonObject, type, jsonDeserializationContext);
    verify(jsonDeserializationContext).deserialize(jsonObject, CRSvnMaterial.class);
}
// Multimap equality assertion following Multimap.equals semantics. On failure
// the message is specialized: a type-mismatch note when a ListMultimap is
// compared with a SetMultimap, or an element-level diff (in order for
// ListMultimaps) otherwise.
@Override
public final void isEqualTo(@Nullable Object other) {
    @SuppressWarnings("UndefinedEquals") // the contract of this method is to follow Multimap.equals
    boolean isEqual = Objects.equal(actual, other);
    if (isEqual) {
        return;
    }
    // Fail but with a more descriptive message:
    if ((actual instanceof ListMultimap && other instanceof SetMultimap)
            || (actual instanceof SetMultimap && other instanceof ListMultimap)) {
        // Different multimap flavours can only be equal when both are empty.
        String actualType = (actual instanceof ListMultimap) ? "ListMultimap" : "SetMultimap";
        String otherType = (other instanceof ListMultimap) ? "ListMultimap" : "SetMultimap";
        failWithoutActual(
            fact("expected", other),
            fact("an instance of", otherType),
            fact("but was", actualCustomStringRepresentationForPackageMembersToCall()),
            fact("an instance of", actualType),
            simpleFact(
                lenientFormat(
                    "a %s cannot equal a %s if either is non-empty", actualType, otherType)));
    } else if (actual instanceof ListMultimap) {
        // Element order is significant for ListMultimaps.
        containsExactlyEntriesIn((Multimap<?, ?>) checkNotNull(other)).inOrder();
    } else if (actual instanceof SetMultimap) {
        containsExactlyEntriesIn((Multimap<?, ?>) checkNotNull(other));
    } else {
        super.isEqualTo(other);
    }
}
// SetMultimaps with the same entries in different insertion order are equal,
// and the subject assertion must agree with Multimap.equals.
@Test
public void setMultimapIsEqualTo_passes() {
    ImmutableSetMultimap<String, String> multimapA =
        ImmutableSetMultimap.<String, String>builder()
            .putAll("kurt", "kluever", "russell", "cobain")
            .build();
    ImmutableSetMultimap<String, String> multimapB =
        ImmutableSetMultimap.<String, String>builder()
            .putAll("kurt", "kluever", "cobain", "russell")
            .build();
    assertThat(multimapA.equals(multimapB)).isTrue();
    assertThat(multimapA).isEqualTo(multimapB);
}
/** Returns the maximum number of connections allowed for this user. */
public long getMaxConn() {
    return maxConn;
}
// A freshly constructed UserProperty starts with the default connection cap of 1024.
@Test
public void testGetMaxConn() {
    Assert.assertEquals(1024, new UserProperty().getMaxConn());
}
/** Returns the XA driver class names supported for H2 (only JdbcDataSource). */
@Override
public Collection<String> getXADriverClassNames() {
    return Collections.singletonList("org.h2.jdbcx.JdbcDataSource");
}
// H2's XA driver list contains exactly the JdbcDataSource class name.
@Test
void assertGetXADriverClassName() {
    final Collection<String> driverClassNames = new H2XADataSourceDefinition().getXADriverClassNames();
    assertThat(driverClassNames, is(Collections.singletonList("org.h2.jdbcx.JdbcDataSource")));
}
// Downloads the agent binaries (agent jar, plugins, TFS impl) if stale, spawns
// the agent JVM, wires its stdout/stderr into log appenders, and blocks until
// the agent exits. Installs a shutdown hook so the child is killed with the
// launcher. Returns the agent's exit code, or EXCEPTION_OCCURRED on failure.
@Override
public int run(String launcherVersion, String launcherMd5, ServerUrlGenerator urlGenerator, Map<String, String> env, Map<String, String> context) {
    int exitValue = 0;
    LOG.info("Agent launcher is version: {}", CurrentGoCDVersion.getInstance().fullVersion());
    String[] command = new String[]{};
    try {
        AgentBootstrapperArgs bootstrapperArgs = AgentBootstrapperArgs.fromProperties(context);
        // Refresh each downloadable artifact; MD5s feed into the agent command line.
        ServerBinaryDownloader agentDownloader = new ServerBinaryDownloader(urlGenerator, bootstrapperArgs);
        agentDownloader.downloadIfNecessary(DownloadableFile.AGENT);
        ServerBinaryDownloader pluginZipDownloader = new ServerBinaryDownloader(urlGenerator, bootstrapperArgs);
        pluginZipDownloader.downloadIfNecessary(DownloadableFile.AGENT_PLUGINS);
        ServerBinaryDownloader tfsImplDownloader = new ServerBinaryDownloader(urlGenerator, bootstrapperArgs);
        tfsImplDownloader.downloadIfNecessary(DownloadableFile.TFS_IMPL);
        command = agentInvocationCommand(agentDownloader.getMd5(), launcherMd5, pluginZipDownloader.getMd5(),
            tfsImplDownloader.getMd5(), env, context, agentDownloader.getExtraProperties());
        LOG.info("Launching Agent with command: {}", join(command, " "));
        Process agent = invoke(command);
        // The next lines prevent the child process from blocking on Windows
        AgentOutputAppender agentOutputAppenderForStdErr = new AgentOutputAppender(GO_AGENT_STDERR_LOG);
        AgentOutputAppender agentOutputAppenderForStdOut = new AgentOutputAppender(GO_AGENT_STDOUT_LOG);
        if (new SystemEnvironment().consoleOutToStdout()) {
            agentOutputAppenderForStdErr.writeTo(AgentOutputAppender.Outstream.STDERR);
            agentOutputAppenderForStdOut.writeTo(AgentOutputAppender.Outstream.STDOUT);
        }
        agent.getOutputStream().close();
        AgentConsoleLogThread stdErrThd = new AgentConsoleLogThread(agent.getErrorStream(), agentOutputAppenderForStdErr);
        stdErrThd.start();
        AgentConsoleLogThread stdOutThd = new AgentConsoleLogThread(agent.getInputStream(), agentOutputAppenderForStdOut);
        stdOutThd.start();
        Shutdown shutdownHook = new Shutdown(agent);
        Runtime.getRuntime().addShutdownHook(shutdownHook);
        try {
            exitValue = agent.waitFor();
        } catch (InterruptedException ie) {
            LOG.error("Agent was interrupted. Terminating agent and respawning. {}", ie.toString());
            agent.destroy();
        } finally {
            // Hook is only needed while the child is alive; always drain log threads.
            removeShutdownHook(shutdownHook);
            stdErrThd.stopAndJoin();
            stdOutThd.stopAndJoin();
        }
    } catch (Exception e) {
        LOG.error("Exception while executing command: {} - {}", join(command, " "), e.toString());
        exitValue = EXCEPTION_OCCURRED;
    }
    return exitValue;
}
// When the bootstrapper version is present in the context, the launched agent
// command line must carry it as -Dagent.bootstrapper.version alongside the
// artifact MD5 properties, in the expected argument order.
@Test
public void shouldAddBootstrapperVersionAsPropertyIfFoundInContext() throws InterruptedException {
    final List<String> cmd = new ArrayList<>();
    String expectedAgentMd5 = TEST_AGENT.getMd5();
    String expectedAgentPluginsMd5 = TEST_AGENT_PLUGINS.getMd5();
    String expectedTfsMd5 = TEST_TFS_IMPL.getMd5();
    AgentProcessParentImpl bootstrapper = createBootstrapper(cmd);
    Map<String, String> context = context();
    context.put(GoConstants.AGENT_BOOTSTRAPPER_VERSION, "20.3.0-1234");
    int returnCode = bootstrapper.run("launcher_version", "bar", getURLGenerator(), new HashMap<>(), context);
    assertThat(returnCode, is(42));
    assertThat(cmd.toArray(new String[]{}), equalTo(new String[]{
        (getProperty("java.home") + FileSystems.getDefault().getSeparator() + "bin" + FileSystems.getDefault().getSeparator() + "java"),
        "-Dagent.plugins.md5=" + expectedAgentPluginsMd5,
        "-Dagent.binary.md5=" + expectedAgentMd5,
        "-Dagent.launcher.md5=bar",
        "-Dagent.tfs.md5=" + expectedTfsMd5,
        "-Dagent.bootstrapper.version=20.3.0-1234",
        "-jar",
        "agent.jar",
        "-serverUrl",
        "http://localhost:" + server.getPort() + "/go/",
        "-sslVerificationMode",
        "NONE",
        "-rootCertFile",
        new File("/path/to/cert.pem").getAbsolutePath()
    }));
}
/**
 * Appends the given query properties to a JDBC URL.
 *
 * If any of the new properties conflict with properties already present in the URL,
 * the URL's query part is rebuilt from the merged property set; otherwise the new
 * properties are simply appended with the appropriate delimiter ('?' or '&').
 *
 * @param jdbcUrl    original JDBC URL (its current query properties are parsed via
 *                   the database-typed ConnectionPropertiesParser SPI)
 * @param queryProps properties to append
 * @return JDBC URL with the query properties applied
 */
public String appendQueryProperties(final String jdbcUrl, final Properties queryProps) {
    Properties currentQueryProps = DatabaseTypedSPILoader.getService(ConnectionPropertiesParser.class, DatabaseTypeFactory.get(jdbcUrl)).parse(jdbcUrl, null, null).getQueryProperties();
    return hasConflictedQueryProperties(currentQueryProps, queryProps)
            // Conflict: keep everything before (and including) '?', regenerate the query part.
            ? concat(jdbcUrl.substring(0, jdbcUrl.indexOf('?') + 1), getMergedProperties(currentQueryProps, queryProps))
            // No conflict: append the new properties after the proper delimiter.
            : concat(jdbcUrl + getURLDelimiter(currentQueryProps), queryProps);
}
// Verifies that appending properties which already exist in the URL (with the same
// values) keeps the URL prefix intact and still contains both properties.
@Test
void assertAppendQueryPropertiesWithOriginalQueryProperties() {
    String actual = new JdbcUrlAppender().appendQueryProperties(
            "jdbc:trunk://192.168.0.1:3306/foo_ds?useSSL=false&rewriteBatchedStatements=true",
            PropertiesBuilder.build(new Property("useSSL", Boolean.FALSE.toString()), new Property("rewriteBatchedStatements", Boolean.TRUE.toString())));
    assertThat(actual, startsWith("jdbc:trunk://192.168.0.1:3306/foo_ds?"));
    assertThat(actual, containsString("rewriteBatchedStatements=true"));
    assertThat(actual, containsString("useSSL=false"));
}
/**
 * Decides whether the given issue counts as "new code" for the current analysis.
 *
 * Pull-request analyses treat every issue as new. On branches, an issue is new when
 * it falls inside the new-code period (by date), or — for reference-branch periods —
 * when at least one of its locations touches changed lines of the component.
 */
public boolean isNew(Component component, DefaultIssue issue) {
    if (analysisMetadataHolder.isPullRequest()) {
        return true;
    }
    if (!periodHolder.hasPeriod()) {
        return false;
    }
    if (periodHolder.hasPeriodDate()) {
        return periodHolder.getPeriod().isOnPeriod(issue.creationDate());
    }
    return isOnBranchUsingReferenceBranch() && hasAtLeastOneLocationOnChangedLines(component, issue);
}
// Verifies that an issue created before the new-code period start (creation date 500
// vs period date 1000) is not classified as new, and that only creationDate() is read.
@Test
public void isNew_returns_false_if_issue_is_not_on_period() {
    periodHolder.setPeriod(new Period(NewCodePeriodType.NUMBER_OF_DAYS.name(), "10", 1000L));
    DefaultIssue issue = mock(DefaultIssue.class);
    when(issue.creationDate()).thenReturn(new Date(500L));
    assertThat(newIssueClassifier.isNew(mock(Component.class), issue)).isFalse();
    verify(issue).creationDate();
    verifyNoMoreInteractions(issue);
}
/**
 * Returns the SVG markup for the requested symbol, decorated with the attributes
 * carried by the request (tooltip, html tooltip, id, classes, visually-hidden title).
 *
 * Symbols are cached per plugin ("core" when no plugin name is given); decoration is
 * applied on every call so the same cached symbol can be rendered with different
 * attributes (see the cached-symbol tests).
 *
 * @param request symbol request; {@code name} selects the symbol, all other fields
 *                are optional decorations
 * @return decorated SVG markup, possibly prefixed with a visually-hidden title span
 */
public static String get(@NonNull SymbolRequest request) {
    String name = request.getName();
    String title = request.getTitle();
    String tooltip = request.getTooltip();
    String htmlTooltip = request.getHtmlTooltip();
    String classes = request.getClasses();
    String pluginName = request.getPluginName();
    String id = request.getId();
    String identifier = (pluginName == null || pluginName.isBlank()) ? "core" : pluginName;
    String symbol = SYMBOLS
            .computeIfAbsent(identifier, key -> new ConcurrentHashMap<>())
            .computeIfAbsent(name, key -> loadSymbol(identifier, key));
    // Plain tooltip only applies when no HTML tooltip was requested.
    if ((tooltip != null && !tooltip.isBlank()) && (htmlTooltip == null || htmlTooltip.isBlank())) {
        symbol = symbol.replaceAll("<svg", Matcher.quoteReplacement("<svg tooltip=\"" + Functions.htmlAttributeEscape(tooltip) + "\""));
    }
    if (htmlTooltip != null && !htmlTooltip.isBlank()) {
        symbol = symbol.replaceAll("<svg", Matcher.quoteReplacement("<svg data-html-tooltip=\"" + Functions.htmlAttributeEscape(htmlTooltip) + "\""));
    }
    if (id != null && !id.isBlank()) {
        symbol = symbol.replaceAll("<svg", Matcher.quoteReplacement("<svg id=\"" + Functions.htmlAttributeEscape(id) + "\""));
    }
    if (classes != null && !classes.isBlank()) {
        // Bug fix: the replacement must be quoted like the other branches, otherwise a
        // '$' or '\' surviving htmlAttributeEscape is interpreted as a regex group
        // reference by replaceAll and corrupts the output (or throws).
        symbol = symbol.replaceAll("<svg", Matcher.quoteReplacement("<svg class=\"" + Functions.htmlAttributeEscape(classes) + "\""));
    }
    if (title != null && !title.isBlank()) {
        symbol = "<span class=\"jenkins-visually-hidden\">" + Util.xmlEscape(title) + "</span>" + symbol;
    }
    return symbol;
}
// Verifies that a second request for an already-cached symbol applies its own
// title/tooltip/classes/id and carries no attributes from the first request.
@Test
@DisplayName("Given a cached symbol, a new request can specify new attributes to use")
void getSymbol_cachedSymbolAllowsSettingAllAttributes() {
    // First request populates the cache with its own decorations.
    Symbol.get(new SymbolRequest.Builder()
            .withName("science")
            .withTitle("Title")
            .withTooltip("Tooltip")
            .withClasses("class1 class2")
            .withId("id")
            .build()
    );
    // Second request for the same symbol must reflect only its own attributes.
    String symbol = Symbol.get(new SymbolRequest.Builder()
            .withName("science")
            .withTitle("Title2")
            .withTooltip("Tooltip2")
            .withClasses("class3 class4")
            .withId("id2")
            .build()
    );
    assertThat(symbol, containsString(SCIENCE_PATH));
    assertThat(symbol, not(containsString("<span class=\"jenkins-visually-hidden\">Title</span>")));
    assertThat(symbol, not(containsString("tooltip=\"Tooltip\"")));
    assertThat(symbol, not(containsString("class=\"class1 class2\"")));
    assertThat(symbol, not(containsString("id=\"id\"")));
    assertThat(symbol, containsString("<span class=\"jenkins-visually-hidden\">Title2</span>"));
    assertThat(symbol, containsString("tooltip=\"Tooltip2\""));
    assertThat(symbol, containsString("class=\"class3 class4\""));
    assertThat(symbol, containsString("id=\"id2\""));
}
/**
 * Obtains an exclusive file lock named {@code lockName} inside {@code dirPath},
 * creating the directory and lock file as needed.
 *
 * Two layers of exclusion are enforced:
 * 1. intra-JVM, via the LOCK_HELD set of canonical lock paths (OS file locks are
 *    per-process, so the same JVM could otherwise "lock" the file twice);
 * 2. inter-process, via {@link FileChannel#tryLock()}.
 *
 * @param dirPath  directory that holds the lock file (created if absent)
 * @param lockName lock file name
 * @return the acquired lock; callers must release it to free both layers
 * @throws LockException if the lock is held by this JVM or another process
 * @throws IOException   on other I/O failures
 */
public static FileLock obtainLock(Path dirPath, String lockName) throws IOException {
    if (!Files.isDirectory(dirPath)) {
        Files.createDirectories(dirPath);
    }
    Path lockPath = dirPath.resolve(lockName);
    try {
        Files.createFile(lockPath);
    } catch (IOException ignore) {
        // we must create the file to have a truly canonical path.
        // if it's already created, we don't care. if it can't be created, it will fail below.
    }
    // fails if the lock file does not exist
    final Path realLockPath = lockPath.toRealPath();
    if (!LOCK_HELD.add(realLockPath.toString())) {
        throw new LockException("Lock held by this virtual machine on lock path: " + realLockPath);
    }
    FileChannel channel = null;
    FileLock lock;
    try {
        channel = FileChannel.open(realLockPath, StandardOpenOption.CREATE, StandardOpenOption.WRITE);
        lock = channel.tryLock();
        if (lock == null) {
            // tryLock returns null when another process holds the lock.
            throw new LockException("Lock held by another program on lock path: " + realLockPath);
        }
    } catch (IOException ex) {
        // Roll back: close the channel (best effort) and release the intra-JVM marker.
        try {
            if (channel != null) {
                channel.close();
            }
        } catch (Throwable t) {
            // suppress any channel close exceptions
        }
        boolean removed = LOCK_HELD.remove(realLockPath.toString());
        if (!removed) {
            // Invariant violation: we added the path above, so it must still be there.
            throw new LockException("Lock path was cleared but never marked as held: " + realLockPath, ex);
        }
        throw ex;
    }
    // Remember the path so releasing the lock can clear LOCK_HELD.
    LOCK_MAP.put(lock, realLockPath.toString());
    return lock;
}
// Verifies that a second, differently-named lock in the same directory can be
// obtained and is a valid exclusive (non-shared) lock.
@Test
public void ObtainLockOnOtherLocked() throws IOException {
    FileLock lock2 = FileLockFactory.obtainLock(lockDir, ".test2");
    assertThat(lock2.isValid(), is(equalTo(true)));
    assertThat(lock2.isShared(), is(equalTo(false)));
}
/**
 * Exports build information as a gauge metric.
 *
 * Fetches (or creates) the collector registered for this config and plugin type,
 * resets its metrics, and records a single gauge labelled with the product name and
 * the ShardingSphere version.
 *
 * @param pluginType metrics plugin type
 * @return the populated collector, always present
 */
@Override
public Optional<GaugeMetricFamilyMetricsCollector> export(final String pluginType) {
    GaugeMetricFamilyMetricsCollector collector = MetricsCollectorRegistry.get(config, pluginType);
    collector.cleanMetrics();
    collector.addMetric(Arrays.asList("ShardingSphere", ShardingSphereVersion.VERSION), 1D);
    return Optional.of(collector);
}
// Verifies that export() returns a collector containing a gauge labelled with
// "ShardingSphere" and the current version, both rendered with value 1.
@Test
void assertExport() {
    Optional<GaugeMetricFamilyMetricsCollector> collector = new BuildInfoExporter().export("FIXTURE");
    assertTrue(collector.isPresent());
    assertThat(collector.get().toString(), containsString("ShardingSphere=1"));
    assertThat(collector.get().toString(), containsString(String.format("%s=1", ShardingSphereVersion.VERSION)));
}
/**
 * Converts a camelCase / PascalCase identifier to snake_case.
 *
 * Uppercase letters are lowercased; an underscore is inserted before an uppercase
 * letter only when the previous character was not uppercase, so runs of capitals
 * (e.g. "FORTRAN") collapse without separators and no leading underscore is emitted.
 */
static String toSnakeCase(String string) {
    StringBuilder result = new StringBuilder();
    boolean previousWasUpper = true; // true initially to suppress a leading underscore
    for (char ch : string.toCharArray()) {
        boolean isUpper = Character.isUpperCase(ch);
        if (isUpper && !previousWasUpper) {
            result.append('_');
        }
        result.append(isUpper ? Character.toLowerCase(ch) : ch);
        previousWasUpper = isUpper;
    }
    return result.toString();
}
// Verifies snake_case conversion for empty input, PascalCase, camelCase, and
// all-uppercase identifiers.
@Test
public void testToSnakeCase() {
    assertEquals("", MessageGenerator.toSnakeCase(""));
    assertEquals("foo_bar_baz", MessageGenerator.toSnakeCase("FooBarBaz"));
    assertEquals("foo_bar_baz", MessageGenerator.toSnakeCase("fooBarBaz"));
    assertEquals("fortran", MessageGenerator.toSnakeCase("FORTRAN"));
}
/**
 * Returns the ids of all currently registered clients.
 *
 * NOTE(review): this returns the live key-set view of the backing map, not a copy —
 * it reflects concurrent registrations/removals, and mutating it would mutate the
 * map. Confirm callers treat it as read-only.
 */
@Override
public Collection<String> allClientId() {
    return clients.keySet();
}
// Verifies that both the ephemeral and the synced client registered in the fixture
// appear in allClientId(), and nothing else does.
@Test
void testAllClientId() {
    Collection<String> allClientIds = ephemeralIpPortClientManager.allClientId();
    assertEquals(2, allClientIds.size());
    assertTrue(allClientIds.contains(ephemeralIpPortId));
    assertTrue(allClientIds.contains(syncedClientId));
}
/**
 * Extracts the two output type informations of a TwoOutputStreamProcessFunction via
 * Flink's TypeExtractor.
 *
 * The integer arguments select generic parameters of the function interface: input
 * is parameter 0; outputs are parameters 1 and 2. The lambda-output index arrays
 * ({1,0} / {2,0}) point at the element type of the corresponding Collector argument.
 *
 * @param twoOutputStreamProcessFunction the user function to inspect
 * @param inTypeInformation              type information of the input
 * @return tuple of (first output type, second output type)
 */
public static <IN, OUT1, OUT2> Tuple2<TypeInformation<OUT1>, TypeInformation<OUT2>> getOutputTypesForTwoOutputProcessFunction(
        TwoOutputStreamProcessFunction<IN, OUT1, OUT2> twoOutputStreamProcessFunction,
        TypeInformation<IN> inTypeInformation) {
    TypeInformation<OUT1> firstOutputType =
            TypeExtractor.getUnaryOperatorReturnType(
                    twoOutputStreamProcessFunction,
                    TwoOutputStreamProcessFunction.class,
                    0,
                    1,
                    new int[] {1, 0},
                    inTypeInformation,
                    Utils.getCallLocationName(),
                    true);
    TypeInformation<OUT2> secondOutputType =
            TypeExtractor.getUnaryOperatorReturnType(
                    twoOutputStreamProcessFunction,
                    TwoOutputStreamProcessFunction.class,
                    0,
                    2,
                    new int[] {2, 0},
                    inTypeInformation,
                    Utils.getCallLocationName(),
                    true);
    return Tuple2.of(firstOutputType, secondOutputType);
}
// Verifies that the Long and String output types of an anonymous
// TwoOutputStreamProcessFunction are correctly extracted.
@Test
void testTwoOutputType() {
    Tuple2<TypeInformation<Long>, TypeInformation<String>> outputType =
            StreamUtils.getOutputTypesForTwoOutputProcessFunction(
                    new TwoOutputStreamProcessFunction<Integer, Long, String>() {
                        @Override
                        public void processRecord(
                                Integer record,
                                Collector<Long> output1,
                                Collector<String> output2,
                                PartitionedContext ctx)
                                throws Exception {
                            // ignore
                        }
                    },
                    Types.INT);
    assertThat(outputType.f0).isEqualTo(Types.LONG);
    assertThat(outputType.f1).isEqualTo(Types.STRING);
}
/**
 * Creates a wrapper around a loaded plugin.
 *
 * @param parent               owning plugin manager
 * @param archive              plugin archive file; its name is the fallback short name
 * @param manifest             plugin manifest
 * @param baseResourceURL      base URL for the plugin's static resources
 * @param classLoader          class loader serving the plugin's classes
 * @param disableFile          marker file whose existence means "plugin disabled"
 * @param dependencies         mandatory plugin dependencies
 * @param optionalDependencies optional plugin dependencies; each entry must have its
 *                             optional flag set (asserted below)
 */
public PluginWrapper(PluginManager parent, File archive, Manifest manifest, URL baseResourceURL,
                     ClassLoader classLoader, File disableFile,
                     List<Dependency> dependencies, List<Dependency> optionalDependencies) {
    this.parent = parent;
    this.manifest = manifest;
    // Short names repeat across many wrappers; intern to save memory.
    this.shortName = Util.intern(computeShortName(manifest, archive.getName()));
    this.baseResourceURL = baseResourceURL;
    this.classLoader = classLoader;
    this.disableFile = disableFile;
    // Enabled state is derived from the marker file at construction time.
    this.active = !disableFile.exists();
    this.dependencies = dependencies;
    this.optionalDependencies = optionalDependencies;
    for (Dependency d : optionalDependencies) {
        assert d.optional : d + " included among optionalDependencies of " + shortName + " but was not marked optional";
    }
    this.archive = archive;
}
// Verifies that resolving dependencies fails with a descriptive message when an
// installed dependency (version 3) is older than the required version (5).
@Test
public void dependencyOutdated() {
    pluginWrapper("dependency").version("3").buildLoaded();
    PluginWrapper pw = pluginWrapper("dependee").deps("dependency:5").buildLoaded();
    final IOException ex = assertThrows(IOException.class, pw::resolvePluginDependencies);
    assertContains(ex,
            "Failed to load: Dependee (dependee 42)",
            "Update required: Dependency (dependency 3) to be updated to 5 or higher");
}
/**
 * Applies delivery-fee pricing to the order, dispatching on the delivery type:
 * store pick-up or express shipping. Does nothing when no delivery type is set.
 *
 * @param param  price-calculation request (carries the delivery type)
 * @param result price-calculation result to be adjusted with delivery fees
 */
@Override
public void calculate(TradePriceCalculateReqBO param, TradePriceCalculateRespBO result) {
    if (param.getDeliveryType() == null) {
        return;
    }
    // TODO @puhui999: need to validate whether some items cannot be picked up in store,
    // or cannot be shipped by express — i.e. that the delivery type matches the items.
    if (DeliveryTypeEnum.PICK_UP.getType().equals(param.getDeliveryType())) {
        calculateByPickUp(param);
    } else if (DeliveryTypeEnum.EXPRESS.getType().equals(param.getDeliveryType())) {
        calculateExpress(param, result);
    }
}
// Verifies free express shipping: when global free shipping is enabled and the order
// total (2200) reaches the free-shipping threshold (2200), the delivery price is 0
// for the order and for every item.
@Test
@DisplayName("全场包邮")
public void testCalculate_expressGlobalFree() {
    // Mock the trade config (global free shipping enabled, threshold 2200).
    when(tradeConfigService.getTradeConfig()).thenReturn(new TradeConfigDO().setDeliveryExpressFreeEnabled(true)
            .setDeliveryExpressFreePrice(2200));
    // Invoke the calculator under test.
    calculator.calculate(reqBO, resultBO);
    TradePriceCalculateRespBO.Price price = resultBO.getPrice();
    assertThat(price)
            .extracting("totalPrice","discountPrice","couponPrice","pointPrice","deliveryPrice","payPrice")
            .containsExactly(2200, 0, 0, 0, 0, 2200);
    assertThat(resultBO.getItems()).hasSize(3);
    // Assert: SKU1
    assertThat(resultBO.getItems().get(0))
            .extracting("price", "count","discountPrice" ,"couponPrice", "pointPrice","deliveryPrice","payPrice")
            .containsExactly(100, 2, 0, 0, 0, 0, 200);
    // Assert: SKU2
    assertThat(resultBO.getItems().get(1))
            .extracting("price", "count","discountPrice" ,"couponPrice", "pointPrice","deliveryPrice","payPrice")
            .containsExactly(200, 10, 0, 0, 0, 0, 2000);
    // Assert: SKU3 (not selected)
    assertThat(resultBO.getItems().get(2))
            .extracting("price", "count","discountPrice" ,"couponPrice", "pointPrice","deliveryPrice","payPrice")
            .containsExactly(300, 1, 0, 0, 0, 0, 300);
}
/**
 * Parses a version-range string of the form "[min,max]" / "(min,max)" (and mixed
 * bracket combinations) into a VersionRange.
 *
 * '[' / ']' mean inclusive bounds, '(' / ')' exclusive. An empty min or max side
 * means the absolute minimum/maximum version and is always treated as exclusive.
 * Note: validateRangeString (defined elsewhere) is assumed to reject strings that
 * are malformed or unsupported (e.g. the fully-open range "(,)") before this logic
 * runs — confirm against its implementation.
 *
 * @param rangeString textual range, e.g. "[1.0,2.0)"
 * @return parsed range
 * @throws IllegalArgumentException if min is not strictly less than max, or the
 *                                  string fails validation
 */
public static VersionRange parse(String rangeString) {
    validateRangeString(rangeString);
    Inclusiveness minVersionInclusiveness =
            rangeString.startsWith("[") ? Inclusiveness.INCLUSIVE : Inclusiveness.EXCLUSIVE;
    Inclusiveness maxVersionInclusiveness =
            rangeString.endsWith("]") ? Inclusiveness.INCLUSIVE : Inclusiveness.EXCLUSIVE;
    int commaIndex = rangeString.indexOf(',');
    String minVersionString = rangeString.substring(1, commaIndex).trim();
    Version minVersion;
    if (minVersionString.isEmpty()) {
        // Open lower bound: always exclusive, anchored at the minimum version.
        minVersionInclusiveness = Inclusiveness.EXCLUSIVE;
        minVersion = Version.minimum();
    } else {
        minVersion = Version.fromString(minVersionString);
    }
    String maxVersionString = rangeString.substring(commaIndex + 1, rangeString.length() - 1).trim();
    Version maxVersion;
    if (maxVersionString.isEmpty()) {
        // Open upper bound: always exclusive, anchored at the maximum version.
        maxVersionInclusiveness = Inclusiveness.EXCLUSIVE;
        maxVersion = Version.maximum();
    } else {
        maxVersion = Version.fromString(maxVersionString);
    }
    if (!minVersion.isLessThan(maxVersion)) {
        throw new IllegalArgumentException(
                String.format(
                        "Min version in range must be less than max version in range, got '%s'",
                        rangeString));
    }
    return builder()
            .setMinVersion(minVersion)
            .setMinVersionInclusiveness(minVersionInclusiveness)
            .setMaxVersion(maxVersion)
            .setMaxVersionInclusiveness(maxVersionInclusiveness)
            .build();
}
// Verifies that the fully-open range "(,)" is rejected with a dedicated
// "Infinity range is not supported" error.
@Test
public void parse_withMinimalToMaximalRange_throwsIllegalArgumentException() {
    IllegalArgumentException exception =
            assertThrows(IllegalArgumentException.class, () -> VersionRange.parse("(,)"));
    assertThat(exception).hasMessageThat().isEqualTo("Infinity range is not supported, got '(,)'");
}
/**
 * Jackson problem handler for unknown JSON properties with a runtime-togglable policy.
 *
 * When skipping is disabled, the standard UnrecognizedPropertyException is thrown;
 * when enabled, the offending value (scalar, object, or array) is skipped, the
 * exception message is logged at debug level, and deserialization continues.
 *
 * @return true when the unknown property was consumed (skipped)
 * @throws UnrecognizedPropertyException when skipping is disabled
 */
@Override
public boolean handleUnknownProperty(DeserializationContext deserializationContext, JsonParser p,
        JsonDeserializer<?> deserializer, Object beanOrClass, String propertyName) throws IOException {
    Collection<Object> knownProperties =
            deserializer == null ? null : deserializer.getKnownPropertyNames();
    UnrecognizedPropertyException exception =
            UnrecognizedPropertyException.from(p, beanOrClass, propertyName, knownProperties);
    if (!skipUnknownProperty) {
        throw exception;
    }
    if (log.isDebugEnabled()) {
        log.debug(exception.getMessage());
    }
    // Consume the unknown value entirely (handles nested objects/arrays too).
    p.skipChildren();
    return skipUnknownProperty;
}
// Exercises DynamicSkipUnknownPropertyHandler across four scenarios: toggling the
// skip flag on a mapper created with FAIL_ON_UNKNOWN_PROPERTIES enabled, toggling it
// after failures, and skipping unknown properties whose values are objects/arrays.
@Test
public void testHandleUnknownProperty() throws Exception{
    DynamicSkipUnknownPropertyHandler handler = new DynamicSkipUnknownPropertyHandler();
    handler.setSkipUnknownProperty(true);

    // Case 1: initial ObjectMapper with "enable feature".
    ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.enable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
    objectMapper.addHandler(handler);
    ObjectReader objectReader = objectMapper.readerFor(TestBean.class);

    // Assert skip unknown property and logging: objectMapper.
    String json = "{\"name1\": \"James\",\"nm\":\"Paul\",\"name2\":\"Eric\"}";
    TestBean testBean = objectMapper.readValue(json, TestBean.class);
    Assert.assertNull(testBean.getName());
    Assert.assertEquals(testBean.getName1(), "James");
    Assert.assertEquals(testBean.getName2(), "Eric");

    // Assert skip unknown property and logging: objectReader.
    testBean = objectReader.readValue(json, TestBean.class);
    Assert.assertNull(testBean.getName());
    Assert.assertEquals(testBean.getName1(), "James");
    Assert.assertEquals(testBean.getName2(), "Eric");

    // Assert failure on unknown property.
    handler.setSkipUnknownProperty(false);
    try {
        objectMapper.readValue(json, TestBean.class);
        Assert.fail("Expect UnrecognizedPropertyException when set skipUnknownProperty false.");
    } catch (UnrecognizedPropertyException e){
    }
    try {
        objectReader.readValue(json, TestBean.class);
        Assert.fail("Expect UnrecognizedPropertyException when set skipUnknownProperty false.");
    } catch (UnrecognizedPropertyException e){
    }

    // Case 2: initial ObjectMapper with "disabled feature".
    objectMapper = new ObjectMapper();
    objectMapper.enable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
    objectMapper.addHandler(handler);
    objectReader = objectMapper.readerFor(TestBean.class);

    // Assert failure on unknown property.
    try {
        objectMapper.readValue(json, TestBean.class);
        Assert.fail("Expect UnrecognizedPropertyException when set skipUnknownProperty false.");
    } catch (UnrecognizedPropertyException e){
    }
    try {
        objectReader.readValue(json, TestBean.class);
        Assert.fail("Expect UnrecognizedPropertyException when set skipUnknownProperty false.");
    } catch (UnrecognizedPropertyException e){
    }

    // Assert skip unknown property and logging.
    handler.setSkipUnknownProperty(true);
    testBean = objectMapper.readValue(json, TestBean.class);
    Assert.assertNull(testBean.getName());
    Assert.assertEquals(testBean.getName1(), "James");
    Assert.assertEquals(testBean.getName2(), "Eric");
    testBean = objectReader.readValue(json, TestBean.class);
    Assert.assertNull(testBean.getName());
    Assert.assertEquals(testBean.getName1(), "James");
    Assert.assertEquals(testBean.getName2(), "Eric");

    // Case 3: unknown property deserialize by object json.
    json = "{\"name1\": \"James\",\"nm\":{\"name\":\"Paul\",\"age\":18},\"name2\":\"Eric\"}";

    // Assert skip unknown property and logging.
    handler.setSkipUnknownProperty(true);
    testBean = objectMapper.readValue(json, TestBean.class);
    Assert.assertNull(testBean.getName());
    Assert.assertEquals(testBean.getName1(), "James");
    Assert.assertEquals(testBean.getName2(), "Eric");
    testBean = objectReader.readValue(json, TestBean.class);
    Assert.assertNull(testBean.getName());
    Assert.assertEquals(testBean.getName1(), "James");
    Assert.assertEquals(testBean.getName2(), "Eric");

    // Case 4: unknown property deserialize by array json.
    json = "{\"name1\": \"James\",\"nm\":[\"name\",\"Paul\"],\"name2\":\"Eric\"}";

    // Assert skip unknown property and logging.
    handler.setSkipUnknownProperty(true);
    testBean = objectMapper.readValue(json, TestBean.class);
    Assert.assertNull(testBean.getName());
    Assert.assertEquals(testBean.getName1(), "James");
    Assert.assertEquals(testBean.getName2(), "Eric");
    testBean = objectReader.readValue(json, TestBean.class);
    Assert.assertNull(testBean.getName());
    Assert.assertEquals(testBean.getName1(), "James");
    Assert.assertEquals(testBean.getName2(), "Eric");
}
/**
 * Calculates the precise duration decomposition for a legacy {@link Date}.
 *
 * Null-safe convenience overload: converts the date to an {@link Instant} and
 * delegates to the Instant-based implementation, forwarding null unchanged.
 *
 * @param then reference time, may be null
 * @return list of duration components as produced by the Instant overload
 */
public List<Duration> calculatePreciseDuration(final Date then) {
    if (then == null) {
        return calculatePreciseDuration((Instant) null);
    }
    return calculatePreciseDuration(then.toInstant());
}
// Verifies precise-duration formatting when only the Minute unit is registered:
// 40m40s in the past rounds to "41 minutes ago".
@Test
public void testCalculatePreciseDuration() {
    PrettyTime prettyTime = new PrettyTime();
    prettyTime.clearUnits();
    Minute minutes = new Minute();
    prettyTime.registerUnit(minutes, new ResourcesTimeFormat(minutes));
    Assert.assertEquals("41 minutes ago",
            prettyTime.format(prettyTime.calculatePreciseDuration(now.minusMinutes(40).minusSeconds(40))));
}
/**
 * Resolves the plugin path for the given (possibly empty) path argument and wraps
 * it in a {@link File}.
 *
 * @param path raw path hint; resolution rules live in getPluginPath
 * @return file handle for the resolved plugin path (existence is not checked)
 */
public static File getPluginFile(final String path) {
    return new File(getPluginPath(path));
}
// Verifies that an empty path argument still yields a non-null File handle.
@Test
public void testGetPluginPathByExtLib() {
    File jarFile = ShenyuPluginPathBuilder.getPluginFile("");
    assertNotNull(jarFile);
}
/**
 * Decodes a MaintenanceAssociation from its JSON representation.
 *
 * Expects a top-level "ma" object with a mandatory "maName" and "component-list",
 * and optional "maNameType" (defaults to CHARACTERSTRING), "maNumericId",
 * "ccmInterval" and "rmep-list".
 *
 * @param json      JSON carrying the "ma" object; null or non-object input yields null
 * @param context   codec context used for the nested component and remote-MEP codecs
 * @param mdNameLen length of the enclosing maintenance-domain name (needed by the builder)
 * @return decoded association, or null for null/non-object input
 * @throws IllegalArgumentException if required fields are missing or the name cannot
 *                                  be parsed (wraps CfmConfigException)
 */
public MaintenanceAssociation decode(ObjectNode json, CodecContext context, int mdNameLen) {
    if (json == null || !json.isObject()) {
        return null;
    }
    JsonNode maNode = json.get(MA);
    String maName = nullIsIllegal(maNode.get(MA_NAME), "maName is required").asText();
    // Name type is optional; character-string is the default encoding.
    String maNameType = MaIdShort.MaIdType.CHARACTERSTRING.name();
    if (maNode.get(MA_NAME_TYPE) != null) {
        maNameType = maNode.get(MA_NAME_TYPE).asText();
    }
    try {
        MaIdShort maId = MdMaNameUtil.parseMaName(maNameType, maName);
        MaBuilder builder = DefaultMaintenanceAssociation.builder(maId, mdNameLen);
        JsonNode maNumericIdNode = maNode.get(MA_NUMERIC_ID);
        if (maNumericIdNode != null) {
            short mdNumericId = (short) maNumericIdNode.asInt();
            builder = builder.maNumericId(mdNumericId);
        }
        if (maNode.get(CCM_INTERVAL) != null) {
            builder.ccmInterval(CcmInterval.valueOf(maNode.get(CCM_INTERVAL).asText()));
        }
        // component-list is mandatory (may be empty); each entry is decoded by ComponentCodec.
        List<Component> componentList = (new ComponentCodec()).decode((ArrayNode) nullIsIllegal(maNode.get(COMPONENT_LIST), "component-list is required"), context);
        for (Component component:componentList) {
            builder = builder.addToComponentList(component);
        }
        // rmep-list is optional; each entry is a remote MEP id.
        JsonNode rmepListJson = maNode.get(RMEP_LIST);
        if (rmepListJson != null) {
            List<MepId> remoteMeps = (new RMepCodec()).decode(
                    (ArrayNode) rmepListJson, context);
            for (MepId remoteMep:remoteMeps) {
                builder = builder.addToRemoteMepIdList(remoteMep);
            }
        }
        return builder.build();
    } catch (CfmConfigException e) {
        // Surface config errors as the codec-conventional unchecked exception.
        throw new IllegalArgumentException(e);
    }
}
// Verifies decoding of an MA with an explicit ICCY1731 name type, empty component
// and rmep lists, and a numeric id; the resulting MaId must equal the expected one.
@Test
public void testDecodeMa5() throws IOException {
    String mdString = "{\"ma\": { \"maName\": \"abc:defghij\"," +
            "\"maNameType\": \"ICCY1731\"," +
            "\"component-list\": [], " +
            "\"rmep-list\": [], " +
            "\"maNumericId\": 5}}";
    InputStream input = new ByteArrayInputStream(
            mdString.getBytes(StandardCharsets.UTF_8));
    JsonNode cfg = mapper.readTree(input);
    MaintenanceAssociation maDecode5 = ((MaintenanceAssociationCodec) context
            .codec(MaintenanceAssociation.class))
            .decode((ObjectNode) cfg, context, 10);
    assertEquals(MAID5_Y1731, maDecode5.maId());
}
/**
 * Converts a SQL Server column type definition into a SeaTunnel {@link Column}.
 *
 * Maps each SQL Server data type (including IDENTITY variants) to a SeaTunnel data
 * type, normalizes the source-type string (with length/precision/scale where the
 * type carries them), and records column length/scale as appropriate. Character
 * lengths are widened from the double-byte/char on-disk length to a 4-byte length.
 * A length of -1 on VARCHAR/NVARCHAR/VARBINARY means MAX.
 *
 * @param typeDefine column definition read from SQL Server metadata
 * @return converted column
 * @throws SeaTunnelRuntimeException (via CommonError) for unsupported types
 */
@Override
public Column convert(BasicTypeDefine typeDefine) {
    PhysicalColumn.PhysicalColumnBuilder builder =
            PhysicalColumn.builder()
                    .name(typeDefine.getName())
                    .nullable(typeDefine.isNullable())
                    .defaultValue(typeDefine.getDefaultValue())
                    .comment(typeDefine.getComment());
    String sqlServerType = typeDefine.getDataType().toUpperCase();
    switch (sqlServerType) {
        case SQLSERVER_BIT:
            builder.sourceType(SQLSERVER_BIT);
            builder.dataType(BasicType.BOOLEAN_TYPE);
            break;
        // Integer family: IDENTITY variants map to the same base type.
        case SQLSERVER_TINYINT:
        case SQLSERVER_TINYINT_IDENTITY:
            builder.sourceType(SQLSERVER_TINYINT);
            builder.dataType(BasicType.SHORT_TYPE);
            break;
        case SQLSERVER_SMALLINT:
        case SQLSERVER_SMALLINT_IDENTITY:
            builder.sourceType(SQLSERVER_SMALLINT);
            builder.dataType(BasicType.SHORT_TYPE);
            break;
        case SQLSERVER_INTEGER:
        case SQLSERVER_INTEGER_IDENTITY:
        case SQLSERVER_INT:
        case SQLSERVER_INT_IDENTITY:
            builder.sourceType(SQLSERVER_INT);
            builder.dataType(BasicType.INT_TYPE);
            break;
        case SQLSERVER_BIGINT:
        case SQLSERVER_BIGINT_IDENTITY:
            builder.sourceType(SQLSERVER_BIGINT);
            builder.dataType(BasicType.LONG_TYPE);
            break;
        case SQLSERVER_REAL:
            builder.sourceType(SQLSERVER_REAL);
            builder.dataType(BasicType.FLOAT_TYPE);
            break;
        case SQLSERVER_FLOAT:
            // FLOAT(1..24) is stored as REAL (single precision) by SQL Server.
            if (typeDefine.getPrecision() != null && typeDefine.getPrecision() <= 24) {
                builder.sourceType(SQLSERVER_REAL);
                builder.dataType(BasicType.FLOAT_TYPE);
            } else {
                builder.sourceType(SQLSERVER_FLOAT);
                builder.dataType(BasicType.DOUBLE_TYPE);
            }
            break;
        case SQLSERVER_DECIMAL:
        case SQLSERVER_NUMERIC:
            builder.sourceType(
                    String.format(
                            "%s(%s,%s)",
                            SQLSERVER_DECIMAL,
                            typeDefine.getPrecision(),
                            typeDefine.getScale()));
            builder.dataType(
                    new DecimalType(
                            typeDefine.getPrecision().intValue(), typeDefine.getScale()));
            builder.columnLength(typeDefine.getPrecision());
            builder.scale(typeDefine.getScale());
            break;
        case SQLSERVER_MONEY:
            builder.sourceType(SQLSERVER_MONEY);
            builder.dataType(
                    new DecimalType(
                            typeDefine.getPrecision().intValue(), typeDefine.getScale()));
            builder.columnLength(typeDefine.getPrecision());
            builder.scale(typeDefine.getScale());
            break;
        case SQLSERVER_SMALLMONEY:
            builder.sourceType(SQLSERVER_SMALLMONEY);
            builder.dataType(
                    new DecimalType(
                            typeDefine.getPrecision().intValue(), typeDefine.getScale()));
            builder.columnLength(typeDefine.getPrecision());
            builder.scale(typeDefine.getScale());
            break;
        // Character types: widen declared length to a 4-byte character length.
        case SQLSERVER_CHAR:
            builder.sourceType(String.format("%s(%s)", SQLSERVER_CHAR, typeDefine.getLength()));
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(
                    TypeDefineUtils.doubleByteTo4ByteLength(typeDefine.getLength()));
            break;
        case SQLSERVER_NCHAR:
            builder.sourceType(
                    String.format("%s(%s)", SQLSERVER_NCHAR, typeDefine.getLength()));
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(
                    TypeDefineUtils.doubleByteTo4ByteLength(typeDefine.getLength()));
            break;
        case SQLSERVER_VARCHAR:
            // Length -1 denotes VARCHAR(MAX).
            if (typeDefine.getLength() == -1) {
                builder.sourceType(MAX_VARCHAR);
                builder.columnLength(TypeDefineUtils.doubleByteTo4ByteLength(POWER_2_31 - 1));
            } else {
                builder.sourceType(
                        String.format("%s(%s)", SQLSERVER_VARCHAR, typeDefine.getLength()));
                builder.columnLength(
                        TypeDefineUtils.doubleByteTo4ByteLength(typeDefine.getLength()));
            }
            builder.dataType(BasicType.STRING_TYPE);
            break;
        case SQLSERVER_NVARCHAR:
            // Length -1 denotes NVARCHAR(MAX).
            if (typeDefine.getLength() == -1) {
                builder.sourceType(MAX_NVARCHAR);
                builder.columnLength(TypeDefineUtils.doubleByteTo4ByteLength(POWER_2_31 - 1));
            } else {
                builder.sourceType(
                        String.format("%s(%s)", SQLSERVER_NVARCHAR, typeDefine.getLength()));
                builder.columnLength(
                        TypeDefineUtils.doubleByteTo4ByteLength(typeDefine.getLength()));
            }
            builder.dataType(BasicType.STRING_TYPE);
            break;
        case SQLSERVER_TEXT:
            builder.sourceType(SQLSERVER_TEXT);
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(POWER_2_31 - 1);
            break;
        case SQLSERVER_NTEXT:
            builder.sourceType(SQLSERVER_NTEXT);
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(POWER_2_30 - 1);
            break;
        case SQLSERVER_XML:
            builder.sourceType(SQLSERVER_XML);
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(POWER_2_31 - 1);
            break;
        case SQLSERVER_UNIQUEIDENTIFIER:
            builder.sourceType(SQLSERVER_UNIQUEIDENTIFIER);
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength()));
            break;
        case SQLSERVER_SQLVARIANT:
            builder.sourceType(SQLSERVER_SQLVARIANT);
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(typeDefine.getLength());
            break;
        // Binary types.
        case SQLSERVER_BINARY:
            builder.sourceType(
                    String.format("%s(%s)", SQLSERVER_BINARY, typeDefine.getLength()));
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(typeDefine.getLength());
            break;
        case SQLSERVER_VARBINARY:
            // Length -1 denotes VARBINARY(MAX).
            if (typeDefine.getLength() == -1) {
                builder.sourceType(MAX_VARBINARY);
                builder.columnLength(POWER_2_31 - 1);
            } else {
                builder.sourceType(
                        String.format("%s(%s)", SQLSERVER_VARBINARY, typeDefine.getLength()));
                builder.columnLength(typeDefine.getLength());
            }
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            break;
        case SQLSERVER_IMAGE:
            builder.sourceType(SQLSERVER_IMAGE);
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(POWER_2_31 - 1);
            break;
        case SQLSERVER_TIMESTAMP:
            // SQL Server TIMESTAMP/ROWVERSION is an 8-byte binary, not a date-time.
            builder.sourceType(SQLSERVER_TIMESTAMP);
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(8L);
            break;
        // Date/time types.
        case SQLSERVER_DATE:
            builder.sourceType(SQLSERVER_DATE);
            builder.dataType(LocalTimeType.LOCAL_DATE_TYPE);
            break;
        case SQLSERVER_TIME:
            builder.sourceType(String.format("%s(%s)", SQLSERVER_TIME, typeDefine.getScale()));
            builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
            builder.scale(typeDefine.getScale());
            break;
        case SQLSERVER_DATETIME:
            builder.sourceType(SQLSERVER_DATETIME);
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            // DATETIME has a fixed precision of ~3 fractional digits.
            builder.scale(3);
            break;
        case SQLSERVER_DATETIME2:
            builder.sourceType(
                    String.format("%s(%s)", SQLSERVER_DATETIME2, typeDefine.getScale()));
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            builder.scale(typeDefine.getScale());
            break;
        case SQLSERVER_DATETIMEOFFSET:
            builder.sourceType(
                    String.format("%s(%s)", SQLSERVER_DATETIMEOFFSET, typeDefine.getScale()));
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            builder.scale(typeDefine.getScale());
            break;
        case SQLSERVER_SMALLDATETIME:
            builder.sourceType(SQLSERVER_SMALLDATETIME);
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            break;
        default:
            throw CommonError.convertToSeaTunnelTypeError(
                    DatabaseIdentifier.SQLSERVER, sqlServerType, typeDefine.getName());
    }
    return builder.build();
}
// Verifies that a smallint column converts to SHORT_TYPE and keeps its name and
// source type (case-insensitively).
@Test
public void testConvertSmallint() {
    BasicTypeDefine<Object> typeDefine =
            BasicTypeDefine.builder()
                    .name("test")
                    .columnType("smallint")
                    .dataType("smallint")
                    .build();
    Column column = SqlServerTypeConverter.INSTANCE.convert(typeDefine);
    Assertions.assertEquals(typeDefine.getName(), column.getName());
    Assertions.assertEquals(BasicType.SHORT_TYPE, column.getDataType());
    Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType().toLowerCase());
}
/**
 * Reads a single byte from the wrapped stream, advancing the file-position counter
 * {@code fp} only when a byte was actually read (i.e. not at end of stream).
 *
 * @return the byte read (0-255), or -1 at end of stream
 * @throws IOException if the underlying stream fails
 */
@Override
public int read() throws IOException {
    final int value = inputStream.read();
    if (value != -1) {
        fp++;
    }
    return value;
}
// Verifies that a bulk read returns the first 10 bytes of the backing text and
// reports the number of bytes read.
@Test
public void read() throws IOException {
    byte[] buff = new byte[10];
    int n = cs.read(buff);
    byte[] temp = Arrays.copyOfRange(text, 0, buff.length);
    assertArrayEquals(temp, buff);
    assertEquals(buff.length, n);
}
/**
 * Reports whether the environment defines the given property key.
 *
 * @param key property key; null is never considered present
 * @return true if the key exists in the backing properties
 */
public boolean hasKey(String key) {
    return key != null && props.containsKey(key);
}
// Verifies hasKey for an absent key, a present key, and a null key (always false).
@Test
public void testHasKey() {
    Environment environment = Environment.of("classpath:/application.properties");
    assertEquals(Boolean.FALSE, environment.hasKey("hello"));
    assertEquals(Boolean.TRUE, environment.hasKey("app.version"));
    assertEquals(Boolean.FALSE, environment.hasKey(null));
}
/**
 * Estimates the serialized size of a Spanner mutation in bytes.
 *
 * DELETE mutations are sized by their key set; all other operations sum the
 * estimated sizes of their column values (arrays and primitives). STRUCT values
 * are rejected because Spanner mutations do not support them.
 *
 * @param m mutation to estimate
 * @return estimated size in bytes
 * @throws IllegalArgumentException if a value of STRUCT type is encountered
 */
static long sizeOf(Mutation m) {
    if (m.getOperation() == Mutation.Op.DELETE) {
        return sizeOf(m.getKeySet());
    }
    long result = 0;
    for (Value v : m.getValues()) {
        switch (v.getType().getCode()) {
            case ARRAY:
                result += estimateArrayValue(v);
                break;
            case STRUCT:
                throw new IllegalArgumentException("Structs are not supported in mutation.");
            default:
                result += estimatePrimitiveValue(v);
        }
    }
    return result;
}
// Verifies that mutations whose array columns are null (for every supported array
// element type) are estimated at size 0.
@Test
public void nullPrimitiveArrays() throws Exception {
    Mutation int64 =
            Mutation.newInsertOrUpdateBuilder("test").set("one").toInt64Array((long[]) null).build();
    Mutation protoEnum =
            Mutation.newInsertOrUpdateBuilder("test")
                    .set("one")
                    .toProtoEnumArray(null, "customer.app.TestEnum")
                    .build();
    Mutation float32 =
            Mutation.newInsertOrUpdateBuilder("test").set("one").toFloat32Array((float[]) null).build();
    Mutation float64 =
            Mutation.newInsertOrUpdateBuilder("test")
                    .set("one")
                    .toFloat64Array((double[]) null)
                    .build();
    Mutation bool =
            Mutation.newInsertOrUpdateBuilder("test").set("one").toBoolArray((boolean[]) null).build();
    Mutation numeric =
            Mutation.newInsertOrUpdateBuilder("test")
                    .set("one")
                    .toNumericArray((Iterable<BigDecimal>) null)
                    .build();
    Mutation pgNumeric =
            Mutation.newInsertOrUpdateBuilder("test")
                    .set("one")
                    .toPgNumericArray((Iterable<String>) null)
                    .build();
    Mutation json =
            Mutation.newInsertOrUpdateBuilder("test").set("one").toJsonArray(null).build();
    Mutation jsonb =
            Mutation.newInsertOrUpdateBuilder("test").set("one").toPgJsonbArray(null).build();
    assertThat(MutationSizeEstimator.sizeOf(int64), is(0L));
    assertThat(MutationSizeEstimator.sizeOf(float32), is(0L));
    assertThat(MutationSizeEstimator.sizeOf(float64), is(0L));
    assertThat(MutationSizeEstimator.sizeOf(bool), is(0L));
    assertThat(MutationSizeEstimator.sizeOf(numeric), is(0L));
    assertThat(MutationSizeEstimator.sizeOf(pgNumeric), is(0L));
    assertThat(MutationSizeEstimator.sizeOf(json), is(0L));
    assertThat(MutationSizeEstimator.sizeOf(jsonb), is(0L));
    assertThat(MutationSizeEstimator.sizeOf(protoEnum), is(0L));
}
/**
 * Parses a string into an int with lenient rules.
 *
 * Blank input yields 0; a "0x"/"0X" prefix is parsed as hexadecimal; scientific
 * notation is rejected; anything Integer.parseInt cannot handle falls back to the
 * more permissive parseNumber and is truncated to int.
 *
 * @param number textual number, may be blank
 * @return parsed int value
 * @throws NumberFormatException for scientific notation or unparsable input
 */
public static int parseInt(String number) throws NumberFormatException {
    if (StrUtil.isBlank(number)) {
        return 0;
    }
    if (StrUtil.startWithIgnoreCase(number, "0x")) {
        // A "0x" prefix (e.g. 0x04) denotes a hexadecimal number.
        return Integer.parseInt(number.substring(2), 16);
    }
    if (StrUtil.containsIgnoreCase(number, "E")) {
        // Scientific notation is deliberately unsupported: it is used for very small
        // or very large numbers, which lose precision when converted to int, so the
        // conversion would be meaningless.
        throw new NumberFormatException(StrUtil.format("Unsupported int format: [{}]", number));
    }
    try {
        return Integer.parseInt(number);
    } catch (NumberFormatException e) {
        // Fallback: parseNumber accepts more formats (e.g. grouping, decimals).
        return parseNumber(number).intValue();
    }
}
// Verifies that a non-numeric string ("d") makes parseInt throw NumberFormatException.
@Test
public void parseIntTest3() {
    assertThrows(NumberFormatException.class, ()->{
        final int v1 = NumberUtil.parseInt("d");
        assertEquals(0, v1);
    });
}
/**
 * Handles a successful share-group heartbeat response from the broker.
 *
 * <p>Rejects responses that carry an error code (the error path is handled elsewhere),
 * ignores responses when the member is leaving or already out of the group, updates the
 * member id / epoch from the response, and reconciles any assignment the broker sent.
 * Guard order matters: LEAVING and UNSUBSCRIBED are checked before the generic
 * not-in-group check so that an in-flight leave can be completed.
 *
 * @param response the successful heartbeat response payload
 * @throws IllegalArgumentException if the response unexpectedly carries an error code
 */
@Override
public void onHeartbeatSuccess(ShareGroupHeartbeatResponseData response) {
    if (response.errorCode() != Errors.NONE.code()) {
        String errorMessage = String.format(
            "Unexpected error in Heartbeat response. Expected no error, but received: %s",
            Errors.forCode(response.errorCode())
        );
        throw new IllegalArgumentException(errorMessage);
    }
    MemberState state = state();
    if (state == MemberState.LEAVING) {
        // A response arriving while we are leaving is stale for assignment purposes.
        log.debug("Ignoring heartbeat response received from broker. Member {} with epoch {} is " +
            "already leaving the group.", memberId, memberEpoch);
        return;
    }
    if (state == MemberState.UNSUBSCRIBED && maybeCompleteLeaveInProgress()) {
        // This response acknowledges the leave heartbeat; the leave operation is now complete.
        log.debug("Member {} with epoch {} received a successful response to the heartbeat " +
            "to leave the group and completed the leave operation. ", memberId, memberEpoch);
        return;
    }
    if (isNotInGroup()) {
        log.debug("Ignoring heartbeat response received from broker. Member {} is in {} state" +
            " so it's not a member of the group. ", memberId, state);
        return;
    }
    // Update the group member id label in the client telemetry reporter if the member id has
    // changed. Initially the member id is empty, and it is updated when the member joins the
    // group. This is done here to avoid updating the label on every heartbeat response. Also
    // check if the member id is null, as the schema defines it as nullable.
    if (response.memberId() != null && !response.memberId().equals(memberId)) {
        clientTelemetryReporter.ifPresent(reporter -> reporter.updateMetricsLabels(
            Collections.singletonMap(ClientTelemetryProvider.GROUP_MEMBER_ID, response.memberId())));
    }
    this.memberId = response.memberId();
    updateMemberEpoch(response.memberEpoch());
    ShareGroupHeartbeatResponseData.Assignment assignment = response.assignment();
    if (assignment != null) {
        if (!state.canHandleNewAssignment()) {
            // New assignment received but member is in a state where it cannot take new
            // assignments (ex. preparing to leave the group)
            log.debug("Ignoring new assignment {} received from server because member is in {} state.", assignment, state);
            return;
        }
        // Convert the wire format into topicId -> sorted partition set before reconciling.
        Map<Uuid, SortedSet<Integer>> newAssignment = new HashMap<>();
        assignment.topicPartitions().forEach(topicPartition -> newAssignment.put(topicPartition.topicId(), new TreeSet<>(topicPartition.partitions())));
        processAssignmentReceived(newAssignment);
    }
}
// A fenced member must drop its current assignment, rejoin, and fully reconcile the
// (same) assignment again before transitioning back to STABLE.
@Test
public void testSameAssignmentReconciledAgainWhenFenced() {
    ShareMembershipManager membershipManager = createMemberInStableState();
    Uuid topic1 = Uuid.randomUuid();
    final ShareGroupHeartbeatResponseData.Assignment assignment1 = new ShareGroupHeartbeatResponseData.Assignment();
    final ShareGroupHeartbeatResponseData.Assignment assignment2 = new ShareGroupHeartbeatResponseData.Assignment()
        .setTopicPartitions(Collections.singletonList(
            new ShareGroupHeartbeatResponseData.TopicPartitions()
                .setTopicId(topic1)
                .setPartitions(Arrays.asList(0, 1, 2))
        ));
    when(metadata.topicNames()).thenReturn(Collections.singletonMap(topic1, "topic1"));
    assertEquals(toTopicIdPartitionMap(assignment1), membershipManager.currentAssignment().partitions);

    // Receive assignment, wait on commit
    membershipManager.onHeartbeatSuccess(createShareGroupHeartbeatResponse(assignment2).data());
    assertEquals(MemberState.RECONCILING, membershipManager.state());
    CompletableFuture<Void> commitResult = new CompletableFuture<>();
    membershipManager.poll(time.milliseconds());

    // Get fenced, commit completes
    membershipManager.transitionToFenced();
    assertEquals(MemberState.JOINING, membershipManager.state());
    assertTrue(membershipManager.currentAssignment().isNone());
    assertTrue(subscriptionState.assignedPartitions().isEmpty());
    commitResult.complete(null);
    // Completing the commit after fencing must not resurrect the dropped assignment.
    assertEquals(MemberState.JOINING, membershipManager.state());
    assertTrue(membershipManager.currentAssignment().isNone());
    assertTrue(subscriptionState.assignedPartitions().isEmpty());

    // We have to reconcile & ack the assignment again
    membershipManager.onHeartbeatSuccess(createShareGroupHeartbeatResponse(assignment1).data());
    assertEquals(MemberState.RECONCILING, membershipManager.state());
    membershipManager.poll(time.milliseconds());
    assertEquals(MemberState.ACKNOWLEDGING, membershipManager.state());
    membershipManager.onHeartbeatRequestGenerated();
    assertEquals(MemberState.STABLE, membershipManager.state());
    assertEquals(toTopicIdPartitionMap(assignment1), membershipManager.currentAssignment().partitions);
}
@Override public Credentials configure(final Host host) { if(StringUtils.isNotBlank(host.getHostname())) { final Credentials credentials = new Credentials(host.getCredentials()); configuration.refresh(); // Update this host credentials from the OpenSSH configuration file in ~/.ssh/config final OpenSshConfig.Host entry = configuration.lookup(host.getHostname()); if(StringUtils.isNotBlank(entry.getUser())) { if(!credentials.validate(host.getProtocol(), new LoginOptions(host.getProtocol()).password(false))) { if(log.isInfoEnabled()) { log.info(String.format("Using username %s from %s", entry, configuration)); } credentials.setUsername(entry.getUser()); } } if(!credentials.isPublicKeyAuthentication()) { if(null != entry.getIdentityFile()) { if(log.isInfoEnabled()) { log.info(String.format("Using identity %s from %s", entry, configuration)); } credentials.setIdentity(entry.getIdentityFile()); } else { // No custom public key authentication configuration if(new HostPreferences(host).getBoolean("ssh.authentication.publickey.default.enable")) { final Local rsa = LocalFactory.get(new HostPreferences(host).getProperty("ssh.authentication.publickey.default.rsa")); if(rsa.exists()) { if(log.isInfoEnabled()) { log.info(String.format("Using RSA default host key %s from %s", rsa, configuration)); } credentials.setIdentity(rsa); } else { final Local dsa = LocalFactory.get(new HostPreferences(host).getProperty("ssh.authentication.publickey.default.dsa")); if(dsa.exists()) { if(log.isInfoEnabled()) { log.info(String.format("Using DSA default host key %s from %s", dsa, configuration)); } credentials.setIdentity(dsa); } } } } } return credentials; } return CredentialsConfigurator.DISABLED.configure(host); }
// A host without a hostname bypasses the OpenSSH config lookup and must still yield
// non-null credentials via the disabled-configurator fallback.
@Test
public void testNullHostname() {
    OpenSSHCredentialsConfigurator c = new OpenSSHCredentialsConfigurator(
            new OpenSshConfig(
                    new Local("src/main/test/resources", "openssh/config")));
    assertNotNull(c.configure(new Host(new TestProtocol(Scheme.sftp))));
}
/**
 * Builds the probability-map function expression for the given normalization method.
 *
 * @param normalizationMethod the PMML normalization method to translate
 * @param isBinary whether the classification is binary
 * @return the expression for a supported normalization method
 * @throws KiePMMLInternalException if the normalization method is unsupported
 */
static Expression getProbabilityMapFunctionExpression(final RegressionModel.NormalizationMethod normalizationMethod,
                                                      final boolean isBinary) {
    if (!UNSUPPORTED_NORMALIZATION_METHODS.contains(normalizationMethod)) {
        return getProbabilityMapFunctionSupportedExpression(normalizationMethod, isBinary);
    }
    throw new KiePMMLInternalException(String.format("Unsupported NormalizationMethod %s", normalizationMethod));
}
// Every normalization method listed as unsupported must raise KiePMMLInternalException.
@Test
void getProbabilityMapFunctionExpressionWithUnSupportedMethods() {
    UNSUPPORTED_NORMALIZATION_METHODS.forEach(normalizationMethod -> {
        try {
            KiePMMLClassificationTableFactory.getProbabilityMapFunctionExpression(normalizationMethod, false);
            fail("Expecting KiePMMLInternalException with normalizationMethod " + normalizationMethod);
        } catch (Exception e) {
            assertThat(e).isInstanceOf(KiePMMLInternalException.class);
        }
    });
}
public static void validate(BugPattern pattern) throws ValidationException { if (pattern == null) { throw new ValidationException("No @BugPattern provided"); } // name must not contain spaces if (CharMatcher.whitespace().matchesAnyOf(pattern.name())) { throw new ValidationException("Name must not contain whitespace: " + pattern.name()); } // linkType must be consistent with link element. switch (pattern.linkType()) { case CUSTOM: if (pattern.link().isEmpty()) { throw new ValidationException("Expected a custom link but none was provided"); } break; case AUTOGENERATED: case NONE: if (!pattern.link().isEmpty()) { throw new ValidationException("Expected no custom link but found: " + pattern.link()); } break; } }
// A @BugPattern using a custom suppression annotation must pass validation unchanged.
@Test
public void customSuppressionAnnotation() throws Exception {
    @BugPattern(
        name = "customSuppressionAnnotation",
        summary = "Uses a custom suppression annotation",
        explanation = "Uses a custom suppression annotation",
        severity = SeverityLevel.ERROR,
        suppressionAnnotations = CustomSuppressionAnnotation.class)
    final class BugPatternTestClass {}
    BugPattern annotation = BugPatternTestClass.class.getAnnotation(BugPattern.class);
    BugPatternValidator.validate(annotation);
}
/**
 * Updates the title and details of an existing product.
 *
 * <p>The loaded entity is mutated inside the transaction; JPA dirty checking persists the
 * change on commit, so no explicit save is required.
 *
 * @param id the id of the product to update
 * @param title the new title
 * @param details the new details text
 * @throws NoSuchElementException if no product with the given id exists
 */
@Override
@Transactional
public void updateProduct(Integer id, String title, String details) {
    this.productRepository.findById(id)
        .ifPresentOrElse(product -> {
            product.setTitle(title);
            product.setDetails(details);
        }, () -> {
            // Include the id so callers can tell which product was missing.
            throw new NoSuchElementException("Product not found: id=" + id);
        });
}
// When the product exists, updateProduct mutates the loaded entity in place; only
// findById is expected on the repository (no explicit save call).
@Test
void updateProduct_ProductExists_UpdatesProduct() {
    // given
    var productId = 1;
    var product = new Product(1, "Новый товар", "Описание нового товара");
    var title = "Новое название";
    var details = "Новое описание";
    doReturn(Optional.of(product))
        .when(this.productRepository).findById(1);
    // when
    this.service.updateProduct(productId, title, details);
    // then
    verify(this.productRepository).findById(productId);
    verifyNoMoreInteractions(this.productRepository);
}
/**
 * Commits a half (prepared) transactional message identified by its commit-log offset.
 *
 * @param requestHeader the end-transaction request carrying the commit-log offset
 * @return the operation result wrapping the half message looked up at that offset
 */
@Override
public OperationResult commitMessage(EndTransactionRequestHeader requestHeader) {
    final long commitLogOffset = requestHeader.getCommitLogOffset();
    return getHalfMessageByOffset(commitLogOffset);
}
// Committing a half message found at the given offset must report SUCCESS.
@Test
public void testCommitMessage() {
    when(bridge.lookMessageByOffset(anyLong())).thenReturn(createMessageBrokerInner());
    OperationResult result = queueTransactionMsgService.commitMessage(createEndTransactionRequestHeader(MessageSysFlag.TRANSACTION_COMMIT_TYPE));
    assertThat(result.getResponseCode()).isEqualTo(ResponseCode.SUCCESS);
}
/**
 * FEEL matches(): tests whether {@code input} contains a match for {@code pattern},
 * optionally modified by regex flags.
 *
 * <p>Flags are validated, forced to include {@code U} (Unicode character classes), and
 * prepended to the pattern as an embedded flag expression {@code (?flags)}.
 *
 * @param input the string to search; must not be null
 * @param pattern the regular expression; must not be null
 * @param flags optional flag characters; may be null or empty
 * @return a result wrapping whether a match was found
 * @throws InvalidParameterException if input or pattern is null
 */
static FEELFnResult<Boolean> matchFunctionWithFlags(String input, String pattern, String flags) {
    log.debug("Input: {} , Pattern: {}, Flags: {}", input, pattern, flags);
    if (input == null) {
        throw new InvalidParameterException("input");
    }
    if (pattern == null) {
        throw new InvalidParameterException("pattern");
    }
    String flagsString = "";
    if (flags != null && !flags.isEmpty()) {
        checkFlags(flags);
        // Always enable Unicode character classes via the embedded "U" flag.
        final String effectiveFlags = flags.contains("U") ? flags : flags + "U";
        flagsString = String.format("(?%s)", effectiveFlags);
    }
    log.debug("flagsString: {}", flagsString);
    String stringToBeMatched = flagsString + pattern;
    log.debug("stringToBeMatched: {}", stringToBeMatched);
    final Matcher matcher = Pattern.compile(stringToBeMatched).matcher(input);
    boolean matchFound = matcher.find();
    log.debug("matchFound: {}", matchFound);
    return FEELFnResult.ofResult(matchFound);
}
// An unbalanced pattern ("(abc|def(ghi" has unclosed groups) must surface the
// underlying PatternSyntaxException from Pattern.compile.
@Test
void checkForPatternTest() {
    assertThrows(PatternSyntaxException.class, () -> MatchesFunction.matchFunctionWithFlags("foobar", "(abc|def(ghi", "i"));
}
/**
 * Creates a transform that binarises values: maps each input to 0.0 or 1.0 via the
 * {@link Operation#binarise} operation.
 *
 * @return a binarising {@link SimpleTransform}
 */
public static SimpleTransform binarise() {
    return new SimpleTransform(Operation.binarise);
}
// binarise() must map values below EPSILON to 0.0 and everything else to 1.0.
@Test
public void testBinarise() {
    TransformationMap t = new TransformationMap(Collections.singletonList(SimpleTransform.binarise()),new HashMap<>());
    testSimple(t,(double a) -> a < EPSILON ? 0.0 : 1.0);
}
/**
 * Renames a file, keeping its original extension (delegates with retainExt=false... 
 * NOTE(review): the third argument of the delegate appears to control extension
 * retention — confirm its exact meaning against the overload's documentation).
 *
 * @param file the file to rename
 * @param newName the new file name
 * @param isOverride whether an existing target file may be overwritten
 * @return the renamed file
 */
public static File rename(File file, String newName, boolean isOverride) {
    return rename(file, newName, false, isOverride);
}
// Manual smoke test (disabled: depends on a local file d:/test/3.jpg existing).
@Test
@Disabled
public void renameTest() {
    FileUtil.rename(FileUtil.file("d:/test/3.jpg"), "2.jpg", false);
}
/**
 * Repeatedly evaluates the predicate until it passes or the retry window times out.
 *
 * <p>The predicate is always evaluated at least once, before the first timeout check.
 *
 * @param predicate the condition to satisfy
 * @param arg the argument passed to the predicate on every attempt
 * @param <T> the argument type
 * @return true if the predicate passed before timing out, false otherwise
 */
public <T> boolean execute(final Predicate<T> predicate, final T arg) {
    while (true) {
        if (predicate.test(arg)) {
            return true;
        }
        if (isTimeout()) {
            return false;
        }
    }
}
// A predicate that can never pass must make execute return false once the retry
// window elapses.
@Test
void assertExecuteTimeout() {
    assertFalse(new RetryExecutor(5L, 2L).execute(value -> value > 0, -1));
}
String findFileNameWithoutExtension(Path jarPath) { String fileName = jarPath.getFileName().toString(); // Exclude the extension from filename int endIndex = fileName.lastIndexOf('.'); if (endIndex != -1) { fileName = fileName.substring(0, endIndex); } return fileName; }
// Extension stripping must work for absolute paths, bare relative names, and names
// with no extension at all.
@Test
public void testFindFileNameWithoutExtension() {
    JobUploadCall jobUploadCall = new JobUploadCall();
    String expectedFileName = "foo";
    Path jarPath = Paths.get("/mnt/foo.jar");
    String fileNameWithoutExtension = jobUploadCall.findFileNameWithoutExtension(jarPath);
    assertEquals(expectedFileName, fileNameWithoutExtension);
    jarPath = Paths.get("foo.jar");
    fileNameWithoutExtension = jobUploadCall.findFileNameWithoutExtension(jarPath);
    assertEquals(expectedFileName, fileNameWithoutExtension);
    jarPath = Paths.get("foo");
    fileNameWithoutExtension = jobUploadCall.findFileNameWithoutExtension(jarPath);
    assertEquals(expectedFileName, fileNameWithoutExtension);
}
/**
 * Builds a KTable from a grouped stream aggregate, delegating to the full overload with
 * a freshly created {@link AggregateParamsFactory}.
 *
 * @param groupedStream the grouped input stream holder
 * @param aggregate the aggregate step to apply
 * @param buildContext the runtime build context
 * @param materializedFactory factory for the state-store materialization
 * @return the resulting KTable holder keyed by {@code GenericKey}
 */
public static KTableHolder<GenericKey> build(
    final KGroupedStreamHolder groupedStream,
    final StreamAggregate aggregate,
    final RuntimeBuildContext buildContext,
    final MaterializedFactory materializedFactory) {
    return build(
        groupedStream,
        aggregate,
        buildContext,
        materializedFactory,
        new AggregateParamsFactory()
    );
}
// An unwindowed aggregate must pass its schema, non-aggregate columns, functions and
// config straight through to the AggregateParamsFactory (windowed flag = false).
@Test
public void shouldBuildAggregatorParamsCorrectlyForUnwindowedAggregate() {
    // Given:
    givenUnwindowedAggregate();
    // When:
    aggregate.build(planBuilder, planInfo);
    // Then:
    verify(aggregateParamsFactory).create(
        INPUT_SCHEMA,
        NON_AGG_COLUMNS,
        functionRegistry,
        FUNCTIONS,
        false,
        KsqlConfig.empty()
    );
}
/**
 * Ensures the given name carries the expected prefix, prepending it when absent.
 *
 * @param name the raw name to repair
 * @return the name, guaranteed to start with {@code PRE_FIX}
 */
public static String repairData(final String name) {
    if (name.startsWith(PRE_FIX)) {
        return name;
    }
    return PRE_FIX + name;
}
// repairData must prepend "/" when missing and leave already-prefixed input untouched.
@Test
void repairData() {
    String ret = UriUtils.repairData("http");
    assertEquals("/http", ret);
    ret = UriUtils.repairData("/http");
    assertEquals("/http", ret);
}
/**
 * Stores a non-null value at the given index, swapping in the new backing storage
 * only when the underlying set operation produced a different storage instance
 * (e.g. after a resize or representation change).
 *
 * @param index the index to write
 * @param value the value to store; must not be null (asserted)
 */
public void set(int index, E value) {
    assert value != null;
    final Storage32 updated = storage.set(index, value);
    if (updated != storage) {
        storage = updated;
    }
}
// Filling the storage so capacity equals size and then writing far beyond the last
// index must trigger a dense-to-sparse conversion without corrupting earlier entries.
@Test
public void testSetDenseToSparse32WithCapacityEqualToSize() {
    // add some dense entries
    for (int i = 0; i < capacityDeltaInt(0) + 1; ++i) {
        set(i);
        verify();
    }
    // at this point capacity is equal to size
    // go far beyond the last index to trigger dense to sparse conversion
    set(ARRAY_STORAGE_32_MAX_SPARSE_SIZE * 1000);
    verify();
    // make sure we are still good
    for (int i = 0; i < ARRAY_STORAGE_32_MAX_SPARSE_SIZE * 5; ++i) {
        set(i);
        verify();
    }
}
char[] decode(final ByteBuf in) { final CharBuffer charBuffer = CharBuffer.allocate(in.capacity()); encoder.reset(); final ByteBuffer nioBuffer = in.nioBuffer(); encoder.decode(nioBuffer, charBuffer, false); final char[] buf = new char[charBuffer.position()]; charBuffer.flip(); charBuffer.get(buf); // Netty won't update the reader-index of the original buffer when its nio-buffer representation is read from. Adjust the position of the original buffer. in.readerIndex(nioBuffer.position()); return buf; }
// Feeding a 3-byte UTF-8 character one byte at a time must yield exactly one decoded
// character once the final byte arrives, with the reader index advanced past all three.
@Test
public void testDecodeAllByteOfMultibyteCharInSteps() throws Exception {
    // Setup test fixture.
    final byte[] multibyteCharacter = "\u3053".getBytes(StandardCharsets.UTF_8); // 3-byte character.
    assert multibyteCharacter.length == 3;
    final XMLLightweightParser parser = new XMLLightweightParser();
    final ByteBuf in = ByteBufAllocator.DEFAULT.buffer(3);
    // Execute system under test.
    in.writeBytes(Arrays.copyOfRange(multibyteCharacter, 0, 1));
    parser.decode(in);
    in.writeBytes(Arrays.copyOfRange(multibyteCharacter, 1, 2));
    parser.decode(in);
    in.writeBytes(Arrays.copyOfRange(multibyteCharacter, 2, 3));
    final char[] result = parser.decode(in);
    // Verify results.
    assertEquals(1, result.length);
    assertEquals(3, in.readerIndex());
}
/**
 * Returns a predicate that accepts exactly the objects equal to the given one
 * (per {@code equals}).
 *
 * @param object the reference object to compare against
 * @param <T> the tested type
 * @return a described predicate matching objects equal to {@code object}
 */
@PublicAPI(usage = ACCESS)
public static <T> DescribedPredicate<T> equalTo(T object) {
    return new EqualToPredicate<>(object);
}
// equalTo must accept only equal values (and the same reference) and carry a
// human-readable description.
@Test
public void equalTo_works() {
    assertThat(equalTo(5))
        .rejects(4)
        .hasDescription("equal to '5'")
        .accepts(5)
        .rejects(6);
    Object object = new Object();
    assertThat(equalTo(object)).accepts(object);
}
/**
 * Returns the regex alternation of all known task-type characters (cached on
 * {@code CharTaskTypeMaps}).
 *
 * @return the task-type pattern string, e.g. {@code "(m|r|s|c|t)"}
 */
public static String getAllTaskTypes() {
    return CharTaskTypeMaps.allTaskTypes;
}
// Pins the exact task-type alternation string exposed by TaskID.
@Test
public void testGetAllTaskTypes() {
    assertEquals("The getAllTaskTypes method did not return the expected " +
        "string", "(m|r|s|c|t)", TaskID.getAllTaskTypes());
}
/**
 * Registers the builder for each supported Spanner cross-language transform URN.
 *
 * <p>Returns an immutable map of URN -> builder; ImmutableMap preserves insertion
 * order, so the registration order below is also the iteration order.
 *
 * @return the URN-to-builder registry (insert, update, replace, insert-or-update,
 *     delete, read)
 */
@Override
@NonNull
public Map<String, ExternalTransformBuilder<?, ?, ?>> knownBuilderInstances() {
    return ImmutableMap.<String, ExternalTransformBuilder<?, ?, ?>>builder()
        .put(INSERT_URN, new InsertBuilder())
        .put(UPDATE_URN, new UpdateBuilder())
        .put(REPLACE_URN, new ReplaceBuilder())
        .put(INSERT_OR_UPDATE_URN, new InsertOrUpdateBuilder())
        .put(DELETE_URN, new DeleteBuilder())
        .put(READ_URN, new ReadBuilder())
        .build();
}
// The registry must contain exactly the six supported URNs, no more.
@Test
public void testKnownBuilderInstances() {
    Map<String, ExternalTransformBuilder<?, ?, ?>> builderInstancesMap = spannerTransformRegistrar.knownBuilderInstances();
    assertEquals(6, builderInstancesMap.size());
    assertThat(builderInstancesMap, IsMapContaining.hasKey(SpannerTransformRegistrar.INSERT_URN));
    assertThat(builderInstancesMap, IsMapContaining.hasKey(SpannerTransformRegistrar.UPDATE_URN));
    assertThat(builderInstancesMap, IsMapContaining.hasKey(SpannerTransformRegistrar.REPLACE_URN));
    assertThat(
        builderInstancesMap,
        IsMapContaining.hasKey(SpannerTransformRegistrar.INSERT_OR_UPDATE_URN));
    assertThat(builderInstancesMap, IsMapContaining.hasKey(SpannerTransformRegistrar.DELETE_URN));
    assertThat(builderInstancesMap, IsMapContaining.hasKey(SpannerTransformRegistrar.READ_URN));
}
/**
 * Creates a match criterion for the given VLAN id.
 *
 * @param vlanId the VLAN id to match
 * @return a {@link VlanIdCriterion} wrapping the id
 */
public static Criterion matchVlanId(VlanId vlanId) {
    return new VlanIdCriterion(vlanId);
}
// matchVlanId must produce a VLAN_VID criterion carrying the original VLAN id.
@Test
public void testMatchVlanIdMethod() {
    Criterion matchVlanId = Criteria.matchVlanId(vlanId1);
    VlanIdCriterion vlanIdCriterion =
        checkAndConvert(matchVlanId,
            Criterion.Type.VLAN_VID,
            VlanIdCriterion.class);
    assertThat(vlanIdCriterion.vlanId(), is(equalTo(vlanId1)));
}
/**
 * Grants write permission for a broker on every known name server.
 *
 * <p>Starts an admin client, reads the broker name from the {@code -b} option, and asks
 * each name server to re-enable write permission, reporting per-server success/failure
 * on stdout. The admin client is always shut down.
 *
 * @param commandLine parsed command line (requires the -b broker option)
 * @param options the command options definition
 * @param rpcHook optional RPC hook for the admin client
 * @throws SubCommandException if starting the admin client or reading options fails
 */
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
    DefaultMQAdminExt adminExt = new DefaultMQAdminExt(rpcHook);
    adminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
    try {
        adminExt.start();
        String brokerName = commandLine.getOptionValue('b').trim();
        List<String> nameServers = adminExt.getNameServerAddressList();
        if (nameServers != null) {
            for (String namesrvAddr : nameServers) {
                // Per-server failures are reported but do not abort the remaining servers.
                try {
                    int addTopicCount = adminExt.addWritePermOfBroker(namesrvAddr, brokerName);
                    System.out.printf("add write perm of broker[%s] in name server[%s] OK, %d%n",
                        brokerName,
                        namesrvAddr,
                        addTopicCount
                    );
                } catch (Exception e) {
                    System.out.printf("add write perm of broker[%s] in name server[%s] Failed%n",
                        brokerName,
                        namesrvAddr
                    );
                    e.printStackTrace();
                }
            }
        }
    } catch (Exception e) {
        throw new SubCommandException(this.getClass().getSimpleName() + "command failed", e);
    } finally {
        adminExt.shutdown();
    }
}
// Smoke test: executing the command with a broker option must not throw.
@Test
public void testExecute() throws SubCommandException {
    AddWritePermSubCommand cmd = new AddWritePermSubCommand();
    Options options = ServerUtil.buildCommandlineOptions(new Options());
    String[] subargs = new String[]{"-b default-broker"};
    final CommandLine commandLine = ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new DefaultParser());
    cmd.execute(commandLine, options, null);
}
/**
 * Returns the collected action entries as a fresh, mutable list (a defensive copy of
 * the internal map's values, so callers cannot mutate internal state).
 *
 * @return a new list of entries in the map's value order
 */
public List<Entry> getEntries() {
    return new ArrayList<>(actions.values());
}
// Two refeed actions for the same validation id/doc-type/cluster must be merged into a
// single entry that accumulates both change messages.
@Test
public void action_with_multiple_reasons() {
    List<RefeedActions.Entry> entries = new ConfigChangeActionsBuilder().
        refeed(ValidationId.indexModeChange, CHANGE_MSG, DOC_TYPE, CLUSTER, SERVICE_NAME).
        refeed(ValidationId.indexModeChange, CHANGE_MSG_2, DOC_TYPE, CLUSTER, SERVICE_NAME).
        build().getRefeedActions().getEntries();
    assertThat(entries.size(), is(1));
    assertThat(toString(entries.get(0)), equalTo("music.foo:[baz][change,other change]"));
}
/**
 * Validates a path name by translating it into the chroot'ed full path and delegating
 * to the underlying file system's validation.
 *
 * @param src the path name relative to this (chroot'ed) file system
 * @return whether the fully-resolved path is valid in the backing file system
 */
@Override
public boolean isValidName(String src) {
    return myFs.isValidName(fullPath(new Path(src)).toUri().toString());
}
// When the base file system rejects the chroot-resolved name, the chroot'ed file
// system must reject it too — and must have asked with the /chroot-prefixed path.
@Test
public void testIsValidNameInvalidInBaseFs() throws Exception {
    AbstractFileSystem baseFs = Mockito.spy(fc.getDefaultFileSystem());
    ChRootedFs chRootedFs = new ChRootedFs(baseFs, new Path("/chroot"));
    Mockito.doReturn(false).when(baseFs).isValidName(Mockito.anyString());
    Assert.assertFalse(chRootedFs.isValidName("/test"));
    Mockito.verify(baseFs).isValidName("/chroot/test");
}
public boolean isValid() throws IOException { if (contractBinary.equals(BIN_NOT_PROVIDED)) { throw new UnsupportedOperationException( "Contract binary not present in contract wrapper, " + "please generate your wrapper using -abiFile=<file>"); } if (contractAddress.equals("")) { throw new UnsupportedOperationException( "Contract binary not present, you will need to regenerate your smart " + "contract wrapper with web3j v2.2.0+"); } EthGetCode ethGetCode = transactionManager.getCode(contractAddress, DefaultBlockParameterName.LATEST); if (ethGetCode.hasError()) { return false; } String code = cleanHexPrefix(ethGetCode.getCode()); int metadataIndex = -1; for (String metadataIndicator : METADATA_HASH_INDICATORS) { metadataIndex = code.indexOf(metadataIndicator); if (metadataIndex != -1) { code = code.substring(0, metadataIndex); break; } } // There may be multiple contracts in the Solidity bytecode, hence we only check for a // match with a subset return !code.isEmpty() && contractBinary.contains(code); }
// Deployed code carrying a trailing bzzr0 metadata hash must still be considered a
// valid match for the wrapper's binary (the metadata suffix is stripped).
@Test
public void testIsValidSkipMetadataBzzr0() throws Exception {
    prepareEthGetCode(
        TEST_CONTRACT_BINARY
            + "a165627a7a72305820"
            + "a9bc86938894dc250f6ea25dd823d4472fad6087edcda429a3504e3713a9fc880029");
    Contract contract = deployContract(createTransactionReceipt());
    assertTrue(contract.isValid());
}
/**
 * Schedules a periodic task so that cluster members run it staggered in time.
 *
 * <p>Host names are extracted from the comma-separated {@code host:port} list. If this
 * host is in the list, the effective interval becomes {@code interval * clusterSize} and
 * the initial delay is chosen (mod that interval, relative to {@code now}) from the
 * host's position in the list. A host not in the list runs immediately at the plain
 * interval.
 *
 * @param scheduler receives (initialDelayMillis, intervalMillis)
 * @param interval the per-cluster base interval
 * @param now the current time, used to phase-align the delay
 * @param hostname this host's name
 * @param clusterHostnames comma-separated list of host:port entries for the cluster
 */
static void scheduleStaggered(BiConsumer<Long, Long> scheduler,
                              Duration interval,
                              Instant now,
                              String hostname,
                              String clusterHostnames) {
    List<String> hostnames = Stream.of(clusterHostnames.split(","))
                                   .map(hostPort -> hostPort.split(":")[0])
                                   .toList();
    long intervalMillis = interval.toMillis();
    long delayMillis = 0;
    int position = hostnames.indexOf(hostname);
    if (position >= 0) {
        long offset = position * intervalMillis;
        intervalMillis *= hostnames.size();
        // floorMod keeps the delay in [0, intervalMillis) even when offset < now.
        delayMillis = Math.floorMod(offset - now.toEpochMilli(), intervalMillis);
    }
    scheduler.accept(delayMillis, intervalMillis);
}
// Covers: host not in the list (no stagger), single-host list, a malformed entry
// (":nys:321" yields an empty hostname), and a two-host list with position offset.
@Test
void testStaggering() {
    scheduleStaggered((delayMillis, intervalMillis) -> {
        assertEquals(0, delayMillis);
        assertEquals(10, intervalMillis);
    }, Duration.ofMillis(10), Instant.ofEpochMilli(27), "host", "nys:123,hark:123");
    scheduleStaggered((delayMillis, intervalMillis) -> {
        assertEquals(3, delayMillis);
        assertEquals(10, intervalMillis);
    }, Duration.ofMillis(10), Instant.ofEpochMilli(27), "host", "host:123");
    scheduleStaggered((delayMillis, intervalMillis) -> {
        assertEquals(7, delayMillis);
        assertEquals(20, intervalMillis);
    }, Duration.ofMillis(10), Instant.ofEpochMilli(13), "host", "host:123,:nys:321");
    scheduleStaggered((delayMillis, intervalMillis) -> {
        assertEquals(17, delayMillis);
        assertEquals(20, intervalMillis);
    }, Duration.ofMillis(10), Instant.ofEpochMilli(13), "nys", "host:123,nys:321");
}
/**
 * Whether the keyboard is actively predicting: prediction must be enabled AND there is
 * a non-empty word currently being composed.
 */
protected boolean isCurrentlyPredicting() {
    return isPredictionOn() && !mWord.isEmpty();
}
// Alternating immediate/delayed delete key presses must remove exactly one character
// each, even when selection-update events from the editor are delayed by 500ms.
@Test
public void testDeletesCorrectlyIfPredictingButDelayedPositionUpdate() {
    mAnySoftKeyboardUnderTest.simulateTextTyping("abcd efgh");
    Assert.assertTrue(mAnySoftKeyboardUnderTest.isCurrentlyPredicting());
    mAnySoftKeyboardUnderTest.setUpdateSelectionDelay(500);
    Assert.assertEquals("abcd efgh", mAnySoftKeyboardUnderTest.getCurrentInputConnectionText());
    mAnySoftKeyboardUnderTest.simulateKeyPress(KeyCodes.DELETE, false);
    Assert.assertEquals("abcd efg", mAnySoftKeyboardUnderTest.getCurrentInputConnectionText());
    mAnySoftKeyboardUnderTest.simulateKeyPress(KeyCodes.DELETE, true);
    Assert.assertEquals("abcd ef", mAnySoftKeyboardUnderTest.getCurrentInputConnectionText());
    mAnySoftKeyboardUnderTest.simulateKeyPress(KeyCodes.DELETE, false);
    Assert.assertEquals("abcd e", mAnySoftKeyboardUnderTest.getCurrentInputConnectionText());
    mAnySoftKeyboardUnderTest.simulateKeyPress(KeyCodes.DELETE, true);
    Assert.assertEquals("abcd ", mAnySoftKeyboardUnderTest.getCurrentInputConnectionText());
    mAnySoftKeyboardUnderTest.simulateKeyPress(KeyCodes.DELETE, false);
    Assert.assertEquals("abcd", mAnySoftKeyboardUnderTest.getCurrentInputConnectionText());
    mAnySoftKeyboardUnderTest.simulateKeyPress(KeyCodes.DELETE, true);
    Assert.assertEquals("abc", mAnySoftKeyboardUnderTest.getCurrentInputConnectionText());
    mAnySoftKeyboardUnderTest.simulateKeyPress(KeyCodes.DELETE, false);
    Assert.assertEquals("ab", mAnySoftKeyboardUnderTest.getCurrentInputConnectionText());
    mAnySoftKeyboardUnderTest.simulateKeyPress(KeyCodes.DELETE, true);
    Assert.assertEquals("a", mAnySoftKeyboardUnderTest.getCurrentInputConnectionText());
    mAnySoftKeyboardUnderTest.simulateKeyPress(KeyCodes.DELETE, false);
    Assert.assertEquals("", mAnySoftKeyboardUnderTest.getCurrentInputConnectionText());
    // extra — deleting from an already-empty field must be a no-op.
    mAnySoftKeyboardUnderTest.simulateKeyPress(KeyCodes.DELETE, true);
    Assert.assertEquals("", mAnySoftKeyboardUnderTest.getCurrentInputConnectionText());
}
/**
 * Registers a custom adapter for the given type.
 *
 * <p>The adapter object may implement any combination of {@code JsonSerializer},
 * {@code JsonDeserializer}, {@code InstanceCreator} and {@code TypeAdapter}; each
 * implemented role is registered. Overriding the built-in adapter for
 * {@code Object}/{@code JsonElement} is rejected.
 *
 * @param type the type the adapter handles
 * @param typeAdapter the adapter object (one of the four supported role interfaces)
 * @return this builder, for chaining
 * @throws IllegalArgumentException if the type is Object/JsonElement, or the adapter
 *     implements none of the supported interfaces
 */
@CanIgnoreReturnValue
public GsonBuilder registerTypeAdapter(Type type, Object typeAdapter) {
    Objects.requireNonNull(type);
    $Gson$Preconditions.checkArgument(
        typeAdapter instanceof JsonSerializer<?>
            || typeAdapter instanceof JsonDeserializer<?>
            || typeAdapter instanceof InstanceCreator<?>
            || typeAdapter instanceof TypeAdapter<?>);
    if (isTypeObjectOrJsonElement(type)) {
        throw new IllegalArgumentException("Cannot override built-in adapter for " + type);
    }
    if (typeAdapter instanceof InstanceCreator<?>) {
        instanceCreators.put(type, (InstanceCreator<?>) typeAdapter);
    }
    if (typeAdapter instanceof JsonSerializer<?> || typeAdapter instanceof JsonDeserializer<?>) {
        TypeToken<?> typeToken = TypeToken.get(type);
        factories.add(TreeTypeAdapter.newFactoryWithMatchRawType(typeToken, typeAdapter));
    }
    if (typeAdapter instanceof TypeAdapter<?>) {
        @SuppressWarnings({"unchecked", "rawtypes"})
        TypeAdapterFactory factory = TypeAdapters.newFactory(TypeToken.get(type), (TypeAdapter) typeAdapter);
        factories.add(factory);
    }
    return this;
}
// Registering adapters for core JDK types (primitives, boxed types, String) must be
// permitted — only Object/JsonElement are rejected.
@Test
public void testRegisterTypeAdapterForCoreType() {
    Type[] types = {
        byte.class, int.class, double.class, Short.class, Long.class, String.class,
    };
    for (Type type : types) {
        new GsonBuilder().registerTypeAdapter(type, NULL_TYPE_ADAPTER);
    }
}
/**
 * Encrypts a value for use as a {@code $ENC{...}} variable in Hazelcast configuration.
 *
 * <p>Arguments: {@code args[0]} is the plaintext; optional {@code args[1]} overrides the
 * iteration count. Replacer properties come from the file named by the
 * {@code hazelcast.config} system property, or from the system properties when unset.
 *
 * @param args plaintext and optional iteration count
 * @return the full replacement variable, e.g. {@code $ENC{...}}
 * @throws Exception if reading the config file or encrypting fails
 */
protected static String encrypt(String... args) throws Exception {
    final int iterations = args.length == 2 ? Integer.parseInt(args[1]) : DEFAULT_ITERATIONS;
    final EncryptionReplacer replacer = new EncryptionReplacer();
    final String xmlPath = System.getProperty("hazelcast.config");
    final Properties properties = xmlPath == null
            ? System.getProperties()
            : loadPropertiesFromConfig(new FileInputStream(xmlPath));
    replacer.init(properties);
    return "$" + replacer.getPrefix() + "{" + replacer.encrypt(args[0], iterations) + "}";
}
// With a valid config file in place, encrypt must produce a well-formed $ENC{...}
// replacement variable.
@Test
public void testGenerateEncrypted() throws Exception {
    assumeDefaultAlgorithmsSupported();
    String xml = "<hazelcast xmlns=\"http://www.hazelcast.com/schema/config\">\n"
        + XML_DEFAULT_CONFIG
        + "</hazelcast>";
    File configFile = createFileWithString(xml);
    hazelcastConfigProperty.setOrClearProperty(configFile.getAbsolutePath());
    String encrypted = encrypt("test");
    assertThat(encrypted)
        .startsWith("$ENC{")
        .endsWith("}");
}
/**
 * Builds gauges for every JVM buffer pool (direct/mapped) attribute exposed via JMX.
 *
 * <p>Pools whose MBean is not registered (e.g. on very old JVMs) are skipped silently
 * with a debug log.
 *
 * @return an unmodifiable map of metric name to JMX attribute gauge
 */
@Override
public Map<String, Metric> getMetrics() {
    final Map<String, Metric> gauges = new HashMap<>();
    for (String pool : POOLS) {
        for (int i = 0; i < ATTRIBUTES.length; i++) {
            try {
                final ObjectName objectName = new ObjectName("java.nio:type=BufferPool,name=" + pool);
                // Probe for the MBean first; absence throws and skips this gauge.
                mBeanServer.getMBeanInfo(objectName);
                gauges.put(name(pool, NAMES[i]), new JmxAttributeGauge(mBeanServer, objectName, ATTRIBUTES[i]));
            } catch (JMException ignored) {
                LOGGER.debug("Unable to load buffer pool MBeans, possibly running on Java 6");
            }
        }
    }
    return Collections.unmodifiableMap(gauges);
}
// The metric set must expose "mapped.capacity" as a gauge backed by the mapped pool's
// TotalCapacity JMX attribute.
@Test
public void includesAGaugeForMappedCapacity() throws Exception {
    final Gauge gauge = (Gauge) buffers.getMetrics().get("mapped.capacity");
    when(mBeanServer.getAttribute(mapped, "TotalCapacity")).thenReturn(100);
    assertThat(gauge.getValue())
        .isEqualTo(100);
}
/**
 * Creates a side-input reader scoped to the requested views.
 *
 * @param newContainedViews the views the reader must serve; every view must already be
 *     known to this container
 * @return a reader over exactly the requested views
 * @throws IllegalArgumentException if any requested view is unknown to this container
 */
public ReadyCheckingSideInputReader createReaderForViews(
    Collection<PCollectionView<?>> newContainedViews) {
    if (containedViews.containsAll(newContainedViews)) {
        return new SideInputContainerSideInputReader(newContainedViews);
    }
    // Report precisely which views are unknown.
    Set<PCollectionView<?>> currentlyContained = ImmutableSet.copyOf(containedViews);
    Set<PCollectionView<?>> newRequested = ImmutableSet.copyOf(newContainedViews);
    throw new IllegalArgumentException(
        "Can't create a SideInputReader with unknown views "
            + Sets.difference(newRequested, currentlyContained));
}
// Once the window's callback fires with no elements written, the pending map view must
// resolve to an empty map rather than blocking.
@Test
public void finishOnPendingViewsSetsEmptyElements() throws Exception {
    immediatelyInvokeCallback(mapView, SECOND_WINDOW);
    Future<Map<String, Integer>> mapFuture =
        getFutureOfView(
            container.createReaderForViews(ImmutableList.of(mapView)), mapView, SECOND_WINDOW);
    assertThat(mapFuture.get().isEmpty(), is(true));
}
public SpanCustomizer currentSpanCustomizer() { // note: we don't need to decorate the context for propagation as it is only used for toString TraceContext context = currentTraceContext.get(); if (context == null || isNoop(context)) return NoopSpanCustomizer.INSTANCE; return new SpanCustomizerShield(toSpan(context)); }
// Without a span in scope, the tracer must hand back the shared no-op customizer.
@Test
void currentSpanCustomizer_defaultsToNoop() {
    assertThat(tracer.currentSpanCustomizer())
        .isSameAs(NoopSpanCustomizer.INSTANCE);
}
/**
 * Returns a transform that views a keyed PCollection as a multimap side input
 * (non-unique keys; the {@code false} flag selects the non-singleton-value variant).
 *
 * @param <K> the key type
 * @param <V> the value type
 * @return the multimap view transform
 */
public static <K, V> AsMultimap<K, V> asMultimap() {
    return new AsMultimap<>(false);
}
// A multimap side input must be immutable inside the DoFn: clear/put/remove/putAll all
// throw UnsupportedOperationException, while reads still work.
@Test
@Category(ValidatesRunner.class)
public void testMultimapSideInputIsImmutable() {
    final PCollectionView<Map<String, Iterable<Integer>>> view =
        pipeline.apply("CreateSideInput", Create.of(KV.of("a", 1))).apply(View.asMultimap());
    PCollection<KV<String, Integer>> output =
        pipeline
            .apply("CreateMainInput", Create.of("apple"))
            .apply(
                "OutputSideInputs",
                ParDo.of(
                        new DoFn<String, KV<String, Integer>>() {
                            @ProcessElement
                            public void processElement(ProcessContext c) {
                                try {
                                    c.sideInput(view).clear();
                                    fail("Expected UnsupportedOperationException on clear()");
                                } catch (UnsupportedOperationException expected) {
                                }
                                try {
                                    c.sideInput(view).put("c", ImmutableList.of(3));
                                    fail("Expected UnsupportedOperationException on put()");
                                } catch (UnsupportedOperationException expected) {
                                }
                                try {
                                    c.sideInput(view).remove("c");
                                    fail("Expected UnsupportedOperationException on remove()");
                                } catch (UnsupportedOperationException expected) {
                                }
                                try {
                                    c.sideInput(view).putAll(new HashMap<>());
                                    fail("Expected UnsupportedOperationException on putAll()");
                                } catch (UnsupportedOperationException expected) {
                                }
                                for (Integer v : c.sideInput(view).get(c.element().substring(0, 1))) {
                                    c.output(KV.of(c.element(), v));
                                }
                            }
                        })
                    .withSideInputs(view));
    // Pass at least one value through to guarantee that DoFn executes.
    PAssert.that(output).containsInAnyOrder(KV.of("apple", 1));
    pipeline.run();
}
/**
 * Registers one meter per log level (plus an aggregate "all" meter) under this
 * appender's name, then starts the appender.
 */
@Override
public void start() {
    final String base = getName();
    this.all = registry.meter(name(base, "all"));
    this.trace = registry.meter(name(base, "trace"));
    this.debug = registry.meter(name(base, "debug"));
    this.info = registry.meter(name(base, "info"));
    this.warn = registry.meter(name(base, "warn"));
    this.error = registry.meter(name(base, "error"));
    // Only mark the appender started once all meters are in place.
    super.start();
}
// When the registry-name system property is set, the appender must attach to that
// shared registry and count events there.
@Test
public void usesRegistryFromProperty() {
    SharedMetricRegistries.add("something_else", registry);
    System.setProperty(InstrumentedAppender.REGISTRY_PROPERTY_NAME, "something_else");
    final InstrumentedAppender shared = new InstrumentedAppender();
    shared.start();
    when(event.getLevel()).thenReturn(Level.INFO);
    shared.doAppend(event);
    assertThat(SharedMetricRegistries.names()).contains("something_else");
    assertThat(registry.meter(METRIC_NAME_PREFIX + ".info").getCount())
        .isEqualTo(1);
}
/**
 * Decides whether a wanted-state transition for a node is allowed.
 *
 * <p>Guard order matters: FORCE bypasses all checks; a bootstrapping master refuses
 * everything; only SAFE is otherwise implemented, and only for storage nodes. A
 * no-op change short-circuits to "already set". The remaining per-target-state checks
 * are delegated by the requested state.
 *
 * @param node the node whose state is being changed
 * @param clusterState the current published cluster state
 * @param condition the requested safety condition (FORCE/SAFE/...)
 * @param oldWantedState the node's current wanted state
 * @param newWantedState the requested wanted state
 * @return allow/disallow/already-set result with a reason on rejection
 */
public Result evaluateTransition(
        Node node, ClusterState clusterState, SetUnitStateRequest.Condition condition,
        NodeState oldWantedState, NodeState newWantedState) {
    if (condition == FORCE) return allow();
    if (inMoratorium) return disallow("Master cluster controller is bootstrapping and in moratorium");
    if (condition != SAFE) return disallow("Condition not implemented: " + condition.name());
    if (node.getType() != STORAGE) return disallow("Safe-set of node state is only supported for storage nodes! " +
            "Requested node type: " + node.getType().toString());
    StorageNodeInfo nodeInfo = clusterInfo.getStorageNodeInfo(node.getIndex());
    if (nodeInfo == null) return disallow("Unknown node " + node);
    if (noChanges(oldWantedState, newWantedState)) return alreadySet();
    return switch (newWantedState.getState()) {
        case UP -> canSetStateUp(nodeInfo, oldWantedState);
        case MAINTENANCE -> canSetStateMaintenanceTemporarily(nodeInfo, clusterState, newWantedState.getDescription());
        case DOWN -> canSetStateDownPermanently(nodeInfo, clusterState, newWantedState.getDescription());
        default -> disallow("Destination node state unsupported in safe mode: " + newWantedState);
    };
}
// End-to-end safety scenario: with maxNumberOfGroupsAllowedToBeDown = 2, storage
// nodes may be put into maintenance in at most two distinct groups at a time.
// The test is stateful: each step builds on the wanted states set by the previous one.
@Test
void testMaintenanceAllowedFor2Of4Groups8Nodes() {
    int maxNumberOfGroupsAllowedToBeDown = 2;
    // 4 groups with 2 nodes in each group
    var cluster = createCluster(8, 4, maxNumberOfGroupsAllowedToBeDown);
    setAllNodesUp(cluster, HostInfo.createHostInfo(createDistributorHostInfo(4, 5, 6)));
    var nodeStateChangeChecker = createChangeChecker(cluster);

    // All nodes up, set a storage node in group 0 to maintenance
    {
        ClusterState clusterState = defaultAllUpClusterState(8);
        int nodeIndex = 0;
        checkSettingToMaintenanceIsAllowed(nodeIndex, nodeStateChangeChecker, clusterState);
        setStorageNodeWantedStateToMaintenance(cluster, nodeIndex);
    }

    // 1 Node in group 0 in maintenance, try to set node 1 in group 0 to maintenance
    {
        ClusterState clusterState = clusterState(String.format("version:%d distributor:8 .0.s:d storage:8 .0.s:m", currentClusterStateVersion));
        int nodeIndex = 1;
        checkSettingToMaintenanceIsAllowed(nodeIndex, nodeStateChangeChecker, clusterState);
        setStorageNodeWantedStateToMaintenance(cluster, nodeIndex);
    }

    // 2 nodes in group 0 in maintenance, try to set storage node 2 in group 1 to maintenance
    {
        ClusterState clusterState = clusterState(String.format("version:%d distributor:8 storage:8 .0.s:m .1.s:m", currentClusterStateVersion));
        int nodeIndex = 2;
        checkSettingToMaintenanceIsAllowed(nodeIndex, nodeStateChangeChecker, clusterState);
        setStorageNodeWantedStateToMaintenance(cluster, nodeIndex);
    }

    // 2 nodes in group 0 and 1 in group 1 in maintenance, try to set storage node 4 in group 2 to maintenance, should fail (different group)
    {
        ClusterState clusterState = clusterState(String.format("version:%d distributor:8 storage:8 .0.s:m .1.s:m .2.s:m", currentClusterStateVersion));
        int nodeIndex = 4;
        Node node = new Node(STORAGE, nodeIndex);
        Result result = nodeStateChangeChecker.evaluateTransition(node, clusterState, SAFE, UP_NODE_STATE, MAINTENANCE_NODE_STATE);
        assertFalse(result.allowed(), result.toString());
        assertFalse(result.isAlreadySet());
        assertEquals("At most 2 groups can have wanted state: [0, 1]", result.reason());
    }

    // 2 nodes in group 0 and 1 in group 1 in maintenance, try to set storage node 3 in group 1 to maintenance
    {
        ClusterState clusterState = clusterState(String.format("version:%d distributor:8 storage:8 .0.s:m .1.s:m .2.s:m", currentClusterStateVersion));
        int nodeIndex = 3;
        checkSettingToMaintenanceIsAllowed(nodeIndex, nodeStateChangeChecker, clusterState);
        setStorageNodeWantedStateToMaintenance(cluster, nodeIndex);
    }

    // 2 nodes in group 0 and 2 nodes in group 1 in maintenance, try to set storage node 4 in group 2 to maintenance, should fail
    {
        ClusterState clusterState = clusterState(String.format("version:%d distributor:8 storage:8 .0.s:m .1.s:m .2.s:m .3.s:m", currentClusterStateVersion));
        int nodeIndex = 4;
        Node node = new Node(STORAGE, nodeIndex);
        Result result = nodeStateChangeChecker.evaluateTransition(node, clusterState, SAFE, UP_NODE_STATE, MAINTENANCE_NODE_STATE);
        assertFalse(result.allowed(), result.toString());
        assertFalse(result.isAlreadySet());
        assertEquals("At most 2 groups can have wanted state: [0, 1]", result.reason());
    }

    // 2 nodes in group 0 in maintenance, storage node 3 in group 1 is in maintenance with another description
    // (set in maintenance by operator), try to set storage node 2 in group 1 to maintenance, should be allowed
    {
        ClusterState clusterState = clusterState(String.format("version:%d distributor:8 storage:8 .0.s:m .1.s:m .3.s:m", currentClusterStateVersion));
        setStorageNodeWantedState(cluster, 3, MAINTENANCE, "Maintenance, set by operator"); // Set to another description
        setStorageNodeWantedState(cluster, 2, UP, ""); // Set back to UP, want to set this to maintenance again
        int nodeIndex = 2;
        Node node = new Node(STORAGE, nodeIndex);
        Result result = nodeStateChangeChecker.evaluateTransition(node, clusterState, SAFE, UP_NODE_STATE, MAINTENANCE_NODE_STATE);
        assertTrue(result.allowed(), result.toString());
        assertFalse(result.isAlreadySet());
    }

    // 2 nodes in group 0 up again but buckets not in sync and 2 nodes in group 1 in maintenance,
    // try to set storage node 4 in group 2 to maintenance
    {
        setStorageNodeWantedState(cluster, 0, MAINTENANCE, "Orchestrator");
        setStorageNodeWantedState(cluster, 1, MAINTENANCE, "Orchestrator");
        setStorageNodeWantedState(cluster, 2, UP, ""); // Set up again
        setStorageNodeWantedState(cluster, 3, UP, ""); // Set up again
        ClusterState clusterState = clusterState(String.format("version:%d distributor:8 storage:8 .0.s:m .1.s:m", currentClusterStateVersion));
        // Set bucket in sync to 1 for node 2 in group 1
        var distributorHostInfo = createDistributorHostInfo(1, 2, 1);
        cluster.clusterInfo().getDistributorNodeInfo(0).setHostInfo(HostInfo.createHostInfo(distributorHostInfo));
        cluster.clusterInfo().getDistributorNodeInfo(1).setHostInfo(HostInfo.createHostInfo(distributorHostInfo));
        cluster.clusterInfo().getDistributorNodeInfo(2).setHostInfo(HostInfo.createHostInfo(distributorHostInfo));
        int nodeIndex = 2;
        Node node = new Node(STORAGE, nodeIndex);
        Result result = nodeStateChangeChecker.evaluateTransition(node, clusterState, SAFE, UP_NODE_STATE, MAINTENANCE_NODE_STATE);
        assertFalse(result.allowed(), result.toString());
        assertFalse(result.isAlreadySet());
        assertEquals("Distributor 0 says storage node 0 has buckets with redundancy as low as 1, but we require at least 4", result.reason());
    }
}
static String convertEnvVars(String input){ // check for any non-alphanumeric chars and convert to underscore // convert to upper case if (input == null) { return null; } return input.replaceAll("[^A-Za-z0-9]", "_").toUpperCase(); }
@Test
public void testConvertEnvVarsUsingDotInValue() {
    // A dot is not alphanumeric, so it must be replaced with an underscore
    // and the whole value upper-cased.
    String converted = ConfigInjection.convertEnvVars("server.environment");
    Assert.assertEquals("SERVER_ENVIRONMENT", converted);
}
/**
 * Builds an {@code Ip4Address} from a 32-bit integer, interpreting the
 * value in network byte order (most significant byte first).
 */
public static Ip4Address valueOf(int value) {
    byte[] octets = new byte[INET_BYTE_LENGTH];
    octets[0] = (byte) (value >>> 24);
    octets[1] = (byte) (value >>> 16);
    octets[2] = (byte) (value >>> 8);
    octets[3] = (byte) value;
    return new Ip4Address(octets);
}
@Test
public void testEqualityIPv4() {
    // Each group holds addresses that must equal each other and differ from
    // every address in the other groups; group order is irrelevant.
    new EqualsTester()
        .addEqualityGroup(Ip4Address.valueOf("0.0.0.0"),
                          Ip4Address.valueOf("0.0.0.0"))
        .addEqualityGroup(Ip4Address.valueOf("1.2.3.4"),
                          Ip4Address.valueOf("1.2.3.4"))
        .addEqualityGroup(Ip4Address.valueOf("1.2.3.5"),
                          Ip4Address.valueOf("1.2.3.5"))
        .addEqualityGroup(Ip4Address.valueOf("255.255.255.255"),
                          Ip4Address.valueOf("255.255.255.255"))
        .testEquals();
}
/** Static factory for a {@code Write} transform with no options configured yet. */
public static <K, V> Write<K, V> write() {
    final AutoValue_CdapIO_Write.Builder<K, V> builder = new AutoValue_CdapIO_Write.Builder<>();
    return builder.build();
}
@Test
public void testWriteExpandingFailsMissingCdapPluginClass() {
    // Build an empty KV collection as input for the transform.
    PBegin begin = PBegin.in(TestPipeline.create());
    PCollection<KV<String, String>> input =
        Create.empty(KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of())).expand(begin);

    // Without a CDAP plugin class configured, expanding the Write must fail.
    CdapIO.Write<String, String> write = CdapIO.write();
    assertThrows(IllegalStateException.class, () -> write.expand(input));
}
/**
 * Builds the processing-log message for this deserialization error: a struct
 * in the shared processing-log schema with the type id and the error payload.
 */
@Override
public SchemaAndValue get(final ProcessingLogConfig config) {
    final Struct payload = new Struct(ProcessingLogMessageSchema.PROCESSING_LOG_SCHEMA);
    payload.put(ProcessingLogMessageSchema.TYPE, MessageType.DESERIALIZATION_ERROR.getTypeId());
    payload.put(ProcessingLogMessageSchema.DESERIALIZATION_ERROR, deserializationError(config));
    return new SchemaAndValue(ProcessingLogMessageSchema.PROCESSING_LOG_SCHEMA, payload);
}
// Verifies the full shape of a deserialization-error log message: envelope
// schema, type id, error fields, base64-encoded record, topic, and that all
// other envelope fields stay unset.
@Test
public void shouldBuildDeserializationError() {
    // Given: an error carrying the failed record and its source topic
    final DeserializationError deserError = new DeserializationError(
        error,
        Optional.of(record),
        "topic",
        false
    );

    // When:
    final SchemaAndValue msg = deserError.get(config);

    // Then: the envelope uses the shared processing-log schema
    final Schema schema = msg.schema();
    assertThat(schema, equalTo(PROCESSING_LOG_SCHEMA));
    final Struct struct = (Struct) msg.value();
    assertThat(
        struct.get(ProcessingLogMessageSchema.TYPE),
        equalTo(MessageType.DESERIALIZATION_ERROR.getTypeId()));
    final Struct deserializationError = struct.getStruct(DESERIALIZATION_ERROR);
    assertThat(
        deserializationError.get(DESERIALIZATION_ERROR_FIELD_TARGET),
        equalTo("value")
    );
    assertThat(
        deserializationError.get(DESERIALIZATION_ERROR_FIELD_MESSAGE),
        equalTo(error.getMessage())
    );
    assertThat(
        deserializationError.get(DESERIALIZATION_ERROR_FIELD_CAUSE),
        equalTo(causeList)
    );
    // The raw record is embedded base64-encoded
    assertThat(
        deserializationError.get(DESERIALIZATION_ERROR_FIELD_RECORD_B64),
        equalTo(Base64.getEncoder().encodeToString(record))
    );
    assertThat(
        deserializationError.get(DESERIALIZATION_ERROR_FIELD_TOPIC),
        equalTo("topic")
    );
    // Every remaining envelope field must be null
    schema.fields().forEach(
        f -> {
            if (!ImmutableList.of(TYPE, DESERIALIZATION_ERROR).contains(f.name())) {
                assertThat(struct.get(f), is(nullValue()));
            }
        }
    );
}
/** Looks up a single post by its id; the Mono completes empty when no post exists. */
@GetMapping(value = "/{id}")
public Mono<Post> get(@PathVariable(value = "id") Long id) {
    return posts.findById(id);
}
@Test
public void getAllPostsWillBeOk() throws Exception {
    // Listing the posts endpoint should report the two seeded entries.
    this.rest.get()
        .uri("/posts")
        .accept(APPLICATION_JSON)
        .exchange()
        .expectBody()
        .jsonPath("$.length()")
        .isEqualTo(2);
}
public static String[] splitString( String string, String separator ) { /* * 0123456 Example a;b;c;d --> new String[] { a, b, c, d } */ // System.out.println("splitString ["+path+"] using ["+separator+"]"); List<String> list = new ArrayList<>(); if ( string == null || string.length() == 0 ) { return new String[] {}; } int sepLen = separator.length(); int from = 0; int end = string.length() - sepLen + 1; for ( int i = from; i < end; i += sepLen ) { if ( string.substring( i, i + sepLen ).equalsIgnoreCase( separator ) ) { // OK, we found a separator, the string to add to the list // is [from, i[ list.add( nullToEmpty( string.substring( from, i ) ) ); from = i + sepLen; } } // Wait, if the string didn't end with a separator, we still have information at the end of the string... // In our example that would be "d"... if ( from + sepLen <= string.length() ) { list.add( nullToEmpty( string.substring( from, string.length() ) ) ); } return list.toArray( new String[list.size()] ); }
@Test
public void testSplitStringWithDelimiterAndEnclosureNullMultiChar() {
    // Splitting on a multi-character delimiter with no enclosure.
    String mask = "Hello%s world";
    String[] expectedChunks = {"Hello", " world"};
    String input = String.format( mask, DELIMITER2 );
    String[] actual = Const.splitString( input, DELIMITER2, null );
    assertSplit( actual, expectedChunks );
}
/**
 * Registers the RocksDB value providers for one segment store. The first
 * store registered for this task also registers the recorder itself with the
 * recording trigger; registering the same store twice is an internal error.
 */
public void addValueProviders(final String segmentName,
                              final RocksDB db,
                              final Cache cache,
                              final Statistics statistics) {
    if (storeToValueProviders.isEmpty()) {
        logger.debug("Adding metrics recorder of task {} to metrics recording trigger", taskId);
        streamsMetrics.rocksDBMetricsRecordingTrigger().addMetricsRecorder(this);
    } else if (storeToValueProviders.containsKey(segmentName)) {
        final String message = "Value providers for store " + segmentName + " of task " + taskId +
            " has been already added. This is a bug in Kafka Streams. " +
            "Please open a bug report under https://issues.apache.org/jira/projects/KAFKA/issues";
        throw new IllegalStateException(message);
    }
    // Consistency checks must run before the providers are stored.
    verifyDbAndCacheAndStatistics(segmentName, db, cache, statistics);
    logger.debug("Adding value providers for store {} of task {}", segmentName, taskId);
    storeToValueProviders.put(segmentName, new DbAndCacheAndStatistics(db, cache, statistics));
}
@Test
public void shouldThrowIfStatisticsToAddIsNotNullButExistingStatisticsAreNull() {
    // First segment registers without statistics ...
    recorder.addValueProviders(SEGMENT_STORE_NAME_1, dbToAdd1, cacheToAdd1, null);

    // ... so adding a second segment WITH statistics must be rejected.
    final Throwable thrown = assertThrows(
        IllegalStateException.class,
        () -> recorder.addValueProviders(SEGMENT_STORE_NAME_2, dbToAdd2, cacheToAdd2, statisticsToAdd2)
    );
    assertThat(
        thrown.getMessage(),
        is("Statistics for segment " + SEGMENT_STORE_NAME_2 + " of task " + TASK_ID1
            + " is not null although the statistics of another segment in this metrics recorder is null. "
            + "This is a bug in Kafka Streams. "
            + "Please open a bug report under https://issues.apache.org/jira/projects/KAFKA/issues")
    );
}
/**
 * Deep-copies an {@code IOReadableWritable} by serializing it into an
 * in-memory buffer and reading it back into a freshly instantiated object
 * of the same runtime class. Returns {@code null} for {@code null} input.
 */
public static <T extends IOReadableWritable> T createCopyWritable(T original) throws IOException {
    if (original == null) {
        return null;
    }

    // Serialize the original into an in-memory buffer.
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    try (DataOutputViewStreamWrapper out = new DataOutputViewStreamWrapper(buffer)) {
        original.write(out);
    }

    // Deserialize the buffer into a new instance of the same class.
    try (DataInputViewStreamWrapper in =
             new DataInputViewStreamWrapper(new ByteArrayInputStream(buffer.toByteArray()))) {
        @SuppressWarnings("unchecked")
        T copy = (T) instantiate(original.getClass());
        copy.read(in);
        return copy;
    }
}
@Test
void testCopyWritable() throws Exception {
    WritableType original = new WritableType();
    WritableType copy = InstantiationUtil.createCopyWritable(original);

    // The copy must be a distinct object that still compares equal.
    assertThat(copy).isNotSameAs(original);
    assertThat(copy).isEqualTo(original);
}
@VisibleForTesting MailTemplateDO validateMailTemplate(String templateCode) { // 获得邮件模板。考虑到效率,从缓存中获取 MailTemplateDO template = mailTemplateService.getMailTemplateByCodeFromCache(templateCode); // 邮件模板不存在 if (template == null) { throw exception(MAIL_TEMPLATE_NOT_EXISTS); } return template; }
@Test public void testValidateMailTemplateValid_notExists() { // 准备参数 String templateCode = RandomUtils.randomString(); // mock 方法 // 调用,并断言异常 assertServiceException(() -> mailSendService.validateMailTemplate(templateCode), MAIL_TEMPLATE_NOT_EXISTS); }
/**
 * Maps a DB2 column type definition to a SeaTunnel Column. Copies over name,
 * nullability, default value and comment, then derives the SeaTunnel data
 * type (and length / precision / scale where applicable) from the DB2 type.
 *
 * Throws a convert-to-SeaTunnel-type error for DB2 types with no mapping.
 *
 * NOTE(review): toUpperCase() uses the default locale; confirm whether
 * Locale.ROOT should be used to make type matching locale-independent.
 */
@Override
public Column convert(BasicTypeDefine typeDefine) {
    PhysicalColumn.PhysicalColumnBuilder builder =
        PhysicalColumn.builder()
            .name(typeDefine.getName())
            .sourceType(typeDefine.getColumnType())
            .nullable(typeDefine.isNullable())
            .defaultValue(typeDefine.getDefaultValue())
            .comment(typeDefine.getComment());
    String db2Type = typeDefine.getDataType().toUpperCase();
    switch (db2Type) {
        // --- boolean and numeric types ---
        case DB2_BOOLEAN:
            builder.sourceType(DB2_BOOLEAN);
            builder.dataType(BasicType.BOOLEAN_TYPE);
            break;
        case DB2_SMALLINT:
            builder.sourceType(DB2_SMALLINT);
            builder.dataType(BasicType.SHORT_TYPE);
            break;
        case DB2_INT:
        case DB2_INTEGER:
            builder.sourceType(DB2_INT);
            builder.dataType(BasicType.INT_TYPE);
            break;
        case DB2_BIGINT:
            builder.sourceType(DB2_BIGINT);
            builder.dataType(BasicType.LONG_TYPE);
            break;
        case DB2_REAL:
            builder.sourceType(DB2_REAL);
            builder.dataType(BasicType.FLOAT_TYPE);
            break;
        case DB2_DOUBLE:
            builder.sourceType(DB2_DOUBLE);
            builder.dataType(BasicType.DOUBLE_TYPE);
            break;
        case DB2_DECFLOAT:
            // Decimal floating point is mapped to a plain double.
            builder.sourceType(DB2_DECFLOAT);
            builder.dataType(BasicType.DOUBLE_TYPE);
            break;
        case DB2_DECIMAL:
            // Precision and scale are carried into both the source type string
            // and the SeaTunnel DecimalType.
            builder.sourceType(
                String.format(
                    "%s(%s,%s)",
                    DB2_DECIMAL, typeDefine.getPrecision(), typeDefine.getScale()));
            builder.dataType(
                new DecimalType(
                    Math.toIntExact(typeDefine.getPrecision()), typeDefine.getScale()));
            builder.columnLength(typeDefine.getPrecision());
            builder.scale(typeDefine.getScale());
            break;
        // --- character types ---
        case DB2_CHARACTER:
        case DB2_CHAR:
            builder.sourceType(String.format("%s(%d)", DB2_CHAR, typeDefine.getLength()));
            // For char/varchar this length is in bytes
            builder.columnLength(typeDefine.getLength());
            builder.dataType(BasicType.STRING_TYPE);
            break;
        case DB2_VARCHAR:
            builder.sourceType(String.format("%s(%d)", DB2_VARCHAR, typeDefine.getLength()));
            builder.columnLength(typeDefine.getLength());
            builder.dataType(BasicType.STRING_TYPE);
            break;
        case DB2_LONG_VARCHAR:
            builder.sourceType(DB2_LONG_VARCHAR);
            // default length is 32700
            builder.columnLength(typeDefine.getLength());
            builder.dataType(BasicType.STRING_TYPE);
            break;
        case DB2_CLOB:
            builder.sourceType(String.format("%s(%d)", DB2_CLOB, typeDefine.getLength()));
            builder.columnLength(typeDefine.getLength());
            builder.dataType(BasicType.STRING_TYPE);
            break;
        // --- double-byte (graphic) types: character length is converted to a
        // byte length assuming up to 4 bytes per character ---
        case DB2_GRAPHIC:
            builder.sourceType(String.format("%s(%d)", DB2_GRAPHIC, typeDefine.getLength()));
            builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength()));
            builder.dataType(BasicType.STRING_TYPE);
            break;
        case DB2_VARGRAPHIC:
            builder.sourceType(String.format("%s(%d)", DB2_VARGRAPHIC, typeDefine.getLength()));
            builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength()));
            builder.dataType(BasicType.STRING_TYPE);
            break;
        case DB2_DBCLOB:
            builder.sourceType(String.format("%s(%d)", DB2_DBCLOB, typeDefine.getLength()));
            builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength()));
            builder.dataType(BasicType.STRING_TYPE);
            break;
        case DB2_XML:
            builder.sourceType(DB2_XML);
            builder.columnLength((long) Integer.MAX_VALUE);
            builder.dataType(BasicType.STRING_TYPE);
            break;
        // --- binary types ---
        case DB2_BINARY:
            builder.sourceType(String.format("%s(%d)", DB2_BINARY, typeDefine.getLength()));
            builder.columnLength(typeDefine.getLength());
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            break;
        case DB2_VARBINARY:
            builder.sourceType(String.format("%s(%d)", DB2_VARBINARY, typeDefine.getLength()));
            builder.columnLength(typeDefine.getLength());
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            break;
        case DB2_BLOB:
            builder.sourceType(String.format("%s(%d)", DB2_BLOB, typeDefine.getLength()));
            builder.columnLength(typeDefine.getLength());
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            break;
        // --- temporal types ---
        case DB2_DATE:
            builder.sourceType(DB2_DATE);
            builder.dataType(LocalTimeType.LOCAL_DATE_TYPE);
            break;
        case DB2_TIME:
            builder.sourceType(DB2_TIME);
            builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
            break;
        case DB2_TIMESTAMP:
            // Timestamp fractional-second precision is kept as the scale.
            builder.sourceType(String.format("%s(%d)", DB2_TIMESTAMP, typeDefine.getScale()));
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            builder.scale(typeDefine.getScale());
            break;
        default:
            throw CommonError.convertToSeaTunnelTypeError(
                DatabaseIdentifier.DB_2, db2Type, typeDefine.getName());
    }
    return builder.build();
}
@Test public void testConvertUnsupported() { BasicTypeDefine<Object> typeDefine = BasicTypeDefine.builder().name("test").columnType("aaa").dataType("aaa").build(); try { DB2TypeConverter.INSTANCE.convert(typeDefine); Assertions.fail(); } catch (SeaTunnelRuntimeException e) { // ignore } catch (Throwable e) { Assertions.fail(); } }
/**
 * Always rejects removal: this collection view does not support mutation.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public boolean remove(Object o) {
    throw new UnsupportedOperationException();
}
// The set's iterator must not support element removal.
@Test(expected = UnsupportedOperationException.class)
public void test_iteratorRemove() {
    Iterator<Integer> iterator = set.iterator();
    iterator.next();
    iterator.remove();
}
/**
 * Computes the initial restriction for a subscription partition: from the
 * offset returned by the partition's offset reader up to an open-ended
 * maximum.
 */
@GetInitialRestriction
public OffsetByteRange getInitialRestriction(@Element SubscriptionPartition subscriptionPartition) {
    final Offset startOffset = offsetReaderFactory.apply(subscriptionPartition).read();
    final OffsetRange range = new OffsetRange(startOffset.value(), Long.MAX_VALUE /* open interval */);
    return OffsetByteRange.of(range);
}
@Test
public void getInitialRestrictionReadFailure() {
    // A failure while reading the initial offset must surface as an ApiException.
    when(initialOffsetReader.read()).thenThrow(new CheckedApiException(Code.INTERNAL).underlying);

    assertThrows(ApiException.class, () -> sdf.getInitialRestriction(PARTITION));
}
@Override public Mono<Void> filter(ServerWebExchange exchange, GatewayFilterChain chain) { if (CollectionUtils.isEmpty(trafficStainers)) { return chain.filter(exchange); } // 1. get stained labels from request Map<String, String> stainedLabels = getStainedLabels(exchange); if (CollectionUtils.isEmpty(stainedLabels)) { return chain.filter(exchange); } // 2. put stained labels to metadata context ServerHttpRequest request = exchange.getRequest().mutate().headers((httpHeaders) -> { MetadataContext metadataContext = exchange.getAttribute(MetadataConstant.HeaderName.METADATA_CONTEXT); if (metadataContext == null) { metadataContext = MetadataContextHolder.get(); } Map<String, String> oldTransitiveMetadata = metadataContext.getTransitiveMetadata(); // append new transitive metadata Map<String, String> newTransitiveMetadata = new HashMap<>(oldTransitiveMetadata); newTransitiveMetadata.putAll(stainedLabels); metadataContext.setTransitiveMetadata(newTransitiveMetadata); }).build(); return chain.filter(exchange.mutate().request(request).build()); }
// Staining rule scenario: requests with query parameter uid=1000 get the label
// env=blue, which the filter must copy into the transitive metadata.
@Test
public void testWithTrafficStainers() {
    MetadataContext metadataContext = new MetadataContext();
    MetadataContextHolder.set(metadataContext);

    // Point the staining-rule manager at a mocked config file.
    RuleStainingProperties ruleStainingProperties = new RuleStainingProperties();
    ruleStainingProperties.setNamespace(testNamespace);
    ruleStainingProperties.setGroup(testGroup);
    ruleStainingProperties.setFileName(testFileName);

    ConfigFile configFile = Mockito.mock(ConfigFile.class);
    // Rule: if ${http.query.uid} EQUALS "1000", apply label env=blue.
    when(configFile.getContent()).thenReturn("{\n" +
            " \"rules\":[\n" +
            " {\n" +
            " \"conditions\":[\n" +
            " {\n" +
            " \"key\":\"${http.query.uid}\",\n" +
            " \"values\":[\"1000\"],\n" +
            " \"operation\":\"EQUALS\"\n" +
            " }\n" +
            " ],\n" +
            " \"labels\":[\n" +
            " {\n" +
            " \"key\":\"env\",\n" +
            " \"value\":\"blue\"\n" +
            " }\n" +
            " ]\n" +
            " }\n" +
            " ]\n" +
            "}");
    when(configFileService.getConfigFile(testNamespace, testGroup, testFileName)).thenReturn(configFile);

    StainingRuleManager stainingRuleManager = new StainingRuleManager(ruleStainingProperties, configFileService);
    RuleStainingExecutor ruleStainingExecutor = new RuleStainingExecutor();
    RuleTrafficStainer ruleTrafficStainer = new RuleTrafficStainer(stainingRuleManager, ruleStainingExecutor);

    TrafficStainingGatewayFilter filter = new TrafficStainingGatewayFilter(Collections.singletonList(ruleTrafficStainer));

    // Request matching the rule's condition.
    MockServerHttpRequest request = MockServerHttpRequest.get("/users")
            .queryParam("uid", "1000").build();
    MockServerWebExchange exchange = new MockServerWebExchange.Builder(request).build();
    filter.filter(exchange, chain);

    // The rule's label must now be present in the transitive metadata.
    Map<String, String> map = metadataContext.getTransitiveMetadata();

    assertThat(map).isNotNull();
    assertThat(map.size()).isEqualTo(1);
    assertThat(map.get("env")).isEqualTo("blue");
}
/**
 * Returns the configured header-rewrite rules.
 *
 * NOTE(review): this exposes the internal mutable map directly, so callers
 * could modify the configuration. Consider returning an unmodifiable view —
 * verify no caller relies on mutating it first.
 */
public Map<String, List<QueryHeaderRewriteRule>> getHeaderRewriteRules() {
    return headerRewriteRules;
}
@Test
public void testHeaderRewriteRules() {
    // JUnit's assertEquals takes the EXPECTED value first; the original call
    // had the arguments reversed, which produces misleading failure messages.
    Assert.assertNotNull(routerConfig.getHeaderRewriteRules());
    Assert.assertEquals(4, routerConfig.getHeaderRewriteRules().size());
}
public static int getWorstCaseEditDistance( int sourceLength, int targetLength, int changeCost, int openGapCost, int continueGapCost) { int maxLen = Math.max(sourceLength, targetLength); int minLen = Math.min(sourceLength, targetLength); // Compute maximum cost of changing one string into another. If the // lengths differ, you'll need maxLen - minLen insertions or deletions. int totChangeCost = scriptCost(openGapCost, continueGapCost, maxLen - minLen) + minLen * changeCost; // Another possibility is to just delete the entire source and insert the // target, and not do any changes. int blowAwayCost = scriptCost(openGapCost, continueGapCost, sourceLength) + scriptCost(openGapCost, continueGapCost, targetLength); return Math.min(totChangeCost, blowAwayCost); }
@Test
public void needlemanWunschEditDistanceWorstCase_matchesLevenschtein_withHugeGapCost() {
    // With prohibitively expensive gaps, the Needleman-Wunsch worst case
    // degenerates to the plain Levenshtein worst case.
    String source = "fooBar";
    String target = "bazQux";
    double levenshteinWorstCase =
        LevenshteinEditDistance.getWorstCaseEditDistance(source.length(), target.length());
    double needlemanWunschWorstCase =
        NeedlemanWunschEditDistance.getWorstCaseEditDistance(
            source.length(), target.length(), 1, 1000, 1000);
    assertThat(needlemanWunschWorstCase).isEqualTo(levenshteinWorstCase);
}
/**
 * Returns whether every record matching {@code pred} can be dropped based on
 * the column chunk dictionaries alone.
 *
 * @throws NullPointerException if {@code pred} or {@code columns} is null
 */
public static boolean canDrop(
    FilterPredicate pred, List<ColumnChunkMetaData> columns, DictionaryPageReadStore dictionaries) {
    // Fix: the null-check messages previously read "cannnot".
    Objects.requireNonNull(pred, "pred cannot be null");
    Objects.requireNonNull(columns, "columns cannot be null");
    return pred.accept(new DictionaryFilter(columns, dictionaries));
}
@Test public void testLtFixed() throws Exception { BinaryColumn fixed = binaryColumn("fixed_field"); // Only V2 supports dictionary encoding for FIXED_LEN_BYTE_ARRAY values if (version == PARQUET_2_0) { assertTrue("Should drop: < lowest value", canDrop(lt(fixed, DECIMAL_VALUES[0]), ccmd, dictionaries)); } assertFalse("Should not drop: < 2nd lowest value", canDrop(lt(fixed, DECIMAL_VALUES[1]), ccmd, dictionaries)); }
/**
 * Looks up the binary protocol value codec registered for the given column type.
 *
 * @throws IllegalArgumentException if the column type has no registered codec
 */
public static MySQLBinaryProtocolValue getBinaryProtocolValue(final BinaryColumnType binaryColumnType) {
    // Single lookup instead of containsKey + get; registered values are the
    // codecs themselves, so a null result means "not registered".
    MySQLBinaryProtocolValue result = BINARY_PROTOCOL_VALUES.get(binaryColumnType);
    Preconditions.checkArgument(null != result,
        "Cannot find MySQL type '%s' in column type when process binary protocol value", binaryColumnType);
    return result;
}
// DECIMAL columns must resolve to the length-encoded string codec.
@Test
void assertGetBinaryProtocolValueWithMySQLTypeDecimal() {
    assertThat(MySQLBinaryProtocolValueFactory.getBinaryProtocolValue(MySQLBinaryColumnType.DECIMAL),
        instanceOf(MySQLStringLenencBinaryProtocolValue.class));
}
/**
 * Loosely compares two values by dispatching to the type-specific overloads
 * (Boolean, Number, Date, String, Collection, Map, Enum, array), falling back
 * to comparing the values' bean-property maps. Identity and equals() are
 * checked first; null only equals null.
 */
public static boolean compare(Object source, Object target) {
    if (source == target) {
        return true;
    }
    if (source == null || target == null) {
        return false;
    }
    if (source.equals(target)) {
        return true;
    }
    if (source instanceof Boolean) {
        return compare(((Boolean) source), target);
    }
    if (source instanceof Number) {
        return compare(((Number) source), target);
    }
    if (target instanceof Number) {
        return compare(((Number) target), source);
    }
    if (source instanceof Date) {
        return compare(((Date) source), target);
    }
    if (target instanceof Date) {
        return compare(((Date) target), source);
    }
    if (source instanceof String) {
        return compare(((String) source), target);
    }
    if (target instanceof String) {
        return compare(((String) target), source);
    }
    if (source instanceof Collection) {
        return compare(((Collection) source), target);
    }
    if (target instanceof Collection) {
        return compare(((Collection) target), source);
    }
    if (source instanceof Map) {
        return compare(((Map) source), target);
    }
    if (target instanceof Map) {
        return compare(((Map) target), source);
    }
    if (source.getClass().isEnum() || source instanceof Enum) {
        return compare(((Enum) source), target);
    }
    // Fix: this branch previously re-tested `source instanceof Enum`
    // (copy-paste), so a target that is an enum constant with a body (whose
    // class is an anonymous subclass, isEnum() == false) was never dispatched
    // to the enum comparison.
    if (target.getClass().isEnum() || target instanceof Enum) {
        return compare(((Enum) target), source);
    }
    // NOTE(review): these casts fail with ClassCastException for primitive
    // arrays (e.g. int[]) — confirm whether only Object[] inputs are expected.
    if (source.getClass().isArray()) {
        return compare(((Object[]) source), target);
    }
    if (target.getClass().isArray()) {
        return compare(((Object[]) target), source);
    }
    // Fall back to comparing bean-property maps.
    return compare(FastBeanCopier.copy(source, HashMap.class), FastBeanCopier.copy(target, HashMap.class));
}
// Null-handling contract for every overload: null never equals a non-null
// value, and null equals null.
@Test
public void nullTest() {
    Assert.assertFalse(CompareUtils.compare(1, null));
    Assert.assertFalse(CompareUtils.compare((Object) null, 1));
    Assert.assertTrue(CompareUtils.compare((Object) null, null));
    Assert.assertFalse(CompareUtils.compare((Number) null, 1));
    Assert.assertTrue(CompareUtils.compare((Number) null, null));
    Assert.assertFalse(CompareUtils.compare((Date) null, 1));
    Assert.assertTrue(CompareUtils.compare((Date) null, null));
    Assert.assertFalse(CompareUtils.compare((String) null, 1));
    Assert.assertTrue(CompareUtils.compare((String) null, null));
    Assert.assertFalse(CompareUtils.compare((Collection) null, 1));
    Assert.assertTrue(CompareUtils.compare((Collection) null, null));
    Assert.assertFalse(CompareUtils.compare((Map<?, ?>) null, 1));
    Assert.assertTrue(CompareUtils.compare((Map<?, ?>) null, null));
}
/**
 * Extracts the top-level fields of a JSON object into a map from field name
 * to serialized value. String values are stored unquoted; every other value
 * keeps its JSON text. Returns null for null input, non-objects, or
 * unparsable JSON.
 */
@Udf
public Map<String, String> records(@UdfParameter final String jsonObj) {
    if (jsonObj == null) {
        return null;
    }

    final JsonNode node = UdfJsonMapper.parseJson(jsonObj);
    if (node.isMissingNode() || !node.isObject()) {
        return null;
    }

    final Map<String, String> records = new HashMap<>(node.size());
    node.fieldNames().forEachRemaining(fieldName -> {
        final JsonNode fieldValue = node.get(fieldName);
        final String serialized =
            fieldValue instanceof TextNode ? fieldValue.textValue() : fieldValue.toString();
        records.put(fieldName, serialized);
    });
    return records;
}
@Test public void shouldExtractRecords() { // When final Map<String, String> result = udf.records("{\"a\": \"abc\", \"b\": { \"c\": \"a\" }, \"d\": 1}"); // Then: final Map<String, String> expected = new HashMap<String, String>() {{ put("a", "abc"); put("b", "{\"c\":\"a\"}"); put("d", "1"); }}; assertEquals(expected, result); }
/**
 * Returns the cached service-provider metadata resolver, re-preparing it
 * when {@code force} is {@code true}.
 *
 * NOTE(review): when called with {@code force == false} before any forced
 * call has happened, this returns whatever the field currently holds —
 * confirm callers guarantee an initial forced resolution. The field is also
 * read and written without synchronization; verify single-threaded use.
 *
 * @param force whether to (re)generate the metadata before returning
 */
@Override
public final MetadataResolver resolve(final boolean force) {
    if (force) {
        this.metadataResolver = prepareServiceProviderMetadata();
    }
    return this.metadataResolver;
}
@Test
public void resolveServiceProviderMetadataViaExistingClasspath() {
    // Resolving against a pre-existing metadata file on the classpath must succeed.
    val config = initializeConfiguration(new ClassPathResource("sample-sp-metadata.xml"), "target/keystore.jks");
    final SAML2MetadataResolver resolver = new SAML2ServiceProviderMetadataResolver(config);
    assertNotNull(resolver.resolve());
}
/**
 * Runs every known crash rule's pattern against the log and collects a
 * Result for each rule that matches.
 */
public static Set<Result> analyze(String log) {
    Set<Result> results = new HashSet<>();
    for (Rule rule : Rule.values()) {
        Matcher matcher = rule.pattern.matcher(log);
        if (matcher.find()) {
            results.add(new Result(rule, log, matcher));
        }
    }
    return results;
}

/**
 * @deprecated misspelled name kept for existing callers; use
 *             {@link #analyze(String)} instead.
 */
@Deprecated
public static Set<Result> anaylze(String log) {
    return analyze(log);
}
@Test
public void optifineRepeatInstallation() throws IOException {
    // The returned Result was assigned to an unused local; the call is kept
    // for its checking behavior (findResultByRule presumably fails when the
    // rule is absent — confirm against the helper) and the dead variable dropped.
    findResultByRule(
        CrashReportAnalyzer.anaylze(loadLog("/logs/optifine_repeat_installation.txt")),
        CrashReportAnalyzer.Rule.OPTIFINE_REPEAT_INSTALLATION);
}
/**
 * Builds the "System" section of the system-info protobuf, collecting server
 * identity, edition, statistics, authentication/provisioning information and
 * filesystem paths. Attribute insertion order is preserved.
 */
@Override
public ProtobufSystemInfo.Section toProtobuf() {
    ProtobufSystemInfo.Section.Builder section = ProtobufSystemInfo.Section.newBuilder();
    section.setName("System");

    setAttribute(section, "Server ID", server.getId());
    setAttribute(section, "Version", getVersion());
    setAttribute(section, "Edition", sonarRuntime.getEdition().getLabel());
    setAttribute(section, NCLOC.getName(), statisticsSupport.getLinesOfCode());
    setAttribute(section, "Container", containerSupport.isRunningInContainer());
    setAttribute(section, "External Users and Groups Provisioning",
        commonSystemInformation.getManagedInstanceProviderName());
    setAttribute(section, "External User Authentication",
        commonSystemInformation.getExternalUserAuthentication());
    addIfNotEmpty(section, "Accepted external identity providers",
        commonSystemInformation.getEnabledIdentityProviders());
    addIfNotEmpty(section, "External identity providers whose users are allowed to sign themselves up",
        commonSystemInformation.getAllowsToSignUpEnabledIdentityProviders());
    setAttribute(section, "High Availability", false);
    setAttribute(section, "Official Distribution", officialDistribution.check());
    setAttribute(section, "Force authentication", commonSystemInformation.getForceAuthentication());
    setAttribute(section, "Home Dir", config.get(PATH_HOME.getKey()).orElse(null));
    setAttribute(section, "Data Dir", config.get(PATH_DATA.getKey()).orElse(null));
    setAttribute(section, "Temp Dir", config.get(PATH_TEMP.getKey()).orElse(null));
    setAttribute(section, "Processors", Runtime.getRuntime().availableProcessors());

    return section.build();
}
@Test
public void toProtobuf_whenEnabledIdentityProviders_shouldWriteThem() {
    // Given a configured list of enabled identity providers...
    when(commonSystemInformation.getEnabledIdentityProviders())
        .thenReturn(List.of("Bitbucket, GitHub"));

    ProtobufSystemInfo.Section section = underTest.toProtobuf();

    // ...the section must expose it under the expected attribute name.
    assertThatAttributeIs(section, "Accepted external identity providers", "Bitbucket, GitHub");
}
/**
 * Executes the job via its background worker, then propagates a pending
 * thread interruption as an InterruptedException.
 *
 * NOTE(review): isInterrupted() leaves the interrupt flag set while also
 * throwing — confirm whether callers expect the flag to remain set, or
 * whether Thread.interrupted() (which clears it) should be used before
 * throwing, per the usual InterruptedException convention.
 */
@Override
public void run(Job job) throws Exception {
    getBackgroundJobWorker(job).run();
    if (Thread.currentThread().isInterrupted()) throw new InterruptedException();
}
@Test
void ifCodeThrowsInterruptedException_AnInterruptExceptionIsThrown() throws Exception {
    // Given a runner whose worker raises InterruptedException...
    final AbstractBackgroundJobRunner runner = getJobRunner();
    final Job job = aJobInProgress().build();
    doThrow(new InterruptedException()).when(worker).run();

    // ...running the job must propagate the interruption to the caller.
    assertThatThrownBy(() -> runner.run(job)).isInstanceOf(InterruptedException.class);
}