focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Refreshes the cached {@code workerUUIDs} field by snapshotting the UUIDs of
 * the workers currently known to {@code ceCeWorkerFactory}. Despite the name,
 * nothing is sent anywhere here — only the local cache is updated.
 */
@Override public void broadcastWorkerUUIDs() { Set<CeWorker> workers = ceCeWorkerFactory.getWorkers(); workerUUIDs = workers.stream().map(CeWorker::getUUID).collect(Collectors.toSet()); }
// Interaction test: broadcastWorkerUUIDs() must query the injected CeWorkerFactory;
// the resulting UUID set itself is not asserted.
@Test public void broadcastWorkerUUIDs_must_retrieve_from_ceworkerfactory() { CeWorkerFactory ceWorkerFactory = mock(CeWorkerFactory.class); StandaloneCeDistributedInformation ceCluster = new StandaloneCeDistributedInformation(ceWorkerFactory); ceCluster.broadcastWorkerUUIDs(); verify(ceWorkerFactory).getWorkers(); }
/**
 * Serializes this result to JSON, recursively dropping trailing rows until the
 * payload fits within {@code length} characters. Sets {@code truncationFlag}
 * whenever anything is dropped. When {@code rowData} is already empty but the
 * JSON still exceeds the limit, columns are cleared too and "{}" is returned
 * as a last resort (with a warning).
 *
 * @param length maximum allowed JSON length, in characters
 * @return JSON string no longer than {@code length} (best effort)
 * NOTE(review): termination relies on getRemoveLine() returning at least 1
 * whenever the payload is over the limit — confirm, otherwise the recursion
 * could loop.
 */
public String toTruncateJson(Long length) {
    String jsonStr = JSONUtil.toJsonStr(this);
    long overLength = jsonStr.length() - length;
    if (overLength <= 0) {
        return jsonStr;
    }
    this.truncationFlag = true;
    if (CollectionUtil.isEmpty(rowData)) {
        this.columns = Sets.newLinkedHashSet();
        String finalJsonStr = JSONUtil.toJsonStr(this);
        if (finalJsonStr.length() > length) {
            log.warn(
                "The row data and columns is empty, but still exceeds the length limit. "
                    + "Json: {}, length: {}",
                finalJsonStr,
                length);
            return "{}";
        }
        return finalJsonStr;
    }
    // Estimate the size of each row of data to determine how many rows should be removed.
    String lineJsonStr = JSONUtil.toJsonStr(rowData.get(rowData.size() - 1));
    int lineLength = lineJsonStr.length();
    int removeLine = getRemoveLine(overLength, lineLength, rowData.size());
    rowData = ListUtil.sub(rowData, 0, rowData.size() - removeLine);
    return toTruncateJson(length);
}
// For this fixture, truncating to 200 chars drops every row: the truncated JSON
// must equal the serialization of the same result with an empty rowData list.
@Test public void toTruncateJsonTest2() { SelectResult selectResult = prepareData(); String truncateJson = selectResult.toTruncateJson(200L); log.info("truncateJson: {}", truncateJson); selectResult.setRowData(Lists.newArrayList()); assertEquals(JSONUtil.toJsonStr(selectResult), truncateJson); }
/**
 * Resolves the menu ids visible to the current tenant and feeds them to the
 * given handler. No-op when the tenant feature is disabled. System tenants get
 * the full menu list; ordinary tenants get the menus of their tenant package.
 */
@Override
public void handleTenantMenu(TenantMenuHandler handler) {
    // Skip entirely when the tenant feature is disabled
    if (isTenantDisable()) {
        return;
    }
    // Resolve the current tenant, then the menus it is allowed to see
    TenantDO tenant = getTenant(TenantContextHolder.getRequiredTenantId());
    Set<Long> menuIds;
    if (isSystemTenant(tenant)) { // System tenant: full menu list
        menuIds = CollectionUtils.convertSet(menuService.getMenuList(), MenuDO::getId);
    } else {
        menuIds = tenantPackageService.getTenantPackage(tenant.getPackageId()).getMenuIds();
    }
    // Invoke the caller-supplied handler
    handler.handle(menuIds);
}
@Test // 普通租户的情况 public void testHandleTenantMenu_normal() { // 准备参数 TenantMenuHandler handler = mock(TenantMenuHandler.class); // mock 未禁用 when(tenantProperties.getEnable()).thenReturn(true); // mock 租户 TenantDO dbTenant = randomPojo(TenantDO.class, o -> o.setPackageId(200L)); tenantMapper.insert(dbTenant);// @Sql: 先插入出一条存在的数据 TenantContextHolder.setTenantId(dbTenant.getId()); // mock 菜单 when(tenantPackageService.getTenantPackage(eq(200L))).thenReturn(randomPojo(TenantPackageDO.class, o -> o.setMenuIds(asSet(100L, 101L)))); // 调用 tenantService.handleTenantMenu(handler); // 断言 verify(handler).handle(asSet(100L, 101L)); }
/**
 * Decides whether the predicate allows dropping the whole row group, based on
 * its column dictionaries (the decision logic lives in {@code DictionaryFilter},
 * invoked through the visitor pattern).
 *
 * @param pred non-null predicate to evaluate
 * @param columns non-null column chunk metadata of the row group
 * @param dictionaries dictionary page source for those columns
 * @return true when the row group can be skipped for this predicate
 * @throws NullPointerException if {@code pred} or {@code columns} is null
 */
public static boolean canDrop(
    FilterPredicate pred, List<ColumnChunkMetaData> columns, DictionaryPageReadStore dictionaries) {
  // Fix: NPE messages previously misspelled "cannot" as "cannnot".
  Objects.requireNonNull(pred, "pred cannot be null");
  Objects.requireNonNull(columns, "columns cannot be null");
  return pred.accept(new DictionaryFilter(columns, dictionaries));
}
// lt() against an int32 dictionary: "< min" matches nothing and can drop;
// "< min+1" and "< MAX_VALUE" can match existing values and must not drop.
@Test public void testLtInt() throws Exception { IntColumn i32 = intColumn("int32_field"); int lowest = Integer.MAX_VALUE; for (int value : intValues) { lowest = Math.min(lowest, value); } assertTrue("Should drop: < lowest value", canDrop(lt(i32, lowest), ccmd, dictionaries)); assertFalse("Should not drop: < (lowest value + 1)", canDrop(lt(i32, lowest + 1), ccmd, dictionaries)); assertFalse( "Should not drop: contains matching values", canDrop(lt(i32, Integer.MAX_VALUE), ccmd, dictionaries)); }
/**
 * Computes every source->target pair for which a herder must be created.
 * A pair is kept when its link is enabled (x->y.enabled=true) OR heartbeats
 * are enabled for it; the per-pair emit.heartbeats.enabled setting overrides
 * the global one (which defaults to EMIT_HEARTBEATS_ENABLED_DEFAULT).
 */
public List<SourceAndTarget> clusterPairs() {
    List<SourceAndTarget> pairs = new ArrayList<>();
    Set<String> clusters = clusters();
    Map<String, String> originalStrings = originalsStrings();
    boolean globalHeartbeatsEnabled = MirrorHeartbeatConfig.EMIT_HEARTBEATS_ENABLED_DEFAULT;
    if (originalStrings.containsKey(MirrorHeartbeatConfig.EMIT_HEARTBEATS_ENABLED)) {
        globalHeartbeatsEnabled = Boolean.parseBoolean(originalStrings.get(MirrorHeartbeatConfig.EMIT_HEARTBEATS_ENABLED));
    }
    for (String source : clusters) {
        for (String target : clusters) {
            if (!source.equals(target)) {
                String clusterPairConfigPrefix = source + "->" + target + ".";
                boolean clusterPairEnabled = Boolean.parseBoolean(originalStrings.get(clusterPairConfigPrefix + "enabled"));
                boolean clusterPairHeartbeatsEnabled = globalHeartbeatsEnabled;
                if (originalStrings.containsKey(clusterPairConfigPrefix + MirrorHeartbeatConfig.EMIT_HEARTBEATS_ENABLED)) {
                    clusterPairHeartbeatsEnabled = Boolean.parseBoolean(originalStrings.get(clusterPairConfigPrefix + MirrorHeartbeatConfig.EMIT_HEARTBEATS_ENABLED));
                }
                // By default, all source->target Herder combinations are created even if `x->y.enabled=false`
                // Unless `emit.heartbeats.enabled=false` or `x->y.emit.heartbeats.enabled=false`
                // Reason for this behavior: for a given replication flow A->B with heartbeats, 2 herders are required :
                // B->A for the MirrorHeartbeatConnector (emits heartbeats into A for monitoring replication health)
                // A->B for the MirrorSourceConnector (actual replication flow)
                if (clusterPairEnabled || clusterPairHeartbeatsEnabled) {
                    pairs.add(new SourceAndTarget(source, target));
                }
            }
        }
    }
    return pairs;
}
// With heartbeats globally disabled, only explicitly enabled links (a->b, a->c, a->d)
// may survive in clusterPairs().
@Test
public void testClusterPairsWithGloballyDisabledHeartbeats() {
    MirrorMakerConfig mirrorConfig = new MirrorMakerConfig(makeProps(
            "clusters", "a, b, c, d, e, f",
            "emit.heartbeats.enabled", "false",
            "a->b.enabled", "true",
            "a->c.enabled", "true",
            "a->d.enabled", "true",
            "a->e.enabled", "false",
            "a->f.enabled", "false"));
    List<SourceAndTarget> clusterPairs = mirrorConfig.clusterPairs();
    assertEquals(3, clusterPairs.size(),
            "clusterPairs count should match (x->y.enabled=true or x->y.emit.heartbeats.enabled=true) count");
    // Link b->a.enabled doesn't exist therefore it must not be in clusterPairs
    SourceAndTarget sourceAndTarget = new SourceAndTarget("b", "a");
    assertFalse(clusterPairs.contains(sourceAndTarget), "disabled/unset link x->y should not be in clusterPairs");
}
/**
 * Merges {@code paramsToMerge} into {@code params} in place (no-op when the
 * former is null). Literal MAP params merge recursively key-by-key using a
 * child MergeContext derived from the base param's mode; literal STRING_MAP
 * params merge via putAll (incoming entries win on key collisions); any other
 * param replaces the base entry wholesale. Final attribute resolution is
 * delegated to buildMergedParamDefinition in all three branches.
 */
public static void mergeParams( Map<String, ParamDefinition> params, Map<String, ParamDefinition> paramsToMerge, MergeContext context) { if (paramsToMerge == null) { return; } Stream.concat(params.keySet().stream(), paramsToMerge.keySet().stream()) .forEach( name -> { ParamDefinition paramToMerge = paramsToMerge.get(name); if (paramToMerge == null) { return; } if (paramToMerge.getType() == ParamType.MAP && paramToMerge.isLiteral()) { Map<String, ParamDefinition> baseMap = mapValueOrEmpty(params, name); Map<String, ParamDefinition> toMergeMap = mapValueOrEmpty(paramsToMerge, name); mergeParams( baseMap, toMergeMap, MergeContext.copyWithParentMode( context, params.getOrDefault(name, paramToMerge).getMode())); params.put( name, buildMergedParamDefinition( name, paramToMerge, params.get(name), context, baseMap)); } else if (paramToMerge.getType() == ParamType.STRING_MAP && paramToMerge.isLiteral()) { Map<String, String> baseMap = stringMapValueOrEmpty(params, name); Map<String, String> toMergeMap = stringMapValueOrEmpty(paramsToMerge, name); baseMap.putAll(toMergeMap); params.put( name, buildMergedParamDefinition( name, paramToMerge, params.get(name), context, baseMap)); } else { params.put( name, buildMergedParamDefinition( name, paramToMerge, params.get(name), context, paramToMerge.getValue())); } }); }
// internal_mode on the base param steers the merged mode under the system merge
// context: RESERVED -> CONSTANT, while OPTIONAL/PROVIDED/REQUIRED all collapse to MUTABLE.
@Test public void testMergeInternalModeToMode() throws JsonProcessingException { Map<String, ParamDefinition> allParams = parseParamDefMap("{'param1': {'type': 'LONG','value': 2, 'internal_mode': 'RESERVED'}}"); Map<String, ParamDefinition> paramsToMerge = parseParamDefMap("{'param1': {'type': 'LONG', 'value': 3}}"); ParamsMergeHelper.mergeParams(allParams, paramsToMerge, systemMergeContext); assertEquals(ParamMode.CONSTANT, allParams.get("param1").asLongParamDef().getMode()); for (String mode : Arrays.asList("OPTIONAL", "PROVIDED", "REQUIRED")) { allParams = parseParamDefMap( String.format( "{'param1': {'type': 'LONG','value': 2, 'internal_mode': '%s'}}", mode)); paramsToMerge = parseParamDefMap("{'param1': {'type': 'LONG', 'value': 3}}"); ParamsMergeHelper.mergeParams(allParams, paramsToMerge, systemMergeContext); assertEquals(ParamMode.MUTABLE, allParams.get("param1").asLongParamDef().getMode()); } }
/**
 * Entry point for record validation and offset assignment. When both source
 * and target are uncompressed, validates in place — converting first when the
 * record magic differs from {@code toMagic}; any compression on either side
 * takes the compressed path instead.
 */
public ValidationResult validateMessagesAndAssignOffsets(PrimitiveRef.LongRef offsetCounter,
                                                         MetricsRecorder metricsRecorder,
                                                         BufferSupplier bufferSupplier) {
    if (sourceCompressionType == CompressionType.NONE && targetCompression.type() == CompressionType.NONE) {
        // check the magic value
        if (!records.hasMatchingMagic(toMagic))
            return convertAndAssignOffsetsNonCompressed(offsetCounter, metricsRecorder);
        else
            // Do in-place validation, offset assignment and maybe set timestamp
            return assignOffsetsNonCompressed(offsetCounter, metricsRecorder);
    } else
        return validateMessagesAndAssignOffsetsCompressed(offsetCounter, metricsRecorder, bufferSupplier);
}
// Targeting zstd compression under metadata version IBP_2_0_IV1 must be rejected
// with UnsupportedCompressionTypeException.
@Test
public void testZStdCompressedWithUnavailableIBPVersion() {
    // The timestamps should be overwritten
    MemoryRecords records = createRecords(RecordBatch.MAGIC_VALUE_V2, 1234L, Compression.NONE);
    assertThrows(UnsupportedCompressionTypeException.class, () -> new LogValidator(
            records,
            topicPartition,
            time,
            CompressionType.NONE,
            Compression.zstd().build(),
            false,
            RecordBatch.MAGIC_VALUE_V2,
            TimestampType.LOG_APPEND_TIME,
            1000L,
            1000L,
            RecordBatch.NO_PARTITION_LEADER_EPOCH,
            AppendOrigin.CLIENT,
            MetadataVersion.IBP_2_0_IV1
        ).validateMessagesAndAssignOffsets(
            PrimitiveRef.ofLong(0L),
            metricsRecorder,
            RequestLocal.withThreadConfinedCaching().bufferSupplier()
        )
    );
}
/**
 * Convenience overload: delegates to the three-argument variant using the
 * options' files-to-stage (empty list when unset) and the configured Flink
 * conf directory.
 */
public static ExecutionEnvironment createBatchExecutionEnvironment(FlinkPipelineOptions options) { return createBatchExecutionEnvironment( options, MoreObjects.firstNonNull(options.getFilesToStage(), Collections.emptyList()), options.getFlinkConfDir()); }
// An IPv4 "host:port" master must be split into host and port; a bare IPv4 host
// must fall back to the default REST port.
@Test public void shouldSupportIPv4Streaming() { FlinkPipelineOptions options = getDefaultPipelineOptions(); options.setRunner(FlinkRunner.class); options.setFlinkMaster("192.168.1.1:1234"); ExecutionEnvironment bev = FlinkExecutionEnvironments.createBatchExecutionEnvironment(options); checkHostAndPort(bev, "192.168.1.1", 1234); options.setFlinkMaster("192.168.1.1"); bev = FlinkExecutionEnvironments.createBatchExecutionEnvironment(options); checkHostAndPort(bev, "192.168.1.1", RestOptions.PORT.defaultValue()); }
/** Returns the last applied metadata offset, as tracked by the loader's metrics. */
public long lastAppliedOffset() { return metrics.lastAppliedOffset(); }
// Loads a snapshot at offset 200 then a commit batch at 201, asserting that
// lastAppliedOffset() tracks each step, and that publishers are closed afterwards.
// NOTE(review): the `closed` flag is asserted twice per publisher — the second
// assert is likely meant to check something else.
@Test public void testLastAppliedOffset() throws Exception { MockFaultHandler faultHandler = new MockFaultHandler("testLastAppliedOffset"); List<MockPublisher> publishers = asList(new MockPublisher("a"), new MockPublisher("b")); try (MetadataLoader loader = new MetadataLoader.Builder(). setFaultHandler(faultHandler). setHighWaterMarkAccessor(() -> OptionalLong.of(1L)). build()) { loader.installPublishers(publishers).get(); loader.handleLoadSnapshot(MockSnapshotReader.fromRecordLists( new MetadataProvenance(200, 100, 4000), asList( singletonList(new ApiMessageAndVersion(new FeatureLevelRecord(). setName(MetadataVersion.FEATURE_NAME). setFeatureLevel(IBP_3_3_IV1.featureLevel()), (short) 0)), singletonList(new ApiMessageAndVersion(new TopicRecord(). setName("foo"). setTopicId(Uuid.fromString("Uum7sfhHQP-obSvfywmNUA")), (short) 0)) ))); for (MockPublisher publisher : publishers) { publisher.firstPublish.get(1, TimeUnit.MINUTES); } loader.waitForAllEventsToBeHandled(); assertEquals(200L, loader.lastAppliedOffset()); loader.handleCommit(new MockBatchReader(201, singletonList( MockBatchReader.newBatch(201, 100, singletonList( new ApiMessageAndVersion(new RemoveTopicRecord(). setTopicId(Uuid.fromString("Uum7sfhHQP-obSvfywmNUA")), (short) 0)))))); loader.waitForAllEventsToBeHandled(); assertEquals(201L, loader.lastAppliedOffset()); } for (int i = 0; i < 2; i++) { assertTrue(publishers.get(i).closed); assertTrue(publishers.get(i).closed); assertEquals(IBP_3_3_IV1, publishers.get(i).latestImage.features().metadataVersion()); } faultHandler.maybeRethrowFirstException(); }
/**
 * Synchronously sends SET_COMMITLOG_READ_MODE to the broker at {@code addr}
 * with the given read-ahead mode, bounded by {@code timeoutMillis}.
 * On SUCCESS returns the response remark (null when the remark is empty);
 * any other response code raises MQBrokerException with the broker's remark.
 */
public String setCommitLogReadAheadMode(final String addr, final String mode, final long timeoutMillis) throws RemotingConnectException, RemotingSendRequestException, RemotingTimeoutException, InterruptedException, MQBrokerException { RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.SET_COMMITLOG_READ_MODE, null); HashMap<String, String> extFields = new HashMap<>(); extFields.put(FIleReadaheadMode.READ_AHEAD_MODE, mode); request.setExtFields(extFields); RemotingCommand response = this.remotingClient.invokeSync(addr, request, timeoutMillis); assert response != null; switch (response.getCode()) { case ResponseCode.SUCCESS: { if (null != response.getRemark() && response.getRemark().length() > 0) { return response.getRemark(); } return null; } default: break; } throw new MQBrokerException(response.getCode(), response.getRemark()); }
// SUCCESS response with a non-empty remark: the remark string must be returned as-is.
@Test public void assertSetCommitLogReadAheadMode() throws RemotingException, InterruptedException, MQBrokerException { mockInvokeSync(); when(response.getRemark()).thenReturn("remark"); String actual = mqClientAPI.setCommitLogReadAheadMode(defaultBrokerAddr, "", defaultTimeout); assertNotNull(actual); assertEquals("remark", actual); }
/**
 * Builds — and installs as the process-wide default — an SSLContext that
 * trusts ALL certificates ({@code trustAllCerts}). SECURITY: intentionally
 * insecure; suitable only for tests/dev against self-signed endpoints.
 * Loads an optional keystore from classpath 'resources/.keystore'
 * (password "changeit"), continuing with an empty keystore when absent.
 * NOTE(review): the first SSLContext {@code sc} is initialized but never used
 * afterwards; only {@code ctx} is configured, set as default, and returned.
 */
static SSLContext ignoreSSLCertificate() {
    try {
        // Install the all-trusting trust manager
        SSLContext sc = SSLContext.getInstance("TLS");
        sc.init(null, trustAllCerts, new java.security.SecureRandom());
        KeyStore ks = Preconditions.checkStateNotNull(KeyStore.getInstance("JKS"), "Keystore 'JKS' not found");
        ClassLoader classLoader = Preconditions.checkStateNotNull(
            SSLUtils.class.getClassLoader(), "SSLUtil classloader is null - boot classloader?");
        InputStream inputStream = classLoader.getResourceAsStream("resources/.keystore");
        if (inputStream != null) {
            LOG.info("Found keystore in classpath 'resources/.keystore'. Loading...");
        } else {
            LOG.info(
                "Unable to find keystore under 'resources/.keystore' in the classpath. "
                    + "Continuing with an empty keystore.");
        }
        ks.load(inputStream, "changeit".toCharArray());
        KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
        kmf.init(ks, "changeit".toCharArray());
        SSLContext ctx = SSLContext.getInstance("TLS");
        ctx.init(kmf.getKeyManagers(), trustAllCerts, null);
        SSLContext.setDefault(ctx);
        return ctx;
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
// Smoke test only: verifies the trust-all context can be constructed and installed.
@Test
public void testIgnoreSSLCertificate() {
    // smoke test
    SSLUtils.ignoreSSLCertificate();
}
/**
 * Runs this feature unit. When a custom processor is configured it takes over
 * entirely; otherwise the before-feature hook gates scenario processing
 * (a false return aborts scenarios with a log message), and afterFeature()
 * runs regardless of the hook's outcome.
 */
@Override public void run() { if (processor != null) { processor.execute(); } else { if (!beforeHook()) { logger.info("before-feature hook returned [false], aborting: {}", this); } else { scenarios.forEachRemaining(this::processScenario); } afterFeature(); } }
// Scenario-driven test: executes set-xml.feature; the assertions live in the feature file.
@Test void testSetXml() { run("set-xml.feature"); }
/**
 * Parses a projection expression (e.g. {@code "id, UPPER(name) AS name"}) into
 * {@link ProjectionColumn} descriptors against the given upstream columns.
 * Blank expressions and empty select lists yield an empty list. A '*' entry is
 * expanded via expandWildcard before type derivation through Calcite.
 * AS-aliased expressions become transformed columns (script expression
 * compiled via JaninoCompiler) and replace any earlier untransformed column of
 * the same name; AS-aliased metadata columns are skipped. Plain identifiers
 * keep the raw upstream type with its original nullability (Calcite drops
 * nullability, so it is re-applied from isNotNullMap); metadata identifiers
 * are forced NOT NULL. Anything else raises ParseException.
 */
public static List<ProjectionColumn> generateProjectionColumns(
        String projectionExpression,
        List<Column> columns,
        List<UserDefinedFunctionDescriptor> udfDescriptors) {
    if (isNullOrWhitespaceOnly(projectionExpression)) {
        return new ArrayList<>();
    }
    SqlSelect sqlSelect = parseProjectionExpression(projectionExpression);
    if (sqlSelect.getSelectList().isEmpty()) {
        return new ArrayList<>();
    }
    expandWildcard(sqlSelect, columns);
    RelNode relNode = sqlToRel(columns, sqlSelect, udfDescriptors);
    Map<String, RelDataType> relDataTypeMap =
            relNode.getRowType().getFieldList().stream()
                    .collect(
                            Collectors.toMap(
                                    RelDataTypeField::getName,
                                    RelDataTypeField::getType));
    Map<String, DataType> rawDataTypeMap =
            columns.stream().collect(Collectors.toMap(Column::getName, Column::getType));
    Map<String, Boolean> isNotNullMap =
            columns.stream()
                    .collect(
                            Collectors.toMap(
                                    Column::getName,
                                    column -> !column.getType().isNullable()));
    List<ProjectionColumn> projectionColumns = new ArrayList<>();
    for (SqlNode sqlNode : sqlSelect.getSelectList()) {
        if (sqlNode instanceof SqlBasicCall) {
            SqlBasicCall sqlBasicCall = (SqlBasicCall) sqlNode;
            if (SqlKind.AS.equals(sqlBasicCall.getOperator().kind)) {
                Optional<SqlNode> transformOptional = Optional.empty();
                String columnName;
                List<SqlNode> operandList = sqlBasicCall.getOperandList();
                if (operandList.size() == 2) {
                    transformOptional = Optional.of(operandList.get(0));
                    SqlNode sqlNode1 = operandList.get(1);
                    if (sqlNode1 instanceof SqlIdentifier) {
                        SqlIdentifier sqlIdentifier = (SqlIdentifier) sqlNode1;
                        columnName = sqlIdentifier.names.get(sqlIdentifier.names.size() - 1);
                    } else {
                        columnName = null;
                    }
                } else {
                    columnName = null;
                }
                if (isMetadataColumn(columnName)) {
                    continue;
                }
                ProjectionColumn projectionColumn =
                        transformOptional.isPresent()
                                ? ProjectionColumn.of(
                                        columnName,
                                        DataTypeConverter.convertCalciteRelDataTypeToDataType(
                                                relDataTypeMap.get(columnName)),
                                        transformOptional.get().toString(),
                                        JaninoCompiler.translateSqlNodeToJaninoExpression(
                                                transformOptional.get(), udfDescriptors),
                                        parseColumnNameList(transformOptional.get()))
                                : ProjectionColumn.of(
                                        columnName,
                                        DataTypeConverter.convertCalciteRelDataTypeToDataType(
                                                relDataTypeMap.get(columnName)));
                boolean hasReplacedDuplicateColumn = false;
                for (int i = 0; i < projectionColumns.size(); i++) {
                    if (projectionColumns.get(i).getColumnName().equals(columnName)
                            && !projectionColumns.get(i).isValidTransformedProjectionColumn()) {
                        hasReplacedDuplicateColumn = true;
                        projectionColumns.set(i, projectionColumn);
                        break;
                    }
                }
                if (!hasReplacedDuplicateColumn) {
                    projectionColumns.add(projectionColumn);
                }
            } else {
                throw new ParseException(
                        "Unrecognized projection expression: "
                                + sqlBasicCall
                                + ". Should be <EXPR> AS <IDENTIFIER>");
            }
        } else if (sqlNode instanceof SqlIdentifier) {
            SqlIdentifier sqlIdentifier = (SqlIdentifier) sqlNode;
            String columnName = sqlIdentifier.names.get(sqlIdentifier.names.size() - 1);
            DataType columnType;
            if (rawDataTypeMap.containsKey(columnName)) {
                columnType = rawDataTypeMap.get(columnName);
            } else if (relDataTypeMap.containsKey(columnName)) {
                columnType =
                        DataTypeConverter.convertCalciteRelDataTypeToDataType(
                                relDataTypeMap.get(columnName));
            } else {
                throw new RuntimeException(
                        String.format("Failed to deduce column %s type", columnName));
            }
            if (isMetadataColumn(columnName)) {
                projectionColumns.add(
                        ProjectionColumn.of(
                                columnName,
                                // Metadata columns should never be null
                                columnType.notNull(),
                                columnName,
                                columnName,
                                Arrays.asList(columnName)));
            } else {
                // Calcite translated column type doesn't keep nullability.
                // Appending it manually to circumvent this problem.
                projectionColumns.add(
                        ProjectionColumn.of(
                                columnName,
                                isNotNullMap.get(columnName)
                                        ? columnType.notNull()
                                        : columnType.nullable()));
            }
        } else {
            throw new ParseException("Unrecognized projection: " + sqlNode.toString());
        }
    }
    return projectionColumns;
}
// Covers three projection shapes: transformed columns (AS aliases), wildcard
// expansion with NOT NULL metadata columns appended, and the error path where a
// calculated column lacks an AS alias.
@Test
public void testGenerateProjectionColumns() {
    List<Column> testColumns =
            Arrays.asList(
                    Column.physicalColumn("id", DataTypes.INT(), "id"),
                    Column.physicalColumn("name", DataTypes.STRING(), "string"),
                    Column.physicalColumn("age", DataTypes.INT(), "age"),
                    Column.physicalColumn("address", DataTypes.STRING(), "address"),
                    Column.physicalColumn("weight", DataTypes.DOUBLE(), "weight"),
                    Column.physicalColumn("height", DataTypes.DOUBLE(), "height"));
    List<ProjectionColumn> result =
            TransformParser.generateProjectionColumns(
                    "id, upper(name) as name, age + 1 as newage, weight / (height * height) as bmi",
                    testColumns,
                    Collections.emptyList());
    List<String> expected =
            Arrays.asList(
                    "ProjectionColumn{column=`id` INT, expression='null', scriptExpression='null', originalColumnNames=null, transformExpressionKey=null}",
                    "ProjectionColumn{column=`name` STRING, expression='UPPER(`TB`.`name`)', scriptExpression='upper(name)', originalColumnNames=[name], transformExpressionKey=null}",
                    "ProjectionColumn{column=`newage` INT, expression='`TB`.`age` + 1', scriptExpression='age + 1', originalColumnNames=[age], transformExpressionKey=null}",
                    "ProjectionColumn{column=`bmi` DOUBLE, expression='`TB`.`weight` / (`TB`.`height` * `TB`.`height`)', scriptExpression='weight / height * height', originalColumnNames=[weight, height, height], transformExpressionKey=null}");
    Assertions.assertThat(result.toString()).isEqualTo("[" + String.join(", ", expected) + "]");
    List<ProjectionColumn> metadataResult =
            TransformParser.generateProjectionColumns(
                    "*, __namespace_name__, __schema_name__, __table_name__",
                    testColumns,
                    Collections.emptyList());
    List<String> metadataExpected =
            Arrays.asList(
                    "ProjectionColumn{column=`id` INT, expression='null', scriptExpression='null', originalColumnNames=null, transformExpressionKey=null}",
                    "ProjectionColumn{column=`name` STRING, expression='null', scriptExpression='null', originalColumnNames=null, transformExpressionKey=null}",
                    "ProjectionColumn{column=`age` INT, expression='null', scriptExpression='null', originalColumnNames=null, transformExpressionKey=null}",
                    "ProjectionColumn{column=`address` STRING, expression='null', scriptExpression='null', originalColumnNames=null, transformExpressionKey=null}",
                    "ProjectionColumn{column=`weight` DOUBLE, expression='null', scriptExpression='null', originalColumnNames=null, transformExpressionKey=null}",
                    "ProjectionColumn{column=`height` DOUBLE, expression='null', scriptExpression='null', originalColumnNames=null, transformExpressionKey=null}",
                    "ProjectionColumn{column=`__namespace_name__` STRING NOT NULL, expression='__namespace_name__', scriptExpression='__namespace_name__', originalColumnNames=[__namespace_name__], transformExpressionKey=null}",
                    "ProjectionColumn{column=`__schema_name__` STRING NOT NULL, expression='__schema_name__', scriptExpression='__schema_name__', originalColumnNames=[__schema_name__], transformExpressionKey=null}",
                    "ProjectionColumn{column=`__table_name__` STRING NOT NULL, expression='__table_name__', scriptExpression='__table_name__', originalColumnNames=[__table_name__], transformExpressionKey=null}");
    Assertions.assertThat(metadataResult.toString())
            .isEqualTo("[" + String.join(", ", metadataExpected) + "]");
    // calculated columns must use AS to provide an alias name
    Assertions.assertThatThrownBy(
                    () ->
                            TransformParser.generateProjectionColumns(
                                    "id, 1 + 1", testColumns, Collections.emptyList()))
            .isExactlyInstanceOf(ParseException.class);
}
/**
 * Attempts to fill an unused gap in the term buffer with a single padding
 * frame. Scans the gap backwards in FRAME_ALIGNMENT steps and returns false
 * if any frame-length word is already non-zero (data has arrived
 * concurrently). Otherwise stamps a default header at gapOffset, types it
 * HDR_TYPE_PAD, and publishes the frame length last via frameLengthOrdered
 * (an ordered store, per its name) so readers observe a complete header.
 */
public static boolean tryFillGap( final UnsafeBuffer logMetaDataBuffer, final UnsafeBuffer termBuffer, final int termId, final int gapOffset, final int gapLength) { int offset = (gapOffset + gapLength) - FRAME_ALIGNMENT; while (offset >= gapOffset) { if (0 != termBuffer.getInt(offset)) { return false; } offset -= FRAME_ALIGNMENT; } applyDefaultHeader(logMetaDataBuffer, termBuffer, gapOffset); frameType(termBuffer, gapOffset, HDR_TYPE_PAD); frameTermOffset(termBuffer, gapOffset); frameTermId(termBuffer, gapOffset, termId); frameLengthOrdered(termBuffer, gapOffset, gapLength); return true; }
// A gap at the very end of the term must be filled with a padding frame carrying
// the session/term ids and the gap length.
@Test void shouldFillGapAtEndOfTerm() { final int gapOffset = termBuffer.capacity() - 64; final int gapLength = 64; dataFlyweight .sessionId(SESSION_ID) .termId(TERM_ID) .streamId(STREAM_ID) .flags(UNFRAGMENTED) .frameLength(termBuffer.capacity() - gapOffset); dataFlyweight.setMemory(0, gapOffset - DataHeaderFlyweight.HEADER_LENGTH, (byte)'x'); assertTrue(TermGapFiller.tryFillGap(metaDataBuffer, termBuffer, TERM_ID, gapOffset, gapLength)); dataFlyweight.wrap(termBuffer, gapOffset, termBuffer.capacity() - gapOffset); assertEquals(gapLength, dataFlyweight.frameLength()); assertEquals(gapOffset, dataFlyweight.termOffset()); assertEquals(SESSION_ID, dataFlyweight.sessionId()); assertEquals(TERM_ID, dataFlyweight.termId()); assertEquals(PADDING_FRAME_TYPE, dataFlyweight.headerType()); assertEquals(UNFRAGMENTED, (byte)(dataFlyweight.flags())); }
/**
 * Health summary across the configured data sources: "UP" when all are
 * healthy; otherwise reports the FIRST unhealthy one — "DOWN:&lt;ip&gt;" when it
 * is the master (index masterIndex), "WARN:&lt;ip&gt;" for a slave. Any further
 * unhealthy instances are not reported.
 */
@Override
public String getHealth() {
    for (int i = 0; i < isHealthList.size(); i++) {
        if (!isHealthList.get(i)) {
            if (i == masterIndex) {
                // The master is unhealthy.
                return "DOWN:" + InternetAddressUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl());
            } else {
                // The slave is unhealthy.
                return "WARN:" + InternetAddressUtil.getIPFromString(dataSourceList.get(i).getJdbcUrl());
            }
        }
    }
    return "UP";
}
// With an empty health list the loop never runs, so the service reports "UP" vacuously.
@Test void testGetHealth() { List<Boolean> isHealthList = new ArrayList<>(); ReflectionTestUtils.setField(service, "isHealthList", isHealthList); assertEquals("UP", service.getHealth()); }
/**
 * Serializes {@code obj} to a JSON string via the shared {@code MAPPER};
 * propagates Jackson's JsonProcessingException on failure.
 */
public static String serialize(Object obj) throws JsonProcessingException { return MAPPER.writeValueAsString(obj); }
// A counter with a null host must serialize as a count metric with value "1" and no host field.
@Test void serializeCounterWithoutHost() throws JsonProcessingException { DSeries series = new DSeries(); series.add( new DCounter(new TestCounter(1), METRIC, null, tags, () -> MOCKED_SYSTEM_MILLIS)); assertSerialization( DatadogHttpClient.serialize(series), new MetricAssertion(MetricType.count, false, "1")); }
/**
 * Looks up a bean by name and type: reference-style names (per
 * EndpointHelper.isReferenceParameter) are resolved through EndpointHelper,
 * plain names through the Camel registry. A null or empty name yields
 * Optional.empty().
 */
static <T> Optional<T> lookupByNameAndType(CamelContext camelContext, String name, Class<T> type) { return Optional.ofNullable(ObjectHelper.isEmpty(name) ? null : name) .map(n -> EndpointHelper.isReferenceParameter(n) ? EndpointHelper.resolveReferenceParameter(camelContext, n, type, false) : camelContext.getRegistry().lookupByNameAndType(n, type)); }
// A null bean name must short-circuit to Optional.empty().
@Test void testLookupByNameAndTypeWithNullName() { Optional<Object> object = DynamicRouterRecipientListHelper.lookupByNameAndType(camelContext, null, Object.class); Assertions.assertFalse(object.isPresent()); }
/**
 * Renders the buffer contents between {@code pos} (inclusive) and
 * {@code limit} (exclusive) as {@code "[b0,b1,...,]"} — note the trailing
 * comma after every byte, including the last. The buffer's position and
 * limit are restored before returning.
 *
 * @param buffer buffer to render (absolute reads; contents untouched)
 * @param pos    first index to render, inclusive
 * @param limit  end index, exclusive
 * @return bracketed, comma-terminated listing of the byte values
 */
public static String dump(ByteBuffer buffer, int pos, int limit) {
    final int savedPosition = buffer.position();
    final int savedLimit = buffer.limit();
    buffer.limit(limit).position(pos);
    final StringBuilder out = new StringBuilder("[");
    for (int i = pos; i < limit; i++) {
        out.append(buffer.get(i)).append(',');
    }
    out.append(']');
    // Leave the buffer exactly as we found it.
    buffer.limit(savedLimit).position(savedPosition);
    return out.toString();
}
// Pins the dump() format contract: a comma after every byte, including the last,
// inside square brackets.
@Test public void testDump() { byte[] array = new byte[10]; ByteBuffer buffer = ByteBuffer.wrap(array); String dump = Utils.dump(buffer, 0, 10); assertThat(dump, is("[0,0,0,0,0,0,0,0,0,0,]")); }
/**
 * Hash code consistent with equals() across numeric wrapper types: integral
 * values sharing the same long value hash alike, and other Numbers hash by
 * their double bits — so e.g. Short 10 and BigInteger 10 share a hash.
 * A null payload hashes to 31; non-numeric values delegate to the payload.
 */
@Override
public int hashCode() {
    if (value == null) {
        return 31;
    }
    // Using recommended hashing algorithm from Effective Java for longs and doubles
    if (isIntegral(this)) {
        // The local `value` deliberately shadows the field here.
        long value = getAsNumber().longValue();
        return (int) (value ^ (value >>> 32));
    }
    if (value instanceof Number) {
        long value = Double.doubleToLongBits(getAsNumber().doubleValue());
        return (int) (value ^ (value >>> 32));
    }
    return value.hashCode();
}
// equals/hashCode consistency across numeric types: Short 10 and BigInteger 10
// must be equal and hash identically.
@Test public void testShortEqualsBigInteger() { JsonPrimitive p1 = new JsonPrimitive((short) 10); JsonPrimitive p2 = new JsonPrimitive(new BigInteger("10")); assertThat(p1).isEqualTo(p2); assertThat(p1.hashCode()).isEqualTo(p2.hashCode()); }
/**
 * Returns the metric group for the given name/tags, creating it on first use.
 * putIfAbsent resolves creation races: the loser discards its instance and
 * returns the winner's (presumably groupsByName is a ConcurrentMap — confirm).
 */
public MetricGroup group(String groupName, String... tagKeyValues) { MetricGroupId groupId = groupId(groupName, tagKeyValues); MetricGroup group = groupsByName.get(groupId); if (group == null) { group = new MetricGroup(groupId); MetricGroup previous = groupsByName.putIfAbsent(groupId, group); if (previous != null) group = previous; } return group; }
// An odd number of tag key/value arguments must be rejected with
// IllegalArgumentException (presumably raised inside groupId() — confirm).
@Test public void testGettingGroupWithOddNumberOfTags() { assertThrows(IllegalArgumentException.class, () -> metrics.group("name", "k1", "v1", "k2", "v2", "extra")); }
@SuppressWarnings("unchecked") public static SelType box(Object o) { if (o == null) { // returned null from a method, representing void or object return SelType.NULL; } SelTypes type = fromClazzToSelType(o.getClass()); switch (type) { case STRING: return SelString.of((String) o); case LONG: return SelLong.of(((Number) o).longValue()); case DOUBLE: return SelDouble.of(((Number) o).doubleValue()); case BOOLEAN: return SelBoolean.of((Boolean) o); case STRING_ARRAY: case LONG_ARRAY: case DOUBLE_ARRAY: case BOOLEAN_ARRAY: return SelArray.of(o, type); case MAP: return SelMap.of((Map<String, Object>) o); case DATETIME: return SelJodaDateTime.of((DateTime) o); case DATETIME_PROPERTY: return SelJodaDateTimeProperty.of((DateTime.Property) o); } throw new UnsupportedOperationException( "Not support to box an object " + o + " for type " + type.name()); }
// Each sample type has no boxing rule and should raise UnsupportedOperationException.
// NOTE(review): the catch swallows the expected exception, but nothing fails the
// test when box() does NOT throw — consider adding fail() after the call.
@Test
public void testBoxUnsupported() {
    Object[] testObjects =
        new Object[] {
            DateTimeZone.forID("UTC"),
            DateTimeFormat.forPattern("yyyy"),
            Days.days(1),
            new SimpleDateFormat("yyyyMMdd"),
            new Date(12345),
            new ArrayList()
        };
    for (int i = 0; i < testObjects.length; ++i) {
        try {
            SelTypeUtil.box(testObjects[i]);
        } catch (UnsupportedOperationException e) {
            // expected
        }
    }
}
/**
 * Best-effort charset detection for a raw byte buffer. Order of preference:
 * UTF-8 (a definite YES wins; MAYBE falls back to ASCII detection), UTF-16 in
 * both endiannesses (rejecting NUL-heavy false positives), the user-configured
 * charset (skipped when it is UTF-8 — already ruled out — or an
 * already-rejected UTF-16), then Windows-1252; null when nothing fits.
 */
@CheckForNull
public Charset detect(byte[] buf) {
    // Try UTF-8 first since we are very confident in it if it's a yes.
    // Fail if we see nulls to not have FPs if the text is ASCII encoded in UTF-16.
    Result utf8Result = validator.isUTF8(buf, true);
    if (utf8Result.valid() == Validation.YES) {
        return utf8Result.charset();
    } else if (utf8Result.valid() == Validation.MAYBE) {
        return detectAscii(buf);
    }
    // try UTF16 with both endiness. Fail if we see nulls to not have FPs if it's UTF-32.
    Result utf16 = validator.isUTF16(buf, true);
    if (utf16.valid() == Validation.YES && validator.isValidUTF16(buf, UTF_16LE.equals(utf16.charset()))) {
        return utf16.charset();
    }
    // at this point we know it can't be UTF-8
    Charset c = userConfiguration;
    if (!UTF_8.equals(c) && (!isUtf16(c) || utf16.valid() == Validation.MAYBE) && validator.tryDecode(buf, c)) {
        return c;
    }
    Result windows1252 = validator.isValidWindows1252(buf);
    if (windows1252.valid() == Validation.MAYBE) {
        return windows1252.charset();
    }
    return null;
}
// Bytes invalid as UTF-8 but valid as Windows-1252 must be detected as Windows-1252
// even though the user configuration says UTF-8.
@Test public void windows1252() throws IOException, URISyntaxException { ByteCharsetDetector detector = new ByteCharsetDetector(new CharsetValidation(), StandardCharsets.UTF_8); assertThat(detector.detect(readFile("windows-1252"))).isEqualTo(Charset.forName("Windows-1252")); }
/**
 * Executes a web-service request. Hard-fails (checkState) when called in
 * medium-test mode, delegates to the underlying connector, then raises on
 * unauthorized responses and records authentication warnings before
 * returning the response.
 */
public WsResponse call(WsRequest request) { checkState(!globalMode.isMediumTest(), "No WS call should be made in medium test mode"); WsResponse response = target.wsConnector().call(request); failIfUnauthorized(response); checkAuthenticationWarnings(response); return response; }
// A 403 must surface as a MessageException with the full "not authorized" guidance
// message rather than a raw HTTP error.
@Test public void call_whenMissingPermissions_shouldFailWithMsg() { WsRequest request = newRequest(); server.stubFor(get(urlEqualTo(URL_ENDPOINT)) .willReturn(aResponse() .withStatus(403) .withBody("Unauthorized"))); DefaultScannerWsClient client = new DefaultScannerWsClient(wsClient, true, new GlobalAnalysisMode(new ScannerProperties(Collections.emptyMap())), analysisWarnings); assertThatThrownBy(() -> client.call(request)) .isInstanceOf(MessageException.class) .hasMessage( "You're not authorized to analyze this project or the project doesn't exist on SonarQube and you're not authorized to create it. Please contact an administrator."); }
/**
 * Replaces the first occurrence of {@code search} in {@code source} with
 * {@code replace}; returns {@code source} unchanged when there is no match.
 *
 * @param source  string to scan
 * @param search  substring to find
 * @param replace replacement for the first match
 * @return the rewritten string, or {@code source} itself when absent
 */
public static String replaceFirst(String source, String search, String replace) {
    final int matchStart = source.indexOf(search);
    if (matchStart < 0) {
        return source;
    }
    // One concatenation covers both the match-at-start and mid-string cases.
    return source.substring(0, matchStart)
            + replace
            + source.substring(matchStart + search.length());
}
// The search string (note its trailing space) does not occur in the source,
// so replaceFirst must return the source unchanged.
@Test public void testReplace7() { assertEquals("alt=\"\"", JOrphanUtils.replaceFirst("alt=\"\"", "alt=\"\" ", "")); }
/**
 * Validates each document type in the list against the reference-graph
 * rules, delegating the per-document work to validateRoot.
 *
 * @param documents document types to validate, in order
 */
public void validateDocumentGraph(List<SDDocumentType> documents) {
    documents.forEach(this::validateRoot);
}
// Two references from 'ad' to distinct parents form a DAG, not a cycle:
// validation must pass without throwing.
@Test void simple_ref_dag_is_allowed() { Schema advertiserSchema = createSearchWithName("advertiser"); Schema campaignSchema = createSearchWithName("campaign"); Schema adSchema = createSearchWithName("ad"); createDocumentReference(adSchema, advertiserSchema, "advertiser_ref"); createDocumentReference(adSchema, campaignSchema, "campaign_ref"); DocumentGraphValidator validator = new DocumentGraphValidator(); validator.validateDocumentGraph(documentListOf(advertiserSchema, campaignSchema, adSchema)); }
/**
 * Parses raw command-line arguments against the static optionsDefinition,
 * wrapping commons-cli parse failures in CliArgumentsException.
 * NOTE(review): cross-option validation errors asserted by the tests are
 * presumably raised by the CliArguments constructor — confirm.
 */
static CliArguments fromRawArgs(String[] rawArgs) throws CliArgumentsException { CommandLineParser parser = new DefaultParser(); try { return new CliArguments(parser.parse(optionsDefinition, rawArgs)); } catch (ParseException e) { throw new CliArgumentsException(e); } }
// Mutually exclusive / missing option combinations must be rejected with the
// documented messages; a bare --speed-test invocation is accepted.
@Test void fails_on_conflicting_parameters() throws CliArgumentsException { assertEquals("Exactly one of 'file' and 'stdin' must be specified", assertThrows(CliArgumentsException.class, () -> CliArguments.fromRawArgs(new String[] {"--endpoint", "https://endpoint", "--file", "/path/to/file", "--stdin"})) .getMessage()); assertEquals("Exactly one of 'file' and 'stdin' must be specified", assertThrows(CliArgumentsException.class, () -> CliArguments.fromRawArgs(new String[] {"--endpoint", "https://endpoint"})) .getMessage()); assertEquals("At most one of 'file', 'stdin' and 'test-payload-size' may be specified", assertThrows(CliArgumentsException.class, () -> CliArguments.fromRawArgs(new String[] {"--endpoint", "https://endpoint", "--speed-test", "--test-payload-size", "123", "--file", "file"})) .getMessage()); CliArguments.fromRawArgs(new String[] {"--endpoint", "foo", "--speed-test"}); }
/**
 * Evaluates a JSON payload against the given specification and returns the matching case value.
 * The node selected by the specification's JSON Pointer expression is dispatched according to
 * the specification operator; when no case matches, the specification's default case is returned.
 *
 * @param jsonText      the JSON payload to evaluate
 * @param specification the expression, operator and cases to dispatch on
 * @return the value of the matching case, or the default case value
 * @throws JsonMappingException if the payload cannot be parsed as JSON
 */
public static String evaluate(String jsonText, JsonEvaluationSpecification specification) throws JsonMappingException {
    // Parse json text and get root node.
    JsonNode rootNode;
    try {
        ObjectMapper mapper = new ObjectMapper();
        rootNode = mapper.readTree(new StringReader(jsonText));
    } catch (Exception e) {
        log.error("Exception while parsing Json text", e);
        throw new JsonMappingException("Exception while parsing Json payload");
    }
    // Retrieve evaluated node within JSON tree.
    // NOTE: JsonNode.at() returns a MissingNode (never null) when the pointer does not resolve.
    JsonNode evaluatedNode = rootNode.at(specification.getExp());
    String caseKey = evaluatedNode.asText();
    switch (specification.getOperator()) {
        case equals:
            // Consider simple equality.
            String value = specification.getCases().get(caseKey);
            return (value != null ? value : specification.getCases().getDefault());
        case range:
            // Consider range evaluation.
            double caseNumber = 0.000;
            try {
                caseNumber = Double.parseDouble(caseKey);
            } catch (NumberFormatException nfe) {
                log.error(caseKey + " into range expression cannot be parsed as number. Considering default case.");
                return specification.getCases().getDefault();
            }
            return foundRangeMatchingCase(caseNumber, specification.getCases());
        case regexp:
            // Consider regular expression evaluation for each case key;
            // the reserved "default" key is never treated as a pattern.
            for (String choiceKey : specification.getCases().keySet()) {
                if (!"default".equals(choiceKey)) {
                    if (Pattern.matches(choiceKey, caseKey)) {
                        return specification.getCases().get(choiceKey);
                    }
                }
            }
            break;
        case size:
            // Consider size evaluation. Non-array nodes fall through to the default case.
            if (evaluatedNode.isArray()) {
                int size = evaluatedNode.size();
                return foundRangeMatchingCase(size, specification.getCases());
            }
            break;
        case presence:
            // Consider presence evaluation of evaluatedNode directly.
            // A MissingNode renders as "" so the length check distinguishes found/missing.
            if (evaluatedNode != null && evaluatedNode.toString().length() > 0) {
                if (specification.getCases().containsKey("found")) {
                    return specification.getCases().get("found");
                }
            } else {
                if (specification.getCases().containsKey("missing")) {
                    return specification.getCases().get("missing");
                }
            }
            break;
    }
    // No operator branch produced a match: fall back to the default case.
    return specification.getCases().getDefault();
}
@Test
void testPresenceOperatorDispatcher() throws Exception {
    // Dispatch on presence of the "/extra" node: found -> "Extra", missing -> "Normal".
    DispatchCases cases = new DispatchCases();
    Map<String, String> dispatchCases = new HashMap<>();
    dispatchCases.put("found", "Extra");
    dispatchCases.put("missing", "Normal");
    dispatchCases.put("default", "Basic");
    cases.putAll(dispatchCases);
    JsonEvaluationSpecification specifications = new JsonEvaluationSpecification();
    specifications.setExp("/extra");
    specifications.setOperator(EvaluationOperator.presence);
    specifications.setCases(cases);
    String result = JsonExpressionEvaluator.evaluate(BELGIUM_BEER, specifications);
    assertEquals("Normal", result);
    result = JsonExpressionEvaluator.evaluate(GERMAN_BEER, specifications);
    assertEquals("Extra", result);
    result = JsonExpressionEvaluator.evaluate(ENGLISH_BEER, specifications);
    assertEquals("Normal", result);
    result = JsonExpressionEvaluator.evaluate(EXTRA_GERMAN_BEER, specifications);
    assertEquals("Extra", result);
}
/**
 * Decodes, validates and processes an incoming SAML LogoutRequest.
 * Looks up the SAML session by NameID (or, when absent, the first session index),
 * resolves connection metadata, verifies the request signature and terminates the session.
 *
 * @param request the HTTP request carrying the SAML LogoutRequest
 * @return the populated logout request model
 * @throws SamlValidationException if the issuer does not match the connection entity id
 *         or request validation fails
 * @throws SamlParseException if the message cannot be decoded/initialized
 * @throws SamlSessionException if no SAML session exists for the request's identifier
 * @throws DienstencatalogusException if metadata resolution fails
 */
public LogoutRequestModel parseLogoutRequest(HttpServletRequest request) throws SamlValidationException, SamlParseException, SamlSessionException, DienstencatalogusException {
    final LogoutRequestModel logoutRequestModel = new LogoutRequestModel();
    try {
        final BaseHttpServletRequestXMLMessageDecoder decoder = decodeRequest(request);
        var logoutRequest = (LogoutRequest) decoder.getMessageContext().getMessage();
        final SAMLBindingContext bindingContext = decoder.getMessageContext().getSubcontext(SAMLBindingContext.class);
        logoutRequestModel.setLogoutRequest(logoutRequest);
        logoutRequestModel.setRequest(request);
        validateRequest(logoutRequestModel);
        // Prefer the NameID as session key; fall back to the first session index when absent
        var id = logoutRequest.getNameID() != null ? logoutRequest.getNameID().getValue() : logoutRequest.getSessionIndexes().get(0).getValue();
        var samlSession = samlSessionRepository.findById(id)
                .orElseThrow(() -> new SamlSessionException("LogoutRequest no saml session found for nameID: " + id));
        logoutRequestModel.setConnectionEntityId(samlSession.getConnectionEntityId());
        logoutRequestModel.setServiceEntityId(samlSession.getServiceEntityId());
        logoutRequestModel.setServiceUuid(samlSession.getServiceUuid());
        logoutRequestModel.setRelayState(bindingContext.getRelayState());
        logoutRequestModel.setEntranceSession(samlSession.getProtocolType().equals(ProtocolType.SAML_COMBICONNECT));
        dcMetadataService.resolveDcMetadata(logoutRequestModel);
        // The request issuer must match the connection entity id stored on the session
        if (!logoutRequestModel.getConnectionEntityId().equals(logoutRequestModel.getLogoutRequest().getIssuer().getValue())) {
            throw new SamlValidationException("Issuer not equal to connectorEntityId");
        }
        verifySignature(logoutRequestModel, logoutRequestModel.getLogoutRequest().getSignature());
        // Terminate the session only after signature verification succeeded
        logout(samlSession);
        if (logger.isDebugEnabled()) OpenSAMLUtils.logSAMLObject((LogoutRequest) decoder.getMessageContext().getMessage());
    } catch (MessageDecodingException e) {
        throw new SamlParseException("Authentication deflate decode exception", e);
    } catch (ComponentInitializationException e) {
        throw new SamlParseException("Authentication deflate initialization exception", e);
    }
    return logoutRequestModel;
}
@Test public void parseLogoutRequestNoNameIDValue() { httpRequestMock.setParameter("SAMLRequest", "SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS
SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS
SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS"); Exception exception = assertThrows(SamlValidationException.class, () -> logoutService.parseLogoutRequest(httpRequestMock)); assertEquals("LogoutRequest validation error", exception.getMessage()); }
/**
 * Parses the given string into the most appropriate schema/value pair.
 * A null input yields the shared null schema-and-value; an empty string is
 * returned as-is with a string schema; anything else is delegated to the value parser.
 *
 * @param value the string to parse, may be null
 * @return the inferred schema and parsed value
 */
public static SchemaAndValue parseString(String value) {
    if (value == null) {
        return NULL_SCHEMA_AND_VALUE;
    }
    if (value.isEmpty()) {
        return new SchemaAndValue(Schema.STRING_SCHEMA, value);
    }
    ValueParser parser = new ValueParser(new Parser(value));
    return parser.parse(false);
}
@Test
public void shouldParseByteAsInt8() {
    // Both byte extremes must parse back to an INT8 schema with an exact Byte value
    Byte value = Byte.MAX_VALUE;
    SchemaAndValue schemaAndValue = Values.parseString(
        String.valueOf(value)
    );
    assertEquals(Schema.INT8_SCHEMA, schemaAndValue.schema());
    assertInstanceOf(Byte.class, schemaAndValue.value());
    assertEquals(value.byteValue(), ((Byte) schemaAndValue.value()).byteValue());
    value = Byte.MIN_VALUE;
    schemaAndValue = Values.parseString(
        String.valueOf(value)
    );
    assertEquals(Schema.INT8_SCHEMA, schemaAndValue.schema());
    assertInstanceOf(Byte.class, schemaAndValue.value());
    assertEquals(value.byteValue(), ((Byte) schemaAndValue.value()).byteValue());
}
/**
 * Builds the result object from the initial instance and the path-keyed parameters.
 * A valid non-map initial instance is returned untouched (simple types); otherwise the
 * parameters are merged into a (possibly pre-populated) nested map structure where each
 * key list describes a path of map keys.
 */
@SuppressWarnings("unchecked")
@Override
protected Object createObject(ValueWrapper<Object> initialInstance, String className, Map<List<String>, Object> params, ClassLoader classLoader) {
    // simple types
    if (initialInstance.isValid() && !(initialInstance.getValue() instanceof Map)) {
        return initialInstance.getValue();
    }
    Map<String, Object> toReturn = (Map<String, Object>) initialInstance.orElseGet(HashMap::new);
    for (Map.Entry<List<String>, Object> listObjectEntry : params.entrySet()) {
        // direct mapping already considered
        if (listObjectEntry.getKey().isEmpty()) {
            continue;
        }
        List<String> allSteps = listObjectEntry.getKey();
        List<String> steps = allSteps.subList(0, allSteps.size() - 1);
        String lastStep = allSteps.get(allSteps.size() - 1);
        // Walk (creating on demand) the nested maps for every intermediate step,
        // then store the value under the final step.
        Map<String, Object> targetMap = toReturn;
        for (String step : steps) {
            targetMap = (Map<String, Object>) targetMap.computeIfAbsent(step, k -> new HashMap<>());
        }
        targetMap.put(lastStep, listObjectEntry.getValue());
    }
    return toReturn;
}
@SuppressWarnings("unchecked")
@Test
public void createObject_directMappingComplexType() {
    // A direct mapping (empty key path) supplies the base map; other paths are merged into it.
    Map<List<String>, Object> params = new HashMap<>();
    Map<String, Object> directMappingComplexTypeValue = new HashMap<>();
    directMappingComplexTypeValue.put("key1", "value1");
    params.put(List.of(), directMappingComplexTypeValue);
    params.put(List.of("key2"), "value2");
    ValueWrapper<Object> initialInstance = runnerHelper.getDirectMapping(params);
    Object objectRaw = runnerHelper.createObject(initialInstance, Map.class.getCanonicalName(), params, getClass().getClassLoader());
    assertThat(objectRaw).isInstanceOf(Map.class);
    Map<String, Object> object = (Map<String, Object>) objectRaw;
    assertThat(object).containsEntry("key1","value1").containsEntry("key2", "value2");
}
/**
 * Infers the Connect schema matching the runtime type of the given value.
 * Returns null when no schema can be inferred (including for null input).
 */
public static Schema inferSchema(Object value) {
    // Guard-style early returns; the checks run in the same order as before.
    if (value instanceof String) {
        return Schema.STRING_SCHEMA;
    }
    if (value instanceof Boolean) {
        return Schema.BOOLEAN_SCHEMA;
    }
    if (value instanceof Byte) {
        return Schema.INT8_SCHEMA;
    }
    if (value instanceof Short) {
        return Schema.INT16_SCHEMA;
    }
    if (value instanceof Integer) {
        return Schema.INT32_SCHEMA;
    }
    if (value instanceof Long) {
        return Schema.INT64_SCHEMA;
    }
    if (value instanceof Float) {
        return Schema.FLOAT32_SCHEMA;
    }
    if (value instanceof Double) {
        return Schema.FLOAT64_SCHEMA;
    }
    if (value instanceof byte[] || value instanceof ByteBuffer) {
        return Schema.BYTES_SCHEMA;
    }
    if (value instanceof List) {
        return inferListSchema((List<?>) value);
    }
    if (value instanceof Map) {
        return inferMapSchema((Map<?, ?>) value);
    }
    if (value instanceof Struct) {
        return ((Struct) value).schema();
    }
    // Unknown type: no schema can be inferred.
    return null;
}
@Test
public void shouldInferNoSchemaForEmptyList() {
    // An empty list has no element to infer the value schema from, so no schema is returned
    Schema listSchema = Values.inferSchema(Collections.emptyList());
    assertNull(listSchema);
}
/**
 * Populates the span with Cassandra-specific tags before the exchange is processed:
 * the db system, the keyspace taken from the endpoint URI path, and the CQL statement
 * taken from the message header or, failing that, the endpoint URI query parameters.
 */
@Override
public void pre(SpanAdapter span, Exchange exchange, Endpoint endpoint) {
    super.pre(span, exchange, endpoint);
    span.setTag(TagConstants.DB_SYSTEM, CASSANDRA_DB_TYPE);
    URI uri = URI.create(endpoint.getEndpointUri());
    if (uri.getPath() != null && !uri.getPath().isEmpty()) {
        // Strip leading '/' from path
        span.setTag(TagConstants.DB_NAME, uri.getPath().substring(1));
    }
    String cql = exchange.getIn().getHeader(CAMEL_CQL_QUERY, String.class);
    if (cql != null) {
        span.setTag(TagConstants.DB_STATEMENT, cql);
    } else {
        // No header set: fall back to the 'cql' query parameter of the endpoint URI
        Map<String, String> queryParameters = toQueryParameters(endpoint.getEndpointUri());
        if (queryParameters.containsKey("cql")) {
            span.setTag(TagConstants.DB_STATEMENT, queryParameters.get("cql"));
        }
    }
}
@Test
public void testPreCqlFromUri() {
    // No CQL header is stubbed, so the statement must come from the URI's 'cql' query parameter
    String cql = "select%20*%20from%20users";
    String keyspace = "test";
    Endpoint endpoint = Mockito.mock(Endpoint.class);
    Exchange exchange = Mockito.mock(Exchange.class);
    Message message = Mockito.mock(Message.class);
    Mockito.when(endpoint.getEndpointUri()).thenReturn("cql://host1,host2:8080/" + keyspace + "?cql=" + cql + "&consistencyLevel=quorum");
    Mockito.when(exchange.getIn()).thenReturn(message);
    SpanDecorator decorator = new CqlSpanDecorator();
    MockSpanAdapter span = new MockSpanAdapter();
    decorator.pre(span, exchange, endpoint);
    assertEquals(CqlSpanDecorator.CASSANDRA_DB_TYPE, span.tags().get(TagConstants.DB_SYSTEM));
    assertEquals(cql, span.tags().get(TagConstants.DB_STATEMENT));
    assertEquals(keyspace, span.tags().get(TagConstants.DB_NAME));
}
/**
 * Builds an IP_HOSTNAME_PORT network endpoint from the given address, hostname and port.
 *
 * @param ipAddress the IP address of the endpoint
 * @param hostname  the hostname of the endpoint
 * @param port      the port number, must be within [0, MAX_PORT_NUMBER]
 * @throws IllegalArgumentException if the port is out of range
 */
public static NetworkEndpoint forIpHostnameAndPort(String ipAddress, String hostname, int port) {
    checkArgument(
        0 <= port && port <= MAX_PORT_NUMBER,
        "Port out of range. Expected [0, %s], actual %s.",
        MAX_PORT_NUMBER,
        port);
    // Reuse the ip+hostname factory and extend the result with the port
    return forIpAndHostname(ipAddress, hostname).toBuilder()
        .setType(NetworkEndpoint.Type.IP_HOSTNAME_PORT)
        .setPort(Port.newBuilder().setPortNumber(port))
        .build();
}
@Test
public void forIpHostnameAndPort_returnsIpHostnameAndPortNetworkEndpoint() {
    // The factory must populate type, address family, address, hostname and port
    assertThat(NetworkEndpointUtils.forIpHostnameAndPort("1.2.3.4", "host.com", 8888))
        .isEqualTo(
            NetworkEndpoint.newBuilder()
                .setType(NetworkEndpoint.Type.IP_HOSTNAME_PORT)
                .setIpAddress(
                    IpAddress.newBuilder()
                        .setAddressFamily(AddressFamily.IPV4)
                        .setAddress("1.2.3.4"))
                .setHostname(Hostname.newBuilder().setName("host.com"))
                .setPort(Port.newBuilder().setPortNumber(8888))
                .build());
}
/**
 * Reads the attributes of the given file via SFTP stat/lstat.
 * The remote file type reported by the server must be consistent with the
 * local path type, otherwise a NotfoundException is raised.
 *
 * @throws NotfoundException   if the remote type contradicts the expected path type
 * @throws BackgroundException for any underlying I/O failure
 */
@Override
public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException {
    if(file.isRoot()) {
        return PathAttributes.EMPTY;
    }
    try {
        final FileAttributes stat;
        if(file.isSymbolicLink()) {
            // lstat: query the link itself, do not follow it
            stat = session.sftp().lstat(file.getAbsolute());
        }
        else {
            stat = session.sftp().stat(file.getAbsolute());
        }
        switch(stat.getType()) {
            case BLOCK_SPECIAL:
            case CHAR_SPECIAL:
            case FIFO_SPECIAL:
            case SOCKET_SPECIAL:
            case REGULAR:
            case SYMLINK:
                // All non-directory types must correspond to a file path
                if(!file.getType().contains(Path.Type.file)) {
                    throw new NotfoundException(String.format("File %s is of type %s but expected %s", file.getAbsolute(), stat.getType(), file.getType()));
                }
                break;
            case DIRECTORY:
                if(!file.getType().contains(Path.Type.directory)) {
                    throw new NotfoundException(String.format("File %s is of type %s but expected %s", file.getAbsolute(), stat.getType(), file.getType()));
                }
                break;
        }
        return this.toAttributes(stat);
    }
    catch(IOException e) {
        throw new SFTPExceptionMappingService().map("Failure to read attributes of {0}", e, file);
    }
}
// A random, non-existent remote path must surface as a NotfoundException
@Test(expected = NotfoundException.class)
public void testFindNotFound() throws Exception {
    new SFTPAttributesFinderFeature(session).find(new Path(UUID.randomUUID().toString(), EnumSet.of(Path.Type.file)));
}
/**
 * Adds the supplier-provided default tags to the instance config when it has no tags yet.
 * The supplier is only invoked for untagged instances; an instance that already carries
 * tags (even different ones) is left untouched.
 *
 * @param instanceConfig      the instance config to update in place
 * @param defaultTagsSupplier supplies the default tags to apply
 * @return true if tags were added, false otherwise
 */
public static boolean addDefaultTags(InstanceConfig instanceConfig, Supplier<List<String>> defaultTagsSupplier) {
    List<String> instanceTags = instanceConfig.getTags();
    if (instanceTags.isEmpty()) {
        List<String> defaultTags = defaultTagsSupplier.get();
        if (!CollectionUtils.isEmpty(defaultTags)) {
            // Bug fix: log the default tags actually being applied; the previous code
            // logged instanceTags, which is always empty on this path.
            LOGGER.info("Updating instance: {} with default tags: {}", instanceConfig.getId(), defaultTags);
            for (String defaultTag : defaultTags) {
                instanceConfig.addTag(defaultTag);
            }
            return true;
        }
    }
    return false;
}
@Test
public void testAddDefaultTags() {
    String instanceId = "Server_myInstance";
    InstanceConfig instanceConfig = new InstanceConfig(instanceId);
    List<String> defaultTags = Arrays.asList("tag1", "tag2");
    // First call on an untagged instance applies the defaults
    assertTrue(HelixHelper.addDefaultTags(instanceConfig, () -> defaultTags));
    assertEquals(instanceConfig.getTags(), defaultTags);
    // Subsequent calls are no-ops because the instance now has tags
    assertFalse(HelixHelper.addDefaultTags(instanceConfig, () -> defaultTags));
    assertEquals(instanceConfig.getTags(), defaultTags);
    // Even different defaults must not overwrite existing tags
    List<String> otherTags = Arrays.asList("tag3", "tag4");
    assertFalse(HelixHelper.addDefaultTags(instanceConfig, () -> otherTags));
    assertEquals(instanceConfig.getTags(), defaultTags);
}
/**
 * Returns the deepest mount table entry whose source path is a prefix of the given path.
 *
 * @param path the file system path to resolve
 * @throws IOException if the mount table cannot be verified
 */
public MountTable getMountPoint(final String path) throws IOException {
    verifyMountTable();
    // Normalize first so trailing slashes do not affect prefix matching
    return findDeepest(RouterAdmin.normalizeFileSystemPath(path));
}
@Test
public void testGetMountPoint() throws IOException {
    // Check get the mount table entry for a path
    MountTable mtEntry;
    mtEntry = mountTable.getMountPoint("/");
    assertEquals("/", mtEntry.getSourcePath());
    mtEntry = mountTable.getMountPoint("/user");
    assertEquals("/user", mtEntry.getSourcePath());
    mtEntry = mountTable.getMountPoint("/user/a");
    assertEquals("/user/a", mtEntry.getSourcePath());
    // Trailing slash must resolve to the same entry
    mtEntry = mountTable.getMountPoint("/user/a/");
    assertEquals("/user/a", mtEntry.getSourcePath());
    // Children resolve to the deepest matching mount point
    mtEntry = mountTable.getMountPoint("/user/a/11");
    assertEquals("/user/a", mtEntry.getSourcePath());
    // "/user/a1" is a sibling of "/user/a", not a child; it matches "/user"
    mtEntry = mountTable.getMountPoint("/user/a1");
    assertEquals("/user", mtEntry.getSourcePath());
}
/**
 * Generates a random string of {@code numRandomChars} characters from the given alphabet,
 * with a '-' separator inserted after every RANDOM_STRING_CHUNK_SIZE random characters.
 */
private static String randomString(Random random, char[] alphabet, int numRandomChars) {
    // The buffer must hold the requested number of random chars plus the chunk separators ('-').
    int bufferSize = numRandomChars + ((numRandomChars - 1) / RANDOM_STRING_CHUNK_SIZE);
    CharBuffer charBuffer = CharBuffer.allocate(bufferSize);
    try {
        randomString(charBuffer, random, alphabet, numRandomChars);
    } catch (IOException e) {
        // This should never happen if we calculate the buffer size correctly.
        throw new AssertionError(e);
    }
    return charBuffer.flip().toString();
}
@Test
public void testRandomString() {
    String result;
    // Test various lengths - make sure the same length is returned
    result = StringUtils.randomString(4);
    assertTrue(result.length() == 4);
    result = StringUtils.randomString(16);
    assertTrue(result.length() == 16);
    result = StringUtils.randomString(128);
    assertTrue(result.length() == 128);
}
/**
 * Opens a download stream for the given file from Google Cloud Storage.
 * Handles requester-pays buckets, object generations (when versioning is enabled)
 * and ranged (resumed) downloads.
 *
 * @throws BackgroundException on any underlying I/O failure
 */
@Override
public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    try {
        // Zero-length transfer: nothing to download
        if(0L == status.getLength()) {
            return new NullInputStream(0L);
        }
        final Storage.Objects.Get request = session.getClient().objects().get(
            containerService.getContainer(file).getName(), containerService.getKey(file));
        // Requester-pays buckets require the billing project (here the username) on the request
        if(containerService.getContainer(file).attributes().getCustom().containsKey(GoogleStorageAttributesFinderFeature.KEY_REQUESTER_PAYS)) {
            request.setUserProject(session.getHost().getCredentials().getUsername());
        }
        final VersioningConfiguration versioning = null != session.getFeature(Versioning.class) ? session.getFeature(Versioning.class).getConfiguration(
            containerService.getContainer(file)
        ) : VersioningConfiguration.empty();
        if(versioning.isEnabled()) {
            // Pin the download to a specific object generation when one is known
            if(StringUtils.isNotBlank(file.attributes().getVersionId())) {
                request.setGeneration(Long.parseLong(file.attributes().getVersionId()));
            }
        }
        if(status.isAppend()) {
            // Resume: request only the remaining byte range
            final HttpRange range = HttpRange.withStatus(status);
            final String header;
            if(TransferStatus.UNKNOWN_LENGTH == range.getEnd()) {
                header = String.format("bytes=%d-", range.getStart());
            }
            else {
                header = String.format("bytes=%d-%d", range.getStart(), range.getEnd());
            }
            if(log.isDebugEnabled()) {
                log.debug(String.format("Add range header %s for file %s", header, file));
            }
            final HttpHeaders headers = request.getRequestHeaders();
            headers.setRange(header);
            // Disable compression
            headers.setAcceptEncoding("identity");
        }
        return request.executeMediaAsInputStream();
    }
    catch(IOException e) {
        throw new GoogleStorageExceptionMappingService().map("Download {0} failed", e, file);
    }
}
@Test
public void testReadPath() throws Exception {
    // Upload random content, open a read stream, close it before reading anything,
    // and verify no bytes were consumed.
    final int length = 47;
    final byte[] content = RandomUtils.nextBytes(length);
    final Path container = new Path("cyberduck-test-eu", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final Path directory = new GoogleStorageDirectoryFeature(session).mkdir(new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus());
    final Path file = new Path(directory, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final TransferStatus status = new TransferStatus().withLength(content.length);
    status.setChecksum(new SHA256ChecksumCompute().compute(new ByteArrayInputStream(content), status));
    final OutputStream out = new GoogleStorageWriteFeature(session).write(file, status, new DisabledConnectionCallback());
    new StreamCopier(new TransferStatus(), new TransferStatus()).transfer(new ByteArrayInputStream(content), out);
    assertEquals(length, new GoogleStorageAttributesFinderFeature(session).find(file).getSize());
    final CountingInputStream in = new CountingInputStream(new GoogleStorageReadFeature(session).read(file, new TransferStatus(), new DisabledConnectionCallback()));
    in.close();
    assertEquals(0L, in.getByteCount(), 0L);
    // Clean up the remote fixtures
    new GoogleStorageDeleteFeature(session).delete(Arrays.asList(file, directory), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Verifies the given JWT against the configured HMAC signing key, required subject
 * and required issuer.
 *
 * @param token the compact JWT to verify
 * @throws TokenVerificationException if the token is unsupported, invalid, or
 *         fails any verification step
 */
void verifyToken(String token) throws TokenVerificationException {
    final SecretKey key = Keys.hmacShaKeyFor(this.signingKey.getBytes(StandardCharsets.UTF_8));
    final JwtParser parser = Jwts.parser()
        .verifyWith(key)
        .requireSubject(REQUIRED_SUBJECT)
        .requireIssuer(REQUIRED_ISSUER)
        .build();
    try {
        parser.parse(token);
    } catch (UnsupportedJwtException e) {
        // Distinguish unsupported token formats (e.g. unsigned/none-alg tokens) in the message
        throw new TokenVerificationException("Token format/configuration is not supported", e);
    } catch (Throwable e) {
        // Deliberately broad: any other parser failure means the token must be rejected
        throw new TokenVerificationException(e);
    }
}
@Test
void testNoneAlgorithm() {
    // A token stripped of its signature (effectively alg=none) must be rejected as unsupported
    final String key = "gTVfiF6A0pB70A3UP1EahpoR6LId9DdNadIkYNygK5Z8lpeJIpw9vN0jZ6fdsfeuV9KIg9gVLkCHIPj6FHW5Q9AvpOoGZO3h";
    final JwtTokenAuthFilter validator = new JwtTokenAuthFilter(key);
    Assertions.assertThatThrownBy(() -> validator.verifyToken(removeSignature(generateToken(key))))
        .isInstanceOf(TokenVerificationException.class)
        .hasMessageContaining("Token format/configuration is not supported");
}
/**
 * Creates a percolate bolt using the default tuple mapper.
 *
 * @param esConfig the Elasticsearch configuration for this bolt
 */
public EsPercolateBolt(EsConfig esConfig) {
    this(esConfig, new DefaultEsTupleMapper());
}
@Test
public void testEsPercolateBolt() {
    // Executing a tuple must ack it and emit (source, percolate match) pairs
    Tuple tuple = EsTestUtil.generateTestTuple(source, index, type, null);
    bolt.execute(tuple);
    verify(outputCollector).ack(tuple);
    ArgumentCaptor<Values> emitCaptor = ArgumentCaptor.forClass(Values.class);
    verify(outputCollector).emit(emitCaptor.capture());
    assertThat(emitCaptor.getValue().get(0), is(source));
    assertThat(emitCaptor.getValue().get(1), instanceOf(PercolateResponse.Match.class));
}
/**
 * Tracks the Wintertodt round timer varbit and fires a notification once the
 * time remaining crosses the user-configured threshold.
 */
@Subscribe
public void onVarbitChanged(VarbitChanged varbitChanged) {
    if (varbitChanged.getVarbitId() == Varbits.WINTERTODT_TIMER) {
        int timeToNotify = config.roundNotification();
        // Sometimes wt var updates are sent to players even after leaving wt.
        // So only notify if in wt or after just having left.
        if (timeToNotify > 0 && (isInWintertodt || needRoundNotif)) {
            // *30/50 converts the raw varbit value to seconds
            // (assumes one varbit unit is 0.6s, i.e. one game tick — TODO confirm)
            int timeInSeconds = varbitChanged.getValue() * 30 / 50;
            int prevTimeInSeconds = previousTimerValue * 30 / 50;
            log.debug("Seconds left until round start: {}", timeInSeconds);
            // Notify only on the transition across the threshold, not on every tick below it
            if (prevTimeInSeconds > timeToNotify && timeInSeconds <= timeToNotify) {
                notifier.notify("Wintertodt round is about to start");
                needRoundNotif = false;
            }
        }
        previousTimerValue = varbitChanged.getValue();
    }
}
@Test
public void matchStartingNotification_shouldNotify_when15SecondsOptionSelected() {
    when(config.roundNotification()).thenReturn(15);
    VarbitChanged varbitChanged = new VarbitChanged();
    varbitChanged.setVarbitId(Varbits.WINTERTODT_TIMER);
    // First update: 35 units (21s) is above the 15s threshold, no notification yet
    varbitChanged.setValue(35);
    wintertodtPlugin.onVarbitChanged(varbitChanged);
    //(15 * 50) / 30 = ~25
    varbitChanged.setValue(25);
    wintertodtPlugin.onVarbitChanged(varbitChanged);
    // Exactly one notification, fired on the threshold crossing
    verify(notifier, times(1)).notify("Wintertodt round is about to start");
}
/**
 * Routes the request to a Spring Cloud service instance: resolves the service id from
 * the selector handle, picks an upstream via the configured load balancer and rewrites
 * the request target to that upstream before continuing the plugin chain.
 */
@Override
protected Mono<Void> doExecute(final ServerWebExchange exchange, final ShenyuPluginChain chain, final SelectorData selector, final RuleData rule) {
    // No matching rule: nothing to route
    if (Objects.isNull(rule)) {
        return Mono.empty();
    }
    final ShenyuContext shenyuContext = exchange.getAttribute(Constants.CONTEXT);
    assert shenyuContext != null;
    final SpringCloudSelectorHandle springCloudSelectorHandle = SpringCloudPluginDataHandler.SELECTOR_CACHED.get().obtainHandle(selector.getId());
    final SpringCloudRuleHandle ruleHandle = buildRuleHandle(rule);
    String serviceId = springCloudSelectorHandle.getServiceId();
    // Misconfiguration: selector has no service id
    if (StringUtils.isBlank(serviceId)) {
        Object error = ShenyuResultWrap.error(exchange, ShenyuResultEnum.CANNOT_CONFIG_SPRINGCLOUD_SERVICEID);
        return WebFluxResultUtils.result(exchange, error);
    }
    // Client IP participates in load-balancer selection (e.g. ip-hash strategies)
    final String ip = Objects.requireNonNull(exchange.getRequest().getRemoteAddress()).getAddress().getHostAddress();
    final Upstream upstream = serviceChooser.choose(serviceId, selector.getId(), ip, ruleHandle.getLoadBalance());
    if (Objects.isNull(upstream)) {
        Object error = ShenyuResultWrap.error(exchange, ShenyuResultEnum.SPRINGCLOUD_SERVICEID_IS_ERROR);
        return WebFluxResultUtils.result(exchange, error);
    }
    final String domain = upstream.buildDomain();
    setDomain(URI.create(domain + shenyuContext.getRealUrl()), exchange);
    //set time out.
    exchange.getAttributes().put(Constants.HTTP_TIME_OUT, ruleHandle.getTimeout());
    return chain.execute(exchange);
}
@Test
public void testSpringCloudPluginRuleEmpty() {
    // A null rule must complete empty without touching the chain
    Mono<Void> execute = springCloudPlugin.doExecute(exchange, chain, selector, null);
    StepVerifier.create(execute).expectSubscription().verifyComplete();
}
/**
 * Deserializes JSON bytes from the topic into the target type, coercing values
 * to match the configured schema. A null payload yields null.
 *
 * @throws SerializationException if parsing or type coercion fails
 */
@Override
public T deserialize(final String topic, final byte[] bytes) {
    try {
        if (bytes == null) {
            return null;
        }
        // don't use the JsonSchemaConverter to read this data because
        // we require that the MAPPER enables USE_BIG_DECIMAL_FOR_FLOATS,
        // which is not currently available in the standard converters
        final JsonNode value = isJsonSchema
            ? JsonSerdeUtils.readJsonSR(bytes, MAPPER, JsonNode.class)
            : MAPPER.readTree(bytes);
        // Coerce the parsed tree to the declared schema, starting at the root path "$"
        final Object coerced = enforceFieldType(
            "$",
            new JsonValueContext(value, schema)
        );
        if (LOG.isTraceEnabled()) {
            LOG.trace("Deserialized {}. topic:{}, row:{}", target, topic, coerced);
        }
        return SerdeUtils.castToTargetType(coerced, targetType);
    } catch (final Exception e) {
        // Clear location in order to avoid logging data, for security reasons
        if (e instanceof JsonParseException) {
            ((JsonParseException) e).clearLocation();
        }
        throw new SerializationException(
            "Failed to deserialize " + target + " from topic: " + topic + ". " + e.getMessage(), e);
    }
}
@Test
public void shouldDeserializedJsonObjectAsMap() {
    // Given: a map schema with INT64 values — all entries must be coerced to Long
    final KsqlJsonDeserializer<Map> deserializer = givenDeserializerForSchema(
        SchemaBuilder
            .map(Schema.OPTIONAL_STRING_SCHEMA, Schema.OPTIONAL_INT64_SCHEMA)
            .build(),
        Map.class
    );
    final byte[] bytes = serializeJson(ImmutableMap.of("a", 42, "b", 42L, "c", "42"));
    // When:
    final Object result = deserializer.deserialize(SOME_TOPIC, bytes);
    // Then: ints and numeric strings are coerced to the INT64 value schema
    assertThat(result, is(ImmutableMap.of("a", 42L, "b", 42L, "c", 42L)));
}
/**
 * Loads and parses the client-side HDFS configuration files (core-site.xml and
 * hdfs-site.xml), accumulating any problems into the task state, message and advice.
 *
 * @return the validation result reflecting the accumulated state
 */
protected ValidationTaskResult loadHdfsConfig() {
    Pair<String, String> clientConfFiles = getHdfsConfPaths();
    String coreConfPath = clientConfFiles.getFirst();
    String hdfsConfPath = clientConfFiles.getSecond();
    // Each parse updates mState/mMsg/mAdvice as a side effect
    mCoreConf = accessAndParseConf("core-site.xml", coreConfPath);
    mHdfsConf = accessAndParseConf("hdfs-site.xml", hdfsConfPath);
    return new ValidationTaskResult(mState, getName(), mMsg.toString(), mAdvice.toString());
}
@Test
public void missingHdfsSiteXML() {
    // Only prepare core-site.xml
    String coreSite = Paths.get(sTestDir.toPath().toString(), "core-site.xml").toString();
    ValidationTestUtils.writeXML(coreSite, ImmutableMap.of("key1", "value1"));
    CONF.set(PropertyKey.UNDERFS_HDFS_CONFIGURATION, coreSite);
    HdfsConfValidationTask task = new HdfsConfValidationTask("hdfs://namenode:9000/alluxio", CONF);
    ValidationTaskResult result = task.loadHdfsConfig();
    // Missing hdfs-site.xml should skip (not fail) and advise how to configure it
    assertEquals(result.getState(), ValidationUtils.State.SKIPPED);
    assertThat(result.getResult(), containsString("hdfs-site.xml is not configured"));
    assertThat(result.getAdvice(), containsString("hdfs-site.xml"));
}
/** Returns the update center data without forcing a refresh (cached value allowed). */
public Optional<UpdateCenter> getUpdateCenter() {
    return getUpdateCenter(false);
}
@Test
public void ignore_connection_errors() {
    // A failing reader must not propagate: the result is simply empty
    when(reader.readString(any(URI.class), eq(StandardCharsets.UTF_8))).thenThrow(new SonarException());
    assertThat(underTest.getUpdateCenter()).isEmpty();
}
/** Binds the given device to the given pipeconf, replacing any existing binding. */
@Override
public void createOrUpdateBinding(DeviceId deviceId, PiPipeconfId pipeconfId) {
    deviceToPipeconf.put(deviceId, pipeconfId);
}
@Test
public void createOrUpdatePipeconfToDeviceBinding() {
    store.createOrUpdateBinding(DEVICE_ID, PIPECONF_ID);
    // Forward mapping: device -> pipeconf
    assertTrue("Value should be in the map", store.deviceToPipeconf.containsKey(DEVICE_ID));
    assertEquals("Value should be in the map", PIPECONF_ID, store.deviceToPipeconf.get(DEVICE_ID).value());
    // Reverse mapping: pipeconf -> set of devices
    assertTrue("Value should be in the map", store.pipeconfToDevices.containsKey(PIPECONF_ID));
    assertTrue("Value should be in the map", store.pipeconfToDevices.get(PIPECONF_ID).contains(DEVICE_ID));
}
/**
 * Sends the statement to the remote interpreter process for execution and merges the
 * returned configuration and form (GUI) state back into the local context.
 *
 * @param st      the statement/paragraph text to interpret
 * @param context the interpreter context, updated in place with remote config/GUI state
 * @return the remote execution result, or an ERROR result if the process is not running
 * @throws InterpreterException if the interpreter process cannot be obtained
 */
@Override
public InterpreterResult interpret(final String st, final InterpreterContext context) throws InterpreterException {
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("st:\n{}", st);
    }
    final FormType form = getFormType();
    RemoteInterpreterProcess interpreterProcess = null;
    try {
        interpreterProcess = getOrCreateInterpreterProcess();
    } catch (IOException e) {
        throw new InterpreterException(e);
    }
    if (!interpreterProcess.isRunning()) {
        return new InterpreterResult(InterpreterResult.Code.ERROR,
            "Interpreter process is not running\n" + interpreterProcess.getErrorMessage());
    }
    return interpreterProcess.callRemoteFunction(client -> {
            RemoteInterpreterResult remoteResult = client.interpret(
                sessionId, className, st, convert(context));
            // Replace the local paragraph config with whatever the remote side produced
            Map<String, Object> remoteConfig = (Map<String, Object>) GSON.fromJson(
                remoteResult.getConfig(), new TypeToken<Map<String, Object>>() {
                }.getType());
            context.getConfig().clear();
            if (remoteConfig != null) {
                context.getConfig().putAll(remoteConfig);
            }
            GUI currentGUI = context.getGui();
            GUI currentNoteGUI = context.getNoteGui();
            if (form == FormType.NATIVE) {
                // NATIVE forms: remote state fully replaces the paragraph GUI
                GUI remoteGui = GUI.fromJson(remoteResult.getGui());
                GUI remoteNoteGui = GUI.fromJson(remoteResult.getNoteGui());
                currentGUI.clear();
                currentGUI.setParams(remoteGui.getParams());
                currentGUI.setForms(remoteGui.getForms());
                currentNoteGUI.setParams(remoteNoteGui.getParams());
                currentNoteGUI.setForms(remoteNoteGui.getForms());
            } else if (form == FormType.SIMPLE) {
                // SIMPLE forms: remote forms/params are merged into the existing GUI
                final Map<String, Input> currentForms = currentGUI.getForms();
                final Map<String, Object> currentParams = currentGUI.getParams();
                final GUI remoteGUI = GUI.fromJson(remoteResult.getGui());
                final Map<String, Input> remoteForms = remoteGUI.getForms();
                final Map<String, Object> remoteParams = remoteGUI.getParams();
                currentForms.putAll(remoteForms);
                currentParams.putAll(remoteParams);
            }
            return convert(remoteResult);
        }
    );
}
// Verifies that an interpreter configured to fail ("zeppelin.interpreter.echo.fail")
// surfaces an ERROR result code through the remote interpreter.
@Test
void testRemoteInterperterErrorStatus() throws TTransportException, IOException, InterpreterException {
    interpreterSetting.setProperty("zeppelin.interpreter.echo.fail", "true");
    interpreterSetting.getOption().setPerUser(InterpreterOption.SHARED);
    Interpreter interpreter1 = interpreterSetting.getDefaultInterpreter("user1", note1Id);
    assertTrue(interpreter1 instanceof RemoteInterpreter);
    RemoteInterpreter remoteInterpreter1 = (RemoteInterpreter) interpreter1;
    // Fix: removed a stray duplicate semicolon that followed this declaration.
    InterpreterContext context1 = createDummyInterpreterContext();
    assertEquals(Code.ERROR, remoteInterpreter1.interpret("hello", context1).code());
}
/**
 * Convenience overload: resolves the Dubbo method resource name with the
 * interface-prefix option disabled (delegates with {@code false}).
 */
public static String getMethodResourceName(Invoker<?> invoker, Invocation invocation) {
    return getMethodResourceName(invoker, invocation, false);
}
// The resource name should be the interface FQN plus the method signature with
// fully-qualified parameter types: Interface:method(paramType,paramType).
@Test
public void testGetResourceName() throws NoSuchMethodException {
    Invoker invoker = mock(Invoker.class);
    when(invoker.getInterface()).thenReturn(DemoService.class);
    Invocation invocation = mock(Invocation.class);
    Method method = DemoService.class.getDeclaredMethod("sayHello", String.class, int.class);
    when(invocation.getMethodName()).thenReturn(method.getName());
    when(invocation.getParameterTypes()).thenReturn(method.getParameterTypes());
    String resourceName = DubboUtils.getMethodResourceName(invoker, invocation);
    assertEquals("com.alibaba.csp.sentinel.adapter.dubbo.provider.DemoService:sayHello(java.lang.String,int)", resourceName);
}
// Loads a role by id, caching the result under the ROLE cache key.
// The "unless" condition prevents caching a null (missing) role.
@Override
@Cacheable(value = RedisKeyConstants.ROLE, key = "#id", unless = "#result == null")
public RoleDO getRoleFromCache(Long id) {
    return roleMapper.selectById(id);
}
// Inserts a role, fetches it through the cacheable lookup, and verifies the
// returned entity matches the persisted one field-for-field.
@Test
public void testGetRoleFromCache() {
    // mock data (persisted; backs the cacheable lookup)
    RoleDO roleDO = randomPojo(RoleDO.class);
    roleMapper.insert(roleDO);
    // prepare arguments
    Long id = roleDO.getId();
    // invoke
    RoleDO dbRoleDO = roleService.getRoleFromCache(id);
    // assert
    assertPojoEquals(roleDO, dbRoleDO);
}
// Returns the column types; exposing the field directly is safe because it is
// an ImmutableList (see the SpotBugs suppression justification).
@Override
@SuppressFBWarnings(value = "EI_EXPOSE_REP", justification = "columnTypes is ImmutableList")
public List<ColumnType> columnTypes() {
    return columnTypes;
}
// The accessor should surface exactly the column types the row was built with.
@Test
public void shouldGetColumnTypes() {
    assertThat(row.columnTypes(), is(COLUMN_TYPES));
}
// Static factory: defensively snapshots the given set into an immutable copy
// before handing it to the AutoValue implementation.
public static StringSetData create(Set<String> set) {
    return new AutoValue_StringSetData(ImmutableSet.copyOf(set));
}
// Covers creation from empty, single-element, and multi-element sets; the
// stored set should equal the input in each case.
@Test
public void testCreate() {
    // test empty stringset creation
    assertTrue(StringSetData.create(Collections.emptySet()).stringSet().isEmpty());
    // single element test
    ImmutableSet<String> singleElement = ImmutableSet.of("ab");
    StringSetData setData = StringSetData.create(singleElement);
    assertEquals(setData.stringSet(), singleElement);
    // multiple element test
    ImmutableSet<String> multipleElement = ImmutableSet.of("cd", "ef");
    setData = StringSetData.create(multipleElement);
    assertEquals(setData.stringSet(), multipleElement);
}
/**
 * Determines whether a file exists by searching for it in the parent
 * directory listing. The root path always exists; a missing parent
 * directory (NotfoundException) is treated as "file not found".
 */
@Override
public boolean find(final Path file, final ListProgressListener listener) throws BackgroundException {
    if(file.isRoot()) {
        return true;
    }
    try {
        final Path found = this.search(file, listener);
        return found != null;
    }
    catch(NotfoundException e) {
        if(log.isDebugEnabled()) {
            log.debug(String.format("Parent directory for file %s not found", file));
        }
        return false;
    }
}
// A file absent from the (empty) directory listing is not found; the counter
// verifies the listing is re-queried on every call (no hidden caching).
@Test
public void testFind() throws Exception {
    final AtomicInteger count = new AtomicInteger();
    final DefaultFindFeature feature = new DefaultFindFeature(new NullSession(new Host(new TestProtocol())) {
        @Override
        public AttributedList<Path> list(Path file, ListProgressListener listener) {
            count.incrementAndGet();
            return AttributedList.emptyList();
        }
    });
    assertFalse(feature.find(new Path("/t", EnumSet.of(Path.Type.directory))));
    assertEquals(1, count.get());
    assertFalse(feature.find(new Path("/t", EnumSet.of(Path.Type.directory))));
    assertEquals(2, count.get());
}
/**
 * Exports the cluster metadata either to a file (when a file path is given in
 * the statement) or inline as a Base64-encoded string in the result row.
 *
 * @return a single-row result: instance id, timestamp, and either a success
 *         message with the file path or the Base64-encoded metadata
 */
@Override
public Collection<LocalDataQueryResultRow> getRows(final ExportMetaDataStatement sqlStatement, final ContextManager contextManager) {
    String exportedData = generateExportData(contextManager.getMetaDataContexts().getMetaData());
    if (sqlStatement.getFilePath().isPresent()) {
        String filePath = sqlStatement.getFilePath().get();
        ExportUtils.exportToFile(filePath, exportedData);
        return Collections.singleton(new LocalDataQueryResultRow(contextManager.getComputeNodeInstanceContext().getInstance().getMetaData().getId(),
            LocalDateTime.now(), String.format("Successfully exported to:'%s'", filePath)));
    }
    // No target file: return the metadata inline, Base64-encoded.
    return Collections.singleton(new LocalDataQueryResultRow(
        contextManager.getComputeNodeInstanceContext().getInstance().getMetaData().getId(), LocalDateTime.now(), Base64.encodeBase64String(exportedData.getBytes())));
}
// A database with no storage units and no rule configurations should still
// export successfully, producing the expected "empty" metadata payload.
@Test
void assertExecuteWithEmptyMetaData() {
    ContextManager contextManager = mockEmptyContextManager();
    when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager);
    when(ProxyContext.getInstance().getAllDatabaseNames()).thenReturn(Collections.singleton("empty_metadata"));
    when(database.getResourceMetaData().getAllInstanceDataSourceNames()).thenReturn(Collections.singleton("empty_metadata"));
    when(database.getResourceMetaData().getStorageUnits()).thenReturn(Collections.emptyMap());
    when(database.getRuleMetaData().getConfigurations()).thenReturn(Collections.emptyList());
    ExportMetaDataStatement sqlStatement = new ExportMetaDataStatement(null);
    Collection<LocalDataQueryResultRow> actual = new ExportMetaDataExecutor().getRows(sqlStatement, contextManager);
    assertThat(actual.size(), is(1));
    LocalDataQueryResultRow row = actual.iterator().next();
    // Cell 3 holds the Base64-encoded metadata payload.
    assertMetaData(row.getCell(3), EXPECTED_EMPTY_METADATA_VALUE);
}
/** Cached OS check; the flag is computed once at class initialization. */
public static boolean isWindows() {
    return IS_WINDOWS;
}
// Cross-checks the cached OS flags against the live "os.name" system property.
@Test
public void isWindows() {
    String osName = System.getProperty("os.name").toLowerCase();
    Assert.assertEquals(osName.contains("windows"), SystemInfo.isWindows());
    Assert.assertEquals(osName.contains("linux"), SystemInfo.isLinux());
    Assert.assertEquals(osName.contains("mac"), SystemInfo.isMac());
}
// Returns the git commit id recorded in the build properties, loading the
// properties lazily on first access.
public static String gitCommitId() {
    ensureLoaded();
    return commitId;
}
// The build must embed a real commit id: not the "unknown" placeholder, and
// shaped like a full 40-hex-digit SHA-1.
@Test
public void testGitCommitId() {
    assertThat(IcebergBuild.gitCommitId())
        .as("Should not use unknown commit ID")
        .isNotEqualTo("unknown");
    assertThat(
            Pattern.compile("[0-9a-f]{40}")
                .matcher(IcebergBuild.gitCommitId().toLowerCase(Locale.ROOT)))
        .as("Should be a hexadecimal string of 20 bytes")
        .matches();
}
/**
 * Renders markdown input as HTML. HTML metacharacters are escaped first so
 * raw user input cannot inject markup; only markdown syntax produces tags.
 */
public static String convertToHtml(String input) {
    final String escaped = StringEscapeUtils.escapeHtml4(input);
    return new Markdown().convert(escaped);
}
// Heading markers (=, ==, ... up to ======) map to <h1>..<h6>; deeper than six
// '=' signs still caps at <h6>, and leading whitespace/CR line endings are tolerated.
@Test
public void shouldDecorateHeadings() {
    assertThat(Markdown.convertToHtml(" = Top\r== Sub\r\n=== Sub sub\n ==== \n ===== five\n============ max"))
        .isEqualTo("<h1>Top</h1><h2>Sub</h2><h3>Sub sub</h3><h4></h4><h5>five</h5><h6>max</h6>");
}
/**
 * Parses the DST offset from the received characteristic value and dispatches
 * either the parsed-offset callback or the invalid-data callback.
 */
@Override
public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
    super.onDataReceived(device, data);
    final DSTOffset offset = readDSTOffset(data, 0);
    if (offset != null) {
        onDSTOffsetReceived(device, offset);
    } else {
        // Value could not be decoded as a DST offset.
        onInvalidDataReceived(device, data);
    }
}
// A raw value of 0 should decode to STANDARD_TIME and trigger the success path.
@Test
public void onDSTOffsetReceived_standard() {
    final Data data = new Data(new byte[] { 0 });
    callback.onDataReceived(null, data);
    assertTrue(success);
    assertSame(DSTOffsetCallback.DSTOffset.STANDARD_TIME, result);
}
/**
 * Validates an Edge entity before persistence: required fields (name, type,
 * secret, routing key), tenant existence, and customer assignment consistency.
 * A null customer id is normalized to the NULL_UUID sentinel in place.
 *
 * @throws DataValidationException if any required field is missing or a
 *         referenced tenant/customer is invalid
 */
@Override
protected void validateDataImpl(TenantId tenantId, Edge edge) {
    validateString("Edge name", edge.getName());
    validateString("Edge type", edge.getType());
    if (StringUtils.isEmpty(edge.getSecret())) {
        throw new DataValidationException("Edge secret should be specified!");
    }
    if (StringUtils.isEmpty(edge.getRoutingKey())) {
        throw new DataValidationException("Edge routing key should be specified!");
    }
    if (edge.getTenantId() == null) {
        throw new DataValidationException("Edge should be assigned to tenant!");
    } else {
        if (!tenantService.tenantExists(edge.getTenantId())) {
            throw new DataValidationException("Edge is referencing to non-existent tenant!");
        }
    }
    if (edge.getCustomerId() == null) {
        // Normalize "no customer" to the NULL_UUID sentinel.
        edge.setCustomerId(new CustomerId(NULL_UUID));
    } else if (!edge.getCustomerId().getId().equals(NULL_UUID)) {
        Customer customer = customerDao.findById(edge.getTenantId(), edge.getCustomerId().getId());
        if (customer == null) {
            throw new DataValidationException("Can't assign edge to non-existent customer!");
        }
        // Customer must belong to the same tenant as the edge.
        if (!customer.getTenantId().getId().equals(edge.getTenantId().getId())) {
            throw new DataValidationException("Can't assign edge to customer from different tenant!");
        }
    }
}
// Verifies (via spy) that validation delegates the name and type checks to
// validateString with the expected labels.
@Test
void testValidateNameInvocation() {
    Edge edge = new Edge();
    edge.setName("Edge 007");
    edge.setType("Silos");
    edge.setSecret("secret");
    edge.setRoutingKey("53c56104-d302-4d6e-97f5-a7a99c7effdc");
    edge.setTenantId(tenantId);
    validator.validateDataImpl(tenantId, edge);
    verify(validator).validateString("Edge name", edge.getName());
    verify(validator).validateString("Edge type", edge.getType());
}
/** Returns the constant identifier of this component. */
@Override
public String getName() {
    return NAME;
}
// Exercises min-share ratio calculation across three resource types, including
// the division-by-zero-min-share case (0 min share -> positive infinity ratio).
@Test
public void testCalculateMinShareRatios() {
    Map<String, Integer> index = ResourceUtils.getResourceTypeIndex();
    Resource used = Resources.createResource(10, 5);
    Resource minShares = Resources.createResource(5, 10);
    float[][] ratios = new float[3][3];
    DominantResourceFairnessComparatorN comparator = new DominantResourceFairnessComparatorN();
    used.setResourceValue("test", 2L);
    minShares.setResourceValue("test", 0L);
    comparator.calculateMinShareRatios(used, minShares, ratios);
    assertEquals("Calculated min share ratio for memory (10MB out of 5MB) is "
        + "incorrect", 2.0,
        ratios[index.get(ResourceInformation.MEMORY_MB.getName())][2], .00001f);
    assertEquals("Calculated min share ratio for vcores (5 out of 10) is "
        + "incorrect", 0.5,
        ratios[index.get(ResourceInformation.VCORES.getName())][2], .00001f);
    // Zero min share: any usage yields an infinite ratio.
    assertEquals("Calculated min share ratio for test resource (0 out of 5) is "
        + "incorrect", Float.POSITIVE_INFINITY, ratios[index.get("test")][2],
        0.00001f);
}
/**
 * Parses an URL-encoded query string into a {@link SearchQuery}. Supports
 * "key:value" pairs (comma-separated values, optional leading '-' for
 * negation); bare terms and keys not present in the field mapping fall back
 * to the default field. Unknown keys are also recorded as disallowed.
 *
 * @param encodedQueryString the raw, still URL-encoded query; null/empty/"*"
 *        short-circuits to an unfiltered query
 */
public SearchQuery parse(String encodedQueryString) {
    if (Strings.isNullOrEmpty(encodedQueryString) || "*".equals(encodedQueryString)) {
        return new SearchQuery(encodedQueryString);
    }
    final var queryString = URLDecoder.decode(encodedQueryString, StandardCharsets.UTF_8);
    final Matcher matcher = querySplitterMatcher(requireNonNull(queryString).trim());
    final ImmutableMultimap.Builder<String, FieldValue> builder = ImmutableMultimap.builder();
    final ImmutableSet.Builder<String> disallowedKeys = ImmutableSet.builder();
    while (matcher.find()) {
        final String entry = matcher.group();
        // Bare term without "key:" -> match against the default field.
        if (!entry.contains(":")) {
            builder.put(withPrefixIfNeeded(defaultField), createFieldValue(defaultFieldKey.getFieldType(), entry, false));
            continue;
        }
        final Iterator<String> entryFields = FIELD_VALUE_SPLITTER.splitToList(entry).iterator();
        checkArgument(entryFields.hasNext(), INVALID_ENTRY_MESSAGE, entry);
        final String key = entryFields.next();
        // Skip if there are no valid k/v pairs. (i.e. "action:")
        if (!entryFields.hasNext()) {
            continue;
        }
        // A leading '-' on the key negates all of its values.
        final boolean negate = key.startsWith("-");
        final String cleanKey = key.replaceFirst("^-", "");
        final String value = entryFields.next();
        VALUE_SPLITTER.splitToList(value).forEach(v -> {
            if (!dbFieldMapping.containsKey(cleanKey)) {
                disallowedKeys.add(cleanKey);
            }
            final SearchQueryField translatedKey = dbFieldMapping.get(cleanKey);
            if (translatedKey != null) {
                builder.put(withPrefixIfNeeded(translatedKey.getDbField()), createFieldValue(translatedKey.getFieldType(), v, negate));
            } else {
                // Unknown key: value still searches the default field.
                builder.put(withPrefixIfNeeded(defaultField), createFieldValue(defaultFieldKey.getFieldType(), v, negate));
            }
        });
        checkArgument(!entryFields.hasNext(), INVALID_ENTRY_MESSAGE, entry);
    }
    return new SearchQuery(queryString, builder.build(), disallowedKeys.build());
}
// A key outside the allowed field set must be reported as disallowed while its
// value still searches the default field.
@Test
void disallowedField() {
    SearchQueryParser parser = new SearchQueryParser("defaultfield", ImmutableSet.of("name", "id"));
    final SearchQuery query = parser.parse("notallowed:foo");
    final Multimap<String, SearchQueryParser.FieldValue> queryMap = query.getQueryMap();
    assertThat(queryMap.size()).isEqualTo(1);
    assertThat(queryMap.get("defaultfield")).containsOnly(new SearchQueryParser.FieldValue("foo", false));
    assertThat(query.hasDisallowedKeys()).isTrue();
    assertThat(query.getDisallowedKeys()).containsExactly("notallowed");
    final DBQuery.Query dbQuery = query.toDBQuery();
    final Collection<String> fieldNames = extractFieldNames(dbQuery.conditions());
    assertThat(fieldNames).containsExactly("defaultfield");
}
/**
 * Lists the tables of a keyspace from the cluster's system schema tables,
 * paged via Cassandra's native paging state (the args cursor is the previous
 * page's paging state as a hex string).
 */
public CqlSessionSelectResults tableList(String clusterId, TableDTO.ClusterTableListArgs args) {
    CqlSession session = cqlSessionFactory.get(clusterId);
    SimpleStatement statement = ClusterUtils.getSchemaTables(session, args.getKeyspace())
        .all()
        .whereColumn(CassandraSystemTablesColumn.TABLES_KEYSPACE_NAME.getColumnName()).isEqualTo(bindMarker())
        .build()
        .setPageSize(args.getPageSize())
        .setTimeout(Duration.ofSeconds(3))
        // Resume from the caller-provided cursor, if any.
        .setPagingState(StringUtils.isNotBlank(args.getCursor()) ? Bytes.fromHexString(args.getCursor()) : null);
    PreparedStatement preparedStatement = session.prepare(statement);
    ResultSet resultSet = session.execute(preparedStatement.bind(args.getKeyspace()));
    return CqlSessionSelectResults.of(
        convertRows(session, resultSet),
        CassdioColumnDefinition.makes(resultSet.getColumnDefinitions()),
        resultSet.getExecutionInfo().getPagingState()
    );
}
// Listing tables in a keyspace that has none should yield an empty row set.
@Test
void when_empty_table_in_keyspace_result_empty() {
    // given
    TableDTO.ClusterTableListArgs args = TableDTO.ClusterTableListArgs.builder()
        .keyspace("empty_table_keyspace")
        .build();
    // when
    CqlSessionSelectResults sut = clusterTableListCommander.tableList(CLUSTER_ID, args);
    // then
    assertThat(sut).isNotNull();
    assertThat(sut.getRows()).isEmpty();
    // NOTE(review): paging-state assertion is disabled — confirm whether an
    // empty result should carry a null paging state before re-enabling.
    // assertThat(sut.getNextPageState()).isNull();
}
public Repository connectRepository( String repositoryName, String username, String password ) throws KettleException { // Verify that the repository exists on the slave server... // RepositoriesMeta repositoriesMeta = new RepositoriesMeta(); repositoriesMeta.getLog().setLogLevel( log.getLogLevel() ); try { repositoriesMeta.readData(); } catch ( Exception e ) { throw new KettleException( "Unable to get a list of repositories to locate repository '" + repositoryName + "'" ); } return connectRepository( repositoriesMeta, repositoryName, username, password ); }
// End-to-end mock of repository connection: an unknown repository name and a
// failed connect leave the configured repository untouched; a successful
// connect replaces it.
@Test
public void testConnectRepository() throws KettleException {
    JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration();
    final RepositoriesMeta repositoriesMeta = mock( RepositoriesMeta.class );
    final RepositoryMeta repositoryMeta = mock( RepositoryMeta.class );
    final Repository repository = mock( Repository.class );
    final String mockRepo = "mockRepo";
    // Single-element array so the doAnswer lambda can mutate it.
    final boolean[] connectionSuccess = {false};
    Repository initialRepo = mock( Repository.class );
    jobExecutionConfiguration.setRepository( initialRepo );
    KettleLogStore.init();
    //Create mock repository plugin
    MockRepositoryPlugin mockRepositoryPlugin = mock( MockRepositoryPlugin.class );
    when( mockRepositoryPlugin.getIds() ).thenReturn( new String[]{"mockRepo"} );
    when( mockRepositoryPlugin.matches( "mockRepo" ) ).thenReturn( true );
    when( mockRepositoryPlugin.getName() ).thenReturn( "mock-repository" );
    when( mockRepositoryPlugin.getClassMap() ).thenAnswer( (Answer<Map<Class<?>, String>>) invocation -> {
        Map<Class<?>, String> dbMap = new HashMap<>();
        dbMap.put( Repository.class, repositoryMeta.getClass().getName() );
        return dbMap;
    } );
    PluginRegistry.getInstance().registerPlugin( RepositoryPluginType.class, mockRepositoryPlugin );
    // Define valid connection criteria
    when( repositoriesMeta.findRepository( anyString() ) ).thenAnswer(
        (Answer<RepositoryMeta>) invocation -> mockRepo.equals( invocation.getArguments()[0] ) ? repositoryMeta : null );
    when( mockRepositoryPlugin.loadClass( Repository.class ) ).thenReturn( repository );
    // Only the username/password pair is accepted; anything else throws.
    doAnswer( invocation -> {
        if ( "username".equals( invocation.getArguments()[0] ) && "password".equals( invocation.getArguments()[1] ) ) {
            connectionSuccess[0] = true;
        } else {
            connectionSuccess[0] = false;
            throw new KettleException( "Mock Repository connection failed" );
        }
        return null;
    } ).when( repository ).connect( anyString(), anyString() );
    //Ignore repository not found in RepositoriesMeta
    jobExecutionConfiguration.connectRepository( repositoriesMeta, "notFound", "username", "password" );
    assertEquals( "Repository Changed", initialRepo, jobExecutionConfiguration.getRepository() );
    //Ignore failed attempt to connect
    jobExecutionConfiguration.connectRepository( repositoriesMeta, mockRepo, "username", "" );
    assertEquals( "Repository Changed", initialRepo, jobExecutionConfiguration.getRepository() );
    //Save repository if connection passes
    jobExecutionConfiguration.connectRepository( repositoriesMeta, mockRepo, "username", "password" );
    assertEquals( "Repository didn't change", repository, jobExecutionConfiguration.getRepository() );
    assertTrue( "Repository not connected", connectionSuccess[0] );
}
// Looks up a post by name; the Mono completes empty when no such post exists.
@Override
public Mono<Post> get(String name) {
    return client.fetch(Post.class, name);
}
// A known post name emits the post; an unknown name completes empty.
@Test
void get() {
    when(client.fetch(eq(Post.class), any()))
        .thenReturn(Mono.empty());
    when(client.fetch(eq(Post.class), eq("fake-post")))
        .thenReturn(Mono.just(TestPost.postV1()));
    postCommentSubject.get("fake-post")
        .as(StepVerifier::create)
        .expectNext(TestPost.postV1())
        .verifyComplete();
    postCommentSubject.get("fake-post2")
        .as(StepVerifier::create)
        .verifyComplete();
}
// Finds a column in the VALUE namespace with the given name; matching is
// exact (case-sensitive — see the wrong-case test).
public Optional<Column> findValueColumn(final ColumnName columnName) {
    return findColumnMatching(withNamespace(VALUE).and(withName(columnName)));
}
// Column lookup is case-sensitive: "F0" must not match a column named "f0".
@Test
public void shouldNotGetColumnByNameIfWrongCase() {
    // When:
    final Optional<Column> result = SOME_SCHEMA.findValueColumn(ColumnName.of("F0"));
    // Then:
    assertThat(result, is(Optional.empty()));
}
/**
 * Builds the mobile product representation, fetching the current price from
 * the downstream price service.
 */
@GetMapping("/mobile")
public MobileProduct getProductMobile() {
    final MobileProduct product = new MobileProduct();
    product.setPrice(priceClient.getPrice());
    return product;
}
// The mobile product should carry the price reported by the price client.
@Test
void testGetProductMobile() {
    var price = "20";
    when(priceClient.getPrice()).thenReturn(price);
    var mobileProduct = apiGateway.getProductMobile();
    assertEquals(price, mobileProduct.getPrice());
}
/**
 * Attempts to rewrite a regular expression as a plain string literal.
 * Succeeds only when the pattern matches exactly one fixed string: no
 * unescaped control constructs outside \Q...\E quotation, and only escapes
 * representable as literal characters. Malformed regexes are left alone so
 * the original runtime error is preserved.
 *
 * @return the equivalent literal string, or empty if the regex cannot be
 *         expressed as one (or is malformed)
 */
public static Optional<String> convertRegexToLiteral(String s) {
    try {
        Pattern.compile(s);
    } catch (PatternSyntaxException e) {
        /* The string is a malformed regular expression which will throw an error at runtime. We will
         * preserve this behavior by not rewriting it. */
        return Optional.empty();
    }
    boolean inQuote = false;
    StringBuilder result = new StringBuilder();
    int length = s.length();
    for (int i = 0; i < length; ++i) {
        char current = s.charAt(i);
        if (!inQuote && UNESCAPED_CONSTRUCT.matches(current)) {
            /* If we see an unescaped regular expression control character then we can't match this as a
             * string-literal so give up */
            return Optional.empty();
        } else if (current == '\\') {
            /* There should be a character following the backslash. No need to check for string length
             * since we have already ascertained we have a well formed regex */
            char escaped = s.charAt(++i);
            if (escaped == 'Q') {
                inQuote = true;
            } else if (escaped == 'E') {
                inQuote = false;
            } else {
                /* If not starting or ending a quotation (\Q...\E) backslashes can only be used to write
                 * escaped constructs or to quote characters that would otherwise be interpreted as
                 * unescaped constructs.
                 *
                 * If they are escaping a construct we can write as a literal string (i.e. one of \t \n
                 * \f \r or \\) then we convert to a literal character.
                 *
                 * If they are escaping an unescaped construct we convert to the relevant character
                 *
                 * Everything else we can't represent in a literal string */
                Character controlChar = REGEXCHAR_TO_LITERALCHAR.get(escaped);
                if (controlChar != null) {
                    result.append(controlChar);
                } else if (escaped == '\\') {
                    result.append('\\');
                } else if (UNESCAPED_CONSTRUCT.matches(escaped)) {
                    result.append(escaped);
                } else {
                    return Optional.empty();
                }
            }
        } else {
            /* Otherwise we have a literal character to match so keep going */
            result.append(current);
        }
    }
    return Optional.of(result.toString());
}
// A pattern with real regex constructs (character class, quantifier) cannot be
// reduced to a literal string.
@Test
public void negative() {
    assertThat(Regexes.convertRegexToLiteral("[a-z]+")).isEmpty();
}
/**
 * Returns the current master node of the given device in the given virtual
 * network, or null when no master is recorded.
 */
@Override
public NodeId getMaster(NetworkId networkId, DeviceId deviceId) {
    return getMasterMap(networkId).get(deviceId);
}
// After assigning N2 as master of VDID3, both the role and the master lookup
// should reflect that assignment.
@Test
public void getMaster() {
    put(VNID1, VDID3, N2, true, true);
    assertEquals("wrong role", MASTER, sms.getRole(VNID1, N2, VDID3));
    assertEquals("wrong node", N2, sms.getMaster(VNID1, VDID3));
}
/**
 * Removes the head of the pending-write queue and writes its message through
 * the invoker, completing with the write's original promise.
 *
 * Must run on the event loop. Returns null when the queue is empty. The
 * PendingWrite node is recycled before the write is issued.
 */
public ChannelFuture removeAndWrite() {
    assert executor.inEventLoop();
    PendingWrite write = head;
    if (write == null) {
        return null;
    }
    Object msg = write.msg;
    ChannelPromise promise = write.promise;
    // Recycle the node (unlinking it) before handing msg/promise to the invoker.
    recycle(write, true);
    return invoker.write(msg, promise);
}
// During flush, removeAndWrite should drain the single queued write and leave
// the queue empty once the write future completes.
@Test
public void testRemoveAndWrite() {
    assertWrite(new TestHandler() {
        @Override
        public void flush(ChannelHandlerContext ctx) throws Exception {
            assertFalse(ctx.channel().isWritable(), "Should not be writable anymore");
            ChannelFuture future = queue.removeAndWrite();
            future.addListener(new ChannelFutureListener() {
                @Override
                public void operationComplete(ChannelFuture future) {
                    assertQueueEmpty(queue);
                }
            });
            super.flush(ctx);
        }
    }, 1);
}
/**
 * Schedules periodic offset commits for the given source task at the
 * configured commit interval, recording the future so it can be cancelled
 * later. Each commit runs inside an offsets logging context for the task.
 */
public void schedule(final ConnectorTaskId id, final WorkerSourceTask workerTask) {
    final long intervalMs = config.getLong(WorkerConfig.OFFSET_COMMIT_INTERVAL_MS_CONFIG);
    final Runnable commitTask = () -> {
        try (LoggingContext loggingContext = LoggingContext.forOffsets(id)) {
            commit(workerTask);
        }
    };
    final ScheduledFuture<?> future =
        commitExecutorService.scheduleWithFixedDelay(commitTask, intervalMs, intervalMs, TimeUnit.MILLISECONDS);
    committers.put(id, future);
}
// Scheduling should submit a fixed-delay task at the default commit interval
// and record the returned future under the task id.
@SuppressWarnings("unchecked")
@Test
public void testSchedule() {
    ArgumentCaptor<Runnable> taskWrapper = ArgumentCaptor.forClass(Runnable.class);
    when(executor.scheduleWithFixedDelay(
        taskWrapper.capture(), eq(DEFAULT_OFFSET_COMMIT_INTERVAL_MS),
        eq(DEFAULT_OFFSET_COMMIT_INTERVAL_MS), eq(TimeUnit.MILLISECONDS))
    ).thenReturn((ScheduledFuture) commitFuture);
    committer.schedule(taskId, task);
    assertNotNull(taskWrapper.getValue());
    assertEquals(singletonMap(taskId, commitFuture), committers);
}
/**
 * Reflectively invokes the named method on the target with the given
 * parameter types and arguments. Returns empty when the target or method
 * name is null, or when no matching method can be found.
 */
public static Optional<Object> invokeMethod(Object target, String methodName, Class<?>[] paramsType, Object[] params) {
    if (target == null || methodName == null) {
        return Optional.empty();
    }
    return findMethod(target.getClass(), methodName, paramsType)
            .flatMap(method -> invokeMethod(target, method, params));
}
// Reflective invocation of a static method should yield the same value as a
// direct call with the same argument.
@Test
public void testInvokeMethod() {
    int params = 88;
    final Optional<Object> staticMethod = ReflectUtils
        .invokeMethod(TestReflect.class.getName(), "staticMethod", new Class[]{int.class}, new Object[]{params});
    Assert.assertTrue(staticMethod.isPresent() && staticMethod.get() instanceof String);
    Assert.assertEquals(TestReflect.staticMethod(params), staticMethod.get());
}
// Reads the version string from the loaded properties.
public static String getVersion() throws IOException {
    return loadProperties().getProperty(VERSION);
}
// Verifies that the loaded version matches the expected default.
// Fix: JUnit's assertEquals takes (expected, actual) — the original passed the
// arguments reversed (and wrapped the constant in redundant parentheses),
// which produces misleading failure messages.
@Test
public void testGetVersion() throws IOException {
    assertEquals(DEFAULT, getVersion());
}
/**
 * Validates extension metadata (version, target platform, field character
 * sets, field sizes, URLs, and enumerated values), wrapped in a metrics
 * observation. Returns the accumulated list of issues (empty if valid).
 */
public List<Issue> validateMetadata(ExtensionVersion extVersion) {
    return Observation.createNotStarted("ExtensionValidator#validateMetadata", observations).observe(() -> {
        var issues = new ArrayList<Issue>();
        checkVersion(extVersion.getVersion(), issues);
        checkTargetPlatform(extVersion.getTargetPlatform(), issues);
        checkCharacters(extVersion.getDisplayName(), "displayName", issues);
        checkFieldSize(extVersion.getDisplayName(), DEFAULT_STRING_SIZE, "displayName", issues);
        checkCharacters(extVersion.getDescription(), "description", issues);
        checkFieldSize(extVersion.getDescription(), DESCRIPTION_SIZE, "description", issues);
        checkCharacters(extVersion.getCategories(), "categories", issues);
        checkFieldSize(extVersion.getCategories(), DEFAULT_STRING_SIZE, "categories", issues);
        checkCharacters(extVersion.getTags(), "keywords", issues);
        checkFieldSize(extVersion.getTags(), DEFAULT_STRING_SIZE, "keywords", issues);
        checkCharacters(extVersion.getLicense(), "license", issues);
        checkFieldSize(extVersion.getLicense(), DEFAULT_STRING_SIZE, "license", issues);
        checkURL(extVersion.getHomepage(), "homepage", issues);
        checkFieldSize(extVersion.getHomepage(), DEFAULT_STRING_SIZE, "homepage", issues);
        checkURL(extVersion.getRepository(), "repository", issues);
        checkFieldSize(extVersion.getRepository(), DEFAULT_STRING_SIZE, "repository", issues);
        checkURL(extVersion.getBugs(), "bugs", issues);
        checkFieldSize(extVersion.getBugs(), DEFAULT_STRING_SIZE, "bugs", issues);
        checkInvalid(extVersion.getMarkdown(), s -> !MARKDOWN_VALUES.contains(s), "markdown", issues, MARKDOWN_VALUES.toString());
        checkCharacters(extVersion.getGalleryColor(), "galleryBanner.color", issues);
        checkFieldSize(extVersion.getGalleryColor(), GALLERY_COLOR_SIZE, "galleryBanner.color", issues);
        checkInvalid(extVersion.getGalleryTheme(), s -> !GALLERY_THEME_VALUES.contains(s), "galleryBanner.theme", issues, GALLERY_THEME_VALUES.toString());
        checkFieldSize(extVersion.getLocalizedLanguages(), DEFAULT_STRING_SIZE, "localizedLanguages", issues);
        // qna accepts either a fixed enumerated value or a valid URL.
        checkInvalid(extVersion.getQna(), s -> !QNA_VALUES.contains(s) && isInvalidURL(s), "qna", issues, QNA_VALUES.toString() + " or a URL");
        checkFieldSize(extVersion.getQna(), DEFAULT_STRING_SIZE, "qna", issues);
        return issues;
    });
}
// A "git+https://" repository URL must be accepted as a valid repository field.
@Test
public void testGitProtocol() {
    var extension = new ExtensionVersion();
    extension.setTargetPlatform(TargetPlatform.NAME_UNIVERSAL);
    extension.setVersion("1.0.0");
    extension.setRepository("git+https://github.com/Foo/Bar.git");
    var issues = validator.validateMetadata(extension);
    assertThat(issues).isEmpty();
}
/**
 * Encodes a NextObjective to JSON: common objective properties plus id, type,
 * operation, the weighted TREATMENT-type next-treatments, and the optional
 * meta selector.
 */
@Override
public ObjectNode encode(NextObjective nextObjective, CodecContext context) {
    checkNotNull(nextObjective, NOT_NULL_MESSAGE);
    final JsonCodec<TrafficTreatment> trafficTreatmentCodec = context.codec(TrafficTreatment.class);
    final JsonCodec<TrafficSelector> trafficSelectorCodec = context.codec(TrafficSelector.class);
    // encode common properties
    ObjectiveCodecHelper och = new ObjectiveCodecHelper();
    ObjectNode result = och.encode(nextObjective, context);
    // encode id
    result.put(ID, nextObjective.id());
    // encode type
    result.put(TYPE, nextObjective.type().toString());
    // encode operation
    result.put(OPERATION, nextObjective.op().toString());
    // encode treatments; only TREATMENT-type next-treatments are serialized,
    // each carrying its weight alongside the treatment JSON
    ArrayNode treatments = context.mapper().createArrayNode();
    nextObjective.nextTreatments().forEach(nt -> {
        if (nt.type().equals(NextTreatment.Type.TREATMENT)) {
            TrafficTreatment tt = ((DefaultNextTreatment) nt).treatment();
            ObjectNode treatmentJson = trafficTreatmentCodec.encode(tt, context);
            treatmentJson.put(WEIGHT, nt.weight());
            treatments.add(treatmentJson);
        }
    });
    result.set(TREATMENTS, treatments);
    // encode meta
    if (nextObjective.meta() != null) {
        ObjectNode trafficSelectorNode = trafficSelectorCodec.encode(nextObjective.meta(), context);
        result.set(META, trafficSelectorNode);
    }
    return result;
}
// Round-trip check: an encoded hashed NextObjective with one weighted
// treatment must satisfy the matcher against the original objective.
@Test
public void testNextObjectiveEncode() {
    TrafficTreatment treatment = DefaultTrafficTreatment.builder().build();
    NextTreatment nextTreatment = DefaultNextTreatment.of(treatment, 5);
    NextObjective nextObj = DefaultNextObjective.builder()
        .makePermanent()
        .withType(NextObjective.Type.HASHED)
        .fromApp(APP_ID)
        .withPriority(60)
        .withId(5)
        .addTreatment(nextTreatment)
        .add();
    ObjectNode nextObjJson = nextObjectiveCodec.encode(nextObj, context);
    assertThat(nextObjJson, matchesNextObjective(nextObj));
}
/**
 * Creates a JMS destination from a prefixed name ("queue://name",
 * "topic://name"); any other destination type falls back to a temporary
 * destination.
 */
protected Destination createDestination(String destName) throws JMSException {
    String simpleName = getSimpleName(destName);
    byte destinationType = getDestinationType(destName);
    if (destinationType == ActiveMQDestination.QUEUE_TYPE) {
        LOG.info("Creating queue: {}", destName);
        return getSession().createQueue(simpleName);
    } else if (destinationType == ActiveMQDestination.TOPIC_TYPE) {
        LOG.info("Creating topic: {}", destName);
        return getSession().createTopic(simpleName);
    } else {
        return createTemporaryDestination(destName);
    }
}
// A "queue://" prefix should yield a queue destination with the bare name.
@Test
public void testCreateDestination_queue() throws JMSException {
    assertDestinationNameType("dest", QUEUE_TYPE, asAmqDest(jmsClient.createDestination("queue://dest")));
}
public static boolean isXMLWellFormed( InputStream is ) throws KettleException { boolean retval = false; try { SAXParserFactory factory = XMLParserFactoryProducer.createSecureSAXParserFactory(); XMLTreeHandler handler = new XMLTreeHandler(); // Parse the input. SAXParser saxParser = factory.newSAXParser(); saxParser.parse( is, handler ); retval = true; } catch ( Exception e ) { throw new KettleException( e ); } return retval; }
// "Billion laughs" protection: the secure parser must reject exponential
// entity expansion by throwing KettleException instead of expanding it.
@Test( expected = KettleException.class )
public void exceptionIsThrownWhenParsingXmlWithBigAmountOfExternalEntities() throws KettleException {
    final String maliciousXml =
        "<?xml version=\"1.0\"?>\n"
        + "<!DOCTYPE lolz [\n"
        + " <!ENTITY lol \"lol\">\n"
        + " <!ELEMENT lolz (#PCDATA)>\n"
        + " <!ENTITY lol1 \"&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;&lol;\">\n"
        + " <!ENTITY lol2 \"&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;&lol1;\">\n"
        + " <!ENTITY lol3 \"&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;\">\n"
        + " <!ENTITY lol4 \"&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;\">\n"
        + " <!ENTITY lol5 \"&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;\">\n"
        + " <!ENTITY lol6 \"&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;\">\n"
        + " <!ENTITY lol7 \"&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;&lol6;\">\n"
        + " <!ENTITY lol8 \"&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;&lol7;\">\n"
        + " <!ENTITY lol9 \"&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;&lol8;\">\n"
        + "]>\n"
        + "<lolz>&lol9;</lolz>";
    XMLCheck.isXMLWellFormed( new ByteArrayInputStream( maliciousXml.getBytes() ) );
}
/**
 * Registers an event consumer for the given event class name and marks that
 * at least one consumer exists. Thread-safe: the method is synchronized and
 * the per-class consumer set is a CopyOnWriteArraySet.
 */
@SuppressWarnings("unchecked")
public synchronized void registerConsumer(String className, EventConsumer<? extends T> eventConsumer) {
    // computeIfAbsent collapses the duplicated branches of the original
    // compute() call — both paths merely added the consumer to the set.
    this.eventConsumerMap
        .computeIfAbsent(className, k -> new CopyOnWriteArraySet<>())
        .add((EventConsumer<T>) eventConsumer);
    this.consumerRegistered = true;
}
// Registering a consumer keyed by class name should route a matching event to
// it exactly once and report the event as consumed.
@Test
public void testRegisterConsumer() {
    EventProcessor<Number> eventProcessor = new EventProcessor<>();
    EventConsumer<Integer> eventConsumer = event -> logger.info(event.toString());
    eventProcessor.registerConsumer(Integer.class.getName(), eventConsumer);
    assertThat(eventProcessor.eventConsumerMap).hasSize(1);
    assertThat(eventProcessor.eventConsumerMap.get(Integer.class.getName())).hasSize(1);
    boolean consumed = eventProcessor.processEvent(1);
    then(logger).should(times(1)).info("1");
    assertThat(consumed).isTrue();
}
/**
 * Handles a configuration long-polling request: if any config group has
 * changed (md5 mismatch) respond immediately; otherwise park the request as
 * an async context until a change event or the server-side hold timeout fires.
 */
public void doLongPolling(final HttpServletRequest request, final HttpServletResponse response) {
    // compare group md5
    List<ConfigGroupEnum> changedGroup = compareChangedGroup(request);
    String clientIp = getRemoteIp(request);
    // response immediately.
    if (CollectionUtils.isNotEmpty(changedGroup)) {
        this.generateResponse(response, changedGroup);
        LOG.info("send response with the changed group, ip={}, group={}", clientIp, changedGroup);
        return;
    }
    LOG.debug("no changed group, ip={}, waiting for compare cache changed", clientIp);
    // listen for configuration changed.
    final AsyncContext asyncContext = request.startAsync();
    // AsyncContext.settimeout() does not timeout properly, so you have to control it yourself
    asyncContext.setTimeout(0L);
    // block client's thread.
    scheduler.execute(new LongPollingClient(asyncContext, clientIp, HttpConstants.SERVER_MAX_HOLD_TIMEOUT));
}
// Composite test that exercises the long-polling collaborators in sequence.
// NOTE(review): calling sibling test methods directly couples this test to
// their internals and ordering — consider extracting shared setup helpers.
@Test
public void testDoLongPolling() throws UnsupportedEncodingException {
    testCompareChangedGroup();
    testGetRemoteIp();
    testGenerateResponse();
}
/**
 * Removes a state listener. The removal is performed asynchronously on the
 * property-event executor, serialized with other state mutations.
 */
public void unregister(final SimpleLoadBalancerStateListener listener) {
    trace(_log, "unregister listener: ", listener);
    _executor.execute(new PropertyEvent("remove listener for state") {
        @Override
        public void innerRun() {
            _listeners.remove(listener);
        }
    });
}
// A registered listener receives strategy updates; after unregistering, a
// subsequent cluster update must leave the listener's last-seen state unchanged.
@Test(groups = { "small", "back-end" })
public void testUnregister() {
    reset();
    TestListener listener = new TestListener();
    List<String> schemes = new ArrayList<>();
    schemes.add("http");
    _state.register(listener);
    assertNull(listener.scheme);
    assertNull(listener.strategy);
    assertNull(listener.serviceName);
    // trigger a strategy add
    // first add a cluster
    _state.listenToCluster("cluster-1", new NullStateListenerCallback());
    _clusterRegistry.put("cluster-1", new ClusterProperties("cluster-1"));
    // then add a service
    _state.listenToService("service-1", new NullStateListenerCallback());
    _serviceRegistry.put("service-1", new ServiceProperties("service-1", "cluster-1",
        "/test", Arrays.asList("random"), Collections.<String, Object> emptyMap(),
        null, null, schemes, null));
    // this should trigger a refresh
    assertEquals(listener.scheme, "http");
    assertTrue(listener.strategy instanceof RandomLoadBalancerStrategy);
    assertEquals(listener.serviceName, "service-1");
    _state.unregister(listener);
    // then update the cluster
    _clusterRegistry.put("cluster-1", new ClusterProperties("cluster-1"));
    // there should be no update here, since we unregistered
    assertEquals(listener.scheme, "http");
    assertTrue(listener.strategy instanceof RandomLoadBalancerStrategy);
    assertEquals(listener.serviceName, "service-1");
}
/**
 * Serializes a map to an XML string using {@code "xml"} as the root element name.
 *
 * @param data map whose entries become child elements of the root
 * @return the XML string representation of {@code data}
 */
public static String mapToXmlStr(Map<?, ?> data) {
    // Delegate to the document-producing overload with the default root name,
    // then render the resulting document to a string.
    final String rootName = "xml";
    return toStr(mapToXml(data, rootName));
}
/**
 * When the XML declaration is omitted, the serialized map must contain only the
 * root element and its children — no {@code <?xml ...?>} prolog.
 */
@Test
public void mapToXmlTestWithOmitXmlDeclaration() {
    // LinkedHashMap keeps insertion order so the expected string is deterministic.
    final Map<String, Object> map = MapBuilder.create(new LinkedHashMap<String, Object>())
        .put("name", "ddatsh")
        .build();
    final String xml = XmlUtil.mapToXmlStr(map, true);
    assertEquals("<xml><name>ddatsh</name></xml>", xml);
}
/**
 * Returns the serialization configuration held by this config object.
 *
 * @return the current {@link SerializationConfig} (live reference, not a copy)
 */
public SerializationConfig getSerializationConfig() {
    return this.serializationConfig;
}
/**
 * A portable factory registered on the client-side config must be visible when
 * reading the serialization config back from the connected client instance.
 */
@Test
public void testAccessSerializationConfigOverClientInstance() {
    hazelcastFactory.newHazelcastInstance();
    ClientConfig clientConfig = new ClientConfig();
    clientConfig.getSerializationConfig().addPortableFactory(PortableFactory.FACTORY_ID, new PortableFactory());
    HazelcastInstance client = hazelcastFactory.newHazelcastClient(clientConfig);
    SerializationConfig serializationConfig = client.getConfig().getSerializationConfig();
    Map<Integer, com.hazelcast.nio.serialization.PortableFactory> factories = serializationConfig.getPortableFactories();
    assertEquals(1, factories.size());
    // The registered factory must still produce the expected portable class.
    assertEquals(Employee.CLASS_ID, factories.get(PortableFactory.FACTORY_ID).create(Employee.CLASS_ID).getClassId());
}
/**
 * Builds a snapshot of the branch issue-sync progress.
 * <p>
 * "Completed" means no sync task is pending or in progress; failures are
 * reported independently of completion.
 *
 * @param dbSession open DB session used for all counts
 * @return progress snapshot (completed flag, indexed/total counts, failure flag)
 */
public IssueSyncProgress getIssueSyncProgress(DbSession dbSession) {
    int indexedProjects = dbClient.projectDao().countIndexedProjects(dbSession);
    int projectTotal = dbClient.projectDao().countProjects(dbSession);
    boolean anyTaskFailed = dbClient.ceActivityDao().hasAnyFailedOrCancelledIssueSyncTask(dbSession);
    boolean nothingPending = !dbClient.ceQueueDao().hasAnyIssueSyncTaskPendingOrInProgress(dbSession);
    return new IssueSyncProgress(nothingPending, indexedProjects, projectTotal, anyTaskFailed);
}
/**
 * With an IN_PROGRESS sync task in the queue, the progress snapshot must report
 * not-completed regardless of how many projects/branches still need syncing.
 */
@Test
public void return_is_completed_false_if_in_progress_task_exist_and_branches_need_issue_sync() {
    insertCeQueue("TASK_1", Status.IN_PROGRESS);
    // only project
    IntStream.range(0, 10).forEach(value -> insertProjectWithBranches(true, 0));
    // project + additional branch
    IntStream.range(0, 10).forEach(value -> insertProjectWithBranches(false, 1));

    IssueSyncProgress result = underTest.getIssueSyncProgress(db.getSession());
    assertThat(result.isCompleted()).isFalse();
}
/**
 * Converts a Flink filter expression into an equivalent Iceberg {@link Expression}.
 * <p>
 * Only {@link CallExpression}s whose function is present in the {@code FILTERS}
 * map are supported; anything else yields {@link Optional#empty()} so the caller
 * can fall back to evaluating the predicate in Flink. Note that EQ/NOT_EQ against
 * a NaN literal are rewritten to Iceberg's isNaN/notNaN predicates, and that for
 * reversed comparisons (literal on the left) the operator is flipped via the
 * second function argument passed to {@code convertFieldAndLiteral}.
 *
 * @param flinkExpression the Flink expression to translate
 * @return the Iceberg expression, or empty when the input cannot be translated
 */
public static Optional<Expression> convert(
    org.apache.flink.table.expressions.Expression flinkExpression) {
  if (!(flinkExpression instanceof CallExpression)) {
    return Optional.empty();
  }

  CallExpression call = (CallExpression) flinkExpression;
  Operation op = FILTERS.get(call.getFunctionDefinition());
  if (op != null) {
    switch (op) {
      case IS_NULL:
        return onlyChildAs(call, FieldReferenceExpression.class)
            .map(FieldReferenceExpression::getName)
            .map(Expressions::isNull);

      case NOT_NULL:
        return onlyChildAs(call, FieldReferenceExpression.class)
            .map(FieldReferenceExpression::getName)
            .map(Expressions::notNull);

      case LT:
        return convertFieldAndLiteral(Expressions::lessThan, Expressions::greaterThan, call);

      case LT_EQ:
        return convertFieldAndLiteral(
            Expressions::lessThanOrEqual, Expressions::greaterThanOrEqual, call);

      case GT:
        return convertFieldAndLiteral(Expressions::greaterThan, Expressions::lessThan, call);

      case GT_EQ:
        return convertFieldAndLiteral(
            Expressions::greaterThanOrEqual, Expressions::lessThanOrEqual, call);

      case EQ:
        return convertFieldAndLiteral(
            (ref, lit) -> {
              // field = NaN is expressed as isNaN(field) in Iceberg.
              if (NaNUtil.isNaN(lit)) {
                return Expressions.isNaN(ref);
              } else {
                return Expressions.equal(ref, lit);
              }
            },
            call);

      case NOT_EQ:
        return convertFieldAndLiteral(
            (ref, lit) -> {
              if (NaNUtil.isNaN(lit)) {
                return Expressions.notNaN(ref);
              } else {
                return Expressions.notEqual(ref, lit);
              }
            },
            call);

      case NOT:
        return onlyChildAs(call, CallExpression.class)
            .flatMap(FlinkFilters::convert)
            .map(Expressions::not);

      case AND:
        return convertLogicExpression(Expressions::and, call);

      case OR:
        return convertLogicExpression(Expressions::or, call);

      case STARTS_WITH:
        return convertLike(call);
    }
  }

  return Optional.empty();
}
/**
 * field1 &lt;= 1 must convert to an Iceberg lessThanOrEqual predicate, and the
 * reversed form 1 &gt;= field1 must flip to the same predicate.
 */
@Test
public void testLessThanEquals() {
    UnboundPredicate<Integer> expected =
        org.apache.iceberg.expressions.Expressions.lessThanOrEqual("field1", 1);

    Optional<org.apache.iceberg.expressions.Expression> actual =
        FlinkFilters.convert(resolve(Expressions.$("field1").isLessOrEqual(Expressions.lit(1))));
    assertThat(actual).isPresent();
    assertPredicatesMatch(expected, actual.get());

    // Literal on the left: operator must be flipped during conversion.
    Optional<org.apache.iceberg.expressions.Expression> actual1 =
        FlinkFilters.convert(resolve(Expressions.lit(1).isGreaterOrEqual(Expressions.$("field1"))));
    assertThat(actual1).isPresent();
    assertPredicatesMatch(expected, actual1.get());
}
/**
 * Handles a failed group operation reported by a device.
 * <p>
 * Special cases: GROUP_EXISTS with matching buckets on a pending-add entry is
 * treated as success (the group is already programmed); INVALID_GROUP is retried
 * up to MAX_FAILED_ATTEMPTS before the entry is evicted. Otherwise a failure
 * event matching the operation type is emitted, and for failed ADDs the stale
 * entry is removed from the key-based store (the id-based map is cleaned up by
 * the map update listener).
 *
 * @param deviceId  device on which the operation failed
 * @param operation the failed group operation, including its failure code
 */
@Override
public void groupOperationFailed(DeviceId deviceId, GroupOperation operation) {
    StoredGroupEntry existing = getStoredGroupEntry(deviceId, operation.groupId());
    if (existing == null) {
        log.warn("No group entry with ID {} found ", operation.groupId());
        return;
    }
    log.warn("groupOperationFailed: group operation {} failed in state {} "
                     + "for group {} in device {} with code {}",
             operation.opType(), existing.state(), existing.id(),
             existing.deviceId(), operation.failureCode());
    if (operation.failureCode() == GroupOperation.GroupMsgErrorCode.GROUP_EXISTS) {
        if (operation.buckets().equals(existing.buckets())) {
            if (existing.state() == GroupState.PENDING_ADD || existing.state() == GroupState.PENDING_ADD_RETRY) {
                // Device already has exactly this group: treat the add as done.
                log.info("GROUP_EXISTS: GroupID and Buckets match for group in pending "
                                 + "add state - moving to ADDED for group {} in device {}",
                         existing.id(), deviceId);
                addOrUpdateGroupEntry(existing);
                return;
            } else {
                log.warn("GROUP_EXISTS: GroupId and Buckets match but existing"
                                 + "group in state: {}", existing.state());
            }
        } else {
            log.warn("GROUP EXISTS: Group ID matched but buckets did not. "
                             + "Operation: {} Existing: {}", operation.buckets(),
                     existing.buckets());
        }
    }
    if (operation.failureCode() == GroupOperation.GroupMsgErrorCode.INVALID_GROUP) {
        existing.incrFailedRetryCount();
        if (existing.failedRetryCount() < MAX_FAILED_ATTEMPTS) {
            log.warn("Group {} programming failed {} of {} times in dev {}, "
                             + "retrying ..", existing.id(),
                     existing.failedRetryCount(), MAX_FAILED_ATTEMPTS, deviceId);
            return;
        }
        log.warn("Group {} programming failed {} of {} times in dev {}, "
                         + "removing group from store", existing.id(),
                 existing.failedRetryCount(), MAX_FAILED_ATTEMPTS, deviceId);
        // fall through to case
    }
    switch (operation.opType()) {
        case ADD:
            if (existing.state() == GroupState.PENDING_ADD
                    || existing.state() == GroupState.PENDING_ADD_RETRY) {
                notifyDelegate(new GroupEvent(Type.GROUP_ADD_FAILED, existing));
                log.warn("groupOperationFailed: cleaning up "
                                 + "group {} from store in device {}....",
                         existing.id(), existing.deviceId());
                //Removal from groupid based map will happen in the
                //map update listener
                getGroupStoreKeyMap().remove(new GroupStoreKeyMapKey(existing.deviceId(),
                                                                     existing.appCookie()));
            }
            break;
        case MODIFY:
            notifyDelegate(new GroupEvent(Type.GROUP_UPDATE_FAILED, existing));
            break;
        case DELETE:
            notifyDelegate(new GroupEvent(Type.GROUP_REMOVE_FAILED, existing));
            break;
        default:
            log.warn("Unknown group operation type {}", operation.opType());
    }
}
/**
 * Exercises groupOperationFailed for ADD, MODIFY and DELETE failures and checks
 * the delegate receives the matching failure event (plus GROUP_REMOVED for the
 * failed ADD, since the stale entry is evicted from the store).
 */
@Test
public void testGroupOperationFailed() {
    TestDelegate delegate = new TestDelegate();
    groupStore.setDelegate(delegate);
    groupStore.deviceInitialAuditCompleted(deviceId1, true);
    groupStore.deviceInitialAuditCompleted(deviceId2, true);

    groupStore.storeGroupDescription(groupDescription1);
    groupStore.storeGroupDescription(groupDescription2);

    List<GroupEvent> eventsAfterAdds = delegate.eventsSeen();
    assertThat(eventsAfterAdds, hasSize(2));
    eventsAfterAdds.forEach(event -> assertThat(event.type(), is(GroupEvent.Type.GROUP_ADD_REQUESTED)));
    delegate.resetEvents();

    // Failed ADD: expect GROUP_ADD_FAILED followed by GROUP_REMOVED (store cleanup).
    GroupOperation opAdd =
            GroupOperation.createAddGroupOperation(groupId1,
                    INDIRECT,
                    indirectGroupBuckets);
    groupStore.groupOperationFailed(deviceId1, opAdd);

    List<GroupEvent> eventsAfterAddFailed = delegate.eventsSeen();
    assertThat(eventsAfterAddFailed, hasSize(2));
    assertThat(eventsAfterAddFailed.get(0).type(),
            is(GroupEvent.Type.GROUP_ADD_FAILED));
    assertThat(eventsAfterAddFailed.get(1).type(),
            is(GroupEvent.Type.GROUP_REMOVED));
    delegate.resetEvents();

    // Failed MODIFY: single GROUP_UPDATE_FAILED event.
    GroupOperation opModify =
            GroupOperation.createModifyGroupOperation(groupId2,
                    INDIRECT,
                    indirectGroupBuckets);
    groupStore.groupOperationFailed(deviceId2, opModify);
    List<GroupEvent> eventsAfterModifyFailed = delegate.eventsSeen();
    assertThat(eventsAfterModifyFailed, hasSize(1));
    assertThat(eventsAfterModifyFailed.get(0).type(),
            is(GroupEvent.Type.GROUP_UPDATE_FAILED));
    delegate.resetEvents();

    // Failed DELETE: single GROUP_REMOVE_FAILED event.
    GroupOperation opDelete =
            GroupOperation.createDeleteGroupOperation(groupId2,
                    INDIRECT);
    groupStore.groupOperationFailed(deviceId2, opDelete);
    List<GroupEvent> eventsAfterDeleteFailed = delegate.eventsSeen();
    assertThat(eventsAfterDeleteFailed, hasSize(1));
    assertThat(eventsAfterDeleteFailed.get(0).type(),
            is(GroupEvent.Type.GROUP_REMOVE_FAILED));
    delegate.resetEvents();
}
/**
 * Returns whether {@code a} is greater than or equal to {@code b} according to
 * the natural ordering defined by {@link Comparable#compareTo}.
 *
 * @param a left operand
 * @param b right operand
 * @return {@code true} when {@code a >= b}
 */
@VisibleForTesting
static <T extends Comparable<? super T>> boolean ge(T a, T b) {
    final int comparison = a.compareTo(b);
    return comparison >= 0;
}
/**
 * ge() must be false only when the left operand is strictly smaller;
 * equal and greater operands both satisfy the relation.
 */
@Test
public void testComparators_GE() {
    Assert.assertFalse(VersionChecker.ge(0, 1));
    Assert.assertTrue(VersionChecker.ge(1, 1));
    Assert.assertTrue(VersionChecker.ge(2, 1));
}
/**
 * Moves/renames a file on the EUE backend.
 * <p>
 * Behavior is split into up to three server calls, each only issued when needed:
 * (1) if the target exists and is not merely a case-variant of the source, the
 * target is trashed first; (2) if the parent directory changes, the resource is
 * moved under the new parent (root/trash use the alias-based API); (3) if the
 * display name changes, the resource is patched with the new name. The cached
 * resource id for the source path is invalidated on success.
 * <p>
 * NOTE(review): single-file moves may return HTTP 200 with an empty body, hence
 * the explicit null checks on the response entries.
 *
 * @param file     source path
 * @param target   destination path (may differ in parent, name, or both)
 * @param status   transfer status; {@code isExists()} signals a target overwrite
 * @param delete   callback used when trashing an existing target
 * @param callback connection prompt callback
 * @return the target path
 * @throws BackgroundException on any API failure, mapped per entry status code
 */
@Override
public Path move(final Path file, final Path target, final TransferStatus status, final Delete.Callback delete, final ConnectionCallback callback) throws BackgroundException {
    try {
        final EueApiClient client = new EueApiClient(session);
        if(status.isExists()) {
            // A pure case-rename must not trash the target (same underlying resource).
            if(!new CaseInsensitivePathPredicate(file).test(target)) {
                if(log.isWarnEnabled()) {
                    log.warn(String.format("Trash file %s to be replaced with %s", target, file));
                }
                new EueTrashFeature(session, fileid).delete(Collections.singletonMap(target, status), callback, delete);
            }
        }
        final String resourceId = fileid.getFileId(file);
        if(!new SimplePathPredicate(file.getParent()).test(target.getParent())) {
            final ResourceMoveResponseEntries resourceMoveResponseEntries;
            final String parentResourceId = fileid.getFileId(target.getParent());
            switch(parentResourceId) {
                case EueResourceIdProvider.ROOT:
                case EueResourceIdProvider.TRASH:
                    // Root and trash are addressed through the alias-based move API.
                    resourceMoveResponseEntries = new MoveChildrenForAliasApiApi(client)
                            .resourceAliasAliasChildrenMovePost(parentResourceId,
                                    Collections.singletonList(String.format("%s/resource/%s", session.getBasePath(), resourceId)), null, null, null,
                                    "rename", null);
                    break;
                default:
                    resourceMoveResponseEntries = new MoveChildrenApi(client)
                            .resourceResourceIdChildrenMovePost(parentResourceId,
                                    Collections.singletonList(String.format("%s/resource/%s", session.getBasePath(), resourceId)), null, null, null,
                                    "rename", null);
            }
            if(null == resourceMoveResponseEntries) {
                // Move of single file will return 200 status code with empty response body
            }
            else {
                for(ResourceMoveResponseEntry resourceMoveResponseEntry : resourceMoveResponseEntries.values()) {
                    switch(resourceMoveResponseEntry.getStatusCode()) {
                        case HttpStatus.SC_OK:
                            break;
                        default:
                            log.warn(String.format("Failure %s moving file %s", resourceMoveResponseEntries, file));
                            final ResourceCreationResponseEntryEntity entity = resourceMoveResponseEntry.getEntity();
                            if(null == entity) {
                                // No entity body: map from the HTTP reason phrase instead.
                                throw new EueExceptionMappingService().map(new ApiException(resourceMoveResponseEntry.getReason(),
                                        null, resourceMoveResponseEntry.getStatusCode(), client.getResponseHeaders()));
                            }
                            throw new EueExceptionMappingService().map(new ApiException(resourceMoveResponseEntry.getEntity().getError(),
                                    null, resourceMoveResponseEntry.getStatusCode(), client.getResponseHeaders()));
                    }
                }
            }
        }
        if(!StringUtils.equals(file.getName(), target.getName())) {
            final ResourceUpdateModel resourceUpdateModel = new ResourceUpdateModel();
            final ResourceUpdateModelUpdate resourceUpdateModelUpdate = new ResourceUpdateModelUpdate();
            final Uifs uifs = new Uifs();
            uifs.setName(target.getName());
            resourceUpdateModelUpdate.setUifs(uifs);
            resourceUpdateModel.setUpdate(resourceUpdateModelUpdate);
            final ResourceMoveResponseEntries resourceMoveResponseEntries = new UpdateResourceApi(client).resourceResourceIdPatch(resourceId,
                    resourceUpdateModel, null, null, null);
            if(null == resourceMoveResponseEntries) {
                // Move of single file will return 200 status code with empty response body
            }
            else {
                for(ResourceMoveResponseEntry resourceMoveResponseEntry : resourceMoveResponseEntries.values()) {
                    switch(resourceMoveResponseEntry.getStatusCode()) {
                        case HttpStatus.SC_CREATED:
                            break;
                        default:
                            log.warn(String.format("Failure %s renaming file %s", resourceMoveResponseEntry, file));
                            throw new EueExceptionMappingService().map(new ApiException(resourceMoveResponseEntry.getReason(),
                                    null, resourceMoveResponseEntry.getStatusCode(), client.getResponseHeaders()));
                    }
                }
            }
        }
        // Invalidate the stale id cached for the source path.
        fileid.cache(file, null);
        return target;
    }
    catch(ApiException e) {
        throw new EueExceptionMappingService().map("Cannot rename {0}", e, file);
    }
}
/**
 * Moving a file whose resource id no longer exists on the server (file was
 * deleted behind our back) must surface as NotfoundException.
 */
@Test(expected = NotfoundException.class)
public void testMoveInvalidResourceId() throws Exception {
    final EueResourceIdProvider fileid = new EueResourceIdProvider(session);
    final Path folder = new EueDirectoryFeature(session, fileid).mkdir(
            new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus());
    final Path file = new EueTouchFeature(session, fileid).touch(new Path(folder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)),
            new TransferStatus());
    final String resourceId = file.attributes().getFileId();
    // Delete out-of-band so the cached resource id becomes stale.
    new EueDeleteFeature(session, fileid).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
    try {
        new EueMoveFeature(session, fileid).move(file.withAttributes(new PathAttributes().withFileId(resourceId)),
                new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus(),
                new Delete.DisabledCallback(), new DisabledConnectionCallback());
        fail();
    }
    finally {
        new EueDeleteFeature(session, fileid).delete(Collections.singletonList(folder), new DisabledLoginCallback(), new Delete.DisabledCallback());
    }
}
/**
 * Pages over the ids of rule nodes of the given type whose version is below the
 * supplied threshold, wrapping each raw id into a {@link RuleNodeId}.
 *
 * @param type     rule node type to match
 * @param version  exclusive upper bound on node version
 * @param pageLink paging parameters (search text is ignored by the query)
 * @return one page of matching rule node ids
 */
@Override
public PageData<RuleNodeId> findAllRuleNodeIdsByTypeAndVersionLessThan(String type, int version, PageLink pageLink) {
    return DaoUtil
            .pageToPageData(ruleNodeRepository.findAllRuleNodeIdsByTypeAndVersionLessThan(
                    type,
                    version,
                    DaoUtil.toPageable(pageLink)))
            .mapData(id -> new RuleNodeId(id));
}
/**
 * Search text in the PageLink must be ignored by the id query, and the
 * Slice-backed page reports zero totals by design (see DaoUtil.pageToPageData).
 */
@Test
public void testFindRuleNodeIdsByTypeAndVersionLessThan() {
    // test - search text ignored
    PageData<RuleNodeId> ruleNodeIds = ruleNodeDao.findAllRuleNodeIdsByTypeAndVersionLessThan(
            "A", 1, new PageLink(10, 0, PREFIX_FOR_RULE_NODE_NAME));
    assertEquals(0, ruleNodeIds.getTotalElements()); // due to DaoUtil.pageToPageData impl for Slice
    assertEquals(0, ruleNodeIds.getTotalPages()); // due to DaoUtil.pageToPageData impl for Slice
    assertEquals(10, ruleNodeIds.getData().size());

    ruleNodeIds = ruleNodeDao.findAllRuleNodeIdsByTypeAndVersionLessThan(
            "A", 1, new PageLink(10, 0));
    assertEquals(0, ruleNodeIds.getTotalElements()); // due to DaoUtil.pageToPageData impl for Slice
    assertEquals(0, ruleNodeIds.getTotalPages()); // due to DaoUtil.pageToPageData impl for Slice
    assertEquals(10, ruleNodeIds.getData().size());
}
/**
 * Creates the partition function used to route rows to local partitions.
 * <p>
 * System partitionings hash rows directly (optionally using a precomputed hash
 * column at channel 0); connector partitionings delegate to the connector's
 * bucket function and then fold buckets onto partitions round-robin
 * (bucket % partitionCount).
 *
 * @param partitioningProviderManager lookup for connector partitioning providers
 * @param session                     session used to derive the connector session
 * @param partitioning                partitioning handle (system or connector)
 * @param partitionCount              number of local partitions
 * @param partitioningChannelTypes    types of the partitioning channels
 * @param isHashPrecomputed           whether channel 0 already holds the hash
 * @return the partition function
 */
static PartitionFunction createPartitionFunction(
        PartitioningProviderManager partitioningProviderManager,
        Session session,
        PartitioningHandle partitioning,
        int partitionCount,
        List<Type> partitioningChannelTypes,
        boolean isHashPrecomputed)
{
    if (partitioning.getConnectorHandle() instanceof SystemPartitioningHandle) {
        HashGenerator hashGenerator = isHashPrecomputed
                ? new PrecomputedHashGenerator(0)
                : InterpretedHashGenerator.createPositionalWithTypes(partitioningChannelTypes);
        return new LocalPartitionGenerator(hashGenerator, partitionCount);
    }

    ConnectorNodePartitioningProvider partitioningProvider =
            partitioningProviderManager.getPartitioningProvider(partitioning.getConnectorId().get());

    int bucketCount = partitioningProvider.getBucketCount(
            partitioning.getTransactionHandle().orElse(null),
            session.toConnectorSession(partitioning.getConnectorId().get()),
            partitioning.getConnectorHandle());

    // Fold buckets onto partitions round-robin.
    int[] bucketToPartition = new int[bucketCount];
    int bucket = 0;
    while (bucket < bucketCount) {
        bucketToPartition[bucket] = bucket % partitionCount;
        bucket++;
    }

    BucketFunction bucketFunction = partitioningProvider.getBucketFunction(
            partitioning.getTransactionHandle().orElse(null),
            session.toConnectorSession(partitioning.getConnectorId().get()),
            partitioning.getConnectorHandle(),
            partitioningChannelTypes,
            bucketCount);
    checkArgument(bucketFunction != null, "No bucket function for partitioning: %s", partitioning);
    return new BucketPartitionFunction(bucketFunction, bucketToPartition);
}
/**
 * With a connector partitioning whose bucket function always returns 10, the
 * resulting partition function must report that bucket's mapped partition count
 * (here the bucket id itself, since bucketCount <= requested partitions).
 */
@Test
public void testCreatePartitionFunction() {
    int partitionCount = 10;

    PartitioningProviderManager partitioningProviderManager = new PartitioningProviderManager();
    // Stub provider: fixed bucket count of 10, bucket function pinned to partitionCount.
    partitioningProviderManager.addPartitioningProvider(
            new ConnectorId("prism"),
            new ConnectorNodePartitioningProvider() {
                @Override
                public ConnectorBucketNodeMap getBucketNodeMap(ConnectorTransactionHandle transactionHandle, ConnectorSession session,
                        ConnectorPartitioningHandle partitioningHandle, List<Node> sortedNodes) {
                    return createBucketNodeMap(Stream.generate(() -> sortedNodes).flatMap(List::stream).limit(10).collect(toImmutableList()), SOFT_AFFINITY);
                }

                @Override
                public ToIntFunction<ConnectorSplit> getSplitBucketFunction(ConnectorTransactionHandle transactionHandle, ConnectorSession session,
                        ConnectorPartitioningHandle partitioningHandle) {
                    return null;
                }

                @Override
                public BucketFunction getBucketFunction(ConnectorTransactionHandle transactionHandle, ConnectorSession session,
                        ConnectorPartitioningHandle partitioningHandle, List<Type> partitionChannelTypes, int bucketCount) {
                    return (Page page, int position) -> partitionCount;
                }

                @Override
                public int getBucketCount(ConnectorTransactionHandle transactionHandle, ConnectorSession session, ConnectorPartitioningHandle partitioningHandle) {
                    return 10;
                }
            });
    PartitioningHandle partitioningHandle = new PartitioningHandle(
            Optional.of(new ConnectorId("prism")),
            Optional.of(new ConnectorTransactionHandle() {
                @Override
                public int hashCode() {
                    return super.hashCode();
                }

                @Override
                public boolean equals(Object obj) {
                    return super.equals(obj);
                }
            }),
            new ConnectorPartitioningHandle() {
            });
    PartitionFunction partitionFunction = createPartitionFunction(partitioningProviderManager, session, partitioningHandle, 600, ImmutableList.of(), false);

    assertEquals(partitionFunction.getPartitionCount(), partitionCount);
}
/**
 * Creates a download URL for the given file.
 * <p>
 * EUE implements download links as guest share URLs; the {@code sharee}
 * argument is not used by this backend.
 *
 * @param file     file to share
 * @param sharee   ignored
 * @param options  share creation options
 * @param callback prompt for the share PIN
 * @return guest share URL for the file
 * @throws BackgroundException on API failure
 */
@Override
public DescriptiveUrl toDownloadUrl(final Path file, final Sharee sharee, final ShareCreationRequestModel options, final PasswordCallback callback) throws BackgroundException {
    // Delegate straight to the guest-URL implementation.
    return toGuestUrl(file, options, callback);
}
/**
 * A PIN that violates the server's pin policy must be rejected with an
 * InteroperabilityException carrying the server's policy message.
 */
@Test(expected = InteroperabilityException.class)
public void testInvalidPin() throws Exception {
    final EueResourceIdProvider fileid = new EueResourceIdProvider(session);
    final Path sourceFolder = new EueDirectoryFeature(session, fileid).mkdir(new Path(new AlphanumericRandomStringService().random(),
            EnumSet.of(Path.Type.directory)), new TransferStatus());
    final EueShareFeature feature = new EueShareFeature(session, fileid);
    try {
        // "test" is too weak for the pin policy — expect the server to refuse it.
        feature.toDownloadUrl(sourceFolder, Share.Sharee.world, null,
                new DisabledPasswordCallback() {
                    @Override
                    public Credentials prompt(final Host bookmark, final String title, final String reason, final LoginOptions options) {
                        return new Credentials(null, "test");
                    }
                });
    }
    catch(InteroperabilityException e) {
        assertEquals("Pin does not match pin policy. Please contact your web hosting service provider for assistance.", e.getDetail());
        throw e;
    }
    finally {
        new EueDeleteFeature(session, fileid).delete(Collections.singletonList(sourceFolder), new DisabledPasswordCallback(), new Delete.DisabledCallback());
    }
}
/**
 * Waits for a bundle unload to complete after its unload event is published.
 * <p>
 * The returned future completes when the in-flight unload request for the
 * bundle is completed elsewhere, or fails on timeout / publish failure. The
 * in-flight entry is deduplicated per bundle via computeIfAbsent; on timeout
 * the entry is removed so a later unload can be retried. The counter records
 * a Failure/Unknown on any error, or the given decision on success.
 *
 * @param eventPubFuture future of the unload event publication
 * @param bundle         bundle (service unit) being unloaded
 * @param decision       decision to record in the counter on success
 * @param timeout        maximum wait after publication
 * @param timeoutUnit    unit of {@code timeout}
 * @return future completing when the unload finishes or fails
 */
public CompletableFuture<Void> waitAsync(CompletableFuture<Void> eventPubFuture,
                                         String bundle,
                                         UnloadDecision decision,
                                         long timeout,
                                         TimeUnit timeoutUnit) {
    return eventPubFuture.thenCompose(__ -> inFlightUnloadRequest.computeIfAbsent(bundle, ignore -> {
        if (log.isDebugEnabled()) {
            log.debug("Handle unload bundle: {}, timeout: {} {}", bundle, timeout, timeoutUnit);
        }
        CompletableFuture<Void> future = new CompletableFuture<>();
        future.orTimeout(timeout, timeoutUnit).whenComplete((v, ex) -> {
            if (ex != null) {
                // Drop the in-flight entry so the bundle can be unloaded again later.
                inFlightUnloadRequest.remove(bundle);
                log.warn("Failed to wait unload for serviceUnit: {}", bundle, ex);
            }
        });
        return future;
    })).whenComplete((__, ex) -> {
        if (ex != null) {
            counter.update(Failure, Unknown);
            log.warn("Failed to unload bundle: {}", bundle, ex);
            return;
        }
        log.info("Complete unload bundle: {}", bundle);
        counter.update(decision);
    });
}
/**
 * If publishing the unload event fails, waitAsync must complete exceptionally
 * with the publication failure and record a Failure/Unknown in the counter.
 */
@Test
public void testEventPubFutureHasException() {
    UnloadCounter counter = new UnloadCounter();
    UnloadManager manager = new UnloadManager(counter, "mockBrokerId");
    var unloadDecision =
            new UnloadDecision(new Unload("broker-1", "bundle-1"), Success, Admin);
    CompletableFuture<Void> future =
            manager.waitAsync(FutureUtil.failedFuture(new Exception("test")),
                    "bundle-1", unloadDecision, 10, TimeUnit.SECONDS);

    assertTrue(future.isCompletedExceptionally());
    try {
        future.get();
        fail();
    } catch (Exception ex) {
        // Original publication failure must be propagated as the cause.
        assertEquals(ex.getCause().getMessage(), "test");
    }
    assertEquals(counter.getBreakdownCounters().get(Failure).get(Unknown).get(), 1);
}
/**
 * Resolves the SQL type of an expression with no lambda type mappings in scope.
 * Convenience overload delegating to the map-taking variant with an empty map.
 *
 * @param expression the expression to type-check
 * @return the resolved SQL type
 */
public SqlType getExpressionSqlType(final Expression expression) {
    return getExpressionSqlType(expression, Collections.emptyMap());
}
/**
 * Adding an integer literal to a DOUBLE column must widen the result to DOUBLE.
 */
@Test
public void shouldResolveTypeForAddDoubleIntegerLiteral() {
    final Expression expression = new ArithmeticBinaryExpression(Operator.ADD, COL3, literal(10));

    final SqlType type = expressionTypeManager.getExpressionSqlType(expression);

    assertThat(type, is(SqlTypes.DOUBLE));
}
/**
 * Enqueues a REMOVE_SUBSCRIPTION command to the driver command buffer.
 *
 * @param registrationId registration id of the subscription to remove
 * @return the correlation id assigned to this command
 * @throws AeronException if the command buffer claim fails
 */
public long removeSubscription(final long registrationId) {
    final long correlationId = toDriverCommandBuffer.nextCorrelationId();
    final int claimIndex = toDriverCommandBuffer.tryClaim(REMOVE_SUBSCRIPTION, RemoveMessageFlyweight.length());
    if (claimIndex < 0) {
        throw new AeronException("could not write remove subscription command");
    }

    // Populate the flyweight in the claimed region, then commit to publish.
    removeMessage.wrap(toDriverCommandBuffer.buffer(), claimIndex);
    removeMessage.registrationId(registrationId);
    removeMessage.clientId(clientId);
    removeMessage.correlationId(correlationId);
    toDriverCommandBuffer.commit(claimIndex);

    return correlationId;
}
/**
 * removeSubscription must write exactly one REMOVE_SUBSCRIPTION message whose
 * registrationId field carries the id passed in.
 */
@Test
void threadSendsRemoveSubscriberMessage() {
    conductor.removeSubscription(CORRELATION_ID);

    assertReadsOneMessage(
        (msgTypeId, buffer, index, length) ->
        {
            final RemoveMessageFlyweight removeMessage = new RemoveMessageFlyweight();
            removeMessage.wrap(buffer, index);

            assertEquals(REMOVE_SUBSCRIPTION, msgTypeId);
            assertEquals(CORRELATION_ID, removeMessage.registrationId());
        }
    );
}
/**
 * Creates a user lookup service.
 *
 * @param supportsGroups whether group-principal lookups are supported
 */
public UserLookupService(boolean supportsGroups) {
    this.supportsGroups = supportsGroups;
}
/**
 * Looking up the same user/group name twice must yield equal principals, and
 * different names must yield distinct principals.
 */
@Test
public void testUserLookupService() throws IOException {
    UserPrincipalLookupService service = new UserLookupService(true);
    UserPrincipal bob1 = service.lookupPrincipalByName("bob");
    UserPrincipal bob2 = service.lookupPrincipalByName("bob");
    UserPrincipal alice = service.lookupPrincipalByName("alice");

    assertThat(bob1).isEqualTo(bob2);
    assertThat(bob1).isNotEqualTo(alice);

    GroupPrincipal group1 = service.lookupPrincipalByGroupName("group");
    GroupPrincipal group2 = service.lookupPrincipalByGroupName("group");
    GroupPrincipal foo = service.lookupPrincipalByGroupName("foo");

    assertThat(group1).isEqualTo(group2);
    assertThat(group1).isNotEqualTo(foo);
}
public FEELFnResult<List<Object>> invoke(@ParameterName("list") Object[] lists) { if ( lists == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "lists", "cannot be null")); } final Set<Object> resultSet = new LinkedHashSet<>(); for ( final Object list : lists ) { if ( list instanceof Collection ) { resultSet.addAll((Collection) list); } else { resultSet.add(list); } } // spec requires us to return a new list return FEELFnResult.ofResult( new ArrayList<>(resultSet) ); }
/**
 * Disjoint input lists: union() must concatenate them in argument order with no
 * elements removed.
 */
@Test
void invokeListsNoDuplicates() {
    final Object[] params = new Object[]{Arrays.asList(10, 8, 3), Arrays.asList(1, 15, 2)};
    FunctionTestUtil.assertResultList(unionFunction.invoke(params), Arrays.asList(10, 8, 3, 1, 15, 2));
}
/**
 * Dispatches a supported {@code java.lang.Math} method by name.
 * <p>
 * Supported: {@code min(long,long)}, {@code max(long,long)},
 * {@code random()}, {@code pow(number,number)}. Any other name/arity
 * combination results in an {@link UnsupportedOperationException}.
 *
 * @param methodName method to invoke
 * @param args       evaluated arguments
 * @return the wrapped result value
 */
@Override
public SelType call(String methodName, SelType[] args) {
    final int argCount = args.length;
    if (argCount == 2 && "min".equals(methodName)) {
        final long lhs = ((SelLong) args[0]).longVal();
        final long rhs = ((SelLong) args[1]).longVal();
        return SelLong.of(Math.min(lhs, rhs));
    }
    if (argCount == 2 && "max".equals(methodName)) {
        final long lhs = ((SelLong) args[0]).longVal();
        final long rhs = ((SelLong) args[1]).longVal();
        return SelLong.of(Math.max(lhs, rhs));
    }
    if (argCount == 0 && "random".equals(methodName)) {
        return SelDouble.of(Math.random());
    }
    if (argCount == 2 && "pow".equals(methodName)) {
        final double base = ((Number) args[0].getInternalVal()).doubleValue();
        final double exponent = ((Number) args[1].getInternalVal()).doubleValue();
        return SelDouble.of(Math.pow(base, exponent));
    }
    throw new UnsupportedOperationException(
        type() + " DO NOT support calling method: " + methodName + " with args: " + Arrays.toString(args));
}
/**
 * Covers all four supported Math dispatches: min/max return LONG, pow returns
 * DOUBLE with the exact value, random returns a DOUBLE (value unchecked).
 */
@Test
public void call() {
    SelType res = SelJavaMath.INSTANCE.call("min", new SelType[] {SelLong.of(1), SelLong.of(2)});
    assertEquals("LONG: 1", res.type() + ": " + res);
    res = SelJavaMath.INSTANCE.call("max", new SelType[] {SelLong.of(1), SelLong.of(2)});
    assertEquals("LONG: 2", res.type() + ": " + res);
    res = SelJavaMath.INSTANCE.call("pow", new SelType[] {SelLong.of(3), SelLong.of(2)});
    assertEquals(SelTypes.DOUBLE, res.type());
    assertEquals(9.0, ((SelDouble) res).doubleVal(), 0.01);
    res = SelJavaMath.INSTANCE.call("random", new SelType[] {});
    assertEquals(SelTypes.DOUBLE, res.type());
}
/**
 * Asserts that the actual class can be assigned to the given class, i.e.
 * {@code clazz.isAssignableFrom(actual)} holds. Fails the assertion otherwise.
 *
 * @param clazz the class the actual value must be assignable to
 */
public void isAssignableTo(Class<?> clazz) {
    final Class<?> actualClass = checkNotNull(actual);
    if (clazz.isAssignableFrom(actualClass)) {
        return;
    }
    failWithActual("expected to be assignable to", clazz.getName());
}
/**
 * A class is assignable to its direct and transitive supertypes.
 */
@Test
public void testIsAssignableTo_parent() {
    assertThat(String.class).isAssignableTo(Object.class);
    assertThat(NullPointerException.class).isAssignableTo(Exception.class);
}
/**
 * Resolves a SpEL-flavored expression against the intercepted method call.
 * <p>
 * Resolution order: (1) empty/null expressions are returned unchanged;
 * (2) property placeholders ({@code ${...}}-style, per PLACEHOLDER_SPEL_REGEX)
 * are resolved via the embedded value resolver when one is available;
 * (3) method-argument expressions (METHOD_SPEL_REGEX) are evaluated against a
 * root object exposing the method and its arguments; (4) bean-referencing
 * expressions (BEAN_SPEL_REGEX) additionally get a BeanFactoryResolver so
 * {@code @beanName} references resolve. Anything that matches none of the
 * patterns is returned verbatim.
 * <p>
 * NOTE(review): the three regex constants are declared elsewhere in this class;
 * the exact pattern shapes are not visible here.
 *
 * @param method         the intercepted method
 * @param arguments      the invocation arguments
 * @param spelExpression the raw expression from the annotation
 * @return the resolved string, or the input unchanged when no pattern matches
 */
@Override
public String resolve(Method method, Object[] arguments, String spelExpression) {
    if (StringUtils.isEmpty(spelExpression)) {
        return spelExpression;
    }
    if (spelExpression.matches(PLACEHOLDER_SPEL_REGEX) && stringValueResolver != null) {
        return stringValueResolver.resolveStringValue(spelExpression);
    }
    if (spelExpression.matches(METHOD_SPEL_REGEX)) {
        SpelRootObject rootObject = new SpelRootObject(method, arguments);
        MethodBasedEvaluationContext evaluationContext = new MethodBasedEvaluationContext(rootObject, method, arguments, parameterNameDiscoverer);
        Object evaluated = expressionParser.parseExpression(spelExpression).getValue(evaluationContext);

        return (String) evaluated;
    }
    if (spelExpression.matches(BEAN_SPEL_REGEX)) {
        SpelRootObject rootObject = new SpelRootObject(method, arguments);
        MethodBasedEvaluationContext evaluationContext = new MethodBasedEvaluationContext(rootObject, method, arguments, parameterNameDiscoverer);
        // Bean resolver enables @beanName references inside the expression.
        evaluationContext.setBeanResolver(new BeanFactoryResolver(this.beanFactory));
        Object evaluated = expressionParser.parseExpression(spelExpression).getValue(evaluationContext);

        return (String) evaluated;
    }
    return spelExpression;
}
/**
 * An empty expression must be returned unchanged (no SpEL evaluation attempted).
 */
@Test
public void emptyStringTest() throws Exception {
    DefaultSpelResolverTest target = new DefaultSpelResolverTest();
    Method testMethod = target.getClass().getMethod("testMethod", String.class);
    String result = sut.resolve(testMethod, new Object[]{}, "");
    assertThat(result).isEqualTo("");
}
/**
 * Builds the subscription-receive processor for foreign-key joins.
 * <p>
 * The processor validates the wrapper version, drops records with a null
 * foreign key (unless the instruction explicitly propagates nulls), maintains
 * the subscription state store keyed by the combined (FK, PK) key, and forwards
 * a Change of (new, old) subscription values downstream. DELETE_* instructions
 * remove the store entry; all others upsert it.
 */
@Override
public Processor<KO, SubscriptionWrapper<K>, CombinedKey<KO, K>, Change<ValueAndTimestamp<SubscriptionWrapper<K>>>> get() {
    return new ContextualProcessor<KO, SubscriptionWrapper<K>, CombinedKey<KO, K>, Change<ValueAndTimestamp<SubscriptionWrapper<K>>>>() {

        // Subscription state store shared with ForeignTableJoinProcessorSupplier's prefix scan.
        private TimestampedKeyValueStore<Bytes, SubscriptionWrapper<K>> store;
        private Sensor droppedRecordsSensor;

        @Override
        public void init(final ProcessorContext<CombinedKey<KO, K>, Change<ValueAndTimestamp<SubscriptionWrapper<K>>>> context) {
            super.init(context);
            final InternalProcessorContext<?, ?> internalProcessorContext = (InternalProcessorContext<?, ?>) context;

            droppedRecordsSensor = TaskMetrics.droppedRecordsSensor(
                Thread.currentThread().getName(),
                internalProcessorContext.taskId().toString(),
                internalProcessorContext.metrics()
            );
            store = internalProcessorContext.getStateStore(storeName);

            keySchema.init(context);
        }

        @Override
        public void process(final Record<KO, SubscriptionWrapper<K>> record) {
            // Null FK is only allowed for the explicit null-propagation instruction.
            if (record.key() == null && !SubscriptionWrapper.Instruction.PROPAGATE_NULL_IF_NO_FK_VAL_AVAILABLE.equals(record.value().getInstruction())) {
                dropRecord();
                return;
            }
            if (record.value().getVersion() > SubscriptionWrapper.CURRENT_VERSION) {
                //Guard against modifications to SubscriptionWrapper. Need to ensure that there is compatibility
                //with previous versions to enable rolling upgrades. Must develop a strategy for upgrading
                //from older SubscriptionWrapper versions to newer versions.
                throw new UnsupportedVersionException("SubscriptionWrapper is of an incompatible version.");
            }
            context().forward(
                record.withKey(new CombinedKey<>(record.key(), record.value().getPrimaryKey()))
                    .withValue(inferChange(record))
                    .withTimestamp(record.timestamp())
            );
        }

        private Change<ValueAndTimestamp<SubscriptionWrapper<K>>> inferChange(final Record<KO, SubscriptionWrapper<K>> record) {
            if (record.key() == null) {
                // No FK: nothing to look up; old value is unknown/null.
                return new Change<>(ValueAndTimestamp.make(record.value(), record.timestamp()), null);
            } else {
                return inferBasedOnState(record);
            }
        }

        private Change<ValueAndTimestamp<SubscriptionWrapper<K>>> inferBasedOnState(final Record<KO, SubscriptionWrapper<K>> record) {
            final Bytes subscriptionKey = keySchema.toBytes(record.key(), record.value().getPrimaryKey());

            final ValueAndTimestamp<SubscriptionWrapper<K>> newValue = ValueAndTimestamp.make(record.value(), record.timestamp());
            final ValueAndTimestamp<SubscriptionWrapper<K>> oldValue = store.get(subscriptionKey);

            //This store is used by the prefix scanner in ForeignTableJoinProcessorSupplier
            if (record.value().getInstruction().equals(SubscriptionWrapper.Instruction.DELETE_KEY_AND_PROPAGATE)
                || record.value().getInstruction().equals(SubscriptionWrapper.Instruction.DELETE_KEY_NO_PROPAGATE)) {
                store.delete(subscriptionKey);
            } else {
                store.put(subscriptionKey, newValue);
            }

            return new Change<>(newValue, oldValue);
        }

        private void dropRecord() {
            if (context().recordMetadata().isPresent()) {
                final RecordMetadata recordMetadata = context().recordMetadata().get();
                LOG.warn(
                    "Skipping record due to null foreign key. "
                        + "topic=[{}] partition=[{}] offset=[{}]",
                    recordMetadata.topic(), recordMetadata.partition(), recordMetadata.offset()
                );
            } else {
                LOG.warn(
                    "Skipping record due to null foreign key. Topic, partition, and offset not known."
                );
            }
            droppedRecordsSensor.record();
        }
    };
}
/**
 * DELETE_KEY_AND_PROPAGATE (wrapper v1) must remove the subscription store
 * entry and forward a Change carrying the new wrapper and the previous value.
 */
@Test
public void shouldDeleteKeyAndPropagateV1() {
    final StoreBuilder<TimestampedKeyValueStore<Bytes, SubscriptionWrapper<String>>> storeBuilder = storeBuilder();
    final SubscriptionReceiveProcessorSupplier<String, String> supplier = supplier(storeBuilder);
    final Processor<String, SubscriptionWrapper<String>, CombinedKey<String, String>,
        Change<ValueAndTimestamp<SubscriptionWrapper<String>>>> processor = supplier.get();
    stateStore = storeBuilder.build();
    context.addStateStore(stateStore);
    stateStore.init((StateStoreContext) context, stateStore);

    // Pre-populate the store so the processor has an old value to report.
    final SubscriptionWrapper<String> oldWrapper = new SubscriptionWrapper<>(
        new long[]{1L, 2L},
        Instruction.DELETE_KEY_AND_PROPAGATE,
        PK2,
        SubscriptionWrapper.VERSION_1,
        1
    );
    final ValueAndTimestamp<SubscriptionWrapper<String>> oldValue = ValueAndTimestamp.make(oldWrapper, 0);
    final Bytes key = COMBINED_KEY_SCHEMA.toBytes(FK, PK1);
    stateStore.put(key, oldValue);
    processor.init(context);

    final SubscriptionWrapper<String> newWrapper = new SubscriptionWrapper<>(
        new long[]{1L, 2L},
        Instruction.DELETE_KEY_AND_PROPAGATE,
        PK1,
        SubscriptionWrapper.VERSION_1,
        1
    );
    final ValueAndTimestamp<SubscriptionWrapper<String>> newValue = ValueAndTimestamp.make(
        newWrapper, 1L);
    final Record<String, SubscriptionWrapper<String>> record = new Record<>(
        FK,
        newWrapper,
        1L
    );
    processor.process(record);
    final List<CapturedForward<? extends CombinedKey<String, String>,
        ? extends Change<ValueAndTimestamp<SubscriptionWrapper<String>>>>> forwarded = context.forwarded();

    // Store entry must be deleted, and exactly one Change(new, old) forwarded.
    assertNull(stateStore.get(key));
    assertEquals(1, forwarded.size());
    assertEquals(
        record.withKey(new CombinedKey<>(FK, PK1))
            .withValue(new Change<>(newValue, oldValue)),
        forwarded.get(0).record()
    );
}
/**
 * Narrows a generic {@code YamlNode} to a {@code YamlScalar}.
 *
 * <p>A {@code null} input passes straight through as {@code null}; a
 * non-null node of any other subtype is treated as a caller error.
 *
 * @param node the node to narrow, may be {@code null}
 * @return the node cast to {@code YamlScalar}, or {@code null}
 * @throws YamlException if {@code node} is non-null but not a scalar
 */
public static YamlScalar asScalar(YamlNode node) {
    if (node == null || node instanceof YamlScalar) {
        return (YamlScalar) node;
    }
    throw new YamlException(String.format("Child %s is not a scalar, it's actual type is %s",
            node.nodeName(), node.getClass()));
}
@Test
public void asScalarReturnsIfScalarPassed() {
    // A node that already is a scalar must pass through asScalar() without
    // throwing; reaching the end of the method is the assertion.
    final YamlNode scalarNode = new YamlScalarImpl(null, "scalar", "value");
    YamlUtil.asScalar(scalarNode);
}
/**
 * Returns a {@link Permutor} that reorders a list of feed items according
 * to the given sort order.
 *
 * <p>Shuffle-style orders get a dedicated permutor; every other order is
 * backed by a comparator and sorted in place.
 *
 * @param sortOrder the requested ordering
 * @return a permutor implementing that ordering
 * @throws IllegalArgumentException if the order has no implementation
 */
@NonNull
public static Permutor<FeedItem> getPermutor(@NonNull SortOrder sortOrder) {
    switch (sortOrder) {
        case RANDOM:
            return Collections::shuffle;
        case SMART_SHUFFLE_OLD_NEW:
            return (queue) -> smartShuffle(queue, true);
        case SMART_SHUFFLE_NEW_OLD:
            return (queue) -> smartShuffle(queue, false);
        default:
            final Comparator<FeedItem> comparator = comparatorFor(sortOrder);
            return (queue) -> Collections.sort(queue, comparator);
    }
}

/**
 * Builds the comparator backing a non-shuffle sort order. Descending
 * variants reuse the ascending key extractor via {@link Comparator#reversed()}.
 */
private static Comparator<FeedItem> comparatorFor(SortOrder sortOrder) {
    switch (sortOrder) {
        case EPISODE_TITLE_A_Z:
            return Comparator.comparing((FeedItem item) -> itemTitle(item));
        case EPISODE_TITLE_Z_A:
            return Comparator.comparing((FeedItem item) -> itemTitle(item)).reversed();
        case DATE_OLD_NEW:
            return Comparator.comparing((FeedItem item) -> pubDate(item));
        case DATE_NEW_OLD:
            return Comparator.comparing((FeedItem item) -> pubDate(item)).reversed();
        case DURATION_SHORT_LONG:
            return Comparator.comparingInt((FeedItem item) -> duration(item));
        case DURATION_LONG_SHORT:
            return Comparator.comparingInt((FeedItem item) -> duration(item)).reversed();
        case EPISODE_FILENAME_A_Z:
            return Comparator.comparing((FeedItem item) -> itemLink(item));
        case EPISODE_FILENAME_Z_A:
            return Comparator.comparing((FeedItem item) -> itemLink(item)).reversed();
        case FEED_TITLE_A_Z:
            return Comparator.comparing((FeedItem item) -> feedTitle(item));
        case FEED_TITLE_Z_A:
            return Comparator.comparing((FeedItem item) -> feedTitle(item)).reversed();
        case SIZE_SMALL_LARGE:
            return Comparator.comparingLong((FeedItem item) -> size(item));
        case SIZE_LARGE_SMALL:
            return Comparator.comparingLong((FeedItem item) -> size(item)).reversed();
        case COMPLETION_DATE_NEW_OLD:
            // NOTE(review): assumes getMedia() is non-null for every item being
            // sorted, exactly as the original lambda did — confirm with callers.
            return Comparator
                    .comparing((FeedItem item) -> item.getMedia().getPlaybackCompletionDate())
                    .reversed();
        default:
            throw new IllegalArgumentException("Permutor not implemented");
    }
}
@Test public void testPermutorForRule_EPISODE_TITLE_DESC() { Permutor<FeedItem> permutor = FeedItemPermutors.getPermutor(SortOrder.EPISODE_TITLE_Z_A); List<FeedItem> itemList = getTestList(); assertTrue(checkIdOrder(itemList, 1, 3, 2)); // before sorting permutor.reorder(itemList); assertTrue(checkIdOrder(itemList, 3, 2, 1)); // after sorting }
/**
 * Decides whether a resource name is blacklisted.
 *
 * <p>Uses {@code String.matches}, which anchors the pattern: the configured
 * blacklist regex must match the <em>entire</em> resource name, not just a
 * substring of it.
 *
 * @param resourceName the fully qualified resource name to check
 * @return {@code true} if the name matches the blacklist regex
 */
@Override
public boolean test(final String resourceName) {
    final boolean blacklisted = resourceName.matches(blackList);
    return blacklisted;
}
@Test
public void shouldBlackListAllInPackage() throws IOException {
    // Blacklisting "java.lang" must reject classes inside that package
    // while leaving other packages untouched.
    writeBlacklist(ImmutableList.of("java.lang"));

    final Blacklist underTest = new Blacklist(this.blacklistFile);

    assertTrue(underTest.test("java.lang.Class"));   // inside blacklisted package
    assertFalse(underTest.test("java.util.List"));   // different package passes
}