focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * {@inheritDoc}
 *
 * <p>Accepts every path unconditionally — this filter never excludes anything.
 */
@Override
public boolean shouldCopy(Path path) {
    return true;
}
// Verifies TrueCopyFilter.shouldCopy() returns true for an arbitrary path.
// NOTE(review): despite the name "WithNull", the argument is a non-null Path("fake");
// consider renaming the test or actually passing null.
@Test public void testShouldCopyWithNull() { Assert.assertTrue(new TrueCopyFilter().shouldCopy(new Path("fake"))); }
// Removes the entry for the composite (key1, key2) key by delegating to remove0().
// The assert guards against callers passing the reserved unassigned-sentinel as key1,
// which would corrupt the slot-accounting invariants of the underlying structure.
@Override public boolean remove(long key1, long key2) { assert key1 != unassignedSentinel : "remove() called with key1 == nullKey1 (" + unassignedSentinel + ')'; return super.remove0(key1, key2); }
// remove(): the first removal of an ensured key must succeed,
// a second removal of the same key must report false.
@Test
public void testRemove() {
    final long firstKey = randomKey();
    final long secondKey = randomKey();
    hsa.ensure(firstKey, secondKey);
    assertTrue(hsa.remove(firstKey, secondKey));
    assertFalse(hsa.remove(firstKey, secondKey));
}
// Returns the current ServerGroup straight from the cache (pure delegation;
// whether this triggers a load depends on the cache implementation — can't tell from here).
@Override public ServerGroup servers() { return cache.get(); }
// An endpoint that does not appear in the parsed nginx health output
// must be reported as not healthy.
@Test public void unknown_endpoint_is_down() { NginxHealthClient client = createClient("nginx-health-output.json"); assertFalse(client.servers().isHealthy("no.such.endpoint")); }
// Reports server status and, unless onlyStatus=true (the default), detailed client metrics.
// Client-id classification: ids containing ID_DELIMITER are ip:port-based
// (persistent when suffixed with PERSISTENT_SUFFIX, otherwise ephemeral);
// all other ids are connection-based. For clients this node is responsible for,
// the published-service count is accumulated into responsibleInstanceCount.
@GetMapping("/metrics") public ObjectNode metrics(HttpServletRequest request) { boolean onlyStatus = Boolean.parseBoolean(WebUtils.optional(request, "onlyStatus", "true")); ObjectNode result = JacksonUtils.createEmptyJsonNode(); result.put("status", serverStatusManager.getServerStatus().name()); if (onlyStatus) { return result; } Collection<String> allClientId = clientManager.allClientId(); int connectionBasedClient = 0; int ephemeralIpPortClient = 0; int persistentIpPortClient = 0; int responsibleClientCount = 0; int responsibleIpCount = 0; for (String clientId : allClientId) { if (clientId.contains(IpPortBasedClient.ID_DELIMITER)) { if (clientId.endsWith(ClientConstants.PERSISTENT_SUFFIX)) { persistentIpPortClient += 1; } else { ephemeralIpPortClient += 1; } } else { connectionBasedClient += 1; } Client client = clientManager.getClient(clientId); if (clientManager.isResponsibleClient(client)) { responsibleClientCount += 1; responsibleIpCount += client.getAllPublishedService().size(); } } result.put("serviceCount", MetricsMonitor.getDomCountMonitor().get()); result.put("instanceCount", MetricsMonitor.getIpCountMonitor().get()); result.put("subscribeCount", MetricsMonitor.getSubscriberCount().get()); result.put("responsibleInstanceCount", responsibleIpCount); result.put("clientCount", allClientId.size()); result.put("connectionBasedClientCount", connectionBasedClient); result.put("ephemeralIpPortClientCount", ephemeralIpPortClient); result.put("persistentIpPortClientCount", persistentIpPortClient); result.put("responsibleClientCount", responsibleClientCount); result.put("cpu", EnvUtil.getCpu()); result.put("load", EnvUtil.getLoad()); result.put("mem", EnvUtil.getMem()); return result; }
// Exercises the metrics endpoint with onlyStatus=false and three client ids:
// one connection-based, one ephemeral ip:port (#true), one persistent ip:port (#false).
// Only the ephemeral client is mocked as "responsible" with one published service,
// so responsibleClientCount and responsibleInstanceCount are both expected to be 1.
@Test void testMetrics() { Mockito.when(serverStatusManager.getServerStatus()).thenReturn(ServerStatus.UP); Collection<String> clients = new HashSet<>(); clients.add("1628132208793_127.0.0.1_8080"); clients.add("127.0.0.1:8081#true"); clients.add("127.0.0.1:8082#false"); Mockito.when(clientManager.allClientId()).thenReturn(clients); Client client = new IpPortBasedClient("127.0.0.1:8081#true", true); client.addServiceInstance(Service.newService("", "", ""), new InstancePublishInfo()); Mockito.when(clientManager.getClient("127.0.0.1:8081#true")).thenReturn(client); Mockito.when(clientManager.isResponsibleClient(client)).thenReturn(Boolean.TRUE); MockHttpServletRequest servletRequest = new MockHttpServletRequest(); servletRequest.addParameter("onlyStatus", "false"); ObjectNode objectNode = operatorController.metrics(servletRequest); assertEquals(1, objectNode.get("responsibleInstanceCount").asInt()); assertEquals(ServerStatus.UP.toString(), objectNode.get("status").asText()); assertEquals(3, objectNode.get("clientCount").asInt()); assertEquals(1, objectNode.get("connectionBasedClientCount").asInt()); assertEquals(1, objectNode.get("ephemeralIpPortClientCount").asInt()); assertEquals(1, objectNode.get("persistentIpPortClientCount").asInt()); assertEquals(1, objectNode.get("responsibleClientCount").asInt()); }
// Accessor for the barcode parameters configured on this data format.
public final BarcodeParameters getParams() { return params; }
// The (width, height, imageType, format) constructor must wire every value
// into the params object exposed by getParams().
@Test final void testConstructorWithAll() throws IOException { try (BarcodeDataFormat barcodeDataFormat = new BarcodeDataFormat(200, 250, BarcodeImageType.JPG, BarcodeFormat.AZTEC)) { this.checkParams(BarcodeImageType.JPG, 200, 250, BarcodeFormat.AZTEC, barcodeDataFormat.getParams()); } }
/**
 * Converts a raw Kafka consumer record into a TopicMessageDTO (key, value,
 * headers, positional metadata, sizes) and applies the configured data masker
 * to the assembled message before returning it.
 */
public TopicMessageDTO deserialize(ConsumerRecord<Bytes, Bytes> rec) {
    final TopicMessageDTO dto = new TopicMessageDTO();
    fillKey(dto, rec);
    fillValue(dto, rec);
    fillHeaders(dto, rec);
    dto.setPartition(rec.partition());
    dto.setOffset(rec.offset());
    dto.setTimestampType(mapToTimestampType(rec.timestampType()));
    final Instant recordInstant = Instant.ofEpochMilli(rec.timestamp());
    dto.setTimestamp(OffsetDateTime.ofInstant(recordInstant, UTC_ZONE_ID));
    dto.setKeySize(getKeySize(rec));
    dto.setValueSize(getValueSize(rec));
    dto.setHeadersSize(getHeadersSize(rec));
    return masker.apply(dto);
}
// Verifies that the masker function is invoked exactly once on the DTO
// produced by ConsumerRecordDeserializer.deserialize().
@Test void dataMaskingAppliedOnDeserializedMessage() { UnaryOperator<TopicMessageDTO> maskerMock = mock(); Serde.Deserializer deser = (headers, data) -> new DeserializeResult("test", STRING, Map.of()); var recordDeser = new ConsumerRecordDeserializer("test", deser, "test", deser, "test", deser, deser, maskerMock); recordDeser.deserialize(new ConsumerRecord<>("t", 1, 1L, Bytes.wrap("t".getBytes()), Bytes.wrap("t".getBytes()))); verify(maskerMock).apply(any(TopicMessageDTO.class)); }
boolean isWriteShareGroupStateSuccessful(List<PersisterStateBatch> stateBatches) { WriteShareGroupStateResult response; try { response = persister.writeState(new WriteShareGroupStateParameters.Builder() .setGroupTopicPartitionData(new GroupTopicPartitionData.Builder<PartitionStateBatchData>() .setGroupId(this.groupId) .setTopicsData(Collections.singletonList(new TopicData<>(topicIdPartition.topicId(), Collections.singletonList(PartitionFactory.newPartitionStateBatchData( topicIdPartition.partition(), stateEpoch, startOffset, 0, stateBatches)))) ).build()).build()).get(); } catch (InterruptedException | ExecutionException e) { log.error("Failed to write the share group state for share partition: {}-{}", groupId, topicIdPartition, e); throw new IllegalStateException(String.format("Failed to write the share group state for share partition %s-%s", groupId, topicIdPartition), e); } if (response == null || response.topicsData() == null || response.topicsData().size() != 1) { log.error("Failed to write the share group state for share partition: {}-{}. Invalid state found: {}", groupId, topicIdPartition, response); throw new IllegalStateException(String.format("Failed to write the share group state for share partition %s-%s", groupId, topicIdPartition)); } TopicData<PartitionErrorData> state = response.topicsData().get(0); if (state.topicId() != topicIdPartition.topicId() || state.partitions().size() != 1 || state.partitions().get(0).partition() != topicIdPartition.partition()) { log.error("Failed to write the share group state for share partition: {}-{}. 
Invalid topic partition response: {}", groupId, topicIdPartition, response); throw new IllegalStateException(String.format("Failed to write the share group state for share partition %s-%s", groupId, topicIdPartition)); } PartitionErrorData partitionData = state.partitions().get(0); if (partitionData.errorCode() != Errors.NONE.code()) { Exception exception = Errors.forCode(partitionData.errorCode()).exception(partitionData.errorMessage()); log.error("Failed to write the share group state for share partition: {}-{} due to exception", groupId, topicIdPartition, exception); return false; } return true; }
// A null result from the persister must be rejected with IllegalStateException
// (covers the "response == null" invalid-state branch).
@Test public void testWriteShareGroupStateWithNullResponse() { Persister persister = Mockito.mock(Persister.class); mockPersisterReadStateMethod(persister); SharePartition sharePartition = SharePartitionBuilder.builder().withPersister(persister).build(); Mockito.when(persister.writeState(Mockito.any())).thenReturn(CompletableFuture.completedFuture(null)); assertThrows(IllegalStateException.class, () -> sharePartition.isWriteShareGroupStateSuccessful(Collections.emptyList())); }
/**
 * Builds a failed ConfigQueryResponse carrying the given error details.
 *
 * @param errorCode machine-readable error code
 * @param message   human-readable error message
 * @return a response populated via {@code setErrorInfo}
 */
public static ConfigQueryResponse buildFailResponse(int errorCode, String message) {
    final ConfigQueryResponse failResponse = new ConfigQueryResponse();
    failResponse.setErrorInfo(errorCode, message);
    return failResponse;
}
// A fail response must serialize with FAIL resultCode, the supplied errorCode
// and message, and success=false.
@Override @Test public void testSerializeFailResponse() throws JsonProcessingException { ConfigQueryResponse configQueryResponse = ConfigQueryResponse.buildFailResponse(500, "Fail"); String json = mapper.writeValueAsString(configQueryResponse); assertTrue(json.contains("\"resultCode\":" + ResponseCode.FAIL.getCode())); assertTrue(json.contains("\"errorCode\":500")); assertTrue(json.contains("\"message\":\"Fail\"")); assertTrue(json.contains("\"success\":false")); }
// Maps a SeaTunnel Column back to a Redshift column definition, clamping
// decimal precision/scale, string/binary lengths and time/timestamp scales to
// Redshift limits, and falling back to SUPER for MAP/ARRAY/ROW. Unknown types
// delegate to super.reconvert() and rewrap failures as a connector-type error.
// NOTE(review): in the TIME branch the warn message passes MAX_SCALE although the
// comparison uses MAX_TIME_SCALE (the TIMESTAMP branch consistently logs
// MAX_TIMESTAMP_SCALE) — the logged limit looks wrong; confirm and fix separately.
// NOTE(review): the over-length STRING warning logs MAX_SUPER_LENGTH while the
// comparison is against MAX_CHARACTER_VARYING_LENGTH; presumably intentional
// because the column is converted to SUPER — verify.
@Override public BasicTypeDefine reconvert(Column column) { BasicTypeDefine.BasicTypeDefineBuilder builder = BasicTypeDefine.builder() .name(column.getName()) .nullable(column.isNullable()) .comment(column.getComment()) .defaultValue(column.getDefaultValue()); switch (column.getDataType().getSqlType()) { case BOOLEAN: builder.columnType(REDSHIFT_BOOLEAN); builder.dataType(REDSHIFT_BOOLEAN); break; case TINYINT: case SMALLINT: builder.columnType(REDSHIFT_SMALLINT); builder.dataType(REDSHIFT_SMALLINT); break; case INT: builder.columnType(REDSHIFT_INTEGER); builder.dataType(REDSHIFT_INTEGER); break; case BIGINT: builder.columnType(REDSHIFT_BIGINT); builder.dataType(REDSHIFT_BIGINT); break; case FLOAT: builder.columnType(REDSHIFT_REAL); builder.dataType(REDSHIFT_REAL); break; case DOUBLE: builder.columnType(REDSHIFT_DOUBLE_PRECISION); builder.dataType(REDSHIFT_DOUBLE_PRECISION); break; case DECIMAL: DecimalType decimalType = (DecimalType) column.getDataType(); long precision = decimalType.getPrecision(); int scale = decimalType.getScale(); if (precision <= 0) { precision = DEFAULT_PRECISION; scale = DEFAULT_SCALE; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which is precision less than 0, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), precision, scale); } else if (precision > MAX_PRECISION) { scale = (int) Math.max(0, scale - (precision - MAX_PRECISION)); precision = MAX_PRECISION; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which exceeds the maximum precision of {}, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), MAX_PRECISION, precision, scale); } if (scale < 0) { scale = 0; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which is scale less than 0, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), 
decimalType.getScale(), precision, scale); } else if (scale > MAX_SCALE) { scale = MAX_SCALE; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), MAX_SCALE, precision, scale); } builder.columnType(String.format("%s(%d,%d)", REDSHIFT_NUMERIC, precision, scale)); builder.dataType(REDSHIFT_NUMERIC); builder.precision(precision); builder.scale(scale); break; case STRING: if (column.getColumnLength() == null || column.getColumnLength() <= 0) { builder.columnType( String.format( "%s(%d)", REDSHIFT_CHARACTER_VARYING, MAX_CHARACTER_VARYING_LENGTH)); builder.dataType(REDSHIFT_CHARACTER_VARYING); builder.length((long) MAX_CHARACTER_VARYING_LENGTH); } else if (column.getColumnLength() <= MAX_CHARACTER_VARYING_LENGTH) { builder.columnType( String.format( "%s(%d)", REDSHIFT_CHARACTER_VARYING, column.getColumnLength())); builder.dataType(REDSHIFT_CHARACTER_VARYING); builder.length(column.getColumnLength()); } else { log.warn( "The length of string column {} is {}, which exceeds the maximum length of {}, " + "the length will be set to {}", column.getName(), column.getColumnLength(), MAX_SUPER_LENGTH, MAX_SUPER_LENGTH); builder.columnType(REDSHIFT_SUPER); builder.dataType(REDSHIFT_SUPER); } break; case BYTES: if (column.getColumnLength() == null || column.getColumnLength() <= 0) { builder.columnType( String.format( "%s(%d)", REDSHIFT_BINARY_VARYING, MAX_BINARY_VARYING_LENGTH)); builder.dataType(REDSHIFT_BINARY_VARYING); } else if (column.getColumnLength() <= MAX_BINARY_VARYING_LENGTH) { builder.columnType( String.format( "%s(%d)", REDSHIFT_BINARY_VARYING, column.getColumnLength())); builder.dataType(REDSHIFT_BINARY_VARYING); builder.length(column.getColumnLength()); } else { builder.columnType( String.format( "%s(%d)", REDSHIFT_BINARY_VARYING, MAX_BINARY_VARYING_LENGTH)); 
builder.dataType(REDSHIFT_BINARY_VARYING); log.warn( "The length of binary column {} is {}, which exceeds the maximum length of {}, " + "the length will be set to {}", column.getName(), column.getColumnLength(), MAX_BINARY_VARYING_LENGTH, MAX_BINARY_VARYING_LENGTH); } break; case TIME: Integer timeScale = column.getScale(); if (timeScale != null && timeScale > MAX_TIME_SCALE) { timeScale = MAX_TIME_SCALE; log.warn( "The time column {} type time({}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be converted to time({})", column.getName(), column.getScale(), MAX_SCALE, timeScale); } builder.columnType(REDSHIFT_TIME); builder.dataType(REDSHIFT_TIME); builder.scale(timeScale); break; case TIMESTAMP: Integer timestampScale = column.getScale(); if (timestampScale != null && timestampScale > MAX_TIMESTAMP_SCALE) { timestampScale = MAX_TIMESTAMP_SCALE; log.warn( "The timestamp column {} type timestamp({}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be converted to timestamp({})", column.getName(), column.getScale(), MAX_TIMESTAMP_SCALE, timestampScale); } builder.columnType(REDSHIFT_TIMESTAMP); builder.dataType(REDSHIFT_TIMESTAMP); builder.scale(timestampScale); break; case MAP: case ARRAY: case ROW: builder.columnType(REDSHIFT_SUPER); builder.dataType(REDSHIFT_SUPER); break; default: try { return super.reconvert(column); } catch (SeaTunnelRuntimeException e) { throw CommonError.convertToConnectorTypeError( DatabaseIdentifier.REDSHIFT, column.getDataType().getSqlType().name(), column.getName()); } } return builder.build(); }
// SHORT (SMALLINT) columns must reconvert to REDSHIFT_SMALLINT for both
// columnType and dataType, preserving the column name.
@Test public void testReconvertShort() { Column column = PhysicalColumn.builder().name("test").dataType(BasicType.SHORT_TYPE).build(); BasicTypeDefine typeDefine = RedshiftTypeConverter.INSTANCE.reconvert(column); Assertions.assertEquals(column.getName(), typeDefine.getName()); Assertions.assertEquals( RedshiftTypeConverter.REDSHIFT_SMALLINT, typeDefine.getColumnType()); Assertions.assertEquals(RedshiftTypeConverter.REDSHIFT_SMALLINT, typeDefine.getDataType()); }
/**
 * Generates the seats for every row of a block, numbering them consecutively
 * starting from BLOCK_SEAT_START_NUM and continuing across rows (the counter
 * is not reset per row; each row fills up to its maxSeats).
 */
public List<Seat> createAutoIncrementSeats(
        Block block, Stadium stadium, Section section, List<BlockRow> rows) {
    final List<Seat> generated = new ArrayList<>();
    int nextSeatNumber = BLOCK_SEAT_START_NUM;
    for (BlockRow blockRow : rows) {
        // do/while semantics preserved: at least one seat is always created per
        // row, even if the running number already exceeds the row's max seats.
        while (true) {
            generated.add(
                    Seat.builder()
                            .stadium(stadium)
                            .section(section)
                            .block(block)
                            .row(blockRow)
                            .seatNumber(nextSeatNumber)
                            .build());
            nextSeatNumber++;
            if (nextSeatNumber > blockRow.getMaxSeats()) {
                break;
            }
        }
    }
    return generated;
}
// Given three rows with maxSeats 10/14/18, seats must be numbered 1..18
// cumulatively across rows (Korean test name: "given block row info,
// seat numbers can be assigned automatically").
@Test void 블록_열_정보가_주어졌을_때_자동으로_좌석을_채번할_수_있다() { // given Block block = Block.builder().id(1L).build(); Stadium stadium = Stadium.builder().id(1L).build(); Section section = Section.builder().id(1L).build(); List<BlockRow> rows = new ArrayList<>( List.of( BlockRow.builder().number(1).maxSeats(10).build(), BlockRow.builder().number(2).maxSeats(14).build(), BlockRow.builder().number(3).maxSeats(18).build())); // when List<Seat> seats = createSeatService.createAutoIncrementSeats(block, stadium, section, rows); // then assertEquals(18, seats.size()); IntStream.rangeClosed(1, 18) .forEach(i -> assertEquals(i, seats.get(i - 1).getSeatNumber())); }
/**
 * Assembles the container-localizer JVM options: admin options first, then
 * user options. On JDK 17+ (when the add-exports flag is enabled) the extra
 * JDK17 options are appended to the user options. Blank tokens produced by
 * whitespace splitting are dropped.
 */
public static List<String> getJavaOpts(Configuration conf) {
    String adminOpts =
        conf.get(YarnConfiguration.NM_CONTAINER_LOCALIZER_ADMIN_JAVA_OPTS_KEY,
            YarnConfiguration.NM_CONTAINER_LOCALIZER_ADMIN_JAVA_OPTS_DEFAULT);
    String userOpts =
        conf.get(YarnConfiguration.NM_CONTAINER_LOCALIZER_JAVA_OPTS_KEY,
            YarnConfiguration.NM_CONTAINER_LOCALIZER_JAVA_OPTS_DEFAULT);
    boolean addJdk17Options =
        conf.getBoolean(YarnConfiguration.NM_CONTAINER_LOCALIZER_JAVA_OPTS_ADD_EXPORTS_KEY,
            YarnConfiguration.NM_CONTAINER_LOCALIZER_JAVA_OPTS_ADD_EXPORTS_DEFAULT);
    if (Shell.isJavaVersionAtLeast(17) && addJdk17Options) {
      userOpts = userOpts.trim().concat(" " + ADDITIONAL_JDK17_PLUS_OPTIONS);
    }
    return Stream.concat(
            Arrays.stream(adminOpts.split("\\s+")),
            Arrays.stream(userOpts.split("\\s+")))
        .filter(opt -> !opt.isEmpty())
        .collect(Collectors.toList());
}
// With no admin/user options configured, only the default user option
// (-Xmx256m) must survive the split-and-filter pipeline.
@Test public void testJavaOptionsWithoutDefinedAdminOrUserOptions() throws Exception { ContainerLocalizerWrapper wrapper = new ContainerLocalizerWrapper(); ContainerLocalizer localizer = wrapper.setupContainerLocalizerForTest(); Configuration conf = new Configuration(); List<String> javaOpts = localizer.getJavaOpts(conf); Assert.assertEquals(1, javaOpts.size()); Assert.assertTrue(javaOpts.get(0).equals("-Xmx256m")); }
/**
 * Returns the substring of {@code content} after the first WORD_SEPARATOR.
 *
 * @param content the raw payload; must contain WORD_SEPARATOR
 * @return everything after the first separator character
 * @throws IllegalArgumentException if no separator is present
 */
public static String getContent(String content) {
    final int separatorIndex = content.indexOf(WORD_SEPARATOR);
    if (separatorIndex < 0) {
        throw new IllegalArgumentException("The content does not contain separator!");
    }
    return content.substring(separatorIndex + 1);
}
// Happy path: content after the separator is returned. Missing separator must
// raise IllegalArgumentException.
// NOTE(review): the try/fail/catch pattern could be replaced with
// Assertions.assertThrows (JUnit 5) — left as-is since the import block is not visible here.
@Test void testGetContent() { String content = "abc" + Constants.WORD_SEPARATOR + "edf"; String result = ContentUtils.getContent(content); assertEquals("edf", result); content = "test"; try { ContentUtils.getContent(content); fail(); } catch (IllegalArgumentException e) { assertNotNull(e.toString()); } }
// Snapshots every registered task executor into a TaskManagerInfo (address,
// ports, last heartbeat, slot counts/resources from the slot manager, hardware
// and memory configuration, blocklist status) and returns the list as an
// already-completed future.
@Override public CompletableFuture<Collection<TaskManagerInfo>> requestTaskManagerInfo(Time timeout) { final ArrayList<TaskManagerInfo> taskManagerInfos = new ArrayList<>(taskExecutors.size()); for (Map.Entry<ResourceID, WorkerRegistration<WorkerType>> taskExecutorEntry : taskExecutors.entrySet()) { final ResourceID resourceId = taskExecutorEntry.getKey(); final WorkerRegistration<WorkerType> taskExecutor = taskExecutorEntry.getValue(); taskManagerInfos.add( new TaskManagerInfo( resourceId, taskExecutor.getTaskExecutorGateway().getAddress(), taskExecutor.getDataPort(), taskExecutor.getJmxPort(), taskManagerHeartbeatManager.getLastHeartbeatFrom(resourceId), slotManager.getNumberRegisteredSlotsOf(taskExecutor.getInstanceID()), slotManager.getNumberFreeSlotsOf(taskExecutor.getInstanceID()), slotManager.getRegisteredResourceOf(taskExecutor.getInstanceID()), slotManager.getFreeResourceOf(taskExecutor.getInstanceID()), taskExecutor.getHardwareDescription(), taskExecutor.getMemoryConfiguration(), blocklistHandler.isBlockedTaskManager(taskExecutor.getResourceID()))); } return CompletableFuture.completedFuture(taskManagerInfos); }
// Registers a testing task executor with the resource manager and verifies the
// details returned for it: identity, address, ports, hardware description, and
// zero slots (no slots were offered), with an empty allocated-slot list.
@Test void testRequestTaskManagerInfo() throws Exception { final ResourceID taskManagerId = ResourceID.generate(); final TaskExecutorGateway taskExecutorGateway = new TestingTaskExecutorGatewayBuilder() .setAddress(UUID.randomUUID().toString()) .createTestingTaskExecutorGateway(); rpcService.registerGateway(taskExecutorGateway.getAddress(), taskExecutorGateway); resourceManager = new ResourceManagerBuilder().withSlotManager(createSlotManager()).buildAndStart(); final ResourceManagerGateway resourceManagerGateway = resourceManager.getSelfGateway(ResourceManagerGateway.class); registerTaskExecutor( resourceManagerGateway, taskManagerId, taskExecutorGateway.getAddress()); CompletableFuture<TaskManagerInfoWithSlots> taskManagerInfoFuture = resourceManagerGateway.requestTaskManagerDetailsInfo( taskManagerId, TestingUtils.TIMEOUT); TaskManagerInfoWithSlots taskManagerInfoWithSlots = taskManagerInfoFuture.get(); TaskManagerInfo taskManagerInfo = taskManagerInfoWithSlots.getTaskManagerInfo(); assertThat(taskManagerInfo.getResourceId()).isEqualTo(taskManagerId); assertThat(taskManagerInfo.getHardwareDescription()).isEqualTo(hardwareDescription); assertThat(taskManagerInfo.getAddress()).isEqualTo(taskExecutorGateway.getAddress()); assertThat(taskManagerInfo.getDataPort()).isEqualTo(dataPort); assertThat(taskManagerInfo.getJmxPort()).isEqualTo(jmxPort); assertThat(taskManagerInfo.getNumberSlots()).isEqualTo(0); assertThat(taskManagerInfo.getNumberAvailableSlots()).isEqualTo(0); assertThat(taskManagerInfoWithSlots.getAllocatedSlots()).isEmpty(); }
// Rewrites the expression tree by delegating every node to OperatorPlugin::process;
// no rewrite context is needed (hence the null).
public Expression rewrite(final Expression expression) { return new ExpressionTreeRewriter<>(new OperatorPlugin()::process) .rewrite(expression, null); }
// Arithmetic sub-expressions ('2017-01-01' + 10000) must pass through the
// rewriter untouched — the parser must never be consulted for them.
@Test public void shouldNotReplaceArithmetic() { // Given: final Expression predicate = getPredicate( "SELECT * FROM orders where '2017-01-01' + 10000 > ROWTIME;"); // When: final Expression rewritten = rewriter.rewrite(predicate); // Then: verify(parser, never()).parse(any()); assertThat(rewritten.toString(), containsString("(('2017-01-01' + 10000) > ORDERS.ROWTIME)")); }
// Dispatches inbox processing to the ordinal-specific handler (process0..process4),
// falling back to processAny for higher ordinals. Checked exceptions are
// rethrown unchecked via sneakyThrow.
@Override @SuppressWarnings("checkstyle:magicnumber") public void process(int ordinal, @Nonnull Inbox inbox) { try { switch (ordinal) { case 0: process0(inbox); break; case 1: process1(inbox); break; case 2: process2(inbox); break; case 3: process3(inbox); break; case 4: process4(inbox); break; default: processAny(ordinal, inbox); } } catch (Exception e) { throw sneakyThrow(e); } }
// Processing the inbox at ordinal 3 must route the item to tryProcess3.
@Test public void when_processInbox3_then_tryProcess3Called() { // When tryProcessP.process(ORDINAL_3, inbox); // Then tryProcessP.validateReceptionOfItem(ORDINAL_3, MOCK_ITEM); }
// Elapsed milliseconds since deploymentStart per the injected clock;
// returns 0 before deployment has started and clamps negative deltas to 0.
@Override public long getCurrentTime() { return deploymentStart == NOT_STARTED ? 0L : Math.max(0, clock.absoluteTimeMillis() - deploymentStart); }
// After entering DEPLOYING and advancing the manual clock by 5ms,
// getCurrentTime() must report exactly 5ms.
// NOTE(review): id2 is created but never used — presumably leftover; verify.
@Test void testGetCurrentTime() { final ManualClock clock = new ManualClock(Duration.ofMillis(5).toNanos()); final DeploymentStateTimeMetrics metrics = new DeploymentStateTimeMetrics(JobType.BATCH, settings, clock); final ExecutionAttemptID id1 = createExecutionAttemptId(); final ExecutionAttemptID id2 = createExecutionAttemptId(); metrics.onStateUpdate(id1, ExecutionState.CREATED, ExecutionState.SCHEDULED); metrics.onStateUpdate(id1, ExecutionState.SCHEDULED, ExecutionState.DEPLOYING); clock.advanceTime(Duration.ofMillis(5)); assertThat(metrics.getCurrentTime()).isEqualTo(5L); }
// Rebases the RPC's (relative) id to an absolute ResourceId, then delegates
// the invocation to the parent implementation with the same data.
@Override public CompletableFuture<RpcOutput> invokeRpc(RpcInput input) { return super.invokeRpc(new RpcInput(toAbsoluteId(input.id()), input.data())); }
// Invokes an RPC through the view with a relative id.
// NOTE(review): the assertion checks ResourceIds.isPrefix(rid, realId) rather than
// anything derived from the invokeRpc call itself — presumably realId is captured
// by a stubbed super.invokeRpc elsewhere in the fixture; verify the intent.
@Test public void testInvokeRpc() { RpcInput input = new RpcInput(relIntf, null); view.invokeRpc(input); assertTrue(ResourceIds.isPrefix(rid, realId)); }
// Serializes the object field-by-field in the resolver's group order:
// embed-4 fields carry a 32-bit encoded field info, all other groups a 64-bit
// one, each immediately followed by the field value; the stream is terminated
// with the resolver's end tag. Group/write order is part of the wire format —
// do not reorder.
@Override public void write(MemoryBuffer buffer, T value) { for (FieldResolver.FieldInfo fieldInfo : fieldResolver.getEmbedTypes4Fields()) { buffer.writeInt32((int) fieldInfo.getEncodedFieldInfo()); readAndWriteFieldValue(buffer, fieldInfo, value); } for (FieldResolver.FieldInfo fieldInfo : fieldResolver.getEmbedTypes9Fields()) { buffer.writeInt64(fieldInfo.getEncodedFieldInfo()); readAndWriteFieldValue(buffer, fieldInfo, value); } for (FieldResolver.FieldInfo fieldInfo : fieldResolver.getEmbedTypesHashFields()) { buffer.writeInt64(fieldInfo.getEncodedFieldInfo()); readAndWriteFieldValue(buffer, fieldInfo, value); } for (FieldResolver.FieldInfo fieldInfo : fieldResolver.getSeparateTypesHashFields()) { buffer.writeInt64(fieldInfo.getEncodedFieldInfo()); readAndWriteFieldValue(buffer, fieldInfo, value); } buffer.writeInt64(fieldResolver.getEndTag()); }
// Round-trips three bean types through CompatibleSerializer, with and without
// reference tracking (driven by the data provider).
@Test(dataProvider = "referenceTrackingConfig") public void testWrite(boolean referenceTracking) { Fury fury = Fury.builder() .withLanguage(Language.JAVA) .withRefTracking(referenceTracking) .requireClassRegistration(false) .build(); fury.registerSerializer(Foo.class, new CompatibleSerializer<>(fury, Foo.class)); fury.registerSerializer(BeanA.class, new CompatibleSerializer<>(fury, BeanA.class)); fury.registerSerializer(BeanB.class, new CompatibleSerializer<>(fury, BeanB.class)); serDeCheck(fury, Foo.create()); serDeCheck(fury, BeanB.createBeanB(2)); serDeCheck(fury, BeanA.createBeanA(2)); }
// Looks up the schema handler registered for the step's concrete class and
// applies it; an unregistered step class is a programming error.
public LogicalSchema resolve(final ExecutionStep<?> step, final LogicalSchema schema) { return Optional.ofNullable(HANDLERS.get(step.getClass())) .map(h -> h.handle(this, schema, step)) .orElseThrow(() -> new IllegalStateException("Unhandled step class: " + step.getClass())); }
// A TableAggregate step must resolve to a schema with the group-by key,
// the non-aggregate column, and the SUM aggregate as a BIGINT value column.
@Test public void shouldResolveSchemaForTableAggregate() { // Given: givenAggregateFunction("SUM"); final TableAggregate step = new TableAggregate( PROPERTIES, groupedTableSource, formats, ImmutableList.of(ColumnName.of("ORANGE")), ImmutableList.of(functionCall("SUM", "APPLE")) ); // When: final LogicalSchema result = resolver.resolve(step, SCHEMA); // Then: assertThat(result, is( LogicalSchema.builder() .keyColumn(ColumnName.of("K0"), SqlTypes.INTEGER) .valueColumn(ColumnName.of("ORANGE"), SqlTypes.INTEGER) .valueColumn(ColumnNames.aggregateColumn(0), SqlTypes.BIGINT) .build()) ); }
// Accepts every source/local pair unconditionally — this filter performs no
// screening itself; overwrite decisions happen in prepare().
@Override public boolean accept(final Path source, final Local local, final TransferStatus parent) { return true; }
// With a Find feature that reports the directory as existing, accept() must
// still return true and prepare() must mark the transfer status as exists.
@Test public void testAcceptDirectoryExists() throws Exception { final HashMap<Path, Path> files = new HashMap<>(); final Path source = new Path("a", EnumSet.of(Path.Type.directory)); files.put(source, new Path("a", EnumSet.of(Path.Type.directory))); final Find find = new Find() { @Override public boolean find(final Path file, final ListProgressListener listener) throws BackgroundException { return true; } }; AbstractCopyFilter f = new OverwriteFilter(new NullTransferSession(new Host(new TestProtocol())), new NullTransferSession(new Host(new TestProtocol())) { @Override @SuppressWarnings("unchecked") public <T> T _getFeature(final Class<T> type) { if(type.equals(Find.class)) { return (T) find; } return super._getFeature(type); } }, files); assertTrue(f.accept(source, null, new TransferStatus().exists(true))); final TransferStatus status = f.prepare(source, null, new TransferStatus().exists(true), new DisabledProgressListener()); assertTrue(status.isExists()); }
/**
 * Builds a value row for a tombstone record: every projected column is null
 * except those sourced from the key, which are copied from the key columns.
 *
 * @param row the key/value pair; the value must be null (a tombstone)
 * @return a GenericRow with key-sourced columns populated and all others null
 * @throws IllegalArgumentException if the row is not a tombstone or the key
 *         has fewer columns than the projection requires
 */
public GenericRow createRow(final KeyValue<List<?>, GenericRow> row) {
  if (row.value() != null) {
    throw new IllegalArgumentException("Not a tombstone: " + row);
  }
  final List<?> key = row.key();
  if (key.size() < keyIndexes.size()) {
    // Fixed: the original message concatenated "expected at least" directly with
    // the count, rendering e.g. "expected at least2".
    throw new IllegalArgumentException("Not enough key columns. "
        + "expected at least " + keyIndexes.size() + ", got: " + key);
  }
  final GenericRow values = new GenericRow(numColumns);
  for (int columnIdx = 0; columnIdx < numColumns; columnIdx++) {
    // keyIndexes maps a projection column to its source key column, or null
    // when the column is not key-sourced (tombstones carry no value data).
    final Integer keyIdx = keyIndexes.get(columnIdx);
    if (keyIdx == null) {
      values.append(null);
    } else {
      values.append(key.get(keyIdx));
    }
  }
  return values;
}
// A tombstone whose projection includes only one key column (K2) must yield a
// row with that key value in place and nulls everywhere else.
@Test public void shouldHandleSomeKeyColumnsNotInProjection() { // Given: givenSchema(LogicalSchema.builder() .keyColumn(ColumnName.of("K1"), SqlTypes.INTEGER) .keyColumn(ColumnName.of("K2"), SqlTypes.INTEGER) .valueColumn(ColumnName.of("V0"), SqlTypes.INTEGER) .valueColumn(ColumnName.of("K2"), SqlTypes.INTEGER) .valueColumn(ColumnName.of("V1"), SqlTypes.INTEGER) .valueColumn(ColumnName.of("V2"), SqlTypes.INTEGER) .build()); final KeyValue<List<?>, GenericRow> kv = KeyValue.keyValue( ImmutableList.of(10, 2), null ); // When: final GenericRow result = factory.createRow(kv); // Then: assertThat(result, is(genericRow(null, 2, null, null))); }
public String getValue(String template) { StringBuilder builder = new StringBuilder(); // Just delegate parsing stuffs to Expression parser to retrieve all the expressions ordered. Expression[] expressions = ExpressionParser.parseExpressions(template, context, expressionPrefix, expressionSuffix); // Now just go through expressions and evaluate them. for (Expression expression : expressions) { builder.append(expression.getValue(context)); } return builder.toString(); }
// Both the native function notation ({{ now() }}) and the Postman-style
// notation ({{ $timestamp }}) must render without a context and produce a
// timestamp-looking signedAt value (epoch seconds starting with "1").
@Test void testPostmanNotationCompatibility() { String template = "{\"signedAt\": \"{{ now() }}\", \"fullName\": \"{{ randomFullName() }}\", \"email\": \"{{ randomEmail() }}\", \"age\": {{ randomInt(20, 99) }}} \n"; String postmanTemplate = "{\"signedAt\": \"{{ $timestamp }}\", \"fullName\": \"{{ $randomFullName }}\", \"email\": \"{{ $randomEmail }}\", \"age\": {{ $randomInt }}} \n"; TemplateEngine engine = TemplateEngineFactory.getTemplateEngine(); String content = null; String postmanContent = null; try { content = engine.getValue(template); postmanContent = engine.getValue(postmanTemplate); } catch (Throwable t) { fail("Contextless template should not fail."); } assertTrue(content.startsWith("{\"signedAt\": \"1")); assertTrue(postmanContent.startsWith("{\"signedAt\": \"1")); }
// Resolves the mapping fields for a Portable key or value. Normally the
// ClassDefinition is looked up from the portable context; when no user fields
// were given AND no definition exists yet, it falls back to instantiating the
// Portable via its default constructor to register a definition on the fly.
// A failure during that registration aborts with a QueryException asking the
// user for explicit column definitions.
@Override public Stream<MappingField> resolveAndValidateFields( boolean isKey, List<MappingField> userFields, Map<String, String> options, InternalSerializationService serializationService ) { Map<QueryPath, MappingField> fieldsByPath = extractFields(userFields, isKey); PortableId portableId = getPortableId(fieldsByPath, options, isKey); ClassDefinition classDefinition = serializationService.getPortableContext() .lookupClassDefinition(portableId); // Fallback option for the case, when the portable objects were not de/serialized yet // and user fields were not provided by the user explicitly. In this case we try to // manually create a Portable instance and register its ClassDefinition. if (userFields.isEmpty() && classDefinition == null) { SerializationServiceV1 ss = (SerializationServiceV1) serializationService; // Try to create a Portable instance with the default constructor, // register its ClassDefinition, and throw object away. var tempPortableObj = ss.getPortableSerializer() .createNewPortableInstance(portableId.getFactoryId(), portableId.getClassId()); if (tempPortableObj != null) { try { ss.getPortableContext().lookupOrRegisterClassDefinition(tempPortableObj); } catch (Exception e) { // If the default constructor doesn't make Portable fields non-null,we're done: // we can't register the class, so we interrupt the execution with the exception. throw QueryException.error("Cannot create mapping for Portable type. " + "Please, provide the explicit definition for all columns."); } classDefinition = serializationService.getPortableContext().lookupClassDefinition(portableId); } } return userFields.isEmpty() ? resolveFields(isKey, classDefinition) : resolveAndValidateFields(isKey, fieldsByPath, classDefinition); }
// Explicit user fields with factory/class/version options must be passed
// through unchanged, for both the key (__key) and value (this) sides.
@Test @Parameters({ "true, __key", "false, this" }) public void test_resolveFields(boolean key, String prefix) { Stream<MappingField> resolvedFields = INSTANCE.resolveAndValidateFields( key, singletonList(field("field", QueryDataType.INT, prefix + ".field")), ImmutableMap.of( (key ? OPTION_KEY_FACTORY_ID : OPTION_VALUE_FACTORY_ID), "1", (key ? OPTION_KEY_CLASS_ID : OPTION_VALUE_CLASS_ID), "2", (key ? OPTION_KEY_CLASS_VERSION : OPTION_VALUE_CLASS_VERSION), "3" ), new DefaultSerializationServiceBuilder().build() ); assertThat(resolvedFields).containsExactly(field("field", QueryDataType.INT, prefix + ".field")); }
/**
 * "Not contains" predicate: true when {@code left} does NOT contain
 * {@code right}. A null list never matches. Surrounding double quotes on
 * {@code right} are stripped before the lookup.
 */
public boolean match(List<String> left, String right) {
    if (left == null) {
        return false;
    }
    String candidate = right;
    if (candidate.startsWith("\"") && candidate.endsWith("\"")) {
        candidate = candidate.substring(1, candidate.length() - 1);
    }
    return !left.contains(candidate);
}
// null list -> false; contained value -> false; absent value -> true.
@Test public void match() { NotContainMatch notContainMatch = new NotContainMatch(); assertFalse(notContainMatch.match(null, "http.method:GET")); assertFalse( notContainMatch.match(Arrays.asList("http.method:GET", "http.method:POST"), "http.method:GET")); assertTrue(notContainMatch.match(Arrays.asList("http.method:GET", "http.method:POST"), "http.method:PUT")); }
// Resolves the master RPC addresses: an explicit MASTER_RPC_ADDRESSES setting
// wins; otherwise the embedded-journal (Raft) addresses are reused with their
// ports overridden by the configured master RPC port.
public static List<InetSocketAddress> getMasterRpcAddresses(AlluxioConfiguration conf) { // First check whether rpc addresses are explicitly configured. if (conf.isSet(PropertyKey.MASTER_RPC_ADDRESSES)) { return parseInetSocketAddresses(conf.getList(PropertyKey.MASTER_RPC_ADDRESSES)); } // Fall back on server-side journal configuration. int rpcPort = NetworkAddressUtils.getPort(NetworkAddressUtils.ServiceType.MASTER_RPC, conf); return overridePort(getEmbeddedJournalAddresses(conf, ServiceType.MASTER_RAFT), rpcPort); }
/** Explicitly configured host:port pairs are parsed verbatim, unresolved. */
@Test
public void getMasterRpcAddresses() {
    AlluxioConfiguration conf =
        createConf(ImmutableMap.of(PropertyKey.MASTER_RPC_ADDRESSES, "host1:99,host2:100"));
    assertEquals(
        Arrays.asList(InetSocketAddress.createUnresolved("host1", 99),
            InetSocketAddress.createUnresolved("host2", 100)),
        ConfigurationUtils.getMasterRpcAddresses(conf));
}
/**
 * Creates a {@code Window} transform assigning elements with the given {@link WindowFn}.
 * Window coders must be deterministic (they participate in grouping), so a
 * non-deterministic coder is rejected up front with the cause chained.
 */
public static <T> Window<T> into(WindowFn<? super T, ?> fn) {
  try {
    fn.windowCoder().verifyDeterministic();
  } catch (NonDeterministicException e) {
    throw new IllegalArgumentException("Window coders must be deterministic.", e);
  }
  return Window.<T>configure().withWindowFn(fn);
}
/** Applying Window.into must expand to a primitive Window.Assign transform in the pipeline. */
@Test
public void testWindowIntoWindowFnAssign() {
    pipeline
        .apply(Create.of(1, 2, 3))
        .apply(
            Window.into(FixedWindows.of(Duration.standardMinutes(11L).plus(Duration.millis(1L)))));

    // Walk the pipeline graph looking for the Assign primitive.
    final AtomicBoolean foundAssign = new AtomicBoolean(false);
    pipeline.traverseTopologically(
        new PipelineVisitor.Defaults() {
            @Override
            public void visitPrimitiveTransform(TransformHierarchy.Node node) {
                if (node.getTransform() instanceof Window.Assign) {
                    foundAssign.set(true);
                }
            }
        });

    assertThat(foundAssign.get(), is(true));
}
/** Returns the popup mini-keyboard view, or {@code null} if none has been created yet. */
protected final AnyKeyboardViewBase getMiniKeyboard() {
    return mMiniKeyboard;
}
/**
 * A key with no primary code, no extra codes, and no popup must neither open the
 * mini-keyboard popup on press nor suppress the (zero-code) key event on release.
 */
@Test
public void testShortPressWhenNoPrimaryKeyAndNoPopupItemsShouldNotOutput() throws Exception {
    ExternalAnyKeyboard anyKeyboard =
        new ExternalAnyKeyboard(
            new DefaultAddOn(getApplicationContext(), getApplicationContext()),
            getApplicationContext(),
            keyboard_with_keys_with_no_codes,
            keyboard_with_keys_with_no_codes,
            "test",
            0,
            0,
            "en",
            "",
            "",
            KEYBOARD_ROW_MODE_NORMAL);
    anyKeyboard.loadKeyboard(mViewUnderTest.mKeyboardDimens);

    mViewUnderTest.setKeyboard(anyKeyboard, 0);
    Assert.assertEquals(7, anyKeyboard.getKeys().size());

    // Baseline: no mini keyboard, no popup showing.
    Assert.assertNull(mViewUnderTest.getMiniKeyboard());
    Assert.assertFalse(mViewUnderTest.mMiniKeyboardPopup.isShowing());

    // The key under test carries no code, no extra codes, no popup and no label.
    final AnyKeyboard.AnyKey key = (AnyKeyboard.AnyKey) anyKeyboard.getKeys().get(3);
    Assert.assertEquals(0, key.getPrimaryCode());
    Assert.assertEquals(0, key.getCodesCount());
    Assert.assertEquals(0, key.popupResId);
    Assert.assertNull(key.label);
    Assert.assertNull(key.popupCharacters);

    // Press without release: still no popup.
    ViewTestUtils.navigateFromTo(mViewUnderTest, key, key, 30, true, false);
    Assert.assertNull(mViewUnderTest.getMiniKeyboard());
    Assert.assertFalse(mViewUnderTest.mMiniKeyboardPopup.isShowing());

    // Release: the listener still receives the (zero-code) key event.
    ViewTestUtils.navigateFromTo(mViewUnderTest, key, key, 30, false, true);
    Mockito.verify(mMockKeyboardListener).onKey(eq(0), same(key), eq(0), any(), anyBoolean());
}
/**
 * Reserves the next operation slot and returns its intended start time (ns, same
 * clock as {@code nanoClock}).
 * <p>
 * Lock-free: the op index is taken with a fetch-and-increment; the start timestamp
 * is initialized lazily via CAS so only the first caller's clock reading wins.
 * If the schedule has fallen behind real time by more than
 * {@code MAX_V_TIME_BACK_SHIFT_SEC}, the virtual time is jumped forward to "now"
 * so callers do not burst to catch up.
 */
public long acquire() {
    final long currOpIndex = V_TIME_UPDATER.getAndIncrement(this);
    long start = this.start;
    if (start == Long.MIN_VALUE) {
        // Lazily initialize the schedule origin; on CAS failure re-read the winner's value.
        start = nanoClock.get();
        if (!START_UPDATER.compareAndSet(this, Long.MIN_VALUE, start)) {
            start = this.start;
            assert start != Long.MIN_VALUE;
        }
    }
    long intendedTime = start + currOpIndex * intervalNs;
    long now = nanoClock.get();

    // If we are behind schedule too much, update V_TIME
    if (now > intendedTime + MAX_V_TIME_BACK_SHIFT_SEC * ONE_SEC_IN_NS) {
        long newVTime = (now - start) / intervalNs;
        // no need to CAS, updated by multiple threads is acceptable
        V_TIME_UPDATER.set(this, newVTime + 1);
        intendedTime = start + newVTime * intervalNs;
    }
    return intendedTime;
}
/**
 * With a frozen clock at t=2s, successive acquires advance by one interval each
 * (presumably 1000 ops/s -> 1ms steps — inferred from the expected values).
 */
@Test
void acquireSlowSingleThread() {
    Supplier<Long> mockClock = mock(Supplier.class);
    when(mockClock.get()).thenReturn(SECONDS.toNanos(2));
    UniformRateLimiter rateLimiter = new UniformRateLimiter(1000, mockClock);
    assertThat(rateLimiter.acquire()).isEqualTo(2000000000L);
    assertThat(rateLimiter.acquire()).isEqualTo(2001000000L);
    assertThat(rateLimiter.acquire()).isEqualTo(2002000000L);
}
/**
 * Returns (creating and caching on first use) the proxy implementing the given
 * {@link PipelineOptions} sub-interface, backed by this invocation handler.
 * <p>
 * Uses double-checked locking: the cache read outside the lock is the fast path;
 * the re-check inside the lock prevents building the proxy twice.
 */
<T extends PipelineOptions> T as(Class<T> iface) {
    checkNotNull(iface);
    checkArgument(iface.isInterface(), "Not an interface: %s", iface);

    T existingOption = computedProperties.interfaceToProxyCache.getInstance(iface);
    if (existingOption == null) {
        synchronized (this) {
            // double check
            existingOption = computedProperties.interfaceToProxyCache.getInstance(iface);
            if (existingOption == null) {
                Registration<T> registration =
                    PipelineOptionsFactory.CACHE
                        .get()
                        .validateWellFormed(iface, computedProperties.knownInterfaces);
                List<PropertyDescriptor> propertyDescriptors = registration.getPropertyDescriptors();
                Class<T> proxyClass = registration.getProxyClass();
                existingOption =
                    InstanceBuilder.ofType(proxyClass)
                        .fromClass(proxyClass)
                        .withArg(InvocationHandler.class, this)
                        .build();
                // Publish the new proxy into an updated immutable snapshot of computed properties.
                computedProperties =
                    computedProperties.updated(iface, existingOption, propertyDescriptors);
            }
        }
    }
    return existingOption;
}
/** A value set through a sub-interface proxy must be visible after casting to the super-interface. */
@Test
public void testUpCastRetainsSuperInterfaceValues() throws Exception {
    ProxyInvocationHandler handler = new ProxyInvocationHandler(Maps.newHashMap());
    SubClass extended = handler.as(SubClass.class);
    extended.setString("parentValue");

    Simple simple = extended.as(Simple.class);
    assertEquals("parentValue", simple.getString());
}
/** Computes the trigger's next execution time, based at its current {@code nextTime}. */
public Optional<DateTime> nextTime(JobTriggerDto trigger) {
    return nextTime(trigger, trigger.nextTime());
}
/** A 1s interval schedule yields now+1s; after advancing the clock it is based at the stored nextTime. */
@Test
public void nextTime() {
    final JobTriggerDto trigger = JobTriggerDto.builderWithClock(clock)
        .jobDefinitionId("abc-123")
        .jobDefinitionType("event-processor-execution-v1")
        .schedule(IntervalJobSchedule.builder()
            .interval(1)
            .unit(TimeUnit.SECONDS)
            .build())
        .build();

    final DateTime nextFutureTime1 = strategies.nextTime(trigger).orElse(null);
    assertThat(nextFutureTime1)
        .isNotNull()
        .isGreaterThanOrEqualTo(clock.nowUTC())
        .isEqualByComparingTo(clock.nowUTC().plusSeconds(1));

    // Even after time passes, the next time is computed from the trigger's nextTime.
    clock.plus(10, TimeUnit.SECONDS);

    final DateTime nextFutureTime2 = strategies.nextTime(trigger).orElse(null);
    assertThat(nextFutureTime2)
        .isNotNull()
        .isEqualByComparingTo(trigger.nextTime().plusSeconds(1));
}
/**
 * Splits the rule's HTML description into description sections.
 * Returns an empty set when the rule has no HTML description.
 */
@Override
public Set<RuleDescriptionSectionDto> generateSections(RulesDefinition.Rule rule) {
    final Optional<String> htmlDescription = getDescriptionInHtml(rule);
    if (!htmlDescription.isPresent()) {
        return emptySet();
    }
    return generateSections(htmlDescription.get());
}
/**
 * A description starting with the "Ask yourself" title still yields the default,
 * assess-the-problem and how-to-fix sections (no root-cause/risk section).
 */
@Test
public void parse_return_null_risk_when_desc_starts_with_ask_yourself_title() {
    when(rule.htmlDescription()).thenReturn(ASKATRISK + RECOMMENTEDCODINGPRACTICE);

    Set<RuleDescriptionSectionDto> results = generator.generateSections(rule);

    Map<String, String> sectionKeyToContent =
        results.stream().collect(toMap(RuleDescriptionSectionDto::getKey, RuleDescriptionSectionDto::getContent));
    assertThat(sectionKeyToContent).hasSize(3)
        .containsEntry(DEFAULT_SECTION_KEY, rule.htmlDescription())
        .containsEntry(ASSESS_THE_PROBLEM_SECTION_KEY, ASKATRISK)
        .containsEntry(HOW_TO_FIX_SECTION_KEY, RECOMMENTEDCODINGPRACTICE);
}
/**
 * Loads application settings from {@code conf/sonar.properties}, allowing any
 * known or file-declared property to be overridden from the environment.
 * Properties are loaded in two passes because some dynamic property keys
 * (e.g. {@code ldap.*.url}) are only known after static properties resolve.
 */
@Override
public AppSettings load() {
    Properties p = loadPropertiesFile(homeDir);
    // Known property keys plus everything declared in the file can be overridden from env.
    Set<String> keysOverridableFromEnv = stream(ProcessProperties.Property.values()).map(ProcessProperties.Property::getKey)
        .collect(Collectors.toSet());
    keysOverridableFromEnv.addAll(p.stringPropertyNames());

    // 1st pass to load static properties
    Props staticProps = reloadProperties(keysOverridableFromEnv, p);
    keysOverridableFromEnv.addAll(getDynamicPropertiesKeys(staticProps));

    // 2nd pass to load dynamic properties like `ldap.*.url` or `ldap.*.baseDn` which keys depend on values of static
    // properties loaded in 1st step
    Props props = reloadProperties(keysOverridableFromEnv, p);
    new ProcessProperties(serviceLoaderWrapper).completeDefaults(props);
    stream(consumers).forEach(c -> c.accept(props));
    return new AppSettingsImpl(props);
}
/** A property written to conf/sonar.properties surfaces in the loaded raw properties. */
@Test
public void load_properties_from_file() throws Exception {
    File homeDir = temp.newFolder();
    File propsFile = new File(homeDir, "conf/sonar.properties");
    FileUtils.write(propsFile, "foo=bar", UTF_8);

    AppSettingsLoaderImpl underTest =
        new AppSettingsLoaderImpl(system, new String[0], homeDir, serviceLoaderWrapper);
    AppSettings settings = underTest.load();

    assertThat(settings.getProps().rawProperties()).contains(entry("foo", "bar"));
}
/**
 * Splits a single text-file line into field strings.
 * <p>
 * For "CSV" file types the line is scanned character-by-character, honoring the
 * enclosure character (possibly doubled or escaped) and the escape character for
 * separators, enclosures and the escape itself. For fixed-width files each field
 * is cut at its configured position/length, either by characters (classic
 * behavior) or by bytes in the configured encoding.
 *
 * @param log              channel for row-level trace logging
 * @param line             the raw line; {@code null} yields {@code null}
 * @param inf              step metadata (field layout, file type, escape char, encoding)
 * @param delimiter        field separator (CSV mode)
 * @param enclosure        field enclosure, may be {@code null}
 * @param escapeCharacters escape string, may be {@code null}
 * @return one string per configured input field
 * @throws KettleException if any conversion error occurs
 */
public static final String[] convertLineToStrings( LogChannelInterface log, String line, TextFileInputMeta inf,
  String delimiter, String enclosure, String escapeCharacters ) throws KettleException {
  String[] strings = new String[inf.inputFields.length];
  int fieldnr;
  String pol; // piece of line

  try {
    if ( line == null ) {
      return null;
    }

    if ( inf.content.fileType.equalsIgnoreCase( "CSV" ) ) {
      // Split string in pieces, only for CSV!
      fieldnr = 0;
      int pos = 0;
      int length = line.length();
      boolean dencl = false; // saw a doubled enclosure -> must collapse later

      int len_encl = ( enclosure == null ? 0 : enclosure.length() );
      int len_esc = ( escapeCharacters == null ? 0 : escapeCharacters.length() );

      while ( pos < length ) {
        int from = pos;
        int next;

        boolean encl_found;
        boolean contains_escaped_enclosures = false;
        boolean contains_escaped_separators = false;
        boolean contains_escaped_escape = false;

        // Is the field beginning with an enclosure?
        // "aa;aa";123;"aaa-aaa";000;...
        if ( len_encl > 0 && line.substring( from, from + len_encl ).equalsIgnoreCase( enclosure ) ) {
          if ( log.isRowLevel() ) {
            log.logRowlevel( BaseMessages.getString( PKG, "TextFileInput.Log.ConvertLineToRowTitle" ), BaseMessages
              .getString( PKG, "TextFileInput.Log.Encloruse", line.substring( from, from + len_encl ) ) );
          }
          encl_found = true;
          int p = from + len_encl;

          boolean is_enclosure =
            len_encl > 0 && p + len_encl < length && line.substring( p, p + len_encl ).equalsIgnoreCase( enclosure );
          boolean is_escape =
            len_esc > 0 && p + len_esc < length
              && line.substring( p, p + len_esc ).equalsIgnoreCase( inf.content.escapeCharacter );

          boolean enclosure_after = false;

          // Is it really an enclosure? See if it's not repeated twice or escaped!
          if ( ( is_enclosure || is_escape ) && p < length - 1 ) {
            String strnext = line.substring( p + len_encl, p + 2 * len_encl );
            if ( strnext.equalsIgnoreCase( enclosure ) ) {
              p++;
              enclosure_after = true;
              dencl = true;

              // Remember to replace them later on!
              if ( is_escape ) {
                contains_escaped_enclosures = true;
              }
            } else if ( strnext.equals( inf.content.escapeCharacter ) ) {
              p++;
              // Remember to replace them later on!
              if ( is_escape ) {
                contains_escaped_escape = true; // remember
              }
            }
          }

          // Look for a closing enclosure!
          while ( ( !is_enclosure || enclosure_after ) && p < line.length() ) {
            p++;
            enclosure_after = false;
            is_enclosure =
              len_encl > 0 && p + len_encl < length && line.substring( p, p + len_encl ).equals( enclosure );
            is_escape =
              len_esc > 0 && p + len_esc < length
                && line.substring( p, p + len_esc ).equals( inf.content.escapeCharacter );

            // Is it really an enclosure? See if it's not repeated twice or escaped!
            if ( ( is_enclosure || is_escape ) && p < length - 1 ) {
              String strnext = line.substring( p + len_encl, p + 2 * len_encl );
              if ( strnext.equals( enclosure ) ) {
                p++;
                enclosure_after = true;
                dencl = true;

                // Remember to replace them later on!
                if ( is_escape ) {
                  contains_escaped_enclosures = true; // remember
                }
              } else if ( strnext.equals( inf.content.escapeCharacter ) ) {
                p++;

                // Remember to replace them later on!
                if ( is_escape ) {
                  contains_escaped_escape = true; // remember
                }
              }
            }
          }

          if ( p >= length ) {
            next = p;
          } else {
            next = p + len_encl;
          }

          if ( log.isRowLevel() ) {
            log.logRowlevel( BaseMessages.getString( PKG, "TextFileInput.Log.ConvertLineToRowTitle" ), BaseMessages
              .getString( PKG, "TextFileInput.Log.EndOfEnclosure", "" + p ) );
          }
        } else {
          encl_found = false;
          boolean found = false;
          int startpoint = from;
          // int tries = 1;
          do {
            next = line.indexOf( delimiter, startpoint );

            // See if this position is preceded by an escape character.
            if ( len_esc > 0 && next > 0 ) {
              String before = line.substring( next - len_esc, next );

              if ( inf.content.escapeCharacter.equals( before ) ) {
                // Count how many escape characters immediately precede the separator.
                int previous_escapes = 1;

                int start = next - len_esc - 1;
                int end = next - 1;

                while ( start >= 0 ) {
                  if ( inf.content.escapeCharacter.equals( line.substring( start, end ) ) ) {
                    previous_escapes++;
                    start--;
                    end--;
                  } else {
                    break;
                  }
                }

                // If behind the seperator there are a odd number of escaped
                // The separator is escaped.
                if ( previous_escapes % 2 != 0 ) {
                  // take the next separator, this one is escaped...
                  startpoint = next + 1;
                  // tries++;
                  contains_escaped_separators = true;
                } else {
                  found = true;
                }

              } else {
                found = true;
              }
            } else {
              found = true;
            }
          } while ( !found && next >= 0 );
        }
        if ( next == -1 ) {
          next = length;
        }

        if ( encl_found && ( ( from + len_encl ) <= ( next - len_encl ) ) ) {
          pol = line.substring( from + len_encl, next - len_encl );
          if ( log.isRowLevel() ) {
            log.logRowlevel( BaseMessages.getString( PKG, "TextFileInput.Log.ConvertLineToRowTitle" ), BaseMessages
              .getString( PKG, "TextFileInput.Log.EnclosureFieldFound", "" + pol ) );
          }
        } else {
          pol = line.substring( from, next );
          if ( log.isRowLevel() ) {
            log.logRowlevel( BaseMessages.getString( PKG, "TextFileInput.Log.ConvertLineToRowTitle" ), BaseMessages
              .getString( PKG, "TextFileInput.Log.NormalFieldFound", "" + pol ) );
          }
        }

        // Collapse doubled enclosures when there is no escape character configured.
        if ( dencl && Utils.isEmpty( inf.content.escapeCharacter ) ) {
          StringBuilder sbpol = new StringBuilder( pol );
          int idx = sbpol.indexOf( enclosure + enclosure );
          while ( idx >= 0 ) {
            sbpol.delete( idx, idx + enclosure.length() );
            idx = sbpol.indexOf( enclosure + enclosure );
          }
          pol = sbpol.toString();
        }

        //  replace the escaped enclosures with enclosures...
        if ( !Utils.isEmpty( inf.content.escapeCharacter ) && ( inf.content.escapeCharacter.equals( enclosure ) )
          && ( contains_escaped_escape || contains_escaped_enclosures ) ) {
          String replace = inf.content.escapeCharacter + enclosure;
          String replaceWith = enclosure;

          pol = Const.replace( pol, replace, replaceWith );
        } else {
          if ( contains_escaped_enclosures ) {
            String replace = inf.content.escapeCharacter + enclosure;
            String replaceWith = enclosure;

            pol = Const.replace( pol, replace, replaceWith );
          }

          contains_escaped_escape = !Utils.isEmpty( inf.content.escapeCharacter )
            && pol.contains( inf.content.escapeCharacter + inf.content.escapeCharacter );
          // replace the escaped escape with a single escape...
          if ( contains_escaped_escape ) {
            String replace = inf.content.escapeCharacter + inf.content.escapeCharacter;
            String replaceWith = inf.content.escapeCharacter;

            pol = Const.replace( pol, replace, replaceWith );
          }
        }

        // replace the escaped separators with separators...
        if ( contains_escaped_separators ) {
          String replace = inf.content.escapeCharacter + delimiter;
          String replaceWith = delimiter;

          pol = Const.replace( pol, replace, replaceWith );
        }

        // Now add pol to the strings found!
        try {
          strings[fieldnr] = pol;
        } catch ( ArrayIndexOutOfBoundsException e ) {
          // In case we didn't allocate enough space.
          // This happens when you have less header values specified than there are actual values in the rows.
          // As this is "the exception" we catch and resize here.
          // NOTE(review): the copy below allocates the SAME length, so the store is silently
          // dropped rather than the array grown — looks like a latent bug; verify intent.
          String[] newStrings = new String[strings.length];
          for ( int x = 0; x < strings.length; x++ ) {
            newStrings[x] = strings[x];
          }
          strings = newStrings;
        }

        pos = next + delimiter.length();
        fieldnr++;
      }
      if ( pos == length ) {
        if ( log.isRowLevel() ) {
          log.logRowlevel( BaseMessages.getString( PKG, "TextFileInput.Log.ConvertLineToRowTitle" ), BaseMessages
            .getString( PKG, "TextFileInput.Log.EndOfEmptyLineFound" ) );
        }
        if ( fieldnr < strings.length ) {
          strings[fieldnr] = Const.EMPTY_STRING;
        }
        fieldnr++;
      }
    } else {
      // Fixed file format: Simply get the strings at the required positions...
      // Note - charBased is the old default behavior. If this is an old transformation, content.length will be null
      // and should be processed as before. If the content.length is equal to "Characters" or there is no specified
      // encoding, it will still use the old behavior. The *only* way to get the new behavior is if
      // content.length = "Bytes" and the encoding is specified.
      boolean charBased = ( inf.content.length == null
        || inf.content.length.equalsIgnoreCase( "Characters" ) || inf.getEncoding() == null ); // Default to classic behavior
      for ( int i = 0; i < inf.inputFields.length; i++ ) {
        BaseFileField field = inf.inputFields[i];

        int length;
        int fPos = field.getPosition();
        int fLength = field.getLength();
        int fPl = fPos + fLength;
        if ( charBased ) {
          length = line.length();
          if ( fPl <= length ) {
            strings[i] = line.substring( fPos, fPl );
          } else {
            if ( fPos < length ) {
              strings[i] = line.substring( fPos );
            } else {
              strings[i] = "";
            }
          }
        } else {
          byte[] b = null;
          String enc = inf.getEncoding();
          b = line.getBytes( enc );
          length = b.length;
          if ( fPl <= length ) {
            strings[i] = new String( Arrays.copyOfRange( b, fPos, fPl ), enc );
          } else {
            if ( fPos < length ) {
              // NOTE(review): copies up to length - 1, dropping the final byte of the line,
              // unlike the character-based branch — confirm whether this is intentional.
              strings[i] = new String( Arrays.copyOfRange( b, fPos, length - 1 ), enc );
            } else {
              strings[i] = "";
            }
          }
        }
      }
    }
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString( PKG, "TextFileInput.Log.Error.ErrorConvertingLine", e
      .toString() ), e );
  }

  return strings;
}
/** Escaped backslashes inside quoted CSV fields collapse to single backslashes on parse. */
@Test
public void convertLineToStrings() throws Exception {
    TextFileInputMeta inputMeta = Mockito.mock( TextFileInputMeta.class );
    inputMeta.content = new TextFileInputMeta.Content();
    inputMeta.content.fileType = "CSV";
    inputMeta.inputFields = new BaseFileField[ 3 ];
    inputMeta.content.escapeCharacter = "\\";

    // Raw line (after Java unescaping): "\\fie\\l\dA"|"fieldB\\"|"fie\\ldC"
    String line = "\"\\\\fie\\\\l\\dA\"|\"fieldB\\\\\"|\"fie\\\\ldC\""; // ""\\fie\\l\dA"|"fieldB\\"|"Fie\\ldC""

    String[] strings = TextFileInputUtils
        .convertLineToStrings( Mockito.mock( LogChannelInterface.class ), line, inputMeta, "|", "\"", "\\" );
    Assert.assertNotNull( strings );
    Assert.assertEquals( "\\fie\\l\\dA", strings[ 0 ] );
    Assert.assertEquals( "fieldB\\", strings[ 1 ] );
    Assert.assertEquals( "fie\\ldC", strings[ 2 ] );
}
/** Matches the given specs, resolving relative specs against the current working directory. */
@Override
protected List<MatchResult> match(List<String> specs) throws IOException {
    return match(new File(".").getAbsolutePath(), specs);
}
/** A "file:///" URI prefix is accepted and matches only the exact file, not its siblings. */
@Test
public void testMatchWithFileThreeSlashesPrefix() throws Exception {
    List<String> expected = ImmutableList.of(temporaryFolder.newFile("a").toString());
    // Sibling files that must NOT be matched by the exact spec.
    temporaryFolder.newFile("aa");
    temporaryFolder.newFile("ab");

    String file = "file:///" + temporaryFolder.getRoot().toPath().resolve("a").toString();
    List<MatchResult> results = localFileSystem.match(ImmutableList.of(file));
    assertThat(
        toFilenames(results),
        containsInAnyOrder(expected.toArray(new String[expected.size()])));
}
/**
 * Attempts to acquire a permit from the delegate limiter. When a listener is
 * obtained, it is wrapped so that every completion path (success, ignore,
 * dropped) first notifies the delegate and then releases a blocked waiter via
 * {@code unblock()}. An empty result means the acquire did not succeed.
 */
@Override
public Optional<Listener> acquire(ContextT context) {
    return tryAcquire(context).map(delegate -> new Listener() {
        @Override
        public void onSuccess() {
            delegate.onSuccess();
            unblock();
        }

        @Override
        public void onIgnore() {
            delegate.onIgnore();
            unblock();
        }

        @Override
        public void onDropped() {
            delegate.onDropped();
            unblock();
        }
    });
}
/**
 * Once the limit is exhausted the next acquire blocks until the timeout
 * (presumably configured at 1s on blockingLimiter) and then returns empty.
 * NOTE(review): wall-clock timing assertion — can be flaky on slow CI hosts.
 */
@Test
public void blockWhenFullAndTimeout() {
    // Acquire all 4 available tokens
    for (int i = 0; i < 4; i++) {
        Optional<Limiter.Listener> listener = blockingLimiter.acquire(null);
        Assert.assertTrue(listener.isPresent());
    }

    // Next acquire will block for 1 second
    long start = System.nanoTime();
    Optional<Limiter.Listener> listener = blockingLimiter.acquire(null);
    long duration = TimeUnit.NANOSECONDS.toSeconds(System.nanoTime() - start);
    Assert.assertTrue(duration >= 1);
    Assert.assertFalse(listener.isPresent());
}
/**
 * Resolves the calling client's user agent from the request, preferring the
 * standard User-Agent header and falling back to the client-version header.
 * Never returns {@code null}; absent headers yield the empty string.
 */
public static String getUserAgent(HttpServletRequest request) {
    final String agentHeader = request.getHeader(HttpHeaderConsts.USER_AGENT_HEADER);
    if (!StringUtils.isEmpty(agentHeader)) {
        return agentHeader;
    }
    // Fall back to the client-version header, defaulting to "" when absent.
    final String clientVersion = request.getHeader(HttpHeaderConsts.CLIENT_VERSION_HEADER);
    return StringUtils.defaultIfEmpty(clientVersion, StringUtils.EMPTY);
}
/** No headers -> "", client-version header is the fallback, User-Agent wins when present. */
@Test
void testGetUserAgent() {
    MockHttpServletRequest servletRequest = new MockHttpServletRequest();
    String userAgent = WebUtils.getUserAgent(servletRequest);
    assertEquals("", userAgent);
    servletRequest.addHeader(HttpHeaderConsts.CLIENT_VERSION_HEADER, "0");
    assertEquals("0", WebUtils.getUserAgent(servletRequest));
    servletRequest.addHeader(HttpHeaderConsts.USER_AGENT_HEADER, "1");
    assertEquals("1", WebUtils.getUserAgent(servletRequest));
}
/** Opens a file via the file dialog; delegates with importfile=false. */
public void openFile() {
    openFile( false );
}
/** With no repository name configured, the last-used file must not be opened. */
@Test
public void testLoadLastUsedRepTransNoRepository() throws Exception {
    String repositoryName = null;
    String fileName = "fileName";

    setLoadLastUsedJobLocalWithRepository( true, repositoryName, null, fileName, false );
    verify( spoon, never() ).openFile( anyString(), anyBoolean() );
}
/**
 * Deletes a member group after verifying it exists and has no users assigned.
 */
@Override
public void deleteGroup(Long id) {
    // Validate that the group exists
    validateGroupExists(id);
    // Validate that no users are still assigned to the group
    validateGroupHasUser(id);
    // Delete the group
    memberGroupMapper.deleteById(id);
}
/** Deleting an existing, empty group removes its row. */
@Test
public void testDeleteGroup_success() {
    // mock data
    MemberGroupDO dbGroup = randomPojo(MemberGroupDO.class);
    groupMapper.insert(dbGroup); // @Sql: insert an existing row first
    // prepare parameters
    Long id = dbGroup.getId();

    // invoke
    groupService.deleteGroup(id);
    // verify the row no longer exists
    assertNull(groupMapper.selectById(id));
}
/**
 * Determines the IFR/VFR status of a track at a moment in time by majority vote
 * over the {@code numPointsToConsider} points nearest to {@code time}.
 * Ties (and VFR majorities) resolve to VFR — only a strict IFR majority yields IFR.
 *
 * @throws IllegalArgumentException if the track does not span {@code time}
 */
public <T> IfrVfrStatus statusOf(Track<T> track, Instant time) {
    checkNotNull(track);
    checkNotNull(time);
    checkArgument(
        track.asTimeWindow().contains(time),
        "This track does not exist at this moment in time"
    );

    EnumMultiset<IfrVfrStatus> counts = EnumMultiset.create(IfrVfrStatus.class);

    Collection<Point<T>> localPoints = track.kNearestPoints(time, numPointsToConsider);
    for (Point<T> point : localPoints) {
        counts.add(statusOf(point));
    }

    return (counts.count(IFR) > counts.count(VFR)) ? IFR : VFR;
}
/** A NOP point with valid callsign and VFR flight rules is assigned VFR. */
@Test
public void testStatusOfPoint_goodBeaconAndGoodcallsign_vfr() {
    String rawNop =
        "[RH],STARS,D21_B,03/24/2018,14:42:00.130,N518SP,C172,,5256,032,110,186,042.92704,-083.70974,3472,5256,-14.5730,42.8527,1,Y,A,D21,,POL,ARB,1446,ARB,ACT,VFR,,01500,,,,,,S,1,,0,{RH}";
    Point<NopHit> point = NopHit.from(rawNop);

    // Sanity-check the fixture before exercising the assigner.
    assertTrue(point.rawData().hasFlightRules());
    assertTrue(point.hasValidCallsign());
    assertEquals(IfrVfrStatus.from(point.rawData().flightRules()), VFR);

    IfrVfrAssigner assigner = new IfrVfrAssigner();

    assertEquals(VFR, assigner.statusOf(point));
}
/**
 * Returns the gauge registered under {@code name}, registering a default one if absent.
 * The unchecked cast is inherent to the heterogeneous metric registry.
 */
@SuppressWarnings({"rawtypes", "unchecked"})
public <T extends Gauge> T gauge(String name) {
    return (T) getOrAdd(name, MetricBuilder.GAUGES);
}
/** Re-registering a gauge under the same name returns the original; the new supplier is ignored. */
@Test
public void accessingAnExistingGaugeReusesIt() {
    final Gauge<String> gauge1 = registry.gauge("thing", () -> () -> "string-gauge");
    final Gauge<String> gauge2 = registry.gauge("thing", () -> new DefaultSettableGauge<>("settable-gauge"));
    assertThat(gauge1).isSameAs(gauge2);
    // The first registration's value wins.
    assertThat(gauge2.getValue()).isEqualTo("string-gauge");
    verify(listener).onGaugeAdded("thing", gauge1);
}
/**
 * Adds an exclusive publication for the given channel/stream, blocking until the
 * driver acknowledges the registration. Serialized via the client lock; reentrant
 * use (e.g. from a callback) is rejected.
 */
ExclusivePublication addExclusivePublication(final String channel, final int streamId) {
    clientLock.lock();
    try {
        ensureActive();
        ensureNotReentrant();

        final long registrationId = driverProxy.addExclusivePublication(channel, streamId);
        // Stash the channel so it can be reported/reused once the response arrives.
        stashedChannelByRegistrationId.put(registrationId, channel);
        awaitResponse(registrationId);

        return (ExclusivePublication)resourceByRegIdMap.get(registrationId);
    } finally {
        clientLock.unlock();
    }
}
/** With preTouchMappedMemory(false), mapping the log buffers must never call preTouch(). */
@Test
void shouldNotPreTouchLogBuffersForExclusivePublicationIfDisabled() {
    final int streamId = -53453894;
    final String channel = "aeron:ipc?alias=test";
    final long publicationId = 113;
    final String logFileName = SESSION_ID_2 + "-log";
    context.preTouchMappedMemory(false);

    // Simulate the driver's "publication ready" broadcast for this registration.
    whenReceiveBroadcastOnMessage(
        ControlProtocolEvents.ON_EXCLUSIVE_PUBLICATION_READY,
        publicationReadyBuffer,
        (buffer) ->
        {
            publicationReady.correlationId(publicationId);
            publicationReady.registrationId(publicationId);
            publicationReady.logFileName(logFileName);
            return publicationReady.length();
        });
    when(driverProxy.addExclusivePublication(channel, streamId)).thenReturn(publicationId);

    final ExclusivePublication publication = conductor.addExclusivePublication(channel, streamId);

    assertNotNull(publication);
    final LogBuffers logBuffers = logBuffersFactory.map(logFileName);
    assertNotNull(logBuffers);
    verify(logBuffers, never()).preTouch();
}
/** Creates a new builder for configuring a B3 propagation factory. */
public static FactoryBuilder newFactoryBuilder() {
    return new FactoryBuilder();
}
/** SINGLE and SINGLE_NO_PARENT are mutually exclusive inject formats for a kind. */
@Test
void injectKindFormats_cantBeBothSingle() {
    assertThatThrownBy(() -> B3Propagation.newFactoryBuilder()
        .injectFormats(Span.Kind.CLIENT, Format.SINGLE, Format.SINGLE_NO_PARENT))
        .isInstanceOf(IllegalArgumentException.class);
}
/**
 * Closes the task's state manager under the state-directory lock.
 * <p>
 * With EOS enabled, an unclean close invalidates the local state, so the whole
 * task state directory is wiped after closing. The first exception observed is
 * rethrown after unlock; failure to acquire the lock is logged but not fatal.
 *
 * @throws ProcessorStateException if closing or the directory handling fails
 */
static void closeStateManager(final Logger log,
                              final String logPrefix,
                              final boolean closeClean,
                              final boolean eosEnabled,
                              final ProcessorStateManager stateMgr,
                              final StateDirectory stateDirectory,
                              final TaskType taskType) {
    // if EOS is enabled, wipe out the whole state store for unclean close since it is now invalid
    final boolean wipeStateStore = !closeClean && eosEnabled;

    final TaskId id = stateMgr.taskId();
    log.trace("Closing state manager for {} task {}", taskType, id);

    final AtomicReference<ProcessorStateException> firstException = new AtomicReference<>(null);
    try {
        if (stateDirectory.lock(id)) {
            try {
                stateMgr.close();
            } catch (final ProcessorStateException e) {
                firstException.compareAndSet(null, e);
            } finally {
                try {
                    if (wipeStateStore) {
                        log.debug("Wiping state stores for {} task {}", taskType, id);
                        // we can just delete the whole dir of the task, including the state store images and the checkpoint files,
                        // and then we write an empty checkpoint file indicating that the previous close is graceful and we just
                        // need to re-bootstrap the restoration from the beginning
                        Utils.delete(stateMgr.baseDir());
                    }
                } finally {
                    // Always release the directory lock, even if the wipe failed.
                    stateDirectory.unlock(id);
                }
            }
        } else {
            log.error("Failed to acquire lock while closing the state store for {} task {}", taskType, id);
        }
    } catch (final IOException e) {
        final ProcessorStateException exception = new ProcessorStateException(
            String.format("%sFatal error while trying to close the state manager for task %s", logPrefix, id), e
        );
        firstException.compareAndSet(null, exception);
    }

    final ProcessorStateException exception = firstException.get();
    if (exception != null) {
        throw exception;
    }
}
/** A clean close locks, closes the manager, unlocks — and never wipes the state dir. */
@Test
public void testCloseStateManagerClean() {
    final InOrder inOrder = inOrder(stateManager, stateDirectory);
    when(stateManager.taskId()).thenReturn(taskId);
    when(stateDirectory.lock(taskId)).thenReturn(true);

    StateManagerUtil.closeStateManager(logger,
        "logPrefix:", true, false, stateManager, stateDirectory, TaskType.ACTIVE);

    inOrder.verify(stateManager).close();
    inOrder.verify(stateDirectory).unlock(taskId);
    verifyNoMoreInteractions(stateManager, stateDirectory);
}
/**
 * Builds the JSON request body asking the SCM plugin for the latest revision.
 * The keys are emitted in declaration order ({@code scm-configuration},
 * {@code scm-data}, {@code flyweight-folder}) so the payload is deterministic.
 */
@Override
public String requestMessageForLatestRevision(SCMPropertyConfiguration scmConfiguration, Map<String, String> materialData, String flyweightFolder) {
    // Typed map instead of the previous raw Map/LinkedHashMap (raw types defeat
    // compile-time checking); LinkedHashMap preserves key order for stable JSON.
    Map<String, Object> configuredValues = new LinkedHashMap<>();
    configuredValues.put("scm-configuration", jsonResultMessageHandler.configurationToMap(scmConfiguration));
    configuredValues.put("scm-data", materialData);
    configuredValues.put("flyweight-folder", flyweightFolder);
    return GSON.toJson(configuredValues);
}
/** Serialized request body contains configuration, material data and flyweight folder in order. */
@Test
public void shouldBuildRequestBodyForLatestRevisionRequest() throws Exception {
    String requestBody =
        messageHandler.requestMessageForLatestRevision(scmPropertyConfiguration, materialData, "flyweight");

    assertThat(requestBody, is("{\"scm-configuration\":{\"key-one\":{\"value\":\"value-one\"},\"key-two\":{\"value\":\"value-two\"}},\"scm-data\":{\"key-one\":\"value-one\"},\"flyweight-folder\":\"flyweight\"}"));
}
/**
 * Updates an existing OpenStack network and its augmented (typed) counterpart.
 * Fails with {@link IllegalArgumentException} if the augmented entry does not
 * already exist — updates never create.
 */
@Override
public void updateNetwork(Network osNet) {
    checkNotNull(osNet, ERR_NULL_NETWORK);
    checkArgument(!Strings.isNullOrEmpty(osNet.getId()), ERR_NULL_NETWORK_ID);

    osNetworkStore.updateNetwork(osNet);

    OpenstackNetwork finalAugmentedNetwork = buildAugmentedNetworkFromType(osNet);
    // compute() lets us atomically verify existence before replacing the entry.
    augmentedNetworkMap.compute(osNet.getId(), (id, existing) -> {
        final String error = osNet.getId() + ERR_NOT_FOUND;
        checkArgument(existing != null, error);
        return finalAugmentedNetwork;
    });

    log.info(String.format(MSG_NETWORK, osNet.getId(), MSG_UPDATED));
}
/** Updating a network that was never created must be rejected. */
@Test(expected = IllegalArgumentException.class)
public void testUpdateUnregisteredNetwork() {
    target.updateNetwork(NETWORK);
}
/** Returns 0 — per the JDBC DatabaseMetaData convention, 0 means no limit or unknown. */
@Override
public int getMaxColumnsInIndex() {
    return 0;
}
/** The metadata adapter reports 0 (no limit/unknown) for max columns in an index. */
@Test
void assertGetMaxColumnsInIndex() {
    assertThat(metaData.getMaxColumnsInIndex(), is(0));
}
/**
 * Convenience overload building a {@link SourceDescription} with no cluster
 * statistics/error streams and a blank local host info.
 */
public static SourceDescription create(
    final DataSource dataSource,
    final boolean extended,
    final List<RunningQuery> readQueries,
    final List<RunningQuery> writeQueries,
    final Optional<TopicDescription> topicDescription,
    final List<QueryOffsetSummary> queryOffsetSummaries,
    final List<String> sourceConstraints,
    final MetricCollectors metricCollectors
) {
    return create(
        dataSource,
        extended,
        readQueries,
        writeQueries,
        topicDescription,
        queryOffsetSummaries,
        sourceConstraints,
        // No stats or error streams and an empty host for the local-only view.
        Stream.empty(),
        Stream.empty(),
        new KsqlHostInfo("", 0),
        metricCollectors
    );
}
/** A source without a timestamp column yields an empty timestamp string in its description. */
@Test
public void shouldReturnEmptyTimestampColumn() {
    // Given:
    final String kafkaTopicName = "kafka";
    final DataSource dataSource = buildDataSource(kafkaTopicName, Optional.empty());

    // When
    final SourceDescription sourceDescription = SourceDescriptionFactory.create(
        dataSource,
        true,
        Collections.emptyList(),
        Collections.emptyList(),
        Optional.empty(),
        Collections.emptyList(),
        Collections.emptyList(),
        new MetricCollectors()
    );

    // Then:
    assertThat(sourceDescription.getTimestamp(), is(""));
}
/** Convenience overload: parses the string and delegates to {@code allSubPaths(HttpUrl)}. */
public static ImmutableSet<HttpUrl> allSubPaths(String url) {
    return allSubPaths(HttpUrl.parse(url));
}
/** A single trailing-slash sub-path expands to the root plus the sub-path itself. */
@Test
public void allSubPaths_whenSingleSubPathsWithTrailingSlash_returnsExpectedUrl() {
    assertThat(allSubPaths("http://localhost/a/"))
        .containsExactly(HttpUrl.parse("http://localhost/"), HttpUrl.parse("http://localhost/a/"));
}
/**
 * Adds a header entry after validating the name and value, returning {@code this}
 * for chaining.
 * NOTE(review): {@code checkNotNull(value, ...)} runs after {@code validateValue}
 * has already received {@code value} — presumably the validator tolerates null;
 * verify, otherwise the null check is dead.
 */
@Override
public T add(K name, V value) {
    validateName(nameValidator, true, name);
    validateValue(valueValidator, name, value);
    checkNotNull(value, "value");
    int h = hashingStrategy.hashCode(name);
    int i = index(h);
    add0(h, i, name, value);
    return thisT();
}
/**
 * Two header sets with the same name but different values must not be equal.
 * Bug fix: the original added BOTH values to {@code headers1}, leaving
 * {@code headers2} empty, so the inequality assertion passed vacuously and the
 * intended comparison was never exercised.
 */
@Test
public void headersWithSameNamesButDifferentValuesShouldNotBeEquivalent() {
    TestDefaultHeaders headers1 = newInstance();
    headers1.add(of("name1"), of("value1"));
    TestDefaultHeaders headers2 = newInstance();
    headers2.add(of("name1"), of("value2"));
    assertNotEquals(headers1, headers2);
}
/**
 * Looks up the singleton registered for a parameterized interface, keyed as
 * {@code interfaceName<typeName>}. Returns {@code null} when nothing is
 * registered; if multiple implementations were registered as an array, the
 * first one is returned.
 *
 * @param interfaceClass the generic interface the singleton implements
 * @param typeClass      the type argument used when the singleton was registered
 * @return the registered instance, or {@code null} if absent
 */
@SuppressWarnings("unchecked")
public static <T> T getBean(Class<T> interfaceClass, Class<?> typeClass) {
    // Class<?> instead of the previous raw Class; source- and binary-compatible for callers.
    Object object = serviceMap.get(interfaceClass.getName() + "<" + typeClass.getName() + ">");
    if (object == null) {
        return null;
    }
    if (object instanceof Object[]) {
        // Multiple implementations registered: return the first one.
        return (T) Array.get(object, 0);
    }
    return (T) object;
}
/** A builder-initialized singleton is retrievable and functional via the factory. */
@Test
public void testInitializerInterfaceWithBuilder() {
    ChannelMapping channelMapping = SingletonServiceFactory.getBean(ChannelMapping.class);
    Assert.assertNotNull(channelMapping);
    Assert.assertTrue(channelMapping.transform("ReplyTo").startsWith("aggregate-destination-"));
}
/**
 * Simulates a shadow-cache read: always counts the read, and counts a hit
 * (returning {@code bytesToRead}) only when the working-set filter may contain
 * the page. Returns 0 on a miss.
 */
@Override
public int get(PageId pageId, int bytesToRead, CacheScope cacheScope) {
    mShadowCachePageRead.getAndIncrement();
    mShadowCacheByteRead.getAndAdd(bytesToRead);
    if (mFilter.mightContainAndResetClock(pageId)) {
        mShadowCachePageHit.getAndIncrement();
        mShadowCacheByteHit.getAndAdd(bytesToRead);
        return bytesToRead;
    }
    return 0;
}
/** Reading a page never inserted into the shadow cache reports a miss (0 bytes). */
@Test
public void getNotExist() throws Exception {
    assertEquals(0, mCacheManager.get(PAGE_ID1, PAGE1_BYTES, SCOPE1));
}
/** Creates a validator for the given encoded JWT token string. */
public static JWTValidator of(String token) {
    return new JWTValidator(JWT.of(token));
}
/** A token verified with the wrong signing key must fail validation. */
@Test
public void validateTest() {
    String token = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJNb0xpIiwiZXhwIjoxNjI0OTU4MDk0NTI4LCJpYXQiOjE2MjQ5NTgwMzQ1MjAsInVzZXIiOiJ1c2VyIn0.L0uB38p9sZrivbmP0VlDe--j_11YUXTu3TfHhfQhRKc";
    byte[] key = "1234567890".getBytes();
    boolean validate = JWT.of(token).setKey(key).validate(0);
    assertFalse(validate);
}
/**
 * Resolves a display title for the given entity GRN.
 * <p>
 * Special "resolver" views are looked up through their registered
 * {@link ViewResolver} and returned only if the user may read them. All other
 * entities fall back to the catalog entry, preferring its title and using its
 * id when no title is set. Empty when nothing can be resolved.
 */
public Optional<String> retrieveTitle(final GRN itemGrn, final SearchUser searchUser) {
    if (isSpecialView(itemGrn)) {
        final ViewResolverDecoder decoder = new ViewResolverDecoder(itemGrn.entity());
        if (decoder.isResolverViewId()) {
            final ViewResolver viewResolver = viewResolvers.get(decoder.getResolverName());
            if (viewResolver != null) {
                final Optional<ViewDTO> view = viewResolver.get(decoder.getViewId());
                if (view.isPresent() && searchUser.canReadView(view.get())) {
                    return Optional.ofNullable(view.get().title());
                }
            }
        }
    }
    // Catalog fallback: prefer the entry's title, otherwise its id.
    return catalog.getEntry(itemGrn)
            .map(entry -> entry.title() != null ? entry.title() : entry.id());
}
/** When the catalog entry has no title, its id is used as the display title. */
@Test
void testReturnsIdIfTitleIsMissing() throws Exception {
    doReturn(Optional.of(new Catalog.Entry("id", null))).when(catalog).getEntry(any());
    assertEquals(Optional.of("id"), toTest.retrieveTitle(grn, searchUser));
}
/** Constant-folds TINYINT << BIGINT; the result is truncated to a byte, matching TINYINT semantics. */
@ConstantFunction(name = "bitShiftLeft", argTypes = {TINYINT, BIGINT}, returnType = TINYINT)
public static ConstantOperator bitShiftLeftTinyInt(ConstantOperator first, ConstantOperator second) {
    return ConstantOperator.createTinyInt((byte) (first.getTinyInt() << second.getBigint()));
}
/** 10 << 3 == 80, which still fits in a byte. */
@Test
public void bitShiftLeftTinyInt() {
    assertEquals(80, ScalarOperatorFunctions.bitShiftLeftTinyInt(O_TI_10, O_BI_3).getTinyInt());
}
/**
 * Returns the cell formatter registered for the given column, or the default
 * formatter when none was registered.
 *
 * @param columnId column identifier; must refer to an existing column
 * @throws IllegalArgumentException if the column id is unknown (via checkId)
 */
public CellFormatter getFormatter(String columnId) {
    checkId(columnId);
    final CellFormatter registered = formatters.get(columnId);
    return (registered != null) ? registered : DEF_FMT;
}
// Asking for a formatter of a column that is not in the model must throw.
@Test(expected = IllegalArgumentException.class) public void formatterBadColumn() { tm = new TableModel(FOO); fmt = tm.getFormatter(BAR); }
/**
 * Builds the KAFKA-format serde for a persistence schema.
 * The KAFKA format serializes a single primitive column; zero columns map to a
 * void serde, and more than one column is rejected up front.
 */
static Serde<List<?>> createSerde(final PersistenceSchema schema) {
    final List<SimpleColumn> columns = schema.columns();
    if (columns.isEmpty()) {
        // No columns: nothing to (de)serialize.
        return new KsqlVoidSerde<>();
    }
    if (columns.size() != 1) {
        throw new KsqlException("The '" + FormatFactory.KAFKA.name()
            + "' format only supports a single field. Got: " + columns);
    }
    final SimpleColumn singleColumn = columns.get(0);
    // Map the SQL type to its Java representation; unsupported SQL types are
    // rejected further down in createSerde(singleColumn, javaType).
    final Class<?> javaType = SchemaConverters.sqlToJavaConverter()
        .toJavaType(singleColumn.type());
    return createSerde(singleColumn, javaType);
}
// ARRAY is not a KAFKA-format primitive, so serde creation must fail clearly.
@Test
public void shouldThrowIfArray() {
    // Given:
    final PersistenceSchema schema = schemaWithFieldOfType(SqlTypes.array(SqlTypes.STRING));
    // When:
    final Exception e = assertThrows(
        KsqlException.class,
        () -> KafkaSerdeFactory.createSerde(schema)
    );
    // Then:
    assertThat(e.getMessage(), containsString("The 'KAFKA' format does not support type 'ARRAY'"));
}
/**
 * UDF: splits a JSON array string into a list of the serialized element
 * strings. Returns null for a null input (SQL NULL propagation).
 *
 * @param jsonItems a JSON array, e.g. {@code "[1, {\"a\":2}]"}
 * @return one string per array element, or null when the input is null
 */
@Udf
public List<String> items(@UdfParameter final String jsonItems) {
    if (jsonItems == null) {
        return null;
    }
    return UdfJsonMapper.readAsJsonArray(jsonItems).stream()
        .map(JsonNode::toString)
        .collect(Collectors.toList());
}
// "[]" has no elements, so the UDF must return an empty list, not null.
@Test public void shouldReturnEmptyListForEmptyArray() { assertEquals(Collections.emptyList(), udf.items("[]")); }
/**
 * Builds the default delete options from configuration.
 * Delegates to the two-arg overload with withDefaults = true.
 */
public static DeletePOptions deleteDefaults(AlluxioConfiguration conf) {
    return deleteDefaults(conf, true);
}
// Default delete options must exist and have all boolean flags off.
@Test
public void deleteOptionsDefaults() {
    DeletePOptions options = FileSystemOptionsUtils.deleteDefaults(mConf);
    assertNotNull(options);
    assertFalse(options.getRecursive());
    assertFalse(options.getAlluxioOnly());
    assertFalse(options.getUnchecked());
}
/**
 * Derives an annotated cluster state from the current node states and params.
 * Pipeline: per-node effective state -> group availability adjustment ->
 * cluster up/down decision -> distribution bit inference. Reasons for node and
 * cluster state changes are collected alongside for observability.
 */
static AnnotatedClusterState generatedStateFrom(final Params params) {
    final ContentCluster cluster = params.cluster;
    final ClusterState workingState = ClusterState.emptyState();
    final Map<Node, NodeStateReason> nodeStateReasons = new HashMap<>();
    // 1) Compute each node's effective state (also records per-node reasons).
    for (final NodeInfo nodeInfo : cluster.getNodeInfos()) {
        final NodeState nodeState = computeEffectiveNodeState(nodeInfo, params, nodeStateReasons);
        workingState.setNodeState(nodeInfo.getNode(), nodeState);
    }
    // 2) Whole groups with too little availability are taken down together.
    takeDownGroupsWithTooLowAvailability(workingState, nodeStateReasons, params);
    // 3) If the cluster as a whole no longer satisfies availability, mark it DOWN.
    final Optional<ClusterStateReason> reasonToBeDown = clusterDownReason(workingState, params);
    if (reasonToBeDown.isPresent()) {
        workingState.setClusterState(State.DOWN);
    }
    // 4) Distribution bits depend on the final node states computed above.
    workingState.setDistributionBits(inferDistributionBitCount(cluster, workingState, params));
    return new AnnotatedClusterState(workingState, reasonToBeDown, nodeStateReasons);
}
// With one node DOWN and one in MAINTENANCE, only 1 of 3 storage nodes is up,
// which is below minStorageNodesUp(2) — the whole cluster must go DOWN.
@Test
void maintenance_mode_counted_as_down_for_cluster_availability() {
    final ClusterFixture fixture = ClusterFixture.forFlatCluster(3)
        .bringEntireClusterUp()
        .reportStorageNodeState(0, State.DOWN)
        .proposeStorageNodeWantedState(2, State.MAINTENANCE);
    final ClusterStateGenerator.Params params = fixture.generatorParams().minStorageNodesUp(2);
    final AnnotatedClusterState state = ClusterStateGenerator.generatedStateFrom(params);
    assertThat(state.toString(), equalTo("cluster:d distributor:3 storage:3 .0.s:d .2.s:m"));
}
/**
 * Reads a boolean CLI option with tri-state semantics: null when the option
 * (or its value) is absent, otherwise the parsed boolean.
 *
 * @param argument option name to look up
 * @return Boolean value, or null when not supplied
 */
@SuppressFBWarnings(justification = "Accepting that this is a bad practice - used a Boolean as we needed three states", value = {"NP_BOOLEAN_RETURN_NULL"})
public Boolean getBooleanArgument(String argument) {
    // Guard clauses: no parsed command line, or the option was not given.
    if (line == null || !line.hasOption(argument)) {
        return null;
    }
    final String value = line.getOptionValue(argument);
    return (value == null) ? null : Boolean.parseBoolean(value);
}
// Covers the three states: missing option -> null, "false" -> false, "true" -> true.
// The initial parse is expected to fail on the missing scan file, but the
// parsed options remain queryable afterwards.
@Test
public void testGetBooleanArgument() throws ParseException {
    String[] args = {"--scan", "missing.file", "--artifactoryUseProxy", "false", "--artifactoryParallelAnalysis", "true", "--project", "test"};
    CliParser instance = new CliParser(getSettings());
    try {
        instance.parse(args);
        Assert.fail("invalid scan should have caused an error");
    } catch (FileNotFoundException ex) {
        Assert.assertTrue(ex.getMessage().contains("Invalid 'scan' argument"));
    }
    boolean expResult;
    Boolean result = instance.getBooleanArgument("missingArgument");
    Assert.assertNull(result);
    expResult = false;
    result = instance.getBooleanArgument(CliParser.ARGUMENT.ARTIFACTORY_USES_PROXY);
    Assert.assertEquals(expResult, result);
    expResult = true;
    result = instance.getBooleanArgument(CliParser.ARGUMENT.ARTIFACTORY_PARALLEL_ANALYSIS);
    Assert.assertEquals(expResult, result);
}
/**
 * Selects all registered vulnerability detectors and matches each against the
 * services in the reconnaissance report. A detector may match multiple
 * services; non-matching detectors yield empty Optionals and are dropped by
 * the flatMap.
 */
public ImmutableList<PluginMatchingResult<VulnDetector>> getVulnDetectors(
    ReconnaissanceReport reconnaissanceReport) {
    return tsunamiPlugins.entrySet().stream()
        .filter(entry -> isVulnDetector(entry.getKey()))
        .map(entry -> matchAllVulnDetectors(entry.getKey(), entry.getValue(), reconnaissanceReport))
        .flatMap(Streams::stream)
        .collect(toImmutableList());
}
// A service-name-filtered detector must match the named service ("http") and
// the unnamed service, but not the non-matching "https" service.
@Test
public void getVulnDetectors_whenServiceNameFilterHasMatchingService_returnsMatchedService() {
    NetworkService httpService =
        NetworkService.newBuilder()
            .setNetworkEndpoint(NetworkEndpointUtils.forIpAndPort("1.1.1.1", 80))
            .setTransportProtocol(TransportProtocol.TCP)
            .setServiceName("http")
            .build();
    NetworkService httpsService =
        NetworkService.newBuilder()
            .setNetworkEndpoint(NetworkEndpointUtils.forIpAndPort("1.1.1.1", 443))
            .setTransportProtocol(TransportProtocol.TCP)
            .setServiceName("https")
            .build();
    NetworkService noNameService =
        NetworkService.newBuilder()
            .setNetworkEndpoint(NetworkEndpointUtils.forIpAndPort("1.1.1.1", 12345))
            .setTransportProtocol(TransportProtocol.TCP)
            .build();
    ReconnaissanceReport fakeReconnaissanceReport =
        ReconnaissanceReport.newBuilder()
            .setTargetInfo(TargetInfo.getDefaultInstance())
            .addNetworkServices(httpService)
            .addNetworkServices(httpsService)
            .addNetworkServices(noNameService)
            .build();
    PluginManager pluginManager =
        Guice.createInjector(
                new FakePortScannerBootstrapModule(),
                new FakeServiceFingerprinterBootstrapModule(),
                FakeServiceNameFilteringDetector.getModule())
            .getInstance(PluginManager.class);
    ImmutableList<PluginMatchingResult<VulnDetector>> vulnDetectors =
        pluginManager.getVulnDetectors(fakeReconnaissanceReport);
    assertThat(vulnDetectors).hasSize(1);
    assertThat(vulnDetectors.get(0).tsunamiPlugin().getClass())
        .isEqualTo(FakeServiceNameFilteringDetector.class);
    assertThat(vulnDetectors.get(0).matchedServices()).containsExactly(httpService, noNameService);
}
/**
 * Maps a topic's cleanup.policy config to the TopicCleanupPolicy enum.
 * Exact "compact" or "delete" map directly; a combined policy (e.g.
 * "compact,delete") maps to COMPACT_DELETE; anything else is an error.
 *
 * @throws KsqlException when the policy cannot be determined
 */
@Override
public TopicCleanupPolicy getTopicCleanupPolicy(final String topicName) {
    final String policy = getTopicConfig(topicName)
        .getOrDefault(TopicConfig.CLEANUP_POLICY_CONFIG, "")
        .toLowerCase();
    final boolean hasCompact = policy.contains("compact");
    final boolean hasDelete = policy.contains("delete");
    if (policy.equals("compact")) {
        return TopicCleanupPolicy.COMPACT;
    }
    if (policy.equals("delete")) {
        return TopicCleanupPolicy.DELETE;
    }
    if (hasCompact && hasDelete) {
        return TopicCleanupPolicy.COMPACT_DELETE;
    }
    throw new KsqlException("Could not get the topic configs for : " + topicName);
}
// cleanup.policy=compact must map to TopicCleanupPolicy.COMPACT.
@Test
public void shouldGetTopicCleanUpPolicyCompact() {
    // Given:
    givenTopicConfigs(
        "foo",
        overriddenConfigEntry(CLEANUP_POLICY_CONFIG, CLEANUP_POLICY_COMPACT)
    );
    // When / Then:
    assertThat(kafkaTopicClient.getTopicCleanupPolicy("foo"), is(TopicCleanupPolicy.COMPACT));
}
/**
 * Compiles the given units under the default locking strategy and returns a
 * class loader holding the generated classes.
 */
public ClassLoader compile(CompileUnit... units) {
    // Delegates with a lock-acquiring callback so compilation is serialized.
    return compile(Arrays.asList(units), compileState -> compileState.lock.lock());
}
// Compiling a trivial class must load it under a fresh ByteArrayClassLoader,
// distinct from the test's own class loader.
@Test
public void testCompile() throws Exception {
    CodeGenerator codeGenerator = CodeGenerator.getSharedCodeGenerator(getClass().getClassLoader());
    CompileUnit unit1 = new CompileUnit(
        "demo.pkg1",
        "A",
        (""
            + "package demo.pkg1;\n"
            + "public class A {\n"
            + "    public static String hello() { return \"HELLO\"; }\n"
            + "}"));
    ClassLoader classLoader = codeGenerator.compile(unit1);
    Assert.assertEquals(classLoader.loadClass("demo.pkg1.A").getSimpleName(), "A");
    Assert.assertNotEquals(classLoader, getClass().getClassLoader());
    Assert.assertEquals(classLoader.getClass(), ClassLoaderUtils.ByteArrayClassLoader.class);
}
/**
 * Handles clientscript callbacks for the bank-tags feature.
 * The client passes arguments and expects results on its shared int/string
 * stacks, so the exact offsets written here are part of the clientscript
 * contract — do not reorder the stack reads/writes.
 */
@Subscribe
public void onScriptCallbackEvent(ScriptCallbackEvent event)
{
    String eventName = event.getEventName();

    int[] intStack = client.getIntStack();
    String[] stringStack = client.getStringStack();
    int intStackSize = client.getIntStackSize();
    int stringStackSize = client.getStringStackSize();

    switch (eventName)
    {
        case "setSearchBankInputText":
            // Replace the search prompt text (top of string stack).
            stringStack[stringStackSize - 1] = SEARCH_BANK_INPUT_TEXT;
            break;
        case "setSearchBankInputTextFound":
        {
            // Format the "N items found" prompt using the match count on the int stack.
            int matches = intStack[intStackSize - 1];
            stringStack[stringStackSize - 1] = String.format(SEARCH_BANK_INPUT_TEXT_FOUND, matches);
            break;
        }
        case "bankSearchFilter":
            final int itemId = intStack[intStackSize - 1];
            String searchfilter = stringStack[stringStackSize - 1];

            BankTag tag = activeTag;
            boolean tagSearch = true;
            // Shared storage uses ~bankmain_filteritem too. Allow using tag searches in it but don't
            // apply the tag search from the active tab.
            final boolean bankOpen = client.getItemContainer(InventoryID.BANK) != null;
            if (tag == null || !bankOpen)
            {
                if (searchfilter.isEmpty())
                {
                    return;
                }

                // "tag:" prefixed searches are treated as tag lookups; strip the prefix.
                tagSearch = searchfilter.startsWith(TAG_SEARCH);
                if (tagSearch)
                {
                    searchfilter = searchfilter.substring(TAG_SEARCH.length()).trim();
                }

                // Build a temporary BankTag using the search filter
                tag = buildSearchFilterBankTag(searchfilter);
            }

            if (itemId == -1 && tag.layout() != null)
            {
                // item -1 always passes on a laid out tab so items can be dragged to it
                return;
            }

            if (itemId > -1 && tag.contains(itemId))
            {
                // return true
                intStack[intStackSize - 2] = 1;
            }
            else if (tagSearch)
            {
                // if the item isn't tagged we return false to prevent the item matching if the item name happens
                // to contain the tag name.
                intStack[intStackSize - 2] = 0;
            }
            break;
        case "getSearchingTagTab":
            // 1 when a tag tab is active, else 0.
            intStack[intStackSize - 1] = activeTag != null ? 1 : 0;
            break;
        case "bankBuildTab":
            // Use the per-tab view when we want to hide the separators to avoid having to reposition items &
            // recomputing the scroll height.
            if (activeTag != null && (tabInterface.isTagTabActive() || config.removeSeparators() || activeTag.layout() != null))
            {
                var stack = client.getIntStack();
                var sz = client.getIntStackSize();
                stack[sz - 1] = 1; // use single tab view mode
            }
            break;
    }
}
// A plain (non "tag:") search for "whip" on a tagged item must write 1 (match)
// into the int stack result slot.
@Test
public void testNonExplicitSearch() {
    when(client.getIntStack()).thenReturn(new int[]{0, ABYSSAL_WHIP});
    when(client.getStringStack()).thenReturn(new String[]{"whip"});
    when(configManager.getConfiguration(BankTagsPlugin.CONFIG_GROUP, TagManager.ITEM_KEY_PREFIX + ABYSSAL_WHIP)).thenReturn("herb,bossing,whip long tag");
    bankTagsPlugin.onScriptCallbackEvent(EVENT);
    assertEquals(1, client.getIntStack()[0]);
}
/**
 * Determines the Pulsar schema for the sink's output type.
 * Returns Schema.BYTES when no type class is configured, null for Void output
 * (nothing to write), and otherwise resolves a schema from the configured
 * schema type or serde class. GenericRecord outputs are forced to AUTO_CONSUME
 * since other schema types cannot write GenericRecords.
 */
@SuppressWarnings("unchecked")
@VisibleForTesting
Schema<T> initializeSchema() throws ClassNotFoundException {
    if (StringUtils.isEmpty(this.pulsarSinkConfig.getTypeClassName())) {
        return (Schema<T>) Schema.BYTES;
    }

    Class<?> typeArg = Reflections.loadClass(this.pulsarSinkConfig.getTypeClassName(), functionClassLoader);
    if (Void.class.equals(typeArg)) {
        // return type is 'void', so there's no schema to check
        return null;
    }

    ConsumerConfig consumerConfig = new ConsumerConfig();
    consumerConfig.setSchemaProperties(pulsarSinkConfig.getSchemaProperties());
    if (!StringUtils.isEmpty(pulsarSinkConfig.getSchemaType())) {
        if (GenericRecord.class.isAssignableFrom(typeArg)) {
            // GenericRecord can only be written with AUTO_CONSUME; override and log.
            consumerConfig.setSchemaType(SchemaType.AUTO_CONSUME.toString());
            SchemaType configuredSchemaType = SchemaType.valueOf(pulsarSinkConfig.getSchemaType());
            if (SchemaType.AUTO_CONSUME != configuredSchemaType) {
                log.info("The configured schema type {} is not able to write GenericRecords."
                    + " So overwrite the schema type to be {}", configuredSchemaType, SchemaType.AUTO_CONSUME);
            }
        } else {
            consumerConfig.setSchemaType(pulsarSinkConfig.getSchemaType());
        }
        return (Schema<T>) topicSchema.getSchema(pulsarSinkConfig.getTopic(), typeArg, consumerConfig, false);
    } else {
        // No explicit schema type: fall back to the configured serde class.
        consumerConfig.setSchemaType(pulsarSinkConfig.getSerdeClassName());
        return (Schema<T>) topicSchema.getSchema(pulsarSinkConfig.getTopic(), typeArg, consumerConfig, false,
            functionClassLoader);
    }
}
// A Void output type means there is nothing to serialize: schema must be null.
@Test
public void testVoidOutputClasses() throws Exception {
    PulsarSinkConfig pulsarConfig = getPulsarConfigs();
    // set type to void
    pulsarConfig.setTypeClassName(Void.class.getName());
    PulsarSink pulsarSink = new PulsarSink(getPulsarClient(), pulsarConfig, new HashMap<>(),
        mock(ComponentStatsManager.class), Thread.currentThread().getContextClassLoader(), producerCache);

    try {
        Schema schema = pulsarSink.initializeSchema();
        assertNull(schema);
    } catch (Exception ex) {
        ex.printStackTrace();
        assertNull(ex);
        fail();
    }
}
/**
 * Fetches the CLIENT LIST output from the Redis node backing the given
 * cluster node and converts each raw line into a RedisClientInfo.
 *
 * @param node cluster node whose client connections are listed
 * @return parsed client info, one entry per connected client
 */
@Override
public List<RedisClientInfo> getClientList(RedisClusterNode node) {
    RedisClient entry = getEntry(node);
    RFuture<List<String>> f = executorService.readAsync(entry, StringCodec.INSTANCE, RedisCommands.CLIENT_LIST);
    List<String> list = syncFuture(f);
    // toArray(new String[0]) is the recommended idiom: simpler and at least as
    // fast on modern JVMs as presizing the array.
    return CONVERTER.convert(list.toArray(new String[0]));
}
// Smoke test: a live master should report more than 10 connected clients.
@Test
public void testGetClientList() {
    RedisClusterNode master = getFirstMaster();
    List<RedisClientInfo> list = connection.getClientList(master);
    assertThat(list.size()).isGreaterThan(10);
}
/**
 * Validates the required options and delegates field resolution to the
 * format-specific metadata resolver.
 *
 * @throws QueryException when format/path options are missing or no fields resolve
 */
List<MappingField> resolveAndValidateFields(List<MappingField> userFields, Map<String, ?> options) {
    requireOption(options, OPTION_FORMAT);
    requireOption(options, OPTION_PATH);

    List<MappingField> fields = findMetadataResolver(options).resolveAndValidateFields(userFields, options);
    if (fields.isEmpty()) {
        throw QueryException.error("The resolved field list is empty");
    }
    return fields;
}

// Fails fast when a mandatory option is absent.
private static void requireOption(Map<String, ?> options, String option) {
    if (options.get(option) == null) {
        throw QueryException.error("Missing '" + option + "' option");
    }
}
// With both required options present, the delegated resolver's fields must be
// returned unchanged.
@Test
public void test_resolveAndValidateFields() {
    // given
    Map<String, String> options = Map.of(OPTION_FORMAT, FORMAT, OPTION_PATH, "/path", OPTION_GLOB, "*");
    given(resolver.resolveAndValidateFields(emptyList(), options))
        .willReturn(singletonList(new MappingField("field", QueryDataType.VARCHAR)));
    // when
    List<MappingField> resolvedFields = resolvers.resolveAndValidateFields(emptyList(), options);
    // then
    assertThat(resolvedFields).containsOnly(new MappingField("field", QueryDataType.VARCHAR));
}
@Override
public void deleteAiVideoTemplate(Long id) {
    // Ensure the template exists (throws a service exception when it does not).
    validateAiVideoTemplateExists(id);
    // Delete the template row.
    aiVideoTemplateMapper.deleteById(id);
}
// Deleting a non-existent template must raise AI_VIDEO_TEMPLATE_NOT_EXISTS.
@Test
public void testDeleteAiVideoTemplate_notExists() {
    // Prepare a random id that does not exist.
    Long id = randomLongId();
    // Call and assert the expected service exception.
    assertServiceException(() -> aiVideoTemplateService.deleteAiVideoTemplate(id), AI_VIDEO_TEMPLATE_NOT_EXISTS);
}
/**
 * Compiles the pipeline config sources into a PipelineIR.
 * Each source is compiled to per-section graphs (input/filter/output), the
 * sections are grouped across sources, inputs/outputs are combined and the
 * filter graphs chained, and the original text is preserved for diagnostics.
 *
 * NOTE(review): groupedPipelineSections.get(...) returns null when a section
 * type is absent from every source (e.g. a config with no input block), which
 * would NPE on the .toArray()/.stream() calls below — confirm whether callers
 * guarantee all three sections are present.
 */
public static PipelineIR compileSources(List<SourceWithMetadata> sourcesWithMetadata, boolean supportEscapes, ConfigVariableExpander cve) throws InvalidIRException {
    Map<PluginDefinition.Type, List<Graph>> groupedPipelineSections = sourcesWithMetadata.stream()
        .map(swm -> compileGraph(swm, supportEscapes, cve))
        .flatMap(m -> m.entrySet().stream())
        .filter(e -> e.getValue() != null)
        .collect(groupingBy(Map.Entry::getKey, mapping(Map.Entry::getValue, toList())));

    Graph inputGraph = Graph.combine(groupedPipelineSections.get(PluginDefinition.Type.INPUT).toArray(new Graph[0])).graph;
    Graph outputGraph = Graph.combine(groupedPipelineSections.get(PluginDefinition.Type.OUTPUT).toArray(new Graph[0])).graph;

    // Filters are order-dependent, so they are chained rather than combined.
    Graph filterGraph = groupedPipelineSections.get(PluginDefinition.Type.FILTER).stream()
        .reduce(ConfigCompiler::chainWithUntypedException).orElse(null);

    String originalSource = sourcesWithMetadata.stream().map(SourceWithMetadata::getText).collect(Collectors.joining("\n"));

    return new PipelineIR(inputGraph, filterGraph, outputGraph, originalSource);
}
// A source that is entirely a comment must contribute no plugin vertices.
@Test
public void testCompileWithFullyCommentedSource() throws InvalidIRException {
    List<SourceWithMetadata> sourcesWithMetadata = Arrays.asList(
        new SourceWithMetadata("str", "in_plugin", 0, 0, "input { input_0 {} } "),
        new SourceWithMetadata("str", "commented_filter", 0, 0, "#filter{...}\n"),
        new SourceWithMetadata("str", "out_plugin", 0, 0, "output { output_0 {} } ")
    );
    PipelineIR pipeline = ConfigCompiler.compileSources(sourcesWithMetadata, false, null);
    assertEquals("should compile only non commented text parts", 2L, pipeline.pluginVertices().count());
}
/**
 * Fetches the file at the given URL, honoring conditional-request semantics.
 * Delegates to fetchFile with ifNotModified = true.
 */
public Optional<String> fetchFileIfNotModified(String url) throws IOException {
    return fetchFile(url, true);
}
// A 200 response must yield a present Optional containing the body.
@Test
public void successfulRetrieve() throws Exception {
    this.server.enqueue(new MockResponse()
        .setResponseCode(200)
        .setBody("foobar"));
    server.start();

    final HTTPFileRetriever httpFileRetriever = new HTTPFileRetriever(new OkHttpClient());
    final Optional<String> body = httpFileRetriever.fetchFileIfNotModified(server.url("/").toString());

    final RecordedRequest request = server.takeRequest();
    assertThat(request).isNotNull();
    assertThat(request.getPath()).isEqualTo("/");

    assertThat(body).isNotNull()
        .isPresent()
        .contains("foobar");
}
/**
 * Returns a view of the array under test whose elements are compared using
 * the given absolute tolerance instead of exact equality.
 */
public DoubleArrayAsIterable usingTolerance(double tolerance) {
    return new DoubleArrayAsIterable(tolerance(tolerance), iterableSubject());
}
// Values within the tolerance must be accepted by containsExactly, in order.
@Test
public void usingTolerance_containsExactly_primitiveDoubleArray_inOrder_success() {
    assertThat(array(1.1, TOLERABLE_2POINT2, 3.3))
        .usingTolerance(DEFAULT_TOLERANCE)
        .containsExactly(array(1.1, 2.2, 3.3))
        .inOrder();
}
/**
 * Handles the app activation request: validates the decrypted PIN and app id,
 * stores the PIN, checks the app switch, and either finishes registration
 * (for registration flows) or activates the app directly.
 */
@Override
public AppResponse process(Flow flow, ActivateAppRequest body) {
    // Decrypt the masked PIN with the session IV and the authenticator's symmetric key.
    String decodedPin = ChallengeService.decodeMaskedPin(appSession.getIv(), appAuthenticator.getSymmetricKey(), body.getMaskedPincode());

    // Reject anything that is not exactly 5 digits.
    if ((decodedPin == null || !Pattern.compile("\\d{5}").matcher(decodedPin).matches())) {
        return flow.setFailedStateAndReturnNOK(appSession);
    } else if (!appAuthenticator.getUserAppId().equals(body.getUserAppId())) {
        // App id mismatch: log remotely (code 754) and fail the flow.
        digidClient.remoteLog("754", Map.of(lowerUnderscore(ACCOUNT_ID), appAuthenticator.getAccountId()));
        return flow.setFailedStateAndReturnNOK(appSession);
    }

    appAuthenticator.setMaskedPin(decodedPin);
    appAuthenticator.setLastSignInAt(ZonedDateTime.now());

    // The digid-app switch must be on; otherwise log (824) and abort.
    if (!switchService.digidAppSwitchEnabled()) {
        digidClient.remoteLog("824", Map.of(lowerUnderscore(ACCOUNT_ID), appAuthenticator.getAccountId()));
        throw new SwitchDisabledException();
    }

    if (flow instanceof RequestAccountAndAppFlow || flow instanceof ActivateAppWithPasswordLetterFlow) {
        // Registration flows: finish registration remotely and, when pending,
        // store the activation code + validity period on the authenticator.
        Map<String, String> result = digidClient.finishRegistration(appSession.getRegistrationId(), appSession.getAccountId(), flow.getName());

        if (result.get(lowerUnderscore(STATUS)).equals("PENDING") && result.get(lowerUnderscore(ACTIVATION_CODE)) != null && result.get(lowerUnderscore(GELDIGHEIDSTERMIJN)) != null) {
            appAuthenticator.setStatus("pending");
            appAuthenticator.setActivationCode(result.get(lowerUnderscore(ACTIVATION_CODE)));
            appAuthenticator.setGeldigheidstermijn(result.get(lowerUnderscore(GELDIGHEIDSTERMIJN)));
            appAuthenticator.setRequestedAt(ZonedDateTime.now());

            return new StatusResponse("PENDING");
        } else {
            return new NokResponse();
        }
    } else {
        // All other flows activate the app immediately.
        return ((ActivationFlow) flow).activateApp(appAuthenticator, appSession);
    }
}
// When the digid-app switch is off, processing must log code 824 and throw.
@Test
void processDigidAppNotEnabled() {
    when(mockedFlow.activateApp(eq(mockedAppAuthenticator), any(AppSession.class))).thenReturn(mockedActivateAppResponse);
    when(mockedFlow.setFailedStateAndReturnNOK(any(AppSession.class))).thenReturn(new NokResponse());
    assertThrows(SwitchDisabledException.class, () -> pincodeSet.process(mockedFlow, mockedActivateAppRequest));
    verify(digidClientMock, times(1)).remoteLog("824", Map.of(lowerUnderscore(ACCOUNT_ID), mockedAppAuthenticator.getAccountId()));
}
/**
 * Replaces the cluster member list with the posted nodes.
 *
 * @param nodes new member list; must be non-null and non-empty
 * @throws NacosApiException when the required 'nodes' body is missing or empty
 */
@PutMapping(value = "/node/list")
@Secured(action = ActionTypes.WRITE, resource = "nacos/admin", signType = SignType.CONSOLE)
public Result<Boolean> updateNodes(@RequestBody List<Member> nodes) throws NacosApiException {
    // isEmpty() is the idiomatic emptiness check (clearer than size() == 0).
    if (nodes == null || nodes.isEmpty()) {
        throw new NacosApiException(HttpStatus.BAD_REQUEST.value(), ErrorCode.PARAMETER_MISSING,
            "required parameter 'nodes' is missing");
    }
    return Result.success(nacosClusterOperationService.updateNodes(nodes));
}
// A valid single-member update must delegate to the operation service and succeed.
@Test
void testUpdate() throws NacosApiException {
    Member member = new Member();
    member.setIp("1.1.1.1");
    member.setPort(8848);
    member.setAddress("test");
    when(nacosClusterOperationService.updateNodes(any())).thenReturn(true);
    Result<Boolean> result = nacosClusterControllerV2.updateNodes(Collections.singletonList(member));
    verify(nacosClusterOperationService).updateNodes(any());
    assertEquals(ErrorCode.SUCCESS.getCode(), result.getCode());
    assertTrue(result.getData());
}
/**
 * Populates the boot argument map: loads the agent config file, then merges
 * the not-null entries, the normal entries, and the agent-path-derived entries.
 * Existing entries in argsMap take part in the merge rules of each helper.
 */
public static void build(Map<String, Object> argsMap, String agentPath) {
    final Properties configMap = loadConfig(agentPath);
    addNotNullEntries(argsMap, configMap);
    addNormalEntries(argsMap, configMap);
    addPathEntries(argsMap, agentPath);
}
// Command-line agent args must survive the config merge unchanged.
@Test
public void testParseArgs() {
    String agentArgs = "appName=test,command=INSTALL_PLUGIN:monitor/flowcontrol,server.port=9000";
    Map<String, String> agentArgsMap = AgentArgsResolver.resolveAgentArgs(agentArgs);
    Map<String, Object> bootArgsMap = new HashMap<>(agentArgsMap);
    BootArgsBuilder.build(bootArgsMap, PathDeclarer.getAgentPath());
    Assert.assertEquals("test", bootArgsMap.get("appName"));
    Assert.assertEquals("9000", bootArgsMap.get("server.port"));
    Assert.assertEquals("INSTALL_PLUGIN:monitor/flowcontrol", bootArgsMap.get("command"));
}
/**
 * Compares two "x.y.z" (optionally "x.y.z-beta") version strings, segment by
 * segment; a "-suffix" on the patch segment is ignored.
 *
 * NOTE(review): segments are compared with STRING_COMPARATOR, which is defined
 * elsewhere — if it compares lexicographically, multi-digit segments order
 * incorrectly (e.g. "1.10.0" < "1.9.0"). Confirm it performs numeric comparison.
 *
 * @return negative/zero/positive when versionA is less/equal/greater
 * @throws IllegalArgumentException when either version is not three dot-separated segments
 */
public static int compareVersion(final String versionA, final String versionB) {
    final String[] sA = versionA.split("\\.");
    final String[] sB = versionB.split("\\.");
    int expectSize = 3;
    if (sA.length != expectSize || sB.length != expectSize) {
        throw new IllegalArgumentException("version must be like x.y.z(-beta)");
    }
    int first = Objects.compare(sA[0], sB[0], STRING_COMPARATOR);
    if (first != 0) {
        return first;
    }
    int second = Objects.compare(sA[1], sB[1], STRING_COMPARATOR);
    if (second != 0) {
        return second;
    }
    // Strip any "-beta"-style suffix from the patch segment before comparing.
    return Objects.compare(sA[2].split("-")[0], sB[2].split("-")[0], STRING_COMPARATOR);
}
// Lower versions must compare strictly less across patch, major and minor positions.
@Test
void testVersionCompareLt() {
    assertTrue(VersionUtils.compareVersion("1.2.0", "1.2.1") < 0);
    assertTrue(VersionUtils.compareVersion("0.2.0", "1.2.0") < 0);
    assertTrue(VersionUtils.compareVersion("1.2.0", "1.3.0") < 0);
}
/**
 * Serializes plugin role configs to the JSON response body: an array of
 * objects with "name" and "configuration" keys, in input order.
 */
@Override
public String getProcessRoleConfigsResponseBody(List<PluginRoleConfig> roles) {
    // Fix: the original used the raw type List<Map>; parameterize it and
    // presize the list. LinkedHashMap keeps "name" before "configuration".
    final List<Map<String, Object>> list = new ArrayList<>(roles.size());
    for (PluginRoleConfig role : roles) {
        final Map<String, Object> e = new LinkedHashMap<>();
        e.put("name", role.getName().toString());
        e.put("configuration", role.getConfigurationAsMap(true));
        list.add(e);
    }
    return GSON.toJson(list);
}
// One role must serialize to a one-element JSON array with name + configuration.
@Test
void getProcessRoleConfigsResponseBody() {
    String json = converter.getProcessRoleConfigsResponseBody(List.of(new PluginRoleConfig("blackbird", "ldap", create("foo", false, "bar"))));
    assertThatJson("[{\"name\":\"blackbird\",\"configuration\":{\"foo\":\"bar\"}}]").isEqualTo(json);
}
/**
 * Returns the config history entry that precedes the given history id.
 * The namespace is normalized (e.g. "public" maps to the internal empty
 * namespace, see issue #9783) before the lookup; a missing row surfaces as a
 * 404-style NacosApiException rather than a raw DataAccessException.
 */
@GetMapping(value = "/previous")
@Secured(action = ActionTypes.READ, signType = SignType.CONFIG)
public Result<ConfigHistoryInfo> getPreviousConfigHistoryInfo(@RequestParam("dataId") String dataId,
        @RequestParam("group") String group,
        @RequestParam(value = "namespaceId", required = false, defaultValue = StringUtils.EMPTY) String namespaceId,
        @RequestParam("id") Long id) throws AccessException, NacosApiException {
    ConfigHistoryInfo configHistoryInfo;
    try {
        //fix issue #9783.
        namespaceId = NamespaceUtil.processNamespaceParameter(namespaceId);
        configHistoryInfo = historyService.getPreviousConfigHistoryInfo(dataId, group, namespaceId, id);
    } catch (DataAccessException e) {
        throw new NacosApiException(HttpStatus.NOT_FOUND.value(), ErrorCode.RESOURCE_NOT_FOUND,
                "previous config history for id = " + id + " not exist");
    }
    return Result.success(configHistoryInfo);
}
// The "public" namespace must be normalized before the service lookup, and
// the returned history entry must round-trip unchanged.
@Test
void testGetPreviousConfigHistoryInfoWhenNameSpaceIsPublic() throws Exception {
    ConfigHistoryInfo configHistoryInfo = new ConfigHistoryInfo();
    configHistoryInfo.setDataId(TEST_DATA_ID);
    configHistoryInfo.setGroup(TEST_GROUP);
    configHistoryInfo.setContent(TEST_CONTENT);
    configHistoryInfo.setTenant(TEST_NAMESPACE_ID);
    configHistoryInfo.setCreatedTime(new Timestamp(new Date().getTime()));
    configHistoryInfo.setLastModifiedTime(new Timestamp(new Date().getTime()));
    when(historyService.getPreviousConfigHistoryInfo(TEST_DATA_ID, TEST_GROUP, TEST_NAMESPACE_ID, 1L)).thenReturn(configHistoryInfo);
    Result<ConfigHistoryInfo> result = historyControllerV2.getPreviousConfigHistoryInfo(TEST_DATA_ID, TEST_GROUP, TEST_NAMESPACE_ID_PUBLIC, 1L);
    verify(historyService).getPreviousConfigHistoryInfo(TEST_DATA_ID, TEST_GROUP, TEST_NAMESPACE_ID, 1L);
    ConfigHistoryInfo resConfigHistoryInfo = result.getData();
    assertEquals(ErrorCode.SUCCESS.getCode(), result.getCode());
    assertEquals(configHistoryInfo.getDataId(), resConfigHistoryInfo.getDataId());
    assertEquals(configHistoryInfo.getGroup(), resConfigHistoryInfo.getGroup());
    assertEquals(configHistoryInfo.getContent(), resConfigHistoryInfo.getContent());
}
/**
 * Writes the dotted OID string as a DER-encoded OBJECT IDENTIFIER to the
 * output stream. Encoding is delegated to Asn1Utils.
 */
@Override
public void serialize(Asn1OutputStream out, String oid) throws IOException {
    Asn1Utils.encodeObjectIdentifier(oid, out);
}
// "1.2.3" DER-encodes to 0x2a (40*1+2) followed by 0x03.
@Test
public void shouldSerialize() {
    assertArrayEquals(
        new byte[] { 0x2a, 0x03 },
        serialize(new ObjectIdentifierConverter(), String.class, "1.2.3")
    );
}
protected static boolean isBeanPropertyWriteMethod(Method method) { return method != null && Modifier.isPublic(method.getModifiers()) && !Modifier.isStatic(method.getModifiers()) && method.getDeclaringClass() != Object.class && method.getParameterTypes().length == 1 && method.getName().startsWith("set") // 排除就叫set的方法 && !"set".equals(method.getName()); }
// Exercises every rejection rule: null, bare "set", non-public, static,
// two-arg, and non-"set"-prefixed methods; only setS(int) qualifies.
@Test
public void testIsBeanPropertyWriteMethod() throws Exception {
    Assert.assertFalse(isBeanPropertyWriteMethod(null));
    Assert.assertTrue(isBeanPropertyWriteMethod(TestReflect.class.getMethod("setS", int.class)));
    Assert.assertFalse(isBeanPropertyWriteMethod(TestReflect.class.getMethod("set", int.class)));
    Assert.assertFalse(isBeanPropertyWriteMethod(TestReflect.class.getDeclaredMethod("set1", int.class)));
    Assert.assertFalse(isBeanPropertyWriteMethod(TestReflect.class.getMethod("set2", int.class)));
    Assert.assertFalse(isBeanPropertyWriteMethod(TestReflect.class.getMethod("set3", int.class, int.class)));
    Assert.assertFalse(isBeanPropertyWriteMethod(TestReflect.class.getMethod("aset4", int.class)));
}
/**
 * Convenience overload: performs the HTTP request with no session key and no
 * request-signing algorithm (both passed as null to the full overload).
 */
public <T> HttpResponse<T> httpRequest(String url, String method, HttpHeaders headers, Object requestBodyData,
                                       TypeReference<T> responseFormat) {
    return httpRequest(url, method, headers, requestBodyData, responseFormat, null, null);
}
// A transport-level failure during the request must surface as a 500-style
// ConnectRestException.
@Test
public void testIOExceptionCausesInternalServerError() throws Exception {
    Request req = mock(Request.class);
    ContentResponse resp = mock(ContentResponse.class);
    setupHttpClient(201, req, resp);

    ConnectRestException e = assertThrows(ConnectRestException.class, () -> httpRequest(
        httpClient, MOCK_URL, TEST_METHOD, TEST_TYPE, TEST_SIGNATURE_ALGORITHM
    ));
    assertIsInternalServerError(e);
}
@Override // mappedStatementId 参数,暂时没有用。以后,可以基于 mappedStatementId + DataPermission 进行缓存 public List<DataPermissionRule> getDataPermissionRule(String mappedStatementId) { // 1. 无数据权限 if (CollUtil.isEmpty(rules)) { return Collections.emptyList(); } // 2. 未配置,则默认开启 DataPermission dataPermission = DataPermissionContextHolder.get(); if (dataPermission == null) { return rules; } // 3. 已配置,但禁用 if (!dataPermission.enable()) { return Collections.emptyList(); } // 4. 已配置,只选择部分规则 if (ArrayUtil.isNotEmpty(dataPermission.includeRules())) { return rules.stream().filter(rule -> ArrayUtil.contains(dataPermission.includeRules(), rule.getClass())) .collect(Collectors.toList()); // 一般规则不会太多,所以不采用 HashSet 查询 } // 5. 已配置,只排除部分规则 if (ArrayUtil.isNotEmpty(dataPermission.excludeRules())) { return rules.stream().filter(rule -> !ArrayUtil.contains(dataPermission.excludeRules(), rule.getClass())) .collect(Collectors.toList()); // 一般规则不会太多,所以不采用 HashSet 查询 } // 6. 已配置,全部规则 return rules; }
// Annotation present with no include/exclude filters: the full rule list
// object itself must be returned (same instance).
@Test
public void testGetDataPermissionRule_06() {
    // Prepare parameters.
    String mappedStatementId = randomString();
    // Mock: put the annotation from TestClass06 into the context.
    DataPermissionContextHolder.add(AnnotationUtils.findAnnotation(TestClass06.class, DataPermission.class));
    // Call.
    List<DataPermissionRule> result = dataPermissionRuleFactory.getDataPermissionRule(mappedStatementId);
    // Assert same instance, not just equal contents.
    assertSame(rules, result);
}
/**
 * Resolves DNS records for the given name/type with a 1.5 s timeout.
 * The lookup runs on a dedicated single-thread executor so a hung resolver
 * cannot block the caller; any failure is logged and null is returned.
 *
 * @return the records, or null when the lookup failed or timed out
 */
protected Record[] getRecords(Name name, int type) {
    Record[] result = null;
    ExecutorService executor = Executors.newSingleThreadExecutor();
    Future<Record[]> future = executor.submit(new LookupTask(name, type));
    try {
        result = future.get(1500, TimeUnit.MILLISECONDS);
        return result;
    } catch (InterruptedException e) {
        // Fix: restore the interrupt flag so callers can observe the interruption
        // (previously it was swallowed along with the other failures).
        Thread.currentThread().interrupt();
        LOG.warn("Failed to lookup: {} type: {}", name, Type.string(type), e);
        return result;
    } catch (ExecutionException | TimeoutException | NullPointerException | ExceptionInInitializerError e) {
        // NOTE(review): catching NPE/ExceptionInInitializerError looks like a
        // defensive guard around the DNS library — confirm it is intentional.
        LOG.warn("Failed to lookup: {} type: {}", name, Type.string(type), e);
        return result;
    } finally {
        executor.shutdown();
    }
}
// With a faulty upstream, the lookup must fail gracefully (null) and within
// the 5 s test timeout (internal lookup timeout is 1.5 s).
@Test(timeout=5000)
public void testUpstreamFault() throws Exception {
    Name name = Name.fromString("19.0.17.172.in-addr.arpa.");
    Record[] recs = getRegistryDNS().getRecords(name, Type.CNAME);
    assertNull("Record is not null", recs);
}
/**
 * Returns the registry path of the process configuration node for a job type:
 * {@code <metaDataRoot>/process_config}.
 */
public static String getMetaDataProcessConfigPath(final String jobType) {
    return getMetaDataRootPath(jobType) + "/" + "process_config";
}
// The process-config path must be the metadata root plus "/process_config".
@Test
void assertGetMetaDataProcessConfigPath() {
    assertThat(PipelineMetaDataNode.getMetaDataProcessConfigPath("FIXTURE"), is(migrationMetaDataRootPath + "/process_config"));
}
/**
 * Creates an access-log entry with a printf-style format and its arguments.
 * The format must be non-null; args may be empty.
 */
private AccessLog(String logFormat, Object... args) {
    Objects.requireNonNull(logFormat, "logFormat");
    this.logFormat = logFormat;
    this.args = args;
}
// With a custom access-log factory installed, the access-log handler must be
// present on the pipeline and the custom format must be used.
@Test
void accessLogCustomFormat() {
    disposableServer =
        createServer()
            .handle((req, resp) -> {
                resp.withConnection(conn -> {
                    ChannelHandler handler = conn.channel().pipeline().get(NettyPipeline.AccessLogHandler);
                    resp.header(ACCESS_LOG_HANDLER, handler != null ? FOUND : NOT_FOUND);
                });
                return resp.send();
            })
            .accessLog(true, CUSTOM_ACCESS_LOG)
            .bindNow();

    Tuple2<String, String> response = getHttpClientResponse(URI_1);

    assertAccessLogging(response, true, false, CUSTOM_FORMAT);
}
/**
 * Concatenates any number of arrays of the same component type into one new
 * array, preserving element order. Empty inputs contribute nothing.
 *
 * @param type   component type of the result array
 * @param arrays arrays to merge, in order
 * @return a freshly allocated array holding every element of every input
 */
@SuppressWarnings("unchecked")
public static <T> T[] concat(Class<T> type, T[]... arrays) {
    int total = 0;
    for (T[] array : arrays) {
        total += array.length;
    }
    final T[] merged = (T[]) Array.newInstance(type, total);
    int offset = 0;
    for (T[] array : arrays) {
        // arraycopy with length 0 is a no-op, so no special-casing needed.
        System.arraycopy(array, 0, merged, offset, array.length);
        offset += array.length;
    }
    return merged;
}
// An empty leading array must not affect the concatenated result.
@Test
public void testConcatWithEmptyArrays() {
    String[] array1 = {};
    String[] array2 = {"a", "b"};
    String[] result = ArrayUtil.concat(String.class, array1, array2);
    assertThat(result).isEqualTo(new String[] {"a", "b"});
}
/**
 * Pretty-prints an AST back into SQL text, stripping any trailing newlines
 * the formatter leaves behind.
 */
public static String formatSql(final AstNode root) {
    final StringBuilder builder = new StringBuilder();
    new Formatter(builder).process(root, 0);
    return StringUtils.stripEnd(builder.toString(), "\n");
}
// INSERT INTO ... PARTITION BY must round-trip through the formatter with the
// PARTITION BY clause and implicit EMIT CHANGES preserved.
@Test
public void shouldFormatInsertIntoPartitionBy() {
    final String statementString = "INSERT INTO ADDRESS SELECT * FROM ADDRESS PARTITION BY ADDRESS;";
    final Statement statement = parseSingle(statementString);
    final String result = SqlFormatter.formatSql(statement);
    assertThat(result, startsWith("INSERT INTO ADDRESS SELECT *\n"
        + "FROM ADDRESS ADDRESS\n"
        + "PARTITION BY ADDRESS\n"
        + "EMIT CHANGES"
    ));
}
/**
 * Returns previously reserved memory to the budget via a CAS loop.
 * The loop retries until the add succeeds; it aborts (and throws) if crediting
 * the bytes would push the available size above the total budget, i.e. the
 * caller is releasing more than was ever reserved.
 *
 * @param size bytes to release; 0 is a no-op
 * @throws IllegalStateException when releasing more than was allocated
 */
void releaseMemory(@Nonnegative long size) {
    if (size == 0) {
        return;
    }
    boolean released = false;
    long currentAvailableMemorySize = 0L;
    // CAS loop: re-read availableMemorySize each iteration; the guard keeps
    // available + size from exceeding totalMemorySize.
    while (!released
            && totalMemorySize >= (currentAvailableMemorySize = availableMemorySize.get()) + size) {
        released =
                availableMemorySize.compareAndSet(
                        currentAvailableMemorySize, currentAvailableMemorySize + size);
    }
    if (!released) {
        throw new IllegalStateException(
                String.format(
                        "Trying to release more managed memory (%d bytes) than has been allocated (%d bytes), the total size is %d bytes",
                        size, currentAvailableMemorySize, totalMemorySize));
    }
}
// Reserving 50 then releasing 30 from a 100-byte budget leaves 80 available.
@Test
void testReleaseMemory() throws MemoryReservationException {
    UnsafeMemoryBudget budget = createUnsafeMemoryBudget();
    budget.reserveMemory(50L);
    budget.releaseMemory(30L);
    assertThat(budget.getAvailableMemorySize()).isEqualTo(80L);
}
/**
 * Converts predicate operators into per-column partition filters.
 * Delegates to the two-arg overload with no table context.
 */
public static Map<String, PartitionColumnFilter> convertColumnFilter(List<ScalarOperator> predicates) {
    return convertColumnFilter(predicates, null);
}
// Only column-vs-constant predicates convert: EQ, IN and IS NULL produce
// filters; constant-only and column-vs-column predicates are dropped.
@Test
public void convertColumnFilterNormal() {
    ScalarOperator root1 = new BinaryPredicateOperator(BinaryType.EQ,
        new ColumnRefOperator(1, Type.INT, "age", true),
        ConstantOperator.createInt(1));
    ScalarOperator root2 = new InPredicateOperator(new ColumnRefOperator(2, Type.INT, "name", true),
        ConstantOperator.createVarchar("1"),
        ConstantOperator.createVarchar("2"),
        ConstantOperator.createVarchar("3"),
        ConstantOperator.createVarchar("4"));
    ScalarOperator root3 = new IsNullPredicateOperator(new ColumnRefOperator(3, Type.BOOLEAN, "sex", true));
    ScalarOperator root4 = ConstantOperator.createBoolean(true);
    ScalarOperator root5 = new BinaryPredicateOperator(BinaryType.EQ,
        ConstantOperator.createInt(2),
        ConstantOperator.createInt(1));
    ScalarOperator root6 = new BinaryPredicateOperator(BinaryType.EQ,
        new ColumnRefOperator(4, Type.INT, "value1", true),
        new ColumnRefOperator(5, Type.INT, "value2", true));

    List<ScalarOperator> list = Lists.newArrayList(root1, root2, root3, root4, root5, root6);

    Map<String, PartitionColumnFilter> result = ColumnFilterConverter.convertColumnFilter(list);

    assertEquals(3, result.size());

    assertTrue(result.containsKey("age"));
    assertTrue(result.containsKey("name"));
    assertTrue(result.containsKey("sex"));

    assertEquals(new IntLiteral(1), result.get("age").getLowerBound());
    assertEquals(new IntLiteral(1), result.get("age").getUpperBound());

    assertEquals(4, result.get("name").getInPredicateLiterals().size());
    assertEquals(new StringLiteral("1"), result.get("name").getInPredicateLiterals().get(0));
    assertEquals(new StringLiteral("2"), result.get("name").getInPredicateLiterals().get(1));
    assertEquals(new StringLiteral("3"), result.get("name").getInPredicateLiterals().get(2));
    assertEquals(new StringLiteral("4"), result.get("name").getInPredicateLiterals().get(3));

    assertEquals(new NullLiteral(), result.get("sex").getLowerBound());
    assertEquals(new NullLiteral(), result.get("sex").getUpperBound());
}
/**
 * Reconstructs an {@link HgMaterial} configuration object from this persisted
 * material instance, applying the caller-supplied name, destination folder and
 * password on top of the stored url, id, username and branch.
 *
 * @param name     display name to set on the material (via {@code setName})
 * @param folder   destination folder for the material
 * @param password password to set on the material
 * @return the populated {@link HgMaterial}
 */
@Override
public Material toOldMaterial(String name, String folder, String password) {
    final HgMaterial material = new HgMaterial(url, folder);
    setName(name, material);
    material.setId(id);
    material.setUserName(username);
    material.setPassword(password);
    material.setBranch(branch);
    return material;
}
@Test
void shouldCreateMaterialFromMaterialInstance() {
    // Persisted instance carrying url, username, branch and flyweight name.
    final HgMaterialInstance materialInstance = new HgMaterialInstance("https://example.com", "bob", "feature", "some-flyweight");
    materialInstance.setId(100L);
    // Convert back to a config material, supplying name, folder and password.
    final HgMaterial material = (HgMaterial) materialInstance.toOldMaterial("example", "destination", "pass");
    // Every stored and supplied attribute must round-trip onto the material.
    assertThat(material.getName()).isEqualTo(new CaseInsensitiveString("example"));
    assertThat(material.getUrl()).isEqualTo("https://example.com");
    assertThat(material.getUserName()).isEqualTo("bob");
    assertThat(material.getPassword()).isEqualTo("pass");
    assertThat(material.getBranch()).isEqualTo("feature");
    assertThat(material.getFolder()).isEqualTo("destination");
    assertThat(material.getId()).isEqualTo(materialInstance.getId());
}
/**
 * Deletes the content pack whose id AND revision both match.
 *
 * @param id       model id of the content pack
 * @param revision revision number that must also match
 * @return number of documents removed (0 when no pack matched)
 */
public int deleteByIdAndRevision(ModelId id, int revision) {
    final DBQuery.Query byIdAndRevision =
        DBQuery.is(Identified.FIELD_META_ID, id).is(Revisioned.FIELD_META_REVISION, revision);
    final WriteResult<ContentPack, ObjectId> result = dbCollection.remove(byIdAndRevision);
    return result.getN();
}
@Test
@MongoDBFixtures("ContentPackPersistenceServiceTest.json")
public void deleteByIdAndRevision() {
    // Delete one specific (id, revision) pair from the fixture data.
    final int deletedContentPacks = contentPackPersistenceService.deleteByIdAndRevision(ModelId.of("dcd74ede-6832-4ef7-9f69-deadbeef0000"), 2);
    final Set<ContentPack> contentPacks = contentPackPersistenceService.loadAll();
    // Exactly one document removed ...
    assertThat(deletedContentPacks).isEqualTo(1);
    // ... and the remaining packs no longer contain that id/revision combination.
    assertThat(contentPacks)
            .hasSize(4)
            .noneMatch(contentPack -> contentPack.id().equals(ModelId.of("dcd74ede-6832-4ef7-9f69-deadbeef0000")) && contentPack.revision() == 2);
}
/**
 * Starts the headless (standalone) server: loads UDFs, ensures the processing
 * log topic exists, applies the RocksDB config setter, executes the queries
 * file, prints the welcome message and kicks off the version checker.
 *
 * <p>Any failure is logged with the queries file path and rethrown.
 */
public void startAsync() {
    try {
        // Order matters: UDFs must be available before the queries file is processed.
        udfLoader.load();
        ProcessingLogServerUtils.maybeCreateProcessingLogTopic(
            serviceContext.getTopicClient(), processingLogConfig, ksqlConfig);
        // Auto-creating the processing log stream is a non-headless feature; warn only.
        if (processingLogConfig.getBoolean(ProcessingLogConfig.STREAM_AUTO_CREATE)) {
            log.warn("processing log auto-create is enabled, but this is not supported "
                + "for headless mode.");
        }
        rocksDBConfigSetterHandler.accept(ksqlConfig);
        // Parse and execute every statement in the supplied queries file.
        processesQueryFile(readQueriesFile(queriesFile));
        showWelcomeMessage();
        // Copy non-null config values into Properties for the version checker.
        final Properties properties = new Properties();
        ksqlConfig.originals().forEach((key, value) -> {
            if (nonNull(value)) {
                properties.put(key, value.toString());
            }
        });
        versionChecker.start(KsqlModuleType.SERVER, properties);
    } catch (final Exception e) {
        // Log with context before propagating so operators can see which file failed.
        log.error("Failed to start KSQL Server with query file: " + queriesFile, e);
        throw e;
    }
}
@Test
public void shouldLoadQueryFile() {
    // Given: a queries file containing a single statement.
    givenQueryFileContains("This statement");

    // When: the standalone executor starts up.
    standaloneExecutor.startAsync();

    // Then: the statement from the file is handed to the engine for parsing.
    verify(ksqlEngine).parse("This statement");
}
/**
 * Resolves the value schema for the given topic / schema id.
 *
 * <p>Thin wrapper around {@code getSchema}; the trailing {@code false} selects
 * the value schema rather than the key schema.
 *
 * @param topicName      topic whose value schema is wanted, if known
 * @param schemaId       explicit schema id to look up, if provided
 * @param expectedFormat format the caller expects the schema to be in
 * @param serdeFeatures  serde features to apply
 * @return the resolved schema, or a failure result
 */
@Override
public SchemaResult getValueSchema(
    final Optional<String> topicName,
    final Optional<Integer> schemaId,
    final FormatInfo expectedFormat,
    final SerdeFeatures serdeFeatures
) {
    // isKey = false: this supplier variant targets the value side of the topic.
    return getSchema(topicName, schemaId, expectedFormat, serdeFeatures, false);
}
@Test
public void shouldReturnErrorFromGetValueSchemaIfSchemaIsNotInExpectedFormat() {
    // Given: the registry returns a PROTOBUF schema while the caller expects AVRO.
    when(parsedSchema.schemaType()).thenReturn(ProtobufSchema.TYPE);

    // When:
    final SchemaResult result = supplier.getValueSchema(Optional.of(TOPIC_NAME), Optional.empty(), expectedFormat, SerdeFeatures.of());

    // Then: no schema is returned and the failure message names both formats.
    assertThat(result.schemaAndId, is(Optional.empty()));
    assertThat(result.failureReason, is(not(Optional.empty())));
    assertThat(result.failureReason.get().getMessage(), is(
        "Value schema is not in the expected format. "
            + "You may want to set VALUE_FORMAT to 'PROTOBUF'."
            + System.lineSeparator()
            + "topic: " + TOPIC_NAME
            + System.lineSeparator()
            + "expected format: AVRO"
            + System.lineSeparator()
            + "actual format from Schema Registry: PROTOBUF"
    ));
}
@Override public Optional<BlobDescriptor> handleHttpResponseException(ResponseException responseException) throws ResponseException { if (responseException.getStatusCode() != HttpStatusCodes.STATUS_CODE_NOT_FOUND) { throw responseException; } if (responseException.getContent() == null) { // TODO: The Google HTTP client gives null content for HEAD requests. Make the content never // be null, even for HEAD requests. return Optional.empty(); } // Find a BLOB_UNKNOWN error response code. ErrorCodes errorCode = ErrorResponseUtil.getErrorCode(responseException); if (errorCode == ErrorCodes.BLOB_UNKNOWN) { return Optional.empty(); } // BLOB_UNKNOWN was not found as a error response code. throw responseException; }
@Test
public void testHandleHttpResponseException() throws IOException {
    // A 404 response whose body carries the BLOB_UNKNOWN error code ...
    ResponseException mockResponseException = Mockito.mock(ResponseException.class);
    Mockito.when(mockResponseException.getStatusCode())
        .thenReturn(HttpStatusCodes.STATUS_CODE_NOT_FOUND);
    ErrorResponseTemplate emptyErrorResponseTemplate = new ErrorResponseTemplate()
        .addError(new ErrorEntryTemplate(ErrorCodes.BLOB_UNKNOWN.name(), "some message"));
    Mockito.when(mockResponseException.getContent())
        .thenReturn(JsonTemplateMapper.toUtf8String(emptyErrorResponseTemplate));
    // ... must be swallowed and reported as "blob absent" (empty Optional).
    Assert.assertFalse(
        testBlobChecker.handleHttpResponseException(mockResponseException).isPresent());
}
/**
 * Copies SCM information (author, revision, date) from the changeset report
 * onto the given line builder and tracks the latest change seen.
 *
 * <p>Lines without a changeset are left untouched. Author and revision are
 * only set when the changeset provides them; the date is always set. Always
 * reports no read error.
 *
 * @param lineBuilder builder for the line being populated
 * @return {@link Optional#empty()} — this reader never produces a read error
 */
@Override
public Optional<ReadError> read(DbFileSources.Line.Builder lineBuilder) {
    int lineNumber = lineBuilder.getLine();
    if (!scmReport.hasChangesetForLine(lineNumber)) {
        return Optional.empty();
    }

    Changeset changeset = scmReport.getChangesetForLine(lineNumber);
    String author = changeset.getAuthor();
    if (author != null) {
        lineBuilder.setScmAuthor(author);
    }
    String revision = changeset.getRevision();
    if (revision != null) {
        lineBuilder.setScmRevision(revision);
    }
    lineBuilder.setScmDate(changeset.getDate());

    updateLatestChange(changeset);
    // Revision-aware tracking only makes sense when a revision is present.
    if (revision != null) {
        updateLatestChangeWithRevision(changeset);
    }
    return Optional.empty();
}
@Test
public void set_scm() {
    // One changeset covering line 1 with author, date and revision.
    ScmInfo scmInfo = new ScmInfoImpl(new Changeset[] {
        Changeset.newChangesetBuilder()
            .setAuthor("john")
            .setDate(123_456_789L)
            .setRevision("rev-1")
            .build()});

    ScmLineReader lineScm = new ScmLineReader(scmInfo);

    DbFileSources.Line.Builder lineBuilder = DbFileSources.Data.newBuilder().addLinesBuilder().setLine(1);
    // read() never reports an error ...
    assertThat(lineScm.read(lineBuilder)).isEmpty();

    // ... and copies all three SCM fields onto the line builder.
    assertThat(lineBuilder.getScmAuthor()).isEqualTo("john");
    assertThat(lineBuilder.getScmDate()).isEqualTo(123_456_789L);
    assertThat(lineBuilder.getScmRevision()).isEqualTo("rev-1");
}
/**
 * Applies a multi-output Beam {@link PTransform} to a single Flink
 * {@link DataSet}, returning one {@link DataSet} per tag of the resulting
 * {@link PCollectionTuple}.
 *
 * @param <InputT>     element type of the input data set
 * @param <CollectionT> the PCollection type the transform consumes
 * @param input     the Flink data set fed into the transform under the key "input"
 * @param transform the Beam transform producing a PCollectionTuple
 * @return map from output tag name to the corresponding Flink data set
 */
public <InputT, CollectionT extends PCollection<? extends InputT>>
    Map<String, DataSet<?>> applyMultiOutputBeamPTransform(
        DataSet<InputT> input, PTransform<CollectionT, PCollectionTuple> transform) {
  return applyBeamPTransformInternal(
      ImmutableMap.of("input", input),
      // Unchecked cast: the single registered input is pulled back out as CollectionT.
      (pipeline, map) -> (CollectionT) getNonNull(map, "input"),
      // Flatten the PCollectionTuple into a tag-name -> PCollection map.
      BeamAdapterUtils::tupleToMap,
      transform,
      input.getExecutionEnvironment());
}
@Test
public void testApplyMultiOutputTransform() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();
    DataSet<String> input = env.fromCollection(ImmutableList.of("a", "b", "c"));
    // Transform fans the input out into two tagged outputs ("x" and "y"),
    // each prefixing the elements with its tag.
    Map<String, DataSet<?>> result =
        new BeamFlinkDataSetAdapter()
            .applyMultiOutputBeamPTransform(
                input,
                new PTransform<PCollection<String>, PCollectionTuple>() {
                    @Override
                    public PCollectionTuple expand(PCollection<String> input) {
                        return PCollectionTuple.of("x", input.apply(withPrefix("x")))
                            .and("y", input.apply(withPrefix("y")));
                    }
                });
    // Each tag maps to its own data set carrying the prefixed elements.
    assertThat(result.get("x").collect(), containsInAnyOrder("xa", "xb", "xc"));
    assertThat(result.get("y").collect(), containsInAnyOrder("ya", "yb", "yc"));
}
/**
 * Loads the Trans/Job metadata referenced by this job entry, resolving it by
 * filename, repository path, or repository object reference according to the
 * configured {@code specificationMethod}. A meta-file cache shared with the
 * parent job is consulted first where possible, and any freshly loaded meta
 * is added to it before returning.
 *
 * @param rep       repository to load from (may be null for filesystem loads)
 * @param metaStore metastore passed through to the meta constructors
 * @param space     variable space used to resolve variables in paths/names
 * @return the loaded TransMeta or JobMeta (as T)
 * @throws KettleException when the reference is invalid, unsupported, or loading fails
 */
public T getMetaForEntry( Repository rep, IMetaStore metaStore, VariableSpace space ) throws KettleException {
  try {
    T theMeta = null;
    if ( jobEntryBase.getParentJob() != null ) {
      metaFileCache = jobEntryBase.getParentJobMeta().getMetaFileCache(); //Get the cache from the parent or create it
    }
    CurrentDirectoryResolver r = new CurrentDirectoryResolver();
    VariableSpace tmpSpace = r.resolveCurrentDirectory( specificationMethod, space, rep, jobEntryBase.getParentJob(), filename );
    final String[] idContainer = new String[ 1 ]; //unique portion of cache key passed through argument
    switch ( specificationMethod ) {
      case FILENAME:
        // Resolve variables in the filename, then try a direct (possibly cached) load.
        String realFilename = tmpSpace.environmentSubstitute( filename );
        try {
          theMeta = attemptLoadMeta( realFilename, rep, metaStore, tmpSpace, null, idContainer );
        } catch ( KettleException e ) {
          // try to load from repository, this trans may have been developed locally and later uploaded to the
          // repository
          if ( rep == null ) {
            theMeta = isTransMeta()
              ? (T) new TransMeta( realFilename, metaStore, null, true, jobEntryBase.getParentVariableSpace(), null )
              : (T) new JobMeta( jobEntryBase.getParentVariableSpace(), realFilename, rep, metaStore, null );
          } else {
            theMeta = getMetaFromRepository( rep, r, realFilename, tmpSpace );
          }
          if ( theMeta != null ) {
            idContainer[ 0 ] = realFilename;
          }
        }
        break;
      case REPOSITORY_BY_NAME:
        // Build a normalized "<directory>/<name>" path from the resolved parts.
        String realDirectory = tmpSpace.environmentSubstitute( directory != null ? directory : "" );
        String realName = tmpSpace.environmentSubstitute( metaName );
        String metaPath = StringUtil.trimEnd( realDirectory, '/' ) + RepositoryFile.SEPARATOR + StringUtil
          .trimStart( realName, '/' );
        if ( metaPath.startsWith( "file://" ) || metaPath.startsWith( "zip:file://" ) || metaPath.startsWith( "hdfs://" ) ) {
          // Despite REPOSITORY_BY_NAME, a URI-style path is loaded from the filesystem;
          // make sure the proper .ktr/.kjb extension is present.
          String extension = isTransMeta()
            ? RepositoryObjectType.TRANSFORMATION.getExtension() : RepositoryObjectType.JOB.getExtension();
          if ( !metaPath.endsWith( extension ) ) {
            metaPath = metaPath + extension;
          }
          theMeta = attemptCacheRead( metaPath ); //try to get from the cache first
          if ( theMeta == null ) {
            if ( isTransMeta() ) {
              theMeta = (T) new TransMeta( metaPath, metaStore, null, true, jobEntryBase.getParentVariableSpace(), null );
            } else {
              theMeta = (T) new JobMeta( tmpSpace, metaPath, rep, metaStore, null );
            }
            idContainer[ 0 ] = metaPath;
          }
        } else {
          theMeta = attemptCacheRead( metaPath ); //try to get from the cache first
          if ( theMeta == null ) {
            if ( isTransMeta() ) {
              // With no repository connection, fall back to a filesystem load.
              theMeta = rep == null
                ? (T) new TransMeta( metaPath, metaStore, null, true, jobEntryBase.getParentVariableSpace(), null )
                : getMetaFromRepository( rep, r, metaPath, tmpSpace );
            } else {
              theMeta = getMetaFromRepository( rep, r, metaPath, tmpSpace );
            }
            if ( theMeta != null ) {
              idContainer[ 0 ] = metaPath;
            }
          }
        }
        break;
      case REPOSITORY_BY_REFERENCE:
        // A by-reference load requires both an object id and a live repository connection.
        if ( metaObjectId == null ) {
          if ( isTransMeta() ) {
            throw new KettleException(
              BaseMessages.getString( persistentClass, "JobTrans.Exception.ReferencedTransformationIdIsNull" ) );
          } else {
            throw new KettleException(
              BaseMessages.getString( persistentClass, "JobJob.Exception.ReferencedTransformationIdIsNull" ) );
          }
        }
        if ( rep != null ) {
          theMeta = attemptCacheRead( metaObjectId.toString() ); //try to get from the cache first
          if ( theMeta == null ) {
            // Load the last revision
            if ( isTransMeta() ) {
              theMeta = (T) rep.loadTransformation( metaObjectId, null );
            } else {
              theMeta = (T) rep.loadJob( metaObjectId, null );
            }
            idContainer[ 0 ] = metaObjectId.toString();
          }
        } else {
          throw new KettleException(
            "Could not execute " + friendlyMetaType + " specified in a repository since we're not connected to one" );
        }
        break;
      default:
        throw new KettleException(
          "The specified object location specification method '" + specificationMethod
            + "' is not yet supported in this " + friendlyMetaType + " entry."
        );
    }
    // Cache whatever was freshly loaded (idContainer[0] stays null on a cache hit).
    cacheMeta( idContainer[ 0 ], theMeta );
    return theMeta;
  } catch ( final KettleException ke ) {
    // if we get a KettleException, simply re-throw it
    throw ke;
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString( persistentClass, "JobTrans.Exception.MetaDataLoad" ), e );
  }
}
@Test //A job getting the TransMeta from the fileSystem public void getMetaForEntryAsTransFromFileSystemTest() throws Exception { setupJobEntryTrans(); specificationMethod = ObjectLocationSpecificationMethod.FILENAME; MetaFileLoaderImpl metaFileLoader = new MetaFileLoaderImpl<TransMeta>( jobEntryBase, specificationMethod ); TransMeta transMeta = (TransMeta) metaFileLoader.getMetaForEntry( repository, store, space ); validateFirstTransMetaAccess( transMeta ); transMeta = (TransMeta) metaFileLoader.getMetaForEntry( repository, store, space ); validateSecondTransMetaAccess( transMeta ); }
static double estimatePixelCount(final Image image, final double widthOverHeight) { if (image.getHeight() == HEIGHT_UNKNOWN) { if (image.getWidth() == WIDTH_UNKNOWN) { // images whose size is completely unknown will be in their own subgroups, so // any one of them will do, hence returning the same value for all of them return 0; } else { return image.getWidth() * image.getWidth() / widthOverHeight; } } else if (image.getWidth() == WIDTH_UNKNOWN) { return image.getHeight() * image.getHeight() * widthOverHeight; } else { return image.getHeight() * image.getWidth(); } }
@Test
public void testEstimatePixelCountAllUnknown() {
    // Fully unknown dimensions always estimate to 0, regardless of the aspect ratio,
    // so such images land in the same subgroup.
    assertEquals(0.0, estimatePixelCount(img(HEIGHT_UNKNOWN, WIDTH_UNKNOWN), 1.0     ), 0.0);
    assertEquals(0.0, estimatePixelCount(img(HEIGHT_UNKNOWN, WIDTH_UNKNOWN), 12.0    ), 0.0);
    assertEquals(0.0, estimatePixelCount(img(HEIGHT_UNKNOWN, WIDTH_UNKNOWN), 0.1     ), 0.0);
    assertEquals(0.0, estimatePixelCount(img(HEIGHT_UNKNOWN, WIDTH_UNKNOWN), 16.0/9.0), 0.0);
}
@Override public void handle(final RoutingContext routingContext) { if (routingContext.request().isSSL()) { final String indicatedServerName = routingContext.request().connection() .indicatedServerName(); final String requestHost = routingContext.request().host(); if (indicatedServerName != null && requestHost != null) { // sometimes the port is present in the host header, remove it final String requestHostNoPort = requestHost.replaceFirst(":\\d+", ""); if (!requestHostNoPort.equals(indicatedServerName)) { log.error(String.format( "Sni check failed, host header: %s, sni value %s", requestHostNoPort, indicatedServerName) ); routingContext.fail(MISDIRECTED_REQUEST.code(), new KsqlApiException("This request was incorrectly sent to this ksqlDB server", Errors.ERROR_CODE_MISDIRECTED_REQUEST)); return; } } } routingContext.next(); }
@Test
public void shouldReturnMisdirectedResponse() {
    // Given: the Host header and the SNI-indicated server name disagree.
    when(serverRequest.host()).thenReturn("localhost");
    when(httpConnection.indicatedServerName()).thenReturn("anotherhost");

    // When:
    sniHandler.handle(routingContext);

    // Then: the request is failed (421) instead of being passed down the chain.
    verify(routingContext, never()).next();
    verify(routingContext).fail(anyInt(), any());
}
/**
 * Commits the metadata stream and finalizes the checkpoint location.
 *
 * <p>On success returns the completed storage location wrapping a
 * {@link FileStateHandle} for the metadata file. On any failure the partially
 * written file is cleaned up (best effort) and an {@link IOException} is
 * thrown. The stream is marked closed in all cases; a second call always
 * throws.
 *
 * @return the completed checkpoint storage location
 * @throws IOException if committing fails or the stream was already closed
 */
@Override
public FsCompletedCheckpointStorageLocation closeAndFinalizeCheckpoint() throws IOException {
    synchronized (this) {
        if (!closed) {
            try {
                // make a best effort attempt to figure out the size
                long size = 0;
                try {
                    size = outputStreamWrapper.getOutput().getPos();
                } catch (Exception ignored) {
                    // size stays 0 if the position cannot be determined
                }
                outputStreamWrapper.closeForCommit();

                FileStateHandle metaDataHandle = new FileStateHandle(metadataFilePath, size);

                return new FsCompletedCheckpointStorageLocation(
                        fileSystem,
                        exclusiveCheckpointDir,
                        metaDataHandle,
                        metaDataHandle.getFilePath().getParent().toString());
            } catch (Exception e) {
                // Best-effort cleanup of the partially written metadata file.
                try {
                    outputStreamWrapper.cleanup();
                } catch (Exception deleteException) {
                    LOG.warn(
                            "Could not delete the checkpoint stream file {}.",
                            metadataFilePath,
                            deleteException);
                }

                throw new IOException(
                        "Could not flush and close the file system "
                                + "output stream to "
                                + metadataFilePath
                                + " in order to obtain the "
                                + "stream state handle",
                        e);
            } finally {
                // Mark closed on both success and failure so retries are rejected.
                closed = true;
            }
        } else {
            throw new IOException("Stream has already been closed and discarded.");
        }
    }
}
@TestTemplate
void testFileExistence() throws Exception {
    Path metaDataFilePath = baseFolder();

    FsCheckpointMetadataOutputStream stream = createTestStream(metaDataFilePath, fileSystem);

    // Without a recoverable writer, the file is created eagerly; with one,
    // it only becomes visible after the commit below.
    if (fileSystem instanceof FsWithoutRecoverableWriter) {
        assertThat(fileSystem.exists(metaDataFilePath)).isTrue();
    } else {
        assertThat(fileSystem.exists(metaDataFilePath)).isFalse();
    }

    stream.closeAndFinalizeCheckpoint();

    // After finalization the metadata file must exist in either mode.
    assertThat(fileSystem.exists(metaDataFilePath)).isTrue();
}
public Plan validateReservationSubmissionRequest( ReservationSystem reservationSystem, ReservationSubmissionRequest request, ReservationId reservationId) throws YarnException { String message; if (reservationId == null) { message = "Reservation id cannot be null. Please try again specifying " + " a valid reservation id by creating a new reservation id."; throw RPCUtil.getRemoteException(message); } // Check if it is a managed queue String queue = request.getQueue(); Plan plan = getPlanFromQueue(reservationSystem, queue, AuditConstants.SUBMIT_RESERVATION_REQUEST); validateReservationDefinition(reservationId, request.getReservationDefinition(), plan, AuditConstants.SUBMIT_RESERVATION_REQUEST); return plan; }
@Test
public void testSubmitReservationInvalidDeadline() {
    // Deadline of 0 lies in the past, so validation must reject the request.
    ReservationSubmissionRequest request =
        createSimpleReservationSubmissionRequest(1, 1, 1, 0, 3);
    Plan plan = null;
    try {
        plan = rrValidator.validateReservationSubmissionRequest(rSystem, request,
            ReservationSystemTestUtil.getNewReservationId());
        Assert.fail();
    } catch (YarnException e) {
        // No plan is returned and the message names the bad deadline.
        Assert.assertNull(plan);
        String message = e.getMessage();
        Assert.assertTrue(message
            .startsWith("The specified deadline: 0 is the past"));
        LOG.info(message);
    }
}