focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
public ClusterInfo getAllClusterInfo() {
    // Wrap the live broker and cluster address tables for serialization to callers.
    final ClusterInfo info = new ClusterInfo();
    info.setBrokerAddrTable(this.brokerAddrTable);
    info.setClusterAddrTable(this.clusterAddrTable);
    return info;
}
@Test
public void testGetAllClusterInfo() {
    // The aggregated cluster info must be encodable into a non-null byte array.
    byte[] clusterInfo = routeInfoManager.getAllClusterInfo().encode();
    assertThat(clusterInfo).isNotNull();
}
/**
 * Converts a MySQL column type definition into a SeaTunnel {@code Column}.
 *
 * <p>The raw data type name is upper-cased, stripped of the display-only
 * {@code ZEROFILL} attribute, and suffixed with {@code " UNSIGNED"} when the
 * unsigned flag is set, so that a single switch can match both signed and
 * unsigned variants. Unknown type names raise a conversion error.
 */
@Override
public Column convert(BasicTypeDefine typeDefine) {
    // Seed the builder with the metadata common to every MySQL type.
    PhysicalColumn.PhysicalColumnBuilder builder =
            PhysicalColumn.builder()
                    .name(typeDefine.getName())
                    .sourceType(typeDefine.getColumnType())
                    .nullable(typeDefine.isNullable())
                    .defaultValue(typeDefine.getDefaultValue())
                    .comment(typeDefine.getComment());
    String mysqlDataType = typeDefine.getDataType().toUpperCase();
    // ZEROFILL is a display attribute only; strip it before matching the type name.
    if (mysqlDataType.endsWith("ZEROFILL")) {
        mysqlDataType =
                mysqlDataType.substring(0, mysqlDataType.length() - "ZEROFILL".length()).trim();
    }
    // Fold the unsigned flag into the type name so the switch matches "<TYPE> UNSIGNED".
    if (typeDefine.isUnsigned() && !(mysqlDataType.endsWith(" UNSIGNED"))) {
        mysqlDataType = mysqlDataType + " UNSIGNED";
    }
    switch (mysqlDataType) {
        case MYSQL_NULL:
            builder.dataType(BasicType.VOID_TYPE);
            break;
        case MYSQL_BIT:
            // BIT with no length or length 1 is treated as a boolean; longer
            // bit fields become byte arrays.
            if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) {
                builder.dataType(BasicType.BOOLEAN_TYPE);
            } else if (typeDefine.getLength() == 1) {
                builder.dataType(BasicType.BOOLEAN_TYPE);
            } else {
                builder.dataType(PrimitiveByteArrayType.INSTANCE);
                // BIT(M) -> BYTE(M/8)
                long byteLength = typeDefine.getLength() / 8;
                // Round up when M is not a multiple of 8.
                byteLength += typeDefine.getLength() % 8 > 0 ? 1 : 0;
                builder.columnLength(byteLength);
            }
            break;
        case MYSQL_TINYINT:
            // tinyint(1) is the MySQL convention for a boolean column.
            if (typeDefine.getColumnType().equalsIgnoreCase("tinyint(1)")) {
                builder.dataType(BasicType.BOOLEAN_TYPE);
            } else {
                builder.dataType(BasicType.BYTE_TYPE);
            }
            break;
        case MYSQL_TINYINT_UNSIGNED:
        case MYSQL_SMALLINT:
            builder.dataType(BasicType.SHORT_TYPE);
            break;
        case MYSQL_SMALLINT_UNSIGNED:
        case MYSQL_MEDIUMINT:
        case MYSQL_MEDIUMINT_UNSIGNED:
        case MYSQL_INT:
        case MYSQL_INTEGER:
        case MYSQL_YEAR:
            builder.dataType(BasicType.INT_TYPE);
            break;
        case MYSQL_INT_UNSIGNED:
        case MYSQL_INTEGER_UNSIGNED:
        case MYSQL_BIGINT:
            builder.dataType(BasicType.LONG_TYPE);
            break;
        case MYSQL_BIGINT_UNSIGNED:
            // Unsigned bigint can exceed Long.MAX_VALUE; map to DECIMAL(20, 0).
            DecimalType intDecimalType = new DecimalType(20, 0);
            builder.dataType(intDecimalType);
            builder.columnLength(Long.valueOf(intDecimalType.getPrecision()));
            builder.scale(intDecimalType.getScale());
            break;
        case MYSQL_FLOAT:
            builder.dataType(BasicType.FLOAT_TYPE);
            break;
        case MYSQL_FLOAT_UNSIGNED:
            // Mapped to signed FLOAT; the top of the unsigned range may overflow.
            log.warn("{} will probably cause value overflow.", MYSQL_FLOAT_UNSIGNED);
            builder.dataType(BasicType.FLOAT_TYPE);
            break;
        case MYSQL_DOUBLE:
            builder.dataType(BasicType.DOUBLE_TYPE);
            break;
        case MYSQL_DOUBLE_UNSIGNED:
            // Mapped to signed DOUBLE; the top of the unsigned range may overflow.
            log.warn("{} will probably cause value overflow.", MYSQL_DOUBLE_UNSIGNED);
            builder.dataType(BasicType.DOUBLE_TYPE);
            break;
        case MYSQL_DECIMAL:
            Preconditions.checkArgument(typeDefine.getPrecision() > 0);
            DecimalType decimalType;
            // Clamp to the supported maximum precision; warn about possible overflow.
            if (typeDefine.getPrecision() > DEFAULT_PRECISION) {
                log.warn("{} will probably cause value overflow.", MYSQL_DECIMAL);
                decimalType = new DecimalType(DEFAULT_PRECISION, DEFAULT_SCALE);
            } else {
                decimalType =
                        new DecimalType(
                                typeDefine.getPrecision().intValue(),
                                typeDefine.getScale() == null
                                        ? 0
                                        : typeDefine.getScale().intValue());
            }
            builder.dataType(decimalType);
            builder.columnLength(Long.valueOf(decimalType.getPrecision()));
            builder.scale(decimalType.getScale());
            break;
        case MYSQL_DECIMAL_UNSIGNED:
            Preconditions.checkArgument(typeDefine.getPrecision() > 0);
            log.warn("{} will probably cause value overflow.", MYSQL_DECIMAL_UNSIGNED);
            // Widen precision by one digit to make room for the unsigned range.
            DecimalType decimalUnsignedType =
                    new DecimalType(
                            typeDefine.getPrecision().intValue() + 1,
                            typeDefine.getScale() == null
                                    ? 0
                                    : typeDefine.getScale().intValue());
            builder.dataType(decimalUnsignedType);
            builder.columnLength(Long.valueOf(decimalUnsignedType.getPrecision()));
            builder.scale(decimalUnsignedType.getScale());
            break;
        case MYSQL_ENUM:
            builder.dataType(BasicType.STRING_TYPE);
            if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) {
                // No declared length; fall back to a default of 100 characters.
                builder.columnLength(100L);
            } else {
                builder.columnLength(typeDefine.getLength());
            }
            break;
        case MYSQL_CHAR:
        case MYSQL_VARCHAR:
            if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) {
                builder.columnLength(TypeDefineUtils.charTo4ByteLength(1L));
            } else {
                builder.columnLength(typeDefine.getLength());
            }
            builder.dataType(BasicType.STRING_TYPE);
            break;
        // TEXT family: lengths follow MySQL's fixed storage maxima (2^n - 1 bytes).
        case MYSQL_TINYTEXT:
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(POWER_2_8 - 1);
            break;
        case MYSQL_TEXT:
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(POWER_2_16 - 1);
            break;
        case MYSQL_MEDIUMTEXT:
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(POWER_2_24 - 1);
            break;
        case MYSQL_LONGTEXT:
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(POWER_2_32 - 1);
            break;
        case MYSQL_JSON:
            builder.dataType(BasicType.STRING_TYPE);
            break;
        case MYSQL_BINARY:
        case MYSQL_VARBINARY:
            if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) {
                builder.columnLength(1L);
            } else {
                builder.columnLength(typeDefine.getLength());
            }
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            break;
        // BLOB family mirrors the TEXT family but as raw bytes.
        case MYSQL_TINYBLOB:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(POWER_2_8 - 1);
            break;
        case MYSQL_BLOB:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(POWER_2_16 - 1);
            break;
        case MYSQL_MEDIUMBLOB:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(POWER_2_24 - 1);
            break;
        case MYSQL_LONGBLOB:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(POWER_2_32 - 1);
            break;
        case MYSQL_GEOMETRY:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            break;
        case MYSQL_DATE:
            builder.dataType(LocalTimeType.LOCAL_DATE_TYPE);
            break;
        case MYSQL_TIME:
            builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
            builder.scale(typeDefine.getScale());
            break;
        case MYSQL_DATETIME:
        case MYSQL_TIMESTAMP:
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            builder.scale(typeDefine.getScale());
            break;
        default:
            throw CommonError.convertToSeaTunnelTypeError(
                    DatabaseIdentifier.MYSQL, mysqlDataType, typeDefine.getName());
    }
    return builder.build();
}
@Test
public void testConvertTinyint() {
    // tinyint(1) maps to BOOLEAN.
    BasicTypeDefine<Object> typeDefine =
            BasicTypeDefine.builder()
                    .name("test")
                    .columnType("tinyint(1)")
                    .dataType("tinyint")
                    .length(1L)
                    .build();
    Column column = MySqlTypeConverter.DEFAULT_INSTANCE.convert(typeDefine);
    Assertions.assertEquals(typeDefine.getName(), column.getName());
    Assertions.assertEquals(BasicType.BOOLEAN_TYPE, column.getDataType());
    Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());

    // Any wider tinyint maps to BYTE.
    typeDefine =
            BasicTypeDefine.builder()
                    .name("test")
                    .columnType("tinyint(2)")
                    .dataType("tinyint")
                    .length(2L)
                    .build();
    column = MySqlTypeConverter.DEFAULT_INSTANCE.convert(typeDefine);
    Assertions.assertEquals(typeDefine.getName(), column.getName());
    Assertions.assertEquals(BasicType.BYTE_TYPE, column.getDataType());
    Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());

    // "tinyint unsigned" spelled in the data type maps to SHORT.
    typeDefine =
            BasicTypeDefine.builder()
                    .name("test")
                    .columnType("tinyint unsigned")
                    .dataType("tinyint unsigned")
                    .build();
    column = MySqlTypeConverter.DEFAULT_INSTANCE.convert(typeDefine);
    Assertions.assertEquals(typeDefine.getName(), column.getName());
    Assertions.assertEquals(BasicType.SHORT_TYPE, column.getDataType());
    Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());

    // The unsigned flag alone must also map to SHORT.
    typeDefine =
            BasicTypeDefine.builder()
                    .name("test")
                    .columnType("tinyint")
                    .dataType("tinyint")
                    .unsigned(true)
                    .build();
    column = MySqlTypeConverter.DEFAULT_INSTANCE.convert(typeDefine);
    Assertions.assertEquals(typeDefine.getName(), column.getName());
    Assertions.assertEquals(BasicType.SHORT_TYPE, column.getDataType());
    Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
}
/**
 * Serializes {@code bean} with default options.
 *
 * <p>Delegates to {@code serialize(bean, false)}; the meaning of the boolean
 * flag is defined by the two-argument overload — presumably a serialization
 * mode switch (TODO confirm against that overload).
 *
 * @param bean the object to serialize
 * @return the serialized representation
 * @throws NullPointerException declared by this API; presumably when
 *         {@code bean} is null — TODO confirm against the two-argument overload
 */
public static Object serialize(Object bean) throws NullPointerException {
    return serialize(bean, false);
}
@Test
public void serializeMap() {
    // Serialize a test map via both overloads and check every original key survives.
    boolean error = false;
    Map<Object, Object> bean = createTestMap();
    try {
        Map map = (Map) BeanSerializer.serialize(bean, true);
        for (Map.Entry<Object, Object> entry : bean.entrySet()) {
            if (!map.containsKey(entry.getKey())) {
                error = true;
                break;
            }
        }
        Assert.assertFalse(error);
        // Repeat with the single-argument (default) overload.
        map = (Map) BeanSerializer.serialize(bean);
        for (Map.Entry<Object, Object> entry : bean.entrySet()) {
            if (!map.containsKey(entry.getKey())) {
                error = true;
                break;
            }
        }
        Assert.assertFalse(error);
    } catch (Exception e) {
        // Any exception fails the test through the flag assertion below.
        error = true;
    }
    Assert.assertFalse(error);
}
@Override public Comparable<?> visit(NameRefNode n) { throw new UnsupportedOperationException("Gaps/Overlaps analysis cannot be performed for InputEntry with unary test containing symbol reference: '" + n.getText() + "'."); // ref DROOLS-4607 }
@Test
void smokeTest() {
    // Every function-invocation node — built-in or unknown (x()) — must be
    // rejected with UnsupportedOperationException.
    DMNDTAnalyserValueFromNodeVisitor ut = new DMNDTAnalyserValueFromNodeVisitor(Collections.emptyList());
    assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> ut.visit(compile("date()")));
    assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> ut.visit(compile("date and time()")));
    assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> ut.visit(compile("time()")));
    assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> ut.visit(compile("number()")));
    assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> ut.visit(compile("string()")));
    assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> ut.visit(compile("duration()")));
    assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> ut.visit(compile("years and months duration()")));
    assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> ut.visit(compile("x()")));
}
public FEELFnResult<Boolean> invoke(@ParameterName( "point1" ) Comparable point1, @ParameterName( "point2" ) Comparable point2) { if ( point1 == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "point1", "cannot be null")); } if ( point2 == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "point2", "cannot be null")); } try { boolean result = point1.compareTo( point2 ) > 0; return FEELFnResult.ofResult( result ); } catch( Exception e ) { // points are not comparable return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "point1", "cannot be compared to point2")); } }
@Test
void invokeParamRangeAndSingle() {
    // Exercise after(range, point) against closed and open low bounds of the range.
    FunctionTestUtil.assertResult( afterFunction.invoke( new RangeImpl( Range.RangeBoundary.CLOSED, "a", "f", Range.RangeBoundary.CLOSED ), "f" ), Boolean.FALSE );
    FunctionTestUtil.assertResult( afterFunction.invoke( new RangeImpl( Range.RangeBoundary.CLOSED, "a", "f", Range.RangeBoundary.CLOSED ), "a"), Boolean.FALSE );
    // With an open low bound, the range starts strictly after "a".
    FunctionTestUtil.assertResult( afterFunction.invoke( new RangeImpl( Range.RangeBoundary.OPEN, "a", "f", Range.RangeBoundary.CLOSED ), "a" ), Boolean.TRUE );
    FunctionTestUtil.assertResult( afterFunction.invoke( new RangeImpl( Range.RangeBoundary.CLOSED, "b", "f", Range.RangeBoundary.CLOSED ), "a" ), Boolean.TRUE );
}
/**
 * Exports one page of OneDrive photos (and albums) for the given job.
 *
 * <p>When album or pagination data is present, lists drive items from that album
 * or continuation URL; otherwise starts from the special "photos" folder.
 * Folders found become albums and are queued as container resources for
 * sub-export; files that convert become photo models.
 */
@VisibleForTesting
ExportResult<PhotosContainerResource> exportOneDrivePhotos(
        TokensAndUrlAuthData authData,
        Optional<IdOnlyContainerResource> albumData,
        Optional<PaginationData> paginationData,
        UUID jobId)
        throws IOException {
    // Resolve the album (folder) id when exporting the contents of a specific album.
    Optional<String> albumId = Optional.empty();
    if (albumData.isPresent()) {
        albumId = Optional.of(albumData.get().getId());
    }
    Optional<String> paginationUrl = getDrivePaginationToken(paginationData);

    MicrosoftDriveItemsResponse driveItemsResponse;
    if (paginationData.isPresent() || albumData.isPresent()) {
        driveItemsResponse = getOrCreatePhotosInterface(authData).getDriveItems(albumId, paginationUrl);
    } else {
        driveItemsResponse = getOrCreatePhotosInterface(authData)
                .getDriveItemsFromSpecialFolder(MicrosoftSpecialFolder.FolderType.photos);
    }

    PaginationData nextPageData = setNextPageToken(driveItemsResponse);
    ContinuationData continuationData = new ContinuationData(nextPageData);
    PhotosContainerResource containerResource;
    MicrosoftDriveItem[] driveItems = driveItemsResponse.getDriveItems();
    List<PhotoAlbum> albums = new ArrayList<>();
    List<PhotoModel> photos = new ArrayList<>();

    if (driveItems != null && driveItems.length > 0) {
        for (MicrosoftDriveItem driveItem : driveItems) {
            // Folders become albums and are queued for sub-processing.
            PhotoAlbum album = tryConvertDriveItemToPhotoAlbum(driveItem, jobId);
            if (album != null) {
                albums.add(album);
                continuationData.addContainerResource(new IdOnlyContainerResource(driveItem.id));
            }
            // Photo files become photo models.
            PhotoModel photo = tryConvertDriveItemToPhotoModel(albumId, driveItem, jobId);
            if (photo != null) {
                photos.add(photo);
            }
        }
    }

    // No next page means this listing is exhausted.
    ExportResult.ResultType result =
            nextPageData == null ? ExportResult.ResultType.END : ExportResult.ResultType.CONTINUE;
    containerResource = new PhotosContainerResource(albums, photos);
    return new ExportResult<>(result, containerResource, continuationData);
}
@Test
public void exportAlbumWithoutNextPage() throws IOException {
    // Setup: a single album folder in the response and no next-page link.
    MicrosoftDriveItem folderItem = setUpSingleAlbum();
    when(driveItemsResponse.getDriveItems()).thenReturn(new MicrosoftDriveItem[] {folderItem});
    when(driveItemsResponse.getNextPageLink()).thenReturn(null);
    StringPaginationToken inputPaginationToken =
            new StringPaginationToken(DRIVE_TOKEN_PREFIX + DRIVE_PAGE_URL);

    // Run
    ExportResult<PhotosContainerResource> result =
            microsoftPhotosExporter.exportOneDrivePhotos(
                    null, Optional.empty(), Optional.of(inputPaginationToken), uuid);

    // Verify method calls
    verify(photosInterface).getDriveItems(Optional.empty(), Optional.of(DRIVE_PAGE_URL));
    verify(driveItemsResponse).getDriveItems();

    // Verify next pagination token is absent
    ContinuationData continuationData = result.getContinuationData();
    StringPaginationToken paginationToken = (StringPaginationToken) continuationData.getPaginationData();
    assertThat(paginationToken).isEqualTo(null);

    // Verify one album is ready for import
    Collection<PhotoAlbum> actualAlbums = result.getExportedData().getAlbums();
    assertThat(actualAlbums.stream().map(PhotoAlbum::getId).collect(Collectors.toList()))
            .containsExactly(FOLDER_ID);

    // Verify photos should be empty (in the root)
    Collection<PhotoModel> actualPhotos = result.getExportedData().getPhotos();
    assertThat(actualPhotos).isEmpty();

    // Verify there is one container ready for sub-processing
    List<ContainerResource> actualResources = continuationData.getContainerResources();
    assertThat(
            actualResources.stream()
                    .map(a -> ((IdOnlyContainerResource) a).getId())
                    .collect(Collectors.toList()))
            .containsExactly(FOLDER_ID);
}
/**
 * Boxed convenience overload; unboxes and delegates to {@code add(int)}.
 * Throws {@code NullPointerException} if {@code value} is null (unboxing).
 */
public boolean add(final Integer value) {
    return add(value.intValue());
}
@Test
void setsWithTheDifferentSizesAreNotEqual() {
    // Sets with different sizes (and contents) must not compare equal.
    final IntHashSet other = new IntHashSet(100);
    addTwoElements(testSet);
    other.add(1001);
    assertNotEquals(testSet, other);
}
@Override
public IndexRange calculateRange(String index) {
    // Abort early when the index has not recovered to a healthy state.
    checkIfHealthy(indices.waitForRecovery(index),
            (status) -> new RuntimeException("Unable to calculate range for index <" + index + ">, index is unhealthy: " + status));
    final DateTime calculatedAt = DateTime.now(DateTimeZone.UTC);
    final Stopwatch stopwatch = Stopwatch.createStarted();
    final IndexRangeStats stats = indices.indexRangeStatsOfIndex(index);
    final int tookMs = Ints.saturatedCast(stopwatch.stop().elapsed(TimeUnit.MILLISECONDS));
    LOG.info("Calculated range of [{}] in [{}ms].", index, tookMs);
    return MongoIndexRange.create(index, stats.min(), stats.max(), calculatedAt, tookMs, stats.streamIds());
}
@Test
@MongoDBFixtures("MongoIndexRangeServiceTest.json")
public void calculateRangeReturnsIndexRange() throws Exception {
    // Healthy index whose stats report known min/max timestamps.
    final String index = "graylog";
    final DateTime min = new DateTime(2015, 1, 1, 1, 0, DateTimeZone.UTC);
    final DateTime max = new DateTime(2015, 1, 1, 5, 0, DateTimeZone.UTC);
    when(indices.waitForRecovery(index)).thenReturn(HealthStatus.Green);
    when(indices.indexRangeStatsOfIndex(index)).thenReturn(IndexRangeStats.create(min, max));
    final IndexRange indexRange = indexRangeService.calculateRange(index);
    assertThat(indexRange.indexName()).isEqualTo(index);
    assertThat(indexRange.begin()).isEqualTo(min);
    assertThat(indexRange.end()).isEqualTo(max);
    // Hour-insensitive comparison to keep the "now" assertion non-flaky.
    Assertions.assertThat(indexRange.calculatedAt()).isEqualToIgnoringHours(DateTime.now(DateTimeZone.UTC));
}
/** Exposes the worker thread pool, e.g. for subclasses and tests. */
protected ThreadPoolExecutor getWorkersThreadPool() {
    return workersThreadPool;
}
@Test
void testConstructor() {
    // Configure the pool name prefix, size and keep-alive via URL parameters.
    URL url = URL.valueOf("default://")
            .addParameter(THREAD_POOL_PREFIX_PARAM_NAME, "test")
            .addParameter(THREAD_POOL_SIZE_PARAM_NAME, 10)
            .addParameter(THREAD_POOL_KEEP_ALIVE_TIME_PARAM_NAME, 100);
    // Minimal concrete subclass; the abstract hooks are irrelevant to this test.
    AbstractDynamicConfiguration configuration = new AbstractDynamicConfiguration(url) {
        @Override
        protected String doGetConfig(String key, String group) {
            return null;
        }

        @Override
        protected void doClose() {}

        @Override
        protected boolean doRemoveConfig(String key, String group) {
            return false;
        }
    };
    ThreadPoolExecutor threadPoolExecutor = configuration.getWorkersThreadPool();
    ThreadFactory threadFactory = threadPoolExecutor.getThreadFactory();
    Thread thread = threadFactory.newThread(() -> {});
    assertEquals(10, threadPoolExecutor.getCorePoolSize());
    assertEquals(10, threadPoolExecutor.getMaximumPoolSize());
    assertEquals(100, threadPoolExecutor.getKeepAliveTime(TimeUnit.MILLISECONDS));
    assertEquals("test-thread-1", thread.getName());
}
@Deprecated public static String getJwt(JwtClaims claims) throws JoseException { String jwt; RSAPrivateKey privateKey = (RSAPrivateKey) getPrivateKey( jwtConfig.getKey().getFilename(),jwtConfig.getKey().getPassword(), jwtConfig.getKey().getKeyName()); // A JWT is a JWS and/or a JWE with JSON claims as the payload. // In this example it is a JWS nested inside a JWE // So we first create a JsonWebSignature object. JsonWebSignature jws = new JsonWebSignature(); // The payload of the JWS is JSON content of the JWT Claims jws.setPayload(claims.toJson()); // The JWT is signed using the sender's private key jws.setKey(privateKey); // Get provider from security config file, it should be two digit // And the provider id will set as prefix for keyid in the token header, for example: 05100 // if there is no provider id, we use "00" for the default value String provider_id = ""; if (jwtConfig.getProviderId() != null) { provider_id = jwtConfig.getProviderId(); if (provider_id.length() == 1) { provider_id = "0" + provider_id; } else if (provider_id.length() > 2) { logger.error("provider_id defined in the security.yml file is invalid; the length should be 2"); provider_id = provider_id.substring(0, 2); } } jws.setKeyIdHeaderValue(provider_id + jwtConfig.getKey().getKid()); // Set the signature algorithm on the JWT/JWS that will integrity protect the claims jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.RSA_USING_SHA256); // Sign the JWS and produce the compact serialization, which will be the inner JWT/JWS // representation, which is a string consisting of three dot ('.') separated // base64url-encoded parts in the form Header.Payload.Signature jwt = jws.getCompactSerialization(); return jwt; }
@Test
public void longLivedProductSubjectJwt() throws Exception {
    // Build custom claims and print a very long-lived token for manual use.
    Map<String, String> custom = new HashMap<>();
    custom.put("consumer_application_id", "361");
    custom.put("request_transit", "67");
    JwtClaims claims = ClaimsUtil.getCustomClaims("steve", "EMPLOYEE", "f7d42348-c647-4efb-a52d-4c5787421e72", null, custom, "user");
    // 5256000 minutes ≈ 10 years.
    claims.setExpirationTimeMinutesInTheFuture(5256000);
    String jwt = JwtIssuer.getJwt(claims, long_kid, KeyUtil.deserializePrivateKey(long_key, KeyUtil.RSA));
    System.out.println("***LongLived product subject JWT***: " + jwt);
}
/** Builds the broker-tool command line with its subcommands and executes it. */
public static int run(String[] args) {
    final CommandLine commandLine = new CommandLine(new BrokerTool());
    commandLine.addSubcommand(LoadReportCommand.class);
    // The docs generator needs the fully assembled command line to introspect.
    commandLine.addSubcommand(new GenerateDocsCommand(commandLine));
    return commandLine.execute(args);
}
@Test
public void testGenerateDocs() throws Exception {
    // Capture stdout, run the doc generator, then check that every @Option name
    // of LoadReportCommand appears in the generated output.
    PrintStream oldStream = System.out;
    try {
        ByteArrayOutputStream baoStream = new ByteArrayOutputStream();
        System.setOut(new PrintStream(baoStream));
        BrokerTool.run(new String[]{"gen-doc"});
        String message = baoStream.toString();
        Class argumentsClass = Class.forName("org.apache.pulsar.broker.tools.LoadReportCommand");
        Field[] fields = argumentsClass.getDeclaredFields();
        for (Field field : fields) {
            boolean fieldHasAnno = field.isAnnotationPresent(Option.class);
            if (fieldHasAnno) {
                Option fieldAnno = field.getAnnotation(Option.class);
                String[] names = fieldAnno.names();
                String nameStr = Arrays.asList(names).toString();
                // Strip the surrounding brackets of List.toString().
                nameStr = nameStr.substring(1, nameStr.length() - 1);
                assertTrue(message.indexOf(nameStr) > 0);
            }
        }
    } finally {
        // Always restore the original stdout.
        System.setOut(oldStream);
    }
}
/**
 * @deprecated delegates unchanged to the superclass implementation; prefer the
 *             replacement API this deprecation points to in the superclass.
 */
@Deprecated
@Override
public void toXML(Object obj, OutputStream out) {
    super.toXML(obj, out);
}
@Issue("JENKINS-21017")
@Test
public void unmarshalToDefault_default() {
    // Serialized form containing only the default values of WithDefaults.
    String defaultXml = "<hudson.util.XStream2Test_-WithDefaults>\n"
            + " <stringDefaultValue>defaultValue</stringDefaultValue>\n"
            + " <arrayDefaultValue>\n"
            + " <string>first</string>\n"
            + " <string>second</string>\n"
            + " </arrayDefaultValue>\n"
            + " <arrayDefaultEmpty/>\n"
            + " <listDefaultValue>\n"
            + " <string>first</string>\n"
            + " <string>second</string>\n"
            + " </listDefaultValue>\n"
            + " <listDefaultEmpty/>\n"
            + "</hudson.util.XStream2Test_-WithDefaults>";
    // Instance whose every field differs from the defaults in the XML above.
    WithDefaults existingInstance = new WithDefaults("foobar", "foobar",
            new String[]{"foobar", "barfoo", "fumanchu"},
            new String[]{"foobar", "barfoo", "fumanchu"},
            new String[]{"foobar", "barfoo", "fumanchu"},
            Arrays.asList("foobar", "barfoo", "fumanchu"),
            Arrays.asList("foobar", "barfoo", "fumanchu"),
            Arrays.asList("foobar", "barfoo", "fumanchu")
    );
    WithDefaults newInstance = new WithDefaults();
    // Unmarshalling over an existing instance, a fresh instance, and no root
    // must all produce the same re-serialized XML.
    String xmlA = Jenkins.XSTREAM2.toXML(fromXMLNullingOut(defaultXml, existingInstance));
    String xmlB = Jenkins.XSTREAM2.toXML(fromXMLNullingOut(defaultXml, newInstance));
    String xmlC = Jenkins.XSTREAM2.toXML(fromXMLNullingOut(defaultXml, null));
    assertThat("Deserializing over an existing instance is the same as with no root", xmlA, is(xmlC));
    assertThat("Deserializing over an new instance is the same as with no root", xmlB, is(xmlC));
}
@Override
public int executeUpdate() throws SQLException {
    // Route the call through ExecuteTemplate so the framework can wrap the real
    // statement's executeUpdate; the template's exact duties are defined elsewhere.
    return ExecuteTemplate.execute(this, (statement, args) -> statement.executeUpdate());
}
@Test
public void testExecuteUpdate() throws SQLException {
    // The proxy must delegate and return an update count (boxed, hence non-null).
    Assertions.assertNotNull(preparedStatementProxy.executeUpdate());
}
@BuildStep
HealthBuildItem addHealthCheck(Capabilities capabilities, JobRunrBuildTimeConfiguration jobRunrBuildTimeConfiguration) {
    // Register the JobRunr health check only when the SmallRye Health capability
    // is present; otherwise return null — presumably meaning no build item is
    // produced (see the companion test asserting a null result).
    if (capabilities.isPresent(Capability.SMALLRYE_HEALTH)) {
        return new HealthBuildItem(JobRunrHealthCheck.class.getName(), jobRunrBuildTimeConfiguration.healthEnabled());
    }
    return null;
}
@Test
void addHealthCheckDoesNotAddHealthBuildItemIfSmallRyeHealthCapabilityIsNotPresent() {
    // Without the SmallRye Health capability, no build item must be produced.
    lenient().when(capabilities.isPresent(Capability.SMALLRYE_HEALTH)).thenReturn(false);
    final HealthBuildItem healthBuildItem = jobRunrExtensionProcessor.addHealthCheck(capabilities, jobRunrBuildTimeConfiguration);
    assertThat(healthBuildItem).isNull();
}
/**
 * Splits {@code str} at every match of {@code splitPattern}, keeping each
 * delimiter match as its own list element.
 *
 * @param str the string to split; a {@code null} input yields an empty list
 * @param splitPattern the compiled delimiter pattern
 * @return non-delimiter segments and delimiter matches, in source order
 */
protected static final List<String> splitStringPreserveDelimiter(String str, Pattern splitPattern) {
    List<String> list = new ArrayList<>();
    if (str != null) {
        Matcher matcher = splitPattern.matcher(str);
        int pos = 0;
        while (matcher.find()) {
            // Text between the previous match (or start of input) and this delimiter.
            if (pos < matcher.start()) {
                list.add(str.substring(pos, matcher.start()));
            }
            // The delimiter itself is preserved as its own element.
            list.add(matcher.group());
            pos = matcher.end();
        }
        // Trailing text after the last delimiter.
        if (pos < str.length()) {
            list.add(str.substring(pos));
        }
    }
    return list;
}
@Test
public void testSplitString3() {
    // A trailing delimiter must still appear as its own element: [test, ",", test2, ","].
    List<String> list = DiffRowGenerator.splitStringPreserveDelimiter("test,test2,", DiffRowGenerator.SPLIT_BY_WORD_PATTERN);
    System.out.println(list);
    assertEquals(4, list.size());
    assertEquals("[test, ,, test2, ,]", list.toString());
}
public String exportRepository(List<String> ids, String format) { StringBuilder result = new StringBuilder("{"); ObjectMapper mapper = new ObjectMapper(); // First, retrieve service list. List<Service> services = serviceRepository.findByIdIn(ids); try { String jsonArray = mapper.writeValueAsString(services); result.append("\"services\":").append(jsonArray).append(", "); } catch (Exception e) { log.error("Exception while serializing services for export", e); } // Then, get resources associated to services. List<Resource> resources = resourceRepository.findByServiceIdIn(ids); try { String jsonArray = mapper.writeValueAsString(resources); result.append("\"resources\":").append(jsonArray).append(", "); } catch (Exception e) { log.error("Exception while serializing resources for export", e); } // Finally, get requests and responses associated to the services. List<String> operationIds = new ArrayList<>(); for (Service service : services) { for (Operation operation : service.getOperations()) { operationIds.add(IdBuilder.buildOperationId(service, operation)); } } List<Request> requests = requestRepository.findByOperationIdIn(operationIds); List<Response> responses = responseRepository.findByOperationIdIn(operationIds); List<EventMessage> eventMessages = eventMessageRepository.findByOperationIdIn(operationIds); try { String jsonArray = mapper.writeValueAsString(requests); result.append("\"requests\":").append(jsonArray).append(", "); jsonArray = mapper.writeValueAsString(responses); result.append("\"responses\":").append(jsonArray).append(", "); jsonArray = mapper.writeValueAsString(eventMessages); result.append("\"eventMessages\":").append(jsonArray); } catch (Exception e) { log.error("Exception while serializing messages for export", e); } return result.append("}").toString(); }
@Test
void testExportRepository() {
    // Export the fixture services and validate the JSON structure of the result.
    String result = service.exportRepository(ids, "json");
    ObjectMapper mapper = new ObjectMapper();
    // Check that result is a valid JSON object.
    JsonNode jsonObj = null;
    try {
        jsonObj = mapper.readTree(result);
    } catch (IOException e) {
        fail("No exception should be thrown when parsing Json");
    }
    try {
        // Retrieve and assert on services part.
        ArrayNode services = (ArrayNode) jsonObj.get("services");
        assertEquals(3, services.size());
        for (int i = 0; i < services.size(); i++) {
            JsonNode service = services.get(i);
            String name = service.get("name").asText();
            assertTrue("HelloWorld".equals(name) || "MyService-hello".equals(name));
        }
    } catch (Exception e) {
        fail("Exception while getting services array");
    }
    try {
        // Retrieve and assert on resources part.
        ArrayNode resources = (ArrayNode) jsonObj.get("resources");
        assertEquals(1, resources.size());
        JsonNode resource = resources.get(0);
        assertEquals("Resource 1", resource.get("name").asText());
        assertEquals("<wsdl></wsdl>", resource.get("content").asText());
    } catch (Exception e) {
        fail("Exception while getting resources array");
    }
}
/**
 * Removes suggestions until at most {@code maxSuggestions} remain.
 *
 * <p>NOTE(review): "trip" looks like a typo for "trim", but callers (see the
 * companion test) use this name, so it must be kept.
 * Termination relies on {@code removeSuggestion} (defined elsewhere) shrinking
 * the list on every call — if it ever failed to, this loop would not
 * terminate; TODO confirm against that helper.
 */
public static void tripSuggestions(
        List<CharSequence> suggestions, final int maxSuggestions, List<CharSequence> stringsPool) {
    while (suggestions.size() > maxSuggestions) {
        removeSuggestion(suggestions, maxSuggestions, stringsPool);
    }
}
@Test
public void testTrimSuggestionsNoRecycleBackToPool() {
    // Trimming plain String suggestions must not push anything into the pool.
    ArrayList<CharSequence> list = new ArrayList<>(
            Arrays.<CharSequence>asList("typed", "something", "duped", "car", "something"));
    Assert.assertEquals(0, mStringPool.size());
    IMEUtil.tripSuggestions(list, 2, mStringPool);
    // Only the first two suggestions survive, in order.
    Assert.assertEquals(2, list.size());
    Assert.assertEquals("typed", list.get(0));
    Assert.assertEquals("something", list.get(1));
    Assert.assertEquals(0, mStringPool.size());
}
/**
 * Returns a new {@code Properties} containing every entry of {@code properties}
 * whose key is not in {@code keysToRemove}. The input is never mutated.
 */
public Properties apply(final Properties properties) {
    if (properties == null) {
        throw new IllegalArgumentException("properties must not be null");
    }
    final Properties result = new Properties();
    if (properties.isEmpty()) {
        return result;
    }
    for (Map.Entry<Object, Object> entry : properties.entrySet()) {
        if (!keysToRemove.contains(entry.getKey())) {
            result.put(entry.getKey(), entry.getValue());
        }
    }
    return result;
}
@Test
public void doesNotFilterMismatchingKeys() {
    // Given: keysToRemove contains a key absent from the properties.
    Properties properties = new Properties();
    properties.put("one", 1);
    properties.put("two", 2);
    Set<String> keysToRemove = new HashSet<>();
    keysToRemove.add("three");
    Filter f = new Filter(keysToRemove);
    // When
    Properties filtered = f.apply(properties);
    // Then: nothing was removed.
    assertEquals(properties.size(), filtered.size());
    assertTrue(filtered.containsKey("one"));
    assertTrue(filtered.containsKey("two"));
}
/**
 * Builds the processor that resolves foreign-key join responses: it re-reads
 * the current left-hand value, discards stale responses via a value-hash
 * comparison, and forwards the joined result (or a tombstone).
 */
@Override
public Processor<K, SubscriptionResponseWrapper<VO>, K, VR> get() {
    return new ContextualProcessor<K, SubscriptionResponseWrapper<VO>, K, VR>() {
        private String valueHashSerdePseudoTopic;
        private Serializer<V> runtimeValueSerializer = constructionTimeValueSerializer;
        private KTableValueGetter<K, V> valueGetter;
        private Sensor droppedRecordsSensor;

        @SuppressWarnings("unchecked")
        @Override
        public void init(final ProcessorContext<K, VR> context) {
            super.init(context);
            valueHashSerdePseudoTopic = valueHashSerdePseudoTopicSupplier.get();
            valueGetter = valueGetterSupplier.get();
            valueGetter.init(context);
            // Fall back to the context's configured serde when no serializer was
            // supplied at construction time.
            if (runtimeValueSerializer == null) {
                runtimeValueSerializer = (Serializer<V>) context.valueSerde().serializer();
            }
            final InternalProcessorContext<?, ?> internalProcessorContext = (InternalProcessorContext<?, ?>) context;
            droppedRecordsSensor = TaskMetrics.droppedRecordsSensor(
                Thread.currentThread().getName(),
                internalProcessorContext.taskId().toString(),
                internalProcessorContext.metrics()
            );
        }

        @Override
        public void process(final Record<K, SubscriptionResponseWrapper<VO>> record) {
            if (record.value().getVersion() != SubscriptionResponseWrapper.CURRENT_VERSION) {
                //Guard against modifications to SubscriptionResponseWrapper. Need to ensure that there is
                //compatibility with previous versions to enable rolling upgrades. Must develop a strategy for
                //upgrading from older SubscriptionWrapper versions to newer versions.
                throw new UnsupportedVersionException("SubscriptionResponseWrapper is of an incompatible version.");
            }
            final ValueAndTimestamp<V> currentValueWithTimestamp = valueGetter.get(record.key());

            // Hash of the current left-hand value (null when no value exists),
            // used to detect whether the response refers to an outdated value.
            final long[] currentHash = currentValueWithTimestamp == null ?
                null :
                Murmur3.hash128(runtimeValueSerializer.serialize(valueHashSerdePseudoTopic, currentValueWithTimestamp.value()));

            final long[] messageHash = record.value().getOriginalValueHash();

            //If this value doesn't match the current value from the original table, it is stale and should be discarded.
            if (java.util.Arrays.equals(messageHash, currentHash)) {
                final VR result;

                // A null foreign value yields a tombstone, except for a left join
                // with an existing left-hand value.
                if (record.value().getForeignValue() == null && (!leftJoin || currentValueWithTimestamp == null)) {
                    result = null; //Emit tombstone
                } else {
                    result = joiner.apply(currentValueWithTimestamp == null ? null : currentValueWithTimestamp.value(), record.value().getForeignValue());
                }
                context().forward(record.withValue(result));
            } else {
                LOG.trace("Dropping FK-join response due to hash mismatch. Expected {}. Actual {}", messageHash, currentHash);
                droppedRecordsSensor.record();
            }
        }
    };
}
@Test public void shouldEmitTombstoneForInnerJoinWhenRightIsNull() { final TestKTableValueGetterSupplier<String, String> valueGetterSupplier = new TestKTableValueGetterSupplier<>(); final boolean leftJoin = false; final ResponseJoinProcessorSupplier<String, String, String, String> processorSupplier = new ResponseJoinProcessorSupplier<>( valueGetterSupplier, STRING_SERIALIZER, () -> "value-hash-dummy-topic", JOINER, leftJoin ); final Processor<String, SubscriptionResponseWrapper<String>, String, String> processor = processorSupplier.get(); final MockInternalNewProcessorContext<String, String> context = new MockInternalNewProcessorContext<>(); processor.init(context); context.setRecordMetadata("topic", 0, 0); valueGetterSupplier.put("lhs1", "lhsValue"); final long[] hash = Murmur3.hash128(STRING_SERIALIZER.serialize("topic-join-resolver", "lhsValue")); processor.process(new Record<>("lhs1", new SubscriptionResponseWrapper<>(hash, null, 0), 0)); final List<MockProcessorContext.CapturedForward<? extends String, ? extends String>> forwarded = context.forwarded(); assertThat(forwarded.size(), is(1)); assertThat(forwarded.get(0).record(), is(new Record<>("lhs1", null, 0))); }
final boolean isRootDirectory() { // only root directories have their parent link pointing to themselves return isDirectory() && equals(((Directory) this).parent()); }
@Test public void testRootDirectory() { Directory file = Directory.createRoot(0, fileTimeSource.now(), Name.simple("/")); assertThat(file.isRootDirectory()).isTrue(); Directory otherFile = Directory.createRoot(1, fileTimeSource.now(), Name.simple("$")); assertThat(otherFile.isRootDirectory()).isTrue(); }
protected RemotingCommand request(ChannelHandlerContext ctx, RemotingCommand request, ProxyContext context, long timeoutMillis) throws Exception { String brokerName; if (request.getCode() == RequestCode.SEND_MESSAGE_V2) { if (request.getExtFields().get(BROKER_NAME_FIELD_FOR_SEND_MESSAGE_V2) == null) { return RemotingCommand.buildErrorResponse(ResponseCode.VERSION_NOT_SUPPORTED, "Request doesn't have field bname"); } brokerName = request.getExtFields().get(BROKER_NAME_FIELD_FOR_SEND_MESSAGE_V2); } else { if (request.getExtFields().get(BROKER_NAME_FIELD) == null) { return RemotingCommand.buildErrorResponse(ResponseCode.VERSION_NOT_SUPPORTED, "Request doesn't have field bname"); } brokerName = request.getExtFields().get(BROKER_NAME_FIELD); } if (request.isOnewayRPC()) { messagingProcessor.requestOneway(context, brokerName, request, timeoutMillis); return null; } messagingProcessor.request(context, brokerName, request, timeoutMillis) .thenAccept(r -> writeResponse(ctx, context, request, r)) .exceptionally(t -> { writeErrResponse(ctx, context, request, t); return null; }); return null; }
@Test public void testRequestAclException() throws Exception { ArgumentCaptor<RemotingCommand> captor = ArgumentCaptor.forClass(RemotingCommand.class); String brokerName = "broker"; String remark = "exception"; CompletableFuture<RemotingCommand> future = new CompletableFuture<>(); future.completeExceptionally(new AclException(remark, ResponseCode.MESSAGE_ILLEGAL)); when(messagingProcessorMock.request(any(), eq(brokerName), any(), anyLong())).thenReturn(future); RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.PULL_MESSAGE, null); request.addExtField(AbstractRemotingActivity.BROKER_NAME_FIELD, brokerName); RemotingCommand remotingCommand = remotingActivity.request(ctx, request, null, 10000); assertThat(remotingCommand).isNull(); verify(ctx, times(1)).writeAndFlush(captor.capture()); assertThat(captor.getValue().getCode()).isEqualTo(ResponseCode.NO_PERMISSION); }
@Override public Class<? extends ModuleDefine> module() { return ClusterModule.class; }
@Test public void module() { assertEquals(ClusterModule.class, provider.module()); }
public Versions subtract(Versions other) { if (other.lowest() <= lowest) { if (other.highest >= highest) { // Case 1: other is a superset of this. Trim everything. return Versions.NONE; } else if (other.highest < lowest) { // Case 2: other is a disjoint version range that is lower than this. Trim nothing. return this; } else { // Case 3: trim some values from the beginning of this range. // // Note: it is safe to assume that other.highest() + 1 will not overflow. // The reason is because if other.highest() were Short.MAX_VALUE, // other.highest() < highest could not be true. return new Versions((short) (other.highest() + 1), highest); } } else if (other.highest >= highest) { int newHighest = other.lowest - 1; if (newHighest < 0) { // Case 4: other was NONE. Trim nothing. return this; } else if (newHighest < highest) { // Case 5: trim some values from the end of this range. return new Versions(lowest, (short) newHighest); } else { // Case 6: other is a disjoint range that is higher than this. Trim nothing. return this; } } else { // Case 7: the difference between this and other would be two ranges, not one. return null; } }
@Test public void testSubtract() { assertEquals(Versions.NONE, Versions.NONE.subtract(Versions.NONE)); assertEquals(newVersions(0, 0), newVersions(0, 0).subtract(Versions.NONE)); assertEquals(newVersions(1, 1), newVersions(1, 2).subtract(newVersions(2, 2))); assertEquals(newVersions(2, 2), newVersions(1, 2).subtract(newVersions(1, 1))); assertNull(newVersions(0, Short.MAX_VALUE).subtract(newVersions(1, 100))); assertEquals(newVersions(10, 10), newVersions(1, 10).subtract(newVersions(1, 9))); assertEquals(newVersions(1, 1), newVersions(1, 10).subtract(newVersions(2, 10))); assertEquals(newVersions(2, 4), newVersions(2, Short.MAX_VALUE).subtract(newVersions(5, Short.MAX_VALUE))); assertEquals(newVersions(5, Short.MAX_VALUE), newVersions(0, Short.MAX_VALUE).subtract(newVersions(0, 4))); }
public Searcher searcher() { return new Searcher(); }
@Test void requireThatPredicateIndexCanSearchWithNotExpression() { { PredicateIndexBuilder builder = new PredicateIndexBuilder(10); builder.indexDocument(1, Predicate.fromString("country in ['no'] and gender not in ['male']")); PredicateIndex index = builder.build(); PredicateIndex.Searcher searcher = index.searcher(); PredicateQuery query = new PredicateQuery(); query.addFeature("country", "no"); query.addFeature("gender", "female"); assertEquals("[1]", searcher.search(query).toList().toString()); } { PredicateIndexBuilder builder = new PredicateIndexBuilder(10); builder.indexDocument(DOC_ID, Predicate.fromString("country in ['no'] and gender in ['male']")); builder.indexDocument(DOC_ID + 1, Predicate.fromString("country not in ['no']")); PredicateIndex index = builder.build(); PredicateIndex.Searcher searcher = index.searcher(); PredicateQuery query = new PredicateQuery(); assertEquals("[43]", searcher.search(query).toList().toString()); query.addFeature("country", "no"); assertEquals(0, searcher.search(query).count()); } { PredicateIndexBuilder builder = new PredicateIndexBuilder(10); builder.indexDocument(DOC_ID, Predicate.fromString("country not in ['no'] and gender not in ['male']")); PredicateIndex index = builder.build(); PredicateIndex.Searcher searcher = index.searcher(); PredicateQuery query = new PredicateQuery(); assertEquals(1, searcher.search(query).count()); query.addFeature("country", "no"); assertEquals(0, searcher.search(query).count()); query.addFeature("gender", "male"); assertEquals(0, searcher.search(query).count()); query = new PredicateQuery(); query.addFeature("gender", "male"); assertEquals(0, searcher.search(query).count()); } }
@SuppressWarnings("unchecked") public static <T extends FEELFunction> T getFunction(Class<T> functionClazz) { return Stream.of(FUNCTIONS) .filter(f -> functionClazz.isAssignableFrom(f.getClass())) .map(f -> (T) f) .findFirst() .orElseThrow(() -> new IllegalArgumentException("Cannot find function by class " + functionClazz.getCanonicalName() + "!")); }
@Test void getFunctionsByClassFails() { assertThrows(IllegalArgumentException.class, () -> BuiltInFunctions.getFunction(FakeFunction.class)); }
@Override public void execute(Runnable command) { if (command == null) { throw new NullPointerException(); } try { super.execute(command); } catch (RejectedExecutionException rx) { // retry to offer the task into queue. final TaskQueue queue = (TaskQueue) super.getQueue(); try { if (!queue.retryOffer(command, 0, TimeUnit.MILLISECONDS)) { throw new RejectedExecutionException("Queue capacity is full.", rx); } } catch (InterruptedException x) { throw new RejectedExecutionException(x); } } }
@Test void testEagerThreadPoolFast() { String name = "eager-tf"; int queues = 5; int cores = 5; int threads = 10; // alive 1 second long alive = 1000; // init queue and executor TaskQueue<Runnable> taskQueue = new TaskQueue<>(queues); final EagerThreadPoolExecutor executor = new EagerThreadPoolExecutor( cores, threads, alive, TimeUnit.MILLISECONDS, taskQueue, new NamedThreadFactory(name, true), new AbortPolicyWithReport(name, URL)); taskQueue.setExecutor(executor); CountDownLatch countDownLatch1 = new CountDownLatch(1); for (int i = 0; i < 10; i++) { executor.execute(() -> { try { countDownLatch1.await(); } catch (InterruptedException e) { throw new RuntimeException(e); } }); } await().until(() -> executor.getPoolSize() == 10); Assertions.assertEquals(10, executor.getActiveCount()); CountDownLatch countDownLatch2 = new CountDownLatch(1); AtomicBoolean started = new AtomicBoolean(false); for (int i = 0; i < 5; i++) { executor.execute(() -> { started.set(true); try { countDownLatch2.await(); } catch (InterruptedException e) { throw new RuntimeException(e); } }); } await().until(() -> executor.getQueue().size() == 5); Assertions.assertEquals(10, executor.getActiveCount()); Assertions.assertEquals(10, executor.getPoolSize()); Assertions.assertFalse(started.get()); countDownLatch1.countDown(); await().until(() -> executor.getActiveCount() == 5); Assertions.assertTrue(started.get()); countDownLatch2.countDown(); await().until(() -> executor.getActiveCount() == 0); await().until(() -> executor.getPoolSize() == cores); }
public static boolean isTimeoutException(Throwable exception) { if (exception == null) return false; if (exception instanceof ExecutionException) { exception = exception.getCause(); if (exception == null) return false; } return exception instanceof TimeoutException; }
@Test public void testTopicExistsExceptionIsNotTimeoutException() { assertFalse(isTimeoutException(new TopicExistsException("Topic exists."))); }
@ConstantFunction(name = "makedate", argTypes = {INT, INT}, returnType = DATETIME) public static ConstantOperator makeDate(ConstantOperator year, ConstantOperator dayOfYear) { if (year.isNull() || dayOfYear.isNull()) { return ConstantOperator.createNull(Type.DATE); } int yearInt = year.getInt(); if (yearInt < YEAR_MIN || yearInt > YEAR_MAX) { return ConstantOperator.createNull(Type.DATE); } int dayOfYearInt = dayOfYear.getInt(); if (dayOfYearInt < DAY_OF_YEAR_MIN || dayOfYearInt > DAY_OF_YEAR_MAX) { return ConstantOperator.createNull(Type.DATE); } LocalDate ld = LocalDate.of(yearInt, 1, 1) .plusDays(dayOfYearInt - 1); if (ld.getYear() != year.getInt()) { return ConstantOperator.createNull(Type.DATE); } return ConstantOperator.createDateOrNull(ld.atTime(0, 0, 0)); }
@Test public void makeDate() { ConnectContext ctx = new ConnectContext(null); ctx.setThreadLocalInfo(); ctx.setStartTime(); assertEquals(ConstantOperator.createNull(Type.DATE), ScalarOperatorFunctions.makeDate(ConstantOperator.createNull(Type.INT), ConstantOperator.createNull(Type.INT))); assertEquals(ConstantOperator.createNull(Type.DATE), ScalarOperatorFunctions.makeDate(ConstantOperator.createNull(Type.INT), ConstantOperator.createInt(1))); assertEquals(ConstantOperator.createNull(Type.DATE), ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(1), ConstantOperator.createNull(Type.INT))); assertEquals(ConstantOperator.createNull(Type.DATE), ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(2000), ConstantOperator.createInt(0))); assertEquals(ConstantOperator.createNull(Type.DATE), ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(2000), ConstantOperator.createInt(367))); assertEquals(ConstantOperator.createNull(Type.DATE), ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(-1), ConstantOperator.createInt(1))); assertEquals(ConstantOperator.createNull(Type.DATE), ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(10000), ConstantOperator.createInt(1))); assertEquals(ConstantOperator.createDate(LocalDateTime.of(2000, 1, 1, 0, 0, 0)), ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(2000), ConstantOperator.createInt(1))); assertEquals(ConstantOperator.createDate(LocalDateTime.of(2000, 12, 31, 0, 0, 0)), ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(2000), ConstantOperator.createInt(366))); assertEquals(ConstantOperator.createDate(LocalDateTime.of(0, 1, 1, 0, 0, 0)), ScalarOperatorFunctions.makeDate(ConstantOperator.createInt(0), ConstantOperator.createInt(1))); }
public static ComposeCombineFnBuilder compose() { return new ComposeCombineFnBuilder(); }
@Test public void testDuplicatedTagsWithContext() { expectedException.expect(IllegalArgumentException.class); expectedException.expectMessage("it is already present in the composition"); TupleTag<UserString> tag = new TupleTag<>(); CombineFns.compose() .with(new GetUserStringFunction(), new ConcatStringWithContext(null /* view */), tag) .with(new GetUserStringFunction(), new ConcatStringWithContext(null /* view */), tag); }
public EntryWriter launchEntryWriter(SequenceFile.Writer writer, int capacity) { final EntryWriter ew = new EntryWriter(writer, capacity); ew.start(); return ew; }
@Test public void testCreateInvalidWriterCapacity() throws Throwable { intercept(IllegalStateException.class, () -> entryFileIO.launchEntryWriter(null, 0)); }
@Override public ChannelFuture writeData(final ChannelHandlerContext ctx, final int streamId, ByteBuf data, int padding, final boolean endOfStream, ChannelPromise promise) { promise = promise.unvoid(); final Http2Stream stream; try { stream = requireStream(streamId); // Verify that the stream is in the appropriate state for sending DATA frames. switch (stream.state()) { case OPEN: case HALF_CLOSED_REMOTE: // Allowed sending DATA frames in these states. break; default: throw new IllegalStateException("Stream " + stream.id() + " in unexpected state " + stream.state()); } } catch (Throwable e) { data.release(); return promise.setFailure(e); } // Hand control of the frame to the flow controller. flowController().addFlowControlled(stream, new FlowControlledData(stream, data, padding, endOfStream, promise)); return promise; }
@Test public void canWriteDataFrameAfterGoAwaySent() throws Exception { Http2Stream stream = createStream(STREAM_ID, false); connection.goAwaySent(0, 0, EMPTY_BUFFER); ByteBuf data = mock(ByteBuf.class); encoder.writeData(ctx, STREAM_ID, data, 0, false, newPromise()); verify(remoteFlow).addFlowControlled(eq(stream), any(FlowControlled.class)); }
@Override public Mono<SearchResult> search(SearchOption option) { // validate the option var errors = validator.validateObject(option); if (errors.hasErrors()) { return Mono.error(new RequestBodyValidationException(errors)); } return extensionGetter.getEnabledExtension(SearchEngine.class) .filter(SearchEngine::available) .switchIfEmpty(Mono.error(SearchEngineUnavailableException::new)) .flatMap(searchEngine -> Mono.fromSupplier(() -> searchEngine.search(option) ).subscribeOn(Schedulers.boundedElastic())); }
@Test void shouldThrowSearchEngineUnavailableExceptionIfNoSearchEngineAvailable() { var option = new SearchOption(); option.setKeyword("halo"); var errors = mock(Errors.class); when(errors.hasErrors()).thenReturn(false); when(validator.validateObject(option)).thenReturn(errors); when(extensionGetter.getEnabledExtension(SearchEngine.class)) .thenAnswer(invocation -> Mono.fromSupplier(() -> { var searchEngine = mock(SearchEngine.class); when(searchEngine.available()).thenReturn(false); return searchEngine; })); searchService.search(option) .as(StepVerifier::create) .expectError(SearchEngineUnavailableException.class); }
public static String wrap(String input, Formatter formatter) throws FormatterException { return StringWrapper.wrap(Formatter.MAX_LINE_LENGTH, input, formatter); }
@Test public void textBlock() throws Exception { assumeTrue(Runtime.version().feature() >= 15); String input = lines( "package com.mypackage;", "public class ReproBug {", " private String myString;", " private ReproBug() {", " String str =", " \"\"\"", "{\"sourceEndpoint\":\"ri.something.1-1.object-internal.1\",\"targetEndpoint" + "\":\"ri.something.1-1.object-internal.2\",\"typeId\":\"typeId\"}\"\"\";", " myString = str;", " }", "}"); assertThat(StringWrapper.wrap(100, input, new Formatter())).isEqualTo(input); }
/**
 * Splits a SQL text into individual statements on unquoted, uncommented
 * semicolons, then re-attaches comment-only fragments to neighbouring
 * statements as blank lines so original line numbers are preserved.
 *
 * @param text the raw SQL text, possibly containing quotes and comments
 * @return the list of individual SQL statements
 */
public List<String> splitSql(String text) {
    List<String> queries = new ArrayList<>();
    StringBuilder query = new StringBuilder();
    char character;

    // Scanner state: which quoting/comment context the current character is in.
    boolean multiLineComment = false;
    boolean singleLineComment = false;
    boolean singleQuoteString = false;
    boolean doubleQuoteString = false;

    for (int index = 0; index < text.length(); index++) {
        character = text.charAt(index);

        // end of single line comment
        if (singleLineComment && (character == '\n')) {
            singleLineComment = false;
            query.append(character);
            if (index == (text.length() - 1) && !query.toString().trim().isEmpty()) {
                // add query when it is the end of sql.
                queries.add(query.toString());
            }
            continue;
        }

        // end of multiple line comment (the two characters before this one were "*/")
        if (multiLineComment && (index - 1) >= 0 && text.charAt(index - 1) == '/'
                && (index - 2) >= 0 && text.charAt(index - 2) == '*') {
            multiLineComment = false;
        }

        // Toggle single-quote string state (ignored inside comments).
        if (character == '\'' && !(singleLineComment || multiLineComment)) {
            if (singleQuoteString) {
                singleQuoteString = false;
            } else if (!doubleQuoteString) {
                singleQuoteString = true;
            }
        }

        // Toggle double-quote string state (ignored inside comments).
        if (character == '"' && !(singleLineComment || multiLineComment)) {
            if (doubleQuoteString && index > 0) {
                doubleQuoteString = false;
            } else if (!singleQuoteString) {
                doubleQuoteString = true;
            }
        }

        // Detect the start of a comment; "/*+" is a hint, not a comment.
        if (!singleQuoteString && !doubleQuoteString && !multiLineComment && !singleLineComment
                && text.length() > (index + 1)) {
            if (isSingleLineComment(text.charAt(index), text.charAt(index + 1))) {
                singleLineComment = true;
            } else if (text.charAt(index) == '/' && text.length() > (index + 2)
                    && text.charAt(index + 1) == '*' && text.charAt(index + 2) != '+') {
                multiLineComment = true;
            }
        }

        if (character == ';' && !singleQuoteString && !doubleQuoteString && !multiLineComment
                && !singleLineComment) {
            // meet the end of semicolon
            if (!query.toString().trim().isEmpty()) {
                queries.add(query.toString());
                query = new StringBuilder();
            }
        } else if (index == (text.length() - 1)) {
            // meet the last character
            if ((!singleLineComment && !multiLineComment)) {
                query.append(character);
            }

            if (!query.toString().trim().isEmpty()) {
                queries.add(query.toString());
                query = new StringBuilder();
            }
        } else if (!singleLineComment && !multiLineComment) {
            // normal case, not in single line comment and not in multiple line comment
            query.append(character);
        } else if (character == '\n') {
            // Keep newlines that occur inside comments so line numbers line up.
            query.append(character);
        }
    }

    // Second pass: drop comment-only fragments but preserve their line count by
    // folding equivalent blank lines into the neighbouring statements.
    List<String> refinedQueries = new ArrayList<>();
    for (int i = 0; i < queries.size(); ++i) {
        String emptyLine = "";
        if (i > 0) {
            emptyLine = createEmptyLine(refinedQueries.get(i - 1));
        }
        if (isSingleLineComment(queries.get(i)) || isMultipleLineComment(queries.get(i))) {
            // refine the last refinedQuery
            if (refinedQueries.size() > 0) {
                String lastRefinedQuery = refinedQueries.get(refinedQueries.size() - 1);
                refinedQueries.set(refinedQueries.size() - 1,
                        lastRefinedQuery + createEmptyLine(queries.get(i)));
            }
        } else {
            String refinedQuery = emptyLine + queries.get(i);
            refinedQueries.add(refinedQuery);
        }
    }
    return refinedQueries;
}
@Test void testNormalSql() { SqlSplitter sqlSplitter = new SqlSplitter(); List<String> sqls = sqlSplitter.splitSql("show tables"); assertEquals(1, sqls.size()); assertEquals("show tables", sqls.get(0)); sqls = sqlSplitter.splitSql("show tables;"); assertEquals(1, sqls.size()); assertEquals("show tables", sqls.get(0)); sqls = sqlSplitter.splitSql("show tables;\n"); assertEquals(1, sqls.size()); assertEquals("show tables", sqls.get(0)); sqls = sqlSplitter.splitSql("\nshow tables;"); assertEquals(1, sqls.size()); assertEquals("\nshow tables", sqls.get(0)); sqls = sqlSplitter.splitSql("show tables;\nselect * from table_1"); assertEquals(2, sqls.size()); assertEquals("show tables", sqls.get(0)); assertEquals("\nselect * from table_1", sqls.get(1)); sqls = sqlSplitter.splitSql("show tables;\n\nselect * from table_1"); assertEquals(2, sqls.size()); assertEquals("show tables", sqls.get(0)); assertEquals("\n\nselect * from table_1", sqls.get(1)); sqls = sqlSplitter.splitSql("show\ntables;\nselect * \nfrom table_1"); assertEquals(2, sqls.size()); assertEquals("show\ntables", sqls.get(0)); assertEquals("\n\nselect * \nfrom table_1", sqls.get(1)); }
@CheckForNull public String get() { // branches will be empty in CE if (branchConfiguration.isPullRequest() || branches.isEmpty()) { return null; } return Optional.ofNullable(getFromProperties()).orElseGet(this::loadWs); }
@Test public void get_returns_null_if_no_branches() { when(projectBranches.isEmpty()).thenReturn(true); assertThat(referenceBranchSupplier.get()).isNull(); verify(branchConfiguration).isPullRequest(); verify(projectBranches).isEmpty(); verifyNoMoreInteractions(branchConfiguration); verifyNoInteractions(newCodePeriodLoader); }
public void upgrade() { final List<User> users = userService.loadAll(); for (User user : users) { final Map<String, Set<String>> migratableEntities = getMigratableEntities(ImmutableSet.copyOf(user.getPermissions())); if (!migratableEntities.isEmpty()) { migrateUserPermissions(user, migratableEntities); } } }
@Test void migrateSomeUserPermissions() { User testuser2 = userService.load("testuser2"); assertThat(testuser2).isNotNull(); assertThat(testuser2.getPermissions().size()).isEqualTo(6 + userSelfEditPermissionCount); assertThat(dbGrantService.getForGranteesOrGlobal(ImmutableSet.of(grnRegistry.ofUser(testuser2)))).isEmpty(); migration.upgrade(); // check created grants for testuser2 final ImmutableSet<GrantDTO> grants = dbGrantService.getForGranteesOrGlobal(ImmutableSet.of(grnRegistry.ofUser(testuser2))); assertGrantInSet(grants, "grn::::dashboard:5e2afc66cd19517ec2dabadf", Capability.MANAGE); assertThat(grants.size()).isEqualTo(1); // reload user and check that all migrated permissions have been removed. (should be only two less) testuser2 = userService.load("testuser2"); assertThat(testuser2).isNotNull(); assertThat(testuser2.getPermissions().size()).isEqualTo(4 + userSelfEditPermissionCount); }
@Override public Row withValue( final GenericRow newValue, final LogicalSchema newSchema ) { return new Row( newSchema, key, newValue, rowTime, validator ); }
@SuppressFBWarnings("RV_RETURN_VALUE_IGNORED_INFERRED") @Test public void shouldValidateOnCopy() { // Given: final Row row = new Row(SCHEMA, A_KEY, A_VALUE, A_ROWTIME, validator); clearInvocations(validator); // When: row.withValue(A_VALUE, SCHEMA); // Then: verify(validator).validate(SCHEMA, A_KEY, A_VALUE); }
@Override public void addSink(McastRoute route, ConnectPoint connectPoint) { checkNotNull(route, "Route cannot be null"); checkNotNull(connectPoint, "Sink cannot be null"); store.storeSink(route, connectPoint, McastStore.Type.ADD); }
@Test public void testAddSink() { manager.addSink(r1, cp1); validateEvents(McastEvent.Type.SINK_ADDED); assertEquals("Route is not equal", Sets.newHashSet(cp1), manager.fetchSinks(r1)); }
public static IpAddress valueOf(int value) { byte[] bytes = ByteBuffer.allocate(INET_BYTE_LENGTH).putInt(value).array(); return new IpAddress(Version.INET, bytes); }
@Test(expected = IllegalArgumentException.class) public void testInvalidValueOfEmptyString() { IpAddress ipAddress; String fromString = ""; ipAddress = IpAddress.valueOf(fromString); }
/**
 * Telnet 'invoke' command: parses "service.method(args...)" from the input,
 * resolves the target provider and method (possibly via a prior 'select'),
 * invokes it reflectively with the JSON-decoded arguments, and renders the
 * JSON result plus elapsed time.
 *
 * @param commandContext the telnet command context (carries the channel)
 * @param args           the raw command arguments; args[0] is the invocation text
 * @return the text to print back to the telnet client
 */
@Override
public String execute(CommandContext commandContext, String[] args) {
    if (ArrayUtils.isEmpty(args)) {
        return "Please input method name, eg: \r\ninvoke xxxMethod(1234, \"abcd\", {\"prop\" : \"value\"})\r\n"
                + "invoke XxxService.xxxMethod(1234, \"abcd\", {\"prop\" : \"value\"})\r\n"
                + "invoke com.xxx.XxxService.xxxMethod(1234, \"abcd\", {\"prop\" : \"value\"})";
    }
    Channel channel = commandContext.getRemote();
    // Default service may have been set earlier by the 'cd' (change) command.
    String service = channel.attr(ChangeTelnet.SERVICE_KEY) != null
            ? channel.attr(ChangeTelnet.SERVICE_KEY).get()
            : null;

    String message = args[0];
    int i = message.indexOf("(");

    if (i < 0 || !message.endsWith(")")) {
        return "Invalid parameters, format: service.method(args)";
    }

    String method = message.substring(0, i).trim();
    String param = message.substring(i + 1, message.length() - 1).trim();
    i = method.lastIndexOf(".");
    if (i >= 0) {
        // An explicit "Service.method" overrides the default service.
        service = method.substring(0, i).trim();
        method = method.substring(i + 1).trim();
    }
    if (StringUtils.isEmpty(service)) {
        return "If you want to invoke like [invoke sayHello(\"xxxx\")], please execute cd command first,"
                + " or you can execute it like [invoke IHelloService.sayHello(\"xxxx\")]";
    }

    // Arguments are parsed as a JSON array wrapped around the raw parameter text.
    List<Object> list;
    try {
        list = JsonUtils.toJavaList("[" + param + "]", Object.class);
    } catch (Throwable t) {
        return "Invalid json argument, cause: " + t.getMessage();
    }
    StringBuilder buf = new StringBuilder();
    Method invokeMethod = null;
    ProviderModel selectedProvider = null;
    if (isInvokedSelectCommand(channel)) {
        // A previous invocation was ambiguous and the user ran 'select';
        // reuse the provider/method chosen there.
        selectedProvider = channel.attr(INVOKE_METHOD_PROVIDER_KEY).get();
        invokeMethod = channel.attr(SelectTelnet.SELECT_METHOD_KEY).get();
    } else {
        // Scan all exported providers for a matching service and method signature.
        for (ProviderModel provider : frameworkModel.getServiceRepository().allProviderModels()) {
            if (!isServiceMatch(service, provider)) {
                continue;
            }

            selectedProvider = provider;
            List<Method> methodList = findSameSignatureMethod(provider.getAllMethods(), method, list);
            if (CollectionUtils.isEmpty(methodList)) {
                break;
            }

            if (methodList.size() == 1) {
                invokeMethod = methodList.get(0);
            } else {
                // Same name and arity but different parameter types: narrow by argument values.
                List<Method> matchMethods = findMatchMethods(methodList, list);
                if (CollectionUtils.isEmpty(matchMethods)) {
                    break;
                }
                if (matchMethods.size() == 1) {
                    invokeMethod = matchMethods.get(0);
                } else {
                    // exist overridden method
                    // Still ambiguous: stash candidates on the channel and ask the
                    // user to run 'select'.
                    channel.attr(INVOKE_METHOD_PROVIDER_KEY).set(provider);
                    channel.attr(INVOKE_METHOD_LIST_KEY).set(matchMethods);
                    channel.attr(INVOKE_MESSAGE_KEY).set(message);
                    printSelectMessage(buf, matchMethods);
                    return buf.toString();
                }
            }
            break;
        }
    }

    if (!StringUtils.isEmpty(service)) {
        buf.append("Use default service ").append(service).append('.');
    }

    if (selectedProvider == null) {
        buf.append("\r\nNo such service ").append(service);
        return buf.toString();
    }
    if (invokeMethod == null) {
        buf.append("\r\nNo such method ")
                .append(method)
                .append(" in service ")
                .append(service);
        return buf.toString();
    }
    try {
        // Convert the generic JSON values into the method's declared parameter types.
        Object[] array =
                realize(list.toArray(), invokeMethod.getParameterTypes(), invokeMethod.getGenericParameterTypes());
        long start = System.currentTimeMillis();
        AppResponse result = new AppResponse();
        try {
            Object o = invokeMethod.invoke(selectedProvider.getServiceInstance(), array);
            boolean setValueDone = false;
            // Async providers deliver the real result through the async context's
            // internal future rather than the direct return value.
            if (RpcContext.getServerAttachment().isAsyncStarted()) {
                AsyncContext asyncContext = RpcContext.getServerAttachment().getAsyncContext();
                if (asyncContext instanceof AsyncContextImpl) {
                    CompletableFuture<Object> internalFuture = ((AsyncContextImpl) asyncContext).getInternalFuture();
                    result.setValue(internalFuture.get());
                    setValueDone = true;
                }
            }
            if (!setValueDone) {
                result.setValue(o);
            }
        } catch (Throwable t) {
            result.setException(t);
            if (t instanceof InterruptedException) {
                // Preserve the interrupt flag for the telnet worker thread.
                Thread.currentThread().interrupt();
            }
        } finally {
            RpcContext.removeContext();
        }
        long end = System.currentTimeMillis();
        buf.append("\r\nresult: ");
        buf.append(JsonUtils.toJson(result.recreate()));
        buf.append("\r\nelapsed: ");
        buf.append(end - start);
        buf.append(" ms.");
    } catch (Throwable t) {
        return "Failed to invoke method " + invokeMethod.getName() + ", cause: " + StringUtils.toString(t);
    }
    return buf.toString();
}
@Test void testMessageNull() throws RemotingException { defaultAttributeMap.attr(ChangeTelnet.SERVICE_KEY).set(null); defaultAttributeMap.attr(SelectTelnet.SELECT_KEY).set(null); given(mockChannel.attr(ChangeTelnet.SERVICE_KEY)) .willReturn(defaultAttributeMap.attr(ChangeTelnet.SERVICE_KEY)); given(mockChannel.attr(SelectTelnet.SELECT_KEY)).willReturn(defaultAttributeMap.attr(SelectTelnet.SELECT_KEY)); String result = invoke.execute(mockCommandContext, new String[0]); assertEquals( "Please input method name, eg: \r\ninvoke xxxMethod(1234, \"abcd\", {\"prop\" : \"value\"})\r\ninvoke XxxService.xxxMethod(1234, \"abcd\", {\"prop\" : \"value\"})\r\ninvoke com.xxx.XxxService.xxxMethod(1234, \"abcd\", {\"prop\" : \"value\"})", result); defaultAttributeMap.attr(ChangeTelnet.SERVICE_KEY).remove(); defaultAttributeMap.attr(SelectTelnet.SELECT_KEY).remove(); }
public AMRMClient.ContainerRequest getContainerRequest( Resource containerResource, Priority priority, String nodeLabel) { if (StringUtils.isNullOrWhitespaceOnly(nodeLabel) || defaultConstructor == null) { return new AMRMClient.ContainerRequest(containerResource, null, null, priority); } try { /** * Set the param of relaxLocality to true, which tells the Yarn ResourceManager if the * application wants locality to be loose (i.e. allows fall-through to rack or any) */ return defaultConstructor.newInstance( containerResource, null, null, priority, true, nodeLabel); } catch (InvocationTargetException | InstantiationException | IllegalAccessException e) { LOG.warn("Errors on creating Container Request.", e); } return new AMRMClient.ContainerRequest(containerResource, null, null, priority); }
@Test void testGetContainerRequestIfConstructorAbsent() { final ContainerRequestReflector containerRequestReflector = new ContainerRequestReflector(ContainerRequestWithoutConstructor.class); Resource resource = Resource.newInstance(100, 1); Priority priority = Priority.newInstance(1); AMRMClient.ContainerRequest containerRequest = containerRequestReflector.getContainerRequest(resource, priority, "GPU"); assertThat(containerRequest).isNotInstanceOf(ContainerRequestWithoutConstructor.class); containerRequest = containerRequestReflector.getContainerRequest(resource, priority, null); assertThat(containerRequest).isNotInstanceOf(ContainerRequestWithoutConstructor.class); containerRequest = containerRequestReflector.getContainerRequest(resource, priority, ""); assertThat(containerRequest).isNotInstanceOf(ContainerRequestWithoutConstructor.class); }
public Single<Boolean> addAll(Publisher<? extends V> c) { return new PublisherAdder<V>() { @Override public RFuture<Boolean> add(Object o) { return instance.addAsync((V) o); } }.addAll(c); }
@Test public void testAddAllEmpty() { RListRx<Integer> list = redisson.getList("list"); Assertions.assertEquals(false, sync(list.addAll(Collections.<Integer>emptyList()))); Assertions.assertEquals(0, sync(list.size()).intValue()); }
private synchronized boolean validateClientAcknowledgement(long h) { if (h < 0) { throw new IllegalArgumentException("Argument 'h' cannot be negative, but was: " + h); } if (h > MASK) { throw new IllegalArgumentException("Argument 'h' cannot be larger than 2^32 -1, but was: " + h); } final long oldH = clientProcessedStanzas.get(); final Long lastUnackedX = unacknowledgedServerStanzas.isEmpty() ? null : unacknowledgedServerStanzas.getLast().x; return validateClientAcknowledgement(h, oldH, lastUnackedX); }
@Test public void testValidateClientAcknowledgement_rollover_edgecase2() throws Exception { // Setup test fixture. final long MAX = new BigInteger( "2" ).pow( 32 ).longValue() - 1; final long h = MAX; final long oldH = MAX-2; final Long lastUnackedX = MAX; // Execute system under test. final boolean result = StreamManager.validateClientAcknowledgement(h, oldH, lastUnackedX); // Verify results. assertTrue(result); }
@Override public void export(RegisterTypeEnum registerType) { if (this.exported) { return; } if (getScopeModel().isLifeCycleManagedExternally()) { // prepare model for reference getScopeModel().getDeployer().prepare(); } else { // ensure start module, compatible with old api usage getScopeModel().getDeployer().start(); } synchronized (this) { if (this.exported) { return; } if (!this.isRefreshed()) { this.refresh(); } if (this.shouldExport()) { this.init(); if (shouldDelay()) { // should register if delay export doDelayExport(); } else if (Integer.valueOf(-1).equals(getDelay()) && Boolean.parseBoolean(ConfigurationUtils.getProperty( getScopeModel(), CommonConstants.DUBBO_MANUAL_REGISTER_KEY, "false"))) { // should not register by default doExport(RegisterTypeEnum.MANUAL_REGISTER); } else { doExport(registerType); } } } }
@Test void testServiceListener() { ExtensionLoader<ServiceListener> extensionLoader = ExtensionLoader.getExtensionLoader(ServiceListener.class); MockServiceListener mockServiceListener = (MockServiceListener) extensionLoader.getExtension("mock"); assertNotNull(mockServiceListener); mockServiceListener.clearExportedServices(); service.export(); Map<String, ServiceConfig> exportedServices = mockServiceListener.getExportedServices(); assertEquals(1, exportedServices.size()); ServiceConfig serviceConfig = exportedServices.get(service.getUniqueServiceName()); assertSame(service, serviceConfig); }
@Override public void pluginStateChanged(PluginStateEvent event) { var pluginState = event.getPluginState(); String pluginId = event.getPlugin().getPluginId(); if (pluginState == PluginState.UNLOADED) { entries.remove(pluginId); } else if (pluginState == PluginState.CREATED || pluginState == PluginState.RESOLVED) { entries.computeIfAbsent(pluginId, id -> readPluginStorage(event.getPlugin())); } }
@Test void shouldPutEntryIfPluginCreated() throws FileNotFoundException { var pluginWrapper = mockPluginWrapper(); when(pluginWrapper.getPluginState()).thenReturn(PluginState.CREATED); var event = new PluginStateEvent(pluginManager, pluginWrapper, null); finder.pluginStateChanged(event); var classNames = finder.findClassNames("fake-plugin"); assertEquals(Set.of("run.halo.fake.FakePlugin"), classNames); }
public static boolean isBasicInfoChanged(Member actual, Member expected) { if (null == expected) { return null != actual; } if (!expected.getIp().equals(actual.getIp())) { return true; } if (expected.getPort() != actual.getPort()) { return true; } if (!expected.getAddress().equals(actual.getAddress())) { return true; } if (!expected.getState().equals(actual.getState())) { return true; } // if change if (expected.isGrpcReportEnabled() != actual.isGrpcReportEnabled()) { return true; } return isBasicInfoChangedInExtendInfo(expected, actual); }
@Test void testIsBasicInfoChangedForStatus() { Member newMember = buildMember(); newMember.setState(NodeState.DOWN); assertTrue(MemberUtil.isBasicInfoChanged(newMember, originalMember)); }
@VisibleForTesting static IssueCache.Issue toProto(IssueCache.Issue.Builder builder, DefaultIssue defaultIssue) { builder.clear(); builder.setKey(defaultIssue.key()); builder.setRuleType(defaultIssue.type().getDbConstant()); ofNullable(defaultIssue.getCleanCodeAttribute()).ifPresent(value -> builder.setCleanCodeAttribute(value.name())); ofNullable(defaultIssue.componentUuid()).ifPresent(builder::setComponentUuid); builder.setComponentKey(defaultIssue.componentKey()); builder.setProjectUuid(defaultIssue.projectUuid()); builder.setProjectKey(defaultIssue.projectKey()); builder.setRuleKey(defaultIssue.ruleKey().toString()); ofNullable(defaultIssue.language()).ifPresent(builder::setLanguage); ofNullable(defaultIssue.severity()).ifPresent(builder::setSeverity); builder.setManualSeverity(defaultIssue.manualSeverity()); ofNullable(defaultIssue.message()).ifPresent(builder::setMessage); ofNullable(defaultIssue.getMessageFormattings()).ifPresent(m -> builder.setMessageFormattings((DbIssues.MessageFormattings) m)); ofNullable(defaultIssue.line()).ifPresent(builder::setLine); ofNullable(defaultIssue.gap()).ifPresent(builder::setGap); ofNullable(defaultIssue.effort()).map(Duration::toMinutes).ifPresent(builder::setEffort); builder.setStatus(defaultIssue.status()); ofNullable(defaultIssue.resolution()).ifPresent(builder::setResolution); ofNullable(defaultIssue.assignee()).ifPresent(builder::setAssigneeUuid); ofNullable(defaultIssue.assigneeLogin()).ifPresent(builder::setAssigneeLogin); ofNullable(defaultIssue.checksum()).ifPresent(builder::setChecksum); ofNullable(defaultIssue.authorLogin()).ifPresent(builder::setAuthorLogin); defaultIssue.defaultIssueComments().forEach(c -> builder.addComments(toProtoComment(c))); ofNullable(defaultIssue.tags()).ifPresent(t -> builder.setTags(String.join(TAGS_SEPARATOR, t))); ofNullable(defaultIssue.codeVariants()).ifPresent(codeVariant -> builder.setCodeVariants(String.join(TAGS_SEPARATOR, codeVariant))); 
ofNullable(defaultIssue.getLocations()).ifPresent(l -> builder.setLocations((DbIssues.Locations) l)); defaultIssue.getRuleDescriptionContextKey().ifPresent(builder::setRuleDescriptionContextKey); builder.setIsFromExternalRuleEngine(defaultIssue.isFromExternalRuleEngine()); builder.setCreationDate(defaultIssue.creationDate().getTime()); ofNullable(defaultIssue.updateDate()).map(Date::getTime).ifPresent(builder::setUpdateDate); ofNullable(defaultIssue.closeDate()).map(Date::getTime).ifPresent(builder::setCloseDate); ofNullable(defaultIssue.currentChange()).ifPresent(c -> builder.setCurrentChanges(toProtoIssueChanges(c))); builder.setIsNew(defaultIssue.isNew()); builder.setIsOnChangedLine(defaultIssue.isOnChangedLine()); builder.setIsPrioritizedRule(defaultIssue.isPrioritizedRule()); builder.setIsNewCodeReferenceIssue(defaultIssue.isNewCodeReferenceIssue()); builder.setIsCopied(defaultIssue.isCopied()); builder.setBeingClosed(defaultIssue.isBeingClosed()); builder.setOnDisabledRule(defaultIssue.isOnDisabledRule()); builder.setIsChanged(defaultIssue.isChanged()); builder.setSendNotifications(defaultIssue.mustSendNotifications()); ofNullable(defaultIssue.selectedAt()).ifPresent(builder::setSelectedAt); builder.setQuickFixAvailable(defaultIssue.isQuickFixAvailable()); builder.setIsNoLongerNewCodeReferenceIssue(defaultIssue.isNoLongerNewCodeReferenceIssue()); defaultIssue.getAnticipatedTransitionUuid().ifPresent(builder::setAnticipatedTransitionUuid); for (Map.Entry<SoftwareQuality, Severity> impact : defaultIssue.impacts().entrySet()) { builder.addImpacts(IssueCache.Impact.newBuilder() .setSoftwareQuality(impact.getKey().name()) .setSeverity(impact.getValue().name()) .build()); } for (FieldDiffs fieldDiffs : defaultIssue.changes()) { builder.addChanges(toProtoIssueChanges(fieldDiffs)); } return builder.build(); }
@Test public void toProto_whenRuleDescriptionContextKeySet_shouldCopyToIssueProto() { DefaultIssue defaultIssue = createDefaultIssueWithMandatoryFields(); defaultIssue.setRuleDescriptionContextKey(TEST_CONTEXT_KEY); IssueCache.Issue issue = ProtobufIssueDiskCache.toProto(IssueCache.Issue.newBuilder(), defaultIssue); assertThat(issue.hasRuleDescriptionContextKey()).isTrue(); assertThat(issue.getRuleDescriptionContextKey()).isEqualTo(TEST_CONTEXT_KEY); }
@Nonnull public static List<Object> getValues(@Nonnull final JsonArray array, @Nonnull final String path) throws ParsingException { final List<Object> result = new ArrayList<>(); for (int i = 0; i < array.size(); i++) { final JsonObject obj = array.getObject(i); result.add(getValue(obj, path)); } return result; }
@Test public void testGetValues() throws JsonParserException, ParsingException { JsonObject obj = JsonParser.object().from("{\"id\":\"0001\",\"type\":\"donut\",\"name\":\"Cake\",\"ppu\":0.55,\"batters\":{\"batter\":[{\"id\":\"1001\",\"type\":\"Regular\"},{\"id\":\"1002\",\"type\":\"Chocolate\"},{\"id\":\"1003\",\"type\":\"Blueberry\"},{\"id\":\"1004\",\"type\":\"Devil's Food\"}]},\"topping\":[{\"id\":\"5001\",\"type\":\"None\"},{\"id\":\"5002\",\"type\":\"Glazed\"},{\"id\":\"5005\",\"type\":\"Sugar\"},{\"id\":\"5007\",\"type\":\"Powdered Sugar\"},{\"id\":\"5006\",\"type\":\"Chocolate with Sprinkles\"},{\"id\":\"5003\",\"type\":\"Chocolate\"},{\"id\":\"5004\",\"type\":\"Maple\"}]}"); JsonArray arr = (JsonArray) JsonUtils.getValue(obj, "topping"); List<Object> types = JsonUtils.getValues(arr, "type"); assertTrue(types.contains("Chocolate with Sprinkles")); }
@Override public String formatSmsTemplateContent(String content, Map<String, Object> params) { return StrUtil.format(content, params); }
@Test public void testFormatSmsTemplateContent() { // 准备参数 String content = "正在进行登录操作{operation},您的验证码是{code}"; Map<String, Object> params = MapUtil.<String, Object>builder("operation", "登录") .put("code", "1234").build(); // 调用 String result = smsTemplateService.formatSmsTemplateContent(content, params); // 断言 assertEquals("正在进行登录操作登录,您的验证码是1234", result); }
@SuppressWarnings("ResultOfMethodCallIgnored") public static boolean tryShutdownExecutorElegantly(ExecutorService executor, Duration timeout) { try { executor.shutdown(); executor.awaitTermination(timeout.toMillis(), TimeUnit.MILLISECONDS); } catch (InterruptedException ie) { // Let it go. } if (!executor.isTerminated()) { shutdownExecutorForcefully(executor, Duration.ZERO, false); } return executor.isTerminated(); }
@Test void testTryShutdownExecutorElegantlyTimeoutWithForcefulShutdown() { MockExecutorService executor = new MockExecutorService(5); executor.timeoutAfterNumForcefulShutdown(clock, 0); assertThat(ComponentClosingUtils.tryShutdownExecutorElegantly(executor, Duration.ofDays(1))) .isFalse(); assertThat(executor.forcefullyShutdownCount).isOne(); }
@SuppressWarnings("unchecked") public <IN, OUT> AvroDatumConverter<IN, OUT> create(Class<IN> inputClass) { boolean isMapOnly = ((JobConf) getConf()).getNumReduceTasks() == 0; if (AvroKey.class.isAssignableFrom(inputClass)) { Schema schema; if (isMapOnly) { schema = AvroJob.getMapOutputKeySchema(getConf()); if (null == schema) { schema = AvroJob.getOutputKeySchema(getConf()); } } else { schema = AvroJob.getOutputKeySchema(getConf()); } if (null == schema) { throw new IllegalStateException("Writer schema for output key was not set. Use AvroJob.setOutputKeySchema()."); } return (AvroDatumConverter<IN, OUT>) new AvroWrapperConverter(schema); } if (AvroValue.class.isAssignableFrom(inputClass)) { Schema schema; if (isMapOnly) { schema = AvroJob.getMapOutputValueSchema(getConf()); if (null == schema) { schema = AvroJob.getOutputValueSchema(getConf()); } } else { schema = AvroJob.getOutputValueSchema(getConf()); } if (null == schema) { throw new IllegalStateException( "Writer schema for output value was not set. 
Use AvroJob.setOutputValueSchema()."); } return (AvroDatumConverter<IN, OUT>) new AvroWrapperConverter(schema); } if (BooleanWritable.class.isAssignableFrom(inputClass)) { return (AvroDatumConverter<IN, OUT>) new BooleanWritableConverter(); } if (BytesWritable.class.isAssignableFrom(inputClass)) { return (AvroDatumConverter<IN, OUT>) new BytesWritableConverter(); } if (ByteWritable.class.isAssignableFrom(inputClass)) { return (AvroDatumConverter<IN, OUT>) new ByteWritableConverter(); } if (DoubleWritable.class.isAssignableFrom(inputClass)) { return (AvroDatumConverter<IN, OUT>) new DoubleWritableConverter(); } if (FloatWritable.class.isAssignableFrom(inputClass)) { return (AvroDatumConverter<IN, OUT>) new FloatWritableConverter(); } if (IntWritable.class.isAssignableFrom(inputClass)) { return (AvroDatumConverter<IN, OUT>) new IntWritableConverter(); } if (LongWritable.class.isAssignableFrom(inputClass)) { return (AvroDatumConverter<IN, OUT>) new LongWritableConverter(); } if (NullWritable.class.isAssignableFrom(inputClass)) { return (AvroDatumConverter<IN, OUT>) new NullWritableConverter(); } if (Text.class.isAssignableFrom(inputClass)) { return (AvroDatumConverter<IN, OUT>) new TextConverter(); } throw new UnsupportedOperationException("Unsupported input type: " + inputClass.getName()); }
@Test void convertIntWritable() { AvroDatumConverter<IntWritable, Integer> converter = mFactory.create(IntWritable.class); assertEquals(2, converter.convert(new IntWritable(2)).intValue()); }
public static Coin valueOf(final long satoshis) { // Avoid allocating a new object for Coins of value zero return satoshis == 0 ? Coin.ZERO : new Coin(satoshis); }
@Test public void testValueOf() { // int version assertEquals(CENT, valueOf(0, 1)); assertEquals(SATOSHI, valueOf(1)); assertEquals(NEGATIVE_SATOSHI, valueOf(-1)); assertEquals(MAX_MONEY, valueOf(MAX_MONEY.value)); assertEquals(MAX_MONEY.negate(), valueOf(MAX_MONEY.value * -1)); valueOf(MAX_MONEY.value + 1); valueOf((MAX_MONEY.value * -1) - 1); valueOf(Long.MAX_VALUE); valueOf(Long.MIN_VALUE); try { valueOf(1, -1); fail(); } catch (IllegalArgumentException e) {} try { valueOf(-1, 0); fail(); } catch (IllegalArgumentException e) {} }
@Override public List<FileEntriesLayer> createLayers() throws IOException { // Clear the exploded-artifact root first if (Files.exists(targetExplodedJarRoot)) { MoreFiles.deleteRecursively(targetExplodedJarRoot, RecursiveDeleteOption.ALLOW_INSECURE); } // Add dependencies layers. List<FileEntriesLayer> layers = JarLayers.getDependenciesLayers(jarPath, ProcessingMode.exploded); // Determine class and resource files in the directory containing jar contents and create // FileEntriesLayer for each type of layer (classes or resources). ZipUtil.unzip(jarPath, targetExplodedJarRoot, true); Predicate<Path> isClassFile = path -> path.getFileName().toString().endsWith(".class"); Predicate<Path> isResourceFile = isClassFile.negate().and(Files::isRegularFile); FileEntriesLayer classesLayer = ArtifactLayers.getDirectoryContentsAsLayer( ArtifactLayers.CLASSES, targetExplodedJarRoot, isClassFile, JarLayers.APP_ROOT.resolve("explodedJar")); FileEntriesLayer resourcesLayer = ArtifactLayers.getDirectoryContentsAsLayer( ArtifactLayers.RESOURCES, targetExplodedJarRoot, isResourceFile, JarLayers.APP_ROOT.resolve("explodedJar")); if (!resourcesLayer.getEntries().isEmpty()) { layers.add(resourcesLayer); } if (!classesLayer.getEntries().isEmpty()) { layers.add(classesLayer); } return layers; }
@Test public void testCreateLayers_emptyJar() throws IOException, URISyntaxException { Path standardJar = Paths.get(Resources.getResource(STANDARD_JAR_EMPTY).toURI()); Path destDir = temporaryFolder.newFolder().toPath(); StandardExplodedProcessor standardExplodedModeProcessor = new StandardExplodedProcessor(standardJar, destDir, JAR_JAVA_VERSION); List<FileEntriesLayer> layers = standardExplodedModeProcessor.createLayers(); assertThat(layers.size()).isEqualTo(1); FileEntriesLayer resourcesLayer = layers.get(0); assertThat(resourcesLayer.getEntries().size()).isEqualTo(1); assertThat(resourcesLayer.getEntries().get(0).getExtractionPath()) .isEqualTo(AbsoluteUnixPath.get("/app/explodedJar/META-INF/MANIFEST.MF")); }
public static Optional<IndexSetValidator.Violation> validate(ElasticsearchConfiguration elasticsearchConfiguration, IndexLifetimeConfig retentionConfig) { Period indexLifetimeMin = retentionConfig.indexLifetimeMin(); Period indexLifetimeMax = retentionConfig.indexLifetimeMax(); final Period leeway = indexLifetimeMax.minus(indexLifetimeMin); if (leeway.toStandardSeconds().getSeconds() < 0) { return Optional.of(IndexSetValidator.Violation.create(f("%s <%s> is shorter than %s <%s>", FIELD_INDEX_LIFETIME_MAX, indexLifetimeMax, FIELD_INDEX_LIFETIME_MIN, indexLifetimeMin))); } if (leeway.toStandardSeconds().isLessThan(elasticsearchConfiguration.getTimeSizeOptimizingRotationPeriod().toStandardSeconds())) { return Optional.of(IndexSetValidator.Violation.create(f("The duration between %s and %s <%s> cannot be shorter than %s <%s>", FIELD_INDEX_LIFETIME_MAX, FIELD_INDEX_LIFETIME_MIN, leeway, TIME_SIZE_OPTIMIZING_ROTATION_PERIOD, elasticsearchConfiguration.getTimeSizeOptimizingRotationPeriod()))); } Period fixedLeeway = elasticsearchConfiguration.getTimeSizeOptimizingRetentionFixedLeeway(); if (Objects.nonNull(fixedLeeway) && leeway.toStandardSeconds().isLessThan(fixedLeeway.toStandardSeconds())) { return Optional.of(IndexSetValidator.Violation.create(f("The duration between %s and %s <%s> cannot be shorter than %s <%s>", FIELD_INDEX_LIFETIME_MAX, FIELD_INDEX_LIFETIME_MIN, leeway, TIME_SIZE_OPTIMIZING_RETENTION_FIXED_LEEWAY, fixedLeeway))); } final Period maxRetentionPeriod = elasticsearchConfiguration.getMaxIndexRetentionPeriod(); if (maxRetentionPeriod != null && indexLifetimeMax.toStandardSeconds().isGreaterThan(maxRetentionPeriod.toStandardSeconds())) { return Optional.of(IndexSetValidator.Violation.create(f("Lifetime setting %s <%s> exceeds the configured maximum of %s=%s.", FIELD_INDEX_LIFETIME_MAX, indexLifetimeMax, ElasticsearchConfiguration.MAX_INDEX_RETENTION_PERIOD, maxRetentionPeriod))); } if (periodOtherThanDays(indexLifetimeMax) && 
!elasticsearchConfiguration.allowFlexibleRetentionPeriod()) { return Optional.of(IndexSetValidator.Violation.create(f("Lifetime setting %s <%s> can only be a multiple of days", FIELD_INDEX_LIFETIME_MAX, indexLifetimeMax))); } if (periodOtherThanDays(indexLifetimeMin) && !elasticsearchConfiguration.allowFlexibleRetentionPeriod()) { return Optional.of(IndexSetValidator.Violation.create(f("Lifetime setting %s <%s> can only be a multiple of days", FIELD_INDEX_LIFETIME_MIN, indexLifetimeMin))); } return Optional.empty(); }
@Test public void validateMaxRetentionPeriod() { when(elasticConfig.getTimeSizeOptimizingRotationPeriod()).thenReturn(Period.days(1)); when(elasticConfig.getMaxIndexRetentionPeriod()).thenReturn(Period.days(9)); IndexLifetimeConfig config = IndexLifetimeConfig.builder() .indexLifetimeMin(Period.days(2).withHours(2)) .indexLifetimeMax(Period.days(30)) .build(); assertThat(validate(elasticConfig, config)).hasValueSatisfying(v -> assertThat(v.message()).contains( "Lifetime setting index_lifetime_max <P30D> exceeds the configured maximum of max_index_retention_period=P9D") ); }
public static void deletePathRecursively(String path) throws IOException { if (!exists(path)) { return; } Path root = Paths.get(path); Files.walkFileTree(root, new SimpleFileVisitor<Path>() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { Files.delete(file); return FileVisitResult.CONTINUE; } @Override public FileVisitResult postVisitDirectory(Path dir, IOException e) throws IOException { if (e == null) { Files.delete(dir); return FileVisitResult.CONTINUE; } else { throw e; } } }); }
@Test public void deletePathRecursively() throws IOException { File tmpDir = mTestFolder.newFolder("dir"); File tmpDir1 = mTestFolder.newFolder("dir", "dir1"); File tmpDir2 = mTestFolder.newFolder("dir", "dir2"); File tmpFile1 = mTestFolder.newFile("dir/dir1/file1"); File tmpFile2 = mTestFolder.newFile("dir/dir1/file2"); File tmpFile3 = mTestFolder.newFile("dir/file3"); // Delete all of these. FileUtils.deletePathRecursively(tmpDir.getAbsolutePath()); assertFalse(tmpDir.exists()); assertFalse(tmpDir1.exists()); assertFalse(tmpDir2.exists()); assertFalse(tmpFile1.exists()); assertFalse(tmpFile2.exists()); assertFalse(tmpFile3.exists()); }
@Override public CompletableFuture<Void> offload(ReadHandle readHandle, UUID uuid, Map<String, String> extraMetadata) { final String managedLedgerName = extraMetadata.get(MANAGED_LEDGER_NAME); final String topicName = TopicName.fromPersistenceNamingEncoding(managedLedgerName); CompletableFuture<Void> promise = new CompletableFuture<>(); scheduler.chooseThread(readHandle.getId()).execute(() -> { final BlobStore writeBlobStore = getBlobStore(config.getBlobStoreLocation()); log.info("offload {} uuid {} extraMetadata {} to {} {}", readHandle.getId(), uuid, extraMetadata, config.getBlobStoreLocation(), writeBlobStore); if (readHandle.getLength() == 0 || !readHandle.isClosed() || readHandle.getLastAddConfirmed() < 0) { promise.completeExceptionally( new IllegalArgumentException("An empty or open ledger should never be offloaded")); return; } OffloadIndexBlockBuilder indexBuilder = OffloadIndexBlockBuilder.create() .withLedgerMetadata(readHandle.getLedgerMetadata()) .withDataBlockHeaderLength(BlockAwareSegmentInputStreamImpl.getHeaderSize()); String dataBlockKey = DataBlockUtils.dataBlockOffloadKey(readHandle.getId(), uuid); String indexBlockKey = DataBlockUtils.indexBlockOffloadKey(readHandle.getId(), uuid); log.info("ledger {} dataBlockKey {} indexBlockKey {}", readHandle.getId(), dataBlockKey, indexBlockKey); MultipartUpload mpu = null; List<MultipartPart> parts = Lists.newArrayList(); // init multi part upload for data block. 
try { BlobBuilder blobBuilder = writeBlobStore.blobBuilder(dataBlockKey); Map<String, String> objectMetadata = new HashMap<>(userMetadata); objectMetadata.put("role", "data"); if (extraMetadata != null) { objectMetadata.putAll(extraMetadata); } DataBlockUtils.addVersionInfo(blobBuilder, objectMetadata); Blob blob = blobBuilder.build(); log.info("initiateMultipartUpload bucket {}, metadata {} ", config.getBucket(), blob.getMetadata()); mpu = writeBlobStore.initiateMultipartUpload(config.getBucket(), blob.getMetadata(), new PutOptions()); } catch (Throwable t) { promise.completeExceptionally(t); return; } long dataObjectLength = 0; // start multi part upload for data block. try { long startEntry = 0; int partId = 1; long start = System.nanoTime(); long entryBytesWritten = 0; while (startEntry <= readHandle.getLastAddConfirmed()) { int blockSize = BlockAwareSegmentInputStreamImpl .calculateBlockSize(config.getMaxBlockSizeInBytes(), readHandle, startEntry, entryBytesWritten); try (BlockAwareSegmentInputStream blockStream = new BlockAwareSegmentInputStreamImpl( readHandle, startEntry, blockSize, this.offloaderStats, managedLedgerName)) { Payload partPayload = Payloads.newInputStreamPayload(blockStream); partPayload.getContentMetadata().setContentLength((long) blockSize); partPayload.getContentMetadata().setContentType("application/octet-stream"); parts.add(writeBlobStore.uploadMultipartPart(mpu, partId, partPayload)); log.debug("UploadMultipartPart. container: {}, blobName: {}, partId: {}, mpu: {}", config.getBucket(), dataBlockKey, partId, mpu.id()); indexBuilder.addBlock(startEntry, partId, blockSize); if (blockStream.getEndEntryId() != -1) { startEntry = blockStream.getEndEntryId() + 1; } else { // could not read entry from ledger. 
break; } entryBytesWritten += blockStream.getBlockEntryBytesCount(); partId++; this.offloaderStats.recordOffloadBytes(topicName, blockStream.getBlockEntryBytesCount()); } dataObjectLength += blockSize; } String etag = writeBlobStore.completeMultipartUpload(mpu, parts); log.info("Ledger {}, upload finished, etag {}", readHandle.getId(), etag); mpu = null; } catch (Throwable t) { try { if (mpu != null) { writeBlobStore.abortMultipartUpload(mpu); } } catch (Throwable throwable) { log.error("Failed abortMultipartUpload in bucket - {} with key - {}, uploadId - {}.", config.getBucket(), dataBlockKey, mpu.id(), throwable); } this.offloaderStats.recordWriteToStorageError(topicName); this.offloaderStats.recordOffloadError(topicName); promise.completeExceptionally(t); return; } // upload index block try (OffloadIndexBlock index = indexBuilder.withDataObjectLength(dataObjectLength).build(); IndexInputStream indexStream = index.toStream()) { // write the index block BlobBuilder blobBuilder = writeBlobStore.blobBuilder(indexBlockKey); Map<String, String> objectMetadata = new HashMap<>(userMetadata); objectMetadata.put("role", "index"); if (extraMetadata != null) { objectMetadata.putAll(extraMetadata); } DataBlockUtils.addVersionInfo(blobBuilder, objectMetadata); Payload indexPayload = Payloads.newInputStreamPayload(indexStream); indexPayload.getContentMetadata().setContentLength((long) indexStream.getStreamSize()); indexPayload.getContentMetadata().setContentType("application/octet-stream"); Blob blob = blobBuilder .payload(indexPayload) .contentLength((long) indexStream.getStreamSize()) .build(); writeBlobStore.putBlob(config.getBucket(), blob); promise.complete(null); } catch (Throwable t) { try { writeBlobStore.removeBlob(config.getBucket(), dataBlockKey); } catch (Throwable throwable) { log.error("Failed deleteObject in bucket - {} with key - {}.", config.getBucket(), dataBlockKey, throwable); } this.offloaderStats.recordWriteToStorageError(topicName); 
this.offloaderStats.recordOffloadError(topicName); promise.completeExceptionally(t); return; } }); return promise; }
@Test(timeOut = 600000) // 10 minutes. public void testHappyCase() throws Exception { LedgerOffloader offloader = getOffloader(); offloader.offload(buildReadHandle(), UUID.randomUUID(), new HashMap<>()).get(); }
@Override public Runnable beforeTaskExecute(@NonNull Runnable runnable) { for (TaskDecorator decorator : decorators) { runnable = decorator.decorate(runnable); } return runnable; }
@Test public void testBeforeTaskExecute() { ExtensibleThreadPoolExecutor executor = new ExtensibleThreadPoolExecutor( "test", new DefaultThreadPoolPluginManager(), 5, 5, 1000L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue<>(1), Thread::new, new ThreadPoolExecutor.DiscardPolicy()); TaskDecoratorPlugin plugin = new TaskDecoratorPlugin(); plugin.addDecorator(runnable -> () -> { taskExecuteCount.incrementAndGet(); runnable.run(); }); plugin.addDecorator(runnable -> () -> { taskExecuteCount.incrementAndGet(); runnable.run(); }); executor.register(plugin); executor.execute(() -> { }); ThreadUtil.sleep(500L); Assert.assertEquals(2, taskExecuteCount.get()); }
@Override public long skip(long ns) throws IOException { ensureOpen(); if (mPosition >= mLimit) { return 0; } long n = Math.min(mLimit - mPosition, ns); n = Math.max(-mPosition, n); mPosition += n; return n; }
@Test void testSkip() throws IOException { UnsafeStringReader reader = new UnsafeStringReader("abc"); assertThat(reader.ready(), is(true)); reader.skip(1); assertThat(reader.read(), is((int) 'b')); }
public boolean isWriteCoalescing() { return writeCoalescing; }
@Test public void isWriteCoalescing() { assertEquals(MapStoreConfig.DEFAULT_WRITE_COALESCING, new MapStoreConfig().isWriteCoalescing()); }
@Override public Integer getInt(K name) { return null; }
@Test public void testGetInt() { assertNull(HEADERS.getInt("name1")); }
@Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Argument argument1 = (Argument) o; if (argument != null ? !argument.equals(argument1.argument) : argument1.argument != null) return false; return true; }
@Test public void equalsInputNullOutputFalse() { // Arrange final Argument objectUnderTest = new Argument(null); final Object o = null; // Act final boolean retval = objectUnderTest.equals(o); // Assert result Assert.assertEquals(false, retval); }
@Override protected Mono<Void> doExecute(final ServerWebExchange exchange, final ShenyuPluginChain chain, final SelectorData selector, final RuleData rule) { WafConfig wafConfig = Singleton.INST.get(WafConfig.class); if (Objects.isNull(selector) && Objects.isNull(rule)) { if (WafModelEnum.BLACK.getName().equals(wafConfig.getModel())) { return chain.execute(exchange); } exchange.getResponse().setStatusCode(HttpStatus.FORBIDDEN); Object error = ShenyuResultWrap.error(exchange, HttpStatus.FORBIDDEN.value(), Constants.REJECT_MSG, null); return WebFluxResultUtils.result(exchange, error); } WafHandle wafHandle = buildRuleHandle(rule); if (Objects.isNull(wafHandle) || StringUtils.isBlank(wafHandle.getPermission())) { LOG.error("waf handler can not configuration:{}", wafHandle); return chain.execute(exchange); } if (WafEnum.REJECT.getName().equals(wafHandle.getPermission())) { exchange.getResponse().setStatusCode(HttpStatus.FORBIDDEN); Object error = ShenyuResultWrap.error(exchange, Integer.parseInt(wafHandle.getStatusCode()), Constants.REJECT_MSG, null); return WebFluxResultUtils.result(exchange, error); } return chain.execute(exchange); }
@Test public void testWafPluginReject() { ruleData.setId("waf"); ruleData.setSelectorId("waf"); WafHandle handle = GsonUtils.getGson().fromJson("{\"permission\":\"reject\",\"statusCode\":\"0\"}", WafHandle.class); WafPluginDataHandler.CACHED_HANDLE.get().cachedHandle(CacheKeyUtils.INST.getKey(ruleData), handle); Mono<Void> execute = wafPluginUnderTest.doExecute(exchange, chain, selectorData, ruleData); StepVerifier.create(execute).expectSubscription().verifyComplete(); }
public Map<String, Object> getTelemetryResponse(User currentUser) { TelemetryUserSettings telemetryUserSettings = getTelemetryUserSettings(currentUser); if (isTelemetryEnabled && telemetryUserSettings.telemetryEnabled()) { DateTime clusterCreationDate = telemetryClusterService.getClusterCreationDate().orElse(null); String clusterId = telemetryClusterService.getClusterId(); List<TelemetryLicenseStatus> licenseStatuses = enterpriseDataProvider.licenseStatus(); return telemetryResponseFactory.createTelemetryResponse( getClusterInfo(clusterId, clusterCreationDate, licenseStatuses), getUserInfo(currentUser, clusterId), getPluginInfo(), getSearchClusterInfo(), licenseStatuses, telemetryUserSettings, getDataNodeInfo()); } else { return telemetryResponseFactory.createTelemetryDisabledResponse(telemetryUserSettings); } }
@Test void test_telemetry_is_disabled_globally() { TelemetryService telemetryService = createTelemetryService(false); mockUserTelemetryEnabled(true); Map<String, Object> response = telemetryService.getTelemetryResponse(user); assertThat(response).containsOnlyKeys(USER_TELEMETRY_SETTINGS); }
@Override public void deleteFile(Long id) throws Exception { // 校验存在 FileDO file = validateFileExists(id); // 从文件存储器中删除 FileClient client = fileConfigService.getFileClient(file.getConfigId()); Assert.notNull(client, "客户端({}) 不能为空", file.getConfigId()); client.delete(file.getPath()); // 删除记录 fileMapper.deleteById(id); }
@Test public void testDeleteFile_success() throws Exception { // mock 数据 FileDO dbFile = randomPojo(FileDO.class, o -> o.setConfigId(10L).setPath("tudou.jpg")); fileMapper.insert(dbFile);// @Sql: 先插入出一条存在的数据 // mock Master 文件客户端 FileClient client = mock(FileClient.class); when(fileConfigService.getFileClient(eq(10L))).thenReturn(client); // 准备参数 Long id = dbFile.getId(); // 调用 fileService.deleteFile(id); // 校验数据不存在了 assertNull(fileMapper.selectById(id)); // 校验调用 verify(client).delete(eq("tudou.jpg")); }
@Override public Instance selectOneHealthyInstance(String serviceName) throws NacosException { return selectOneHealthyInstance(serviceName, new ArrayList<>()); }
@Test void testSelectOneHealthyInstance4() throws NacosException { //given Instance healthyInstance = new Instance(); healthyInstance.setIp("1.1.1.1"); healthyInstance.setPort(1000); List<Instance> hosts = new ArrayList<>(); hosts.add(healthyInstance); ServiceInfo infoWithHealthyInstance = new ServiceInfo(); infoWithHealthyInstance.setHosts(hosts); when(proxy.queryInstancesOfService(anyString(), anyString(), anyString(), anyBoolean())).thenReturn( infoWithHealthyInstance); String serviceName = "service1"; String groupName = "group1"; //when client.selectOneHealthyInstance(serviceName, groupName, false); //then verify(proxy, times(1)).queryInstancesOfService(serviceName, groupName, "", false); }
@Override public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback connectionCallback) throws BackgroundException { final MantaHttpHeaders headers = new MantaHttpHeaders(); try { try { if(status.isAppend()) { final HttpRange range = HttpRange.withStatus(status); headers.setByteRange(range.getStart(), range.getEnd() < 0 ? null : range.getEnd()); } // Requesting an empty file as an InputStream doesn't work, but we also don't want to // perform a HEAD request for every read so we'll opt to handle the exception instead // see https://github.com/joyent/java-manta/issues/248 return session.getClient().getAsInputStream(file.getAbsolute(), headers); } catch(UnsupportedOperationException e) { final MantaObject probablyEmptyFile = session.getClient().head(file.getAbsolute()); if(probablyEmptyFile.getContentLength() != 0) { throw new AccessDeniedException(); } return new NullInputStream(0L); } } catch(MantaException e) { throw new MantaExceptionMappingService().map("Download {0} failed", e, file); } catch(MantaClientHttpResponseException e) { throw new MantaHttpExceptionMappingService().map("Download {0} failed", e, file); } catch(IOException e) { throw new DefaultIOExceptionMappingService().map("Download {0} failed", e, file); } }
@Test public void testReadRange() throws Exception { final Path drive = new MantaDirectoryFeature(session).mkdir(randomDirectory(), new TransferStatus()); final Path test = new Path(drive, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); new MantaTouchFeature(session).touch(test, new TransferStatus()); final Local local = new Local(PROPERTIES.get("java.io.tmpdir"), new AlphanumericRandomStringService().random()); final int BYTES_TOTAL = 10;//00; final int BYTES_OFFSET = 1;//00; final byte[] content = RandomUtils.nextBytes(BYTES_TOTAL); final OutputStream out = local.getOutputStream(false); assertNotNull(out); IOUtils.write(content, out); out.close(); new DefaultUploadFeature<>(new MantaWriteFeature(session)).upload( test, local, new BandwidthThrottle(BandwidthThrottle.UNLIMITED), new DisabledStreamListener(), new TransferStatus().withLength(content.length), new DisabledConnectionCallback()); final TransferStatus status = new TransferStatus(); status.setLength(content.length); status.setAppend(true); status.setOffset(BYTES_OFFSET); final MantaReadFeature read = new MantaReadFeature(session); assertTrue(read.offset(test)); final InputStream in = read.read(test, status.withLength(content.length - BYTES_OFFSET), new DisabledConnectionCallback()); assertNotNull(in); final ByteArrayOutputStream buffer = new ByteArrayOutputStream(content.length - BYTES_OFFSET); new StreamCopier(status, status).transfer(in, buffer); final byte[] reference = new byte[content.length - BYTES_OFFSET]; System.arraycopy(content, BYTES_OFFSET, reference, 0, content.length - BYTES_OFFSET); assertArrayEquals(reference, buffer.toByteArray()); in.close(); new MantaDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
// Looks up a config entry by name; returns null when absent (plain Map.get semantics).
public ConfigEntry get(String name) {
    return entries.get(name);
}
// Known keys resolve to their pre-registered entries.
@Test
public void shouldGetEntry() {
    assertEquals(E1, config.get("a"));
    assertEquals(E2, config.get("c"));
}
// Pass-through: this node does not alter the schema of its upstream source.
@Override
public LogicalSchema getSchema() {
    return getSource().getSchema();
}
// A WHERE clause that only constrains WINDOWSTART (no key column) must be rejected.
@Test
public void shouldThrowKeyExpressionThatDoestCoverKey() {
    // Given:
    when(source.getSchema()).thenReturn(INPUT_SCHEMA);
    final Expression expression = new ComparisonExpression(
        Type.EQUAL,
        new UnqualifiedColumnReferenceExp(ColumnName.of("WINDOWSTART")),
        new IntegerLiteral(1234)
    );
    // When:
    final KsqlException e = assertThrows(
        KsqlException.class,
        () -> new QueryFilterNode(
            NODE_ID,
            source,
            expression,
            metaStore,
            ksqlConfig,
            true,
            plannerOptions));
    // Then:
    assertThat(e.getMessage(), containsString("WHERE clause missing key column for disjunct: "
        + "(WINDOWSTART = 1234)"));
}
// Static factory wrapping a map of supported feature version ranges.
// NOTE(review): a null map appears to be rejected by the Features constructor
// (the paired test expects NullPointerException) — confirm in the constructor.
public static Features<SupportedVersionRange> supportedFeatures(Map<String, SupportedVersionRange> features) {
    return new Features<>(features);
}
// A null features map must be rejected with NullPointerException.
@Test
public void testNullFeatures() {
    assertThrows(
        NullPointerException.class,
        () -> Features.supportedFeatures(null));
}
/**
 * Casts an {@link Integer} to a {@link BigDecimal} with the requested
 * precision and scale by widening to {@code long} and delegating.
 *
 * @param value the value to cast; a null input yields a null result
 * @param precision target decimal precision
 * @param scale target decimal scale
 * @return the decimal representation, or null when {@code value} is null
 */
public static BigDecimal cast(final Integer value, final int precision, final int scale) {
    return (value == null) ? null : cast(value.longValue(), precision, scale);
}
// Casting a long 1 with precision 2 / scale 1 yields decimal "1.0".
@Test
public void shouldCastBigInt() {
    // When:
    final BigDecimal decimal = DecimalUtil.cast((Long)1L, 2, 1);
    // Then:
    assertThat(decimal, is(new BigDecimal("1.0")));
}
// For each validation that sources its allowed values from another step,
// drains that step's rows and stores the values in data.listValues[i]
// (and the value meta in data.constantsMeta[i]). Steps already drained for an
// earlier validation are reused rather than read twice.
void readSourceValuesFromInfoSteps() throws KettleStepException {
    // Maps source step name -> validation index whose data can be reused.
    Map<String, Integer> inputStepWasProcessed = new HashMap<>();
    for ( int i = 0; i < meta.getValidations().size(); i++ ) {
        Validation field = meta.getValidations().get( i );
        List<StreamInterface> streams = meta.getStepIOMeta().getInfoStreams();
        // If we need to source the allowed values data from a different step, we do this here as well
        //
        if ( field.isSourcingValues() ) {
            if ( streams.get( i ).getStepMeta() == null ) {
                throw new KettleStepException( "There is no valid source step specified for the allowed values of validation [" + field.getName() + "]" );
            }
            if ( Utils.isEmpty( field.getSourcingField() ) ) {
                throw new KettleStepException( "There is no valid source field specified for the allowed values of validation [" + field.getName() + "]" );
            }
            // Still here : OK, read the data from the specified step...
            // The data is stored in data.listValues[i] and data.constantsMeta
            //
            String stepName = streams.get( i ).getStepname();
            if ( inputStepWasProcessed.containsKey( stepName ) ) {
                // step was processed for other StreamInterface
                data.listValues[ i ] = data.listValues[ inputStepWasProcessed.get( stepName ) ];
                data.constantsMeta[ i ] = data.constantsMeta[ inputStepWasProcessed.get( stepName ) ];
                continue;
            }
            RowSet allowedRowSet = findInputRowSet( stepName );
            // fieldIndex is resolved lazily from the first row's metadata.
            int fieldIndex = -1;
            List<Object> allowedValues = new ArrayList<Object>();
            Object[] allowedRowData = getRowFrom( allowedRowSet );
            while ( allowedRowData != null ) {
                RowMetaInterface allowedRowMeta = allowedRowSet.getRowMeta();
                if ( fieldIndex < 0 ) {
                    fieldIndex = allowedRowMeta.indexOfValue( field.getSourcingField() );
                    if ( fieldIndex < 0 ) {
                        throw new KettleStepException( "Source field [" + field.getSourcingField() + "] is not found in the source row data" );
                    }
                    data.constantsMeta[ i ] = allowedRowMeta.getValueMeta( fieldIndex );
                }
                Object allowedValue = allowedRowData[ fieldIndex ];
                if ( allowedValue != null ) {
                    // Nulls are skipped; only concrete values participate in the allowed list.
                    allowedValues.add( allowedValue );
                }
                // Grab another row too...
                //
                allowedRowData = getRowFrom( allowedRowSet );
            }
            // Set the list values in the data block...
            //
            data.listValues[ i ] = allowedValues.toArray( new Object[ allowedValues.size() ] );
            inputStepWasProcessed.put( stepName, i );
        }
    }
}
// Two validations source values from the SAME info step; the second must reuse
// the data drained for the first (exercising the inputStepWasProcessed cache).
@Test
public void readSourceValuesFromInfoStepsTest() throws Exception {
    String name = "Valid list";
    String field = "sourcing field 1";
    String values = "A";
    mockHelper.stepMeta.setName( name );
    ValidatorMeta meta = new ValidatorMeta();
    List<Validation> validations = new ArrayList<>();
    Validation validation1 = new Validation( "validation1" );
    validation1.setSourcingValues( true );
    validation1.setSourcingField( field );
    validations.add( validation1 );
    Validation validation2 = new Validation( "validation2" );
    validation2.setSourcingValues( true );
    validation2.setSourcingField( "sourcing field 2" );
    validations.add( validation2 );
    meta.setValidations( validations );
    StepMeta stepMeta = new StepMeta();
    stepMeta.setName( name );
    // One row ("A") followed by end-of-data (null).
    RowSet rowSet = Mockito.mock( RowSet.class );
    Mockito.when( rowSet.getOriginStepName() ).thenReturn( name );
    Mockito.when( rowSet.getDestinationStepName() ).thenReturn( "Validator" );
    Mockito.when( rowSet.getOriginStepCopy() ).thenReturn( 0 );
    Mockito.when( rowSet.getDestinationStepCopy() ).thenReturn( 0 );
    Mockito.when( rowSet.getRow() ).thenReturn( new String[] { values } ).thenReturn( null );
    Mockito.when( rowSet.isDone() ).thenReturn( true );
    RowMetaInterface allowedRowMeta = Mockito.mock( RowMetaInterface.class );
    Mockito.when( rowSet.getRowMeta() ).thenReturn( allowedRowMeta );
    Mockito.when( rowSet.getRowMeta() ).thenReturn( Mockito.mock( RowMetaInterface.class ) );
    Mockito.when( allowedRowMeta.indexOfValue( field ) ).thenReturn( 0 );
    Mockito.when( allowedRowMeta.getValueMeta( 0 ) ).thenReturn( Mockito.mock( ValueMetaInterface.class ) );
    List<RowSet> rowSets = new ArrayList<>();
    rowSets.add( rowSet );
    validator.setInputRowSets( rowSets );
    mockHelper.transMeta.setStep( 0, stepMeta );
    Mockito.when( mockHelper.transMeta.findStep( Mockito.eq( name ) ) ).thenReturn( stepMeta );
    // Both info streams point at the same step so the cache path is taken.
    StepMeta stepMetaValidList = new StepMeta();
    stepMetaValidList.setName( name );
    meta.getStepIOMeta().getInfoStreams().get( 0 ).setStepMeta( stepMetaValidList );
    meta.getStepIOMeta().getInfoStreams().get( 1 ).setStepMeta( stepMetaValidList );
    // Inject meta/data via reflection (fields are not settable otherwise).
    Class<?> validatorClass = Validator.class;
    Field metaField = validatorClass.getDeclaredField( "meta" );
    metaField.setAccessible( true );
    metaField.set( validator, meta );
    ValidatorData data = new ValidatorData();
    data.constantsMeta = new ValueMetaInterface[ 2 ];
    Field dataField = validatorClass.getDeclaredField( "data" );
    dataField.setAccessible( true );
    dataField.set( validator, data );
    data.listValues = new Object[ 2 ][ 2 ];
    validator.readSourceValuesFromInfoSteps();
    // Both validations end up with the same sourced value.
    Assert.assertEquals( values, data.listValues[ 0 ][ 0 ] );
    Assert.assertEquals( values, data.listValues[ 1 ][ 0 ] );
}
// Encodes a Timestamp as (seconds: long, nanos: int); a null timestamp is
// written as the sentinel pair (-1L, -1). A non-Timestamp datum raises
// ClassCastException from the cast.
@Override
protected void write(Object datum, Encoder out) throws IOException {
    final Timestamp timestamp = (Timestamp) datum;
    if (timestamp == null) {
        out.writeLong(-1L);
        out.writeInt(-1);
    } else {
        out.writeLong(timestamp.getSeconds());
        out.writeInt(timestamp.getNanos());
    }
}
// Passing a non-Timestamp datum must fail the cast with ClassCastException.
@Test(expected = ClassCastException.class)
public void testThrowsExceptionWhenWritingNonTimestamp() throws IOException {
    encoding.write(1L, null);
}
/**
 * Builds an {@link S3ResourceId} from its scheme, bucket and key components,
 * normalizing the key to start with a leading '/'.
 *
 * @param scheme the storage scheme, e.g. "s3"
 * @param bucket the bucket name; must not contain '/'
 * @param key the object key; a leading '/' is added when absent
 * @throws IllegalArgumentException if the bucket name contains '/'
 */
static S3ResourceId fromComponents(String scheme, String bucket, String key) {
    // Bucket names can never legally contain a path separator; rejecting here
    // keeps malformed buckets from silently producing bogus resource ids.
    if (bucket.contains("/")) {
        throw new IllegalArgumentException(
            String.format("Bucket name must not contain '/': [%s]", bucket));
    }
    if (!key.startsWith("/")) {
        key = "/" + key;
    }
    return new S3ResourceId(scheme, bucket, key, null, null);
}
// A bucket name containing '/' must be rejected with IllegalArgumentException.
@Test
public void testInvalidBucket() {
    assertThrows(
        IllegalArgumentException.class,
        () -> S3ResourceId.fromComponents("s3", "invalid/", ""));
}
// Delegates update handling to the parent bot implementation unchanged.
@Override
public void consume(Update update) {
    super.consume(update);
}
// A non-admin user issuing /admin receives the access-denied message.
@Test
void sendsPrivacyViolation() {
    Update update = mockFullUpdate(bot, USER, "/admin");
    bot.consume(update);
    verify(silent, times(1)).send("Sorry, you don't have the required access level to do that.", USER.getId());
}
// Fetches pre-keys for a target device: rate-limits per
// (caller account.device -> target uuid.device), resolves the target account,
// maps absence to NOT_FOUND, then delegates to the keys helper.
@Override
public Mono<GetPreKeysResponse> getPreKeys(final GetPreKeysRequest request) {
    final AuthenticatedDevice authenticatedDevice = AuthenticationUtil.requireAuthenticatedDevice();
    final ServiceIdentifier targetIdentifier =
        ServiceIdentifierUtil.fromGrpcServiceIdentifier(request.getTargetIdentifier());
    // Validates the requested device id before any lookups.
    final byte deviceId = DeviceIdUtil.validate(request.getDeviceId());
    // Key scopes the limiter to the caller/target pair.
    final String rateLimitKey = authenticatedDevice.accountIdentifier() + "."
        + authenticatedDevice.deviceId() + "__"
        + targetIdentifier.uuid() + "." + deviceId;
    return rateLimiters.getPreKeysLimiter().validateReactive(rateLimitKey)
        .then(Mono.fromFuture(() -> accountsManager.getByServiceIdentifierAsync(targetIdentifier))
            .flatMap(Mono::justOrEmpty))
        // An unknown target account surfaces as gRPC NOT_FOUND.
        .switchIfEmpty(Mono.error(Status.NOT_FOUND.asException()))
        .flatMap(targetAccount ->
            KeysGrpcHelper.getPreKeys(targetAccount, targetIdentifier.identityType(), deviceId, keysManager));
}
// When the limiter rejects, the call fails with RESOURCE_EXHAUSTED (retry-after)
// and the account store is never consulted.
@Test
void getPreKeysRateLimited() {
    final Account targetAccount = mock(Account.class);
    when(targetAccount.getUuid()).thenReturn(UUID.randomUUID());
    when(targetAccount.getIdentityKey(IdentityType.ACI)).thenReturn(new IdentityKey(Curve.generateKeyPair().getPublicKey()));
    when(targetAccount.getDevices()).thenReturn(Collections.emptyList());
    when(targetAccount.getDevice(anyByte())).thenReturn(Optional.empty());
    when(accountsManager.getByServiceIdentifierAsync(any()))
        .thenReturn(CompletableFuture.completedFuture(Optional.of(targetAccount)));
    final Duration retryAfterDuration = Duration.ofMinutes(7);
    // Force the rate limiter to reject every request.
    when(preKeysRateLimiter.validateReactive(anyString()))
        .thenReturn(Mono.error(new RateLimitExceededException(retryAfterDuration)));
    assertRateLimitExceeded(retryAfterDuration,
        () -> authenticatedServiceStub().getPreKeys(GetPreKeysRequest.newBuilder()
            .setTargetIdentifier(ServiceIdentifier.newBuilder()
                .setIdentityType(org.signal.chat.common.IdentityType.IDENTITY_TYPE_ACI)
                .setUuid(UUIDUtil.toByteString(UUID.randomUUID()))
                .build())
            .build()));
    verifyNoInteractions(accountsManager);
}
/**
 * Registers a new class loader lease for the given job, creating the cache
 * entry on first use. All access to the cache is serialized on
 * {@code lockObject}.
 *
 * @param jobId job to obtain a class loader lease for
 * @return a lease whose release decrements the entry's reference count
 */
@Override
public ClassLoaderLease registerClassLoaderLease(JobID jobId) {
    synchronized (lockObject) {
        // Method reference instead of a lambda that ignored its own parameter
        // and captured the outer jobId (same value, clearer intent).
        return cacheEntries
                .computeIfAbsent(jobId, LibraryCacheEntry::new)
                .obtainLease();
    }
}
// Releasing the last lease for a job must run the registered release hooks.
@Test
public void releaseUserCodeClassLoader_willRunReleaseHooks()
        throws IOException, InterruptedException {
    final BlobLibraryCacheManager libraryCacheManager =
            new TestingBlobLibraryCacheManagerBuilder().build();
    final LibraryCacheManager.ClassLoaderLease classLoaderLease =
            libraryCacheManager.registerClassLoaderLease(new JobID());
    final UserCodeClassLoader userCodeClassLoader =
            classLoaderLease.getOrResolveClassLoader(
                    Collections.emptyList(), Collections.emptyList());
    final OneShotLatch releaseHookLatch = new OneShotLatch();
    userCodeClassLoader.registerReleaseHookIfAbsent("test", releaseHookLatch::trigger);
    // this should trigger the release of the class loader
    classLoaderLease.release();
    releaseHookLatch.await();
}
// FEEL list replace(list, position, newItem): returns a copy of the list with
// the element at the 1-based position replaced. Negative positions count from
// the end (-1 is the last element). Null list/position and out-of-range
// positions produce an InvalidParametersEvent error result.
public FEELFnResult<List> invoke(@ParameterName("list") List list, @ParameterName("position") BigDecimal position, @ParameterName("newItem") Object newItem) {
    if (list == null) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", CANNOT_BE_NULL));
    }
    if (position == null) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "position", CANNOT_BE_NULL));
    }
    int intPosition = position.intValue();
    // Position 0 is invalid; |position| must be within the list bounds.
    if (intPosition == 0 || Math.abs(intPosition) > list.size()) {
        String paramProblem = String.format("%s outside valid boundaries (1-%s)", intPosition, list.size());
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "position", paramProblem));
    }
    // Normalize numeric replacement values to the canonical FEEL number type.
    Object e = NumberEvalHelper.coerceNumber(newItem);
    // Work on a copy; the input list is never mutated.
    List toReturn = new ArrayList(list);
    int replacementPosition = intPosition > 0 ? intPosition -1 : list.size() - Math.abs(intPosition);
    toReturn.set(replacementPosition, e);
    return FEELFnResult.ofResult(toReturn);
}
// The match-function overload: elements matching the predicate are replaced
// with null (here, the second element).
@Test
void invokeReplaceByMatchWithNull() {
    List list = getList();
    List expected = new ArrayList<>(list);
    expected.set(1, null);
    String validMatchFunction = "function(item, newItem) item = \"Element-1\"";
    Object expressionObject = parseCodegenCompileEvaluate(validMatchFunction);
    assertThat(expressionObject).isInstanceOf(AbstractCustomFEELFunction.class);
    FunctionTestUtil.assertResult(listReplaceFunction.invoke(list, (AbstractCustomFEELFunction)expressionObject, null), expected);
}
// Test-only accessor for the current app-activities queue length limit.
@VisibleForTesting
public int getAppActivitiesMaxQueueLength() {
    return appActivitiesMaxQueueLength;
}
// Verifies the queue-length limit is dynamically recomputed by the cleanup
// thread: nodes * asyncThreads for async scheduling, nodes * 1.2 for
// heartbeat-driven scheduling, and the configured value when multi-node
// placement is enabled.
@Test (timeout = 10000)
public void testAppActivitiesMaxQueueLengthUpdate()
    throws TimeoutException, InterruptedException {
    Configuration conf = new Configuration();
    int configuredAppActivitiesMaxQueueLength = 1;
    conf.setInt(YarnConfiguration.
        RM_ACTIVITIES_MANAGER_APP_ACTIVITIES_MAX_QUEUE_LENGTH,
        configuredAppActivitiesMaxQueueLength);
    conf.setInt(YarnConfiguration.RM_ACTIVITIES_MANAGER_CLEANUP_INTERVAL_MS,
        500);
    ConcurrentMap<NodeId, RMNode> mockNodes = new ConcurrentHashMap<>();
    int numNodes = 5;
    for (int i = 0; i < numNodes; i++) {
        mockNodes.put(NodeId.newInstance("node" + i, 0), mock(RMNode.class));
    }
    CapacityScheduler cs = Mockito.mock(CapacityScheduler.class);
    RMContext mockRMContext = Mockito.mock(RMContext.class);
    Mockito.when(mockRMContext.getRMNodes()).thenReturn(mockNodes);
    Mockito.when(mockRMContext.getYarnConfiguration()).thenReturn(conf);
    Mockito.when(mockRMContext.getScheduler()).thenReturn(cs);
    /*
     * Test for async-scheduling with multi-node placement disabled
     */
    Mockito.when(cs.isMultiNodePlacementEnabled()).thenReturn(false);
    int numAsyncSchedulerThreads = 3;
    Mockito.when(cs.getNumAsyncSchedulerThreads())
        .thenReturn(numAsyncSchedulerThreads);
    ActivitiesManager newActivitiesManager =
        new ActivitiesManager(mockRMContext);
    // Before start, the limit is the configured value.
    Assert.assertEquals(1,
        newActivitiesManager.getAppActivitiesMaxQueueLength());
    newActivitiesManager.init(conf);
    newActivitiesManager.start();
    // Expect nodes (5) * async threads (3) = 15.
    GenericTestUtils.waitFor(
        () -> newActivitiesManager.getAppActivitiesMaxQueueLength()
            == numNodes * numAsyncSchedulerThreads, 100, 3000);
    Assert.assertEquals(15,
        newActivitiesManager.getAppActivitiesMaxQueueLength());
    /*
     * Test for HB-driven scheduling with multi-node placement disabled
     */
    Mockito.when(cs.getNumAsyncSchedulerThreads()).thenReturn(0);
    // Expect nodes (5) * 1.2 = 6.
    GenericTestUtils.waitFor(
        () -> newActivitiesManager.getAppActivitiesMaxQueueLength()
            == numNodes * 1.2, 100, 3000);
    Assert.assertEquals(6,
        newActivitiesManager.getAppActivitiesMaxQueueLength());
    /*
     * Test for scheduling with multi-node placement enabled
     */
    Mockito.when(cs.isMultiNodePlacementEnabled()).thenReturn(true);
    // Expect fall back to the configured value (1).
    GenericTestUtils.waitFor(
        () -> newActivitiesManager.getAppActivitiesMaxQueueLength()
            == configuredAppActivitiesMaxQueueLength, 100, 3000);
    Assert.assertEquals(1,
        newActivitiesManager.getAppActivitiesMaxQueueLength());
}
// Static factory for an ArrayList-backed bounded list.
// NOTE(review): maxLength is not validated here; the paired test expects
// IllegalArgumentException for non-positive values — presumably enforced by
// the BoundedList constructor, confirm there.
public static <E> BoundedList<E> newArrayBacked(int maxLength) {
    return new BoundedList<>(maxLength, new ArrayList<>());
}
// Both factory overloads must reject a negative maxLength with the same message.
@Test
public void testMaxLengthMustNotBeNegative() {
    assertEquals("Invalid non-positive maxLength of -123",
        assertThrows(IllegalArgumentException.class,
            () -> BoundedList.newArrayBacked(-123)).getMessage());
    assertEquals("Invalid non-positive maxLength of -123",
        assertThrows(IllegalArgumentException.class,
            () -> BoundedList.newArrayBacked(-123, 100)).getMessage());
}
// Computes the event diff between two cluster states: per-node events, a
// whole-cluster event, then derived bucket-space state events.
// NOTE(review): createBaselineParams(params) is invoked twice; if it is a pure
// builder the result could be hoisted into a local — confirm before changing.
public static List<Event> computeEventDiff(final Params params) {
    final List<Event> events = new ArrayList<>();
    emitPerNodeDiffEvents(createBaselineParams(params), events);
    emitWholeClusterDiffEvent(createBaselineParams(params), events);
    emitDerivedBucketSpaceStatesDiffEvents(params, events);
    return events;
}
// Three node-state transitions (distributor down, storage retired, storage up)
// must each produce a distinct node event.
@Test
void multiple_node_state_transitions_emit_multiple_node_state_events() {
    final EventFixture fixture = EventFixture.createForNodes(3)
        .clusterStateBefore("distributor:3 storage:3 .1.s:d")
        .clusterStateAfter("distributor:3 .2.s:d storage:3 .0.s:r");
    final List<Event> events = fixture.computeEventDiff();
    assertThat(events.size(), equalTo(3));
    assertThat(events, hasItem(allOf(
        eventForNode(distributorNode(2)),
        nodeEventWithDescription("Altered node state in cluster state from 'U' to 'D'"))));
    assertThat(events, hasItem(allOf(
        eventForNode(storageNode(0)),
        nodeEventWithDescription("Altered node state in cluster state from 'U' to 'R'"))));
    assertThat(events, hasItem(allOf(
        eventForNode(storageNode(1)),
        nodeEventWithDescription("Altered node state in cluster state from 'D' to 'U'"))));
}
// Delegates paged config lookup straight to the mapper layer.
@Override
public PageResult<ConfigDO> getConfigPage(ConfigPageReqVO pageReqVO) {
    return configMapper.selectPage(pageReqVO);
}
// Inserts one matching row plus four near-misses (name/key/type/createTime)
// and asserts only the matching row is paged back.
@Test
public void testGetConfigPage() {
    // mock 数据 (seed data)
    ConfigDO dbConfig = randomConfigDO(o -> { // 等会查询到 (will be matched by the query)
        o.setName("芋艿");
        o.setConfigKey("yunai");
        o.setType(ConfigTypeEnum.SYSTEM.getType());
        o.setCreateTime(buildTime(2021, 2, 1));
    });
    configMapper.insert(dbConfig);
    // 测试 name 不匹配 (name mismatch)
    configMapper.insert(cloneIgnoreId(dbConfig, o -> o.setName("土豆")));
    // 测试 key 不匹配 (key mismatch)
    configMapper.insert(cloneIgnoreId(dbConfig, o -> o.setConfigKey("tudou")));
    // 测试 type 不匹配 (type mismatch)
    configMapper.insert(cloneIgnoreId(dbConfig, o -> o.setType(ConfigTypeEnum.CUSTOM.getType())));
    // 测试 createTime 不匹配 (createTime outside range)
    configMapper.insert(cloneIgnoreId(dbConfig, o -> o.setCreateTime(buildTime(2021, 1, 1))));
    // 准备参数 (build the request)
    ConfigPageReqVO reqVO = new ConfigPageReqVO();
    reqVO.setName("艿");
    reqVO.setKey("nai");
    reqVO.setType(ConfigTypeEnum.SYSTEM.getType());
    reqVO.setCreateTime(buildBetweenTime(2021, 1, 15, 2021, 2, 15));
    // 调用 (invoke)
    PageResult<ConfigDO> pageResult = configService.getConfigPage(reqVO);
    // 断言 (assert: exactly the one matching row)
    assertEquals(1, pageResult.getTotal());
    assertEquals(1, pageResult.getList().size());
    assertPojoEquals(dbConfig, pageResult.getList().get(0));
}
// Delegates to the wrapped codec factory; a null inner factory (default
// constructor path) fails fast with NullPointerException.
@Override
public String toString() {
    checkNotNull(codecFactory, "Inner CodecFactory is null, please use non default constructor");
    return codecFactory.toString();
}
// toString on a default-constructed (codec-less) factory must throw NPE.
@Test(expected = NullPointerException.class)
public void testNullCodecToString() throws Exception {
    // use default CTR (available cause Serializable)
    SerializableAvroCodecFactory codec = new SerializableAvroCodecFactory();
    assertEquals("null", codec.toString());
}
// Derives a readable step name "[inputs-outputs]" for an executable stage by
// locating the transforms attached to the stage's input and output PCollections.
public static String toStepName(ExecutableStage executableStage) {
    /*
     * Look for the first/input ParDo/DoFn in this executable stage by
     * matching ParDo/DoFn's input PCollection with executable stage's
     * input PCollection
     */
    Set<PipelineNode.PTransformNode> inputs =
        executableStage.getTransforms().stream()
            .filter(
                transform ->
                    transform
                        .getTransform()
                        .getInputsMap()
                        .containsValue(executableStage.getInputPCollection().getId()))
            .collect(Collectors.toSet());
    Set<String> outputIds =
        executableStage.getOutputPCollections().stream()
            .map(PipelineNode.PCollectionNode::getId)
            .collect(Collectors.toSet());
    /*
     * Look for the last/output ParDo/DoFn in this executable stage by
     * matching ParDo/DoFn's output PCollection(s) with executable stage's
     * out PCollection(s)
     */
    Set<PipelineNode.PTransformNode> outputs =
        executableStage.getTransforms().stream()
            .filter(
                transform ->
                    CollectionUtils.containsAny(
                        transform.getTransform().getOutputsMap().values(), outputIds))
            .collect(Collectors.toSet());
    return String.format("[%s-%s]", toStepName(inputs), toStepName(outputs));
}
// A stage with only an input transform renders as "[name-]" (empty output side).
@Test
public void testExecutableStageWithCustomizedName() {
    pipeline.apply("MyCreateOf", Create.of(KV.of(1L, "1")));
    assertEquals("[MyCreateOf-]", DoFnUtils.toStepName(getOnlyExecutableStage(pipeline)));
}
/**
 * Integer overload of atan2: widens each coordinate to {@code Double}
 * (preserving nulls) and delegates to the Double overload.
 */
@Udf(description = "Returns the inverse (arc) tangent of y / x")
public Double atan2(
    @UdfParameter(
        value = "y",
        description = "The ordinate (y) coordinate."
    ) final Integer y,
    @UdfParameter(
        value = "x",
        description = "The abscissa (x) coordinate."
    ) final Integer x
) {
    final Double ordinate = (y == null) ? null : y.doubleValue();
    final Double abscissa = (x == null) ? null : x.doubleValue();
    return atan2(ordinate, abscissa);
}
// Spot-checks atan2 for positive coordinates across the double/int/long overloads.
@Test
public void shouldHandlePositiveYPositiveX() {
    assertThat(udf.atan2(1.1, 0.24), closeTo(1.355980926393238, 0.000000000000001));
    assertThat(udf.atan2(6.0, 7.1), closeTo(0.7016252196536817, 0.000000000000001));
    assertThat(udf.atan2(2, 3), closeTo(0.5880026035475675, 0.000000000000001));
    assertThat(udf.atan2(2L, 2L), closeTo(0.7853981633974483, 0.000000000000001));
}
// Intentional no-op implementation of the interface contract.
@Override
public void deleteProjectProperty(String key, String projectUuid, String projectKey, String projectName, String qualifier) {
    // do nothing
}
// The no-op implementation must not touch any collaborator.
@Test
public void deleteProjectProperty() {
    underTest.deleteProjectProperty(null, null, null, null, null, null);
    assertNoInteraction();
}
// Evaluates the configured JSON path against the message and maps the value
// under the single key "result": numbers pass through, a non-empty list
// contributes its first element's string form, anything else is stringified.
// NOTE(review): a JSON path resolving to null would NPE at result.toString();
// confirm whether jsonPath.read can return null here.
@VisibleForTesting
protected Map<String, Object> read(String json) {
    final Object result = jsonPath.read(json);
    final Map<String, Object> fields = Maps.newHashMap();
    if (result instanceof Integer || result instanceof Double || result instanceof Long) {
        fields.put("result", result);
    } else if (result instanceof List) {
        final List list = (List) result;
        if (!list.isEmpty()) {
            // Only the first match of a multi-match path is kept.
            fields.put("result", list.get(0).toString());
        }
    } else {
        // Now it's most likely a string or something we do not map.
        fields.put("result", result.toString());
    }
    return fields;
}
// A path selecting a plain string field yields {"result": thatString}.
@Test
public void testReadResultingInSingleString() throws Exception {
    String json = "{\"url\":\"https://api.github.com/repos/Graylog2/graylog2-server/releases/assets/22660\",\"download_count\":76185,\"id\":22660,\"name\":\"graylog2-server-0.20.0-preview.1.tgz\",\"label\":\"graylog2-server-0.20.0-preview.1.tgz\",\"content_type\":\"application/octet-stream\",\"state\":\"uploaded\",\"size\":38179285,\"updated_at\":\"2013-09-30T20:05:46Z\"}";
    String path = "$.state";
    Map<String, Object> result = new JsonPathCodec(configOf(CK_PATH, path), objectMapperProvider.get(), messageFactory).read(json);
    assertThat(result.size()).isEqualTo(1);
    assertThat(result.get("result")).isEqualTo("uploaded");
}
// Exposes the checkpoint's shard iterator directly.
// NOTE(review): the paired test expects remove() to throw
// UnsupportedOperationException, implying shardCheckpoints is an immutable
// collection — confirm at its construction site.
@Override
public Iterator<ShardCheckpoint> iterator() {
    return shardCheckpoints.iterator();
}
// Mutation through the exposed iterator must be rejected.
@Test(expected = UnsupportedOperationException.class)
public void isImmutable() {
    KinesisReaderCheckpoint checkpoint = new KinesisReaderCheckpoint(asList(a, b, c));
    Iterator<ShardCheckpoint> iterator = checkpoint.iterator();
    iterator.remove();
}
// Compares merged (reflection + ServiceLoader) scan results against the pure
// ServiceLoader results: plugins missing a ServiceLoader manifest are listed,
// then warned about (HYBRID_WARN) or escalated to a ConnectException
// (HYBRID_FAIL). With no missing plugins, hybrid modes get a hint to switch
// to SERVICE_LOAD.
static void maybeReportHybridDiscoveryIssue(PluginDiscoveryMode discoveryMode, PluginScanResult serviceLoadingScanResult, PluginScanResult mergedResult) {
    SortedSet<PluginDesc<?>> missingPlugins = new TreeSet<>();
    mergedResult.forEach(missingPlugins::add);
    // What remains after removal is exactly the set lacking manifests.
    serviceLoadingScanResult.forEach(missingPlugins::remove);
    if (missingPlugins.isEmpty()) {
        if (discoveryMode == PluginDiscoveryMode.HYBRID_WARN || discoveryMode == PluginDiscoveryMode.HYBRID_FAIL) {
            log.warn("All plugins have ServiceLoader manifests, consider reconfiguring {}={}", WorkerConfig.PLUGIN_DISCOVERY_CONFIG, PluginDiscoveryMode.SERVICE_LOAD);
        }
    } else {
        String message = String.format(
                "One or more plugins are missing ServiceLoader manifests may not be usable with %s=%s: %s%n"
                        + "Read the documentation at %s for instructions on migrating your plugins "
                        + "to take advantage of the performance improvements of %s mode.",
                WorkerConfig.PLUGIN_DISCOVERY_CONFIG,
                PluginDiscoveryMode.SERVICE_LOAD,
                missingPlugins.stream()
                        .map(pluginDesc -> pluginDesc.location() + "\t" + pluginDesc.className() + "\t" + pluginDesc.type() + "\t" + pluginDesc.version())
                        .collect(Collectors.joining("\n", "[\n", "\n]")),
                "https://kafka.apache.org/documentation.html#connect_plugindiscovery",
                PluginDiscoveryMode.SERVICE_LOAD
        );
        if (discoveryMode == PluginDiscoveryMode.HYBRID_WARN) {
            log.warn("{} To silence this warning, set {}={} in the worker config.", message, WorkerConfig.PLUGIN_DISCOVERY_CONFIG, PluginDiscoveryMode.ONLY_SCAN);
        } else if (discoveryMode == PluginDiscoveryMode.HYBRID_FAIL) {
            throw new ConnectException(String.format("%s To silence this error, set %s=%s in the worker config.", message, WorkerConfig.PLUGIN_DISCOVERY_CONFIG, PluginDiscoveryMode.HYBRID_WARN));
        }
    }
}
// HYBRID_FAIL with no missing plugins logs only the "consider reconfiguring"
// warning (no plugin names, no exception).
@Test
public void testHybridFailWithPlugins() {
    try (LogCaptureAppender logCaptureAppender = LogCaptureAppender.createAndRegister(Plugins.class)) {
        Plugins.maybeReportHybridDiscoveryIssue(PluginDiscoveryMode.HYBRID_FAIL, nonEmpty, nonEmpty);
        assertTrue(logCaptureAppender.getEvents().stream().anyMatch(e -> e.getLevel().equals("WARN")
            && !e.getMessage().contains(missingPluginClass)
            && e.getMessage().contains(WorkerConfig.PLUGIN_DISCOVERY_CONFIG)
        ));
    }
}
// Default policy: every OutOfMemoryError is handled.
public boolean shouldHandle(OutOfMemoryError oome) {
    return true;
}
// The default handler accepts any OutOfMemoryError.
@Test
public void testShouldHandle() {
    assertTrue(outOfMemoryHandler.shouldHandle(new OutOfMemoryError()));
}
// Headless-server startup: loads UDFs, ensures the processing-log topic,
// configures RocksDB, executes the queries file, then reports to the version
// checker. Any failure is logged with the file path and rethrown.
public void startAsync() {
    try {
        udfLoader.load();
        ProcessingLogServerUtils.maybeCreateProcessingLogTopic(
            serviceContext.getTopicClient(),
            processingLogConfig,
            ksqlConfig);
        if (processingLogConfig.getBoolean(ProcessingLogConfig.STREAM_AUTO_CREATE)) {
            // Auto-create is a no-op headlessly; warn so the config isn't silently ignored.
            log.warn("processing log auto-create is enabled, but this is not supported "
                + "for headless mode.");
        }
        rocksDBConfigSetterHandler.accept(ksqlConfig);
        processesQueryFile(readQueriesFile(queriesFile));
        showWelcomeMessage();
        // Version checker only accepts string-valued properties; drop nulls.
        final Properties properties = new Properties();
        ksqlConfig.originals().forEach((key, value) -> {
            if (nonNull(value)) {
                properties.put(key, value.toString());
            }
        });
        versionChecker.start(KsqlModuleType.SERVER, properties);
    } catch (final Exception e) {
        log.error("Failed to start KSQL Server with query file: " + queriesFile, e);
        throw e;
    }
}
// Startup must trigger UDF loading.
@Test
public void shouldLoadUdfs() {
    // When:
    standaloneExecutor.startAsync();
    // Then:
    verify(udfLoader).load();
}
// Synchronously sends the request through the underlying API client and
// returns its typed response.
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) {
    return api.send(request);
}
// Sends a message then deletes it by id; the delete response must be ok.
@Test
public void deleteMessages() {
    Message message = bot.execute(new SendMessage(chatId, "message for delete")).message();
    int[] ids = {message.messageId()};
    BaseResponse response = bot.execute(new DeleteMessages(chatId, ids));
    assertTrue(response.isOk());
}
// Converts every column of the logical schema into a FieldInfo entity.
// An empty column list is considered invalid input.
public static List<FieldInfo> buildSourceSchemaEntity(final LogicalSchema schema) {
    final List<FieldInfo> allFields = schema.columns().stream()
        .map(EntityUtil::toFieldInfo)
        .collect(Collectors.toList());
    if (allFields.isEmpty()) {
        throw new IllegalArgumentException("Root schema should contain columns: " + schema);
    }
    return allFields;
}
// ROWKEY/ROWTIME appearing as value columns are passed through with no type info.
@Test
public void shouldSupportRowTimeAndKeyInValueSchema() {
    // Given:
    final LogicalSchema schema = LogicalSchema.builder()
        .valueColumn(ColumnName.of("ROWKEY"), SqlTypes.STRING)
        .valueColumn(ColumnName.of("ROWTIME"), SqlTypes.INTEGER)
        .valueColumn(ColumnName.of("field1"), SqlTypes.INTEGER)
        .build();
    // When:
    final List<FieldInfo> fields = EntityUtil.buildSourceSchemaEntity(schema);
    // Then:
    assertThat(fields, hasSize(3));
    assertThat(fields.get(0).getName(), equalTo("ROWKEY"));
    assertThat(fields.get(0).getType(), equalTo(Optional.empty()));
    assertThat(fields.get(1).getName(), equalTo("ROWTIME"));
    assertThat(fields.get(1).getType(), equalTo(Optional.empty()));
}
// Resolves the effective lockAtLeastFor: the annotation's explicit value or
// string form, falling back to the configured default.
Duration getLockAtLeastFor(AnnotationData annotation) {
    return getValue(
        annotation.getLockAtLeastFor(),
        annotation.getLockAtLeastForString(),
        this.defaultLockAtLeastFor,
        "lockAtLeastForString");
}
// A negative grace period in the annotation must be rejected.
// (Method name has a pre-existing typo: "shoulFail" — kept to preserve the test id.)
@Test
public void shoulFailOnNegativeLockAtMostFor() throws NoSuchMethodException {
    noopResolver();
    SpringLockConfigurationExtractor.AnnotationData annotation = getAnnotation("annotatedMethodWithNegativeGracePeriod");
    assertThatThrownBy(() -> extractor.getLockAtLeastFor(annotation)).isInstanceOf(IllegalArgumentException.class);
}
@VisibleForTesting static Map<String, ExternalResourceDriver> externalResourceDriversFromConfig( Configuration config, PluginManager pluginManager) { final Set<String> resourceSet = getExternalResourceSet(config); if (resourceSet.isEmpty()) { return Collections.emptyMap(); } final Iterator<ExternalResourceDriverFactory> factoryIterator = pluginManager.load(ExternalResourceDriverFactory.class); final Map<String, ExternalResourceDriverFactory> externalResourceFactories = new HashMap<>(); factoryIterator.forEachRemaining( externalResourceDriverFactory -> externalResourceFactories.put( externalResourceDriverFactory.getClass().getName(), externalResourceDriverFactory)); final Map<String, ExternalResourceDriver> externalResourceDrivers = new HashMap<>(); for (String resourceName : resourceSet) { final ConfigOption<String> driverClassOption = key(ExternalResourceOptions .getExternalResourceDriverFactoryConfigOptionForResource( resourceName)) .stringType() .noDefaultValue(); final String driverFactoryClassName = config.get(driverClassOption); if (StringUtils.isNullOrWhitespaceOnly(driverFactoryClassName)) { LOG.warn( "Could not find driver class name for {}. Please make sure {} is configured.", resourceName, driverClassOption.key()); continue; } ExternalResourceDriverFactory externalResourceDriverFactory = externalResourceFactories.get(driverFactoryClassName); if (externalResourceDriverFactory != null) { DelegatingConfiguration delegatingConfiguration = new DelegatingConfiguration( config, ExternalResourceOptions .getExternalResourceParamConfigPrefixForResource( resourceName)); try { externalResourceDrivers.put( resourceName, externalResourceDriverFactory.createExternalResourceDriver( delegatingConfiguration)); LOG.info("Add external resources driver for {}.", resourceName); } catch (Exception e) { LOG.warn( "Could not instantiate driver with factory {} for {}. 
{}", driverFactoryClassName, resourceName, e); } } else { LOG.warn( "Could not find factory class {} for {}.", driverFactoryClassName, resourceName); } } return externalResourceDrivers; }
// A factory that throws during creation must not register a driver;
// the resulting map is empty.
@Test
public void testFactoryFailedToCreateDriver() {
    final Configuration config = new Configuration();
    final String driverFactoryClassName =
        TestingFailedExternalResourceDriverFactory.class.getName();
    final Map<Class<?>, Iterator<?>> plugins = new HashMap<>();
    plugins.put(
        ExternalResourceDriverFactory.class,
        IteratorUtils.singletonIterator(new TestingFailedExternalResourceDriverFactory()));
    final PluginManager testingPluginManager = new TestingPluginManager(plugins);
    config.set(
        ExternalResourceOptions.EXTERNAL_RESOURCE_LIST,
        Collections.singletonList(RESOURCE_NAME_1));
    config.setString(
        ExternalResourceOptions.getExternalResourceDriverFactoryConfigOptionForResource(
            RESOURCE_NAME_1),
        driverFactoryClassName);
    final Map<String, ExternalResourceDriver> externalResourceDrivers =
        ExternalResourceUtils.externalResourceDriversFromConfig(
            config, testingPluginManager);
    assertThat(externalResourceDrivers.entrySet(), is(empty()));
}
/**
 * Marks the given partition for manual compaction, creating its statistics
 * entry on demand. The update is performed atomically via
 * {@link java.util.Map#compute} on the concurrent statistics map.
 *
 * @param partition partition to compact
 * @return the (possibly newly created) statistics entry with MANUAL_COMPACT priority
 */
public PartitionStatistics triggerManualCompaction(PartitionIdentifier partition) {
    PartitionStatistics statistics = partitionStatisticsHashMap.compute(partition, (key, existing) -> {
        PartitionStatistics stats = (existing != null) ? existing : new PartitionStatistics(partition);
        stats.setPriority(PartitionStatistics.CompactionPriority.MANUAL_COMPACT);
        return stats;
    });
    LOG.info("Trigger manual compaction, {}", statistics);
    return statistics;
}
// Triggering a manual compaction creates a MANUAL_COMPACT entry visible in
// the manager's statistics collection.
@Test
public void testTriggerManualCompaction() {
    CompactionMgr compactionManager = new CompactionMgr();
    PartitionIdentifier partition = new PartitionIdentifier(1, 2, 3);
    PartitionStatistics statistics = compactionManager.triggerManualCompaction(partition);
    Assert.assertEquals(PartitionStatistics.CompactionPriority.MANUAL_COMPACT, statistics.getPriority());
    Collection<PartitionStatistics> allStatistics = compactionManager.getAllStatistics();
    Assert.assertEquals(1, allStatistics.size());
    Assert.assertTrue(allStatistics.contains(statistics));
}
// Opens the jar as a zip filesystem and counts severe license issues:
// invalid/missing NOTICE, invalid/missing LICENSE, stray license files
// outside META-INF, and files under incompatible licenses. Empty test jars
// are exempt and report zero issues.
@VisibleForTesting
static int checkJar(Path file) throws Exception {
    final URI uri = file.toUri();
    int numSevereIssues = 0;
    try (final FileSystem fileSystem =
            FileSystems.newFileSystem(
                    new URI("jar:file", uri.getHost(), uri.getPath(), uri.getFragment()),
                    Collections.emptyMap())) {
        if (isTestJarAndEmpty(file, fileSystem.getPath("/"))) {
            return 0;
        }
        if (!noticeFileExistsAndIsValid(fileSystem.getPath("META-INF", "NOTICE"), file)) {
            numSevereIssues++;
        }
        if (!licenseFileExistsAndIsValid(fileSystem.getPath("META-INF", "LICENSE"), file)) {
            numSevereIssues++;
        }
        numSevereIssues +=
                getNumLicenseFilesOutsideMetaInfDirectory(file, fileSystem.getPath("/"));
        numSevereIssues += getFilesWithIncompatibleLicenses(file, fileSystem.getPath("/"));
    }
    return numSevereIssues;
}
// An invalid NOTICE (with a valid LICENSE) counts as exactly one severe issue.
@Test
void testRejectedOnInvalidNoticeFile(@TempDir Path tempDir) throws Exception {
    assertThat(
            JarFileChecker.checkJar(
                    createJar(
                            tempDir,
                            Entry.fileEntry(INVALID_NOTICE_CONTENTS, VALID_NOTICE_PATH),
                            Entry.fileEntry(
                                    VALID_LICENSE_CONTENTS, VALID_LICENSE_PATH))))
            .isEqualTo(1);
}
// Fetches a side input value, serving from the cache when possible and loading
// from Windmill otherwise. For KNOWN_READY state a cached-but-not-ready entry
// is invalidated and reloaded; for all other states the cache's get-or-load
// path is used. Any failure is logged and rethrown as RuntimeException.
public <T> SideInput<T> fetchSideInput(
    PCollectionView<T> view,
    BoundedWindow sideWindow,
    String stateFamily,
    SideInputState state,
    Supplier<Closeable> scopedReadStateSupplier) {
    Callable<SideInput<T>> loadSideInputFromWindmill =
        () -> loadSideInputFromWindmill(view, sideWindow, stateFamily, scopedReadStateSupplier);
    SideInputCache.Key<T> sideInputCacheKey =
        SideInputCache.Key.create(
            getInternalTag(view), sideWindow, getViewFn(view).getTypeDescriptor());
    try {
        if (state == SideInputState.KNOWN_READY) {
            Optional<SideInput<T>> existingCacheEntry = sideInputCache.get(sideInputCacheKey);
            if (!existingCacheEntry.isPresent()) {
                return sideInputCache.getOrLoad(sideInputCacheKey, loadSideInputFromWindmill);
            }
            if (!existingCacheEntry.get().isReady()) {
                // Stale not-ready entry contradicts KNOWN_READY: replace it.
                return sideInputCache.invalidateThenLoadNewEntry(
                    sideInputCacheKey, loadSideInputFromWindmill);
            }
            return existingCacheEntry.get();
        }
        return sideInputCache.getOrLoad(sideInputCacheKey, loadSideInputFromWindmill);
    } catch (Exception e) {
        LOG.error("Fetch failed: ", e);
        throw new RuntimeException("Exception while fetching side input: ", e);
    }
}
  /**
   * Verifies the side-input cache round trip: a hit is served without a server call, and after the
   * cache is cleared the next fetch goes back to Windmill exactly once.
   */
  @Test
  public void testFetchGlobalDataCacheOverflow() throws Exception {
    // Encode two distinct singleton lists so each server response is distinguishable.
    Coder<List<String>> coder = ListCoder.of(StringUtf8Coder.of());
    ByteStringOutputStream stream = new ByteStringOutputStream();
    coder.encode(Collections.singletonList("data1"), stream, Coder.Context.OUTER);
    ByteString encodedIterable1 = stream.toByteStringAndReset();
    coder.encode(Collections.singletonList("data2"), stream, Coder.Context.OUTER);
    ByteString encodedIterable2 = stream.toByteString();
    Cache<SideInputCache.Key<?>, SideInput<?>> cache = CacheBuilder.newBuilder().build();
    SideInputStateFetcher fetcher =
        new SideInputStateFetcher(server::getSideInputData, new SideInputCache(cache));
    // Two independent singleton views; their internal tags key the global-data requests.
    PCollectionView<String> view1 =
        TestPipeline.create().apply(Create.empty(StringUtf8Coder.of())).apply(View.asSingleton());
    PCollectionView<String> view2 =
        TestPipeline.create().apply(Create.empty(StringUtf8Coder.of())).apply(View.asSingleton());
    String tag1 = view1.getTagInternal().getId();
    String tag2 = view2.getTagInternal().getId();
    // Test four calls in a row. First, fetch view1, then view2 (which evicts view1 from the cache),
    // then view 1 again twice.
    // NOTE: stubbing order matters — the three responses are consumed in call order.
    when(server.getSideInputData(any(Windmill.GlobalDataRequest.class)))
        .thenReturn(
            buildGlobalDataResponse(tag1, true, encodedIterable1),
            buildGlobalDataResponse(tag2, true, encodedIterable2),
            buildGlobalDataResponse(tag1, true, encodedIterable1));
    assertEquals(
        "data1",
        fetcher
            .fetchSideInput(
                view1,
                GlobalWindow.INSTANCE,
                STATE_FAMILY,
                SideInputState.UNKNOWN,
                readStateSupplier)
            .value()
            .orElse(null));
    assertEquals(
        "data2",
        fetcher
            .fetchSideInput(
                view2,
                GlobalWindow.INSTANCE,
                STATE_FAMILY,
                SideInputState.UNKNOWN,
                readStateSupplier)
            .value()
            .orElse(null));
    // Dropping all entries forces the next view1 fetch to hit the server again.
    cache.invalidateAll();
    assertEquals(
        "data1",
        fetcher
            .fetchSideInput(
                view1,
                GlobalWindow.INSTANCE,
                STATE_FAMILY,
                SideInputState.UNKNOWN,
                readStateSupplier)
            .value()
            .orElse(null));
    // Second fetch after the reload must be served from cache (no 4th server call).
    assertEquals(
        "data1",
        fetcher
            .fetchSideInput(
                view1,
                GlobalWindow.INSTANCE,
                STATE_FAMILY,
                SideInputState.UNKNOWN,
                readStateSupplier)
            .value()
            .orElse(null));
    // Exactly three server round trips, in the order tag1, tag2, tag1.
    ArgumentCaptor<Windmill.GlobalDataRequest> captor =
        ArgumentCaptor.forClass(Windmill.GlobalDataRequest.class);
    verify(server, times(3)).getSideInputData(captor.capture());
    verifyNoMoreInteractions(server);
    assertThat(
        captor.getAllValues(),
        contains(
            buildGlobalDataRequest(tag1),
            buildGlobalDataRequest(tag2),
            buildGlobalDataRequest(tag1)));
  }
protected Object[] callRest( Object[] rowData ) throws KettleException { // get dynamic url ? if ( meta.isUrlInField() ) { data.realUrl = data.inputRowMeta.getString( rowData, data.indexOfUrlField ); } // get dynamic method? if ( meta.isDynamicMethod() ) { data.method = data.inputRowMeta.getString( rowData, data.indexOfMethod ); if ( Utils.isEmpty( data.method ) ) { throw new KettleException( BaseMessages.getString( PKG, "Rest.Error.MethodMissing" ) ); } } WebTarget webResource = null; Client client = null; Object[] newRow = null; if ( rowData != null ) { newRow = rowData.clone(); } try { if ( isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "Rest.Log.ConnectingToURL", data.realUrl ) ); } // // Register a custom StringMessageBodyWriter to solve PDI-17423 ClientBuilder clientBuilder = ClientBuilder.newBuilder(); clientBuilder .withConfig( data.config ) .property( HttpUrlConnectorProvider.SET_METHOD_WORKAROUND, true ); if ( meta.isIgnoreSsl() || !Utils.isEmpty( data.trustStoreFile ) ) { clientBuilder.sslContext( data.sslContext ); clientBuilder.hostnameVerifier( ( s1, s2 ) -> true ); } client = clientBuilder.build(); if ( data.basicAuthentication != null ) { client.register( data.basicAuthentication ); } // create a WebResource object, which encapsulates a web resource for the client webResource = client.target( data.realUrl ); // used for calculating the responseTime long startTime = System.currentTimeMillis(); if ( data.useMatrixParams ) { // Add matrix parameters UriBuilder builder = webResource.getUriBuilder(); for ( int i = 0; i < data.nrMatrixParams; i++ ) { String value = data.inputRowMeta.getString( rowData, data.indexOfMatrixParamFields[ i ] ); if ( isDebug() ) { logDebug( BaseMessages.getString( PKG, "Rest.Log.matrixParameterValue", data.matrixParamNames[ i ], value ) ); } builder = builder.matrixParam( data.matrixParamNames[ i ], UriComponent.encode( value, UriComponent.Type.QUERY_PARAM ) ); } webResource = client.target( builder.build() ); } if 
( data.useParams ) { // Add query parameters for ( int i = 0; i < data.nrParams; i++ ) { String value = data.inputRowMeta.getString( rowData, data.indexOfParamFields[ i ] ); if ( isDebug() ) { logDebug( BaseMessages.getString( PKG, "Rest.Log.queryParameterValue", data.paramNames[ i ], value ) ); } webResource = webResource.queryParam( data.paramNames[ i ], UriComponent.encode( value, UriComponent.Type.QUERY_PARAM ) ); } } if ( isDebug() ) { logDebug( BaseMessages.getString( PKG, "Rest.Log.ConnectingToURL", webResource.getUri() ) ); } Invocation.Builder invocationBuilder = webResource.request(); String contentType = null; // media type override, if not null if ( data.useHeaders ) { // Add headers for ( int i = 0; i < data.nrheader; i++ ) { String value = data.inputRowMeta.getString( rowData, data.indexOfHeaderFields[ i ] ); // unsure if an already set header will be returned to builder invocationBuilder.header( data.headerNames[ i ], value ); if ( "Content-Type".equals( data.headerNames[ i ] ) ) { contentType = value; } if ( isDebug() ) { logDebug( BaseMessages.getString( PKG, "Rest.Log.HeaderValue", data.headerNames[ i ], value ) ); } } } Response response; String entityString = ""; if ( data.useBody ) { // Set Http request entity entityString = Const.NVL( data.inputRowMeta.getString( rowData, data.indexOfBodyField ), "" ); if ( isDebug() ) { logDebug( BaseMessages.getString( PKG, "Rest.Log.BodyValue", entityString ) ); } } try { if ( data.method.equals( RestMeta.HTTP_METHOD_GET ) ) { response = invocationBuilder.get( Response.class ); } else if ( data.method.equals( RestMeta.HTTP_METHOD_POST ) ) { if ( null != contentType ) { response = invocationBuilder.post( Entity.entity( entityString, contentType ) ); } else { // response = builder.type( data.mediaType ).post( ClientResponse.class, entityString ); response = invocationBuilder.post( Entity.entity( entityString, data.mediaType ) ); } } else if ( data.method.equals( RestMeta.HTTP_METHOD_PUT ) ) { if ( null != 
contentType ) { response = invocationBuilder.put( Entity.entity( entityString, contentType ) ); } else { response = invocationBuilder.put( Entity.entity( entityString, data.mediaType ) ); } } else if ( data.method.equals( RestMeta.HTTP_METHOD_DELETE ) ) { response = invocationBuilder.delete(); } else if ( data.method.equals( RestMeta.HTTP_METHOD_HEAD ) ) { response = invocationBuilder.head(); } else if ( data.method.equals( RestMeta.HTTP_METHOD_OPTIONS ) ) { response = invocationBuilder.options(); } else if ( data.method.equals( RestMeta.HTTP_METHOD_PATCH ) ) { if ( null != contentType ) { response = invocationBuilder.method( RestMeta.HTTP_METHOD_PATCH, Entity.entity( entityString, contentType ) ); } else { response = invocationBuilder.method( RestMeta.HTTP_METHOD_PATCH, Entity.entity( entityString, data.mediaType ) ); } } else { throw new KettleException( BaseMessages.getString( PKG, "Rest.Error.UnknownMethod", data.method ) ); } } catch ( Exception e ) { throw new KettleException( "Request could not be processed", e ); } // Get response time long responseTime = System.currentTimeMillis() - startTime; if ( isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "Rest.Log.ResponseTime", String.valueOf( responseTime ), data.realUrl ) ); } // Get status int status = response.getStatus(); // Display status code if ( isDebug() ) { logDebug( BaseMessages.getString( PKG, "Rest.Log.ResponseCode", "" + status ) ); } // Get Response String body; String headerString = null; try { body = response.readEntity( String.class ); } catch ( Exception ex ) { body = ""; } // get Header MultivaluedMap<String, Object> headers = searchForHeaders( response ); JSONObject json = new JSONObject(); for ( java.util.Map.Entry<String, List<Object>> entry : headers.entrySet() ) { String name = entry.getKey(); List<Object> value = entry.getValue(); if ( value.size() > 1 ) { json.put( name, value ); } else { json.put( name, value.get( 0 ) ); } } headerString = json.toJSONString(); // for output 
int returnFieldsOffset = data.inputRowMeta.size(); // add response to output if ( !Utils.isEmpty( data.resultFieldName ) ) { newRow = RowDataUtil.addValueData( newRow, returnFieldsOffset, body ); returnFieldsOffset++; } // add status to output if ( !Utils.isEmpty( data.resultCodeFieldName ) ) { newRow = RowDataUtil.addValueData( newRow, returnFieldsOffset, new Long( status ) ); returnFieldsOffset++; } // add response time to output if ( !Utils.isEmpty( data.resultResponseFieldName ) ) { newRow = RowDataUtil.addValueData( newRow, returnFieldsOffset, new Long( responseTime ) ); returnFieldsOffset++; } // add response header to output if ( !Utils.isEmpty( data.resultHeaderFieldName ) ) { newRow = RowDataUtil.addValueData( newRow, returnFieldsOffset, headerString ); } } catch ( Exception e ) { throw new KettleException( BaseMessages.getString( PKG, "Rest.Error.CanNotReadURL", data.realUrl ), e ); } finally { if ( webResource != null ) { webResource = null; } if ( client != null ) { client.close(); } } return newRow; }
@Test public void testPutWithEmptyBody() throws KettleException { Invocation.Builder builder = mock( Invocation.Builder.class ); WebTarget resource = mock( WebTarget.class ); lenient().doReturn( builder ).when( resource ).request(); Client client = mock( Client.class ); lenient().doReturn( resource ).when( client ).target( anyString() ); ClientBuilder clientBuilder = mock( ClientBuilder.class ); lenient().when( clientBuilder.build() ).thenReturn( client ); RestMeta meta = mock( RestMeta.class ); lenient().doReturn( false ).when( meta ).isUrlInField(); lenient().doReturn( false ).when( meta ).isDynamicMethod(); RowMetaInterface rmi = mock( RowMetaInterface.class ); RestData data = mock( RestData.class ); data.method = RestMeta.HTTP_METHOD_PUT; data.config = new ClientConfig(); data.inputRowMeta = rmi; data.realUrl = "http://localhost:8080/pentaho"; data.mediaType = MediaType.TEXT_PLAIN_TYPE; data.useBody = true; // do not set data.indexOfBodyField Rest rest = mock( Rest.class ); doCallRealMethod().when( rest ).callRest( any() ); ReflectionTestUtils.setField( rest, "meta", meta ); ReflectionTestUtils.setField( rest, "data", data ); try { rest.callRest( new Object[] { 0 } ); } catch ( Exception exception ) { // Ignore the ConnectException which is expected as rest call to localhost:8080 will fail in unit test // IllegalStateException is throws when the body is null if ( exception.getCause().getCause() instanceof IllegalStateException ) { Assert.fail( "PUT request with an empty body should not have failed with an IllegalStateException" ); } } }
public String toString() { StringBuilder sb = new StringBuilder(); int i = 1; for (Identifier identifier: mIdentifiers) { if (i > 1) { sb.append(" "); } sb.append("id"); sb.append(i); sb.append(": "); sb.append(identifier == null ? "null" : identifier.toString()); i++; } return sb.toString(); }
@Test public void testToString() { Region region = new Region("myRegion", Identifier.parse("1"), Identifier.parse("2"), null); assertEquals("id1: 1 id2: 2 id3: null", region.toString()); }
@Override public TCreatePartitionResult createPartition(TCreatePartitionRequest request) throws TException { LOG.info("Receive create partition: {}", request); TCreatePartitionResult result; try { if (partitionRequestNum.incrementAndGet() >= Config.thrift_server_max_worker_threads / 4) { result = new TCreatePartitionResult(); TStatus errorStatus = new TStatus(SERVICE_UNAVAILABLE); errorStatus.setError_msgs(Lists.newArrayList( String.format("Too many create partition requests, please try again later txn_id=%d", request.getTxn_id()))); result.setStatus(errorStatus); return result; } result = createPartitionProcess(request); } catch (Exception t) { LOG.warn(DebugUtil.getStackTrace(t)); result = new TCreatePartitionResult(); TStatus errorStatus = new TStatus(RUNTIME_ERROR); errorStatus.setError_msgs(Lists.newArrayList(String.format("txn_id=%d failed. %s", request.getTxn_id(), t.getMessage()))); result.setStatus(errorStatus); } finally { partitionRequestNum.decrementAndGet(); } return result; }
    /**
     * Exercises concurrent list-partition creation: partitions are first created through the
     * thrift createPartition API, then the corresponding add-list-partition edit log entry is
     * replayed against the same table to make sure the replay path tolerates the partition
     * already existing.
     */
    @Test
    public void testAddListPartitionConcurrency() throws UserException, TException {
        // Stub the transaction manager so the partition-creation path sees a live transaction.
        new MockUp<GlobalTransactionMgr>() {
            @Mock
            public TransactionState getTransactionState(long dbId, long transactionId) {
                return new TransactionState();
            }
        };
        Database db = GlobalStateMgr.getCurrentState().getDb("test");
        Table table = db.getTable("site_access_list");
        // Two list-partition values to be created via the thrift createPartition API.
        List<List<String>> partitionValues = Lists.newArrayList();
        List<String> values = Lists.newArrayList();
        values.add("1990-04-24");
        partitionValues.add(values);
        List<String> values2 = Lists.newArrayList();
        values2.add("1990-04-25");
        partitionValues.add(values2);
        FrontendServiceImpl impl = new FrontendServiceImpl(exeEnv);
        TCreatePartitionRequest request = new TCreatePartitionRequest();
        request.setDb_id(db.getId());
        request.setTable_id(table.getId());
        request.setPartition_values(partitionValues);
        TCreatePartitionResult partition = impl.createPartition(request);
        GlobalStateMgr currentState = GlobalStateMgr.getCurrentState();
        Database testDb = currentState.getDb("test");
        OlapTable olapTable = (OlapTable) testDb.getTable("site_access_list");
        PartitionInfo partitionInfo = olapTable.getPartitionInfo();
        DistributionInfo defaultDistributionInfo = olapTable.getDefaultDistributionInfo();
        List<PartitionDesc> partitionDescs = Lists.newArrayList();
        // NOTE(review): variable is named p19910425 but looks up partition "p19900425" — the
        // name appears to be a typo kept from the original; the lookup target is what matters.
        Partition p19910425 = olapTable.getPartition("p19900425");
        // Build a descriptor matching the already-created "p19900425" partition for replay.
        partitionDescs.add(new ListPartitionDesc(Lists.newArrayList("p19900425"),
                Lists.newArrayList(new SingleItemListPartitionDesc(true, "p19900425",
                        Lists.newArrayList("1990-04-25"), Maps.newHashMap()))));
        AddPartitionClause addPartitionClause = new AddPartitionClause(partitionDescs.get(0),
                defaultDistributionInfo.toDistributionDesc(table.getIdToColumn()),
                Maps.newHashMap(), false);
        List<Partition> partitionList = Lists.newArrayList();
        partitionList.add(p19910425);
        // Replay the edit-log entry for the partition that createPartition already added.
        currentState.getLocalMetastore().addListPartitionLog(testDb, olapTable, partitionDescs,
                addPartitionClause.isTempPartition(), partitionInfo, partitionList,
                Sets.newSet("p19900425"));
    }