focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Converts a registry ParsedSchema (expected to be a ProtobufSchema) into its Avro schema
// JSON string, via ProtoConversionUtil using this converter's schemaConfig.
// Throws ClassCastException if the schema is not a ProtobufSchema; IOException from conversion.
public String convert(ParsedSchema schema) throws IOException { ProtobufSchema protobufSchema = (ProtobufSchema) schema; return ProtoConversionUtil.getAvroSchemaForMessageDescriptor(protobufSchema.toDescriptor(), schemaConfig).toString(); }
// Verifies that converting the Parent proto schema yields the Avro schema stored in the
// parent_schema_recursive_default_limit.avsc resource (compared as parsed Schema objects).
@Test void testConvert() throws Exception { TypedProperties properties = new TypedProperties(); properties.setProperty(ProtoClassBasedSchemaProviderConfig.PROTO_SCHEMA_CLASS_NAME.key(), Parent.class.getName()); Schema.Parser parser = new Schema.Parser(); ProtobufSchema protobufSchema = new ProtobufSchema(getProtoSchemaString()); String actual = new ProtoSchemaToAvroSchemaConverter(properties).convert(protobufSchema); Schema actualSchema = new Schema.Parser().parse(actual); Schema expectedSchema = parser.parse(getClass().getClassLoader().getResourceAsStream("schema-provider/proto/parent_schema_recursive_default_limit.avsc")); assertEquals(expectedSchema, actualSchema); }
// Jackson factory for deserializing a LookupCacheKey from JSON; delegates to the
// AutoValue-generated constructor. "key" may be null (prefix-only cache keys).
@JsonCreator @SuppressWarnings("unused") public static LookupCacheKey createFromJSON(@JsonProperty("prefix") String prefix, @JsonProperty("key") @Nullable Object key) { return new AutoValue_LookupCacheKey(prefix, key); }
// Verifies that a LookupCacheKey serializes to a JSON object with exactly the
// "prefix" and "key" fields, both carrying their textual values.
@Test public void serialize() { final LookupCacheKey cacheKey = LookupCacheKey.createFromJSON("prefix", "key"); final JsonNode node = objectMapper.convertValue(cacheKey, JsonNode.class); assertThat(node.isObject()).isTrue(); assertThat(node.fieldNames()).toIterable().containsExactlyInAnyOrder("prefix", "key"); assertThat(node.path("prefix").isTextual()).isTrue(); assertThat(node.path("prefix").asText()).isEqualTo("prefix"); assertThat(node.path("key").isTextual()).isTrue(); assertThat(node.path("key").asText()).isEqualTo("key"); }
// Deserializes jsonString into valueType and also reports which input properties the
// target type did NOT absorb: it flattens both the raw input JSON and the re-serialized
// instance into path->value maps, and returns (instance, entries present only in the input).
// Keys in the returned map are flattened JSON-pointer-style paths (e.g. "/a/b").
public static <T> Pair<T, Map<String, Object>> stringToObjectAndUnrecognizedProperties(String jsonString, Class<T> valueType) throws IOException { T instance = DEFAULT_READER.forType(valueType).readValue(jsonString); Map<String, Object> inputJsonMap = flatten(DEFAULT_MAPPER.readValue(jsonString, MAP_TYPE_REFERENCE)); String instanceJson = DEFAULT_MAPPER.writeValueAsString(instance); Map<String, Object> instanceJsonMap = flatten(DEFAULT_MAPPER.readValue(instanceJson, MAP_TYPE_REFERENCE)); MapDifference<String, Object> difference = Maps.difference(inputJsonMap, instanceJsonMap); return Pair.of(instance, difference.entriesOnlyOnLeft()); }
// Verifies that top-level, nested-object, and nested-field properties absent from the
// POJO are all reported under their flattened "/path" keys.
@Test public void testUnrecognizedJsonProperties() throws Exception { String inputJsonMissingProp = "{\"primitiveIntegerField\": 123, \"missingProp\": 567," + " \"missingObjectProp\": {\"somestuff\": \"data\", \"somemorestuff\":\"moredata\"}," + " \"classField\": {\"internalIntField\": 12, \"internalMissingField\": \"somedata\"}}"; Pair<JsonUtilsTestSamplePojo, Map<String, Object>> parsedResp = JsonUtils.stringToObjectAndUnrecognizedProperties(inputJsonMissingProp, JsonUtilsTestSamplePojo.class); assertTrue(parsedResp.getRight().containsKey("/missingProp")); assertTrue(parsedResp.getRight().containsKey("/missingObjectProp/somestuff")); assertTrue(parsedResp.getRight().containsKey("/missingObjectProp/somemorestuff")); assertTrue(parsedResp.getRight().containsKey("/classField/internalMissingField")); }
// Logs an outbound frame event into the event ring buffer: claims space for the encoded
// record (frame bytes plus the encoded socket address), encodes it, and commits.
// If tryClaim fails (index <= 0, e.g. buffer full) the event is silently dropped.
// The buffer's position/limit are not modified (encode reads at buffer.position()).
public void logFrameOut(final ByteBuffer buffer, final InetSocketAddress dstAddress) { final int length = buffer.remaining() + socketAddressLength(dstAddress); final int captureLength = captureLength(length); final int encodedLength = encodedLength(captureLength); final ManyToOneRingBuffer ringBuffer = this.ringBuffer; final int index = ringBuffer.tryClaim(toEventCodeId(FRAME_OUT), encodedLength); if (index > 0) { try { encode( (UnsafeBuffer)ringBuffer.buffer(), index, captureLength, length, buffer, buffer.position(), dstAddress); } finally { ringBuffer.commit(index); } } }
// Verifies the encoded FRAME_OUT record layout: log header, then the socket address
// (port as int, address length 4 for IPv4), then the captured frame bytes (all -1 here).
@Test void logFrameOut() { final int recordOffset = 24; logBuffer.putLong(CAPACITY + TAIL_POSITION_OFFSET, recordOffset); final ByteBuffer byteBuffer = buffer.byteBuffer(); byteBuffer.position(8); final byte[] bytes = new byte[32]; fill(bytes, (byte)-1); byteBuffer.put(bytes); byteBuffer.flip().position(10).limit(38); final int encodedSocketLength = 12; final int length = byteBuffer.remaining() + encodedSocketLength; final int arrayCaptureLength = length - encodedSocketLength; logger.logFrameOut(byteBuffer, new InetSocketAddress("localhost", 3232)); verifyLogHeader(logBuffer, recordOffset, toEventCodeId(FRAME_OUT), length, length); assertEquals(3232, logBuffer.getInt(encodedMsgOffset(recordOffset + LOG_HEADER_LENGTH), LITTLE_ENDIAN)); assertEquals(4, logBuffer.getInt(encodedMsgOffset(recordOffset + LOG_HEADER_LENGTH + SIZE_OF_INT), LITTLE_ENDIAN)); for (int i = 0; i < arrayCaptureLength; i++) { assertEquals(-1, logBuffer.getByte(encodedMsgOffset(recordOffset + LOG_HEADER_LENGTH + encodedSocketLength + i))); } }
// REST endpoint returning metadata of the latest version of an extension.
// Tries each configured registry in order; the first one that does not throw
// NotFoundException wins (response is marked public/no-cache). If all registries
// miss, returns a 404 with an error JSON body.
@GetMapping( path = "/api/{namespace}/{extension}", produces = MediaType.APPLICATION_JSON_VALUE ) @CrossOrigin @Operation(summary = "Provides metadata of the latest version of an extension") @ApiResponses({ @ApiResponse( responseCode = "200", description = "The extension metadata are returned in JSON format" ), @ApiResponse( responseCode = "404", description = "The specified extension could not be found", content = @Content() ), @ApiResponse( responseCode = "429", description = "A client has sent too many requests in a given amount of time", content = @Content(), headers = { @Header( name = "X-Rate-Limit-Retry-After-Seconds", description = "Number of seconds to wait after receiving a 429 response", schema = @Schema(type = "integer", format = "int32") ), @Header( name = "X-Rate-Limit-Remaining", description = "Remaining number of requests left", schema = @Schema(type = "integer", format = "int32") ) } ) }) public ResponseEntity<ExtensionJson> getExtension( @PathVariable @Parameter(description = "Extension namespace", example = "redhat") String namespace, @PathVariable @Parameter(description = "Extension name", example = "java") String extension ) { for (var registry : getRegistries()) { try { return ResponseEntity.ok() .cacheControl(CacheControl.noCache().cachePublic()) .body(registry.getExtension(namespace, extension, null)); } catch (NotFoundException exc) { // Try the next registry } } var json = ExtensionJson.error("Extension not found: " + NamingUtil.toExtensionId(namespace, extension)); return new ResponseEntity<>(json, HttpStatus.NOT_FOUND); }
// Verifies that requesting the "latest" alias for a non-default target platform
// (alpine-arm64) returns that platform's metadata in the JSON response.
@Test public void testLatestExtensionVersionNonDefaultTarget() throws Exception { var extVersion = mockExtension("alpine-arm64"); extVersion.setDisplayName("Foo Bar (alpine arm64)"); Mockito.when(repositories.findExtensionVersion("foo", "bar", null, VersionAlias.LATEST)).thenReturn(extVersion); Mockito.when(repositories.findLatestVersionForAllUrls(extVersion.getExtension(), null, false, true)).thenReturn(extVersion); mockMvc.perform(get("/api/{namespace}/{extension}/{version}", "foo", "bar", "latest")) .andExpect(status().isOk()) .andExpect(content().json(extensionJson(e -> { e.namespace = "foo"; e.name = "bar"; e.version = "1.0.0"; e.verified = false; e.timestamp = "2000-01-01T10:00Z"; e.displayName = "Foo Bar (alpine arm64)"; e.versionAlias = List.of("latest"); e.targetPlatform = "alpine-arm64"; }))); }
// Factory for a key query over all versions of the given key, with no time bounds
// and unspecified result order. Rejects null keys with an explicit NPE message.
public static <K, V> MultiVersionedKeyQuery<K, V> withKey(final K key) { Objects.requireNonNull(key, "key cannot be null."); return new MultiVersionedKeyQuery<>(key, Optional.empty(), Optional.empty(), ResultOrder.ANY); }
// Verifies that withKey(null) throws NullPointerException with the exact message.
@Test public void shouldThrowNPEWithNullKey() { final Exception exception = assertThrows(NullPointerException.class, () -> MultiVersionedKeyQuery.withKey(null)); assertEquals("key cannot be null.", exception.getMessage()); }
// Entry point for building a custom configuration; returns a fresh Builder.
public static Builder custom() { return new Builder(); }
// Verifies that a slow-call rate threshold of 0 is rejected at build time.
@Test(expected = IllegalArgumentException.class) public void zeroSlowCallRateThresholdShouldFail() { custom().slowCallRateThreshold(0).build(); }
// Static factory wrapping a CommandInput in a CloudStringReader.
static @NonNull CloudStringReader of(final @NonNull CommandInput commandInput) { return new CloudStringReader(commandInput); }
// Verifies that reads performed through the StringReader view advance the underlying
// CommandInput: after reading "hi" and "minecraft" (up to ':'), "pig" remains.
@Test void testPartialWorldRead() throws CommandSyntaxException { // Arrange final CommandInput commandInput = CommandInput.of("hi minecraft:pig"); final StringReader stringReader = CloudStringReader.of(commandInput); // Act final String readString1 = stringReader.readString(); stringReader.skipWhitespace(); final String readString2 = stringReader.readStringUntil(':'); // Assert assertThat(readString1).isEqualTo("hi"); assertThat(readString2).isEqualTo("minecraft"); assertThat(commandInput.remainingInput()).isEqualTo("pig"); }
/**
 * Convenience overload: unwraps the row's raw value array and delegates to the
 * array-based {@code replacePlaceholders} variant.
 */
static Document replacePlaceholders(Document doc, ExpressionEvalContext evalContext, JetSqlRow inputRow,
                                    String[] externalNames, boolean forRow) {
    return replacePlaceholders(doc, evalContext, inputRow.getValues(), externalNames, forRow);
}
// Verifies mixed replacement: InputRef placeholders in keys become external column
// names, and a DynamicParameter placeholder resolves to the bound argument value.
@Test public void replaces_mixed() { // given Document embedded = new Document("<!InputRef(1)!>", "<!DynamicParameter(0)!>"); Document doc = new Document("<!InputRef(0)!>", embedded); // when List<Object> arguments = singletonList("dwa"); Object[] inputs = {"jeden", "test"}; String[] externalNames = {"col1", "col2"}; JetSqlRow inputRow = new JetSqlRow(getInternalSerializationService(), inputs); Bson result = PlaceholderReplacer.replacePlaceholders(doc, evalContext(arguments), inputRow, externalNames, true); // then assertThat(result).isInstanceOf(Document.class); Document expected = new Document("col1", new Document("col2", "dwa")); assertThat(result).isEqualTo(expected); }
public static String propertiesToString(Properties props) throws IOException { String result = ""; if (props != null) { DataByteArrayOutputStream dataOut = new DataByteArrayOutputStream(); props.store(dataOut, ""); result = new String(dataOut.getData(), 0, dataOut.size()); dataOut.close(); } return result; }
// Round-trip check: 10 key/value pairs survive propertiesToString -> stringToProperties.
@Test public void testPropertiesToString() throws Exception { Properties props = new Properties(); for (int i = 0; i < 10; i++) { String key = "key" + i; String value = "value" + i; props.put(key, value); } String str = MarshallingSupport.propertiesToString(props); Properties props2 = MarshallingSupport.stringToProperties(str); assertEquals(props, props2); }
// Metered range query: serializes the bounds and wraps the inner iterator with the
// range sensor. NOTE: although null bounds are serialized to null here, Bytes.wrap
// rejects null, so a null 'from' or 'to' surfaces as a NullPointerException
// (see the companion test) — the null checks above do not make null bounds legal.
@Override public KeyValueIterator<K, V> range(final K from, final K to) { final byte[] serFrom = from == null ? null : serdes.rawKey(from); final byte[] serTo = to == null ? null : serdes.rawKey(to); return new MeteredKeyValueIterator( wrapped().range(Bytes.wrap(serFrom), Bytes.wrap(serTo)), rangeSensor ); }
// Verifies that a null 'to' bound causes a NullPointerException (from Bytes.wrap).
@Test public void shouldThrowNullPointerOnRangeIfToIsNull() { setUpWithoutContext(); assertThrows(NullPointerException.class, () -> metered.range("from", null)); }
// Returns an iterator over the lines of the given string (see LineIterator for
// line-termination semantics).
public static Iterator<String> lineIterator(String input) { return new LineIterator(input); }
// Verifies the iterator yields exactly two lines whether or not the input ends with
// a newline, and then throws NoSuchElementException.
@Test public void terminalLine() { Iterator<String> it = Newlines.lineIterator("foo\nbar\n"); it.next(); it.next(); try { it.next(); fail(); } catch (NoSuchElementException e) { // expected } it = Newlines.lineIterator("foo\nbar"); it.next(); it.next(); try { it.next(); fail(); } catch (NoSuchElementException e) { // expected } }
// Maps a MySQL column type definition to a SeaTunnel Column. Normalizes the raw type
// name first (strips a trailing ZEROFILL, appends " UNSIGNED" when flagged), then
// switches over every supported MySQL type: BIT maps to BOOLEAN (length <= 1) or a
// byte array sized ceil(length/8); TINYINT(1) is treated as BOOLEAN; unsigned integer
// types are widened one level (e.g. INT UNSIGNED -> LONG, BIGINT UNSIGNED ->
// DECIMAL(20,0)); DECIMAL precision is capped at DEFAULT_PRECISION with a warning;
// text/blob variants get fixed max column lengths (2^8-1 .. 2^32-1); temporal types
// map to the LocalTimeType family. Unknown types raise convertToSeaTunnelTypeError.
// NOTE(review): spread over four source lines below because the dataset collapsed
// formatting — the code is unchanged.
@Override public Column convert(BasicTypeDefine typeDefine) { PhysicalColumn.PhysicalColumnBuilder builder = PhysicalColumn.builder() .name(typeDefine.getName()) .sourceType(typeDefine.getColumnType()) .nullable(typeDefine.isNullable()) .defaultValue(typeDefine.getDefaultValue()) .comment(typeDefine.getComment()); String mysqlDataType = typeDefine.getDataType().toUpperCase(); if (mysqlDataType.endsWith("ZEROFILL")) { mysqlDataType = mysqlDataType.substring(0, mysqlDataType.length() - "ZEROFILL".length()).trim(); } if (typeDefine.isUnsigned() && !(mysqlDataType.endsWith(" UNSIGNED"))) { mysqlDataType = mysqlDataType + " UNSIGNED"; } switch (mysqlDataType) { case MYSQL_NULL: builder.dataType(BasicType.VOID_TYPE); break; case MYSQL_BIT: if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) { builder.dataType(BasicType.BOOLEAN_TYPE); } else if (typeDefine.getLength() == 1) { builder.dataType(BasicType.BOOLEAN_TYPE); } else { builder.dataType(PrimitiveByteArrayType.INSTANCE); // BIT(M) -> BYTE(M/8) long byteLength = typeDefine.getLength() / 8; byteLength += typeDefine.getLength() % 8 > 0 ? 
1 : 0; builder.columnLength(byteLength); } break; case MYSQL_TINYINT: if (typeDefine.getColumnType().equalsIgnoreCase("tinyint(1)")) { builder.dataType(BasicType.BOOLEAN_TYPE); } else { builder.dataType(BasicType.BYTE_TYPE); } break; case MYSQL_TINYINT_UNSIGNED: case MYSQL_SMALLINT: builder.dataType(BasicType.SHORT_TYPE); break; case MYSQL_SMALLINT_UNSIGNED: case MYSQL_MEDIUMINT: case MYSQL_MEDIUMINT_UNSIGNED: case MYSQL_INT: case MYSQL_INTEGER: case MYSQL_YEAR: builder.dataType(BasicType.INT_TYPE); break; case MYSQL_INT_UNSIGNED: case MYSQL_INTEGER_UNSIGNED: case MYSQL_BIGINT: builder.dataType(BasicType.LONG_TYPE); break; case MYSQL_BIGINT_UNSIGNED: DecimalType intDecimalType = new DecimalType(20, 0); builder.dataType(intDecimalType); builder.columnLength(Long.valueOf(intDecimalType.getPrecision())); builder.scale(intDecimalType.getScale()); break; case MYSQL_FLOAT: builder.dataType(BasicType.FLOAT_TYPE); break; case MYSQL_FLOAT_UNSIGNED: log.warn("{} will probably cause value overflow.", MYSQL_FLOAT_UNSIGNED); builder.dataType(BasicType.FLOAT_TYPE); break; case MYSQL_DOUBLE: builder.dataType(BasicType.DOUBLE_TYPE); break; case MYSQL_DOUBLE_UNSIGNED: log.warn("{} will probably cause value overflow.", MYSQL_DOUBLE_UNSIGNED); builder.dataType(BasicType.DOUBLE_TYPE); break; case MYSQL_DECIMAL: Preconditions.checkArgument(typeDefine.getPrecision() > 0); DecimalType decimalType; if (typeDefine.getPrecision() > DEFAULT_PRECISION) { log.warn("{} will probably cause value overflow.", MYSQL_DECIMAL); decimalType = new DecimalType(DEFAULT_PRECISION, DEFAULT_SCALE); } else { decimalType = new DecimalType( typeDefine.getPrecision().intValue(), typeDefine.getScale() == null ? 
0 : typeDefine.getScale().intValue()); } builder.dataType(decimalType); builder.columnLength(Long.valueOf(decimalType.getPrecision())); builder.scale(decimalType.getScale()); break; case MYSQL_DECIMAL_UNSIGNED: Preconditions.checkArgument(typeDefine.getPrecision() > 0); log.warn("{} will probably cause value overflow.", MYSQL_DECIMAL_UNSIGNED); DecimalType decimalUnsignedType = new DecimalType( typeDefine.getPrecision().intValue() + 1, typeDefine.getScale() == null ? 0 : typeDefine.getScale().intValue()); builder.dataType(decimalUnsignedType); builder.columnLength(Long.valueOf(decimalUnsignedType.getPrecision())); builder.scale(decimalUnsignedType.getScale()); break; case MYSQL_ENUM: builder.dataType(BasicType.STRING_TYPE); if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) { builder.columnLength(100L); } else { builder.columnLength(typeDefine.getLength()); } break; case MYSQL_CHAR: case MYSQL_VARCHAR: if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) { builder.columnLength(TypeDefineUtils.charTo4ByteLength(1L)); } else { builder.columnLength(typeDefine.getLength()); } builder.dataType(BasicType.STRING_TYPE); break; case MYSQL_TINYTEXT: builder.dataType(BasicType.STRING_TYPE); builder.columnLength(POWER_2_8 - 1); break; case MYSQL_TEXT: builder.dataType(BasicType.STRING_TYPE); builder.columnLength(POWER_2_16 - 1); break; case MYSQL_MEDIUMTEXT: builder.dataType(BasicType.STRING_TYPE); builder.columnLength(POWER_2_24 - 1); break; case MYSQL_LONGTEXT: builder.dataType(BasicType.STRING_TYPE); builder.columnLength(POWER_2_32 - 1); break; case MYSQL_JSON: builder.dataType(BasicType.STRING_TYPE); break; case MYSQL_BINARY: case MYSQL_VARBINARY: if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) { builder.columnLength(1L); } else { builder.columnLength(typeDefine.getLength()); } builder.dataType(PrimitiveByteArrayType.INSTANCE); break; case MYSQL_TINYBLOB: builder.dataType(PrimitiveByteArrayType.INSTANCE); 
builder.columnLength(POWER_2_8 - 1); break; case MYSQL_BLOB: builder.dataType(PrimitiveByteArrayType.INSTANCE); builder.columnLength(POWER_2_16 - 1); break; case MYSQL_MEDIUMBLOB: builder.dataType(PrimitiveByteArrayType.INSTANCE); builder.columnLength(POWER_2_24 - 1); break; case MYSQL_LONGBLOB: builder.dataType(PrimitiveByteArrayType.INSTANCE); builder.columnLength(POWER_2_32 - 1); break; case MYSQL_GEOMETRY: builder.dataType(PrimitiveByteArrayType.INSTANCE); break; case MYSQL_DATE: builder.dataType(LocalTimeType.LOCAL_DATE_TYPE); break; case MYSQL_TIME: builder.dataType(LocalTimeType.LOCAL_TIME_TYPE); builder.scale(typeDefine.getScale()); break; case MYSQL_DATETIME: case MYSQL_TIMESTAMP: builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE); builder.scale(typeDefine.getScale()); break; default: throw CommonError.convertToSeaTunnelTypeError( DatabaseIdentifier.MYSQL, mysqlDataType, typeDefine.getName()); } return builder.build(); }
// Verifies that binary(1) and varbinary(1) both convert to a byte-array column of
// length 1 with name and source type preserved.
@Test public void testConvertBinary() { BasicTypeDefine<Object> typeDefine = BasicTypeDefine.builder() .name("test") .columnType("binary(1)") .dataType("binary") .length(1L) .build(); Column column = MySqlTypeConverter.DEFAULT_INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(PrimitiveByteArrayType.INSTANCE, column.getDataType()); Assertions.assertEquals(1, column.getColumnLength()); Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType()); typeDefine = BasicTypeDefine.builder() .name("test") .columnType("varbinary(1)") .dataType("varbinary") .length(1L) .build(); column = MySqlTypeConverter.DEFAULT_INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(PrimitiveByteArrayType.INSTANCE, column.getDataType()); Assertions.assertEquals(1, column.getColumnLength()); Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType()); }
/**
 * Classifies a Hive input-format class name as a Hudi table type.
 * Both the current (org.apache.hudi) and legacy (com.uber.hoodie) class names are
 * recognized; anything else is UNKNOWN. A null argument throws NullPointerException
 * (same as the original switch on a null String).
 */
public static HudiTableType fromInputFormat(String inputFormat) {
    if (inputFormat.equals(COW_INPUT_FORMAT) || inputFormat.equals(COW_INPUT_FORMAT_LEGACY)) {
        return HudiTableType.COW;
    } else if (inputFormat.equals(MOR_RT_INPUT_FORMAT) || inputFormat.equals(MOR_RT_INPUT_FORMAT_LEGACY)) {
        return HudiTableType.MOR;
    } else {
        return HudiTableType.UNKNOWN;
    }
}
// Verifies current and legacy input-format class names map to COW/MOR, and
// unrelated formats map to UNKNOWN.
@Test public void testInputFormat() { Assert.assertEquals(HudiTable.HudiTableType.COW, HudiTable.fromInputFormat("org.apache.hudi.hadoop.HoodieParquetInputFormat")); Assert.assertEquals(HudiTable.HudiTableType.COW, HudiTable.fromInputFormat("com.uber.hoodie.hadoop.HoodieInputFormat")); Assert.assertEquals(HudiTable.HudiTableType.MOR, HudiTable.fromInputFormat("org.apache.hudi.hadoop.realtime.HoodieParquetRealtimeInputFormat")); Assert.assertEquals(HudiTable.HudiTableType.MOR, HudiTable.fromInputFormat("com.uber.hoodie.hadoop.realtime.HoodieRealtimeInputFormat")); Assert.assertEquals(HudiTable.HudiTableType.UNKNOWN, HudiTable.fromInputFormat("org.apache.hadoop.hive.ql.io.HiveInputFormat")); }
// Returns the raw cell value for the column and records null-ness in wasNull.
// NOTE: the calendar (and type) parameters are currently ignored — the TODO below
// marks the pending calendar-aware conversion.
@Override public final Object getCalendarValue(final int columnIndex, final Class<?> type, final Calendar calendar) { // TODO implement with calendar Object result = currentResultSetRow.getCell(columnIndex); wasNull = null == result; return result; }
// Verifies that the cell value is returned unchanged regardless of the calendar argument.
@Test void assertGetCalendarValue() { when(memoryResultSetRow.getCell(1)).thenReturn(new Date(0L)); assertThat(memoryMergedResult.getCalendarValue(1, Object.class, Calendar.getInstance()), is(new Date(0L))); }
// Decorates an existing route context for single (non-sharded) tables: collects the
// single tables referenced by the statement and, if a single-table route engine
// applies, lets it adjust the route context in place.
@Override public void decorateRouteContext(final RouteContext routeContext, final QueryContext queryContext, final ShardingSphereDatabase database, final SingleRule rule, final ConfigurationProperties props, final ConnectionContext connectionContext) { SQLStatementContext sqlStatementContext = queryContext.getSqlStatementContext(); Collection<QualifiedTable> singleTables = getSingleTables(database, rule, routeContext, sqlStatementContext); SingleRouteEngineFactory.newInstance(singleTables, sqlStatementContext.getSqlStatement()).ifPresent(optional -> optional.route(routeContext, rule)); }
// Verifies decoration over two data sources: after routing, the actual data source
// names iterate as ds_1 then ds_0.
@Test void assertDecorateRouteContextWithMultiDataSource() throws SQLException { SingleRule rule = new SingleRule(new SingleRuleConfiguration(), DefaultDatabase.LOGIC_NAME, new H2DatabaseType(), createMultiDataSourceMap(), Collections.emptyList()); RouteContext routeContext = new RouteContext(); routeContext.getRouteUnits().add(new RouteUnit(new RouteMapper("ds_0", "ds_0"), Collections.emptyList())); routeContext.getRouteUnits().add(new RouteUnit(new RouteMapper("ds_1", "ds_1"), Collections.emptyList())); SingleSQLRouter sqlRouter = (SingleSQLRouter) OrderedSPILoader.getServices(SQLRouter.class, Collections.singleton(rule)).get(rule); sqlRouter.decorateRouteContext( routeContext, createQueryContext(), mockDatabaseWithMultipleResources(), rule, new ConfigurationProperties(new Properties()), new ConnectionContext(Collections::emptySet)); Iterator<String> routedDataSourceNames = routeContext.getActualDataSourceNames().iterator(); assertThat(routedDataSourceNames.next(), is("ds_1")); assertThat(routedDataSourceNames.next(), is("ds_0")); }
// Deprecated shim: delegates variable resolution to the context's SecretSourceResolver.
@Deprecated @Restricted(DoNotUse.class) public static String resolve(ConfigurationContext context, String toInterpolate) { return context.getSecretSourceResolver().resolve(toInterpolate); }
// Verifies that ^-escaped ${...} tokens are emitted literally (un-escaped, unresolved).
@Test public void resolve_multipleEntriesEscaped() { assertThat(resolve("^${FOO}:^${BAR}"), equalTo("${FOO}:${BAR}")); }
// Returns the number of whole months between begin and end.
// With isReset == false, a partial final month is not counted: end is shifted to
// begin's year/month and the remaining millis are compared; if negative, one month
// is subtracted. With isReset == true, the raw year*12+month difference is returned.
// NOTE(review): the day-of-month shift via Calendar.set can behave surprisingly at
// month-end boundaries (e.g. the 31st vs a 28/30-day month) — confirm against the
// library's documented semantics before relying on those edge cases.
public long betweenMonth(boolean isReset) { final Calendar beginCal = DateUtil.calendar(begin); final Calendar endCal = DateUtil.calendar(end); final int betweenYear = endCal.get(Calendar.YEAR) - beginCal.get(Calendar.YEAR); final int betweenMonthOfYear = endCal.get(Calendar.MONTH) - beginCal.get(Calendar.MONTH); int result = betweenYear * 12 + betweenMonthOfYear; if (false == isReset) { endCal.set(Calendar.YEAR, beginCal.get(Calendar.YEAR)); endCal.set(Calendar.MONTH, beginCal.get(Calendar.MONTH)); long between = endCal.getTimeInMillis() - beginCal.getTimeInMillis(); if (between < 0) { return result - 1; } } return result; }
// Verifies month counting without reset: exact year = 12, a month over = 13, and an
// hour short of a full year = 11 (partial months are not counted).
@Test public void betweenMonthTest() { Date start = DateUtil.parse("2017-02-01 12:23:46"); Date end = DateUtil.parse("2018-02-01 12:23:46"); long betweenMonth = new DateBetween(start, end).betweenMonth(false); assertEquals(12, betweenMonth); Date start1 = DateUtil.parse("2017-02-01 12:23:46"); Date end1 = DateUtil.parse("2018-03-01 12:23:46"); long betweenMonth1 = new DateBetween(start1, end1).betweenMonth(false); assertEquals(13, betweenMonth1); // 不足 (partial month not counted) Date start2 = DateUtil.parse("2017-02-01 12:23:46"); Date end2 = DateUtil.parse("2018-02-01 11:23:46"); long betweenMonth2 = new DateBetween(start2, end2).betweenMonth(false); assertEquals(11, betweenMonth2); }
// Persists the session status via a ZooKeeper transaction; any failure is wrapped
// in a RuntimeException (cause preserved) so callers see a single failure mode.
public void writeStatus(Session.Status sessionStatus) { try { createWriteStatusTransaction(sessionStatus).commit(); } catch (Exception e) { throw new RuntimeException("Unable to write session status", e); } }
// Verifies that writeStatus creates the session-state node with the status name as data.
@Test public void require_that_status_is_written_to_zk() { int sessionId = 2; SessionZooKeeperClient zkc = createSessionZKClient(sessionId); zkc.writeStatus(Session.Status.NEW); Path path = sessionPath(sessionId).append(SESSIONSTATE_ZK_SUBPATH); assertTrue(curator.exists(path)); assertEquals("NEW", Utf8.toString(curator.getData(path).get())); }
// This analyzer always reports itself as an output step.
@Override public boolean isOutput() { return true; }
// Verifies isOutput() returns true.
@Test public void testIsOutput() throws Exception { assertTrue( analyzer.isOutput() ); }
// Stops the component via super, then snapshots the open-request counter once and
// logs it — warning (but not failing) if requests are still in flight at stop time.
@Override protected void doStop() throws Exception { super.doStop(); int openRequestsAtStop = openRequests.get(); log.debug("Stopping with {} open requests", openRequestsAtStop); if (openRequestsAtStop > 0) { log.warn("There are still {} open requests", openRequestsAtStop); } }
// Smoke test: doStop must complete without throwing while a request is still open.
@Test public void doStopOpenRequest() throws Exception { sut.openRequests.incrementAndGet(); sut.doStop(); }
// Returns a copy of conf with every variable expansion baked in: conf.get() (unlike
// entry.getValue()/getRaw()) performs ${var} substitution, so the copy's raw values
// are the fully resolved strings.
public static Configuration resolve(Configuration conf) { Configuration resolved = new Configuration(false); for (Map.Entry<String, String> entry : conf) { resolved.set(entry.getKey(), conf.get(entry.getKey())); } return resolved; }
// Verifies that after resolve(), getRaw("b") holds the substituted value "A"
// instead of the literal "${a}".
@Test public void resolve() { Configuration conf = new Configuration(false); conf.set("a", "A"); conf.set("b", "${a}"); assertEquals(conf.getRaw("a"), "A"); assertEquals(conf.getRaw("b"), "${a}"); conf = ConfigurationUtils.resolve(conf); assertEquals(conf.getRaw("a"), "A"); assertEquals(conf.getRaw("b"), "A"); }
// Creates a personal access token for the logged-in user. Returns 400 if the optional
// description exceeds TOKEN_DESCRIPTION_SIZE, 403 if no user is logged in, otherwise
// 201 with the new token JSON.
@PostMapping( path = "/user/token/create", produces = MediaType.APPLICATION_JSON_VALUE ) public ResponseEntity<AccessTokenJson> createAccessToken(@RequestParam(required = false) String description) { if (description != null && description.length() > TOKEN_DESCRIPTION_SIZE) { var json = AccessTokenJson.error("The description must not be longer than " + TOKEN_DESCRIPTION_SIZE + " characters."); return new ResponseEntity<>(json, HttpStatus.BAD_REQUEST); } var user = users.findLoggedInUser(); if (user == null) { return new ResponseEntity<>(HttpStatus.FORBIDDEN); } return new ResponseEntity<>(users.createAccessToken(user, description), HttpStatus.CREATED); }
// Verifies the happy path: an authenticated request with a valid description yields
// 201 and the generated token value/description in the response JSON.
@Test public void testCreateAccessToken() throws Exception { mockUserData(); Mockito.doReturn("foobar").when(users).generateTokenValue(); mockMvc.perform(post("/user/token/create?description={description}", "This is my token") .with(user("test_user")) .with(csrf().asHeader())) .andExpect(status().isCreated()) .andExpect(content().json(accessTokenJson(t -> { t.value = "foobar"; t.description = "This is my token"; }))); }
// Converts an entity to a Map representation: Strings are wrapped as {"data": ...};
// Void types yield null; other types are converted to a Map directly, and when that
// fails with IllegalArgumentException (e.g. the entity is a List) the value is
// converted to a List and wrapped as {"data": [...]}.
public Map<String, Object> convertValue(final Object entity, final Class<?> entityClass) { if (entityClass.equals(String.class)) { return Collections.singletonMap("data", objectMapper.convertValue(entity, String.class)); } else if (!entityClass.equals(Void.class) && !entityClass.equals(void.class)) { final TypeReference<Map<String, Object>> typeRef = new TypeReference<>() { }; try { return objectMapper.convertValue(entity, typeRef); } catch (IllegalArgumentException e) { // Try to convert the response to a list if converting to a map failed. final TypeReference<List<Object>> arrayTypeRef = new TypeReference<>() { }; return Collections.singletonMap("data", objectMapper.convertValue(entity, arrayTypeRef)); } } return null; }
// Verifies the List fallback: a list of entities comes back as a single "data" entry
// containing a list of per-entity maps with their field values.
@Test public void convertsListOfEntities() { final SimpleEntity firstObject = new SimpleEntity("foo", 1); final SimpleEntity secondObject = new SimpleEntity("bar", 42); final Map<String, Object> result = toTest.convertValue(Arrays.asList(firstObject, secondObject), SimpleEntity.class); assertNotNull(result); assertEquals(1, result.size()); final Object data = result.get("data"); assertTrue(data instanceof List); List dataList = (List) data; assertEquals(2, dataList.size()); assertTrue(dataList.get(0) instanceof Map); final Map element1 = (Map) dataList.get(0); assertEquals("foo", element1.get("text")); assertEquals(1, element1.get("number")); assertTrue(dataList.get(1) instanceof Map); final Map element2 = (Map) dataList.get(1); assertEquals("bar", element2.get("text")); assertEquals(42, element2.get("number")); }
// Copies the whole src array into this buffer at index; delegates to the
// (index, src, srcIndex, length) overload and returns this for chaining.
@Override public ByteBuf setBytes(int index, byte[] src) { setBytes(index, src, 0, src.length); return this; }
// Verifies that setBytes on a released buffer throws IllegalReferenceCountException.
@Test public void testSetBytesAfterRelease5() { assertThrows(IllegalReferenceCountException.class, new Executable() { @Override public void execute() { releasedBuffer().setBytes(0, new byte[8], 0, 1); } }); }
/**
 * Reverses the byte order of each aligned 8-byte group in b[off .. off+len),
 * in place, and returns b. len must be a multiple of 8 (checked by checkLength).
 */
public static byte[] swapLongs(byte b[], int off, int len) {
    checkLength(len, 8);
    // For each 8-byte group, swap the four mirrored byte pairs.
    for (int base = off, end = off + len; base < end; base += 8) {
        for (int k = 0; k < 4; k++) {
            swap(b, base + k, base + 7 - k);
        }
    }
    return b;
}
// Verifies that swapping a big-endian double-PI encoding yields its little-endian form.
@Test public void testSwapLongs() { assertArrayEquals(DOUBLE_PI_LE , ByteUtils.swapLongs(DOUBLE_PI_BE.clone(), 0, DOUBLE_PI_BE.length)); }
// Static info-level logging facade; delegates to the configured sLogger instance.
// NOTE(review): the companion test exercises an (Throwable, tag, message) overload
// not visible here — presumably a sibling method; verify both overloads exist.
public static void i(String tag, String message, Object... args) { sLogger.i(tag, message, args); }
// Verifies the throwable-carrying overload forwards all arguments to the logger.
@Test public void infoWithThrowable() { String tag = "TestTag"; String message = "Test message"; Throwable t = new Throwable(); LogManager.i(t, tag, message); verify(logger).i(t, tag, message); }
// Template parser: scans the input, collecting characters between a prepare marker
// and its end marker into a growable expression buffer (grown 1.5x on overflow),
// mapping each complete expression through propertyMapping and appending the result;
// characters outside expressions are copied through. The trailing branch flushes an
// expression left pending when input ends exactly at a prepare-end marker.
// NOTE(review): the final else-branch re-appends the last symbol after the loop —
// state-machine behavior depends on isPrepare/isPrepareEnd/isPreparing internals not
// visible here; confirm before restructuring.
public String parse(Function<String, String> propertyMapping) { init(); boolean inPrepare = false; char[] expression = new char[128]; int expressionPos = 0; while (next()) { if (isPrepare()) { inPrepare = true; } else if (inPrepare && isPrepareEnd()) { inPrepare = false; setParsed(propertyMapping.apply(new String(expression, 0, expressionPos)).toCharArray()); expressionPos = 0; } else if (inPrepare) { if (expression.length <= expressionPos) { expression = Arrays.copyOf(expression, (int)(expression.length * 1.5)); } expression[expressionPos++] = symbol; } else if (!isPreparing()) { setParsed(symbol); } } if (isPrepareEnd() && expressionPos > 0) { setParsed(propertyMapping.apply(new String(expression, 0, expressionPos)).toCharArray()); } else { setParsed(symbol); } return new String(newArr, 0, len); }
// Verifies that an expression far larger than the initial 128-char buffer is parsed
// intact (exercising the 1.5x buffer-growth path) with an identity mapping.
@Test public void testLargeExpr() { String expr = ""; for (int i = 0; i < 1000; i++) { expr += "expr_" + i; } String result = TemplateParser.parse("${"+expr+"}", Function.identity()); assertEquals(expr,result); }
// Returns the preferred local parallelism from the connector options if configured,
// otherwise the Kafka processor's default. Throws NumberFormatException on a
// non-numeric option value.
int preferredLocalParallelism() { if (options.containsKey(SqlConnector.OPTION_PREFERRED_LOCAL_PARALLELISM)) { return Integer.parseInt(options.get(SqlConnector.OPTION_PREFERRED_LOCAL_PARALLELISM)); } return StreamKafkaP.PREFERRED_LOCAL_PARALLELISM; }
// Verifies that with no options set, the processor default parallelism is used.
@Test public void when_preferredLocalParallelism_isNotDefined_then_useDefault() { KafkaTable table = new KafkaTable( null, null, null, null, null, null, null, emptyMap(), null, null, null, null, null ); assertThat(table.preferredLocalParallelism()) .isEqualTo(StreamKafkaP.PREFERRED_LOCAL_PARALLELISM); }
// RangeSet is immutable: mutation is rejected unconditionally.
@Override public boolean add(Integer integer) { throw new UnsupportedOperationException("RangeSet is immutable"); }
// Verifies symmetric equality between RangeSet(4) and a HashSet: unequal until the
// HashSet holds exactly {0,1,2,3}, then equal in both directions.
@Test public void equals2() throws Exception { IntSet rs = new RangeSet(4); Set<Integer> hashSet = new HashSet<>(); hashSet.add(0); hashSet.add(1); hashSet.add(2); // Verify equals both ways assertNotEquals(rs, hashSet); assertNotEquals(hashSet, rs); hashSet.add(3); assertEquals(rs, hashSet); assertEquals(hashSet, rs); }
// Negotiates the permessage-deflate WebSocket extension from the server's response
// parameters. Validates each parameter against this client's allowances (window
// sizes within [MIN_WINDOW_SIZE, MAX_WINDOW_SIZE], context-takeover flags only if
// permitted); any unknown parameter, disallowed option, or a server response weaker
// than what was requested (missing server_no_context_takeover, or a server window
// larger than requested) aborts the handshake by returning null. On success returns
// the configured PermessageDeflateExtension.
@Override public WebSocketClientExtension handshakeExtension(WebSocketExtensionData extensionData) { if (!PERMESSAGE_DEFLATE_EXTENSION.equals(extensionData.name())) { return null; } boolean succeed = true; int clientWindowSize = MAX_WINDOW_SIZE; int serverWindowSize = MAX_WINDOW_SIZE; boolean serverNoContext = false; boolean clientNoContext = false; Iterator<Entry<String, String>> parametersIterator = extensionData.parameters().entrySet().iterator(); while (succeed && parametersIterator.hasNext()) { Entry<String, String> parameter = parametersIterator.next(); if (CLIENT_MAX_WINDOW.equalsIgnoreCase(parameter.getKey())) { // allowed client_window_size_bits if (allowClientWindowSize) { clientWindowSize = Integer.parseInt(parameter.getValue()); if (clientWindowSize > MAX_WINDOW_SIZE || clientWindowSize < MIN_WINDOW_SIZE) { succeed = false; } } else { succeed = false; } } else if (SERVER_MAX_WINDOW.equalsIgnoreCase(parameter.getKey())) { // acknowledged server_window_size_bits serverWindowSize = Integer.parseInt(parameter.getValue()); if (serverWindowSize > MAX_WINDOW_SIZE || serverWindowSize < MIN_WINDOW_SIZE) { succeed = false; } } else if (CLIENT_NO_CONTEXT.equalsIgnoreCase(parameter.getKey())) { // allowed client_no_context_takeover if (allowClientNoContext) { clientNoContext = true; } else { succeed = false; } } else if (SERVER_NO_CONTEXT.equalsIgnoreCase(parameter.getKey())) { // acknowledged server_no_context_takeover serverNoContext = true; } else { // unknown parameter succeed = false; } } if ((requestedServerNoContext && !serverNoContext) || requestedServerWindowSize < serverWindowSize) { succeed = false; } if (succeed) { return new PermessageDeflateExtension(serverNoContext, serverWindowSize, clientNoContext, clientWindowSize, extensionFilterProvider); } else { return null; } }
// Verifies a parameterless server response yields a valid extension with RSV1 and
// permessage-deflate encoder/decoder instances.
@Test public void testNormalHandshake() { PerMessageDeflateClientExtensionHandshaker handshaker = new PerMessageDeflateClientExtensionHandshaker(); WebSocketClientExtension extension = handshaker.handshakeExtension( new WebSocketExtensionData(PERMESSAGE_DEFLATE_EXTENSION, Collections.<String, String>emptyMap())); assertNotNull(extension); assertEquals(RSV1, extension.rsv()); assertTrue(extension.newExtensionDecoder() instanceof PerMessageDeflateDecoder); assertTrue(extension.newExtensionEncoder() instanceof PerMessageDeflateEncoder); }
/**
 * Orders pointers first by page id, then by offset within the page.
 * Consistent with equality of (idPage, offset) pairs.
 */
@Override
public int compareTo(SegmentPointer other) {
    final int byPage = Integer.compare(idPage, other.idPage);
    if (byPage != 0) {
        return byPage;
    }
    // Same page: fall back to the offset ordering.
    return Long.compare(offset, other.offset);
}
/** Page id dominates the ordering; equal pointers compare as 0. */
@Test
public void testCompareInDifferentSegments() {
    final SegmentPointer minor = new SegmentPointer(1, 10);
    final SegmentPointer otherMinor = new SegmentPointer(1, 10);
    final SegmentPointer major = new SegmentPointer(2, 4);
    // Integer.compare/Long.compare yield exactly -1/0/1 here, so exact values are safe.
    assertEquals(-1, minor.compareTo(major), "minor is less than major");
    assertEquals(1, major.compareTo(minor), "major is greater than minor");
    assertEquals(0, minor.compareTo(otherMinor), "minor equals itself");
}
/**
 * Ensures the "Videos" folder exists under the root folder, creating it at
 * most once per client instance, and returns its path.
 *
 * @return the path of the videos folder ("&lt;root&gt;/" + VIDEOS_NAME)
 * @throws IOException           on transport failure
 * @throws InvalidTokenException when the access token is rejected
 */
public String ensureVideosFolder() throws IOException, InvalidTokenException {
    final String root = ensureRootFolder();
    if (videosEnsured) {
        // Already created during this session; skip the remote call.
        return root + "/" + VIDEOS_NAME;
    }
    ensureFolder(root, VIDEOS_NAME);
    // Only mark as ensured after the remote call succeeded.
    videosEnsured = true;
    return root + "/" + VIDEOS_NAME;
}
/**
 * Verifies the folder-creation request shape and that a second call is served
 * from the cached flag (no additional HTTP request).
 */
@Test
public void testEnsureVideosFolder() throws Exception {
    server.enqueue(new MockResponse().setResponseCode(200));
    client.ensureRootFolder();
    assertEquals(1, server.getRequestCount());
    server.takeRequest();
    server.enqueue(new MockResponse().setResponseCode(200));
    client.ensureVideosFolder();
    assertEquals(2, server.getRequestCount());
    final RecordedRequest recordedRequest = server.takeRequest();
    assertEquals("POST", recordedRequest.getMethod());
    assertEquals(
            "/api/v2/mounts/primary/files/folder?path=%2FData+transfer",
            recordedRequest.getPath());
    assertEquals("Bearer acc", recordedRequest.getHeader("Authorization"));
    assertEquals("2.1", recordedRequest.getHeader("X-Koofr-Version"));
    assertEquals(
            "application/json; charset=utf-8", recordedRequest.getHeader("Content-Type"));
    assertEquals("{\"name\":\"Videos\"}", recordedRequest.getBody().readUtf8());
    // Second call must be a no-op on the wire.
    client.ensureVideosFolder();
    assertEquals(2, server.getRequestCount());
}
/**
 * Rewrites a configuration for local (in-JVM) execution: warns about and
 * removes options that have no effect locally, relaxes per-task resource
 * limits to effectively-unbounded values, and pins the remaining memory
 * options to fixed defaults so the memory sanity checks pass.
 *
 * @param config the configuration to adjust in place
 * @return the same (mutated) configuration, for chaining
 */
public static Configuration adjustForLocalExecution(Configuration config) {
    UNUSED_CONFIG_OPTIONS.forEach(
            option -> warnAndRemoveOptionHasNoEffectIfSet(config, option));
    // Only set if the user did not configure these explicitly.
    setConfigOptionToPassedMaxIfNotSet(
            config, TaskManagerOptions.CPU_CORES, LOCAL_EXECUTION_CPU_CORES);
    setConfigOptionToPassedMaxIfNotSet(
            config, TaskManagerOptions.TASK_HEAP_MEMORY, LOCAL_EXECUTION_TASK_MEMORY);
    setConfigOptionToPassedMaxIfNotSet(
            config, TaskManagerOptions.TASK_OFF_HEAP_MEMORY, LOCAL_EXECUTION_TASK_MEMORY);
    adjustNetworkMemoryForLocalExecution(config);
    setConfigOptionToDefaultIfNotSet(
            config, TaskManagerOptions.MANAGED_MEMORY_SIZE, DEFAULT_MANAGED_MEMORY_SIZE);
    // Set valid default values for unused config options which should have been removed.
    config.set(
            TaskManagerOptions.FRAMEWORK_HEAP_MEMORY,
            TaskManagerOptions.FRAMEWORK_HEAP_MEMORY.defaultValue());
    config.set(
            TaskManagerOptions.FRAMEWORK_OFF_HEAP_MEMORY,
            TaskManagerOptions.FRAMEWORK_OFF_HEAP_MEMORY.defaultValue());
    config.set(
            TaskManagerOptions.JVM_METASPACE, TaskManagerOptions.JVM_METASPACE.defaultValue());
    config.set(
            TaskManagerOptions.JVM_OVERHEAD_MAX,
            TaskManagerOptions.JVM_OVERHEAD_MAX.defaultValue());
    // NOTE(review): JVM_OVERHEAD_MIN is set to JVM_OVERHEAD_MAX's default,
    // making min == max. Possibly intentional (pins overhead to a fixed size),
    // but it reads like a copy-paste of the line above — confirm.
    config.set(
            TaskManagerOptions.JVM_OVERHEAD_MIN,
            TaskManagerOptions.JVM_OVERHEAD_MAX.defaultValue());
    return config;
}
/** If only the network-memory MIN is set, local adjustment must mirror it into MAX. */
@Test
public void testNetworkMaxAdjustForLocalExecutionIfMinSet() {
    MemorySize networkMemorySize = MemorySize.ofMebiBytes(1);
    Configuration configuration = new Configuration();
    configuration.set(TaskManagerOptions.NETWORK_MEMORY_MIN, networkMemorySize);
    TaskExecutorResourceUtils.adjustForLocalExecution(configuration);
    assertThat(configuration.get(TaskManagerOptions.NETWORK_MEMORY_MIN))
            .isEqualTo(networkMemorySize);
    assertThat(configuration.get(TaskManagerOptions.NETWORK_MEMORY_MAX))
            .isEqualTo(networkMemorySize);
}
/**
 * Persists a tagged config to disk and refreshes the local JVM cache, guarded
 * by a per-group write lock and a last-modified timestamp check.
 *
 * Behavior matrix (vs. the locally cached tag entry):
 *  - incoming timestamp older  -> ignored, returns true (stale update)
 *  - md5 changed               -> write to disk, update md5 + timestamp in cache
 *  - only timestamp newer      -> update timestamp in cache only
 *  - nothing changed           -> no-op
 *
 * @return true on success or benign skip; false if the lock could not be
 *         acquired or the disk write failed
 */
public static boolean dumpTag(String dataId, String group, String tenant, String tag, String content,
        long lastModifiedTs, String encryptedDataKey4Tag) {
    final String groupKey = GroupKey2.getKey(dataId, group, tenant);
    makeSure(groupKey, null);
    final int lockResult = tryWriteLock(groupKey);
    // Negative lock result means another writer holds the lock; give up.
    if (lockResult < 0) {
        DUMP_LOG.warn("[dump-tag-error] write lock failed. {}", groupKey);
        return false;
    }
    try {
        //check timestamp
        long localTagLastModifiedTs = ConfigCacheService.getTagLastModifiedTs(groupKey, tag);
        boolean timestampOutdated = lastModifiedTs < localTagLastModifiedTs;
        if (timestampOutdated) {
            // Stale update: treat as success so callers don't retry.
            DUMP_LOG.warn("[dump-tag-ignore] timestamp is outdated,groupKey={}", groupKey);
            return true;
        }
        boolean timestampChanged = lastModifiedTs > localTagLastModifiedTs;
        final String md5 = MD5Utils.md5Hex(content, ENCODE_UTF8);
        String localContentTagMd5 = ConfigCacheService.getContentTagMd5(groupKey, tag);
        boolean md5Changed = !md5.equals(localContentTagMd5);
        if (md5Changed) {
            // Content changed: persist to disk before updating the cache entry.
            ConfigDiskServiceFactory.getInstance().saveTagToDisk(dataId, group, tenant, tag, content);
        }
        if (md5Changed) {
            DUMP_LOG.warn(
                    "[dump-tag] md5 changed, update local jvm cache, groupKey={},tag={}, newMd5={},oldMd5={},lastModifiedTs={}",
                    groupKey, tag, md5, localContentTagMd5, lastModifiedTs);
            updateTagMd5(groupKey, tag, md5, lastModifiedTs, encryptedDataKey4Tag);
        } else if (timestampChanged) {
            DUMP_LOG.warn(
                    "[dump-tag] timestamp changed, update last modified in local jvm cache, groupKey={},tag={},"
                            + "tagLastModifiedTs={},oldTagLastModifiedTs={}",
                    groupKey, tag, lastModifiedTs, localTagLastModifiedTs);
            updateTagTimeStamp(groupKey, tag, lastModifiedTs);
        } else {
            DUMP_LOG.warn("[dump-tag-ignore] md5 & timestamp not changed. groupKey={},tag={}", groupKey, tag);
        }
        return true;
    } catch (IOException ioe) {
        DUMP_LOG.error("[dump-tag-exception] save disk error. " + groupKey + ", " + ioe.toString(), ioe);
        return false;
    } finally {
        // Always release the write lock acquired above.
        releaseWriteLock(groupKey);
    }
}
/**
 * Exercises the full dumpTag state machine: initial dump, newer-timestamp
 * content update, stale-timestamp ignore, timestamp-only refresh, no-op, and
 * final tag removal — verifying disk-service interactions at each step.
 */
@Test
void testDumpTag() throws Exception {
    String dataId = "dataIdtestDumpTag133323";
    String group = "group11";
    String tenant = "tenant112";
    String content = "mockContnet11";
    String tag = "tag12345";
    String groupKey = GroupKey2.getKey(dataId, group, tenant);
    String encryptedDataKey = "key12345";
    long ts = System.currentTimeMillis();
    //init dump tag
    boolean dumpTagResult = ConfigCacheService.dumpTag(dataId, group, tenant, tag, content, ts, encryptedDataKey);
    assertTrue(dumpTagResult);
    Mockito.verify(configDiskService, times(1)).saveTagToDisk(eq(dataId), eq(group), eq(tenant), eq(tag), eq(content));
    CacheItem contentCache = ConfigCacheService.getContentCache(groupKey);
    ConfigCache configCacheTag = contentCache.getConfigCacheTags().get(tag);
    assertEquals(ts, configCacheTag.getLastModifiedTs());
    String md5 = MD5Utils.md5Hex(content, "UTF-8");
    assertEquals(md5, configCacheTag.getMd5Utf8());
    //ts newer ,md5 update
    long tsNew = System.currentTimeMillis();
    String contentNew = content + tsNew;
    String md5New = MD5Utils.md5Hex(contentNew, "UTF-8");
    boolean resultNew = ConfigCacheService.dumpTag(dataId, group, tenant, tag, contentNew, tsNew, encryptedDataKey);
    assertTrue(resultNew);
    assertEquals(md5New, configCacheTag.getMd5Utf8());
    assertEquals(tsNew, configCacheTag.getLastModifiedTs());
    assertEquals(encryptedDataKey, configCacheTag.getEncryptedDataKey());
    Mockito.verify(configDiskService, times(1)).saveTagToDisk(eq(dataId), eq(group), eq(tenant), eq(tag), eq(contentNew));
    //ts old ,md5 update
    long tsOld = tsNew - 1;
    String contentWithOldTs = "contentWithOldTs" + tsOld;
    // Stale timestamp: call succeeds but nothing is written or cached.
    boolean resultOld = ConfigCacheService.dumpTag(dataId, group, tenant, tag, contentWithOldTs, tsOld, encryptedDataKey);
    assertTrue(resultOld);
    assertEquals(md5New, configCacheTag.getMd5Utf8());
    assertEquals(tsNew, configCacheTag.getLastModifiedTs());
    assertEquals(encryptedDataKey, configCacheTag.getEncryptedDataKey());
    Mockito.verify(configDiskService, times(0)).saveTagToDisk(eq(dataId), eq(group), eq(tenant), eq(tag), eq(contentWithOldTs));
    //ts new only,md5 not update
    long tsNew2 = tsNew + 1;
    String contentWithPrev2 = contentNew;
    boolean resultNew2 = ConfigCacheService.dumpTag(dataId, group, tenant, tag, contentWithPrev2, tsNew2, encryptedDataKey);
    assertTrue(resultNew2);
    assertEquals(md5New, configCacheTag.getMd5Utf8());
    assertEquals(tsNew2, configCacheTag.getLastModifiedTs());
    assertEquals(encryptedDataKey, configCacheTag.getEncryptedDataKey());
    //ts not update,md5 not update
    long tsNew3 = tsNew2;
    String contentWithPrev3 = contentNew;
    boolean resultNew3 = ConfigCacheService.dumpTag(dataId, group, tenant, tag, contentWithPrev3, tsNew3, encryptedDataKey);
    assertTrue(resultNew3);
    assertEquals(md5New, configCacheTag.getMd5Utf8());
    assertEquals(tsNew3, configCacheTag.getLastModifiedTs());
    assertEquals(encryptedDataKey, configCacheTag.getEncryptedDataKey());
    //test remove
    boolean removeTag = ConfigCacheService.removeTag(dataId, group, tenant, tag);
    assertTrue(removeTag);
    Mockito.verify(configDiskService, times(1)).removeConfigInfo4Tag(dataId, group, tenant, tag);
    Map<String, ConfigCache> configCacheTags = ConfigCacheService.getContentCache(groupKey).getConfigCacheTags();
    assertNull(configCacheTags);
}
/**
 * Loads a social user by primary key.
 *
 * @param id the social user's database id
 * @return the matching row, or null if none exists (mapper semantics)
 */
@Override
public SocialUserDO getSocialUser(Long id) {
    return socialUserMapper.selectById(id);
}
/**
 * Verifies that getSocialUserByCode resolves a social user (by type/code/state)
 * together with its user binding.
 *
 * NOTE(review): the test name says "testGetSocialUser" but it exercises
 * getSocialUserByCode — consider renaming for clarity.
 */
@Test
public void testGetSocialUser() {
    // Prepare parameters
    Integer userType = UserTypeEnum.ADMIN.getValue();
    Integer type = SocialTypeEnum.GITEE.getType();
    String code = "tudou";
    String state = "yuanma";
    // Mock the social user
    SocialUserDO socialUserDO = randomPojo(SocialUserDO.class).setType(type).setCode(code).setState(state);
    socialUserMapper.insert(socialUserDO);
    // Mock the social user's binding
    Long userId = randomLong();
    SocialUserBindDO socialUserBind = randomPojo(SocialUserBindDO.class).setUserType(userType).setUserId(userId)
            .setSocialType(type).setSocialUserId(socialUserDO.getId());
    socialUserBindMapper.insert(socialUserBind);
    // Invoke
    SocialUserRespDTO socialUser = socialUserService.getSocialUserByCode(userType, type, code, state);
    // Assert
    assertEquals(userId, socialUser.getUserId());
    assertEquals(socialUserDO.getOpenid(), socialUser.getOpenid());
}
/**
 * Serializes one snapshot of all registered metrics to Collectd over the
 * sender connection. Connection failures are logged and swallowed; the
 * connection is always closed afterwards.
 */
@Override
@SuppressWarnings("rawtypes")
public void report(SortedMap<String, Gauge> gauges,
                   SortedMap<String, Counter> counters,
                   SortedMap<String, Histogram> histograms,
                   SortedMap<String, Meter> meters,
                   SortedMap<String, Timer> timers) {
    // Shared metadata for this report cycle; Collectd timestamps are in seconds.
    MetaData.Builder metaData = new MetaData.Builder(sanitize, hostName, clock.getTime() / 1000, period)
            .type(COLLECTD_TYPE_GAUGE);
    try {
        connect(sender);
        for (Map.Entry<String, Gauge> entry : gauges.entrySet()) {
            serializeGauge(metaData.plugin(entry.getKey()), entry.getValue());
        }
        for (Map.Entry<String, Counter> entry : counters.entrySet()) {
            serializeCounter(metaData.plugin(entry.getKey()), entry.getValue());
        }
        for (Map.Entry<String, Histogram> entry : histograms.entrySet()) {
            serializeHistogram(metaData.plugin(entry.getKey()), entry.getValue());
        }
        for (Map.Entry<String, Meter> entry : meters.entrySet()) {
            serializeMeter(metaData.plugin(entry.getKey()), entry.getValue());
        }
        for (Map.Entry<String, Timer> entry : timers.entrySet()) {
            serializeTimer(metaData.plugin(entry.getKey()), entry.getValue());
        }
    } catch (IOException e) {
        // Reporting is best-effort; never propagate transport failures.
        LOG.warn("Unable to report to Collectd", e);
    } finally {
        disconnect(sender);
    }
}
/** Dashes and slashes in metric names must be sanitized to underscores on the wire. */
@Test
public void sanitizesMetricName() throws Exception {
    Counter counter = registry.counter("dash-illegal.slash/illegal");
    counter.inc();
    reporter.report();
    ValueList values = receiver.next();
    assertThat(values.getPlugin()).isEqualTo("dash_illegal.slash_illegal");
}
/**
 * Extracts candidate mod/package keywords from a Minecraft crash report's
 * stack-trace section: package-name segments from each frame (excluding the
 * trailing class + method segments), plus mod ids from "xf:&lt;id&gt;" module
 * tokens, with black-listed common package names filtered out.
 *
 * @param crashReport the full crash-report text
 * @return a de-duplicated set of keywords; empty if no stack trace is found
 */
public static Set<String> findKeywordsFromCrashReport(String crashReport) {
    Matcher matcher = CRASH_REPORT_STACK_TRACE_PATTERN.matcher(crashReport);
    Set<String> result = new HashSet<>();
    if (matcher.find()) {
        for (String line : matcher.group("stacktrace").split("\\n")) {
            Matcher lineMatcher = STACK_TRACE_LINE_PATTERN.matcher(line);
            if (lineMatcher.find()) {
                String[] method = lineMatcher.group("method").split("\\.");
                // Skip the last two segments (class and method names): keep packages only.
                for (int i = 0; i < method.length - 2; i++) {
                    if (PACKAGE_KEYWORD_BLACK_LIST.contains(method[i])) {
                        continue;
                    }
                    result.add(method[i]);
                }
                Matcher moduleMatcher = STACK_TRACE_LINE_MODULE_PATTERN.matcher(line);
                if (moduleMatcher.find()) {
                    for (String module : moduleMatcher.group("tokens").split(",")) {
                        String[] split = module.split(":");
                        // Only "xf:<id>" tokens carry a mod id.
                        if (split.length >= 2 && "xf".equals(split[0])) {
                            if (PACKAGE_KEYWORD_BLACK_LIST.contains(split[1])) {
                                continue;
                            }
                            result.add(split[1]);
                        }
                    }
                }
            }
        }
    }
    return result;
}
/** A Twilight Forest crash log must yield exactly the "twilightforest" keyword. */
@Test
public void twilightforest() throws IOException {
    assertEquals(
            Collections.singleton("twilightforest"),
            CrashReportAnalyzer.findKeywordsFromCrashReport(loadLog("/crash-report/mod/twilightforest.txt")));
}
/**
 * Returns a generator producing the decimal strings "1", "2", "3", ...,
 * each returned instance keeping its own independent counter.
 */
public static IdGenerator incrementingLongs() {
    final AtomicLong counter = new AtomicLong();
    // String.valueOf(long) delegates to Long.toString, so output is identical.
    return () -> String.valueOf(counter.incrementAndGet());
}
/** Each generator instance must carry its own counter, starting from "1". */
@Test
public void incrementingIndependent() {
    IdGenerator gen = IdGenerators.incrementingLongs();
    IdGenerator otherGen = IdGenerators.incrementingLongs();
    assertThat(gen.getId(), equalTo("1"));
    assertThat(gen.getId(), equalTo("2"));
    // A second generator is unaffected by the first one's progress.
    assertThat(otherGen.getId(), equalTo("1"));
}
/**
 * Sets a bean property by name via its discovered setter. Null values are
 * ignored; a missing setter or a failed assignment is reported as a warning
 * rather than thrown.
 *
 * @param name  the property name (setter resolved by the aggregation assessor)
 * @param value the string value to set; null is a silent no-op
 */
public void setProperty(String name, String value) {
    if (value == null) {
        return;
    }
    Method setter = aggregationAssessor.findSetterMethod(name);
    if (setter == null) {
        addWarn("No setter for property [" + name + "] in " + objClass.getName() + ".");
    } else {
        try {
            setProperty(setter, value);
        } catch (PropertySetterException ex) {
            // Configuration errors are surfaced as warnings, not failures.
            addWarn("Failed to set property [" + name + "] to value \"" + value + "\". ", ex);
        }
    }
}
/** camelCase property names must resolve to the matching setter, repeatedly. */
@Test
public void testSetCamelProperty() {
    setter.setProperty("camelCase", "trot");
    assertEquals("trot", house.getCamelCase());
    setter.setProperty("camelCase", "gh");
    assertEquals("gh", house.getCamelCase());
}
/**
 * Returns the current sequence value.
 * (Reads the atomic/long holder's current value without modifying it.)
 */
@Override
public long getSequence() {
    return sequence.get();
}
/** A freshly created sequencer starts at 0. */
@Test
public void testGetSequence() {
    assertEquals(0, sequencer.getSequence());
}
/** Catalog-qualified names in DML are not supported by this driver; always false. */
@Override
public boolean supportsCatalogsInDataManipulation() {
    return false;
}
/** The metadata must report no catalog support in DML. */
@Test
void assertSupportsCatalogsInDataManipulation() {
    assertFalse(metaData.supportsCatalogsInDataManipulation());
}
/**
 * On Community Edition only: reads the per-language counts of files the
 * scanner could not analyze, and if any are non-zero, emits a CE task warning
 * and records the corresponding measures. A no-op on other editions or when
 * the edition is unknown.
 */
@Override
public void execute(Context context) {
    editionProvider.get().ifPresent(edition -> {
        // Other editions can analyze these languages; nothing to warn about.
        if (!edition.equals(EditionProvider.Edition.COMMUNITY)) {
            return;
        }
        // Keep only languages that actually had unanalyzed files.
        Map<String, Integer> filesPerLanguage = reportReader.readMetadata().getNotAnalyzedFilesByLanguageMap()
                .entrySet()
                .stream()
                .filter(entry -> entry.getValue() > 0)
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
        if (filesPerLanguage.isEmpty()) {
            return;
        }
        ceTaskMessages.add(constructMessage(filesPerLanguage));
        computeMeasures(filesPerLanguage);
    });
}
/** CE + one unanalyzed C++ file => exactly one upgrade warning and the C++ measure set. */
@Test
public void adds_warning_in_SQ_community_edition_if_there_are_cpp_files() {
    when(editionProvider.get()).thenReturn(Optional.of(EditionProvider.Edition.COMMUNITY));
    reportReader.setMetadata(ScannerReport.Metadata.newBuilder()
            .putNotAnalyzedFilesByLanguage("C++", 1)
            .build());
    underTest.execute(new TestComputationStepContext());
    verify(ceTaskMessages, times(1)).add(argumentCaptor.capture());
    List<CeTaskMessages.Message> messages = argumentCaptor.getAllValues();
    assertThat(messages).extracting(CeTaskMessages.Message::getText).containsExactly(
            "1 unanalyzed C++ file was detected in this project during the last analysis. C++ cannot be analyzed with your current SonarQube edition. Please"
                    + " consider <a target=\"_blank\" href=\"https://www.sonarsource.com/plans-and-pricing/developer/?referrer=sonarqube-cpp\">upgrading to Developer"
                    + " Edition</a> to find Bugs, Code Smells, Vulnerabilities and Security Hotspots in this file.");
    // C++ measure is recorded; the plain-C measure must stay absent.
    assertThat(measureRepository.getAddedRawMeasure(PROJECT_REF, UNANALYZED_CPP_KEY).get().getIntValue()).isOne();
    assertThat(measureRepository.getAddedRawMeasure(PROJECT_REF, UNANALYZED_C_KEY)).isEmpty();
}
/**
 * Dispatches a keystore CLI command (create / list / add / remove) against the
 * secret store described by {@code config}, writing all feedback to the
 * terminal. Unknown commands print help; each command honors a --help option.
 *
 * @param primaryCommand the command word, e.g. "add"
 * @param config         secure configuration locating the keystore
 * @param allArguments   remaining CLI arguments (identifiers/options)
 */
public void command(String primaryCommand, SecureConfig config, String... allArguments) {
    terminal.writeLine("");
    final Optional<CommandLine> commandParseResult;
    try {
        commandParseResult = Command.parse(primaryCommand, allArguments);
    } catch (InvalidCommandException e) {
        terminal.writeLine(String.format("ERROR: %s", e.getMessage()));
        return;
    }
    // Unrecognized command: show usage and bail.
    if (commandParseResult.isEmpty()) {
        printHelp();
        return;
    }
    final CommandLine commandLine = commandParseResult.get();
    switch (commandLine.getCommand()) {
        case CREATE: {
            if (commandLine.hasOption(CommandOptions.HELP)){
                terminal.writeLine("Creates a new keystore. For example: 'bin/logstash-keystore create'");
                return;
            }
            if (secretStoreFactory.exists(config.clone())) {
                // Creating over an existing store is destructive; confirm first.
                terminal.write("An Logstash keystore already exists. Overwrite ? [y/N] ");
                if (isYes(terminal.readLine())) {
                    create(config);
                }
            } else {
                create(config);
            }
            break;
        }
        case LIST: {
            if (commandLine.hasOption(CommandOptions.HELP)){
                terminal.writeLine("List all secret identifiers from the keystore. For example: "
                        + "`bin/logstash-keystore list`. Note - only the identifiers will be listed, not the secrets.");
                return;
            }
            Collection<SecretIdentifier> ids = secretStoreFactory.load(config).list();
            // Hide the internal marker entry; print the rest sorted.
            List<String> keys = ids.stream().filter(id -> !id.equals(LOGSTASH_MARKER)).map(id -> id.getKey()).collect(Collectors.toList());
            Collections.sort(keys);
            keys.forEach(terminal::writeLine);
            break;
        }
        case ADD: {
            if (commandLine.hasOption(CommandOptions.HELP)){
                terminal.writeLine("Add secrets to the keystore. For example: "
                        + "`bin/logstash-keystore add my-secret`, at the prompt enter your secret. You will use the identifier ${my-secret} in your Logstash configuration.");
                return;
            }
            if (commandLine.getArguments().isEmpty()) {
                terminal.writeLine("ERROR: You must supply an identifier to add. (e.g. bin/logstash-keystore add my-secret)");
                return;
            }
            if (secretStoreFactory.exists(config.clone())) {
                final SecretStore secretStore = secretStoreFactory.load(config);
                for (String argument : commandLine.getArguments()) {
                    final SecretIdentifier id = new SecretIdentifier(argument);
                    final byte[] existingValue = secretStore.retrieveSecret(id);
                    if (existingValue != null) {
                        // Wipe the fetched secret from memory before prompting.
                        SecretStoreUtil.clearBytes(existingValue);
                        terminal.write(String.format("%s already exists. Overwrite ? [y/N] ", argument));
                        if (!isYes(terminal.readLine())) {
                            continue;
                        }
                    }
                    final String enterValueMessage = String.format("Enter value for %s: ", argument);
                    char[] secret = null;
                    // Re-prompt until a non-empty, ASCII-only value is entered.
                    while(secret == null) {
                        terminal.write(enterValueMessage);
                        final char[] readSecret = terminal.readSecret();
                        if (readSecret == null || readSecret.length == 0) {
                            terminal.writeLine("ERROR: Value cannot be empty");
                            continue;
                        }
                        if (!ASCII_ENCODER.canEncode(CharBuffer.wrap(readSecret))) {
                            terminal.writeLine("ERROR: Value must contain only ASCII characters");
                            continue;
                        }
                        secret = readSecret;
                    }
                    add(secretStore, id, SecretStoreUtil.asciiCharToBytes(secret));
                }
            } else {
                terminal.writeLine("ERROR: Logstash keystore not found. Use 'create' command to create one.");
            }
            break;
        }
        case REMOVE: {
            if (commandLine.hasOption(CommandOptions.HELP)){
                terminal.writeLine("Remove secrets from the keystore. For example: "
                        + "`bin/logstash-keystore remove my-secret`");
                return;
            }
            if (commandLine.getArguments().isEmpty()) {
                terminal.writeLine("ERROR: You must supply a value to remove. (e.g. bin/logstash-keystore remove my-secret)");
                return;
            }
            final SecretStore secretStore = secretStoreFactory.load(config);
            for (String argument : commandLine.getArguments()) {
                SecretIdentifier id = new SecretIdentifier(argument);
                if (secretStore.containsSecret(id)) {
                    secretStore.purgeSecret(id);
                    terminal.writeLine(String.format("Removed '%s' from the Logstash keystore.", id.getKey()));
                } else {
                    terminal.writeLine(String.format("ERROR: '%s' does not exist in the Logstash keystore.", argument));
                }
            }
            break;
        }
    }
}
/** "remove" with a null varargs array or no arguments must print the usage error. */
@Test
public void testRemoveWithNoIdentifiers() {
    final String expectedMessage = "ERROR: You must supply a value to remove.";
    createKeyStore();
    // Explicit null array exercises the varargs null path.
    String[] nullArguments = null;
    cli.command("remove", newStoreConfig.clone(), nullArguments);
    assertThat(terminal.out).containsIgnoringCase(expectedMessage);
    terminal.reset();
    cli.command("remove", newStoreConfig.clone());
    assertThat(terminal.out).containsIgnoringCase(expectedMessage);
}
/**
 * Converts an internal Condition into its API representation, pulling status
 * and measured value from the pre-computed per-condition status map.
 *
 * @throws IllegalStateException if the map has no status for the condition
 */
@Override
public QualityGate.Condition apply(Condition input) {
    String metricKey = input.getMetric().getKey();
    ConditionStatus conditionStatus = statusPerConditions.get(input);
    // Every condition must have been evaluated before conversion.
    checkState(conditionStatus != null, "Missing ConditionStatus for condition on metric key %s", metricKey);
    return builder
            .setStatus(convert(conditionStatus.getStatus()))
            .setMetricKey(metricKey)
            .setOperator(convert(input.getOperator()))
            .setErrorThreshold(input.getErrorThreshold())
            .setValue(conditionStatus.getValue())
            .build();
}
/** The converted condition must carry the metric's key. */
@Test
public void apply_converts_key_from_metric() {
    ConditionToCondition underTest = new ConditionToCondition(of(SOME_CONDITION, SOME_CONDITION_STATUS));
    assertThat(underTest.apply(SOME_CONDITION).getMetricKey()).isEqualTo(METRIC_KEY);
}
/**
 * Builds the operator's internal user model from a KafkaUser custom resource:
 * validates TLS username and password constraints, copies authentication,
 * simple ACLs (only if the cluster supports the ACL admin API), quotas, and
 * any secret-template metadata.
 *
 * @throws InvalidResourceException if simple ACLs are configured but
 *         unsupported by the cluster
 */
public static KafkaUserModel fromCrd(KafkaUser kafkaUser,
                                     String secretPrefix,
                                     boolean aclsAdminApiSupported) {
    KafkaUserModel result = new KafkaUserModel(kafkaUser.getMetadata().getNamespace(),
            kafkaUser.getMetadata().getName(),
            Labels.fromResource(kafkaUser).withStrimziKind(kafkaUser.getKind()),
            secretPrefix);
    // Fail fast on invalid CR contents before mutating the model.
    validateTlsUsername(kafkaUser);
    validateDesiredPassword(kafkaUser);
    result.setOwnerReference(kafkaUser);
    result.setAuthentication(kafkaUser.getSpec().getAuthentication());
    if (kafkaUser.getSpec().getAuthorization() != null
            && kafkaUser.getSpec().getAuthorization().getType().equals(KafkaUserAuthorizationSimple.TYPE_SIMPLE)) {
        if (aclsAdminApiSupported) {
            KafkaUserAuthorizationSimple simple = (KafkaUserAuthorizationSimple) kafkaUser.getSpec().getAuthorization();
            result.setSimpleAclRules(simple.getAcls());
        } else {
            throw new InvalidResourceException("Simple authorization ACL rules are configured but not supported in the Kafka cluster configuration.");
        }
    }
    result.setQuotas(kafkaUser.getSpec().getQuotas());
    // Optional labels/annotations to stamp onto the generated Secret.
    if (kafkaUser.getSpec().getTemplate() != null
            && kafkaUser.getSpec().getTemplate().getSecret() != null
            && kafkaUser.getSpec().getTemplate().getSecret().getMetadata() != null) {
        result.templateSecretLabels = kafkaUser.getSpec().getTemplate().getSecret().getMetadata().getLabels();
        result.templateSecretAnnotations = kafkaUser.getSpec().getTemplate().getSecret().getMetadata().getAnnotations();
    }
    return result;
}
/** A 64-character TLS username is at the limit and must still validate. */
@Test
public void testFromCrdTlsUserWith64CharTlsUsernameValid() {
    // 64 characters => Should be still OK
    KafkaUser notTooLong = new KafkaUserBuilder(tlsUser)
            .editMetadata()
                .withName("User123456789012345678901234567890123456789012345678901234567890")
            .endMetadata()
            .build();
    KafkaUserModel.fromCrd(notTooLong, UserOperatorConfig.SECRET_PREFIX.defaultValue(),
            Boolean.parseBoolean(UserOperatorConfig.ACLS_ADMIN_API_SUPPORTED.defaultValue()));
}
/**
 * Parses a config-qualifier string into {@code out}.
 * Delegates to the three-argument overload with the flag fixed to true
 * (presumably "apply version-for-compat" — confirm against the overload).
 */
public static boolean parse(final String str, ResTable_config out) {
    return parse(str, out, true);
}
/** "anydpi" must parse to the DENSITY_ANY density constant. */
@Test
public void parse_density_any() {
    ResTable_config config = new ResTable_config();
    ConfigDescription.parse("anydpi", config);
    assertThat(config.density).isEqualTo(DENSITY_ANY);
}
/**
 * Resolves key/value metadata for a Portable-serialized mapping: extracts the
 * fields for this side (key or value), resolves the Portable class definition
 * from the registered id/options, and builds table fields plus an upsert
 * descriptor bound to that class definition.
 */
@Override
public KvMetadata resolveMetadata(
        boolean isKey,
        List<MappingField> resolvedFields,
        Map<String, String> options,
        InternalSerializationService serializationService
) {
    Map<QueryPath, MappingField> fieldsByPath = extractFields(resolvedFields, isKey);
    PortableId portableId = getPortableId(fieldsByPath, options, isKey);
    ClassDefinition classDefinition = resolveClassDefinition(portableId, getFields(fieldsByPath),
            serializationService.getPortableContext());
    List<TableField> fields = new ArrayList<>();
    for (Entry<QueryPath, MappingField> entry : fieldsByPath.entrySet()) {
        QueryPath path = entry.getKey();
        QueryDataType type = entry.getValue().type();
        String name = entry.getValue().name();
        fields.add(new MapTableField(name, type, false, path));
    }
    // Ensure a top-level __key/this OBJECT field exists even if not mapped explicitly.
    maybeAddDefaultField(isKey, resolvedFields, fields, QueryDataType.OBJECT);
    return new KvMetadata(
            fields,
            GenericQueryTargetDescriptor.DEFAULT,
            new PortableUpsertTargetDescriptor(classDefinition)
    );
}
/**
 * With a pre-registered class definition, metadata resolution (for both the
 * key and value sides) must expose the mapped field, the implicit top-level
 * OBJECT field, and an upsert descriptor bound to that class definition.
 */
@Test
@Parameters({
        "true, __key",
        "false, this"
})
public void test_resolveMetadataWithExistingClassDefinition(boolean key, String prefix) {
    InternalSerializationService ss = new DefaultSerializationServiceBuilder().build();
    ClassDefinition classDefinition = new ClassDefinitionBuilder(1, 2, 3)
            .addIntField("field")
            .build();
    ss.getPortableContext().registerClassDefinition(classDefinition);
    Map<String, String> options = ImmutableMap.of(
            (key ? OPTION_KEY_FACTORY_ID : OPTION_VALUE_FACTORY_ID), String.valueOf(classDefinition.getFactoryId()),
            (key ? OPTION_KEY_CLASS_ID : OPTION_VALUE_CLASS_ID), String.valueOf(classDefinition.getClassId()),
            (key ? OPTION_KEY_CLASS_VERSION : OPTION_VALUE_CLASS_VERSION), String.valueOf(classDefinition.getVersion())
    );
    KvMetadata metadata = INSTANCE.resolveMetadata(
            key,
            singletonList(field("field", QueryDataType.INT, prefix + ".field")),
            options,
            ss
    );
    assertThat(metadata.getFields()).containsExactly(
            new MapTableField("field", QueryDataType.INT, false, QueryPath.create(prefix + ".field")),
            new MapTableField(prefix, QueryDataType.OBJECT, true, QueryPath.create(prefix))
    );
    assertThat(metadata.getQueryTargetDescriptor()).isEqualTo(GenericQueryTargetDescriptor.DEFAULT);
    assertThat(metadata.getUpsertTargetDescriptor())
            .isEqualToComparingFieldByField(new PortableUpsertTargetDescriptor(classDefinition));
}
/**
 * Derives a BuildResult from an in-memory image: the image id is the digest of
 * the container configuration blob, and the image digest is the digest of the
 * manifest generated in the requested format. Equal images yield equal results.
 *
 * @throws IOException if digest computation fails
 */
static BuildResult fromImage(Image image, Class<? extends BuildableManifestTemplate> targetFormat)
        throws IOException {
    ImageToJsonTranslator imageToJsonTranslator = new ImageToJsonTranslator(image);
    BlobDescriptor containerConfigurationBlobDescriptor =
            Digests.computeDigest(imageToJsonTranslator.getContainerConfiguration());
    BuildableManifestTemplate manifestTemplate =
            imageToJsonTranslator.getManifestTemplate(
                    targetFormat, containerConfigurationBlobDescriptor);
    DescriptorDigest imageDigest = Digests.computeJsonDigest(manifestTemplate);
    DescriptorDigest imageId = containerConfigurationBlobDescriptor.getDigest();
    // "false" marks the result as not pushed (locally built only).
    return new BuildResult(imageDigest, imageId, false);
}
/** Identical images produce equal BuildResults; differing images do not. */
@Test
public void testFromImage() throws IOException {
    Image image1 = Image.builder(V22ManifestTemplate.class).setUser("user").build();
    Image image2 = Image.builder(V22ManifestTemplate.class).setUser("user").build();
    Image image3 = Image.builder(V22ManifestTemplate.class).setUser("anotherUser").build();
    Assert.assertEquals(
            BuildResult.fromImage(image1, V22ManifestTemplate.class),
            BuildResult.fromImage(image2, V22ManifestTemplate.class));
    Assert.assertNotEquals(
            BuildResult.fromImage(image1, V22ManifestTemplate.class),
            BuildResult.fromImage(image3, V22ManifestTemplate.class));
}
/**
 * RPC entry point for NFS GETATTR: unwraps the security handler and caller
 * address from the RPC info and delegates to the overload doing the real work.
 */
@Override
public GETATTR3Response getattr(XDR xdr, RpcInfo info) {
    return getattr(xdr, getSecurityHandler(info), info.remoteAddress());
}
/** GETATTR must be denied for an unprivileged caller and succeed for a privileged one. */
@Test(timeout = 60000)
public void testGetattr() throws Exception {
    HdfsFileStatus status = nn.getRpcServer().getFileInfo("/tmp/bar");
    long dirId = status.getFileId();
    int namenodeId = Nfs3Utils.getNamenodeId(config);
    FileHandle handle = new FileHandle(dirId, namenodeId);
    XDR xdr_req = new XDR();
    GETATTR3Request req = new GETATTR3Request(handle);
    req.serialize(xdr_req);
    // Attempt by an unpriviledged user should fail.
    GETATTR3Response response1 = nfsd.getattr(xdr_req.asReadOnlyWrap(),
            securityHandlerUnpriviledged,
            new InetSocketAddress("localhost", 1234));
    assertEquals("Incorrect return code", Nfs3Status.NFS3ERR_ACCES,
            response1.getStatus());
    // Attempt by a priviledged user should pass.
    GETATTR3Response response2 = nfsd.getattr(xdr_req.asReadOnlyWrap(),
            securityHandler,
            new InetSocketAddress("localhost", 1234));
    assertEquals("Incorrect return code", Nfs3Status.NFS3_OK,
            response2.getStatus());
}
/**
 * Lists tables in {@code dbName} via JDBC metadata. If the "table_types"
 * property is set, it is parsed as a comma-separated list and each entry is
 * validated against SUPPORTED_TABLE_TYPES; otherwise all supported types are
 * requested.
 *
 * @throws StarRocksConnectorException on an empty or unsupported type list
 * @throws SQLException on driver failure
 */
@Override
public ResultSet getTables(Connection connection, String dbName) throws SQLException {
    String tableTypes = properties.get("table_types");
    if (null != tableTypes) {
        String[] tableTypesArray = tableTypes.split(",");
        // split() yields a zero-length array only for inputs like "," (all-empty tokens).
        if (tableTypesArray.length == 0) {
            throw new StarRocksConnectorException("table_types should be populated with table types separated by "
                    + "comma, e.g. 'TABLE,VIEW'. Currently supported type includes:"
                    + String.join(",", SUPPORTED_TABLE_TYPES));
        }
        for (String tt : tableTypesArray) {
            if (!SUPPORTED_TABLE_TYPES.contains(tt)) {
                // NOTE(review): this passes two arguments to the exception — if the
                // second parameter is a cause, the supported-types text is lost from
                // the message; possibly meant to be string concatenation. Confirm
                // against StarRocksConnectorException's constructors.
                throw new StarRocksConnectorException("Unsupported table type found: " + tt,
                        ",Currently supported table types includes:" + String.join(",", SUPPORTED_TABLE_TYPES));
            }
        }
        return connection.getMetaData().getTables(connection.getCatalog(), dbName, null, tableTypesArray);
    }
    return connection.getMetaData().getTables(connection.getCatalog(), dbName, null,
            SUPPORTED_TABLE_TYPES.toArray(new String[SUPPORTED_TABLE_TYPES.size()]));
}
/** Without "table_types" set, listing must query all supported types and return every name. */
@Test
public void testListTableNames() throws SQLException {
    new Expectations() {
        {
            dataSource.getConnection();
            result = connection;
            minTimes = 0;
            connection.getCatalog();
            result = "t1";
            minTimes = 0;
            connection.getMetaData().getTables("t1", "test", null,
                    SUPPORTED_TABLE_TYPES.toArray(new String[SUPPORTED_TABLE_TYPES.size()]));
            result = tableResult;
            minTimes = 0;
        }
    };
    try {
        JDBCMetadata jdbcMetadata = new JDBCMetadata(properties, "t1", dataSource);
        List<String> result = jdbcMetadata.listTableNames("test");
        List<String> expectResult = Lists.newArrayList("tbl1", "tbl2", "tbl3");
        Assert.assertEquals(expectResult, result);
    } catch (Exception e) {
        Assert.fail();
    }
}
/**
 * Handles a config-publish RPC: validates parameters, persists the config via
 * the matching persist service (plain / tag / beta, each with an optional CAS
 * variant driven by casMd5), fires a ConfigDataChangeEvent, and logs a
 * persistence trace. Any exception is converted into a failure response.
 */
@Override
@TpsControl(pointName = "ConfigPublish")
@Secured(action = ActionTypes.WRITE, signType = SignType.CONFIG)
@ExtractorManager.Extractor(rpcExtractor = ConfigRequestParamExtractor.class)
public ConfigPublishResponse handle(ConfigPublishRequest request, RequestMeta meta) throws NacosException {
    try {
        String dataId = request.getDataId();
        String group = request.getGroup();
        String content = request.getContent();
        final String tenant = request.getTenant();
        final String srcIp = meta.getClientIp();
        final String requestIpApp = request.getAdditionParam("requestIpApp");
        final String tag = request.getAdditionParam("tag");
        final String appName = request.getAdditionParam("appName");
        final String type = request.getAdditionParam("type");
        final String srcUser = request.getAdditionParam("src_user");
        final String encryptedDataKey = request.getAdditionParam("encryptedDataKey");
        // check tenant
        ParamUtils.checkParam(dataId, group, "datumId", content);
        ParamUtils.checkParam(tag);
        Map<String, Object> configAdvanceInfo = new HashMap<>(10);
        MapUtil.putIfValNoNull(configAdvanceInfo, "config_tags", request.getAdditionParam("config_tags"));
        MapUtil.putIfValNoNull(configAdvanceInfo, "desc", request.getAdditionParam("desc"));
        MapUtil.putIfValNoNull(configAdvanceInfo, "use", request.getAdditionParam("use"));
        MapUtil.putIfValNoNull(configAdvanceInfo, "effect", request.getAdditionParam("effect"));
        MapUtil.putIfValNoNull(configAdvanceInfo, "type", type);
        MapUtil.putIfValNoNull(configAdvanceInfo, "schema", request.getAdditionParam("schema"));
        ParamUtils.checkParam(configAdvanceInfo);
        // Aggregate dataIds may not be published as single entries.
        if (AggrWhitelist.isAggrDataId(dataId)) {
            Loggers.REMOTE_DIGEST.warn("[aggr-conflict] {} attempt to publish single data, {}, {}", srcIp, dataId, group);
            throw new NacosException(NacosException.NO_RIGHT, "dataId:" + dataId + " is aggr");
        }
        ConfigInfo configInfo = new ConfigInfo(dataId, group, tenant, appName, content);
        // casMd5 doubles as the CAS precondition and the "use CAS" switch below.
        configInfo.setMd5(request.getCasMd5());
        configInfo.setType(type);
        configInfo.setEncryptedDataKey(encryptedDataKey);
        String betaIps = request.getAdditionParam("betaIps");
        ConfigOperateResult configOperateResult = null;
        String persistEvent = ConfigTraceService.PERSISTENCE_EVENT;
        if (StringUtils.isBlank(betaIps)) {
            if (StringUtils.isBlank(tag)) {
                // Plain publish (optionally CAS).
                if (StringUtils.isNotBlank(request.getCasMd5())) {
                    configOperateResult = configInfoPersistService.insertOrUpdateCas(srcIp, srcUser, configInfo,
                            configAdvanceInfo);
                    if (!configOperateResult.isSuccess()) {
                        return ConfigPublishResponse.buildFailResponse(ResponseCode.FAIL.getCode(),
                                "Cas publish fail,server md5 may have changed.");
                    }
                } else {
                    configOperateResult = configInfoPersistService.insertOrUpdate(srcIp, srcUser, configInfo,
                            configAdvanceInfo);
                }
                ConfigChangePublisher.notifyConfigChange(new ConfigDataChangeEvent(false, dataId, group, tenant,
                        configOperateResult.getLastModified()));
            } else {
                // Tagged publish (optionally CAS).
                if (StringUtils.isNotBlank(request.getCasMd5())) {
                    configOperateResult = configInfoTagPersistService.insertOrUpdateTagCas(configInfo, tag, srcIp,
                            srcUser);
                    if (!configOperateResult.isSuccess()) {
                        return ConfigPublishResponse.buildFailResponse(ResponseCode.FAIL.getCode(),
                                "Cas publish tag config fail,server md5 may have changed.");
                    }
                } else {
                    configOperateResult = configInfoTagPersistService.insertOrUpdateTag(configInfo, tag, srcIp,
                            srcUser);
                }
                persistEvent = ConfigTraceService.PERSISTENCE_EVENT_TAG + "-" + tag;
                ConfigChangePublisher.notifyConfigChange(
                        new ConfigDataChangeEvent(false, dataId, group, tenant, tag,
                                configOperateResult.getLastModified()));
            }
        } else {
            // beta publish
            if (StringUtils.isNotBlank(request.getCasMd5())) {
                configOperateResult = configInfoBetaPersistService.insertOrUpdateBetaCas(configInfo, betaIps, srcIp,
                        srcUser);
                if (!configOperateResult.isSuccess()) {
                    return ConfigPublishResponse.buildFailResponse(ResponseCode.FAIL.getCode(),
                            "Cas publish beta config fail,server md5 may have changed.");
                }
            } else {
                configOperateResult = configInfoBetaPersistService.insertOrUpdateBeta(configInfo, betaIps, srcIp,
                        srcUser);
            }
            persistEvent = ConfigTraceService.PERSISTENCE_EVENT_BETA;
            ConfigChangePublisher.notifyConfigChange(
                    new ConfigDataChangeEvent(true, dataId, group, tenant,
                            configOperateResult.getLastModified()));
        }
        ConfigTraceService.logPersistenceEvent(dataId, group, tenant, requestIpApp,
                configOperateResult.getLastModified(), srcIp, persistEvent, ConfigTraceService.PERSISTENCE_TYPE_PUB,
                content);
        return ConfigPublishResponse.buildSuccessResponse();
    } catch (Exception e) {
        Loggers.REMOTE_DIGEST.error("[ConfigPublishRequestHandler] publish config error ,request ={}", request, e);
        // Preserve Nacos error codes; everything else maps to a generic failure.
        return ConfigPublishResponse.buildFailResponse(
                (e instanceof NacosException) ? ((NacosException) e).getErrCode() : ResponseCode.FAIL.getCode(),
                e.getMessage());
    }
}
/**
 * Verifies a CAS-based tag publish: the tag persist service is mocked to succeed,
 * and the handler must emit a non-beta, non-batch ConfigDataChangeEvent carrying
 * the tag and the mocked last-modified timestamp.
 */
@Test
void testTagPublishCas() throws NacosException, InterruptedException {
    String dataId = "testTagPublishCas";
    String group = "group";
    ConfigPublishRequest configPublishRequest = new ConfigPublishRequest();
    configPublishRequest.setDataId(dataId);
    configPublishRequest.setGroup(group);
    configPublishRequest.setCasMd5("casmd512");
    Map<String, String> keyMap = new HashMap<>();
    String srcUser = "src_user111";
    keyMap.put("src_user", srcUser);
    String tag = "testTag";
    keyMap.put("tag", tag);
    configPublishRequest.setAdditionMap(keyMap);
    String tenant = "tenant";
    configPublishRequest.setTenant(tenant);
    String content = "content";
    configPublishRequest.setContent(content);
    RequestMeta requestMeta = new RequestMeta();
    requestMeta.setClientIp("127.0.0.1");
    // Capture the async config-change event published through the NotifyCenter.
    AtomicReference<ConfigDataChangeEvent> reference = new AtomicReference<>();
    NotifyCenter.registerSubscriber(new Subscriber() {
        @Override
        public void onEvent(Event event) {
            reference.set((ConfigDataChangeEvent) event);
        }

        @Override
        public Class<? extends Event> subscribeType() {
            return ConfigDataChangeEvent.class;
        }
    });
    ConfigOperateResult configOperateResult = new ConfigOperateResult(true);
    long timestamp = System.currentTimeMillis();
    long id = timestamp / 1000;
    configOperateResult.setId(id);
    configOperateResult.setLastModified(timestamp);
    when(configInfoTagPersistService.insertOrUpdateTagCas(any(ConfigInfo.class), eq(tag),
            eq(requestMeta.getClientIp()), eq(srcUser))).thenReturn(configOperateResult);
    ConfigPublishResponse response = configPublishRequestHandler.handle(configPublishRequest, requestMeta);
    assertEquals(ResponseCode.SUCCESS.getCode(), response.getResultCode());
    // Event delivery is asynchronous; give the notify thread time to run.
    Thread.sleep(500L);
    assertTrue(reference.get() != null);
    assertEquals(dataId, reference.get().dataId);
    assertEquals(group, reference.get().group);
    assertEquals(tenant, reference.get().tenant);
    assertEquals(timestamp, reference.get().lastModifiedTs);
    assertFalse(reference.get().isBatch);
    assertFalse(reference.get().isBeta);
    assertEquals(tag, reference.get().tag);
}
/** Logs a TRACE-level message by delegating to the wrapped logger. */
@Override
public void trace(String msg) {
    logger.trace(msg);
}
/**
 * Verifies the message+throwable TRACE overload delegates both arguments to the
 * underlying commons Log.
 * NOTE(review): `e` is not declared here — presumably a Throwable fixture field of
 * the test class; confirm it exists in the enclosing class.
 */
@Test
public void testTraceWithException() {
    Log mockLog = mock(Log.class);
    InternalLogger logger = new CommonsLogger(mockLog, "foo");
    logger.trace("a", e);
    verify(mockLog).trace("a", e);
}
/**
 * Returns the gauge registered under {@code name}, creating and registering one
 * via {@code MetricBuilder.GAUGES} if absent. The unchecked cast narrows to the
 * caller's expected gauge subtype; a mismatched T fails at the call site.
 */
@SuppressWarnings({"rawtypes", "unchecked"})
public <T extends Gauge> T gauge(String name) {
    return (T) getOrAdd(name, MetricBuilder.GAUGES);
}
/**
 * Verifies gauge(name) registers a settable gauge on first access, notifies the
 * listener once, and returns the same instance on subsequent access.
 */
@Test
public void accessingASettableGaugeRegistersAndReusesIt() {
    final SettableGauge<String> gauge1 = registry.gauge("thing");
    gauge1.setValue("Test");
    final Gauge<String> gauge2 = registry.gauge("thing");
    assertThat(gauge1).isSameAs(gauge2);
    assertThat(gauge2.getValue()).isEqualTo("Test");
    verify(listener).onGaugeAdded("thing", gauge1);
}
/**
 * Feeds one watermark arriving on {@code channelIndex} into the valve, advancing the
 * per-subpartition watermark and emitting a new overall minimum watermark to
 * {@code output} when it advances.
 *
 * @param watermark the incoming watermark; an {@link InternalWatermark} names its
 *     exact subpartition, otherwise the channel's tracked subpartition index is used
 * @param channelIndex index of the input channel the watermark arrived on
 * @param output sink that receives any newly-determined minimum watermark
 */
public void inputWatermark(Watermark watermark, int channelIndex, DataOutput<?> output)
        throws Exception {
    final SubpartitionStatus subpartitionStatus;
    if (watermark instanceof InternalWatermark) {
        // Internal watermarks carry the originating subpartition explicitly.
        int subpartitionStatusIndex = ((InternalWatermark) watermark).getSubpartitionIndex();
        subpartitionStatus = subpartitionStatuses.get(channelIndex).get(subpartitionStatusIndex);
    } else {
        subpartitionStatus = subpartitionStatuses.get(channelIndex).get(subpartitionIndexes[channelIndex]);
    }
    // ignore the input watermark if its subpartition, or all subpartitions are idle (i.e.
    // overall the valve is idle).
    if (lastOutputWatermarkStatus.isActive() && subpartitionStatus.watermarkStatus.isActive()) {
        long watermarkMillis = watermark.getTimestamp();
        // if the input watermark's value is less than the last received watermark for its
        // subpartition, ignore it also.
        if (watermarkMillis > subpartitionStatus.watermark) {
            subpartitionStatus.watermark = watermarkMillis;
            if (subpartitionStatus.isWatermarkAligned) {
                adjustAlignedSubpartitionStatuses(subpartitionStatus);
            } else if (watermarkMillis >= lastOutputWatermark) {
                // previously unaligned subpartitions are now aligned if its watermark has
                // caught up
                markWatermarkAligned(subpartitionStatus);
            }
            // now, attempt to find a new min watermark across all aligned subpartitions
            findAndOutputNewMinWatermarkAcrossAlignedSubpartitions(output);
        }
    }
}
/**
 * Verifies that with a single input channel, strictly increasing watermarks are
 * forwarded once each and nothing extra is emitted.
 */
@Test
void testSingleInputIncreasingWatermarks() throws Exception {
    StatusWatermarkOutput valveOutput = new StatusWatermarkOutput();
    StatusWatermarkValve valve = new StatusWatermarkValve(1);
    valve.inputWatermark(new Watermark(0), 0, valveOutput);
    assertThat(valveOutput.popLastSeenOutput()).isEqualTo(new Watermark(0));
    assertThat(valveOutput.popLastSeenOutput()).isNull();
    valve.inputWatermark(new Watermark(25), 0, valveOutput);
    assertThat(valveOutput.popLastSeenOutput()).isEqualTo(new Watermark(25));
    assertThat(valveOutput.popLastSeenOutput()).isNull();
}
/**
 * Derives the date-scoped signing key by computing HMAC(signMethod, PREFIX + secret)
 * over the date string.
 *
 * @param secret the account secret key material
 * @param date the date string to sign, e.g. "20220101"
 * @param signMethod the JCA Mac algorithm name, e.g. "HmacSHA256"
 * @return the raw HMAC bytes used as the per-date signing key
 * @throws RuntimeException if the algorithm is unsupported or the key is invalid
 */
private static byte[] getDateSigningKey(String secret, String date, String signMethod) {
    try {
        Mac mac = Mac.getInstance(signMethod);
        mac.init(new SecretKeySpec((PREFIX + secret).getBytes(StandardCharsets.UTF_8), signMethod));
        return mac.doFinal(date.getBytes(StandardCharsets.UTF_8));
    } catch (NoSuchAlgorithmException e) {
        // Preserve the cause so the failure is diagnosable from the stack trace.
        throw new RuntimeException("unsupport Algorithm:" + signMethod, e);
    } catch (InvalidKeyException e) {
        throw new RuntimeException("InvalidKey", e);
    }
}
/**
 * Invokes the private getDateSigningKey via reflection and checks the HmacSHA256
 * output is 32 bytes and matches the pre-computed expected digest.
 */
@Test
public void testGetDateSigningKey() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
    String secret = "mySecret";
    String date = "20220101";
    String signMethod = "HmacSHA256";
    // Expected HMAC bytes for PREFIX + "mySecret" over "20220101".
    byte[] expectArray = new byte[]{-96, 108, 42, 75, -59, 121, -63, 108, -3, -126, 67, 3, 118, 2, 39, 59,
            -68, -37, -98, 122, -25, -120, 77, 56, -70, 24, -115, 33, 125, -128, -10, -26};
    RamSignAdapter adapter = new RamSignAdapter();
    // Use reflection to access the private method
    Method getDateSigningKeyMethod = RamSignAdapter.class.getDeclaredMethod("getDateSigningKey",
            String.class, String.class, String.class);
    getDateSigningKeyMethod.setAccessible(true);
    byte[] signingKey = (byte[]) getDateSigningKeyMethod.invoke(adapter, secret, date, signMethod);
    Assertions.assertEquals(32, signingKey.length);
    Assertions.assertArrayEquals(expectArray, signingKey);
}
/**
 * Gets or creates the live segment for {@code segmentId}, then drops segments that
 * have expired relative to {@code streamTime}. Cleanup runs after the fetch so the
 * returned segment is obtained before any expiry pass.
 */
@Override
public TimestampedSegment getOrCreateSegmentIfLive(final long segmentId, final ProcessorContext context, final long streamTime) {
    final TimestampedSegment segment = super.getOrCreateSegmentIfLive(segmentId, context, streamTime);
    cleanupExpiredSegments(streamTime);
    return segment;
}
/** Verifies close() closes every segment previously opened via getOrCreateSegmentIfLive. */
@Test
public void shouldCloseAllOpenSegments() {
    final TimestampedSegment first = segments.getOrCreateSegmentIfLive(0, context, -1L);
    final TimestampedSegment second = segments.getOrCreateSegmentIfLive(1, context, -1L);
    final TimestampedSegment third = segments.getOrCreateSegmentIfLive(2, context, -1L);
    segments.close();
    assertFalse(first.isOpen());
    assertFalse(second.isOpen());
    assertFalse(third.isOpen());
}
/** Evicts all cached rule data for the given plugin from the rule-data map. */
public void removeRuleData(final String pluginName) {
    RULE_DATA_MAP.remove(pluginName);
}
/**
 * Caches a rule entry, removes it by plugin name, and asserts the plugin's slot in
 * the internal rule map (read via reflection) is gone.
 */
@Test
public void testRemoveRuleData() throws NoSuchFieldException, IllegalAccessException {
    RuleData cacheRuleData = RuleData.builder().id("1").pluginName(mockPluginName1).sort(1).build();
    MatchDataCache.getInstance().cacheRuleData(path1, cacheRuleData, 100, 100);
    MatchDataCache.getInstance().removeRuleData(cacheRuleData.getPluginName());
    ConcurrentHashMap<String, WindowTinyLFUMap<String, RuleData>> ruleMap = getFieldByName(ruleMapStr);
    assertNull(ruleMap.get(mockPluginName1));
    ruleMap.clear();
}
/**
 * Resolves the cluster node responsible for the given key.
 * The key is first hashed to its cluster slot, then the slot is mapped to its node.
 */
@Override
public RedisClusterNode clusterGetNodeForKey(byte[] key) {
    return clusterGetNodeForSlot(executorService.getConnectionManager().calcSlot(key));
}
/** Smoke test: a key must always resolve to some cluster node. */
@Test
public void testClusterGetNodeForKey() {
    RedisClusterNode node = connection.clusterGetNodeForKey("123".getBytes());
    assertThat(node).isNotNull();
}
@Override public long getLong(PropertyKey key) { // Low-precision types int can be implicitly converted to high-precision types long // without loss of precision checkArgument(key.getType() == PropertyKey.PropertyType.LONG || key.getType() == PropertyKey.PropertyType.INTEGER); return ((Number) get(key)).longValue(); }
/** Verifies getLong round-trips a value larger than Integer.MAX_VALUE. */
@Test
public void getLong() {
    // bigger than MAX_INT
    mConfiguration.set(PropertyKey.JOB_MASTER_JOB_CAPACITY, 12345678910L);
    assertEquals(12345678910L, mConfiguration.getLong(PropertyKey.JOB_MASTER_JOB_CAPACITY));
}
/**
 * Maps a low-level SSH/SFTP IOException to the corresponding BackgroundException
 * subtype. SFTP status codes are translated first; otherwise the exception class
 * itself (auth, connection, buffer, generic SSH) selects the mapping, and anything
 * unrecognized falls through to wrap().
 */
@Override
public BackgroundException map(final IOException e) {
    // If the root cause is an SSHException, map that directly instead of the wrapper.
    if (ExceptionUtils.getRootCause(e) != e && ExceptionUtils.getRootCause(e) instanceof SSHException) {
        return this.map((SSHException) ExceptionUtils.getRootCause(e));
    }
    final StringBuilder buffer = new StringBuilder();
    this.append(buffer, e.getMessage());
    // Append the root-cause message only when it adds information.
    if (ExceptionUtils.getRootCause(e) != e) {
        if (!StringUtils.equals(e.getMessage(), ExceptionUtils.getRootCause(e).getMessage())) {
            this.append(buffer, ExceptionUtils.getRootCause(e).getMessage());
        }
    }
    if (e instanceof SFTPException) {
        final SFTPException failure = (SFTPException) e;
        final Response.StatusCode code = failure.getStatusCode();
        switch (code) {
            case FILE_ALREADY_EXISTS:
                return new ConflictException(buffer.toString(), e);
            case NO_SUCH_FILE:
            case NO_SUCH_PATH:
            case INVALID_HANDLE:
                return new NotfoundException(buffer.toString(), e);
            case PERMISSION_DENIED:
            case WRITE_PROTECT:
            case CANNOT_DELETE:
                return new AccessDeniedException(buffer.toString(), e);
            case NO_CONNECTION:
            // NOTE(review): constant is misspelled upstream ("CONNECITON") — must match the enum.
            case CONNECITON_LOST:
                return new ConnectionRefusedException(buffer.toString(), e);
            case NO_MEDIA:
                // Deliberately falls through to the class-based checks below.
                break;
            case NO_SPACE_ON_FILESYSTEM:
            case QUOTA_EXCEEDED:
                return new QuotaException(buffer.toString(), e);
            case LOCK_CONFLICT:
                return new LockedException(buffer.toString(), e);
            default:
                return new InteroperabilityException(buffer.toString(), e);
        }
    }
    if (e instanceof UserAuthException) {
        return new LoginFailureException(buffer.toString(), e);
    }
    if (e instanceof ConnectionException) {
        return new ConnectionRefusedException(buffer.toString(), e);
    }
    if (e instanceof Buffer.BufferException) {
        return new InteroperabilityException(buffer.toString(), e);
    }
    if (e instanceof SSHException) {
        final SSHException failure = (SSHException) e;
        final DisconnectReason reason = failure.getDisconnectReason();
        return this.map(e, buffer, reason);
    }
    return this.wrap(e, buffer);
}
/** Verifies SocketTimeoutException maps to ConnectionTimeoutException via all map(...) overloads. */
@Test
public void testSocketTimeout() {
    assertEquals(ConnectionTimeoutException.class, new SFTPExceptionMappingService()
            .map(new SocketTimeoutException()).getClass());
    assertEquals(ConnectionTimeoutException.class, new SFTPExceptionMappingService()
            .map("message", new SocketTimeoutException()).getClass());
    assertEquals(ConnectionTimeoutException.class, new SFTPExceptionMappingService()
            .map("message", new SocketTimeoutException(), new Path("/f", EnumSet.of(Path.Type.file))).getClass());
}
/**
 * Builds the timestamp-extraction policy for a source.
 * No TIMESTAMP column: use record metadata with the configured default extractor.
 * STRING column: requires a timestamp_format; BIGINT/TIMESTAMP columns: used directly
 * (a format is rejected for them). Any other column type is an error.
 *
 * @throws KsqlException if the column is missing from the schema, a STRING column
 *     lacks a format, a format is given for a non-STRING column, or the type is unsupported
 */
public static TimestampExtractionPolicy create(
        final KsqlConfig ksqlConfig,
        final LogicalSchema schema,
        final Optional<TimestampColumn> timestampColumn
) {
    if (!timestampColumn.isPresent()) {
        return new MetadataTimestampExtractionPolicy(getDefaultTimestampExtractor(ksqlConfig));
    }
    final ColumnName col = timestampColumn.get().getColumn();
    final Optional<String> timestampFormat = timestampColumn.get().getFormat();
    final Column column = schema.findColumn(col)
            .orElseThrow(() -> new KsqlException(
                    "The TIMESTAMP column set in the WITH clause does not exist in the schema: '"
                            + col.toString(FormatOptions.noEscape()) + "'"));
    final SqlBaseType tsColumnType = column.type().baseType();
    if (tsColumnType == SqlBaseType.STRING) {
        // String timestamps must come with a parse format.
        final String format = timestampFormat.orElseThrow(() -> new KsqlException(
                "A String timestamp field has been specified without"
                        + " also specifying the "
                        + CommonCreateConfigs.TIMESTAMP_FORMAT_PROPERTY.toLowerCase()));
        return new StringTimestampExtractionPolicy(col, format);
    }
    if (timestampFormat.isPresent()) {
        throw new KsqlException("'" + CommonCreateConfigs.TIMESTAMP_FORMAT_PROPERTY
                + "' set in the WITH clause can only be used "
                + "when the timestamp column is of type STRING.");
    }
    if (tsColumnType == SqlBaseType.BIGINT) {
        return new LongColumnTimestampExtractionPolicy(col);
    }
    if (tsColumnType == SqlBaseType.TIMESTAMP) {
        return new TimestampColumnTimestampExtractionPolicy(col);
    }
    throw new KsqlException(
            "Timestamp column, " + col + ", should be LONG(INT64), TIMESTAMP,"
                    + " or a String with a "
                    + CommonCreateConfigs.TIMESTAMP_FORMAT_PROPERTY.toLowerCase()
                    + " specified.");
}
/** A STRING timestamp column without a timestamp_format must be rejected. */
@Test
public void shouldFailIfStringTimestampTypeAndFormatNotSupplied() {
    // Given:
    final String field = "my_string_field";
    final LogicalSchema schema = schemaBuilder2
            .valueColumn(ColumnName.of(field.toUpperCase()), SqlTypes.STRING)
            .build();
    // When:
    assertThrows(
            KsqlException.class,
            () -> TimestampExtractionPolicyFactory
                    .create(
                            ksqlConfig,
                            schema,
                            Optional.of(
                                    new TimestampColumn(
                                            ColumnName.of(field.toUpperCase()),
                                            Optional.empty()
                                    )
                            )
                    )
    );
}
/**
 * Creates a slice transformer that groups classes into slices by the given package
 * identifier pattern; the transformer's description is derived from the identifier.
 */
@PublicAPI(usage = ACCESS)
public static Transformer matching(String packageIdentifier) {
    PackageMatchingSliceIdentifier identifier = new PackageMatchingSliceIdentifier(packageIdentifier);
    return new Transformer(identifier, "slices matching " + identifier.getDescription());
}
/** Default slice descriptions are derived from the captured package group ("Slice lang", "Slice util"). */
@Test
public void default_naming_slices() {
    JavaClasses classes = importClassesWithContext(Object.class, String.class, Pattern.class);
    DescribedIterable<Slice> slices = Slices.matching("java.(*)..").transform(classes);
    assertThat(slices).extractingResultOf("getDescription").containsOnly("Slice lang", "Slice util");
}
/**
 * Bulk-indexes the given messages with default options. The boolean passed to the
 * three-arg overload presumably selects system-traffic accounting (false here — see
 * the system-traffic test of the overload); the null is an index-set override.
 */
public IndexingResults bulkIndex(final List<MessageWithIndex> messageList) {
    return bulkIndex(messageList, false, null);
}
/**
 * When indexing with the system-traffic flag set, the summed message sizes (17+23+42=82)
 * must be booked as system traffic and never as output traffic.
 */
@Test
public void bulkIndexingShouldAccountMessageSizesForSystemTrafficSeparately() throws IOException {
    final IndexSet indexSet = mock(IndexSet.class);
    final List<MessageWithIndex> messageList = List.of(
            new MessageWithIndex(wrap(messageWithSize(17)), indexSet),
            new MessageWithIndex(wrap(messageWithSize(23)), indexSet),
            new MessageWithIndex(wrap(messageWithSize(42)), indexSet)
    );
    when(messagesAdapter.bulkIndex(any())).thenReturn(IndexingResults.create(createSuccessFromMessages(messageList), List.of()));
    messages.bulkIndex(messageList, true);
    verify(trafficAccounting, never()).addOutputTraffic(anyLong());
    verify(trafficAccounting, times(1)).addSystemTraffic(82);
}
/** Unregisters the subscriber so it no longer receives published events. */
@Override
public void removeSubscriber(Subscriber subscriber) {
    subscribers.remove(subscriber);
}
/** Add then remove a subscriber; the subscriber set shrinks back to empty. */
@Test
void testRemoveSubscriber() {
    publisher.addSubscriber(subscriber);
    assertEquals(1, publisher.getSubscribers().size());
    publisher.removeSubscriber(subscriber);
    assertEquals(0, publisher.getSubscribers().size());
}
/** Returns the wire-protocol type code identifying an RM registration result message. */
@Override
public short getTypeCode() {
    return MessageType.TYPE_REG_RM_RESULT;
}
/** The response must report the TYPE_REG_RM_RESULT protocol code. */
@Test
public void getTypeCode() {
    RegisterRMResponse registerRMResponse = new RegisterRMResponse();
    Assertions.assertEquals(MessageType.TYPE_REG_RM_RESULT, registerRMResponse.getTypeCode());
}
/**
 * Imports a batch of configs. Each entry is validated, inserted when absent, and on a
 * duplicate handled per {@code policy}: ABORT stops and reports the failure plus all
 * remaining (skipped) entries, SKIP records the entry and continues, OVERWRITE updates
 * in place. Returns counters plus optional "failData"/"skipData" detail lists.
 *
 * NOTE(review): duplicates are signalled by throwing a raw Throwable whose message
 * contains "DuplicateKeyException" and detected via string matching on e.toString() —
 * fragile; a dedicated exception type would be safer.
 */
@Override
public Map<String, Object> batchInsertOrUpdate(List<ConfigAllInfo> configInfoList, String srcUser, String srcIp,
        Map<String, Object> configAdvanceInfo, SameConfigPolicy policy) throws NacosException {
    int succCount = 0;
    int skipCount = 0;
    List<Map<String, String>> failData = null;
    List<Map<String, String>> skipData = null;
    // Callback for addConfigInfo: rethrow any persistence failure as unchecked.
    final BiConsumer<Boolean, Throwable> callFinally = (result, t) -> {
        if (t != null) {
            throw new NacosRuntimeException(0, t);
        }
    };
    for (int i = 0; i < configInfoList.size(); i++) {
        ConfigAllInfo configInfo = configInfoList.get(i);
        try {
            ParamUtils.checkParam(configInfo.getDataId(), configInfo.getGroup(), "datumId", configInfo.getContent());
        } catch (Throwable e) {
            DEFAULT_LOG.error("data verification failed", e);
            throw e;
        }
        ConfigInfo configInfo2Save = new ConfigInfo(configInfo.getDataId(), configInfo.getGroup(),
                configInfo.getTenant(), configInfo.getAppName(), configInfo.getContent());
        configInfo2Save.setEncryptedDataKey(
                configInfo.getEncryptedDataKey() == null ? "" : configInfo.getEncryptedDataKey());
        String type = configInfo.getType();
        if (StringUtils.isBlank(type)) {
            // simple judgment of file type based on suffix
            if (configInfo.getDataId().contains(SPOT)) {
                String extName = configInfo.getDataId().substring(configInfo.getDataId().lastIndexOf(SPOT) + 1);
                FileTypeEnum fileTypeEnum = FileTypeEnum.getFileTypeEnumByFileExtensionOrFileType(extName);
                type = fileTypeEnum.getFileType();
            } else {
                type = FileTypeEnum.getFileTypeEnumByFileExtensionOrFileType(null).getFileType();
            }
        }
        if (configAdvanceInfo == null) {
            configAdvanceInfo = new HashMap<>(16);
        }
        configAdvanceInfo.put("type", type);
        configAdvanceInfo.put("desc", configInfo.getDesc());
        try {
            // Pre-check for an existing config; existence is treated as a duplicate-key conflict.
            ConfigInfoStateWrapper foundCfg = findConfigInfoState(configInfo2Save.getDataId(),
                    configInfo2Save.getGroup(), configInfo2Save.getTenant());
            if (foundCfg != null) {
                throw new Throwable("DuplicateKeyException: config already exists, should be overridden");
            }
            addConfigInfo(srcIp, srcUser, configInfo2Save, configAdvanceInfo, callFinally);
            succCount++;
        } catch (Throwable e) {
            if (!StringUtils.contains(e.toString(), "DuplicateKeyException")) {
                throw new NacosException(NacosException.SERVER_ERROR, e);
            }
            // uniqueness constraint conflict
            if (SameConfigPolicy.ABORT.equals(policy)) {
                failData = new ArrayList<>();
                skipData = new ArrayList<>();
                Map<String, String> faileditem = new HashMap<>(2);
                faileditem.put("dataId", configInfo2Save.getDataId());
                faileditem.put("group", configInfo2Save.getGroup());
                failData.add(faileditem);
                // Everything after the failing entry is reported as skipped.
                for (int j = (i + 1); j < configInfoList.size(); j++) {
                    ConfigInfo skipConfigInfo = configInfoList.get(j);
                    Map<String, String> skipitem = new HashMap<>(2);
                    skipitem.put("dataId", skipConfigInfo.getDataId());
                    skipitem.put("group", skipConfigInfo.getGroup());
                    skipData.add(skipitem);
                    skipCount++;
                }
                break;
            } else if (SameConfigPolicy.SKIP.equals(policy)) {
                skipCount++;
                if (skipData == null) {
                    skipData = new ArrayList<>();
                }
                Map<String, String> skipitem = new HashMap<>(2);
                skipitem.put("dataId", configInfo2Save.getDataId());
                skipitem.put("group", configInfo2Save.getGroup());
                skipData.add(skipitem);
            } else if (SameConfigPolicy.OVERWRITE.equals(policy)) {
                succCount++;
                updateConfigInfo(configInfo2Save, srcIp, srcUser, configAdvanceInfo);
            }
        }
    }
    Map<String, Object> result = new HashMap<>(4);
    result.put("succCount", succCount);
    result.put("skipCount", skipCount);
    if (failData != null && !failData.isEmpty()) {
        result.put("failData", failData);
    }
    if (skipData != null && !skipData.isEmpty()) {
        result.put("skipData", skipData);
    }
    return result;
}
/**
 * OVERWRITE policy: two fresh configs insert, one pre-existing config is updated,
 * so all three count as successes and nothing is skipped.
 */
@Test
void testBatchInsertOrUpdateOverwrite() throws NacosException {
    List<ConfigAllInfo> configInfoList = new ArrayList<>();
    //insert direct
    configInfoList.add(createMockConfigAllInfo(0));
    //exist config and overwrite
    configInfoList.add(createMockConfigAllInfo(1));
    //insert direct
    configInfoList.add(createMockConfigAllInfo(2));
    String srcUser = "srcUser1324";
    String srcIp = "srcIp1243";
    Map<String, Object> configAdvanceInfo = new HashMap<>();
    //mock add config 1 success,config 2 fail and skip,config 3 success
    Mockito.when(databaseOperate.queryOne(anyString(),
            eq(new Object[] {configInfoList.get(0).getDataId(), configInfoList.get(0).getGroup(),
                    configInfoList.get(0).getTenant()}), eq(CONFIG_INFO_STATE_WRAPPER_ROW_MAPPER))).thenReturn(null);
    Mockito.when(databaseOperate.queryOne(anyString(),
            eq(new Object[] {configInfoList.get(1).getDataId(), configInfoList.get(1).getGroup(),
                    configInfoList.get(1).getTenant()}), eq(CONFIG_INFO_STATE_WRAPPER_ROW_MAPPER)))
            .thenReturn(new ConfigInfoStateWrapper());
    // NOTE(review): this stub uses get(1).getTenant() for config index 2 — looks like a
    // copy/paste slip; it still matches because the mock tenants are equal. Confirm.
    Mockito.when(databaseOperate.queryOne(anyString(),
            eq(new Object[] {configInfoList.get(2).getDataId(), configInfoList.get(2).getGroup(),
                    configInfoList.get(1).getTenant()}), eq(CONFIG_INFO_STATE_WRAPPER_ROW_MAPPER))).thenReturn(null);
    //mock query config info during update
    ConfigInfoWrapper configInfoWrapper = new ConfigInfoWrapper();
    Mockito.when(databaseOperate.queryOne(anyString(),
            eq(new Object[] {configInfoList.get(1).getDataId(), configInfoList.get(1).getGroup(),
                    configInfoList.get(1).getTenant()}), eq(CONFIG_INFO_WRAPPER_ROW_MAPPER))).thenReturn(configInfoWrapper);
    Map<String, Object> stringObjectMap = embeddedConfigInfoPersistService.batchInsertOrUpdate(configInfoList,
            srcUser, srcIp, configAdvanceInfo, SameConfigPolicy.OVERWRITE);
    assertEquals(3, stringObjectMap.get("succCount"));
    assertEquals(0, stringObjectMap.get("skipCount"));
}
/**
 * Translates a logical search Query into an OpenSearch query context: the root query
 * string plus all search filters become bool-filter clauses, then each search type
 * (skipping those already in error) gets its own source builder with per-type
 * time-range, stream, query-string and filter constraints, and is handed to its
 * registered type handler.
 */
@Override
public OSGeneratedQueryContext generate(Query query, Set<SearchError> validationErrors, DateTimeZone timezone) {
    final BackendQuery backendQuery = query.query();
    final Set<SearchType> searchTypes = query.searchTypes();
    final QueryBuilder normalizedRootQuery = translateQueryString(backendQuery.queryString());
    final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery()
            .filter(normalizedRootQuery);
    // Every configured search filter becomes an additional bool filter clause.
    usedSearchFiltersToQueryStringsMapper.map(query.filters())
            .stream()
            .map(this::translateQueryString)
            .forEach(boolQuery::filter);
    // add the optional root query filters
    generateFilterClause(query.filter()).ifPresent(boolQuery::filter);
    // size(0): only aggregations/counts are needed at the root level.
    final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder()
            .query(boolQuery)
            .from(0)
            .size(0)
            .trackTotalHits(true);
    final OSGeneratedQueryContext queryContext = queryContextFactory.create(this, searchSourceBuilder, validationErrors, timezone);
    searchTypes.stream()
            .filter(searchType -> !isSearchTypeWithError(queryContext, searchType.id()))
            .forEach(searchType -> {
                final String type = searchType.type();
                final Provider<OSSearchTypeHandler<? extends SearchType>> searchTypeHandler = openSearchSearchTypeHandlers.get(type);
                if (searchTypeHandler == null) {
                    // Unknown type: record an error on the context rather than failing the whole query.
                    LOG.error("Unknown search type {} for elasticsearch backend, cannot generate query part. Skipping this search type.", type);
                    queryContext.addError(new SearchTypeError(query, searchType.id(), "Unknown search type '" + type + "' for elasticsearch backend, cannot generate query"));
                    return;
                }
                final SearchSourceBuilder searchTypeSourceBuilder = queryContext.searchSourceBuilder(searchType);
                final Set<String> effectiveStreamIds = query.effectiveStreams(searchType);
                final BoolQueryBuilder searchTypeOverrides = QueryBuilders.boolQuery()
                        .must(searchTypeSourceBuilder.query())
                        .must(
                                Objects.requireNonNull(
                                        TimeRangeQueryFactory.create(
                                                query.effectiveTimeRange(searchType)
                                        ),
                                        "Timerange for search type " + searchType.id() + " cannot be found in query or search type."
                                )
                        );
                // Datastream-backed streams are selected by index, not by the streams field.
                if (effectiveStreamIds.stream().noneMatch(s -> s.startsWith(Stream.DATASTREAM_PREFIX))) {
                    searchTypeOverrides
                            .must(QueryBuilders.termsQuery(Message.FIELD_STREAMS, effectiveStreamIds));
                }
                searchType.query().ifPresent(searchTypeQuery -> {
                    final QueryBuilder normalizedSearchTypeQuery = translateQueryString(searchTypeQuery.queryString());
                    searchTypeOverrides.must(normalizedSearchTypeQuery);
                });
                usedSearchFiltersToQueryStringsMapper.map(searchType.filters())
                        .stream()
                        .map(this::translateQueryString)
                        .forEach(searchTypeOverrides::must);
                searchTypeSourceBuilder.query(searchTypeOverrides);
                searchTypeHandler.get().generateQueryPart(query, searchType, queryContext);
            });
    return queryContext;
}
/**
 * The generated root query must contain one filter per mapped search-filter query
 * string, alongside the match-all filter produced by the empty ES query.
 */
@Test
public void generatedContextHasQueryThatIncludesSearchFilters() {
    final ImmutableList<UsedSearchFilter> usedSearchFilters = ImmutableList.of(
            InlineQueryStringSearchFilter.builder().title("").description("").queryString("method:GET").build(),
            ReferencedQueryStringSearchFilter.create("12345")
    );
    doReturn(ImmutableSet.of("method:GET", "method:POST")).when(usedSearchFiltersToQueryStringsMapper).map(usedSearchFilters);
    final Query query = Query.builder()
            .id("queryWithSearchFilters")
            .query(ElasticsearchQueryString.of(""))
            .filters(usedSearchFilters)
            .timerange(RelativeRange.create(300))
            .build();
    final OSGeneratedQueryContext queryContext = backend.generate(query, Collections.emptySet(), DateTimeZone.UTC);
    final QueryBuilder esQuery = queryContext.searchSourceBuilder(new SearchType.Fallback()).query();
    assertThat(esQuery)
            .isNotNull()
            .isInstanceOf(BoolQueryBuilder.class);
    final List<QueryBuilder> filters = ((BoolQueryBuilder) esQuery).filter();
    //filter for empty ES query
    assertThat(filters)
            .anyMatch(queryBuilder -> queryBuilder instanceof MatchAllQueryBuilder);
    //2 filters from search filters
    assertThat(filters)
            .filteredOn(queryBuilder -> queryBuilder instanceof QueryStringQueryBuilder)
            .extracting(queryBuilder -> (QueryStringQueryBuilder) queryBuilder)
            .extracting(QueryStringQueryBuilder::queryString)
            .contains("method:POST")
            .contains("method:GET");
}
/**
 * Merges {@code metric} into the metric registered under {@code name}, first creating
 * an empty metric with the same unit if none is registered yet.
 */
public void mergeMetric(String name, RuntimeMetric metric) {
    RuntimeMetric target = metrics.computeIfAbsent(name, key -> new RuntimeMetric(name, metric.getUnit()));
    target.mergeWith(metric);
}
/**
 * Merging into a fresh name copies the source metric's aggregates; merging into an
 * existing name combines sum/count/max/min.
 */
@Test
public void testMergeMetric() {
    RuntimeStats stats1 = new RuntimeStats();
    stats1.addMetricValue(TEST_METRIC_NAME_1, NONE, 2);
    stats1.addMetricValue(TEST_METRIC_NAME_1, NONE, 3);
    stats1.addMetricValue(TEST_METRIC_NAME_NANO_1, NANO, 3);
    RuntimeStats stats2 = new RuntimeStats();
    stats2.addMetricValue(TEST_METRIC_NAME_NANO_2, NANO, 5);
    stats2.mergeMetric(TEST_METRIC_NAME_2, stats1.getMetric(TEST_METRIC_NAME_1));
    stats2.mergeMetric(TEST_METRIC_NAME_NANO_2, stats1.getMetric(TEST_METRIC_NAME_NANO_1));
    assertEquals(stats2.getMetrics().size(), 2);
    assertRuntimeMetricEquals(
            stats2.getMetric(TEST_METRIC_NAME_2),
            new RuntimeMetric(TEST_METRIC_NAME_2, NONE, 5, 2, 3, 2));
    assertRuntimeMetricEquals(
            stats2.getMetric(TEST_METRIC_NAME_NANO_2),
            new RuntimeMetric(TEST_METRIC_NAME_NANO_2, NANO, 8, 2, 5, 3));
}
/**
 * Invalidates the cached application config for the removed selector.
 * Selectors without an id have no cache entry, so nothing is done for them.
 */
@Override
public void removeSelector(final SelectorData selectorData) {
    final String selectorId = selectorData.getId();
    if (Objects.nonNull(selectorId)) {
        ApplicationConfigCache.getInstance().invalidate(selectorId);
    }
}
/**
 * A null-id selector is a no-op; a real id invalidates the cache, leaving the
 * re-created entry with no service instances.
 */
@Test
public void testRemoveSelector() {
    when(selectorData.getId()).thenReturn(null);
    grpcPluginDataHandler.removeSelector(selectorData);
    when(selectorData.getId()).thenReturn("selectorId");
    grpcPluginDataHandler.removeSelector(selectorData);
    final List<ShenyuServiceInstance> shenyuServiceInstances = ApplicationConfigCache.getInstance().get(selectorData.getId()).getShenyuServiceInstances();
    assertTrue(CollectionUtils.isEmpty(shenyuServiceInstances), "shenyuServiceInstances mast is empty");
}
/**
 * Paged query of plugins, optionally filtered by name and enabled flag.
 *
 * @param name optional plugin-name filter
 * @param enabled optional enabled-state filter
 * @param currentPage required 1-based page number
 * @param pageSize required page size
 * @return success result wrapping the page of {@code PluginVO}s
 */
@GetMapping("")
public ShenyuAdminResult queryPlugins(final String name, final Integer enabled,
                                      @NotNull final Integer currentPage,
                                      @NotNull final Integer pageSize) {
    CommonPager<PluginVO> commonPager = pluginService.listByPage(new PluginQuery(name, enabled, new PageParameter(currentPage, pageSize)));
    return ShenyuAdminResult.success(ShenyuResultMessage.QUERY_SUCCESS, commonPager);
}
/** MockMvc check: GET /plugin with filters returns the mocked page and success message. */
@Test
public void testQueryPlugins() throws Exception {
    final PageParameter pageParameter = new PageParameter();
    List<PluginVO> pluginVOS = new ArrayList<>();
    pluginVOS.add(pluginVO);
    final CommonPager<PluginVO> commonPager = new CommonPager<>();
    commonPager.setPage(pageParameter);
    commonPager.setDataList(pluginVOS);
    final PluginQuery pluginQuery = new PluginQuery("t_n", 1, pageParameter);
    given(this.pluginService.listByPage(pluginQuery)).willReturn(commonPager);
    this.mockMvc.perform(MockMvcRequestBuilders.get("/plugin")
            .param("name", "t_n")
            .param("enabled", "1")
            .param("currentPage", String.valueOf(pageParameter.getCurrentPage()))
            .param("pageSize", String.valueOf(pageParameter.getPageSize())))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$.message", is(ShenyuResultMessage.QUERY_SUCCESS)))
            .andExpect(jsonPath("$.data.dataList[0].name", is(pluginVO.getName())))
            .andReturn();
}
public static Schema schemaFor(Table table, long snapshotId) { Snapshot snapshot = table.snapshot(snapshotId); Preconditions.checkArgument(snapshot != null, "Cannot find snapshot with ID %s", snapshotId); Integer schemaId = snapshot.schemaId(); // schemaId could be null, if snapshot was created before Iceberg added schema id to snapshot if (schemaId != null) { Schema schema = table.schemas().get(schemaId); Preconditions.checkState(schema != null, "Cannot find schema with schema id %s", schemaId); return schema; } // TODO: recover the schema by reading previous metadata files return table.schema(); }
/**
 * The String-ref overload of schemaFor must return the current table schema for a
 * null ref, an unknown ref, and the main branch alike.
 * NOTE(review): this exercises schemaFor(Table, String), not the long-snapshotId overload.
 */
@Test
public void schemaForRef() {
    Schema initialSchema = new Schema(
            required(1, "id", Types.IntegerType.get()),
            required(2, "data", Types.StringType.get()));
    assertThat(table.schema().asStruct()).isEqualTo(initialSchema.asStruct());
    assertThat(SnapshotUtil.schemaFor(table, null).asStruct()).isEqualTo(initialSchema.asStruct());
    assertThat(SnapshotUtil.schemaFor(table, "non-existing-ref").asStruct())
            .isEqualTo(initialSchema.asStruct());
    assertThat(SnapshotUtil.schemaFor(table, SnapshotRef.MAIN_BRANCH).asStruct())
            .isEqualTo(initialSchema.asStruct());
}
/**
 * Reads and validates buffer-debloat settings from the configuration.
 *
 * @throws IllegalArgumentException if any size/sample/target value is non-positive
 *     or the minimum buffer size exceeds the maximum
 */
public static BufferDebloatConfiguration fromConfiguration(ReadableConfig config) {
    Duration targetTotalBufferSize = config.get(BUFFER_DEBLOAT_TARGET);
    int maxBufferSize = Math.toIntExact(config.get(TaskManagerOptions.MEMORY_SEGMENT_SIZE).getBytes());
    int minBufferSize = Math.toIntExact(config.get(TaskManagerOptions.MIN_MEMORY_SEGMENT_SIZE).getBytes());
    int bufferDebloatThresholdPercentages = config.get(BUFFER_DEBLOAT_THRESHOLD_PERCENTAGES);
    final int numberOfSamples = config.get(BUFFER_DEBLOAT_SAMPLES);
    // Right now the buffer size can not be greater than integer max value according to
    // MemorySegment and buffer implementation.
    checkArgument(maxBufferSize > 0);
    checkArgument(minBufferSize > 0);
    checkArgument(numberOfSamples > 0);
    checkArgument(maxBufferSize >= minBufferSize);
    checkArgument(targetTotalBufferSize.toMillis() > 0.0);
    return new BufferDebloatConfiguration(
            config.get(TaskManagerOptions.BUFFER_DEBLOAT_ENABLED),
            targetTotalBufferSize,
            maxBufferSize,
            minBufferSize,
            bufferDebloatThresholdPercentages,
            numberOfSamples);
}
/** min segment size (50) > max segment size (49) must be rejected. */
@Test
public void testMinGreaterThanMaxBufferSize() {
    final Configuration config = new Configuration();
    config.set(TaskManagerOptions.MIN_MEMORY_SEGMENT_SIZE, new MemorySize(50));
    config.set(TaskManagerOptions.MEMORY_SEGMENT_SIZE, new MemorySize(49));
    assertThrows(
            IllegalArgumentException.class,
            () -> BufferDebloatConfiguration.fromConfiguration(config));
}
/**
 * CLI "server" command: with no argument prints the current server address; with one
 * argument switches to it, announces the change, resets the CLI for the new server,
 * and then validates connectivity. Validation is intentionally skipped when only
 * printing the current address.
 */
@Override
public void execute(final List<String> args, final PrintWriter terminal) {
    CliCmdUtil.ensureArgCountBounds(args, 0, 1, HELP);
    if (args.isEmpty()) {
        terminal.println(restClient.getServerAddress());
        return;
    }
    final String newServerAddress = args.get(0);
    restClient.setServerAddress(newServerAddress);
    terminal.println("Server now: " + newServerAddress);
    resetCliForNewServer.fire();
    validateClient(terminal, restClient);
}
/** Switching to a new server address must fire the CLI reset hook. */
@Test
public void shouldResetCliForNewServer() {
    // When:
    command.execute(ImmutableList.of(VALID_SERVER_ADDRESS), terminal);
    // Then:
    verify(resetCliForNewServer).fire();
}
/**
 * Adapts any GlobalCombineFn to a CombineFnWithContext. An instance that already is a
 * CombineFnWithContext is returned unchanged (the adapter is idempotent); a plain
 * CombineFn is wrapped in a context-ignoring delegate.
 */
public static <InputT, AccumT, OutputT> CombineFnWithContext<InputT, AccumT, OutputT> toFnWithContext(
        GlobalCombineFn<InputT, AccumT, OutputT> globalCombineFn) {
    if (globalCombineFn instanceof CombineFnWithContext) {
        @SuppressWarnings("unchecked")
        CombineFnWithContext<InputT, AccumT, OutputT> combineFnWithContext =
                (CombineFnWithContext<InputT, AccumT, OutputT>) globalCombineFn;
        return combineFnWithContext;
    } else {
        @SuppressWarnings("unchecked")
        final CombineFn<InputT, AccumT, OutputT> combineFn =
                (CombineFn<InputT, AccumT, OutputT>) globalCombineFn;
        // Delegate every operation to the wrapped CombineFn, dropping the Context argument.
        return new CombineFnWithContext<InputT, AccumT, OutputT>() {
            @Override
            public AccumT createAccumulator(Context c) {
                return combineFn.createAccumulator();
            }

            @Override
            public AccumT addInput(AccumT accumulator, InputT input, Context c) {
                return combineFn.addInput(accumulator, input);
            }

            @Override
            public AccumT mergeAccumulators(Iterable<AccumT> accumulators, Context c) {
                return combineFn.mergeAccumulators(accumulators);
            }

            @Override
            public OutputT extractOutput(AccumT accumulator, Context c) {
                return combineFn.extractOutput(accumulator);
            }

            @Override
            public AccumT compact(AccumT accumulator, Context c) {
                return combineFn.compact(accumulator);
            }

            @Override
            public OutputT defaultValue() {
                return combineFn.defaultValue();
            }

            @Override
            public Coder<AccumT> getAccumulatorCoder(CoderRegistry registry, Coder<InputT> inputCoder)
                    throws CannotProvideCoderException {
                return combineFn.getAccumulatorCoder(registry, inputCoder);
            }

            @Override
            public Coder<OutputT> getDefaultOutputCoder(
                    CoderRegistry registry, Coder<InputT> inputCoder)
                    throws CannotProvideCoderException {
                return combineFn.getDefaultOutputCoder(registry, inputCoder);
            }

            @Override
            public void populateDisplayData(DisplayData.Builder builder) {
                super.populateDisplayData(builder);
                combineFn.populateDisplayData(builder);
            }
        };
    }
}
@Test
public void testToFnWithContextIdempotent() throws Exception {
    // Wrap a plain CombineFn once to obtain a context-aware fn.
    CombineFnWithContext<Integer, int[], Integer> fnWithContext =
        CombineFnUtil.toFnWithContext(Sum.ofIntegers());
    // Converting an already context-aware fn must return the very same
    // instance (reference equality), i.e. no double wrapping.
    assertTrue(fnWithContext == CombineFnUtil.toFnWithContext(fnWithContext));
}
/**
 * Returns the exception represented by this state.
 *
 * <p>If the original exception object is still held, it is returned directly. Otherwise the
 * exception (and, when present, its cause) is reconstructed reflectively from the stored
 * type and message strings.
 *
 * @throws IllegalStateException if the exception class cannot be instantiated reflectively
 */
public Exception getException() {
    // Fast path: the live exception object is still available.
    if (exception != null) return exception;
    try {
        final Class<? extends Exception> exceptionClass = ReflectionUtils.toClass(getExceptionType());
        if (getExceptionCauseType() != null) {
            // Rebuild the cause first, with or without a message depending on what was stored.
            final Class<? extends Exception> exceptionCauseClass = ReflectionUtils.toClass(getExceptionCauseType());
            final Exception exceptionCause = getExceptionCauseMessage() != null
                    ? ReflectionUtils.newInstanceCE(exceptionCauseClass, getExceptionCauseMessage())
                    : ReflectionUtils.newInstanceCE(exceptionCauseClass);
            // The original stack trace is not persisted, so clear the freshly captured one.
            exceptionCause.setStackTrace(new StackTraceElement[]{});
            return getExceptionMessage() != null
                    ? ReflectionUtils.newInstanceCE(exceptionClass, getExceptionMessage(), exceptionCause)
                    : ReflectionUtils.newInstanceCE(exceptionClass, exceptionCause);
        } else {
            return getExceptionMessage() != null
                    ? ReflectionUtils.newInstanceCE(exceptionClass, getExceptionMessage())
                    : ReflectionUtils.newInstanceCE(exceptionClass);
        }
    } catch (ReflectiveOperationException e) {
        throw new IllegalStateException("Could not reconstruct exception for class " + getExceptionType() + " and message " + getExceptionMessage(), e);
    }
}
@Test
void getExceptionForJobMethodNotFoundException() {
    // Build a failed state, then null out the cached exception field so that
    // getException() is forced down the reflective reconstruction path.
    final FailedState failedState = new FailedState("JobRunr message", new JobMethodNotFoundException(jobDetails().build()));
    setInternalState(failedState, "exception", null);

    // The reconstructed exception must still have the original concrete type.
    assertThat(failedState.getException())
            .isInstanceOf(JobMethodNotFoundException.class);
}
@Override
public int solve(ArrayList<Character> elements) throws Exception {
    // Stub: always returns 0 regardless of input.
    // NOTE(review): presumably meant to evaluate the expression encoded in
    // `elements` — confirm whether this is intentionally unimplemented.
    return 0;
}
@Test
public void testSolve() throws Exception {
    // Expression "12+4*3+" fed token by token; the current implementation is
    // a stub, so the expected result is 0.
    Controlador controlador = new Controlador();
    ArrayList<Character> elements = new ArrayList<>();
    elements.add('1');
    elements.add('2');
    elements.add('+');
    elements.add('4');
    elements.add('*');
    elements.add('3');
    elements.add('+');
    int result = controlador.solve(elements);
    assertEquals(0, result);
}
/**
 * Builds the complete argument array for invoking a resource method.
 *
 * <p>The leading {@code positionalArguments} are copied as-is; every remaining declared
 * parameter is filled in from the request context according to its
 * {@code Parameter.ParamType} (path keys, headers, projections, paging, validators,
 * attachments, unstructured data, action/query parameters, ...). Parameters that cannot
 * be resolved fall back to their declared default value, {@code null} for optional
 * non-primitives, or a 400 routing error when required.
 *
 * @throws RoutingException on invalid/unknown/missing-required parameters (HTTP 400)
 * @throws RestLiServiceException on bad default values (HTTP 500) or unexpected
 *         request attachments (HTTP 400)
 */
@SuppressWarnings("deprecation")
static Object[] buildArgs(final Object[] positionalArguments,
                          final ResourceMethodDescriptor resourceMethod,
                          final ServerResourceContext context,
                          final DynamicRecordTemplate template,
                          final ResourceMethodConfig resourceMethodConfig)
{
  List<Parameter<?>> parameters = resourceMethod.getParameters();
  // Positional arguments occupy the first slots; the rest are resolved below.
  Object[] arguments = Arrays.copyOf(positionalArguments, parameters.size());
  fixUpComplexKeySingletonArraysInArguments(arguments);
  boolean attachmentsDesired = false;
  for (int i = positionalArguments.length; i < parameters.size(); ++i)
  {
    Parameter<?> param = parameters.get(i);
    try
    {
      if (param.getParamType() == Parameter.ParamType.KEY || param.getParamType() == Parameter.ParamType.ASSOC_KEY_PARAM)
      {
        Object value = context.getPathKeys().get(param.getName());
        if (value != null)
        {
          arguments[i] = value;
          continue;
        }
        // Missing path key falls through to the default/optional handling below.
      }
      else if (param.getParamType() == Parameter.ParamType.CALLBACK)
      {
        // Callback slot is populated by the invoker, not here.
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.PARSEQ_CONTEXT_PARAM || param.getParamType() == Parameter.ParamType.PARSEQ_CONTEXT)
      {
        continue; // don't know what to fill in yet
      }
      else if (param.getParamType() == Parameter.ParamType.HEADER)
      {
        HeaderParam headerParam = param.getAnnotations().get(HeaderParam.class);
        String value = context.getRequestHeaders().get(headerParam.value());
        arguments[i] = value;
        continue;
      }
      //Since we have multiple different types of MaskTrees that can be passed into resource methods,
      //we must evaluate based on the param type (annotation used)
      else if (param.getParamType() == Parameter.ParamType.PROJECTION || param.getParamType() == Parameter.ParamType.PROJECTION_PARAM)
      {
        arguments[i] = context.getProjectionMask();
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.METADATA_PROJECTION_PARAM)
      {
        arguments[i] = context.getMetadataProjectionMask();
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.PAGING_PROJECTION_PARAM)
      {
        arguments[i] = context.getPagingProjectionMask();
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.CONTEXT || param.getParamType() == Parameter.ParamType.PAGING_CONTEXT_PARAM)
      {
        // Paging context: merge request values over the parameter's declared default.
        PagingContext ctx = RestUtils.getPagingContext(context, (PagingContext) param.getDefaultValue());
        arguments[i] = ctx;
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.PATH_KEYS || param.getParamType() == Parameter.ParamType.PATH_KEYS_PARAM)
      {
        arguments[i] = context.getPathKeys();
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.PATH_KEY_PARAM)
      {
        Object value = context.getPathKeys().get(param.getName());
        if (value != null)
        {
          arguments[i] = value;
          continue;
        }
      }
      else if (param.getParamType() == Parameter.ParamType.RESOURCE_CONTEXT || param.getParamType() == Parameter.ParamType.RESOURCE_CONTEXT_PARAM)
      {
        arguments[i] = context;
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.VALIDATOR_PARAM)
      {
        // Fresh validator scoped to this resource's annotations, value class and method type.
        RestLiDataValidator validator = new RestLiDataValidator(resourceMethod.getResourceModel().getResourceClass().getAnnotations(),
            resourceMethod.getResourceModel().getValueClass(), resourceMethod.getMethodType());
        arguments[i] = validator;
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.RESTLI_ATTACHMENTS_PARAM)
      {
        arguments[i] = context.getRequestAttachmentReader();
        attachmentsDesired = true;
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.UNSTRUCTURED_DATA_WRITER_PARAM)
      {
        // The OutputStream is passed to the resource implementation in a synchronous call. Upon return of the
        // resource method, all the bytes would haven't written to the OutputStream. The EntityStream would have
        // contained all the bytes by the time data is requested. The ownership of the OutputStream is passed to
        // the ByteArrayOutputStreamWriter, which is responsible of closing the OutputStream if necessary.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        context.setResponseEntityStream(EntityStreams.newEntityStream(new ByteArrayOutputStreamWriter(out)));
        arguments[i] = new UnstructuredDataWriter(out, context);
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.UNSTRUCTURED_DATA_REACTIVE_READER_PARAM)
      {
        arguments[i] = new UnstructuredDataReactiveReader(context.getRequestEntityStream(), context.getRawRequest().getHeader(RestConstants.HEADER_CONTENT_TYPE));
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.POST)
      {
        // handle action parameters
        if (template != null)
        {
          DataMap data = template.data();
          if (data.containsKey(param.getName()))
          {
            arguments[i] = template.getValue(param);
            continue;
          }
        }
      }
      else if (param.getParamType() == Parameter.ParamType.QUERY)
      {
        Object value;
        // DataTemplate-typed query params are built from the structured form; others from the raw form.
        if (DataTemplate.class.isAssignableFrom(param.getType()))
        {
          value = buildDataTemplateArgument(context.getStructuredParameter(param.getName()), param,
              resourceMethodConfig.shouldValidateQueryParams());
        }
        else
        {
          value = buildRegularArgument(context, param, resourceMethodConfig.shouldValidateQueryParams());
        }
        if (value != null)
        {
          arguments[i] = value;
          continue;
        }
      }
      else if (param.getParamType() == Parameter.ParamType.BATCH || param.getParamType() == Parameter.ParamType.RESOURCE_KEY)
      {
        // should not come to this routine since it should be handled by passing in positionalArguments
        throw new RoutingException("Parameter '" + param.getName() + "' should be passed in as a positional argument",
            HttpStatus.S_400_BAD_REQUEST.getCode());
      }
      else
      {
        // unknown param type
        throw new RoutingException(
            "Parameter '" + param.getName() + "' has an unknown parameter type '" + param.getParamType().name() + "'",
            HttpStatus.S_400_BAD_REQUEST.getCode());
      }
    }
    catch (TemplateRuntimeException e)
    {
      throw new RoutingException("Parameter '" + param.getName() + "' is invalid", HttpStatus.S_400_BAD_REQUEST.getCode());
    }
    try
    {
      // Handling null-valued parameters not provided in resource context or entity body
      // check if it is optional parameter
      if (param.isOptional() && param.hasDefaultValue())
      {
        arguments[i] = param.getDefaultValue();
      }
      else if (param.isOptional() && !param.getType().isPrimitive())
      {
        // optional primitive parameter must have default value or provided
        arguments[i] = null;
      }
      else
      {
        throw new RoutingException("Parameter '" + param.getName() + "' is required", HttpStatus.S_400_BAD_REQUEST.getCode());
      }
    }
    catch (ResourceConfigException e)
    {
      // Parameter default value format exception should result in server error code 500.
      throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
          "Parameter '" + param.getName() + "' default value is invalid", e);
    }
  }
  //Verify that if the resource method did not expect attachments, and attachments were present, that we drain all
  //incoming attachments and send back a bad request. We must take precaution here since simply ignoring the request
  //attachments is not correct behavior here. Ignoring other request level constructs such as headers or query parameters
  //that were not needed is safe, but not for request attachments.
  if (!attachmentsDesired && context.getRequestAttachmentReader() != null)
  {
    throw new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST,
        "Resource method endpoint invoked does not accept any request attachments.");
  }
  return arguments;
}
@Test(dataProvider = "validateQueryParameter")
public void testQueryParameterValidation(String paramKey, Class<?> dataType, Object paramValue, boolean isValid, String errorMessage)
{
  // Build a single QUERY parameter of the given type and feed it through
  // buildArgs with validation enabled; valid inputs must succeed, invalid
  // ones must throw with the expected message.
  Parameter<?> param = new Parameter<>(paramKey, dataType, DataTemplateUtil.getSchema(dataType), false, null, Parameter.ParamType.QUERY, false, AnnotationSet.EMPTY);
  ServerResourceContext mockResourceContext = EasyMock.createMock(ServerResourceContext.class);
  EasyMock.expect(mockResourceContext.getRequestAttachmentReader()).andReturn(null).anyTimes();
  EasyMock.expect(mockResourceContext.getStructuredParameter(paramKey)).andReturn(paramValue).anyTimes();
  EasyMock.replay(mockResourceContext);
  List<Parameter<?>> parameters = Collections.singletonList(param);
  try
  {
    ArgumentBuilder.buildArgs(new Object[0], getMockResourceMethod(parameters), mockResourceContext, null, getMockResourceMethodConfig(true));
    // NOTE(review): bare Java `assert` is a no-op unless the JVM runs with -ea;
    // consider the test framework's assertion methods instead.
    assert(isValid);
  }
  catch (Exception e)
  {
    assert(!isValid);
    assert(e.getMessage().equals(errorMessage));
  }
}
/**
 * Looks up this field's value in the given trace context.
 *
 * @param context the trace context to read from; {@code null} yields {@code null}
 * @return the value associated with this field, or {@code null} when absent
 */
@Nullable
public String getValue(@Nullable TraceContext context) {
    if (context == null) return null;
    // Note: `this.context` is the field's backing storage strategy, distinct
    // from the `context` parameter being queried.
    return this.context.getValue(this, context);
}
@Test
void getValue_extracted_invalid() {
    // Passing a null TraceContextOrSamplingFlags (the extraction-based overload)
    // must fail fast with a NullPointerException rather than returning null.
    assertThatThrownBy(() -> REQUEST_ID.getValue((TraceContextOrSamplingFlags) null))
        .isInstanceOf(NullPointerException.class);
}
/**
 * Returns the matrix product A * A' of this matrix with its own transpose.
 *
 * @return an m-by-m matrix holding A * A'
 */
public BigMatrix aat() {
    BigMatrix C = new BigMatrix(m, m);
    // General matrix-multiply with the second operand transposed: C = A * A'.
    C.mm(NO_TRANSPOSE, this, TRANSPOSE, this);
    // A * A' is symmetric, so mark the result as lower-triangular storage.
    C.uplo(LOWER);
    return C;
}
@Test
public void testAAT() {
    System.out.println("AAT");
    // aat() of the 3-row fixture must be 3x3 and match the precomputed
    // reference matrix C element-by-element (within floating tolerance).
    BigMatrix c = matrix.aat();
    assertEquals(c.nrow(), 3);
    assertEquals(c.ncol(), 3);
    for (int i = 0; i < C.length; i++) {
        for (int j = 0; j < C[i].length; j++) {
            assertEquals(C[i][j], c.get(i, j), 1E-7);
        }
    }
}
/**
 * Whether this all-or-none value set represents "all values".
 * {@code false} means it represents "no values".
 */
@ThriftField(1)
public boolean isAll() {
    return all;
}
@Test
public void testFromValueSetNone() {
    // ValueSet.none(...) must convert to an all-or-none thrift value set
    // whose `all` flag is false (i.e. "no values").
    PrestoThriftValueSet thriftValueSet = fromValueSet(ValueSet.none(HYPER_LOG_LOG));
    assertNotNull(thriftValueSet.getAllOrNoneValueSet());
    assertFalse(thriftValueSet.getAllOrNoneValueSet().isAll());
}
/**
 * Deserializes the given XML content into an editable config, notifies the optional
 * callback with it, validates it, and returns both the validated and the editable
 * variants wrapped in a {@link GoConfigHolder}.
 */
public GoConfigHolder loadConfigHolder(final String content, Callback callback) throws Exception {
    LOGGER.debug("[Config Save] Loading config holder");
    // Deserialize the raw content into the config-for-edit representation.
    final CruiseConfig configForEdit = deserializeConfig(content);
    // Give the caller a chance to inspect/mutate the deserialized config before validation.
    if (callback != null) {
        callback.call(configForEdit);
    }
    // Preprocess and validate, producing the effective runtime config.
    final CruiseConfig validatedConfig = preprocessAndValidate(configForEdit);
    return new GoConfigHolder(validatedConfig, configForEdit);
}
@Test
void shouldLoadConfigWithConfigRepoAndPluginName() throws Exception {
    // A config containing a single <config-repo> with an explicit pluginId must
    // load into exactly one ConfigRepoConfig carrying that plugin id.
    CruiseConfig cruiseConfig = xmlLoader.loadConfigHolder(configWithConfigRepos(
        """
            <config-repos>
              <config-repo pluginId="myplugin" id="repo-id">
                <git url="https://github.com/tomzo/gocd-indep-config-part.git" />
              </config-repo >
            </config-repos>
            """
    )).config;
    assertThat(cruiseConfig.getConfigRepos().size()).isEqualTo(1);
    ConfigRepoConfig configRepo = cruiseConfig.getConfigRepos().get(0);
    assertThat(configRepo.getPluginId()).isEqualTo("myplugin");
}
/** Returns the scheduler used for query-cache maintenance tasks. */
@Override
public QueryCacheScheduler getQueryCacheScheduler() {
    return queryCacheScheduler;
}
@Test
public void testGetQueryCacheScheduler() {
    // The context must expose a usable scheduler: one-shot execution and
    // repeated scheduling must both actually run their tasks.
    QueryCacheScheduler scheduler = context.getQueryCacheScheduler();
    assertNotNull(scheduler);

    final QuerySchedulerTask task = new QuerySchedulerTask();
    scheduler.execute(task);

    final QuerySchedulerRepetitionTask repetitionTask = new QuerySchedulerRepetitionTask();
    scheduler.scheduleWithRepetition(repetitionTask, 1);

    // Eventually: the one-shot task ran and the repeated task fired more than once.
    assertTrueEventually(() -> {
        assertTrue(task.executed);
        assertTrue(repetitionTask.counter.get() > 1);
    });

    scheduler.shutdown();
}
/**
 * Returns the elements of this static array as a read-only list.
 * Static arrays cannot be modified, so an unmodifiable view is exposed
 * instead of the backing list itself.
 */
@Override
public List<T> getValue() {
    // Static arrays cannot be modified
    return Collections.unmodifiableList(value);
}
@Test
public void canBeInstantiatedWithLessThan32Elements() {
    // A StaticArray32 created from 32 Uints must report exactly 32 elements.
    // NOTE(review): the method name says "less than 32" but 32 elements are
    // passed — confirm whether the name or the fixture is intended.
    final StaticArray<Uint> array = new StaticArray32<>(arrayOfUints(32));
    // JUnit's assertEquals takes (expected, actual) — expected value first so
    // failure messages read correctly.
    assertEquals(32, array.getValue().size());
}
public static <T> T[] replaceFirst(T[] src, T oldValue, T[] newValues) { int index = indexOf(src, oldValue); if (index == -1) { return src; } T[] dst = (T[]) Array.newInstance(src.getClass().getComponentType(), src.length - 1 + newValues.length); // copy the first part till the match System.arraycopy(src, 0, dst, 0, index); // copy the second part from the match System.arraycopy(src, index + 1, dst, index + newValues.length, src.length - index - 1); // copy the newValues into the dst System.arraycopy(newValues, 0, dst, index, newValues.length); return dst; }
@Test
public void replace_whenInBeginning() {
    // Replacing the leading element 6 with {1, 2} must splice the new values
    // at the front while keeping the remaining elements intact.
    Integer[] result = replaceFirst(new Integer[]{6, 3, 4}, 6, new Integer[]{1, 2});
    // (removed leftover System.out.println debug output that polluted test logs)
    assertArrayEquals(new Integer[]{1, 2, 3, 4}, result);
}
/**
 * Queries an SMS template from Aliyun by its template code and maps it onto the
 * generic {@code SmsTemplateRespDTO}; returns {@code null} when the API reports failure.
 */
@Override
public SmsTemplateRespDTO getSmsTemplate(String apiTemplateId) throws Throwable {
    // 1. Execute the request.
    // Reference: https://api.aliyun.com/document/Dysmsapi/2017-05-25/QuerySmsTemplate
    TreeMap<String, Object> queryParam = new TreeMap<>();
    queryParam.put("TemplateCode", apiTemplateId);
    JSONObject response = request("QuerySmsTemplate", queryParam);

    // 2.1 Request failed: log the unexpected response and return null.
    String code = response.getStr("Code");
    if (ObjectUtil.notEqual(code, RESPONSE_CODE_SUCCESS)) {
        log.error("[getSmsTemplate][模版编号({}) 响应不正确({})]", apiTemplateId, response);
        return null;
    }
    // 2.2 Request succeeded: map the Aliyun payload fields onto the DTO.
    return new SmsTemplateRespDTO()
            .setId(response.getStr("TemplateCode"))
            .setContent(response.getStr("TemplateContent"))
            .setAuditStatus(convertSmsTemplateAuditStatus(response.getInt("TemplateStatus")))
            .setAuditReason(response.getStr("Reason"));
}
@Test
public void testGetSmsTemplate() throws Throwable {
    try (MockedStatic<HttpUtils> httpUtilsMockedStatic = mockStatic(HttpUtils.class)) {
        // Prepare arguments
        String apiTemplateId = randomString();
        // Mock the HTTP layer to return a canned successful QuerySmsTemplate response
        httpUtilsMockedStatic.when(() -> HttpUtils.post(anyString(), anyMap(), anyString()))
                .thenReturn("{\"TemplateCode\":\"SMS_207945135\",\"RequestId\":\"6F4CC077-29C8-5BA5-AB62-5FF95068A5AC\",\"Message\":\"OK\",\"TemplateContent\":\"您的验证码${code},该验证码5分钟内有效,请勿泄漏于他人!\",\"TemplateName\":\"公告通知\",\"TemplateType\":0,\"Code\":\"OK\",\"CreateDate\":\"2020-12-23 17:34:42\",\"Reason\":\"无审批备注\",\"TemplateStatus\":1}");
        // Invoke
        SmsTemplateRespDTO result = smsClient.getSmsTemplate(apiTemplateId);
        // Assert: every mapped field must come from the canned payload
        assertEquals("SMS_207945135", result.getId());
        assertEquals("您的验证码${code},该验证码5分钟内有效,请勿泄漏于他人!", result.getContent());
        assertEquals(SmsTemplateAuditStatusEnum.SUCCESS.getStatus(), result.getAuditStatus());
        assertEquals("无审批备注", result.getAuditReason());
    }
}
/**
 * Convenience overload of {@code retryUntilTimeout} that retries using the
 * system wall clock ({@code Time.SYSTEM}).
 *
 * @param callable        the operation to retry
 * @param description     supplies a human-readable description for error messages; may be null
 * @param timeoutDuration total time budget for retries
 * @param retryBackoffMs  delay between attempts in milliseconds
 */
public static <T> T retryUntilTimeout(Callable<T> callable, Supplier<String> description, Duration timeoutDuration, long retryBackoffMs) throws Exception {
    return retryUntilTimeout(callable, description, timeoutDuration, retryBackoffMs, Time.SYSTEM);
}
@Test
public void testSupplier() throws Exception {
    // Every attempt fails, so retryUntilTimeout must eventually give up with a
    // ConnectException whose message uses the supplied description — or the
    // generic "callable" fallback when the description (or its value) is null.
    Mockito.when(mockCallable.call()).thenThrow(new TimeoutException("timeout exception"));

    // Null supplier -> fallback description "callable".
    ConnectException e = assertThrows(ConnectException.class,
        () -> RetryUtil.retryUntilTimeout(mockCallable, null, Duration.ofMillis(100), 10, mockTime));
    assertTrue(e.getMessage().startsWith("Fail to callable"));

    // Supplier returning null -> same fallback.
    e = assertThrows(ConnectException.class,
        () -> RetryUtil.retryUntilTimeout(mockCallable, () -> null, Duration.ofMillis(100), 10, mockTime));
    assertTrue(e.getMessage().startsWith("Fail to callable"));

    // Non-null description is used verbatim in the failure message.
    e = assertThrows(ConnectException.class,
        () -> RetryUtil.retryUntilTimeout(mockCallable, () -> "execute lambda", Duration.ofMillis(500), 10, mockTime));
    assertTrue(e.getMessage().startsWith("Fail to execute lambda"));

    // The callable must have been retried several times across the runs.
    Mockito.verify(mockCallable, Mockito.atLeast(3)).call();
}
/**
 * Returns the largest decimal value representable with the given precision,
 * i.e. {@code precision} nines: 10^precision - 1.
 *
 * @param precision the number of significant digits (must be positive)
 * @return {@code 10^precision - 1} as a {@link BigDecimal}
 */
public static BigDecimal generateMaximumNumberWithPrecision(int precision) {
    // Use the shared BigDecimal.TEN / BigDecimal.ONE constants instead of
    // allocating new instances through the String constructor.
    return BigDecimal.TEN.pow(precision).subtract(BigDecimal.ONE);
}
@Test
public void testGenerateMaximumNumberWithPrecision() {
    // For each precision p the generated value must have exactly p digits,
    // and adding 1 must roll it over to p + 1 digits (i.e. it is 10^p - 1).
    int[] testCases = { 1, 3, 10, 38, 128 };
    for (int precision : testCases) {
        BigDecimal bd = BigDecimalUtils.generateMaximumNumberWithPrecision(precision);
        assertEquals(bd.precision(), precision);
        assertEquals(bd.add(new BigDecimal("1")).precision(), precision + 1);
    }
}
/**
 * Evicts the least-recently-used entry (the tail) from the cache,
 * flushing it first if it holds unpersisted (dirty) data.
 */
synchronized void evict() {
    // Nothing to evict from an empty cache.
    if (tail == null) {
        return;
    }
    // The tail node is the least-recently-used entry.
    final LRUNode eldest = tail;
    currentSizeBytes -= eldest.size();
    remove(eldest);
    cache.remove(eldest.key);
    // A dirty entry must be flushed before being dropped so pending writes are not lost.
    if (eldest.entry.isDirty()) {
        flush(eldest);
    }
    totalCacheSizeSensor.record(currentSizeBytes);
}
@Test
public void shouldNotThrowNullPointerWhenCacheIsEmptyAndEvictionCalled() {
    // Evicting from an empty cache must be a safe no-op (no NullPointerException).
    cache.evict();
}
/**
 * Builds a {@code BroadcastRule} from the given configuration, database name,
 * available data sources and previously built rules. The protocol type and
 * compute node instance context are not used by this builder.
 */
@Override
public BroadcastRule build(final BroadcastRuleConfiguration ruleConfig, final String databaseName, final DatabaseType protocolType,
                           final ResourceMetaData resourceMetaData, final Collection<ShardingSphereRule> builtRules, final ComputeNodeInstanceContext computeNodeInstanceContext) {
    return new BroadcastRule(ruleConfig, databaseName, resourceMetaData.getDataSourceMap(), builtRules);
}
@SuppressWarnings({"rawtypes", "unchecked"})
@Test
void assertBuild() {
    // Resolve the builder via SPI keyed on the configuration type, then verify
    // it produces a BroadcastRule instance.
    BroadcastRuleConfiguration ruleConfig = mock(BroadcastRuleConfiguration.class);
    DatabaseRuleBuilder builder = OrderedSPILoader.getServices(DatabaseRuleBuilder.class, Collections.singleton(ruleConfig)).get(ruleConfig);
    assertThat(builder.build(ruleConfig, "", new MockedDatabaseType(), mock(ResourceMetaData.class), Collections.emptyList(), mock(ComputeNodeInstanceContext.class)),
            instanceOf(BroadcastRule.class));
}
/**
 * Looks up a data row in the pre-loaded cache matching the given lookup row.
 *
 * <p>Index-backed conditions narrow the candidate set first; the remaining
 * non-indexed conditions (currently only BETWEEN is supported) are then checked
 * candidate by candidate. Returns the first matching data row, or {@code null}
 * when nothing matches or an unsupported DB-side condition was encountered.
 */
@Override
public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException {
  if ( stepData.hasDBCondition ) {
    // actually, there was no sense in executing SELECT from db in this case,
    // should be reported as improvement
    return null;
  }

  // Phase 1: apply every index restriction to narrow down the candidate rows.
  SearchingContext context = new SearchingContext();
  context.init( keys.length );
  for ( Index index : indexes ) {
    int column = index.getColumn();
    // IS (NOT) NULL operation does not require second argument
    // hence, lookupValue can be absent
    // basically, the index ignores both meta and value, so we can pass everything there
    Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null;
    index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue );
    if ( context.isEmpty() ) {
      // if nothing matches, break the search
      return null;
    }
  }

  // iterate through all elements survived after filtering stage
  // and find the first matching
  BitSet candidates = context.getCandidates();
  int candidate = candidates.nextSetBit( 0 );
  while ( candidate != -1 ) {
    Object[] dataKeys = keys[ candidate ];

    boolean matches = true;
    // lookupShift compensates for conditions (BETWEEN) that consume two lookup values.
    int lookupShift = 0;
    for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) {
      int[] columnConditionPair = otherConditions[ i ];
      final int column = columnConditionPair[ 0 ];
      Object keyData = dataKeys[ column ];
      ValueMetaInterface keyMeta = keysMeta.getValueMeta( column );

      int lookupIndex = column + lookupShift;
      Object cmpData = lookupRow[ lookupIndex ];
      ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex );

      int condition = columnConditionPair[ 1 ];
      if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) {
        // BETWEEN is a special condition demanding two arguments
        // technically there are no obstacles to implement it,
        // as it is just a short form of: (a <= b) && (b <= c)
        // however, let it be so for now
        matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 );
        if ( matches ) {
          lookupShift++;
          lookupIndex++;
          ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex );
          Object cmpData2 = lookupRow[ lookupIndex ];
          matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 );
        }
      } else {
        // if not BETWEEN, than it is LIKE (or some new operator)
        // for now, LIKE is not supported here
        matches = false;
        stepData.hasDBCondition = true;
      }
    }

    if ( matches ) {
      return data[ candidate ];
    } else {
      candidate = candidates.nextSetBit( candidate + 1 );
    }
  }
  return null;
}
@Test
public void lookup_Finds_WithBetweenOperator() throws Exception {
  // BETWEEN consumes two lookup values (lower and upper bound), so the lookup
  // meta needs one extra field compared to the key meta.
  RowMeta meta = keysMeta.clone();
  meta.setValueMeta( 3, new ValueMetaDate() );
  meta.addValueMeta( new ValueMetaInteger() );

  ReadAllCache cache = buildCache( "<>,IS NOT NULL,BETWEEN,IS NULL" );
  // Row 4 is the only row whose key[2] lies within [140, 160].
  Object[] found = cache.getRowFromCache(
    meta, new Object[] { -1L, null, new Date( 140 ), new Date( 160 ), null } );
  assertArrayEquals( "(140 <= keys[2] <= 160) --> row 4", data[ 4 ], found );
}
/**
 * Returns all installed vulnerability detectors matched against the given
 * reconnaissance report.
 *
 * <p>Non-detector plugins are filtered out; each remaining detector is matched
 * (an empty match is dropped via the Optional-to-Stream flattening) and the
 * results are collected into an immutable list.
 */
public ImmutableList<PluginMatchingResult<VulnDetector>> getVulnDetectors(
    ReconnaissanceReport reconnaissanceReport) {
  return tsunamiPlugins.entrySet().stream()
      .filter(entry -> isVulnDetector(entry.getKey()))
      .map(entry -> matchAllVulnDetectors(entry.getKey(), entry.getValue(), reconnaissanceReport))
      .flatMap(Streams::stream)
      .collect(toImmutableList());
}
@Test
public void getVulnDetectors_whenNoVulnDetectorsInstalled_returnsEmptyList() {
    // A reconnaissance report with two services but a plugin manager wired with
    // only a port scanner and a fingerprinter (no detectors) must yield no matches.
    NetworkService fakeNetworkService1 =
        NetworkService.newBuilder()
            .setNetworkEndpoint(NetworkEndpointUtils.forIpAndPort("1.1.1.1", 80))
            .setTransportProtocol(TransportProtocol.TCP)
            .setServiceName("http")
            .build();
    NetworkService fakeNetworkService2 =
        NetworkService.newBuilder()
            .setNetworkEndpoint(NetworkEndpointUtils.forIpAndPort("1.1.1.1", 443))
            .setTransportProtocol(TransportProtocol.TCP)
            .setServiceName("https")
            .build();
    ReconnaissanceReport fakeReconnaissanceReport =
        ReconnaissanceReport.newBuilder()
            .setTargetInfo(TargetInfo.getDefaultInstance())
            .addNetworkServices(fakeNetworkService1)
            .addNetworkServices(fakeNetworkService2)
            .build();
    PluginManager pluginManager =
        Guice.createInjector(
                new FakePortScannerBootstrapModule(),
                new FakeServiceFingerprinterBootstrapModule())
            .getInstance(PluginManager.class);

    assertThat(pluginManager.getVulnDetectors(fakeReconnaissanceReport)).isEmpty();
}
/**
 * Returns the field-value setters for the given type/schema pair, ordered by
 * the fields' positions in the schema. Results are memoized per
 * (type, schema) key, so the reflective setter creation happens only once.
 */
public static List<FieldValueSetter> getSetters(
    TypeDescriptor<?> typeDescriptor,
    Schema schema,
    FieldValueTypeSupplier fieldValueTypeSupplier,
    TypeConversionsFactory typeConversionsFactory) {
  // Return the setters, ordered by their position in the schema.
  return CACHED_SETTERS.computeIfAbsent(
      TypeDescriptorWithSchema.create(typeDescriptor, schema),
      c -> {
        List<FieldValueTypeInformation> types = fieldValueTypeSupplier.get(typeDescriptor, schema);
        return types.stream()
            .map(t -> createSetter(t, typeConversionsFactory))
            .collect(Collectors.toList());
      });
}
@Test
public void testGeneratedByteBufferSetters() {
    // The generated setters must accept raw byte[] input for both the
    // byte[]-typed field and the ByteBuffer-typed field of the POJO.
    POJOWithByteArray pojo = new POJOWithByteArray();
    List<FieldValueSetter> setters =
        POJOUtils.getSetters(
            new TypeDescriptor<POJOWithByteArray>() {},
            POJO_WITH_BYTE_ARRAY_SCHEMA,
            JavaFieldTypeSupplier.INSTANCE,
            new DefaultTypeConversionsFactory());
    setters.get(0).set(pojo, BYTE_ARRAY);
    setters.get(1).set(pojo, BYTE_BUFFER.array());

    assertArrayEquals("not equal", BYTE_ARRAY, pojo.bytes1);
    // The byte[] fed to the second setter must round-trip back to the ByteBuffer field.
    assertEquals(BYTE_BUFFER, pojo.bytes2);
}
/**
 * Filters the candidate instances by the configured routing rules for the
 * given target, then delegates the (possibly reduced) list to the next
 * handler in the chain. When the instance list should not be handled at all,
 * it is returned untouched.
 */
@Override
public List<Object> handle(String targetName, List<Object> instances, RequestData requestData) {
    if (shouldHandle(instances)) {
        // Narrow the candidates by rule matching before passing them on.
        List<Object> filtered = getTargetInstancesByRules(targetName, instances);
        return super.handle(targetName, filtered, requestData);
    }
    return instances;
}
@Test
public void testGetTargetInstancesByTagRulesWithPolicySceneThree() {
    // Scene: AZ tag-match rule with a trigger threshold and min-all-instances
    // policy; with the app pinned to zone az1, routing must keep the 3
    // instances selected by the rule out of the 5 candidates.
    RuleInitializationUtils.initAZTagMatchTriggerThresholdMinAllInstancesPolicyRule();
    List<Object> instances = new ArrayList<>();
    ServiceInstance instance1 = TestDefaultServiceInstance.getTestDefaultServiceInstance("1.0.0", "az1");
    ServiceInstance instance2 = TestDefaultServiceInstance.getTestDefaultServiceInstance("1.0.0", "az2");
    ServiceInstance instance3 = TestDefaultServiceInstance.getTestDefaultServiceInstance("1.0.1", "az1");
    ServiceInstance instance4 = TestDefaultServiceInstance.getTestDefaultServiceInstance("1.0.1", "az2");
    ServiceInstance instance5 = TestDefaultServiceInstance.getTestDefaultServiceInstance("1.0.2", "az1");
    instances.add(instance1);
    instances.add(instance2);
    instances.add(instance3);
    instances.add(instance4);
    instances.add(instance5);
    // Pin the current application to zone az1.
    Map<String, String> metadata = new HashMap<>();
    metadata.put("zone", "az1");
    AppCache.INSTANCE.setMetadata(metadata);
    List<Object> targetInstances = tagRouteHandler.handle("foo", instances, new RequestData(null, null, null));
    assertEquals(3, targetInstances.size());
    // Clean up the rule so subsequent tests start from an empty configuration.
    ConfigCache.getLabel(RouterConstant.SPRING_CACHE_NAME).resetRouteRule(Collections.emptyMap());
}
/**
 * Resolves the Java type for a JSON schema node.
 *
 * <p>Resolution order matters: object-like nodes (declared "object" or carrying
 * properties) go to the object rule; an explicit {@code existingJavaType} wins
 * next; then the primitive schema types map to their Java counterparts; anything
 * else falls back to {@code Object}. Finally, "format" (or string "media")
 * refinements are applied only when no explicit Java type was specified.
 */
@Override
public JType apply(String nodeName, JsonNode node, JsonNode parent, JClassContainer jClassContainer, Schema schema) {
    String propertyTypeName = getTypeName(node);
    JType type;
    // Treat as an object when declared so, or when properties are present even
    // without an explicit type (&& binds tighter than ||, so "object" alone suffices).
    if (propertyTypeName.equals("object") || node.has("properties") && node.path("properties").size() > 0) {
        type = ruleFactory.getObjectRule().apply(nodeName, node, parent, jClassContainer.getPackage(), schema);
    } else if (node.has("existingJavaType")) {
        // An explicit existing Java type overrides schema-based resolution.
        String typeName = node.path("existingJavaType").asText();
        if (isPrimitive(typeName, jClassContainer.owner())) {
            type = primitiveType(typeName, jClassContainer.owner());
        } else {
            type = resolveType(jClassContainer, typeName);
        }
    } else if (propertyTypeName.equals("string")) {
        type = jClassContainer.owner().ref(String.class);
    } else if (propertyTypeName.equals("number")) {
        type = getNumberType(jClassContainer.owner(), ruleFactory.getGenerationConfig());
    } else if (propertyTypeName.equals("integer")) {
        type = getIntegerType(jClassContainer.owner(), node, ruleFactory.getGenerationConfig());
    } else if (propertyTypeName.equals("boolean")) {
        type = unboxIfNecessary(jClassContainer.owner().ref(Boolean.class), ruleFactory.getGenerationConfig());
    } else if (propertyTypeName.equals("array")) {
        type = ruleFactory.getArrayRule().apply(nodeName, node, parent, jClassContainer.getPackage(), schema);
    } else {
        // Unknown or unspecified type falls back to Object.
        type = jClassContainer.owner().ref(Object.class);
    }
    // "format"/"media" refinements only apply when no explicit Java type was given.
    if (!node.has("javaType") && !node.has("existingJavaType") && node.has("format")) {
        type = ruleFactory.getFormatRule().apply(nodeName, node.get("format"), node, type, schema);
    } else if (!node.has("javaType") && !node.has("existingJavaType") && propertyTypeName.equals("string") && node.has("media")) {
        type = ruleFactory.getMediaRule().apply(nodeName, node.get("media"), node, type, schema);
    }
    return type;
}
@Test
public void applyGeneratesString() {
    // A schema node of type "string" must resolve to java.lang.String.
    JPackage jpackage = new JCodeModel()._package(getClass().getPackage().getName());
    ObjectNode objectNode = new ObjectMapper().createObjectNode();
    objectNode.put("type", "string");
    JType result = rule.apply("fooBar", objectNode, null, jpackage, null);
    assertThat(result.fullName(), is(String.class.getName()));
}
/**
 * Converts a SeaTunnel {@code Column} definition back into an IRIS column type
 * definition, clamping lengths, precision and scale to the limits IRIS supports
 * and logging a warning whenever a value had to be adjusted.
 *
 * @throws org.apache.seatunnel.common.exception.SeaTunnelRuntimeException for SQL types
 *         that have no IRIS mapping
 */
@Override
public BasicTypeDefine reconvert(Column column) {
    BasicTypeDefine.BasicTypeDefineBuilder builder =
            BasicTypeDefine.builder()
                    .name(column.getName())
                    .precision(column.getColumnLength())
                    .length(column.getColumnLength())
                    .nullable(column.isNullable())
                    .comment(column.getComment())
                    .scale(column.getScale())
                    .defaultValue(column.getDefaultValue());
    switch (column.getDataType().getSqlType()) {
        case NULL:
            builder.columnType(IRIS_NULL);
            builder.dataType(IRIS_NULL);
            break;
        case STRING:
            // Unknown/zero length -> max-size VARCHAR; oversized -> LONG VARCHAR.
            if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
                builder.columnType(String.format("%s(%s)", IRIS_VARCHAR, MAX_VARCHAR_LENGTH));
                builder.dataType(IRIS_VARCHAR);
            } else if (column.getColumnLength() < MAX_VARCHAR_LENGTH) {
                builder.columnType(
                        String.format("%s(%s)", IRIS_VARCHAR, column.getColumnLength()));
                builder.dataType(IRIS_VARCHAR);
            } else {
                builder.columnType(IRIS_LONG_VARCHAR);
                builder.dataType(IRIS_LONG_VARCHAR);
            }
            break;
        case BOOLEAN:
            builder.columnType(IRIS_BIT);
            builder.dataType(IRIS_BIT);
            break;
        case TINYINT:
            builder.columnType(IRIS_TINYINT);
            builder.dataType(IRIS_TINYINT);
            break;
        case SMALLINT:
            builder.columnType(IRIS_SMALLINT);
            builder.dataType(IRIS_SMALLINT);
            break;
        case INT:
            builder.columnType(IRIS_INTEGER);
            builder.dataType(IRIS_INTEGER);
            break;
        case BIGINT:
            builder.columnType(IRIS_BIGINT);
            builder.dataType(IRIS_BIGINT);
            break;
        case FLOAT:
            builder.columnType(IRIS_FLOAT);
            builder.dataType(IRIS_FLOAT);
            break;
        case DOUBLE:
            builder.columnType(IRIS_DOUBLE);
            builder.dataType(IRIS_DOUBLE);
            break;
        case DECIMAL:
            // Clamp precision/scale into the range IRIS supports, warning on each adjustment.
            DecimalType decimalType = (DecimalType) column.getDataType();
            long precision = decimalType.getPrecision();
            int scale = decimalType.getScale();
            if (scale < 0) {
                scale = 0;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which is scale less than 0, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        precision,
                        scale);
            } else if (scale > MAX_SCALE) {
                scale = MAX_SCALE;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which exceeds the maximum scale of {}, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        MAX_SCALE,
                        precision,
                        scale);
            }
            // Precision can never be smaller than scale.
            if (precision < scale) {
                precision = scale;
            }
            if (precision <= 0) {
                precision = DEFAULT_PRECISION;
                scale = DEFAULT_SCALE;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which is precision less than 0, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        precision,
                        scale);
            } else if (precision > MAX_PRECISION) {
                scale = MAX_SCALE;
                precision = MAX_PRECISION;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which exceeds the maximum precision of {}, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        MAX_PRECISION,
                        precision,
                        scale);
            }
            builder.columnType(String.format("%s(%s,%s)", IRIS_DECIMAL, precision, scale));
            builder.dataType(IRIS_DECIMAL);
            builder.precision(precision);
            builder.scale(scale);
            break;
        case BYTES:
            // Same length policy as STRING but for binary columns.
            if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
                builder.columnType(IRIS_LONG_BINARY);
                builder.dataType(IRIS_LONG_BINARY);
            } else if (column.getColumnLength() < MAX_BINARY_LENGTH) {
                builder.dataType(IRIS_BINARY);
                builder.columnType(
                        String.format("%s(%s)", IRIS_BINARY, column.getColumnLength()));
            } else {
                builder.columnType(IRIS_LONG_BINARY);
                builder.dataType(IRIS_LONG_BINARY);
            }
            break;
        case DATE:
            builder.columnType(IRIS_DATE);
            builder.dataType(IRIS_DATE);
            break;
        case TIME:
            builder.dataType(IRIS_TIME);
            // A positive scale is clamped to the maximum IRIS time scale.
            if (Objects.nonNull(column.getScale()) && column.getScale() > 0) {
                Integer timeScale = column.getScale();
                if (timeScale > MAX_TIME_SCALE) {
                    timeScale = MAX_TIME_SCALE;
                    log.warn(
                            "The time column {} type time({}) is out of range, "
                                    + "which exceeds the maximum scale of {}, "
                                    + "it will be converted to time({})",
                            column.getName(),
                            column.getScale(),
                            MAX_TIME_SCALE,
                            timeScale);
                }
                builder.columnType(String.format("%s(%s)", IRIS_TIME, timeScale));
                builder.scale(timeScale);
            } else {
                builder.columnType(IRIS_TIME);
            }
            break;
        case TIMESTAMP:
            builder.columnType(IRIS_TIMESTAMP2);
            builder.dataType(IRIS_TIMESTAMP2);
            break;
        default:
            // No IRIS mapping for this SQL type.
            throw CommonError.convertToConnectorTypeError(
                    DatabaseIdentifier.IRIS,
                    column.getDataType().getSqlType().name(),
                    column.getName());
    }
    return builder.build();
}
@Test
public void testReconvertFloat() {
    // A SeaTunnel FLOAT column must map straight to the IRIS FLOAT type,
    // preserving the column name.
    Column column = PhysicalColumn.builder().name("test").dataType(BasicType.FLOAT_TYPE).build();
    BasicTypeDefine typeDefine = IrisTypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(IrisTypeConverter.IRIS_FLOAT, typeDefine.getColumnType());
    Assertions.assertEquals(IrisTypeConverter.IRIS_FLOAT, typeDefine.getDataType());
}
/**
 * Deserializes the raw Kafka record value into a Connect string value.
 *
 * <p>The schema is always {@link Schema#OPTIONAL_STRING_SCHEMA}; a null
 * payload therefore yields a null value, which the optional schema permits.
 *
 * @throws DataException if the underlying string deserializer fails
 */
@Override
public SchemaAndValue toConnectData(String topic, byte[] value) {
    final String deserialized;
    try {
        deserialized = deserializer.deserialize(topic, value);
    } catch (SerializationException e) {
        throw new DataException("Failed to deserialize string: ", e);
    }
    return new SchemaAndValue(Schema.OPTIONAL_STRING_SCHEMA, deserialized);
}
@Test
public void testBytesNullToString() {
    // A null payload must round-trip to a null value under the optional
    // string schema rather than throwing.
    final SchemaAndValue result = converter.toConnectData(TOPIC, null);
    assertEquals(Schema.OPTIONAL_STRING_SCHEMA, result.schema());
    assertNull(result.value());
}
/**
 * Merges the freshly registered URIs into the selector's existing upstream
 * handle, submits only the genuinely new upstreams for health checking, and
 * returns the updated handle as JSON.
 */
@Override
protected String buildHandle(final List<URIRegisterDTO> uriList, final SelectorDO selectorDO) {
    final List<WebSocketUpstream> incoming = buildWebSocketUpstreamList(uriList);
    final List<WebSocketUpstream> existing =
            GsonUtils.getInstance().fromCurrentList(selectorDO.getHandle(), WebSocketUpstream.class);
    // No previous handle: every incoming upstream is new.
    if (CollectionUtils.isEmpty(existing)) {
        final String handleJson = GsonUtils.getInstance().toJson(incoming);
        doSubmit(selectorDO.getId(), incoming);
        return handleJson;
    }
    // Otherwise submit only the upstreams not already present in the handle.
    final List<WebSocketUpstream> toSubmit = new CopyOnWriteArrayList<>();
    final List<WebSocketUpstream> newcomers = incoming.stream()
            .filter(upstream -> !existing.contains(upstream))
            .collect(Collectors.toList());
    if (CollectionUtils.isNotEmpty(newcomers)) {
        toSubmit.addAll(newcomers);
        existing.addAll(newcomers);
    }
    final String handleJson = GsonUtils.getInstance().toJson(existing);
    doSubmit(selectorDO.getId(), toSubmit);
    return handleJson;
}
@Test
public void testBuildHandle() {
    // Register a single URI (host:8080); buildWebSocketUpstreamList will turn
    // it into one upstream entry.
    URIRegisterDTO dto = new URIRegisterDTO();
    dto.setPort(8080);
    dto.setHost("host");
    dto.setProtocol("http");
    List<URIRegisterDTO> uriList = Collections.singletonList(dto);
    // Mocked selector with a null handle -> the "no existing upstreams" branch.
    SelectorDO selectorDO = mock(SelectorDO.class);
    when(upstreamCheckService.checkAndSubmit(any(), any())).thenReturn(true);
    // NOTE(review): this builder result is discarded — presumably here only to
    // mirror the expected upstream shape; confirm it is not dead code.
    WebSocketUpstream.builder().host("localhost").protocol("http").upstreamUrl("host:8080").weight(50).warmup(10)
            .status(true).build();
    // Case 1: empty existing handle — the returned JSON holds exactly the new
    // upstream, with defaults (warmup 600000.0 per Gson's double decoding).
    List result = GsonUtils.getInstance().fromJson(shenyuClientRegisterWebSocketService.buildHandle(uriList, selectorDO), List.class);
    LinkedTreeMap<String, Object> webSocketUpstreamResult = (LinkedTreeMap<String, Object>) result.get(0);
    assertEquals(webSocketUpstreamResult.get("host"), "localhost");
    assertEquals(webSocketUpstreamResult.get("url"), "host:8080");
    assertEquals(webSocketUpstreamResult.get("weight"), 50.0);
    assertEquals(webSocketUpstreamResult.get("warmup"), 600000.0);
    assertEquals(webSocketUpstreamResult.get("upstreamHost"), "localhost");
    assertEquals(webSocketUpstreamResult.get("status"), true);
    // Case 2: the selector already holds a different upstream (host:8090) —
    // the merge must keep it first and append the newly registered host:8080.
    WebSocketUpstream webSocketUpstreamnew = WebSocketUpstream.builder().host("localhost").protocol("http").upstreamUrl("host:8090").weight(50).warmup(10)
            .status(true).build();
    SelectorDO selectorDO1 = new SelectorDO();
    selectorDO1.setHandle(GsonUtils.getGson().toJson(Collections.singletonList(webSocketUpstreamnew)));
    result = GsonUtils.getInstance().fromJson(shenyuClientRegisterWebSocketService.buildHandle(uriList, selectorDO1), List.class);
    // Pre-existing upstream is preserved verbatim (warmup stays 10.0).
    webSocketUpstreamResult = (LinkedTreeMap<String, Object>) result.get(0);
    assertEquals(webSocketUpstreamResult.get("host"), "localhost");
    assertEquals(webSocketUpstreamResult.get("url"), "host:8090");
    assertEquals(webSocketUpstreamResult.get("weight"), 50.0);
    assertEquals(webSocketUpstreamResult.get("warmup"), 10.0);
    assertEquals(webSocketUpstreamResult.get("upstreamHost"), "localhost");
    assertEquals(webSocketUpstreamResult.get("status"), true);
    // Newly registered upstream is appended with the default warmup.
    webSocketUpstreamResult = (LinkedTreeMap<String, Object>) result.get(1);
    assertEquals(webSocketUpstreamResult.get("host"), "localhost");
    assertEquals(webSocketUpstreamResult.get("url"), "host:8080");
    assertEquals(webSocketUpstreamResult.get("weight"), 50.0);
    assertEquals(webSocketUpstreamResult.get("warmup"), 600000.0);
    assertEquals(webSocketUpstreamResult.get("upstreamHost"), "localhost");
    assertEquals(webSocketUpstreamResult.get("status"), true);
}
/**
 * One-shot migration: converts every legacy saved search into a (view, search)
 * pair, persists the pairs, and records the saved-search-id to view-id mapping
 * so the migration is not re-run.
 */
@Override
public void upgrade() {
    // Idempotence guard: skip if a previous run already recorded completion.
    if (hasBeenRunSuccessfully()) {
        LOG.debug("Migration already completed.");
        return;
    }
    final Map<String, String> savedSearchToViewsMap = new HashMap<>();
    // The map() lambda intentionally side-effects savedSearchToViewsMap while
    // building the view/search pairs, so both are produced in a single pass.
    // NOTE(review): the stream from streamAll() is never closed — confirm
    // whether it wraps a DB cursor that needs try-with-resources.
    final Map<View, Search> newViews = this.savedSearchService.streamAll()
            .map(savedSearch -> {
                final Map.Entry<View, Search> newView = migrateSavedSearch(savedSearch);
                savedSearchToViewsMap.put(savedSearch.id(), newView.getKey().id());
                return newView;
            })
            // Collectors.toMap throws on duplicate View keys; views are
            // presumably unique per saved search.
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    // Persist views and searches only after the whole conversion succeeded.
    newViews.forEach((view, search) -> {
        viewService.save(view);
        searchService.save(search);
    });
    // Record completion last so a crash above leaves the migration re-runnable.
    final MigrationCompleted migrationCompleted = MigrationCompleted.create(savedSearchToViewsMap);
    writeMigrationCompleted(migrationCompleted);
}
@Test
public void runsIfNoSavedSearchesArePresent() {
    // Smoke test: upgrade() must complete without throwing when the
    // saved-search collection is empty.
    migration.upgrade();
}
/**
 * Parses a single raw CSV cell into the Java value dictated by the field's
 * schema type.
 *
 * @throws IllegalArgumentException if the cell cannot be parsed as the
 *     field's type (the field name is appended for context)
 * @throws UnsupportedOperationException for schema types with no built-in
 *     CSV parsing
 */
static Object parseCell(String cell, Schema.Field field) {
    Schema.FieldType type = field.getType();
    try {
        switch (type.getTypeName()) {
            case BOOLEAN:
                return Boolean.parseBoolean(cell);
            case BYTE:
                return Byte.parseByte(cell);
            case DATETIME:
                // NOTE(review): assumes Instant.parse failures surface as
                // IllegalArgumentException (true for Joda-Time's Instant) —
                // confirm the Instant import; java.time.Instant would throw
                // DateTimeParseException and bypass the catch below.
                return Instant.parse(cell);
            case DECIMAL:
                return new BigDecimal(cell);
            case DOUBLE:
                return Double.parseDouble(cell);
            case FLOAT:
                return Float.parseFloat(cell);
            case INT16:
                return Short.parseShort(cell);
            case INT32:
                return Integer.parseInt(cell);
            case INT64:
                return Long.parseLong(cell);
            case STRING:
                return cell;
            default:
                throw new UnsupportedOperationException(
                    "Unsupported type: " + type + ", consider using withCustomRecordParsing");
        }
    } catch (IllegalArgumentException e) {
        // Re-throw with the field name so callers can pinpoint the bad column.
        throw new IllegalArgumentException(
            e.getMessage() + " field " + field.getName() + " was received -- type mismatch");
    }
}
@Test
public void givenIntegerWithSurroundingSpaces_throws() {
    // Integer.parseInt rejects surrounding whitespace, so the helper must
    // surface the failure as an IllegalArgumentException carrying the
    // offending field's name.
    String paddedCell = " 12 ";
    Schema schema = Schema.builder().addInt32Field("an_integer").addStringField("a_string").build();
    IllegalArgumentException e =
        assertThrows(
            IllegalArgumentException.class,
            () -> CsvIOParseHelpers.parseCell(paddedCell, schema.getField("an_integer")));
    assertEquals(
        "For input string: \""
            + paddedCell
            + "\" field "
            + schema.getField("an_integer").getName()
            + " was received -- type mismatch",
        e.getMessage());
}