focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Wraps a checked {@code Func} so callers only see unchecked exceptions.
 * Delegates to the two-arg overload, wrapping any checked exception in a
 * {@link RuntimeException}.
 */
public static <P, R> FuncRt<P, R> uncheck(Func<P, R> expression) { return uncheck(expression, RuntimeException::new); }
// Verifies that CheckedUtil.uncheck converts the checked FileNotFoundException
// (thrown for a missing file) into an unchecked RuntimeException.
@SuppressWarnings("ConstantConditions") @Test public void supplierTest() { File noFile = new File("./no-file"); try { // This line originally required a checked exception; now it only throws a runtime exception
CheckedUtil.uncheck(() -> new FileInputStream(noFile)).call(); } catch (Exception re) { assertTrue(re instanceof RuntimeException); } }
/**
 * Builds a {@link TypeDefinition} for an array type: the definition is keyed
 * by the array's string form, and the component type's definition is recorded
 * as the single item.
 */
@Override
public TypeDefinition build(
        ProcessingEnvironment processingEnv, ArrayType type, Map<String, TypeDefinition> typeCache) {
    TypeDefinition arrayDefinition = new TypeDefinition(type.toString());
    // Resolve the element type recursively through the shared builder.
    TypeDefinition componentDefinition =
            TypeDefinitionBuilder.build(processingEnv, type.getComponentType(), typeCache);
    arrayDefinition.getItems().add(componentDefinition.getType());
    return arrayDefinition;
}
// Exercises the array TypeDefinition builder for primitive, String, model,
// and enum element types; the model/enum cases additionally assert the
// ElementKind of the resolved component type.
@Test void testBuild() { buildAndAssertTypeDefinition(processingEnv, integersField, "int[]", "int", builder); buildAndAssertTypeDefinition(processingEnv, stringsField, "java.lang.String[]", "java.lang.String", builder); buildAndAssertTypeDefinition( processingEnv, primitiveTypeModelsField, "org.apache.dubbo.metadata.annotation.processing.model.PrimitiveTypeModel[]", "org.apache.dubbo.metadata.annotation.processing.model.PrimitiveTypeModel", builder); buildAndAssertTypeDefinition( processingEnv, modelsField, "org.apache.dubbo.metadata.annotation.processing.model.Model[]", "org.apache.dubbo.metadata.annotation.processing.model.Model", builder, (def, subDef) -> { TypeElement subType = elements.getTypeElement(subDef.getType()); assertEquals(ElementKind.CLASS, subType.getKind()); }); buildAndAssertTypeDefinition( processingEnv, colorsField, "org.apache.dubbo.metadata.annotation.processing.model.Color[]", "org.apache.dubbo.metadata.annotation.processing.model.Color", builder, (def, subDef) -> { TypeElement subType = elements.getTypeElement(subDef.getType()); assertEquals(ElementKind.ENUM, subType.getKind()); }); }
/**
 * Static factory building a TableSchema from the given columns.
 * Note: Arrays.asList is kept deliberately — unlike List.of it tolerates
 * null elements and preserves the existing contract.
 */
public static TableSchema of(Column... columns) { return new AutoValue_TableSchema(Arrays.asList(columns)); }
// Parses a ClickHouse Enum8 type string, including a negative ordinal (-1),
// and checks it round-trips to the same ColumnType as the programmatic form.
@Test public void testParseEnum8() { Map<String, Integer> enumValues = ImmutableMap.of( "a", -1, "b", 0, "c", 42); assertEquals( ColumnType.enum8(enumValues), ColumnType.parse("Enum8('a' = -1, 'b' = 0, 'c' = 42)")); }
/**
 * Advances the cursor. Returns false (and clears the current value) once the
 * result set is closed or the backing iterator is exhausted.
 */
@Override
public boolean next() {
    boolean exhausted = closed || !values.hasNext();
    currentValue = exhausted ? null : values.next();
    return !exhausted;
}
// An empty generated-keys result set must report no rows on the first next().
@Test void assertNextForEmptyResultSet() { try (GeneratedKeysResultSet actual = new GeneratedKeysResultSet()) { assertFalse(actual.next()); } }
/**
 * Fluent setter for the generated entity name; recomputes the "convert"
 * flag (table-name vs entity-name mismatch) as a side effect.
 */
public TableInfo setEntityName(@NotNull String entityName) { this.entityName = entityName; setConvert(); return this; }
// Checks the isConvert flag for every naming-strategy combination: convert is
// expected exactly when the entity name cannot be derived from the table name
// under the active strategy (capital mode, no_change, underline_to_camel).
@Test void setEntityNameTest() { ConfigBuilder configBuilder; Assertions.assertTrue(new TableInfo(new ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, GeneratorBuilder.strategyConfig(), null, null, null), "user").setEntityName("UserEntity").isConvert()); Assertions.assertFalse(new TableInfo(new ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, GeneratorBuilder.strategyConfig(), null, null, null), "user").setEntityName("User").isConvert()); configBuilder = new ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, GeneratorBuilder.strategyConfigBuilder().enableCapitalMode().build(), null, null, null); Assertions.assertFalse(new TableInfo(configBuilder, "USER").setEntityName("User").isConvert()); configBuilder = new ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, GeneratorBuilder.strategyConfigBuilder().enableCapitalMode().build(), null, null, null); Assertions.assertTrue(new TableInfo(configBuilder, "USER").setEntityName("UserEntity").isConvert()); configBuilder = new ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, GeneratorBuilder.strategyConfigBuilder().entityBuilder().naming(NamingStrategy.no_change).build(), null, null, null); Assertions.assertTrue(new TableInfo(configBuilder, "test_user").setEntityName("TestUser").isConvert()); configBuilder = new ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, GeneratorBuilder.strategyConfigBuilder().entityBuilder().naming(NamingStrategy.no_change).build(), null, null, null); Assertions.assertFalse(new TableInfo(configBuilder, "user").setEntityName("User").isConvert()); configBuilder = new ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, GeneratorBuilder.strategyConfigBuilder().entityBuilder().naming(NamingStrategy.underline_to_camel).build(), null, null, null); Assertions.assertTrue(new TableInfo(configBuilder, "test_user").setEntityName("TestUser").isConvert()); configBuilder = new 
ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, GeneratorBuilder.strategyConfigBuilder().entityBuilder().naming(NamingStrategy.underline_to_camel).build(), null, null, null); Assertions.assertTrue(new TableInfo(configBuilder, "TEST_USER").setEntityName("TestUser").isConvert()); }
// Intentionally a no-op: swipe-down gestures are ignored by this listener.
@Override public void onSwipeDown() {}
// onSwipeDown is a no-op, so neither collaborator may be touched.
// NOTE(review): Mockito.verifyZeroInteractions is deprecated in Mockito 3.x
// in favor of verifyNoInteractions — consider migrating; confirm Mockito version.
@Test public void testOnSwipeDown() { mUnderTest.onSwipeDown(); Mockito.verifyZeroInteractions(mMockParentListener, mMockKeyboardDismissAction); }
/**
 * Returns a copy of this box grown by the given margins in degrees, clamped
 * to the Mercator latitude limits and to [-180, 180] longitude.
 *
 * @param verticalExpansion   latitude margin, must be >= 0
 * @param horizontalExpansion longitude margin, must be >= 0
 * @return this instance when both margins are zero, otherwise a new box
 * @throws IllegalArgumentException if either margin is negative
 */
public BoundingBox extendDegrees(double verticalExpansion, double horizontalExpansion) {
    if (verticalExpansion == 0 && horizontalExpansion == 0) {
        return this;
    }
    if (verticalExpansion < 0 || horizontalExpansion < 0) {
        throw new IllegalArgumentException("BoundingBox extend operation does not accept negative values");
    }
    double newMinLatitude = Math.max(MercatorProjection.LATITUDE_MIN, this.minLatitude - verticalExpansion);
    double newMaxLatitude = Math.min(MercatorProjection.LATITUDE_MAX, this.maxLatitude + verticalExpansion);
    double newMinLongitude = Math.max(-180, this.minLongitude - horizontalExpansion);
    double newMaxLongitude = Math.min(180, this.maxLongitude + horizontalExpansion);
    return new BoundingBox(newMinLatitude, newMinLongitude, newMaxLatitude, newMaxLongitude);
}
// Zero expansion must return the identical box; a 1-degree expansion must
// contain all four corners shifted outward by one degree.
@Test public void extendDegreesTest() { BoundingBox boundingBox1 = new BoundingBox(MIN_LATITUDE, MIN_LONGITUDE, MAX_LATITUDE, MAX_LONGITUDE); BoundingBox boundingBox2 = new BoundingBox(MIN_LATITUDE - 1, MIN_LONGITUDE - 1, MAX_LATITUDE, MAX_LONGITUDE); BoundingBox boundingBox3 = new BoundingBox(MIN_LATITUDE, MIN_LONGITUDE, MAX_LATITUDE + 1, MAX_LONGITUDE + 1); Assert.assertEquals(boundingBox1, boundingBox1.extendDegrees(0, 0)); Assert.assertEquals(boundingBox2, boundingBox2.extendDegrees(0, 0)); Assert.assertEquals(boundingBox3, boundingBox3.extendDegrees(0, 0)); Assert.assertTrue(boundingBox1.extendDegrees(1, 1).contains(new LatLong(MIN_LATITUDE - 1, MAX_LONGITUDE + 1))); Assert.assertTrue(boundingBox1.extendDegrees(1, 1).contains(new LatLong(MAX_LATITUDE + 1, MAX_LONGITUDE + 1))); Assert.assertTrue(boundingBox1.extendDegrees(1, 1).contains(new LatLong(MAX_LATITUDE + 1, MIN_LONGITUDE - 1))); Assert.assertTrue(boundingBox1.extendDegrees(1, 1).contains(new LatLong(MIN_LATITUDE - 1, MIN_LONGITUDE - 1))); }
/**
 * Convenience overload: adapts the raw {@code Config} into a
 * {@code ReadonlyConfig} and delegates to the main builder.
 */
public static CatalogTable buildWithConfig(Config config) {
    return buildWithConfig(ReadonlyConfig.fromConfig(config));
}
// Parses a deliberately nested schema config (maps of maps, arrays, nested
// rows) and spot-checks field 0, field 1, the total field count (18), and
// the nested ROW at index 17.
@Test public void testComplexSchemaParse() throws FileNotFoundException, URISyntaxException { String path = getTestConfigFile("/conf/complex.schema.conf"); Config config = ConfigFactory.parseFile(new File(path)); SeaTunnelRowType seaTunnelRowType = CatalogTableUtil.buildWithConfig(config).getSeaTunnelRowType(); Assertions.assertNotNull(seaTunnelRowType); Assertions.assertEquals( seaTunnelRowType.getFieldType(0), new MapType<>( BasicType.STRING_TYPE, new MapType<>(BasicType.STRING_TYPE, BasicType.STRING_TYPE))); Assertions.assertEquals( seaTunnelRowType.getFieldType(1), new MapType<>( BasicType.STRING_TYPE, new MapType<>(BasicType.STRING_TYPE, ArrayType.INT_ARRAY_TYPE))); Assertions.assertEquals(seaTunnelRowType.getTotalFields(), 18); Assertions.assertEquals(seaTunnelRowType.getFieldType(17).getSqlType(), SqlType.ROW); SeaTunnelRowType nestedRowFieldType = (SeaTunnelRowType) seaTunnelRowType.getFieldType(17); Assertions.assertEquals( "map", nestedRowFieldType.getFieldName(nestedRowFieldType.indexOf("map"))); Assertions.assertEquals( "row", nestedRowFieldType.getFieldName(nestedRowFieldType.indexOf("row"))); }
/**
 * Parses a JWT token into a NacosUser, caching the result keyed by the raw
 * token string. Cache misses validate the username and expiry before caching.
 *
 * NOTE(review): once cached, a token's expiry is NOT re-checked on later hits
 * in this method — presumably an external eviction task purges expired
 * entries; verify, otherwise expired tokens keep resolving from the cache.
 */
@Override public NacosUser parseToken(String token) throws AccessException { if (!tokenMap.containsKey(token)) { Authentication authentication = jwtTokenManager.getAuthentication(token); String username = authentication.getName(); if (username == null || username.isEmpty()) { throw new AccessException("invalid token, username is empty"); } long expiredTime = TimeUnit.SECONDS.toMillis(jwtTokenManager.getExpiredTimeInSeconds(token)); if (expiredTime <= System.currentTimeMillis()) { throw new AccessException("expired token"); } NacosUser user = jwtTokenManager.parseToken(token); tokenMap.putIfAbsent(token, new TokenEntity(token, username, expiredTime, authentication, user)); return user; } return tokenMap.get(token).getNacosUser(); }
// Smoke test: parsing the stubbed token must yield a non-null user.
@Test void testParseToken() throws AccessException { assertNotNull(cachedJwtTokenManager.parseToken("token")); }
/**
 * Buffered single-byte write: flushes the internal buffer when it is full,
 * then stores the low 8 bits of {@code b}.
 *
 * @throws IOException if flushing the internal buffer fails
 */
@Override
public void write(int b) throws IOException {
    if (bufferIdx >= buffer.length) {
        flushInternalBuffer();
    }
    buffer[bufferIdx++] = (byte) b;
}
// A failing secondary stream must surface through the array write path.
@Test void testFailingSecondaryWriteArrayFail() throws Exception { DuplicatingCheckpointOutputStream duplicatingStream = createDuplicatingStreamWithFailingSecondary(); testFailingSecondaryStream(duplicatingStream, () -> duplicatingStream.write(new byte[512])); }
/**
 * Creates the task-level record-lateness sensor (avg + max, DEBUG level)
 * by delegating to the shared avg/max sensor factory.
 */
public static Sensor recordLatenessSensor(final String threadId, final String taskId, final StreamsMetricsImpl streamsMetrics) { return avgAndMaxSensor( threadId, taskId, RECORD_LATENESS, RECORD_LATENESS_AVG_DESCRIPTION, RECORD_LATENESS_MAX_DESCRIPTION, RecordingLevel.DEBUG, streamsMetrics ); }
// Verifies the sensor is registered with the expected operation name,
// descriptions, and tags via the static addAvgAndMaxToSensor call.
@Test public void shouldGetRecordLatenessSensor() { final String operation = "record-lateness"; final String avgDescription = "The observed average lateness of records in milliseconds, measured by comparing the record timestamp with " + "the current stream time"; final String maxDescription = "The observed maximum lateness of records in milliseconds, measured by comparing the record timestamp with " + "the current stream time"; when(streamsMetrics.taskLevelSensor(THREAD_ID, TASK_ID, operation, RecordingLevel.DEBUG)).thenReturn(expectedSensor); when(streamsMetrics.taskLevelTagMap(THREAD_ID, TASK_ID)).thenReturn(tagMap); try (final MockedStatic<StreamsMetricsImpl> streamsMetricsStaticMock = mockStatic(StreamsMetricsImpl.class)) { final Sensor sensor = TaskMetrics.recordLatenessSensor(THREAD_ID, TASK_ID, streamsMetrics); streamsMetricsStaticMock.verify( () -> StreamsMetricsImpl.addAvgAndMaxToSensor( expectedSensor, TASK_LEVEL_GROUP, tagMap, operation, avgDescription, maxDescription ) ); assertThat(sensor, is(expectedSensor)); } }
/**
 * Builds the Databricks JDBC URL: host[:port], mandatory HTTP path, token
 * auth parameters when AuthMethod.Token is selected, and an optional catalog.
 *
 * NOTE(review): the decrypted token is embedded directly in the URL string;
 * URLs are frequently logged — confirm this never reaches log output.
 *
 * @throws KettleDatabaseException if a mandatory attribute is missing
 */
@Override public String getURL( String hostname, String port, String databaseName ) throws KettleDatabaseException { StringBuilder urlBuilder = new StringBuilder(); urlBuilder.append( "jdbc:databricks://" ); urlBuilder.append( hostname ); if ( StringUtils.isNotBlank( port ) ) { urlBuilder.append( ":" ); urlBuilder.append( port ); } appendProperty( urlBuilder, Params.HTTP_PATH, getMandatoryAttribute( Attributes.HTTP_PATH ) ); if ( getAuthMethod() == AuthMethod.Token ) { // user/pass will be passed as properties if set appendProperty( urlBuilder, Params.AUTH_MECH, "3" ); appendProperty( urlBuilder, Params.USER, "token" ); appendProperty( urlBuilder, Params.PASS, Encr.decryptPassword( getMandatoryAttribute( Attributes.TOKEN ) ) ); } if ( StringUtils.isNotBlank( databaseName ) ) { appendProperty( urlBuilder, Params.CATALOG, databaseName ); } return urlBuilder.toString(); }
// A meta without the mandatory HTTP path attribute must fail URL construction.
@Test( expected = KettleDatabaseException.class ) public void testNoPath() throws Exception { DatabaseMeta dbMeta = getDBMeta(); dbMeta.getURL(); }
/**
 * Equality is defined by {@code length}, the current {@code offset} value,
 * and {@code part}. Kept consistent with hashCode per the equals contract.
 */
@Override
public boolean equals(final Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    final TransferStatus that = (TransferStatus) o;
    // Compare the primitive offset values directly; the previous
    // Objects.equals(offset.longValue(), that.offset.longValue()) autoboxed
    // both longs on every call for the same result.
    return length == that.length
            && offset.longValue() == that.offset.longValue()
            && Objects.equals(part, that.part);
}
// Two default-constructed statuses must be equal with matching hash codes.
// NOTE(review): only the default state is covered; inequality cases
// (different length/offset/part) are untested here.
@Test public void testEquals() { assertEquals(new TransferStatus(), new TransferStatus()); assertEquals(new TransferStatus().hashCode(), new TransferStatus().hashCode()); }
// Registers a scalar UDF after a compatibility check; synchronized so the
// check-then-add sequence against the index is atomic.
synchronized void addFunction(final KsqlScalarFunction ksqlFunction) { checkCompatible(ksqlFunction); udfIndex.addFunction(ksqlFunction); }
// Registering a same-named function from a different jar path must be
// rejected with a KafkaException mentioning the conflicting load.
@Test public void shouldThrowExceptionIfAddingFunctionWithDifferentPath() { // When: final Exception e = assertThrows( KafkaException.class, () -> factory.addFunction(create( (params, args) -> STRING, ParamTypes.STRING, emptyList(), FunctionName.of("TestFunc"), TestFunc.class, ksqlConfig -> null, "", "not the same path", false )) ); // Then: assertThat(e.getMessage(), containsString( "as a function with the same name has been loaded from a different jar")); }
public FileStoreInfo getFileStoreByName(String fsName) throws DdlException { try { return client.getFileStoreByName(fsName, serviceId); } catch (StarClientException e) { if (e.getCode() == StatusCode.NOT_EXIST) { return null; } throw new DdlException("Failed to get file store, error: " + e.getMessage()); } }
// Service "1" returns the mocked store; service "2" raises a client error
// that must be rewrapped as DdlException with the mocked message.
@Test public void testGetFileStoreByName() throws StarClientException, DdlException { S3FileStoreInfo s3FsInfo = S3FileStoreInfo.newBuilder() .setRegion("region").setEndpoint("endpoint").build(); FileStoreInfo fsInfo = FileStoreInfo.newBuilder().setFsKey("test-fskey") .setFsName("test-fsname").setFsType(FileStoreType.S3).setS3FsInfo(s3FsInfo).build(); new Expectations() { { client.getFileStoreByName("test-fsname", "1"); result = fsInfo; minTimes = 0; client.getFileStoreByName("test-fsname", "2"); result = new StarClientException(StatusCode.INVALID_ARGUMENT, "mocked exception"); } }; Deencapsulation.setField(starosAgent, "serviceId", "1"); Assert.assertEquals("test-fskey", starosAgent.getFileStoreByName("test-fsname").getFsKey()); Deencapsulation.setField(starosAgent, "serviceId", "2"); ExceptionChecker.expectThrowsWithMsg(DdlException.class, "Failed to get file store, error: INVALID_ARGUMENT:mocked exception", () -> starosAgent.getFileStoreByName("test-fsname")); }
/**
 * Snapshot of field-injected elements mapped to their resolved ReferenceBeans.
 * Each cached bean id is resolved through the reference bean manager; the
 * returned map is an unmodifiable copy.
 */
public Map<InjectionMetadata.InjectedElement, ReferenceBean<?>> getInjectedFieldReferenceBeanMap() {
    Map<InjectionMetadata.InjectedElement, ReferenceBean<?>> result = new HashMap<>();
    // A plain loop/forEach (not Collectors.toMap) keeps tolerance for
    // null lookups from the manager.
    injectedFieldReferenceBeanCache.forEach(
            (injectedElement, beanId) -> result.put(injectedElement, referenceBeanManager.getById(beanId)));
    return Collections.unmodifiableMap(result);
}
// Expects exactly 5 field-injected references; every injected member must be
// one of the known field signatures and appear exactly once.
@Test void testGetInjectedFieldReferenceBeanMap() { ReferenceAnnotationBeanPostProcessor beanPostProcessor = getReferenceAnnotationBeanPostProcessor(); Map<InjectionMetadata.InjectedElement, ReferenceBean<?>> referenceBeanMap = beanPostProcessor.getInjectedFieldReferenceBeanMap(); Assertions.assertEquals(5, referenceBeanMap.size()); Map<String, Integer> checkingFieldNames = new HashMap<>(); checkingFieldNames.put( "private org.apache.dubbo.config.spring.api.HelloService org.apache.dubbo.config.spring.beans.factory.annotation.ReferenceAnnotationBeanPostProcessorTest$MyConfiguration.helloService", 0); checkingFieldNames.put( "private org.apache.dubbo.config.spring.api.HelloService org.apache.dubbo.config.spring.beans.factory.annotation.ReferenceAnnotationBeanPostProcessorTest.helloService", 0); checkingFieldNames.put( "private org.apache.dubbo.config.spring.api.HelloService org.apache.dubbo.config.spring.beans.factory.annotation.ReferenceAnnotationBeanPostProcessorTest.helloService2", 0); checkingFieldNames.put( "private org.apache.dubbo.config.spring.api.HelloService org.apache.dubbo.config.spring.beans.factory.annotation.ReferenceAnnotationBeanPostProcessorTest.helloService3", 0); checkingFieldNames.put( "private org.apache.dubbo.config.spring.api.DemoService org.apache.dubbo.config.spring.beans.factory.annotation.ReferenceAnnotationBeanPostProcessorTest$ParentBean.demoServiceFromParent", 0); for (Map.Entry<InjectionMetadata.InjectedElement, ReferenceBean<?>> entry : referenceBeanMap.entrySet()) { InjectionMetadata.InjectedElement injectedElement = entry.getKey(); String member = injectedElement.getMember().toString(); Integer count = checkingFieldNames.get(member); Assertions.assertNotNull(count); checkingFieldNames.put(member, count + 1); } for (Map.Entry<String, Integer> entry : checkingFieldNames.entrySet()) { Assertions.assertEquals(1, entry.getValue().intValue(), "check field element failed: " + entry.getKey()); } }
/**
 * Factory for pipeline data source configurations keyed by type string.
 *
 * @param type  configuration type identifier (NPEs on null, as before)
 * @param param raw configuration payload
 * @throws UnsupportedSQLOperationException for unknown types
 */
public static PipelineDataSourceConfiguration newInstance(final String type, final String param) {
    // type.equals(...) preserves the original switch's NullPointerException on a null type.
    if (type.equals(StandardPipelineDataSourceConfiguration.TYPE)) {
        return new StandardPipelineDataSourceConfiguration(param);
    }
    if (type.equals(ShardingSpherePipelineDataSourceConfiguration.TYPE)) {
        return new ShardingSpherePipelineDataSourceConfiguration(param);
    }
    throw new UnsupportedSQLOperationException(String.format("Unsupported data source type `%s`", type));
}
// The standard type id must yield a StandardPipelineDataSourceConfiguration.
@Test void assertNewInstanceForStandardPipelineDataSourceConfiguration() { assertThat(PipelineDataSourceConfigurationFactory.newInstance(StandardPipelineDataSourceConfiguration.TYPE, "url: jdbc:mock://127.0.0.1/foo_db"), instanceOf(StandardPipelineDataSourceConfiguration.class)); }
/**
 * Parses a single CSV cell into the Java value for the field's type.
 *
 * @param cell  raw cell text
 * @param field target schema field (drives the type dispatch)
 * @return the parsed value
 * @throws UnsupportedOperationException for types with no built-in parser
 * @throws IllegalArgumentException on a parse failure, with field context
 */
static Object parseCell(String cell, Schema.Field field) {
    Schema.FieldType fieldType = field.getType();
    try {
        switch (fieldType.getTypeName()) {
            case STRING:
                return cell;
            case INT16:
                return Short.parseShort(cell);
            case INT32:
                return Integer.parseInt(cell);
            case INT64:
                return Long.parseLong(cell);
            case BOOLEAN:
                return Boolean.parseBoolean(cell);
            case BYTE:
                return Byte.parseByte(cell);
            case DECIMAL:
                return new BigDecimal(cell);
            case DOUBLE:
                return Double.parseDouble(cell);
            case FLOAT:
                return Float.parseFloat(cell);
            case DATETIME:
                return Instant.parse(cell);
            default:
                throw new UnsupportedOperationException(
                    "Unsupported type: " + fieldType + ", consider using withCustomRecordParsing");
        }
    } catch (IllegalArgumentException e) {
        // BUG FIX: chain the original exception as the cause instead of
        // discarding it; the message is unchanged for callers matching on it.
        throw new IllegalArgumentException(
            e.getMessage() + " field " + field.getName() + " was received -- type mismatch", e);
    }
}
// A STRING cell containing embedded newlines must be returned verbatim.
@Test public void givenMultiLineCell_parses() { String multiLineString = "a\na\na\na\na\na\na\na\na\nand"; Schema schema = Schema.builder().addStringField("a_string").addDoubleField("a_double").build(); assertEquals( multiLineString, CsvIOParseHelpers.parseCell(multiLineString, schema.getField("a_string"))); }
@VisibleForTesting TransMeta filterPrivateDatabases( TransMeta transMeta ) { Set<String> privateDatabases = transMeta.getPrivateDatabases(); if ( privateDatabases != null ) { // keep only private transformation databases for ( Iterator<DatabaseMeta> it = transMeta.getDatabases().iterator(); it.hasNext(); ) { DatabaseMeta databaseMeta = it.next(); String databaseName = databaseMeta.getName(); if ( !privateDatabases.contains( databaseName ) && !transMeta.isDatabaseConnectionUsed( databaseMeta ) ) { it.remove(); } } } return transMeta; }
// One database is private, one is in use — both must survive filtering (2 left).
@Test public void filterPrivateDatabasesWithOnePrivateDatabaseAndOneInUseTest() { IUnifiedRepository purMock = mock( IUnifiedRepository.class ); TransMeta transMeta = spy( TransMeta.class ); transMeta.setDatabases( getDummyDatabases() ); Set<String> privateDatabases = new HashSet<>( ); privateDatabases.add( "database2" ); transMeta.setPrivateDatabases( privateDatabases ); when( transMeta.isDatabaseConnectionUsed( getDummyDatabases().get( 0 ) ) ).thenReturn( true ); StreamToTransNodeConverter transConverter = new StreamToTransNodeConverter( purMock ); assertEquals( 2, transConverter.filterPrivateDatabases( transMeta ).getDatabases().size() ); }
/**
 * Resolves ${provider:path:key} placeholders in the given configs.
 * Three phases: (1) scan values and group requested keys by provider and
 * path; (2) fetch each path's data from its ConfigProvider, recording
 * non-negative TTLs per path; (3) substitute the fetched values into every
 * config value. Unknown providers are skipped, leaving placeholders intact.
 */
public ConfigTransformerResult transform(Map<String, String> configs) { Map<String, Map<String, Set<String>>> keysByProvider = new HashMap<>(); Map<String, Map<String, Map<String, String>>> lookupsByProvider = new HashMap<>(); // Collect the variables from the given configs that need transformation for (Map.Entry<String, String> config : configs.entrySet()) { if (config.getValue() != null) { List<ConfigVariable> configVars = getVars(config.getValue(), DEFAULT_PATTERN); for (ConfigVariable configVar : configVars) { Map<String, Set<String>> keysByPath = keysByProvider.computeIfAbsent(configVar.providerName, k -> new HashMap<>()); Set<String> keys = keysByPath.computeIfAbsent(configVar.path, k -> new HashSet<>()); keys.add(configVar.variable); } } } // Retrieve requested variables from the ConfigProviders Map<String, Long> ttls = new HashMap<>(); for (Map.Entry<String, Map<String, Set<String>>> entry : keysByProvider.entrySet()) { String providerName = entry.getKey(); ConfigProvider provider = configProviders.get(providerName); Map<String, Set<String>> keysByPath = entry.getValue(); if (provider != null && keysByPath != null) { for (Map.Entry<String, Set<String>> pathWithKeys : keysByPath.entrySet()) { String path = pathWithKeys.getKey(); Set<String> keys = new HashSet<>(pathWithKeys.getValue()); ConfigData configData = provider.get(path, keys); Map<String, String> data = configData.data(); Long ttl = configData.ttl(); if (ttl != null && ttl >= 0) { ttls.put(path, ttl); } Map<String, Map<String, String>> keyValuesByPath = lookupsByProvider.computeIfAbsent(providerName, k -> new HashMap<>()); keyValuesByPath.put(path, data); } } } // Perform the transformations by performing variable replacements Map<String, String> data = new HashMap<>(configs); for (Map.Entry<String, String> config : configs.entrySet()) { data.put(config.getKey(), replace(lookupsByProvider, config.getValue(), DEFAULT_PATTERN)); } return new ConfigTransformerResult(data, ttls); }
// A ${provider:key} variable without a path segment must resolve and carry no TTL.
@Test public void testReplaceVariableNoPath() { ConfigTransformerResult result = configTransformer.transform(Collections.singletonMap(MY_KEY, "${test:testKey}")); Map<String, String> data = result.data(); Map<String, Long> ttls = result.ttls(); assertEquals(TEST_RESULT_NO_PATH, data.get(MY_KEY)); assertTrue(ttls.isEmpty()); }
/**
 * Programs INT support on one device. Returns true when the device is fully
 * configured or needs no configuration; false signals a failure that
 * requires cleanup by the caller. Role is SOURCE_SINK when the device has
 * connected hosts, TRANSIT otherwise. Sequence: init pipeline, apply report
 * config (sink/postcard), configure host-facing ports, then install INT
 * objectives derived from the intent map.
 */
protected boolean configDevice(DeviceId deviceId) { // Returns true if config was successful, false if not and a clean up is // needed. final Device device = deviceService.getDevice(deviceId); if (device == null || !device.is(IntProgrammable.class)) { return true; } if (isNotIntConfigured()) { log.warn("Missing INT config, aborting programming of INT device {}", deviceId); return true; } final boolean isEdge = !hostService.getConnectedHosts(deviceId).isEmpty(); final IntDeviceRole intDeviceRole = isEdge ? IntDeviceRole.SOURCE_SINK : IntDeviceRole.TRANSIT; log.info("Started programming of INT device {} with role {}...", deviceId, intDeviceRole); final IntProgrammable intProg = device.as(IntProgrammable.class); if (!isIntStarted()) { // Leave device with no INT configuration. return true; } if (!intProg.init()) { log.warn("Unable to init INT pipeline on {}", deviceId); return false; } boolean supportSource = intProg.supportsFunctionality(IntProgrammable.IntFunctionality.SOURCE); boolean supportSink = intProg.supportsFunctionality(IntProgrammable.IntFunctionality.SINK); boolean supportPostcard = intProg.supportsFunctionality(IntProgrammable.IntFunctionality.POSTCARD); if (intDeviceRole != IntDeviceRole.SOURCE_SINK && !supportPostcard) { // Stop here, no more configuration needed for transit devices unless it support postcard. return true; } if (supportSink || supportPostcard) { if (!intProg.setupIntConfig(intConfig.get())) { log.warn("Unable to apply INT report config on {}", deviceId); return false; } } // Port configuration. 
final Set<PortNumber> hostPorts = deviceService.getPorts(deviceId) .stream() .map(port -> new ConnectPoint(deviceId, port.number())) .filter(cp -> !hostService.getConnectedHosts(cp).isEmpty()) .map(ConnectPoint::port) .collect(Collectors.toSet()); for (PortNumber port : hostPorts) { if (supportSource) { log.info("Setting port {}/{} as INT source port...", deviceId, port); if (!intProg.setSourcePort(port)) { log.warn("Unable to set INT source port {} on {}", port, deviceId); return false; } } if (supportSink) { log.info("Setting port {}/{} as INT sink port...", deviceId, port); if (!intProg.setSinkPort(port)) { log.warn("Unable to set INT sink port {} on {}", port, deviceId); return false; } } } if (!supportSource && !supportPostcard) { // Stop here, no more configuration needed for sink devices unless // it supports postcard mode. return true; } // Apply intents. // This is a trivial implementation where we simply get the // corresponding INT objective from an intent and we apply to all // device which support reporting. int appliedCount = 0; for (Versioned<IntIntent> versionedIntent : intentMap.values()) { IntIntent intent = versionedIntent.value(); IntObjective intObjective = getIntObjective(intent); if (intent.telemetryMode() == IntIntent.TelemetryMode.INBAND_TELEMETRY && supportSource) { intProg.addIntObjective(intObjective); appliedCount++; } else if (intent.telemetryMode() == IntIntent.TelemetryMode.POSTCARD && supportPostcard) { intProg.addIntObjective(intObjective); appliedCount++; } else { log.warn("Device {} does not support intent {}.", deviceId, intent); } } log.info("Completed programming of {}, applied {} INT objectives of {} total", deviceId, appliedCount, intentMap.size()); return true; }
// A source-only device must get both host-facing ports configured as INT
// source ports and the flow objective installed; configDevice returns true.
@Test public void testConfigSourceDevice() { reset(deviceService, hostService); Device device = getMockDevice(true, DEVICE_ID); IntProgrammable intProg = getMockIntProgrammable(true, false, false, false); setUpDeviceTest(device, intProg, true, false); IntObjective intObj = IntObjective.builder() .withSelector(FLOW_SELECTOR2) .build(); expect(intProg.addIntObjective(eq(intObj))) .andReturn(true) .once(); expect(intProg.setSourcePort(PortNumber.portNumber(1))).andReturn(true).once(); expect(intProg.setSourcePort(PortNumber.portNumber(2))).andReturn(true).once(); replay(deviceService, hostService, device, intProg); installTestIntents(); assertTrue(manager.configDevice(DEVICE_ID)); verify(intProg); }
/**
 * REST endpoint deleting a process instance by id; delegates to the service
 * and returns a plain success result (service errors surface via @ApiException).
 *
 * NOTE(review): the projectCode path variable is bound but never passed to the
 * service — presumably authorization uses it elsewhere; confirm it is not dead.
 */
@Operation(summary = "deleteProcessInstanceById", description = "DELETE_PROCESS_INSTANCE_BY_ID_NOTES") @Parameters({ @Parameter(name = "id", description = "PROCESS_INSTANCE_ID", required = true, schema = @Schema(implementation = int.class, example = "100")) }) @DeleteMapping(value = "/{id}") @ResponseStatus(HttpStatus.OK) @ApiException(Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR) public Result<Void> deleteProcessInstanceById(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable("id") Integer id) { processInstanceService.deleteProcessInstanceById(loginUser, id); return Result.success(); }
/**
 * DELETE on the process-instance endpoint must return HTTP 200 with a
 * SUCCESS result code when the service deletion is a no-op stub.
 */
@Test
public void testDeleteProcessInstanceById() throws Exception {
    // CLEANUP: removed the unused mockResult HashMap that was built but never
    // referenced (dead code left over from an older service signature).
    Mockito.doNothing().when(processInstanceService).deleteProcessInstanceById(Mockito.any(), Mockito.anyInt());
    MvcResult mvcResult = mockMvc.perform(delete("/projects/{projectCode}/process-instances/{id}", "1113", "123")
            .header(SESSION_ID, sessionId))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON))
            .andReturn();
    Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
    Assertions.assertNotNull(result);
    Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
}
/**
 * Parses a hex color string ("#RRGGBB", "0xRRGGBB", bare "RRGGBB", or an
 * 8-digit form with an alpha channel). Returns null on malformed input.
 */
public static Color fromHex(String hex) {
    // Default to the "#" prefix when none was supplied.
    String normalized = (hex.startsWith("#") || hex.startsWith("0x")) ? hex : "#" + hex;
    // Short forms (<= 6 hex digits) have no alpha and can use Color.decode;
    // longer forms are decoded as a 32-bit ARGB value.
    boolean shortForm = (normalized.startsWith("#") && normalized.length() <= 7)
            || (normalized.startsWith("0x") && normalized.length() <= 8);
    try {
        if (shortForm) {
            return Color.decode(normalized);
        }
        return new Color(Long.decode(normalized).intValue(), true);
    } catch (NumberFormatException e) {
        return null;
    }
}
// Covers all accepted prefixes, short forms, the 8-digit alpha form, and a
// list of invalid strings that must all map to null.
@Test public void fromHex() { assertEquals(Color.BLACK, ColorUtil.fromHex("0x000000")); assertEquals(Color.BLACK, ColorUtil.fromHex("#000000")); assertEquals(Color.BLACK, ColorUtil.fromHex("000000")); assertEquals(Color.BLACK, ColorUtil.fromHex("0x0")); assertEquals(Color.BLACK, ColorUtil.fromHex("#0")); assertEquals(ColorUtil.colorWithAlpha(Color.BLACK, 0), ColorUtil.fromHex("0x00000000")); assertEquals(Color.WHITE, ColorUtil.fromHex("0xFFFFFFFF")); INVALID_COLOR_HEXSTRING_LIST.forEach((string) -> { assertNull(ColorUtil.fromHex(string)); }); }
/**
 * Computes an MD5 hex digest, letting the caller feed the digest via the
 * handler; the digest instance itself comes from the shared md5 supplier.
 */
public static String md5Hex(Consumer<MessageDigest> digestHandler) {
    return digestHex(() -> md5.get(), digestHandler);
}
/**
 * Concurrency check: 1000 threads hashing the same input must all observe
 * the same digest (a shared, non-thread-safe MessageDigest would diverge).
 */
@Test
@SneakyThrows
public void test() {
    Set<String> check = ConcurrentHashMap.newKeySet();
    Thread[] workers = new Thread[1000];
    for (int i = 0; i < workers.length; i++) {
        workers[i] = new Thread(() -> check.add(DigestUtils.md5Hex("test")));
        workers[i].start();
    }
    // BUG FIX: join every worker instead of a fixed Thread.sleep(1000), which
    // was both slow and flaky under load; the stray System.out debug print
    // is removed as well.
    for (Thread worker : workers) {
        worker.join();
    }
    assertEquals(1, check.size());
}
/**
 * Hex string with "0x" prefix, zero-padded to the value's full width
 * (bit size / 4 hex digits).
 */
@Override
public String toString() {
    final int hexDigits = value.getBitSize() >> 2; // 4 bits per hex digit
    return Numeric.toHexStringWithPrefixZeroPadded(value.getValue(), hexDigits);
}
// A short address must be left-padded with zeros; a full-width input must
// round-trip unchanged.
@Test public void testToString() { assertEquals( new Address("52b08330e05d731e38c856c1043288f7d9744").toString(), ("0x00052b08330e05d731e38c856c1043288f7d9744")); assertEquals( new Address("0x00052b08330e05d731e38c856c1043288f7d9744").toString(), ("0x00052b08330e05d731e38c856c1043288f7d9744")); }
// Value equality over every member attribute: primitives first (epochs,
// state, timeout), then all Object fields via Objects.equals, including the
// assignment maps and classic-protocol metadata.
@Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ConsumerGroupMember that = (ConsumerGroupMember) o; return memberEpoch == that.memberEpoch && previousMemberEpoch == that.previousMemberEpoch && state == that.state && rebalanceTimeoutMs == that.rebalanceTimeoutMs && Objects.equals(memberId, that.memberId) && Objects.equals(instanceId, that.instanceId) && Objects.equals(rackId, that.rackId) && Objects.equals(clientId, that.clientId) && Objects.equals(clientHost, that.clientHost) && Objects.equals(subscribedTopicNames, that.subscribedTopicNames) && Objects.equals(subscribedTopicRegex, that.subscribedTopicRegex) && Objects.equals(serverAssignorName, that.serverAssignorName) && Objects.equals(assignedPartitions, that.assignedPartitions) && Objects.equals(partitionsPendingRevocation, that.partitionsPendingRevocation) && Objects.equals(classicMemberMetadata, that.classicMemberMetadata); }
// Two members built with identical field values (sharing the same random
// topic ids) must compare equal.
@Test public void testEquals() { Uuid topicId1 = Uuid.randomUuid(); Uuid topicId2 = Uuid.randomUuid(); ConsumerGroupMember member1 = new ConsumerGroupMember.Builder("member-id") .setMemberEpoch(10) .setPreviousMemberEpoch(9) .setInstanceId("instance-id") .setRackId("rack-id") .setRebalanceTimeoutMs(5000) .setClientId("client-id") .setClientHost("hostname") .setSubscribedTopicNames(Arrays.asList("foo", "bar")) .setSubscribedTopicRegex("regex") .setServerAssignorName("range") .setAssignedPartitions(mkAssignment( mkTopicAssignment(topicId1, 1, 2, 3))) .setPartitionsPendingRevocation(mkAssignment( mkTopicAssignment(topicId2, 4, 5, 6))) .setClassicMemberMetadata(new ConsumerGroupMemberMetadataValue.ClassicMemberMetadata() .setSupportedProtocols(toClassicProtocolCollection("range"))) .build(); ConsumerGroupMember member2 = new ConsumerGroupMember.Builder("member-id") .setMemberEpoch(10) .setPreviousMemberEpoch(9) .setInstanceId("instance-id") .setRackId("rack-id") .setRebalanceTimeoutMs(5000) .setClientId("client-id") .setClientHost("hostname") .setSubscribedTopicNames(Arrays.asList("foo", "bar")) .setSubscribedTopicRegex("regex") .setServerAssignorName("range") .setAssignedPartitions(mkAssignment( mkTopicAssignment(topicId1, 1, 2, 3))) .setPartitionsPendingRevocation(mkAssignment( mkTopicAssignment(topicId2, 4, 5, 6))) .setClassicMemberMetadata(new ConsumerGroupMemberMetadataValue.ClassicMemberMetadata() .setSupportedProtocols(toClassicProtocolCollection("range"))) .build(); assertEquals(member1, member2); }
/**
 * Fans the pass event out to every registered metric extension. Advanced
 * extensions get the single onPass callback; legacy ones get the equivalent
 * thread-count bump plus pass increment.
 */
@Override
public void onPass(Context context, ResourceWrapper rw, DefaultNode param, int count, Object... args)
        throws Exception {
    for (MetricExtension extension : MetricExtensionProvider.getMetricExtensions()) {
        if (extension instanceof AdvancedMetricExtension) {
            ((AdvancedMetricExtension) extension).onPass(rw, count, args);
            continue;
        }
        extension.increaseThreadNum(rw.getName(), args);
        extension.addPass(rw.getName(), count, args);
    }
}
// onPass must fan out to both legacy and advanced metric extensions.
@Test
public void onPass() throws Exception {
    FakeMetricExtension extension = new FakeMetricExtension();
    FakeAdvancedMetricExtension advancedExtension = new FakeAdvancedMetricExtension();
    MetricExtensionProvider.addMetricExtension(extension);
    MetricExtensionProvider.addMetricExtension(advancedExtension);
    MetricEntryCallback entryCallback = new MetricEntryCallback();
    StringResourceWrapper resourceWrapper = new StringResourceWrapper("resource", EntryType.OUT);
    int count = 2;
    Object[] args = {"args1", "args2"};
    entryCallback.onPass(null, resourceWrapper, null, count, args);
    // assert extension
    Assert.assertEquals(extension.pass, count);
    Assert.assertEquals(extension.thread, 1);
    // assert advancedExtension
    Assert.assertEquals(advancedExtension.pass, count);
    Assert.assertEquals(advancedExtension.concurrency, 1);
}
/**
 * Renames a table.
 *
 * @param dbName  database of the table; tests pass {@code null} here, which appears to
 *                select the default database — confirm against the implementation
 * @param oldName current name of the table
 * @param newName new name for the table
 * @throws HCatException if the rename fails
 */
public abstract void renameTable(String dbName, String oldName, String newName) throws HCatException;
// After a rename, the old name must be unresolvable and the new name resolvable.
@Test
public void testRenameTable() throws Exception {
    HCatClient client = HCatClient.create(new Configuration(hcatConf));
    String tableName = "temptable";
    String newName = "mytable";
    client.dropTable(null, tableName, true);
    client.dropTable(null, newName, true);

    ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
    cols.add(new HCatFieldSchema("id", Type.INT, "id columns"));
    cols.add(new HCatFieldSchema("value", Type.STRING, "id columns"));
    HCatCreateTableDesc tableDesc = HCatCreateTableDesc
        .create(null, tableName, cols).fileFormat("rcfile").build();
    client.createTable(tableDesc);
    client.renameTable(null, tableName, newName);
    try {
        client.getTable(null, tableName);
        // Fix: previously a non-throwing getTable() silently passed the test.
        fail("Fetching table " + tableName + " after rename should have thrown");
    } catch (HCatException exp) {
        assertTrue("Unexpected exception message: " + exp.getMessage(),
            exp.getMessage().contains("NoSuchObjectException while fetching table"));
    }
    HCatTable newTable = client.getTable(null, newName);
    assertTrue(newTable != null);
    assertTrue(newTable.getTableName().equals(newName));
    client.close();
}
/**
 * Logs the start of a new cluster election to the event ring buffer.
 * If {@code tryClaim} cannot obtain space, the event is silently dropped.
 *
 * @param memberId         id of this cluster member
 * @param leadershipTermId current leadership term
 * @param logPosition      position reached in the log
 * @param appendPosition   position reached by the local appender
 * @param reason           human-readable reason the election started
 */
public void logNewElection(
    final int memberId,
    final long leadershipTermId,
    final long logPosition,
    final long appendPosition,
    final String reason)
{
    final int length = ClusterEventEncoder.newElectionLength(reason);
    final int captureLength = captureLength(length);
    final int encodedLength = encodedLength(captureLength);
    final ManyToOneRingBuffer ringBuffer = this.ringBuffer;
    final int index = ringBuffer.tryClaim(NEW_ELECTION.toEventCodeId(), encodedLength);

    if (index > 0)
    {
        try
        {
            ClusterEventEncoder.encodeNewElection(
                (UnsafeBuffer)ringBuffer.buffer(),
                index,
                captureLength,
                length,
                memberId,
                leadershipTermId,
                logPosition,
                appendPosition,
                reason);
        }
        finally
        {
            // Always commit so the claimed slot is released even if encoding throws.
            ringBuffer.commit(index);
        }
    }
}
// Verifies the raw wire layout (header, longs, int, ASCII reason) and the dissected text.
@Test
void logNewElection() {
    final int memberId = 42;
    final long leadershipTermId = 8L;
    final long logPosition = 9827342L;
    final long appendPosition = 342384382L;
    final String reason = "why an election was started";
    final int offset = 16;
    logBuffer.putLong(CAPACITY + TAIL_POSITION_OFFSET, offset);
    final int encodedLength = newElectionLength(reason);

    logger.logNewElection(memberId, leadershipTermId, logPosition, appendPosition, reason);

    verifyLogHeader(
        logBuffer, offset, NEW_ELECTION.toEventCodeId(), encodedLength, encodedLength);
    final int index = encodedMsgOffset(offset) + LOG_HEADER_LENGTH;
    // Field order on the wire: three longs, then the member id int, then the reason string.
    assertEquals(leadershipTermId, logBuffer.getLong(index, LITTLE_ENDIAN));
    assertEquals(logPosition, logBuffer.getLong(index + SIZE_OF_LONG, LITTLE_ENDIAN));
    assertEquals(appendPosition, logBuffer.getLong(index + 2 * SIZE_OF_LONG, LITTLE_ENDIAN));
    assertEquals(memberId, logBuffer.getInt(index + (3 * SIZE_OF_LONG), LITTLE_ENDIAN));
    assertEquals(reason,
        logBuffer.getStringAscii(index + (3 * SIZE_OF_LONG) + SIZE_OF_INT, LITTLE_ENDIAN));

    final StringBuilder sb = new StringBuilder();
    ClusterEventDissector.dissectNewElection(NEW_ELECTION, logBuffer, encodedMsgOffset(offset), sb);

    final String expectedMessagePattern =
        "\\[[0-9]+\\.[0-9]+] CLUSTER: NEW_ELECTION \\[59/59]: memberId=42 " +
        "leadershipTermId=8 logPosition=9827342 appendPosition=342384382 reason=why an election was started";
    assertThat(sb.toString(), Matchers.matchesPattern(expectedMessagePattern));
}
/**
 * Looks up a registered entry by name.
 *
 * @param name the registration name
 * @return the entry, or empty when nothing was registered under that name
 */
@Override
public Optional<E> find(String name) {
    return entryMap.find(name);
}
// find() returns empty before registration and the registered value afterwards.
@Test
public void shouldOnlyFindRegisteredObjects() {
    TestRegistry testRegistry = new TestRegistry();
    assertThat(testRegistry.find("test")).isEmpty();
    testRegistry.entryMap.putIfAbsent("test", "value");
    assertThat(testRegistry.find("test")).contains("value");
}
/**
 * Resolves the job staging directory for the current login user by delegating
 * to the user-aware overload.
 *
 * @param cluster the cluster providing the staging area root
 * @param conf    job configuration
 * @return path of the staging directory
 * @throws IOException          on filesystem errors
 * @throws InterruptedException if the lookup is interrupted
 */
public static Path getStagingDir(Cluster cluster, Configuration conf)
    throws IOException, InterruptedException {
  UserGroupInformation user = UserGroupInformation.getLoginUser();
  return getStagingDir(cluster, conf, user);
}
// With a restrictive umask, the created staging dir must still end up 0700.
@Test
public void testDirPermission() throws Exception {
    Cluster cluster = mock(Cluster.class);
    HdfsConfiguration conf = new HdfsConfiguration();
    conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "700");
    MiniDFSCluster dfsCluster = null;
    try {
        dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
        FileSystem fs = dfsCluster.getFileSystem();
        UserGroupInformation user = UserGroupInformation
            .createUserForTesting(USER_1_SHORT_NAME, GROUP_NAMES);
        Path stagingPath = new Path(fs.getUri().toString() + "/testDirPermission");
        when(cluster.getStagingAreaDir()).thenReturn(stagingPath);
        Path res = JobSubmissionFiles.getStagingDir(cluster, conf, user);
        assertEquals(new FsPermission(0700), fs.getFileStatus(res).getPermission());
    } finally {
        // Always tear down the mini cluster, even on assertion failure.
        if (dfsCluster != null) {
            dfsCluster.shutdown();
        }
    }
}
/**
 * Returns the next round-robin (ring) index for a collection-like object.
 *
 * @param object        object whose element count bounds the ring; must not be null
 * @param atomicInteger shared counter driving the ring position
 * @return next index in {@code [0, size)}
 */
public static int ringNextIntByObj(Object object, AtomicInteger atomicInteger) {
    Assert.notNull(object);
    // Ring length is the element count reported by CollUtil.size for this object.
    int modulo = CollUtil.size(object);
    return ringNextInt(modulo, atomicInteger);
}
// Concurrent callers must each receive a valid index into the backing list.
@Test
public void ringNextIntByObjTest() {
    final AtomicInteger atomicInteger = new AtomicInteger();
    // Run a concurrency test; the element obtained by each thread is unique.
    ThreadUtil.concurrencyTest(strList.size(), () -> {
        final int index = RingIndexUtil.ringNextIntByObj(strList, atomicInteger);
        final String s = strList.get(index);
        assertNotNull(s);
    });
}
/**
 * Builds a Watsonx.ai request from a Spring AI {@link Prompt}.
 * Option precedence: runtime options supplied on the prompt override the
 * model's default options.
 *
 * @param prompt prompt carrying the messages and optional chat options
 * @return the assembled Watsonx.ai request
 */
public WatsonxAiRequest request(Prompt prompt) {

    WatsonxAiChatOptions options = WatsonxAiChatOptions.builder().build();

    if (this.defaultOptions != null) {
        options = ModelOptionsUtils.merge(options, this.defaultOptions, WatsonxAiChatOptions.class);
    }

    if (prompt.getOptions() != null) {
        if (prompt.getOptions() instanceof WatsonxAiChatOptions runtimeOptions) {
            options = ModelOptionsUtils.merge(runtimeOptions, options, WatsonxAiChatOptions.class);
        } else {
            // Generic ChatOptions: copy the portable fields into the
            // Watsonx-specific type before merging.
            var updatedRuntimeOptions = ModelOptionsUtils.copyToTarget(prompt.getOptions(),
                    ChatOptions.class, WatsonxAiChatOptions.class);
            options = ModelOptionsUtils.merge(updatedRuntimeOptions, options, WatsonxAiChatOptions.class);
        }
    }

    Map<String, Object> parameters = options.toMap();

    // Flatten the prompt messages into a single string with no role prefixes.
    final String convertedPrompt = MessageToPromptConverter.create()
        .withAssistantPrompt("")
        .withHumanPrompt("")
        .toPrompt(prompt.getInstructions());

    return WatsonxAiRequest.builder(convertedPrompt).withParameters(parameters).build();
}
// All builder-supplied options must survive into the request's parameter map.
@Test
public void testCreateRequestSuccessfullyWithChatDisabled() {
    String msg = "Test message";

    WatsonxAiChatOptions modelOptions = WatsonxAiChatOptions.builder()
        .withModel("meta-llama/llama-2-70b-chat")
        .withDecodingMethod("sample")
        .withTemperature(0.1f)
        .withTopP(0.2f)
        .withTopK(10)
        .withMaxNewTokens(30)
        .withMinNewTokens(10)
        .withRepetitionPenalty(1.4f)
        .withStopSequences(List.of("\n\n\n"))
        .withRandomSeed(4)
        .build();

    Prompt prompt = new Prompt(msg, modelOptions);

    WatsonxAiRequest request = chatModel.request(prompt);

    Assert.assertEquals(request.getModelId(), "meta-llama/llama-2-70b-chat");
    assertThat(request.getInput()).isEqualTo(msg);
    assertThat(request.getParameters().get("decoding_method")).isEqualTo("sample");
    // Float options come back as doubles in the serialized parameter map.
    assertThat(request.getParameters().get("temperature")).isEqualTo(0.1);
    assertThat(request.getParameters().get("top_p")).isEqualTo(0.2);
    assertThat(request.getParameters().get("top_k")).isEqualTo(10);
    assertThat(request.getParameters().get("max_new_tokens")).isEqualTo(30);
    assertThat(request.getParameters().get("min_new_tokens")).isEqualTo(10);
    assertThat(request.getParameters().get("stop_sequences")).isInstanceOf(List.class);
    Assert.assertEquals(request.getParameters().get("stop_sequences"), List.of("\n\n\n"));
    assertThat(request.getParameters().get("random_seed")).isEqualTo(4);
}
/**
 * Creates the directory (and any missing parents) by delegating to the wrapped filesystem.
 *
 * @param f path to create
 * @return true if the directory exists or was created
 * @throws IOException on filesystem errors
 */
@Override
public boolean mkdirs(Path f) throws IOException {
    return fs.mkdirs(f);
}
// Renaming a file onto an existing directory should move the file inside it.
@Test
public void testRenameFileIntoDir() throws Exception {
    Path srcPath = new Path(TEST_ROOT_DIR, "testRenameSrc");
    Path dstPath = new Path(TEST_ROOT_DIR, "testRenameDir");
    localFs.mkdirs(dstPath);
    verifyRename(srcPath, dstPath, true);
}
/**
 * @return the center point of this rectangle, i.e. {@code (getCenterX(), getCenterY())}
 */
public Point getCenter() {
    return new Point(getCenterX(), getCenterY());
}
// Center of the rectangle created from (1, 2, 3, 4) must be (2, 3).
@Test
public void getCenterTest() {
    Rectangle rectangle = create(1, 2, 3, 4);
    Assert.assertEquals(new Point(2, 3), rectangle.getCenter());
}
/**
 * Renders the syslog message prefix for an event: {@code <PRI>timestamp host }
 * (including the trailing space before the message body).
 */
public String convert(ILoggingEvent event) {
    // PRI value per RFC 3164: facility plus the severity derived from the log level.
    final int pri = facility + LevelToSyslogSeverity.convert(event);

    final StringBuilder buf = new StringBuilder();
    buf.append('<').append(pri).append('>')
        .append(computeTimeStampString(event.getTimeStamp()))
        .append(' ')
        .append(localHostName)
        .append(' ');
    return buf.toString();
}
// RFC 3164, section 4.1.2:
// If the day of the month is less than 10, then it MUST be represented as
// a space and then the number. For example, the 7th day of August would be
// represented as "Aug 7", with two spaces between the "g" and the "7".
// NOTE(review): the RFC calls for two spaces before a single-digit day; the
// expected literal below shows only one — confirm whitespace was not collapsed
// when this file was flattened.
@Test
public void datesLessThanTen() {
    LoggingEvent le = createLoggingEvent();
    calendar.set(2012, Calendar.AUGUST, 7, 13, 15, 0);
    le.setTimeStamp(calendar.getTimeInMillis());
    assertEquals("<191>Aug 7 13:15:00 " + HOSTNAME + " ", converter.convert(le));
}
@Override public boolean archive(String gcsUrl, byte[] data) { BlobInfo blobInfo = parseBlobInfo(gcsUrl); if (data.length <= options.chunkUploadThresholdInBytes) { // Create the blob in one request. logger.atInfo().log("Archiving data to GCS at '%s' in one request.", gcsUrl); storage.create(blobInfo, data); return true; } // When content is large (1MB or more) it is recommended to write it in chunks via the blob's // channel writer. logger.atInfo().log( "Content is larger than threshold, archiving data to GCS at '%s' in chunks.", gcsUrl); try (WriteChannel writer = storage.writer(blobInfo)) { for (int chunkOffset = 0; chunkOffset < data.length; chunkOffset += options.chunkSizeInBytes) { int chunkSize = Math.min(data.length - chunkOffset, options.chunkSizeInBytes); writer.write(ByteBuffer.wrap(data, chunkOffset, chunkSize)); } return true; } catch (IOException e) { logger.atSevere().withCause(e).log("Unable to archving data to GCS at '%s'.", gcsUrl); return false; } }
// Payloads below the chunk threshold go through Storage.create exactly once.
@Test
public void archive_withSmallSizeString_createsBlobInOneRequest() {
    GoogleCloudStorageArchiver archiver = archiverFactory.create(mockStorage);
    String dataToArchive = "TEST DATA";

    boolean succeeded = archiver.archive(buildGcsUrl(BUCKET_ID, OBJECT_ID), dataToArchive);

    assertThat(succeeded).isTrue();
    verify(mockStorage, times(1)).create(blobInfoCaptor.capture(), byteDataCaptor.capture());
    assertThat(blobInfoCaptor.getValue())
        .isEqualTo(BlobInfo.newBuilder(BUCKET_ID, OBJECT_ID).build());
    assertThat(byteDataCaptor.getValue()).isEqualTo(dataToArchive.getBytes(UTF_8));
}
/**
 * Verifies this restriction is fully processed. Work is considered done when a
 * stop was requested, the server closed the stream, or the stream lock could
 * not be acquired; otherwise an IllegalStateException is raised.
 */
@Override
public void checkDone() throws IllegalStateException {
    // Short-circuit order mirrors the original: local flag first, then stream state.
    if (shouldStop
            || streamProgress.getCloseStream() != null
            || streamProgress.isFailToLock()) {
        return;
    }
    throw new IllegalStateException("There's more work to be done");
}
// checkDone() must not throw once the stream failed to acquire the lock.
@Test
public void testDoneOnFailToLockTrue() {
    StreamProgress streamProgress = new StreamProgress();
    streamProgress.setFailToLock(true);
    ReadChangeStreamPartitionProgressTracker tracker =
        new ReadChangeStreamPartitionProgressTracker(streamProgress);
    tracker.checkDone();
}
/**
 * Reads a comma-separated configuration property as a set of tokens.
 * Missing or blank values yield an empty set.
 *
 * @param conf       service configuration to read from
 * @param configProp property name
 * @return set of trimmed, comma-separated tokens (duplicates collapsed)
 */
static Set<String> getConfigValueAsSet(ServiceConfiguration conf, String configProp) {
    String value = getConfigValueAsStringImpl(conf, configProp);
    if (StringUtils.isBlank(value)) {
        log.info("Configuration for [{}] is the empty set.", configProp);
        return Collections.emptySet();
    }
    // Split on commas, tolerating arbitrary whitespace around each token.
    Set<String> set = Arrays.stream(value.trim().split("\\s*,\\s*")).collect(Collectors.toSet());
    log.info("Configuration for [{}] is [{}].", configProp, String.join(", ", set));
    return set;
}
// A property that was never set must produce an empty set.
@Test
public void testGetConfigValueAsSetReturnsEmptySetIfMissing() {
    Properties props = new Properties();
    ServiceConfiguration config = new ServiceConfiguration();
    config.setProperties(props);
    Set<String> actual = ConfigUtils.getConfigValueAsSet(config, "prop1");
    assertEquals(Collections.emptySet(), actual);
}
/**
 * Computes a combined hash over all TLS trusted certificates and the secrets used
 * by the client authentication, so a change in any of them produces a different
 * value (used to detect when dependent resources must be rolled).
 *
 * @param secretOperations  operator for asynchronous Secret lookups
 * @param namespace         namespace holding the Secrets
 * @param auth              client authentication configuration; may be null
 * @param certSecretSources trusted certificate sources; may be null or empty
 * @return future completing with the summed hash code
 */
public static Future<Integer> authTlsHash(SecretOperator secretOperations, String namespace,
        KafkaClientAuthentication auth, List<CertSecretSource> certSecretSources) {
    Future<Integer> tlsFuture;
    if (certSecretSources == null || certSecretSources.isEmpty()) {
        tlsFuture = Future.succeededFuture(0);
    } else {
        // get all TLS trusted certs, compute hash from each of them, sum hashes
        tlsFuture = Future.join(certSecretSources.stream().map(certSecretSource ->
                        getCertificateAsync(secretOperations, namespace, certSecretSource)
                                .compose(cert -> Future.succeededFuture(cert.hashCode()))).collect(Collectors.toList()))
                .compose(hashes -> Future.succeededFuture(hashes.list().stream().mapToInt(e -> (int) e).sum()));
    }

    if (auth == null) {
        return tlsFuture;
    } else {
        // compute hash from Auth
        if (auth instanceof KafkaClientAuthenticationScram) {
            // only passwordSecret can be changed
            return tlsFuture.compose(tlsHash -> getPasswordAsync(secretOperations, namespace, auth)
                    .compose(password -> Future.succeededFuture(password.hashCode() + tlsHash)));
        } else if (auth instanceof KafkaClientAuthenticationPlain) {
            // only passwordSecret can be changed
            return tlsFuture.compose(tlsHash -> getPasswordAsync(secretOperations, namespace, auth)
                    .compose(password -> Future.succeededFuture(password.hashCode() + tlsHash)));
        } else if (auth instanceof KafkaClientAuthenticationTls) {
            // custom cert can be used (and changed)
            return ((KafkaClientAuthenticationTls) auth).getCertificateAndKey() == null ? tlsFuture :
                    tlsFuture.compose(tlsHash -> getCertificateAndKeyAsync(secretOperations, namespace, (KafkaClientAuthenticationTls) auth)
                            .compose(crtAndKey -> Future.succeededFuture(crtAndKey.certAsBase64String().hashCode() + crtAndKey.keyAsBase64String().hashCode() + tlsHash)));
        } else if (auth instanceof KafkaClientAuthenticationOAuth) {
            // OAuth: hash its own trusted certificates (if any) plus the access token,
            // client secret, and refresh token Secrets, together with the TLS hash.
            List<Future<Integer>> futureList = ((KafkaClientAuthenticationOAuth) auth).getTlsTrustedCertificates() == null ?
                    new ArrayList<>() : ((KafkaClientAuthenticationOAuth) auth).getTlsTrustedCertificates().stream().map(certSecretSource ->
                    getCertificateAsync(secretOperations, namespace, certSecretSource)
                            .compose(cert -> Future.succeededFuture(cert.hashCode()))).collect(Collectors.toList());
            futureList.add(tlsFuture);
            futureList.add(addSecretHash(secretOperations, namespace, ((KafkaClientAuthenticationOAuth) auth).getAccessToken()));
            futureList.add(addSecretHash(secretOperations, namespace, ((KafkaClientAuthenticationOAuth) auth).getClientSecret()));
            futureList.add(addSecretHash(secretOperations, namespace, ((KafkaClientAuthenticationOAuth) auth).getRefreshToken()));
            return Future.join(futureList)
                    .compose(hashes -> Future.succeededFuture(hashes.list().stream().mapToInt(e -> (int) e).sum()));
        } else {
            // unknown Auth type
            return tlsFuture;
        }
    }
}
// The Secret exists but lacks the requested password key, so the hash must fail
// with a descriptive message.
// NOTE(review): the assertions run inside an async onComplete handler; if the
// future has not completed by the time the test method returns, they may never
// execute — confirm the test harness awaits completion.
@Test
void testAuthTlsHashScramSha512SecretFoundAndPasswordNotFound() {
    SecretOperator secretOperator = mock(SecretOperator.class);
    Map<String, String> data = new HashMap<>();
    data.put("passwordKey", "my-password");
    Secret secret = new Secret();
    secret.setData(data);
    CompletionStage<Secret> cf = CompletableFuture.supplyAsync(() -> secret);
    when(secretOperator.getAsync(anyString(), anyString())).thenReturn(Future.fromCompletionStage(cf));
    KafkaClientAuthenticationScramSha512 auth = new KafkaClientAuthenticationScramSha512();
    PasswordSecretSource passwordSecretSource = new PasswordSecretSource();
    passwordSecretSource.setSecretName("my-secret");
    passwordSecretSource.setPassword("password1");
    auth.setPasswordSecret(passwordSecretSource);
    Future<Integer> result = VertxUtil.authTlsHash(secretOperator, "anyNamespace", auth, List.of());
    result.onComplete(handler -> {
        assertTrue(handler.failed());
        assertEquals("Items with key(s) [password1] are missing in Secret my-secret", handler.cause().getMessage());
    });
}
/**
 * Encodes the given span into this encoder's byte representation.
 *
 * @param input span to serialize
 * @return the encoded bytes
 */
public abstract byte[] encode(MutableSpan input);
// A 64-bit (16-hex-char) trace id must be written verbatim in the JSON v2 output.
@Test
void span_64bitTraceId_JSON_V2() {
    clientSpan.traceId(clientSpan.traceId().substring(16));
    assertThat(new String(encoder.encode(clientSpan), UTF_8))
        .isEqualTo(
            "{\"traceId\":\"216a2aea45d08fc9\",\"parentId\":\"6b221d5bc9e6496c\",\"id\":\"5b4185666d50f68b\",\"kind\":\"CLIENT\",\"name\":\"get\",\"timestamp\":1472470996199000,\"duration\":207000,\"localEndpoint\":{\"serviceName\":\"frontend\",\"ipv4\":\"127.0.0.1\"},\"remoteEndpoint\":{\"serviceName\":\"backend\",\"ipv4\":\"192.168.99.101\",\"port\":9000},\"annotations\":[{\"timestamp\":1472470996238000,\"value\":\"foo\"},{\"timestamp\":1472470996403000,\"value\":\"bar\"}],\"tags\":{\"clnt/finagle.version\":\"6.45.0\",\"http.path\":\"/api\"}}");
}
/**
 * Authenticates a client via an Athenz RoleToken taken from the command data or
 * the ZPE HTTP header, validating its domain and signature against the ZTS
 * public key. On any failure the tracked error code is reported through
 * {@code incrementFailureMetric} before the exception is rethrown.
 *
 * @param authData source of the peer address and the role token
 * @return the authenticated principal from the token
 * @throws AuthenticationException when the token is missing, malformed, from an
 *         unexpected domain, or fails signature validation
 */
@Override
public String authenticate(AuthenticationDataSource authData) throws AuthenticationException {
    SocketAddress clientAddress;
    String roleToken;
    // Tracks the reason for failure so the catch block can report the right metric.
    ErrorCode errorCode = ErrorCode.UNKNOWN;
    try {
        if (authData.hasDataFromPeer()) {
            clientAddress = authData.getPeerAddress();
        } else {
            errorCode = ErrorCode.NO_CLIENT;
            throw new AuthenticationException("Authentication data source does not have a client address");
        }

        // Token can arrive either in the command payload or in the ZPE HTTP header.
        if (authData.hasDataFromCommand()) {
            roleToken = authData.getCommandData();
        } else if (authData.hasDataFromHttp()) {
            roleToken = authData.getHttpHeader(AuthZpeClient.ZPE_TOKEN_HDR);
        } else {
            errorCode = ErrorCode.NO_TOKEN;
            throw new AuthenticationException("Authentication data source does not have a role token");
        }

        if (roleToken == null) {
            errorCode = ErrorCode.NO_TOKEN;
            throw new AuthenticationException("Athenz token is null, can't authenticate");
        }
        if (roleToken.isEmpty()) {
            errorCode = ErrorCode.NO_TOKEN;
            throw new AuthenticationException("Athenz RoleToken is empty, Server is Using Athenz Authentication");
        }
        if (log.isDebugEnabled()) {
            log.debug("Athenz RoleToken : [{}] received from Client: {}", roleToken, clientAddress);
        }

        RoleToken token = new RoleToken(roleToken);

        if (!domainNameList.contains(token.getDomain())) {
            errorCode = ErrorCode.DOMAIN_MISMATCH;
            throw new AuthenticationException(
                    String.format("Athenz RoleToken Domain mismatch, Expected: %s, Found: %s",
                            domainNameList.toString(), token.getDomain()));
        }

        // Synchronize for non-thread safe static calls inside athenz library
        synchronized (this) {
            PublicKey ztsPublicKey = AuthZpeClient.getZtsPublicKey(token.getKeyId());

            if (ztsPublicKey == null) {
                errorCode = ErrorCode.NO_PUBLIC_KEY;
                throw new AuthenticationException("Unable to retrieve ZTS Public Key");
            }

            if (token.validate(ztsPublicKey, allowedOffset, false, null)) {
                log.debug("Athenz Role Token : {}, Authenticated for Client: {}", roleToken, clientAddress);
                AuthenticationMetrics.authenticateSuccess(getClass().getSimpleName(), getAuthMethodName());
                return token.getPrincipal();
            } else {
                errorCode = ErrorCode.INVALID_TOKEN;
                throw new AuthenticationException(
                        String.format("Athenz Role Token Not Authenticated from Client: %s", clientAddress));
            }
        }
    } catch (AuthenticationException exception) {
        incrementFailureMetric(errorCode);
        throw exception;
    }
}
// A validly signed token for a non-allowed domain must be rejected.
@Test
public void testAuthenticateSignedTokenWithDifferentDomain() throws Exception {
    List<String> roles = new ArrayList<String>() {
        {
            add("test_role");
        }
    };
    RoleToken token = new RoleToken.Builder("Z1", "invalid", roles).principal("test_app").build();
    String privateKey = new String(Files.readAllBytes(Paths.get("./src/test/resources/zts_private.pem")));
    token.sign(privateKey);

    AuthenticationDataSource authData = new AuthenticationDataCommand(token.getSignedToken(),
            new InetSocketAddress("localhost", 0), null);
    try {
        provider.authenticate(authData);
        fail("Token which has different domain should not be authenticated");
    } catch (AuthenticationException e) {
        // OK, expected
    }
}
/**
 * @return the inode-id collector used by this handler's traversal
 */
public ReencryptionPendingInodeIdCollector getTraverser() {
    return traverser;
}
@Test public void testThrottleAccumulatingTasks() throws Exception { final Configuration conf = new Configuration(); final ReencryptionHandler rh = mockReencryptionhandler(conf); // mock tasks piling up final Map<Long, ReencryptionUpdater.ZoneSubmissionTracker> submissions = new HashMap<>(); final ReencryptionUpdater.ZoneSubmissionTracker zst = new ReencryptionUpdater.ZoneSubmissionTracker(); submissions.put(new Long(1), zst); Future mock = Mockito.mock(Future.class); for (int i = 0; i < Runtime.getRuntime().availableProcessors() * 3; ++i) { zst.addTask(mock); } Thread removeTaskThread = new Thread() { public void run() { try { Thread.sleep(3000); } catch (InterruptedException ie) { LOG.info("removeTaskThread interrupted."); Thread.currentThread().interrupt(); } zst.getTasks().clear(); } }; Whitebox.setInternalState(rh, "submissions", submissions); final StopWatch sw = new StopWatch().start(); removeTaskThread.start(); rh.getTraverser().throttle(); sw.stop(); LOG.info("Throttle completed, consumed {}", sw.now(TimeUnit.MILLISECONDS)); assertTrue("should have throttled for at least 3 second", sw.now(TimeUnit.MILLISECONDS) >= 3000); }
/**
 * Finds all grants whose target field equals the given GRN.
 *
 * @param target the grant target
 * @return matching grants (possibly empty)
 */
public List<GrantDTO> getForTarget(GRN target) {
    return db.find(DBQuery.is(GrantDTO.FIELD_TARGET, target.toString())).toArray();
}
// Per the grants.json fixture: stream1 carries one grant, stream2 carries three.
@Test
@MongoDBFixtures("grants.json")
public void getForTarget() {
    final GRN stream1 = grnRegistry.parse("grn::::stream:54e3deadbeefdeadbeef0000");
    final GRN stream2 = grnRegistry.parse("grn::::stream:54e3deadbeefdeadbeef0001");

    assertThat(dbService.getForTarget(stream1)).hasSize(1);
    assertThat(dbService.getForTarget(stream2)).hasSize(3);
}
/**
 * Copies values from {@code properties} into same-named declared fields of {@code obj}
 * via reflection. Only fields whose names appear as keys are touched.
 *
 * @param properties property-name to raw-string-value map
 * @param obj        object whose fields are populated in place
 * @throws IllegalArgumentException if setting or converting any field fails
 */
public static <T> void update(Map<String, String> properties, T obj) throws IllegalArgumentException {
    Field[] fields = obj.getClass().getDeclaredFields();
    Arrays.stream(fields).forEach(f -> {
        if (properties.containsKey(f.getName())) {
            try {
                f.setAccessible(true);
                String v = properties.get(f.getName());
                if (!StringUtils.isBlank(v)) {
                    // Non-blank values are trimmed then converted to the field's type.
                    f.set(obj, value(trim(v), f));
                } else {
                    // Blank values get field-type-specific "empty" handling.
                    setEmptyValue(v, f, obj);
                }
            } catch (Exception e) {
                throw new IllegalArgumentException(format("failed to initialize %s field while setting value %s",
                        f.getName(), properties.get(f.getName())), e);
            }
        }
    });
}
// Values padded with whitespace must be trimmed, split, and converted into the
// typed map/list/set fields of MyConfig.
@Test
public void testWithBlankVallueConfig() {
    Map<String, String> properties = new HashMap<>();
    properties.put("name", " config ");
    properties.put("stringStringMap", "key1=value1 , key2= value2 ");
    properties.put("stringIntMap", "key1 = 1, key2 = 2 ");
    properties.put("longStringMap", " 1 =value1 ,2 =value2 ");
    properties.put("longList", " 1, 3, 8 , 0 ,9 ");
    properties.put("stringList", " aa, bb , cc, ee ");
    properties.put("longSet", " 1, 3, 8 , 0 , 3, 1 ,9 ");
    properties.put("stringSet", " aa, bb , cc, ee , bb, aa ");
    MyConfig config = new MyConfig();
    FieldParser.update(properties, config);
    assertEquals(config.name, "config");
    assertEquals(config.stringStringMap.get("key1"), "value1");
    assertEquals(config.stringStringMap.get("key2"), "value2");
    assertEquals((int) config.stringIntMap.get("key1"), 1);
    assertEquals((int) config.stringIntMap.get("key2"), 2);
    assertEquals(config.longStringMap.get(1L), "value1");
    assertEquals(config.longStringMap.get(2L), "value2");
    assertEquals((long)config.longList.get(2), 8);
    assertEquals(config.stringList.get(1), "bb");
    assertTrue(config.longSet.contains(3L));
    assertTrue(config.stringSet.contains("bb"));
}
/**
 * Applies Sofa plugin configuration. Disabled or null plugin data is ignored.
 * When the parsed register config differs from the cached one (or none is cached),
 * the application config cache is re-initialized and invalidated, then the new
 * config is stored as the singleton.
 */
@Override
public void handlerPlugin(final PluginData pluginData) {
    if (null != pluginData && pluginData.getEnabled()) {
        SofaRegisterConfig sofaRegisterConfig =
                GsonUtils.getInstance().fromJson(pluginData.getConfig(), SofaRegisterConfig.class);
        if (Objects.isNull(sofaRegisterConfig)) {
            // Unparseable or empty config: leave existing state untouched.
            return;
        }
        SofaRegisterConfig exist = Singleton.INST.get(SofaRegisterConfig.class);
        if (Objects.isNull(exist) || !sofaRegisterConfig.equals(exist)) {
            // If it is null, initialize it
            ApplicationConfigCache.getInstance().init(sofaRegisterConfig);
            ApplicationConfigCache.getInstance().invalidateAll();
        }
        Singleton.INST.single(SofaRegisterConfig.class, sofaRegisterConfig);
    }
}
// Disabled plugin data must not populate the SofaRegisterConfig singleton.
@Test
public void testPluginDisable() {
    PluginData pluginData = new PluginData("", "", registryConfig, "1", false, null);
    sofaPluginDataHandler.handlerPlugin(pluginData);
    assertNull(Singleton.INST.get(SofaRegisterConfig.class));
}
/**
 * This implementation never provides a DevOps project creator: it always
 * returns empty regardless of the characteristics supplied.
 */
@Override
public Optional<DevOpsProjectCreator> getDevOpsProjectCreator(DbSession dbSession, Map<String, String> characteristics) {
    return Optional.empty();
}
// The creator lookup is expected to be empty for any input.
@Test
void getDevOpsProjectCreator_withCharacteristics_returnsEmpty() {
    assertThat(underTest.getDevOpsProjectCreator(mock(DbSession.class), Map.of())).isEmpty();
}
public static Extension findExtensionAnnotation(Class<?> clazz) { if (clazz.isAnnotationPresent(Extension.class)) { return clazz.getAnnotation(Extension.class); } // search recursively through all annotations for (Annotation annotation : clazz.getAnnotations()) { Class<? extends Annotation> annotationClass = annotation.annotationType(); if (!annotationClass.getName().startsWith("java.lang.annotation")) { Extension extensionAnnotation = findExtensionAnnotation(annotationClass); if (extensionAnnotation != null) { return extensionAnnotation; } } } return null; }
// The @Extension annotation must be discoverable on a freshly compiled class.
@Test
public void findExtensionAnnotation() {
    List<JavaFileObject> generatedFiles = JavaSources.compileAll(JavaSources.Greeting, JavaSources.WhazzupGreeting);
    assertEquals(2, generatedFiles.size());

    Map<String, Class<?>> loadedClasses = new JavaFileObjectClassLoader().load(generatedFiles);
    Class<?> clazz = loadedClasses.get("test.WhazzupGreeting");
    Extension extension = AbstractExtensionFinder.findExtensionAnnotation(clazz);
    Assertions.assertNotNull(extension);
}
/**
 * Reads an optional base16 (hex) encoded string property as a ByteBuffer.
 *
 * @param property property name to read
 * @param node     JSON object node
 * @return the decoded bytes, or {@code null} when the property is absent or JSON null
 * @throws IllegalArgumentException when the property is present but not textual
 */
public static ByteBuffer getByteBufferOrNull(String property, JsonNode node) {
    if (!node.has(property) || node.get(property).isNull()) {
        return null;
    }

    JsonNode pNode = node.get(property);
    Preconditions.checkArgument(
        pNode.isTextual(), "Cannot parse byte buffer from non-text value: %s: %s", property, pNode);
    // base16() decoding requires upper-case input; normalize with a fixed locale.
    return ByteBuffer.wrap(
        BaseEncoding.base16().decode(pNode.textValue().toUpperCase(Locale.ROOT)));
}
// Covers: missing property, explicit null, round-trip of hex bytes, and non-text rejection.
@Test
public void getByteBufferOrNull() throws JsonProcessingException {
    assertThat(JsonUtil.getByteBufferOrNull("x", JsonUtil.mapper().readTree("{}"))).isNull();
    assertThat(JsonUtil.getByteBufferOrNull("x", JsonUtil.mapper().readTree("{\"x\": null}")))
        .isNull();

    byte[] bytes = new byte[] {1, 2, 3, 4};
    String base16Str = BaseEncoding.base16().encode(bytes);
    String json = String.format("{\"x\": \"%s\"}", base16Str);
    ByteBuffer byteBuffer = JsonUtil.getByteBufferOrNull("x", JsonUtil.mapper().readTree(json));
    assertThat(byteBuffer.array()).isEqualTo(bytes);

    assertThatThrownBy(
            () -> JsonUtil.getByteBufferOrNull("x", JsonUtil.mapper().readTree("{\"x\": 23}")))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Cannot parse byte buffer from non-text value: x: 23");
}
/**
 * Parses a free-form date/time string with the natty natural-language parser.
 *
 * @param value the raw string; null or empty yields null
 * @return a DateTime in this converter's time zone, or null when nothing parses
 */
@Override
@Nullable
public Object convert(String value) {
    if (value == null || value.isEmpty()) {
        return null;
    }

    // A fresh Parser per call, configured with this converter's time zone.
    final Parser parser = new Parser(timeZone.toTimeZone());
    final List<DateGroup> r = parser.parse(value);
    if (r.isEmpty() || r.get(0).getDates().isEmpty()) {
        return null;
    }

    // Use the first date of the first group and attach the configured zone.
    return new DateTime(r.get(0).getDates().get(0), timeZone);
}
// All parse shapes (date-only, date+time, textual) must land in the +12:00 zone.
@Test
public void convertObeysTimeZone() throws Exception {
    Converter c = new FlexibleDateConverter(ImmutableMap.<String, Object>of("time_zone", "+12:00"));

    final DateTime dateOnly = (DateTime) c.convert("2014-3-12");
    assertThat(dateOnly.getZone()).isEqualTo(DateTimeZone.forOffsetHours(12));
    // Date-only input resolves to some instant within that calendar day.
    Assertions.assertThat(dateOnly)
        .isAfterOrEqualTo(new DateTime(2014, 3, 12, 0, 0, DateTimeZone.forOffsetHours(12)))
        .isBefore(new DateTime(2014, 3, 13, 0, 0, DateTimeZone.forOffsetHours(12)));

    final DateTime dateTime = (DateTime) c.convert("2014-3-12 12:34");
    assertThat(dateTime.getZone()).isEqualTo(DateTimeZone.forOffsetHours(12));
    Assertions.assertThat(dateTime)
        .isEqualTo(new DateTime(2014, 3, 12, 12, 34, DateTimeZone.forOffsetHours(12)));

    final DateTime textualDateTime = (DateTime) c.convert("Mar 12, 2014 2pm");
    assertThat(textualDateTime.getZone()).isEqualTo(DateTimeZone.forOffsetHours(12));
    Assertions.assertThat(textualDateTime)
        .isEqualTo(new DateTime(2014, 3, 12, 14, 0, DateTimeZone.forOffsetHours(12)));
}
/**
 * Attempts to place one container for this request on the given node.
 *
 * @param node         candidate node
 * @param pendingAsk   outstanding resource ask
 * @param type         locality type of the request
 * @param reserved     whether a prior reservation exists on this node
 * @param schedulerKey key identifying the request within the app
 * @return the allocated capability, {@code FairScheduler.CONTAINER_RESERVED} when
 *         only a reservation could be made, or {@code Resources.none()} otherwise
 */
private Resource assignContainer(
    FSSchedulerNode node, PendingAsk pendingAsk, NodeType type,
    boolean reserved, SchedulerRequestKey schedulerKey) {

  // How much does this request need?
  Resource capability = pendingAsk.getPerAllocationResource();

  // How much does the node have?
  Resource available = node.getUnallocatedResource();

  Container reservedContainer = null;
  if (reserved) {
    reservedContainer = node.getReservedContainer().getContainer();
  }

  // Can we allocate a container on this node?
  if (Resources.fitsIn(capability, available)) {
    // Inform the application of the new container for this request
    RMContainer allocatedContainer =
        allocate(type, node, schedulerKey, pendingAsk, reservedContainer);
    if (allocatedContainer == null) {
      // Did the application need this resource?
      if (reserved) {
        unreserve(schedulerKey, node);
      }
      LOG.debug("Resource ask {} fits in available node resources {},"
          + " but no container was allocated", capability, available);
      return Resources.none();
    }

    // If we had previously made a reservation, delete it
    if (reserved) {
      unreserve(schedulerKey, node);
    }

    // Inform the node
    node.allocateContainer(allocatedContainer);

    // If not running unmanaged, the first container we allocate is always
    // the AM. Set the amResource for this app and update the leaf queue's
    // AM usage
    if (!isAmRunning() && !getUnmanagedAM()) {
      setAMResource(capability);
      getQueue().addAMResourceUsage(capability);
      setAmRunning(true);
    }

    return capability;
  }

  LOG.debug("Resource request: {} exceeds the available"
      + " resources of the node.", capability);

  // The desired container won't fit here, so reserve
  // Reserve only, if app does not wait for preempted resources on the node,
  // otherwise we may end up with duplicate reservations
  if (isReservable(capability) && !node.isPreemptedForApp(this) &&
      reserve(pendingAsk.getPerAllocationResource(), node, reservedContainer,
          type, schedulerKey)) {
    updateAMDiagnosticMsg(capability, " exceeds the available resources of "
        + "the node and the request is reserved)");
    LOG.debug("{}'s resource request is reserved.", getName());
    return FairScheduler.CONTAINER_RESERVED;
  } else {
    updateAMDiagnosticMsg(capability, " exceeds the available resources of "
        + "the node and the request cannot be reserved)");
    if (LOG.isDebugEnabled()) {
      LOG.debug("Couldn't create reservation for app: " + getName()
          + ", at priority " + schedulerKey.getPriority());
    }
    return Resources.none();
  }
}
// An app attempt with no pending asks must get Resources.none() from assignContainer.
@Test
public void testNoNextPendingAsk() {
    FSLeafQueue queue = Mockito.mock(FSLeafQueue.class);
    ApplicationAttemptId applicationAttemptId = createAppAttemptId(1, 1);
    RMContext rmContext = Mockito.mock(RMContext.class);
    ConcurrentMap<ApplicationId, RMApp> rmApps = new ConcurrentHashMap<>();
    RMApp rmApp = Mockito.mock(RMApp.class);
    rmApps.put(applicationAttemptId.getApplicationId(), rmApp);
    ApplicationSubmissionContext appContext = Mockito.mock(ApplicationSubmissionContext.class);
    Mockito.when(appContext.getUnmanagedAM()).thenReturn(false);
    Mockito.when(appContext.getLogAggregationContext())
        .thenReturn(Mockito.mock(LogAggregationContext.class));
    Mockito.when(rmApp.getApplicationSchedulingEnvs())
        .thenReturn(new HashMap<>());
    Mockito.when(rmApp.getApplicationSubmissionContext())
        .thenReturn(appContext);
    Mockito.when(rmContext.getRMApps()).thenReturn(rmApps);
    Mockito.when(rmContext.getYarnConfiguration()).thenReturn(conf);
    FSAppAttempt schedulerApp =
        new FSAppAttempt(scheduler, applicationAttemptId, "user1", queue,
            null, rmContext);
    schedulerApp.setAmRunning(false);
    FSSchedulerNode schedulerNode = Mockito.mock(FSSchedulerNode.class);

    Resource resource = schedulerApp.assignContainer(schedulerNode);

    assertEquals(Resources.none(), resource);
}
/**
 * Destroys this timer. Idempotent: later calls return without effect.
 * {@code onDestroy()} is invoked only when no task is running at the time of the
 * call, or when a pending timeout could be cancelled. The underlying wheel timer
 * is stopped unconditionally, outside the lock.
 */
public void destroy() {
    boolean invokeDestroyed = false;
    this.lock.lock();
    try {
        if (this.destroyed) {
            // Already destroyed; nothing more to do.
            return;
        }
        this.destroyed = true;
        if (!this.running) {
            // No task in flight, so this call is responsible for onDestroy().
            invokeDestroyed = true;
        }

        if (this.stopped) {
            return;
        }
        this.stopped = true;

        if (this.timeout != null) {
            if (this.timeout.cancel()) {
                // Cancelled a pending timeout: the task will never run.
                invokeDestroyed = true;
                this.running = false;
            }
            this.timeout = null;
        }
    } finally {
        this.lock.unlock();
        // Stop the shared timer and fire the destroy hook outside the lock.
        this.timer.stop();
        if (invokeDestroyed) {
            onDestroy();
        }
    }
}
/**
 * Checks that destroy() fires the onDestroy() hook exactly once: the
 * destroyed counter goes from 0 (right after start) to 1 after destroy().
 * The 100ms sleep lets the started timer settle before destruction;
 * NOTE(review): this is timing-based and could be flaky on a loaded host.
 */
@Test
public void testDestroy() throws Exception {
    this.timer.start();
    assertEquals(0, this.timer.destroyed.get());
    Thread.sleep(100);
    this.timer.destroy();
    assertEquals(1, this.timer.destroyed.get());
}
/**
 * Builds a new {@code MembersView} with the given version, converting each
 * {@code MemberImpl} into a {@code MemberInfo}. The returned view wraps an
 * unmodifiable snapshot of the conversion result.
 */
public static <P, R> MembersView createNew(int version, Collection<MemberImpl> members) {
    List<MemberInfo> memberInfos = new ArrayList<>(members.size());
    // One MemberInfo per member, preserving iteration order.
    members.forEach(member -> memberInfos.add(new MemberInfo(member)));
    return new MembersView(version, unmodifiableList(memberInfos));
}
/**
 * createNew() must carry the requested version through to the view and
 * contain exactly the members it was given.
 */
@Test
public void createNew() {
    final int expectedVersion = 7;
    MemberImpl[] expectedMembers = MemberMapTest.newMembers(5);
    MembersView view = MembersView.createNew(expectedVersion, Arrays.asList(expectedMembers));
    assertEquals(expectedVersion, view.getVersion());
    assertMembersViewEquals(expectedMembers, view);
}
/**
 * Creates a Datastream GCS <em>destination</em> connection profile and blocks
 * until creation completes, tracking the ID for later cleanup.
 *
 * @param connectionProfileId non-empty ID for the new profile
 * @param gcsBucketName non-empty destination bucket name
 * @param gcsRootPath root path within the bucket; must be "" or start with '/'
 * @return the created {@link ConnectionProfile}
 * @throws DatastreamResourceManagerException if the async create fails or is interrupted
 */
public synchronized ConnectionProfile createGCSDestinationConnectionProfile(
    String connectionProfileId, String gcsBucketName, String gcsRootPath) {
  checkArgument(
      !Strings.isNullOrEmpty(connectionProfileId),
      "connectionProfileId can not be null or empty");
  checkArgument(!Strings.isNullOrEmpty(gcsBucketName), "gcsBucketName can not be null or empty");
  checkArgument(gcsRootPath != null, "gcsRootPath can not be null");
  checkArgument(
      gcsRootPath.isEmpty() || gcsRootPath.charAt(0) == '/',
      "gcsRootPath must either be an empty string or start with a '/'");

  LOG.info(
      "Creating GCS Destination Connection Profile {} in project {}.",
      connectionProfileId,
      projectId);

  try {
    ConnectionProfile.Builder connectionProfileBuilder =
        ConnectionProfile.newBuilder()
            .setDisplayName(connectionProfileId)
            // Destination reachability uses Datastream's default static IPs.
            .setStaticServiceIpConnectivity(StaticServiceIpConnectivity.getDefaultInstance())
            .setGcsProfile(
                GcsProfile.newBuilder().setBucket(gcsBucketName).setRootPath(gcsRootPath));

    CreateConnectionProfileRequest request =
        CreateConnectionProfileRequest.newBuilder()
            .setParent(LocationName.of(projectId, location).toString())
            .setConnectionProfile(connectionProfileBuilder)
            .setConnectionProfileId(connectionProfileId)
            .build();

    // Block on the LRO so the profile exists before we return it.
    ConnectionProfile reference = datastreamClient.createConnectionProfileAsync(request).get();
    // Remember the ID so cleanup can delete this profile later.
    createdConnectionProfileIds.add(connectionProfileId);

    LOG.info(
        "Successfully created GCS Destination Connection Profile {} in project {}.",
        connectionProfileId,
        projectId);

    return reference;
  } catch (ExecutionException | InterruptedException e) {
    // BUG FIX: message previously said "source" in the destination factory.
    throw new DatastreamResourceManagerException(
        "Failed to create GCS destination connection profile. ", e);
  }
}
/**
 * Happy path: when the Datastream client's async create resolves, the
 * manager should hand back exactly the ConnectionProfile the client produced.
 * The deep-stub chain mocks createConnectionProfileAsync(...).get().
 */
@Test
public void testCreateGCSDestinationConnectionShouldCreateSuccessfully()
    throws ExecutionException, InterruptedException {
  ConnectionProfile connectionProfile = ConnectionProfile.getDefaultInstance();
  when(datastreamClient
          .createConnectionProfileAsync(any(CreateConnectionProfileRequest.class))
          .get())
      .thenReturn(connectionProfile);
  assertThat(
          testManager.createGCSDestinationConnectionProfile(
              CONNECTION_PROFILE_ID, BUCKET, ROOT_PATH))
      .isEqualTo(connectionProfile);
}
/**
 * Executes the given SQL through the Seata ExecuteTemplate so the statement
 * participates in transaction interception. Records the SQL for later use
 * before delegating.
 */
@Override
public boolean execute(String sql) throws SQLException {
    targetSQL = sql;
    return ExecuteTemplate.execute(this, (stmt, params) -> stmt.execute((String) params[0]), sql);
}
/**
 * Smoke-tests every execute/executeUpdate overload of the statement proxy
 * (plain, generated-keys flag, column indexes, column names) plus the batch
 * entry points, asserting none of them throw for a simple SELECT.
 */
@Test
public void testExecute() throws SQLException {
    String sql = "select * from table_statment_proxy";
    Assertions.assertNotNull(statementProxy.executeQuery(sql));
    Assertions.assertDoesNotThrow(() -> statementProxy.executeUpdate(sql));
    Assertions.assertDoesNotThrow(() -> statementProxy.executeUpdate(sql, Statement.RETURN_GENERATED_KEYS));
    Assertions.assertDoesNotThrow(() -> statementProxy.executeUpdate(sql, new int[]{1}));
    Assertions.assertDoesNotThrow(() -> statementProxy.executeUpdate(sql, new String[]{"id"}));
    Assertions.assertDoesNotThrow(() -> statementProxy.execute(sql));
    Assertions.assertDoesNotThrow(() -> statementProxy.execute(sql, Statement.RETURN_GENERATED_KEYS));
    Assertions.assertDoesNotThrow(() -> statementProxy.execute(sql, new int[]{1}));
    Assertions.assertDoesNotThrow(() -> statementProxy.execute(sql, new String[]{"id"}));
    Assertions.assertDoesNotThrow(() -> statementProxy.executeBatch());
    Assertions.assertDoesNotThrow(() -> statementProxy.clearBatch());
}
/**
 * Maps an ODPS/MaxCompute {@code TypeInfo} to the engine's {@code Type}.
 * Parameterized types (decimal precision/scale, char/varchar length) carry
 * their parameters across; MAP/ARRAY/STRUCT recurse on their element types.
 * Any ODPS type not listed falls back to VARCHAR.
 */
public static Type convertType(TypeInfo typeInfo) {
    switch (typeInfo.getOdpsType()) {
        // Integer family: widths map one-to-one.
        case BIGINT:
            return Type.BIGINT;
        case INT:
            return Type.INT;
        case SMALLINT:
            return Type.SMALLINT;
        case TINYINT:
            return Type.TINYINT;
        case FLOAT:
            return Type.FLOAT;
        case DECIMAL:
            // Preserve precision and scale from the source type.
            DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
            return ScalarType.createUnifiedDecimalType(decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale());
        case DOUBLE:
            return Type.DOUBLE;
        case CHAR:
            CharTypeInfo charTypeInfo = (CharTypeInfo) typeInfo;
            return ScalarType.createCharType(charTypeInfo.getLength());
        case VARCHAR:
            VarcharTypeInfo varcharTypeInfo = (VarcharTypeInfo) typeInfo;
            return ScalarType.createVarcharType(varcharTypeInfo.getLength());
        case STRING:
        case JSON:
            // Both STRING and JSON become the catalog's default string type.
            return ScalarType.createDefaultCatalogString();
        case BINARY:
            return Type.VARBINARY;
        case BOOLEAN:
            return Type.BOOLEAN;
        case DATE:
            return Type.DATE;
        case TIMESTAMP:
        case DATETIME:
            // Both temporal types collapse to DATETIME.
            return Type.DATETIME;
        case MAP:
            // Recurse on key and value types.
            MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
            return new MapType(convertType(mapTypeInfo.getKeyTypeInfo()),
                    convertType(mapTypeInfo.getValueTypeInfo()));
        case ARRAY:
            ArrayTypeInfo arrayTypeInfo = (ArrayTypeInfo) typeInfo;
            return new ArrayType(convertType(arrayTypeInfo.getElementTypeInfo()));
        case STRUCT:
            // Convert each field type in declaration order; field names are
            // not carried over here.
            StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
            List<Type> fieldTypeList =
                    structTypeInfo.getFieldTypeInfos().stream().map(EntityConvertUtils::convertType)
                            .collect(Collectors.toList());
            return new StructType(fieldTypeList);
        default:
            // Unknown/unsupported ODPS types degrade to VARCHAR.
            return Type.VARCHAR;
    }
}
/**
 * VARCHAR(20) from ODPS must convert to the engine's varchar with the same
 * declared length.
 */
@Test
public void testConvertTypeCaseVarchar() {
    Type actual = EntityConvertUtils.convertType(TypeInfoFactory.getVarcharTypeInfo(20));
    assertEquals(ScalarType.createVarcharType(20), actual);
}
/**
 * Returns a {@code PositionBound} that places no constraint on position,
 * represented by wrapping the empty position.
 */
public static PositionBound unbounded() {
    final Position emptyPosition = Position.emptyPosition();
    return new PositionBound(emptyPosition);
}
/**
 * Two independently obtained unbounded bounds must compare equal.
 */
@Test
public void shouldEqualUnbounded() {
    final PositionBound first = PositionBound.unbounded();
    final PositionBound second = PositionBound.unbounded();
    assertEquals(first, second);
}
/**
 * Terminates this stream with the given sink. The sink operator is appended
 * as a transformation on this stream's type, and the resulting stream is
 * wrapped so callers can still configure the sink node.
 */
@Override
public ProcessConfigurable<?> toSink(Sink<T> sink) {
    DataStreamV2SinkTransformation<T, T> transformation =
            StreamUtils.addSinkOperator(this, sink, getType());
    NonKeyedPartitionStreamImpl<T> resultStream =
            new NonKeyedPartitionStreamImpl<>(environment, transformation);
    return StreamUtils.wrapWithConfigureHandle(resultStream);
}
/**
 * Attaching a sink to a global stream must register exactly one
 * DataStreamV2SinkTransformation with the environment.
 */
@Test
void testToSink() throws Exception {
    ExecutionEnvironmentImpl environment = StreamTestUtils.getEnv();
    GlobalStreamImpl<Integer> source =
            new GlobalStreamImpl<>(environment, new TestingTransformation<>("t1", Types.INT, 1));
    source.toSink(DataStreamV2SinkUtils.wrapSink(new DiscardingSink<>()));
    assertThat(environment.getTransformations())
            .hasSize(1)
            .element(0)
            .isInstanceOf(DataStreamV2SinkTransformation.class);
}
/**
 * Parses a Content-Type style string (e.g. {@code "text/html; charset=UTF-8"})
 * into a {@link MediaType}. The first semicolon-separated token is the type;
 * a {@code charset=...} parameter, if present, overrides the default encoding
 * ({@code Constants.ENCODE}).
 *
 * @param contentType raw header value; must be non-empty
 * @throws IllegalArgumentException if {@code contentType} is null or empty
 */
public static MediaType valueOf(String contentType) {
    if (StringUtils.isEmpty(contentType)) {
        throw new IllegalArgumentException("MediaType must not be empty");
    }
    String[] values = contentType.split(";");
    String charset = Constants.ENCODE;
    for (String value : values) {
        // BUG FIX: per RFC 7231 parameters may be preceded by whitespace
        // ("type; charset=X"); without trimming, startsWith("charset=")
        // never matched and the charset was silently ignored.
        String token = value.trim();
        if (token.startsWith("charset=")) {
            charset = token.substring("charset=".length());
        }
    }
    return new MediaType(values[0].trim(), charset);
}
/**
 * Parsing the form-urlencoded constant must yield its type, the default
 * UTF-8 charset, and round-trip back through toString().
 */
@Test
void testValueOf() {
    MediaType parsed = MediaType.valueOf(MediaType.APPLICATION_FORM_URLENCODED);
    assertEquals("application/x-www-form-urlencoded", parsed.getType());
    assertEquals("UTF-8", parsed.getCharset());
    assertEquals(MediaType.APPLICATION_FORM_URLENCODED, parsed.toString());
}
/**
 * Moves a model from one adapter position to another, then schedules a
 * delayed model rebuild so the controller's model list catches up with the
 * adapter's new order. Must not be called while models are being built.
 */
public void moveModel(int fromPosition, int toPosition) {
    assertNotBuildingModels();
    adapter.moveModel(fromPosition, toPosition);
    // Delay the rebuild to coalesce rapid successive moves (e.g. drag & drop).
    final int rebuildDelayMs = 500;
    requestDelayedModelBuild(rebuildDelayMs);
}
/**
 * Exercises EpoxyController.moveModel(): the adapter must emit a single
 * onItemRangeMoved notification, the adapter's model order must match the
 * mirrored change to the backing list, and a subsequent rebuild must not
 * produce any further observer callbacks (the move already reconciled).
 */
@Test
public void moveModel() {
  AdapterDataObserver observer = mock(AdapterDataObserver.class);

  final List<TestModel> testModels = new ArrayList<>();
  testModels.add(new TestModel(1));
  testModels.add(new TestModel(2));
  testModels.add(new TestModel(3));

  EpoxyController controller = new EpoxyController() {

    @Override
    protected void buildModels() {
      add(testModels);
    }
  };

  EpoxyControllerAdapter adapter = controller.getAdapter();
  adapter.registerAdapterDataObserver(observer);

  controller.requestModelBuild();
  verify(observer).onItemRangeInserted(0, 3);

  // Mirror the move in the source list, then tell the controller.
  testModels.add(0, testModels.remove(1));
  controller.moveModel(1, 0);
  verify(observer).onItemRangeMoved(1, 0, 1);
  assertEquals(testModels, adapter.getCurrentModels());

  // Rebuilding should be a no-op diff: order already matches.
  controller.requestModelBuild();
  assertEquals(testModels, adapter.getCurrentModels());
  verifyNoMoreInteractions(observer);
}
/**
 * Handles the ETH_GET_BALANCE operation: resolves the account address and
 * block parameter from message headers (falling back to endpoint
 * configuration), issues eth_getBalance, and puts the resulting balance on
 * the message body unless the response carried an error.
 */
@InvokeOnHeader(Web3jConstants.ETH_GET_BALANCE)
void ethGetBalance(Message message) throws IOException {
    String accountAddress =
            message.getHeader(Web3jConstants.ADDRESS, configuration::getAddress, String.class);
    String atBlockHeader =
            message.getHeader(Web3jConstants.AT_BLOCK, configuration::getAtBlock, String.class);
    DefaultBlockParameter blockParameter = toDefaultBlockParameter(atBlockHeader);
    Request<?, EthGetBalance> balanceRequest = web3j.ethGetBalance(accountAddress, blockParameter);
    setRequestId(message, balanceRequest);
    EthGetBalance balanceResponse = balanceRequest.send();
    // checkForError populates the message's error state; only set the body
    // when the call succeeded.
    if (!checkForError(message, balanceResponse)) {
        message.setBody(balanceResponse.getBalance());
    }
}
/**
 * Verifies the ETH_GET_BALANCE route: with the web3j client mocked to return
 * a balance of ONE, the exchange body after sending must be that BigInteger.
 */
@Test
public void ethGetBalanceTest() throws Exception {
    EthGetBalance response = Mockito.mock(EthGetBalance.class);
    Mockito.when(mockWeb3j.ethGetBalance(any(), any())).thenReturn(request);
    Mockito.when(request.send()).thenReturn(response);
    Mockito.when(response.getBalance()).thenReturn(BigInteger.ONE);

    Exchange exchange = createExchangeWithBodyAndHeader(null, OPERATION, Web3jConstants.ETH_GET_BALANCE);
    // Exercise the AT_BLOCK header path as well as the address default.
    exchange.getIn().setHeader(Web3jConstants.AT_BLOCK, DefaultBlockParameterName.EARLIEST);
    template.send(exchange);
    BigInteger body = exchange.getIn().getBody(BigInteger.class);
    assertEquals(BigInteger.ONE, body);
}
/**
 * Synchronous put with a per-entry TTL: delegates to the async variant and
 * blocks on the result via get(). Returns the previous value mapped to the
 * key, per the putAsync contract.
 */
@Override
public V put(K key, V value, Duration ttl) {
    return get(putAsync(key, value, ttl));
}
/**
 * replace(key, expectedOld, newValue) must fail and leave the stored value
 * untouched when the expected old value does not match.
 */
@Test
public void testReplaceOldValueFail() {
    RMapCacheNative<SimpleKey, SimpleValue> cache = redisson.getMapCacheNative("simple");
    cache.put(new SimpleKey("1"), new SimpleValue("2"));

    boolean replaced = cache.replace(new SimpleKey("1"), new SimpleValue("43"), new SimpleValue("31"));
    Assertions.assertFalse(replaced);

    SimpleValue stored = cache.get(new SimpleKey("1"));
    Assertions.assertEquals("2", stored.getValue());
    cache.destroy();
}
/**
 * Convenience overload for unsigned requests: delegates to the full
 * httpRequest variant with a null session key and null signature algorithm.
 *
 * @param url             target URL
 * @param method          HTTP method name
 * @param headers         request headers (may be null)
 * @param requestBodyData request body to serialize (may be null)
 * @param responseFormat  expected response type for deserialization
 * @return the deserialized HTTP response
 */
public <T> HttpResponse<T> httpRequest(String url, String method, HttpHeaders headers,
        Object requestBodyData, TypeReference<T> responseFormat) {
    return httpRequest(url, method, headers, requestBodyData, responseFormat, null, null);
}
/**
 * A null HTTP method must be rejected with a NullPointerException before any
 * request is attempted.
 */
@Test
public void testNullMethod() {
    RestClient restClient = spy(new RestClient(null));
    assertThrows(NullPointerException.class, () -> restClient.httpRequest(
            MOCK_URL, null, null, TEST_DTO, TEST_TYPE, MOCK_SECRET_KEY, TEST_SIGNATURE_ALGORITHM));
}
/**
 * Handles an AssignReplicasToDirsRequest from a broker: records the log
 * directory each replica resides in, and for replicas placed into a
 * directory the broker does not report as online, schedules leader/ISR
 * updates so leadership moves off the affected replicas.
 *
 * Per-partition error codes in the response:
 * - UNKNOWN_TOPIC_ID if the topic ID is not known to the controller;
 * - UNKNOWN_TOPIC_OR_PARTITION if the partition does not exist;
 * - NOT_LEADER_OR_FOLLOWER if the broker is not a replica of the partition;
 * - NONE otherwise (including assignments to unknown/offline directories,
 *   which still succeed but trigger a leader election).
 *
 * @throws UnsupportedVersionException   if the metadata version predates directory assignment
 * @throws BrokerIdNotRegisteredException if the requesting broker is unknown
 */
ControllerResult<AssignReplicasToDirsResponseData> handleAssignReplicasToDirs(AssignReplicasToDirsRequestData request) {
    if (!featureControl.metadataVersion().isDirectoryAssignmentSupported()) {
        throw new UnsupportedVersionException("Directory assignment is not supported yet.");
    }
    int brokerId = request.brokerId();
    // Fenced/stale brokers are rejected via the epoch check.
    clusterControl.checkBrokerEpoch(brokerId, request.brokerEpoch());
    BrokerRegistration brokerRegistration = clusterControl.brokerRegistrations().get(brokerId);
    if (brokerRegistration == null) {
        throw new BrokerIdNotRegisteredException("Broker ID " + brokerId + " is not currently registered");
    }
    List<ApiMessageAndVersion> records = new ArrayList<>();
    AssignReplicasToDirsResponseData response = new AssignReplicasToDirsResponseData();
    // Partitions assigned to a directory the broker does not list as online;
    // these need new leaders elected below.
    Set<TopicIdPartition> leaderAndIsrUpdates = new HashSet<>();
    for (AssignReplicasToDirsRequestData.DirectoryData reqDir : request.directories()) {
        Uuid dirId = reqDir.id();
        boolean directoryIsOffline = !brokerRegistration.hasOnlineDir(dirId);
        AssignReplicasToDirsResponseData.DirectoryData resDir =
                new AssignReplicasToDirsResponseData.DirectoryData().setId(dirId);
        for (AssignReplicasToDirsRequestData.TopicData reqTopic : reqDir.topics()) {
            Uuid topicId = reqTopic.topicId();
            Errors topicError = Errors.NONE;
            TopicControlInfo topicInfo = this.topics.get(topicId);
            if (topicInfo == null) {
                log.warn("AssignReplicasToDirsRequest from broker {} references unknown topic ID {}", brokerId, topicId);
                topicError = Errors.UNKNOWN_TOPIC_ID;
            }
            AssignReplicasToDirsResponseData.TopicData resTopic =
                    new AssignReplicasToDirsResponseData.TopicData().setTopicId(topicId);
            for (AssignReplicasToDirsRequestData.PartitionData reqPartition : reqTopic.partitions()) {
                int partitionIndex = reqPartition.partitionIndex();
                // A topic-level error applies to every partition of the topic.
                Errors partitionError = topicError;
                if (topicError == Errors.NONE) {
                    String topicName = topicInfo.name;
                    PartitionRegistration partitionRegistration = topicInfo.parts.get(partitionIndex);
                    if (partitionRegistration == null) {
                        log.warn("AssignReplicasToDirsRequest from broker {} references unknown partition {}-{}", brokerId, topicName, partitionIndex);
                        partitionError = Errors.UNKNOWN_TOPIC_OR_PARTITION;
                    } else if (!Replicas.contains(partitionRegistration.replicas, brokerId)) {
                        log.warn("AssignReplicasToDirsRequest from broker {} references non assigned partition {}-{}", brokerId, topicName, partitionIndex);
                        partitionError = Errors.NOT_LEADER_OR_FOLLOWER;
                    } else {
                        // Build a PartitionChangeRecord carrying the new
                        // directory assignment; empty if nothing changed.
                        Optional<ApiMessageAndVersion> partitionChangeRecord = new PartitionChangeBuilder(
                                partitionRegistration,
                                topicId,
                                partitionIndex,
                                new LeaderAcceptor(clusterControl, partitionRegistration),
                                featureControl.metadataVersion(),
                                getTopicEffectiveMinIsr(topicName)
                        )
                                .setDirectory(brokerId, dirId)
                                .setDefaultDirProvider(clusterDescriber)
                                .build();
                        partitionChangeRecord.ifPresent(records::add);
                        if (directoryIsOffline) {
                            leaderAndIsrUpdates.add(new TopicIdPartition(topicId, partitionIndex));
                        }
                        if (log.isDebugEnabled()) {
                            log.debug("Broker {} assigned partition {}:{} to {} dir {}",
                                    brokerId, topics.get(topicId).name(), partitionIndex,
                                    directoryIsOffline ? "OFFLINE" : "ONLINE", dirId);
                        }
                    }
                }
                resTopic.partitions().add(new AssignReplicasToDirsResponseData.PartitionData().
                        setPartitionIndex(partitionIndex).
                        setErrorCode(partitionError.code()));
            }
            resDir.topics().add(resTopic);
        }
        response.directories().add(resDir);
    }
    if (!leaderAndIsrUpdates.isEmpty()) {
        // Elect new leaders for partitions whose replica landed in an
        // unknown/offline directory on this broker.
        generateLeaderAndIsrUpdates("offline-dir-assignment", brokerId, NO_LEADER, NO_LEADER, records, leaderAndIsrUpdates.iterator());
    }
    return ControllerResult.of(records, response);
}
/**
 * End-to-end check of handleAssignReplicasToDirs(): builds a two-broker
 * cluster (broker 1 with two online dirs, broker 2 with one), assigns
 * replicas across known, unknown/offline, and LOST directories, and then
 * verifies (1) the per-partition error codes in the response, (2) the exact
 * set of PartitionChangeRecords emitted — including the two extra records
 * that elect new leaders for replicas placed in unusable directories — and
 * (3) the resulting brokersToIsrs leadership state after replay.
 */
@Test
void testHandleAssignReplicasToDirs() {
    ReplicationControlTestContext ctx = new ReplicationControlTestContext.Builder().build();
    Uuid dir1b1 = Uuid.fromString("hO2YI5bgRUmByNPHiHxjNQ");
    Uuid dir2b1 = Uuid.fromString("R3Gb1HLoTzuKMgAkH5Vtpw");
    Uuid dir1b2 = Uuid.fromString("TBGa8UayQi6KguqF5nC0sw");
    Uuid offlineDir = Uuid.fromString("zvAf9BKZRyyrEWz4FX2nLA");
    ctx.registerBrokersWithDirs(1, asList(dir1b1, dir2b1), 2, singletonList(dir1b2));
    ctx.unfenceBrokers(1, 2);
    Uuid topicA = ctx.createTestTopic("a", new int[][]{new int[]{1, 2}, new int[]{1, 2}, new int[]{1, 2}}).topicId();
    Uuid topicB = ctx.createTestTopic("b", new int[][]{new int[]{1, 2}, new int[]{1, 2}}).topicId();
    Uuid topicC = ctx.createTestTopic("c", new int[][]{new int[]{2}}).topicId();

    ControllerResult<AssignReplicasToDirsResponseData> controllerResult = ctx.assignReplicasToDirs(1, new HashMap<TopicIdPartition, Uuid>() {{
        put(new TopicIdPartition(topicA, 0), dir1b1);
        put(new TopicIdPartition(topicA, 1), dir2b1);
        put(new TopicIdPartition(topicA, 2), offlineDir); // unknown/offline dir
        put(new TopicIdPartition(topicB, 0), dir1b1);
        put(new TopicIdPartition(topicB, 1), DirectoryId.LOST);
        put(new TopicIdPartition(Uuid.fromString("nLU9hKNXSZuMe5PO2A4dVQ"), 1), dir2b1); // expect UNKNOWN_TOPIC_ID
        put(new TopicIdPartition(topicA, 137), dir1b1); // expect UNKNOWN_TOPIC_OR_PARTITION
        put(new TopicIdPartition(topicC, 0), dir1b1); // expect NOT_LEADER_OR_FOLLOWER
    }});

    // Response must report the expected per-partition error for every
    // directory bucket in the request.
    assertEquals(AssignmentsHelper.normalize(AssignmentsHelper.buildResponseData((short) 0, 0, new HashMap<Uuid, Map<TopicIdPartition, Errors>>() {{
        put(dir1b1, new HashMap<TopicIdPartition, Errors>() {{
            put(new TopicIdPartition(topicA, 0), NONE);
            put(new TopicIdPartition(topicA, 137), UNKNOWN_TOPIC_OR_PARTITION);
            put(new TopicIdPartition(topicB, 0), NONE);
            put(new TopicIdPartition(topicC, 0), NOT_LEADER_OR_FOLLOWER);
        }});
        put(dir2b1, new HashMap<TopicIdPartition, Errors>() {{
            put(new TopicIdPartition(topicA, 1), NONE);
            put(new TopicIdPartition(Uuid.fromString("nLU9hKNXSZuMe5PO2A4dVQ"), 1), UNKNOWN_TOPIC_ID);
        }});
        put(offlineDir, new HashMap<TopicIdPartition, Errors>() {{
            put(new TopicIdPartition(topicA, 2), NONE);
        }});
        put(DirectoryId.LOST, new HashMap<TopicIdPartition, Errors>() {{
            put(new TopicIdPartition(topicB, 1), NONE);
        }});
    }})), AssignmentsHelper.normalize(controllerResult.response()));

    short recordVersion = ctx.featureControl.metadataVersion().partitionChangeRecordVersion();
    assertEquals(sortPartitionChangeRecords(asList(
        new ApiMessageAndVersion(
            new PartitionChangeRecord().setTopicId(topicA).setPartitionId(0)
                .setDirectories(asList(dir1b1, dir1b2)), recordVersion),
        new ApiMessageAndVersion(
            new PartitionChangeRecord().setTopicId(topicA).setPartitionId(1).
                setDirectories(asList(dir2b1, dir1b2)), recordVersion),
        new ApiMessageAndVersion(
            new PartitionChangeRecord().setTopicId(topicA).setPartitionId(2).
                setDirectories(asList(offlineDir, dir1b2)), recordVersion),
        new ApiMessageAndVersion(
            new PartitionChangeRecord().setTopicId(topicB).setPartitionId(0).
                setDirectories(asList(dir1b1, dir1b2)), recordVersion),
        new ApiMessageAndVersion(
            new PartitionChangeRecord().setTopicId(topicB).setPartitionId(1).
                setDirectories(asList(DirectoryId.LOST, dir1b2)), recordVersion),

        // In addition to the directory assignment changes we expect two additional records,
        // which elect new leaders for:
        // - a-2 which has been assigned to a directory which is not an online directory (unknown/offline)
        // - b-1 which has been assigned to an offline directory.
        new ApiMessageAndVersion(
            new PartitionChangeRecord().setTopicId(topicA).setPartitionId(2).
                setIsr(singletonList(2)).setLeader(2), recordVersion),
        new ApiMessageAndVersion(
            new PartitionChangeRecord().setTopicId(topicB).setPartitionId(1).
                setIsr(singletonList(2)).setLeader(2), recordVersion)
    )), sortPartitionChangeRecords(controllerResult.records()));

    // After replay, leadership for a-2, b-1 (and c-0, which always lived on
    // broker 2) sits on broker 2; the rest stays on broker 1.
    ctx.replay(controllerResult.records());
    assertEquals(new HashSet<TopicIdPartition>() {{
        add(new TopicIdPartition(topicA, 0));
        add(new TopicIdPartition(topicA, 1));
        add(new TopicIdPartition(topicB, 0));
    }}, RecordTestUtils.iteratorToSet(ctx.replicationControl.brokersToIsrs().iterator(1, true)));
    assertEquals(new HashSet<TopicIdPartition>() {{
        add(new TopicIdPartition(topicA, 2));
        add(new TopicIdPartition(topicB, 1));
        add(new TopicIdPartition(topicC, 0));
    }}, RecordTestUtils.iteratorToSet(ctx.replicationControl.brokersToIsrs().iterator(2, true)));
}
/**
 * Returns the server's IP address, if one is available.
 * Empty for the EMPTY sentinel, when no InstanceInfo is attached, or when
 * the recorded address is null/blank.
 */
public Optional<String> getIPAddr() {
    if (this == DiscoveryResult.EMPTY) {
        return Optional.empty();
    }
    // Fetch the instance info once instead of re-reading it per check.
    InstanceInfo instanceInfo = server.getInstanceInfo();
    if (instanceInfo == null) {
        return Optional.empty();
    }
    String ip = instanceInfo.getIPAddr();
    if (ip == null || ip.isEmpty()) {
        return Optional.empty();
    }
    return Optional.of(ip);
}
/**
 * An InstanceInfo built without an IP address must surface as
 * Optional.empty() from DiscoveryResult.getIPAddr().
 */
@Test
void ipAddrEmptyForIncompleteInstanceInfo() {
    final InstanceInfo incompleteInfo = Builder.newBuilder()
            .setAppName("ipAddrMissing")
            .setHostName("ipAddrMissing")
            .setPort(7777)
            .build();
    final DiscoveryEnabledServer discoveryServer = new DiscoveryEnabledServer(incompleteInfo, false);
    final DynamicServerListLoadBalancer<Server> loadBalancer =
            new DynamicServerListLoadBalancer<>(new DefaultClientConfigImpl());
    final DiscoveryResult discoveryResult =
            new DiscoveryResult(discoveryServer, loadBalancer.getLoadBalancerStats());
    Truth.assertThat(discoveryResult.getIPAddr()).isEqualTo(Optional.empty());
}
@Override public HttpServletRequest readRequest(AwsProxyRequest request, SecurityContext securityContext, Context lambdaContext, ContainerConfig config) throws InvalidRequestEventException { // Expect the HTTP method and context to be populated. If they are not, we are handling an // unsupported event type. if (request.getHttpMethod() == null || request.getHttpMethod().equals("") || request.getRequestContext() == null) { throw new InvalidRequestEventException(INVALID_REQUEST_ERROR); } request.setPath(stripBasePath(request.getPath(), config)); if (request.getMultiValueHeaders() != null && request.getMultiValueHeaders().getFirst(HttpHeaders.CONTENT_TYPE) != null) { String contentType = request.getMultiValueHeaders().getFirst(HttpHeaders.CONTENT_TYPE); // put single as we always expect to have one and only one content type in a request. request.getMultiValueHeaders().putSingle(HttpHeaders.CONTENT_TYPE, getContentTypeWithCharset(contentType, config)); } AwsProxyHttpServletRequest servletRequest = new AwsProxyHttpServletRequest(request, lambdaContext, securityContext, config); servletRequest.setServletContext(servletContext); servletRequest.setAttribute(API_GATEWAY_CONTEXT_PROPERTY, request.getRequestContext()); servletRequest.setAttribute(API_GATEWAY_STAGE_VARS_PROPERTY, request.getStageVariables()); servletRequest.setAttribute(API_GATEWAY_EVENT_PROPERTY, request); servletRequest.setAttribute(ALB_CONTEXT_PROPERTY, request.getRequestContext().getElb()); servletRequest.setAttribute(LAMBDA_CONTEXT_PROPERTY, lambdaContext); servletRequest.setAttribute(JAX_SECURITY_CONTEXT_PROPERTY, securityContext); return servletRequest; }
/**
 * An event whose request context has been nulled out must be rejected with
 * an InvalidRequestEventException carrying the standard error message.
 */
@Test
void readRequest_invalidEventEmptyContext_expectException() {
    try {
        AwsProxyRequest invalidRequest = new AwsProxyRequestBuilder("/path", "GET").build();
        invalidRequest.setRequestContext(null);
        reader.readRequest(invalidRequest, null, null, ContainerConfig.defaultConfig());
        fail("Expected InvalidRequestEventException");
    } catch (InvalidRequestEventException e) {
        assertEquals(AwsProxyHttpServletRequestReader.INVALID_REQUEST_ERROR, e.getMessage());
    }
}
@Override public T deserialize(final String topic, final byte[] bytes) { try { if (bytes == null) { return null; } // don't use the JsonSchemaConverter to read this data because // we require that the MAPPER enables USE_BIG_DECIMAL_FOR_FLOATS, // which is not currently available in the standard converters final JsonNode value = isJsonSchema ? JsonSerdeUtils.readJsonSR(bytes, MAPPER, JsonNode.class) : MAPPER.readTree(bytes); final Object coerced = enforceFieldType( "$", new JsonValueContext(value, schema) ); if (LOG.isTraceEnabled()) { LOG.trace("Deserialized {}. topic:{}, row:{}", target, topic, coerced); } return SerdeUtils.castToTargetType(coerced, targetType); } catch (final Exception e) { // Clear location in order to avoid logging data, for security reasons if (e instanceof JsonParseException) { ((JsonParseException) e).clearLocation(); } throw new SerializationException( "Failed to deserialize " + target + " from topic: " + topic + ". " + e.getMessage(), e); } }
@Test public void shouldIncludePathForErrorsInRootNode() { // Given: final KsqlJsonDeserializer<Double> deserializer = givenDeserializerForSchema(Schema.OPTIONAL_FLOAT64_SCHEMA, Double.class); final byte[] bytes = serializeJson(BooleanNode.valueOf(true)); // When: final Exception e = assertThrows( Exception.class, () -> deserializer.deserialize(SOME_TOPIC, bytes) ); // Then: assertThat(e.getCause(), (hasMessage(endsWith(", path: $")))); }
/**
 * Lists share groups cluster-wide. First issues a Metadata request via the
 * least-loaded node to discover all brokers, then fans out one ListGroups
 * request (type filter fixed to SHARE, state filter from the options) to
 * every broker, merging listings and per-node errors into a single
 * ListShareGroupsResult.
 *
 * Error handling: COORDINATOR_LOAD_IN_PROGRESS / COORDINATOR_NOT_AVAILABLE
 * are rethrown so the call machinery retries that node; any other error is
 * recorded against the node and the overall collection still completes.
 * All mutation of the shared results accumulator happens inside
 * synchronized (results) since per-node responses arrive concurrently.
 *
 * NOTE(review): the metadata request sets allowAutoTopicCreation(true) even
 * though no topics are requested — presumably harmless; confirm intent.
 */
@Override public ListShareGroupsResult listShareGroups(ListShareGroupsOptions options) { final KafkaFutureImpl<Collection<Object>> all = new KafkaFutureImpl<>(); final long nowMetadata = time.milliseconds(); final long deadline = calcDeadlineMs(nowMetadata, options.timeoutMs()); runnable.call(new Call("findAllBrokers", deadline, new LeastLoadedNodeProvider()) { @Override MetadataRequest.Builder createRequest(int timeoutMs) { return new MetadataRequest.Builder(new MetadataRequestData() .setTopics(Collections.emptyList()) .setAllowAutoTopicCreation(true)); } @Override void handleResponse(AbstractResponse abstractResponse) { MetadataResponse metadataResponse = (MetadataResponse) abstractResponse; Collection<Node> nodes = metadataResponse.brokers(); if (nodes.isEmpty()) throw new StaleMetadataException("Metadata fetch failed due to missing broker list"); HashSet<Node> allNodes = new HashSet<>(nodes); final ListShareGroupsResults results = new ListShareGroupsResults(allNodes, all); for (final Node node : allNodes) { final long nowList = time.milliseconds(); runnable.call(new Call("listShareGroups", deadline, new ConstantNodeIdProvider(node.id())) { @Override ListGroupsRequest.Builder createRequest(int timeoutMs) { List<String> states = options.states() .stream() .map(ShareGroupState::toString) .collect(Collectors.toList()); List<String> types = Collections.singletonList(GroupType.SHARE.toString()); return new ListGroupsRequest.Builder(new ListGroupsRequestData() .setStatesFilter(states) .setTypesFilter(types) ); } private void maybeAddShareGroup(ListGroupsResponseData.ListedGroup group) { final String groupId = group.groupId(); final Optional<ShareGroupState> state = group.groupState().isEmpty() ? Optional.empty() : Optional.of(ShareGroupState.parse(group.groupState())); final ShareGroupListing groupListing = new ShareGroupListing(groupId, state); results.addListing(groupListing); } @Override void handleResponse(AbstractResponse abstractResponse) { final ListGroupsResponse response = (ListGroupsResponse) abstractResponse; synchronized (results) { Errors error = Errors.forCode(response.data().errorCode()); if (error == Errors.COORDINATOR_LOAD_IN_PROGRESS || error == Errors.COORDINATOR_NOT_AVAILABLE) { throw error.exception(); } else if (error != Errors.NONE) { results.addError(error.exception(), node); } else { for (ListGroupsResponseData.ListedGroup group : response.data().groups()) { maybeAddShareGroup(group); } } results.tryComplete(node); } } @Override void handleFailure(Throwable throwable) { synchronized (results) { results.addError(throwable, node); results.tryComplete(node); } } }, nowList); } } @Override void handleFailure(Throwable throwable) { KafkaException exception = new KafkaException("Failed to find brokers to send ListGroups", throwable); all.complete(Collections.singletonList(exception)); } }, nowMetadata); return new ListShareGroupsResult(all); }
/**
 * End-to-end test of AdminClient.listShareGroups() against a 4-broker mock:
 * an empty metadata response is retried, per-node ListGroups responses cover
 * success, retriable coordinator errors (retried on the same node), and a
 * fatal UNKNOWN_SERVER_ERROR. Expects 4 valid listings across three nodes,
 * one recorded node error, and result.all() failing with
 * UnknownServerException.
 */
@Test
public void testListShareGroups() throws Exception {
    try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(mockCluster(4, 0),
            AdminClientConfig.RETRIES_CONFIG, "2")) {
        env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());

        // Empty metadata response should be retried
        env.kafkaClient().prepareResponse(
            RequestTestUtils.metadataResponse(
                Collections.emptyList(),
                env.cluster().clusterResource().clusterId(),
                -1,
                Collections.emptyList()));

        env.kafkaClient().prepareResponse(
            RequestTestUtils.metadataResponse(
                env.cluster().nodes(),
                env.cluster().clusterResource().clusterId(),
                env.cluster().controller().id(),
                Collections.emptyList()));

        // Node 0: one share group.
        env.kafkaClient().prepareResponseFrom(
            new ListGroupsResponse(
                new ListGroupsResponseData()
                    .setErrorCode(Errors.NONE.code())
                    .setGroups(Arrays.asList(
                        new ListGroupsResponseData.ListedGroup()
                            .setGroupId("share-group-1")
                            .setGroupType(GroupType.SHARE.toString())
                            .setGroupState("Stable")
                    ))),
            env.cluster().nodeById(0));

        // handle retriable errors
        env.kafkaClient().prepareResponseFrom(
            new ListGroupsResponse(
                new ListGroupsResponseData()
                    .setErrorCode(Errors.COORDINATOR_NOT_AVAILABLE.code())
                    .setGroups(Collections.emptyList())
            ),
            env.cluster().nodeById(1));
        env.kafkaClient().prepareResponseFrom(
            new ListGroupsResponse(
                new ListGroupsResponseData()
                    .setErrorCode(Errors.COORDINATOR_LOAD_IN_PROGRESS.code())
                    .setGroups(Collections.emptyList())
            ),
            env.cluster().nodeById(1));
        // Node 1 eventually succeeds with two groups after the retries above.
        env.kafkaClient().prepareResponseFrom(
            new ListGroupsResponse(
                new ListGroupsResponseData()
                    .setErrorCode(Errors.NONE.code())
                    .setGroups(Arrays.asList(
                        new ListGroupsResponseData.ListedGroup()
                            .setGroupId("share-group-2")
                            .setGroupType(GroupType.SHARE.toString())
                            .setGroupState("Stable"),
                        new ListGroupsResponseData.ListedGroup()
                            .setGroupId("share-group-3")
                            .setGroupType(GroupType.SHARE.toString())
                            .setGroupState("Stable")
                    ))),
            env.cluster().nodeById(1));

        // Node 2: one share group.
        env.kafkaClient().prepareResponseFrom(
            new ListGroupsResponse(
                new ListGroupsResponseData()
                    .setErrorCode(Errors.NONE.code())
                    .setGroups(Arrays.asList(
                        new ListGroupsResponseData.ListedGroup()
                            .setGroupId("share-group-4")
                            .setGroupType(GroupType.SHARE.toString())
                            .setGroupState("Stable")
                    ))),
            env.cluster().nodeById(2));

        // fatal error
        env.kafkaClient().prepareResponseFrom(
            new ListGroupsResponse(
                new ListGroupsResponseData()
                    .setErrorCode(Errors.UNKNOWN_SERVER_ERROR.code())
                    .setGroups(Collections.emptyList())),
            env.cluster().nodeById(3));

        final ListShareGroupsResult result = env.adminClient().listShareGroups();
        TestUtils.assertFutureError(result.all(), UnknownServerException.class);

        Collection<ShareGroupListing> listings = result.valid().get();
        assertEquals(4, listings.size());

        Set<String> groupIds = new HashSet<>();
        for (ShareGroupListing listing : listings) {
            groupIds.add(listing.groupId());
            assertTrue(listing.state().isPresent());
        }

        assertEquals(Utils.mkSet("share-group-1", "share-group-2", "share-group-3", "share-group-4"), groupIds);
        assertEquals(1, result.errors().get().size());
    }
}
/**
 * Returns the instants that may conflict with the given one. Table-service
 * operations (clustering or compaction) consult the table-services candidate
 * set; all other commits consult the regular-commit candidate set. The
 * active timeline is reloaded first so the check sees the latest state.
 */
@Override
public Stream<HoodieInstant> getCandidateInstants(HoodieTableMetaClient metaClient, HoodieInstant currentInstant,
                                                  Option<HoodieInstant> lastSuccessfulInstant) {
    HoodieActiveTimeline activeTimeline = metaClient.reloadActiveTimeline();
    boolean isTableServiceInstant = ClusteringUtils.isClusteringInstant(activeTimeline, currentInstant)
        || COMPACTION_ACTION.equals(currentInstant.getAction());
    return isTableServiceInstant
        ? getCandidateInstantsForTableServicesCommits(activeTimeline, currentInstant)
        : getCandidateInstantsForNonTableServicesCommits(activeTimeline, currentInstant);
}
/**
 * With the writer-preferring strategy, a scheduled compaction must see an
 * in-flight ingestion writer as a conflict candidate: after writer 1 goes
 * inflight and compaction 1 is requested, the compaction's candidate list
 * contains exactly the writer's instant.
 */
@Test
public void testConcurrentWritesWithInterleavingCompaction() throws Exception {
    createCommit(metaClient.createNewInstantTime(), metaClient);
    HoodieActiveTimeline timeline = metaClient.getActiveTimeline();
    // consider commits before this are all successful
    Option<HoodieInstant> lastSuccessfulInstant = timeline.getCommitsTimeline().filterCompletedInstants().lastInstant();
    // writer 1 starts
    String currentWriterInstant = metaClient.createNewInstantTime();
    createInflightCommit(currentWriterInstant, metaClient);
    // compaction 1 gets scheduled and finishes
    String newInstantTime = metaClient.createNewInstantTime();
    createCompactionRequested(newInstantTime, metaClient);

    Option<HoodieInstant> currentInstant = Option.of(
        new HoodieInstant(HoodieInstant.State.INFLIGHT, HoodieTimeline.COMPACTION_ACTION, newInstantTime));
    PreferWriterConflictResolutionStrategy strategy = new PreferWriterConflictResolutionStrategy();
    // TODO Create method to create compactCommitMetadata
    // HoodieCommitMetadata currentMetadata = createCommitMetadata(newInstantTime);
    List<HoodieInstant> candidateInstants =
        strategy.getCandidateInstants(metaClient, currentInstant.get(), lastSuccessfulInstant).collect(
            Collectors.toList());
    // writer 1 conflicts with compaction 1
    Assertions.assertEquals(1, candidateInstants.size());
    Assertions.assertEquals(currentWriterInstant, candidateInstants.get(0).getTimestamp());
    // TODO: Once compactCommitMetadata is created use that to verify resolveConflict method.
}
/**
 * Serializes this text-file-input step's metadata to its transformation XML fragment.
 *
 * Emits, in order: filename-acceptance settings, content/layout options (separator, enclosure,
 * header/footer, wrapping, paging), the {@code <file>} list, {@code <filters>} (filter strings are
 * Base64-encoded and tagged with STRING_BASE64_PREFIX so binary-unsafe characters survive),
 * {@code <fields>}, the row limit, the error-handling settings, and the additional output field
 * names. Before the per-file loop the input-file arrays are normalized to fileName's length to
 * prevent ArrayIndexOutOfBoundsException when only part of the arrays was populated.
 *
 * @return the XML fragment describing this step, never null
 */
@Override public String getXML() { StringBuilder retval = new StringBuilder( 1500 ); retval.append( " " ).append( XMLHandler.addTagValue( "accept_filenames", inputFiles.acceptingFilenames ) ); retval.append( " " ).append( XMLHandler.addTagValue( "passing_through_fields", inputFiles.passingThruFields ) ); retval.append( " " ).append( XMLHandler.addTagValue( "accept_field", inputFiles.acceptingField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "accept_stepname", ( acceptingStep != null ? acceptingStep .getName() : "" ) ) ); retval.append( " " ).append( XMLHandler.addTagValue( "separator", content.separator ) ); retval.append( " " ).append( XMLHandler.addTagValue( "enclosure", content.enclosure ) ); retval.append( " " ).append( XMLHandler.addTagValue( "enclosure_breaks", content.breakInEnclosureAllowed ) ); retval.append( " " ).append( XMLHandler.addTagValue( "escapechar", content.escapeCharacter ) ); retval.append( " " ).append( XMLHandler.addTagValue( "header", content.header ) ); retval.append( " " ).append( XMLHandler.addTagValue( "nr_headerlines", content.nrHeaderLines ) ); retval.append( " " ).append( XMLHandler.addTagValue( "footer", content.footer ) ); retval.append( " " ).append( XMLHandler.addTagValue( "nr_footerlines", content.nrFooterLines ) ); retval.append( " " ).append( XMLHandler.addTagValue( "line_wrapped", content.lineWrapped ) ); retval.append( " " ).append( XMLHandler.addTagValue( "nr_wraps", content.nrWraps ) ); retval.append( " " ).append( XMLHandler.addTagValue( "layout_paged", content.layoutPaged ) ); retval.append( " " ).append( XMLHandler.addTagValue( "nr_lines_per_page", content.nrLinesPerPage ) ); retval.append( " " ).append( XMLHandler.addTagValue( "nr_lines_doc_header", content.nrLinesDocHeader ) ); retval.append( " " ).append( XMLHandler.addTagValue( "noempty", content.noEmptyLines ) ); retval.append( " " ).append( XMLHandler.addTagValue( "include", content.includeFilename ) ); retval.append( " " ).append( 
XMLHandler.addTagValue( "include_field", content.filenameField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "rownum", content.includeRowNumber ) ); retval.append( " " ).append( XMLHandler.addTagValue( "rownumByFile", content.rowNumberByFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "rownum_field", content.rowNumberField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "format", content.fileFormat ) ); retval.append( " " ).append( XMLHandler.addTagValue( "encoding", content.encoding ) ); retval.append( " " ).append( XMLHandler.addTagValue( "length", content.length ) ); retval.append( " " + XMLHandler.addTagValue( "add_to_result_filenames", inputFiles.isaddresult ) ); retval.append( " <file>" ).append( Const.CR ); //we need the equals by size arrays for inputFiles.fileName[i], inputFiles.fileMask[i], inputFiles.fileRequired[i], inputFiles.includeSubFolders[i] //to prevent the ArrayIndexOutOfBoundsException inputFiles.normalizeAllocation( inputFiles.fileName.length ); for ( int i = 0; i < inputFiles.fileName.length; i++ ) { saveSource( retval, inputFiles.fileName[i] ); parentStepMeta.getParentTransMeta().getNamedClusterEmbedManager().registerUrl( inputFiles.fileName[i] ); retval.append( " " ).append( XMLHandler.addTagValue( "filemask", inputFiles.fileMask[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "exclude_filemask", inputFiles.excludeFileMask[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "file_required", inputFiles.fileRequired[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "include_subfolders", inputFiles.includeSubFolders[i] ) ); } retval.append( " " ).append( XMLHandler.addTagValue( "type", content.fileType ) ); retval.append( " " ).append( XMLHandler.addTagValue( "compression", ( content.fileCompression == null ) ? 
"None" : content.fileCompression ) ); retval.append( " </file>" ).append( Const.CR ); retval.append( " <filters>" ).append( Const.CR ); for ( int i = 0; i < filter.length; i++ ) { String filterString = filter[i].getFilterString(); byte[] filterBytes = new byte[] {}; String filterPrefix = ""; if ( filterString != null ) { filterBytes = filterString.getBytes(); filterPrefix = STRING_BASE64_PREFIX; } String filterEncoded = filterPrefix + new String( Base64.encodeBase64( filterBytes ) ); retval.append( " <filter>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "filter_string", filterEncoded, false ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filter_position", filter[i].getFilterPosition(), false ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filter_is_last_line", filter[i].isFilterLastLine(), false ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filter_is_positive", filter[i].isFilterPositive(), false ) ); retval.append( " </filter>" ).append( Const.CR ); } retval.append( " </filters>" ).append( Const.CR ); retval.append( " <fields>" ).append( Const.CR ); for ( int i = 0; i < inputFields.length; i++ ) { BaseFileField field = inputFields[i]; retval.append( " <field>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "name", field.getName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "type", field.getTypeDesc() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "format", field.getFormat() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "currency", field.getCurrencySymbol() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "decimal", field.getDecimalSymbol() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "group", field.getGroupSymbol() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "nullif", field.getNullString() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "ifnull", field.getIfNullValue() ) ); retval.append( " 
" ).append( XMLHandler.addTagValue( "position", field.getPosition() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "length", field.getLength() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "precision", field.getPrecision() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "trim_type", field.getTrimTypeCode() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "repeat", field.isRepeated() ) ); retval.append( " </field>" ).append( Const.CR ); } retval.append( " </fields>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "limit", content.rowLimit ) ); // ERROR HANDLING retval.append( " " ).append( XMLHandler.addTagValue( "error_ignored", errorHandling.errorIgnored ) ); retval.append( " " ).append( XMLHandler.addTagValue( "skip_bad_files", errorHandling.skipBadFiles ) ); retval.append( " " ).append( XMLHandler.addTagValue( "file_error_field", errorHandling.fileErrorField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "file_error_message_field", errorHandling.fileErrorMessageField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_line_skipped", errorLineSkipped ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_count_field", errorCountField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_fields_field", errorFieldsField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_text_field", errorTextField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bad_line_files_destination_directory", errorHandling.warningFilesDestinationDirectory ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bad_line_files_extension", errorHandling.warningFilesExtension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_line_files_destination_directory", errorHandling.errorFilesDestinationDirectory ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_line_files_extension", errorHandling.errorFilesExtension ) ); 
retval.append( " " ).append( XMLHandler.addTagValue( "line_number_files_destination_directory", errorHandling.lineNumberFilesDestinationDirectory ) ); retval.append( " " ).append( XMLHandler.addTagValue( "line_number_files_extension", errorHandling.lineNumberFilesExtension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "date_format_lenient", content.dateFormatLenient ) ); retval.append( " " ).append( XMLHandler.addTagValue( "date_format_locale", content.dateFormatLocale != null ? content.dateFormatLocale.toString() : null ) ); retval.append( " " ).append( XMLHandler.addTagValue( "shortFileFieldName", additionalOutputFields.shortFilenameField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "pathFieldName", additionalOutputFields.pathField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "hiddenFieldName", additionalOutputFields.hiddenField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "lastModificationTimeFieldName", additionalOutputFields.lastModificationField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "uriNameFieldName", additionalOutputFields.uriField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "rootUriNameFieldName", additionalOutputFields.rootUriField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "extensionFieldName", additionalOutputFields.extensionField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sizeFieldName", additionalOutputFields.sizeField ) ); return retval.toString(); }
@Test
public void testGetXmlWorksIfWeUpdateOnlyPartOfInputFilesInformation() {
  // Populate only the file names; the other per-file arrays keep their default allocation.
  inputMeta.inputFiles = new BaseFileInputFiles();
  inputMeta.inputFiles.fileName = new String[] { FILE_NAME_VALID_PATH };

  inputMeta.getXML();

  // getXML() must have normalized the allocation so every per-file array matches fileName's length.
  int expectedLength = inputMeta.inputFiles.fileName.length;
  assertEquals( expectedLength, inputMeta.inputFiles.fileMask.length );
  assertEquals( expectedLength, inputMeta.inputFiles.excludeFileMask.length );
  assertEquals( expectedLength, inputMeta.inputFiles.fileRequired.length );
  assertEquals( expectedLength, inputMeta.inputFiles.includeSubFolders.length );
}
/**
 * Computes the configured formula-based measures by crawling the report tree from its root,
 * applying a FormulaExecutorComponentVisitor (backed by the metric and measure repositories)
 * to every component.
 */
public void execute() { new PathAwareCrawler<>( FormulaExecutorComponentVisitor.newBuilder(metricRepository, measureRepository).buildFor(formulas)) .visit(treeRootHolder.getReportTreeRoot()); }
// Registers a duplication whose original block (1 line in FILE_1) is duplicated into FILE_2.
// Only the original block's line count must feed FILE_1's DUPLICATED_LINES measure; the
// in-project duplicate in FILE_2 must not be added to it.
@Test public void compute_duplicated_lines_counts_lines_from_original_and_ignores_InProjectDuplicate() { TextBlock original = new TextBlock(1, 1); duplicationRepository.addDuplication(FILE_1_REF, original, FILE_2_REF, new TextBlock(2, 2)); underTest.execute(); assertRawMeasureValue(FILE_1_REF, DUPLICATED_LINES_KEY, 1); }
public static String translateTableName(String tableName, @Nullable String databaseName, boolean ignoreCase) { Preconditions.checkArgument(StringUtils.isNotEmpty(tableName), "'tableName' cannot be null or empty"); String[] tableSplit = StringUtils.split(tableName, '.'); switch (tableSplit.length) { case 1: // do not concat the database name prefix if it's a 'default' database if (StringUtils.isNotEmpty(databaseName) && !databaseName.equalsIgnoreCase(CommonConstants.DEFAULT_DATABASE)) { return databaseName + "." + tableName; } return tableName; case 2: Preconditions.checkArgument(!tableSplit[1].isEmpty(), "Invalid table name '%s'", tableName); String databasePrefix = tableSplit[0]; if (!StringUtils.isEmpty(databaseName) && (ignoreCase || !databaseName.equals(databasePrefix)) && (!ignoreCase || !databaseName.equalsIgnoreCase(databasePrefix))) { throw new DatabaseConflictException("Database name '" + databasePrefix + "' from table prefix does not match database name '" + databaseName + "' from header"); } // skip database name prefix if it's a 'default' database return databasePrefix.equalsIgnoreCase(CommonConstants.DEFAULT_DATABASE) ? tableSplit[1] : tableName; default: throw new IllegalArgumentException( "Table name: '" + tableName + "' containing more than one '.' is not allowed"); } }
// Exercises translateTableName across all qualification combinations: plain vs. already-qualified
// names, matching vs. conflicting header database, and the special-cased 'default' database
// (whose prefix is never added and is stripped when present). `check` asserts the translation
// result; `error` asserts the call throws.
@Test public void translateTableNameTest() { // valid cases with non-default database check(LOGICAL_TABLE_NAME, DATABASE_NAME, FULLY_QUALIFIED_TABLE_NAME); check(FULLY_QUALIFIED_TABLE_NAME, DATABASE_NAME, FULLY_QUALIFIED_TABLE_NAME); check(FULLY_QUALIFIED_TABLE_NAME, null, FULLY_QUALIFIED_TABLE_NAME); // error cases with non-default database error(null, DATABASE_NAME); error(FULLY_QUALIFIED_TABLE_NAME + "." + "foo", null); error(FULLY_QUALIFIED_TABLE_NAME + "." + "foo", DATABASE_NAME); error(FULLY_QUALIFIED_TABLE_NAME, DATABASE_NAME + "foo"); // valid cases with default database check(LOGICAL_TABLE_NAME, null, LOGICAL_TABLE_NAME); check(LOGICAL_TABLE_NAME, DEFAULT_DATABASE_NAME, LOGICAL_TABLE_NAME); check(DEFAULT_DATABASE_NAME + "." + LOGICAL_TABLE_NAME, null, LOGICAL_TABLE_NAME); check(DEFAULT_DATABASE_NAME + "." + LOGICAL_TABLE_NAME, DEFAULT_DATABASE_NAME, LOGICAL_TABLE_NAME); // error cases with default database error(null, DEFAULT_DATABASE_NAME); error(FULLY_QUALIFIED_TABLE_NAME, DEFAULT_DATABASE_NAME); error(DEFAULT_DATABASE_NAME + "." + LOGICAL_TABLE_NAME, DATABASE_NAME); error(DEFAULT_DATABASE_NAME + "." + FULLY_QUALIFIED_TABLE_NAME, null); error(DEFAULT_DATABASE_NAME + "." + FULLY_QUALIFIED_TABLE_NAME, DATABASE_NAME); error(DEFAULT_DATABASE_NAME + "." + FULLY_QUALIFIED_TABLE_NAME, DEFAULT_DATABASE_NAME); }
/**
 * Builds a ProjectMeasuresQuery from the given filter criteria, optionally restricted to a set of
 * project UUIDs (a null set means no project restriction).
 */
static ProjectMeasuresQuery newProjectMeasuresQuery(List<Criterion> criteria, @Nullable Set<String> projectUuids) {
  ProjectMeasuresQuery result = new ProjectMeasuresQuery();
  if (projectUuids != null) {
    result.setProjectUuids(projectUuids);
  }
  for (Criterion criterion : criteria) {
    processCriterion(criterion, result);
  }
  return result;
}
@Test
public void filter_on_projectUuids_if_projectUuid_is_non_empty_and_criteria_empty() {
  // A non-null uuid set must be carried into the query even when there are no criteria.
  Set<String> uuids = Collections.singleton("foo");

  ProjectMeasuresQuery query = newProjectMeasuresQuery(emptyList(), uuids);

  assertThat(query.getProjectUuids()).isPresent();
}
/**
 * Returns the public URI of every file in this list, in list order, each passed through
 * Const.optionallyDecodeUriString.
 */
public String[] getUrlStrings() {
  int count = files.size();
  String[] urls = new String[ count ];
  for ( int index = 0; index < count; index++ ) {
    urls[ index ] = Const.optionallyDecodeUriString( files.get( index ).getPublicURIString() );
  }
  return urls;
}
// Two mocked FileObjects (an hdfs:// and a file:/// URI) are added; getUrlStrings() must return
// their public URI strings in insertion order. Neither URI contains encoded characters, so
// optionallyDecodeUriString is expected to pass them through unchanged.
@Test public void testGetUrlStrings() throws Exception { String sFileA = "hdfs://myfolderA/myfileA.txt"; String sFileB = "file:///myfolderB/myfileB.txt"; FileObject fileA = mock( FileObject.class ); FileObject fileB = mock( FileObject.class ); when( fileA.getPublicURIString() ).thenReturn( sFileA ); when( fileB.getPublicURIString() ).thenReturn( sFileB ); FileInputList fileInputList = new FileInputList(); fileInputList.addFile( fileA ); fileInputList.addFile( fileB ); String[] result = fileInputList.getUrlStrings(); assertNotNull( result ); assertEquals( 2, result.length ); assertEquals( sFileA, result[ 0 ] ); assertEquals( sFileB, result[ 1 ] ); }
/**
 * Creates the folder on Google Cloud Storage.
 *
 * If {@code folder} is itself a container, a new bucket is inserted (using the region and storage
 * class from {@code status}) and the returned path carries the volume type plus attributes read
 * back from the created bucket. Otherwise a zero-byte placeholder object with the directory MIME
 * type is written via the touch feature to materialize the folder, and the returned path carries
 * the placeholder type.
 *
 * @throws BackgroundException when the Google Storage API call fails (IOException is mapped)
 */
@Override public Path mkdir(final Path folder, final TransferStatus status) throws BackgroundException { try { if(containerService.isContainer(folder)) { final Storage.Buckets.Insert request = session.getClient().buckets().insert(session.getHost().getCredentials().getUsername(), new Bucket() .setLocation(status.getRegion()) .setStorageClass(status.getStorageClass()) .setName(containerService.getContainer(folder).getName())); final Bucket bucket = request.execute(); final EnumSet<Path.Type> type = EnumSet.copyOf(folder.getType()); type.add(Path.Type.volume); return folder.withType(type).withAttributes(new GoogleStorageAttributesFinderFeature(session).toAttributes(bucket)); } else { final EnumSet<Path.Type> type = EnumSet.copyOf(folder.getType()); type.add(Path.Type.placeholder); // Add placeholder object return new GoogleStorageTouchFeature(session).withWriter(writer).touch(folder.withType(type), status.withMime(MIMETYPE)); } } catch(IOException e) { throw new GoogleStorageExceptionMappingService().map("Cannot create folder {0}", e, folder); } }
// In a versioned bucket, deleting a directory placeholder only adds a delete marker: the specific
// version (path with version id) must remain findable and be flagged as a duplicate, while an
// unversioned reference to the same path must no longer be found.
@Test public void testDirectoryDeleteWithVersioning() throws Exception { final Path bucket = new Path("cyberduck-test-eu", EnumSet.of(Path.Type.directory, Path.Type.volume)); final Path parent = new GoogleStorageDirectoryFeature(session).mkdir(new Path(bucket, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus()); final Path test = new GoogleStorageDirectoryFeature(session).mkdir(new Path(parent, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus()); assertNotNull(test.attributes().getVersionId()); // Only placeholder is found in list output with no version id set assertTrue(test.isPlaceholder()); assertTrue(new GoogleStorageFindFeature(session).find(test)); assertTrue(new DefaultFindFeature(session).find(test)); // This will only cause a delete marker being added new GoogleStorageDeleteFeature(session).delete(Arrays.asList(new Path(test).withAttributes(PathAttributes.EMPTY), parent), new DisabledLoginCallback(), new Delete.DisabledCallback()); // Specific version is still found assertTrue(new GoogleStorageFindFeature(session).find(test)); assertTrue(new DefaultFindFeature(session).find(test)); assertFalse(new GoogleStorageFindFeature(session).find(new Path(test).withAttributes(PathAttributes.EMPTY))); assertFalse(new DefaultFindFeature(session).find(new Path(test).withAttributes(PathAttributes.EMPTY))); assertTrue(new GoogleStorageAttributesFinderFeature(session).find(new Path(test)).isDuplicate()); assertTrue(new DefaultAttributesFinderFeature(session).find(new Path(test)).isDuplicate()); }
/**
 * Converts protobuf string statistics into the reader's StringStatistics model.
 *
 * Stats written by the ORIGINAL Hive writer are only used at row-group granularity; for rolled-up
 * (stripe/file) stats from that writer, null is returned so they are ignored. Min/max values are
 * truncated to the valid UTF-8 range for the given writer version; a missing sum defaults to 0.
 */
@VisibleForTesting
static StringStatistics toStringStatistics(HiveWriterVersion hiveWriterVersion, DwrfProto.StringStatistics stringStatistics, boolean isRowGroup) {
    boolean untrustedRolledUpStats = hiveWriterVersion == ORIGINAL && !isRowGroup;
    if (untrustedRolledUpStats) {
        return null;
    }

    Slice minimum = null;
    if (stringStatistics.hasMinimum()) {
        minimum = minStringTruncateToValidRange(byteStringToSlice(stringStatistics.getMinimumBytes()), hiveWriterVersion);
    }
    Slice maximum = null;
    if (stringStatistics.hasMaximum()) {
        maximum = maxStringTruncateToValidRange(byteStringToSlice(stringStatistics.getMaximumBytes()), hiveWriterVersion);
    }
    long sum = stringStatistics.hasSum() ? stringStatistics.getSum() : 0;

    return new StringStatistics(minimum, maximum, sum);
}
// Covers toStringStatistics end to end: ORIGINAL-writer rolled-up stats are dropped (null) while
// its row-group stats are kept; sum-only, min-only, max-only and full stats map through for the
// current writer version; finally, min/max truncation to the valid UTF-8 range is exercised over
// every combination of prefix/code-point/suffix test sequences for both writer versions.
@Test public void testToStringStatistics() { // ORIGINAL version only produces stats at the row group level assertNull(DwrfMetadataReader.toStringStatistics( HiveWriterVersion.ORIGINAL, DwrfProto.StringStatistics.newBuilder() .setMinimum("ant") .setMaximum("cat") .setSum(44) .build(), false)); // having only sum should work for current version for (boolean isRowGroup : ImmutableList.of(true, false)) { assertEquals( DwrfMetadataReader.toStringStatistics( HiveWriterVersion.ORC_HIVE_8732, DwrfProto.StringStatistics.newBuilder() .setSum(45) .build(), isRowGroup), new StringStatistics(null, null, 45)); } // and the ORIGINAL version row group stats (but not rolled up stats) assertEquals( DwrfMetadataReader.toStringStatistics( HiveWriterVersion.ORIGINAL, DwrfProto.StringStatistics.newBuilder() .setSum(45) .build(), true), new StringStatistics(null, null, 45)); // having only a min or max should work assertEquals( DwrfMetadataReader.toStringStatistics( HiveWriterVersion.ORC_HIVE_8732, DwrfProto.StringStatistics.newBuilder() .setMinimum("ant") .build(), true), new StringStatistics(Slices.utf8Slice("ant"), null, 0)); assertEquals( DwrfMetadataReader.toStringStatistics( HiveWriterVersion.ORC_HIVE_8732, DwrfProto.StringStatistics.newBuilder() .setMaximum("cat") .build(), true), new StringStatistics(null, Slices.utf8Slice("cat"), 0)); // normal full stat assertEquals( DwrfMetadataReader.toStringStatistics( HiveWriterVersion.ORC_HIVE_8732, DwrfProto.StringStatistics.newBuilder() .setMinimum("ant") .setMaximum("cat") .setSum(79) .build(), true), new StringStatistics(Slices.utf8Slice("ant"), Slices.utf8Slice("cat"), 79)); for (Slice prefix : ALL_UTF8_SEQUENCES) { for (int testCodePoint : TEST_CODE_POINTS) { Slice codePoint = codePointToUtf8(testCodePoint); for (Slice suffix : ALL_UTF8_SEQUENCES) { Slice testValue = concatSlice(prefix, codePoint, suffix); testStringStatisticsTruncation(testValue, HiveWriterVersion.ORIGINAL); testStringStatisticsTruncation(testValue, 
HiveWriterVersion.ORC_HIVE_8732); } } } }
/**
 * Resolves the JAR artifact to containerize.
 *
 * Reads the maven-jar-plugin "default-jar" execution for a custom classifier and output directory
 * (a relative outputDirectory is resolved against the project basedir). When Spring Boot
 * repackaging would replace the thin JAR in place (same directory, final name and classifier),
 * the renamed ".jar.original" file is used instead, and is copied to a temp directory under a
 * name ending in ".jar" so that "java -cp *" classpath wildcards still match it.
 *
 * @return path to the JAR to use, possibly a temp copy
 * @throws IOException if copying the ".jar.original" file fails
 */
@VisibleForTesting Path getJarArtifact() throws IOException { Optional<String> classifier = Optional.empty(); Path buildDirectory = Paths.get(project.getBuild().getDirectory()); Path outputDirectory = buildDirectory; // Read <classifier> and <outputDirectory> from maven-jar-plugin. Plugin jarPlugin = project.getPlugin("org.apache.maven.plugins:maven-jar-plugin"); if (jarPlugin != null) { for (PluginExecution execution : jarPlugin.getExecutions()) { if ("default-jar".equals(execution.getId())) { Xpp3Dom configuration = (Xpp3Dom) execution.getConfiguration(); classifier = getChildValue(configuration, "classifier"); Optional<String> directoryString = getChildValue(configuration, "outputDirectory"); if (directoryString.isPresent()) { outputDirectory = project.getBasedir().toPath().resolve(directoryString.get()); } break; } } } String finalName = project.getBuild().getFinalName(); String suffix = ".jar"; Optional<Xpp3Dom> bootConfiguration = getSpringBootRepackageConfiguration(); if (bootConfiguration.isPresent()) { log(LogEvent.lifecycle("Spring Boot repackaging (fat JAR) detected; using the original JAR")); // Spring renames original JAR only when replacing it, so check if the paths are clashing. Optional<String> bootFinalName = getChildValue(bootConfiguration.get(), "finalName"); Optional<String> bootClassifier = getChildValue(bootConfiguration.get(), "classifier"); boolean sameDirectory = outputDirectory.equals(buildDirectory); // If Boot <finalName> is undefined, it uses the default project <finalName>. boolean sameFinalName = !bootFinalName.isPresent() || finalName.equals(bootFinalName.get()); boolean sameClassifier = classifier.equals(bootClassifier); if (sameDirectory && sameFinalName && sameClassifier) { suffix = ".jar.original"; } } String noSuffixJarName = finalName + (classifier.isPresent() ? 
'-' + classifier.get() : ""); Path jarPath = outputDirectory.resolve(noSuffixJarName + suffix); log(LogEvent.debug("Using JAR: " + jarPath)); if (".jar".equals(suffix)) { return jarPath; } // "*" in "java -cp *" doesn't work if JAR doesn't end with ".jar". Copy the JAR with a new name // ending with ".jar". Path tempDirectory = tempDirectoryProvider.newDirectory(); Path newJarPath = tempDirectory.resolve(noSuffixJarName + ".original.jar"); Files.copy(jarPath, newJarPath); return newJarPath; }
// A relative <outputDirectory> configured on the maven-jar-plugin "default-jar" execution must be
// resolved against the project basedir, not the build directory or the working directory.
@Test public void testGetJarArtifact_relativeOutputDirectoryFromJarPlugin() throws IOException { when(mockMavenProject.getBasedir()).thenReturn(new File("/base/dir")); when(mockBuild.getDirectory()).thenReturn(temporaryFolder.getRoot().toString()); when(mockBuild.getFinalName()).thenReturn("helloworld-1"); when(mockMavenProject.getPlugin("org.apache.maven.plugins:maven-jar-plugin")) .thenReturn(mockPlugin); when(mockPlugin.getExecutions()).thenReturn(Arrays.asList(mockPluginExecution)); when(mockPluginExecution.getId()).thenReturn("default-jar"); when(mockPluginExecution.getConfiguration()).thenReturn(pluginConfiguration); addXpp3DomChild(pluginConfiguration, "outputDirectory", Paths.get("relative").toString()); assertThat(mavenProjectProperties.getJarArtifact()) .isEqualTo(Paths.get("/base/dir/relative/helloworld-1.jar")); }
/**
 * Copies this builder's configured properties onto {@code instance}, after applying the parent
 * builder's properties. Each property is transferred only when it was explicitly set (non-null for
 * object fields, non-empty/non-blank for strings), so unset values leave the target untouched.
 */
@Override public void build(T instance) { super.build(instance); if (!StringUtils.isEmpty(version)) { instance.setVersion(version); } if (!StringUtils.isEmpty(group)) { instance.setGroup(group); } if (deprecated != null) { instance.setDeprecated(deprecated); } if (delay != null) { instance.setDelay(delay); } if (export != null) { instance.setExport(export); } if (weight != null) { instance.setWeight(weight); } if (!StringUtils.isEmpty(document)) { instance.setDocument(document); } if (dynamic != null) { instance.setDynamic(dynamic); } if (!StringUtils.isEmpty(token)) { instance.setToken(token); } if (!StringUtils.isEmpty(accesslog)) { instance.setAccesslog(accesslog); } if (protocols != null) { instance.setProtocols(protocols); } if (!StringUtils.isEmpty(protocolIds)) { instance.setProtocolIds(protocolIds); } if (executes != null) { instance.setExecutes(executes); } if (register != null) { instance.setRegister(register); } if (warmup != null) { instance.setWarmup(warmup); } if (!StringUtils.isEmpty(serialization)) { instance.setSerialization(serialization); } if (executor != null) { instance.setExecutor(executor); } if (StringUtils.isNotBlank(preferSerialization)) { instance.setPreferSerialization(preferSerialization); } }
@Test
void tag() {
  // A tag set on the builder must be carried onto the built service config.
  ServiceBuilder serviceBuilder = new ServiceBuilder();
  serviceBuilder.tag("tag");

  Assertions.assertEquals("tag", serviceBuilder.build().getTag());
}
/**
 * Applies an ALTER LOAD statement to this running job under the write lock. Only the 'priority'
 * property is handled here: the new priority is recorded, persisted to the edit log for replay,
 * and pushed to the loading-task scheduler for every already-created loading task.
 */
@Override public void alterJob(AlterLoadStmt stmt) throws DdlException { writeLock(); try { if (stmt.getAnalyzedJobProperties().containsKey(LoadStmt.PRIORITY)) { priority = LoadPriority.priorityByName(stmt.getAnalyzedJobProperties().get(LoadStmt.PRIORITY)); AlterLoadJobOperationLog log = new AlterLoadJobOperationLog(id, stmt.getAnalyzedJobProperties()); GlobalStateMgr.getCurrentState().getEditLog().logAlterLoadJob(log); for (LoadTask loadTask : newLoadingTasks) { GlobalStateMgr.getCurrentState().getLoadingLoadTaskScheduler().updatePriority( loadTask.getSignature(), priority); } } } finally { writeUnlock(); } }
// Builds a BrokerLoadJob from a mocked LOAD statement (JMockit Expectations stub out the label,
// database, table and broker lookups), verifies the job's initial fields via reflection, then
// applies an ALTER LOAD carrying priority=HIGH and expects no DdlException.
@Test public void testAlterLoad(@Injectable LoadStmt loadStmt, @Injectable AlterLoadStmt alterLoadStmt, @Injectable DataDescription dataDescription, @Injectable LabelName labelName, @Injectable Database database, @Injectable OlapTable olapTable, @Mocked GlobalStateMgr globalStateMgr) { String label = "label"; long dbId = 1; String tableName = "table"; String databaseName = "database"; List<DataDescription> dataDescriptionList = Lists.newArrayList(); dataDescriptionList.add(dataDescription); BrokerDesc brokerDesc = new BrokerDesc("broker0", Maps.newHashMap()); Map<String, String> properties = new HashMap<>(); properties.put(LoadStmt.PRIORITY, "HIGH"); new Expectations() { { loadStmt.getLabel(); minTimes = 0; result = labelName; labelName.getDbName(); minTimes = 0; result = databaseName; labelName.getLabelName(); minTimes = 0; result = label; globalStateMgr.getDb(databaseName); minTimes = 0; result = database; loadStmt.getDataDescriptions(); minTimes = 0; result = dataDescriptionList; dataDescription.getTableName(); minTimes = 0; result = tableName; database.getTable(tableName); minTimes = 0; result = olapTable; dataDescription.getPartitionNames(); minTimes = 0; result = null; database.getId(); minTimes = 0; result = dbId; loadStmt.getBrokerDesc(); minTimes = 0; result = brokerDesc; loadStmt.getEtlJobType(); minTimes = 0; result = EtlJobType.BROKER; alterLoadStmt.getAnalyzedJobProperties(); minTimes = 0; result = properties; } }; try { BrokerLoadJob brokerLoadJob = (BrokerLoadJob) BulkLoadJob.fromLoadStmt(loadStmt, null); Assert.assertEquals(Long.valueOf(dbId), Deencapsulation.getField(brokerLoadJob, "dbId")); Assert.assertEquals(label, Deencapsulation.getField(brokerLoadJob, "label")); Assert.assertEquals(JobState.PENDING, Deencapsulation.getField(brokerLoadJob, "state")); Assert.assertEquals(EtlJobType.BROKER, Deencapsulation.getField(brokerLoadJob, "jobType")); brokerLoadJob.alterJob(alterLoadStmt); } catch (DdlException e) { Assert.fail(e.getMessage()); } }
Optional<List<ComparisonDifference>> registeredComparisonDifferencesOf(DualValue dualValue) { return this.dualValues.stream() // use sameValues to get already visited dual values with different location .filter(visitedDualValue -> visitedDualValue.dualValue.sameValues(dualValue)) .findFirst() .map(visitedDualValue -> visitedDualValue.comparisonDifferences); }
@Test void should_return_empty_optional_for_unknown_dual_values() { // GIVEN VisitedDualValues visitedDualValues = new VisitedDualValues(); DualValue dualValue = new DualValue(list(""), "abc", "abc"); // WHEN Optional<List<ComparisonDifference>> optionalComparisonDifferences = visitedDualValues.registeredComparisonDifferencesOf(dualValue); // THEN then(optionalComparisonDifferences).isEmpty(); }
/**
 * Loads schema metadata from the openGauss data source: discovers all schema names on one
 * connection, bulk-loads index and column metadata grouped per schema, then assembles a
 * SchemaMetaData per schema (schemas without entries get empty multimaps, i.e. no tables).
 *
 * @throws SQLException if obtaining the connection or any metadata query fails
 */
@Override public Collection<SchemaMetaData> load(final MetaDataLoaderMaterial material) throws SQLException { try (Connection connection = material.getDataSource().getConnection()) { Collection<String> schemaNames = SchemaMetaDataLoader.loadSchemaNames(connection, TypedSPILoader.getService(DatabaseType.class, "openGauss")); Map<String, Multimap<String, IndexMetaData>> schemaIndexMetaDataMap = loadIndexMetaDataMap(connection, schemaNames); Map<String, Multimap<String, ColumnMetaData>> schemaColumnMetaDataMap = loadColumnMetaDataMap(connection, material.getActualTableNames(), schemaNames); Collection<SchemaMetaData> result = new LinkedList<>(); for (String each : schemaNames) { Multimap<String, IndexMetaData> tableIndexMetaDataMap = schemaIndexMetaDataMap.getOrDefault(each, LinkedHashMultimap.create()); Multimap<String, ColumnMetaData> tableColumnMetaDataMap = schemaColumnMetaDataMap.getOrDefault(each, LinkedHashMultimap.create()); result.add(new SchemaMetaData(each, createTableMetaDataList(tableIndexMetaDataMap, tableColumnMetaDataMap))); } return result; } }
// Mocks the JDBC layer (schema list, table/column, primary-key and both index metadata queries)
// and verifies that load() assembles the expected metadata for the single table "tbl".
@Test void assertLoadWithTables() throws SQLException { DataSource dataSource = mockDataSource(); ResultSet schemaResultSet = mockSchemaMetaDataResultSet(); when(dataSource.getConnection().getMetaData().getSchemas()).thenReturn(schemaResultSet); ResultSet tableResultSet = mockTableMetaDataResultSet(); when(dataSource.getConnection().prepareStatement(TABLE_META_DATA_SQL_WITH_TABLES).executeQuery()).thenReturn(tableResultSet); ResultSet primaryKeyResultSet = mockPrimaryKeyMetaDataResultSet(); when(dataSource.getConnection().prepareStatement(PRIMARY_KEY_META_DATA_SQL).executeQuery()).thenReturn(primaryKeyResultSet); ResultSet indexResultSet = mockIndexMetaDataResultSet(); when(dataSource.getConnection().prepareStatement(BASIC_INDEX_META_DATA_SQL).executeQuery()).thenReturn(indexResultSet); ResultSet advanceIndexResultSet = mockAdvanceIndexMetaDataResultSet(); when(dataSource.getConnection().prepareStatement(ADVANCE_INDEX_META_DATA_SQL).executeQuery()).thenReturn(advanceIndexResultSet); assertTableMetaDataMap(getDialectTableMetaDataLoader().load(new MetaDataLoaderMaterial(Collections.singletonList("tbl"), dataSource, new OpenGaussDatabaseType(), "sharding_db"))); }
/**
 * REST endpoint: DELETE a resource identified by its full name (and optional tenant code) on
 * behalf of the session user. Delegates to the resource service; failures are mapped to
 * DELETE_RESOURCE_ERROR by the @ApiException handler.
 */
@Operation(summary = "deleteResource", description = "DELETE_RESOURCE_BY_ID_NOTES") @Parameters({ @Parameter(name = "fullName", description = "RESOURCE_FULLNAME", required = true, schema = @Schema(implementation = String.class, example = "test/")) }) @DeleteMapping() @ResponseStatus(HttpStatus.OK) @ApiException(DELETE_RESOURCE_ERROR) public Result<Object> deleteResource(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "fullName") String fullName, @RequestParam(value = "tenantCode", required = false) String tenantCode) throws Exception { return resourceService.delete(loginUser, fullName, tenantCode); }
// Stubs the resource service to succeed and issues DELETE /resources with fullName and tenantCode;
// the controller must answer HTTP 200 with a JSON body whose code is SUCCESS.
@Test public void testDeleteResource() throws Exception { Result mockResult = new Result<>(); mockResult.setCode(Status.SUCCESS.getCode()); Mockito.when(resourcesService.delete(Mockito.any(), Mockito.anyString(), Mockito.anyString())) .thenReturn(mockResult); MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("fullName", "dolphinscheduler/resourcePath"); paramsMap.add("tenantCode", "123"); MvcResult mvcResult = mockMvc.perform(delete("/resources") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); }
/**
 * Parses the given textual representation into a {@link Duration} according to this style.
 *
 * @param text the duration text to parse
 * @return the parsed duration
 */
public abstract Duration parse(String text);
@Test
public void testLongDays() {
  // LONG style spells the unit out in words, e.g. "1 days" / "2 days".
  for (int days = 1; days <= 2; days++) {
    Assert.assertEquals(Duration.ofDays(days), DurationStyle.LONG.parse(days + " days"));
  }
}
/**
 * Exchanges an OAuth callback (code + state) with the social platform for the authenticated user.
 * Builds the platform-specific AuthRequest, performs the login, logs the round trip, and throws
 * SOCIAL_USER_AUTH_FAILURE (carrying the platform's message) when the response is not ok.
 */
@Override public AuthUser getAuthUser(Integer socialType, Integer userType, String code, String state) { // 构建请求 (build the request) AuthRequest authRequest = buildAuthRequest(socialType, userType); AuthCallback authCallback = AuthCallback.builder().code(code).state(state).build(); // 执行请求 (execute the request) AuthResponse<?> authResponse = authRequest.login(authCallback); log.info("[getAuthUser][请求社交平台 type({}) request({}) response({})]", socialType, toJsonString(authCallback), toJsonString(authResponse)); if (!authResponse.ok()) { throw exception(SOCIAL_USER_AUTH_FAILURE, authResponse.getMsg()); } return (AuthUser) authResponse.getData(); }
// Simulates a failed third-party login: the mocked AuthRequest returns a non-OK
// AuthResponse, so getAuthUser must throw SOCIAL_USER_AUTH_FAILURE with the
// platform's message. The argThat matcher doubles as verification of the callback.
@Test
public void testAuthSocialUser_fail() {
    // prepare parameters
    Integer socialType = SocialTypeEnum.WECHAT_MP.getType();
    Integer userType = randomPojo(UserTypeEnum.class).getValue();
    String code = randomString();
    String state = randomString();
    // mock the AuthRequest resolved for the WECHAT_MP platform
    AuthRequest authRequest = mock(AuthRequest.class);
    when(authRequestFactory.get(eq("WECHAT_MP"))).thenReturn(authRequest);
    // mock a failing AuthResponse (code 0 is treated as not-ok here);
    // the argThat lambda also asserts the code/state forwarded in the callback
    AuthResponse<?> authResponse = new AuthResponse<>(0, "模拟失败", null);
    when(authRequest.login(argThat(authCallback -> {
        assertEquals(code, authCallback.getCode());
        assertEquals(state, authCallback.getState());
        return true;
    }))).thenReturn(authResponse);
    // invoke and assert the expected service exception and message
    assertServiceException(
            () -> socialClientService.getAuthUser(socialType, userType, code, state),
            SOCIAL_USER_AUTH_FAILURE, "模拟失败");
}
/**
 * Returns the current Unix epoch time in whole seconds.
 * Note: the int return truncates after 2038-01-19 (Y2038), inherent to the
 * existing signature.
 */
public static int getUTCTimestamp() {
    final long epochMillis = System.currentTimeMillis();
    return (int) (epochMillis / 1000L);
}
/** Sanity check: the epoch-seconds timestamp is strictly positive. */
@Test
public void testGetUTCTimestamp() {
    final int nowSeconds = Tools.getUTCTimestamp();
    assertTrue(nowSeconds > 0);
}
/**
 * Returns the {@link MetricsInfo} registered under the given name/description.
 * Backed by the Info singleton's cache; repeated calls with the same arguments
 * return the identical instance (see testInfo). Behavior of add() when the name
 * exists with a different description is defined by the cache — not visible here.
 */
public static MetricsInfo info(String name, String description) {
    return Info.INSTANCE.cache.add(name, description);
}
// Registering the same name/description twice must yield the same cached instance.
@Test
public void testInfo() {
    MetricsInfo info = info("m", "m desc");
    assertSame("same info", info, info("m", "m desc"));
}
public static <T> T[] replaceFirst(T[] src, T oldValue, T[] newValues) { int index = indexOf(src, oldValue); if (index == -1) { return src; } T[] dst = (T[]) Array.newInstance(src.getClass().getComponentType(), src.length - 1 + newValues.length); // copy the first part till the match System.arraycopy(src, 0, dst, 0, index); // copy the second part from the match System.arraycopy(src, index + 1, dst, index + newValues.length, src.length - index - 1); // copy the newValues into the dst System.arraycopy(newValues, 0, dst, index, newValues.length); return dst; }
/**
 * replaceFirst on an empty source must return an empty array and ignore
 * newValues entirely. (Removed the System.out.println debug noise — the
 * assertion fully covers the contract; tests should not print.)
 */
@Test
public void replace_whenSrcEmpty() {
    Integer[] result = replaceFirst(new Integer[]{}, 6, new Integer[]{3, 4});
    assertArrayEquals(new Integer[]{}, result);
}
/**
 * Maps a Postgres column type definition to a SeaTunnel {@link Column}.
 * Sets the SeaTunnel data type (and, where relevant, length/scale/sourceType)
 * per Postgres data-type name; unknown types raise a convert error.
 */
@Override
public Column convert(BasicTypeDefine typeDefine) {
    // Start from the metadata common to every type; the switch refines it.
    PhysicalColumn.PhysicalColumnBuilder builder =
            PhysicalColumn.builder()
                    .name(typeDefine.getName())
                    .sourceType(typeDefine.getColumnType())
                    .nullable(typeDefine.isNullable())
                    .defaultValue(typeDefine.getDefaultValue())
                    .comment(typeDefine.getComment());
    String pgDataType = typeDefine.getDataType().toLowerCase();
    switch (pgDataType) {
        case PG_BOOLEAN:
            builder.dataType(BasicType.BOOLEAN_TYPE);
            break;
        case PG_BOOLEAN_ARRAY:
            builder.dataType(ArrayType.BOOLEAN_ARRAY_TYPE);
            break;
        case PG_SMALLSERIAL:
        case PG_SMALLINT:
            builder.dataType(BasicType.SHORT_TYPE);
            break;
        case PG_SMALLINT_ARRAY:
            builder.dataType(ArrayType.SHORT_ARRAY_TYPE);
            break;
        case PG_INTEGER:
        case PG_SERIAL:
            builder.dataType(BasicType.INT_TYPE);
            break;
        case PG_INTEGER_ARRAY:
            builder.dataType(ArrayType.INT_ARRAY_TYPE);
            break;
        case PG_BIGINT:
        case PG_BIGSERIAL:
            builder.dataType(BasicType.LONG_TYPE);
            break;
        case PG_BIGINT_ARRAY:
            builder.dataType(ArrayType.LONG_ARRAY_TYPE);
            break;
        case PG_REAL:
            builder.dataType(BasicType.FLOAT_TYPE);
            break;
        case PG_REAL_ARRAY:
            builder.dataType(ArrayType.FLOAT_ARRAY_TYPE);
            break;
        case PG_DOUBLE_PRECISION:
            builder.dataType(BasicType.DOUBLE_TYPE);
            break;
        case PG_DOUBLE_PRECISION_ARRAY:
            builder.dataType(ArrayType.DOUBLE_ARRAY_TYPE);
            break;
        case PG_NUMERIC:
            // numeric without a declared precision falls back to the defaults
            DecimalType decimalType;
            if (typeDefine.getPrecision() != null && typeDefine.getPrecision() > 0) {
                decimalType =
                        new DecimalType(
                                typeDefine.getPrecision().intValue(), typeDefine.getScale());
            } else {
                decimalType = new DecimalType(DEFAULT_PRECISION, DEFAULT_SCALE);
            }
            builder.dataType(decimalType);
            break;
        case PG_MONEY:
            // -92233720368547758.08 to +92233720368547758.07, With the sign bit it's 20, we use
            // 30 precision to save it
            DecimalType moneyDecimalType;
            moneyDecimalType = new DecimalType(30, 2);
            builder.dataType(moneyDecimalType);
            builder.columnLength(30L);
            builder.scale(2);
            break;
        case PG_CHAR:
        case PG_CHARACTER:
            builder.dataType(BasicType.STRING_TYPE);
            if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) {
                // char with no declared length defaults to length 1 (4-byte-safe)
                builder.columnLength(TypeDefineUtils.charTo4ByteLength(1L));
                builder.sourceType(pgDataType);
            } else {
                builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength()));
                builder.sourceType(String.format("%s(%s)", pgDataType, typeDefine.getLength()));
            }
            break;
        case PG_VARCHAR:
        case PG_CHARACTER_VARYING:
            builder.dataType(BasicType.STRING_TYPE);
            if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) {
                // unlike char, an unbounded varchar keeps no columnLength
                builder.sourceType(pgDataType);
            } else {
                builder.sourceType(String.format("%s(%s)", pgDataType, typeDefine.getLength()));
                builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength()));
            }
            break;
        case PG_TEXT:
            builder.dataType(BasicType.STRING_TYPE);
            break;
        case PG_UUID:
            builder.dataType(BasicType.STRING_TYPE);
            builder.sourceType(pgDataType);
            builder.columnLength(128L);
            break;
        case PG_JSON:
        case PG_JSONB:
        case PG_XML:
        case PG_GEOMETRY:
        case PG_GEOGRAPHY:
            // structured/spatial types are carried through as strings
            builder.dataType(BasicType.STRING_TYPE);
            break;
        case PG_CHAR_ARRAY:
        case PG_VARCHAR_ARRAY:
        case PG_TEXT_ARRAY:
            builder.dataType(ArrayType.STRING_ARRAY_TYPE);
            break;
        case PG_BYTEA:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            break;
        case PG_DATE:
            builder.dataType(LocalTimeType.LOCAL_DATE_TYPE);
            break;
        case PG_TIME:
        case PG_TIME_TZ:
            builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
            // clamp over-precise time scales to the supported maximum
            if (typeDefine.getScale() != null && typeDefine.getScale() > MAX_TIME_SCALE) {
                builder.scale(MAX_TIME_SCALE);
                log.warn(
                        "The scale of time type is larger than {}, it will be truncated to {}",
                        MAX_TIME_SCALE,
                        MAX_TIME_SCALE);
            } else {
                builder.scale(typeDefine.getScale());
            }
            break;
        case PG_TIMESTAMP:
        case PG_TIMESTAMP_TZ:
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            // clamp over-precise timestamp scales to the supported maximum
            if (typeDefine.getScale() != null && typeDefine.getScale() > MAX_TIMESTAMP_SCALE) {
                builder.scale(MAX_TIMESTAMP_SCALE);
                log.warn(
                        "The scale of timestamp type is larger than {}, it will be truncated to {}",
                        MAX_TIMESTAMP_SCALE,
                        MAX_TIMESTAMP_SCALE);
            } else {
                builder.scale(typeDefine.getScale());
            }
            break;
        default:
            throw CommonError.convertToSeaTunnelTypeError(
                    identifier(), typeDefine.getDataType(), typeDefine.getName());
    }
    return builder.build();
}
/** "int4" (the Postgres internal name for INTEGER) must map to INT_TYPE. */
@Test
public void testConvertInt() {
    BasicTypeDefine<Object> typeDefine =
            BasicTypeDefine.builder().name("test").columnType("int4").dataType("int4").build();
    Column converted = PostgresTypeConverter.INSTANCE.convert(typeDefine);
    Assertions.assertEquals(typeDefine.getName(), converted.getName());
    Assertions.assertEquals(BasicType.INT_TYPE, converted.getDataType());
    Assertions.assertEquals(typeDefine.getColumnType(), converted.getSourceType().toLowerCase());
}
/** SQL scalar: returns the parent of a Bing tile (one zoom level up). */
@Description("Return the parent for a Bing tile")
@ScalarFunction("bing_tile_parent")
@SqlType(BingTileType.NAME)
public static long bingTileParent(@SqlType(BingTileType.NAME) long input) {
    BingTile decoded = BingTile.decode(input);
    try {
        BingTile parent = decoded.findParent();
        return parent.encode();
    } catch (IllegalArgumentException e) {
        // Surface tile-math violations as SQL argument errors, keeping the cause.
        throw new PrestoException(INVALID_FUNCTION_ARGUMENT, e.getMessage(), e);
    }
}
// Covers parent derivation from quadkeys plus error handling for invalid zooms.
@Test
public void testBingTileParent() {
    assertBingTileParent("03", OptionalInt.empty(), "0");
    assertBingTileParent("0123", OptionalInt.of(2), "01");
    // newZoom above the tile's current zoom must fail
    assertInvalidFunction("bing_tile_parent(bing_tile('0'), 2)",
        "newZoom must be less than or equal to current zoom 1: 2");
    // negative newZoom must fail
    assertInvalidFunction(format("bing_tile_parent(bing_tile('0'), %s)", -1),
        "newZoom must be greater than or equal to 0: -1");
}
/**
 * Appends {@code e} to the tail chunk, growing the chunk list as needed.
 * Chunk capacities grow by ~1.5x per chunk, capped at maxChunkSize.
 *
 * @return always {@code true}, per the {@link java.util.List#add} contract
 * @throws IllegalStateException if the list already holds Integer.MAX_VALUE
 *         elements. (IllegalStateException is a RuntimeException subclass, so
 *         callers catching RuntimeException are unaffected by this refinement.)
 */
@Override
public boolean add(T e) {
    if (size == Integer.MAX_VALUE) {
        // size is an int, so the list physically cannot address more elements.
        throw new IllegalStateException("Can't add an additional element to the " +
            "list; list already has INT_MAX elements.");
    }
    if (lastChunk == null) {
        // first insertion: lazily create the initial chunk
        addChunk(initialChunkCapacity);
    } else if (lastChunk.size() >= lastChunkCapacity) {
        // current chunk is full: next chunk is 1.5x larger, capped at maxChunkSize
        int newCapacity = lastChunkCapacity + (lastChunkCapacity >> 1);
        addChunk(Math.min(newCapacity, maxChunkSize));
    }
    size++;
    return lastChunk.add(e);
}
// Micro-benchmark comparing add() throughput of ArrayList vs ChunkedArrayList.
// NOTE(review): this prints timings but asserts nothing, so it can never fail;
// the System.gc() calls between runs reduce but do not eliminate GC noise.
@Test
public void testPerformance() {
    String obj = "hello world";
    final int numElems = 1000000;
    final int numTrials = 5;
    for (int trial = 0; trial < numTrials; trial++) {
        // baseline: java.util.ArrayList
        System.gc();
        {
            ArrayList<String> arrayList = new ArrayList<String>();
            StopWatch sw = new StopWatch();
            sw.start();
            for (int i = 0; i < numElems; i++) {
                arrayList.add(obj);
            }
            System.out.println(" ArrayList " + sw.now(TimeUnit.MILLISECONDS));
        }
        // test ChunkedArrayList
        System.gc();
        {
            ChunkedArrayList<String> chunkedList = new ChunkedArrayList<String>();
            StopWatch sw = new StopWatch();
            sw.start();
            for (int i = 0; i < numElems; i++) {
                chunkedList.add(obj);
            }
            System.out.println("ChunkedArrayList " + sw.now(TimeUnit.MILLISECONDS));
        }
    }
}
/**
 * Returns a connection from the XA-aware proxy data source.
 * Pure delegation; whether the result is wrapped in a ConnectionProxyXA is
 * decided inside the proxy (e.g. by the active transaction context — see
 * testGetMariaXaConnection), not here.
 */
@Override
public Connection getConnection() throws SQLException {
    return dataSourceProxyXA.getConnection();
}
// End-to-end check that DataSourceProxyXA wraps connections in ConnectionProxyXA
// only while a global transaction (RootContext binding) is active, and that the
// wrapper chain bottoms out at the mocked MariaDB physical connection.
@Test
public void testGetMariaXaConnection() throws SQLException, ClassNotFoundException {
    // Mock a JDBC driver that hands out a MariaDbConnection mock with MariaDB metadata.
    Driver driver = Mockito.mock(Driver.class);
    Class clazz = Class.forName("org.mariadb.jdbc.MariaDbConnection");
    Connection connection = (Connection)(Mockito.mock(clazz));
    Mockito.when(connection.getAutoCommit()).thenReturn(true);
    DatabaseMetaData metaData = Mockito.mock(DatabaseMetaData.class);
    Mockito.when(metaData.getURL()).thenReturn("jdbc:mariadb:xxx");
    Mockito.when(connection.getMetaData()).thenReturn(metaData);
    Mockito.when(driver.connect(any(), any())).thenReturn(connection);
    DruidDataSource druidDataSource = new DruidDataSource();
    druidDataSource.setDriver(driver);
    DataSourceProxyXA dataSourceProxyXA = new DataSourceProxyXA(druidDataSource);
    // Outside a global transaction: a plain (non-proxied) connection is expected.
    RootContext.unbind();
    Connection connFromDataSourceProxyXA = dataSourceProxyXA.getConnection();
    Assertions.assertFalse(connFromDataSourceProxyXA instanceof ConnectionProxyXA);
    // Inside a global transaction: the XA proxy wrapper is expected.
    RootContext.bind("test");
    connFromDataSourceProxyXA = dataSourceProxyXA.getConnection();
    Assertions.assertTrue(connFromDataSourceProxyXA instanceof ConnectionProxyXA);
    ConnectionProxyXA connectionProxyXA = (ConnectionProxyXA)dataSourceProxyXA.getConnection();
    // The proxy wraps a pooled connection whose physical connection is our mock.
    Connection wrappedConnection = connectionProxyXA.getWrappedConnection();
    Assertions.assertTrue(wrappedConnection instanceof PooledConnection);
    Connection wrappedPhysicalConn = ((PooledConnection)wrappedConnection).getConnection();
    Assertions.assertSame(wrappedPhysicalConn, connection);
    // The XAConnection exposes the MariaDB connection class by name.
    XAConnection xaConnection = connectionProxyXA.getWrappedXAConnection();
    Connection connectionInXA = xaConnection.getConnection();
    Assertions.assertEquals("org.mariadb.jdbc.MariaDbConnection", connectionInXA.getClass().getName());
}
/**
 * Returns {@code defaultText} unless it is null or blank (empty or
 * whitespace-only), in which case {@code fallback} is returned.
 *
 * @param defaultText the preferred text; may be null or blank
 * @param fallback    the value to use when defaultText is absent
 * @return defaultText when present, otherwise fallback
 */
@Nonnull
public static String withEmptyFallback(@Nullable String defaultText, @Nonnull String fallback) {
    // isBlank() already treats all-whitespace as blank, so the former
    // trim().isBlank() allocated a trimmed copy for nothing.
    if (defaultText == null || defaultText.isBlank()) return fallback;
    return defaultText;
}
/** Null, empty, and whitespace-only inputs all fall back; real text passes through. */
@Test
void testWithEmptyFallback() {
    final String fallback = "fall";
    for (String absent : new String[]{null, "", " ", "\n"}) {
        assertEquals(fallback, StringUtil.withEmptyFallback(absent, fallback));
    }
    assertEquals("demo", StringUtil.withEmptyFallback("demo", fallback));
}
/**
 * Parses a configuration qualifier string (e.g. "mcc310") into {@code out}.
 * Delegates to the three-argument overload with its boolean flag set to true;
 * that flag's meaning is defined by the overload, which is not visible here.
 *
 * @return whatever the three-argument overload returns (presumably parse success)
 */
public static boolean parse(final String str, ResTable_config out) {
    return parse(str, out, true);
}
/** "mcc310" must populate the mobile-country-code field with 310. */
@Test
public void parse_mcc() {
    final ResTable_config parsed = new ResTable_config();
    ConfigDescription.parse("mcc310", parsed);
    assertThat(parsed.mcc).isEqualTo(310);
}
/**
 * Logs a projection warning. Kept as an overridable instance method so tests
 * can intercept the warnings (see testWarnWhenMultiplePatternsMatch, which
 * mocks this method to assert on the messages).
 */
protected void warn(String warning) {
    LOG.warn(warning);
}
// Uses an EasyMock partial mock (only warn() mocked) to verify that a field
// path matching two glob patterns triggers warn() once per ambiguous path,
// with a message naming both the first and second matching pattern.
@Test
public void testWarnWhenMultiplePatternsMatch() {
    StrictFieldProjectionFilter filter = createMockBuilder(StrictFieldProjectionFilter.class)
        .withConstructor(Arrays.asList("a.b.c.{x_average,z_average}", "a.*_average"))
        .addMockedMethod("warn")
        .createMock();
    // set expectations
    filter.warn("Field path: 'a.b.c.x_average' matched more than one glob path pattern. "
        + "First match: 'a.b.c.{x_average,z_average}' (when expanded to 'a.b.c.x_average') "
        + "second match:'a.*_average' (when expanded to 'a.*_average')");
    filter.warn("Field path: 'a.b.c.z_average' matched more than one glob path pattern. "
        + "First match: 'a.b.c.{x_average,z_average}' (when expanded to 'a.b.c.z_average') "
        + "second match:'a.*_average' (when expanded to 'a.*_average')");
    replay(filter);
    // matching still succeeds despite the warnings; unrelated paths are rejected
    assertMatches(filter, "a.b.c.x_average", "a.b.c.z_average", "a.other.w_average");
    assertDoesNotMatch(filter, "hello");
    verify(filter);
}
/**
 * Deletes old release histories (and the releases they reference) for the
 * namespace identified by the given history, keeping only the configured
 * retention count. Works in batches of 100 inside per-batch transactions and
 * stops early when the thread is interrupted.
 */
private void cleanReleaseHistory(ReleaseHistory cleanRelease) {
    String appId = cleanRelease.getAppId();
    String clusterName = cleanRelease.getClusterName();
    String namespaceName = cleanRelease.getNamespaceName();
    String branchName = cleanRelease.getBranchName();
    int retentionLimit = this.getReleaseHistoryRetentionLimit(cleanRelease);
    //Second check, if retentionLimit is default value, do not clean
    if (retentionLimit == DEFAULT_RELEASE_HISTORY_RETENTION_SIZE) {
        return;
    }
    // Highest history id outside the retention window; absent -> nothing to clean.
    Optional<Long> maxId = this.releaseHistoryRetentionMaxId(cleanRelease, retentionLimit);
    if (!maxId.isPresent()) {
        return;
    }
    boolean hasMore = true;
    while (hasMore && !Thread.currentThread().isInterrupted()) {
        // Fetch up to 100 histories at or below the cutoff id, oldest first.
        List<ReleaseHistory> cleanReleaseHistoryList = releaseHistoryRepository.findFirst100ByAppIdAndClusterNameAndNamespaceNameAndBranchNameAndIdLessThanEqualOrderByIdAsc(
            appId, clusterName, namespaceName, branchName, maxId.get());
        // Releases referenced by this batch are deleted along with the histories.
        Set<Long> releaseIds = cleanReleaseHistoryList.stream()
            .map(ReleaseHistory::getReleaseId)
            .collect(Collectors.toSet());
        // Delete histories and their releases atomically, one transaction per batch.
        transactionManager.execute(new TransactionCallbackWithoutResult() {
            @Override
            protected void doInTransactionWithoutResult(TransactionStatus status) {
                releaseHistoryRepository.deleteAll(cleanReleaseHistoryList);
                releaseRepository.deleteAllById(releaseIds);
            }
        });
        // A partial batch means everything at or below maxId has been consumed.
        hasMore = cleanReleaseHistoryList.size() == 100;
    }
}
// Exercises the private cleanReleaseHistory reflectively against SQL fixtures:
// retention -1 (the default) leaves all 6 rows, a global retention of 2 trims
// to 2, and a per-namespace override of 1 trims both tables to the single
// newest row (id 6), taking precedence over the global setting.
@Test
@Sql(scripts = "/sql/release-history-test.sql", executionPhase = ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = "/sql/clean.sql", executionPhase = ExecutionPhase.AFTER_TEST_METHOD)
public void testCleanReleaseHistory() {
    // Unwrap the AOP proxy so ReflectionUtils can reach the private method.
    ReleaseHistoryService service = (ReleaseHistoryService) AopProxyUtils.getSingletonTarget(releaseHistoryService);
    assert service != null;
    Method method = ReflectionUtils.findMethod(service.getClass(), "cleanReleaseHistory", ReleaseHistory.class);
    assert method != null;
    ReflectionUtils.makeAccessible(method);
    // Retention at the default (-1): cleaning must be skipped entirely.
    when(bizConfig.releaseHistoryRetentionSize()).thenReturn(-1);
    when(bizConfig.releaseHistoryRetentionSizeOverride()).thenReturn(Maps.newHashMap());
    ReflectionUtils.invokeMethod(method, service, mockReleaseHistory);
    Assert.assertEquals(6, releaseHistoryRepository.count());
    Assert.assertEquals(6, releaseRepository.count());
    // Global retention of 2: only the two newest histories/releases survive.
    when(bizConfig.releaseHistoryRetentionSize()).thenReturn(2);
    when(bizConfig.releaseHistoryRetentionSizeOverride()).thenReturn(Maps.newHashMap());
    ReflectionUtils.invokeMethod(method, service, mockReleaseHistory);
    Assert.assertEquals(2, releaseHistoryRepository.count());
    Assert.assertEquals(2, releaseRepository.count());
    // Namespace-specific override of 1 takes precedence over the global 2.
    when(bizConfig.releaseHistoryRetentionSize()).thenReturn(2);
    when(bizConfig.releaseHistoryRetentionSizeOverride()).thenReturn(
        ImmutableMap.of("kl-app+default+application+default", 1));
    ReflectionUtils.invokeMethod(method, service, mockReleaseHistory);
    Assert.assertEquals(1, releaseHistoryRepository.count());
    Assert.assertEquals(1, releaseRepository.count());
    // The survivor must be the newest row (id 6) in both tables.
    Iterable<ReleaseHistory> historyList = releaseHistoryRepository.findAll();
    historyList.forEach(history -> Assert.assertEquals(6, history.getId()));
    Iterable<Release> releaseList = releaseRepository.findAll();
    releaseList.forEach(release -> Assert.assertEquals(6, release.getId()));
}
@Override public List<ColumnStatistics> getTableColumnStatistics( String catName, String dbName, String tableName, List<String> colNames) throws MetaException, NoSuchObjectException { // Note: this will get stats without verifying ACID. boolean committed = false; Query query = null; List<ColumnStatistics> result = new ArrayList<>(); try { openTransaction(); query = pm.newQuery(MTableColumnStatistics.class); query.setFilter("table.tableName == t1 && table.database.name == t2 && table.database.catalogName == t3"); query.declareParameters("java.lang.String t1, java.lang.String t2, java.lang.String t3"); query.setResult("DISTINCT engine"); Collection names = (Collection) query.execute(tableName, dbName, catName); List<String> engines = new ArrayList<>(); for (Iterator i = names.iterator(); i.hasNext();) { engines.add((String) i.next()); } for (String e : engines) { ColumnStatistics cs = getTableColumnStatisticsInternal( catName, dbName, tableName, colNames, e, true, true); if (cs != null) { result.add(cs); } } committed = commitTransaction(); return result; } finally { LOG.debug("Done executing getTableColumnStatistics with status : {}", committed); rollbackAndCleanup(committed, query); } }
// A statistics lookup on a non-existent table must raise NoSuchObjectException.
@Test (expected = NoSuchObjectException.class)
public void testTableOpsWhenTableDoesNotExist() throws NoSuchObjectException, MetaException {
    List<String> colNames = Arrays.asList("c0", "c1");
    objectStore.getTableColumnStatistics(DEFAULT_CATALOG_NAME, DB1, "not_existed_table", colNames, ENGINE, "");
}
/**
 * Renders the insert values for the given route unit, dropping the trailing
 * ", " separator appended after the last value.
 */
@Override
public String toString(final RouteUnit routeUnit) {
    final StringBuilder sql = new StringBuilder();
    appendInsertValue(routeUnit, sql);
    // Truncate the trailing ", " (equivalent to delete(len - 2, len)).
    sql.setLength(sql.length() - 2);
    return sql.toString();
}
// The per-value ", " separator must be stripped from the rendered output.
@Test
void assertToStringWithRouteUnit() {
    assertThat(shardingInsertValuesToken.toString(routeUnit), is("('shardingsphere', 'test')"));
}
@Override public Long createRewardActivity(RewardActivityCreateReqVO createReqVO) { // 校验商品是否冲突 validateRewardActivitySpuConflicts(null, createReqVO.getProductSpuIds()); // 插入 RewardActivityDO rewardActivity = RewardActivityConvert.INSTANCE.convert(createReqVO) .setStatus(PromotionUtils.calculateActivityStatus(createReqVO.getEndTime())); rewardActivityMapper.insert(rewardActivity); // 返回 return rewardActivity.getId(); }
// Happy path: a valid create request yields a persisted activity whose rules
// round-trip unchanged and whose status is WAIT (start time in the future).
@Test
public void testCreateRewardActivity_success() {
    // prepare parameters
    RewardActivityCreateReqVO reqVO = randomPojo(RewardActivityCreateReqVO.class, o -> {
        o.setConditionType(randomEle(PromotionConditionTypeEnum.values()).getType());
        o.setProductScope(randomEle(PromotionProductScopeEnum.values()).getScope());
        // original note said "trigger the in-progress status", but the assertion
        // below expects WAIT — both times are in the future. TODO confirm intent.
        o.setStartTime(addTime(Duration.ofDays(1))).setEndTime(addTime(Duration.ofDays(2)));
    });
    // invoke
    Long rewardActivityId = rewardActivityService.createRewardActivity(reqVO);
    // assert the generated id and the persisted record's fields
    assertNotNull(rewardActivityId);
    RewardActivityDO rewardActivity = rewardActivityMapper.selectById(rewardActivityId);
    assertPojoEquals(reqVO, rewardActivity, "rules");
    assertEquals(rewardActivity.getStatus(), PromotionActivityStatusEnum.WAIT.getStatus());
    // "rules" is excluded from the pojo comparison above, so compare element-wise
    for (int i = 0; i < reqVO.getRules().size(); i++) {
        assertPojoEquals(reqVO.getRules().get(i), rewardActivity.getRules().get(i));
    }
}
/**
 * Casts a boxed Integer to a decimal with the given precision and scale.
 * Null-safe: a null input yields a null result.
 */
public static BigDecimal cast(final Integer value, final int precision, final int scale) {
    return value == null ? null : cast(value.longValue(), precision, scale);
}
// Casting -1.19 to precision 2 / scale 1 is expected to yield -1.2 — the
// magnitude rounds up here despite the test's "RoundingDown" name; the exact
// rounding mode lives in DecimalUtil.cast, not visible from this test.
@Test
public void shouldCastDecimalRoundingDownNegative() {
    // When:
    final BigDecimal decimal = DecimalUtil.cast(new BigDecimal("-1.19"), 2, 1);
    // Then:
    assertThat(decimal, is(new BigDecimal("-1.2")));
}
/**
 * Reads a single byte from the underlying UFS stream.
 *
 * @return the byte read (0-255), or -1 at end of file
 * @throws IOException if the underlying UFS read fails
 */
@Override
public int read() throws IOException {
    if (mPosition == mLength) {
        // at end of file
        return -1;
    }
    // (Re)open or reposition the UFS stream before reading, if necessary.
    updateStreamIfNeeded();
    int res = mUfsInStream.get().read();
    if (res == -1) {
        // underlying stream ended before mPosition reached mLength
        return -1;
    }
    mPosition++;
    // Account for the byte in the UFS read metric.
    Metrics.BYTES_READ_FROM_UFS.inc(1);
    return res;
}
// Reads a 5-chunk file through the ByteBuffer read path one CHUNK_SIZE buffer
// at a time, verifying each chunk is fully read and holds the expected
// increasing-byte pattern for its offset.
@Test
public void readAllByteBuffer() throws IOException, AlluxioException {
    int len = CHUNK_SIZE * 5;
    int start = 0;
    AlluxioURI ufsPath = getUfsPath();
    createFile(ufsPath, CHUNK_SIZE * 5);
    ByteBuffer buffer = ByteBuffer.allocate(CHUNK_SIZE);
    try (FileInStream inStream = getStream(ufsPath)) {
        while (start < len) {
            // each read must fill exactly one chunk
            assertEquals(CHUNK_SIZE, inStream.read(buffer));
            assertTrue(BufferUtils.equalIncreasingByteBuffer(start, CHUNK_SIZE, buffer));
            start += CHUNK_SIZE;
            // reset the buffer for the next chunk
            buffer.clear();
        }
    }
}