focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Validates a MySQL 4.1-style scrambled password string and returns it as bytes.
 * Empty/null input yields {@code EMPTY_PASSWORD}. Otherwise the upper-cased value
 * must have length {@code SCRAMBLE_LENGTH_HEX_LENGTH}, start with
 * {@code PVERSION41_CHAR}, and contain only hex digits after the marker;
 * any violation raises {@code ErrorCode.ERR_PASSWD_LENGTH}.
 */
public static byte[] checkPassword(String passwdString) {
    if (Strings.isNullOrEmpty(passwdString)) {
        return EMPTY_PASSWORD;
    }
    byte[] passwd = passwdString.toUpperCase().getBytes(StandardCharsets.UTF_8);
    if (passwd.length != SCRAMBLE_LENGTH_HEX_LENGTH || passwd[0] != PVERSION41_CHAR) {
        throw ErrorReportException.report(ErrorCode.ERR_PASSWD_LENGTH, 41);
    }
    // Every byte after the leading version marker must be an uppercase hex digit.
    for (int i = 1; i < passwd.length; i++) {
        boolean isDigit = passwd[i] >= '0' && passwd[i] <= '9';
        boolean isHexLetter = passwd[i] >= 'A' && passwd[i] <= 'F';
        if (!isDigit && !isHexLetter) {
            throw ErrorReportException.report(ErrorCode.ERR_PASSWD_LENGTH, 41);
        }
    }
    return passwd;
}
// Feeds a 40-char scrambled string (expected length is 41 incl. the '*' marker) and expects ErrorReportException.
@Test(expected = ErrorReportException.class) public void testCheckPasswdFail() { MysqlPassword.checkPassword("*9A6EC1164108A8D3DA3BE3F35A56F6499B6FC32"); Assert.fail("No exception throws"); }
/**
 * Looks up the leader's UUID in the shared atomic reference and resolves it to
 * the matching cluster member's host name, or empty if no leader is recorded
 * or the recorded member has left the cluster.
 */
@Override
public Optional<String> getLeaderHostName() {
    UUID leaderUuid = (UUID) hzMember.getAtomicReference(LEADER).get();
    if (leaderUuid == null) {
        return Optional.empty();
    }
    return hzMember.getCluster().getMembers().stream()
        .filter(member -> member.getUuid().equals(leaderUuid))
        .findFirst()
        .map(member -> member.getAddress().getHost());
}
// With no leader elected, getLeaderHostName() must be empty (name says "null" but the contract is an empty Optional).
@Test public void return_null_if_node_is_not_leader() { try (ClusterAppStateImpl underTest = createClusterAppState()) { Optional<String> hostname = underTest.getLeaderHostName(); assertThat(hostname).isEmpty(); } }
// Resolution order matters: explicit result, then cached deserialized value, then the live future value.
// The future's value is deliberately NOT cached because it may be the caller-supplied default.
// Serialization failures are surfaced as CompletionException per the CompletionStage contract.
@Override public V getNow(V valueIfAbsent) { // if there is an explicit value set, we use that if (result != null) { return (V) result; } // if there already is a deserialized value set, use it. if (deserializedValue != VOID) { return (V) deserializedValue; } // otherwise, do not cache the value returned from future.getNow // because it might be the default valueIfAbsent Object value = future.getNow(valueIfAbsent); try { if (value instanceof ClientMessage) { return resolve(value); } else { return (value instanceof Data && deserializeResponse) ? serializationService.toObject(value) : (V) value; } } catch (HazelcastSerializationException exc) { throw new CompletionException(exc); } }
// An incomplete future must hand back the caller-supplied default from getNow.
@Test public void getNow_whenNotDoneShouldReturnDefaultValue() { assertFalse(delegatingFuture.isDone()); assertEquals(DESERIALIZED_DEFAULT_VALUE, delegatingFuture.getNow(DESERIALIZED_DEFAULT_VALUE)); }
// Unpacks the positional Object[] payload (config, meta, variable space, repository), loads the run
// configuration from the bowl-scoped manager, falls back to the embedded metastore manager, and executes
// via the type-matched executor. Throws KettleException (with the trans filename when available) only
// when no configuration is found anywhere; a found configuration with no executor is silently skipped.
@Override public void callExtensionPoint( LogChannelInterface logChannelInterface, Object o ) throws KettleException { ExecutionConfiguration executionConfiguration = (ExecutionConfiguration) ( (Object[]) o )[ 0 ]; AbstractMeta meta = (AbstractMeta) ( (Object[]) o )[ 1 ]; VariableSpace variableSpace = (VariableSpace) ( (Object[]) o )[ 2 ]; Repository repository = (Repository) ( (Object[]) o )[ 3 ]; EmbeddedMetaStore embeddedMetaStore = meta.getEmbeddedMetaStore(); RunConfigurationManager runConfigurationManager = rcmProvider.apply( meta.getBowl() ); RunConfiguration runConfiguration = runConfigurationManager.load( executionConfiguration.getRunConfiguration() ); if ( runConfiguration == null ) { RunConfigurationManager embeddedRunConfigurationManager = EmbeddedRunConfigurationManager.build( embeddedMetaStore ); runConfiguration = embeddedRunConfigurationManager.load( executionConfiguration.getRunConfiguration() ); } if ( runConfiguration != null ) { RunConfigurationExecutor runConfigurationExecutor = runConfigurationManager.getExecutor( runConfiguration.getType() ); if ( runConfigurationExecutor != null ) { runConfigurationExecutor.execute( runConfiguration, executionConfiguration, meta, variableSpace, repository ); } } else { String name = ""; if ( variableSpace instanceof TransMeta ) { name = ( (TransMeta) variableSpace ).getFilename(); } throw new KettleException( BaseMessages .getString( PKG, "RunConfigurationRunExtensionPoint.ConfigNotFound.Error", name, executionConfiguration.getRunConfiguration(), "{0}" ) ); } }
// Happy path: the resolved executor must be invoked with the unpacked payload (repository slot is null here).
@Test public void testCallExtensionPoint() throws Exception { runConfigurationRunExtensionPoint.callExtensionPoint( log, new Object[] { transExecutionConfiguration, abstractMeta, variableSpace, null } ); verify( runConfigurationExecutor ) .execute( runConfiguration, transExecutionConfiguration, abstractMeta, variableSpace, null ); }
/**
 * Reports whether the request touches a system schema: a complete database on a
 * dialect that has no default schema never does; otherwise any requested schema
 * name listed as a system schema matches, and as a last resort (dialects without
 * a default schema) the database's own name is checked against the system schemas.
 */
public static boolean containsSystemSchema(final DatabaseType databaseType, final Collection<String> schemaNames, final ShardingSphereDatabase database) {
    DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData();
    boolean hasDefaultSchema = dialectDatabaseMetaData.getDefaultSchema().isPresent();
    if (database.isComplete() && !hasDefaultSchema) {
        return false;
    }
    SystemDatabase systemDatabase = new SystemDatabase(databaseType);
    boolean anySystemSchema = schemaNames.stream().anyMatch(name -> systemDatabase.getSystemSchemas().contains(name));
    if (anySystemSchema) {
        return true;
    }
    return !hasDefaultSchema && systemDatabase.getSystemSchemas().contains(database.getName());
}
// PostgreSQL: information_schema/pg_catalog are system schemas (for both plain and customized databases); a user schema is not.
@Test void assertContainsSystemSchemaForPostgreSQL() { DatabaseType databaseType = TypedSPILoader.getService(DatabaseType.class, "PostgreSQL"); ShardingSphereDatabase informationSchemaDatabase = mockShardingSphereDatabase("information_schema", false); assertTrue(SystemSchemaUtils.containsSystemSchema(databaseType, Arrays.asList("information_schema", "pg_catalog"), informationSchemaDatabase)); ShardingSphereDatabase shardingSchemaDatabase = mockShardingSphereDatabase("sharding_db", false); assertFalse(SystemSchemaUtils.containsSystemSchema(databaseType, Collections.singletonList("sharding_db"), shardingSchemaDatabase)); ShardingSphereDatabase customizedInformationSchemaDatabase = mockShardingSphereDatabase("information_schema", true); assertTrue(SystemSchemaUtils.containsSystemSchema(databaseType, Arrays.asList("information_schema", "pg_catalog"), customizedInformationSchemaDatabase)); }
// Converts raw Kafka bytes to Connect data. null value = tombstone -> SchemaAndValue.NULL.
// With schemas enabled, the JSON must be exactly a two-field {schema, payload} envelope;
// with schemas disabled, the raw JSON is wrapped in a synthetic envelope with a null schema
// so the downstream conversion path is uniform. Deserialization errors become DataException.
@Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; // This handles a tombstone message if (value == null) { return SchemaAndValue.NULL; } try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (config.schemasEnabled() && (!jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME) || !jsonValue.has(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); // The deserialized data should either be an envelope object containing the schema and the payload or the schema // was stripped during serialization and we need to fill in an all-encompassing schema. if (!config.schemasEnabled()) { ObjectNode envelope = JSON_NODE_FACTORY.objectNode(); envelope.set(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME, null); envelope.set(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME, jsonValue); jsonValue = envelope; } Schema schema = asConnectSchema(jsonValue.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME)); return new SchemaAndValue( schema, convertToConnect(schema, jsonValue.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME), config) ); }
// An optional logical Date with a null payload must round-trip to the optional Date schema and a null value.
@Test public void dateToConnectOptional() { Schema schema = Date.builder().optional().schema(); String msg = "{ \"schema\": { \"type\": \"int32\", \"name\": \"org.apache.kafka.connect.data.Date\", \"version\": 1, \"optional\": true }, \"payload\": null }"; SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes()); assertEquals(schema, schemaAndValue.schema()); assertNull(schemaAndValue.value()); }
// Log-density defined directly as log of the density p(x); returns -Infinity where p(x) == 0.
@Override public double logp(double x) { return Math.log(p(x)); }
// Checks logp against log of precomputed density values for Logistic(2, 1) at several points.
@Test public void testLogP() { System.out.println("logP"); LogisticDistribution instance = new LogisticDistribution(2.0, 1.0); instance.rand(); assertEquals(Math.log(0.1050736), instance.logp(0.001), 1E-5); assertEquals(Math.log(0.1057951), instance.logp(0.01), 1E-5); assertEquals(Math.log(0.1131803), instance.logp(0.1), 1E-5); assertEquals(Math.log(0.1217293), instance.logp(0.2), 1E-5); assertEquals(Math.log(0.1491465), instance.logp(0.5), 1E-5); assertEquals(Math.log(0.1966119), instance.logp(1.0), 1E-5); assertEquals(Math.log(0.25), instance.logp(2.0), 1E-5); assertEquals(Math.log(0.04517666), instance.logp(5.0), 1E-5); assertEquals(Math.log(0.0003352377), instance.logp(10.0), 1E-5); }
/**
 * Deletes the note blob identified by noteId/notePath from GCS.
 * Throws IOException when the blob does not exist or when the storage call fails.
 */
@Override
public void remove(String noteId, String notePath, AuthenticationInfo subject) throws IOException {
    Preconditions.checkArgument(StringUtils.isNotEmpty(noteId));
    BlobId blobId = makeBlobId(noteId, notePath);
    try {
        // storage.delete returns false when there was nothing to delete.
        if (!storage.delete(blobId)) {
            throw new IOException("Tried to remove nonexistent blob " + blobId.toString());
        }
    } catch (StorageException se) {
        throw new IOException("Could not remove " + blobId.toString() + ": " + se.getMessage(), se);
    }
}
// Removing a nonexistent note must surface as IOException.
// Note: the previous trailing fail() inside the lambda was redundant — assertThrows
// already fails when no exception is thrown, and an AssertionError from fail() would
// have been misreported as an unexpected exception type rather than a missing one.
@Test
void testRemoveFolder_nonexistent() throws Exception {
    zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), DEFAULT_URL);
    this.notebookRepo = new GCSNotebookRepo(zConf, noteParser, storage);
    assertThrows(IOException.class, () -> notebookRepo.remove("id", "/name", AUTH_INFO));
}
// Parses a 6-byte CGM Feature packet: bytes 0-2 feature bitfield (UINT24 LE), byte 3 type/sample-location
// nibbles, bytes 4-5 CRC (UINT16 LE). When E2E-CRC is supported the CRC16/MCRF4XX over the first 4 bytes
// must match; when unsupported, the spec mandates the CRC field be 0xFFFF. Any mismatch routes to the
// corresponding error callback instead of the success callback.
@Override public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) { super.onDataReceived(device, data); if (data.size() != 6) { onInvalidDataReceived(device, data); return; } final int featuresValue = data.getIntValue(Data.FORMAT_UINT24_LE, 0); final int typeAndSampleLocation = data.getIntValue(Data.FORMAT_UINT8, 3); final int expectedCrc = data.getIntValue(Data.FORMAT_UINT16_LE, 4); final CGMFeatures features = new CGMFeatures(featuresValue); if (features.e2eCrcSupported) { final int actualCrc = CRC16.MCRF4XX(data.getValue(), 0, 4); if (actualCrc != expectedCrc) { onContinuousGlucoseMonitorFeaturesReceivedWithCrcError(device, data); return; } } else { // If the device doesn't support E2E-safety the value of the field shall be set to 0xFFFF. if (expectedCrc != 0xFFFF) { onInvalidDataReceived(device, data); return; } } @SuppressLint("WrongConstant") final int type = typeAndSampleLocation & 0x0F; // least significant nibble final int sampleLocation = typeAndSampleLocation >> 4; // most significant nibble onContinuousGlucoseMonitorFeaturesReceived(device, features, type, sampleLocation, features.e2eCrcSupported); }
// Builds a packet whose feature bits have e2eCrcSupported cleared and whose CRC field is the mandated
// 0xFFFF placeholder; asserts the success callback fires with the exact decoded feature flags, type and
// sample location, and that neither the CRC-error nor invalid-data callbacks are invoked.
@Test public void onContinuousGlucoseMeasurementFeaturesReceived_crcNotSupported() { final DataReceivedCallback callback = new CGMFeatureDataCallback() { @Override public void onContinuousGlucoseMonitorFeaturesReceived(@NonNull final BluetoothDevice device, @NonNull final CGMFeatures features, final int type, final int sampleLocation, final boolean secured) { called = true; assertNotNull(features); assertFalse(features.calibrationSupported); assertTrue(features.patientHighLowAlertsSupported); assertTrue(features.hypoAlertsSupported); assertTrue(features.hyperAlertsSupported); assertFalse(features.rateOfIncreaseDecreaseAlertsSupported); assertTrue(features.deviceSpecificAlertSupported); assertTrue(features.sensorMalfunctionDetectionSupported); assertFalse(features.sensorTempHighLowDetectionSupported); assertFalse(features.sensorResultHighLowSupported); assertTrue(features.lowBatteryDetectionSupported); assertTrue(features.sensorTypeErrorDetectionSupported); assertTrue(features.generalDeviceFaultSupported); assertFalse(features.e2eCrcSupported); assertFalse(features.multipleBondSupported); assertFalse(features.multipleSessionsSupported); assertTrue(features.cgmTrendInfoSupported); assertTrue(features.cgmQualityInfoSupported); assertEquals("Type", TYPE_ARTERIAL_PLASMA, type); assertEquals("Sample Location", SAMPLE_LOCATION_FINGER, sampleLocation); assertFalse(secured); } @Override public void onContinuousGlucoseMonitorFeaturesReceivedWithCrcError(@NonNull final BluetoothDevice device, @NonNull final Data data) { assertEquals("Correct packet but invalid CRC reported", 1, 2); } @Override public void onInvalidDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) { assertEquals("Correct packet but invalid data reported", 1, 2); } }; final MutableData data = new MutableData(new byte[6]); data.setValue(0b11000111001101110, Data.FORMAT_UINT24_LE, 0); data.setValue(0x16, Data.FORMAT_UINT8, 3); data.setValue(0xFFFF, Data.FORMAT_UINT16_LE, 4); called = 
false; //noinspection DataFlowIssue callback.onDataReceived(null, data); assertTrue(called); }
/**
 * Builds the NAnt command-line arguments: an optional quoted -buildfile flag
 * (with the path normalized to forward slashes) followed by the optional target,
 * joined by single spaces.
 */
@Override
public String arguments() {
    List<String> parts = new ArrayList<>();
    if (buildFile != null) {
        parts.add("-buildfile:\"" + FilenameUtils.separatorsToUnix(buildFile) + "\"");
    }
    if (target != null) {
        parts.add(target);
    }
    return StringUtils.join(parts, " ");
}
// Both buildfile (quoted, unix separators) and target set -> both appear, space-separated, in that order.
@Test public void shouldGiveArgumentsIncludingBuildfileAndTarget() { NantTask task = new NantTask(); task.setBuildFile("build/build.xml"); task.setTarget("compile"); assertThat(task.arguments(), is("-buildfile:\"build/build.xml\" compile")); }
/**
 * POST /confirm — confirms the caller's e-mail address. The session is resolved
 * from the Mijn DigiD session header and the confirmation itself is delegated
 * to the account service for that session's account.
 */
@PostMapping("/confirm")
public AccountResult confirmEmail(@RequestBody @Valid EmailConfirmRequest request, @RequestHeader(MijnDigidSession.MIJN_DIGID_SESSION_HEADER) String mijnDigiDsessionId) {
    MijnDigidSession session = retrieveMijnDigiDSession(mijnDigiDsessionId);
    return accountService.confirmEmail(session.getAccountId(), request);
}
// Stubs the service to return an OK result and verifies the controller passes status and error through.
// Fixed local-variable typo: "acocuntResult" -> "accountResult".
@Test
public void validEmailConfirm() {
    EmailConfirmRequest request = new EmailConfirmRequest();
    request.setEmailAddressConfirmed(true);
    AccountResult result = new EmailVerifyResult();
    result.setStatus(Status.OK);
    result.setError("error");
    when(accountService.confirmEmail(anyLong(), any())).thenReturn(result);
    AccountResult accountResult = emailController.confirmEmail(request, mijnDigiDSession.getId());
    assertEquals(Status.OK, accountResult.getStatus());
    assertEquals("error", accountResult.getError());
}
/**
 * Fetches the healthy Polaris instances for the (rewritten) service id and wraps
 * each one, together with the response metadata, as a PolarisServiceInstance.
 */
public List<ServiceInstance> getInstances(String serviceId) throws PolarisException {
    String rewrittenId = DiscoveryUtil.rewriteServiceId(serviceId);
    InstancesResponse response = polarisDiscoveryHandler.getHealthyInstances(rewrittenId);
    ServiceInstances serviceInstances = response.toServiceInstances();
    List<ServiceInstance> result = new ArrayList<>();
    for (Instance instance : serviceInstances.getInstances()) {
        result.add(new PolarisServiceInstance(instance, response.getMetadata()));
    }
    return result;
}
// Mocks the discovery handler to return one instance and asserts the wrapped result list is non-empty.
@Test public void testGetInstances() { DefaultServiceInstances mockDefaultServiceInstances = mock(DefaultServiceInstances.class); when(mockDefaultServiceInstances.getInstances()).thenReturn(singletonList(mock(DefaultInstance.class))); InstancesResponse mockInstancesResponse = mock(InstancesResponse.class); when(mockInstancesResponse.toServiceInstances()).thenReturn(mockDefaultServiceInstances); when(polarisDiscoveryHandler.getHealthyInstances(anyString())).thenReturn(mockInstancesResponse); List<ServiceInstance> serviceInstances = polarisServiceDiscovery.getInstances(SERVICE_PROVIDER); assertThat(serviceInstances).isNotEmpty(); }
public static String getTmpDir(List<String> tmpDirs) { Preconditions.checkState(!tmpDirs.isEmpty(), "No temporary directories available"); if (tmpDirs.size() == 1) { return tmpDirs.get(0); } // Use existing random instead of ThreadLocal because contention is not expected to be high. return tmpDirs.get(RANDOM.nextInt(tmpDirs.size())); }
// Single-dir list always returns that dir; for multiple dirs, samples until every dir has been
// returned at least once (at least 100 draws), proving the random selection covers all entries.
@Test public void getTmpDir() { // Test single tmp dir String singleDir = "/tmp"; List<String> singleDirList = Arrays.asList("/tmp"); assertEquals(singleDir, CommonUtils.getTmpDir(singleDirList)); // Test multiple tmp dir List<String> multiDirs = Arrays.asList("/tmp1", "/tmp2", "/tmp3"); Set<String> results = new HashSet<>(); for (int i = 0; i < 100 || results.size() != multiDirs.size(); i++) { results.add(CommonUtils.getTmpDir(multiDirs)); } assertEquals(new HashSet<>(multiDirs), results); }
// Pure delegation: executor creation is handled entirely by the wrapped "delegated" creator.
@Override public Optional<DatabaseAdminExecutor> create(final SQLStatementContext sqlStatementContext) { return delegated.create(sqlStatementContext); }
// "select VERSION()" must yield an OpenGaussSystemCatalogAdminQueryExecutor.
// NOTE(review): this calls a 4-arg create(context, sql, database, parameters) overload, not the
// 1-arg delegating overload shown above — presumably both exist on the creator; verify in the class.
@Test void assertCreateExecutorForSelectVersion() { SelectStatementContext selectStatementContext = mock(SelectStatementContext.class, RETURNS_DEEP_STUBS); when(selectStatementContext.getSqlStatement().getProjections().getProjections()).thenReturn(Collections.singletonList(new ExpressionProjectionSegment(-1, -1, "VERSION()"))); Optional<DatabaseAdminExecutor> actual = new OpenGaussAdminExecutorCreator().create(selectStatementContext, "select VERSION()", "postgres", Collections.emptyList()); assertTrue(actual.isPresent()); assertThat(actual.get(), instanceOf(OpenGaussSystemCatalogAdminQueryExecutor.class)); }
/**
 * Runs the superclass validation first; when it passes, additionally rejects
 * non-null string values longer than maxLength. Null values that passed the
 * base validation are left untouched.
 */
@Override
public ValidationResult validate(Object value) {
    ValidationResult result = super.validate(value);
    if (!(result instanceof ValidationResult.ValidationPassed)) {
        return result;
    }
    final String text = (String) value;
    if (text != null && text.length() > maxLength) {
        return new ValidationResult.ValidationFailed("Value is longer than " + maxLength + " characters!");
    }
    return result;
}
// An empty string is within a max length of 1 and must pass validation.
@Test public void testValidateEmptyValue() { assertThat(new LimitedOptionalStringValidator(1).validate("")) .isInstanceOf(ValidationResult.ValidationPassed.class); }
// Reflectively copies matching entries from the properties map onto obj's declared fields:
// non-blank values are trimmed and converted via value(); blank values go through setEmptyValue().
// Any per-field failure is rethrown as IllegalArgumentException naming the field and raw value.
public static <T> void update(Map<String, String> properties, T obj) throws IllegalArgumentException { Field[] fields = obj.getClass().getDeclaredFields(); Arrays.stream(fields).forEach(f -> { if (properties.containsKey(f.getName())) { try { f.setAccessible(true); String v = properties.get(f.getName()); if (!StringUtils.isBlank(v)) { f.set(obj, value(trim(v), f)); } else { setEmptyValue(v, f, obj); } } catch (Exception e) { throw new IllegalArgumentException(format("failed to initialize %s field while setting value %s", f.getName(), properties.get(f.getName())), e); } } }); }
// Map-typed fields: "k=v,..." strings must parse into String->String, String->int and long->String maps.
@Test public void testMap() { Map<String, String> properties = new HashMap<>(); properties.put("name", "config"); properties.put("stringStringMap", "key1=value1,key2=value2"); properties.put("stringIntMap", "key1=1,key2=2"); properties.put("longStringMap", "1=value1,2=value2"); MyConfig config = new MyConfig(); FieldParser.update(properties, config); assertEquals(config.name, "config"); assertEquals(config.stringStringMap.get("key1"), "value1"); assertEquals(config.stringStringMap.get("key2"), "value2"); assertEquals((int) config.stringIntMap.get("key1"), 1); assertEquals((int) config.stringIntMap.get("key2"), 2); assertEquals(config.longStringMap.get(1L), "value1"); assertEquals(config.longStringMap.get(2L), "value2"); }
// Entry point of the fluent API: returns an empty AutoValue builder-backed Read to be configured via with* methods.
public static <T> Read<T> read() { return new AutoValue_CassandraIO_Read.Builder<T>().build(); }
// Reads via a custom query provider that selects only person_id and writetime(person_name):
// row count must match, mapped name stays null, and the writetime timestamp is positive.
@Test public void testReadWithQueryProvider() throws Exception { String query = String.format( "select person_id, writetime(person_name) from %s.%s", CASSANDRA_KEYSPACE, CASSANDRA_TABLE); PCollection<Scientist> output = pipeline.apply( CassandraIO.<Scientist>read() .withHosts(Collections.singletonList(CASSANDRA_HOST)) .withPort(cassandraPort) .withKeyspace(CASSANDRA_KEYSPACE) .withTable(CASSANDRA_TABLE) .withMinNumberOfSplits(20) .withQuery(new MockQueryProvider(query)) .withCoder(SerializableCoder.of(Scientist.class)) .withEntity(Scientist.class)); PAssert.thatSingleton(output.apply("Count", Count.globally())).isEqualTo(NUM_ROWS); PAssert.that(output) .satisfies( input -> { for (Scientist sci : input) { assertNull(sci.name); assertTrue(sci.nameTs != null && sci.nameTs > 0); } return null; }); pipeline.run(); }
// This implementation holds no entries by construction, so it is unconditionally empty.
@Override public boolean isEmpty() { return true; }
// The shared HEADERS fixture must always report empty.
@Test public void testIsEmpty() { assertTrue(HEADERS.isEmpty()); }
// Converts decimal degrees to integer microdegrees by scaling with CONVERSION_FACTOR and truncating
// toward zero. NOTE(review): the int cast silently truncates/overflows for out-of-range inputs —
// presumably callers pass valid lat/long values; confirm if range checking is needed.
public static int degreesToMicrodegrees(double coordinate) { return (int) (coordinate * CONVERSION_FACTOR); }
// The DEGREES fixture must convert exactly to the MICRO_DEGREES fixture.
@Test public void doubleToIntTest() { int microdegrees = LatLongUtils.degreesToMicrodegrees(DEGREES); Assert.assertEquals(MICRO_DEGREES, microdegrees); }
// Convenience overload: compiles without a sort channel (Optional.empty()) by delegating to the 3-arg form.
public PagesHashStrategyFactory compilePagesHashStrategyFactory(List<Type> types, List<Integer> joinChannels) { return compilePagesHashStrategyFactory(types, joinChannels, Optional.empty()); }
// Compiles a single-VARCHAR-channel hash strategy (optionally with a precomputed hash channel) and
// exhaustively cross-checks every position pair: hashPosition against the block hash, all
// positionEquals*/rowEqualsRow variants against positionEqualsPosition ground truth, and finally that
// appendTo reproduces each input block verbatim.
@Test(dataProvider = "hashEnabledValues") public void testSingleChannel(boolean hashEnabled) { List<Type> joinTypes = ImmutableList.of(VARCHAR); List<Integer> joinChannels = Ints.asList(0); // compile a single channel hash strategy PagesHashStrategyFactory pagesHashStrategyFactory = joinCompiler.compilePagesHashStrategyFactory(joinTypes, joinChannels); // create hash strategy with a single channel blocks -- make sure there is some overlap in values List<Block> channel = ImmutableList.of( BlockAssertions.createStringSequenceBlock(10, 20), BlockAssertions.createStringSequenceBlock(20, 30), BlockAssertions.createStringSequenceBlock(15, 25)); OptionalInt hashChannel = OptionalInt.empty(); List<List<Block>> channels = ImmutableList.of(channel); if (hashEnabled) { ImmutableList.Builder<Block> hashChannelBuilder = ImmutableList.builder(); for (Block block : channel) { hashChannelBuilder.add(TypeUtils.getHashBlock(joinTypes, block)); } hashChannel = OptionalInt.of(1); channels = ImmutableList.of(channel, hashChannelBuilder.build()); } PagesHashStrategy hashStrategy = pagesHashStrategyFactory.createPagesHashStrategy(channels, hashChannel); // verify channel count assertEquals(hashStrategy.getChannelCount(), 1); // verify hashStrategy is consistent with equals and hash code from block for (int leftBlockIndex = 0; leftBlockIndex < channel.size(); leftBlockIndex++) { Block leftBlock = channel.get(leftBlockIndex); PageBuilder pageBuilder = new PageBuilder(ImmutableList.of(VARCHAR)); for (int leftBlockPosition = 0; leftBlockPosition < leftBlock.getPositionCount(); leftBlockPosition++) { // hash code of position must match block hash assertEquals(hashStrategy.hashPosition(leftBlockIndex, leftBlockPosition), hashPosition(VARCHAR, leftBlock, leftBlockPosition)); // position must be equal to itself assertTrue(hashStrategy.positionEqualsPositionIgnoreNulls(leftBlockIndex, leftBlockPosition, leftBlockIndex, leftBlockPosition)); // check equality of every position against every other 
position in the block for (int rightBlockIndex = 0; rightBlockIndex < channel.size(); rightBlockIndex++) { Block rightBlock = channel.get(rightBlockIndex); for (int rightBlockPosition = 0; rightBlockPosition < rightBlock.getPositionCount(); rightBlockPosition++) { boolean expected = positionEqualsPosition(VARCHAR, leftBlock, leftBlockPosition, rightBlock, rightBlockPosition); assertEquals(hashStrategy.positionEqualsRow(leftBlockIndex, leftBlockPosition, rightBlockPosition, new Page(rightBlock)), expected); assertEquals(hashStrategy.rowEqualsRow(leftBlockPosition, new Page(leftBlock), rightBlockPosition, new Page(rightBlock)), expected); assertEquals(hashStrategy.positionEqualsRowIgnoreNulls(leftBlockIndex, leftBlockPosition, rightBlockPosition, new Page(rightBlock)), expected); assertEquals(hashStrategy.positionEqualsPositionIgnoreNulls(leftBlockIndex, leftBlockPosition, rightBlockIndex, rightBlockPosition), expected); assertEquals(hashStrategy.positionEqualsPosition(leftBlockIndex, leftBlockPosition, rightBlockIndex, rightBlockPosition), expected); } } // check equality of every position against every other position in the block cursor for (int rightBlockIndex = 0; rightBlockIndex < channel.size(); rightBlockIndex++) { Block rightBlock = channel.get(rightBlockIndex); for (int rightBlockPosition = 0; rightBlockPosition < rightBlock.getPositionCount(); rightBlockPosition++) { boolean expected = positionEqualsPosition(VARCHAR, leftBlock, leftBlockPosition, rightBlock, rightBlockPosition); assertEquals(hashStrategy.positionEqualsRow(leftBlockIndex, leftBlockPosition, rightBlockPosition, new Page(rightBlock)), expected); assertEquals(hashStrategy.rowEqualsRow(leftBlockPosition, new Page(leftBlock), rightBlockPosition, new Page(rightBlock)), expected); assertEquals(hashStrategy.positionEqualsRowIgnoreNulls(leftBlockIndex, leftBlockPosition, rightBlockPosition, new Page(rightBlock)), expected); assertEquals(hashStrategy.positionEqualsPositionIgnoreNulls(leftBlockIndex, 
leftBlockPosition, rightBlockIndex, rightBlockPosition), expected); assertEquals(hashStrategy.positionEqualsPosition(leftBlockIndex, leftBlockPosition, rightBlockIndex, rightBlockPosition), expected); } } // write position to output block pageBuilder.declarePosition(); hashStrategy.appendTo(leftBlockIndex, leftBlockPosition, pageBuilder, 0); } // verify output block matches assertBlockEquals(VARCHAR, pageBuilder.build().getBlock(0), leftBlock); } }
// Signature-auth filter: rejects requests without an app id (400). When the app has configured
// secrets, the timestamp header must be within the allowed skew (~1 minute per the comment) and the
// Authorization header must match a signature over timestamp + URI + query for one of the available
// secrets (401 otherwise). Apps with no secrets pass through unauthenticated.
@Override public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain) throws IOException, ServletException { HttpServletRequest request = (HttpServletRequest) req; HttpServletResponse response = (HttpServletResponse) resp; String appId = accessKeyUtil.extractAppIdFromRequest(request); if (StringUtils.isBlank(appId)) { response.sendError(HttpServletResponse.SC_BAD_REQUEST, "InvalidAppId"); return; } List<String> availableSecrets = accessKeyUtil.findAvailableSecret(appId); if (!CollectionUtils.isEmpty(availableSecrets)) { String timestamp = request.getHeader(Signature.HTTP_HEADER_TIMESTAMP); String authorization = request.getHeader(HttpHeaders.AUTHORIZATION); // check timestamp, valid within 1 minute if (!checkTimestamp(timestamp)) { logger.warn("Invalid timestamp. appId={},timestamp={}", appId, timestamp); response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "RequestTimeTooSkewed"); return; } // check signature String uri = request.getRequestURI(); String query = request.getQueryString(); if (!checkAuthorization(authorization, availableSecrets, timestamp, uri, query)) { logger.warn("Invalid authorization. appId={},authorization={}", appId, authorization); response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Unauthorized"); return; } } chain.doFilter(request, response); }
// Missing app id -> 400 InvalidAppId and the chain must NOT proceed.
@Test public void testInvalidAppId() throws Exception { when(accessKeyUtil.extractAppIdFromRequest(any())).thenReturn(null); clientAuthenticationFilter.doFilter(request, response, filterChain); verify(response).sendError(HttpServletResponse.SC_BAD_REQUEST, "InvalidAppId"); verify(filterChain, never()).doFilter(request, response); }
// Exposes the local cache view's backing map; delegation only, no copying visible here.
@Override public Map<K, V> getCachedMap() { return localCacheView.getCachedMap(); }
// remove() must return the old value, shrink the local cache view, and yield null for absent keys.
@Test public void testRemove() { RLocalCachedMap<String, Integer> map = redisson.getLocalCachedMap(LocalCachedMapOptions.name("test")); Map<String, Integer> cache = map.getCachedMap(); map.put("12", 1); assertThat(cache.size()).isEqualTo(1); assertThat(map.remove("12")).isEqualTo(1); assertThat(cache.size()).isEqualTo(0); assertThat(map.remove("14")).isNull(); }
// Fails fast with IllegalStateException when accessed before initialization populates measureComputers.
@Override public Iterable<MeasureComputerWrapper> getMeasureComputers() { checkState(this.measureComputers != null, "Measure computers have not been initialized yet"); return measureComputers; }
// Accessing before initialization must raise ISE with the exact guard message.
@Test public void get_measure_computers_throws_ISE_if_not_initialized() { assertThatThrownBy(() -> underTest.getMeasureComputers()) .isInstanceOf(IllegalStateException.class) .hasMessage("Measure computers have not been initialized yet"); }
/**
 * Stringifies any object, rendering arrays (including nested ones) element-wise
 * and null as "null". Works by wrapping the value in a one-element array so that
 * Arrays.deepToString handles every case, then stripping the wrapper's brackets.
 */
public static String arrayAwareToString(Object o) {
    String wrapped = Arrays.deepToString(new Object[] {o});
    return wrapped.substring(1, wrapped.length() - 1);
}
// Covers null, enum, primitive array, nested array with null/empty, and mixed Object[] rendering.
@Test void testArrayAwareToString() { assertThat(StringUtils.arrayAwareToString(null)).isEqualTo("null"); assertThat(StringUtils.arrayAwareToString(DayOfWeek.MONDAY)).isEqualTo("MONDAY"); assertThat(StringUtils.arrayAwareToString(new int[] {1, 2, 3})).isEqualTo("[1, 2, 3]"); assertThat(StringUtils.arrayAwareToString(new byte[][] {{4, 5, 6}, null, {}})) .isEqualTo("[[4, 5, 6], null, []]"); assertThat( StringUtils.arrayAwareToString( new Object[] {new Integer[] {4, 5, 6}, null, DayOfWeek.MONDAY})) .isEqualTo("[[4, 5, 6], null, MONDAY]"); }
// Returns the API method name stored in the PATH constant.
// NOTE(review): a method named getMethod returning a constant named PATH looks inconsistent — verify the constant's value.
@Override public String getMethod() { return PATH; }
// With language code and scope set, the method name is still "getMyCommands" and validation passes.
@Test public void testGetMyCommandsWithAllSet() { GetMyCommands getMyCommands = GetMyCommands .builder() .languageCode("en") .scope(BotCommandScopeDefault.builder().build()) .build(); assertEquals("getMyCommands", getMyCommands.getMethod()); assertDoesNotThrow(getMyCommands::validate); }
// Generates the Java source for a clustering model from a template: loads the template compilation unit,
// populates the static getter, clusters and clustering-fields methods, and returns a single-entry map of
// fully-qualified class name -> source text. Throws KiePMMLException when the template class is missing.
public static Map<String, String> getKiePMMLClusteringModelSourcesMap(final ClusteringCompilationDTO compilationDTO) { logger.trace("getKiePMMLClusteringModelSourcesMap {}", compilationDTO); String simpleClassName = compilationDTO.getSimpleClassName(); CompilationUnit compilationUnit = JavaParserUtils.getKiePMMLModelCompilationUnit(simpleClassName, compilationDTO.getPackageName(), KIE_PMML_CLUSTERING_MODEL_TEMPLATE_JAVA, KIE_PMML_CLUSTERING_MODEL_TEMPLATE); ClassOrInterfaceDeclaration modelTemplate = compilationUnit.getClassByName(simpleClassName) .orElseThrow(() -> new KiePMMLException(MAIN_CLASS_NOT_FOUND + ": " + simpleClassName)); setStaticGetter(compilationDTO, modelTemplate); populateGetClustersMethod(modelTemplate, compilationDTO.getModel()); populateGetClusteringFieldsMethod(modelTemplate, compilationDTO.getModel()); Map<String, String> sourcesMap = new HashMap<>(); sourcesMap.put(getFullClassName(compilationUnit), compilationUnit.toString()); return sourcesMap; }
// The generated sources map must contain exactly one class entry for the clustering model.
@Test void getKiePMMLClusteringModelSourcesMap() { final CommonCompilationDTO<ClusteringModel> compilationDTO = CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME, pmml, clusteringModel, new PMMLCompilationContextMock(), "fileName"); Map<String, String> retrieved = KiePMMLClusteringModelFactory.getKiePMMLClusteringModelSourcesMap(ClusteringCompilationDTO.fromCompilationDTO(compilationDTO)); assertThat(retrieved).isNotNull(); assertThat(retrieved).hasSize(1); }
// Serializes migrations of the same set across processes: acquires a per-set-id ZooKeeper mutex
// asynchronously (bounded by lockMax), then runs the migration while holding the lock; the lock is
// released inside runMigrationInLock (it receives the lock for that purpose).
public CompletionStage<Void> migrate(MigrationSet set) { InterProcessLock lock = new InterProcessSemaphoreMutex(client.unwrap(), ZKPaths.makePath(lockPath, set.id())); CompletionStage<Void> lockStage = lockAsync(lock, lockMax.toMillis(), TimeUnit.MILLISECONDS, executor); return lockStage.thenCompose(__ -> runMigrationInLock(lock, set)); }
// Holds the first migration open via a latch, then asserts a second manager with a short lock timeout
// fails with AsyncWrappers.TimeoutException; after releasing the latch the first migration completes
// and its transaction's data is visible.
@Test public void testConcurrency1() throws Exception { CuratorOp op1 = client.transactionOp().create().forPath("/test"); CuratorOp op2 = client.transactionOp().create().forPath("/test/bar", "first".getBytes()); Migration migration = () -> Arrays.asList(op1, op2); MigrationSet migrationSet = MigrationSet.build("1", Collections.singletonList(migration)); CountDownLatch latch = new CountDownLatch(1); filterLatch.set(latch); CompletionStage<Void> first = manager.migrate(migrationSet); assertTrue(timing.awaitLatch(filterIsSetLatch)); MigrationManager manager2 = new MigrationManager( client, LOCK_PATH, META_DATA_PATH, executor, Duration.ofMillis(timing.forSleepingABit().milliseconds())); try { complete(manager2.migrate(migrationSet)); fail("Should throw"); } catch (Throwable e) { assertTrue( Throwables.getRootCause(e) instanceof AsyncWrappers.TimeoutException, "Should throw AsyncWrappers.TimeoutException, was: " + Throwables.getStackTraceAsString(Throwables.getRootCause(e))); } latch.countDown(); complete(first); assertArrayEquals(client.unwrap().getData().forPath("/test/bar"), "first".getBytes()); }
// Static factory: starts a fingerprint trust-manager builder for the given hash algorithm (e.g. "SHA-256").
public static FingerprintTrustManagerFactoryBuilder builder(String algorithm) { return new FingerprintTrustManagerFactoryBuilder(algorithm); }
// A fingerprint shorter than the SHA-256 digest length must be rejected at build time.
@Test public void testFingerprintWithInvalidLength() { assertThrows(IllegalArgumentException.class, new Executable() { @Override public void execute() { FingerprintTrustManagerFactory.builder("SHA-256").fingerprints("00:00:00").build(); } }); }
/**
 * Loads the job with the given id. Throws JobNotFoundException when no such job
 * exists, and wraps any SQL failure in a StorageException. The connection is
 * released via try-with-resources.
 */
@Override
public Job getJobById(UUID id) {
    try (final Connection connection = dataSource.getConnection()) {
        Optional<Job> job = jobTable(connection).selectJobById(id);
        return job.orElseThrow(() -> new JobNotFoundException(id));
    } catch (SQLException cause) {
        throw new StorageException(cause);
    }
}
// A SQLException from the result set must surface to the caller as a StorageException.
@Test void testGetJobById_WhenSqlExceptionOccursAJobStorageExceptionIsThrown() throws SQLException { doThrow(new SQLException("Boem")).when(resultSet).next(); assertThatThrownBy(() -> jobStorageProvider.getJobById(randomUUID())).isInstanceOf(StorageException.class); }
// Splits the remaining work at cur + max(1, remaining * fractionOfRemainder), where cur is the last
// attempted offset (or from-1 before any claim). BigDecimal with DECIMAL128 avoids long overflow in the
// intermediate math. Returns null when the computed split point would leave an empty residual; otherwise
// shrinks this tracker's range to the primary and returns (primary, residual).
@Override public SplitResult<OffsetRange> trySplit(double fractionOfRemainder) { // Convert to BigDecimal in computation to prevent overflow, which may result in loss of // precision. BigDecimal cur = (lastAttemptedOffset == null) ? BigDecimal.valueOf(range.getFrom()).subtract(BigDecimal.ONE, MathContext.DECIMAL128) : BigDecimal.valueOf(lastAttemptedOffset); // split = cur + max(1, (range.getTo() - cur) * fractionOfRemainder) BigDecimal splitPos = cur.add( BigDecimal.valueOf(range.getTo()) .subtract(cur, MathContext.DECIMAL128) .multiply(BigDecimal.valueOf(fractionOfRemainder), MathContext.DECIMAL128) .max(BigDecimal.ONE), MathContext.DECIMAL128); long split = splitPos.longValue(); if (split >= range.getTo()) { return null; } OffsetRange res = new OffsetRange(split, range.getTo()); this.range = new OffsetRange(range.getFrom(), split); return SplitResult.of(range, res); }
// After claiming offset 100, 50.9% of the remaining [101, 200) rounds to a split at 150;
// a subsequent split of the full remainder must be rejected (null).
@Test
public void testTrySplit() throws Exception {
    OffsetRangeTracker tracker = new OffsetRangeTracker(new OffsetRange(100, 200));
    tracker.tryClaim(100L);
    SplitResult splitRes = tracker.trySplit(0.509);
    assertEquals(new OffsetRange(100, 150), splitRes.getPrimary());
    assertEquals(new OffsetRange(150, 200), splitRes.getResidual());
    splitRes = tracker.trySplit(1);
    assertNull(splitRes);
}
/**
 * Reads the named property from the scope model's configuration,
 * returning {@code null} when it is absent.
 */
public static String getProperty(ScopeModel scopeModel, String property) {
    return getProperty(scopeModel, property, null);
}
// The property is set with a leading space (" 10000"); getProperty is expected to
// return the trimmed value.
// NOTE(review): clearProperty is skipped if the assertion fails — consider moving
// cleanup to a try/finally or @AfterEach.
@Test
void testGetProperty() {
    System.setProperty(SHUTDOWN_WAIT_KEY, " 10000");
    Assertions.assertEquals(
            "10000", ConfigurationUtils.getProperty(ApplicationModel.defaultModel(), SHUTDOWN_WAIT_KEY));
    System.clearProperty(SHUTDOWN_WAIT_KEY);
}
/**
 * Fuzzes each GET parameter of {@code request} with {@code payload}, delegating to
 * {@link #fuzzGetParameters} with the {@code FUZZING_PATHS} modifier so that
 * path-shaped parameter values get path-aware payload placement.
 */
public static ImmutableList<HttpRequest> fuzzGetParametersExpectingPathValues(
        HttpRequest request, String payload) {
    return fuzzGetParameters(
            request, payload, Optional.empty(), ImmutableSet.of(FuzzingModifier.FUZZING_PATHS));
}
// A parameter value with both a path prefix and a file extension must NOT produce the
// combined prefix+null-byte-extension variant — that shape is excluded from the results.
@Test
public void
        fuzzGetParametersExpectingPathValues_whenGetParameterValueHasPathPrefixOrFileExtension_prefixesPayloadOrAppendsFileExtension() {
    HttpRequest requestWithPathPrefixOrFileExtension =
            HttpRequest.get("https://google.com?key=resources./value").withEmptyHeaders().build();
    HttpRequest requestWithFuzzedGetParameterWithPathPrefixAndFileExtension =
            HttpRequest.get("https://google.com?key=resources./<payload>%00./value")
                    .withEmptyHeaders()
                    .build();
    assertThat(
            FuzzingUtils.fuzzGetParametersExpectingPathValues(
                    requestWithPathPrefixOrFileExtension, "<payload>"))
            .doesNotContain(requestWithFuzzedGetParameterWithPathPrefixAndFileExtension);
}
/**
 * Reconverts a SeaTunnel {@code Column} back into a DB2 {@code BasicTypeDefine}.
 * Out-of-range decimal precision/scale, binary/string lengths and timestamp scale are
 * clamped to DB2 limits, logging a warning whenever a value is adjusted.
 * Unsupported SQL types raise the common "convert to connector type" error.
 */
@Override
public BasicTypeDefine reconvert(Column column) {
    BasicTypeDefine.BasicTypeDefineBuilder builder =
            BasicTypeDefine.builder()
                    .name(column.getName())
                    .nullable(column.isNullable())
                    .comment(column.getComment())
                    .defaultValue(column.getDefaultValue());
    switch (column.getDataType().getSqlType()) {
        case BOOLEAN:
            builder.columnType(DB2_BOOLEAN);
            builder.dataType(DB2_BOOLEAN);
            break;
        case TINYINT:
        case SMALLINT:
            // DB2 has no TINYINT; both map to SMALLINT.
            builder.columnType(DB2_SMALLINT);
            builder.dataType(DB2_SMALLINT);
            break;
        case INT:
            builder.columnType(DB2_INT);
            builder.dataType(DB2_INT);
            break;
        case BIGINT:
            builder.columnType(DB2_BIGINT);
            builder.dataType(DB2_BIGINT);
            break;
        case FLOAT:
            builder.columnType(DB2_REAL);
            builder.dataType(DB2_REAL);
            break;
        case DOUBLE:
            builder.columnType(DB2_DOUBLE);
            builder.dataType(DB2_DOUBLE);
            break;
        case DECIMAL:
            DecimalType decimalType = (DecimalType) column.getDataType();
            long precision = decimalType.getPrecision();
            int scale = decimalType.getScale();
            if (precision <= 0) {
                // Non-positive precision: fall back to the configured defaults.
                precision = DEFAULT_PRECISION;
                scale = DEFAULT_SCALE;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which is precision less than 0, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        precision,
                        scale);
            } else if (precision > MAX_PRECISION) {
                // Clamp precision and shrink scale by the same amount so the
                // integer part keeps as many digits as possible.
                scale = (int) Math.max(0, scale - (precision - MAX_PRECISION));
                precision = MAX_PRECISION;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which exceeds the maximum precision of {}, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        MAX_PRECISION,
                        precision,
                        scale);
            }
            // Scale is clamped independently of the precision adjustments above.
            if (scale < 0) {
                scale = 0;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which is scale less than 0, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        precision,
                        scale);
            } else if (scale > MAX_SCALE) {
                scale = MAX_SCALE;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which exceeds the maximum scale of {}, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        MAX_SCALE,
                        precision,
                        scale);
            }
            builder.columnType(String.format("%s(%s,%s)", DB2_DECIMAL, precision, scale));
            builder.dataType(DB2_DECIMAL);
            builder.precision(precision);
            builder.scale(scale);
            break;
        case BYTES:
            if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
                // Unknown length: use the widest VARBINARY.
                builder.columnType(
                        String.format("%s(%s)", DB2_VARBINARY, MAX_VARBINARY_LENGTH));
                builder.dataType(DB2_VARBINARY);
                builder.length(column.getColumnLength());
            } else if (column.getColumnLength() <= MAX_BINARY_LENGTH) {
                builder.columnType(
                        String.format("%s(%s)", DB2_BINARY, column.getColumnLength()));
                builder.dataType(DB2_BINARY);
                builder.length(column.getColumnLength());
            } else if (column.getColumnLength() <= MAX_VARBINARY_LENGTH) {
                builder.columnType(
                        String.format("%s(%s)", DB2_VARBINARY, column.getColumnLength()));
                builder.dataType(DB2_VARBINARY);
                builder.length(column.getColumnLength());
            } else {
                // Longer than VARBINARY allows: fall back to BLOB, clamped to its maximum.
                long length = column.getColumnLength();
                if (length > MAX_BLOB_LENGTH) {
                    length = MAX_BLOB_LENGTH;
                    log.warn(
                            "The length of blob type {} is out of range, "
                                    + "it will be converted to {}({})",
                            column.getName(),
                            DB2_BLOB,
                            length);
                }
                builder.columnType(String.format("%s(%s)", DB2_BLOB, length));
                builder.dataType(DB2_BLOB);
                builder.length(length);
            }
            break;
        case STRING:
            if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
                // Unknown length: use the widest VARCHAR.
                builder.columnType(String.format("%s(%s)", DB2_VARCHAR, MAX_VARCHAR_LENGTH));
                builder.dataType(DB2_VARCHAR);
                builder.length(column.getColumnLength());
            } else if (column.getColumnLength() <= MAX_CHAR_LENGTH) {
                builder.columnType(String.format("%s(%s)", DB2_CHAR, column.getColumnLength()));
                builder.dataType(DB2_CHAR);
                builder.length(column.getColumnLength());
            } else if (column.getColumnLength() <= MAX_VARCHAR_LENGTH) {
                builder.columnType(
                        String.format("%s(%s)", DB2_VARCHAR, column.getColumnLength()));
                builder.dataType(DB2_VARCHAR);
                builder.length(column.getColumnLength());
            } else {
                // Longer than VARCHAR allows: fall back to CLOB, clamped to its maximum.
                long length = column.getColumnLength();
                if (length > MAX_CLOB_LENGTH) {
                    length = MAX_CLOB_LENGTH;
                    log.warn(
                            "The length of clob type {} is out of range, "
                                    + "it will be converted to {}({})",
                            column.getName(),
                            DB2_CLOB,
                            length);
                }
                builder.columnType(String.format("%s(%s)", DB2_CLOB, length));
                builder.dataType(DB2_CLOB);
                builder.length(length);
            }
            break;
        case DATE:
            builder.columnType(DB2_DATE);
            builder.dataType(DB2_DATE);
            break;
        case TIME:
            builder.columnType(DB2_TIME);
            builder.dataType(DB2_TIME);
            break;
        case TIMESTAMP:
            if (column.getScale() != null && column.getScale() > 0) {
                // Clamp fractional-second digits to DB2's maximum timestamp scale.
                int timestampScale = column.getScale();
                if (column.getScale() > MAX_TIMESTAMP_SCALE) {
                    timestampScale = MAX_TIMESTAMP_SCALE;
                    log.warn(
                            "The timestamp column {} type timestamp({}) is out of range, "
                                    + "which exceeds the maximum scale of {}, "
                                    + "it will be converted to timestamp({})",
                            column.getName(),
                            column.getScale(),
                            MAX_TIMESTAMP_SCALE,
                            timestampScale);
                }
                builder.columnType(String.format("%s(%s)", DB2_TIMESTAMP, timestampScale));
                builder.scale(timestampScale);
            } else {
                builder.columnType(DB2_TIMESTAMP);
            }
            builder.dataType(DB2_TIMESTAMP);
            break;
        default:
            throw CommonError.convertToConnectorTypeError(
                    DatabaseIdentifier.DB_2,
                    column.getDataType().getSqlType().name(),
                    column.getName());
    }
    return builder.build();
}
// Covers the decimal clamping paths: precision 0 falls back to the default precision,
// an in-range decimal is preserved, and precision 32 is clamped to 31 with the scale
// reduced by the same amount (31 -> 30).
@Test
public void testReconvertDecimal() {
    Column column =
            PhysicalColumn.builder().name("test").dataType(new DecimalType(0, 0)).build();
    BasicTypeDefine typeDefine = DB2TypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(
            String.format(
                    "%s(%s,%s)",
                    DB2TypeConverter.DB2_DECIMAL, DB2TypeConverter.DEFAULT_PRECISION, 0),
            typeDefine.getColumnType());
    Assertions.assertEquals(DB2TypeConverter.DB2_DECIMAL, typeDefine.getDataType());
    column = PhysicalColumn.builder().name("test").dataType(new DecimalType(10, 2)).build();
    typeDefine = DB2TypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(
            String.format("%s(%s,%s)", DB2TypeConverter.DB2_DECIMAL, 10, 2),
            typeDefine.getColumnType());
    Assertions.assertEquals(DB2TypeConverter.DB2_DECIMAL, typeDefine.getDataType());
    column = PhysicalColumn.builder().name("test").dataType(new DecimalType(32, 31)).build();
    typeDefine = DB2TypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(
            String.format("%s(%s,%s)", DB2TypeConverter.DB2_DECIMAL, 31, 30),
            typeDefine.getColumnType());
    Assertions.assertEquals(DB2TypeConverter.DB2_DECIMAL, typeDefine.getDataType());
}
/**
 * @return this convertor's identity — the MaxCompute plugin name
 */
@Override
public String getIdentity() {
    return MaxcomputeConfig.PLUGIN_NAME;
}
// The convertor must report the MaxCompute plugin name as its identity.
@Test
public void getIdentity() {
    Assertions.assertEquals(
            MaxcomputeConfig.PLUGIN_NAME, maxComputeDataTypeConvertor.getIdentity());
}
/**
 * Builds an {@code InsertColumnsToken} after every insert column that has derived
 * (assisted-query / like-query) columns configured, so those derived column names are
 * appended into the rewritten INSERT column list.
 */
@Override
public Collection<SQLToken> generateSQLTokens(final InsertStatementContext insertStatementContext) {
    Collection<SQLToken> sqlTokens = new LinkedList<>();
    String tableName = insertStatementContext.getSqlStatement().getTable().getTableName().getIdentifier().getValue();
    EncryptTable encryptTable = encryptRule.getEncryptTable(tableName);
    for (ColumnSegment columnSegment : insertStatementContext.getSqlStatement().getColumns()) {
        List<String> derivedColumnNames = getDerivedColumnNames(encryptTable, columnSegment);
        if (derivedColumnNames.isEmpty()) {
            continue;
        }
        // The token is inserted immediately after the original column segment.
        sqlTokens.add(new InsertColumnsToken(columnSegment.getStopIndex() + 1, derivedColumnNames));
    }
    return sqlTokens;
}
// A column with no derived-column configuration ("bar_col") must produce no tokens.
@Test
void assertGenerateSQLTokensNotExistColumns() {
    EncryptInsertDerivedColumnsTokenGenerator tokenGenerator = new EncryptInsertDerivedColumnsTokenGenerator(mockEncryptRule());
    ColumnSegment columnSegment = mock(ColumnSegment.class, RETURNS_DEEP_STUBS);
    when(columnSegment.getIdentifier().getValue()).thenReturn("bar_col");
    InsertStatementContext insertStatementContext = mockInsertStatementContext();
    when(insertStatementContext.getSqlStatement().getColumns()).thenReturn(Collections.singleton(columnSegment));
    assertTrue(tokenGenerator.generateSQLTokens(insertStatementContext).isEmpty());
}
/**
 * Merges the given nutrients into this instance, component by component
 * (calories, carbohydrates, fats, proteins).
 * A {@code null} argument is treated as "nothing to add" and leaves this instance unchanged.
 */
public void addNutrients(Nutrients nutrients) {
    if (nutrients == null) return;
    addCalories(nutrients.getCalories());
    addCarbohydrates(nutrients.getCarbohydrates());
    addFats(nutrients.getFats());
    addProteins(nutrients.getProteins());
}
// Adding nutrients with negative components must decrease the base totals accordingly.
@Test
void addNegativeValues_shouldDecreaseValues() {
    modifyNutrients = new Nutrients(
            new Calories(new BigDecimal("-130")),
            new Carbohydrates(new BigDecimal("-5"), new BigDecimal("-2"), new BigDecimal("-3")),
            new Proteins(new BigDecimal("-5")),
            new Fats(new BigDecimal("-10"), new BigDecimal("-5"))
    );
    baseNutrients.addNutrients(modifyNutrients);
    assertAll("Should decrease",
            () -> assertEquals(new BigDecimal("520"), baseNutrients.getCalories().getTotalCalories()),
            () -> assertEquals(new BigDecimal("15"), baseNutrients.getCarbohydrates().getTotalCarbohydrates()),
            () -> assertEquals(new BigDecimal("25"), baseNutrients.getProteins().getTotalProteins()),
            () -> assertEquals(new BigDecimal("40"), baseNutrients.getFats().getTotalFats())
    );
}
/**
 * Deserializes the given JSON bytes into an instance of {@code cls}.
 *
 * @param json raw JSON bytes
 * @param cls  target type
 * @return the deserialized instance
 * @throws NacosDeserializationException if parsing or mapping fails
 */
public static <T> T toObj(byte[] json, Class<T> cls) {
    try {
        return mapper.readValue(json, cls);
    } catch (Exception e) {
        // Wrap any Jackson failure in the Nacos-specific unchecked exception,
        // preserving the target class and the original cause.
        throw new NacosDeserializationException(cls, e);
    }
}
// Malformed JSON ("aaa") must be reported as NacosDeserializationException.
@Test
void tesToObjForClassWithException() {
    assertThrows(NacosDeserializationException.class, () -> {
        JacksonUtils.toObj("aaa", JsonNode.class);
    });
}
/**
 * Splits an SQL script into individual statements at top-level semicolons, skipping over
 * "--" comment lines, double-quoted identifiers, back-ticked identifiers and
 * single-quoted string literals (including escaped quotes). Each statement records its
 * start/end offsets and whether it is a results-producing query (SELECT or show).
 */
@Override
public List<SqlScriptStatement> getSqlScriptStatements( String sqlScript ) {
  List<SqlScriptStatement> statements = new ArrayList<>();
  String all = sqlScript;
  int from = 0;   // start offset of the statement currently being scanned
  int to = 0;     // current scan position
  int length = all.length();
  while ( to < length ) {
    char c = all.charAt( to );

    // Skip comment lines...
    //
    while ( all.startsWith( "--", from ) ) {
      int nextLineIndex = all.indexOf( Const.CR, from );
      from = nextLineIndex + Const.CR.length();
      // NOTE(review): this guard tests `to`, which this loop never advances — it can
      // only break on a condition established before the loop; confirm it was not
      // meant to test `from` (or `nextLineIndex < 0`).
      if ( to >= length ) {
        break;
      }
      // NOTE(review): `charAt( c )` uses the character VALUE as an index — this looks
      // like a bug; presumably `charAt( from )` (or `charAt( to )`) was intended.
      // It can also throw StringIndexOutOfBoundsException for high char values.
      c = all.charAt( c );
    }
    if ( to >= length ) {
      break;
    }

    // Skip over double quotes...
    //
    if ( c == '"' ) {
      int nextDQuoteIndex = all.indexOf( '"', to + 1 );
      if ( nextDQuoteIndex >= 0 ) {
        to = nextDQuoteIndex + 1;
      }
    }

    // Skip over back-ticks
    if ( c == '`' ) {
      int nextBacktickIndex = all.indexOf( '`', to + 1 );
      if ( nextBacktickIndex >= 0 ) {
        to = nextBacktickIndex + 1;
      }
    }

    c = all.charAt( to );
    if ( c == '\'' ) {
      boolean skip = true;

      // Don't skip over \' or ''
      //
      if ( to > 0 ) {
        char prevChar = all.charAt( to - 1 );
        if ( prevChar == '\\' || prevChar == '\'' ) {
          skip = false;
        }
      }

      // Jump to the next quote and continue from there.
      //
      while ( skip ) {
        int nextQuoteIndex = all.indexOf( '\'', to + 1 );
        if ( nextQuoteIndex >= 0 ) {
          to = nextQuoteIndex + 1;
          skip = false;
          // A doubled quote ('') continues the literal.
          if ( to < all.length() ) {
            char nextChar = all.charAt( to );
            if ( nextChar == '\'' ) {
              skip = true;
              to++;
            }
          }
          // A backslash-escaped quote (\') also continues the literal.
          if ( to > 0 ) {
            char prevChar = all.charAt( to - 2 );
            if ( prevChar == '\\' ) {
              skip = true;
              to++;
            }
          }
        }
        // NOTE(review): if no closing quote is found (nextQuoteIndex < 0), `skip`
        // stays true and this loop never terminates — confirm unterminated literals
        // cannot reach this code.
      }
    }

    c = all.charAt( to );

    // end of statement
    if ( c == ';' || to >= length - 1 ) {
      if ( to >= length - 1 ) {
        to++; // grab last char also!
      }
      String stat = all.substring( from, to );
      if ( !onlySpaces( stat ) ) {
        String s = Const.trim( stat );
        // SELECT and "show" statements produce result sets.
        statements.add( new SqlScriptStatement(
          s, from, to, s.toUpperCase().startsWith( "SELECT" ) || s.toLowerCase().startsWith( "show" ) ) );
      }
      to++;
      from = to;
    } else {
      to++;
    }
  }
  return statements;
}
// A "show ..." statement must be flagged as a results-producing query.
@Test
public void testShowIsTreatedAsAResultsQuery() throws Exception {
    List<SqlScriptStatement> sqlScriptStatements =
            new H2DatabaseMeta().getSqlScriptStatements( "show annotations from service" );
    assertTrue( sqlScriptStatements.get( 0 ).isQuery() );
}
/**
 * Validates and converts raw discovery-strategy properties against the given
 * definitions: each defined property is type-converted and (if a validator is present)
 * validated; missing required properties and unknown leftover properties raise
 * {@code InvalidConfigurationException} / validation errors.
 *
 * NOTE(review): this method mutates the caller's {@code properties} map (dash-stripped
 * keys are removed and re-added under the canonical key) — confirm callers do not rely
 * on the original map contents.
 *
 * @return a new map containing only the converted, validated, known properties
 */
static Map<String, Comparable> prepareProperties(Map<String, Comparable> properties,
                                                 Collection<PropertyDefinition> propertyDefinitions) {
    Map<String, Comparable> mappedProperties = createHashMap(propertyDefinitions.size());
    for (PropertyDefinition propertyDefinition : propertyDefinitions) {
        String propertyKey = propertyDefinition.key();
        // Accept keys written without dashes by normalising them to the canonical key.
        if (properties.containsKey(propertyKey.replace("-", ""))) {
            properties.put(propertyKey, properties.remove(propertyKey.replace("-", "")));
        }
        if (!properties.containsKey(propertyKey)) {
            // Required properties must be present; optional ones are simply skipped.
            if (!propertyDefinition.optional()) {
                throw new InvalidConfigurationException(
                        String.format("Missing property '%s' on discovery strategy", propertyKey));
            }
            continue;
        }
        Comparable value = properties.get(propertyKey);
        TypeConverter typeConverter = propertyDefinition.typeConverter();
        Comparable mappedValue = typeConverter.convert(value);
        ValueValidator validator = propertyDefinition.validator();
        if (validator != null) {
            validator.validate(mappedValue);
        }
        mappedProperties.put(propertyKey, mappedValue);
    }
    // Any property not consumed above is unknown and rejected.
    verifyNoUnknownProperties(mappedProperties, properties);
    return mappedProperties;
}
// A property value rejected by its validator must surface as ValidationException.
// NOTE(review): the stubbed `valueValidator` mock is never wired into the definition —
// the definition uses DummyValidator, which presumably throws for this value; confirm
// the mock setup is not dead code.
@Test(expected = ValidationException.class)
public void invalidProperty() {
    // given
    Map<String, Comparable> properties =
            new HashMap<>(singletonMap(PROPERTY_KEY_1, (Comparable) PROPERTY_VALUE_1));
    ValueValidator<String> valueValidator = mock(ValueValidator.class);
    willThrow(new ValidationException("Invalid property")).given(valueValidator).validate(PROPERTY_VALUE_1);
    Collection<PropertyDefinition> propertyDefinitions = singletonList(
            new SimplePropertyDefinition(PROPERTY_KEY_1, false, STRING, new DummyValidator()));

    // when
    prepareProperties(properties, propertyDefinitions);

    // then
    // throw exception
}
/**
 * Folds {@code aggregand} into the named global aggregate using the user-supplied
 * (serialized) AggregateFunction, stores the updated accumulator, and completes with
 * the aggregate's current result. Deserialization failures complete exceptionally.
 */
@Override
public CompletableFuture<Object> updateGlobalAggregate(
        String aggregateName, Object aggregand, byte[] serializedAggregateFunction) {

    final AggregateFunction aggregateFunction;
    try {
        aggregateFunction =
                InstantiationUtil.deserializeObject(serializedAggregateFunction, userCodeLoader);
    } catch (Exception e) {
        log.error("Error while attempting to deserialize user AggregateFunction.");
        return FutureUtils.completedExceptionally(e);
    }

    // First update for this aggregate starts from a fresh accumulator.
    final Object previous = accumulators.get(aggregateName);
    final Object updated =
            aggregateFunction.add(
                    aggregand, previous == null ? aggregateFunction.createAccumulator() : previous);
    accumulators.put(aggregateName, updated);

    return CompletableFuture.completedFuture(aggregateFunction.getResult(updated));
}
// Verifies the JobMaster accumulates per-name global aggregates across calls:
// "agg1" sums 1+2+3+4 -> 1, 3, 6, 10 and an independent "agg2" sums 10+23 -> 10, 33.
@Test
void testJobMasterAggregatesValuesCorrectly() throws Exception {
    try (final JobMaster jobMaster =
            new JobMasterBuilder(jobGraph, rpcService)
                    .withConfiguration(configuration)
                    .withHighAvailabilityServices(haServices)
                    .withHeartbeatServices(heartbeatServices)
                    .createJobMaster()) {
        jobMaster.start();
        final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class);
        CompletableFuture<Object> updateAggregateFuture;
        AggregateFunction<Integer, Integer, Integer> aggregateFunction = createAggregateFunction();
        // Ensure the function is serializable before shipping it to the JobMaster.
        ClosureCleaner.clean(
                aggregateFunction, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);
        byte[] serializedAggregateFunction = InstantiationUtil.serializeObject(aggregateFunction);
        updateAggregateFuture =
                jobMasterGateway.updateGlobalAggregate("agg1", 1, serializedAggregateFunction);
        assertThat(updateAggregateFuture.get()).isEqualTo(1);
        updateAggregateFuture =
                jobMasterGateway.updateGlobalAggregate("agg1", 2, serializedAggregateFunction);
        assertThat(updateAggregateFuture.get()).isEqualTo(3);
        updateAggregateFuture =
                jobMasterGateway.updateGlobalAggregate("agg1", 3, serializedAggregateFunction);
        assertThat(updateAggregateFuture.get()).isEqualTo(6);
        updateAggregateFuture =
                jobMasterGateway.updateGlobalAggregate("agg1", 4, serializedAggregateFunction);
        assertThat(updateAggregateFuture.get()).isEqualTo(10);
        // A different aggregate name starts from a fresh accumulator.
        updateAggregateFuture =
                jobMasterGateway.updateGlobalAggregate("agg2", 10, serializedAggregateFunction);
        assertThat(updateAggregateFuture.get()).isEqualTo(10);
        updateAggregateFuture =
                jobMasterGateway.updateGlobalAggregate("agg2", 23, serializedAggregateFunction);
        assertThat(updateAggregateFuture.get()).isEqualTo(33);
    }
}
/**
 * Switches the output format set between the monochrome and ANSI-coloured variants.
 */
@Override
public void setMonochrome(boolean monochrome) {
    formats = monochrome ? monochrome() : ansi();
}
// Each step's data table must be pretty-printed under its step, with cells aligned
// per table; monochrome output keeps the expected text free of ANSI escapes.
@Test
void should_print_multiple_tables() {
    Feature feature = TestFeatureParser.parse("path/test.feature", "" +
            "Feature: Test feature\n" +
            " Scenario: Test Scenario\n" +
            " Given first step\n" +
            " | key1 | key2 |\n" +
            " | value1 | value2 |\n" +
            " | another1 | another2 |\n" +
            " Given second step\n" +
            " | key3 | key4 |\n" +
            " | value3 | value4 |\n" +
            " | another3 | another4 |\n");
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Runtime.builder()
            .withFeatureSupplier(new StubFeatureSupplier(feature))
            .withAdditionalPlugins(new PrettyFormatter(out))
            .withRuntimeOptions(new RuntimeOptionsBuilder().setMonochrome().build())
            .withBackendSupplier(new StubBackendSupplier(
                    new StubStepDefinition("first step", "path/step_definitions.java:7", DataTable.class),
                    new StubStepDefinition("second step", "path/step_definitions.java:15", DataTable.class)))
            .build()
            .run();
    assertThat(out, bytes(equalToCompressingWhiteSpace("" +
            "\n" +
            "Scenario: Test Scenario # path/test.feature:2\n" +
            " Given first step # path/step_definitions.java:7\n" +
            " | key1 | key2 |\n" +
            " | value1 | value2 |\n" +
            " | another1 | another2 |\n" +
            " Given second step # path/step_definitions.java:15\n" +
            " | key3 | key4 |\n" +
            " | value3 | value4 |\n" +
            " | another3 | another4 |\n")));
}
/**
 * Distributes standby replicas of each stateful task so that, where possible, a standby
 * lands on a client whose rack-awareness tag values differ from those of the client
 * running the active task. Standbys that cannot be placed tag-aware are afterwards
 * assigned to the least-loaded clients.
 *
 * @return always {@code false}: standby assignment never requires a probing rebalance
 */
@Override
public boolean assign(final Map<ProcessId, ClientState> clients,
                      final Set<TaskId> allTaskIds,
                      final Set<TaskId> statefulTaskIds,
                      final AssignmentConfigs configs) {
    final int numStandbyReplicas = configs.numStandbyReplicas();
    final Set<String> rackAwareAssignmentTags = new HashSet<>(tagsFunction.apply(configs));
    // Per-task countdown of standbys still to be placed.
    final Map<TaskId, Integer> tasksToRemainingStandbys = computeTasksToRemainingStandbys(
        numStandbyReplicas,
        statefulTaskIds
    );

    // Tag statistics: tag key -> observed values, and (key,value) -> clients carrying it.
    final Map<String, Set<String>> tagKeyToValues = new HashMap<>();
    final Map<TagEntry, Set<ProcessId>> tagEntryToClients = new HashMap<>();
    fillClientsTagStatistics(clients, tagEntryToClients, tagKeyToValues);

    final ConstrainedPrioritySet standbyTaskClientsByTaskLoad =
        createLeastLoadedPrioritySetConstrainedByAssignedTask(clients);

    final Map<TaskId, ProcessId> pendingStandbyTasksToClientId = new HashMap<>();

    for (final TaskId statefulTaskId : statefulTaskIds) {
        for (final Map.Entry<ProcessId, ClientState> entry : clients.entrySet()) {
            final ProcessId clientId = entry.getKey();
            final ClientState clientState = entry.getValue();
            // Standbys are placed relative to the client that owns the active task.
            if (clientState.activeTasks().contains(statefulTaskId)) {
                assignStandbyTasksToClientsWithDifferentTags(
                    numStandbyReplicas,
                    standbyTaskClientsByTaskLoad,
                    statefulTaskId,
                    clientId,
                    rackAwareAssignmentTags,
                    clients,
                    tasksToRemainingStandbys,
                    tagKeyToValues,
                    tagEntryToClients,
                    pendingStandbyTasksToClientId
                );
            }
        }
    }

    // Fall back to least-loaded placement for standbys that could not be placed tag-aware.
    if (!tasksToRemainingStandbys.isEmpty()) {
        assignPendingStandbyTasksToLeastLoadedClients(clients,
                                                      numStandbyReplicas,
                                                      standbyTaskClientsByTaskLoad,
                                                      tasksToRemainingStandbys);
    }

    // returning false, because standby task assignment will never require a follow-up probing rebalance.
    return false;
}
// With two standbys requested but only two distinct `cluster` tag values available,
// only the first standby can be fully rack-aware (different cluster AND zone); the
// second can only be partially rack-aware (different zone). Verifies both placements.
@Test
public void shouldDoThePartialRackAwareness() {
    final Map<ProcessId, ClientState> clientStates = mkMap(
        mkEntry(PID_1, createClientStateWithCapacity(PID_1, 1, mkMap(mkEntry(CLUSTER_TAG, CLUSTER_1), mkEntry(ZONE_TAG, ZONE_1)), TASK_0_0)),
        mkEntry(PID_2, createClientStateWithCapacity(PID_2, 1, mkMap(mkEntry(CLUSTER_TAG, CLUSTER_1), mkEntry(ZONE_TAG, ZONE_2)))),
        mkEntry(PID_3, createClientStateWithCapacity(PID_3, 1, mkMap(mkEntry(CLUSTER_TAG, CLUSTER_1), mkEntry(ZONE_TAG, ZONE_3)))),
        mkEntry(PID_4, createClientStateWithCapacity(PID_4, 1, mkMap(mkEntry(CLUSTER_TAG, CLUSTER_2), mkEntry(ZONE_TAG, ZONE_1)))),
        mkEntry(PID_5, createClientStateWithCapacity(PID_5, 1, mkMap(mkEntry(CLUSTER_TAG, CLUSTER_2), mkEntry(ZONE_TAG, ZONE_2)))),
        mkEntry(PID_6, createClientStateWithCapacity(PID_6, 1, mkMap(mkEntry(CLUSTER_TAG, CLUSTER_2), mkEntry(ZONE_TAG, ZONE_3)), TASK_1_0))
    );

    final Set<TaskId> allActiveTasks = findAllActiveTasks(clientStates);
    final AssignmentConfigs assignmentConfigs = newAssignmentConfigs(2, CLUSTER_TAG, ZONE_TAG);

    standbyTaskAssignor.assign(clientStates, allActiveTasks, allActiveTasks, assignmentConfigs);

    // We need to distribute 2 standby tasks (+1 active task).
    // Since we have only two unique `cluster` tag values,
    // we can only achieve "ideal" distribution on the 1st standby task assignment.
    // We can't consider the `cluster` tag for the 2nd standby task assignment because the 1st standby
    // task would already be assigned on different clusters compared to the active one, which means
    // we have already used all the available cluster tag values. Taking the `cluster` tag into consideration
    // for the 2nd standby task assignment would affectively mean excluding all the clients.
    // Instead, for the 2nd standby task, we can only achieve partial rack awareness based on the `zone` tag.
    // As we don't consider the `cluster` tag for the 2nd standby task assignment, partial rack awareness
    // can be satisfied by placing the 2nd standby client on a different `zone` tag compared to active and corresponding standby tasks.
    // The `zone` on either `cluster` tags are valid candidates for the partial rack awareness, as our goal is to distribute clients on the different `zone` tags.
    Stream.of(PID_2, PID_5).forEach(client -> assertStandbyTaskCountForClientEqualsTo(clientStates, client, 1));
    // There's no strong guarantee where 2nd standby task will end up.
    Stream.of(PID_1, PID_3, PID_4, PID_6).forEach(client -> assertStandbyTaskCountForClientEqualsTo(clientStates, client, 0, 1));
    assertTotalNumberOfStandbyTasksEqualsTo(clientStates, 4);

    assertTrue(
        standbyClientsHonorRackAwareness(
            TASK_0_0,
            clientStates,
            asList(
                // Since it's located on a different `cluster` and `zone` tag dimensions,
                // `PID_5` is the "ideal" distribution for the 1st standby task assignment.
                // For the 2nd standby, either `PID_3` or `PID_6` are valid destinations as
                // we need to distribute the clients on different `zone`
                // tags without considering the `cluster` tag value.
                mkSet(PID_5, PID_3), mkSet(PID_5, PID_6)
            )
        )
    );
    assertTrue(
        standbyClientsHonorRackAwareness(
            TASK_1_0,
            clientStates,
            asList(
                // The same comment as above applies here too.
                // `PID_2` is the ideal distribution on different `cluster`
                // and `zone` tag dimensions. In contrast, `PID_4` and `PID_1`
                // satisfy only the partial rack awareness as they are located on a different `zone` tag dimension.
                mkSet(PID_2, PID_4), mkSet(PID_2, PID_1)
            )
        )
    );
}
/**
 * Returns the (lazily created, cached) key-set view of this cache.
 *
 * NOTE(review): the lazy initialisation is unsynchronised, so concurrent callers may
 * briefly create two KeySet instances — benign only if KeySet is a stateless view;
 * confirm the intended thread-safety contract.
 */
public KeySet keySet() {
    if (null == keySet) {
        keySet = new KeySet();
    }
    return keySet;
}
// The key-set view is read-only: removeIf must fail with the documented message.
@Test
void removeIfOnKeySet() {
    final Predicate<Integer> filter = (key) -> true;
    final UnsupportedOperationException exception =
            assertThrowsExactly(UnsupportedOperationException.class, () -> cache.keySet().removeIf(filter));
    assertEquals("Cannot remove from KeySet", exception.getMessage());
}
/**
 * Sets the current farming contract, persists it, and refreshes the derived
 * contract state (e.g. summary/completion status).
 *
 * @param contract the new contract produce, or {@code null} to clear the contract
 */
public void setContract(@Nullable Produce contract) {
    this.contract = contract;
    setStoredContract(contract);
    handleContractState();
}
// With one cabbage patch dead and another harvestable, a cabbage contract counts as
// COMPLETED — a single harvestable patch satisfies the contract.
@Test
public void cabbageContractCabbageHarvestableAndCabbageDead() {
    final long unixNow = Instant.now().getEpochSecond();

    // Get the two allotment patches
    final FarmingPatch patch1 = farmingGuildPatches.get(Varbits.FARMING_4773);
    final FarmingPatch patch2 = farmingGuildPatches.get(Varbits.FARMING_4774);
    assertNotNull(patch1);
    assertNotNull(patch2);

    // Specify the two allotment patches
    when(farmingTracker.predictPatch(patch1))
            .thenReturn(new PatchPrediction(Produce.CABBAGE, CropState.DEAD, 0, 2, 3));
    when(farmingTracker.predictPatch(patch2))
            .thenReturn(new PatchPrediction(Produce.CABBAGE, CropState.HARVESTABLE, unixNow, 3, 3));

    farmingContractManager.setContract(Produce.CABBAGE);
    assertEquals(SummaryState.COMPLETED, farmingContractManager.getSummary());
}
/**
 * Encrypts the given value and returns the ciphertext as a Base64 string.
 * {@code null} input yields {@code null} so absent values round-trip unchanged.
 */
@SneakyThrows(GeneralSecurityException.class)
@Override
public String encrypt(final Object plainValue) {
    if (null == plainValue) {
        return null;
    }
    // The plaintext is the UTF-8 bytes of the value's string form.
    final byte[] plainBytes = String.valueOf(plainValue).getBytes(StandardCharsets.UTF_8);
    final byte[] cipherBytes = getCipher(Cipher.ENCRYPT_MODE).doFinal(plainBytes);
    return Base64.getEncoder().encodeToString(cipherBytes);
}
// The cipher configuration is deterministic, so encrypting "test" must always yield
// this exact Base64 ciphertext.
@Test
void assertEncrypt() {
    Object actual = cryptographicAlgorithm.encrypt("test");
    assertThat(actual, is("dSpPiyENQGDUXMKFMJPGWA=="));
}
/**
 * Rewrites each row expression so occurrences of extracted common sub-expressions are
 * replaced by their variable references. Returns the input unchanged when no common
 * sub-expressions were found; logs the substitutions at debug level.
 */
List<RowExpression> rewriteRowExpressionsWithCSE(
        List<RowExpression> rows,
        Map<RowExpression, VariableReferenceExpression> commonSubExpressions)
{
    if (commonSubExpressions.isEmpty()) {
        return rows;
    }
    List<RowExpression> rewritten = rows.stream()
            .map(row -> rewriteExpressionWithCSE(row, commonSubExpressions))
            .collect(toImmutableList());
    if (log.isDebugEnabled()) {
        log.debug("Extracted %d common sub-expressions", commonSubExpressions.size());
        commonSubExpressions.forEach((expression, variable) -> log.debug("\t%s = %s", variable, expression));
        log.debug("Rewrote Rows: %s", rewritten);
    }
    return rewritten;
}
// Extracts X+Y as the single common sub-expression of {X+Y+Z, (X+Y)>2} and checks that
// both the projection and the filter are rewritten to reference the CSE variable.
@Test
public void testRewriteRowExpressionWithCSE() {
    CursorProcessorCompiler cseCursorCompiler = new CursorProcessorCompiler(METADATA, true, emptyMap());
    ClassDefinition cursorProcessorClassDefinition = new ClassDefinition(
            a(PUBLIC, FINAL),
            makeClassName(CursorProcessor.class.getSimpleName()),
            type(Object.class),
            type(CursorProcessor.class));
    RowExpression filter = new SpecialFormExpression(AND, BIGINT, ADD_X_Y_GREATER_THAN_2);
    List<RowExpression> projections = ImmutableList.of(ADD_X_Y_Z);
    List<RowExpression> rowExpressions = ImmutableList.<RowExpression>builder()
            .addAll(projections)
            .add(filter)
            .build();
    Map<Integer, Map<RowExpression, VariableReferenceExpression>> commonSubExpressionsByLevel =
            collectCSEByLevel(rowExpressions);
    Map<VariableReferenceExpression, CommonSubExpressionRewriter.CommonSubExpressionFields> cseFields =
            declareCommonSubExpressionFields(cursorProcessorClassDefinition, commonSubExpressionsByLevel);
    // Flatten the per-level CSE maps into a single expression -> variable map.
    Map<RowExpression, VariableReferenceExpression> commonSubExpressions =
            commonSubExpressionsByLevel.values().stream()
                    .flatMap(m -> m.entrySet().stream())
                    .collect(toImmutableMap(Map.Entry::getKey, Map.Entry::getValue));
    // X+Y as CSE
    assertEquals(1, cseFields.size());
    VariableReferenceExpression cseVariable = cseFields.keySet().iterator().next();
    RowExpression rewrittenFilter =
            cseCursorCompiler.rewriteRowExpressionsWithCSE(ImmutableList.of(filter), commonSubExpressions).get(0);
    List<RowExpression> rewrittenProjections =
            cseCursorCompiler.rewriteRowExpressionsWithCSE(projections, commonSubExpressions);
    // X+Y+Z contains CSE X+Y
    assertTrue(((CallExpression) rewrittenProjections.get(0)).getArguments().contains(cseVariable));
    // X+Y > 2 consists CSE X+Y
    assertTrue(((CallExpression) ((SpecialFormExpression) rewrittenFilter).getArguments().get(0)).getArguments().contains(cseVariable));
}
/**
 * Rounds {@code value} up to the nearest power of two (0 rounds up to 1; exact powers
 * map to themselves).
 *
 * @param value a non-negative value no greater than MAX_POW2
 * @return the smallest power of two {@code >= value}
 * @throws IllegalArgumentException if {@code value} is negative or exceeds MAX_POW2
 */
public static int roundToPowerOfTwo(final int value) {
    if (value > MAX_POW2) {
        throw new IllegalArgumentException(
                "There is no larger power of 2 int for value:" + value + " since it exceeds 2^31.");
    }
    if (value < 0) {
        throw new IllegalArgumentException("Given value:" + value + ". Expecting value >= 0.");
    }
    // numberOfLeadingZeros(value - 1) makes exact powers map to themselves;
    // for value 0 the shift amount is 32, and 1 << 32 == 1 for ints.
    return 1 << (32 - Integer.numberOfLeadingZeros(value - 1));
}
// Rounding behaviour: exact powers map to themselves, other values round up,
// zero rounds up to 1, and the maximum power of two is preserved.
@Test
public void testRound() {
    assertEquals(4, Pow2.roundToPowerOfTwo(4));
    assertEquals(4, Pow2.roundToPowerOfTwo(3));
    assertEquals(1, Pow2.roundToPowerOfTwo(0));
    assertEquals(MAX_POSITIVE_POW2, Pow2.roundToPowerOfTwo(MAX_POSITIVE_POW2));
}
/**
 * Resolves a "unix"-view attribute for the given file, deriving most values from the
 * owner/posix views ("uid", "gid", "mode") or from the file's own metadata
 * ("ctime", "ino", "nlink"). "rdev"/"dev" are fixed placeholders since this in-memory
 * file system has no real devices. Unknown attributes yield {@code null}.
 */
@SuppressWarnings("unchecked")
@Override
public @Nullable Object get(File file, String attribute) {
    switch (attribute) {
        case "uid":
            // Derived from the owner view's principal.
            UserPrincipal user = (UserPrincipal) file.getAttribute("owner", "owner");
            return getUniqueId(user);
        case "gid":
            // Derived from the posix view's group principal.
            GroupPrincipal group = (GroupPrincipal) file.getAttribute("posix", "group");
            return getUniqueId(group);
        case "mode":
            // Derived from the posix permission set.
            Set<PosixFilePermission> permissions =
                    (Set<PosixFilePermission>) file.getAttribute("posix", "permissions");
            return toMode(permissions);
        case "ctime":
            return file.getCreationTime();
        case "rdev":
            // Placeholder device numbers: no real devices exist here.
            return 0L;
        case "dev":
            return 1L;
        case "ino":
            return file.id();
        case "nlink":
            return file.links();
        default:
            return null;
    }
}
// Checks the unix view's derived attributes after seeding owner/posix attributes,
// including mode derived from "rw-r--r--" and nlink tracking the link count.
@Test
public void testInitialAttributes() {
    // unix provider relies on other providers to set their initial attributes
    file.setAttribute("owner", "owner", createUserPrincipal("foo"));
    file.setAttribute("posix", "group", createGroupPrincipal("bar"));
    file.setAttribute(
            "posix", "permissions", ImmutableSet.copyOf(PosixFilePermissions.fromString("rw-r--r--")));

    // these are pretty much meaningless here since they aren't properties this
    // file system actually has, so don't really care about the exact value of these
    assertThat(provider.get(file, "uid")).isInstanceOf(Integer.class);
    assertThat(provider.get(file, "gid")).isInstanceOf(Integer.class);
    assertThat(provider.get(file, "rdev")).isEqualTo(0L);
    assertThat(provider.get(file, "dev")).isEqualTo(1L);
    assertThat(provider.get(file, "ino")).isInstanceOf(Integer.class);

    // these have logical origins in attributes from other views
    assertThat(provider.get(file, "mode")).isEqualTo(0644); // rw-r--r--
    assertThat(provider.get(file, "ctime")).isEqualTo(file.getCreationTime());

    // this is based on a property this file system does actually have
    assertThat(provider.get(file, "nlink")).isEqualTo(1);

    file.incrementLinkCount();
    assertThat(provider.get(file, "nlink")).isEqualTo(2);
    file.decrementLinkCount();
    assertThat(provider.get(file, "nlink")).isEqualTo(1);
}
/**
 * Rewrites the route units in place: every unit whose logic data source belongs to a
 * readwrite-splitting group is replaced by a unit pointing at the actual data source
 * (primary or replica) chosen by {@code ReadwriteSplittingDataSourceRouter}.
 * Units for data sources outside any group are left untouched.
 */
@Override
public void decorateRouteContext(final RouteContext routeContext, final QueryContext queryContext, final ShardingSphereDatabase database,
                                 final ReadwriteSplittingRule rule, final ConfigurationProperties props, final ConnectionContext connectionContext) {
    // Collect replacements first to avoid mutating the unit collection while iterating.
    Collection<RouteUnit> toBeRemoved = new LinkedList<>();
    Collection<RouteUnit> toBeAdded = new LinkedList<>();
    for (RouteUnit each : routeContext.getRouteUnits()) {
        String logicDataSourceName = each.getDataSourceMapper().getActualName();
        rule.findDataSourceGroupRule(logicDataSourceName).ifPresent(optional -> {
            toBeRemoved.add(each);
            String actualDataSourceName = new ReadwriteSplittingDataSourceRouter(optional, connectionContext)
                    .route(queryContext.getSqlStatementContext(), queryContext.getHintValueContext());
            toBeAdded.add(new RouteUnit(new RouteMapper(logicDataSourceName, actualDataSourceName), each.getTableMappers()));
        });
    }
    routeContext.getRouteUnits().removeAll(toBeRemoved);
    routeContext.getRouteUnits().addAll(toBeAdded);
}
// A lock-free SELECT must be routed to the replica data source, while data sources
// outside the readwrite-splitting group stay as routed.
@Test
void assertDecorateRouteContextToReplicaDataSource() {
    RouteContext actual = mockRouteContext();
    MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class);
    when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement);
    when(selectStatement.getLock()).thenReturn(Optional.empty());
    QueryContext queryContext = new QueryContext(sqlStatementContext, "", Collections.emptyList(), new HintValueContext(),
            mockConnectionContext(), mock(ShardingSphereMetaData.class));
    RuleMetaData ruleMetaData = new RuleMetaData(Collections.singleton(staticRule));
    ShardingSphereDatabase database = new ShardingSphereDatabase(DefaultDatabase.LOGIC_NAME, mock(DatabaseType.class),
            mock(ResourceMetaData.class, RETURNS_DEEP_STUBS), ruleMetaData, Collections.emptyMap());
    sqlRouter.decorateRouteContext(actual, queryContext, database, staticRule,
            new ConfigurationProperties(new Properties()), new ConnectionContext(Collections::emptySet));
    Iterator<String> routedDataSourceNames = actual.getActualDataSourceNames().iterator();
    assertThat(routedDataSourceNames.next(), is(NONE_READWRITE_SPLITTING_DATASOURCE_NAME));
    assertThat(routedDataSourceNames.next(), is(READ_DATASOURCE));
}
public FEELFnResult<Boolean> invoke(@ParameterName("list") List list, @ParameterName("element") Object element) {
    // FEEL list contains(): true when the list holds an element equal (with number
    // coercion) to the given value; a null list is an error, a null element is allowed.
    if (list == null) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null"));
    }
    if (element == null) {
        // No coercion needed for null: plain membership check handles lists containing null.
        return FEELFnResult.ofResult(list.contains(null));
    }
    Object coercedTarget = NumberEvalHelper.coerceNumber(element);
    for (Object item : list) {
        // Coerce each candidate the same way before the semantic-comparison equality check.
        if (itemEqualsSC(coercedTarget, NumberEvalHelper.coerceNumber(item))) {
            return FEELFnResult.ofResult(true);
        }
    }
    return FEELFnResult.ofResult(false);
}
@Test
void invokeListNull() {
    // A null list must yield an InvalidParametersEvent regardless of the element argument.
    FunctionTestUtil.assertResultError(listContainsFunction.invoke((List) null, null), InvalidParametersEvent.class);
    FunctionTestUtil.assertResultError(listContainsFunction.invoke(null, new Object()), InvalidParametersEvent.class);
}
@Override
public long tick() throws InterruptedException {
    // Sleeps until the next scheduled interval, then records the post-sleep time as the
    // new "previous tick" and returns the run limit for that instant.
    long now = mClock.millis();
    // The interval is supplied lazily: the sleeper evaluates the lambda when it needs the
    // duration, reading mPreviousTickedMs at that point (it is only updated after the sleep).
    mSleeper.sleep(
        () -> Duration.ofMillis(mIntervalSupplier.getNextInterval(mPreviousTickedMs, now)));
    mPreviousTickedMs = mClock.millis();
    return mIntervalSupplier.getRunLimit(mPreviousTickedMs);
}
@Test
public void sleepForSpecifiedInterval() throws Exception {
    final SleepingTimer timer = new SleepingTimer(THREAD_NAME, mMockLogger, mFakeClock,
            new SteppingThreadSleeper(mMockSleeper, mFakeClock),
            () -> new FixedIntervalSupplier(INTERVAL_MS));
    timer.tick(); // first tick won't sleep
    verify(mMockSleeper, times(0)).sleep(any(Duration.class));
    timer.tick();
    // Subsequent ticks sleep for exactly the configured fixed interval.
    verify(mMockSleeper).sleep(Duration.ofMillis(INTERVAL_MS));
}
static ConfigServer[] toConfigServers(String configserversString) {
    // Split the raw multi-value parameter string and convert every entry to a ConfigServer.
    return multiValueParameterStream(configserversString)
            .map(serverSpec -> toConfigServer(serverSpec))
            .toArray(ConfigServer[]::new);
}
@Test
public void multiple_spaces_are_supported() {
    // Whitespace-separated server entries must each be parsed into a ConfigServer.
    CloudConfigOptions.ConfigServer[] parsed = toConfigServers("test1 test2");
    assertEquals(2, parsed.length);
    List<String> hostNames = Arrays.stream(parsed).map(cs -> cs.hostName).toList();
    assertTrue(hostNames.containsAll(List.of("test1", "test2")));
}
public DirectoryEntry lookUp( File workingDirectory, JimfsPath path, Set<? super LinkOption> options) throws IOException { checkNotNull(path); checkNotNull(options); DirectoryEntry result = lookUp(workingDirectory, path, options, 0); if (result == null) { // an intermediate file in the path did not exist or was not a directory throw new NoSuchFileException(path.toString()); } return result; }
@Test
public void testLookup_relative_symlinkLoop() {
    // Resolving a self-referential symlink must fail, both directly and as a path prefix.
    try {
        lookup("four/loop");
        fail();
    } catch (IOException expected) {
    }

    try {
        lookup("four/loop/whatever");
        fail();
    } catch (IOException expected) {
    }
}
public static String jsToString( Object value, String classType ) { if ( classType.equalsIgnoreCase( JS_NATIVE_JAVA_OBJ ) || classType.equalsIgnoreCase( JS_UNDEFINED ) ) { // Is it a java Value class ? try { Value v = (Value) Context.jsToJava( value, Value.class ); return v.toString(); } catch ( Exception ev ) { // convert to a string should work in most cases... // return Context.toString( value ); } } else { // A String perhaps? return Context.toString( value ); } }
@Test
public void jsToString_NativeJavaObject_Double() throws Exception {
    // A native JS-wrapped Double must stringify to its decimal form ("1.0").
    assertEquals( "1.0", JavaScriptUtils.jsToString( getDoubleValue(), JAVA_OBJECT ).trim() );
}
@Override
public void doAlarm(List<AlarmMessage> alarmMessages) throws Exception {
    // Delivers each alarm message to the Dingtalk webhooks of the hook setting(s)
    // the message is tagged with; does nothing when no Dingtalk settings exist.
    Map<String, DingtalkSettings> settingsMap = alarmRulesWatcher.getDingtalkSettings();
    if (settingsMap == null || settingsMap.isEmpty()) {
        return;
    }

    // Bucket the messages by hook name so each configured hook only receives its own alarms.
    Map<String, List<AlarmMessage>> groupedMessages = groupMessagesByHook(alarmMessages);
    for (Map.Entry<String, List<AlarmMessage>> entry : groupedMessages.entrySet()) {
        var hookName = entry.getKey();
        var messages = entry.getValue();
        var setting = settingsMap.get(hookName);
        // Skip hooks with no setting, no webhook URLs, or no messages to send.
        if (setting == null || CollectionUtils.isEmpty(setting.getWebhooks()) || CollectionUtils.isEmpty(
            messages)) {
            continue;
        }

        for (final var webHookUrl : setting.getWebhooks()) {
            // getUrl() resolves the final request URL — presumably appending Dingtalk
            // signing parameters when a secret is configured; TODO confirm.
            final var url = getUrl(webHookUrl);
            for (final var alarmMessage : messages) {
                // The text template is expected to contain one %s placeholder for the alarm text.
                final var requestBody = String.format(
                    setting.getTextTemplate(),
                    alarmMessage.getAlarmMessage()
                );
                post(URI.create(url), requestBody, Map.of());
            }
        }
    }
}
@Test
public void testDingtalkWebhookWithSign() throws Exception {
    // Exercises the Dingtalk callback against a local mock endpoint with request signing enabled.
    CHECK_SIGN.set(true);
    List<DingtalkSettings.WebHookUrl> webHooks = new ArrayList<>();
    webHooks.add(new DingtalkSettings.WebHookUrl(secret, "http://127.0.0.1:" + SERVER.httpPort() + "/dingtalkhook/receiveAlarm?token=dummy_token"));

    Rules rules = new Rules();
    String template = "{\"msgtype\":\"text\",\"text\":{\"content\":\"Skywaling alarm: %s\"}}";
    // Two hook settings share the same webhook list; one signs its requests, the other does not.
    DingtalkSettings setting1 = new DingtalkSettings("setting1", AlarmHooksType.dingtalk, true);
    setting1.setWebhooks(webHooks);
    setting1.setTextTemplate(template);
    DingtalkSettings setting2 = new DingtalkSettings("setting2", AlarmHooksType.dingtalk, false);
    setting2.setWebhooks(webHooks);
    setting2.setTextTemplate(template);
    rules.getDingtalkSettingsMap().put(setting1.getFormattedName(), setting1);
    rules.getDingtalkSettingsMap().put(setting2.getFormattedName(), setting2);
    AlarmRulesWatcher alarmRulesWatcher = new AlarmRulesWatcher(rules, null);

    DingtalkHookCallback dingtalkCallBack = new DingtalkHookCallback(alarmRulesWatcher);
    List<AlarmMessage> alarmMessages = new ArrayList<>(2);
    // One message is routed to each hook setting via its hooks list.
    AlarmMessage alarmMessage = new AlarmMessage();
    alarmMessage.setScopeId(DefaultScopeDefine.SERVICE);
    alarmMessage.setRuleName("service_resp_time_rule");
    alarmMessage.setAlarmMessage("alarmMessage with [DefaultScopeDefine.All]");
    alarmMessage.getHooks().add(setting1.getFormattedName());
    alarmMessages.add(alarmMessage);
    AlarmMessage anotherAlarmMessage = new AlarmMessage();
    anotherAlarmMessage.setRuleName("service_resp_time_rule_2");
    anotherAlarmMessage.setScopeId(DefaultScopeDefine.ENDPOINT);
    anotherAlarmMessage.setAlarmMessage("anotherAlarmMessage with [DefaultScopeDefine.Endpoint]");
    anotherAlarmMessage.getHooks().add(setting2.getFormattedName());
    alarmMessages.add(anotherAlarmMessage);
    dingtalkCallBack.doAlarm(alarmMessages);
    // The mock server flips IS_SUCCESS once it receives well-formed (and signed) requests.
    Assertions.assertTrue(IS_SUCCESS.get());
}
/**
 * Records the Maven coordinates (group/artifact/version) of the given artifact as
 * vendor, product, and version evidence on this dependency, and either upgrades an
 * existing matching purl identifier to the highest confidence or adds a new one.
 *
 * @param source        the name of the evidence source (e.g. "pom")
 * @param mavenArtifact the Maven artifact to record
 * @param confidence    the confidence to attach to the new evidence entries
 */
public void addAsEvidence(String source, MavenArtifact mavenArtifact, Confidence confidence) {
    if (mavenArtifact.getGroupId() != null && !mavenArtifact.getGroupId().isEmpty()) {
        this.addEvidence(EvidenceType.VENDOR, source, "groupid", mavenArtifact.getGroupId(), confidence);
    }
    if (mavenArtifact.getArtifactId() != null && !mavenArtifact.getArtifactId().isEmpty()) {
        // The artifact id doubles as both product and vendor evidence.
        this.addEvidence(EvidenceType.PRODUCT, source, "artifactid", mavenArtifact.getArtifactId(), confidence);
        this.addEvidence(EvidenceType.VENDOR, source, "artifactid", mavenArtifact.getArtifactId(), confidence);
    }
    if (mavenArtifact.getVersion() != null && !mavenArtifact.getVersion().isEmpty()) {
        this.addEvidence(EvidenceType.VERSION, source, "version", mavenArtifact.getVersion(), confidence);
    }
    boolean found = false;
    if (mavenArtifact.getArtifactUrl() != null && !mavenArtifact.getArtifactUrl().isEmpty()) {
        // Scan under the lock; softwareIdentifiers is presumably shared across analyzer
        // threads — TODO confirm the locking discipline matches other accessors.
        synchronized (this) {
            for (Identifier i : this.softwareIdentifiers) {
                if (i instanceof PurlIdentifier) {
                    final PurlIdentifier id = (PurlIdentifier) i;
                    if (mavenArtifact.getArtifactId().equals(id.getName())
                            && mavenArtifact.getGroupId().equals(id.getNamespace())) {
                        found = true;
                        // An identifier confirmed by a Maven artifact URL gets top confidence.
                        i.setConfidence(Confidence.HIGHEST);
                        // Link to a Maven Central search keyed by this dependency's SHA-1.
                        final String url = "https://search.maven.org/search?q=1:" + this.getSha1sum();
                        i.setUrl(url);
                        //i.setUrl(mavenArtifact.getArtifactUrl());
                        LOGGER.debug("Already found identifier {}. Confidence set to highest", i.getValue());
                        break;
                    }
                }
            }
        }
    }
    // No existing identifier matched: add a fresh purl when the full GAV is available.
    if (!found && !StringUtils.isAnyEmpty(mavenArtifact.getGroupId(),
            mavenArtifact.getArtifactId(), mavenArtifact.getVersion())) {
        try {
            LOGGER.debug("Adding new maven identifier {}", mavenArtifact);
            final PackageURL p = new PackageURL("maven", mavenArtifact.getGroupId(),
                    mavenArtifact.getArtifactId(), mavenArtifact.getVersion(), null, null);
            final PurlIdentifier id = new PurlIdentifier(p, Confidence.HIGHEST);
            this.addSoftwareIdentifier(id);
        } catch (MalformedPackageURLException ex) {
            // A GAV that cannot form a valid package-url indicates a programming error.
            throw new UnexpectedAnalysisException(ex);
        }
    }
}
@Test
public void testAddAsEvidence() {
    Dependency instance = new Dependency();
    MavenArtifact mavenArtifact = new MavenArtifact("group", "artifact", "version", "url");
    instance.addAsEvidence("pom", mavenArtifact, Confidence.HIGH);
    // Expect 4 evidence entries (group id + artifact id twice + version) plus a purl identifier.
    assertTrue(instance.contains(EvidenceType.VENDOR, Confidence.HIGH));
    assertEquals(4, instance.size());
    assertFalse(instance.getSoftwareIdentifiers().isEmpty());
}
@Nullable
public T poll() {
    // An empty queue yields null instead of throwing.
    if (size() > 0) {
        return removeInternal(getHeadElementIndex());
    }
    return null;
}
@Test
void testPoll() {
    HeapPriorityQueue<TestElement> priorityQueue = newPriorityQueue(3);
    final Comparator<Long> comparator = getTestElementPriorityComparator();

    // Polling an empty queue yields null rather than throwing.
    assertThat(priorityQueue.poll()).isNull();

    final int testSize = 345;
    HashSet<TestElement> checkSet = new HashSet<>(testSize);
    insertRandomElements(priorityQueue, checkSet, testSize);

    // Drain the queue, verifying each polled element is known and priorities never decrease.
    long lastPriorityValue = getHighestPriorityValueForComparator();
    while (!priorityQueue.isEmpty()) {
        TestElement removed = priorityQueue.poll();
        assertThat(removed).isNotNull();
        assertThat(checkSet.remove(removed)).isTrue();
        assertThat(comparator.compare(removed.getPriority(), lastPriorityValue) >= 0).isTrue();
        lastPriorityValue = removed.getPriority();
    }

    // Every inserted element was returned exactly once and the queue is empty again.
    assertThat(checkSet).isEmpty();
    assertThat(priorityQueue.poll()).isNull();
}
@Override
public int read() throws EOFException {
    // Past-the-end reads signal exhaustion with -1, mirroring InputStream semantics.
    if (pos >= size) {
        return -1;
    }
    // Mask to present the byte as an unsigned value in [0, 255].
    return data[pos++] & 0xff;
}
@Test
public void testRead() throws Exception {
    // Reading within bounds must return each initial byte as an unsigned value.
    for (int i = 0; i < in.size; i++) {
        int readValidPos = in.read();
        // Compare the unsigned byte range equivalents of
        // the initial values with the values we read, as
        // the contract of the read() says that the values
        // need to be in that range.
        assertEquals(INIT_DATA[i] & 0xFF, readValidPos);
    }
    //try to read an invalid position should return -1
    assertEquals(-1, in.read());
}
/**
 * Applies the selected attributes of the source file status to the target path,
 * only issuing filesystem calls for attributes that actually differ.
 *
 * @param targetFS          the target file system
 * @param path              the target path to update
 * @param srcFileStatus     the source status whose attributes should be preserved
 * @param attributes        the set of attributes to preserve (mutated: BLOCKSIZE and
 *                          CHECKSUMTYPE are stripped as they are not applied here)
 * @param preserveRawXattrs whether raw.* xattrs are copied even without XATTR
 * @throws IOException on any filesystem failure
 */
public static void preserve(FileSystem targetFS, Path path,
                            CopyListingFileStatus srcFileStatus,
                            EnumSet<FileAttribute> attributes,
                            boolean preserveRawXattrs) throws IOException {

    // strip out those attributes we don't need any more
    attributes.remove(FileAttribute.BLOCKSIZE);
    attributes.remove(FileAttribute.CHECKSUMTYPE);
    // If not preserving anything from FileStatus, don't bother fetching it.
    FileStatus targetFileStatus = attributes.isEmpty() ? null : targetFS.getFileStatus(path);
    String group = targetFileStatus == null ? null : targetFileStatus.getGroup();
    String user = targetFileStatus == null ? null : targetFileStatus.getOwner();
    boolean chown = false;

    if (attributes.contains(FileAttribute.ACL)) {
        List<AclEntry> srcAcl = srcFileStatus.getAclEntries();
        List<AclEntry> targetAcl = getAcl(targetFS, targetFileStatus);
        if (!srcAcl.equals(targetAcl)) {
            targetFS.removeAcl(path);
            targetFS.setAcl(path, srcAcl);
        }
        // setAcl doesn't preserve sticky bit, so also call setPermission if needed.
        if (srcFileStatus.getPermission().getStickyBit() !=
            targetFileStatus.getPermission().getStickyBit()) {
            targetFS.setPermission(path, srcFileStatus.getPermission());
        }
    } else if (attributes.contains(FileAttribute.PERMISSION) &&
        !srcFileStatus.getPermission().equals(targetFileStatus.getPermission())) {
        targetFS.setPermission(path, srcFileStatus.getPermission());
    }

    final boolean preserveXAttrs = attributes.contains(FileAttribute.XATTR);
    if (preserveXAttrs || preserveRawXattrs) {
        // raw.* xattrs are always copied when preserveRawXattrs is set, even without XATTR.
        final String rawNS = StringUtils.toLowerCase(XAttr.NameSpace.RAW.name());
        Map<String, byte[]> srcXAttrs = srcFileStatus.getXAttrs();
        Map<String, byte[]> targetXAttrs = getXAttrs(targetFS, path);
        if (srcXAttrs != null && !srcXAttrs.equals(targetXAttrs)) {
            for (Entry<String, byte[]> entry : srcXAttrs.entrySet()) {
                String xattrName = entry.getKey();
                if (xattrName.startsWith(rawNS) || preserveXAttrs) {
                    targetFS.setXAttr(path, xattrName, entry.getValue());
                }
            }
        }
    }

    // The replication factor can only be preserved for replicated files.
    // It is ignored when either the source or target file are erasure coded.
    if (attributes.contains(FileAttribute.REPLICATION) && !targetFileStatus.isDirectory() &&
        !targetFileStatus.isErasureCoded() && !srcFileStatus.isErasureCoded() &&
        srcFileStatus.getReplication() != targetFileStatus.getReplication()) {
        targetFS.setReplication(path, srcFileStatus.getReplication());
    }

    if (attributes.contains(FileAttribute.GROUP) && !group.equals(srcFileStatus.getGroup())) {
        group = srcFileStatus.getGroup();
        chown = true;
    }

    if (attributes.contains(FileAttribute.USER) && !user.equals(srcFileStatus.getOwner())) {
        user = srcFileStatus.getOwner();
        chown = true;
    }

    if (chown) {
        // Owner and group are changed together in a single setOwner call.
        targetFS.setOwner(path, user, group);
    }

    if (attributes.contains(FileAttribute.TIMES)) {
        targetFS.setTimes(path, srcFileStatus.getModificationTime(), srcFileStatus.getAccessTime());
    }
}
@Test
public void testPreserveNothingOnFile() throws IOException {
    // With an empty attribute set, preserve() must leave the target file untouched.
    FileSystem fs = FileSystem.get(config);
    EnumSet<FileAttribute> attributes = EnumSet.noneOf(FileAttribute.class);
    Path dst = new Path("/tmp/dest2");
    Path src = new Path("/tmp/src2");
    createFile(fs, src);
    createFile(fs, dst);

    // Give source and destination deliberately different permissions, owners, times and replication.
    fs.setPermission(src, fullPerm);
    fs.setOwner(src, "somebody", "somebody-group");
    fs.setTimes(src, 0, 0);
    fs.setReplication(src, (short) 1);
    fs.setPermission(dst, noPerm);
    fs.setOwner(dst, "nobody", "nobody-group");
    fs.setTimes(dst, 100, 100);
    fs.setReplication(dst, (short) 2);

    CopyListingFileStatus srcStatus = new CopyListingFileStatus(fs.getFileStatus(src));

    DistCpUtils.preserve(fs, dst, srcStatus, attributes, false);

    CopyListingFileStatus dstStatus = new CopyListingFileStatus(fs.getFileStatus(dst));

    // FileStatus.equals only compares path field, must explicitly compare all fields
    assertStatusNotEqual(fs, dst, srcStatus);
}
/**
 * Concatenates Avro data files, optionally skipping records ("--offset"), capping the
 * number of output records ("--limit"), and sampling records at a rate ("--samplerate").
 * All arguments except the last are input files; the last is the output ("-" = stdout).
 *
 * @return 0 on success or when help was printed; 1 for invalid option values
 */
@Override
public int run(InputStream in, PrintStream out, PrintStream err, List<String> args) throws Exception {
    OptionParser optParser = new OptionParser();
    OptionSpec<Long> offsetOpt = optParser.accepts("offset", "offset for reading input").withRequiredArg()
        .ofType(Long.class).defaultsTo(Long.valueOf(0));
    OptionSpec<Long> limitOpt = optParser.accepts("limit", "maximum number of records in the outputfile")
        .withRequiredArg().ofType(Long.class).defaultsTo(Long.MAX_VALUE);
    OptionSpec<Double> fracOpt = optParser.accepts("samplerate", "rate at which records will be collected")
        .withRequiredArg().ofType(Double.class).defaultsTo(Double.valueOf(1));

    OptionSet opts = optParser.parse(args.toArray(new String[0]));
    List<String> nargs = (List<String>) opts.nonOptionArguments();
    if (nargs.size() < 2) {
        // Need at least one input file plus the output target.
        printHelp(out);
        return 0;
    }

    inFiles = Util.getFiles(nargs.subList(0, nargs.size() - 1));
    System.out.println("List of input files:");
    for (Path p : inFiles) {
        System.out.println(p);
    }
    // nextInput() opens the first input and initializes the shared reader/schema fields.
    currentInput = -1;
    nextInput();

    OutputStream output = out;
    String lastArg = nargs.get(nargs.size() - 1);
    if (nargs.size() > 1 && !lastArg.equals("-")) {
        output = Util.createFromFS(lastArg);
    }
    writer = new DataFileWriter<>(new GenericDatumWriter<>());

    // Carry over the codec and any non-reserved metadata from the first input file.
    String codecName = reader.getMetaString(DataFileConstants.CODEC);
    CodecFactory codec = (codecName == null)
        ? CodecFactory.fromString(DataFileConstants.NULL_CODEC)
        : CodecFactory.fromString(codecName);
    writer.setCodec(codec);
    for (String key : reader.getMetaKeys()) {
        if (!DataFileWriter.isReservedMeta(key)) {
            writer.setMeta(key, reader.getMeta(key));
        }
    }
    writer.create(schema, output);

    long offset = opts.valueOf(offsetOpt);
    long limit = opts.valueOf(limitOpt);
    double samplerate = opts.valueOf(fracOpt);
    sampleCounter = 1;
    totalCopied = 0;
    reuse = null;

    // Invalid option values abort with help text and a non-zero return code.
    if (limit < 0) {
        System.out.println("limit has to be non-negative");
        this.printHelp(out);
        return 1;
    }
    if (offset < 0) {
        System.out.println("offset has to be non-negative");
        this.printHelp(out);
        return 1;
    }
    if (samplerate < 0 || samplerate > 1) {
        System.out.println("samplerate has to be a number between 0 and 1");
        this.printHelp(out);
        return 1;
    }

    skip(offset);
    writeRecords(limit, samplerate);
    System.out.println(totalCopied + " records written.");

    writer.flush();
    writer.close();
    Util.close(out);
    return 0;
}
@Test
void samplerateAccuracy() throws Exception {
    Map<String, String> metadata = new HashMap<>();
    metadata.put("myMetaKey", "myMetaValue");

    File input1 = generateData("input1.avro", Type.INT, metadata, DEFLATE);
    File output = new File(DIR, name.getMethodName() + ".avro");
    output.deleteOnExit();

    List<String> args = asList(input1.getAbsolutePath(), output.getAbsolutePath(), "--offset", String.valueOf(OFFSET),
        "--samplerate", String.valueOf(SAMPLERATE));
    int returnCode = new CatTool().run(System.in, System.out, System.err, args);
    assertEquals(0, returnCode);

    // Sampling is approximate, so the output size may deviate by up to one record
    // from the expected (rows - offset) * samplerate.
    assertTrue((ROWS_IN_INPUT_FILES - OFFSET) * SAMPLERATE - numRowsInFile(output) < 2,
        "Outputsize is not roughly (Inputsize - Offset) * samplerate");
    assertTrue((ROWS_IN_INPUT_FILES - OFFSET) * SAMPLERATE - numRowsInFile(output) > -2, "");
}
/**
 * Handles a non-success response from the origin: classifies it (503 = throttled,
 * anything else = general origin failure), records per-attempt metrics, and then
 * either retries against another origin server or finalizes the error to the client.
 */
protected void handleOriginNonSuccessResponse(final HttpResponse originResponse, DiscoveryResult chosenServer) {
    final int respStatus = originResponse.status().code();
    OutboundException obe;
    StatusCategory statusCategory;
    ClientException.ErrorType niwsErrorType;

    if (respStatus == 503) {
        // 503 is treated as origin throttling, tracked separately from generic failures.
        statusCategory = ZuulStatusCategory.FAILURE_ORIGIN_THROTTLED;
        niwsErrorType = ClientException.ErrorType.SERVER_THROTTLED;
        obe = new OutboundException(OutboundErrorType.SERVICE_UNAVAILABLE, requestAttempts);
        if (currentRequestStat != null) {
            currentRequestStat.updateWithHttpStatusCode(respStatus);
            currentRequestStat.serviceUnavailable();
        }
    } else {
        statusCategory = ZuulStatusCategory.FAILURE_ORIGIN;
        niwsErrorType = ClientException.ErrorType.GENERAL;
        obe = new OutboundException(OutboundErrorType.ERROR_STATUS_RESPONSE, requestAttempts);
        if (currentRequestStat != null) {
            currentRequestStat.updateWithHttpStatusCode(respStatus);
            currentRequestStat.generalError();
        }
    }
    obe.setStatusCode(respStatus);

    long duration = 0;
    if (currentRequestStat != null) {
        duration = currentRequestStat.duration();
    }

    if (currentRequestAttempt != null) {
        currentRequestAttempt.complete(respStatus, duration, obe);
    }

    // Flag this error with the ExecutionListener.
    origin.onRequestExceptionWithServer(zuulRequest, chosenServer, attemptNum, new ClientException(niwsErrorType));

    boolean retryable5xxResponse = isRetryable5xxResponse(zuulRequest, originResponse);
    if (retryable5xxResponse) {
        origin.adjustRetryPolicyIfNeeded(zuulRequest);
    }
    if (retryable5xxResponse && isBelowRetryLimit()) {
        logger.debug(
                "Retrying: status={}, attemptNum={}, maxRetries={}, startedSendingResponseToClient={}, hasCompleteBody={}, method={}",
                respStatus,
                attemptNum,
                origin.getMaxRetriesForRequest(context),
                startedSendingResponseToClient,
                zuulRequest.hasCompleteBody(),
                zuulRequest.getMethod());
        // detach from current origin.
        ByteBufUtil.touch(originResponse, "ProxyEndpoint handling non-success retry, request: ", zuulRequest);
        unlinkFromOrigin();
        releasePartialResponse(originResponse);

        // ensure body reader indexes are reset so retry is able to access the body buffer
        // otherwise when the body is read by netty (in writeBufferedBodyContent) the body will appear empty
        zuulRequest.resetBodyReader();

        // retry request with different origin
        passport.add(PassportState.ORIGIN_RETRY_START);
        proxyRequestToOrigin();
    } else {
        SessionContext zuulCtx = context;
        logger.info(
                "Sending error to client: status={}, attemptNum={}, maxRetries={}, startedSendingResponseToClient={}, hasCompleteBody={}, method={}",
                respStatus,
                attemptNum,
                origin.getMaxRetriesForRequest(zuulCtx),
                startedSendingResponseToClient,
                zuulRequest.hasCompleteBody(),
                zuulRequest.getMethod());
        // This is a final response after all retries that will go to the client
        ByteBufUtil.touch(originResponse, "ProxyEndpoint handling non-success response, request: ", zuulRequest);
        zuulResponse = buildZuulHttpResponse(originResponse, statusCategory, obe);
        invokeNext(zuulResponse);
    }
}
@Test
void testRetryWillResetBodyReader() {
    assertEquals("Hello There", new String(request.getBody()));

    // move the body readerIndex to the end to mimic nettys behavior after writing to the origin channel
    request.getBodyContents()
            .forEach((b) -> b.content().readerIndex(b.content().capacity()));

    // A 503 with retries available triggers the retry path of handleOriginNonSuccessResponse.
    createResponse(HttpResponseStatus.SERVICE_UNAVAILABLE);
    DiscoveryResult discoveryResult = createDiscoveryResult();

    // when retrying a response, the request body reader should have it's indexes reset
    proxyEndpoint.handleOriginNonSuccessResponse(response, discoveryResult);
    assertEquals("Hello There", new String(request.getBody()));
}
/**
 * Builds the Kubernetes Secret holding this user's credentials: TLS users get the CA
 * cert plus their key material in several formats; SCRAM-SHA-512 users get their
 * password and a ready-made JAAS config (both base64-encoded). Other (or absent)
 * authentication types produce no secret.
 *
 * @return the generated Secret, or null when no credentials apply
 */
public Secret generateSecret() {
    if (authentication instanceof KafkaUserTlsClientAuthentication) {
        Map<String, String> data = new HashMap<>(5);
        data.put("ca.crt", caCert);
        data.put("user.key", userCertAndKey.keyAsBase64String());
        data.put("user.crt", userCertAndKey.certAsBase64String());
        data.put("user.p12", userCertAndKey.keyStoreAsBase64String());
        data.put("user.password", userCertAndKey.storePasswordAsBase64String());
        return createSecret(data);
    }
    if (authentication instanceof KafkaUserScramSha512ClientAuthentication) {
        Base64.Encoder encoder = Base64.getEncoder();
        Map<String, String> data = new HashMap<>(2);
        data.put(KafkaUserModel.KEY_PASSWORD, encoder.encodeToString(this.scramSha512Password.getBytes(StandardCharsets.US_ASCII)));
        data.put(KafkaUserModel.KEY_SASL_JAAS_CONFIG, encoder.encodeToString(getSaslJsonConfig().getBytes(StandardCharsets.US_ASCII)));
        return createSecret(data);
    }
    return null;
}
@Test
public void testGenerateSecret() {
    KafkaUserModel model = KafkaUserModel.fromCrd(tlsUser,
            UserOperatorConfig.SECRET_PREFIX.defaultValue(),
            Boolean.parseBoolean(UserOperatorConfig.ACLS_ADMIN_API_SUPPORTED.defaultValue()));
    model.maybeGenerateCertificates(Reconciliation.DUMMY_RECONCILIATION, mockCertManager, passwordGenerator,
            clientsCaCert, clientsCaKey, null, 365, 30, null, Clock.systemUTC());
    Secret generatedSecret = model.generateSecret();

    // A TLS user's secret carries the CA cert plus the user's key material in all formats.
    assertThat(generatedSecret.getData().keySet(), is(set("ca.crt", "user.crt", "user.key", "user.p12", "user.password")));

    assertThat(generatedSecret.getMetadata().getName(), is(ResourceUtils.NAME));
    assertThat(generatedSecret.getMetadata().getNamespace(), is(ResourceUtils.NAMESPACE));
    assertThat(generatedSecret.getMetadata().getAnnotations(), is(emptyMap()));

    // Standard Strimzi/Kubernetes labels must be applied on top of the user's labels.
    assertThat(generatedSecret.getMetadata().getLabels(),
            is(Labels.fromMap(ResourceUtils.LABELS)
                    .withStrimziKind(KafkaUser.RESOURCE_KIND)
                    .withKubernetesName(KafkaUserModel.KAFKA_USER_OPERATOR_NAME)
                    .withKubernetesInstance(ResourceUtils.NAME)
                    .withKubernetesPartOf(ResourceUtils.NAME)
                    .withKubernetesManagedBy(KafkaUserModel.KAFKA_USER_OPERATOR_NAME)
                    .toMap()));

    // Check owner reference
    checkOwnerReference(model.createOwnerReference(), generatedSecret);
}
public ActionResult apply(Agent agent, Map<String, String> input) {
    // Echo the supplied "message" value back to the user; blank or missing input becomes "".
    log.debug("Outputmessage action message={}", input.get("message"));
    String rawMessage = input.get("message");
    String output = StringUtils.hasText(rawMessage) ? rawMessage : "";
    return ActionResult.builder()
            .status(ActionResult.Status.SUCCESS)
            .result("The message has been displayed to the user.")
            .output(output)
            .summary("")
            .build();
}
@Test
void testApply() {
    Map<String, String> input = new HashMap<>();
    String testMessage = "Test message";
    input.put("message", testMessage);

    ActionResult result = outputMessageAction.apply(mockAgent, input);

    // A non-blank message is passed through to the output verbatim.
    assertEquals(ActionResult.Status.SUCCESS, result.getStatus());
    assertEquals("The message has been displayed to the user.", result.getResult());
    assertEquals(testMessage, result.getOutput());
    assertEquals("", result.getSummary());
}
/**
 * Returns the node address in {@code <ip><separator><port>} form, e.g. "127.0.0.1:8091".
 */
public static String getIpAddressAndPort() {
    // Plain concatenation replaces the previous single-expression StringBuilder chain
    // (same result, clearer intent).
    return ipAddress + IP_PORT_SPLIT_CHAR + port;
}
@Test
public void testGetIpAddressAndPort() {
    XID.setPort(8080);
    XID.setIpAddress("127.0.0.1");
    // ip and port are joined as "<ip>:<port>"
    Assertions.assertEquals("127.0.0.1:8080", XID.getIpAddressAndPort());
}
public boolean hasOobLog(String secretString) { // making a blocking call to get result Optional<PollingResult> result = sendPollingRequest(secretString); if (result.isPresent()) { // In the future we may refactor hasOobLog() to return finer grained info about what kind // of oob is logged return result.get().getHasDnsInteraction() || result.get().getHasHttpInteraction(); } else { // we may choose to retry sendPollingRequest() if oob interactions do arrive late. return false; } }
@Test
public void isVulnerable_sendsValidPollingRequest() throws IOException, InterruptedException {
    MockWebServer mockWebServer = new MockWebServer();
    mockWebServer.start();
    client = new TcsClient(VALID_DOMAIN, VALID_PORT, mockWebServer.url("/").toString(), httpClient);

    client.hasOobLog(SECRET);

    // The polling request must carry the secret as the "secret" query parameter.
    assertThat(mockWebServer.takeRequest().getPath())
        .isEqualTo(String.format("/?secret=%s", SECRET));
    mockWebServer.shutdown();
}
/**
 * Builds a REST connection from either a bare "host[:port]" string or a full http(s)
 * URL (optionally carrying percent-encoded user:password credentials). An empty or
 * "-" connection string targets localhost:11222.
 *
 * @return the connection, or null if the connection string could not be processed
 */
@Override
public Connection getConnection(Properties properties, String connectionString, SSLContextSettings sslContextSettings) {
    try {
        RestClientConfigurationBuilder builder = new RestClientConfigurationBuilder().withProperties(properties);
        if (connectionString == null || connectionString.isEmpty() || "-".equals(connectionString)) {
            builder.addServer().host("localhost").port(11222);
        } else {
            Matcher matcher = HOST_PORT.matcher(connectionString);
            if (matcher.matches()) {
                String host = matcher.group(1);
                String port = matcher.group(2);
                // Port is optional in the host:port form; default to 11222.
                builder.addServer().host(host).port(port != null ? Integer.parseInt(port) : 11222);
            } else {
                URL url = new URL(connectionString);
                // Only http/https URLs are accepted here.
                if (!url.getProtocol().equals("http") && !url.getProtocol().equals("https")) {
                    throw new IllegalArgumentException();
                }
                int port = url.getPort();
                builder.addServer().host(url.getHost()).port(port > 0 ? port : url.getDefaultPort());
                String userInfo = url.getUserInfo();
                if (userInfo != null) {
                    // Credentials in the URL are percent-encoded; decode before use.
                    String[] split = userInfo.split(":");
                    builder.security().authentication().username(URLDecoder.decode(split[0], StandardCharsets.UTF_8));
                    if (split.length == 2) {
                        builder.security().authentication().password(URLDecoder.decode(split[1], StandardCharsets.UTF_8));
                    }
                }
                if (url.getProtocol().equals("https")) {
                    SslConfigurationBuilder ssl = builder.security().ssl().enable();
                    if (sslContextSettings != null) {
                        ssl.sslContext(sslContextSettings.getSslContext())
                                .trustManagers(sslContextSettings.getTrustManagers())
                                .hostnameVerifier(sslContextSettings.getHostnameVerifier());
                    }
                }
            }
        }
        builder.header("User-Agent", Version.getBrandName() + " CLI " + Version.getBrandVersion());
        return new RestConnection(builder);
    } catch (Throwable e) {
        // NOTE(review): every failure — malformed URL, bad port number, even programming
        // errors — is silently collapsed to a null return; consider at least logging the
        // cause. TODO confirm callers rely on the null-on-failure contract.
        return null;
    }
}
@Test
public void testUrlWithCredentials() {
    RestConnector connector = new RestConnector();
    RestConnection connection = (RestConnection) connector.getConnection(new Properties(), "http://user:password@localhost:11222", null);
    RestClientConfigurationBuilder builder = connection.getBuilder();
    RestClientConfiguration configuration = builder.build();
    assertEquals(11222, configuration.servers().get(0).port());
    assertEquals("localhost", configuration.servers().get(0).host());
    // Credentials embedded in the URL must enable and populate authentication.
    assertTrue(configuration.security().authentication().enabled());
    assertEquals("user", configuration.security().authentication().username());
    assertArrayEquals("password".toCharArray(), configuration.security().authentication().password());
}
/**
 * Heuristically detects whether a string looks like a JSON document, i.e. its first
 * non-whitespace character is '{' or '['.
 * <p>
 * Fix/generalization: the previous version only skipped leading content when the very
 * first character was an ASCII space, so JSON preceded by a tab, newline, or carriage
 * return was misclassified. Leading whitespace of any kind is now skipped.
 *
 * @param s the candidate string (may be null)
 * @return true when the first non-whitespace character opens a JSON object or array
 */
public static boolean isJson(String s) {
    if (s == null || s.isEmpty()) {
        return false;
    }
    // Skip all leading whitespace, not just an initial ASCII space.
    int i = 0;
    while (i < s.length() && Character.isWhitespace(s.charAt(i))) {
        i++;
    }
    if (i == s.length()) {
        // Whitespace-only input is not JSON.
        return false;
    }
    char first = s.charAt(i);
    return first == '{' || first == '[';
}
@Test
void testDetect() {
    // Strings opening with '{' or '[' (optionally after leading spaces) are JSON;
    // null and empty inputs are not.
    assertTrue(JsonUtils.isJson("{}"));
    assertTrue(JsonUtils.isJson("[]"));
    assertTrue(JsonUtils.isJson(" {}"));
    assertTrue(JsonUtils.isJson(" []"));
    assertFalse(JsonUtils.isJson(null));
    assertFalse(JsonUtils.isJson(""));
}
/**
 * Derives the serde features to use for the value columns: at most a single wrapping
 * feature, determined from the column count, the format, any explicit features, and
 * the KSQL config.
 */
public static SerdeFeatures buildValueFeatures(
    final LogicalSchema schema,
    final Format valueFormat,
    final SerdeFeatures explicitFeatures,
    final KsqlConfig ksqlConfig
) {
    // Wrapping decisions only ever apply when the value schema has exactly one column.
    final boolean hasSingleValueColumn = schema.value().size() == 1;

    final ImmutableSet.Builder<SerdeFeature> features = ImmutableSet.builder();
    getValueWrapping(hasSingleValueColumn, valueFormat, explicitFeatures, ksqlConfig)
        .ifPresent(features::add);

    return SerdeFeatures.from(features.build());
}
@Test
public void shouldGetSingleValueWrappingFromConfig() {
    // Given: single-value wrapping disabled globally and no explicit feature requested.
    ksqlConfig = new KsqlConfig(ImmutableMap.of(
        KsqlConfig.KSQL_WRAP_SINGLE_VALUES, false
    ));

    // When:
    final SerdeFeatures result = SerdeFeaturesFactory.buildValueFeatures(
        SINGLE_FIELD_SCHEMA,
        JSON,
        SerdeFeatures.of(),
        ksqlConfig
    );

    // Then: the config default surfaces as the UNWRAP_SINGLES feature.
    assertThat(result.findAny(SerdeFeatures.WRAPPING_FEATURES),
        is(Optional.of(SerdeFeature.UNWRAP_SINGLES)));
}
/**
 * Read-only endpoint returning every stored server configuration entry.
 */
@GetMapping("/server/config/find-all-config")
public List<ServerConfig> findAllServerConfig() {
    return serverConfigService.findAll();
}
@Test
@Sql(scripts = "/controller/test-server-config.sql", executionPhase = ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = "/controller/cleanup.sql", executionPhase = ExecutionPhase.AFTER_TEST_METHOD)
void findAllServerConfig() {
    // The seed script inserts exactly one config row; the endpoint must return it.
    ServerConfig[] serverConfigs = restTemplate.getForObject(url("/server/config/find-all-config"), ServerConfig[].class);
    assertNotNull(serverConfigs);
    assertEquals(1, serverConfigs.length);
    assertEquals("name", serverConfigs[0].getKey());
    assertEquals("kl", serverConfigs[0].getValue());
}
// Returns the chapters parsed so far.
// NOTE(review): this hands out the internal list directly, so callers can mutate the
// reader's state — consider returning an unmodifiable view. TODO confirm callers
// don't rely on mutability.
public List<Chapter> getChapters() {
    return chapters;
}
@Test
public void testRealFileUltraschall() throws IOException, ID3ReaderException {
    CountingInputStream inputStream = new CountingInputStream(getClass().getClassLoader()
            .getResource("ultraschall5.mp3").openStream());
    ChapterReader reader = new ChapterReader(inputStream);
    reader.readInputStream();
    List<Chapter> chapters = reader.getChapters();

    // The fixture contains exactly three chapters with known start times, titles,
    // links, and embedded image offsets/lengths.
    assertEquals(3, chapters.size());

    assertEquals(0, chapters.get(0).getStart());
    assertEquals(4004, chapters.get(1).getStart());
    assertEquals(7999, chapters.get(2).getStart());

    assertEquals("Marke 1", chapters.get(0).getTitle());
    assertEquals("Marke 2", chapters.get(1).getTitle());
    assertEquals("Marke 3", chapters.get(2).getTitle());

    assertEquals("https://example.com", chapters.get(0).getLink());
    assertEquals("https://example.com", chapters.get(1).getLink());
    assertEquals("https://example.com", chapters.get(2).getLink());

    assertEquals(EmbeddedChapterImage.makeUrl(16073, 2750569), chapters.get(0).getImageUrl());
    assertEquals(EmbeddedChapterImage.makeUrl(2766765, 15740), chapters.get(1).getImageUrl());
    assertEquals(EmbeddedChapterImage.makeUrl(2782628, 2750569), chapters.get(2).getImageUrl());
}
/**
 * Lists the (non-recursive) regular entries of a directory as path strings,
 * excluding subdirectories. A non-existent directory yields an empty set.
 *
 * @param dir the directory to list
 * @return the set of file path strings directly inside {@code dir}
 * @throws IOException if the directory cannot be read
 */
public Set<String> filesInDirectory(File dir) throws IOException {
    Path dirPath = Paths.get(dir.getPath());
    if (!Files.exists(dirPath)) {
        return Collections.emptySet();
    }
    Set<String> fileNames = new HashSet<>();
    try (DirectoryStream<Path> entries = Files.newDirectoryStream(dirPath)) {
        for (Path entry : entries) {
            if (Files.isDirectory(entry)) {
                continue;
            }
            fileNames.add(entry.toString());
        }
    }
    return fileNames;
}
@Test
public void testFilesInDirectoryBadDirectory() throws IOException {
    // A non-existent directory yields an empty set rather than an exception.
    Set<String> result = fileUtil.filesInDirectory(new File(FOOBAR));
    assertTrue(result.isEmpty());
}
@Override
public PageResult<OAuth2AccessTokenDO> getAccessTokenPage(OAuth2AccessTokenPageReqVO reqVO) {
    // Thin delegation: filtering and pagination are implemented in the mapper query.
    return oauth2AccessTokenMapper.selectPage(reqVO);
}
@Test
public void testGetAccessTokenPage() {
    // mock data: this record matches every filter and should be returned
    OAuth2AccessTokenDO dbAccessToken = randomPojo(OAuth2AccessTokenDO.class, o -> {
        o.setUserId(10L);
        o.setUserType(1);
        o.setClientId("test_client");
        o.setExpiresTime(LocalDateTime.now().plusDays(1));
    });
    oauth2AccessTokenMapper.insert(dbAccessToken);
    // record whose userId does not match
    oauth2AccessTokenMapper.insert(cloneIgnoreId(dbAccessToken, o -> o.setUserId(20L)));
    // record whose userType does not match
    oauth2AccessTokenMapper.insert(cloneIgnoreId(dbAccessToken, o -> o.setUserType(2)));
    // record whose clientId does not match
    oauth2AccessTokenMapper.insert(cloneIgnoreId(dbAccessToken, o -> o.setClientId("it_client")));
    // record whose expiresTime does not match (already expired)
    oauth2AccessTokenMapper.insert(cloneIgnoreId(dbAccessToken, o -> o.setExpiresTime(LocalDateTimeUtil.now())));
    // prepare request parameters
    OAuth2AccessTokenPageReqVO reqVO = new OAuth2AccessTokenPageReqVO();
    reqVO.setUserId(10L);
    reqVO.setUserType(1);
    reqVO.setClientId("test");
    // invoke
    PageResult<OAuth2AccessTokenDO> pageResult = oauth2TokenService.getAccessTokenPage(reqVO);
    // assert: only the fully-matching record is returned
    assertEquals(1, pageResult.getTotal());
    assertEquals(1, pageResult.getList().size());
    assertPojoEquals(dbAccessToken, pageResult.getList().get(0));
}
/**
 * Builds a parameter value for the given type by resolving the matching builder
 * implementation via {@code newInstance(paramType)} and delegating to it.
 *
 * @param paramType the parameter category (e.g. "ip") selecting the builder
 * @param paramName the parameter name passed through to the builder (may be null)
 * @param exchange  the current server web exchange supplying request context
 * @return the built parameter value
 */
public static String builderData(final String paramType, final String paramName, final ServerWebExchange exchange) {
    return newInstance(paramType).builder(paramName, exchange);
}
// Building the "ip" parameter should resolve the remote address of the exchange
// through the RemoteAddressResolver bean registered in the application context.
@Test
public void testBuildIPData() {
    ConfigurableApplicationContext context = mock(ConfigurableApplicationContext.class);
    SpringBeanUtils.getInstance().setApplicationContext(context);
    // Anonymous subclass relies on the interface's default resolution behavior.
    RemoteAddressResolver remoteAddressResolver = new RemoteAddressResolver() {
    };
    ServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get("/http")
            .remoteAddress(new InetSocketAddress("127.0.0.1", 8080))
            .build());
    when(context.getBean(RemoteAddressResolver.class)).thenReturn(remoteAddressResolver);
    assertEquals("127.0.0.1", ParameterDataFactory.builderData("ip", null, exchange));
}
/**
 * Computes the convex hull of the given points.
 *
 * <p>Unpacks the point list into parallel coordinate arrays, delegates to the
 * array-based {@code convexHull(int[], int[])} overload, and converts the
 * resulting polygon back to a point list.
 *
 * @param points the input points
 * @return the hull as a point list, or {@code null} when no hull could be built
 */
@Deprecated
public static List<Point> convexHull(List<Point> points) {
    final int count = points.size();
    final int[] xCoords = new int[count];
    final int[] yCoords = new int[count];
    for (int i = 0; i < count; i++) {
        final Point point = points.get(i);
        xCoords[i] = point.getX();
        yCoords[i] = point.getY();
    }
    final SimplePolygon hull = convexHull(xCoords, yCoords);
    return hull == null ? null : hull.toRuneLitePointList();
}
// The convex hull of these 8 points is the 4 outer corners; the expected order
// is the traversal order produced by the Jarvis march implementation.
@Test
public void test() {
    Point[] points = {
        new Point(0, 3), new Point(1, 1), new Point(2, 2), new Point(4, 4),
        new Point(0, 0), new Point(1, 2), new Point(3, 1), new Point(3, 3)
    };
    List<Point> result = Jarvis.convexHull(Arrays.asList(points));
    assertEquals(4, result.size());
    assertEquals(new Point(0, 0), result.get(0));
    assertEquals(new Point(0, 3), result.get(1));
    assertEquals(new Point(4, 4), result.get(2));
    assertEquals(new Point(3, 1), result.get(3));
}
/**
 * Decides whether {@code left} should be considered the "shorter" (preferred) path.
 *
 * <p>Paths are compared by directory depth (number of separators after normalizing
 * backslashes to forward slashes); equal depths fall back to lexicographic order.
 * A path containing "dctemp" never wins against one that does not.
 *
 * @param left  the first path
 * @param right the second path
 * @return {@code true} if {@code left} is the preferred (shortest) path
 */
public static boolean firstPathIsShortest(String left, String right) {
    // "dctemp" paths deliberately lose against non-"dctemp" paths.
    if (left.contains("dctemp") && !right.contains("dctemp")) {
        return false;
    }
    final String normalizedLeft = left.replace('\\', '/');
    final String normalizedRight = right.replace('\\', '/');
    final int leftDepth = countChar(normalizedLeft, '/');
    final int rightDepth = countChar(normalizedRight, '/');
    if (leftDepth != rightDepth) {
        return leftDepth < rightDepth;
    }
    // Same depth: ties (identical paths) favor the left argument.
    return normalizedLeft.compareTo(normalizedRight) <= 0;
}
// Covers the depth-comparison rules: shallower wins, equal depth falls back to
// lexicographic order, and identical paths favor the left argument.
@Test
public void testFirstPathIsShortest() {
    // left is shallower than right -> left wins.
    String left = "./a/c.jar";
    String right = "./d/e/f.jar";
    boolean expResult = true;
    boolean result = DependencyBundlingAnalyzer.firstPathIsShortest(left, right);
    assertEquals(expResult, result);
    // Equal depth, left sorts first lexicographically -> left wins.
    left = "./a/b/c.jar";
    right = "./d/e/f.jar";
    expResult = true;
    result = DependencyBundlingAnalyzer.firstPathIsShortest(left, right);
    assertEquals(expResult, result);
    // Equal depth, right sorts first -> left loses.
    left = "./d/b/c.jar";
    right = "./a/e/f.jar";
    expResult = false;
    result = DependencyBundlingAnalyzer.firstPathIsShortest(left, right);
    assertEquals(expResult, result);
    // Right is shallower -> left loses regardless of names.
    left = "./a/b/c.jar";
    right = "./d/f.jar";
    expResult = false;
    result = DependencyBundlingAnalyzer.firstPathIsShortest(left, right);
    assertEquals(expResult, result);
    // Identical paths -> left wins (<= comparison).
    left = "./a/b/c.jar";
    right = "./a/b/c.jar";
    expResult = true;
    result = DependencyBundlingAnalyzer.firstPathIsShortest(left, right);
    assertEquals(expResult, result);
}
/**
 * Parses a logical expression over fully-qualified {@link Predicate} class names
 * (combined with the operators in {@code OPERATORS} and parentheses) into a single
 * composed {@link Predicate}, honoring the precedence table {@code OPERATOR_PRECEDENCE}.
 *
 * <p>Implementation is a two-stack (operand/operator) shunting-yard style parser.
 * Named predicates are instantiated reflectively via their no-arg constructor.
 *
 * @param expression the expression text; whitespace is stripped first
 * @return the composed predicate
 * @throws RuntimeException if a class does not implement Predicate, cannot be
 *         instantiated, or the expression is structurally invalid
 */
public static Predicate parse(String expression) {
    final Stack<Predicate> predicateStack = new Stack<>();
    final Stack<Character> operatorStack = new Stack<>();

    // Remove all whitespace so the tokenizer only sees names and operators.
    final String trimmedExpression = TRIMMER_PATTERN.matcher(expression).replaceAll("");
    final StringTokenizer tokenizer = new StringTokenizer(trimmedExpression, OPERATORS, true);
    // isTokenMode == false means: re-process the operator just popped from the stack
    // instead of consuming a new token (used after a precedence-forced evaluation).
    boolean isTokenMode = true;

    while (true) {
        final Character operator;
        final String token;

        if (isTokenMode) {
            if (tokenizer.hasMoreTokens()) {
                token = tokenizer.nextToken();
            } else {
                break;
            }
            if (OPERATORS.contains(token)) {
                operator = token.charAt(0);
            } else {
                operator = null;
            }
        } else {
            // Revisit the operator popped during the precedence check below.
            operator = operatorStack.pop();
            token = null;
        }
        isTokenMode = true;

        if (operator == null) {
            // Operand: instantiate the named Predicate class reflectively.
            try {
                predicateStack.push(Class.forName(token).asSubclass(Predicate.class).getDeclaredConstructor().newInstance());
            } catch (ClassCastException e) {
                throw new RuntimeException(token + " must implement " + Predicate.class.getName(), e);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        } else {
            if (operatorStack.empty() || operator == '(') {
                operatorStack.push(operator);
            } else if (operator == ')') {
                // Collapse everything back to the matching '('.
                while (operatorStack.peek() != '(') {
                    evaluate(predicateStack, operatorStack);
                }
                operatorStack.pop();
            } else {
                // Lower-precedence operator forces evaluation of the stack top first.
                if (OPERATOR_PRECEDENCE.get(operator) < OPERATOR_PRECEDENCE.get(operatorStack.peek())) {
                    evaluate(predicateStack, operatorStack);
                    isTokenMode = false;
                }
                operatorStack.push(operator);
            }
        }
    }

    // Evaluate whatever operators remain.
    while (!operatorStack.empty()) {
        evaluate(predicateStack, operatorStack);
    }

    if (predicateStack.size() > 1) {
        throw new RuntimeException("Invalid logical expression");
    }

    return predicateStack.pop();
}
// '&' binds tighter than '|': the AND of (false & true) becomes a single child of
// the surrounding OR, alongside the two plain operands.
@Test
public void testOrAndOr() {
    final Predicate parsed = PredicateExpressionParser.parse("com.linkedin.data.it.AlwaysTruePredicate | com.linkedin.data.it.AlwaysFalsePredicate & com.linkedin.data.it.AlwaysTruePredicate | com.linkedin.data.it.AlwaysFalsePredicate");
    Assert.assertEquals(parsed.getClass(), OrPredicate.class);
    final List<Predicate> orChildren = ((OrPredicate) parsed).getChildPredicates();
    Assert.assertEquals(orChildren.get(0).getClass(), AlwaysTruePredicate.class);
    Assert.assertEquals(orChildren.get(1).getClass(), AndPredicate.class);
    Assert.assertEquals(orChildren.get(2).getClass(), AlwaysFalsePredicate.class);
    final List<Predicate> andChildren = ((AndPredicate) orChildren.get(1)).getChildPredicates();
    Assert.assertEquals(andChildren.get(0).getClass(), AlwaysFalsePredicate.class);
    Assert.assertEquals(andChildren.get(1).getClass(), AlwaysTruePredicate.class);
}
/**
 * Runs every matcher through the supplied validator, failing fast on the first
 * matcher that does not validate.
 *
 * @param validator the validator applied to each matcher string
 * @throws ValidationException carrying the validator's error for the first invalid matcher
 */
public void validateUsing(Validator<String> validator) throws ValidationException {
    for (final String pattern : matchers) {
        final ValidationBean outcome = validator.validate(pattern);
        if (outcome.isValid()) {
            continue;
        }
        throw new ValidationException(outcome.getError());
    }
}
// All matchers must pass validation; "aaa,a" is within the 200-character length limit,
// so no ValidationException is expected.
@Test
void shouldValidateAllMatchersUsingAValidator() throws Exception {
    new Matcher(new String[]{"aaa,a"}).validateUsing(Validator.lengthValidator(200));
}
/**
 * Convenience overload: finds the best match for the Accept-style header among the
 * supported MIME types, delegating to the stream-based overload.
 */
public static String bestMatch(Collection<String> supported, String header) {
    return bestMatch(supported.stream(), header);
}
// Data-driven check of MIME negotiation: each provider row supplies the supported
// types, the request header, and the expected best match.
@Test(dataProvider = "successfulMatch")
public void testBestMatchForSuccessfullMatch(List<String> supportedTypes, String header, String result) {
    Assert.assertEquals(MIMEParse.bestMatch(supportedTypes, header), result);
}
/**
 * Returns a snapshot of the consumed offsets (per message queue) of the given
 * consumer group for {@code topic}.
 *
 * <p>Supports both push and pull consumer implementations; any other (or missing)
 * consumer yields an empty map.
 *
 * @param topic the topic whose offset table is cloned
 * @param group the consumer group to look up in the consumer table
 * @return the cloned offset table, or an empty map when the group is unknown or of
 *         an unsupported type
 */
public Map<MessageQueue, Long> getConsumerStatus(String topic, String group) {
    final MQConsumerInner impl = this.consumerTable.get(group);
    if (impl instanceof DefaultMQPushConsumerImpl) {
        DefaultMQPushConsumerImpl consumer = (DefaultMQPushConsumerImpl) impl;
        return consumer.getOffsetStore().cloneOffsetTable(topic);
    } else if (impl instanceof DefaultMQPullConsumerImpl) {
        DefaultMQPullConsumerImpl consumer = (DefaultMQPullConsumerImpl) impl;
        return consumer.getOffsetStore().cloneOffsetTable(topic);
    } else {
        // Collections.emptyMap() is properly typed; the raw EMPTY_MAP constant used
        // previously required an unchecked conversion (and a @SuppressWarnings).
        return Collections.emptyMap();
    }
}
// With route/broker/consumer tables populated, the status call must return a
// non-null (possibly empty) offset map rather than failing.
@Test
public void testGetConsumerStatus() {
    topicRouteTable.put(topic, createTopicRouteData());
    brokerAddrTable.put(defaultBroker, createBrokerAddrMap());
    consumerTable.put(group, createMQConsumerInner());
    Map<MessageQueue, Long> actual = mqClientInstance.getConsumerStatus(topic, group);
    assertNotNull(actual);
    assertEquals(0, actual.size());
}
/**
 * Requests the task view from the plugin and converts the JSON response into a
 * {@link TaskView} using the handler registered for the resolved extension version.
 */
@Override
public TaskView view() {
    return pluginRequestHelper.submitRequest(pluginId, TaskExtension.TASK_VIEW_REQUEST, new DefaultPluginInteractionCallback<>() {
        @Override
        public TaskView onSuccess(String responseBody, Map<String, String> responseHeaders, String resolvedExtensionVersion) {
            // Each extension version has its own JSON -> TaskView deserializer.
            return handlerMap.get(resolvedExtensionVersion).toTaskView(responseBody);
        }
    });
}
// The plugin's JSON view response must be deserialized into a TaskView, and the
// request submitted to the plugin must carry the expected extension metadata.
@Test
public void shouldGetTaskView() {
    String jsonResponse = "{\"displayValue\":\"MyTaskPlugin\", \"template\":\"<html>junk</html>\"}";
    when(goPluginApiResponse.responseBody()).thenReturn(jsonResponse);
    TaskView view = task.view();
    assertThat(view.displayValue(), is("MyTaskPlugin"));
    assertThat(view.template(), is("<html>junk</html>"));
    // Capture the outbound request and verify its routing attributes.
    ArgumentCaptor<GoPluginApiRequest> argument = ArgumentCaptor.forClass(GoPluginApiRequest.class);
    verify(pluginManager).submitTo(eq(pluginId), eq(PLUGGABLE_TASK_EXTENSION), argument.capture());
    MatcherAssert.assertThat(argument.getValue().extension(), Matchers.is(PLUGGABLE_TASK_EXTENSION));
    MatcherAssert.assertThat(argument.getValue().extensionVersion(), Matchers.is(JsonBasedTaskExtensionHandler_V1.VERSION));
    MatcherAssert.assertThat(argument.getValue().requestName(), Matchers.is(TaskExtension.TASK_VIEW_REQUEST));
}
/**
 * Returns whether this version is greater than or equal to the given
 * {@code major.minor.series} version, comparing components most significant first.
 *
 * @param major  major version component to compare against
 * @param minor  minor version component to compare against
 * @param series series (patch) component to compare against
 * @return {@code true} if this version >= the supplied version
 */
public boolean greaterThanOrEqualTo(final int major, final int minor, final int series) {
    // The first differing component decides the result.
    if (this.major != major) {
        return this.major > major;
    }
    if (this.minor != minor) {
        return this.minor > minor;
    }
    return this.series >= series;
}
// An exactly equal version (5.6.6 vs 5,6,6) must satisfy greaterThanOrEqualTo.
@Test
void assertEqualTo() {
    MySQLServerVersion actual = new MySQLServerVersion("5.6.6");
    assertTrue(actual.greaterThanOrEqualTo(5, 6, 6));
}
/**
 * CLI entry point: validates the config file, loads the migration config, and
 * delegates to the testable overload with real collaborators.
 *
 * @return 0 on success, 1 when the config file is missing or fails to load
 */
@Override
protected int command() {
    if (!validateConfigFilePresent()) {
        return 1;
    }

    final MigrationConfig config;
    try {
        config = MigrationConfig.load(getConfigFile());
    } catch (KsqlException | MigrationException e) {
        // Config errors are reported to the user, not rethrown: the CLI signals
        // failure via the exit code.
        LOGGER.error(e.getMessage());
        return 1;
    }

    // Real dependencies injected here; tests call the overload directly with fakes.
    return command(
        config,
        MigrationsUtil::getKsqlClient,
        getMigrationsDir(getConfigFile(), config),
        Clock.systemDefaultZone()
    );
}
// Verifies that DEFINE/UNDEFINE statements in a migration file are applied to the
// ksql client in order, that defined variables are substituted into subsequent
// statements, and that an UNDEFINE stops substitution for later inserts.
@SuppressWarnings("unchecked")
@Test
public void shouldApplyDefineUndefineCommands() throws Exception {
    // Given:
    final Map<String, Object> variables = ImmutableMap.of("pre", "a", "str", "abc");
    command = PARSER.parse("-n");
    createMigrationFile(1, NAME, migrationsDir, DEFINE_COMMANDS);
    when(versionQueryResult.get()).thenReturn(ImmutableList.of());
    // Sequence of variable snapshots returned across the repeated getVariables()
    // calls made during migration: empty before the defines, populated while they
    // are in effect, then empty again at the end.
    when(ksqlClient.getVariables()).thenReturn(
        ImmutableMap.of(), ImmutableMap.of(),
        variables, variables, variables, variables, variables, variables, variables,
        variables, variables, variables, variables, variables, variables,
        variables, variables, ImmutableMap.of()
    );

    // When:
    final int result = command.command(config, (cfg, headers) -> ksqlClient, migrationsDir, Clock.fixed(
        Instant.ofEpochMilli(1000), ZoneId.systemDefault()));

    // Then:
    assertThat(result, is(0));
    final InOrder inOrder = inOrder(ksqlClient);
    verifyMigratedVersion(inOrder, 1, "<none>", MigrationState.MIGRATED, () -> {
        inOrder.verify(ksqlClient).executeStatement(COMMAND, new HashMap<>());
        inOrder.verify(ksqlClient).define("pre", "a");
        inOrder.verify(ksqlClient).define("str", "abc");
        // ${pre}fix etc. substituted; statement properties forwarded.
        inOrder.verify(ksqlClient).executeStatement(eq("CREATE STREAM abc AS SELECT * FROM FOO;"), propCaptor.capture());
        assertThat(propCaptor.getValue().size(), is(1));
        assertThat(propCaptor.getValue().get("abc"), is("yay"));
        inOrder.verify(ksqlClient).insertInto("`FOO`", new KsqlObject(ImmutableMap.of("`A`", "abc")));
        inOrder.verify(ksqlClient).undefine("str");
        // After UNDEFINE, "${str}" is no longer substituted and is inserted literally.
        inOrder.verify(ksqlClient).insertInto("`FOO`", new KsqlObject(ImmutableMap.of("`A`", "${str}")));
    });
    inOrder.verify(ksqlClient).close();
    inOrder.verifyNoMoreInteractions();
}
/**
 * Reports whether the event carries the entry's previous value, as recorded at
 * construction time.
 */
@Override
public boolean isOldValueAvailable() {
    return hasOldValue;
}
// An event constructed with hasOldValue=false must report the old value as unavailable.
@Test
public void isOldValueAvailable_false() {
    var entry = new JCacheEntryEvent<>(cache, EventType.CREATED, 1, false, null, 3);
    assertThat(entry.isOldValueAvailable()).isFalse();
}
/**
 * Returns how many one-time EC and PQ prekeys remain stored for the authenticated
 * device under the requested identity (ACI by default). The two counts are fetched
 * concurrently and combined into a single {@link PreKeyCount} response.
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
@Operation(summary = "Get prekey count",
    description = "Gets the number of one-time prekeys uploaded for this device and still available")
@ApiResponse(responseCode = "200", description = "Body contains the number of available one-time prekeys for the device.", useReturnTypeSchema = true)
@ApiResponse(responseCode = "401", description = "Account authentication check failed.")
public CompletableFuture<PreKeyCount> getStatus(@ReadOnly @Auth final AuthenticatedDevice auth,
    @QueryParam("identity") @DefaultValue("aci") final IdentityType identityType) {

    final CompletableFuture<Integer> ecCountFuture =
        keysManager.getEcCount(auth.getAccount().getIdentifier(identityType), auth.getAuthenticatedDevice().getId());

    final CompletableFuture<Integer> pqCountFuture =
        keysManager.getPqCount(auth.getAccount().getIdentifier(identityType), auth.getAuthenticatedDevice().getId());

    return ecCountFuture.thenCombine(pqCountFuture, PreKeyCount::new);
}
// Uploading a prekey whose public-key bytes are not a valid EC point must be
// rejected with 400 Bad Request.
@Test
void putKeysStructurallyInvalidUnsignedECKey() {
    final ECKeyPair identityKeyPair = Curve.generateKeyPair();
    final IdentityKey identityKey = new IdentityKey(identityKeyPair.getPublicKey());
    // Arbitrary non-key bytes stand in for a malformed public key.
    final WeaklyTypedPreKey wrongPreKey = new WeaklyTypedPreKey(1, "cluck cluck i'm a parrot".getBytes());
    final WeaklyTypedPreKeyState preKeyState =
        new WeaklyTypedPreKeyState(List.of(wrongPreKey), null, null, null, identityKey.serialize());

    Response response = resources.getJerseyTest()
        .target("/v2/keys")
        .request()
        .header("Authorization", AuthHelper.getAuthHeader(AuthHelper.VALID_UUID, AuthHelper.VALID_PASSWORD))
        .put(Entity.entity(preKeyState, MediaType.APPLICATION_JSON_TYPE));

    assertThat(response.getStatus()).isEqualTo(400);
}
/**
 * Single-argument overload: rounds {@code n} with a scale of zero by delegating to
 * the two-argument variant.
 */
public FEELFnResult<BigDecimal> invoke(@ParameterName( "n" ) BigDecimal n) {
    return invoke(n, BigDecimal.ZERO);
}
// A null in either argument position (or both) must yield an
// InvalidParametersEvent error result, never an exception or a value.
@Test
void invokeNull() {
    FunctionTestUtil.assertResultError(roundHalfUpFunction.invoke(null), InvalidParametersEvent.class);
    FunctionTestUtil.assertResultError(roundHalfUpFunction.invoke((BigDecimal) null, null), InvalidParametersEvent.class);
    FunctionTestUtil.assertResultError(roundHalfUpFunction.invoke(BigDecimal.ONE, null), InvalidParametersEvent.class);
    FunctionTestUtil.assertResultError(roundHalfUpFunction.invoke(null, BigDecimal.ONE), InvalidParametersEvent.class);
}
/**
 * Fetches all completed map outputs currently available on {@code host}.
 *
 * <p>Opens a shuffle connection covering every pending map on the host, then loops
 * copying outputs until all are fetched or a copy reports failed tasks. On an
 * IOException mid-copy the connection is torn down and re-opened for only the
 * remaining maps. On exit, any still-unfetched maps are handed back to the
 * scheduler so another fetcher can retry them.
 *
 * @param host the map-output host to fetch from
 * @throws IOException if the server did not return all expected map outputs
 */
@VisibleForTesting
protected void copyFromHost(MapHost host) throws IOException {
    // reset retryStartTime for a new host
    retryStartTime = 0;
    // Get completed maps on 'host'
    List<TaskAttemptID> maps = scheduler.getMapsForHost(host);

    // Sanity check to catch hosts with only 'OBSOLETE' maps,
    // especially at the tail of large jobs
    if (maps.size() == 0) {
        return;
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("Fetcher " + id + " going to fetch from " + host + " for: " + maps);
    }

    // List of maps to be fetched yet
    Set<TaskAttemptID> remaining = new HashSet<TaskAttemptID>(maps);

    // Construct the url and connect
    URL url = getMapOutputURL(host, maps);
    DataInputStream input = null;
    try {
        input = openShuffleUrl(host, remaining, url);
        if (input == null) {
            return;
        }

        // Loop through available map-outputs and fetch them
        // On any error, failedTasks is not null and we exit
        // after putting back the remaining maps to the
        // yet_to_be_fetched list and marking the failed tasks.
        TaskAttemptID[] failedTasks = null;
        while (!remaining.isEmpty() && failedTasks == null) {
            try {
                failedTasks = copyMapOutput(host, input, remaining, fetchRetryEnabled);
            } catch (IOException e) {
                IOUtils.cleanupWithLogger(LOG, input);
                // Setup connection again if disconnected by NM
                connection.disconnect();
                // Get map output from remaining tasks only.
                url = getMapOutputURL(host, remaining);
                input = openShuffleUrl(host, remaining, url);
                if (input == null) {
                    return;
                }
            }
        }

        if (failedTasks != null && failedTasks.length > 0) {
            LOG.warn("copyMapOutput failed for tasks " + Arrays.toString(failedTasks));
            // Record the host failure, then mark each failed copy for rescheduling.
            scheduler.hostFailed(host.getHostName());
            for (TaskAttemptID left : failedTasks) {
                scheduler.copyFailed(left, host, true, false);
            }
        }

        // Sanity check
        if (failedTasks == null && !remaining.isEmpty()) {
            throw new IOException("server didn't return all expected map outputs: " + remaining.size() + " left.");
        }
        // Clear input so the finally block does not double-close it.
        input.close();
        input = null;
    } finally {
        if (input != null) {
            IOUtils.cleanupWithLogger(LOG, input);
            input = null;
        }
        // Hand unfetched maps back to the scheduler for retry by another fetcher.
        for (TaskAttemptID left : remaining) {
            scheduler.putBackKnownMapOutput(host, left);
        }
    }
}
// Simulates a successful first response followed by a SocketTimeoutException on
// reconnect: the fetcher must count the error, mark both maps as copy-failed, and
// return them to the scheduler for retry.
@SuppressWarnings("unchecked")
@Test(timeout = 10000)
public void testCopyFromHostWithRetryThenTimeout() throws Exception {
    InMemoryMapOutput<Text, Text> immo = mock(InMemoryMapOutput.class);
    Fetcher<Text, Text> underTest = new FakeFetcher<Text, Text>(jobWithRetry,
        id, ss, mm, r, metrics, except, key, connection);
    String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);
    // First connection succeeds (200); the reconnect attempt times out.
    when(connection.getResponseCode()).thenReturn(200)
        .thenThrow(new SocketTimeoutException("forced timeout"));
    when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
        .thenReturn(replyHash);
    ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
    ByteArrayOutputStream bout = new ByteArrayOutputStream();
    header.write(new DataOutputStream(bout));
    ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());
    when(connection.getInputStream()).thenReturn(in);
    when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
        .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
    when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
        .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
    when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
        .thenReturn(immo);
    // Force the in-memory shuffle itself to fail, triggering the reconnect path.
    doThrow(new IOException("forced error")).when(immo).shuffle(
        any(MapHost.class), any(InputStream.class), anyLong(),
        anyLong(), any(ShuffleClientMetrics.class), any(Reporter.class));

    underTest.copyFromHost(host);
    verify(allErrs).increment(1);
    verify(ss, times(1)).copyFailed(map1ID, host, false, false);
    verify(ss, times(1)).copyFailed(map2ID, host, false, false);
    // Both maps must be returned to the scheduler for another attempt.
    verify(ss, times(1)).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
    verify(ss, times(1)).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
/**
 * Converts a list of admin user DTOs to user entities, silently skipping any null
 * entries in the input list.
 *
 * @param userDTOs the DTOs to convert (may contain nulls)
 * @return an unmodifiable list of converted users
 */
public List<User> userDTOsToUsers(List<AdminUserDTO> userDTOs) {
    return userDTOs
        .stream()
        .filter(dto -> dto != null)
        .map(dto -> userDTOToUser(dto))
        .toList();
}
// Null entries in the DTO list must be filtered out, leaving only the mapped user.
@Test
void userDTOsToUsersShouldMapOnlyNonNullUsers() {
    List<AdminUserDTO> usersDto = new ArrayList<>();
    usersDto.add(userDto);
    usersDto.add(null);

    List<User> users = userMapper.userDTOsToUsers(usersDto);

    assertThat(users).isNotEmpty().size().isEqualTo(1);
}
/**
 * Reads {@code lengthToLoad} bytes of {@code filePath} starting at {@code offset}
 * from the remote {@code reader} in chunks of {@code chunkSize} bytes, caching each
 * chunk as one page keyed by the file's URI hash and the page index of the running
 * offset.
 *
 * <p>NOTE(review): the page index is {@code offset / mPageSize}, which assumes the
 * starting offset and chunkSize are page-aligned (the tests use chunkSize ==
 * mPageSize) — confirm before calling with other sizes.
 *
 * @param filePath     the file path used to derive the cache file id
 * @param offset       the starting byte offset in the remote file
 * @param lengthToLoad the total number of bytes to load
 * @param reader       the positional reader supplying the remote bytes
 * @param chunkSize    the size of each read/cached chunk in bytes
 * @throws IOException if the underlying reader fails
 * @throws FailedPreconditionRuntimeException if a read returns fewer bytes than requested
 */
@VisibleForTesting
public void loadDataFromRemote(String filePath, long offset, long lengthToLoad,
    PositionReader reader, int chunkSize) throws IOException {
    ByteBuffer buf = ByteBuffer.allocateDirect(chunkSize);
    // hash() already returns a String; the previous code called toString() on it redundantly.
    String fileId = new AlluxioURI(filePath).hash();
    while (lengthToLoad > 0) {
        long currentPageIndex = offset / mPageSize;
        PageId pageId = new PageId(fileId, currentPageIndex);
        // The final chunk may be shorter than chunkSize.
        int lengthToRead = (int) Math.min(chunkSize, lengthToLoad);
        int lengthRead = reader.read(offset, buf, lengthToRead);
        if (lengthRead != lengthToRead) {
            throw new FailedPreconditionRuntimeException(
                "Read " + lengthRead + " bytes, expected to read " + lengthToRead + " bytes");
        }
        // Flip to drain the freshly written bytes into the cache, then reset for reuse.
        buf.flip();
        mCacheManager.put(pageId, buf);
        offset += lengthRead;
        lengthToLoad -= lengthRead;
        buf.clear();
    }
}
// Loads 3 full pages plus 5 trailing bytes starting one page into the file and
// checks each cached page (including the short last page) holds the expected bytes.
@Test
public void testLoadBlockFromReader() throws IOException {
    String ufsPath = "testLoadBlockRemote";
    long offset = mPageSize;
    int numPages = 3;
    long lengthToLoad = numPages * mPageSize + 5;
    mWorker.loadDataFromRemote(ufsPath, offset, lengthToLoad,
        new TestDataReader((int) (5 * mPageSize)), (int) mPageSize);
    String fileId = new AlluxioURI(ufsPath).hash();
    List<PageId> cachedPages = mCacheManager.getCachedPageIdsByFileId(fileId, 5 * mPageSize);
    // 3 full pages + 1 partial page.
    assertEquals(4, cachedPages.size());
    for (int i = 1; i < 4; i++) {
        byte[] buffer = new byte[(int) mPageSize];
        mCacheManager.get(new PageId(fileId, i), (int) mPageSize, buffer, 0);
        assertTrue(BufferUtils.equalIncreasingByteArray((int) (offset + (i - 1) * mPageSize),
            (int) mPageSize, buffer));
    }
    // test last page with 5 bytes
    byte[] buffer = new byte[(int) 5];
    mCacheManager.get(new PageId(fileId, 4), 5, buffer, 0);
    assertTrue(BufferUtils.equalIncreasingByteArray((int) (offset + 3 * mPageSize), 5, buffer));
}
/**
 * Parses the given input text into an instance of {@code cls}, delegating to
 * {@code readFlow} with the type descriptor derived from the class.
 */
public <T> T parse(String input, Class<T> cls) {
    return readFlow(input, cls, type(cls));
}
// Serializing the minimal flow must produce this exact, stable JSON shape
// (field order and defaults included) — guards against accidental wire-format changes.
@Test
void serialization() throws IOException {
    Flow flow = this.parse("flows/valids/minimal.yaml");

    String s = mapper.writeValueAsString(flow);
    assertThat(s, is("{\"id\":\"minimal\",\"namespace\":\"io.kestra.tests\",\"revision\":2,\"disabled\":false,\"deleted\":false,\"tasks\":[{\"id\":\"date\",\"type\":\"io.kestra.plugin.core.debug.Return\",\"format\":\"{{taskrun.startDate}}\"}]}"));
}
/**
 * Looks up the default value for {@code key} in the config definition registered
 * for the given resource type, rendered as a string.
 *
 * @param type the resource type whose config definition is consulted
 * @param key  the config key name
 * @return the default value as a string, or {@code null} when the type has no
 *         definition, the key is unknown, or the key has no default
 */
public String getDefault(ConfigResource.Type type, String key) {
    final ConfigDef definition = configDefs.get(type);
    if (definition == null) {
        return null;
    }
    final ConfigDef.ConfigKey configKey = definition.configKeys().get(key);
    if (configKey == null || !configKey.hasDefault()) {
        return null;
    }
    return ConfigDef.convertToString(configKey.defaultValue, configKey.type);
}
// Defaults resolve per resource type; unknown keys and keys without a declared
// default must yield null.
@Test
public void testGetConfigValueDefault() {
    assertEquals("1", SCHEMA.getDefault(BROKER, "foo.bar"));
    assertNull(SCHEMA.getDefault(BROKER, "foo.baz.quux"));
    assertNull(SCHEMA.getDefault(TOPIC, "abc"));
    assertEquals("true", SCHEMA.getDefault(TOPIC, "ghi"));
}
/**
 * Deliberate no-op: this implementation does not emit a deep-link launch event.
 * The companion test asserts that no tracking callback fires for this call.
 */
@Override
public void trackDeepLinkLaunch(String deepLinkUrl) {

}
// trackDeepLinkLaunch is a no-op in this implementation, so the track-event
// callback must never fire; Assert.fail() inside the callback enforces that.
@Test
public void testTrackDeepLinkLaunch() {
    mSensorsAPI.setTrackEventCallBack(new SensorsDataTrackEventCallBack() {
        @Override
        public boolean onTrackEvent(String eventName, JSONObject eventProperties) {
            Assert.fail();
            return false;
        }
    });
    mSensorsAPI.trackDeepLinkLaunch("https://deeplink.com", "kjsdakfjkadsljf");
}
public static void disableConsumption(KafkaConsumerWrapper kafkaConsumerWrapper, Set<String> prohibitionTopics) { Set<String> originalTopics = kafkaConsumerWrapper.getOriginalTopics(); // Not subscribed to any Topic, so no action is required if (originalTopics.size() == 0) { return; } Collection<TopicPartition> originalPartitions = kafkaConsumerWrapper.getOriginalPartitions(); KafkaConsumer<?, ?> kafkaConsumer = kafkaConsumerWrapper.getKafkaConsumer(); Collection<String> subtractTopics = CollectionUtils.subtract(originalTopics, prohibitionTopics); if (kafkaConsumerWrapper.isAssign()) { kafkaConsumer.assign(originalPartitions.stream().filter(obj -> subtractTopics.contains(obj.topic())) .collect(Collectors.toSet())); return; } kafkaConsumer.subscribe(subtractTopics); }
// With no original topics, disableConsumption must be a no-op: neither
// subscribe() nor assign() may be invoked on the underlying consumer.
@Test
public void testDisableConsumptionWithNoTopic() {
    KafkaConsumer<?, ?> mockConsumer = Mockito.mock(KafkaConsumer.class);
    KafkaConsumerWrapper kafkaConsumerWrapper = new KafkaConsumerWrapper(mockConsumer);
    HashSet<String> originalTopics = new HashSet<>();
    kafkaConsumerWrapper.setOriginalTopics(originalTopics);
    Set<String> prohibitionTopics = new HashSet<>();
    KafkaConsumerController.disableConsumption(kafkaConsumerWrapper, prohibitionTopics);
    Mockito.verify(mockConsumer, Mockito.times(0)).subscribe((Collection<String>) Mockito.any());
    Mockito.verify(mockConsumer, Mockito.times(0)).assign(Mockito.any());
}
/**
 * Attempts to schedule an asynchronous maintenance task to drain the read/write
 * buffers, unless one is already in flight.
 *
 * <p>Order-sensitive: the drain status is re-checked under the eviction lock to
 * avoid scheduling duplicate tasks, and set to PROCESSING_TO_IDLE before the task
 * is submitted. If the executor rejects the task, maintenance runs synchronously
 * on the caller as a fallback.
 */
void scheduleDrainBuffers() {
    // Fast path: another drain is already scheduled or running.
    if (drainStatusOpaque() >= PROCESSING_TO_IDLE) {
        return;
    }
    if (evictionLock.tryLock()) {
        try {
            // Re-check under the lock: a racing thread may have scheduled a drain.
            int drainStatus = drainStatusOpaque();
            if (drainStatus >= PROCESSING_TO_IDLE) {
                return;
            }
            setDrainStatusRelease(PROCESSING_TO_IDLE);
            executor.execute(drainBuffersTask);
        } catch (Throwable t) {
            logger.log(Level.WARNING, "Exception thrown when submitting maintenance task", t);
            // Executor rejected or failed: fall back to a synchronous drain.
            maintenance(/* ignored */ null);
        } finally {
            evictionLock.unlock();
        }
    }
}
// Exercises each drain-status transition: idle/required states schedule a task and
// move to PROCESSING_TO_IDLE; in-flight states are left unchanged with no task.
@Test
public void scheduleDrainBuffers() {
    Executor executor = Mockito.mock();
    var cache = new BoundedLocalCache<Object, Object>(
        Caffeine.newBuilder().executor(executor), /* loader */ null, /* async */ false) {};
    var transitions = Map.of(
        IDLE, PROCESSING_TO_IDLE,
        REQUIRED, PROCESSING_TO_IDLE,
        PROCESSING_TO_IDLE, PROCESSING_TO_IDLE,
        PROCESSING_TO_REQUIRED, PROCESSING_TO_REQUIRED);
    transitions.forEach((start, end) -> {
        cache.drainStatus = start;
        cache.scheduleDrainBuffers();
        assertThat(cache.drainStatus).isEqualTo(end);
        // Only a real state change should have submitted a task.
        if (!start.equals(end)) {
            verify(executor).execute(any());
            reset(executor);
        }
    });
}
/**
 * Closes the output stream: writes the end-of-stream marker, closes the writer,
 * then closes the distributed log manager — in that order.
 */
@Override
public void close() throws IOException {
    writer.markEndOfStream();
    writer.close();
    dlm.close();
}
// close() must invoke markEndOfStream, close the writer, and close the log
// manager exactly once each.
@Test
public void testClose() throws Exception {
    DistributedLogManager dlm = mock(DistributedLogManager.class);
    AppendOnlyStreamWriter writer = mock(AppendOnlyStreamWriter.class);

    DLOutputStream out = new DLOutputStream(dlm, writer);
    out.close();

    verify(writer, times(1)).markEndOfStream();
    verify(writer, times(1)).close();
    verify(dlm, times(1)).close();
}
/**
 * Renders one HTML detail page into {@code outputPath} for each collected route
 * statistic.
 *
 * @param outputPath the directory the detail pages are written to
 * @throws IOException if writing a page fails
 */
protected void generateHtml(final File outputPath) throws IOException {
    for (final RouteStatistic statistic : routeStatisticMap.values()) {
        writeDetailsAsHtml(statistic, outputPath);
    }
}
// NOTE(review): empty test body — generateHtml is currently not exercised at all.
// Either add assertions (e.g. generate into a temp directory and verify the
// expected files exist) or remove this placeholder so coverage reports stay honest.
@Test
public void testGenerateHtml() {

}
/**
 * Prints command-line usage information for vespa-get to standard output, using
 * Commons CLI's HelpFormatter with automatic usage generation disabled.
 */
public void printHelp() {
    final String usage = "vespa-get <options> [documentid...]";
    final String header = "Fetch a document from a Vespa Content cluster.";
    final String footer = "If one or more document identifier are specified, these documents will be "
            + "retrieved. Otherwise, document identifiers (separated with line break) will be read from standard in.\n";
    new HelpFormatter().printHelp(usage, header, options, footer, false);
}
// Captures stdout around printHelp() and checks the usage line and description
// appear; System.out is always restored in the finally block.
@Test
void testPrintHelp() {
    ByteArrayOutputStream outContent = new ByteArrayOutputStream();
    PrintStream oldOut = System.out;
    System.setOut(new PrintStream(outContent));
    try {
        CommandLineOptions options = new CommandLineOptions(emptyStream);
        options.printHelp();
        String output = outContent.toString();
        assertTrue(output.contains("vespa-get <options> [documentid...]"));
        assertTrue(output.contains("Fetch a document from a Vespa Content cluster."));
    } finally {
        System.setOut(oldOut);
        outContent.reset();
    }
}
/**
 * Creates a PolarDB-X specific recognizer for DELETE statements from the raw SQL
 * and its parsed AST.
 */
@Override
public SQLRecognizer getDeleteRecognizer(String sql, SQLStatement ast) {
    return new PolarDBXDeleteRecognizer(sql, ast);
}
// A simple DELETE statement must yield a non-null PolarDB-X delete recognizer.
@Test
public void getDeleteRecognizerTest() {
    String sql = "DELETE FROM t WHERE id = 1";
    SQLStatement sqlStatement = getSQLStatement(sql);
    Assertions.assertNotNull(new PolarDBXOperateRecognizerHolder().getDeleteRecognizer(sql, sqlStatement));
}
/**
 * Processes the messages through the latest pipeline state, timing the run.
 * Rule-metrics collection is attached only when the current state enables it;
 * otherwise a no-op listener is used.
 */
@Override
public Messages process(Messages messages) {
    try (Timer.Context ignored = executionTime.time()) {
        final State currentState = stateUpdater.getLatestState();
        if (!currentState.enableRuleMetrics()) {
            return process(messages, new NoopInterpreterListener(), currentState);
        }
        return process(messages, new RuleMetricsListener(metricRegistry), currentState);
    }
}
// With "match pass", a stage whose rules all fail must not stop the pipeline:
// stage 1 still runs and its rule adds the "foobar" field to the message.
@Test
public void testMatchPassContinuesIfNoRuleMatched() {
    final RuleService ruleService = mock(MongoDbRuleService.class);
    when(ruleService.loadAll()).thenReturn(ImmutableList.of(RULE_TRUE, RULE_FALSE, RULE_ADD_FOOBAR));

    final PipelineService pipelineService = mock(MongoDbPipelineService.class);
    when(pipelineService.loadAll()).thenReturn(Collections.singleton(
        PipelineDao.create("p1", "title", "description",
            "pipeline \"pipeline\"\n" +
                "stage 0 match pass\n" +
                " rule \"false\";\n" +
                "stage 1 match pass\n" +
                " rule \"add_foobar\";\n" +
                "end\n",
            Tools.nowUTC(),
            null)
    ));

    final Map<String, Function<?>> functions = ImmutableMap.of(SetField.NAME, new SetField());
    final PipelineInterpreter interpreter = createPipelineInterpreter(ruleService, pipelineService, functions);

    final Messages processed = interpreter.process(messageInDefaultStream("message", "test"));

    final List<Message> messages = ImmutableList.copyOf(processed);
    assertThat(messages).hasSize(1);

    // Stage 1 executed despite stage 0 matching no rule.
    final Message actualMessage = messages.get(0);
    assertThat(actualMessage.getFieldAs(String.class, "foobar")).isEqualTo("covfefe");
}
/**
 * Renders the event's markers as their list string representation, or the empty
 * placeholder when the event carries no markers.
 */
public String convert(ILoggingEvent le) {
    final List<Marker> markers = le.getMarkers();
    if (markers != null && !markers.isEmpty()) {
        return markers.toString();
    }
    return EMPTY;
}
// Converting an event with a single marker renders the marker list as "[name]".
@Test
public void testWithMarker() {
    String name = "test";
    Marker marker = markerFactory.getMarker(name);
    String result = converter.convert(createLoggingEvent(marker));
    assertEquals("[" + name + "]", result);
}
/**
 * Snapshots the currently registered event listeners into a fresh
 * {@link ProducerListeners} wrapper (array copy, so later registrations do not
 * affect the returned instance).
 */
ProducerListeners listeners() {
    return new ProducerListeners(eventListeners.toArray(new HollowProducerEventListener[0]));
}
// A listener that throws from onPopulateStart must not prevent the event from
// being dispatched (the listener is still invoked and no exception propagates).
@Test
public void firePopulateStartDontStopWhenOneFails() {
    long version = 31337;
    HollowProducer.ReadState readState = Mockito.mock(HollowProducer.ReadState.class);
    Mockito.when(readState.getVersion()).thenReturn(version);
    Mockito.doThrow(RuntimeException.class).when(listener).onPopulateStart(version);
    listenerSupport.listeners().firePopulateStart(version);
    Mockito.verify(listener).onPopulateStart(version);
}