focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
@Nullable public static DateTime parse(String s, DateTimeZone timeZone, Locale locale) { // Trim input string and consolidate repeating blanks final String text = s.trim().replaceAll("\\s{2,}", " "); // First try UNIX epoch millisecond timestamp try { final long l = Long.parseLong(text); return new DateTime(l, DateTimeZone.UTC); } catch (NumberFormatException e) { // ignore } for (DateTimeFormatter dateTimeFormatter : DATE_TIME_FORMATTERS) { try { return dateTimeFormatter .withZone(timeZone) .withLocale(locale) .parseDateTime(text); } catch (Exception e) { // ignore } } return null; }
@Test
public void parseWithTimeZoneAndLocale() throws Exception {
    // Parsing the fixture string with its zone/locale must yield the expected instant;
    // the input string itself serves as the failure message.
    final DateTime actual = CEFTimestampParser.parse(testString, timeZone, locale);
    assertEquals(testString, expectedDateTime, actual);
}
/**
 * Extracts candidate mod keywords from a crash report's stack trace section.
 *
 * <p>Keywords come from two places: package segments of each stack-frame method
 * reference (excluding the trailing class and method names), and mod ids carried in
 * "xf:&lt;id&gt;:..." module tokens on a frame line. Entries in
 * {@code PACKAGE_KEYWORD_BLACK_LIST} are skipped in both cases.
 *
 * @param crashReport the full crash report text
 * @return the set of extracted keywords (empty if no stack trace section was found)
 */
public static Set<String> findKeywordsFromCrashReport(String crashReport) {
    Matcher matcher = CRASH_REPORT_STACK_TRACE_PATTERN.matcher(crashReport);
    Set<String> result = new HashSet<>();
    if (matcher.find()) {
        for (String line : matcher.group("stacktrace").split("\\n")) {
            Matcher lineMatcher = STACK_TRACE_LINE_PATTERN.matcher(line);
            if (lineMatcher.find()) {
                String[] method = lineMatcher.group("method").split("\\.");
                // Only package segments are keywords: the last two elements are the
                // class name and the method name, so they are excluded.
                for (int i = 0; i < method.length - 2; i++) {
                    if (PACKAGE_KEYWORD_BLACK_LIST.contains(method[i])) {
                        continue;
                    }
                    result.add(method[i]);
                }
                Matcher moduleMatcher = STACK_TRACE_LINE_MODULE_PATTERN.matcher(line);
                if (moduleMatcher.find()) {
                    for (String module : moduleMatcher.group("tokens").split(",")) {
                        String[] split = module.split(":");
                        // Only tokens of the form "xf:<id>[:...]" contribute an id keyword.
                        if (split.length >= 2 && "xf".equals(split[0])) {
                            if (PACKAGE_KEYWORD_BLACK_LIST.contains(split[1])) {
                                continue;
                            }
                            result.add(split[1]);
                        }
                    }
                }
            }
        }
    }
    return result;
}
@Test
public void nei() throws IOException {
    // The NEI crash-report fixture must yield exactly these mod keywords.
    final Set<String> expected = new HashSet<>(Arrays.asList("nei", "codechicken", "guihook"));
    final Set<String> actual =
            CrashReportAnalyzer.findKeywordsFromCrashReport(loadLog("/crash-report/mod/nei.txt"));
    assertEquals(expected, actual);
}
/**
 * REST endpoint returning general history-server information.
 *
 * @return the {@link HistoryInfo}, rendered as UTF-8 JSON or XML per content negotiation
 */
@GET
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
    MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public HistoryInfo get() {
    return getHistoryInfo();
}
@Test
public void testInfoSlash() throws JSONException, Exception {
    // A trailing slash on the "info" path must be handled like the plain path.
    final WebResource webResource = resource();
    final ClientResponse response = webResource.path("ws").path("v1").path("history")
            .path("info/").accept(MediaType.APPLICATION_JSON)
            .get(ClientResponse.class);
    // Content type must be JSON with an explicit UTF-8 charset.
    assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
            response.getType().toString());
    final JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    verifyHSInfo(json.getJSONObject("historyInfo"), appContext);
}
/**
 * Creates a native {@code ViewDTO} from a content-pack entity.
 *
 * <p>Only V1 entities are supported; the owning user must exist so the view can be
 * attributed correctly.
 *
 * @throws IllegalStateException if the user cannot be loaded
 */
@Override
public NativeEntity<ViewDTO> createNativeEntity(Entity entity,
                                                Map<String, ValueReference> parameters,
                                                Map<EntityDescriptor, Object> nativeEntities,
                                                String username) {
    ensureV1(entity);
    // Fail fast when the owner does not exist in the database.
    final User owner = userService.load(username);
    if (owner == null) {
        throw new IllegalStateException("Cannot load user <" + username + "> from db");
    }
    return decode((EntityV1) entity, parameters, nativeEntities, owner);
}
/**
 * End-to-end facade test: importing a view entity must create the view and its backing
 * search, and remap the stream reference to the freshly created native stream.
 */
@Test
@MongoDBFixtures("ViewFacadeTest.json")
public void itShouldCreateADTOFromAnEntity() throws Exception {
    // Native stream that the imported entity's stream reference should be remapped to.
    final StreamImpl stream = new StreamImpl(Collections.emptyMap());
    final Entity viewEntity = createViewEntity();
    final Map<EntityDescriptor, Object> nativeEntities = Map.of(EntityDescriptor.create(newStreamId, ModelTypes.STREAM_V1), stream);
    // Minimal fake user to satisfy the facade's owner lookup.
    final UserImpl fakeUser = new UserImpl(mock(PasswordAlgorithmFactory.class), new Permissions(ImmutableSet.of()),
            mock(ClusterConfigService.class), ImmutableMap.of("username", "testuser"));
    when(userService.load("testuser")).thenReturn(fakeUser);
    final NativeEntity<ViewDTO> nativeEntity = facade.createNativeEntity(viewEntity, Collections.emptyMap(), nativeEntities, "testuser");
    assertThat(nativeEntity.descriptor().title()).isEqualTo("title");
    assertThat(nativeEntity.descriptor().type()).isEqualTo(ModelTypes.SEARCH_V1);
    // The view and its referenced search must both have been persisted.
    Optional<ViewDTO> resultedView = viewService.get(nativeEntity.descriptor().id().id());
    assertThat(resultedView).isPresent();
    Optional<Search> search = searchDbService.get(resultedView.get().searchId());
    assertThat(search).isPresent();
    final Query query = search.get().queries().iterator().next();
    assertThat(query.filter()).isNotNull();
    assertThat(query.filter().filters()).isNotEmpty();
    // The stream filter must no longer reference the original (pre-import) stream id.
    final StreamFilter streamFilter = (StreamFilter) query.filter().filters().iterator().next();
    assertThat(streamFilter.streamId()).doesNotMatch(newStreamId);
}
/**
 * Resolves the remoting service descriptor for a bean by consulting every registered
 * parser.
 *
 * @return the single matching descriptor, or {@code null} if no parser claims the bean
 * @throws FrameworkException if more than one parser claims the bean (ambiguous config)
 */
public RemotingDesc getServiceDesc(Object bean, String beanName) {
    final List<RemotingDesc> matches = new ArrayList<>();
    for (final RemotingParser parser : allRemotingParsers) {
        final RemotingDesc candidate = parser.getServiceDesc(bean, beanName);
        if (candidate != null) {
            matches.add(candidate);
        }
    }
    if (matches.isEmpty()) {
        return null;
    }
    // A bean may only be claimed by exactly one parser.
    if (matches.size() > 1) {
        throw new FrameworkException(String.format("More than one RemotingParser for bean: %s", beanName));
    }
    return matches.get(0);
}
@Test
public void testGetServiceDesc() {
    // An in-JVM bean should be recognized with its own class as the service class.
    SimpleRemoteBean remoteBean = new SimpleRemoteBean();
    String beanName = remoteBean.getClass().getName();
    RemotingDesc desc = remotingParser.getServiceDesc(remoteBean, beanName);
    assertEquals(Protocols.IN_JVM, desc.getProtocol());
    assertEquals(SimpleRemoteBean.class, desc.getServiceClass());
}
/**
 * Scans a term buffer forward from {@code termOffset} over contiguous committed frames
 * and returns the offset just past the last whole frame that fits before
 * {@code limitOffset}.
 *
 * @param termBuffer  buffer containing the frames
 * @param termOffset  offset at which scanning begins
 * @param limitOffset exclusive upper bound for the scan
 * @return the offset reached (== termOffset when nothing could be consumed)
 */
public static int scan(final UnsafeBuffer termBuffer, final int termOffset, final int limitOffset) {
    int offset = termOffset;
    while (offset < limitOffset) {
        // Volatile read: a non-positive length means the frame is not yet committed.
        final int frameLength = frameLengthVolatile(termBuffer, offset);
        if (frameLength <= 0) {
            break;
        }
        final int alignedFrameLength = align(frameLength, FRAME_ALIGNMENT);
        if (isPaddingFrame(termBuffer, offset)) {
            // A padding frame is consumed only when it is the very first frame scanned;
            // either way the scan stops at it.
            if (termOffset == offset) {
                offset += alignedFrameLength;
            }
            break;
        }
        // Do not consume a frame that would cross the limit.
        if (offset + alignedFrameLength > limitOffset) {
            break;
        }
        offset += alignedFrameLength;
    }
    return offset;
}
@Test
void shouldScanEmptyBuffer() {
    // With no committed frames, the scan must return the starting offset unchanged.
    final int startOffset = 0;
    final int limitOffset = termBuffer.capacity();
    assertEquals(startOffset, TermBlockScanner.scan(termBuffer, startOffset, limitOffset));
}
/**
 * Produces 16 bytes of random IV material by generating a 128-bit AES key and
 * returning its raw encoding.
 *
 * @throws NoSuchAlgorithmException if the AES key generator is unavailable
 */
@Override
public byte[] createIV() throws NoSuchAlgorithmException {
    final KeyGenerator generator = KeyGenerator.getInstance("AES");
    generator.init(128); // 128 bits -> 16-byte encoded key
    return generator.generateKey().getEncoded();
}
@Test
public void shouldGenerateA16ByteIV() throws NoSuchAlgorithmException {
    // A 128-bit AES key encodes to exactly 16 bytes of IV material.
    byte[] iv = new ProductionIVProvider().createIV();
    assertThat(iv).hasSize(16);
}
/**
 * Returns whether {@code text} contains the given gender, per
 * {@link JavaEmojiUtils#containsGender(CharSequence, JavaEmojiUtils.Gender)} — this is
 * a thin delegating wrapper; the matching semantics are defined there.
 */
public static boolean containsGender(
    @NonNull CharSequence text, @NonNull JavaEmojiUtils.Gender gender) {
  return JavaEmojiUtils.containsGender(text, gender);
}
@Test
public void testContainsGenderGeneral() {
    // U+1F9D4 (bearded person) alone: no gender sign present.
    Assert.assertFalse(JavaEmojiUtils.containsGender("\uD83E\uDDD4"));
    // ZWJ (U+200D) + female sign appended: gendered.
    Assert.assertTrue(JavaEmojiUtils.containsGender("\uD83E\uDDD4\u200D♀"));
    // Skin-tone modifier (U+1F3FB) between base and ZWJ + female sign: still gendered.
    Assert.assertTrue(JavaEmojiUtils.containsGender("\uD83E\uDDD4\uD83C\uDFFB\u200D♀"));
    // Skin tone alone does not imply gender.
    Assert.assertFalse(JavaEmojiUtils.containsGender("\uD83E\uDDD4\uD83C\uDFFB"));
    // Plain ASCII text contains no gender sign.
    Assert.assertFalse(JavaEmojiUtils.containsGender("h"));
}
/**
 * Compares two values, bridging mixed numeric types via {@code BigDecimal} so that,
 * e.g., an {@code Integer} can be compared with a {@code Long} without a
 * {@code ClassCastException}. Same-class (or non-numeric) operands use natural ordering.
 */
public static int compare(Object o1, Object o2) {
    final boolean mixedNumbers =
            o1.getClass() != o2.getClass() && o1 instanceof Number && o2 instanceof Number;
    if (mixedNumbers) {
        return asBigDecimal((Number) o1).compareTo(asBigDecimal((Number) o2));
    }
    return ((Comparable) o1).compareTo(o2);
}
@Test
public void compareWithString() {
    // Same-class operands fall through to natural String ordering.
    assertThat(OperatorUtils.compare("ABC", "AAA")).isPositive();
    assertThat(OperatorUtils.compare("ABC", "ABC")).isZero();
    assertThat(OperatorUtils.compare("ABC", "XYZ")).isNegative();
}
/**
 * Combines the given matchers into one that succeeds when any of them succeeds.
 * Convenience overload: materializes the iterable and delegates to the array variant.
 */
public static <P> Matcher<P> or(Iterable<? extends Matcher<P>> matchers) {
    return or(toArray(matchers));
}
@Test
void or_single() {
    // With a single matcher there is nothing to combine: or() must return it as-is.
    Matcher<Boolean> one = Boolean::booleanValue;
    assertThat(or(one)).isSameAs(one);
}
public static int getHeartbeat(URL url) { String configuredHeartbeat = System.getProperty(Constants.HEARTBEAT_CONFIG_KEY); int defaultHeartbeat = Constants.DEFAULT_HEARTBEAT; if (StringUtils.isNotEmpty(configuredHeartbeat)) { try { defaultHeartbeat = Integer.parseInt(configuredHeartbeat); } catch (NumberFormatException e) { // use default heartbeat } } return url.getParameter(Constants.HEARTBEAT_KEY, defaultHeartbeat); }
/**
 * A system-property override must be used when the URL carries no heartbeat parameter.
 *
 * <p>Fix: the property is now cleared in a {@code finally} block so a failing assertion
 * cannot leak the override into subsequently run tests.
 */
@Test
void testConfiguredHeartbeat() {
    System.setProperty(Constants.HEARTBEAT_CONFIG_KEY, "200");
    try {
        URL url = URL.valueOf("dubbo://127.0.0.1:12345");
        Assertions.assertEquals(200, UrlUtils.getHeartbeat(url));
    } finally {
        // Always restore global state, even if the assertion above fails.
        System.clearProperty(Constants.HEARTBEAT_CONFIG_KEY);
    }
}
/**
 * Executes an INSERT VALUES statement: resolves the target data source, validates the
 * column list, builds the Kafka record, and sends it, translating Kafka authorization
 * failures into {@link KsqlException}s with informative root causes.
 */
@SuppressWarnings("unused") // Part of required API.
public void execute(
    final ConfiguredStatement<InsertValues> statement,
    final SessionProperties sessionProperties,
    final KsqlExecutionContext executionContext,
    final ServiceContext serviceContext
) {
  final InsertValues insertValues = statement.getStatement();
  final MetaStore metaStore = executionContext.getMetaStore();
  final KsqlConfig config = statement.getSessionConfig().getConfig(true);
  final DataSource dataSource = getDataSource(config, metaStore, insertValues);
  // Reject inserts into sources/columns that do not permit them.
  validateInsert(insertValues.getColumns(), dataSource);
  final ProducerRecord<byte[], byte[]> record =
      buildRecord(statement, metaStore, dataSource, serviceContext);
  try {
    producer.sendRecord(record, serviceContext, config.getProducerClientConfigProps());
  } catch (final TopicAuthorizationException e) {
    // TopicAuthorizationException does not give much detailed information about why it failed,
    // except which topics are denied. Here we just add the ACL to make the error message
    // consistent with other authorization error messages.
    final Exception rootCause = new KsqlTopicAuthorizationException(
        AclOperation.WRITE,
        e.unauthorizedTopics()
    );
    throw new KsqlException(createInsertFailedExceptionMessage(insertValues), rootCause);
  } catch (final ClusterAuthorizationException e) {
    // ClusterAuthorizationException is thrown when using idempotent producers
    // and either a topic write permission or a cluster-level idempotent write
    // permission (only applicable for broker versions no later than 2.8) is
    // missing. In this case, we include additional context to help the user
    // distinguish this type of failure from other permissions exceptions
    // such as the ones thrown above when TopicAuthorizationException is caught.
    throw new KsqlException(
        createInsertFailedExceptionMessage(insertValues),
        createClusterAuthorizationExceptionRootCause(dataSource)
    );
  } catch (final KafkaException e) {
    if (e.getCause() != null && e.getCause() instanceof ClusterAuthorizationException) {
      // The error message thrown when an idempotent producer is missing permissions
      // is (nondeterministically) inconsistent: it is either a raw ClusterAuthorizationException,
      // as checked for above, or a ClusterAuthorizationException wrapped inside a KafkaException.
      // ksqlDB handles these two the same way, accordingly.
      // See https://issues.apache.org/jira/browse/KAFKA-14138 for more.
      throw new KsqlException(
          createInsertFailedExceptionMessage(insertValues),
          createClusterAuthorizationExceptionRootCause(dataSource)
      );
    } else {
      throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e);
    }
  } catch (final Exception e) {
    // Any other failure is wrapped with the standard insert-failed message.
    throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e);
  }
}
@Test
public void shouldHandleFullRow() {
    // Given: an INSERT listing every column of the schema with literal values.
    final ConfiguredStatement<InsertValues> statement = givenInsertValues(
        allColumnNames(SCHEMA),
        ImmutableList.of(
            new StringLiteral("key"),
            new StringLiteral("str"),
            new LongLiteral(2L)
        )
    );

    // When:
    executor.execute(statement, mock(SessionProperties.class), engine, serviceContext);

    // Then: key and value are serialized separately and a single record is produced.
    verify(keySerializer).serialize(TOPIC_NAME, genericKey("key"));
    verify(valueSerializer).serialize(TOPIC_NAME, genericRow("str", 2L));
    verify(producer).send(new ProducerRecord<>(TOPIC_NAME, null, 1L, KEY, VALUE));
}
/**
 * Renders a byte as its 8-character binary string, most significant bit first.
 *
 * <p>Replaces eight repeated concat-with-cast expressions with a single
 * {@code StringBuilder} loop; output is identical (e.g. {@code 0x35 -> "00110101"}).
 *
 * @param b the byte to render
 * @return an 8-character string of '0'/'1' digits
 */
public static String byteToBits(byte b) {
    final StringBuilder bits = new StringBuilder(8);
    // (b >> i) & 0x01 isolates bit i as 0 or 1, MSB first.
    for (int i = 7; i >= 0; i--) {
        bits.append((b >> i) & 0x01);
    }
    return bits.toString();
}
@Test
public void byteToBits() {
    byte b = 0x35; // 0011 0101
    // NOTE(review): arguments are (actual, expected) — reversed from JUnit's
    // (expected, actual) convention; affects only the failure message.
    Assert.assertEquals(CodecUtils.byteToBits(b), "00110101");
}
/**
 * Reads previously persisted {@link JobResourceRequirements} from a job graph's
 * configuration.
 *
 * @return the stored requirements, or {@code Optional.empty()} if none were written
 * @throws IOException if deserialization fails, including when the requirement classes
 *     cannot be found (translated from {@link ClassNotFoundException})
 */
public static Optional<JobResourceRequirements> readFromJobGraph(JobGraph jobGraph)
        throws IOException {
    try {
        return Optional.ofNullable(
                InstantiationUtil.readObjectFromConfig(
                        jobGraph.getJobConfiguration(),
                        JOB_RESOURCE_REQUIREMENTS_KEY,
                        JobResourceRequirements.class.getClassLoader()));
    } catch (ClassNotFoundException e) {
        // Surface as IOException so callers handle one exception type; keep the cause.
        throw new IOException(
                "Unable to deserialize JobResourceRequirements due to missing classes. This might happen when the JobGraph was written from a different Flink version.",
                e);
    }
}
@Test
void testReadNonExistentResourceRequirementsFromJobGraph() throws IOException {
    // An empty job graph has no stored requirements: the read must be Optional.empty().
    assertThat(JobResourceRequirements.readFromJobGraph(JobGraphTestUtils.emptyJobGraph()))
        .isEmpty();
}
/** Returns this serializer's registered name (the {@code NAME} constant). */
@Override
public String name() {
    return NAME;
}
@Test
void testName() {
    // The serializer must identify itself as "Hessian".
    assertEquals("Hessian", hessianSerializer.name());
}
/**
 * Fetches map task completion events from the umbilical in pages of
 * {@code maxEventsToFetch}, resolving each event with the shuffle scheduler.
 *
 * <p>Keeps fetching while a full page is returned; a short page means the server has
 * no more events for now. {@code fromEventIdx} is advanced as pages arrive.
 *
 * @return the number of newly SUCCEEDED map events seen in this call
 */
protected int getMapCompletionEvents() throws IOException, InterruptedException {
    int numNewMaps = 0;
    TaskCompletionEvent events[] = null;
    do {
        MapTaskCompletionEventsUpdate update =
            umbilical.getMapCompletionEvents(
                (org.apache.hadoop.mapred.JobID)reduce.getJobID(),
                fromEventIdx,
                maxEventsToFetch,
                (org.apache.hadoop.mapred.TaskAttemptID)reduce);
        events = update.getMapTaskCompletionEvents();
        LOG.debug("Got " + events.length + " map completion events from " +
            fromEventIdx);
        // Reset is a legacy protocol path that should never occur here.
        assert !update.shouldReset() : "Unexpected legacy state";
        // Update the last seen event ID
        fromEventIdx += events.length;
        // Process the TaskCompletionEvents:
        // 1. Save the SUCCEEDED maps in knownOutputs to fetch the outputs.
        // 2. Save the OBSOLETE/FAILED/KILLED maps in obsoleteOutputs to stop
        //    fetching from those maps.
        // 3. Remove TIPFAILED maps from neededOutputs since we don't need their
        //    outputs at all.
        for (TaskCompletionEvent event : events) {
            scheduler.resolve(event);
            if (TaskCompletionEvent.Status.SUCCEEDED == event.getTaskStatus()) {
                ++numNewMaps;
            }
        }
    } while (events.length == maxEventsToFetch);
    return numNewMaps;
}
/**
 * Verifies that consecutive fetches page through events in order: two full pages
 * followed by a short page of 3 must stop fetching, and all events must be resolved.
 */
@Test
public void testConsecutiveFetch() throws IOException, InterruptedException {
    final int MAX_EVENTS_TO_FETCH = 100;
    TaskAttemptID tid = new TaskAttemptID("12345", 1, TaskType.REDUCE, 1, 1);
    TaskUmbilicalProtocol umbilical = mock(TaskUmbilicalProtocol.class);
    // Default stub returns an empty update; specific (offset, count) stubs below win.
    when(umbilical.getMapCompletionEvents(any(JobID.class),
        anyInt(), anyInt(), any(TaskAttemptID.class)))
        .thenReturn(getMockedCompletionEventsUpdate(0, 0));
    when(umbilical.getMapCompletionEvents(any(JobID.class),
        eq(0), eq(MAX_EVENTS_TO_FETCH), eq(tid)))
        .thenReturn(getMockedCompletionEventsUpdate(0, MAX_EVENTS_TO_FETCH));
    when(umbilical.getMapCompletionEvents(any(JobID.class),
        eq(MAX_EVENTS_TO_FETCH), eq(MAX_EVENTS_TO_FETCH), eq(tid)))
        .thenReturn(getMockedCompletionEventsUpdate(MAX_EVENTS_TO_FETCH, MAX_EVENTS_TO_FETCH));
    // Final short page (3 < page size) terminates the fetch loop.
    when(umbilical.getMapCompletionEvents(any(JobID.class),
        eq(MAX_EVENTS_TO_FETCH*2), eq(MAX_EVENTS_TO_FETCH), eq(tid)))
        .thenReturn(getMockedCompletionEventsUpdate(MAX_EVENTS_TO_FETCH*2, 3));
    @SuppressWarnings("unchecked")
    ShuffleScheduler<String,String> scheduler = mock(ShuffleScheduler.class);
    ExceptionReporter reporter = mock(ExceptionReporter.class);
    EventFetcherForTest<String,String> ef =
        new EventFetcherForTest<String,String>(tid, umbilical, scheduler, reporter, MAX_EVENTS_TO_FETCH);
    ef.getMapCompletionEvents();
    verify(reporter, never()).reportException(any(Throwable.class));
    // Pages must be requested strictly in offset order.
    InOrder inOrder = inOrder(umbilical);
    inOrder.verify(umbilical).getMapCompletionEvents(any(JobID.class),
        eq(0), eq(MAX_EVENTS_TO_FETCH), eq(tid));
    inOrder.verify(umbilical).getMapCompletionEvents(any(JobID.class),
        eq(MAX_EVENTS_TO_FETCH), eq(MAX_EVENTS_TO_FETCH), eq(tid));
    inOrder.verify(umbilical).getMapCompletionEvents(any(JobID.class),
        eq(MAX_EVENTS_TO_FETCH*2), eq(MAX_EVENTS_TO_FETCH), eq(tid));
    // Every event from every page must be handed to the scheduler.
    verify(scheduler, times(MAX_EVENTS_TO_FETCH*2 + 3)).resolve(
        any(TaskCompletionEvent.class));
}
/** Returns the configured Kubernetes service label value. */
String getServiceLabelValue() {
    return serviceLabelValue;
}
@Test
public void emptyProperties() {
    // given: a blank label name and an empty DNS entry, but a real label value.
    Map<String, Comparable> properties = createProperties();
    properties.put(SERVICE_LABEL_NAME.key(), " ");
    String serviceLabelValue = "service-label-value";
    properties.put(SERVICE_LABEL_VALUE.key(), serviceLabelValue);
    properties.put(SERVICE_DNS.key(), "");

    //when
    KubernetesConfig config = new KubernetesConfig(properties);

    //then: the explicitly supplied label value must survive the blank/empty siblings.
    assertEquals(serviceLabelValue, config.getServiceLabelValue());
}
public static String getNormalisedPartitionValue(String partitionValue, String type) { LOG.debug("Converting '" + partitionValue + "' to type: '" + type + "'."); if (type.equalsIgnoreCase("tinyint") || type.equalsIgnoreCase("smallint") || type.equalsIgnoreCase("int")){ return Integer.toString(Integer.parseInt(partitionValue)); } else if (type.equalsIgnoreCase("bigint")){ return Long.toString(Long.parseLong(partitionValue)); } else if (type.equalsIgnoreCase("float")){ return Float.toString(Float.parseFloat(partitionValue)); } else if (type.equalsIgnoreCase("double")){ return Double.toString(Double.parseDouble(partitionValue)); } else if (type.startsWith("decimal")){ // Decimal datatypes are stored like decimal(10,10) return new BigDecimal(partitionValue).stripTrailingZeros().toPlainString(); } return partitionValue; }
@Test
public void testConversionToSignificantNumericTypes() {
    // Integral types: leading zeros are dropped, sign preserved.
    assertEquals("1", MetaStoreServerUtils.getNormalisedPartitionValue("0001", "tinyint"));
    assertEquals("1", MetaStoreServerUtils.getNormalisedPartitionValue("0001", "smallint"));
    assertEquals("10", MetaStoreServerUtils.getNormalisedPartitionValue("00010", "int"));
    assertEquals("-10", MetaStoreServerUtils.getNormalisedPartitionValue("-00010", "int"));
    assertEquals("10", MetaStoreServerUtils.getNormalisedPartitionValue("00010", "bigint"));
    assertEquals("-10", MetaStoreServerUtils.getNormalisedPartitionValue("-00010", "bigint"));
    // Floating-point types: trailing zeros after the fraction are dropped.
    assertEquals("1.01", MetaStoreServerUtils.getNormalisedPartitionValue("0001.0100", "float"));
    assertEquals("-1.01", MetaStoreServerUtils.getNormalisedPartitionValue("-0001.0100", "float"));
    assertEquals("1.01", MetaStoreServerUtils.getNormalisedPartitionValue("0001.010000", "double"));
    assertEquals("-1.01", MetaStoreServerUtils.getNormalisedPartitionValue("-0001.010000", "double"));
    // Decimal: BigDecimal stripTrailingZeros + toPlainString.
    assertEquals("1.01", MetaStoreServerUtils.getNormalisedPartitionValue("0001.0100", "decimal"));
    assertEquals("-1.01", MetaStoreServerUtils.getNormalisedPartitionValue("-0001.0100", "decimal"));
}
/**
 * Subscribes to a notification event stream and returns it as a buffered
 * {@link Flowable}; disposal sends the given unsubscribe method to the server.
 */
@Override
public <T extends Notification<?>> Flowable<T> subscribe(
        Request request, String unsubscribeMethod, Class<T> responseType) {
    // We can't use usual Observer since we can call "onError"
    // before first client is subscribed and we need to
    // preserve it
    BehaviorSubject<T> subject = BehaviorSubject.create();

    // We need to subscribe synchronously, since if we return
    // an Flowable to a client before we got a reply
    // a client can unsubscribe before we know a subscription
    // id and this can cause a race condition
    subscribeToEventsStream(request, subject, responseType);

    // Disposal by the consumer triggers a server-side unsubscribe.
    return subject.doOnDispose(() -> closeSubscription(subject, unsubscribeMethod))
            .toFlowable(BackpressureStrategy.BUFFER);
}
@Test
public void testSendUnsubscribeRequest() throws Exception {
    CountDownLatch unsubscribed = new CountDownLatch(1);
    // Subscribe and immediately dispose on another thread; disposal should trigger
    // an unsubscribe request to the server.
    runAsync(
        () -> {
            Flowable<NewHeadsNotification> flowable = subscribeToEvents();
            flowable.subscribe().dispose();
            unsubscribed.countDown();
        });
    sendSubscriptionConfirmation();
    sendWebSocketEvent();

    assertTrue(unsubscribed.await(2, TimeUnit.SECONDS));
    verifyUnsubscribed();
}
/**
 * Renders the given properties for display as {@code [key=value, ...]},
 * omitting secure (encrypted) properties so their values are never shown.
 */
public String forDisplay(List<ConfigurationProperty> propertiesToDisplay) {
    final List<String> rendered = new ArrayList<>();
    for (final ConfigurationProperty property : propertiesToDisplay) {
        // Secure properties must never appear in clear text.
        if (property.isSecure()) {
            continue;
        }
        rendered.add(format("%s=%s",
                property.getConfigurationKey().getName().toLowerCase(),
                property.getConfigurationValue().getValue()));
    }
    return format("[%s]", StringUtils.join(rendered, ", "));
}
@Test
void shouldNotGetValuesOfSecureKeysInConfigForDisplay() {
    // Two plain properties and one encrypted one; only the plain pair may be rendered.
    ConfigurationProperty property1 = new ConfigurationProperty(new ConfigurationKey("key1"), new ConfigurationValue("value1"), null, null);
    ConfigurationProperty property2 = new ConfigurationProperty(new ConfigurationKey("key2"), new ConfigurationValue("value2"), null, null);
    ConfigurationProperty property3 = new ConfigurationProperty(new ConfigurationKey("secure"), null, new EncryptedConfigurationValue("secured-value"), null);
    Configuration config = new Configuration(property1, property2, property3);

    assertThat(config.forDisplay(List.of(property1, property2, property3))).isEqualTo("[key1=value1, key2=value2]");
}
/**
 * Exchanges a valid refresh token for a fresh token pair.
 *
 * @param tokenRefreshRequest the validated refresh-token request body
 * @return a success response wrapping the new {@link TokenResponse}
 */
@PostMapping("/refresh-token")
public CustomResponse<TokenResponse> refreshToken(
        @RequestBody @Valid final TokenRefreshRequest tokenRefreshRequest) {
    log.info("UserController | refreshToken");
    final Token token = refreshTokenService.refreshToken(tokenRefreshRequest);
    final TokenResponse tokenResponse = tokenToTokenResponseMapper.map(token);
    return CustomResponse.successOf(tokenResponse);
}
/**
 * Logging in with valid credentials must return the token produced by the login
 * service, serialized into the standard success envelope.
 */
@Test
void givenLoginRequest_WhenLoginForUser_ThenReturnToken() throws Exception {
    // Given
    LoginRequest loginRequest = LoginRequest.builder()
        .email("admin@example.com")
        .password("password")
        .build();
    Token mockToken = Token.builder()
        .accessToken("mockAccessToken")
        .accessTokenExpiresAt(3600L)
        .refreshToken("mockRefreshToken")
        .build();
    // Expected JSON fields are derived via the same mapper the controller uses.
    TokenResponse expectedTokenResponse = tokenToTokenResponseMapper.map(mockToken);

    // When
    when(userLoginService.login(any(LoginRequest.class))).thenReturn(mockToken);

    // Then
    mockMvc.perform(MockMvcRequestBuilders.post("/api/v1/users/login")
            .contentType(MediaType.APPLICATION_JSON)
            .content(objectMapper.writeValueAsString(loginRequest)))
        .andDo(MockMvcResultHandlers.print())
        .andExpect(MockMvcResultMatchers.status().isOk())
        .andExpect(MockMvcResultMatchers.jsonPath("$.httpStatus").value("OK"))
        .andExpect(MockMvcResultMatchers.jsonPath("$.isSuccess").value(true))
        .andExpect(MockMvcResultMatchers.jsonPath("$.response.accessToken").value(expectedTokenResponse.getAccessToken()))
        .andExpect(MockMvcResultMatchers.jsonPath("$.response.accessTokenExpiresAt").value(expectedTokenResponse.getAccessTokenExpiresAt()))
        .andExpect(MockMvcResultMatchers.jsonPath("$.response.refreshToken").value(expectedTokenResponse.getRefreshToken()));

    // Verify
    verify(userLoginService, times(1)).login(any(LoginRequest.class));
}
/**
 * Parses {@code str} into a {@link Coin} at the smallest-unit exponent.
 *
 * @throws NumberFormatException if the string is malformed or out of range
 */
public Coin parse(String str) throws NumberFormatException {
    return Coin.valueOf(parseValue(str, Coin.SMALLEST_UNIT_EXPONENT));
}
// A 20-digit value is out of range at the smallest-unit scale and must be rejected.
@Test(expected = NumberFormatException.class)
public void parseInvalidHugeNumber() {
    NO_CODE.parse("99999999999999999999");
}
/**
 * Runs every node-level health check and merges their results into a single
 * {@link Health}, starting from GREEN (the merge identity).
 */
@Override
public Health checkNode() {
    Health merged = Health.GREEN;
    for (NodeHealthCheck healthCheck : nodeHealthChecks) {
        merged = HealthReducer.merge(merged, healthCheck.check());
    }
    return merged;
}
@Test
public void checkNode_returns_GREEN_status_if_only_GREEN_statuses_returned_by_NodeHealthCheck() {
    // 1..20 checks, all reporting GREEN: the merged node health must stay GREEN.
    List<Health.Status> statuses = IntStream.range(1, 1 + random.nextInt(20)).mapToObj(i -> GREEN).toList();
    HealthCheckerImpl underTest = newNodeHealthCheckerImpl(statuses.stream());

    assertThat(underTest.checkNode().getStatus())
        .describedAs("%s should have been computed from %s statuses", GREEN, statuses)
        .isEqualTo(GREEN);
}
/**
 * Converts a maximum HyperLogLog standard error into the required bucket count,
 * rounded up to a power of two.
 *
 * @throws the INVALID_FUNCTION_ARGUMENT error (via checkCondition) if the value falls
 *     outside [LOWEST_MAX_STANDARD_ERROR, HIGHEST_MAX_STANDARD_ERROR]
 */
public static int standardErrorToBuckets(double maxStandardError) {
    checkCondition(maxStandardError >= LOWEST_MAX_STANDARD_ERROR && maxStandardError <= HIGHEST_MAX_STANDARD_ERROR,
            INVALID_FUNCTION_ARGUMENT,
            "Max standard error must be in [%s, %s]: %s",
            LOWEST_MAX_STANDARD_ERROR, HIGHEST_MAX_STANDARD_ERROR, maxStandardError);
    // 1.0816 = 1.04^2; solve error ~= 1.04 / sqrt(buckets) for buckets, then take the
    // next power of two.
    return log2Ceiling((int) Math.ceil(1.0816 / (maxStandardError * maxStandardError)));
}
@Test
public void testStandardErrorToBucketsBounds() {
    try {
        // Lower bound: just below the smallest supported standard error.
        standardErrorToBuckets(0.0040624);
        fail();
    } catch (PrestoException e) {
        assertEquals(e.getErrorCode(), INVALID_FUNCTION_ARGUMENT.toErrorCode());
    }
    try {
        // Upper bound: just above the largest supported standard error.
        standardErrorToBuckets(0.26001);
        fail();
    } catch (PrestoException e) {
        assertEquals(e.getErrorCode(), INVALID_FUNCTION_ARGUMENT.toErrorCode());
    }
}
/**
 * Parses the values encoded in {@code partitionName} and tests them against the
 * constraint's per-column domains.
 *
 * @return {@code true} if the partition may satisfy the constraint,
 *     {@code false} if it can be pruned outright
 */
private boolean parseValuesAndFilterPartition(
        String partitionName,
        List<HudiColumnHandle> partitionColumns,
        List<Type> partitionColumnTypes,
        TupleDomain<ColumnHandle> constraintSummary)
{
    // A "none" constraint matches no rows at all.
    if (constraintSummary.isNone()) {
        return false;
    }
    Map<ColumnHandle, Domain> domains = constraintSummary.getDomains().orElseGet(ImmutableMap::of);
    Map<HudiColumnHandle, NullableValue> partitionValues = parsePartition(partitionName, partitionColumns, partitionColumnTypes);
    for (HudiColumnHandle column : partitionColumns) {
        // NOTE(review): assumes parsePartition yields an entry for every partition
        // column; value.getValue() would NPE otherwise — confirm.
        NullableValue value = partitionValues.get(column);
        Domain allowedDomain = domains.get(column);
        // No domain for a column means the constraint does not restrict it.
        if (allowedDomain != null && !allowedDomain.includesNullableValue(value.getValue())) {
            return false;
        }
    }
    return true;
}
@Test
public void testParseValuesAndFilterPartition() {
    ConnectorSession session = new TestingConnectorSession(
        new HiveSessionProperties(
            new HiveClientConfig().setMaxBucketsForGroupedExecution(100),
            new OrcFileWriterConfig(),
            new ParquetFileWriterConfig(),
            new CacheConfig()).getSessionProperties());
    // Constrain the partition key column to a single date value.
    TupleDomain<ColumnHandle> constraintSummary = TupleDomain.withColumnDomains(
        ImmutableMap.of(
            new HudiColumnHandle(
                MAX_PARTITION_KEY_COLUMN_INDEX,
                PARTITION_COLUMN.getName(),
                PARTITION_COLUMN.getType(),
                Optional.empty(),
                HudiColumnHandle.ColumnType.PARTITION_KEY),
            Domain.singleValue(VARCHAR, utf8Slice("2019-07-23"))));
    // Only the partition whose value matches the constraint should survive filtering.
    List<String> actualPartitions = hudiPartitionManager.getEffectivePartitions(
        session,
        metastore,
        new SchemaTableName(SCHEMA_NAME, TABLE_NAME),
        constraintSummary);
    assertEquals(actualPartitions, ImmutableList.of("ds=2019-07-23"));
}
/**
 * Initializes the change-stream pipeline, deciding between resuming a previous run and
 * starting fresh based on {@code existingPipelineOptions} and the persisted
 * detect-new-partitions state. Emits nothing (terminating the pipeline) on the error
 * paths; otherwise outputs the initial state and records the DNP version.
 */
@ProcessElement
public void processElement(OutputReceiver<InitialPipelineState> receiver) throws IOException {
    LOG.info(daoFactory.getStreamTableDebugString());
    LOG.info(daoFactory.getMetadataTableDebugString());
    LOG.info("ChangeStreamName: " + daoFactory.getChangeStreamName());
    boolean resume = false;
    // Non-null state means a previous pipeline with the same change stream name ran.
    DetectNewPartitionsState detectNewPartitionsState =
        daoFactory.getMetadataTableDao().readDetectNewPartitionsState();
    switch (existingPipelineOptions) {
        case RESUME_OR_NEW:
            // perform resumption.
            if (detectNewPartitionsState != null) {
                resume = true;
                startTime = detectNewPartitionsState.getWatermark();
                LOG.info("Resuming from previous pipeline with low watermark of {}", startTime);
            } else {
                LOG.info(
                    "Attempted to resume, but previous watermark does not exist, starting at {}",
                    startTime);
            }
            break;
        case RESUME_OR_FAIL:
            // perform resumption.
            if (detectNewPartitionsState != null) {
                resume = true;
                startTime = detectNewPartitionsState.getWatermark();
                LOG.info("Resuming from previous pipeline with low watermark of {}", startTime);
            } else {
                LOG.error("Previous pipeline with the same change stream name doesn't exist, stopping");
                return;
            }
            break;
        case FAIL_IF_EXISTS:
            if (detectNewPartitionsState != null) {
                LOG.error(
                    "A previous pipeline exists with the same change stream name and existingPipelineOption is set to FAIL_IF_EXISTS.");
                return;
            }
            break;
        case SKIP_CLEANUP:
            if (detectNewPartitionsState != null) {
                LOG.error(
                    "A previous pipeline exists with the same change stream name and existingPipelineOption is set to SKIP_CLEANUP. This option should only be used in tests.");
                return;
            }
            break;
        default:
            LOG.error("Unexpected existingPipelineOptions option.");
            // terminate pipeline
            return;
    }
    daoFactory.getMetadataTableDao().writeDetectNewPartitionVersion();
    receiver.output(new InitialPipelineState(startTime, resume));
}
/**
 * When a detect-new-partitions watermark exists and RESUME_OR_NEW is set, the DoFn
 * must resume from that watermark (not the supplied start time).
 */
@Test
public void testInitializeResumeWithDNP() throws IOException {
    // Persist a watermark from a "previous" run, 10000s in the past.
    Instant resumeTime = Instant.now().minus(Duration.standardSeconds(10000));
    metadataTableDao.updateDetectNewPartitionWatermark(resumeTime);
    // Seed an unrelated metadata row under the change-stream prefix.
    dataClient.mutateRow(
        RowMutation.create(
                tableId,
                metadataTableAdminDao
                    .getChangeStreamNamePrefix()
                    .concat(ByteString.copyFromUtf8("existing_row")))
            .setCell(
                MetadataTableAdminDao.CF_WATERMARK, MetadataTableAdminDao.QUALIFIER_DEFAULT, 123));
    Instant startTime = Instant.now();
    InitializeDoFn initializeDoFn =
        new InitializeDoFn(daoFactory, startTime, BigtableIO.ExistingPipelineOptions.RESUME_OR_NEW);
    initializeDoFn.processElement(outputReceiver);
    // Resumed state carries the persisted watermark and resume == true.
    verify(outputReceiver, times(1)).output(new InitialPipelineState(resumeTime, true));
    assertNull(dataClient.readRow(tableId, metadataTableAdminDao.getChangeStreamNamePrefix()));
}
/**
 * Splits {@code str} on the given delimiter characters into a string array.
 *
 * @param str               the input (null yields an empty array)
 * @param delimiters        each character is an individual delimiter
 * @param trimTokens        whether to trim whitespace from each token
 * @param ignoreEmptyTokens whether to drop tokens that are empty (after trimming)
 */
public static String[] tokenizeToStringArray(String str, String delimiters,
        boolean trimTokens, boolean ignoreEmptyTokens) {
    if (str == null) {
        return EMPTY_STRING_ARRAY;
    }
    final List<String> tokens = new ArrayList<>();
    final StringTokenizer tokenizer = new StringTokenizer(str, delimiters);
    while (tokenizer.hasMoreTokens()) {
        final String raw = tokenizer.nextToken();
        final String candidate = trimTokens ? raw.trim() : raw;
        // Keep the token unless it is empty and empty tokens are to be dropped.
        if (!ignoreEmptyTokens || !candidate.isEmpty()) {
            tokens.add(candidate);
        }
    }
    return toStringArray(tokens);
}
/**
 * Exercises null/empty inputs, trimming, and empty-token filtering. Note that
 * StringTokenizer never emits a token between adjacent delimiters, so "empty" tokens
 * only arise from trimming whitespace-only tokens.
 */
@Test
void testTokenizeToStringArray() {
    // Test case 1: Empty string
    String str1 = "";
    String delimiters1 = ",";
    boolean trimTokens1 = true;
    boolean ignoreEmptyTokens1 = false;
    String[] expected1 = new String[0];
    String[] result1 = StringUtils.tokenizeToStringArray(str1, delimiters1, trimTokens1, ignoreEmptyTokens1);
    assertArrayEquals(expected1, result1);

    // Test case 2: Null string
    String str2 = null;
    String delimiters2 = " ";
    boolean trimTokens2 = false;
    boolean ignoreEmptyTokens2 = true;
    String[] expected2 = new String[0];
    String[] result2 = StringUtils.tokenizeToStringArray(str2, delimiters2, trimTokens2, ignoreEmptyTokens2);
    assertArrayEquals(expected2, result2);

    // Test case 3: Single token
    String str3 = "Hello";
    String delimiters3 = ",";
    boolean trimTokens3 = true;
    boolean ignoreEmptyTokens3 = false;
    String[] expected3 = {"Hello"};
    String[] result3 = StringUtils.tokenizeToStringArray(str3, delimiters3, trimTokens3, ignoreEmptyTokens3);
    assertArrayEquals(expected3, result3);

    // Test case 4: Multiple tokens with trimming
    // The trailing " " token trims to "" and is kept because empties are not ignored.
    String str4 = " Hello, World, ";
    String delimiters4 = ",";
    boolean trimTokens4 = true;
    boolean ignoreEmptyTokens4 = false;
    String[] expected4 = {"Hello", "World", ""};
    String[] result4 = StringUtils.tokenizeToStringArray(str4, delimiters4, trimTokens4, ignoreEmptyTokens4);
    assertArrayEquals(expected4, result4);

    // Test case 5: Multiple tokens with empty tokens ignored
    String str5 = " ,Hello, ,World, ";
    String delimiters5 = ",";
    boolean trimTokens5 = true;
    boolean ignoreEmptyTokens5 = true;
    String[] expected5 = {"Hello", "World"};
    String[] result5 = StringUtils.tokenizeToStringArray(str5, delimiters5, trimTokens5, ignoreEmptyTokens5);
    assertArrayEquals(expected5, result5);
}
/**
 * Looks up a single message by topic and message id; thin delegation to the admin
 * implementation held by the client factory.
 */
public MessageExt viewMessage(String topic, String msgId)
    throws RemotingException, MQBrokerException, InterruptedException, MQClientException {
    return this.mQClientFactory.getMQAdminImpl().viewMessage(topic, msgId);
}
@Test
public void testViewMessage() throws InterruptedException, MQClientException, MQBrokerException, RemotingException {
    // With the test fixture's stubbed admin, the lookup yields no message.
    assertNull(defaultMQPushConsumerImpl.viewMessage(defaultTopic, createMessageExt().getMsgId()));
}
/**
 * Reads an int8 (bigint) array parameter from the packet payload and decodes it.
 *
 * <p>The boolean passed to the decoder is {@code true} when the first byte is not
 * {@code '{'} — presumably distinguishing non-text from text ('{...}') encoding;
 * verify against the decoder's contract.
 */
@Override
public Object read(final PostgreSQLPacketPayload payload, final int parameterValueLength) {
    byte[] bytes = new byte[parameterValueLength];
    payload.getByteBuf().readBytes(bytes);
    // NOTE(review): bytes[0] assumes parameterValueLength > 0 — confirm the protocol
    // guarantees a non-empty value here.
    return ARRAY_PARAMETER_DECODER.decodeInt8Array(bytes, '{' != bytes[0]);
}
@Test
void assertRead() {
    // Text-format array literal: {"11","12"}.
    String parameterValue = "{\"11\",\"12\"}";
    int expectedLength = 4 + parameterValue.length(); // 4-byte length prefix + body
    ByteBuf byteBuf = ByteBufTestUtils.createByteBuf(expectedLength);
    byteBuf.writeInt(parameterValue.length());
    byteBuf.writeCharSequence(parameterValue, StandardCharsets.ISO_8859_1);
    // Consume the length prefix, as the real protocol reader would.
    byteBuf.readInt();
    PostgreSQLPacketPayload payload = new PostgreSQLPacketPayload(byteBuf, StandardCharsets.UTF_8);
    Object actual = newInstance().read(payload, parameterValue.length());
    // The literal decodes to a primitive long array, and the buffer is fully consumed.
    assertThat(actual, is(new long[]{11L, 12L}));
    assertThat(byteBuf.readerIndex(), is(expectedLength));
}
static DynamicState stateMachineStep(DynamicState dynamicState, StaticState staticState) throws Exception { LOG.debug("STATE {}", dynamicState.state); switch (dynamicState.state) { case EMPTY: return handleEmpty(dynamicState, staticState); case RUNNING: return handleRunning(dynamicState, staticState); case WAITING_FOR_WORKER_START: return handleWaitingForWorkerStart(dynamicState, staticState); case KILL_BLOB_UPDATE: return handleKillBlobUpdate(dynamicState, staticState); case KILL_AND_RELAUNCH: return handleKillAndRelaunch(dynamicState, staticState); case KILL: return handleKill(dynamicState, staticState); case WAITING_FOR_BLOB_LOCALIZATION: return handleWaitingForBlobLocalization(dynamicState, staticState); case WAITING_FOR_BLOB_UPDATE: return handleWaitingForBlobUpdate(dynamicState, staticState); default: throw new IllegalStateException("Code not ready to handle a state of " + dynamicState.state); } }
// Drives the slot state machine through a blob-localization failure: the second download
// future throws, and the test asserts the machine recovers (re-requests the download) and
// eventually launches the worker once a later download succeeds. Also verifies that
// clearing the assignment mid-download returns the slot to EMPTY with all pending state reset.
@Test public void testErrorHandlingWhenLocalizationFails() throws Exception { try (SimulatedTime ignored = new SimulatedTime(1010)) { int port = 8080; String topoId = "NEW"; List<ExecutorInfo> execList = mkExecutorInfoList(1, 2, 3, 4, 5); LocalAssignment newAssignment = mkLocalAssignment(topoId, execList, mkWorkerResources(100.0, 100.0, 100.0)); AsyncLocalizer localizer = mock(AsyncLocalizer.class); BlobChangingCallback cb = mock(BlobChangingCallback.class); Container container = mock(Container.class); LocalState state = mock(LocalState.class); ContainerLauncher containerLauncher = mock(ContainerLauncher.class); when(containerLauncher.launchContainer(port, newAssignment, state)).thenReturn(container); LSWorkerHeartbeat hb = mkWorkerHB(topoId, port, execList, Time.currentTimeSecs()); when(container.readHeartbeat()).thenReturn(hb, hb); @SuppressWarnings("unchecked") CompletableFuture<Void> blobFuture = mock(CompletableFuture.class); CompletableFuture<Void> secondBlobFuture = mock(CompletableFuture.class); when(secondBlobFuture.get(anyLong(), any())).thenThrow(new ExecutionException(new RuntimeException("Localization failure"))); CompletableFuture<Void> thirdBlobFuture = mock(CompletableFuture.class); when(localizer.requestDownloadTopologyBlobs(newAssignment, port, cb)) .thenReturn(blobFuture) .thenReturn(secondBlobFuture) .thenReturn(thirdBlobFuture); ISupervisor iSuper = mock(ISupervisor.class); SlotMetrics slotMetrics = new SlotMetrics(new StormMetricsRegistry()); StaticState staticState = new StaticState(localizer, 5000, 120000, 1000, 1000, containerLauncher, "localhost", port, iSuper, state, cb, null, null, slotMetrics); DynamicState dynamicState = new DynamicState(null, null, null, slotMetrics) .withNewAssignment(newAssignment); DynamicState nextState = Slot.stateMachineStep(dynamicState, staticState); verify(localizer).requestDownloadTopologyBlobs(newAssignment, port, cb); assertEquals(MachineState.WAITING_FOR_BLOB_LOCALIZATION, nextState.state); 
assertSame(blobFuture, nextState.pendingDownload, "pendingDownload not set properly"); assertEquals(newAssignment, nextState.pendingLocalization); assertEquals(0, Time.currentTimeMillis()); //Assignment has changed nextState = Slot.stateMachineStep(nextState.withNewAssignment(null), staticState); assertThat(nextState.state, is(MachineState.EMPTY)); assertThat(nextState.pendingChangingBlobs, is(Collections.emptySet())); assertThat(nextState.pendingChangingBlobsAssignment, nullValue()); assertThat(nextState.pendingLocalization, nullValue()); assertThat(nextState.pendingDownload, nullValue()); clearInvocations(localizer); nextState = Slot.stateMachineStep(dynamicState.withNewAssignment(newAssignment), staticState); verify(localizer).requestDownloadTopologyBlobs(newAssignment, port, cb); assertEquals(MachineState.WAITING_FOR_BLOB_LOCALIZATION, nextState.state); assertSame(secondBlobFuture, nextState.pendingDownload, "pendingDownload not set properly"); assertEquals(newAssignment, nextState.pendingLocalization); //Error occurs, but assignment has not changed clearInvocations(localizer); nextState = Slot.stateMachineStep(nextState, staticState); verify(localizer).requestDownloadTopologyBlobs(newAssignment, port, cb); assertEquals(MachineState.WAITING_FOR_BLOB_LOCALIZATION, nextState.state); assertSame(thirdBlobFuture, nextState.pendingDownload, "pendingDownload not set properly"); assertEquals(newAssignment, nextState.pendingLocalization); assertThat(Time.currentTimeMillis(), greaterThan(3L)); nextState = Slot.stateMachineStep(nextState, staticState); verify(thirdBlobFuture).get(1000, TimeUnit.MILLISECONDS); verify(containerLauncher).launchContainer(port, newAssignment, state); assertEquals(MachineState.WAITING_FOR_WORKER_START, nextState.state); assertNull(nextState.pendingDownload, "pendingDownload is not null"); assertNull(nextState.pendingLocalization); assertSame(newAssignment, nextState.currentAssignment); assertSame(container, nextState.container); } }
/**
 * Authenticates a user by name or email.
 *
 * Lookup is by name first, then by email; disabled accounts never authenticate.
 * On success, post-login activities are performed and the user is returned.
 *
 * @param nameOrEmail user name or email address (may be null)
 * @param password plain-text password to verify (may be null)
 * @return the authenticated user, or empty on any failure
 */
public Optional<User> login(String nameOrEmail, String password) {
    if (nameOrEmail == null || password == null) {
        return Optional.empty();
    }
    // Name takes precedence over email when resolving the account.
    User candidate = userDAO.findByName(nameOrEmail);
    if (candidate == null) {
        candidate = userDAO.findByEmail(nameOrEmail);
    }
    if (candidate == null || candidate.isDisabled()) {
        return Optional.empty();
    }
    if (!encryptionService.authenticate(password, candidate.getPassword(), candidate.getSalt())) {
        return Optional.empty();
    }
    performPostLoginActivities(candidate);
    return Optional.of(candidate);
}
// Verifies login returns an empty Optional when the password check fails for an existing user.
@Test void callingLoginShouldNotReturnUserObjectOnUnsuccessfulAuthentication() { Mockito.when(userDAO.findByName("test")).thenReturn(normalUser); Mockito.when(passwordEncryptionService.authenticate(Mockito.anyString(), Mockito.any(byte[].class), Mockito.any(byte[].class))) .thenReturn(false); Optional<User> authenticatedUser = userService.login("test", "password"); Assertions.assertFalse(authenticatedUser.isPresent()); }
// Populates symbol highlighting for one line. The first range-conversion failure is
// recorded and logged once; after that, every call short-circuits and returns the same
// ReadError so symbols are skipped for the rest of the file.
@Override public Optional<ReadError> read(DbFileSources.Line.Builder lineBuilder) { if (readError == null) { try { processSymbols(lineBuilder); } catch (RangeOffsetConverter.RangeOffsetConverterException e) { readError = new ReadError(Data.SYMBOLS, lineBuilder.getLine()); LOG.warn(format("Inconsistency detected in Symbols data. Symbols will be ignored for file '%s'", file.getKey()), e); } } return Optional.ofNullable(readError); }
// Verifies that a range-conversion failure yields a ReadError and a single warning containing the file key.
@Test public void display_file_key_in_warning_when_range_offset_converter_throw_RangeOffsetConverterException() { TextRange declaration = newTextRange(LINE_1, LINE_1, OFFSET_1, OFFSET_3); doThrow(RangeOffsetConverter.RangeOffsetConverterException.class).when(rangeOffsetConverter).offsetToString(declaration, LINE_1, DEFAULT_LINE_LENGTH); SymbolsLineReader symbolsLineReader = newReader(newSymbol(declaration, newSingleLineTextRangeWithExpectedLabel(LINE_2, OFFSET_1, OFFSET_3, RANGE_LABEL_2))); assertThat(symbolsLineReader.read(line1)) .contains(new LineReader.ReadError(SYMBOLS, LINE_1)); assertThat(logTester.logs(WARN)).containsOnly("Inconsistency detected in Symbols data. Symbols will be ignored for file 'FILE_KEY'"); }
// Validates a client-side jar submission. Checks run in a fixed order (each throws on
// failure): not configured for jar-on-member, jar path present, file non-empty, correct
// extension, and valid job parameters.
public static void validateJarOnClient(SubmitJobParameters parameterObject) throws IOException { if (parameterObject.isJarOnMember()) { throw new JetException("SubmitJobParameters is configured for jar on member"); } Path jarPath = parameterObject.getJarPath(); validateJarPathNotNull(jarPath); validateFileSizeIsNotZero(jarPath); validateFileExtension(jarPath); validateJobParameters(parameterObject.getJobParameters()); }
// Verifies that a missing jar file surfaces as NoSuchFileException during validation.
@Test public void noSuchFileException() { SubmitJobParameters parameterObject = SubmitJobParameters.withJarOnClient(); parameterObject.setJarPath(Paths.get("nosuchfile.jar")); assertThatThrownBy(() -> SubmitJobParametersValidator.validateJarOnClient(parameterObject)) .isInstanceOf(NoSuchFileException.class); }
// Builds a BouncyCastle S/MIME signed-data generator for AS2. Signs with the first
// certificate in the chain (must be an X509Certificate), advertises standard SMIME
// encryption capabilities, pins the encryption-key preference to the signing cert's
// issuer/serial, and attaches the full certificate chain to the signature.
// NOTE(review): getIssuerDN() is deprecated; getIssuerX500Principal() is the modern
// replacement — confirm the X500Name produced is identical before switching.
public static AS2SignedDataGenerator createSigningGenerator( AS2SignatureAlgorithm signingAlgorithm, Certificate[] certificateChain, PrivateKey privateKey) throws HttpException { ObjectHelper.notNull(certificateChain, "certificateChain"); if (certificateChain.length == 0 || !(certificateChain[0] instanceof X509Certificate)) { throw new IllegalArgumentException("Invalid certificate chain"); } ObjectHelper.notNull(privateKey, "privateKey"); AS2SignedDataGenerator gen = new AS2SignedDataGenerator(); // Get first certificate in chain for signing X509Certificate signingCert = (X509Certificate) certificateChain[0]; // Create capabilities vector SMIMECapabilityVector capabilities = new SMIMECapabilityVector(); capabilities.addCapability(SMIMECapability.dES_EDE3_CBC); capabilities.addCapability(SMIMECapability.rC2_CBC, 128); capabilities.addCapability(SMIMECapability.dES_CBC); // Create signing attributes ASN1EncodableVector attributes = new ASN1EncodableVector(); attributes.add(new SMIMEEncryptionKeyPreferenceAttribute( new IssuerAndSerialNumber( new X500Name(signingCert.getIssuerDN().getName()), signingCert.getSerialNumber()))); attributes.add(new SMIMECapabilitiesAttribute(capabilities)); SignerInfoGenerator signerInfoGenerator = null; try { signerInfoGenerator = new JcaSimpleSignerInfoGeneratorBuilder().setProvider("BC") .setSignedAttributeGenerator(new AttributeTable(attributes)) .build(signingAlgorithm.getSignatureAlgorithmName(), privateKey, signingCert); } catch (Exception e) { throw new HttpException("Failed to create signer info", e); } gen.addSignerInfoGenerator(signerInfoGenerator); // Create and populate certificate store. try { JcaCertStore certs = new JcaCertStore(Arrays.asList(certificateChain)); gen.addCertificates(certs); } catch (CertificateEncodingException | CMSException e) { throw new HttpException("Failed to add certificate chain to signature", e); } return gen; }
// Signs a message with the generator and verifies the signature against the signing certificate.
@Test public void createSigningGeneratorTest() throws Exception { AS2SignedDataGenerator gen = SigningUtils.createSigningGenerator(AS2SignatureAlgorithm.SHA1WITHRSA, new Certificate[] { signingCert }, signingKP.getPrivate()); CMSProcessableByteArray sData = new CMSProcessableByteArray(MESSAGE.getBytes(StandardCharsets.UTF_8)); CMSSignedData signedData = gen.generate(sData, true); assertTrue(signedData.verifySignatures((SignerId sid) -> { return new JcaSimpleSignerInfoVerifierBuilder().setProvider("BC").build(signingCert); }), "Message was wrongly signed"); }
// REST endpoint: fetches a single organization by its name path variable; delegates to the service layer.
@Operation(summary = "Get single organization") @GetMapping(value = "name/{name}", produces = "application/json") @ResponseBody public Organization getByName(@PathVariable("name") String name) { return organizationService.getOrganizationByName(name); }
// Verifies the controller delegates to the service exactly once and returns its result.
@Test public void getOrganizationByName() { when(organizationServiceMock.getOrganizationByName(anyString())).thenReturn(newOrganization()); Organization result = controllerMock.getByName("test"); assertEquals(newOrganization().getName(), result.getName()); verify(organizationServiceMock, times(1)).getOrganizationByName(anyString()); assertNotNull(result); }
/**
 * Discovers available GPU resources by running the configured discovery script.
 *
 * <p>The script output is a comma-separated list of GPU indexes; blank entries are
 * skipped and each index is trimmed before being wrapped in a {@code GPUInfo}.
 *
 * @param gpuAmount requested GPU count; must be positive
 * @return an unmodifiable set of discovered GPUs (possibly empty)
 * @throws Exception if the discovery script fails
 */
@Override
public Set<GPUInfo> retrieveResourceInfo(long gpuAmount) throws Exception {
    Preconditions.checkArgument(
            gpuAmount > 0,
            "The gpuAmount should be positive when retrieving the GPU resource information.");
    final Set<GPUInfo> discovered = new HashSet<>();
    final String scriptOutput = executeDiscoveryScript(discoveryScriptFile, gpuAmount, args);
    if (!scriptOutput.isEmpty()) {
        for (String rawIndex : scriptOutput.split(",")) {
            if (!StringUtils.isNullOrWhitespaceOnly(rawIndex)) {
                discovered.add(new GPUInfo(rawIndex.trim()));
            }
        }
    }
    LOG.info("Discover GPU resources: {}.", discovered);
    return Collections.unmodifiableSet(discovered);
}
// Verifies that a non-positive GPU amount is rejected with IllegalArgumentException.
@Test void testGPUDriverWithInvalidAmount() throws Exception { final int gpuAmount = -1; final Configuration config = new Configuration(); config.set(GPUDriverOptions.DISCOVERY_SCRIPT_PATH, TESTING_DISCOVERY_SCRIPT_PATH); final GPUDriver gpuDriver = new GPUDriver(config); assertThatThrownBy(() -> gpuDriver.retrieveResourceInfo(gpuAmount)) .isInstanceOf(IllegalArgumentException.class); }
/**
 * Ensures the given argument is non-null.
 *
 * @param obj the value to check
 * @param name the argument name, used in the exception message
 * @throws NullPointerException with message "&lt;name&gt; must not be null" when obj is null
 */
public static void requireNotNull(final Object obj, final String name) {
    if (obj != null) {
        return;
    }
    throw new NullPointerException(name + " must not be null");
}
// Verifies a null argument triggers NullPointerException whose message contains the argument name.
@Test(expectedExceptions = NullPointerException.class, expectedExceptionsMessageRegExp = ".*foo.*") public void testNotNullWithNull() { ArgumentUtil.requireNotNull(null, "foo"); }
// Convenience overload: runs the verification using the class's default logger.
public void execute() { execute(LOG); }
// Smoke test: execute() must complete without throwing when the project has no language.
@Test public void should_not_fail_if_no_language_on_project() { QProfileVerifier profileLogger = new QProfileVerifier(store, profiles); profileLogger.execute(); }
// Authenticates the request, then always writes a small JSON body {"valid": <bool>}
// with the authentication outcome; the writer is closed via try-with-resources.
@Override public void doFilter(HttpRequest request, HttpResponse response, FilterChain filterChain) throws IOException { boolean isAuthenticated = authenticate(request, response); response.setContentType(MediaTypes.JSON); try (JsonWriter jsonWriter = JsonWriter.of(response.getWriter())) { jsonWriter.beginObject(); jsonWriter.prop("valid", isAuthenticated); jsonWriter.endObject(); } }
// Verifies the filter reports valid=false under the default force-authentication configuration.
@Test public void doFilter_whenDefaultForceAuthentication_shouldReturnFalse() throws Exception { underTest.doFilter(request, response, chain); verifyResponseIsFalse(); }
/**
 * Normalizes a namespace name to its canonical form.
 *
 * Resolution order: the app's own namespace first, then a public namespace with the
 * same name; if neither exists, the input name is returned unchanged.
 *
 * @param appId the application id
 * @param namespaceName the raw namespace name
 * @return the canonical namespace name, or the input when no match is found
 */
public String normalizeNamespace(String appId, String namespaceName) {
    AppNamespace match = appNamespaceServiceWithCache.findByAppIdAndNamespace(appId, namespaceName);
    if (match == null) {
        match = appNamespaceServiceWithCache.findPublicNamespaceByName(namespaceName);
    }
    return match == null ? namespaceName : match.getName();
}
// Verifies the public-namespace fallback: when the app-private lookup misses, the
// public namespace's canonical name is returned, and each lookup runs exactly once.
@Test public void testNormalizeNamespaceWithPublicNamespace() throws Exception { String someAppId = "someAppId"; String someNamespaceName = "someNamespaceName"; String someNormalizedNamespaceName = "someNormalizedNamespaceName"; AppNamespace someAppNamespace = mock(AppNamespace.class); when(someAppNamespace.getName()).thenReturn(someNormalizedNamespaceName); when(appNamespaceServiceWithCache.findByAppIdAndNamespace(someAppId, someNamespaceName)).thenReturn(null); when(appNamespaceServiceWithCache.findPublicNamespaceByName(someNamespaceName)).thenReturn(someAppNamespace); assertEquals(someNormalizedNamespaceName, namespaceUtil.normalizeNamespace(someAppId, someNamespaceName)); verify(appNamespaceServiceWithCache, times(1)).findByAppIdAndNamespace(someAppId, someNamespaceName); verify(appNamespaceServiceWithCache, times(1)).findPublicNamespaceByName(someNamespaceName); }
// Returns the shared, pre-built dispatcher metadata instance (no copy is made).
public static NotificationDispatcherMetadata newMetadata() { return METADATA; }
// Verifies the metadata enables the notification at project level (per-project property is "true").
@Test public void reportFailures_notification_is_enable_at_project_level() { NotificationDispatcherMetadata metadata = ReportAnalysisFailureNotificationHandler.newMetadata(); assertThat(metadata.getProperty(PER_PROJECT_NOTIFICATION)).isEqualTo("true"); }
// REST endpoint: checks connectivity for the requested hostnames via the host service.
// NOTE(review): the clusterId path variable is accepted but not used by the check — confirm intent.
@Operation(summary = "Check connection", description = "Check connection for hosts") @PostMapping("/check-connection") public ResponseEntity<Boolean> checkConnection( @PathVariable Long clusterId, @RequestBody @Validated HostnamesReq hostnamesReq) { return ResponseEntity.success(hostService.checkConnection(hostnamesReq.getHostnames())); }
// Verifies the controller wraps the service's boolean result in a successful response.
@Test void checkConnectionReturnsSuccess() { Long clusterId = 1L; HostnamesReq hostnamesReq = new HostnamesReq(); hostnamesReq.setHostnames(Arrays.asList("host1", "host2")); when(hostService.checkConnection(hostnamesReq.getHostnames())).thenReturn(true); ResponseEntity<Boolean> response = hostController.checkConnection(clusterId, hostnamesReq); assertTrue(response.isSuccess()); assertTrue(response.getData()); }
// Template-method hook: concrete subclasses define how the target item is stolen.
protected abstract void stealTheItem(String target);
// Verifies the subclass implementation logs exactly one message with the expected content.
@Test void testStealTheItem() { assertEquals(0, appender.getLogSize()); this.method.stealTheItem(this.expectedTarget); assertEquals(this.expectedStealMethod, appender.getLastMessage()); assertEquals(1, appender.getLogSize()); }
/**
 * Concatenates two arrays into a caller-provided destination array.
 *
 * <p>Copies {@code sourceFirst} into the start of {@code dest}, then appends
 * {@code sourceSecond} immediately after it. {@code dest} must be at least
 * {@code sourceFirst.length + sourceSecond.length} long.
 *
 * @param sourceFirst first source array
 * @param sourceSecond second source array
 * @param dest destination array receiving both sources in order
 */
public static <T> void concat(T[] sourceFirst, T[] sourceSecond, T[] dest) {
    int offset = 0;
    System.arraycopy(sourceFirst, 0, dest, offset, sourceFirst.length);
    offset += sourceFirst.length;
    System.arraycopy(sourceSecond, 0, dest, offset, sourceSecond.length);
}
// Verifies both sources land in order in the destination array.
@Test public void concat() { Integer[] first = new Integer[]{1, 2, 3}; Integer[] second = new Integer[]{4}; Integer[] concatenated = new Integer[4]; ArrayUtils.concat(first, second, concatenated); assertEquals(4, concatenated.length); assertEquals(Integer.valueOf(1), concatenated[0]); assertEquals(Integer.valueOf(2), concatenated[1]); assertEquals(Integer.valueOf(3), concatenated[2]); assertEquals(Integer.valueOf(4), concatenated[3]); }
// Fluent setter: forwards the security manager to the wrapped evaluator builder and returns this.
public ConfigEvaluatorBuilder setSecurityManager(SecurityManager manager) { evaluatorBuilder.setSecurityManager(manager); return this; }
// Verifies preconfigured defaults, then that a custom security manager is stored by reference.
@Test public void setSecurityManager() { var builder = ConfigEvaluatorBuilder.preconfigured(); assertThat(builder.getAllowedModules()).isEqualTo(SecurityManagers.defaultAllowedModules); assertThat(builder.getAllowedResources()).isEqualTo(SecurityManagers.defaultAllowedResources); var manager = SecurityManagers.standard(List.of(), List.of(), SecurityManagers.defaultTrustLevels, null); builder = ConfigEvaluatorBuilder.preconfigured().setSecurityManager(manager); assertThat(builder.getSecurityManager()).isSameAs(manager); }
// Weighted standard deviation using a two-pass algorithm (mean first, then weighted
// squared deviations) to avoid numeric overflow. Returns 0 for 0 or 1 samples.
// NOTE(review): assumes normWeights sum to 1 and is index-aligned with values — confirm upstream.
@Override public double getStdDev() { // two-pass algorithm for variance, avoids numeric overflow if (values.length <= 1) { return 0; } final double mean = getMean(); double variance = 0; for (int i = 0; i < values.length; i++) { final double diff = values[i] - mean; variance += normWeights[i] * diff * diff; } return Math.sqrt(variance); }
// Verifies the empty-snapshot edge case returns a standard deviation of zero.
@Test public void calculatesAStdDevOfZeroForAnEmptySnapshot() { final Snapshot emptySnapshot = new WeightedSnapshot( weightedArray(new long[]{}, new double[]{})); assertThat(emptySnapshot.getStdDev()) .isZero(); }
/**
 * Compares versions lexicographically by major, then minor, then patch component;
 * the first non-zero component comparison decides the result.
 *
 * @param mDbVersion the version to compare against
 * @return negative, zero, or positive per the {@link Comparable} contract
 */
@Override
public int compareTo( MonetDbVersion mDbVersion ) {
    int result = majorVersion.compareTo( mDbVersion.majorVersion );
    if ( result != 0 ) {
        return result;
    }
    result = minorVersion.compareTo( mDbVersion.minorVersion );
    if ( result != 0 ) {
        return result;
    }
    // Patch comparison is the final tiebreaker; no redundant re-check needed.
    return patchVersion.compareTo( mDbVersion.patchVersion );
}
// Verifies that a higher minor version without a patch component still compares greater
// than a lower minor version that has a patch component.
@Test public void testCompareVersions_NoPatch() throws Exception { String dbVersionBigger = "11.18"; String dbVersion = "11.17.17"; assertEquals( 1, new MonetDbVersion( dbVersionBigger ).compareTo( new MonetDbVersion( dbVersion ) ) ); }
// Convenience overload: converts alternating key/value strings into a tag map and delegates.
public MetricName metricInstance(MetricNameTemplate template, String... keyValue) { return metricInstance(template, MetricsUtils.getTags(keyValue)); }
// Verifies the varargs and map overloads produce equal MetricNames, that an odd number
// of key/value strings is rejected, and that inherited (parent config) tags merge with
// runtime tags — with missing or unexpected runtime tags rejected.
@Test public void testMetricInstances() { MetricName n1 = metrics.metricInstance(SampleMetrics.METRIC1, "key1", "value1", "key2", "value2"); Map<String, String> tags = new HashMap<>(); tags.put("key1", "value1"); tags.put("key2", "value2"); MetricName n2 = metrics.metricInstance(SampleMetrics.METRIC2, tags); assertEquals(n1, n2, "metric names created in two different ways should be equal"); try { metrics.metricInstance(SampleMetrics.METRIC1, "key1"); fail("Creating MetricName with an odd number of keyValue should fail"); } catch (IllegalArgumentException e) { // this is expected } Map<String, String> parentTagsWithValues = new HashMap<>(); parentTagsWithValues.put("parent-tag", "parent-tag-value"); Map<String, String> childTagsWithValues = new HashMap<>(); childTagsWithValues.put("child-tag", "child-tag-value"); try (Metrics inherited = new Metrics(new MetricConfig().tags(parentTagsWithValues), singletonList(new JmxReporter()), time, true)) { MetricName inheritedMetric = inherited.metricInstance(SampleMetrics.METRIC_WITH_INHERITED_TAGS, childTagsWithValues); Map<String, String> filledOutTags = inheritedMetric.tags(); assertEquals(filledOutTags.get("parent-tag"), "parent-tag-value", "parent-tag should be set properly"); assertEquals(filledOutTags.get("child-tag"), "child-tag-value", "child-tag should be set properly"); try { inherited.metricInstance(SampleMetrics.METRIC_WITH_INHERITED_TAGS, parentTagsWithValues); fail("Creating MetricName should fail if the child metrics are not defined at runtime"); } catch (IllegalArgumentException e) { // this is expected } try { Map<String, String> runtimeTags = new HashMap<>(); runtimeTags.put("child-tag", "child-tag-value"); runtimeTags.put("tag-not-in-template", "unexpected-value"); inherited.metricInstance(SampleMetrics.METRIC_WITH_INHERITED_TAGS, runtimeTags); fail("Creating MetricName should fail if there is a tag at runtime that is not in the template"); } catch (IllegalArgumentException e) { // this is expected } } }
/**
 * Runs every registered post-measures check against a single shared context.
 *
 * <p>Checks execute in registration order; any exception from a check propagates
 * and stops the remaining checks.
 */
@Override
public void execute(ComputationStep.Context context) {
    PostMeasuresComputationCheck.Context checkContext = new ContextImpl();
    for (PostMeasuresComputationCheck check : extensions) {
        check.onCheck(checkContext);
    }
}
// Verifies a throwing check propagates its exception and prevents later checks from running.
@Test public void fail_if_an_extension_throws_an_exception() { PostMeasuresComputationCheck check1 = mock(PostMeasuresComputationCheck.class); PostMeasuresComputationCheck check2 = mock(PostMeasuresComputationCheck.class); doThrow(new IllegalStateException("BOOM")).when(check2).onCheck(any(Context.class)); PostMeasuresComputationCheck check3 = mock(PostMeasuresComputationCheck.class); try { newStep(check1, check2, check3).execute(new TestComputationStepContext()); fail(); } catch (IllegalStateException e) { assertThat(e).hasMessage("BOOM"); verify(check1).onCheck(any(Context.class)); verify(check3, never()).onCheck(any(Context.class)); } }
// Thin delegate: opens a parsing stream positioned at the given JSON Pointer within each document.
public Stream openWithOffsetInJsonPointer(InputStream in, String offsetInJsonPointer) throws IOException { return this.delegate.openWithOffsetInJsonPointer(in, offsetInJsonPointer); }
// Verifies that concatenated JSON documents yield one value per document at the pointer,
// then null once the stream is exhausted.
@Test public void testParseMultipleJsonsWithPointer() throws Exception { final JsonParser parser = new JsonParser(); final String multipleJsons = "{\"a\": {\"b\": 1}}{\"a\": {\"b\": 2}}"; try (JsonParser.Stream stream = parser.openWithOffsetInJsonPointer(toInputStream(multipleJsons), "/a/b")) { assertEquals(1, stream.next().asIntegerValue().asInt()); assertEquals(2, stream.next().asIntegerValue().asInt()); assertNull(stream.next()); } }
/**
 * Current consumption position of this image.
 *
 * @return the frozen final position once closed, otherwise the live subscriber position
 */
public long position() {
    return isClosed ? finalPosition : subscriberPosition.get();
}
// Verifies position() reflects the set counter value and that seeking past the term end is rejected.
@Test void shouldNotAdvancePastEndOfTerm() { final Image image = createImage(); final long expectedPosition = TERM_BUFFER_LENGTH - 32; position.setOrdered(expectedPosition); assertThat(image.position(), is(expectedPosition)); assertThrows(IllegalArgumentException.class, () -> image.position(TERM_BUFFER_LENGTH + 32)); }
@Override public List<Service> getServiceDefinitions() throws MockRepositoryImportException { List<Service> result = new ArrayList<>(); // Build a new service. Service service = new Service(); JsonNode metadataNode = spec.get("metadata"); if (metadataNode == null) { log.error("Missing mandatory metadata in {}", spec.asText()); throw new MockRepositoryImportException("Mandatory metadata property is missing in APIMetadata"); } service.setName(metadataNode.path("name").asText()); service.setVersion(metadataNode.path("version").asText()); Metadata metadata = new Metadata(); MetadataExtractor.completeMetadata(metadata, metadataNode); service.setMetadata(metadata); // Then build its operations. service.setOperations(extractOperations()); result.add(service); return result; }
// Imports a sample APIMetadata file and verifies service name/version, labels, and the
// single operation's dispatcher configuration and default delay.
@Test void testAPIMetadataImport() { MetadataImporter importer = null; try { importer = new MetadataImporter( "target/test-classes/io/github/microcks/util/metadata/hello-grpc-v1-metadata.yml"); } catch (IOException ioe) { fail("Exception should not be thrown"); } // Check that basic service properties are there. List<Service> services = null; try { services = importer.getServiceDefinitions(); } catch (MockRepositoryImportException e) { fail("Exception should not be thrown"); } assertEquals(1, services.size()); Service service = services.get(0); assertEquals("HelloService", service.getName()); assertEquals("v1", service.getVersion()); assertEquals(3, service.getMetadata().getLabels().size()); assertEquals("greeting", service.getMetadata().getLabels().get("domain")); assertEquals("stable", service.getMetadata().getLabels().get("status")); assertEquals("Team A", service.getMetadata().getLabels().get("team")); assertEquals(1, service.getOperations().size()); Operation operation = service.getOperations().get(0); assertEquals("POST /greeting", operation.getName()); assertEquals(Long.valueOf(100), operation.getDefaultDelay()); assertEquals(DispatchStyles.JSON_BODY, operation.getDispatcher()); assertNotNull(operation.getDispatcherRules()); }
// A connection "uses buckets" when its type supports buckets AND no resolved root path
// is configured (a root path bypasses bucket-level navigation).
public boolean usesBuckets( @NonNull VFSConnectionDetails details ) throws KettleException { return details.hasBuckets() && getResolvedRootPath( details ) == null; }
// Verifies a connection with buckets but a configured root path does NOT use buckets.
@Test public void testUsesBucketsReturnsFalseIfHasBucketsAndRootPath() throws KettleException { when( vfsConnectionDetails.hasBuckets() ).thenReturn( true ); assertFalse( vfsConnectionManagerHelper.usesBuckets( vfsConnectionDetails ) ); }
// Appends an index entry (relative offset, position, log type) for logIndex under the write lock.
// NOTE(review): monotonically-increasing logIndex is enforced only via `assert`, which is a
// no-op unless the JVM runs with -ea — consider an explicit check if this is a hard invariant.
public int appendIndex(final long logIndex, final int position, final byte logType) { this.writeLock.lock(); try { assert (logIndex > getLastLogIndex()); final byte[] writeData = encodeData(toRelativeOffset(logIndex), position, logType); return doAppend(logIndex, writeData); } finally { this.writeLock.unlock(); } }
// Appends three entries, flushes, and verifies the last log index matches the final entry.
@Test public void testAppendIndex() { this.offsetIndex.appendIndex(appendEntry0.getOffset(), appendEntry0.getPosition(), segmentIndex); this.offsetIndex.appendIndex(appendEntry1.getOffset(), appendEntry1.getPosition(), segmentIndex); this.offsetIndex.appendIndex(appendEntry2.getOffset(), appendEntry2.getPosition(), segmentIndex); this.offsetIndex.flush(); assertEquals(this.offsetIndex.getLastLogIndex(), appendEntry2.getOffset()); }
// Convenience overload: fills in the configured context, the agent's log recording id,
// and the initial leadership-term/base-position fields before delegating.
private void ensureRecordingLogCoherent( final long leadershipTermId, final long termBaseLogPosition, final long logPosition, final long nowNs) { ensureRecordingLogCoherent( ctx, consensusModuleAgent.logRecordingId(), initialLogLeadershipTermId, initialTermBaseLogPosition, leadershipTermId, termBaseLogPosition, logPosition, nowNs); }
// Verifies that with a NULL recording id the recording log is never touched, regardless of log position.
@Test void shouldThrowNonZeroLogPositionAndNullRecordingIdSpecified() { Election.ensureRecordingLogCoherent(ctx, NULL_POSITION, 0, 0, 0, 0, 0, 1); Election.ensureRecordingLogCoherent(ctx, NULL_POSITION, 0, 0, 0, 0, 1000, 1); verifyNoInteractions(recordingLog); }
// Open-addressed (linear-probing) lookup over a packed long[] table keyed by fromOrdinal.
// Each slot packs the key in the low 32 bits. If the slot's sign bit is set, the upper
// bits (minus the sign bit, shifted down 32) index into pivotedToOrdinalClusters;
// otherwise the mapped ordinal is stored directly in the upper 32 bits. Returns -1 when
// fromOrdinal is absent (empty slots hold -1L).
// NOTE(review): probing assumes fromOrdinalsMap.length is a power of two — confirm at construction.
public int getIdentityFromOrdinal(int fromOrdinal) { int hashCode = HashCodes.hashInt(fromOrdinal); int bucket = hashCode & (fromOrdinalsMap.length - 1); while(fromOrdinalsMap[bucket] != -1L) { if((int)fromOrdinalsMap[bucket] == fromOrdinal) { if((fromOrdinalsMap[bucket] & Long.MIN_VALUE) != 0L) return pivotedToOrdinalClusters.get((int)((fromOrdinalsMap[bucket] & Long.MAX_VALUE) >> 32)); return (int)(fromOrdinalsMap[bucket] >> 32); } bucket = (bucket + 1) & (fromOrdinalsMap.length - 1); } return -1; }
// Verifies several known ordinal mappings and that an unmapped ordinal returns -1.
@Test public void testFromIdentityOrdinals() { Assert.assertEquals(1, map.getIdentityFromOrdinal(1)); Assert.assertEquals(4, map.getIdentityFromOrdinal(2)); Assert.assertEquals(7, map.getIdentityFromOrdinal(3)); Assert.assertEquals(5025, map.getIdentityFromOrdinal(100)); Assert.assertEquals(-1, map.getIdentityFromOrdinal(200)); }
// Converts any Java object into a Jackson tree (JsonNode) using the shared mapper.
public static JsonNode transferToJsonNode(Object obj) { return mapper.valueToTree(obj); }
// Verifies map and POJO inputs convert to trees with the expected field values.
@Test void testTransferToJsonNode() { JsonNode jsonNode1 = JacksonUtils.transferToJsonNode(Collections.singletonMap("key", "value")); assertEquals("value", jsonNode1.get("key").asText()); JsonNode jsonNode2 = JacksonUtils.transferToJsonNode(new TestOfAtomicObject()); assertEquals("0", jsonNode2.get("aLong").asText()); assertEquals("1", jsonNode2.get("aInteger").asText()); assertEquals("false", jsonNode2.get("aBoolean").asText()); }
// Syncs the target cluster with the source via snapshot diffs: runs pre-sync checks,
// computes diffs, then applies renames/deletions through a temporary directory. Any
// failure logs a warning and returns false so the caller can fall back to a full copy.
// Regardless of outcome, the temp dir is removed and the source path is rewritten to
// the "to" snapshot path for the subsequent copy phase.
public boolean sync() throws IOException { if (!preSyncCheck()) { return false; } if (!getAllDiffs()) { return false; } List<Path> sourcePaths = context.getSourcePaths(); final Path sourceDir = sourcePaths.get(0); final Path targetDir = context.getTargetPath(); final FileSystem tfs = targetDir.getFileSystem(conf); Path tmpDir = null; try { tmpDir = createTargetTmpDir(tfs, targetDir); DiffInfo[] renameAndDeleteDiffs = getRenameAndDeleteDiffsForSync(targetDir); if (renameAndDeleteDiffs.length > 0) { // do the real sync work: deletion and rename syncDiff(renameAndDeleteDiffs, tfs, tmpDir); } return true; } catch (Exception e) { DistCp.LOG.warn("Failed to use snapshot diff for distcp", e); return false; } finally { deleteTargetTmpDir(tfs, tmpDir); // TODO: since we have tmp directory, we can support "undo" with failures // set the source path using the snapshot path context.setSourcePaths(Arrays.asList(getSnapshotPath(sourceDir, context.getToSnapshot()))); } }
// Verifies that when diffing against the current state (".") the source path is left unchanged after sync.
@Test public void testSyncWithCurrent() throws Exception { final DistCpOptions options = new DistCpOptions.Builder( Collections.singletonList(source), target) .withSyncFolder(true) .withUseDiff("s1", ".") .build(); context = new DistCpContext(options); initData(source); initData(target); enableAndCreateFirstSnapshot(); // make changes under source changeData(dfs, source); // do the sync sync(); // make sure the source path is still unchanged Assert.assertEquals(source, context.getSourcePaths().get(0)); }
// Merges workflow parameters in increasing precedence order: system defaults and the
// workflow definition (fresh runs only), the previous instance's params minus reserved
// names (restarts only), the request's run params, and finally any user-provided restart
// params. Placeholder params are cleaned before converting definitions to Parameters.
public Map<String, Parameter> generateMergedWorkflowParams( WorkflowInstance instance, RunRequest request) { Workflow workflow = instance.getRuntimeWorkflow(); Map<String, ParamDefinition> allParamDefs = new LinkedHashMap<>(); Map<String, ParamDefinition> defaultWorkflowParams = defaultParamManager.getDefaultWorkflowParams(); // merge workflow params for start if (request.isFreshRun()) { // merge default workflow params ParamsMergeHelper.mergeParams( allParamDefs, defaultWorkflowParams, ParamsMergeHelper.MergeContext.workflowCreate(ParamSource.SYSTEM_DEFAULT, request)); // merge defined workflow params if (workflow.getParams() != null) { ParamsMergeHelper.mergeParams( allParamDefs, workflow.getParams(), ParamsMergeHelper.MergeContext.workflowCreate(ParamSource.DEFINITION, request)); } } // merge workflow params from previous instance for restart if (!request.isFreshRun() && instance.getParams() != null) { Map<String, ParamDefinition> previousParamDefs = instance.getParams().entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().toDefinition())); // remove reserved params, which should be injected again by the system. 
for (String paramName : Constants.RESERVED_PARAM_NAMES) { previousParamDefs.remove(paramName); } ParamsMergeHelper.mergeParams( allParamDefs, previousParamDefs, ParamsMergeHelper.MergeContext.workflowCreate(ParamSource.SYSTEM, false)); } // merge run params if (request.getRunParams() != null) { ParamSource source = getParamSource(request.getInitiator(), request.isFreshRun()); ParamsMergeHelper.mergeParams( allParamDefs, request.getRunParams(), ParamsMergeHelper.MergeContext.workflowCreate(source, request)); } // merge user provided restart run params getUserRestartParam(request) .ifPresent( userRestartParams -> { ParamSource source = getParamSource(request.getInitiator(), request.isFreshRun()); ParamsMergeHelper.mergeParams( allParamDefs, userRestartParams, ParamsMergeHelper.MergeContext.workflowCreate(source, request)); }); // cleanup any placeholder params and convert to params return ParamsMergeHelper.convertToParameters(ParamsMergeHelper.cleanupParams(allParamDefs)); }
// Verifies that on a RESTART_FROM_SPECIFIC run, a restart-config param overrides the
// previous instance's value and the merged param is tagged with the RESTART source.
@Test public void testRestartConfigRunChangedParamMerge() { Map<String, Object> meta = Collections.singletonMap(Constants.METADATA_SOURCE_KEY, "SYSTEM_DEFAULT"); LongParameter param = LongParameter.builder() .name("TARGET_RUN_DATE") .value(1000L) .evaluatedResult(1000L) .evaluatedTime(123L) .mode(ParamMode.MUTABLE_ON_START_RESTART) .meta(meta) .build(); Map<String, ParamDefinition> restartParams = singletonMap( "TARGET_RUN_DATE", LongParamDefinition.builder() .name("TARGET_RUN_DATE") .value(1001L) .mode(ParamMode.MUTABLE_ON_START_RESTART) .build()); RunRequest request = RunRequest.builder() .initiator(new ManualInitiator()) .currentPolicy(RunPolicy.RESTART_FROM_SPECIFIC) .restartConfig( RestartConfig.builder() .addRestartNode("sample-wf-map-params", 1, "foo") .restartParams(restartParams) .build()) .build(); Map<String, Parameter> instanceParams = new LinkedHashMap<>(); instanceParams.put("TARGET_RUN_DATE", param); workflowInstance.setParams(instanceParams); Map<String, Parameter> workflowParams = paramsManager.generateMergedWorkflowParams(workflowInstance, request); Assert.assertFalse(workflowParams.isEmpty()); Assert.assertEquals( Long.valueOf(1001L), workflowParams.get("TARGET_RUN_DATE").asLongParam().getValue()); Assert.assertEquals(ParamSource.RESTART, workflowParams.get("TARGET_RUN_DATE").getSource()); }
// Entry point for the fluent query builder: starts a SELECT * over all sources,
// scoped to the given field name.
public static Field p(String fieldName) { return SELECT_ALL_FROM_SOURCES_ALL.where(fieldName); }
// Verifies that phrase/near/onear/equiv builders all reject an empty value list.
@Test void contains_phrase_near_onear_equiv_empty_list_should_throw_illegal_argument_exception() { assertThrows(IllegalArgumentException.class, () -> Q.p("f1").containsPhrase(List.of()) .build()); assertThrows(IllegalArgumentException.class, () -> Q.p("f1").containsNear(List.of()) .build()); assertThrows(IllegalArgumentException.class, () -> Q.p("f1").containsOnear(List.of()) .build()); assertThrows(IllegalArgumentException.class, () -> Q.p("f1").containsEquiv(List.of()) .build()); }
// Applies the symbolic POSIX permission from the transfer status to the local file.
// Both invalid permission strings (IllegalArgumentException) and I/O failures are
// mapped onto a BackgroundException via the local exception mapping service.
@Override public void setUnixPermission(final Path file, final TransferStatus status) throws BackgroundException { try { Files.setPosixFilePermissions(session.toPath(file), PosixFilePermissions.fromString(status.getPermission().getSymbol())); } catch(IllegalArgumentException e) { throw new LocalExceptionMappingService().map("Failure to write attributes of {0}", new IOException(e), file); } catch(IOException e) { throw new LocalExceptionMappingService().map("Failure to write attributes of {0}", e, file); } }
// Round-trips a unix permission through the local filesystem: sets mode 666 on
// a fresh file and directory and verifies the listed permission. Skipped
// entirely on non-POSIX filesystems.
@Test
public void testSetUnixPermission() throws Exception {
    final LocalSession session = new LocalSession(new Host(new LocalProtocol(), new LocalProtocol().getDefaultHostname()));
    if(session.isPosixFilesystem()) {
        assertNotNull(session.open(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback()));
        assertTrue(session.isConnected());
        assertNotNull(session.getClient());
        session.login(new DisabledLoginCallback(), new DisabledCancelCallback());
        final Path workdir = new LocalHomeFinderFeature().find();
        {
            // File case
            final Path file = new Path(workdir, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file));
            new LocalTouchFeature(session).touch(file, new TransferStatus());
            new LocalUnixPermissionFeature(session).setUnixPermission(file, new Permission(666));
            assertEquals("666", new LocalListService(session).list(workdir, new DisabledListProgressListener()).get(file).attributes().getPermission().getMode());
            new LocalDeleteFeature(session).delete(Collections.<Path>singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
        }
        {
            // Directory case
            final Path directory = new Path(workdir, UUID.randomUUID().toString(), EnumSet.of(Path.Type.directory));
            new LocalDirectoryFeature(session).mkdir(directory, new TransferStatus());
            new LocalUnixPermissionFeature(session).setUnixPermission(directory, new Permission(666));
            assertEquals("666", new LocalListService(session).list(workdir, new DisabledListProgressListener()).get(directory).attributes().getPermission().getMode());
            new LocalDeleteFeature(session).delete(Collections.<Path>singletonList(directory), new DisabledLoginCallback(), new Delete.DisabledCallback());
        }
        session.close();
    }
}
/**
 * Finds the counter that owns the given request, based on its request id.
 *
 * @param request the counter request to locate
 * @return the owning counter, or null when no counter claims the request id
 */
public Counter getCounterByRequestId(CounterRequest request) {
    final String requestId = request.getId();
    return counters.stream()
            .filter(candidate -> candidate.isRequestIdFromThisCounter(requestId))
            .findFirst()
            .orElse(null);
}
// Lookup by request id must find the owning counter and return null for a
// request id belonging to no registered counter.
@Test
public void testGetCounterByRequestId() {
    final Counter counter = createCounter();
    final Collector collector = new Collector("test collector3", Collections.singletonList(counter));
    counter.addRequest("test request", 0, 0, 0, false, 1000);
    final CounterRequest request = counter.getRequests().get(0);
    assertEquals("getCounterByRequestId", counter, collector.getCounterByRequestId(request));
    assertNull("getCounterByRequestId", collector.getCounterByRequestId(new CounterRequest("test", "unknown")));
}
/** Returns the payload length in bytes, or 0 when there is no payload. */
@Override
public int totalSize() {
    if (payload == null) {
        return 0;
    }
    return payload.length;
}
// totalSize of non-empty heap data equals its payload length.
@Test
public void totalSize_whenNonEmpty() {
    HeapData heapData = new HeapData(new byte[10]);
    assertEquals(10, heapData.totalSize());
}
/**
 * Coerces a JSON node to an int: numeric nodes are narrowed via intValue(),
 * text nodes are parsed as base-10 integers; anything else is an invalid
 * conversion. The parse-failure exception deliberately omits the input text
 * so potentially sensitive values are not leaked into messages or logs.
 */
static int toInteger(final JsonNode object) {
    if (object instanceof NumericNode) {
        return object.intValue();
    }
    if (object instanceof TextNode) {
        try {
            return Integer.parseInt(object.textValue());
        } catch (final NumberFormatException e) {
            // Intentionally does not include the offending text in the error
            throw failedStringCoercionException(SqlBaseType.INTEGER);
        }
    }
    throw invalidConversionException(object, SqlBaseType.INTEGER);
}
// The coercion failure for a text node must not leak the input value into the
// exception message or stack trace.
@Test
public void shouldNotIncludeValueInExceptionWhenFailingToInteger() {
    try {
        // When:
        JsonSerdeUtils.toInteger(JsonNodeFactory.instance.textNode("personal info: do not log me"));
        fail("Invalid test: should throw");
    } catch (final Exception e) {
        assertThat(ExceptionUtils.getStackTrace(e), not(containsString("personal info")));
    }
}
/** Dispatches a SHOW statement to the global show-executor visitor. */
public static ShowResultSet execute(ShowStmt statement, ConnectContext context) {
    return GlobalStateMgr.getCurrentState().getShowExecutor().showExecutorVisitor.visit(statement, context);
}
// Mocks the external basic-stats metadata map and verifies the rows returned
// by the SHOW statement (catalog.db, table, column marker, analyze type).
@Test
public void testShowBasicStatsMeta() throws Exception {
    new MockUp<AnalyzeMgr>() {
        @Mock
        public Map<AnalyzeMgr.StatsMetaKey, ExternalBasicStatsMeta> getExternalBasicStatsMetaMap() {
            Map<AnalyzeMgr.StatsMetaKey, ExternalBasicStatsMeta> map = new HashMap<>();
            map.put(new AnalyzeMgr.StatsMetaKey("hive0", "testDb", "testTable"),
                new ExternalBasicStatsMeta("hive0", "testDb", "testTable", null,
                    StatsConstants.AnalyzeType.FULL, LocalDateTime.now(), Maps.newHashMap()));
            return map;
        }
    };
    ctx.setCurrentUserIdentity(UserIdentity.ROOT);
    ShowBasicStatsMetaStmt stmt = new ShowBasicStatsMetaStmt(null);
    ShowResultSet resultSet = ShowExecutor.execute(stmt, ctx);
    Assert.assertEquals("hive0.testDb", resultSet.getResultRows().get(0).get(0));
    Assert.assertEquals("testTable", resultSet.getResultRows().get(0).get(1));
    Assert.assertEquals("ALL", resultSet.getResultRows().get(0).get(2));
    Assert.assertEquals("FULL", resultSet.getResultRows().get(0).get(3));
}
/**
 * Decides whether this read-many operation must park and wait for more items.
 * Lazily creates the result set on first invocation, clamps the read sequence
 * into the ringbuffer's valid bounds, and eagerly reads whatever is available.
 *
 * @return true when fewer than minSize items can currently be read
 */
@Override
public boolean shouldWait() {
    RingbufferContainer ringbuffer = getRingBufferContainerOrNull();
    if (resultSet == null) {
        // First invocation: initialize the result set and the start position.
        resultSet = new ReadResultSetImpl<>(minSize, maxSize, getNodeEngine().getSerializationService(), filter);
        sequence = startSequence;
    }
    if (ringbuffer == null) {
        // No ringbuffer container yet: wait only when a minimum is required.
        return minSize > 0;
    }
    sequence = ringbuffer.clampReadSequenceToBounds(sequence);
    if (minSize == 0) {
        // No minimum: read what is available (if anything) and never wait.
        if (sequence < ringbuffer.tailSequence() + 1) {
            readMany(ringbuffer);
        }
        return false;
    }
    if (resultSet.isMinSizeReached()) {
        // enough items have been read, we are done.
        return false;
    }
    if (sequence == ringbuffer.tailSequence() + 1) {
        // the sequence is not readable
        return true;
    }
    readMany(ringbuffer);
    return !resultSet.isMinSizeReached();
}
// With enough items already in the ringbuffer, shouldWait() returns false and
// the result set holds maxCount items starting at the requested sequence.
@Test
public void whenEnoughItemsAvailable() {
    long startSequence = ringbuffer.tailSequence() + 1;
    ReadManyOperation op = getReadManyOperation(startSequence, 1, 3, null);
    ringbuffer.add("item1");
    ringbuffer.add("item2");
    ringbuffer.add("item3");
    ringbuffer.add("item4");
    ringbuffer.add("item5");
    assertFalse(op.shouldWait());
    ReadResultSetImpl response = getReadResultSet(op);
    assertEquals(startSequence + 3, op.sequence);
    assertEquals(asList("item1", "item2", "item3"), response);
    assertEquals(3, response.readCount());
    assertEquals(3, response.getNextSequenceToReadFrom());
}
/**
 * Computes the edit distance between two strings, dispatching on configuration:
 * a weighted edit distance when a weight matrix is set; otherwise plain
 * Damerau or Levenshtein when no FKP table exists or either input is a single
 * character; otherwise the br(...) implementation for the general case.
 */
@Override
public double d(String a, String b) {
    if (weight != null)
        return weightedEdit(a, b);
    else if (FKP == null || a.length() == 1 || b.length() == 1)
        return damerau ? damerau(a, b) : levenshtein(a, b);
    else
        return br(a, b);
}
// Smoke/speed test: repeatedly computes the advanced Damerau distance between
// two long strings; makes no assertions, just exercises the code path.
@Test
public void testDamerauSpeedTest() {
    System.out.println("Advanced Damerau speed test");
    EditDistance edit = new EditDistance(Math.max(H1N1.length(), H1N5.length()), true);
    for (int i = 0; i < 100; i++) {
        edit.d(H1N1, H1N5);
    }
}
/**
 * Drives coordinator-related work: heartbeat liveness, coordinator discovery,
 * rebalances (for auto-assigned partitions), metadata refresh (for manual
 * assignment), and async auto-commit of offsets.
 *
 * @param timer bounds the time spent in blocking calls
 * @param waitForJoinGroup whether to block waiting for the JoinGroup to complete
 * @return false if coordinator discovery or the group join did not complete
 */
public boolean poll(Timer timer, boolean waitForJoinGroup) {
    maybeUpdateSubscriptionMetadata();
    invokeCompletedOffsetCommitCallbacks();
    if (subscriptions.hasAutoAssignedPartitions()) {
        if (protocol == null) {
            // Auto-assignment requires at least one configured partition assignor.
            throw new IllegalStateException("User configured " + ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG + " to empty while trying to subscribe for group protocol to auto assign partitions");
        }
        // Always update the heartbeat last poll time so that the heartbeat thread does not leave the
        // group proactively due to application inactivity even if (say) the coordinator cannot be found.
        pollHeartbeat(timer.currentTimeMs());
        if (coordinatorUnknownAndUnreadySync(timer)) {
            return false;
        }
        if (rejoinNeededOrPending()) {
            // due to a race condition between the initial metadata fetch and the initial rebalance,
            // we need to ensure that the metadata is fresh before joining initially. This ensures
            // that we have matched the pattern against the cluster's topics at least once before joining.
            if (subscriptions.hasPatternSubscription()) {
                // For consumer group that uses pattern-based subscription, after a topic is created,
                // any consumer that discovers the topic after metadata refresh can trigger rebalance
                // across the entire consumer group. Multiple rebalances can be triggered after one topic
                // creation if consumers refresh metadata at vastly different times. We can significantly
                // reduce the number of rebalances caused by single topic creation by asking consumer to
                // refresh metadata before re-joining the group as long as the refresh backoff time has
                // passed.
                if (this.metadata.timeToAllowUpdate(timer.currentTimeMs()) == 0) {
                    this.metadata.requestUpdate(true);
                }
                if (!client.ensureFreshMetadata(timer)) {
                    return false;
                }
                maybeUpdateSubscriptionMetadata();
            }
            // if not wait for join group, we would just use a timer of 0
            if (!ensureActiveGroup(waitForJoinGroup ? timer : time.timer(0L))) {
                // since we may use a different timer in the callee, we'd still need
                // to update the original timer's current time after the call
                timer.update(time.milliseconds());
                return false;
            }
        }
    } else {
        // For manually assigned partitions, we do not try to pro-actively lookup coordinator;
        // instead we only try to refresh metadata when necessary.
        // If connections to all nodes fail, wakeups triggered while attempting to send fetch
        // requests result in polls returning immediately, causing a tight loop of polls. Without
        // the wakeup, poll() with no channels would block for the timeout, delaying re-connection.
        // awaitMetadataUpdate() in ensureCoordinatorReady initiates new connections with configured backoff and avoids the busy loop.
        if (metadata.updateRequested() && !client.hasReadyNodes(timer.currentTimeMs())) {
            client.awaitMetadataUpdate(timer);
        }
        // if there is pending coordinator requests, ensure they have a chance to be transmitted.
        client.pollNoWakeup();
    }
    maybeAutoCommitOffsetsAsync(timer.currentTimeMs());
    return true;
}
// With user-assigned partitions, poll() should only trigger coordinator
// discovery when an auto-commit becomes due, then send the commit once the
// coordinator is known.
@Test
public void testAutoCommitAsyncWithUserAssignedType() {
    try (ConsumerCoordinator coordinator = buildCoordinator(rebalanceConfig, new Metrics(), assignors, true, subscriptions)) {
        subscriptions.assignFromUser(Collections.singleton(t1p));
        // set timeout to 0 because we expect no requests sent
        coordinator.poll(time.timer(0));
        assertTrue(coordinator.coordinatorUnknown());
        assertFalse(client.hasInFlightRequests());
        // elapse auto commit interval and set committable position
        time.sleep(autoCommitIntervalMs);
        subscriptions.seekUnvalidated(t1p, new SubscriptionState.FetchPosition(100L));
        // should try to find coordinator since we are auto committing
        coordinator.poll(time.timer(0));
        assertTrue(coordinator.coordinatorUnknown());
        assertTrue(client.hasInFlightRequests());
        client.respond(groupCoordinatorResponse(node, Errors.NONE));
        coordinator.poll(time.timer(0));
        assertFalse(coordinator.coordinatorUnknown());
        // after we've discovered the coordinator we should send
        // out the commit request immediately
        assertTrue(client.hasInFlightRequests());
    }
}
/** Shuts down the heartbeat handler and the RM proxy relayer. */
public void shutDownConnections() {
    this.heartbeatHandler.shutdown();
    this.rmProxyRelayer.shutdown();
}
// After shutDownConnections() the UAM heartbeat thread must terminate; poll
// until it does, bounded by the 10s test timeout.
@Test(timeout = 10000)
public void testShutDownConnections() throws YarnException, IOException, InterruptedException {
    launchUAM(attemptId);
    registerApplicationMaster(RegisterApplicationMasterRequest.newInstance(null, 0, null), attemptId);
    uam.shutDownConnections();
    while (uam.isHeartbeatThreadAlive()) {
        LOG.info("waiting for heartbeat thread to finish");
        Thread.sleep(100);
    }
}
/** Returns a binary combine function computing the maximum of double values. */
public static Combine.BinaryCombineDoubleFn ofDoubles() {
    return new Max.MaxDoubleFn();
}
// NaN propagates: combining a set that contains NaN yields NaN, even when
// positive infinity is also present.
@Test
public void testMaxDoubleFnNan() {
    testCombineFn(
        Max.ofDoubles(),
        Lists.newArrayList(Double.NaN, 2.0, 3.0, Double.POSITIVE_INFINITY),
        Double.NaN);
}
/**
 * Renders the LZXC control data fields for debugging as "label:=value" pairs
 * separated by ", " or line separators. Output is unchanged from the previous
 * implementation; the repeated System.getProperty("line.separator") lookups
 * are hoisted into System.lineSeparator(), which returns the same value.
 */
@Override
public String toString() {
    final String ls = System.lineSeparator();
    StringBuilder sb = new StringBuilder();
    sb.append("size(unknown):=")
        .append(this.getSize())
        .append(", ");
    sb.append("signature(Compression type identifier):=")
        .append(new String(this.getSignature(), UTF_8))
        .append(", ");
    sb.append("version(Possibly numeric code for LZX):=")
        .append(this.getVersion())
        .append(ls);
    sb.append("resetInterval(The Huffman reset interval):=")
        .append(this.getResetInterval())
        .append(", ");
    sb.append("windowSize:=")
        .append(this.getWindowSize())
        .append(", ");
    sb.append("windowsPerReset(unknown (sometimes 2, sometimes 1, sometimes 0):=")
        .append(this.getWindowsPerReset())
        .append(", ");
    sb.append("unknown_18:=")
        .append(this.getUnknown_18())
        .append(ls);
    return sb.toString();
}
// toString() must include the control-data signature string.
@Test
public void testGetToString() {
    assertTrue(chmLzxcControlData.toString().contains(TestParameters.VP_CONTROL_DATA_SIGNATURE));
}
/** Integers can never be infinite, so this is unconditionally false. */
@Override
protected boolean isInfinite(Integer number) {
    // Infinity never applies here because only types like Float and Double have Infinity
    return false;
}
// isInfinite is always false for Integer inputs, including null and the extremes.
@Test
void testIsInfinite() {
    IntegerSummaryAggregator ag = new IntegerSummaryAggregator();
    // always false for Integer
    assertThat(ag.isInfinite(-1)).isFalse();
    assertThat(ag.isInfinite(0)).isFalse();
    assertThat(ag.isInfinite(23)).isFalse();
    assertThat(ag.isInfinite(Integer.MAX_VALUE)).isFalse();
    assertThat(ag.isInfinite(Integer.MIN_VALUE)).isFalse();
    assertThat(ag.isInfinite(null)).isFalse();
}
/**
 * Consumes the next pattern character, if any, advancing the pointer, and
 * lets the escape utility append its escaped form to the buffer.
 */
void escape(String escapeChars, StringBuffer buf) {
    if (pointer < patternLength) {
        final char nextChar = pattern.charAt(pointer++);
        escapeUtil.escape(escapeChars, buf, nextChar, pointer);
    }
}
// Tokenizer escape handling: '\' escapes percent signs, parentheses, control
// characters, a literal backslash, and the '_' separator in several positions.
@Test
public void testEscape() throws ScanException {
    {
        List<Token> tl = new TokenStream("\\%").tokenize();
        List<Token> witness = new ArrayList<Token>();
        witness.add(new Token(Token.LITERAL, "%"));
        assertEquals(witness, tl);
    }
    {
        List<Token> tl = new TokenStream("\\%\\(\\t\\)\\r\\n").tokenize();
        List<Token> witness = new ArrayList<Token>();
        witness.add(new Token(Token.LITERAL, "%(\t)\r\n"));
        assertEquals(witness, tl);
    }
    {
        List<Token> tl = new TokenStream("\\\\%x").tokenize();
        List<Token> witness = new ArrayList<Token>();
        witness.add(new Token(Token.LITERAL, "\\"));
        witness.add(Token.PERCENT_TOKEN);
        witness.add(new Token(Token.SIMPLE_KEYWORD, "x"));
        assertEquals(witness, tl);
    }
    {
        List<Token> tl = new TokenStream("%x\\)").tokenize();
        List<Token> witness = new ArrayList<Token>();
        witness.add(Token.PERCENT_TOKEN);
        witness.add(new Token(Token.SIMPLE_KEYWORD, "x"));
        witness.add(new Token(Token.LITERAL, ")"));
        assertEquals(witness, tl);
    }
    {
        List<Token> tl = new TokenStream("%x\\_a").tokenize();
        List<Token> witness = new ArrayList<Token>();
        witness.add(Token.PERCENT_TOKEN);
        witness.add(new Token(Token.SIMPLE_KEYWORD, "x"));
        witness.add(new Token(Token.LITERAL, "a"));
        assertEquals(witness, tl);
    }
    {
        List<Token> tl = new TokenStream("%x\\_%b").tokenize();
        List<Token> witness = new ArrayList<Token>();
        witness.add(Token.PERCENT_TOKEN);
        witness.add(new Token(Token.SIMPLE_KEYWORD, "x"));
        witness.add(Token.PERCENT_TOKEN);
        witness.add(new Token(Token.SIMPLE_KEYWORD, "b"));
        assertEquals(witness, tl);
    }
}
/**
 * Expands a filepattern containing a single "@N" shard spec into N concrete
 * names using a "-%0Wd-of-%05d" suffix (shard width W derived from the count).
 * A pattern without "@N" is returned unchanged.
 *
 * @throws IllegalArgumentException if more than one @N wildcard is present
 */
public static Iterable<String> expandAtNFilepattern(String filepattern) {
    ImmutableList.Builder<String> builder = ImmutableList.builder();
    Matcher match = AT_N_SPEC.matcher(filepattern);
    if (!match.find()) {
        builder.add(filepattern);
    } else {
        int numShards = Integer.parseInt(match.group("N"));
        // Shard index is zero-padded; width comes from getShardWidth for this count.
        String formatString = "-%0" + getShardWidth(numShards, filepattern) + "d-of-%05d";
        for (int i = 0; i < numShards; ++i) {
            builder.add(
                AT_N_SPEC.matcher(filepattern).replaceAll(String.format(formatString, i, numShards)));
        }
        if (match.find()) {
            throw new IllegalArgumentException(
                "More than one @N wildcard found in filepattern: " + filepattern);
        }
    }
    return builder.build();
}
// @N expansion must reject shard counts that are too large with a descriptive
// message.
@Test
public void testExpandAtNFilepatternHugeN() throws Exception {
    exception.expect(IllegalArgumentException.class);
    exception.expectMessage(
        "Unsupported number of shards: 2000000000 " + "in filepattern: gs://bucket/object@2000000000.ism");
    Filepatterns.expandAtNFilepattern("gs://bucket/object@2000000000.ism");
}
/**
 * Activates the account with the submitted activation code and maps the
 * backend result to an app response: OK (issuer type recorded), a retryable
 * wrong-code error with remaining attempts, a blocked-account error, or an
 * invalid/expired code error; anything else falls through to a generic NOK.
 */
@Override
public AppResponse process(Flow flow, ActivateWithCodeRequest request) throws FlowNotDefinedException, IOException, NoSuchAlgorithmException {
    Map<String, Object> result = digidClient.activateAccountWithCode(appSession.getAccountId(), request.getActivationCode());
    if (result.get(lowerUnderscore(STATUS)).equals("OK")) {
        appAuthenticator.setIssuerType((String) result.get(lowerUnderscore(ISSUER_TYPE)));
        return new OkResponse();
    }
    if (result.get(lowerUnderscore(STATUS)).equals("NOK") && result.get(ERROR) != null ) {
        final var error = result.get(ERROR);
        if (ERROR_CODE_NOT_CORRECT.equals(error)) {
            // Logcode 88 is already logged in x, can be changed when switching to account microservice :
            return new EnterActivationResponse(ERROR_CODE_NOT_CORRECT, Map.of(REMAINING_ATTEMPTS, result.get(lowerUnderscore(REMAINING_ATTEMPTS))));
        } else if (ERROR_CODE_BLOCKED.equals(error)) {
            digidClient.remoteLog("87", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId()));
            return new NokResponse((String) result.get(ERROR));
        } else if (ERROR_CODE_INVALID.equals(error)) {
            digidClient.remoteLog("90", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId()));
            return new EnterActivationResponse(ERROR_CODE_INVALID, Map.of(DAYS_VALID, result.get(lowerUnderscore(DAYS_VALID))));
        }
    }
    // Unrecognised NOK result: generic failure response
    return new NokResponse();
}
// An OK activation result must record the issuer type and yield an OkResponse
// with status "OK".
@Test
public void responseTestOK() throws FlowNotDefinedException, IOException, NoSuchAlgorithmException {
    //given
    when(digidClientMock.activateAccountWithCode(anyLong(), any())).thenReturn(Map.of(
        lowerUnderscore(STATUS), "OK",
        lowerUnderscore(ISSUER_TYPE), "type"
    ));
    //when
    AppResponse appResponse = activationCodeChecked.process(mockedFlow, activateWithCodeRequest);
    //then
    assertTrue(appResponse instanceof OkResponse);
    assertEquals("OK", ((StatusResponse) appResponse).getStatus());
}
/**
 * Returns a new GsonBuilder seeded with this instance's configuration;
 * changes to the returned builder do not affect this Gson instance.
 */
public GsonBuilder newBuilder() {
    return new GsonBuilder(this);
}
// A builder obtained via gson.newBuilder() is an independent copy: registering
// adapters on it must not affect the original Gson (nor builders derived from
// it later), while a Gson created from the modified builder does use them.
@Test
public void testDefaultGsonNewBuilderModification() {
    Gson gson = new Gson();
    GsonBuilder gsonBuilder = gson.newBuilder();
    // Modifications of `gsonBuilder` should not affect `gson` object
    gsonBuilder.registerTypeAdapter(
        CustomClass1.class,
        new TypeAdapter<CustomClass1>() {
            @Override
            public CustomClass1 read(JsonReader in) throws IOException {
                throw new UnsupportedOperationException();
            }

            @Override
            public void write(JsonWriter out, CustomClass1 value) throws IOException {
                out.value("custom-adapter");
            }
        });
    gsonBuilder.registerTypeHierarchyAdapter(
        CustomClass2.class,
        new JsonSerializer<CustomClass2>() {
            @Override
            public JsonElement serialize(
                CustomClass2 src, Type typeOfSrc, JsonSerializationContext context) {
                return new JsonPrimitive("custom-hierarchy-adapter");
            }
        });
    gsonBuilder.registerTypeAdapter(
        CustomClass3.class,
        new InstanceCreator<CustomClass3>() {
            @Override
            public CustomClass3 createInstance(Type type) {
                return new CustomClass3("custom-instance");
            }
        });
    assertDefaultGson(gson);
    // New GsonBuilder created from `gson` should not have been affected by changes either
    assertDefaultGson(gson.newBuilder().create());
    // But new Gson instance from `gsonBuilder` should use custom adapters
    assertCustomGson(gsonBuilder.create());
}
/**
 * Upserts a tagged config entry: inserts when no tag state exists for the
 * (dataId, group, tenant, tag) key, otherwise updates the existing row.
 */
@Override
public ConfigOperateResult insertOrUpdateTag(final ConfigInfo configInfo, final String tag, final String srcIp,
        final String srcUser) {
    if (findConfigInfo4TagState(configInfo.getDataId(), configInfo.getGroup(), configInfo.getTenant(), tag) == null) {
        return addConfigInfo4Tag(configInfo, tag, srcIp, srcUser);
    } else {
        return updateConfigInfo4Tag(configInfo, tag, srcIp, srcUser);
    }
}
// Upsert path when the tag row already exists: the update SQL must be issued
// exactly once and the operate result must carry the id/timestamp from the
// post-update state query.
@Test
void testInsertOrUpdateTagOfUpdate() {
    String dataId = "dataId111222";
    String group = "group";
    String tenant = "tenant";
    String appName = "appname1234";
    String content = "c12345";
    ConfigInfo configInfo = new ConfigInfo(dataId, group, tenant, appName, content);
    configInfo.setEncryptedDataKey("key23456");
    //mock query config state and return obj after update
    ConfigInfoStateWrapper configInfoStateWrapper = new ConfigInfoStateWrapper();
    configInfoStateWrapper.setLastModified(System.currentTimeMillis());
    configInfoStateWrapper.setId(234567890L);
    String tag = "tag123";
    Mockito.when(databaseOperate.queryOne(anyString(), eq(new Object[] {dataId, group, tenant, tag}),
            eq(CONFIG_INFO_STATE_WRAPPER_ROW_MAPPER))).thenReturn(new ConfigInfoStateWrapper())
        .thenReturn(configInfoStateWrapper);
    String srcIp = "ip345678";
    String srcUser = "user1234567";
    ConfigOperateResult configOperateResult =
        embeddedConfigInfoTagPersistService.insertOrUpdateTag(configInfo, tag, srcIp, srcUser);
    //verify update to be invoked
    embeddedStorageContextHolderMockedStatic.verify(
        () -> EmbeddedStorageContextHolder.addSqlContext(anyString(), eq(content),
            eq(MD5Utils.md5Hex(content, Constants.PERSIST_ENCODE)), eq(srcIp), eq(srcUser),
            any(Timestamp.class), eq(appName), eq(dataId), eq(group), eq(tenant), eq(tag)), times(1));
    assertEquals(configInfoStateWrapper.getId(), configOperateResult.getId());
    assertEquals(configInfoStateWrapper.getLastModified(), configOperateResult.getLastModified());
}
/**
 * Builds a 404 GenericException for an unpaid installment that could not be
 * found, carrying the INSTALLMENT_NOT_FOUND error code and a log message with
 * the installment id.
 */
public GenericException createInstallmentNotFoundException(int installmentId) {
    return GenericException.builder()
        .httpStatus(HttpStatus.NOT_FOUND)
        .logMessage(this.getClass().getName() + ".getCredit unpaid installment not found with installment id {0}", installmentId)
        .message(ErrorCode.INSTALLMENT_NOT_FOUND)
        .build();
}
// The factory must produce a 404 carrying the INSTALLMENT_NOT_FOUND code.
@Test
void createInstallmentNotFoundException() {
    // Act
    GenericException exception = creditService.createInstallmentNotFoundException(1);
    // Assert
    assertEquals(HttpStatus.NOT_FOUND, exception.getHttpStatus());
    assertEquals(ErrorCode.INSTALLMENT_NOT_FOUND, exception.getErrorCode());
}
/** Fixed-name generator: always returns the configured name, ignoring all arguments. */
@Override
public String generateSegmentName(int sequenceId, @Nullable Object minTimeValue, @Nullable Object maxTimeValue) {
    return _segmentName;
}
// A fixed generator echoes its configured name; a name with invalid characters
// is rejected with a descriptive IllegalArgumentException.
@Test
public void testWithMalFormedSegmentName() {
    assertEquals(new FixedSegmentNameGenerator("seg01").generateSegmentName(0, null, null), "seg01");
    try {
        new FixedSegmentNameGenerator("seg*01").generateSegmentName(0, null, null);
        Assert.fail();
    } catch (IllegalArgumentException e) {
        // Expected
        assertEquals(e.getMessage(), "Invalid partial or full segment name: seg*01");
    }
}
/**
 * Adds a filtered block to the chain; on success records the block's
 * transaction count for filtered-transaction tracking.
 */
@Override
public boolean add(FilteredBlock block) throws VerificationException, PrunedException {
    boolean success = super.add(block);
    if (success) {
        trackFilteredTransactions(block.getTransactionCount());
    }
    return success;
}
// Out-of-order block delivery: an unconnected block is stored but does not
// advance the chain head until the missing middle block arrives.
@Test
public void unconnectedBlocks() throws Exception {
    Context.propagate(new Context(100, Coin.ZERO, false, true));
    Block b1 = TESTNET.getGenesisBlock().createNextBlock(coinbaseTo);
    Block b2 = b1.createNextBlock(coinbaseTo);
    Block b3 = b2.createNextBlock(coinbaseTo);
    // Connected.
    assertTrue(testNetChain.add(b1));
    // Unconnected but stored. The head of the chain is still b1.
    assertFalse(testNetChain.add(b3));
    assertEquals(testNetChain.getChainHead().getHeader(), b1.cloneAsHeader());
    // Add in the middle block.
    assertTrue(testNetChain.add(b2));
    assertEquals(testNetChain.getChainHead().getHeader(), b3.cloneAsHeader());
}
/**
 * Computes the weighted mean of the sampled values using the normalized
 * weights; an empty snapshot yields 0 rather than NaN.
 */
@Override
public double getMean() {
    final int count = values.length;
    if (count == 0) {
        return 0;
    }
    double weightedSum = 0;
    for (int idx = 0; idx < count; idx++) {
        weightedSum += values[idx] * normWeights[idx];
    }
    return weightedSum;
}
// An empty weighted snapshot must report a mean of zero, not NaN.
@Test
public void calculatesAMeanOfZeroForAnEmptySnapshot() throws Exception {
    final Snapshot emptySnapshot = new WeightedSnapshot(
        WeightedArray(new long[]{}, new double[]{})
    );
    assertThat(emptySnapshot.getMean())
        .isZero();
}
/**
 * Joins the given segments with '$' and encodes the result as URL-safe Base64,
 * producing a synthetic Elastic "cloud id" payload.
 *
 * <p>Fix: the bytes are now taken with an explicit UTF-8 charset; the previous
 * {@code getBytes()} used the platform default charset, which is not
 * guaranteed to be UTF-8 and could produce different payloads per host.
 *
 * @param args cloud id segments (host, cluster id, kibana id, ...)
 * @return URL-safe Base64 of the '$'-joined segments
 */
public static String cloudIdEncode(String... args) {
    final String joinedArgs = String.join("$", args);
    return Base64.getUrlEncoder()
        .encodeToString(joinedArgs.getBytes(java.nio.charset.StandardCharsets.UTF_8));
}
// A literal "undefined" kibana segment in a decoded cloud id must raise a
// descriptive ArgumentError.
@Test
public void testThrowExceptionWhenKibanaSegmentSegmentIsUndefined() {
    String[] raw = new String[] {"us-east-1.aws.found.io", "my-elastic-cluster", "undefined"};
    String encoded = CloudSettingId.cloudIdEncode(raw);
    Exception thrownException = assertThrows(org.jruby.exceptions.ArgumentError.class, () -> {
        new CloudSettingId(encoded);
    });
    assertThat(thrownException.getMessage(), containsString("Cloud Id, after decoding, the kibana segment is 'undefined', literally. You may need to enable Kibana in the Cloud UI."));
}
/**
 * Validates that a CSVFormat is usable for schema-mapped parsing: it must
 * declare a non-empty header of non-blank, unique column names, must not
 * allow missing names, ignore header case, or allow duplicate names, and must
 * not skip the header record (the header is consumed separately).
 *
 * @throws IllegalArgumentException if any constraint is violated
 */
static void validateCsvFormat(CSVFormat format) {
    String[] header =
        checkArgumentNotNull(format.getHeader(), "Illegal %s: header is required", CSVFormat.class);
    checkArgument(header.length > 0, "Illegal %s: header cannot be empty", CSVFormat.class);
    checkArgument(
        !format.getAllowMissingColumnNames(),
        "Illegal %s: cannot allow missing column names",
        CSVFormat.class);
    checkArgument(
        !format.getIgnoreHeaderCase(), "Illegal %s: cannot ignore header case", CSVFormat.class);
    checkArgument(
        !format.getAllowDuplicateHeaderNames(),
        "Illegal %s: cannot allow duplicate header names",
        CSVFormat.class);
    for (String columnName : header) {
        checkArgument(
            !Strings.isNullOrEmpty(columnName), "Illegal %s: column name is required", CSVFormat.class);
    }
    checkArgument(
        !format.getSkipHeaderRecord(),
        "Illegal %s: cannot skip header record because the header is already accounted for",
        CSVFormat.class);
}
// A header containing a null column name must be rejected with the specific
// "column name is required" message.
@Test
public void givenCSVFormatWithHeaderContainingNull_throwsException() {
    CSVFormat format = csvFormat().withHeader(null, "bar");
    String gotMessage =
        assertThrows(
                IllegalArgumentException.class, () -> CsvIOParseHelpers.validateCsvFormat(format))
            .getMessage();
    assertEquals(
        "Illegal class org.apache.commons.csv.CSVFormat: column name is required", gotMessage);
}
/**
 * Registers a user with the given password and optional roles.
 *
 * @return this config, for fluent chaining
 */
public SimpleAuthenticationConfig addUser(@Nonnull String username, @Nonnull String password, String... roles) {
    addUser(username, new UserDto(password, roles));
    return self();
}
// Users added with varargs roles must be retrievable by name with their
// passwords and role sets intact.
@Test
public void testAddUser() {
    SimpleAuthenticationConfig c = new SimpleAuthenticationConfig();
    c.addUser("user1", "password1");
    c.addUser("user2", "password2", "role1");
    c.addUser("user3", "password3", "role1", "role2", "role3");
    assertEquals(3, c.getUsernames().size());
    assertEquals("password1", c.getPassword("user1"));
    assertTrue(c.getRoles("user3").contains("role3"));
}
/**
 * Convenience overload resolving the lookup and stats provider from the rule
 * context before delegating.
 */
public static double getSourceTablesSizeInBytes(PlanNode node, Context context) {
    return getSourceTablesSizeInBytes(node, context.getLookup(), context.getStatsProvider());
}
// Exercises getSourceTablesSizeInBytes across: unknown stats (NaN), a union of
// a table scan and a values node (sizes combined to 270.0), and join/unnest
// nodes (NaN).
@Test
public void testGetSourceTablesSizeInBytes() {
    PlanBuilder planBuilder = new PlanBuilder(tester.getSession(), new PlanNodeIdAllocator(), tester.getMetadata());
    VariableReferenceExpression variable = planBuilder.variable("col");
    VariableReferenceExpression sourceVariable1 = planBuilder.variable("source1");
    VariableReferenceExpression sourceVariable2 = planBuilder.variable("soruce2");
    // missing source stats
    assertEquals(
        getSourceTablesSizeInBytes(
            planBuilder.values(variable),
            noLookup(),
            node -> PlanNodeStatsEstimate.unknown()),
        NaN);
    // two source plan nodes
    PlanNodeStatsEstimate sourceStatsEstimate1 = PlanNodeStatsEstimate.builder()
        .setOutputRowCount(10)
        .build();
    PlanNodeStatsEstimate sourceStatsEstimate2 = PlanNodeStatsEstimate.builder()
        .setOutputRowCount(20)
        .build();
    assertEquals(
        getSourceTablesSizeInBytes(
            planBuilder.union(
                ImmutableListMultimap.<VariableReferenceExpression, VariableReferenceExpression>builder()
                    .put(variable, sourceVariable1)
                    .put(variable, sourceVariable2)
                    .build(),
                ImmutableList.of(
                    planBuilder.tableScan(
                        ImmutableList.of(sourceVariable1),
                        ImmutableMap.of(sourceVariable1, new TestingColumnHandle("col"))),
                    planBuilder.values(new PlanNodeId("valuesNode"), sourceVariable2))),
            noLookup(),
            node -> {
                if (node instanceof TableScanNode) {
                    return sourceStatsEstimate1;
                }
                if (node instanceof ValuesNode) {
                    return sourceStatsEstimate2;
                }
                return PlanNodeStatsEstimate.unknown();
            }),
        270.0);
    // join node
    assertEquals(
        getSourceTablesSizeInBytes(
            planBuilder.join(
                INNER,
                planBuilder.values(sourceVariable1),
                planBuilder.values(sourceVariable2)),
            noLookup(),
            node -> sourceStatsEstimate1),
        NaN);
    // unnest node
    assertEquals(
        getSourceTablesSizeInBytes(
            planBuilder.unnest(
                planBuilder.values(sourceVariable1),
                ImmutableList.of(),
                ImmutableMap.of(sourceVariable1, ImmutableList.of(sourceVariable1)),
                Optional.empty()),
            noLookup(),
            node -> sourceStatsEstimate1),
        NaN);
}
/**
 * Moves the group to the target state after validating the transition,
 * remembering the previous state, stamping the transition time, and emitting
 * a metrics event for the (previous, new) state pair.
 */
public void transitionTo(ClassicGroupState groupState) {
    assertValidTransition(groupState);
    previousState = state;
    state = groupState;
    currentStateTimestamp = Optional.of(time.milliseconds());
    metrics.onClassicGroupStateTransition(previousState, state);
}
// Each state transition must emit exactly one metrics event with the correct
// (previous, new) state pair; group creation alone emits none.
@Test
public void testStateTransitionMetrics() {
    // Confirm metrics is not updated when a new GenericGroup is created but only when the group transitions
    // its state.
    GroupCoordinatorMetricsShard metrics = mock(GroupCoordinatorMetricsShard.class);
    ClassicGroup group = new ClassicGroup(new LogContext(), "groupId", EMPTY, Time.SYSTEM, metrics);
    verify(metrics, times(0)).onClassicGroupStateTransition(any(), any());
    group.transitionTo(PREPARING_REBALANCE);
    verify(metrics, times(1)).onClassicGroupStateTransition(EMPTY, PREPARING_REBALANCE);
    group.transitionTo(COMPLETING_REBALANCE);
    verify(metrics, times(1)).onClassicGroupStateTransition(PREPARING_REBALANCE, COMPLETING_REBALANCE);
    group.transitionTo(STABLE);
    verify(metrics, times(1)).onClassicGroupStateTransition(COMPLETING_REBALANCE, STABLE);
    group.transitionTo(DEAD);
    verify(metrics, times(1)).onClassicGroupStateTransition(STABLE, DEAD);
}
/**
 * Opens a new connection and channel.
 *
 * @throws IllegalStateException if already connected, or on connection timeout
 * @throws IOException on connection failure
 */
@Override
public void connect() throws IllegalStateException, IOException {
    if (isConnected()) {
        throw new IllegalStateException("Already connected");
    }
    try {
        connection = connectionFactory.newConnection();
    } catch (TimeoutException e) {
        // Surface timeouts through the declared IllegalStateException signature
        throw new IllegalStateException(e);
    }
    channel = connection.createChannel();
}
// A second connect() must fail fast with "Already connected".
@Test
public void shouldNotConnectToGraphiteServerMoreThenOnce() throws Exception {
    graphite.connect();
    try {
        graphite.connect();
        failBecauseExceptionWasNotThrown(IllegalStateException.class);
    } catch (IllegalStateException e) {
        assertThat(e.getMessage()).isEqualTo("Already connected");
    }
}
/**
 * OAuth2 token endpoint: rejects non-authorization_code grants, authenticates
 * the client, redeems the authorization code, and returns a Bearer token
 * response marked no-store.
 */
@POST
@Path("/token")
@Produces(MediaType.APPLICATION_JSON)
public Response token(
        @FormParam("code") String code,
        @FormParam("grant_type") String grantType,
        @FormParam("redirect_uri") String redirectUri,
        @FormParam("client_id") String clientId,
        @FormParam("client_assertion_type") String clientAssertionType,
        @FormParam("client_assertion") String clientAssertion) {
    if (!"authorization_code".equals(grantType)) {
        return Response.status(Status.BAD_REQUEST).entity("bad 'grant_type': " + grantType).build();
    }
    var authenticatedClient =
        authenticator.authenticate(new Request(clientId, clientAssertionType, clientAssertion));
    var redeemed = tokenIssuer.redeem(code, redirectUri, authenticatedClient.clientId());
    if (redeemed == null) {
        return Response.status(Status.BAD_REQUEST).entity("invalid code").build();
    }
    var cacheControl = new CacheControl();
    // Token responses must never be cached by intermediaries or clients.
    cacheControl.setNoStore(true);
    return Response.ok(
            new TokenResponse(
                redeemed.accessToken(),
                "Bearer",
                null,
                (int) redeemed.expiresInSeconds(),
                redeemed.idToken()))
        .cacheControl(cacheControl)
        .build();
}
// A grant_type other than authorization_code must yield HTTP 400.
@Test
void token_badGrantType() {
    var tokenIssuer = mock(TokenIssuer.class);
    var authenticator = mock(ClientAuthenticator.class);
    var sut = new TokenEndpoint(tokenIssuer, authenticator);
    var clientId = "myapp";
    var grantType = "yolo";
    var code = "6238e4504332468aa0c12e300787fded";
    when(tokenIssuer.redeem(code, null, null)).thenReturn(null);
    // when
    try (var res = sut.token(code, grantType, REDIRECT_URI.toString(), clientId, null, null)) {
        // then
        assertEquals(Status.BAD_REQUEST.getStatusCode(), res.getStatus());
    }
}
/**
 * Folds a list of join infos into a left-deep join tree. The first join's
 * left source seeds the tree; every subsequent join must reference one side
 * already in the tree (flipped when only its right side matches).
 *
 * @throws KsqlException on a circular join (both sides already present) or a
 *         join disconnected from the tree built so far
 */
public static Node build(final List<JoinInfo> joins) {
    Node root = null;
    for (final JoinInfo join : joins) {
        if (root == null) {
            root = new Leaf(join.getLeftSource());
        }
        if (root.containsSource(join.getRightSource()) && root.containsSource(join.getLeftSource())) {
            // Fix: report the left *source* — the thing actually checked above —
            // rather than the left join expression, so the message names both
            // duplicated sources consistently.
            throw new KsqlException("Cannot perform circular join - both " + join.getRightSource()
                + " and " + join.getLeftSource()
                + " are already included in the current join tree: " + root.debugString(0));
        } else if (root.containsSource(join.getLeftSource())) {
            root = new Join(root, new Leaf(join.getRightSource()), join);
        } else if (root.containsSource(join.getRightSource())) {
            // Only the right side is already in the tree: flip so the existing
            // tree stays on the left.
            root = new Join(root, new Leaf(join.getLeftSource()), join.flip());
        } else {
            throw new KsqlException(
                "Cannot build JOIN tree; neither source in the join is the FROM source or included "
                    + "in a previous JOIN: " + join + ". The current join tree is " + root.debugString(0)
            );
        }
    }
    return root;
}
// A join whose left source is new but whose right source is already in the
// tree must be flipped so the existing tree stays on the left.
@Test
public void handlesRightThreeWayJoin() {
    // Given:
    when(j1.getLeftSource()).thenReturn(a);
    when(j1.getRightSource()).thenReturn(b);
    when(j2.getLeftSource()).thenReturn(c);
    when(j2.getRightSource()).thenReturn(a);
    when(j2.flip()).thenReturn(j2);
    final List<JoinInfo> joins = ImmutableList.of(j1, j2);
    // When:
    final Node root = JoinTree.build(joins);
    // Then:
    assertThat(root, instanceOf(Join.class));
    assertThat(root, is(
        new Join(
            new Join(
                new Leaf(a),
                new Leaf(b),
                j1
            ),
            new Leaf(c),
            j2
        )
    ));
}
@Override
public Optional<DiscreteResource> parent() {
    // The parent resource, if any, is derived from the parent of this resource's id.
    return id.parent().map(parentId -> Resources.discrete(parentId).resource());
}
@Test
public void testThereIsParent() {
    // A (device, port, vlan) resource's parent is the (device, port) resource.
    DiscreteResource resource = Resources.discrete(D1, P1, VLAN1).resource();
    DiscreteResource parent = Resources.discrete(D1, P1).resource();

    assertThat(resource.parent(), is(Optional.of(parent)));
}
/**
 * Appends a single key/value pair to this builder's parameter map.
 *
 * @param key parameter name
 * @param value parameter value
 * @return this builder, for chaining
 */
public MetadataReportBuilder appendParameter(String key, String value) {
    // Delegate to the inherited map-building overload, which handles map creation.
    parameters = appendParameter(parameters, key, value);
    return getThis();
}
@Test
void appendParameter() {
    MetadataReportBuilder builder = new MetadataReportBuilder();
    // Append two independent keys via the fluent API.
    builder.appendParameter("default.num", "one").appendParameter("num", "ONE");
    Map<String, String> parameters = builder.build().getParameters();
    // Both keys must be present with the values that were appended.
    Assertions.assertTrue(parameters.containsKey("default.num"));
    Assertions.assertEquals("ONE", parameters.get("num"));
}
/**
 * Removes the mapping for {@code key} from the backing map.
 *
 * @param key the key whose mapping should be removed
 * @return the value previously associated with {@code key}, or {@code null} if absent
 */
@Override
public V remove(K key) {
    return map.remove(key);
}
@Test
public void testRemove() {
    map.put(23, "value-23");
    assertTrue(map.containsKey(23));
    // Removing through the adapter returns the mapped value and removes it
    // from the backing map as well.
    assertEquals("value-23", adapter.remove(23));
    assertFalse(map.containsKey(23));
}
/**
 * Consumes (redeems) an OAuth2 authorization code.
 *
 * <p>The code record is looked up, checked for existence and expiry, then
 * deleted so it cannot be redeemed twice.
 *
 * @param code the authorization code presented by the client
 * @return the consumed code record
 */
@Override
public OAuth2CodeDO consumeAuthorizationCode(String code) {
    // Look up the code record; an unknown code is rejected outright.
    final OAuth2CodeDO authCode = oauth2CodeMapper.selectByCode(code);
    if (authCode == null) {
        throw exception(OAUTH2_CODE_NOT_EXISTS);
    }
    // Reject codes whose validity window has already passed.
    if (DateUtils.isExpired(authCode.getExpiresTime())) {
        throw exception(OAUTH2_CODE_EXPIRE);
    }
    // Authorization codes are single-use: delete before handing the record back.
    oauth2CodeMapper.deleteById(authCode.getId());
    return authCode;
}
@Test
public void testConsumeAuthorizationCode_null() {
    // Invoke with a random (unknown) code and assert the "not exists" error.
    assertServiceException(() -> oauth2CodeService.consumeAuthorizationCode(randomString()),
        OAUTH2_CODE_NOT_EXISTS);
}
/**
 * Binds this bit set to {@code byteLength} bytes of the given memory segment,
 * starting at {@code offset}.
 *
 * @param memorySegment the backing segment; must not be null
 * @param offset the start position within the segment; must be non-negative
 * @throws IllegalArgumentException if the segment is null, the offset is negative,
 *     or fewer than {@code byteLength} bytes remain in the segment after {@code offset}
 */
public void setMemorySegment(MemorySegment memorySegment, int offset) {
    Preconditions.checkArgument(memorySegment != null, "MemorySegment can not be null.");
    // Message fixed: 0 is an accepted offset, so the requirement is "non-negative",
    // not "positive" as the old text claimed.
    Preconditions.checkArgument(offset >= 0, "Offset should be non-negative integer.");
    // Rearranged as a subtraction so the bound check cannot overflow int when
    // offset + byteLength would exceed Integer.MAX_VALUE (the old form could
    // wrap negative and silently pass).
    Preconditions.checkArgument(
            memorySegment.size() - offset >= byteLength,
            "Could not set MemorySegment, the remain buffers is not enough.");
    this.memorySegment = memorySegment;
    this.offset = offset;
}
@TestTemplate
void verifyBitSetSize2() {
    // A null memory segment must be rejected with an IllegalArgumentException.
    assertThatThrownBy(() -> bitSet.setMemorySegment(null, 1))
        .isInstanceOf(IllegalArgumentException.class);
}
/**
 * Post-processes topic-route responses: when the request opted into zone mode,
 * the response body is rewritten to contain only routes for the requested zone.
 *
 * @param remoteAddr the caller's address (unused here)
 * @param request the original request; consulted for code and zone ext fields
 * @param response the response to rewrite in place; ignored unless it is a
 *     successful GET_ROUTEINFO_BY_TOPIC response with a body
 */
@Override
public void doAfterResponse(String remoteAddr, RemotingCommand request, RemotingCommand response) {
    // Only topic-route lookups are of interest.
    if (RequestCode.GET_ROUTEINFO_BY_TOPIC != request.getCode()) {
        return;
    }
    // Skip missing, empty, or failed responses.
    if (response == null || response.getBody() == null || ResponseCode.SUCCESS != response.getCode()) {
        return;
    }
    // Zone filtering is opt-in via the request's ext fields.
    if (!Boolean.parseBoolean(request.getExtFields().get(MixAll.ZONE_MODE))) {
        return;
    }
    final String zoneName = request.getExtFields().get(MixAll.ZONE_NAME);
    if (StringUtils.isBlank(zoneName)) {
        return;
    }
    // Decode, filter by zone, and replace the response body in place.
    final TopicRouteData routeData = RemotingSerializable.decode(response.getBody(), TopicRouteData.class);
    response.setBody(filterByZoneName(routeData, zoneName).encode());
}
@Test
public void testDoAfterResponseWithNoResponse() {
    HashMap<String, String> extFields = new HashMap<>();
    extFields.put(MixAll.ZONE_MODE, "true");
    RemotingCommand request = RemotingCommand.createRequestCommand(105, null);
    request.setExtFields(extFields);
    // A null response must be ignored without throwing.
    zoneRouteRPCHook.doAfterResponse("", request, null);
    // A successful response with no body must also be ignored.
    RemotingCommand response = RemotingCommand.createResponseCommand(null);
    response.setCode(ResponseCode.SUCCESS);
    zoneRouteRPCHook.doAfterResponse("", request, response);
    // A non-SUCCESS response with a body must be ignored as well.
    response.setBody(RemotingSerializable.encode(createSampleTopicRouteData()));
    response.setCode(ResponseCode.NO_PERMISSION);
    zoneRouteRPCHook.doAfterResponse("", request, response);
}
/**
 * Adds key/value pairs parsed from a java.util.Properties-style string, resolving
 * variables in the values with the given resolver.
 *
 * <p>Convenience overload that passes {@code null} as the final argument of the
 * four-arg variant (presumably an optional name filter/mask — confirm against
 * that overload's contract).
 *
 * @param prefix prefix prepended to each generated argument
 * @param properties properties-format text, e.g. {@code "key=$VAR"}
 * @param vr resolver used to expand variables in the values
 * @return this builder, for chaining
 * @throws IOException if the properties text cannot be parsed
 */
public ArgumentListBuilder addKeyValuePairsFromPropertyString(String prefix, String properties, VariableResolver<String> vr) throws IOException {
    return addKeyValuePairsFromPropertyString(prefix, properties, vr, null);
}
@Test
public void addKeyValuePairsFromPropertyString() throws IOException {
    final Map<String, String> map = new HashMap<>();
    map.put("PATH", "C:\\Windows");
    final VariableResolver<String> resolver = new VariableResolver.ByMap<>(map);

    final String properties = "my.path=$PATH";

    // Three-arg convenience overload: $PATH is resolved from the map.
    ArgumentListBuilder builder = new ArgumentListBuilder();
    builder.addKeyValuePairsFromPropertyString("", properties, resolver);
    assertEquals("my.path=C:\\Windows", builder.toString());

    // Four-arg overload with a null final argument behaves identically.
    builder = new ArgumentListBuilder();
    builder.addKeyValuePairsFromPropertyString("", properties, resolver, null);
    assertEquals("my.path=C:\\Windows", builder.toString());
}
@VisibleForTesting static long calculateProcessingTimeTimerInterval(long watermarkInterval, long idleTimeout) { checkArgument(watermarkInterval > 0 || idleTimeout > 0); if (watermarkInterval <= 0) { return idleTimeout; } if (idleTimeout <= 0) { return watermarkInterval; } long smallerInterval = Math.min(watermarkInterval, idleTimeout); long largerInterval = Math.max(watermarkInterval, idleTimeout); // If one of the intervals is 5x smaller, just pick the smaller one. The firing interval // for the smaller one this way will be perfectly accurate, while for the larger one it will // be good enough™. For example one timer is every 2s the other every 11s, the 2nd timer // will be effectively checked every 12s, which is an acceptable accuracy. long timerInterval; if (smallerInterval * 5 < largerInterval) { timerInterval = smallerInterval; } else { // Otherwise, just pick an interval 5x smaller than the smaller interval. Again accuracy // will be good enough™. timerInterval = smallerInterval / 5; } return Math.max(timerInterval, 1); }
@Test
public void testCalculateProcessingTimeTimerInterval() {
    // With only one positive interval, that interval is returned unchanged.
    assertThat(calculateProcessingTimeTimerInterval(5, 0)).isEqualTo(5);
    assertThat(calculateProcessingTimeTimerInterval(5, -1)).isEqualTo(5);
    assertThat(calculateProcessingTimeTimerInterval(0, 5)).isEqualTo(5);
    assertThat(calculateProcessingTimeTimerInterval(-1, 5)).isEqualTo(5);
    // When one interval is more than 5x smaller, the smaller one wins outright.
    assertThat(calculateProcessingTimeTimerInterval(5, 42)).isEqualTo(5);
    assertThat(calculateProcessingTimeTimerInterval(42, 5)).isEqualTo(5);
    // Otherwise the result is a fifth of the smaller interval, floored at 1.
    assertThat(calculateProcessingTimeTimerInterval(2, 4)).isEqualTo(1);
    assertThat(calculateProcessingTimeTimerInterval(4, 2)).isEqualTo(1);
    assertThat(calculateProcessingTimeTimerInterval(100, 110)).isEqualTo(20);
    assertThat(calculateProcessingTimeTimerInterval(110, 100)).isEqualTo(20);
}
/**
 * Prepares an Extended Access Control (EAC) session for an NIK document.
 *
 * <p>Flow: initialize the eID session, verify DG14 against the Document Security
 * Object (passive authentication), generate the terminal's ephemeral key pair,
 * derive session keys, then assemble the CV-certificate chain (CVCA link
 * certificates, DVCA, AT) and the APDUs the card needs to execute EAC.
 *
 * @param request carries EF.SOd, DG14, EF.CVCA and the PACE id of the chip
 * @return the response with the ephemeral public key and prepared APDUs; if the
 *     session cannot be initialized, an error response from initSession
 */
public PrepareEacResponse prepareEacRequestRestService(PrepareEacRequest request) {
    PrepareEacResponse response = new PrepareEacResponse();
    EidSession session = initSession(request, null, response);
    // initSession populates the response with the failure details itself.
    if (session == null) return response;

    // Passive authentication: check DG14's hash against the signed EF.SOd.
    SOd sod = mapper.read(request.getEfSOd(), SOd.class);
    LdsSecurityObject ldsSecurityObject = sod.toLdsSecurityObject(mapper, cmsVerifier);
    ldsSecurityObject.verify(14, request.getDg14());
    DataGroup14 dg14 = mapper.read(request.getDg14(), DataGroup14.class);

    // Generate the terminal's ephemeral Chip Authentication key pair (PKca.pcd /
    // SKca.pcd) on the domain parameters advertised by the chip in DG14.
    EcPrivateKey ephemeralKey = securityFactory.generateKey(
            dg14.getSecurityInfos().getEcPublicKey().getParameters().getDomainParameters()
    );
    session.setIdpicc(new ByteArray(request.getPaceIcc()));
    session.setEphemeralKey(ephemeralKey);
    response.setEphemeralPKey(KeyUtils.getEncodedPublicPoint(ephemeralKey.toPublicKeySpec()));
    KeyUtils.generateSecretKeys(session, dg14.getSecurityInfos().getEcPublicKey().getPublicParameters(), null);

    // Resolve the Authentication Terminal certificate and its issuing DVCA.
    Certificate at = cvCertificateService.getAt(DocumentType.NIK, session.getUserConsentType());
    session.setAtReference(at.getSubject());
    Certificate dvca = cvCertificateService.getIssuer(at);

    // The CAR of the CVCA the card trusts is embedded in EF.CVCA: one-byte tag,
    // one-byte length at index 1, then the ASCII reference.
    byte[] efCvca = request.getEfCvca();
    String cvcaCar = new String(efCvca, 2, efCvca[1], StandardCharsets.US_ASCII);

    List<Certificate> certificates = new ArrayList<>();
    Certificate cvca = cvCertificateService.getIssuer(dvca);
    if (!cvca.getSubject().equals(cvcaCar) && !cvca.isTrusted()) {
        // Walk up the issuer chain, prepending link certificates until we reach
        // either the card's trusted CVCA or a trusted root; chainSize < 5 bounds
        // the walk so a broken chain cannot loop forever.
        var chainSize = 0;
        do {
            certificates.add(0, cvca);
            logger.warn("Certificate: added {} as link certificate in chain", cvca.getSubject());
            cvca = cvCertificateService.getIssuer(cvca);
            chainSize++;
        } while(!cvca.isTrusted() && !cvca.getSubject().equals(cvcaCar) && chainSize < 5);
    }
    certificates.add(dvca);
    certificates.add(at);
    // Build the card-side APDUs for the assembled chain and persist the session.
    response.setApdus(new ApduService(session).createPrepareEacNIKApdus(certificates, at.getSubject()));
    sessionRepo.save(session);
    return response;
}
@Test void prepareEacRequestRestServiceTest() throws Exception { EidSession session = new EidSession(); PrepareEacRequest request = new PrepareEacRequest(); request.setHeader(createRequestHeader()); request.setDg14(Base64.decode("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS".getBytes())); request.setEfCvca(Base64.decode("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS".getBytes())); 
request.setEfSOd(Base64.decode("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS
SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS")); 
request.setPaceIcc(Base64.decode("PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP".getBytes())); Certificate certificate = new Certificate(); certificate.setSubject("Subject"); certificate.setIssuer("SSSSSSSSSSSSSSSS"); certificate.setTrusted(true); certificate.setRaw(Base64.decode("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS")); loadCscaCertificate("nik/tv/csca.crt"); when(securityFactory.generateKey(Mockito.eq(ephemeralKey.toDomainParameters()))).thenReturn(ephemeralKey); doReturn(session).when(nikService).initSession(any(AppRequest.class), isNull(), any(AppResponse.class)); when(cvCertificateServiceMock.getAt(eq(DocumentType.NIK), isNull())).thenReturn(certificate); when(cvCertificateServiceMock.getIssuer(any(Certificate.class))).thenReturn(certificate); PrepareEacResponse result = nikService.prepareEacRequestRestService(request); assertEquals("OK", result.getStatus()); assertEquals(8, result.getApdus().size()); verify(cvCertificateServiceMock, times(1)).getAt(eq(DocumentType.NIK), isNull()); verify(cvCertificateServiceMock, times(2)).getIssuer(any(Certificate.class)); }
/**
 * Validates an SMS verification code for the given mobile number and scene.
 *
 * @param reqDTO carries the mobile number, the code to check, and the usage scene
 */
@Override
public void validateSmsCode(SmsCodeValidateReqDTO reqDTO) {
    // Delegate to the shared validator keyed by mobile number, code and scene.
    validateSmsCode0(reqDTO.getMobile(), reqDTO.getCode(), reqDTO.getScene());
}
@Test
public void validateSmsCode_notFound() {
    // Prepare parameters
    SmsCodeValidateReqDTO reqDTO = randomPojo(SmsCodeValidateReqDTO.class, o -> {
        o.setMobile("15601691300");
        o.setScene(randomEle(SmsSceneEnum.values()).getScene());
    });
    // Mock data
    SqlConstants.init(DbType.MYSQL);
    // Invoke and assert the expected exception (no matching code record exists)
    assertServiceException(() -> smsCodeService.validateSmsCode(reqDTO), SMS_CODE_NOT_FOUND);
}
/**
 * Renders the parsed node tree into its fully substituted string form.
 *
 * @return the compiled text of the tree rooted at {@code node}
 * @throws ScanException if compilation of the tree fails
 */
public String transform() throws ScanException {
    final StringBuilder sb = new StringBuilder();
    // An empty stack tracks in-progress nodes during compilation.
    compileNode(node, sb, new Stack<Node>());
    return sb.toString();
}
@Test
public void LOGBACK729() throws ScanException {
    // LOGBACK-729 regression: a nested variable reference inside the outer
    // variable's key must be substituted correctly.
    String input = "${${k0}.jdbc.url}";
    Node node = makeNode(input);
    NodeToStringTransformer nodeToStringTransformer = new NodeToStringTransformer(node, propertyContainer0);
    assertEquals("http://..", nodeToStringTransformer.transform());
}