focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
public void run() { try { InputStreamReader isr = new InputStreamReader( this.is ); BufferedReader br = new BufferedReader( isr ); String line = null; while ( ( line = br.readLine() ) != null ) { String logEntry = this.type + " " + line; switch ( this.logLevel ) { case MINIMAL: log.logMinimal( logEntry ); break; case BASIC: log.logBasic( logEntry ); break; case DETAILED: log.logDetailed( logEntry ); break; case DEBUG: log.logDebug( logEntry ); break; case ROWLEVEL: log.logRowlevel( logEntry ); break; case ERROR: log.logError( logEntry ); break; default: // NONE break; } } } catch ( IOException ioe ) { if ( log.isError() ) { log.logError( this.type + " " + Const.getStackTracker( ioe ) ); } } }
/** Verifies that a logger configured at ROWLEVEL forwards both input lines via logRowlevel. */
@Test public void testLogRowlevel() { streamLogger = new ConfigurableStreamLogger( log, is, LogLevel.ROWLEVEL, PREFIX ); streamLogger.run(); Mockito.verify( log ).logRowlevel( OUT1 ); Mockito.verify( log ).logRowlevel( OUT2 ); }
/**
 * Translates a Hive {@link SearchArgument} into a filter {@link Expression}
 * by delegating to {@code translate} with the argument's expression tree and
 * its flattened leaf predicates.
 */
public static Expression generateFilterExpression(SearchArgument sarg) { return translate(sarg.getExpression(), sarg.getLeaves()); }
/** A BOOLEAN equality leaf must translate to an Iceberg equal(boolean) predicate. */
@Test public void testBooleanType() { SearchArgument.Builder builder = SearchArgumentFactory.newBuilder(); SearchArgument arg = builder.startAnd().equals("boolean", PredicateLeaf.Type.BOOLEAN, true).end().build(); UnboundPredicate expected = Expressions.equal("boolean", true); UnboundPredicate actual = (UnboundPredicate) HiveIcebergFilterFactory.generateFilterExpression(arg); assertPredicatesMatch(expected, actual); }
@Override public void configure(Configuration parameters) { if (getFilePaths().length == 0) { // file path was not specified yet. Try to set it from the parameters. String filePath = parameters.getString(FILE_PARAMETER_KEY, null); if (filePath == null) { throw new IllegalArgumentException( "File path was not specified in input format or configuration."); } else { setFilePath(filePath); } } if (!this.enumerateNestedFiles) { this.enumerateNestedFiles = parameters.getBoolean(ENUMERATE_NESTED_FILES_FLAG, false); } }
/**
 * Configuring with a null file path (and none set programmatically) must fail.
 * NOTE(review): setString with a null value presumably leaves the key unset,
 * so configure() throws its "path not specified" IllegalArgumentException
 * (a RuntimeException) — confirm against Configuration's null handling.
 */
@Test void testSetFileViaConfigurationEmptyPath() { assertThatThrownBy( () -> { final DummyFileInputFormat format = new DummyFileInputFormat(); final String filePath = null; Configuration conf = new Configuration(); conf.setString("input.file.path", filePath); format.configure(conf); }) .isInstanceOf(RuntimeException.class); }
/** Binds the appender's output stream to a UDP datagram socket for host:port, then starts it. */
@Override public void start() { setOutputStream(datagramSocketOutputStream(host, port)); super.start(); }
/**
 * End-to-end check: bytes written to the UDP appender's output stream arrive
 * at a local UDP server as a single message within 5 seconds.
 */
@Test void testSendMessage() throws Exception { try (DatagramSocket datagramSocket = new DatagramSocket(); UdpServer udpServer = new UdpServer(datagramSocket, 1) ) { Future<List<String>> receivedMessage = udpServer.receive(); OutputStreamAppender<ILoggingEvent> udpStreamAppender = new DropwizardUdpSocketAppender<>("localhost", datagramSocket.getLocalPort()); udpStreamAppender.setContext(Mockito.mock(Context.class)); udpStreamAppender.start(); udpStreamAppender.getOutputStream().write("Test message".getBytes(UTF_8)); assertThat(receivedMessage.get(5, TimeUnit.SECONDS)) .singleElement() .isEqualTo("Test message"); udpStreamAppender.stop(); } }
/**
 * Builds a human-readable event description of the form
 * "the role[name1,name2] is &lt;event-type&gt;", joining the names of all
 * RoleDO instances carried by this event's source collection.
 */
@Override
public String buildContext() {
    final Collection<?> source = (Collection<?>) getSource();
    // Comma-join every role name in the event payload.
    final String roleNames = source.stream()
            .map(item -> ((RoleDO) item).getRoleName())
            .collect(Collectors.joining(","));
    final String typeName = StringUtils.lowerCase(getType().getType().toString());
    return String.format("the role[%s] is %s", roleNames, typeName);
}
/**
 * buildContext must join role names with commas for both a populated source
 * list and an empty one (which yields an empty selector).
 */
@Test public void testBuildContext() { String expectedSelector = roleDOList .stream() .map(RoleDO::getRoleName) .collect(Collectors.joining(",")); String expectedStr = String.format("the role[%s] is %s", expectedSelector, StringUtils.lowerCase(batchRoleDeletedEventTest.getType().getType().toString())); assertEquals(expectedStr, batchRoleDeletedEventTest.buildContext()); String expectedEmptySelector = emptyRoleDOList .stream() .map(RoleDO::getRoleName) .collect(Collectors.joining(",")); String expectedEmptyStr = String.format("the role[%s] is %s", expectedEmptySelector, StringUtils.lowerCase(batchRoleDeletedEventEmptySourceTest.getType().getType().toString())); assertEquals(expectedEmptyStr, batchRoleDeletedEventEmptySourceTest.buildContext()); }
/**
 * Attempts to acquire a lock on the given resource, tagging it with an owner
 * string derived from the optional lock context. Delegates to doLock; an empty
 * Optional means the lock is already held elsewhere.
 */
@Override public Optional<Lock> lock(@Nonnull String resource, @Nullable String lockContext) { return doLock(resource, getLockedByString(lockContext)); }
/**
 * A lock held by another node must not be acquirable: the fixture node first
 * takes the lock (failure to do so aborts the test), then this node's attempt
 * must come back empty.
 */
@Test void alreadyTaken(MongoDBTestService mongodb) { if (new MongoLockService(otherNodeId, mongodb.mongoConnection(), MongoLockService.MIN_LOCK_TTL) .lock("test-resource", null).isEmpty()) { throw new IllegalStateException("Unable to create original lock."); } final Optional<Lock> lock = lockService.lock("test-resource", null); assertThat(lock).isEmpty(); }
/**
 * Parses the gallery-page API JSON response into a Result.
 * Extracts the image URL from field "i3", the skip-hath key and an alternate
 * image URL from "i6", and the original-resolution URL from "i7" (falling back
 * to "i6" when "i7" is null). Throws ParseException when the response carries
 * an "error" field, when the image URL cannot be extracted, or when the body
 * is not valid JSON (JSON cause preserved).
 * NOTE(review): regex/order of matching is significant — left byte-identical.
 */
public static Result parse(String body) throws ParseException { try { Matcher m; Result result = new Result(); JSONObject jo = new JSONObject(body); if (jo.has("error")) { throw new ParseException(jo.getString("error"), body); } String i3 = jo.getString("i3"); m = PATTERN_IMAGE_URL.matcher(i3); if (m.find()) { result.imageUrl = StringUtils.unescapeXml(StringUtils.trim(m.group(1))); } String i6 = jo.getString("i6"); m = PATTERN_SKIP_HATH_KEY.matcher(i6); if (m.find()) { result.skipHathKey = StringUtils.unescapeXml(StringUtils.trim(m.group(1))); } m = PATTERN_ORIGIN_IMAGE_URL_NEW.matcher(i6); if (m.find()) { result.otherImageUrl = StringUtils.unescapeXml(m.group(1)); } if (jo.isNull("i7")){ m = PATTERN_ORIGIN_IMAGE_URL.matcher(i6); }else { String i7 = jo.getString("i7"); m = PATTERN_ORIGIN_IMAGE_URL.matcher(i7); } if (m.find()) { result.originImageUrl = StringUtils.unescapeXml(m.group(1)) + "fullimg" + StringUtils.unescapeXml(m.group(2)); } if (!TextUtils.isEmpty(result.imageUrl)) { return result; } else { throw new ParseException("Parse image url and skip hath key error", body); } } catch (JSONException e) { throw new ParseException("Can't parse json", body, e); } }
/**
 * Parses a recorded API response fixture and pins the extracted image URL,
 * skip-hath key and original-image URL.
 */
@Test
public void testParse() throws IOException, ParseException {
    // try-with-resources closes both the raw resource stream and the Okio
    // source — the original test leaked them.
    try (InputStream resource = GalleryPageApiParserTest.class.getResourceAsStream("GalleryPageApiParserTest.json");
         BufferedSource source = Okio.buffer(Okio.source(resource))) {
        String body = source.readUtf8();
        GalleryPageApiParser.Result result = GalleryPageApiParser.parse(body);
        assertEquals("http://69.30.203.46:60111/h/6047fa2f194742f6fa541ec1f631ec3ab438f960-183117-1280-960-jpg/keystamp=1550291100-c4438f48c8;fileindex=67379169;xres=1280/Valentines_2019_002.jpg", result.imageUrl);
        assertEquals("15151-430636", result.skipHathKey);
        assertEquals("https://e-hentai.org/fullimg.php?gid=1366222&page=3&key=puxxvyg98a4", result.originImageUrl);
    }
}
/**
 * Creates a Redis write transform with default connection settings and the
 * APPEND write method; callers customize it via the with* methods.
 */
public static Write write() { return new AutoValue_RedisIO_Write.Builder() .setConnectionConfiguration(RedisConnectionConfiguration.create()) .setMethod(Write.Method.APPEND) .build(); }
/**
 * RPUSH must append to the tail: after seeding the list with one value and
 * writing a second through RedisIO, the joined list reads value+newValue.
 */
@Test public void testWriteWithMethodRPush() { String key = "testWriteWithMethodRPush"; String value = "value"; client.lpush(key, value); String newValue = "newValue"; PCollection<KV<String, String>> write = p.apply(Create.of(KV.of(key, newValue))); write.apply(RedisIO.write().withEndpoint(REDIS_HOST, port).withMethod(Method.RPUSH)); p.run(); List<String> values = client.lrange(key, 0, -1); assertEquals(value + newValue, String.join("", values)); }
/**
 * Records a failed logout in the audit log at DEBUG level, including the
 * error message, the remote address and every client IP of the request.
 * Both arguments are validated eagerly, so null checks fire even when DEBUG
 * logging is disabled.
 */
@Override
public void logoutFailure(HttpRequest request, String errorMessage) {
    checkRequest(request);
    requireNonNull(errorMessage, "error message can't be null");
    // Skip formatting work entirely unless DEBUG is active.
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("logout failure [error|{}][IP|{}|{}]", emptyIfNull(errorMessage), request.getRemoteAddr(), getAllIps(request));
    }
}
/** A null request must trigger an NPE with the exact validation message, regardless of log level. */
@Test public void logout_failure_with_NPE_if_request_is_null() { logTester.setLevel(Level.INFO); assertThatThrownBy(() -> underTest.logoutFailure(null, "bad csrf")) .isInstanceOf(NullPointerException.class) .hasMessage("request can't be null"); }
public static String sanitizeIdentifierName(String identifier) { if ( identifier != null && identifier.length() > 0 ) { int firstAlphabeticIndex = 0; while ( firstAlphabeticIndex < identifier.length() && ( identifier.charAt( firstAlphabeticIndex ) == UNDERSCORE || Character.isDigit( identifier.charAt( firstAlphabeticIndex ) ) ) ) { firstAlphabeticIndex++; } if ( firstAlphabeticIndex < identifier.length()) { // If it is not consisted of only underscores String firstAlphaString = identifier.substring( firstAlphabeticIndex ).replace( "[]", "Array" ); StringBuilder sb = new StringBuilder( firstAlphaString.length() ); for ( int i = 0; i < firstAlphaString.length(); i++ ) { int codePoint = firstAlphaString.codePointAt( i ); if ( Character.isJavaIdentifierPart( codePoint ) || codePoint == '.') { sb.appendCodePoint( codePoint ); } else { sb.append( '_' ); } } return sb.toString(); } return identifier.replace( "[]", "Array" ); } return identifier; }
/**
 * Pins sanitizeIdentifierName behavior: leading underscores/digits stripped
 * (unless the whole name is such characters), "[]" mapped to "Array", and
 * illegal characters replaced by '_'.
 */
@Test public void testSanitizeIdentifierName() { assertThat( Strings.sanitizeIdentifierName( "test" ) ).isEqualTo( "test" ); assertThat( Strings.sanitizeIdentifierName( "int[]" ) ).isEqualTo( "intArray" ); assertThat( Strings.sanitizeIdentifierName( "_Test" ) ).isEqualTo( "Test" ); assertThat( Strings.sanitizeIdentifierName( "_int[]" ) ).isEqualTo( "intArray" ); assertThat( Strings.sanitizeIdentifierName( "__int[]" ) ).isEqualTo( "intArray" ); assertThat( Strings.sanitizeIdentifierName( "test_" ) ).isEqualTo( "test_" ); assertThat( Strings.sanitizeIdentifierName( "___" ) ).isEqualTo( "___" ); assertThat( Strings.sanitizeIdentifierName( "_0Test" ) ).isEqualTo( "Test" ); assertThat( Strings.sanitizeIdentifierName( "_0123456789Test" ) ).isEqualTo( "Test" ); assertThat( Strings.sanitizeIdentifierName( "_0int[]" ) ).isEqualTo( "intArray" ); assertThat( Strings.sanitizeIdentifierName( "__0int[]" ) ).isEqualTo( "intArray" ); assertThat( Strings.sanitizeIdentifierName( "___0" ) ).isEqualTo( "___0" ); assertThat( Strings.sanitizeIdentifierName( "bad/test" ) ).isEqualTo( "bad_test" ); }
/**
 * Whether the configured database supports error handling.
 * With no database connection configured, support is optimistically assumed.
 */
public boolean supportsErrorHandling() {
  return databaseMeta == null || databaseMeta.getDatabaseInterface().supportsErrorHandling();
}
/**
 * supportsErrorHandling must delegate to the database interface when a
 * DatabaseMeta is set (true then false via consecutive stubbing) and default
 * to true when none is set.
 */
@Test public void testSupportsErrorHandling() throws Exception { TableOutputMeta tableOutputMeta = new TableOutputMeta(); DatabaseMeta dbMeta = mock( DatabaseMeta.class ); tableOutputMeta.setDatabaseMeta( dbMeta ); DatabaseInterface databaseInterface = mock( DatabaseInterface.class ); when( dbMeta.getDatabaseInterface() ).thenReturn( databaseInterface ); when( databaseInterface.supportsErrorHandling() ).thenReturn( true, false ); assertTrue( tableOutputMeta.supportsErrorHandling() ); assertFalse( tableOutputMeta.supportsErrorHandling() ); tableOutputMeta.setDatabaseMeta( null ); assertTrue( tableOutputMeta.supportsErrorHandling() ); }
/**
 * Stream-rule substring matcher: true when the message field's string form
 * contains the rule value, XOR-ed with the rule's inverted flag. A missing
 * field matches only when the rule is inverted.
 */
@Override
public boolean match(Message msg, StreamRule rule) {
    final Object fieldValue = msg.getField(rule.getField());
    if (fieldValue == null) {
        // Absent field: a plain rule cannot match; an inverted rule does.
        return rule.getInverted();
    }
    return rule.getInverted() ^ fieldValue.toString().contains(rule.getValue());
}
/** A field present with a null value must behave like a missing field: no match for a non-inverted rule. */
@Test public void testNullFieldShouldNotMatch() { final String fieldName = "nullfield"; rule.setField(fieldName); msg.addField(fieldName, null); final StreamRuleMatcher matcher = getMatcher(rule); assertFalse(matcher.match(msg, rule)); }
/**
 * Emits the next element. While the input is still being consumed, records are
 * forwarded into the sorter via forwardingDataOutput; once END_OF_DATA is seen
 * the sort is finalized and all subsequent calls (sortedInput != null) drain
 * the sorted output instead.
 * NOTE(review): the two-phase state transition is order-sensitive — code left as-is.
 */
@Override public DataInputStatus emitNext(DataOutput<T> output) throws Exception { if (sortedInput != null) { return emitNextSortedRecord(output); } DataInputStatus inputStatus = wrappedInput.emitNext(forwardingDataOutput); if (inputStatus == DataInputStatus.END_OF_DATA) { endSorting(); return emitNextSortedRecord(output); } return inputStatus; }
/**
 * The sorting input must buffer all records, emit them sorted by key then
 * timestamp, and propagate only the final watermark (6) — intermediate
 * watermarks are swallowed while sorting.
 */
@Test void watermarkPropagation() throws Exception { CollectingDataOutput<Integer> collectingDataOutput = new CollectingDataOutput<>(); CollectionDataInput<Integer> input = new CollectionDataInput<>( Arrays.asList( new StreamRecord<>(1, 3), new Watermark(1), new StreamRecord<>(1, 1), new Watermark(2), new StreamRecord<>(2, 1), new Watermark(3), new StreamRecord<>(2, 3), new Watermark(4), new StreamRecord<>(1, 2), new Watermark(5), new StreamRecord<>(2, 2), new Watermark(6))); MockEnvironment environment = MockEnvironment.builder().build(); SortingDataInput<Integer, Integer> sortingDataInput = new SortingDataInput<>( input, new IntSerializer(), new IntSerializer(), (KeySelector<Integer, Integer>) value -> value, environment.getMemoryManager(), environment.getIOManager(), true, 1.0, new Configuration(), new DummyInvokable(), new ExecutionConfig()); DataInputStatus inputStatus; do { inputStatus = sortingDataInput.emitNext(collectingDataOutput); } while (inputStatus != DataInputStatus.END_OF_INPUT); assertThat(collectingDataOutput.events) .containsExactly( new StreamRecord<>(1, 1), new StreamRecord<>(1, 2), new StreamRecord<>(1, 3), new StreamRecord<>(2, 1), new StreamRecord<>(2, 2), new StreamRecord<>(2, 3), new Watermark(6)); }
/**
 * Handles a POP message request end to end: validates broker permission,
 * request limits (max 32 messages), timer-wheel availability, topic/queue and
 * subscription-group configuration; builds the subscription filter (explicit
 * expression or default "*" tag filter, including the pop retry topic);
 * asynchronously pops messages from the retry and/or normal queues (retry
 * first on a random 1-in-5 chance, retry again afterwards if the batch is not
 * full); then either returns the found messages (heap transfer or zero-copy
 * FileRegion) or enters long polling when nothing is available.
 * NOTE(review): the validation order, the async future chain and the response
 * assembly are tightly order-dependent — code left byte-identical, comments only.
 */
@Override public RemotingCommand processRequest(final ChannelHandlerContext ctx, RemotingCommand request) throws RemotingCommandException { final long beginTimeMills = this.brokerController.getMessageStore().now(); request.addExtFieldIfNotExist(BORN_TIME, String.valueOf(System.currentTimeMillis())); if (Objects.equals(request.getExtFields().get(BORN_TIME), "0")) { request.addExtField(BORN_TIME, String.valueOf(System.currentTimeMillis())); } Channel channel = ctx.channel(); RemotingCommand response = RemotingCommand.createResponseCommand(PopMessageResponseHeader.class); final PopMessageResponseHeader responseHeader = (PopMessageResponseHeader) response.readCustomHeader(); final PopMessageRequestHeader requestHeader = (PopMessageRequestHeader) request.decodeCommandCustomHeader(PopMessageRequestHeader.class, true); StringBuilder startOffsetInfo = new StringBuilder(64); StringBuilder msgOffsetInfo = new StringBuilder(64); StringBuilder orderCountInfo = null; if (requestHeader.isOrder()) { orderCountInfo = new StringBuilder(64); } brokerController.getConsumerManager().compensateBasicConsumerInfo(requestHeader.getConsumerGroup(), ConsumeType.CONSUME_POP, MessageModel.CLUSTERING); response.setOpaque(request.getOpaque()); if (brokerController.getBrokerConfig().isEnablePopLog()) { POP_LOGGER.info("receive PopMessage request command, {}", request); } if (requestHeader.isTimeoutTooMuch()) { response.setCode(ResponseCode.POLLING_TIMEOUT); response.setRemark(String.format("the broker[%s] pop message is timeout too much", this.brokerController.getBrokerConfig().getBrokerIP1())); return response; } if (!PermName.isReadable(this.brokerController.getBrokerConfig().getBrokerPermission())) { response.setCode(ResponseCode.NO_PERMISSION); response.setRemark(String.format("the broker[%s] pop message is forbidden", this.brokerController.getBrokerConfig().getBrokerIP1())); return response; } if (requestHeader.getMaxMsgNums() > 32) { response.setCode(ResponseCode.SYSTEM_ERROR); 
response.setRemark(String.format("the broker[%s] pop message's num is greater than 32", this.brokerController.getBrokerConfig().getBrokerIP1())); return response; } if (!brokerController.getMessageStore().getMessageStoreConfig().isTimerWheelEnable()) { response.setCode(ResponseCode.SYSTEM_ERROR); response.setRemark(String.format("the broker[%s] pop message is forbidden because timerWheelEnable is false", this.brokerController.getBrokerConfig().getBrokerIP1())); return response; } TopicConfig topicConfig = this.brokerController.getTopicConfigManager().selectTopicConfig(requestHeader.getTopic()); if (null == topicConfig) { POP_LOGGER.error("The topic {} not exist, consumer: {} ", requestHeader.getTopic(), RemotingHelper.parseChannelRemoteAddr(channel)); response.setCode(ResponseCode.TOPIC_NOT_EXIST); response.setRemark(String.format("topic[%s] not exist, apply first please! %s", requestHeader.getTopic(), FAQUrl.suggestTodo(FAQUrl.APPLY_TOPIC_URL))); return response; } if (!PermName.isReadable(topicConfig.getPerm())) { response.setCode(ResponseCode.NO_PERMISSION); response.setRemark("the topic[" + requestHeader.getTopic() + "] peeking message is forbidden"); return response; } if (requestHeader.getQueueId() >= topicConfig.getReadQueueNums()) { String errorInfo = String.format("queueId[%d] is illegal, topic:[%s] topicConfig.readQueueNums:[%d] " + "consumer:[%s]", requestHeader.getQueueId(), requestHeader.getTopic(), topicConfig.getReadQueueNums(), channel.remoteAddress()); POP_LOGGER.warn(errorInfo); response.setCode(ResponseCode.SYSTEM_ERROR); response.setRemark(errorInfo); return response; } SubscriptionGroupConfig subscriptionGroupConfig = this.brokerController.getSubscriptionGroupManager().findSubscriptionGroupConfig(requestHeader.getConsumerGroup()); if (null == subscriptionGroupConfig) { response.setCode(ResponseCode.SUBSCRIPTION_GROUP_NOT_EXIST); response.setRemark(String.format("subscription group [%s] does not exist, %s", requestHeader.getConsumerGroup(), 
FAQUrl.suggestTodo(FAQUrl.SUBSCRIPTION_GROUP_NOT_EXIST))); return response; } if (!subscriptionGroupConfig.isConsumeEnable()) { response.setCode(ResponseCode.NO_PERMISSION); response.setRemark("subscription group no permission, " + requestHeader.getConsumerGroup()); return response; } BrokerConfig brokerConfig = brokerController.getBrokerConfig(); SubscriptionData subscriptionData = null; ExpressionMessageFilter messageFilter = null; if (requestHeader.getExp() != null && !requestHeader.getExp().isEmpty()) { try { subscriptionData = FilterAPI.build(requestHeader.getTopic(), requestHeader.getExp(), requestHeader.getExpType()); brokerController.getConsumerManager().compensateSubscribeData(requestHeader.getConsumerGroup(), requestHeader.getTopic(), subscriptionData); String retryTopic = KeyBuilder.buildPopRetryTopic(requestHeader.getTopic(), requestHeader.getConsumerGroup(), brokerConfig.isEnableRetryTopicV2()); SubscriptionData retrySubscriptionData = FilterAPI.build(retryTopic, SubscriptionData.SUB_ALL, requestHeader.getExpType()); brokerController.getConsumerManager().compensateSubscribeData(requestHeader.getConsumerGroup(), retryTopic, retrySubscriptionData); ConsumerFilterData consumerFilterData = null; if (!ExpressionType.isTagType(subscriptionData.getExpressionType())) { consumerFilterData = ConsumerFilterManager.build( requestHeader.getTopic(), requestHeader.getConsumerGroup(), requestHeader.getExp(), requestHeader.getExpType(), System.currentTimeMillis() ); if (consumerFilterData == null) { POP_LOGGER.warn("Parse the consumer's subscription[{}] failed, group: {}", requestHeader.getExp(), requestHeader.getConsumerGroup()); response.setCode(ResponseCode.SUBSCRIPTION_PARSE_FAILED); response.setRemark("parse the consumer's subscription failed"); return response; } } messageFilter = new ExpressionMessageFilter(subscriptionData, consumerFilterData, brokerController.getConsumerFilterManager()); } catch (Exception e) { POP_LOGGER.warn("Parse the consumer's 
subscription[{}] error, group: {}", requestHeader.getExp(), requestHeader.getConsumerGroup()); response.setCode(ResponseCode.SUBSCRIPTION_PARSE_FAILED); response.setRemark("parse the consumer's subscription failed"); return response; } } else { try { subscriptionData = FilterAPI.build(requestHeader.getTopic(), "*", ExpressionType.TAG); brokerController.getConsumerManager().compensateSubscribeData(requestHeader.getConsumerGroup(), requestHeader.getTopic(), subscriptionData); String retryTopic = KeyBuilder.buildPopRetryTopic(requestHeader.getTopic(), requestHeader.getConsumerGroup(), brokerConfig.isEnableRetryTopicV2()); SubscriptionData retrySubscriptionData = FilterAPI.build(retryTopic, "*", ExpressionType.TAG); brokerController.getConsumerManager().compensateSubscribeData(requestHeader.getConsumerGroup(), retryTopic, retrySubscriptionData); } catch (Exception e) { POP_LOGGER.warn("Build default subscription error, group: {}", requestHeader.getConsumerGroup()); } } int randomQ = random.nextInt(100); int reviveQid; if (requestHeader.isOrder()) { reviveQid = KeyBuilder.POP_ORDER_REVIVE_QUEUE; } else { reviveQid = (int) Math.abs(ckMessageNumber.getAndIncrement() % this.brokerController.getBrokerConfig().getReviveQueueNum()); } GetMessageResult getMessageResult = new GetMessageResult(requestHeader.getMaxMsgNums()); ExpressionMessageFilter finalMessageFilter = messageFilter; StringBuilder finalOrderCountInfo = orderCountInfo; // Due to the design of the fields startOffsetInfo, msgOffsetInfo, and orderCountInfo, // a single POP request could only invoke the popMsgFromQueue method once // for either a normal topic or a retry topic's queue. Retry topics v1 and v2 are // considered the same type because they share the same retry flag in previous fields. // Therefore, needRetryV1 is designed as a subset of needRetry, and within a single request, // only one type of retry topic is able to call popMsgFromQueue. 
boolean needRetry = randomQ % 5 == 0; boolean needRetryV1 = false; if (brokerConfig.isEnableRetryTopicV2() && brokerConfig.isRetrieveMessageFromPopRetryTopicV1()) { needRetryV1 = randomQ % 2 == 0; } long popTime = System.currentTimeMillis(); CompletableFuture<Long> getMessageFuture = CompletableFuture.completedFuture(0L); if (needRetry && !requestHeader.isOrder()) { if (needRetryV1) { String retryTopic = KeyBuilder.buildPopRetryTopicV1(requestHeader.getTopic(), requestHeader.getConsumerGroup()); getMessageFuture = popMsgFromTopic(retryTopic, true, getMessageResult, requestHeader, reviveQid, channel, popTime, finalMessageFilter, startOffsetInfo, msgOffsetInfo, orderCountInfo, randomQ, getMessageFuture); } else { String retryTopic = KeyBuilder.buildPopRetryTopic(requestHeader.getTopic(), requestHeader.getConsumerGroup(), brokerConfig.isEnableRetryTopicV2()); getMessageFuture = popMsgFromTopic(retryTopic, true, getMessageResult, requestHeader, reviveQid, channel, popTime, finalMessageFilter, startOffsetInfo, msgOffsetInfo, orderCountInfo, randomQ, getMessageFuture); } } if (requestHeader.getQueueId() < 0) { // read all queue getMessageFuture = popMsgFromTopic(topicConfig, false, getMessageResult, requestHeader, reviveQid, channel, popTime, finalMessageFilter, startOffsetInfo, msgOffsetInfo, orderCountInfo, randomQ, getMessageFuture); } else { int queueId = requestHeader.getQueueId(); getMessageFuture = getMessageFuture.thenCompose(restNum -> popMsgFromQueue(topicConfig.getTopicName(), requestHeader.getAttemptId(), false, getMessageResult, requestHeader, queueId, restNum, reviveQid, channel, popTime, finalMessageFilter, startOffsetInfo, msgOffsetInfo, finalOrderCountInfo)); } // if not full , fetch retry again if (!needRetry && getMessageResult.getMessageMapedList().size() < requestHeader.getMaxMsgNums() && !requestHeader.isOrder()) { if (needRetryV1) { String retryTopicV1 = KeyBuilder.buildPopRetryTopicV1(requestHeader.getTopic(), requestHeader.getConsumerGroup()); 
getMessageFuture = popMsgFromTopic(retryTopicV1, true, getMessageResult, requestHeader, reviveQid, channel, popTime, finalMessageFilter, startOffsetInfo, msgOffsetInfo, orderCountInfo, randomQ, getMessageFuture); } else { String retryTopic = KeyBuilder.buildPopRetryTopic(requestHeader.getTopic(), requestHeader.getConsumerGroup(), brokerConfig.isEnableRetryTopicV2()); getMessageFuture = popMsgFromTopic(retryTopic, true, getMessageResult, requestHeader, reviveQid, channel, popTime, finalMessageFilter, startOffsetInfo, msgOffsetInfo, orderCountInfo, randomQ, getMessageFuture); } } final RemotingCommand finalResponse = response; SubscriptionData finalSubscriptionData = subscriptionData; getMessageFuture.thenApply(restNum -> { if (!getMessageResult.getMessageBufferList().isEmpty()) { finalResponse.setCode(ResponseCode.SUCCESS); getMessageResult.setStatus(GetMessageStatus.FOUND); if (restNum > 0) { // all queue pop can not notify specified queue pop, and vice versa popLongPollingService.notifyMessageArriving( requestHeader.getTopic(), requestHeader.getQueueId(), requestHeader.getConsumerGroup(), null, 0L, null, null); } } else { PollingResult pollingResult = popLongPollingService.polling( ctx, request, new PollingHeader(requestHeader), finalSubscriptionData, finalMessageFilter); if (PollingResult.POLLING_SUC == pollingResult) { if (restNum > 0) { popLongPollingService.notifyMessageArriving( requestHeader.getTopic(), requestHeader.getQueueId(), requestHeader.getConsumerGroup(), null, 0L, null, null); } return null; } else if (PollingResult.POLLING_FULL == pollingResult) { finalResponse.setCode(ResponseCode.POLLING_FULL); } else { finalResponse.setCode(ResponseCode.POLLING_TIMEOUT); } getMessageResult.setStatus(GetMessageStatus.NO_MESSAGE_IN_QUEUE); } responseHeader.setInvisibleTime(requestHeader.getInvisibleTime()); responseHeader.setPopTime(popTime); responseHeader.setReviveQid(reviveQid); responseHeader.setRestNum(restNum); 
responseHeader.setStartOffsetInfo(startOffsetInfo.toString()); responseHeader.setMsgOffsetInfo(msgOffsetInfo.toString()); if (requestHeader.isOrder() && finalOrderCountInfo != null) { responseHeader.setOrderCountInfo(finalOrderCountInfo.toString()); } finalResponse.setRemark(getMessageResult.getStatus().name()); switch (finalResponse.getCode()) { case ResponseCode.SUCCESS: if (this.brokerController.getBrokerConfig().isTransferMsgByHeap()) { final byte[] r = this.readGetMessageResult(getMessageResult, requestHeader.getConsumerGroup(), requestHeader.getTopic(), requestHeader.getQueueId()); this.brokerController.getBrokerStatsManager().incGroupGetLatency(requestHeader.getConsumerGroup(), requestHeader.getTopic(), requestHeader.getQueueId(), (int) (this.brokerController.getMessageStore().now() - beginTimeMills)); finalResponse.setBody(r); } else { final GetMessageResult tmpGetMessageResult = getMessageResult; try { FileRegion fileRegion = new ManyMessageTransfer(finalResponse.encodeHeader(getMessageResult.getBufferTotalSize()), getMessageResult); channel.writeAndFlush(fileRegion) .addListener((ChannelFutureListener) future -> { tmpGetMessageResult.release(); Attributes attributes = RemotingMetricsManager.newAttributesBuilder() .put(LABEL_REQUEST_CODE, RemotingHelper.getRequestCodeDesc(request.getCode())) .put(LABEL_RESPONSE_CODE, RemotingHelper.getResponseCodeDesc(finalResponse.getCode())) .put(LABEL_RESULT, RemotingMetricsManager.getWriteAndFlushResult(future)) .build(); RemotingMetricsManager.rpcLatency.record(request.getProcessTimer().elapsed(TimeUnit.MILLISECONDS), attributes); if (!future.isSuccess()) { POP_LOGGER.error("Fail to transfer messages from page cache to {}", channel.remoteAddress(), future.cause()); } }); } catch (Throwable e) { POP_LOGGER.error("Error occurred when transferring messages from page cache", e); getMessageResult.release(); } return null; } break; default: return finalResponse; } return finalResponse; }).thenAccept(result -> 
NettyRemotingAbstract.writeResponse(channel, request, result)); return null; }
/**
 * For a retry topic with no committed offset, the first POP (init mode MAX)
 * must initialize the consumer offset to the queue's min offset; a second POP
 * must NOT re-initialize it even after the min offset moves.
 */
@Test public void testGetInitOffset_retryTopic() throws RemotingCommandException { when(messageStore.getMessageStoreConfig()).thenReturn(new MessageStoreConfig()); String newGroup = group + "-" + System.currentTimeMillis(); String retryTopic = KeyBuilder.buildPopRetryTopic(topic, newGroup); long minOffset = 100L; when(messageStore.getMinOffsetInQueue(retryTopic, 0)).thenReturn(minOffset); brokerController.getTopicConfigManager().getTopicConfigTable().put(retryTopic, new TopicConfig(retryTopic, 1, 1)); GetMessageResult getMessageResult = createGetMessageResult(0); when(messageStore.getMessageAsync(eq(newGroup), anyString(), anyInt(), anyLong(), anyInt(), any())) .thenReturn(CompletableFuture.completedFuture(getMessageResult)); long offset = brokerController.getConsumerOffsetManager().queryOffset(newGroup, retryTopic, 0); Assert.assertEquals(-1, offset); RemotingCommand request = createPopMsgCommand(newGroup, topic, 0, ConsumeInitMode.MAX); popMessageProcessor.processRequest(handlerContext, request); offset = brokerController.getConsumerOffsetManager().queryOffset(newGroup, retryTopic, 0); Assert.assertEquals(minOffset, offset); when(messageStore.getMinOffsetInQueue(retryTopic, 0)).thenReturn(minOffset * 2); popMessageProcessor.processRequest(handlerContext, request); offset = brokerController.getConsumerOffsetManager().queryOffset(newGroup, retryTopic, 0); Assert.assertEquals(minOffset, offset); // will not entry getInitOffset() again messageStore.getMinOffsetInQueue(retryTopic, 0); // prevent UnnecessaryStubbingException }
/** Synchronous XRANGE: blocks on the async variant and returns up to {@code count} entries between the two ids. */
@Override public Map<StreamMessageId, Map<K, V>> range(int count, StreamMessageId startId, StreamMessageId endId) { return get(rangeAsync(count, startId, endId)); }
/**
 * range must honor id bounds (only id 1 within [0,1]) and return all entries,
 * in id order, for the MIN..MAX span.
 */
@Test public void testRange() { RStream<String, String> stream = redisson.getStream("test"); assertThat(stream.size()).isEqualTo(0); Map<String, String> entries1 = new HashMap<>(); entries1.put("1", "11"); entries1.put("3", "31"); stream.add(new StreamMessageId(1), StreamAddArgs.entries(entries1).trimNonStrict().maxLen(1).noLimit()); assertThat(stream.size()).isEqualTo(1); Map<String, String> entries2 = new HashMap<>(); entries2.put("5", "55"); entries2.put("7", "77"); stream.add(new StreamMessageId(2), StreamAddArgs.entries(entries2).trimNonStrict().maxLen(1).noLimit()); Map<StreamMessageId, Map<String, String>> r = stream.range(10, new StreamMessageId(0), new StreamMessageId(1)); assertThat(r).hasSize(1); assertThat(r.get(new StreamMessageId(1))).isEqualTo(entries1); Map<StreamMessageId, Map<String, String>> r2 = stream.range(10, StreamMessageId.MIN, StreamMessageId.MAX); assertThat(r2.keySet()).containsExactly(new StreamMessageId(1), new StreamMessageId(2)); assertThat(r2.get(new StreamMessageId(1))).isEqualTo(entries1); assertThat(r2.get(new StreamMessageId(2))).isEqualTo(entries2); }
/**
 * Maps an in-memory Measure to its persistence DTO, wiring in the metric,
 * component and current analysis uuids, the optional quality-gate alert, and
 * the measure's numeric value and data payload.
 */
public MeasureDto toMeasureDto(Measure measure, Metric metric, Component component) {
    final MeasureDto dto = new MeasureDto();
    dto.setMetricUuid(metric.getUuid());
    dto.setComponentUuid(component.getUuid());
    dto.setAnalysisUuid(analysisMetadataHolder.getUuid());
    // Quality-gate status is optional on a measure.
    if (measure.hasQualityGateStatus()) {
        setAlert(dto, measure.getQualityGateStatus());
    }
    dto.setValue(valueAsDouble(measure));
    dto.setData(data(measure));
    return dto;
}
/** BOOLEAN measures must persist as 1d (true) / 0d (false) while keeping the data string. */
@Test public void toMeasureDto_maps_value_to_1_or_0_and_data_from_data_field_for_BOOLEAN_metric() { MeasureDto trueMeasureDto = underTest.toMeasureDto(Measure.newMeasureBuilder().create(true, SOME_DATA), SOME_BOOLEAN_METRIC, SOME_COMPONENT); assertThat(trueMeasureDto.getValue()).isEqualTo(1d); assertThat(trueMeasureDto.getData()).isEqualTo(SOME_DATA); MeasureDto falseMeasureDto = underTest.toMeasureDto(Measure.newMeasureBuilder().create(false, SOME_DATA), SOME_BOOLEAN_METRIC, SOME_COMPONENT); assertThat(falseMeasureDto.getValue()).isEqualTo(0d); assertThat(falseMeasureDto.getData()).isEqualTo(SOME_DATA); }
/** True when the raw argument equals the WITHSCORES keyword; comparison is delegated to Util.isAsciiBytesEquals (case handling per that helper). */
public static boolean isWithScoresArg(byte[] arg) { return Util.isAsciiBytesEquals(WITHSCORES, arg); }
/**
 * isWithScoresArg must accept "withscores" in any letter case and reject
 * near-misses such as the singular "withscore".
 */
@Test
public void testWithScoresArg() {
    // Accepted spellings: case-insensitive matches of WITHSCORES.
    for (String accepted : new String[] {"withscores", "WITHSCORES", "WIthScoreS"}) {
        assertThat(ZSetCommonUtils.isWithScoresArg(accepted.getBytes())).isTrue();
    }
    // Rejected spellings: wrong keyword entirely.
    for (String rejected : new String[] {"withscore", "WITHSCORE"}) {
        assertThat(ZSetCommonUtils.isWithScoresArg(rejected.getBytes())).isFalse();
    }
}
/**
 * Looks up a public method of {@code clazz} by name and (optionally) parameter
 * type names. With explicit parameter types each name is resolved to a Class
 * and the method is fetched directly; with {@code null} parameter types the
 * name must identify exactly one public method, otherwise
 * NoSuchMethodException (none) or IllegalStateException (ambiguous) is thrown.
 *
 * @deprecated kept for backward compatibility.
 */
@Deprecated
public static Method findMethodByMethodSignature(Class<?> clazz, String methodName, String[] parameterTypes)
        throws NoSuchMethodException, ClassNotFoundException {
    if (parameterTypes != null) {
        // Explicit signature: resolve each type name, then do a direct lookup.
        Class<?>[] types = new Class<?>[parameterTypes.length];
        for (int i = 0; i < parameterTypes.length; i++) {
            types[i] = ReflectUtils.name2class(parameterTypes[i]);
        }
        return clazz.getMethod(methodName, types);
    }
    // Name-only lookup: collect every public method carrying that name.
    List<Method> matches = new ArrayList<>();
    for (Method candidate : clazz.getMethods()) {
        if (candidate.getName().equals(methodName)) {
            matches.add(candidate);
        }
    }
    if (matches.isEmpty()) {
        throw new NoSuchMethodException("No such method " + methodName + " in class " + clazz);
    }
    if (matches.size() > 1) {
        String msg = String.format(
                "Not unique method for method name(%s) in class(%s), find %d methods.",
                methodName, clazz.getName(), matches.size());
        throw new IllegalStateException(msg);
    }
    return matches.get(0);
}
@Test void testFindMethodByMethodSignatureNotFound() throws Exception { try { ReflectUtils.findMethodByMethodSignature(TestedClass.class, "doesNotExist", null); fail(); } catch (NoSuchMethodException expected) { assertThat(expected.getMessage(), containsString("No such method ")); assertThat(expected.getMessage(), containsString("in class")); } }
@Override public void deleteDictType(Long id) { // 校验是否存在 DictTypeDO dictType = validateDictTypeExists(id); // 校验是否有字典数据 if (dictDataService.getDictDataCountByDictType(dictType.getType()) > 0) { throw exception(DICT_TYPE_HAS_CHILDREN); } // 删除字典类型 dictTypeMapper.updateToDelete(id, LocalDateTime.now()); }
@Test public void testDeleteDictType_success() { // mock 数据 DictTypeDO dbDictType = randomDictTypeDO(); dictTypeMapper.insert(dbDictType);// @Sql: 先插入出一条存在的数据 // 准备参数 Long id = dbDictType.getId(); // 调用 dictTypeService.deleteDictType(id); // 校验数据不存在了 assertNull(dictTypeMapper.selectById(id)); }
@Override public CompletableFuture<RemovedTaskResult> remove(final TaskId taskId) { final CompletableFuture<RemovedTaskResult> future = new CompletableFuture<>(); tasksAndActionsLock.lock(); try { tasksAndActions.add(TaskAndAction.createRemoveTask(taskId, future)); tasksAndActionsCondition.signalAll(); } finally { tasksAndActionsLock.unlock(); } return future; }
@Test public void shouldThrowIfRemovingUpdatingStandbyTaskFailsWithStreamsException() throws Exception { final StandbyTask task = standbyTask(TASK_0_0, mkSet(TOPIC_PARTITION_A_0)).inState(State.RUNNING).build(); final StreamsException streamsException = new StreamsException("Something happened", task.id()); setupShouldThrowIfRemovingUpdatingStatefulTaskFailsWithException(task, streamsException); final CompletableFuture<StateUpdater.RemovedTaskResult> future = stateUpdater.remove(task.id()); verifyRemovingUpdatingStatefulTaskFails(future, task, streamsException, true); }
@VisibleForTesting void removeDisableUsers(Set<Long> assigneeUserIds) { if (CollUtil.isEmpty(assigneeUserIds)) { return; } Map<Long, AdminUserRespDTO> userMap = adminUserApi.getUserMap(assigneeUserIds); assigneeUserIds.removeIf(id -> { AdminUserRespDTO user = userMap.get(id); return user == null || !CommonStatusEnum.ENABLE.getStatus().equals(user.getStatus()); }); }
@Test public void testRemoveDisableUsers() { // 准备参数. 1L 可以找到;2L 是禁用的;3L 找不到 Set<Long> assigneeUserIds = asSet(1L, 2L, 3L); // mock 方法 AdminUserRespDTO user1 = randomPojo(AdminUserRespDTO.class, o -> o.setId(1L) .setStatus(CommonStatusEnum.ENABLE.getStatus())); AdminUserRespDTO user2 = randomPojo(AdminUserRespDTO.class, o -> o.setId(2L) .setStatus(CommonStatusEnum.DISABLE.getStatus())); Map<Long, AdminUserRespDTO> userMap = MapUtil.builder(user1.getId(), user1) .put(user2.getId(), user2).build(); when(adminUserApi.getUserMap(eq(assigneeUserIds))).thenReturn(userMap); // 调用 taskCandidateInvoker.removeDisableUsers(assigneeUserIds); // 断言 assertEquals(asSet(1L), assigneeUserIds); }
public void execute(ProjectReactor reactor) { executeProjectBuilders(projectBuilders, reactor, "Execute project builders"); }
@Test public void testProjectBuilderFailsWithToString() { ProjectBuilder builder = mock(ProjectBuilder.class); doThrow(new IllegalStateException()).when(builder).build(any(Context.class)); ProjectBuilder[] projectBuilders = {builder}; assertThatThrownBy(() -> new ProjectBuildersExecutor(mock(GlobalConfiguration.class), projectBuilders).execute(reactor)) .isInstanceOf(MessageException.class) .hasMessageContaining("Failed to execute project builder: Mock for ProjectBuilder"); }
public List<PluginRoleConfig> pluginRoleConfigsFor(String authConfigId) { List<PluginRoleConfig> rolesConfig = new ArrayList<>(); for (Role role : this) { if (role instanceof PluginRoleConfig) { if (((PluginRoleConfig) role).getAuthConfigId().equals(authConfigId)) { rolesConfig.add((PluginRoleConfig) role); } } } return rolesConfig; }
@Test public void shouldBeAbleToFetchPluginRolesForAAuthConfig() throws Exception { PluginRoleConfig admin = new PluginRoleConfig("admin", "corporate_ldap"); PluginRoleConfig view = new PluginRoleConfig("view", "corporate_ldap"); PluginRoleConfig operator = new PluginRoleConfig("operator", "internal_ldap"); RolesConfig rolesConfig = new RolesConfig(admin, view, operator, new RoleConfig(new CaseInsensitiveString("committer"))); assertThat(rolesConfig.pluginRoleConfigsFor("corporate_ldap"), hasSize(2)); assertThat(rolesConfig.pluginRoleConfigsFor("corporate_ldap"), containsInAnyOrder(admin, view)); assertThat(rolesConfig.pluginRoleConfigsFor("internal_ldap"), hasSize(1)); assertThat(rolesConfig.pluginRoleConfigsFor("internal_ldap"), containsInAnyOrder(operator)); }
public static void setTimeout(Integer timeout) { CONTEXT_HOLDER.put(KEY_TIMEOUT,timeout); }
@Test public void testSetTimeout() { RootContext.setTimeout(100); assertThat(RootContext.getTimeout()).isEqualTo(100); RootContext.setTimeout(null); assertThat(RootContext.getTimeout()).isEqualTo(null); }
public static <T extends PipelineOptions> T as(Class<T> klass) { return new Builder().as(klass); }
@Test public void testSetterAnnotatedWithDefault() throws Exception { expectedException.expect(IllegalArgumentException.class); expectedException.expectMessage( "Expected setter for property [value] to not be marked with @Default on [" + "org.apache.beam.sdk.options.PipelineOptionsFactoryTest$SetterWithDefault]"); PipelineOptionsFactory.as(SetterWithDefault.class); }
@Override public int forEachByte(ByteProcessor processor) { ensureAccessible(); try { return forEachByteAsc0(readerIndex, writerIndex, processor); } catch (Exception e) { PlatformDependent.throwException(e); return -1; } }
@Test public void testForEachByte() { buffer.clear(); for (int i = 0; i < CAPACITY; i ++) { buffer.writeByte(i + 1); } final AtomicInteger lastIndex = new AtomicInteger(); buffer.setIndex(CAPACITY / 4, CAPACITY * 3 / 4); assertThat(buffer.forEachByte(new ByteProcessor() { int i = CAPACITY / 4; @Override public boolean process(byte value) throws Exception { assertThat(value, is((byte) (i + 1))); lastIndex.set(i); i ++; return true; } }), is(-1)); assertThat(lastIndex.get(), is(CAPACITY * 3 / 4 - 1)); }
public String tables(Namespace ns) { return SLASH.join("v1", prefix, "namespaces", RESTUtil.encodeNamespace(ns), "tables"); }
@Test public void testTablesWithSlash() { Namespace ns = Namespace.of("n/s"); assertThat(withPrefix.tables(ns)).isEqualTo("v1/ws/catalog/namespaces/n%2Fs/tables"); assertThat(withoutPrefix.tables(ns)).isEqualTo("v1/namespaces/n%2Fs/tables"); }
@Override public Image call() throws LayerPropertyNotFoundException { try (ProgressEventDispatcher ignored = progressEventDispatcherFactory.create("building image format", 1); TimerEventDispatcher ignored2 = new TimerEventDispatcher(buildContext.getEventHandlers(), DESCRIPTION)) { // Constructs the image. Image.Builder imageBuilder = Image.builder(buildContext.getTargetFormat()); // Base image layers baseImageLayers.forEach(imageBuilder::addLayer); // Passthrough config and count non-empty history entries int nonEmptyLayerCount = 0; for (HistoryEntry historyObject : baseImage.getHistory()) { imageBuilder.addHistory(historyObject); if (!historyObject.hasCorrespondingLayer()) { nonEmptyLayerCount++; } } imageBuilder .setArchitecture(baseImage.getArchitecture()) .setOs(baseImage.getOs()) .addEnvironment(baseImage.getEnvironment()) .addLabels(baseImage.getLabels()) .setHealthCheck(baseImage.getHealthCheck()) .addExposedPorts(baseImage.getExposedPorts()) .addVolumes(baseImage.getVolumes()) .setUser(baseImage.getUser()) .setWorkingDirectory(baseImage.getWorkingDirectory()); ContainerConfiguration containerConfiguration = buildContext.getContainerConfiguration(); // Add history elements for non-empty layers that don't have one yet Instant layerCreationTime = containerConfiguration.getCreationTime(); for (int count = 0; count < baseImageLayers.size() - nonEmptyLayerCount; count++) { imageBuilder.addHistory( HistoryEntry.builder() .setCreationTimestamp(layerCreationTime) .setComment("auto-generated by Jib") .build()); } // Add built layers/configuration for (PreparedLayer applicationLayer : applicationLayers) { imageBuilder .addLayer(applicationLayer) .addHistory( HistoryEntry.builder() .setCreationTimestamp(layerCreationTime) .setAuthor("Jib") .setCreatedBy(buildContext.getToolName() + ":" + buildContext.getToolVersion()) .setComment(applicationLayer.getName()) .build()); } imageBuilder .addEnvironment(containerConfiguration.getEnvironmentMap()) 
.setCreated(containerConfiguration.getCreationTime()) .setEntrypoint(computeEntrypoint(baseImage, containerConfiguration)) .setProgramArguments(computeProgramArguments(baseImage, containerConfiguration)) .addExposedPorts(containerConfiguration.getExposedPorts()) .addVolumes(containerConfiguration.getVolumes()) .addLabels(containerConfiguration.getLabels()); if (containerConfiguration.getUser() != null) { imageBuilder.setUser(containerConfiguration.getUser()); } if (containerConfiguration.getWorkingDirectory() != null) { imageBuilder.setWorkingDirectory(containerConfiguration.getWorkingDirectory().toString()); } // Gets the container configuration content descriptor. return imageBuilder.build(); } }
@Test public void test_propagateBaseImageConfiguration() { Mockito.when(mockContainerConfiguration.getEnvironmentMap()) .thenReturn(ImmutableMap.of("MY_ENV", "MY_ENV_VALUE", "BASE_ENV_2", "NEW_VALUE")); Mockito.when(mockContainerConfiguration.getLabels()) .thenReturn(ImmutableMap.of("my.label", "my.label.value", "base.label.2", "new.value")); Mockito.when(mockContainerConfiguration.getExposedPorts()) .thenReturn(ImmutableSet.of(Port.tcp(3000), Port.udp(4000))); Mockito.when(mockContainerConfiguration.getVolumes()) .thenReturn( ImmutableSet.of( AbsoluteUnixPath.get("/new/path1"), AbsoluteUnixPath.get("/new/path2"))); Image image = new BuildImageStep( mockBuildContext, mockProgressEventDispatcherFactory, baseImage, baseImageLayers, applicationLayers) .call(); Assert.assertEquals("wasm", image.getArchitecture()); Assert.assertEquals("js", image.getOs()); Assert.assertEquals( ImmutableMap.of( "BASE_ENV", "BASE_ENV_VALUE", "MY_ENV", "MY_ENV_VALUE", "BASE_ENV_2", "NEW_VALUE"), image.getEnvironment()); Assert.assertEquals( ImmutableMap.of( "base.label", "base.label.value", "my.label", "my.label.value", "base.label.2", "new.value"), image.getLabels()); Assert.assertNotNull(image.getHealthCheck()); Assert.assertEquals( ImmutableList.of("CMD-SHELL", "echo hi"), image.getHealthCheck().getCommand()); Assert.assertTrue(image.getHealthCheck().getInterval().isPresent()); Assert.assertEquals(Duration.ofSeconds(3), image.getHealthCheck().getInterval().get()); Assert.assertTrue(image.getHealthCheck().getTimeout().isPresent()); Assert.assertEquals(Duration.ofSeconds(2), image.getHealthCheck().getTimeout().get()); Assert.assertTrue(image.getHealthCheck().getStartPeriod().isPresent()); Assert.assertEquals(Duration.ofSeconds(1), image.getHealthCheck().getStartPeriod().get()); Assert.assertTrue(image.getHealthCheck().getRetries().isPresent()); Assert.assertEquals(20, (int) image.getHealthCheck().getRetries().get()); Assert.assertEquals( ImmutableSet.of(Port.tcp(1000), Port.udp(2000), 
Port.tcp(3000), Port.udp(4000)), image.getExposedPorts()); Assert.assertEquals( ImmutableSet.of( AbsoluteUnixPath.get("/base/path1"), AbsoluteUnixPath.get("/base/path2"), AbsoluteUnixPath.get("/new/path1"), AbsoluteUnixPath.get("/new/path2")), image.getVolumes()); Assert.assertEquals("/base/working/directory", image.getWorkingDirectory()); Assert.assertEquals("root", image.getUser()); Assert.assertEquals(image.getHistory().get(0), nonEmptyLayerHistory); Assert.assertEquals(image.getHistory().get(1), emptyLayerHistory); Assert.assertEquals(image.getHistory().get(2), emptyLayerHistory); Assert.assertEquals(ImmutableList.of(), image.getEntrypoint()); Assert.assertEquals(ImmutableList.of(), image.getProgramArguments()); }
@Override public PageData<WidgetsBundle> findAllTenantWidgetsBundlesByTenantId(WidgetsBundleFilter widgetsBundleFilter, PageLink pageLink) { return findTenantWidgetsBundlesByTenantIds(Arrays.asList(widgetsBundleFilter.getTenantId().getId(), NULL_UUID), widgetsBundleFilter, pageLink); }
@Test public void testSearchTextNotFound() { UUID tenantId = Uuids.timeBased(); createWidgetBundles(5, tenantId, "ABC_"); createSystemWidgetBundles(5, "SYS_"); widgetsBundles = widgetsBundleDao.find(TenantId.SYS_TENANT_ID); assertEquals(10, widgetsBundleDao.find(TenantId.SYS_TENANT_ID).size()); PageLink textPageLink = new PageLink(30, 0, "TEXT_NOT_FOUND"); PageData<WidgetsBundle> widgetsBundles4 = widgetsBundleDao.findAllTenantWidgetsBundlesByTenantId(WidgetsBundleFilter.fromTenantId(TenantId.fromUUID(tenantId)), textPageLink); assertEquals(0, widgetsBundles4.getData().size()); }
@Override public TransactionType getTransactionType() { return TransactionType.BASE; }
@Test void assertInit() { Map<String, DataSource> actual = getDataSourceMap(); assertThat(actual.size(), is(1)); assertThat(actual.get(DATA_SOURCE_UNIQUE_NAME), instanceOf(DataSourceProxy.class)); assertThat(seataTransactionManager.getTransactionType(), is(TransactionType.BASE)); }
public static ClusterMembership from(String stringValue, Version vespaVersion, Optional<DockerImage> dockerImageRepo) { return from(stringValue, vespaVersion, dockerImageRepo, ZoneEndpoint.defaultEndpoint); }
@Test void testContainerServiceInstance() { ClusterSpec cluster = ClusterSpec.request(ClusterSpec.Type.container, ClusterSpec.Id.from("id1")).vespaVersion("6.42").build(); assertContainerService(ClusterMembership.from(cluster, 3)); }
public static final String[] convertLineToStrings( LogChannelInterface log, String line, TextFileInputMeta inf, String delimiter, String enclosure, String escapeCharacters ) throws KettleException { String[] strings = new String[inf.inputFields.length]; int fieldnr; String pol; // piece of line try { if ( line == null ) { return null; } if ( inf.content.fileType.equalsIgnoreCase( "CSV" ) ) { // Split string in pieces, only for CSV! fieldnr = 0; int pos = 0; int length = line.length(); boolean dencl = false; int len_encl = ( enclosure == null ? 0 : enclosure.length() ); int len_esc = ( escapeCharacters == null ? 0 : escapeCharacters.length() ); while ( pos < length ) { int from = pos; int next; boolean encl_found; boolean contains_escaped_enclosures = false; boolean contains_escaped_separators = false; boolean contains_escaped_escape = false; // Is the field beginning with an enclosure? // "aa;aa";123;"aaa-aaa";000;... if ( len_encl > 0 && line.substring( from, from + len_encl ).equalsIgnoreCase( enclosure ) ) { if ( log.isRowLevel() ) { log.logRowlevel( BaseMessages.getString( PKG, "TextFileInput.Log.ConvertLineToRowTitle" ), BaseMessages .getString( PKG, "TextFileInput.Log.Encloruse", line.substring( from, from + len_encl ) ) ); } encl_found = true; int p = from + len_encl; boolean is_enclosure = len_encl > 0 && p + len_encl < length && line.substring( p, p + len_encl ).equalsIgnoreCase( enclosure ); boolean is_escape = len_esc > 0 && p + len_esc < length && line.substring( p, p + len_esc ).equalsIgnoreCase( inf.content.escapeCharacter ); boolean enclosure_after = false; // Is it really an enclosure? See if it's not repeated twice or escaped! if ( ( is_enclosure || is_escape ) && p < length - 1 ) { String strnext = line.substring( p + len_encl, p + 2 * len_encl ); if ( strnext.equalsIgnoreCase( enclosure ) ) { p++; enclosure_after = true; dencl = true; // Remember to replace them later on! 
if ( is_escape ) { contains_escaped_enclosures = true; } } else if ( strnext.equals( inf.content.escapeCharacter ) ) { p++; // Remember to replace them later on! if ( is_escape ) { contains_escaped_escape = true; // remember } } } // Look for a closing enclosure! while ( ( !is_enclosure || enclosure_after ) && p < line.length() ) { p++; enclosure_after = false; is_enclosure = len_encl > 0 && p + len_encl < length && line.substring( p, p + len_encl ).equals( enclosure ); is_escape = len_esc > 0 && p + len_esc < length && line.substring( p, p + len_esc ).equals( inf.content.escapeCharacter ); // Is it really an enclosure? See if it's not repeated twice or escaped! if ( ( is_enclosure || is_escape ) && p < length - 1 ) { String strnext = line.substring( p + len_encl, p + 2 * len_encl ); if ( strnext.equals( enclosure ) ) { p++; enclosure_after = true; dencl = true; // Remember to replace them later on! if ( is_escape ) { contains_escaped_enclosures = true; // remember } } else if ( strnext.equals( inf.content.escapeCharacter ) ) { p++; // Remember to replace them later on! if ( is_escape ) { contains_escaped_escape = true; // remember } } } } if ( p >= length ) { next = p; } else { next = p + len_encl; } if ( log.isRowLevel() ) { log.logRowlevel( BaseMessages.getString( PKG, "TextFileInput.Log.ConvertLineToRowTitle" ), BaseMessages .getString( PKG, "TextFileInput.Log.EndOfEnclosure", "" + p ) ); } } else { encl_found = false; boolean found = false; int startpoint = from; // int tries = 1; do { next = line.indexOf( delimiter, startpoint ); // See if this position is preceded by an escape character. 
if ( len_esc > 0 && next > 0 ) { String before = line.substring( next - len_esc, next ); if ( inf.content.escapeCharacter.equals( before ) ) { int previous_escapes = 1; int start = next - len_esc - 1; int end = next - 1; while ( start >= 0 ) { if ( inf.content.escapeCharacter.equals( line.substring( start, end ) ) ) { previous_escapes++; start--; end--; } else { break; } } // If behind the seperator there are a odd number of escaped // The separator is escaped. if ( previous_escapes % 2 != 0 ) { // take the next separator, this one is escaped... startpoint = next + 1; // tries++; contains_escaped_separators = true; } else { found = true; } } else { found = true; } } else { found = true; } } while ( !found && next >= 0 ); } if ( next == -1 ) { next = length; } if ( encl_found && ( ( from + len_encl ) <= ( next - len_encl ) ) ) { pol = line.substring( from + len_encl, next - len_encl ); if ( log.isRowLevel() ) { log.logRowlevel( BaseMessages.getString( PKG, "TextFileInput.Log.ConvertLineToRowTitle" ), BaseMessages .getString( PKG, "TextFileInput.Log.EnclosureFieldFound", "" + pol ) ); } } else { pol = line.substring( from, next ); if ( log.isRowLevel() ) { log.logRowlevel( BaseMessages.getString( PKG, "TextFileInput.Log.ConvertLineToRowTitle" ), BaseMessages .getString( PKG, "TextFileInput.Log.NormalFieldFound", "" + pol ) ); } } if ( dencl && Utils.isEmpty( inf.content.escapeCharacter ) ) { StringBuilder sbpol = new StringBuilder( pol ); int idx = sbpol.indexOf( enclosure + enclosure ); while ( idx >= 0 ) { sbpol.delete( idx, idx + enclosure.length() ); idx = sbpol.indexOf( enclosure + enclosure ); } pol = sbpol.toString(); } if ( !Utils.isEmpty( inf.content.escapeCharacter ) && ( inf.content.escapeCharacter.equals( enclosure ) ) && ( contains_escaped_escape || contains_escaped_enclosures ) ) { // replace the escaped enclosures with enclosures... 
String replace = inf.content.escapeCharacter + enclosure; String replaceWith = enclosure; pol = Const.replace( pol, replace, replaceWith ); } else { if ( contains_escaped_enclosures ) { String replace = inf.content.escapeCharacter + enclosure; String replaceWith = enclosure; pol = Const.replace( pol, replace, replaceWith ); } contains_escaped_escape = !Utils.isEmpty( inf.content.escapeCharacter ) && pol.contains( inf.content.escapeCharacter + inf.content.escapeCharacter ); if ( contains_escaped_escape ) { String replace = inf.content.escapeCharacter + inf.content.escapeCharacter; String replaceWith = inf.content.escapeCharacter; pol = Const.replace( pol, replace, replaceWith ); } } // replace the escaped separators with separators... if ( contains_escaped_separators ) { String replace = inf.content.escapeCharacter + delimiter; String replaceWith = delimiter; pol = Const.replace( pol, replace, replaceWith ); } // Now add pol to the strings found! try { strings[fieldnr] = pol; } catch ( ArrayIndexOutOfBoundsException e ) { // In case we didn't allocate enough space. // This happens when you have less header values specified than there are actual values in the rows. // As this is "the exception" we catch and resize here. // String[] newStrings = new String[strings.length]; for ( int x = 0; x < strings.length; x++ ) { newStrings[x] = strings[x]; } strings = newStrings; } pos = next + delimiter.length(); fieldnr++; } if ( pos == length ) { if ( log.isRowLevel() ) { log.logRowlevel( BaseMessages.getString( PKG, "TextFileInput.Log.ConvertLineToRowTitle" ), BaseMessages .getString( PKG, "TextFileInput.Log.EndOfEmptyLineFound" ) ); } if ( fieldnr < strings.length ) { strings[fieldnr] = Const.EMPTY_STRING; } fieldnr++; } } else { // Fixed file format: Simply get the strings at the required positions... // Note - charBased is the old default behavior. If this is an old transformation, content.length will be null // and should be processed as before. 
If the content.length is equal to "Characters" or there is no specified encoding, // it will still use the old behavior. The *only* way to get the new behavior is if content.length = "Bytes" and // the encoding is specified. boolean charBased = ( inf.content.length == null || inf.content.length.equalsIgnoreCase( "Characters" ) || inf.getEncoding() == null ); // Default to classic behavior for ( int i = 0; i < inf.inputFields.length; i++ ) { BaseFileField field = inf.inputFields[i]; int length; int fPos = field.getPosition(); int fLength = field.getLength(); int fPl = fPos + fLength; if ( charBased ) { length = line.length(); if ( fPl <= length ) { strings[i] = line.substring( fPos, fPl ); } else { if ( fPos < length ) { strings[i] = line.substring( fPos ); } else { strings[i] = ""; } } } else { byte[] b = null; String enc = inf.getEncoding(); b = line.getBytes( enc ); length = b.length; if ( fPl <= length ) { strings[i] = new String( Arrays.copyOfRange( b, fPos, fPl ), enc ); } else { if ( fPos < length ) { strings[i] = new String( Arrays.copyOfRange( b, fPos, length - 1 ), enc ); } else { strings[i] = ""; } } } } } } catch ( Exception e ) { throw new KettleException( BaseMessages.getString( PKG, "TextFileInput.Log.Error.ErrorConvertingLine", e .toString() ), e ); } return strings; }
@Test public void convertCSVLinesToStrings() throws Exception { TextFileInputMeta inputMeta = Mockito.mock( TextFileInputMeta.class ); inputMeta.content = new TextFileInputMeta.Content(); inputMeta.content.fileType = "CSV"; inputMeta.inputFields = new BaseFileField[ 2 ]; inputMeta.content.escapeCharacter = "\\"; String line = "A\\\\,B"; // A\\,B String[] strings = TextFileInputUtils .convertLineToStrings( Mockito.mock( LogChannelInterface.class ), line, inputMeta, ",", "", "\\" ); Assert.assertNotNull( strings ); Assert.assertEquals( "A\\", strings[ 0 ] ); Assert.assertEquals( "B", strings[ 1 ] ); line = "\\,AB"; // \,AB strings = TextFileInputUtils .convertLineToStrings( Mockito.mock( LogChannelInterface.class ), line, inputMeta, ",", "", "\\" ); Assert.assertNotNull( strings ); Assert.assertEquals( ",AB", strings[ 0 ] ); Assert.assertEquals( null, strings[ 1 ] ); line = "\\\\\\,AB"; // \\\,AB strings = TextFileInputUtils .convertLineToStrings( Mockito.mock( LogChannelInterface.class ), line, inputMeta, ",", "", "\\" ); Assert.assertNotNull( strings ); Assert.assertEquals( "\\,AB", strings[ 0 ] ); Assert.assertEquals( null, strings[ 1 ] ); line = "AB,\\"; // AB,\ strings = TextFileInputUtils .convertLineToStrings( Mockito.mock( LogChannelInterface.class ), line, inputMeta, ",", "", "\\" ); Assert.assertNotNull( strings ); Assert.assertEquals( "AB", strings[ 0 ] ); Assert.assertEquals( "\\", strings[ 1 ] ); line = "AB,\\\\\\"; // AB,\\\ strings = TextFileInputUtils .convertLineToStrings( Mockito.mock( LogChannelInterface.class ), line, inputMeta, ",", "", "\\" ); Assert.assertNotNull( strings ); Assert.assertEquals( "AB", strings[ 0 ] ); Assert.assertEquals( "\\\\", strings[ 1 ] ); line = "A\\B,C"; // A\B,C strings = TextFileInputUtils .convertLineToStrings( Mockito.mock( LogChannelInterface.class ), line, inputMeta, ",", "", "\\" ); Assert.assertNotNull( strings ); Assert.assertEquals( "A\\B", strings[ 0 ] ); Assert.assertEquals( "C", strings[ 1 ] ); }
public static ByteString dataMapToByteString(Map<String, String> headers, DataMap dataMap) throws MimeTypeParseException, IOException { return ByteString.unsafeWrap(getContentType(headers).getCodec().mapToBytes(dataMap)); }
@Test public void testDataMapToPSONByteString() throws MimeTypeParseException, IOException { DataMap testDataMap = createTestDataMap(); byte[] expectedBytes = PSON_DATA_CODEC.mapToBytes(testDataMap); Map<String, String> headers = Collections.singletonMap(RestConstants.HEADER_CONTENT_TYPE, "application/x-pson"); ByteString byteString = DataMapConverter.dataMapToByteString(headers, testDataMap); Assert.assertEquals(byteString.copyBytes(), expectedBytes); }
public String getQueryGuid() { return this.queryGuid; }
@Test public void sameQueryInSameClusterAndOrgGetsSameId() { // Given: final String query1 = "CREATE STREAM my_stream (profileId VARCHAR, latitude DOUBLE, longitude " + "DOUBLE)\nWITH (kafka_topic='locations', value_format='json', partitions=1);"; final String query2 = "CREATE STREAM my_stream (profileId VARCHAR, latitude DOUBLE, " + "longitude DOUBLE) WITH (kafka_topic='locations', value_format='json', partitions=1);"; // When: final String queryId1 = new QueryGuid(TEST_NAMESPACE, query1, "TEST").getQueryGuid(); final String queryId2 = new QueryGuid(TEST_NAMESPACE, query2, "TEST").getQueryGuid(); // Then: Assert.assertEquals(queryId1, queryId2); }
public ByteKey interpolateKey(double fraction) { checkArgument( fraction >= 0.0 && fraction < 1.0, "Fraction %s must be in the range [0, 1)", fraction); byte[] startBytes = startKey.getBytes(); byte[] endBytes = endKey.getBytes(); // If the endKey is unspecified, add a leading 1 byte to it and a leading 0 byte to all other // keys, to get a concrete least upper bound for the desired range. if (endKey.isEmpty()) { startBytes = addHeadByte(startBytes, (byte) 0); endBytes = addHeadByte(endBytes, (byte) 1); } // Pad to the longest key. int paddedKeyLength = Math.max(startBytes.length, endBytes.length); BigInteger rangeStartInt = paddedPositiveInt(startBytes, paddedKeyLength); BigInteger rangeEndInt = paddedPositiveInt(endBytes, paddedKeyLength); // If the keys are equal subject to padding by 0, we can't interpolate. BigInteger range = rangeEndInt.subtract(rangeStartInt); checkState( !range.equals(BigInteger.ZERO), "Refusing to interpolate for near-empty %s where start and end keys differ only by trailing" + " zero bytes.", this); // Add precision so that range is at least 53 (double mantissa length) bits long. This way, we // can interpolate small ranges finely, e.g., split the range key 3 to key 4 into 1024 parts. // We add precision to range by adding zero bytes to the end of the keys, aka shifting the // underlying BigInteger left by a multiple of 8 bits. int bytesNeeded = ((53 - range.bitLength()) + 7) / 8; if (bytesNeeded > 0) { range = range.shiftLeft(bytesNeeded * 8); rangeStartInt = rangeStartInt.shiftLeft(bytesNeeded * 8); paddedKeyLength += bytesNeeded; } BigInteger interpolatedOffset = new BigDecimal(range).multiply(BigDecimal.valueOf(fraction)).toBigInteger(); int outputKeyLength = endKey.isEmpty() ? (paddedKeyLength - 1) : paddedKeyLength; return ByteKey.copyFrom( fixupHeadZeros(rangeStartInt.add(interpolatedOffset).toByteArray(), outputKeyLength)); }
@Test public void testInterpolateKey() { /* 0x80 is halfway between [] and [] */ assertEqualExceptPadding(ByteKey.of(0x80), ByteKeyRange.ALL_KEYS.interpolateKey(0.5)); /* 0x80 is halfway between [00] and [] */ ByteKeyRange after0 = ByteKeyRange.of(ByteKey.of(0), ByteKey.EMPTY); assertEqualExceptPadding(ByteKey.of(0x80), after0.interpolateKey(0.5)); /* 0x80 is halfway between [0000] and [] -- padding to longest key */ ByteKeyRange after00 = ByteKeyRange.of(ByteKey.of(0, 0), ByteKey.EMPTY); assertEqualExceptPadding(ByteKey.of(0x80), after00.interpolateKey(0.5)); /* 0x7f is halfway between [] and [fe] */ ByteKeyRange upToFE = ByteKeyRange.of(ByteKey.EMPTY, ByteKey.of(0xfe)); assertEqualExceptPadding(ByteKey.of(0x7f), upToFE.interpolateKey(0.5)); /* 0x40 is one-quarter of the way between [] and [] */ assertEqualExceptPadding(ByteKey.of(0x40), ByteKeyRange.ALL_KEYS.interpolateKey(0.25)); /* 0x40 is halfway between [] and [0x80] */ ByteKeyRange upTo80 = ByteKeyRange.of(ByteKey.EMPTY, ByteKey.of(0x80)); assertEqualExceptPadding(ByteKey.of(0x40), upTo80.interpolateKey(0.5)); /* 0x40 is halfway between [0x30] and [0x50] */ ByteKeyRange range30to50 = ByteKeyRange.of(ByteKey.of(0x30), ByteKey.of(0x50)); assertEqualExceptPadding(ByteKey.of(0x40), range30to50.interpolateKey(0.5)); /* 0x40 is halfway between [0x30, 0, 1] and [0x4f, 0xff, 0xff, 0, 0] */ ByteKeyRange range31to4f = ByteKeyRange.of(ByteKey.of(0x30, 0, 1), ByteKey.of(0x4f, 0xff, 0xff, 0, 0)); assertEqualExceptPadding(ByteKey.of(0x40), range31to4f.interpolateKey(0.5)); }
protected int getWeight(Invoker<?> invoker, Invocation invocation) { int weight; URL url = invoker.getUrl(); if (invoker instanceof ClusterInvoker) { url = ((ClusterInvoker<?>) invoker).getRegistryUrl(); } // Multiple registry scenario, load balance among multiple registries. if (REGISTRY_SERVICE_REFERENCE_PATH.equals(url.getServiceInterface())) { weight = url.getParameter(WEIGHT_KEY, DEFAULT_WEIGHT); } else { weight = url.getMethodParameter(RpcUtils.getMethodName(invocation), WEIGHT_KEY, DEFAULT_WEIGHT); if (weight > 0) { long timestamp = invoker.getUrl().getParameter(TIMESTAMP_KEY, 0L); if (timestamp > 0L) { long uptime = System.currentTimeMillis() - timestamp; if (uptime < 0) { return 1; } int warmup = invoker.getUrl().getParameter(WARMUP_KEY, DEFAULT_WARMUP); if (uptime > 0 && uptime < warmup) { weight = calculateWarmupWeight((int) uptime, warmup, weight); } } } } return Math.max(weight, 0); }
@Test void testGetRegistryWeight() { RpcInvocation invocation = new RpcInvocation(); invocation.setMethodName("say"); Invoker invoker1 = mock(Invoker.class, Mockito.withSettings().stubOnly()); URL url1 = new ServiceConfigURL("", "", 0, "DemoService", new HashMap<>()); given(invoker1.getUrl()).willReturn(url1); ClusterInvoker invoker2 = mock(ClusterInvoker.class, Mockito.withSettings().stubOnly()); URL url2 = new ServiceConfigURL("", "", 0, "org.apache.dubbo.registry.RegistryService", new HashMap<>()); url2 = url2.addParameter(WEIGHT_KEY, 20); URL registryUrl2 = new ServiceConfigURL("", "", 0, "org.apache.dubbo.registry.RegistryService", new HashMap<>()); registryUrl2 = registryUrl2.addParameter(WEIGHT_KEY, 30); given(invoker2.getUrl()).willReturn(url2); given(invoker2.getRegistryUrl()).willReturn(registryUrl2); Assertions.assertEquals(100, balance.getWeight(invoker1, invocation)); Assertions.assertEquals(30, balance.getWeight(invoker2, invocation)); }
public static String formatSql(final AstNode root) { final StringBuilder builder = new StringBuilder(); new Formatter(builder).process(root, 0); return StringUtils.stripEnd(builder.toString(), "\n"); }
@Test public void shouldFormatCsasWithClause() { final String statementString = "CREATE STREAM S WITH(partitions=4) AS SELECT * FROM address;"; final Statement statement = parseSingle(statementString); final String result = SqlFormatter.formatSql(statement); assertThat(result, startsWith("CREATE STREAM S WITH (PARTITIONS=4) AS SELECT")); }
@Override public final void isEqualTo(@Nullable Object other) { if (Objects.equal(actual, other)) { return; } // Fail but with a more descriptive message: if (actual == null || !(other instanceof Map)) { super.isEqualTo(other); return; } containsEntriesInAnyOrder((Map<?, ?>) other, /* allowUnexpected= */ false); }
@Test public void isEqualToNotConsistentWithEquals_failure() { TreeMap<String, Integer> actual = new TreeMap<>(CASE_INSENSITIVE_ORDER); TreeMap<String, Integer> expected = new TreeMap<>(CASE_INSENSITIVE_ORDER); actual.put("one", 1); expected.put("ONE", 1); actual.put("two", 2); expectFailureWhenTestingThat(actual).isEqualTo(expected); // The exact message generated is unspecified. }
/**
 * Builds the runtime info for this plugin, recording each registered task
 * decorator's class name under an index-suffixed key ("decorator0", "decorator1", ...).
 */
@Override
public PluginRuntime getPluginRuntime() {
    final PluginRuntime runtime = new PluginRuntime(getId());
    int index = 0;
    for (TaskDecorator decorator : decorators) {
        runtime.addInfo("decorator" + index, decorator.getClass().getName());
        index++;
    }
    return runtime;
}
@Test
public void testGetRuntime() {
    // Build the runtime from the SAME plugin instance whose id we assert against.
    // The original created a second, unrelated TaskDecoratorPlugin for the runtime,
    // so the assertion only passed by accident of both instances sharing an id.
    ThreadPoolPlugin plugin = new TaskDecoratorPlugin();
    PluginRuntime runtime = plugin.getPluginRuntime();
    Assert.assertNotNull(runtime);
    Assert.assertEquals(plugin.getId(), runtime.getPluginId());
}
@Override public void preflight(Path file) throws BackgroundException { assumeRole(file, DELETEPERMISSION); }
@Test public void testPreflightFileMissingCustomProps() throws Exception { final Path file = new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); file.setAttributes(file.attributes().withAcl(Acl.EMPTY)); new CteraDeleteFeature(session).preflight(file); }
/**
 * Restarts a workflow run described by the request.
 *
 * <p>For non-fresh runs that are not explicitly restarting from a specific node, the
 * request is first rewritten to restart from the inline root. The recursive restart is
 * then delegated to the action handler; a DELEGATED status is handled by restarting
 * directly (NOTE(review): DELEGATED appears to mean the handler deferred the actual
 * restart back to this step — confirm with actionHandler's contract).
 */
public RunRequest restart(RunRequest runRequest, boolean blocking) {
  if (!runRequest.isFreshRun() && runRequest.getCurrentPolicy() != RunPolicy.RESTART_FROM_SPECIFIC) {
    updateRunRequestForRestartFromInlineRoot(runRequest);
  }
  RunResponse runResponse = actionHandler.restartRecursively(runRequest);
  if (runResponse.getStatus() == RunResponse.Status.DELEGATED) {
    return restartDirectly(runResponse, runRequest, blocking);
  }
  return runResponse;
}
@Test public void testRestartNewRun() { when(instance.getStatus()).thenReturn(WorkflowInstance.Status.FAILED); when(instance.getInitiator()).thenReturn(new ManualInitiator()); when(actionHandler.restartRecursively(any())) .thenReturn(RunResponse.builder().status(RunResponse.Status.WORKFLOW_RUN_CREATED).build()); RunRequest runRequest = RunRequest.builder() .requester(user) .currentPolicy(RunPolicy.RESTART_FROM_SPECIFIC) .restartConfig( RestartConfig.builder().addRestartNode("sample-minimal-wf", 1, "job1").build()) .build(); RunResponse response = stepActionHandler.restart(runRequest, true); ArgumentCaptor<RunRequest> requestCaptor = ArgumentCaptor.forClass(RunRequest.class); Mockito.verify(actionHandler, Mockito.times(1)).restartRecursively(requestCaptor.capture()); RunRequest request = requestCaptor.getValue(); assertEquals(runRequest, request); assertEquals(RunResponse.Status.WORKFLOW_RUN_CREATED, response.getStatus()); }
/**
 * Reorders modifiers across the entire input text, delegating to the range-based
 * overload with a single range covering the whole text.
 *
 * @throws FormatterException if the input cannot be lexed
 */
static JavaInput reorderModifiers(String text) throws FormatterException {
  return reorderModifiers(
      new JavaInput(text), ImmutableList.of(Range.closedOpen(0, text.length())));
}
@Test public void subRange() throws FormatterException { String[] lines = { "class Test {", // " static public int a;", " static public int b;", "}", }; String input = Joiner.on('\n').join(lines); String substring = "static public int a"; int start = input.indexOf(substring); int end = start + substring.length(); String output = ModifierOrderer.reorderModifiers( new JavaInput(input), Arrays.asList(Range.closedOpen(start, end))) .getText(); assertThat(output).contains("public static int a;"); assertThat(output).contains("static public int b;"); }
@Override public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException { try { final AttributedList<Path> objects = new AttributedList<>(); Marker marker = new Marker(null, null); final String containerId = fileid.getVersionId(containerService.getContainer(directory)); // Seen placeholders final Map<String, Long> revisions = new HashMap<>(); boolean hasDirectoryPlaceholder = containerService.isContainer(directory); do { if(log.isDebugEnabled()) { log.debug(String.format("List directory %s with marker %s", directory, marker)); } final B2ListFilesResponse response; if(versioning.isEnabled()) { // In alphabetical order by file name, and by reverse of date/time uploaded for // versions of files with the same name. response = session.getClient().listFileVersions(containerId, marker.nextFilename, marker.nextFileId, chunksize, this.createPrefix(directory), String.valueOf(Path.DELIMITER)); } else { response = session.getClient().listFileNames(containerId, marker.nextFilename, chunksize, this.createPrefix(directory), String.valueOf(Path.DELIMITER)); } marker = this.parse(directory, objects, response, revisions); if(null == marker.nextFileId) { if(!response.getFiles().isEmpty()) { hasDirectoryPlaceholder = true; } } listener.chunk(directory, objects); } while(marker.hasNext()); if(!hasDirectoryPlaceholder && objects.isEmpty()) { if(log.isWarnEnabled()) { log.warn(String.format("No placeholder found for directory %s", directory)); } throw new NotfoundException(directory.getAbsolute()); } return objects; } catch(B2ApiException e) { throw new B2ExceptionMappingService(fileid).map("Listing directory {0} failed", e, directory); } catch(IOException e) { throw new DefaultIOExceptionMappingService().map(e); } }
@Test public void testListLexicographicSortOrderAssumption() throws Exception { final B2VersionIdProvider fileid = new B2VersionIdProvider(session); final Path directory = new B2DirectoryFeature(session, fileid).mkdir( new Path(String.format("test-%s", new AsciiRandomStringService().random()), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus()); assertTrue(new B2ObjectListService(session, fileid).list(directory, new DisabledListProgressListener()).isEmpty()); final List<String> files = Arrays.asList( "Z", "aa", "0a", "a", "AAA", "B", "~$a", ".c" ); for(String f : files) { new B2TouchFeature(session, fileid).touch(new Path(directory, f, EnumSet.of(Path.Type.file)), new TransferStatus()); } files.sort(session.getHost().getProtocol().getListComparator()); final AttributedList<Path> list = new B2ObjectListService(session, fileid).list(directory, new IndexedListProgressListener() { @Override public void message(final String message) { // } @Override public void visit(final AttributedList<Path> list, final int index, final Path file) { assertEquals(files.get(index), file.getName()); } }); for(int i = 0; i < list.size(); i++) { assertEquals(files.get(i), list.get(i).getName()); new B2DeleteFeature(session, fileid).delete(Collections.singletonList(list.get(i)), new DisabledLoginCallback(), new Delete.DisabledCallback()); } new B2DeleteFeature(session, fileid).delete(Collections.singletonList(directory), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
/**
 * Wraps the given throwable in a {@code UserCodeException}, returning the
 * throwable itself when it is already one (no double-wrapping).
 */
public static UserCodeException wrap(Throwable t) {
  return (t instanceof UserCodeException)
      ? (UserCodeException) t
      : new UserCodeException(t);
}
@Test public void robustAgainstEmptyStackTrace() { RuntimeException runtimeException = new RuntimeException("empty stack"); runtimeException.setStackTrace(new StackTraceElement[0]); RuntimeException wrapped = UserCodeException.wrap(runtimeException); assertEquals(runtimeException, wrapped.getCause()); }
// Positioned (cursor-based) DELETE is reported as unsupported by this metadata.
@Override
public boolean supportsPositionedDelete() {
    return false;
}
@Test void assertSupportsPositionedDelete() { assertFalse(metaData.supportsPositionedDelete()); }
/**
 * Runs the version-change reconciliation pipeline: fetch the pods, detect the
 * from/to Kafka versions from them, then prepare the resulting version change.
 *
 * @return future completing with the computed {@code KafkaVersionChange}
 */
public Future<KafkaVersionChange> reconcile() {
    return getPods()
            .compose(this::detectToAndFromVersions)
            .compose(i -> prepareVersionChange());
}
@Test public void testUpgradeWithAllVersion(VertxTestContext context) { VersionChangeCreator vcc = mockVersionChangeCreator( mockKafka(VERSIONS.defaultVersion().version(), VERSIONS.version(KafkaVersionTestUtils.PREVIOUS_KAFKA_VERSION).metadataVersion(), VERSIONS.defaultVersion().metadataVersion()), mockRos(mockUniformPods(VERSIONS.version(KafkaVersionTestUtils.PREVIOUS_KAFKA_VERSION).version())) ); Checkpoint async = context.checkpoint(); vcc.reconcile().onComplete(context.succeeding(c -> context.verify(() -> { assertThat(c.from(), is(VERSIONS.version(KafkaVersionTestUtils.PREVIOUS_KAFKA_VERSION))); assertThat(c.to(), is(VERSIONS.defaultVersion())); assertThat(c.metadataVersion(), is(VERSIONS.version(KafkaVersionTestUtils.PREVIOUS_KAFKA_VERSION).metadataVersion())); async.flag(); }))); }
/**
 * Executes the task synchronously on the calling thread and returns an
 * already-completed future holding either its result or the thrown exception.
 *
 * @throws RejectedExecutionException via the shutdown check when already shut down
 */
@Override
@Nonnull
public <T> Future<T> submit(@Nonnull Callable<T> task) {
    throwRejectedExecutionExceptionIfShutdown();
    try {
        return new CompletedFuture<>(task.call(), null);
    } catch (Exception e) {
        return new CompletedFuture<>(null, e);
    }
}
@Test void testSubmitRunnable() { final CompletableFuture<Thread> future = new CompletableFuture<>(); testTaskSubmissionBeforeShutdown( testInstance -> testInstance.submit(() -> future.complete(Thread.currentThread()))); assertThat(future).isCompletedWithValue(Thread.currentThread()); }
/**
 * Finds all entries in {@code jarFile} whose file name equals
 * {@code className + ".class"}, in any directory of the jar.
 *
 * @return matching entry paths as strings (possibly empty)
 */
static List<String> findClassFiles(JarFile jarFile, String className) {
  String filename = className + ".class";
  return jarFile.stream()
      .map(ZipEntry::getName)
      // Convert to Path so matching is done on the file-name component only.
      .map(Paths::get)
      .filter(byFilename(filename))
      .map(Path::toString)
      .collect(Collectors.toList());
}
@Test public void should_NOT_find_class_in_jar() throws IOException { assertThat(dummyJarFile).exists(); try (JarFile jarFile = new JarFile(dummyJarFile.toFile())) { List<String> classFiles = JarScanner.findClassFiles(jarFile, "NonExistingClass"); assertThat(classFiles).isEmpty(); } }
@Override
public DataSourceConfigDO getDataSourceConfig(Long id) {
    // If the id is the master id (0), return the built-in master data source config.
    if (Objects.equals(id, DataSourceConfigDO.ID_MASTER)) {
        return buildMasterDataSourceConfig();
    }
    // Otherwise read the config from the database.
    return dataSourceConfigMapper.selectById(id);
}
@Test
public void testGetDataSourceConfig_normal() {
    // Mock data
    DataSourceConfigDO dbDataSourceConfig = randomPojo(DataSourceConfigDO.class);
    dataSourceConfigMapper.insert(dbDataSourceConfig);// @Sql: insert an existing record first
    // Prepare parameters
    Long id = dbDataSourceConfig.getId();
    // Invoke
    DataSourceConfigDO dataSourceConfig = dataSourceConfigService.getDataSourceConfig(id);
    // Assert
    assertPojoEquals(dbDataSourceConfig, dataSourceConfig);
}
/**
 * Aggregates per-component statuses into one summary status.
 *
 * <p>The summary level is ERROR if any component is ERROR, else WARN if any is WARN,
 * else OK. The message lists the keys of all ERROR/WARN components, comma-separated.
 */
public static Status getSummaryStatus(Map<String, Status> statuses) {
    Level worst = Level.OK;
    StringBuilder problems = new StringBuilder();
    for (Map.Entry<String, Status> entry : statuses.entrySet()) {
        Level current = entry.getValue().getLevel();
        // Only ERROR and WARN components affect the summary.
        if (!Level.ERROR.equals(current) && !Level.WARN.equals(current)) {
            continue;
        }
        if (Level.ERROR.equals(current)) {
            worst = Level.ERROR;
        } else if (!Level.ERROR.equals(worst)) {
            worst = Level.WARN;
        }
        if (problems.length() > 0) {
            problems.append(',');
        }
        problems.append(entry.getKey());
    }
    return new Status(worst, problems.toString());
}
@Test void testGetSummaryStatus2() throws Exception { Status status1 = new Status(Status.Level.WARN); Status status2 = new Status(Status.Level.OK); Map<String, Status> statuses = new HashMap<String, Status>(); statuses.put("status1", status1); statuses.put("status2", status2); Status status = StatusUtils.getSummaryStatus(statuses); assertThat(status.getLevel(), is(Status.Level.WARN)); assertThat(status.getMessage(), containsString("status1")); assertThat(status.getMessage(), not(containsString("status2"))); }
public Map<String, List<PartitionInfo>> getAllTopicMetadata(Timer timer) { MetadataRequest.Builder request = MetadataRequest.Builder.allTopics(); return getTopicMetadata(request, timer); }
@Test public void testGetAllTopicsTimeout() { // since no response is prepared, the request should time out buildFetcher(); assignFromUser(singleton(tp0)); assertThrows(TimeoutException.class, () -> topicMetadataFetcher.getAllTopicMetadata(time.timer(50L))); }
public abstract IsmPrefixReaderIterator overKeyComponents(List<?> keyComponents) throws IOException;
@Test public void testReadMissingKeys() throws Exception { File tmpFile = tmpFolder.newFile(); List<IsmRecord<byte[]>> data = new ArrayList<>(); data.add(IsmRecord.<byte[]>of(ImmutableList.of(EMPTY, new byte[] {0x04}), EMPTY)); data.add(IsmRecord.<byte[]>of(ImmutableList.of(EMPTY, new byte[] {0x08}), EMPTY)); writeElementsToFile(data, tmpFile); IsmReader<byte[]> reader = new IsmReaderImpl<byte[]>( FileSystems.matchSingleFileSpec(tmpFile.getAbsolutePath()).resourceId(), CODER, cache); // Check that we got false with a key before all keys contained in the file. assertFalse(reader.overKeyComponents(ImmutableList.of(EMPTY, new byte[] {0x02})).start()); // Check that we got false with a key between two other keys contained in the file. assertFalse(reader.overKeyComponents(ImmutableList.of(EMPTY, new byte[] {0x06})).start()); // Check that we got false with a key that is after all keys contained in the file. assertFalse(reader.overKeyComponents(ImmutableList.of(EMPTY, new byte[] {0x10})).start()); }
public static Base64String wrap(final String base64String) { return new Base64String(base64String); }
@Test public void testNonValidBase64StringThrows() { assertThrows( RuntimeException.class, () -> Base64String.wrap("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqr")); }
/**
 * Runs index retention for {@code indexSet}: when the number of non-reopened indices
 * exceeds {@code maxNumberOfIndices}, the given retention action is executed for the
 * excess count. A null maximum disables retention entirely (logged as a warning).
 */
public void retain(IndexSet indexSet, @Nullable Integer maxNumberOfIndices, RetentionExecutor.RetentionAction action, String actionName) {
    final Map<String, Set<String>> deflectorIndices = indexSet.getAllIndexAliases();

    // Reopened indices are excluded from the count so they never trigger retention.
    final int indexCount = (int) deflectorIndices.keySet()
            .stream()
            .filter(indexName -> !indices.isReopened(indexName))
            .count();

    if (maxNumberOfIndices == null) {
        LOG.warn("No retention strategy configuration found, not running index retention!");
        return;
    }

    // Do we have more indices than the configured maximum?
    if (indexCount <= maxNumberOfIndices) {
        LOG.debug("Number of indices ({}) lower than limit ({}). Not performing any retention actions.",
                indexCount, maxNumberOfIndices);
        return;
    }

    // We have more indices than the configured maximum! Remove as many as needed.
    final int removeCount = indexCount - maxNumberOfIndices;
    final String msg = "Number of indices (" + indexCount + ") higher than limit (" + maxNumberOfIndices + "). "
            + "Running retention for " + removeCount + " indices.";
    LOG.info(msg);
    activityWriter.write(new Activity(msg, CountBasedRetentionExecutor.class));

    retentionExecutor.runRetention(indexSet, removeCount, action, actionName);
}
@Test public void shouldIgnoreWriteAliasWhenDeterminingRetainedIndices() { underTest.retain(indexSet, 5, action, "action"); verify(action, times(1)).retain(retainedIndexName.capture(), eq(indexSet)); assertThat(retainedIndexName.getValue()).containsExactly("test_1"); verify(activityWriter, times(2)).write(any(Activity.class)); }
@Override public ExecuteResult execute(final ServiceContext serviceContext, final ConfiguredKsqlPlan plan, final boolean restoreInProgress) { try { final ExecuteResult result = EngineExecutor .create(primaryContext, serviceContext, plan.getConfig()) .execute(plan.getPlan(), restoreInProgress); return result; } catch (final KsqlStatementException e) { throw e; } catch (final KsqlException e) { // add the statement text to the KsqlException throw new KsqlStatementException( e.getMessage(), e.getMessage(), plan.getPlan().getStatementText(), e.getCause() ); } }
@Test(expected = KsqlStatementException.class) public void shouldFailToCreateQueryIfSelectingFromNonExistentEntity() { // Given: setupKsqlEngineWithSharedRuntimeEnabled(); KsqlEngineTestUtil .execute( serviceContext, ksqlEngine, "select * from bar;", ksqlConfig, Collections.emptyMap() ); }
/**
 * Removes the named view.
 *
 * @param ifExists when true, a missing view is silently ignored
 * @throws QueryException when the view does not exist and {@code ifExists} is false
 */
public void removeView(String name, boolean ifExists) {
    // removeView returns the previous mapping; null means the view did not exist.
    if (relationsStorage.removeView(name) == null && !ifExists) {
        throw QueryException.error("View does not exist: " + name);
    }
}
@Test public void when_removesNonExistingViewWithIfExists_then_succeeds() { // given String name = "name"; given(relationsStorage.removeView(name)).willReturn(null); // when // then catalog.removeView(name, true); verifyNoInteractions(listener); }
/**
 * Enables statistics recording for caches built by this builder.
 *
 * @return this builder, for chaining
 * @throws IllegalStateException (via requireState) if statistics were already enabled
 */
@CanIgnoreReturnValue
public Caffeine<K, V> recordStats() {
  requireState(this.statsCounterSupplier == null, "Statistics recording was already set");
  statsCounterSupplier = ENABLED_STATS_COUNTER_SUPPLIER;
  return this;
}
@Test public void recordStats() { var builder = Caffeine.newBuilder().recordStats(); assertThat(builder.statsCounterSupplier).isEqualTo(Caffeine.ENABLED_STATS_COUNTER_SUPPLIER); assertThat(builder.build()).isNotNull(); }
/**
 * Resolves parameter references inside {@code resolvable}.
 *
 * <p>If the object introduces its own parameter scope, that scope is layered over this
 * resolver first; string leaves, non-string leaves and nested nodes are then resolved
 * with the (possibly scoped) resolver.
 */
public <T> void resolve(T resolvable) {
    ParamResolver resolver = this;
    // Layer the object's own scope over the current resolver, if it defines one.
    if (ParamScope.class.isAssignableFrom(resolvable.getClass())) {
        ParamScope newScope = (ParamScope) resolvable;
        resolver = newScope.applyOver(resolver);
    }
    resolveStringLeaves(resolvable, resolver);
    resolveNonStringLeaves(resolvable, resolver);
    resolveNodes(resolvable, resolver);
}
@Test public void shouldInterpolateLiteralEscapedSequences() { PipelineConfig pipelineConfig = PipelineConfigMother.createPipelineConfig("cruise", "dev", "ant"); pipelineConfig.setLabelTemplate("2.1-${COUNT}-###{foo}-bar-#{bar}"); new ParamResolver(new ParamSubstitutionHandlerFactory(params(param("foo", "pavan"), param("bar", "jj"))), fieldCache).resolve(pipelineConfig); assertThat(pipelineConfig.getLabelTemplate(), is("2.1-${COUNT}-#pavan-bar-jj")); }
/**
 * Records a get-latency sample for the given consumer group/topic.
 * The stats key includes the queue id only when per-queue stats are enabled.
 */
public void incGroupGetLatency(final String group, final String topic, final int queueId, final int incValue) {
    final String statsKey = enableQueueStat
        ? buildStatsKey(queueId, topic, group)
        : buildStatsKey(topic, group);
    this.statsTable.get(Stats.GROUP_GET_LATENCY).addRTValue(statsKey, incValue, 1);
}
@Test public void testIncGroupGetLatency() { brokerStatsManager.incGroupGetLatency(GROUP_NAME, TOPIC, 1, 1); String statsKey = String.format("%d@%s@%s", 1, TOPIC, GROUP_NAME); assertThat(brokerStatsManager.getStatsItem(GROUP_GET_LATENCY, statsKey).getValue().doubleValue()).isEqualTo(1L); }
/**
 * Decodes a Bigtable {@code Cell} value into the Java object for the given Beam
 * schema field type.
 *
 * <p>Fixed-width boolean/numeric types are length-checked first and decoded
 * big-endian from the raw bytes; DATETIME and STRING are decoded from the cell's
 * UTF-8 text form.
 *
 * @throws IllegalArgumentException if the value size does not match the type's
 *     expected width, or the type is unsupported
 * @throws IllegalStateException for logical types, which are not supported
 */
Object getCellValue(Cell cell, Schema.FieldType type) {
  ByteString cellValue = cell.getValue();
  int valueSize = cellValue.size();
  switch (type.getTypeName()) {
    case BOOLEAN:
      checkArgument(valueSize == 1, message("Boolean", 1));
      // Any non-zero byte is treated as true.
      return cellValue.toByteArray()[0] != 0;
    case BYTE:
      checkArgument(valueSize == 1, message("Byte", 1));
      return cellValue.toByteArray()[0];
    case INT16:
      checkArgument(valueSize == 2, message("Int16", 2));
      return Shorts.fromByteArray(cellValue.toByteArray());
    case INT32:
      checkArgument(valueSize == 4, message("Int32", 4));
      return Ints.fromByteArray(cellValue.toByteArray());
    case INT64:
      checkArgument(valueSize == 8, message("Int64", 8));
      return Longs.fromByteArray(cellValue.toByteArray());
    case FLOAT:
      checkArgument(valueSize == 4, message("Float", 4));
      // Stored as raw IEEE-754 bits.
      return Float.intBitsToFloat(Ints.fromByteArray(cellValue.toByteArray()));
    case DOUBLE:
      checkArgument(valueSize == 8, message("Double", 8));
      return Double.longBitsToDouble(Longs.fromByteArray(cellValue.toByteArray()));
    case DATETIME:
      // Parsed from the cell's UTF-8 text form.
      return DateTime.parse(cellValue.toStringUtf8());
    case STRING:
      return cellValue.toStringUtf8();
    case BYTES:
      return cellValue.toByteArray();
    case LOGICAL_TYPE:
      String identifier = checkArgumentNotNull(type.getLogicalType()).getIdentifier();
      throw new IllegalStateException("Unsupported logical type: " + identifier);
    default:
      throw new IllegalArgumentException(
          String.format("Unsupported cell value type '%s'.", type.getTypeName()));
  }
}
@Test public void shouldParseBooleanTypeTrueOne() { byte[] value = new byte[] {4}; assertEquals(true, PARSER.getCellValue(cell(value), BOOLEAN)); }
/**
 * Loads all trusted CSCA certificates from the repository and parses each
 * record's raw bytes into an {@code X509Certificate}.
 */
@Override
protected Collection<X509Certificate> getTrusted() {
    return repository.findTrustedCscaCertificates().stream().map(
            c -> X509Factory.toCertificate(c.getRaw())
    ).collect(Collectors.toList());
}
@Test public void shouldNotLoadCertificateIfNoTrustedInDocumentType() throws CertificateException, IOException { final Certificate rdw = loadCertificate("rdw/acc/csca.crt", true); final Certificate nik = loadCertificate("nik/tv/csca.crt", false); certificateRepo.save(rdw); certificateRepo.save(nik); certificateRepo.flush(); final Collection<X509Certificate> trusted = service.getTrusted(); assertEquals(1, trusted.size()); assertEquals(rdw.getSubject(), X509Factory.toCanonical(trusted.toArray(new X509Certificate[0])[0].getSubjectX500Principal())); }
// Converts this pre-join config back into a plain CacheConfig by copying into a
// fresh instance. NOTE(review): the second argument (false) presumably controls
// whether types are resolved during the copy — confirm against copy()'s contract.
CacheConfig<K, V> asCacheConfig() {
    return this.copy(new CacheConfig<>(), false);
}
@Test public void serializationSucceeds_whenKVTypesNotSpecified() { CacheConfig cacheConfig = newDefaultCacheConfig("test"); PreJoinCacheConfig preJoinCacheConfig = new PreJoinCacheConfig(cacheConfig); Data data = serializationService.toData(preJoinCacheConfig); PreJoinCacheConfig deserialized = serializationService.toObject(data); assertEquals(preJoinCacheConfig, deserialized); assertEquals(cacheConfig, deserialized.asCacheConfig()); }
/**
 * Lists the edit log files found among the files of {@code logDir}.
 *
 * @throws IOException if the directory cannot be listed
 */
public static List<EditLogFile> matchEditLogs(File logDir) throws IOException {
  return matchEditLogs(FileUtil.listFiles(logDir));
}
@Test(expected = IOException.class) public void testMatchEditLogInvalidDirThrowsIOException() throws IOException { File badDir = new File("does not exist"); FileJournalManager.matchEditLogs(badDir); }
/**
 * Returns the logical inverse of the given predicate by dispatching it through
 * the inverter visitor instance.
 *
 * @throws NullPointerException if {@code pred} is null
 */
public static FilterPredicate invert(FilterPredicate pred) {
  return Objects.requireNonNull(pred, "pred cannot be null").accept(INSTANCE);
}
@Test public void testBaseCases() { assertEquals(notEq(intColumn, 17), invert(eq(intColumn, 17))); assertEquals(eq(intColumn, 17), invert(notEq(intColumn, 17))); assertEquals(gtEq(intColumn, 17), invert(lt(intColumn, 17))); assertEquals(gt(intColumn, 17), invert(ltEq(intColumn, 17))); assertEquals(ltEq(intColumn, 17), invert(gt(intColumn, 17))); assertEquals(lt(intColumn, 17), invert(gtEq(intColumn, 17))); FilterPredicate andPos = and(eq(intColumn, 17), eq(doubleColumn, 12.0)); FilterPredicate andInv = or(notEq(intColumn, 17), notEq(doubleColumn, 12.0)); assertEquals(andInv, invert(andPos)); FilterPredicate orPos = or(eq(intColumn, 17), eq(doubleColumn, 12.0)); FilterPredicate orInv = and(notEq(intColumn, 17), notEq(doubleColumn, 12.0)); assertEquals(orPos, invert(orInv)); assertEquals(eq(intColumn, 17), invert(not(eq(intColumn, 17)))); UserDefined<Integer, DummyUdp> ud = userDefined(intColumn, DummyUdp.class); assertEquals(new LogicalNotUserDefined<>(ud), invert(ud)); assertEquals(ud, invert(not(ud))); assertEquals(ud, invert(new LogicalNotUserDefined<>(ud))); }
/**
 * Returns an initializer that loads the dynamic property source (if configured)
 * and registers it with the highest precedence in the application environment.
 */
public static ApplicationContextInitializer<ConfigurableApplicationContext> dynamicConfigPropertiesInitializer() {
    return appCtx -> new DynamicConfigOperations(appCtx)
        .loadDynamicPropertySource()
        // addFirst: dynamic properties override all other property sources.
        .ifPresent(source -> appCtx.getEnvironment().getPropertySources().addFirst(source));
}
@Test void initializerAddsDynamicPropertySourceIfAllEnvVarsAreSet() throws Exception { Path propsFilePath = tmpDir.resolve("props.yaml"); Files.writeString(propsFilePath, SAMPLE_YAML_CONFIG, StandardOpenOption.CREATE); MutablePropertySources propertySources = new MutablePropertySources(); propertySources.addFirst(new MapPropertySource("test", Map.of("testK", "testV"))); when(envMock.getPropertySources()).thenReturn(propertySources); mockEnvWithVars(Map.of( DYNAMIC_CONFIG_ENABLED_ENV_PROPERTY, "true", DYNAMIC_CONFIG_PATH_ENV_PROPERTY, propsFilePath.toString() )); DynamicConfigOperations.dynamicConfigPropertiesInitializer().initialize(ctxMock); assertThat(propertySources.size()).isEqualTo(2); assertThat(propertySources.stream()) .element(0) .extracting(PropertySource::getName) .isEqualTo("dynamicProperties"); }
/**
 * Creates a counter named by the {@code counterName} path variable from the
 * configuration carried in the request body.
 *
 * <p>Responds NOT_MODIFIED when the counter manager reports the counter was not
 * created under that name.
 *
 * @throws RestResponseException when the body is missing or cannot be parsed
 */
private CompletionStage<RestResponse> createCounter(RestRequest request) throws RestResponseException {
   NettyRestResponse.Builder responseBuilder = invocationHelper.newResponse(request);
   String counterName = request.variables().get("counterName");

   String contents = request.contents().asString();
   if (contents == null || contents.isEmpty()) {
      throw Log.REST.missingContent();
   }
   // Null configuration means the body could not be parsed into a counter configuration.
   CounterConfiguration configuration = createCounterConfiguration(contents);
   if (configuration == null) {
      throw Log.REST.invalidContent();
   }
   return invocationHelper.getCounterManager()
         .defineCounterAsync(counterName, configuration)
         .thenApply(created -> created ? responseBuilder.build() : responseBuilder.status(NOT_MODIFIED)
               .entity("Unable to create counter: " + counterName)
               .build());
}
@Test public void testCounterCreation() { String counterName = "counter-creation"; createCounter(counterName, CounterConfiguration.builder(CounterType.WEAK).initialValue(1).build()); assertThat(doCounterCreateRequest(counterName, CounterConfiguration.builder(CounterType.WEAK).initialValue(1).build())).isNotModified(); assertThat(doCounterCreateRequest(counterName, CounterConfiguration.builder(CounterType.BOUNDED_STRONG).initialValue(2).build())).isNotModified(); }
/**
 * Returns the resources currently acquired for the given job, or an empty
 * collection when the job is not tracked.
 *
 * @throws NullPointerException if {@code jobId} is null
 */
@Override
public Collection<ResourceRequirement> getAcquiredResources(JobID jobId) {
    Preconditions.checkNotNull(jobId);
    final JobScopedResourceTracker tracker = trackers.get(jobId);
    if (tracker == null) {
        return Collections.emptyList();
    }
    return tracker.getAcquiredResources();
}
@Test void testGetAcquiredResources() { DefaultResourceTracker tracker = new DefaultResourceTracker(); ResourceRequirement requirement1 = ResourceRequirement.create(ResourceProfile.ANY, 1); ResourceRequirement requirement2 = ResourceRequirement.create(ResourceProfile.ANY, 2); tracker.notifyAcquiredResource(JOB_ID_1, requirement1.getResourceProfile()); for (int x = 0; x < requirement2.getNumberOfRequiredSlots(); x++) { tracker.notifyAcquiredResource(JOB_ID_2, requirement2.getResourceProfile()); } assertThat(tracker.getAcquiredResources(JOB_ID_1)).contains(requirement1); assertThat(tracker.getAcquiredResources(JOB_ID_2)).contains(requirement2); tracker.notifyLostResource(JOB_ID_1, requirement1.getResourceProfile()); assertThat(tracker.getAcquiredResources(JOB_ID_1)).isEmpty(); }
/**
 * Recursively deletes the directory at {@code path} (delegates to commons-io).
 *
 * @throws IOException if deletion fails
 */
public static void deleteDirectory(String path) throws IOException {
    FileUtils.deleteDirectory(new File(path));
}
@Test void deleteDirectory() throws IOException { Path diskutils = Paths.get(TMP_PATH, "diskutils"); File file = diskutils.toFile(); if (!file.exists()) { file.mkdir(); } assertTrue(file.exists()); DiskUtils.deleteDirectory(diskutils.toString()); assertFalse(file.exists()); }
/**
 * Orders tokens by their start index, ascending.
 *
 * <p>Uses {@link Integer#compare} instead of subtraction: {@code a - b} can overflow
 * for extreme index values and silently invert the ordering.
 */
@Override
public final int compareTo(final SQLToken sqlToken) {
    return Integer.compare(startIndex, sqlToken.startIndex);
}
@Test void assertCompareToGreater() { assertTrue(new SQLTokenFixture(11, 20).compareTo(new SQLTokenFixture(0, 10)) > 0); }
@Override public IClonableStepAnalyzer newInstance() { return new GetXMLDataStepAnalyzer(); }
@Test
public void testNewInstance() {
    GetXMLDataStepAnalyzer analyzer = new GetXMLDataStepAnalyzer();
    // Class tokens are singletons, so identity comparison is the idiomatic check;
    // also fixes the missing spacing in the original declaration/assertion.
    assertTrue( analyzer.newInstance().getClass() == GetXMLDataStepAnalyzer.class );
}
// Returns the current controller registrations map (keyed by Integer id —
// presumably the controller/node id; confirm at the call sites).
public Map<Integer, ControllerRegistration> controllers() {
    return controllers;
}
@Test public void testInitialControllers() { ControllerRegistrationsPublisher publisher = new ControllerRegistrationsPublisher(); assertEquals(Collections.emptyMap(), publisher.controllers()); }
@Override public void visit(Entry entry) { Component component = componentProvider.createComponent(entry); if(component != null){ final EntryAccessor entryAccessor = new EntryAccessor(); entryAccessor.setComponent(entry, component); final AFreeplaneAction action = entryAccessor.getAction(entry); if (action != null) { final ActionEnabler actionEnabler = new ActionEnabler(component); action.addPropertyChangeListener(actionEnabler); entry.setAttribute(actionEnabler.getClass(), actionEnabler); } final JToolBar container = (JToolBar) new EntryAccessor().getAncestorComponent(entry); GridBagConstraints constraints = layoutConstraintsForEntry(entry, component); container.add(component, constraints); } }
@Test public void createsToolbarButtonWithAction() { Entry actionEntry = new Entry(); final AFreeplaneAction action = Mockito.mock(AFreeplaneAction.class); new EntryAccessor().setAction(actionEntry, action); Entry toolbarEntry = new Entry(); final FreeplaneToolBar toolbar = new FreeplaneToolBar("toolbar", SwingConstants.HORIZONTAL); new EntryAccessor().setComponent(toolbarEntry, toolbar); toolbarEntry.addChild(actionEntry); final JToolbarComponentBuilder toolbarActionGroupBuilder = new JToolbarComponentBuilder(resourceAccessorMock); toolbarActionGroupBuilder.visit(actionEntry); JButton button = (JButton)new EntryAccessor().getComponent(actionEntry); assertThat(button.getAction(), CoreMatchers.<Action>equalTo(action)); assertThat(button.getParent(), CoreMatchers.equalTo((Container)toolbar)); }
/**
 * Returns whether the image is considered pushed: it is pushed unless skipping
 * existing images is enabled AND a manifest for it is already present.
 */
@VisibleForTesting
boolean isImagePushed(Optional<ManifestAndDigest<ManifestTemplate>> manifestResult) {
  // De Morgan form of !(skip && present).
  return !JibSystemProperties.skipExistingImages() || !manifestResult.isPresent();
}
@Test
public void testIsImagePushed_skipExistingImageDisabledAndManifestPresent() {
  // Use a real Optional wrapping a mocked manifest instead of mocking the final
  // class Optional itself, which is unreliable across Mockito configurations.
  @SuppressWarnings("unchecked")
  Optional<ManifestAndDigest<ManifestTemplate>> manifestResult =
      Optional.of(Mockito.mock(ManifestAndDigest.class));
  System.setProperty(JibSystemProperties.SKIP_EXISTING_IMAGES, "false");
  try {
    Assert.assertTrue(stepsRunner.isImagePushed(manifestResult));
  } finally {
    // Restore the property so it does not leak into other tests.
    System.clearProperty(JibSystemProperties.SKIP_EXISTING_IMAGES);
  }
}
// Looks up the value address for the key by delegating to the base lookup.
// NOTE(review): the second argument's meaning comes from super.get0 — looks like a
// default/secondary key component of 0; confirm against the base class.
@Override
public long get(long key) {
    return super.get0(key, 0);
}
@Test public void testPutGetMany() { final int k = 1000; for (int i = 1; i <= k; i++) { long key = (long) i; insert(key); } for (int i = 1; i <= k; i++) { long key = (long) i; long valueAddress = hsa.get(key); verifyValue(key, valueAddress); } }
/**
 * Returns daily log statistics matching the request's filters.
 * Start/end dates are converted to the system default time zone before querying.
 */
@ExecuteOn(TaskExecutors.IO)
@Post(uri = "logs/daily")
@Operation(tags = {"Stats"}, summary = "Get daily statistics for logs")
public List<LogStatistics> logsDailyStatistics(@Body @Valid LogStatisticRequest logStatisticRequest) {
    return logRepositoryInterface.statistics(
        logStatisticRequest.q(),
        tenantService.resolveTenant(),
        logStatisticRequest.namespace(),
        logStatisticRequest.flowId(),
        logStatisticRequest.logLevel(),
        logStatisticRequest.startDate() != null ? logStatisticRequest.startDate().withZoneSameInstant(ZoneId.systemDefault()) : null,
        logStatisticRequest.endDate() != null ? logStatisticRequest.endDate().withZoneSameInstant(ZoneId.systemDefault()) : null,
        null
    );
}
@Test void logsDailyStatistics() { var dailyStatistics = client.toBlocking().retrieve( HttpRequest .POST("/api/v1/stats/logs/daily", new StatsController.LogStatisticRequest(null, null, null, null, ZonedDateTime.now().minusDays(1), ZonedDateTime.now())) .contentType(MediaType.APPLICATION_JSON), Argument.listOf(LogStatistics.class) ); assertThat(dailyStatistics, notNullValue()); }
/**
 * Converts the list to a {@code String[]}, returning the list's backing array
 * directly when it is an exactly-sized String[] (avoiding a copy), and copying
 * otherwise.
 */
public static String[] toStringArray(ObjectArrayList<String> stringArrayList) {
    Object backing = stringArrayList.elements();
    if (backing instanceof String[] && ((String[]) backing).length == stringArrayList.size()) {
        return (String[]) backing;
    }
    return stringArrayList.toArray(new String[0]);
}
@Test public void testToStringArray() { // Test empty list ObjectArrayList<String> stringArrayList = new ObjectArrayList<String>(); String[] stringArray = ArrayListUtils.toStringArray(stringArrayList); assertEquals(stringArray.length, 0); // Test list with one element stringArrayList.add("1"); stringArray = ArrayListUtils.toStringArray(stringArrayList); assertEquals(stringArray.length, 1); assertEquals(stringArray[0], "1"); // Test list with multiple elements stringArrayList.add("2"); stringArrayList.add("3"); stringArray = ArrayListUtils.toStringArray(stringArrayList); assertEquals(stringArray.length, 3); assertEquals(stringArray[0], "1"); assertEquals(stringArray[1], "2"); assertEquals(stringArray[2], "3"); }
/**
 * Replaces the member id of the static member identified by {@code groupInstanceId}.
 *
 * <p>The old member is removed, any pending join/sync futures registered under the
 * old id are completed immediately with FENCED_INSTANCE_ID, and a new member carrying
 * over all of the old member's state is registered under the new id. Leadership is
 * transferred to the new id when the old member was the leader.
 *
 * @return the newly created member
 * @throws IllegalArgumentException if no member exists under {@code oldMemberId}
 */
public ClassicGroupMember replaceStaticMember(
    String groupInstanceId,
    String oldMemberId,
    String newMemberId
) {
    ClassicGroupMember removedMember = members.remove(oldMemberId);
    if (removedMember == null) {
        throw new IllegalArgumentException("Cannot replace non-existing member id " + oldMemberId);
    }

    // Fence potential duplicate member immediately if someone awaits join/sync future.
    JoinGroupResponseData joinGroupResponse = new JoinGroupResponseData()
        .setMembers(Collections.emptyList())
        .setMemberId(oldMemberId)
        .setProtocolName(null)
        .setProtocolType(null)
        .setLeader(NO_LEADER)
        .setSkipAssignment(false)
        .setErrorCode(Errors.FENCED_INSTANCE_ID.code());
    completeJoinFuture(removedMember, joinGroupResponse);

    SyncGroupResponseData syncGroupResponse = new SyncGroupResponseData()
        .setAssignment(new byte[0])
        .setProtocolName(null)
        .setProtocolType(null)
        .setErrorCode(Errors.FENCED_INSTANCE_ID.code());
    completeSyncFuture(removedMember, syncGroupResponse);

    // Re-register the member under the new id, carrying over all prior state.
    ClassicGroupMember newMember = new ClassicGroupMember(
        newMemberId,
        removedMember.groupInstanceId(),
        removedMember.clientId(),
        removedMember.clientHost(),
        removedMember.rebalanceTimeoutMs(),
        removedMember.sessionTimeoutMs(),
        removedMember.protocolType(),
        removedMember.supportedProtocols(),
        removedMember.assignment()
    );
    members.put(newMemberId, newMember);

    // Transfer leadership if the replaced member was the leader.
    if (isLeader(oldMemberId)) {
        leaderId = Optional.of(newMemberId);
    }

    staticMembers.put(groupInstanceId, newMemberId);
    return newMember;
}
@Test
public void testReplaceGroupInstanceWithNonExistingMember() {
    // Replacing a member id that is not in the group must be rejected.
    final String replacementId = "newMemberId";
    assertThrows(
        IllegalArgumentException.class,
        () -> group.replaceStaticMember(groupInstanceId, memberId, replacementId)
    );
}
/**
 * Widens any {@link Number} to a {@link BigDecimal}.
 * A BigDecimal input is returned as-is; every other Number goes through
 * doubleValue(), so very large longs may lose precision by design.
 */
public static BigDecimal asBigDecimal(Number num) {
    if (num instanceof BigDecimal) {
        return (BigDecimal) num;
    }
    return BigDecimal.valueOf(num.doubleValue());
}
@Test
public void asBigDecimal() {
    final BigDecimal expected = new BigDecimal("1.0");
    // Integral and floating inputs are widened through doubleValue().
    assertThat(OperatorUtils.asBigDecimal(1)).isEqualTo(expected);
    assertThat(OperatorUtils.asBigDecimal(1.0)).isEqualTo(expected);
    // A BigDecimal input passes through unchanged.
    assertThat(OperatorUtils.asBigDecimal(new BigDecimal("1.0"))).isEqualTo(expected);
}
/**
 * Deliberate no-op: swallows the given throwable without any side effect.
 * Used at call sites where a failure is intentionally irrelevant; accepting
 * {@code null} is allowed. The Sonar rule for unused parameters is suppressed
 * because the parameter exists only for call-site readability.
 */
@SuppressWarnings("java:S1172")
public static void ignore(Throwable t) {
}
@Test
public void test_ignore_whenNull() {
    // Must be a silent no-op even for a null throwable (no NPE).
    ignore(null);
}
/**
 * Convenience overload: resolves the schema for {@code topic} using the runtime
 * class of {@code object} rather than an explicit {@link Class}.
 * Delegates to {@code getSchema(String, Class, String, boolean)}.
 */
public Schema<?> getSchema(String topic, Object object, String schemaTypeOrClassName, boolean input) {
    return getSchema(topic, object.getClass(), schemaTypeOrClassName, input);
}
@Test public void testGetSchema() { TopicSchema topicSchema = new TopicSchema(null, Thread.currentThread().getContextClassLoader()); String TOPIC = "public/default/test"; Schema<?> schema = topicSchema.getSchema(TOPIC + "1", DummyClass.class, Optional.of(SchemaType.JSON)); assertEquals(schema.getClass(), JSONSchema.class); schema = topicSchema.getSchema(TOPIC + "2", DummyClass.class, Optional.of(SchemaType.AVRO)); assertEquals(schema.getClass(), AvroSchema.class); // use an arbitrary protobuf class for testing purpose schema = topicSchema.getSchema(TOPIC + "3", Request.ServiceRequest.class, Optional.of(SchemaType.PROTOBUF)); assertEquals(schema.getClass(), ProtobufSchema.class); schema = topicSchema .getSchema(TOPIC + "4", Request.ServiceRequest.class, Optional.of(SchemaType.PROTOBUF_NATIVE)); assertEquals(schema.getClass(), ProtobufNativeSchema.class); }
/**
 * Creates an unconfigured {@code Read} for {@link JmsRecord}s with defaults:
 * unbounded record count, a serializable-record coder, the default close timeout,
 * no deduplication, and a message mapper that converts JMS {@link TextMessage}s
 * (payload plus all JMS headers and properties) into {@link JmsRecord}s.
 */
public static Read<JmsRecord> read() {
    return new AutoValue_JmsIO_Read.Builder<JmsRecord>()
        .setMaxNumRecords(Long.MAX_VALUE)
        .setCoder(SerializableCoder.of(JmsRecord.class))
        .setCloseTimeout(DEFAULT_CLOSE_TIMEOUT)
        .setRequiresDeduping(false)
        .setMessageMapper(
            new MessageMapper<JmsRecord>() {
                @Override
                public JmsRecord mapMessage(Message message) throws Exception {
                    // The default mapper supports only TextMessage payloads; any other
                    // message type fails with a ClassCastException here.
                    TextMessage textMessage = (TextMessage) message;
                    Map<String, Object> properties = new HashMap<>();
                    @SuppressWarnings("rawtypes")
                    Enumeration propertyNames = textMessage.getPropertyNames();
                    // Copy every JMS property into a plain map carried by the record.
                    while (propertyNames.hasMoreElements()) {
                        String propertyName = (String) propertyNames.nextElement();
                        properties.put(propertyName, textMessage.getObjectProperty(propertyName));
                    }

                    return new JmsRecord(
                        textMessage.getJMSMessageID(),
                        textMessage.getJMSTimestamp(),
                        textMessage.getJMSCorrelationID(),
                        textMessage.getJMSReplyTo(),
                        textMessage.getJMSDestination(),
                        textMessage.getJMSDeliveryMode(),
                        textMessage.getJMSRedelivered(),
                        textMessage.getJMSType(),
                        textMessage.getJMSExpiration(),
                        textMessage.getJMSPriority(),
                        properties,
                        textMessage.getText());
                }
            })
        .build();
}
@Test public void testSplitForTopic() throws Exception { JmsIO.Read read = JmsIO.read().withTopic(TOPIC); PipelineOptions pipelineOptions = PipelineOptionsFactory.create(); int desiredNumSplits = 5; JmsIO.UnboundedJmsSource initialSource = new JmsIO.UnboundedJmsSource(read); List<JmsIO.UnboundedJmsSource> splits = initialSource.split(desiredNumSplits, pipelineOptions); // in the case of a topic, we can have only a unique subscriber on the topic per pipeline // else it means we can have duplicate messages (all subscribers on the topic receive every // message). // So, whatever the desizedNumSplits is, the actual number of splits should be 1. assertEquals(1, splits.size()); }
/**
 * Decides whether the SELECT must be executed through SQL federation instead of
 * being pushed down to the sharding executor, collecting the data nodes of all
 * involved sharded tables as a side effect.
 *
 * @return true when federation is required
 */
@Override
public boolean decide(final SelectStatementContext selectStatementContext, final List<Object> parameters,
                      final RuleMetaData globalRuleMetaData, final ShardingSphereDatabase database,
                      final ShardingRule rule, final Collection<DataNode> includedDataNodes) {
    // Only sharded logic tables are relevant; statements without them never need federation.
    Collection<String> tableNames = rule.getShardingLogicTableNames(selectStatementContext.getTablesContext().getTableNames());
    if (tableNames.isEmpty()) {
        return false;
    }
    includedDataNodes.addAll(getTableDataNodes(rule, tableNames, database));
    // Constructs the sharding executor cannot push down always require federation.
    if (selectStatementContext.isContainsSubquery() || selectStatementContext.isContainsHaving()
            || selectStatementContext.isContainsCombine() || selectStatementContext.isContainsPartialDistinctAggregation()) {
        return true;
    }
    // A non-join query, or a join confined to a single data source, can be routed directly.
    if (!selectStatementContext.isContainsJoinQuery() || rule.isAllTablesInSameDataSource(tableNames)) {
        return false;
    }
    // At this point the statement is a cross-data-source join: federation is needed unless
    // every involved table pair is bound. (The original code special-cased table counts of
    // 1 and >1, but both branches reduced to exactly this expression.)
    return !rule.isAllBindingTables(database, selectStatementContext, tableNames);
}
@Test
void assertDecideWhenContainsSubquery() {
    ShardingRule shardingRule = createShardingRule();
    SelectStatementContext statementContext = createStatementContext();
    // A subquery always forces federation, regardless of the involved tables.
    when(statementContext.isContainsSubquery()).thenReturn(true);

    Collection<DataNode> collectedDataNodes = new HashSet<>();
    boolean decision = new ShardingSQLFederationDecider().decide(
        statementContext, Collections.emptyList(), mock(RuleMetaData.class),
        createDatabase(shardingRule), shardingRule, collectedDataNodes);

    assertTrue(decision);
    // The data nodes of the sharded tables must still have been collected.
    assertThat(collectedDataNodes.size(), is(4));
}
/**
 * Converts every column of the logical schema into a {@link FieldInfo}.
 *
 * @throws IllegalArgumentException if the schema has no columns
 */
public static List<FieldInfo> buildSourceSchemaEntity(final LogicalSchema schema) {
    if (schema.columns().isEmpty()) {
        throw new IllegalArgumentException("Root schema should contain columns: " + schema);
    }
    return schema.columns().stream()
        .map(column -> EntityUtil.toFieldInfo(column))
        .collect(Collectors.toList());
}
@Test public void shouldSupportSchemasWithExtractedHeaderColumns() { // Given: final LogicalSchema schema = LogicalSchema.builder() .headerColumn(ColumnName.of("field1"), Optional.of("abc")) .build(); // When: final List<FieldInfo> fields = EntityUtil.buildSourceSchemaEntity(schema); // Then: assertThat(fields, hasSize(1)); assertThat(fields.get(0).getName(), equalTo("field1")); assertThat(fields.get(0).getSchema().getTypeName(), equalTo("BYTES")); assertThat(fields.get(0).getType(), equalTo(Optional.of(FieldType.HEADER))); assertThat(fields.get(0).getHeaderKey(), equalTo(Optional.of("abc"))); }
@Override
public Expression createExpression(Expression source, String expression, Object[] properties) {
    // Delegates with a hard-coded trailing 'false' flag; its exact meaning is defined
    // by doCreateJsonPathExpression — presumably it disables some optional behaviour
    // (TODO confirm against doCreateJsonPathExpression's signature).
    return doCreateJsonPathExpression(source, expression, properties, false);
}
@Test public void testDontUnpackJsonArray() { Exchange exchange = new DefaultExchange(context); exchange.getIn().setBody(new File("src/test/resources/books.json")); JsonPathLanguage language = (JsonPathLanguage) context.resolveLanguage("jsonpath"); Expression expression = language.createExpression("$.store.book", new Object[] { null, null, null, null, false, true }); String json = expression.evaluate(exchange, String.class); // check that an array is returned, not a single object assertTrue(json.startsWith("[") && json.endsWith("]")); }
public Publisher<V> iterator() {
    // Iterate from the head (index 0) with the direction flag set to true
    // — presumably forward/ascending order; see iterator(int, boolean).
    return iterator(0, true);
}
@Test
public void testListIteratorIndex() {
    RListRx<Integer> list = redisson.getList("list2");
    // Populate the list, then verify the iterator yields the same sequence.
    int[] values = {1, 2, 3, 4, 5, 0, 7, 8, 0, 10};
    for (int value : values) {
        sync(list.add(value));
    }

    Iterator<Integer> iterator = toIterator(list.iterator());
    for (int expected : values) {
        Assertions.assertTrue(expected == iterator.next());
    }
    Assertions.assertFalse(iterator.hasNext());
}
/**
 * Convenience overload for a single expected state handle type: wraps the class
 * in a one-element array and delegates to the array-based overload.
 */
@SuppressWarnings("unchecked")
public static RuntimeException unexpectedStateHandleException(
        Class<? extends StateObject> expectedStateHandleClass,
        Class<? extends StateObject> actualStateHandleClass) {
    return unexpectedStateHandleException(
            new Class[] {expectedStateHandleClass}, actualStateHandleClass);
}
@Test
@SuppressWarnings("unchecked")
void unexpectedStateExceptionForMultipleExpectedTypes() {
    // The message must enumerate every expected handle class — even duplicates —
    // followed by the actual class and the restore-mismatch hint.
    Exception exception =
            StateUtil.unexpectedStateHandleException(
                    new Class[] {KeyGroupsStateHandle.class, KeyGroupsStateHandle.class},
                    KeyGroupsStateHandle.class);

    assertThat(exception.getMessage())
            .contains(
                    "Unexpected state handle type, expected one of: class org.apache.flink.runtime.state.KeyGroupsStateHandle, class org.apache.flink.runtime.state.KeyGroupsStateHandle, but found: class org.apache.flink.runtime.state.KeyGroupsStateHandle. This can mostly happen when a different StateBackend from the one that was used for taking a checkpoint/savepoint is used when restoring.");
}
@Override
public boolean isFinished() {
    // Always true: this object represents an already-completed attempt
    // (reconstructed from history), so it can never be in-flight.
    return true;
}
@Test (timeout=5000)
public void testCompletedTaskAttempt(){
    // Build a mocked history record for a finished reduce attempt.
    TaskAttemptInfo info = mock(TaskAttemptInfo.class);
    when(info.getRackname()).thenReturn("Rackname");
    when(info.getShuffleFinishTime()).thenReturn(11L);
    when(info.getSortFinishTime()).thenReturn(12L);
    when(info.getShufflePort()).thenReturn(10);

    TaskAttemptID attemptId = new TaskAttemptID(
        new TaskID(new JobID("12345", 0), TaskType.REDUCE, 0), 0);
    when(info.getAttemptId()).thenReturn(attemptId);

    // The wrapper must surface the mocked values and report a finished CLEANUP phase.
    CompletedTaskAttempt attempt = new CompletedTaskAttempt(null, info);
    assertEquals("Rackname", attempt.getNodeRackName());
    assertEquals(Phase.CLEANUP, attempt.getPhase());
    assertTrue(attempt.isFinished());
    assertEquals(11L, attempt.getShuffleFinishTime());
    assertEquals(12L, attempt.getSortFinishTime());
    assertEquals(10, attempt.getShufflePort());
}
/**
 * Reads a boolean-valued label from a resource's metadata.
 * Falls back to {@code defaultValue} when the resource, its metadata, its label
 * map, or the label value itself is absent; a present value is parsed with
 * {@code parseBoolean}, so anything other than "true" yields false.
 */
public static boolean booleanLabel(HasMetadata resource, String label, boolean defaultValue) {
    if (resource == null || resource.getMetadata() == null || resource.getMetadata().getLabels() == null) {
        return defaultValue;
    }
    String raw = resource.getMetadata().getLabels().get(label);
    return raw == null ? defaultValue : parseBoolean(raw);
}
@Test
public void testBooleanLabel() {
    final String label = "my-label";

    // Null resource / missing metadata / missing label / null value: default wins.
    assertThat(Labels.booleanLabel(null, label, true), is(true));
    assertThat(Labels.booleanLabel(new PodBuilder().build(), label, true), is(true));
    assertThat(Labels.booleanLabel(new PodBuilder().withNewMetadata().withName("my-pod").endMetadata().build(), label, true), is(true));
    assertThat(Labels.booleanLabel(new PodBuilder().withNewMetadata().withName("my-pod").addToLabels("not-my-label", "false").endMetadata().build(), label, true), is(true));
    assertThat(Labels.booleanLabel(new PodBuilder().withNewMetadata().withName("my-pod").addToLabels(label, null).endMetadata().build(), label, true), is(true));

    // A present value is parsed: "true" is true, anything else
    // (including garbage like "potato") parses to false.
    assertThat(Labels.booleanLabel(new PodBuilder().withNewMetadata().withName("my-pod").addToLabels(label, "true").endMetadata().build(), label, true), is(true));
    assertThat(Labels.booleanLabel(new PodBuilder().withNewMetadata().withName("my-pod").addToLabels(label, "potato").endMetadata().build(), label, true), is(false));
    assertThat(Labels.booleanLabel(new PodBuilder().withNewMetadata().withName("my-pod").addToLabels(label, "false").endMetadata().build(), label, true), is(false));
}
/**
 * Applies a new task assignment from a rebalance. Existing tasks that are still
 * owned are kept (input partitions updated / resumed), tasks whose active/standby
 * role flipped are recycled, tasks no longer owned are closed, and whatever
 * remains in the to-create maps afterwards is created fresh. Collected failures
 * are rethrown at the end via maybeThrowTaskExceptions.
 *
 * @param activeTasks  newly assigned active tasks with their input partitions
 * @param standbyTasks newly assigned standby tasks with their input partitions
 */
public void handleAssignment(final Map<TaskId, Set<TopicPartition>> activeTasks,
                             final Map<TaskId, Set<TopicPartition>> standbyTasks) {
    log.info("Handle new assignment with:\n" +
                 "\tNew active tasks: {}\n" +
                 "\tNew standby tasks: {}\n" +
                 "\tExisting active tasks: {}\n" +
                 "\tExisting standby tasks: {}",
             activeTasks.keySet(), standbyTasks.keySet(), activeTaskIds(), standbyTaskIds());

    topologyMetadata.addSubscribedTopicsFromAssignment(
        activeTasks.values().stream().flatMap(Collection::stream).collect(Collectors.toSet()),
        logPrefix
    );

    // Mutable working copies: entries are removed as existing tasks are rectified,
    // leaving only the tasks that genuinely need to be created.
    final Map<TaskId, Set<TopicPartition>> activeTasksToCreate = new HashMap<>(activeTasks);
    final Map<TaskId, Set<TopicPartition>> standbyTasksToCreate = new HashMap<>(standbyTasks);
    final Map<Task, Set<TopicPartition>> tasksToRecycle = new HashMap<>();
    final Set<Task> tasksToCloseClean = new TreeSet<>(Comparator.comparing(Task::id));

    // Only lock tasks that also appear in the new assignment.
    final Set<TaskId> tasksToLock =
        tasks.allTaskIds().stream()
            .filter(x -> activeTasksToCreate.containsKey(x) || standbyTasksToCreate.containsKey(x))
            .collect(Collectors.toSet());
    maybeLockTasks(tasksToLock);

    // first put aside those unrecognized tasks because of unknown named-topologies
    tasks.clearPendingTasksToCreate();
    tasks.addPendingActiveTasksToCreate(pendingTasksToCreate(activeTasksToCreate));
    tasks.addPendingStandbyTasksToCreate(pendingTasksToCreate(standbyTasksToCreate));

    // first rectify all existing tasks:
    // 1. for tasks that are already owned, just update input partitions / resume and skip re-creating them
    // 2. for tasks that have changed active/standby status, just recycle and skip re-creating them
    // 3. otherwise, close them since they are no longer owned
    final Map<TaskId, RuntimeException> failedTasks = new LinkedHashMap<>();
    if (stateUpdater == null) {
        handleTasksWithoutStateUpdater(activeTasksToCreate, standbyTasksToCreate, tasksToRecycle, tasksToCloseClean);
    } else {
        handleTasksWithStateUpdater(
            activeTasksToCreate,
            standbyTasksToCreate,
            tasksToRecycle,
            tasksToCloseClean,
            failedTasks
        );
        failedTasks.putAll(collectExceptionsAndFailedTasksFromStateUpdater());
    }

    final Map<TaskId, RuntimeException> taskCloseExceptions = closeAndRecycleTasks(tasksToRecycle, tasksToCloseClean);

    maybeUnlockTasks(tasksToLock);

    failedTasks.putAll(taskCloseExceptions);
    maybeThrowTaskExceptions(failedTasks);

    createNewTasks(activeTasksToCreate, standbyTasksToCreate);
}
@Test
public void shouldClassifyExistingTasksWithoutStateUpdater() {
    final TaskManager taskManager = setUpTaskManager(ProcessingMode.AT_LEAST_ONCE, false);

    final Map<TaskId, Set<TopicPartition>> runningActive = mkMap(mkEntry(taskId01, mkSet(t1p1)));
    final Map<TaskId, Set<TopicPartition>> standby = mkMap(mkEntry(taskId02, mkSet(t2p2)));
    final Map<TaskId, Set<TopicPartition>> restoringActive = mkMap(mkEntry(taskId03, mkSet(t1p3)));

    // The full active assignment is the union of running and restoring tasks.
    final Map<TaskId, Set<TopicPartition>> allActive = new HashMap<>(runningActive);
    allActive.putAll(restoringActive);

    handleAssignment(runningActive, standby, restoringActive);

    taskManager.handleAssignment(allActive, standby);

    // Without a state updater the assignment must be handled entirely in-line.
    verifyNoInteractions(stateUpdater);
}
/**
 * Deletes the regular file at {@code path} (after UFS-prefix stripping).
 * Returns false for directories and non-existent paths instead of deleting.
 */
@Override
public boolean deleteFile(String path) throws IOException {
    final File target = new File(stripPath(path));
    return target.isFile() && target.delete();
}
@Test
public void deleteFile() throws IOException {
    String path = PathUtils.concatPath(mLocalUfsRoot, getUniqueFileName());
    mLocalUfs.create(path).close();

    mLocalUfs.deleteFile(path);

    // The file must be gone both from the UFS view and from the local filesystem.
    assertFalse(mLocalUfs.isFile(path));
    assertFalse(new File(path).exists());
}
/**
 * Precondition check: delegates to the two-argument overload with a default
 * failure message that embeds the offending value.
 * Note: the message is formatted eagerly even on the success path.
 */
public static void notNullOrEmpty(String string) {
    notNullOrEmpty(string, String.format("string [%s] is null or empty", string));
}
@Test
public void testNotNull1NotEmpty1() {
    // A non-null, non-empty string passes the precondition without throwing.
    Precondition.notNullOrEmpty("test");
}
/**
 * Packs a fetched page into an immutable {url, content} result map,
 * trimming the content via {@code trimContent} first.
 */
protected Map<String, String> formatResult(String url, String content) {
    final String trimmedContent = trimContent(content);
    return Map.of(
        "url", url,
        "content", trimmedContent
    );
}
@Test
void testFormatResultWithLongContent() {
    final String url = "http://example.com";
    final String oversized = "a".repeat(2000);

    Map<String, String> result = rawBrowserAction.formatResult(url, oversized);

    assertEquals(url, result.get("url"));
    // Content beyond 1000 characters must be trimmed away.
    String trimmed = result.get("content");
    assertEquals(1000, trimmed.length());
    assertEquals("a".repeat(1000), trimmed);
}
/**
 * Merges the given JSON documents into one. If every argument parses to a JSON
 * object, the objects are merged via concatObjects (presumably key-by-key —
 * confirm override order against concatObjects); otherwise every argument is
 * coerced to an array and the arrays are appended. Returns null when the
 * varargs array itself, any single argument, or any parse result is null/missing.
 */
@Udf
public String concat(@UdfParameter final String... jsonStrings) {
    if (jsonStrings == null) {
        return null;
    }

    final List<JsonNode> nodes = new ArrayList<>(jsonStrings.length);
    boolean allObjects = true;
    for (final String jsonString : jsonStrings) {
        // Any null or unparsable argument nulls out the whole result.
        if (jsonString == null) {
            return null;
        }
        final JsonNode node = UdfJsonMapper.parseJson(jsonString);
        if (node.isMissingNode()) {
            return null;
        }
        // A single non-object input switches the whole merge to array mode.
        if (allObjects && !node.isObject()) {
            allObjects = false;
        }
        nodes.add(node);
    }

    JsonNode result = nodes.get(0);
    if (allObjects) {
        for (int i = 1; i < nodes.size(); i++) {
            result = concatObjects((ObjectNode) result, (ObjectNode) nodes.get(i));
        }
    } else {
        for (int i = 1; i < nodes.size(); i++) {
            result = concatArrays(toArrayNode(result), toArrayNode(nodes.get(i)));
        }
    }
    return UdfJsonMapper.writeValueAsJson(result);
}
@Test
public void shouldReturnNullIfBothdArgsAreNull() {
    // Two null varargs elements: the first null argument makes the result null.
    assertNull(udf.concat(null, null));
}
/**
 * Triggers one thread-info sampling round across the given task executors and
 * returns a future that completes with the aggregated stats (or exceptionally
 * if the coordinator is shut down or the request times out).
 *
 * @param executionsWithGateways running executions grouped per task manager,
 *        each mapped to its (possibly still connecting) gateway
 * @param numSamples number of samples to take, >= 1
 * @param delayBetweenSamples pause between consecutive samples
 * @param maxStackTraceDepth maximum stack depth to record, >= 0
 */
public CompletableFuture<VertexThreadInfoStats> triggerThreadInfoRequest(
        Map<ImmutableSet<ExecutionAttemptID>, CompletableFuture<TaskExecutorThreadInfoGateway>>
                executionsWithGateways,
        int numSamples,
        Duration delayBetweenSamples,
        int maxStackTraceDepth) {
    checkNotNull(executionsWithGateways, "Tasks to sample");
    checkArgument(executionsWithGateways.size() > 0, "No tasks to sample");
    checkArgument(numSamples >= 1, "No number of samples");
    checkArgument(maxStackTraceDepth >= 0, "Negative maximum stack trace depth");

    // Execution IDs of running tasks grouped by the task manager
    Collection<ImmutableSet<ExecutionAttemptID>> runningSubtasksIds =
            executionsWithGateways.keySet();

    synchronized (lock) {
        if (isShutDown) {
            return FutureUtils.completedExceptionally(new IllegalStateException("Shut down"));
        }

        final int requestId = requestIdCounter++;
        log.debug("Triggering thread info request {}", requestId);

        final PendingThreadInfoRequest pending =
                new PendingThreadInfoRequest(requestId, runningSubtasksIds);

        // requestTimeout is treated as the time on top of the expected sampling duration.
        // Discard the request if it takes too long. We don't send cancel
        // messages to the task managers, but only wait for the responses
        // and then ignore them.
        long expectedDuration = numSamples * delayBetweenSamples.toMillis();
        Time timeout = Time.milliseconds(expectedDuration + requestTimeout.toMillis());

        // Add the pending request before scheduling the discard task to
        // prevent races with removing it again.
        pendingRequests.put(requestId, pending);

        ThreadInfoSamplesRequest requestParams =
                new ThreadInfoSamplesRequest(
                        requestId, numSamples, delayBetweenSamples, maxStackTraceDepth);

        requestThreadInfo(executionsWithGateways, requestParams, timeout);

        return pending.getStatsFuture();
    }
}
@Test
void testShutDown() throws Exception {
    Map<ImmutableSet<ExecutionAttemptID>, CompletableFuture<TaskExecutorThreadInfoGateway>>
            executionWithGateways =
                    createMockSubtaskWithGateways(
                            // request future will only be completed after all gateways
                            // successfully return thread infos.
                            CompletionType.SUCCESSFULLY, CompletionType.NEVER_COMPLETE);

    List<CompletableFuture<VertexThreadInfoStats>> requestFutures = new ArrayList<>();

    CompletableFuture<VertexThreadInfoStats> requestFuture1 =
            coordinator.triggerThreadInfoRequest(
                    executionWithGateways,
                    DEFAULT_NUMBER_OF_SAMPLES,
                    DEFAULT_DELAY_BETWEEN_SAMPLES,
                    DEFAULT_MAX_STACK_TRACE_DEPTH);

    CompletableFuture<VertexThreadInfoStats> requestFuture2 =
            coordinator.triggerThreadInfoRequest(
                    executionWithGateways,
                    DEFAULT_NUMBER_OF_SAMPLES,
                    DEFAULT_DELAY_BETWEEN_SAMPLES,
                    DEFAULT_MAX_STACK_TRACE_DEPTH);

    // trigger request
    requestFutures.add(requestFuture1);
    requestFutures.add(requestFuture2);

    // One gateway never completes, so neither request can be done yet.
    for (CompletableFuture<VertexThreadInfoStats> future : requestFutures) {
        assertThat(future).isNotDone();
    }

    // shut down
    coordinator.shutDown();

    // verify all completed: shutdown must fail every in-flight request.
    for (CompletableFuture<VertexThreadInfoStats> future : requestFutures) {
        assertThat(future).isCompletedExceptionally();
    }

    // verify new trigger returns failed future
    CompletableFuture<VertexThreadInfoStats> future =
            coordinator.triggerThreadInfoRequest(
                    executionWithGateways,
                    DEFAULT_NUMBER_OF_SAMPLES,
                    DEFAULT_DELAY_BETWEEN_SAMPLES,
                    DEFAULT_MAX_STACK_TRACE_DEPTH);
    assertThat(future).isCompletedExceptionally();
}
/**
 * Constant folding for minutes_sub(datetime, n): subtracts n minutes.
 * Wrapped in createDatetimeOrNull — presumably yields a NULL constant when the
 * result is out of the representable range (confirm against that factory).
 */
@ConstantFunction(name = "minutes_sub", argTypes = {DATETIME, INT}, returnType = DATETIME, isMonotonic = true)
public static ConstantOperator minutesSub(ConstantOperator date, ConstantOperator minute) {
    return ConstantOperator.createDatetimeOrNull(date.getDatetime().minusMinutes(minute.getInt()));
}
@Test
public void minutesSub() {
    // Subtracting 10 minutes from 2015-03-23 09:23:55 yields 09:13:55.
    ConstantOperator result = ScalarOperatorFunctions.minutesSub(O_DT_20150323_092355, O_INT_10);
    assertEquals("2015-03-23T09:13:55", result.getDatetime().toString());
}
/**
 * Advances to the next file to process, either taken from the incoming row's
 * filename field (file-in-fields mode) or from the configured file list.
 * Missing paths and empty files may be skipped (via recursive calls) depending
 * on the step settings. Returns false when no files remain or opening failed;
 * failures are logged, errors counted, and the transformation stopped.
 */
boolean openNextFile() {
    try {
      if ( meta.getFileInFields() ) {
        data.readrow = getRow(); // Grab another row ...
        if ( data.readrow == null ) {
          // finished processing!
          if ( isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "LoadFileInput.Log.FinishedProcessing" ) );
          }
          return false;
        }
        if ( first ) {
          // One-time initialization of row metadata on the first incoming row.
          first = false;
          data.inputRowMeta = getInputRowMeta();
          data.outputRowMeta = data.inputRowMeta.clone();
          meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );

          // Create convert meta-data objects that will contain Date & Number formatters
          // All non binary content is handled as a String. It would be converted to the target type after the processing.
          data.convertRowMeta = data.outputRowMeta.cloneToType( ValueMetaInterface.TYPE_STRING );

          if ( meta.getFileInFields() ) {
            // Check is filename field is provided
            if ( Utils.isEmpty( meta.getDynamicFilenameField() ) ) {
              logError( BaseMessages.getString( PKG, "LoadFileInput.Log.NoField" ) );
              throw new KettleException( BaseMessages.getString( PKG, "LoadFileInput.Log.NoField" ) );
            }
            // cache the position of the field
            if ( data.indexOfFilenameField < 0 ) {
              data.indexOfFilenameField = data.inputRowMeta.indexOfValue( meta.getDynamicFilenameField() );
              if ( data.indexOfFilenameField < 0 ) {
                // The field is unreachable !
                logError( BaseMessages.getString( PKG, "LoadFileInput.Log.ErrorFindingField" ) + "["
                  + meta.getDynamicFilenameField() + "]" );
                throw new KettleException( BaseMessages.getString( PKG,
                  "LoadFileInput.Exception.CouldnotFindField", meta.getDynamicFilenameField() ) );
              }
            }
            // Get the number of previous fields
            data.totalpreviousfields = data.inputRowMeta.size();
          }
        } // end if first

        // get field value
        String Fieldvalue = data.inputRowMeta.getString( data.readrow, data.indexOfFilenameField );

        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "LoadFileInput.Log.Stream", meta.getDynamicFilenameField(),
            Fieldvalue ) );
        }

        try {
          // Source is a file.
          data.file = KettleVFS.getFileObject( Fieldvalue );
        } catch ( Exception e ) {
          throw new KettleException( e );
        }
      } else {
        // File-list mode: iterate over the configured files by index.
        if ( data.filenr >= data.files.nrOfFiles() ) {
          // finished processing!
          if ( isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "LoadFileInput.Log.FinishedProcessing" ) );
          }
          return false;
        }
        // Is this the last file?
        data.last_file = ( data.filenr == data.files.nrOfFiles() - 1 );
        data.file = data.files.getFile( data.filenr );
      }

      // Check if file exists; optionally skip missing paths by recursing to the next file.
      if ( meta.isIgnoreMissingPath() && !data.file.exists() ) {
        logBasic( BaseMessages.getString( PKG, "LoadFileInput.Error.FileNotExists", "" + data.file.getName() ) );
        return openNextFile();
      }

      // Check if file is empty
      data.fileSize = data.file.getContent().getSize();
      // Move file pointer ahead!
      data.filenr++;

      if ( meta.isIgnoreEmptyFile() && data.fileSize == 0 ) {
        // Empty file and configured to ignore them: skip to the next one.
        logError( BaseMessages.getString( PKG, "LoadFileInput.Error.FileSizeZero", "" + data.file.getName() ) );
        return openNextFile();
      } else {
        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "LoadFileInput.Log.OpeningFile", data.file.toString() ) );
        }

        data.filename = KettleVFS.getFilename( data.file );
        // Add additional fields?
        if ( meta.getShortFileNameField() != null && meta.getShortFileNameField().length() > 0 ) {
          data.shortFilename = data.file.getName().getBaseName();
        }
        if ( meta.getPathField() != null && meta.getPathField().length() > 0 ) {
          data.path = KettleVFS.getFilename( data.file.getParent() );
        }
        if ( meta.isHiddenField() != null && meta.isHiddenField().length() > 0 ) {
          data.hidden = data.file.isHidden();
        }
        if ( meta.getExtensionField() != null && meta.getExtensionField().length() > 0 ) {
          data.extension = data.file.getName().getExtension();
        }
        if ( meta.getLastModificationDateField() != null && meta.getLastModificationDateField().length() > 0 ) {
          data.lastModificationDateTime = new Date( data.file.getContent().getLastModifiedTime() );
        }
        if ( meta.getUriField() != null && meta.getUriField().length() > 0 ) {
          data.uriName = Const.optionallyDecodeUriString( data.file.getName().getURI() );
        }
        if ( meta.getRootUriField() != null && meta.getRootUriField().length() > 0 ) {
          data.rootUriName = data.file.getName().getRootURI();
        }
        // get File content
        getFileContent();

        addFileToResultFilesName( data.file );

        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "LoadFileInput.Log.FileOpened", data.file.toString() ) );
        }
      }
    } catch ( Exception e ) {
      logError( BaseMessages.getString( PKG, "LoadFileInput.Log.UnableToOpenFile", "" + data.filenr, data.file
        .toString(), e.toString() ) );
      stopAll();
      setErrors( 1 );
      return false;
    }
    return true;
}
@Test public void testOpenNextFile_000() { assertFalse( stepMetaInterface.isIgnoreEmptyFile() ); // ensure default value stepInputFiles.addFile( getFile( "input0.txt" ) ); stepInputFiles.addFile( getFile( "input0.txt" ) ); stepInputFiles.addFile( getFile( "input0.txt" ) ); assertTrue( stepLoadFileInput.openNextFile() ); assertTrue( stepLoadFileInput.openNextFile() ); assertTrue( stepLoadFileInput.openNextFile() ); assertFalse( stepLoadFileInput.openNextFile() ); }
static long calculateFailedRequestDelay(int failures, TimingValues timingValues) { long delay = timingValues.getFixedDelay() * (long)Math.pow(2, failures); delay = Math.max(timingValues.getFixedDelay(), Math.min(60_000, delay)); // between timingValues.getFixedDelay() and 60 seconds delay = timingValues.getPlusMinusFractionRandom(delay, randomFraction); return delay; }
@Test public void testDelayCalculation() { TimingValues defaultTimingValues = new TimingValues(); Random random = new Random(0); // Use seed to make delays deterministic TimingValues timingValues = new TimingValues(defaultTimingValues, random); int failures = 1; // First time failure long delay = calculateFailedRequestDelay(failures, timingValues); assertEquals(10924, delay); failures++; // 2nd time failure delay = calculateFailedRequestDelay(failures, timingValues); assertEquals(22652, delay); failures++; // 3rd time failure delay = calculateFailedRequestDelay(failures, timingValues); assertEquals(35849, delay); }
public void setLocalTimeZone(ZoneId zoneId) { final String zone; if (zoneId instanceof ZoneOffset) { // Give ZoneOffset a timezone for backwards compatibility reasons. // In general, advertising either TZDB ID, GMT+xx:xx, or UTC is the best we can do. zone = ZoneId.ofOffset("GMT", (ZoneOffset) zoneId).toString(); } else { zone = zoneId.toString(); } validateTimeZone(zone); configuration.set(TableConfigOptions.LOCAL_TIME_ZONE, zone); }
@Test
public void testSetInvalidLocalTimeZone() {
    // "UTC-10:00" parses as a valid ZoneId but is rejected by the
    // config-level time zone validation.
    assertThatThrownBy(() -> CONFIG_BY_METHOD.setLocalTimeZone(ZoneId.of("UTC-10:00")))
            .isInstanceOf(ValidationException.class)
            .hasMessageContaining("Invalid time zone.");
}
/**
 * Tests whether {@code coordinates} lies inside the (possibly multi-part,
 * possibly holed) polygon described by {@code polygonInString} (a JSON array).
 * Holes are extracted from the parsed polygons and subtracted before the
 * union geometry is built.
 *
 * @throws RuntimeException if the polygon string is blank
 */
public static synchronized boolean contains(@NonNull String polygonInString, @NonNull Coordinates coordinates) {
    // isBlank() already covers the empty string, so the previous
    // isEmpty() || isBlank() double-check is redundant.
    if (polygonInString.isBlank()) {
        throw new RuntimeException("Polygon string can't be empty or null!");
    }
    JsonArray polygonsJson = normalizePolygonsJson(JsonParser.parseString(polygonInString).getAsJsonArray());
    List<Geometry> polygons = buildPolygonsFromJson(polygonsJson);
    // Inner rings (holes) come back as separate polygons; drop them from the shells.
    Set<Geometry> holes = extractHolesFrom(polygons);
    polygons.removeIf(holes::contains);
    Geometry globalGeometry = unionToGlobalGeometry(polygons, holes);
    // NOTE(review): latitude is used as X and longitude as Y here — confirm this
    // matches the axis order used when the polygons were encoded.
    var point = jtsCtx.getShapeFactory().getGeometryFactory()
            .createPoint(new Coordinate(coordinates.getLatitude(), coordinates.getLongitude()));
    return globalGeometry.contains(point);
}
@Test
public void testPointsInSelfIntersectingPolygons() {
    // Points inside the self-intersecting polygon.
    Assertions.assertTrue(GeoUtil.contains(SELF_INTERSECTING, POINT_INSIDE_SELF_INTERSECTING_UPPER_CENTER),
            "Polygon " + SELF_INTERSECTING + " must contain the dot " + POINT_INSIDE_SELF_INTERSECTING_UPPER_CENTER
    );
    Assertions.assertTrue(GeoUtil.contains(SELF_INTERSECTING, POINT_INSIDE_SELF_INTERSECTING_LOWER_CENTER),
            "Polygon " + SELF_INTERSECTING + " must contain the dot " + POINT_INSIDE_SELF_INTERSECTING_LOWER_CENTER
    );
    Assertions.assertTrue(GeoUtil.contains(SELF_INTERSECTING, POINT_INSIDE_SELF_INTERSECTING_NEAR_BORDER),
            "Polygon " + SELF_INTERSECTING + " must contain the dot " + POINT_INSIDE_SELF_INTERSECTING_NEAR_BORDER
    );
    // BUGFIX: the failure message previously referenced POINT_INSIDE_SAND_CLOCK_NEAR_BORDER,
    // which is not the point this assertion actually checks.
    Assertions.assertTrue(GeoUtil.contains(SELF_INTERSECTING_WITH_HOLES, POINT_INSIDE_SELF_INTERSECTING_NEAR_BORDER),
            "Polygon " + SELF_INTERSECTING_WITH_HOLES + " must contain the dot " + POINT_INSIDE_SELF_INTERSECTING_NEAR_BORDER
    );
    // Points outside the self-intersecting polygon.
    Assertions.assertFalse(GeoUtil.contains(SELF_INTERSECTING, POINT_OUTSIDE_SELF_INTERSECTING_1),
            "Polygon " + SELF_INTERSECTING + " must not contain the dot " + POINT_OUTSIDE_SELF_INTERSECTING_1
    );
    Assertions.assertFalse(GeoUtil.contains(SELF_INTERSECTING, POINT_OUTSIDE_SELF_INTERSECTING_2),
            "Polygon " + SELF_INTERSECTING + " must not contain the dot " + POINT_OUTSIDE_SELF_INTERSECTING_2
    );
    Assertions.assertFalse(GeoUtil.contains(SELF_INTERSECTING, POINT_OUTSIDE_SELF_INTERSECTING_3),
            "Polygon " + SELF_INTERSECTING + " must not contain the dot " + POINT_OUTSIDE_SELF_INTERSECTING_3
    );
    Assertions.assertFalse(GeoUtil.contains(SELF_INTERSECTING_WITH_HOLES, POINT_OUTSIDE_SELF_INTERSECTING_1),
            "Polygon " + SELF_INTERSECTING_WITH_HOLES + " must not contain the dot " + POINT_OUTSIDE_SELF_INTERSECTING_1
    );
    Assertions.assertFalse(GeoUtil.contains(SELF_INTERSECTING_WITH_HOLES, POINT_OUTSIDE_SELF_INTERSECTING_2),
            "Polygon " + SELF_INTERSECTING_WITH_HOLES + " must not contain the dot " + POINT_OUTSIDE_SELF_INTERSECTING_2
    );
    Assertions.assertFalse(GeoUtil.contains(SELF_INTERSECTING_WITH_HOLES, POINT_OUTSIDE_SELF_INTERSECTING_3),
            "Polygon " + SELF_INTERSECTING_WITH_HOLES + " must not contain the dot " + POINT_OUTSIDE_SELF_INTERSECTING_3
    );
    // Points inside the plain polygon but inside its holes in the holed variant.
    Assertions.assertFalse(GeoUtil.contains(SELF_INTERSECTING_WITH_HOLES, POINT_INSIDE_SELF_INTERSECTING_UPPER_CENTER),
            "Polygon " + SELF_INTERSECTING_WITH_HOLES + " must not contain the dot " + POINT_INSIDE_SELF_INTERSECTING_UPPER_CENTER
    );
    Assertions.assertFalse(GeoUtil.contains(SELF_INTERSECTING_WITH_HOLES, POINT_INSIDE_SELF_INTERSECTING_LOWER_CENTER),
            "Polygon " + SELF_INTERSECTING_WITH_HOLES + " must not contain the dot " + POINT_INSIDE_SELF_INTERSECTING_LOWER_CENTER
    );
}
/**
 * Atomically raises {@code target} to {@code value} if and only if
 * {@code value} is strictly greater than the current value.
 * Uses a CAS retry loop so concurrent updaters never decrease the target.
 *
 * @return true if the target was updated, false if the stored value
 *         was already >= {@code value}
 */
public static boolean compareAndIncreaseOnly(final AtomicLong target, final long value) {
    for (long current = target.get(); value > current; current = target.get()) {
        if (target.compareAndSet(current, value)) {
            return true;
        }
        // CAS lost a race: reread and retry while value is still larger.
    }
    return false;
}
@Test
public void testCompareAndIncreaseOnly() {
    AtomicLong target = new AtomicLong(5);

    // A larger value wins and is stored.
    assertThat(MixAll.compareAndIncreaseOnly(target, 6)).isTrue();
    assertThat(target.get()).isEqualTo(6);

    // A smaller value is rejected and the stored value stays untouched.
    assertThat(MixAll.compareAndIncreaseOnly(target, 4)).isFalse();
    assertThat(target.get()).isEqualTo(6);
}