focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Splits both delimited strings with the same delimiter and pairs the
 * resulting key/value arrays into a Map via ArrayUtil.zip.
 *
 * @param keys      delimited key string
 * @param values    delimited value string
 * @param delimiter separator used for both inputs
 * @param isOrder   whether the resulting map preserves insertion order
 * @return map pairing the i-th key with the i-th value
 */
public static Map<String, String> zip(String keys, String values, String delimiter, boolean isOrder) {
    final String[] keyArray = StrUtil.splitToArray(keys, delimiter);
    final String[] valueArray = StrUtil.splitToArray(values, delimiter);
    return ArrayUtil.zip(keyArray, valueArray, isOrder);
}
// Verifies CollUtil.zip pairs equal-length key/value collections element-by-element.
// NOTE(review): this exercises the collection overload of zip, not the delimited-String overload — confirm pairing is intentional.
@Test public void zipTest() { final Collection<String> keys = CollUtil.newArrayList("a", "b", "c", "d"); final Collection<Integer> values = CollUtil.newArrayList(1, 2, 3, 4); final Map<String, Integer> map = CollUtil.zip(keys, values); assertEquals(4, Objects.requireNonNull(map).size()); assertEquals(1, map.get("a").intValue()); assertEquals(2, map.get("b").intValue()); assertEquals(3, map.get("c").intValue()); assertEquals(4, map.get("d").intValue()); }
// Publishes the given outgoing messages to the Pub/Sub topic in one PublishRequest.
// Message id and publish time are cleared (server-assigned); the event timestamp and
// record id are stored as message attributes when the corresponding attribute names are configured.
// Returns the number of message ids the service acknowledged.
@Override public int publish(TopicPath topic, List<OutgoingMessage> outgoingMessages) throws IOException { PublishRequest.Builder request = PublishRequest.newBuilder().setTopic(topic.getPath()); for (OutgoingMessage outgoingMessage : outgoingMessages) { PubsubMessage.Builder message = outgoingMessage.getMessage().toBuilder().clearMessageId().clearPublishTime(); if (timestampAttribute != null) { message.putAttributes( timestampAttribute, String.valueOf(outgoingMessage.getTimestampMsSinceEpoch())); } if (idAttribute != null && !Strings.isNullOrEmpty(outgoingMessage.recordId())) { message.putAttributes(idAttribute, outgoingMessage.recordId()); } request.addMessages(message); } PublishResponse response = publisherStub().publish(request.build()); return response.getMessageIdsCount(); }
// End-to-end check of publish(): spins up an in-process gRPC Publisher service, publishes one
// message, and asserts the request carries the data, attributes, timestamp and record-id attributes.
@Test public void publishOneMessage() throws IOException { initializeClient(TIMESTAMP_ATTRIBUTE, ID_ATTRIBUTE); String expectedTopic = TOPIC.getPath(); PubsubMessage expectedPubsubMessage = PubsubMessage.newBuilder() .setData(ByteString.copyFrom(DATA.getBytes(StandardCharsets.UTF_8))) .putAllAttributes(ATTRIBUTES) .putAllAttributes( ImmutableMap.of( TIMESTAMP_ATTRIBUTE, String.valueOf(MESSAGE_TIME_MS), ID_ATTRIBUTE, RECORD_ID)) .build(); final PublishRequest expectedRequest = PublishRequest.newBuilder() .setTopic(expectedTopic) .addAllMessages(ImmutableList.of(expectedPubsubMessage)) .build(); final PublishResponse response = PublishResponse.newBuilder().addAllMessageIds(ImmutableList.of(MESSAGE_ID)).build(); final List<PublishRequest> requestsReceived = new ArrayList<>(); PublisherImplBase publisherImplBase = new PublisherImplBase() { @Override public void publish( PublishRequest request, StreamObserver<PublishResponse> responseObserver) { requestsReceived.add(request); responseObserver.onNext(response); responseObserver.onCompleted(); } }; Server server = InProcessServerBuilder.forName(channelName).addService(publisherImplBase).build().start(); try { OutgoingMessage actualMessage = OutgoingMessage.of( com.google.pubsub.v1.PubsubMessage.newBuilder() .setData(ByteString.copyFromUtf8(DATA)) .putAllAttributes(ATTRIBUTES) .build(), MESSAGE_TIME_MS, RECORD_ID, null); int n = client.publish(TOPIC, ImmutableList.of(actualMessage)); assertEquals(1, n); assertEquals(expectedRequest, Iterables.getOnlyElement(requestsReceived)); } finally { server.shutdownNow(); } }
// Sorts via quickSort with an inclusive end index of length - 1.
// NOTE(review): the end index ignores startIndex — confirm `length` means the exclusive end offset
// from the base address rather than an element count relative to startIndex.
public final void sort(long startIndex, long length) { quickSort(startIndex, length - 1); }
// Fills off-heap memory with random longs, sorts with LongMemArrayQuickSorter, and compares
// element-by-element against Arrays.sort of the same data.
@Test public void testQuickSortLong() { final long[] array = longArrayWithRandomElements(); final long baseAddr = memMgr.getAllocator().allocate(ARRAY_LENGTH * LONG_SIZE_IN_BYTES); final MemoryAccessor mem = memMgr.getAccessor(); for (int i = 0; i < ARRAY_LENGTH; i++) { mem.putLong(baseAddr + LONG_SIZE_IN_BYTES * i, array[i]); } Arrays.sort(array); new LongMemArrayQuickSorter(mem, baseAddr).sort(0, ARRAY_LENGTH); for (int i = 0; i < ARRAY_LENGTH; i++) { assertEquals("Mismatch at " + i, array[i], mem.getLong(baseAddr + LONG_SIZE_IN_BYTES * i)); } }
// Intentionally discards the caller-supplied reuse object and delegates to next().
@Override public Entry next(Entry reuse) throws IOException { // Ignore reuse, because each HeadStream has its own reuse BinaryRowData. return next(); }
// Merges two pre-sorted iterators through BinaryMergeIterator and asserts the combined
// output is globally sorted, including the duplicate key 10 appearing twice.
@Test public void testMergeOfTwoStreams() throws Exception { List<MutableObjectIterator<BinaryRowData>> iterators = new ArrayList<>(); iterators.add( newIterator(new int[] {1, 2, 4, 5, 10}, new String[] {"1", "2", "4", "5", "10"})); iterators.add( newIterator(new int[] {3, 6, 7, 10, 12}, new String[] {"3", "6", "7", "10", "12"})); final int[] expected = new int[] {1, 2, 3, 4, 5, 6, 7, 10, 10, 12}; MutableObjectIterator<BinaryRowData> iterator = new BinaryMergeIterator<>( iterators, reused(2), (o1, o2) -> this.comparator.compare(o1, o2)); BinaryRowData row = serializer.createInstance(); int pos = 0; while ((row = iterator.next(row)) != null) { assertThat(row.getInt(0)).isEqualTo(expected[pos++]); } }
// Transitions the scheduler from Executing to Canceling, handing over the current
// execution graph, its handlers, and the accumulated failures.
@Override public void cancel() { context.goToCanceling( getExecutionGraph(), getExecutionGraphHandler(), getOperatorCoordinatorHandler(), getFailures()); }
// Asserts cancel() on the Executing state triggers exactly the expected transition to Cancelling.
@Test void testCancelTransitionsToCancellingState() throws Exception { try (MockExecutingContext ctx = new MockExecutingContext()) { Executing exec = new ExecutingStateBuilder().build(ctx); ctx.setExpectCancelling(assertNonNull()); exec.cancel(); } }
// Convenience UDF overload: joins the array elements using the default delimiter.
@Udf public <T> String join( @UdfParameter(description = "the array to join using the default delimiter '" + DEFAULT_DELIMITER + "'") final List<T> array ) { return join(array, DEFAULT_DELIMITER); }
// Verifies join() rejects element types it does not support (char, BigInteger, float, Set)
// by throwing KsqlFunctionException.
@Test public void shouldThrowExceptionForExamplesOfUnsupportedElementTypes() { assertThrows(KsqlFunctionException.class, () -> arrayJoinUDF.join(Arrays.asList('a','b'))); assertThrows(KsqlFunctionException.class, () -> arrayJoinUDF.join(Arrays.asList(BigInteger.ONE,BigInteger.ZERO))); assertThrows(KsqlFunctionException.class, () -> arrayJoinUDF.join(Arrays.asList(-23.0f,42.42f,0.0f))); assertThrows(KsqlFunctionException.class, () -> arrayJoinUDF.join(Arrays.asList( new HashSet<>(Arrays.asList("foo", "blah")), new HashSet<>(Arrays.asList("ksqlDB", "UDF")) )) ); }
public Result fetchArtifacts(String[] uris) { checkArgument(uris != null && uris.length > 0, "At least one URI is required."); ArtifactUtils.createMissingParents(baseDir); List<File> artifacts = Arrays.stream(uris) .map(FunctionUtils.uncheckedFunction(this::fetchArtifact)) .collect(Collectors.toList()); if (artifacts.size() > 1) { return new Result(null, artifacts); } if (artifacts.size() == 1) { return new Result(artifacts.get(0), null); } // Should not happen. throw new IllegalStateException("Corrupt artifact fetching state."); }
// Fetches a single file:// URI and asserts it is returned as the job jar with no extra artifacts.
@Test void testFileSystemFetchWithoutAdditionalUri() throws Exception { File sourceFile = TestingUtils.getClassFile(getClass()); String uriStr = "file://" + sourceFile.toURI().getPath(); ArtifactFetchManager fetchMgr = new ArtifactFetchManager(configuration); ArtifactFetchManager.Result res = fetchMgr.fetchArtifacts(uriStr, null); assertThat(res.getJobJar()).exists(); assertThat(res.getJobJar().getName()).isEqualTo(sourceFile.getName()); assertThat(res.getArtifacts()).isNull(); }
// Builds a JSON-like audit representation of the user; booleans are rendered unquoted,
// scmAccounts are comma-joined, and lastConnectionDate is formatted or empty when null.
@Override public String toString() { StringBuilder sb = new StringBuilder("{"); addField(sb, "\"userUuid\": ", this.userUuid, true); addField(sb, "\"userLogin\": ", this.userLogin, true); addField(sb, "\"name\": ", this.name, true); addField(sb, "\"email\": ", this.email, true); addField(sb, "\"isActive\": ", Objects.toString(this.isActive, ""), false); addField(sb, "\"scmAccounts\": ", String.join(",", scmAccounts), true); addField(sb, "\"externalId\": ", this.externalId, true); addField(sb, "\"externalLogin\": ", this.externalLogin, true); addField(sb, "\"externalIdentityProvider\": ", this.externalIdentityProvider, true); addField(sb, "\"local\": ", Objects.toString(this.local, ""), false); addField(sb, "\"lastConnectionDate\": ", this.lastConnectionDate == null ? "" : DateUtils.formatDateTime(this.lastConnectionDate), true); endString(sb); return sb.toString(); }
// Regression check: toString() must still emit parseable JSON when scmAccounts is empty.
@Test void toString_givenEmptyScmAccount_returnValidJSON() { UserDto userDto = createUserDto(); userDto.setScmAccounts(emptyList()); UserNewValue userNewValue = new UserNewValue(userDto); String jsonString = userNewValue.toString(); assertValidJSON(jsonString); }
// Appends the RHS (then) fragments for a DRL rule in a fixed order: agenda-group focus,
// accumulation update, accumulation return, result code, result variable, reason code, and
// finally output-field declarations (bound to the rule result or the accumulator).
// The order of these appends is significant for the generated DRL — do not reorder.
protected void commonDeclareThen(final KiePMMLDroolsRule rule, final StringJoiner joiner) { if (rule.getFocusedAgendaGroup() != null) { joiner.add(String.format(FOCUS_AGENDA_GROUP, rule.getFocusedAgendaGroup())); } if (rule.getToAccumulate() != null) { joiner.add(String.format(UPDATE_STATUS_HOLDER_ACCUMULATE, rule.getToAccumulate())); } if (rule.isAccumulationResult()) { joiner.add(RETURN_ACCUMULATION); } if (rule.getResultCode() != null) { joiner.add(String.format(SET_PMML4_RESULT_CODE, rule.getResultCode())); } if (rule.getResult() != null) { joiner.add(String.format(ADD_PMML4_RESULT_VARIABLE, rule.getResult())); } if (rule.getReasonCodeAndValue() != null) { final KiePMMLReasonCodeAndValue reasonCodeAndValue = rule.getReasonCodeAndValue(); joiner.add(String.format(ADD_OUTPUTFIELD_VALUE, reasonCodeAndValue.getReasonCode(), reasonCodeAndValue.getValue())); } if (rule.getOutputFields() != null) { if (rule.getResult() != null) { commonDeclareOutputFields(rule.getOutputFields(), rule.getResult(), joiner); } else if (rule.isAccumulationResult()) { commonDeclareOutputFields(rule.getOutputFields(), "$statusHolder.getAccumulator()", joiner); } } }
// Incrementally enriches a rule builder (result code, result, focused agenda group) and asserts
// each expected DRL fragment appears in the joined RHS; the bare rule yields an empty string.
@Test void commonDeclareThen() { String ruleName = "RULENAME"; String statusToSet = "STATUSTOSET"; String outputFieldName = "OUTPUTFIELDNAME"; Object result = "RESULT"; OutputField outputField = new OutputField(); outputField.setName(outputFieldName); outputField.setResultFeature(ResultFeature.PREDICTED_VALUE); List<OutputField> outputFields = Collections.singletonList(outputField); KiePMMLDroolsRule.Builder builder = KiePMMLDroolsRule.builder(ruleName, statusToSet, outputFields); KiePMMLDroolsRule rule = builder.build(); StringJoiner joiner = new StringJoiner(""); KiePMMLDescrRhsFactory.factory(ruleBuilder).commonDeclareThen(rule, joiner); String retrieved = joiner.toString(); assertThat(retrieved).isEmpty(); // ResultCode resultCode = ResultCode.OK; builder = builder.withResultCode(resultCode); rule = builder.build(); joiner = new StringJoiner(""); KiePMMLDescrRhsFactory.factory(ruleBuilder).commonDeclareThen(rule, joiner); retrieved = joiner.toString(); String expected = String.format(SET_PMML4_RESULT_CODE, resultCode); assertThat(retrieved).contains(expected); // builder = builder.withResult(result); rule = builder.build(); joiner = new StringJoiner(""); KiePMMLDescrRhsFactory.factory(ruleBuilder).commonDeclareThen(rule, joiner); retrieved = joiner.toString(); expected = String.format(ADD_PMML4_RESULT_VARIABLE, result); assertThat(retrieved).contains(expected); expected = String.format(ADD_PMML4_OUTPUT_FIELD, outputFieldName, result); assertThat(retrieved).contains(expected); // String focusedAgendaGroup = "FOCUSEDAGENDAGROUP"; builder = builder.withFocusedAgendaGroup(focusedAgendaGroup); rule = builder.build(); joiner = new StringJoiner(""); KiePMMLDescrRhsFactory.factory(ruleBuilder).commonDeclareThen(rule, joiner); retrieved = joiner.toString(); expected = String.format(FOCUS_AGENDA_GROUP, focusedAgendaGroup); assertThat(retrieved).contains(expected); // }
// Validates and applies a transactional offset commit: per partition, oversized metadata is
// rejected with OFFSET_METADATA_TOO_LARGE while valid entries get a NONE response and an
// offset-commit record appended for the coordinator to persist. Records the commit count metric
// only when at least one record was produced.
public CoordinatorResult<TxnOffsetCommitResponseData, CoordinatorRecord> commitTransactionalOffset( RequestContext context, TxnOffsetCommitRequestData request ) throws ApiException { validateTransactionalOffsetCommit(context, request); final TxnOffsetCommitResponseData response = new TxnOffsetCommitResponseData(); final List<CoordinatorRecord> records = new ArrayList<>(); final long currentTimeMs = time.milliseconds(); request.topics().forEach(topic -> { final TxnOffsetCommitResponseTopic topicResponse = new TxnOffsetCommitResponseTopic().setName(topic.name()); response.topics().add(topicResponse); topic.partitions().forEach(partition -> { if (isMetadataInvalid(partition.committedMetadata())) { topicResponse.partitions().add(new TxnOffsetCommitResponsePartition() .setPartitionIndex(partition.partitionIndex()) .setErrorCode(Errors.OFFSET_METADATA_TOO_LARGE.code())); } else { log.debug("[GroupId {}] Committing transactional offsets {} for partition {}-{} from member {} with leader epoch {}.", request.groupId(), partition.committedOffset(), topic.name(), partition.partitionIndex(), request.memberId(), partition.committedLeaderEpoch()); topicResponse.partitions().add(new TxnOffsetCommitResponsePartition() .setPartitionIndex(partition.partitionIndex()) .setErrorCode(Errors.NONE.code())); final OffsetAndMetadata offsetAndMetadata = OffsetAndMetadata.fromRequest( partition, currentTimeMs ); records.add(GroupCoordinatorRecordHelpers.newOffsetCommitRecord( request.groupId(), topic.name(), partition.partitionIndex(), offsetAndMetadata, metadataImage.features().metadataVersion() )); } }); }); if (!records.isEmpty()) { metrics.record(GroupCoordinatorMetrics.OFFSET_COMMITS_SENSOR_NAME, records.size()); } return new CoordinatorResult<>(records, response); }
// A commit carrying a generation id (100) that differs from the member's epoch (10) must be
// rejected with IllegalGenerationException before any offsets are written.
@Test public void testConsumerGroupTransactionalOffsetCommitWithStaleMemberEpoch() { OffsetMetadataManagerTestContext context = new OffsetMetadataManagerTestContext.Builder().build(); // Create an empty group. ConsumerGroup group = context.groupMetadataManager.getOrMaybeCreatePersistedConsumerGroup( "foo", true ); // Add member. group.updateMember(new ConsumerGroupMember.Builder("member") .setMemberEpoch(10) .setPreviousMemberEpoch(10) .build() ); assertThrows(IllegalGenerationException.class, () -> context.commitTransactionalOffset( new TxnOffsetCommitRequestData() .setGroupId("foo") .setMemberId("member") .setGenerationId(100) .setTopics(Collections.singletonList( new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic() .setName("bar") .setPartitions(Collections.singletonList( new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition() .setPartitionIndex(0) .setCommittedOffset(100L) .setCommittedLeaderEpoch(10) .setCommittedMetadata("metadata") )) )) )); }
/**
 * Resolves a filename to a URI. A syntactically valid URI with the resource
 * scheme is returned as-is; anything else is resolved as a filesystem path.
 *
 * @param filename raw file name or URI string
 * @return the resource URI, or the qualified path converted to a URI
 * @throws IOException if path qualification fails
 */
public URI qualifiedURI(String filename) throws IOException {
    try {
        final URI candidate = new URI(filename);
        if (RESOURCE_URI_SCHEME.equals(candidate.getScheme())) {
            return candidate;
        }
    } catch (URISyntaxException ignored) {
        // Not a parseable URI; fall through and treat it as a plain path.
    }
    return qualifiedPath(filename).toUri();
}
// Windows-only: verifies a drive-letter path is converted to a URI with a leading-slash path.
@Test public void qualifiedURITestForWindows() throws IOException { Assume.assumeTrue(System.getProperty("os.name").toLowerCase().startsWith("win")); URI uri = this.command.qualifiedURI(WIN_FILE_PATH); Assert.assertEquals("/C:/Test/Downloads/test.parquet", uri.getPath()); }
/**
 * Materializes the configured CORS preflight response headers.
 *
 * <p>Each configured value is a {@code Callable} so it can be computed per
 * request (e.g. a current date); iterable values are expanded into multiple
 * header entries.
 *
 * @return the resolved preflight headers, or the shared empty instance when
 *     none are configured
 */
public HttpHeaders preflightResponseHeaders() {
    if (preflightHeaders.isEmpty()) {
        return EmptyHttpHeaders.INSTANCE;
    }
    // Fix: previously this local was also named `preflightHeaders`, shadowing the field.
    final HttpHeaders responseHeaders = new DefaultHttpHeaders();
    for (Entry<CharSequence, Callable<?>> entry : this.preflightHeaders.entrySet()) {
        final Object value = getValue(entry.getValue());
        if (value instanceof Iterable) {
            responseHeaders.add(entry.getKey(), (Iterable<?>) value);
        } else {
            responseHeaders.add(entry.getKey(), value);
        }
    }
    return responseHeaders;
}
// Verifies a preflight header configured with two values surfaces both values in the response headers.
@Test public void preflightResponseHeadersMultipleValues() { final CorsConfig cors = forAnyOrigin().preflightResponseHeader("MultipleValues", "value1", "value2").build(); assertThat(cors.preflightResponseHeaders().getAll(of("MultipleValues")), hasItems("value1", "value2")); }
// Creates (or wipes) the static directory, then registers it as a Tomcat context.
// IO failures are wrapped in IllegalStateException with the directory path for diagnosis.
@VisibleForTesting StandardContext addStaticDir(Tomcat tomcat, String contextPath, File dir) { try { fs.createOrCleanupDir(dir); } catch (IOException e) { throw new IllegalStateException(format("Fail to create or clean-up directory %s", dir.getAbsolutePath()), e); } return addContext(tomcat, contextPath, dir); }
// Stubs the filesystem helper to throw IOException and asserts addStaticDir wraps it
// in an IllegalStateException mentioning the directory path.
@Test public void fail_if_static_directory_can_not_be_initialized() throws Exception { File dir = temp.newFolder(); TomcatContexts.Fs fs = mock(TomcatContexts.Fs.class); doThrow(new IOException()).when(fs).createOrCleanupDir(any(File.class)); assertThatThrownBy(() -> new TomcatContexts(fs).addStaticDir(tomcat, "/deploy", dir)) .isInstanceOf(IllegalStateException.class) .hasMessageContaining("Fail to create or clean-up directory " + dir.getAbsolutePath()); }
/**
 * Determines whether a row group can be dropped based on dictionary pages.
 *
 * @param pred the filter predicate to evaluate; must not be null
 * @param columns column chunk metadata for the row group; must not be null
 * @param dictionaries provider of dictionary pages for the columns
 * @return true if the predicate proves no row in the group can match
 */
public static boolean canDrop(
    FilterPredicate pred, List<ColumnChunkMetaData> columns, DictionaryPageReadStore dictionaries) {
  // Fix: corrected "cannnot" typo in both null-check messages.
  Objects.requireNonNull(pred, "pred cannot be null");
  Objects.requireNonNull(columns, "columns cannot be null");
  return pred.accept(new DictionaryFilter(columns, dictionaries));
}
// Checks canDrop() on logically-inverted user-defined predicates: only the inverse of a
// UDP matching every dictionary value allows the block to be dropped.
@Test public void testInverseUdp() throws Exception { InInt32UDP droppable = new InInt32UDP(ImmutableSet.of(42)); InInt32UDP undroppable = new InInt32UDP(ImmutableSet.of(205)); Set<Integer> allValues = ImmutableSet.copyOf(Ints.asList(intValues)); InInt32UDP completeMatch = new InInt32UDP(allValues); FilterPredicate inverse = LogicalInverseRewriter.rewrite(not(userDefined(intColumn("int32_field"), droppable))); FilterPredicate inverse1 = LogicalInverseRewriter.rewrite(not(userDefined(intColumn("int32_field"), undroppable))); FilterPredicate inverse2 = LogicalInverseRewriter.rewrite(not(userDefined(intColumn("int32_field"), completeMatch))); assertFalse("Should not drop block for inverse of non-matching UDP", canDrop(inverse, ccmd, dictionaries)); assertFalse( "Should not drop block for inverse of UDP with some matches", canDrop(inverse1, ccmd, dictionaries)); assertTrue("Should drop block for inverse of UDP with all matches", canDrop(inverse2, ccmd, dictionaries)); }
/**
 * Human-readable summary of this alert condition's configuration.
 *
 * @return description listing field, value, grace period and repeat-notification flag
 */
@Override
public String getDescription() {
    final StringBuilder description = new StringBuilder();
    description.append("field: ").append(field)
            .append(", value: ").append(value)
            .append(", grace: ").append(grace)
            .append(", repeat notifications: ").append(repeatNotifications);
    return description.toString();
}
// Smoke test: building the condition from parameters yields a non-null instance with a description.
@Test public void testConstructor() throws Exception { final Map<String, Object> parameters = getParametersMap(0, "field", "value"); final FieldContentValueAlertCondition condition = getCondition(parameters, alertConditionTitle); assertNotNull(condition); assertNotNull(condition.getDescription()); }
// A pair passes only when both the vertical and horizontal separation checks pass.
// Short-circuit: testHorizontal is skipped when testVertical already fails.
@Override public boolean test(Pair<Point, Point> pair) { return testVertical(pair) && testHorizontal(pair); }
// Confirms co-located points pass the cylindrical filter while points ~1 degree of longitude
// apart (verified to exceed the horizontal limit) are rejected.
@Test public void testHorizontalSeparation() { Point p1 = (new PointBuilder()).time(EPOCH).latLong(0.0, 0.0).altitude(Distance.ofFeet(1000.0)).build(); Point p2 = (new PointBuilder()).time(EPOCH).latLong(0.0, 0.0).altitude(Distance.ofFeet(1000.0)).build(); Point p3 = (new PointBuilder()).time(EPOCH).latLong(0.0, 1.0).altitude(Distance.ofFeet(1000.0)).build(); double MAX_HORIZ_SEPARATION_IN_FT = 1000; double MAX_VERT_SEPARATION = 500; CylindricalFilter filter = new CylindricalFilter(MAX_HORIZ_SEPARATION_IN_FT, MAX_VERT_SEPARATION); assertTrue(filter.test(Pair.of(p1, p1)), "A point is in the same cylindar with itself"); assertTrue(filter.test(Pair.of(p1, p2)), "A point is in the same cylindar with itself"); //manually compute distance double distanceInNM = Spherical.distanceInNM( 0.0, 0.0, 0.0, 1.0 ); assertTrue(distanceInNM * feetPerNM() > MAX_HORIZ_SEPARATION_IN_FT, "confirm distance is big"); assertFalse(filter.test(Pair.of(p1, p3)), "confirm filter rejects points that are far apart"); }
/**
 * Builds the paginated MySQL query selecting config rows filtered by tenant
 * (LIKE pattern) and exact app name.
 *
 * @param context supplies the where-parameters plus start row and page size
 * @return the SQL with its positional parameter list (tenantId, appName)
 */
@Override
public MapperResult findConfigInfoByAppFetchRows(MapperContext context) {
    final String appName = (String) context.getWhereParameter(FieldConstant.APP_NAME);
    final String tenantId = (String) context.getWhereParameter(FieldConstant.TENANT_ID);
    final StringBuilder sql =
            new StringBuilder("SELECT id,data_id,group_id,tenant_id,app_name,content FROM config_info");
    sql.append(" WHERE tenant_id LIKE ? AND app_name= ?");
    // LIMIT offset,count pagination; values come from the context, not user input.
    sql.append(" LIMIT ").append(context.getStartRow()).append(',').append(context.getPageSize());
    return new MapperResult(sql.toString(), CollectionUtils.list(tenantId, appName));
}
// Asserts the generated SQL text and the positional parameter list (tenantId, appName).
@Test void testFindConfigInfoByAppFetchRows() { MapperResult mapperResult = configInfoMapperByMySql.findConfigInfoByAppFetchRows(context); assertEquals(mapperResult.getSql(), "SELECT id,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE tenant_id LIKE ? AND app_name= ? LIMIT " + startRow + "," + pageSize); assertArrayEquals(new Object[] {tenantId, appName}, mapperResult.getParamList().toArray()); }
public static String canonicalizeUrl(String url, String refer) { URL base; try { try { base = new URL(refer); } catch (MalformedURLException e) { // the base is unsuitable, but the attribute may be abs on its own, so try that URL abs = new URL(refer); return abs.toExternalForm(); } // workaround: java resolves '//path/file + ?foo' to '//path/?foo', not '//path/file?foo' as desired if (url.startsWith("?")) url = base.getPath() + url; URL abs = new URL(base, url); return abs.toExternalForm(); } catch (MalformedURLException e) { return ""; } }
// Covers relative resolution cases: sibling file, parent traversal ("../"), non-traversal
// "..aa" prefix, and double traversal with and without a trailing slash on the referrer.
@Test public void testFixRelativeUrl() { String absoluteUrl = UrlUtils.canonicalizeUrl("aa", "http://www.dianping.com/sh/ss/com"); assertThat(absoluteUrl).isEqualTo("http://www.dianping.com/sh/ss/aa"); absoluteUrl = UrlUtils.canonicalizeUrl("../aa", "http://www.dianping.com/sh/ss/com"); assertThat(absoluteUrl).isEqualTo("http://www.dianping.com/sh/aa"); absoluteUrl = UrlUtils.canonicalizeUrl("../mshz", "http://www.court.gov.cn/zgcpwsw/zgrmfy/"); assertThat(absoluteUrl).isEqualTo("http://www.court.gov.cn/zgcpwsw/mshz"); absoluteUrl = UrlUtils.canonicalizeUrl("..aa", "http://www.dianping.com/sh/ss/com"); assertThat(absoluteUrl).isEqualTo("http://www.dianping.com/sh/ss/..aa"); absoluteUrl = UrlUtils.canonicalizeUrl("../../aa", "http://www.dianping.com/sh/ss/com/"); assertThat(absoluteUrl).isEqualTo("http://www.dianping.com/sh/aa"); absoluteUrl = UrlUtils.canonicalizeUrl("../../aa", "http://www.dianping.com/sh/ss/com"); assertThat(absoluteUrl).isEqualTo("http://www.dianping.com/aa"); }
// Collects all parameter-marker segments (recursively, via extractParameterMarkerExpressions)
// from the given expressions into a flat list, preserving encounter order.
public static List<ParameterMarkerExpressionSegment> getParameterMarkerExpressions(final Collection<ExpressionSegment> expressions) { List<ParameterMarkerExpressionSegment> result = new ArrayList<>(); extractParameterMarkerExpressions(result, expressions); return result; }
// Verifies markers nested inside an IN (...) list expression are both extracted.
@Test void assertGetParameterMarkerExpressionsFromInExpression() { ListExpression listExpression = new ListExpression(0, 0); listExpression.getItems().add(new ParameterMarkerExpressionSegment(0, 0, 1, ParameterMarkerType.QUESTION)); listExpression.getItems().add(new ParameterMarkerExpressionSegment(0, 0, 2, ParameterMarkerType.QUESTION)); Collection<ExpressionSegment> inExpressions = Collections.singleton(new InExpression(0, 0, new ColumnSegment(0, 0, new IdentifierValue("order_id")), listExpression, false)); List<ParameterMarkerExpressionSegment> actual = ExpressionExtractUtils.getParameterMarkerExpressions(inExpressions); assertThat(actual.size(), is(2)); }
/**
 * Checks whether the remaining continuous capacity can satisfy the request.
 *
 * <p>Remaining capacity = original value minus the sum of all continuous
 * allocations already granted.
 *
 * @param request requested continuous resource amount
 * @return true when the request fits in the remaining capacity
 */
boolean hasEnoughResource(ContinuousResource request) {
    double consumed = 0.0;
    for (ResourceAllocation allocation : allocations) {
        if (allocation.resource() instanceof ContinuousResource) {
            consumed += ((ContinuousResource) allocation.resource()).value();
        }
    }
    final double remaining = original.value() - consumed;
    return request.value() <= remaining;
}
// Boundary case: requesting exactly the remaining bandwidth (1Gbps - 500Mbps = 500Mbps) succeeds.
@Test public void testHasEnoughResourceWhenExactResourceIsRequested() { ContinuousResource original = Resources.continuous(DID, PN1, Bandwidth.class).resource(Bandwidth.gbps(1).bps()); ContinuousResource allocated = Resources.continuous(DID, PN1, Bandwidth.class).resource(Bandwidth.mbps(500).bps()); ResourceConsumer consumer = IntentId.valueOf(1); ContinuousResourceAllocation sut = new ContinuousResourceAllocation(original, ImmutableList.of(new ResourceAllocation(allocated, consumer))); ContinuousResource request = Resources.continuous(DID, PN1, Bandwidth.class).resource(Bandwidth.mbps(500).bps()); assertThat(sut.hasEnoughResource(request), is(true)); }
@SuppressWarnings("WEAK_MESSAGE_DIGEST_MD5") @Override public Object convert(String value) { if (value == null || value.isEmpty()) { return value; } // MessageDigest is not threadsafe. #neverForget return DigestUtils.md5Hex(value); }
// Pins the pass-through of null/empty and the known MD5 hex digest of "graylog2".
@Test public void testConvert() throws Exception { Converter hc = new HashConverter(new HashMap<String, Object>()); assertNull(hc.convert(null)); assertEquals("", hc.convert("")); assertEquals("c029b5a72ae255853d7151a9e28c6260", hc.convert("graylog2")); }
// Convenience overload: parses with no effective cap (Long.MAX_VALUE as the missing-resource default).
public static ConfigurableResource parseResourceConfigValue(String value) throws AllocationConfigurationException { return parseResourceConfigValue(value, Long.MAX_VALUE); }
// Invalid percentage value "95A%" must be rejected.
// NOTE(review): the expected exception from the second call is not asserted here — the original
// likely used @Test(expected=...) or a helper; confirm against the full test class.
@Test public void testInvalidNumPercentage() throws Exception { expectInvalidResourcePercentage("cpu"); parseResourceConfigValue("95A% cpu, 50% memory"); }
// Converts queue entities to QueueUpdateMsg protos and broadcasts the same list to the
// rule-engine, core, and transport notification channels.
@Override public void onQueuesUpdate(List<Queue> queues) { List<QueueUpdateMsg> queueUpdateMsgs = queues.stream() .map(queue -> QueueUpdateMsg.newBuilder() .setTenantIdMSB(queue.getTenantId().getId().getMostSignificantBits()) .setTenantIdLSB(queue.getTenantId().getId().getLeastSignificantBits()) .setQueueIdMSB(queue.getId().getId().getMostSignificantBits()) .setQueueIdLSB(queue.getId().getId().getLeastSignificantBits()) .setQueueName(queue.getName()) .setQueueTopic(queue.getTopic()) .setPartitions(queue.getPartitions()) .setDuplicateMsgToAllPartitions(queue.isDuplicateMsgToAllPartitions()) .build()) .collect(Collectors.toList()); ToRuleEngineNotificationMsg ruleEngineMsg = ToRuleEngineNotificationMsg.newBuilder().addAllQueueUpdateMsgs(queueUpdateMsgs).build(); ToCoreNotificationMsg coreMsg = ToCoreNotificationMsg.newBuilder().addAllQueueUpdateMsgs(queueUpdateMsgs).build(); ToTransportMsg transportMsg = ToTransportMsg.newBuilder().addAllQueueUpdateMsgs(queueUpdateMsgs).build(); doSendQueueNotifications(ruleEngineMsg, coreMsg, transportMsg); }
// With a single monolith serving all three roles, only the rule-engine notification is sent;
// core and transport producers must never be touched.
@Test public void testOnQueueChangeSingleMonolith() { when(partitionService.getAllServiceIds(ServiceType.TB_RULE_ENGINE)).thenReturn(Sets.newHashSet(MONOLITH)); when(partitionService.getAllServiceIds(ServiceType.TB_CORE)).thenReturn(Sets.newHashSet(MONOLITH)); when(partitionService.getAllServiceIds(ServiceType.TB_TRANSPORT)).thenReturn(Sets.newHashSet(MONOLITH)); TbQueueProducer<TbProtoQueueMsg<TransportProtos.ToRuleEngineNotificationMsg>> tbQueueProducer = mock(TbQueueProducer.class); when(producerProvider.getRuleEngineNotificationsMsgProducer()).thenReturn(tbQueueProducer); clusterService.onQueuesUpdate(List.of(createTestQueue())); verify(topicService, times(1)).getNotificationsTopic(ServiceType.TB_RULE_ENGINE, MONOLITH); verify(topicService, never()).getNotificationsTopic(eq(ServiceType.TB_CORE), any()); verify(topicService, never()).getNotificationsTopic(eq(ServiceType.TB_TRANSPORT), any()); verify(tbQueueProducer, times(1)) .send(eq(topicService.getNotificationsTopic(ServiceType.TB_RULE_ENGINE, MONOLITH)), any(TbProtoQueueMsg.class), isNull()); verify(producerProvider, never()).getTbCoreNotificationsMsgProducer(); verify(producerProvider, never()).getTransportNotificationsMsgProducer(); }
// Simple accessor: whether STS security credentials are cached between requests.
public boolean isCacheSecurityCredentials() { return cacheSecurityCredentials; }
// Checks the default (true) then toggles the flag off.
// NOTE(review): mutates singleton state without restoring it — may leak into other tests; confirm teardown exists.
@Test void testIsCacheSecurityCredentials() { assertTrue(StsConfig.getInstance().isCacheSecurityCredentials()); StsConfig.getInstance().setCacheSecurityCredentials(false); assertFalse(StsConfig.getInstance().isCacheSecurityCredentials()); }
// Merges paramsToMerge into params in place. Literal MAP params recurse with a child merge
// context inheriting the base param's mode; literal STRING_MAP params are shallow-merged
// (incoming keys win); everything else is replaced via buildMergedParamDefinition.
// A null paramsToMerge is a no-op.
public static void mergeParams( Map<String, ParamDefinition> params, Map<String, ParamDefinition> paramsToMerge, MergeContext context) { if (paramsToMerge == null) { return; } Stream.concat(params.keySet().stream(), paramsToMerge.keySet().stream()) .forEach( name -> { ParamDefinition paramToMerge = paramsToMerge.get(name); if (paramToMerge == null) { return; } if (paramToMerge.getType() == ParamType.MAP && paramToMerge.isLiteral()) { Map<String, ParamDefinition> baseMap = mapValueOrEmpty(params, name); Map<String, ParamDefinition> toMergeMap = mapValueOrEmpty(paramsToMerge, name); mergeParams( baseMap, toMergeMap, MergeContext.copyWithParentMode( context, params.getOrDefault(name, paramToMerge).getMode())); params.put( name, buildMergedParamDefinition( name, paramToMerge, params.get(name), context, baseMap)); } else if (paramToMerge.getType() == ParamType.STRING_MAP && paramToMerge.isLiteral()) { Map<String, String> baseMap = stringMapValueOrEmpty(params, name); Map<String, String> toMergeMap = stringMapValueOrEmpty(paramsToMerge, name); baseMap.putAll(toMergeMap); params.put( name, buildMergedParamDefinition( name, paramToMerge, params.get(name), context, baseMap)); } else { params.put( name, buildMergedParamDefinition( name, paramToMerge, params.get(name), context, paramToMerge.getValue())); } }); }
// When both sides define the MAP param by a SEL expression (non-literal), the merge replaces
// the base expression with the incoming one rather than recursing into map entries.
@Test public void testMergeMapDefinedBySEL() throws JsonProcessingException { Map<String, ParamDefinition> allParams = parseParamDefMap( "{'tomerge': {'type': 'MAP','expression': 'data = new HashMap(); data.put(\\'foo\\', 123); return data;'}}"); Map<String, ParamDefinition> paramsToMerge = parseParamDefMap( "{'tomerge': {'type': 'MAP','expression': 'data = new HashMap(); data.put(\\'foo\\', 1.23); return data;'}}"); ParamsMergeHelper.mergeParams(allParams, paramsToMerge, definitionContext); assertEquals(1, allParams.size()); assertEquals( "data = new HashMap(); data.put(\"foo\", 1.23); return data;", allParams.get("tomerge").asMapParamDef().getExpression()); }
// Assembles and runs the Beam pipeline: read wiki input, parse rows, compute top sessions
// with a 10% sampling threshold, and write the results; blocks until completion.
public static void run(Options options) { Pipeline p = Pipeline.create(options); double samplingThreshold = 0.1; p.apply(TextIO.read().from(options.getWikiInput())) .apply(MapElements.via(new ParseTableRowJson())) .apply(new ComputeTopSessions(samplingThreshold)) .apply("Write", TextIO.write().to(options.getOutput())); p.run().waitUntilFinish(); }
// Runs ComputeTopSessions (sampling 1.0) over synthetic edits and asserts the formatted
// top-session strings for the users whose sessions win their month.
@Test @Category(ValidatesRunner.class) public void testComputeTopUsers() { PCollection<String> output = p.apply( Create.of( Arrays.asList( new TableRow().set("timestamp", 0).set("contributor_username", "user1"), new TableRow().set("timestamp", 1).set("contributor_username", "user1"), new TableRow().set("timestamp", 2).set("contributor_username", "user1"), new TableRow().set("timestamp", 0).set("contributor_username", "user2"), new TableRow().set("timestamp", 1).set("contributor_username", "user2"), new TableRow().set("timestamp", 3601).set("contributor_username", "user2"), new TableRow().set("timestamp", 3602).set("contributor_username", "user2"), new TableRow() .set("timestamp", 35 * 24 * 3600) .set("contributor_username", "user3")))) .apply(new TopWikipediaSessions.ComputeTopSessions(1.0)); PAssert.that(output) .containsInAnyOrder( Arrays.asList( "user1 : [1970-01-01T00:00:00.000Z..1970-01-01T01:00:02.000Z)" + " : 3 : 1970-01-01T00:00:00.000Z", "user3 : [1970-02-05T00:00:00.000Z..1970-02-05T01:00:00.000Z)" + " : 1 : 1970-02-01T00:00:00.000Z")); p.run().waitUntilFinish(); }
/**
 * Runs every known crash-report rule against the log and collects a Result
 * for each rule whose pattern matches.
 *
 * <p>NOTE(review): the method name "anaylze" is a typo but is public API
 * exercised by callers/tests, so it is kept for compatibility.
 *
 * @param log full crash-report text
 * @return set of matched rules with their matchers (empty when nothing matches)
 */
public static Set<Result> anaylze(String log) {
    final Set<Result> matched = new HashSet<>();
    for (Rule rule : Rule.values()) {
        final Matcher matcher = rule.pattern.matcher(log);
        if (!matcher.find()) {
            continue;
        }
        matched.add(new Result(rule, log, matcher));
    }
    return matched;
}
// Whitespace-insensitive check that the FORGE_FOUND_DUPLICATE_MODS rule captures the duplicate-mod reason line.
@Test public void forgeFoundDuplicateMods() throws IOException { CrashReportAnalyzer.Result result = findResultByRule( CrashReportAnalyzer.anaylze(loadLog("/logs/forge_found_duplicate_mods.txt")), CrashReportAnalyzer.Rule.FORGE_FOUND_DUPLICATE_MODS); assertEquals(("\tMod ID: 'jei' from mod files: REIPluginCompatibilities-forge-12.0.93.jar, jei-1.20.1-forge-15.2.0.27.jar\n").replaceAll("\\s+", ""), result.getMatcher().group("reason").replaceAll("\\s+", "")); }
// Serializes the graph as a JSON:API document using the configured resource classes
// and writes the bytes to the output stream.
@Override public void marshal(Exchange exchange, Object graph, OutputStream stream) throws Exception { ResourceConverter converter = new ResourceConverter(dataFormatTypeClasses); byte[] objectAsBytes = converter.writeDocument(new JSONAPIDocument<>(graph)); stream.write(objectAsBytes); }
// Marshalling a type absent from the configured formats must fail with DocumentSerializationException.
@Test public void testJsonApiMarshalNoAnnotationOnType() { Class<?>[] formats = { MyBook.class, MyAuthor.class }; JsonApiDataFormat jsonApiDataFormat = new JsonApiDataFormat(formats); Exchange exchange = new DefaultExchange(context); ByteArrayOutputStream baos = new ByteArrayOutputStream(); assertThrows(DocumentSerializationException.class, () -> jsonApiDataFormat.marshal(exchange, new FooBar(), baos)); }
// Reports an issue unless it is suppressed: returns false when a //NOSONAR marker covers it,
// when its rule is inactive/unknown, or when an issue filter rejects it; otherwise writes the
// raw issue to the scanner report and returns true.
public boolean initAndAddIssue(Issue issue) { DefaultInputComponent inputComponent = (DefaultInputComponent) issue.primaryLocation().inputComponent(); if (noSonar(inputComponent, issue)) { return false; } ActiveRule activeRule = activeRules.find(issue.ruleKey()); if (activeRule == null) { // rule does not exist or is not enabled -> ignore the issue return false; } ScannerReport.Issue rawIssue = createReportIssue(issue, inputComponent.scannerId(), activeRule.severity()); if (filters.accept(inputComponent, rawIssue)) { write(inputComponent.scannerId(), rawIssue); return true; } return false; }
// An issue on a line marked NOSONAR must be dropped and nothing published.
@Test public void should_ignore_lines_commented_with_nosonar() { initModuleIssues(); DefaultIssue issue = new DefaultIssue(project) .at(new DefaultIssueLocation().on(file).at(file.selectLine(3)).message("")) .forRule(JAVA_RULE_KEY); file.noSonarAt(new HashSet<>(Collections.singletonList(3))); boolean added = moduleIssues.initAndAddIssue(issue); assertThat(added).isFalse(); verifyNoInteractions(reportPublisher); }
// JSON-serializable accessor for the underlying Delta table descriptor.
@JsonProperty public DeltaTable getDeltaTable() { return deltaTable; }
// Serializes a DeltaTableHandle to JSON and back, asserting the embedded DeltaTable survives
// the round trip. (The locally-built c1ColumnHandle is unused by the assertions.)
@Test public void testJsonRoundTrip() { List<DeltaColumn> columns = ImmutableList.of( new DeltaColumn("c1", parseTypeSignature(StandardTypes.REAL), true, true), new DeltaColumn("c2", parseTypeSignature(INTEGER), false, true), new DeltaColumn("c3", parseTypeSignature(DOUBLE), false, false), new DeltaColumn("c4", parseTypeSignature(DATE), true, false)); DeltaTable deltaTable = new DeltaTable( "schema", "table", "s3:/bucket/table/location", Optional.of(1L), columns); DeltaColumnHandle c1ColumnHandle = new DeltaColumnHandle( columns.get(0).getName(), columns.get(0).getType(), columns.get(0).isPartition() ? PARTITION : REGULAR, Optional.empty()); DeltaTableHandle expected = new DeltaTableHandle("delta", deltaTable); String json = getJsonCodec().toJson(expected); DeltaTableHandle actual = getJsonCodec().fromJson(json); assertEquals(actual.getDeltaTable(), expected.getDeltaTable()); }
/**
 * Issues a prefix scan over tag/value state, returning a future of (tag, value) entries.
 * The first request carries no continuation position; paging is handled by
 * valuesToPagingIterableFuture.
 * NOTE(review): the toBuilder().build() round trip on the StateTag looks redundant — confirm
 * it is not relied on for identity/copy semantics before simplifying.
 */
public <V> Future<Iterable<Map.Entry<ByteString, V>>> valuePrefixFuture( ByteString prefix, String stateFamily, Coder<V> valueCoder) { // First request has no continuation position. StateTag<ByteString> stateTag = StateTag.<ByteString>of(Kind.VALUE_PREFIX, prefix, stateFamily).toBuilder().build(); return valuesToPagingIterableFuture( stateTag, valueCoder, this.stateFuture(stateTag, valueCoder)); }
// Exercises a prefix read that spans two pages: the first response returns a continuation
// position, and iterating the results triggers exactly one follow-up request carrying that
// position. Asserts both pages' entries are present and no extra Windmill calls are made.
@Test public void testReadTagValuePrefixWithContinuations() throws Exception { Future<Iterable<Map.Entry<ByteString, Integer>>> future = underTest.valuePrefixFuture(STATE_KEY_PREFIX, STATE_FAMILY, INT_CODER); Mockito.verifyNoMoreInteractions(mockWindmill); Windmill.KeyedGetDataRequest.Builder expectedRequest1 = Windmill.KeyedGetDataRequest.newBuilder() .setKey(DATA_KEY) .setShardingKey(SHARDING_KEY) .setWorkToken(WORK_TOKEN) .setMaxBytes(WindmillStateReader.MAX_KEY_BYTES) .addTagValuePrefixesToFetch( Windmill.TagValuePrefixRequest.newBuilder() .setTagPrefix(STATE_KEY_PREFIX) .setStateFamily(STATE_FAMILY) .setFetchMaxBytes(WindmillStateReader.MAX_TAG_VALUE_PREFIX_BYTES)); final ByteString CONT = ByteString.copyFrom("CONTINUATION", StandardCharsets.UTF_8); Windmill.KeyedGetDataResponse.Builder response1 = Windmill.KeyedGetDataResponse.newBuilder() .setKey(DATA_KEY) .addTagValuePrefixes( Windmill.TagValuePrefixResponse.newBuilder() .setTagPrefix(STATE_KEY_PREFIX) .setStateFamily(STATE_FAMILY) .setContinuationPosition(CONT) .addTagValues( Windmill.TagValue.newBuilder() .setTag(STATE_KEY_1) .setStateFamily(STATE_FAMILY) .setValue(intValue(8)))); Windmill.KeyedGetDataRequest.Builder expectedRequest2 = Windmill.KeyedGetDataRequest.newBuilder() .setKey(DATA_KEY) .setShardingKey(SHARDING_KEY) .setWorkToken(WORK_TOKEN) .setMaxBytes(WindmillStateReader.MAX_KEY_BYTES) .addTagValuePrefixesToFetch( Windmill.TagValuePrefixRequest.newBuilder() .setTagPrefix(STATE_KEY_PREFIX) .setStateFamily(STATE_FAMILY) .setRequestPosition(CONT) .setFetchMaxBytes(WindmillStateReader.MAX_TAG_VALUE_PREFIX_BYTES)); Windmill.KeyedGetDataResponse.Builder response2 = Windmill.KeyedGetDataResponse.newBuilder() .setKey(DATA_KEY) .addTagValuePrefixes( Windmill.TagValuePrefixResponse.newBuilder() .setTagPrefix(STATE_KEY_PREFIX) .setStateFamily(STATE_FAMILY) .setRequestPosition(CONT) .addTagValues( Windmill.TagValue.newBuilder() .setTag(STATE_KEY_2) .setStateFamily(STATE_FAMILY) .setValue(intValue(9))));
Mockito.when(mockWindmill.getStateData(COMPUTATION, expectedRequest1.build())) .thenReturn(response1.build()); Mockito.when(mockWindmill.getStateData(COMPUTATION, expectedRequest2.build())) .thenReturn(response2.build()); Iterable<Map.Entry<ByteString, Integer>> results = future.get(); Mockito.verify(mockWindmill).getStateData(COMPUTATION, expectedRequest1.build()); for (Map.Entry<ByteString, Integer> unused : results) { // Iterate over the results to force loading all the pages. } Mockito.verify(mockWindmill).getStateData(COMPUTATION, expectedRequest2.build()); Mockito.verifyNoMoreInteractions(mockWindmill); assertThat( results, Matchers.containsInAnyOrder( new AbstractMap.SimpleEntry<>(STATE_KEY_1, 8), new AbstractMap.SimpleEntry<>(STATE_KEY_2, 9))); // NOTE: The future will still contain a reference to the underlying reader , thus not calling // assertNoReader(future). }
// Looks up the variable by key; returns null when absent (Map.get semantics).
@Override public String get(String key) { return variables.get(key); }
// The unmodifiable view must report the same value as the backing variables map.
@Test public void testGet() { assertThat(unmodifiables.get(MY_KEY), CoreMatchers.is(vars.get(MY_KEY))); }
// Registers a source node consuming the given topics with default deserializers/extractor
// (the nulls delegate those choices to the internal builder). Returns this for chaining.
public synchronized Topology addSource(final String name, final String... topics) { internalTopologyBuilder.addSource(null, name, null, null, null, topics); return this; }
// Adding a named topic that matches an already-registered source pattern must be rejected
// with a TopologyException (topic ownership would otherwise be ambiguous).
@Test public void testNamedTopicMatchesAlreadyProvidedPattern() { topology.addSource("source-1", Pattern.compile("f.*")); try { topology.addSource("source-2", "foo"); fail("Should have thrown TopologyException for overlapping topic with already registered pattern"); } catch (final TopologyException expected) { } }
/**
 * Converts a Joda Period into a RelativeRange whose "from" is the period length in seconds.
 * Years and months are approximated as 365 and 30 days respectively (Joda cannot convert
 * them to seconds exactly) — presumably an accepted approximation here; confirm with callers.
 * Returns null for a null input.
 */
@Override public RelativeRange apply(final Period period) { if (period != null) { return RelativeRange.Builder.builder() .from(period.withYears(0).withMonths(0).plusDays(period.getYears() * 365).plusDays(period.getMonths() * 30).toStandardSeconds().getSeconds()) .build(); } else { return null; } }
// Null period in -> null range out.
@Test void testReturnsNullOnNullInput() { assertNull(converter.apply(null)); }
/**
 * Converts an Avro GenericRecord into a Beam Row following the given Beam schema.
 * Fields absent from the record's Avro schema are converted from null; any per-field
 * conversion failure is rethrown as IllegalArgumentException naming the offending field.
 */
public static Row toBeamRow(GenericRecord record, Schema schema, ConversionOptions options) { List<Object> valuesInOrder = schema.getFields().stream() .map( field -> { try { org.apache.avro.Schema.Field avroField = record.getSchema().getField(field.getName()); Object value = avroField != null ? record.get(avroField.pos()) : null; return convertAvroFormat(field.getType(), value, options); } catch (Exception cause) { throw new IllegalArgumentException( "Error converting field " + field + ": " + cause.getMessage(), cause); } }) .collect(toList()); return Row.withSchema(schema).addValues(valuesInOrder).build(); }
// Array fields containing nulls must round-trip into the expected Beam row.
@Test public void testToBeamRow_arrayNulls() { Row beamRow = BigQueryUtils.toBeamRow(ARRAY_TYPE_NULLS, BQ_ARRAY_ROW_NULLS); assertEquals(ARRAY_ROW_NULLS, beamRow); }
/**
 * Assigns a fresh random UUID string to the configured target field of each event.
 * An existing value is preserved unless {@code overwrite} is enabled. Every event is
 * reported to the match listener, and the (mutated) input collection is returned.
 */
@Override
public Collection<Event> filter(Collection<Event> events, final FilterMatchListener filterMatchListener) {
    for (final Event event : events) {
        final boolean targetMissing = event.getField(target) == null;
        if (overwrite || targetMissing) {
            event.setField(target, UUID.randomUUID().toString());
        }
        filterMatchListener.filterMatched(event);
    }
    return events;
}
// With overwrite unset (default), a pre-populated target field must keep its original value.
@Test public void testUuidWithoutOverwrite() { String targetField = "target_field"; String originalValue = "originalValue"; Map<String, Object> rawConfig = new HashMap<>(); rawConfig.put(Uuid.TARGET_CONFIG.name(), targetField); Configuration config = new ConfigurationImpl(rawConfig); Uuid uuid = new Uuid(ID, config, new ContextImpl(null, null)); PluginUtil.validateConfig(uuid, config); org.logstash.Event e = new org.logstash.Event(); e.setField(targetField, originalValue); Collection<Event> filteredEvents = uuid.filter(Collections.singletonList(e), NO_OP_MATCH_LISTENER); Assert.assertEquals(1, filteredEvents.size()); Event finalEvent = filteredEvents.stream().findFirst().orElse(null); Assert.assertNotNull(finalEvent); Assert.assertEquals(originalValue, finalEvent.getField(targetField)); }
/**
 * Builds a ListOffsetsResponse mirroring this request's topics/partitions where every
 * partition carries the error code for the given throwable. Version 0 responses use the
 * legacy old-style offsets list; later versions use UNKNOWN_OFFSET/UNKNOWN_TIMESTAMP.
 */
@Override public AbstractResponse getErrorResponse(int throttleTimeMs, Throwable e) { short versionId = version(); short errorCode = Errors.forException(e).code(); List<ListOffsetsTopicResponse> responses = new ArrayList<>(); for (ListOffsetsTopic topic : data.topics()) { ListOffsetsTopicResponse topicResponse = new ListOffsetsTopicResponse().setName(topic.name()); List<ListOffsetsPartitionResponse> partitions = new ArrayList<>(); for (ListOffsetsPartition partition : topic.partitions()) { ListOffsetsPartitionResponse partitionResponse = new ListOffsetsPartitionResponse() .setErrorCode(errorCode) .setPartitionIndex(partition.partitionIndex()); if (versionId == 0) { partitionResponse.setOldStyleOffsets(Collections.emptyList()); } else { partitionResponse.setOffset(ListOffsetsResponse.UNKNOWN_OFFSET) .setTimestamp(ListOffsetsResponse.UNKNOWN_TIMESTAMP); } partitions.add(partitionResponse); } topicResponse.setPartitions(partitions); responses.add(topicResponse); } ListOffsetsResponseData responseData = new ListOffsetsResponseData() .setThrottleTimeMs(throttleTimeMs) .setTopics(responses); return new ListOffsetsResponse(responseData); }
// For every post-v0 protocol version, the error response must echo the requested
// topic/partition with NOT_LEADER_OR_FOLLOWER and unknown offset/timestamp.
@Test public void testGetErrorResponse() { for (short version = 1; version <= ApiKeys.LIST_OFFSETS.latestVersion(); version++) { List<ListOffsetsTopic> topics = Collections.singletonList( new ListOffsetsTopic() .setName("topic") .setPartitions(Collections.singletonList( new ListOffsetsPartition() .setPartitionIndex(0)))); ListOffsetsRequest request = ListOffsetsRequest.Builder .forConsumer(true, IsolationLevel.READ_COMMITTED, false, false) .setTargetTimes(topics) .build(version); ListOffsetsResponse response = (ListOffsetsResponse) request.getErrorResponse(0, Errors.NOT_LEADER_OR_FOLLOWER.exception()); List<ListOffsetsTopicResponse> v = Collections.singletonList( new ListOffsetsTopicResponse() .setName("topic") .setPartitions(Collections.singletonList( new ListOffsetsPartitionResponse() .setErrorCode(Errors.NOT_LEADER_OR_FOLLOWER.code()) .setLeaderEpoch(ListOffsetsResponse.UNKNOWN_EPOCH) .setOffset(ListOffsetsResponse.UNKNOWN_OFFSET) .setPartitionIndex(0) .setTimestamp(ListOffsetsResponse.UNKNOWN_TIMESTAMP)))); ListOffsetsResponseData data = new ListOffsetsResponseData() .setThrottleTimeMs(0) .setTopics(v); ListOffsetsResponse expectedResponse = new ListOffsetsResponse(data); assertEquals(expectedResponse.data().topics(), response.data().topics()); assertEquals(expectedResponse.throttleTimeMs(), response.throttleTimeMs()); } }
/**
 * Reads 8 bytes at the given index and reinterprets them as an IEEE-754 double.
 * Bounds/reference-count checks are delegated to {@code getLong(int)}.
 */
@Override
public double getDouble(int index) {
    final long rawBits = getLong(index);
    return Double.longBitsToDouble(rawBits);
}
// Reading from a released buffer must fail with IllegalReferenceCountException.
@Test public void testGetDoubleAfterRelease() { assertThrows(IllegalReferenceCountException.class, new Executable() { @Override public void execute() { releasedBuffer().getDouble(0); } }); }
/**
 * Returns the cell at the given (1-based, per JDBC convention — confirm) column index.
 * Types in INVALID_MEMORY_TYPES are rejected with SQLFeatureNotSupportedException.
 * Side effect: records whether the fetched value was SQL NULL for wasNull().
 */
@Override public final Object getValue(final int columnIndex, final Class<?> type) throws SQLException { ShardingSpherePreconditions.checkNotContains(INVALID_MEMORY_TYPES, type, () -> new SQLFeatureNotSupportedException(String.format("Get value from `%s`", type.getName()))); Object result = currentResultSetRow.getCell(columnIndex); wasNull = null == result; return result; }
// SQLXML is an unsupported memory-result type; getValue must throw.
@Test void assertGetValueForSQLXML() { assertThrows(SQLFeatureNotSupportedException.class, () -> memoryMergedResult.getValue(1, SQLXML.class)); }
// Two-argument substring overload: delegates to the three-argument form with no length limit.
public FEELFnResult<String> invoke(@ParameterName("string") String string, @ParameterName("start position") Number start) { return invoke(string, start, null); }
// Start positions beyond either end of the string must yield an InvalidParametersEvent error,
// for both the two-argument and three-argument (null length) overloads.
@Test void invokeStartOutOfListBounds() { FunctionTestUtil.assertResultError(substringFunction.invoke("test", 10), InvalidParametersEvent.class); FunctionTestUtil.assertResultError(substringFunction.invoke("test", 10, null), InvalidParametersEvent.class); FunctionTestUtil.assertResultError(substringFunction.invoke("test", -10), InvalidParametersEvent.class); FunctionTestUtil.assertResultError(substringFunction.invoke("test", -10, null), InvalidParametersEvent.class); }
// Shuts down all streams and the work committer, but only if the streams were ever started:
// Supplier.get() would otherwise lazily start them (an expensive network operation).
void closeAllStreams() { // Supplier<Stream>.get() starts the stream which is an expensive operation as it initiates the // streaming RPCs by possibly making calls over the network. Do not close the streams unless // they have already been started. if (started.get()) { getWorkStream.get().shutdown(); getDataStream.get().shutdown(); workCommitter.get().stop(); commitWorkStream.get().shutdown(); } }
// Closing a sender whose streams were never started must not touch the stream factory
// (no lazy stream creation on shutdown).
@Test public void testCloseAllStreams_doesNotCloseUnstartedStreams() { WindmillStreamSender windmillStreamSender = newWindmillStreamSender(GetWorkBudget.builder().setBytes(1L).setItems(1L).build()); windmillStreamSender.closeAllStreams(); verifyNoInteractions(streamFactory); }
// Storage-builder binding for this metric function.
@Override public Class<? extends AvgHistogramFunctionBuilder> builder() { return AvgHistogramFunctionBuilder.class; }
// Round-trips the function instance through its storage builder (simulating DataTable ->
// string storage conversion) and asserts equality, including the dataset which the
// HistogramFunction equals() does not cover.
@Test public void testBuilder() throws IllegalAccessException, InstantiationException { HistogramFunctionInst inst = new HistogramFunctionInst(); inst.accept( MeterEntity.newService("service-test", Layer.GENERAL), new BucketedValues( BUCKETS, new long[] { 1, 4, 10, 10 }) ); inst.calculate(); final StorageBuilder storageBuilder = inst.builder().newInstance(); // Simulate the storage layer do, convert the datatable to string. final HashMapConverter.ToStorage toStorage = new HashMapConverter.ToStorage(); storageBuilder.entity2Storage(inst, toStorage); final Map<String, Object> map = toStorage.obtain(); map.put(SUMMATION, ((DataTable) map.get(SUMMATION)).toStorageData()); map.put(COUNT, ((DataTable) map.get(COUNT)).toStorageData()); map.put(DATASET, ((DataTable) map.get(DATASET)).toStorageData()); final AvgHistogramFunction inst2 = (AvgHistogramFunction) storageBuilder.storage2Entity( new HashMapConverter.ToEntity(map)); Assertions.assertEquals(inst, inst2); // HistogramFunction equal doesn't include dataset. Assertions.assertEquals(inst.getDataset(), inst2.getDataset()); }
/**
 * Resolves icon metadata from a best-guess identifier, trying in order: class spec,
 * class spec filtered to icon-* classes (when extra CSS classes are present), normalized
 * simple file name (e.g. "help.svg"), then absolute URL. Returns null for Jenkins symbols
 * (no metadata) and for a null guess.
 */
@Restricted(NoExternalUse.class) public static Icon tryGetIcon(String iconGuess) { // Jenkins Symbols don't have metadata so return null if (iconGuess == null || iconGuess.startsWith("symbol-")) { return null; } Icon iconMetadata = IconSet.icons.getIconByClassSpec(iconGuess); // `iconGuess` must be class names if it contains a whitespace. // It may contains extra css classes unrelated to icons. // Filter classes with `icon-` prefix. if (iconMetadata == null && iconGuess.contains(" ")) { iconMetadata = IconSet.icons.getIconByClassSpec(filterIconNameClasses(iconGuess)); } if (iconMetadata == null) { // Icon could be provided as a simple iconFileName e.g. "help.svg" iconMetadata = IconSet.icons.getIconByClassSpec(IconSet.toNormalizedIconNameClass(iconGuess) + " icon-md"); } if (iconMetadata == null) { // Icon could be provided as an absolute iconFileName e.g. "/plugin/foo/abc.png" iconMetadata = IconSet.icons.getIconByUrl(iconGuess); } return iconMetadata; }
// A class spec with unrelated extra CSS classes must still resolve to icon metadata.
@Test public void tryGetIcon_shouldReturnMetadataForExtraSpec() throws Exception { assertThat(Functions.tryGetIcon("icon-help icon-sm extra-class"), is(not(nullValue()))); }
/**
 * Dispatches a list sub-command read from the protocol stream (slice, concat, mult, imult,
 * count) to its handler; any other sub-command char falls through to the generic collections
 * handler. The resulting protocol response is written and flushed to the writer.
 */
@Override public void execute(String commandName, BufferedReader reader, BufferedWriter writer) throws Py4JException, IOException { char subCommand = safeReadLine(reader).charAt(0); String returnCommand = null; if (subCommand == LIST_SLICE_SUB_COMMAND_NAME) { returnCommand = slice_list(reader); } else if (subCommand == LIST_CONCAT_SUB_COMMAND_NAME) { returnCommand = concat_list(reader); } else if (subCommand == LIST_MULT_SUB_COMMAND_NAME) { returnCommand = mult_list(reader); } else if (subCommand == LIST_IMULT_SUB_COMMAND_NAME) { returnCommand = imult_list(reader); } else if (subCommand == LIST_COUNT_SUB_COMMAND_NAME) { returnCommand = count_list(reader); } else { returnCommand = call_collections_method(reader, subCommand); } logger.finest("Returning command: " + returnCommand); writer.write(returnCommand); writer.flush(); }
// The sort sub-command must reply with a void-success response ("!yv\n") and leave the
// backing list sorted in place.
@Test public void testSort() { String inputCommand = ListCommand.LIST_SORT_SUB_COMMAND_NAME + "\n" + target + "\ne\n"; try { command.execute("l", new BufferedReader(new StringReader(inputCommand)), writer); assertEquals("!yv\n", sWriter.toString()); assertEquals(list.get(0), "1"); assertEquals(list.get(3), "9"); } catch (Exception e) { e.printStackTrace(); fail(); } }
/**
 * Handles a job-exceptions REST request. Reads the optional upper-limit and
 * failure-label-filter query parameters (falling back to the handler defaults when
 * absent) and builds the exception-history response from the execution graph.
 */
@Override
protected JobExceptionsInfoWithHistory handleRequest(
        HandlerRequest<EmptyRequestBody> request, ExecutionGraphInfo executionGraph) {
    // Upper bound on reported exceptions; default applies when the parameter is absent.
    final List<Integer> exceptionToReportMaxSizes =
            request.getQueryParameter(UpperLimitExceptionParameter.class);
    final int exceptionToReportMaxSize =
            exceptionToReportMaxSizes.isEmpty()
                    ? MAX_NUMBER_EXCEPTION_TO_REPORT
                    : exceptionToReportMaxSizes.get(0);

    // Optional failure-label filter; an empty parameter list means "no filtering".
    List<FailureLabelFilterParameter.FailureLabel> failureLabelFilter =
            request.getQueryParameter(FailureLabelFilterParameter.class);
    if (failureLabelFilter.isEmpty()) {
        failureLabelFilter = EMPTY_FAILURE_LABEL_FILTER;
    }

    return createJobExceptionsInfo(executionGraph, exceptionToReportMaxSize, failureLabelFilter);
}
// When the requested failure-label filter matches no history entry, the response must be
// empty: no root exception/timestamp and an empty history.
@Test void testOnlyExceptionHistoryWithNoMatchingFailureLabel() throws HandlerRequestException { final RuntimeException rootThrowable = new RuntimeException("exception #0"); final long rootTimestamp = System.currentTimeMillis(); final RootExceptionHistoryEntry rootEntry = fromGlobalFailure(rootThrowable, rootTimestamp); final ExecutionGraphInfo executionGraphInfo = createExecutionGraphInfoWithoutFailureCause(rootEntry); // rootEntry with EMPTY_FAILURE_LABELS so no match final HandlerRequest<EmptyRequestBody> request = createRequest(executionGraphInfo.getJobId(), 10, Arrays.asList("key:value")); final JobExceptionsInfoWithHistory response = testInstance.handleRequest(request, executionGraphInfo); assertThat(response.getRootException()).isNull(); assertThat(response.getRootTimestamp()).isNull(); assertThat(response.getExceptionHistory().getEntries()).isEmpty(); }
// Equality is delegated to the underlying persistent set; uses exact-class comparison
// (getClass, not instanceof), so subclasses never compare equal to this class.
@Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PCollectionsImmutableNavigableSet<?> that = (PCollectionsImmutableNavigableSet<?>) o; return Objects.equals(underlying(), that.underlying()); }
// Wrappers around the same underlying set are equal; around different underlyings, not.
@Test public void testEquals() { final TreePSet<Object> mock = mock(TreePSet.class); assertEquals(new PCollectionsImmutableNavigableSet<>(mock), new PCollectionsImmutableNavigableSet<>(mock)); final TreePSet<Object> someOtherMock = mock(TreePSet.class); assertNotEquals(new PCollectionsImmutableNavigableSet<>(mock), new PCollectionsImmutableNavigableSet<>(someOtherMock)); }
// Answers an XMPP entity-time query (XEP-0202): builds a result IQ whose child element
// carries the server's timezone offset (<tzo>) and current UTC time (<utc>).
@Override public IQ handleIQ(IQ packet) { IQ response = IQ.createResultIQ(packet); Element timeElement = DocumentHelper.createElement(QName.get(info.getName(), info.getNamespace())); timeElement.addElement("tzo").setText(formatsTimeZone(TimeZone.getDefault())); timeElement.addElement("utc").setText(getUtcDate(new Date())); response.setChildElement(timeElement); return response; }
// The entity-time response must be a <time xmlns="urn:xmpp:time"> element containing exactly
// two child elements, <tzo> then <utc>, in that order.
@Test public void testIQ() { IQEntityTimeHandler iqEntityTimeHandler = new IQEntityTimeHandler(); IQ input = new IQ(IQ.Type.get, "1"); IQ result = iqEntityTimeHandler.handleIQ(input); assertEquals(result.getChildElement().getName(), "time"); assertEquals(result.getChildElement().getNamespace().getText(), "urn:xmpp:time"); assertEquals(result.getChildElement().content().size(), 2); assertTrue(result.getChildElement().content().get(0) instanceof Element); assertTrue(result.getChildElement().content().get(1) instanceof Element); assertEquals(result.getChildElement().content().get(0).getName(), "tzo"); assertEquals(result.getChildElement().content().get(1).getName(), "utc"); }
/**
 * Snapshots this bit set into a plain {@link java.util.BitSet} by walking every set
 * bit in ascending order via {@code nextSetBit}.
 */
public BitSet toBitSet() {
    final BitSet snapshot = new BitSet();
    for (int bit = this.nextSetBit(0); bit != -1; bit = this.nextSetBit(bit + 1)) {
        snapshot.set(bit);
    }
    return snapshot;
}
// Mirrors set() calls into a java.util.BitSet and a ThreadSafeBitSet, then compares
// membership, cardinality, and the toBitSet() snapshot.
// NOTE(review): the last assertion compares bSet.toString() with itself — it presumably
// intended bSet2.toString(); it can never fail as written.
@Test public void testToBitSet() { BitSet bSet = new BitSet(); ThreadSafeBitSet tsbSet = new ThreadSafeBitSet(); int[] ordinals = new int[] { 1, 5, 10 }; // init for (int ordinal : ordinals) { bSet.set(ordinal); tsbSet.set(ordinal); } // validate content for (int ordinal : ordinals) { Assert.assertEquals(bSet.get(ordinal), tsbSet.get(ordinal)); } Assert.assertEquals(bSet.cardinality(), tsbSet.cardinality()); // compare toBitSet BitSet bSet2 = tsbSet.toBitSet(); Assert.assertEquals(bSet, bSet2); // compare toString Assert.assertEquals(bSet.toString(), bSet.toString()); }
// Renders the configuration with key material masked (maskKeyData hides data: payloads)
// so secrets never leak into logs.
@Override public String toString() { return String.format(TO_STRING_FORMAT, getClass().getSimpleName(), maskKeyData(defaultPublicKey), maskKeyData(defaultPrivateKey), maskKeyData(publicKeys), maskKeyData(privateKeys)); }
// toString must keep file: paths readable but mask inline data: key material as "data:*****".
@Test public void testToString() { DefaultCryptoKeyReaderConfigurationData conf = new DefaultCryptoKeyReaderConfigurationData(); assertEquals(conf.toString(), "DefaultCryptoKeyReaderConfigurationData(defaultPublicKey=null, defaultPrivateKey=null, publicKeys={}, privateKeys={})"); conf.setDefaultPublicKey("file:///path/to/default-public.key"); conf.setDefaultPrivateKey("data:AAAAA"); conf.setPublicKey("key1", "file:///path/to/public.key"); conf.setPrivateKey("key2", "file:///path/to/private.key"); assertEquals(conf.toString(), "DefaultCryptoKeyReaderConfigurationData(defaultPublicKey=file:///path/to/default-public.key, defaultPrivateKey=data:*****, publicKeys={key1=file:///path/to/public.key}, privateKeys={key2=file:///path/to/private.key})"); conf.setPublicKey("key3", "data:BBBBB"); conf.setPrivateKey("key4", "data:CCCCC"); assertTrue(conf.toString().startsWith( "DefaultCryptoKeyReaderConfigurationData(defaultPublicKey=file:///path/to/default-public.key, defaultPrivateKey=data:*****, publicKeys={")); assertTrue(conf.toString().contains("key3=data:*****")); assertFalse(conf.toString().contains("key3=data:BBBBB")); assertTrue(conf.toString().contains("key4=data:*****")); assertFalse(conf.toString().contains("key4=data:CCCCC")); assertTrue(conf.toString().endsWith("})")); }
/**
 * Resolves the (relaxed) property name against the configuration and joins all of its
 * values with commas. Returns {@code null} when the property has no values so callers
 * can distinguish "missing" from an empty string.
 */
private static String getProperty(String name, Configuration configuration) {
    // String.join replaces the original Optional/stream/Collectors.joining chain —
    // same output, including the comma separator and the null-on-empty contract.
    String[] values = configuration.getStringArray(relaxPropertyName(name));
    return values.length > 0 ? String.join(",", values) : null;
}
// Verifies dynamic environment-variable templating: properties listed in dynamic.env.config
// are resolved from the (mocked) environment, non-templated properties pass through
// unchanged, and values templated inside the referenced config file resolve too.
@Test public void assertDynamicEnvConfig() throws IOException { Map<String, Object> baseProperties = new HashMap<>(); Map<String, String> mockedEnvironmentVariables = new HashMap<>(); String configFile = File.createTempFile("pinot-configuration-test-4", ".properties").getAbsolutePath(); baseProperties.put("server.host", "ENV_SERVER_HOST"); baseProperties.put("not.templated.cli", "static-value"); baseProperties.put("dynamic.env.config", "server.host"); mockedEnvironmentVariables.put("ENV_VAR_HOST", "test-host"); mockedEnvironmentVariables.put("TEST_PROPERTY", "test-property"); mockedEnvironmentVariables.put("ENV_SERVER_HOST", "test-server-host"); baseProperties.put("config.paths", "classpath:/pinot-configuration-4.properties"); copyClasspathResource("/pinot-configuration-4.properties", configFile); PinotConfiguration configuration = new PinotConfiguration(baseProperties, mockedEnvironmentVariables); // Tests that cli arguments have the highest priority. Assert.assertEquals(configuration.getProperty("server.host"), "test-server-host"); // Checking for non templated values Assert.assertEquals(configuration.getProperty("not.templated.cli"), "static-value"); // Templated values in configFile Assert.assertEquals(configuration.getProperty("pinot.controller.host"), "test-host"); }
/**
 * FEEL string length function: returns the number of Unicode code points (not UTF-16
 * chars) in the string, or an InvalidParametersEvent error for a null input.
 */
public FEELFnResult<BigDecimal> invoke(@ParameterName("string") String string) {
    if (string == null) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "string", "cannot be null"));
    }
    final int codePoints = string.codePointCount(0, string.length());
    return FEELFnResult.ofResult(NumberEvalHelper.getBigDecimalOrNull(codePoints));
}
// "testString" has 10 code points -> BigDecimal.TEN.
@Test void invoke() { FunctionTestUtil.assertResult(stringLengthFunction.invoke("testString"), BigDecimal.TEN); }
/**
 * Compiles the given source to a Class, caching by (classloader hash, code) so repeated
 * codegen of identical code is served from cache without pinning the loader itself.
 * Any compilation/cache failure is rethrown as FlinkRuntimeException.
 */
@SuppressWarnings("unchecked") public static <T> Class<T> compile(ClassLoader cl, String name, String code) { try { // The class name is part of the "code" and makes the string unique, // to prevent class leaks we don't cache the class loader directly // but only its hash code final ClassKey classKey = new ClassKey(cl.hashCode(), code); return (Class<T>) COMPILED_CLASS_CACHE.get(classKey, () -> doCompile(cl, name, code)); } catch (Exception e) { throw new FlinkRuntimeException(e.getMessage(), e); } }
// Syntactically invalid source must surface as a FlinkRuntimeException with the
// "cannot be compiled" bug message.
@Test public void testWrongCode() { String code = "public class111 Main {\n" + " int i;\n" + " int j;\n" + "}"; assertThatThrownBy( () -> CompileUtils.compile(this.getClass().getClassLoader(), "Main", code)) .isInstanceOf(FlinkRuntimeException.class) .hasMessageContaining( "Table program cannot be compiled. This is a bug. Please file an issue."); }
/**
 * Returns true when the candidate's log is at least as up-to-date as every other member's
 * (members with NULL_POSITION or a more recent log veto immediately), excluding a
 * gracefully-closed previous leader, and the remaining voters still form a quorum.
 */
public static boolean isUnanimousCandidate( final ClusterMember[] clusterMembers, final ClusterMember candidate, final int gracefulClosedLeaderId) { int possibleVotes = 0; for (final ClusterMember member : clusterMembers) { if (member.id == gracefulClosedLeaderId) { continue; } if (NULL_POSITION == member.logPosition || compareLog(candidate, member) < 0) { return false; } possibleVotes++; } return possibleVotes >= ClusterMember.quorumThreshold(clusterMembers.length); }
// Member 3's log is ahead of the candidate's, so the candidate cannot be unanimous.
@Test void isUnanimousCandidateReturnFalseIfThereIsAMemberWithMoreUpToDateLog() { final int gracefulClosedLeaderId = Aeron.NULL_VALUE; final ClusterMember candidate = newMember(4, 10, 800); final ClusterMember[] members = new ClusterMember[] { newMember(1, 2, 100), newMember(2, 8, 6), newMember(3, 11, 1000) }; assertFalse(isUnanimousCandidate(members, candidate, gracefulClosedLeaderId)); }
/**
 * Builds the KStream for a stream-stream join. For RIGHT joins the sides are swapped so the
 * join can be executed as a left join with the operands reversed. Serdes are built from the
 * internal formats of each side, the join window is configured (with grace when present),
 * and the join type selects the corresponding KStreams join operator.
 */
@SuppressWarnings("deprecation") public static <K> KStreamHolder<K> build( final KStreamHolder<K> left, final KStreamHolder<K> right, final StreamStreamJoin<K> join, final RuntimeBuildContext buildContext, final StreamJoinedFactory streamJoinedFactory) { final QueryContext queryContext = join.getProperties().getQueryContext(); final QueryContext.Stacker stacker = QueryContext.Stacker.of(queryContext); final LogicalSchema leftSchema; final LogicalSchema rightSchema; final Formats rightFormats; final Formats leftFormats; if (join.getJoinType().equals(RIGHT)) { leftFormats = join.getRightInternalFormats(); rightFormats = join.getLeftInternalFormats(); leftSchema = right.getSchema(); rightSchema = left.getSchema(); } else { leftFormats = join.getLeftInternalFormats(); rightFormats = join.getRightInternalFormats(); leftSchema = left.getSchema(); rightSchema = right.getSchema(); } final PhysicalSchema leftPhysicalSchema = PhysicalSchema.from( leftSchema, leftFormats.getKeyFeatures(), leftFormats.getValueFeatures() ); final Serde<GenericRow> leftSerde = buildContext.buildValueSerde( leftFormats.getValueFormat(), leftPhysicalSchema, stacker.push(LEFT_SERDE_CTX).getQueryContext() ); final PhysicalSchema rightPhysicalSchema = PhysicalSchema.from( rightSchema, rightFormats.getKeyFeatures(), rightFormats.getValueFeatures() ); final Serde<GenericRow> rightSerde = buildContext.buildValueSerde( rightFormats.getValueFormat(), rightPhysicalSchema, stacker.push(RIGHT_SERDE_CTX).getQueryContext() ); final Serde<K> keySerde = left.getExecutionKeyFactory().buildKeySerde( leftFormats.getKeyFormat(), leftPhysicalSchema, queryContext ); final StreamJoined<K, GenericRow, GenericRow> joined = streamJoinedFactory.create( keySerde, leftSerde, rightSerde, StreamsUtil.buildOpName(queryContext), StreamsUtil.buildOpName(queryContext) ); final JoinParams joinParams = JoinParamsFactory .create(join.getKeyColName(), leftSchema, rightSchema); JoinWindows joinWindows;
// Grace, as optional, helps to identify if a user specified the GRACE PERIOD syntax in the
// join window. If specified, then we'll call the new KStreams API ofTimeDifferenceAndGrace()
// which enables the "spurious" results bugfix with left/outer joins (see KAFKA-10847).
if (join.getGraceMillis().isPresent()) { joinWindows = JoinWindows.ofTimeDifferenceAndGrace( join.getBeforeMillis(), join.getGraceMillis().get()); } else { joinWindows = JoinWindows.of(join.getBeforeMillis()); } joinWindows = joinWindows.after(join.getAfterMillis()); final KStream<K, GenericRow> result; switch (join.getJoinType()) { case LEFT: result = left.getStream().leftJoin( right.getStream(), joinParams.getJoiner(), joinWindows, joined); break; case RIGHT: result = right.getStream().leftJoin( left.getStream(), joinParams.getJoiner(), joinWindows, joined); break; case OUTER: result = left.getStream().outerJoin( right.getStream(), joinParams.getJoiner(), joinWindows, joined); break; case INNER: result = left.getStream().join( right.getStream(), joinParams.getJoiner(), joinWindows, joined); break; default: throw new IllegalStateException("invalid join type"); } return left.withStream(result, joinParams.getSchema()); }
// A LEFT join must invoke KStream.leftJoin with the value joiner, the no-grace window,
// and the configured StreamJoined, returning the result stream and key factory unchanged.
@Test public void shouldDoLeftJoin() { // Given: givenLeftJoin(L_KEY); // When: final KStreamHolder<Struct> result = join.build(planBuilder, planInfo); // Then: verify(leftKStream).leftJoin( same(rightKStream), eq(new KsqlValueJoiner(LEFT_SCHEMA.value().size(), RIGHT_SCHEMA.value().size(), 0)), eq(WINDOWS_NO_GRACE), same(joined) ); verifyNoMoreInteractions(leftKStream, rightKStream, resultKStream); assertThat(result.getStream(), is(resultKStream)); assertThat(result.getExecutionKeyFactory(), is(executionKeyFactory)); }
// NOTE(review): byte-for-byte duplicate of the tryGetIcon definition earlier in this file —
// presumably an artifact of how this file was assembled; consider deduplicating.
// Resolves icon metadata from a best-guess identifier (class spec, filtered class spec,
// normalized file name, then URL); null for Jenkins symbols and null input.
@Restricted(NoExternalUse.class) public static Icon tryGetIcon(String iconGuess) { // Jenkins Symbols don't have metadata so return null if (iconGuess == null || iconGuess.startsWith("symbol-")) { return null; } Icon iconMetadata = IconSet.icons.getIconByClassSpec(iconGuess); // `iconGuess` must be class names if it contains a whitespace. // It may contains extra css classes unrelated to icons. // Filter classes with `icon-` prefix. if (iconMetadata == null && iconGuess.contains(" ")) { iconMetadata = IconSet.icons.getIconByClassSpec(filterIconNameClasses(iconGuess)); } if (iconMetadata == null) { // Icon could be provided as a simple iconFileName e.g. "help.svg" iconMetadata = IconSet.icons.getIconByClassSpec(IconSet.toNormalizedIconNameClass(iconGuess) + " icon-md"); } if (iconMetadata == null) { // Icon could be provided as an absolute iconFileName e.g. "/plugin/foo/abc.png" iconMetadata = IconSet.icons.getIconByUrl(iconGuess); } return iconMetadata; }
// symbol-* identifiers carry no metadata and must resolve to null.
@Test public void tryGetIcon_shouldReturnNullForSymbol() throws Exception { assertThat(Functions.tryGetIcon("symbol-search"), is(nullValue())); }
/**
 * Formats a changes-on-my-issues notification into an email. Returns null for other
 * notification types. Analysis-triggered changes use the single-project format (and
 * require a non-empty changed-issue set); user changes use the multi-project format.
 */
@Override @CheckForNull public EmailMessage format(Notification notif) { if (!(notif instanceof ChangesOnMyIssuesNotification)) { return null; } ChangesOnMyIssuesNotification notification = (ChangesOnMyIssuesNotification) notif; if (notification.getChange() instanceof AnalysisChange) { checkState(!notification.getChangedIssues().isEmpty(), "changedIssues can't be empty"); return formatAnalysisNotification(notification.getChangedIssues().keySet().iterator().next(), notification); } return formatMultiProject(notification); }
// The analysis-change email header must use singular wording for one changed issue and
// plural wording for several.
@Test public void format_set_html_message_with_header_dealing_with_plural_when_change_from_Analysis() { Set<ChangedIssue> changedIssues = IntStream.range(0, 2 + new Random().nextInt(4)) .mapToObj(i -> newChangedIssue(i + "", randomValidStatus(), newProject("prj_" + i), newRandomNotAHotspotRule("rule_" + i))) .collect(toSet()); AnalysisChange analysisChange = newAnalysisChange(); EmailMessage singleIssueMessage = underTest.format(new ChangesOnMyIssuesNotification(analysisChange, changedIssues.stream().limit(1).collect(toSet()))); EmailMessage multiIssueMessage = underTest.format(new ChangesOnMyIssuesNotification(analysisChange, changedIssues)); HtmlFragmentAssert.assertThat(singleIssueMessage.getMessage()) .hasParagraph("Hi,") .hasParagraph("An analysis has updated an issue assigned to you:"); HtmlFragmentAssert.assertThat(multiIssueMessage.getMessage()) .hasParagraph("Hi,") .hasParagraph("An analysis has updated issues assigned to you:"); }
// Builds a Getter that reads the given reflective field (with optional [modifier] such as
// "[any]") chained onto the parent getter; delegates type resolution to newGetter.
public static Getter newFieldGetter(Object object, Getter parent, Field field, String modifier) throws Exception { return newGetter(object, parent, modifier, field.getType(), field::get, (t, et) -> new FieldGetter(parent, field, modifier, t, et)); }
// Even when the first extracted array value is null, the getter must infer the element
// type (Integer) from the remaining non-null values.
@Test public void newFieldGetter_whenExtractingFromNonEmpty_Array_FieldAndParentIsNonEmptyMultiResult_nullFirstValue_thenInferReturnType() throws Exception { OuterObject object = new OuterObject("name", new InnerObject("inner", null, 0, 1, 2, 3)); Getter parentGetter = GetterFactory.newFieldGetter(object, null, innersArrayField, "[any]"); Getter innerObjectNameGetter = GetterFactory.newFieldGetter(object, parentGetter, innerAttributesArrayField, "[any]"); Class<?> returnType = innerObjectNameGetter.getReturnType(); assertEquals(Integer.class, returnType); }
/**
 * Releases a RUNNING trigger owned by this node: clears the lock owner, resets or
 * increments the concurrency-reschedule counter, sets the next status/time (COMPLETE when
 * no next time, i.e. "once" schedules), optionally updates trigger data, and records the
 * execution duration. Returns true only when exactly one document was modified — false
 * means the trigger was not locked by this node or was not RUNNING.
 */
public boolean releaseTrigger(JobTriggerDto trigger, JobTriggerUpdate triggerUpdate) { requireNonNull(trigger, "trigger cannot be null"); requireNonNull(triggerUpdate, "triggerUpdate cannot be null"); final var filter = and( // Make sure that the owner still owns the trigger eq(FIELD_LOCK_OWNER, nodeId), idEq(getId(trigger)), // Only release running triggers. The trigger might have been paused while the trigger was running // so we don't want to set it to RUNNABLE again. // TODO: This is an issue. If a user set it to PAUSED, we will not unlock it. Figure something out. // Maybe a manual trigger pause will set "nextStatus" if the trigger is currently running? // That next status would need to be set on release. eq(FIELD_STATUS, JobTriggerStatus.RUNNING) ); final List<Bson> updates = new ArrayList<>(); updates.add(unset(FIELD_LOCK_OWNER)); if (triggerUpdate.concurrencyReschedule()) { updates.add(inc(FIELD_CONCURRENCY_RESCHEDULE_COUNT, 1)); } else { updates.add(set(FIELD_CONCURRENCY_RESCHEDULE_COUNT, 0)); } // An empty next time indicates that this trigger should not be fired anymore. (e.g. for "once" schedules) if (triggerUpdate.nextTime().isPresent()) { if (triggerUpdate.status().isPresent()) { updates.add(set(FIELD_STATUS, triggerUpdate.status().get())); } else { updates.add(set(FIELD_STATUS, JobTriggerStatus.RUNNABLE)); } updates.add(set(FIELD_NEXT_TIME, triggerUpdate.nextTime().get())); } else { updates.add(set(FIELD_STATUS, triggerUpdate.status().orElse(JobTriggerStatus.COMPLETE))); } if (triggerUpdate.data().isPresent()) { updates.add(set(FIELD_DATA, triggerUpdate.data())); } trigger.triggeredAt().ifPresent(triggeredAt -> { var duration = new org.joda.time.Duration(triggeredAt, clock.nowUTC()); updates.add(set(FIELD_LAST_EXECUTION_DURATION, Optional.of(duration.getMillis()))); }); return collection.updateOne(filter, combine(updates)).getModifiedCount() == 1; }
// Exercises the full lock/release lifecycle: releasing an unlocked trigger is a no-op,
// releasing a locked trigger applies the update (status, nextTime, data, duration,
// reset reschedule count), and a second release is a no-op again.
@Test
public void releaseTrigger() {
    final JobTriggerDto trigger1 = dbJobTriggerService.create(JobTriggerDto.Builder.create(clock)
            .jobDefinitionId("abc-123")
            .jobDefinitionType("event-processor-execution-v1")
            .concurrencyRescheduleCount(42)
            .schedule(IntervalJobSchedule.builder()
                    .interval(1)
                    .unit(TimeUnit.SECONDS)
                    .build())
            .build());
    final JobTriggerData newData = TestJobTriggerData.create(Collections.singletonMap("hello", "world"));
    final JobTriggerUpdate update = JobTriggerUpdate.withNextTimeAndData(clock.nowUTC().plusSeconds(20), newData);

    // Releasing the trigger should not do anything because the trigger has not been locked yet
    assertThat(dbJobTriggerService.releaseTrigger(trigger1, update)).isFalse();

    // Lock the trigger
    final Optional<JobTriggerDto> runnableTrigger = dbJobTriggerService.nextRunnableTrigger();
    assertThat(runnableTrigger).isNotEmpty();

    // Advance the clock so the recorded execution duration is a known 15 seconds.
    clock.plus(15, TimeUnit.SECONDS);

    // Releasing the trigger should work now
    assertThat(dbJobTriggerService.releaseTrigger(runnableTrigger.get(), update)).isTrue();

    assertThat(dbJobTriggerService.get(trigger1.id()))
            .isPresent()
            .get()
            .satisfies(trigger -> {
                // Make sure the lock is gone
                assertThat(trigger.lock().owner()).isNull();
                assertThat(trigger.status()).isEqualTo(JobTriggerStatus.RUNNABLE);
                assertThat(trigger.nextTime()).isEqualTo(update.nextTime().orElse(null));
                assertThat(trigger.executionDurationMs()).isPresent().get().isEqualTo(15_000L);
                assertThat(trigger.concurrencyRescheduleCount()).isEqualTo(0);
                assertThat(trigger.data()).isPresent().get().satisfies(data -> {
                    assertThat(data).isInstanceOf(TestJobTriggerData.class);
                    assertThat(data).isEqualTo(TestJobTriggerData.create(Collections.singletonMap("hello", "world")));
                });
            });

    // Releasing it again doesn't do anything
    assertThat(dbJobTriggerService.releaseTrigger(trigger1, update)).isFalse();
}
/**
 * Closes the wrapped input stream if one is present. An IOException raised while
 * closing is logged as a warning and otherwise suppressed.
 */
@Override
public void close() {
    if (input == null) {
        return;
    }
    try {
        input.close();
    } catch (IOException e) {
        LOG.warn("failed to close stream", e);
    }
}
// Verifies that reading after close either returns -1 (end of stream) or throws
// IOException; in both cases n stays -1.
@Test
public void close() throws IOException {
    ss.open();
    ss.close();
    int n = -1;
    try {
        byte[] buff = new byte[1];
        n = ss.read(buff);
    } catch (IOException ignored) {
        // A closed stream is allowed to reject the read outright.
    }
    assertEquals(-1, n);
}
/**
 * Runs the localization protocol against the node manager at {@code nmAddr}: reads the
 * localizer token from the working directory, builds the RPC proxy under the localizer
 * identity, and downloads the requested resources on a dedicated thread pool.
 *
 * @param nmAddr address of the node manager's localization service
 * @throws IOException          on any localization failure (other Throwables are wrapped)
 * @throws InterruptedException if interrupted while waiting for the download pool to stop
 */
@SuppressWarnings("deprecation")
public void runLocalization(final InetSocketAddress nmAddr)
        throws IOException, InterruptedException {
    // load credentials
    initDirs(conf, user, appId, lfs, localDirs);
    final Credentials creds = new Credentials();
    DataInputStream credFile = null;
    try {
        // assume credentials in cwd
        // TODO: Fix
        Path tokenPath = new Path(tokenFileName);
        credFile = lfs.open(tokenPath);
        creds.readTokenStorageStream(credFile);
        // Explicitly deleting token file.
        lfs.delete(tokenPath, false);
    } finally {
        if (credFile != null) {
            credFile.close();
        }
    }
    // create localizer context
    UserGroupInformation remoteUser = UserGroupInformation.createRemoteUser(user);
    remoteUser.addToken(creds.getToken(LocalizerTokenIdentifier.KIND));
    final LocalizationProtocol nodeManager = remoteUser.doAs(
            new PrivilegedAction<LocalizationProtocol>() {
                @Override
                public LocalizationProtocol run() {
                    return getProxy(nmAddr);
                }
            });
    // create user context
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
    for (Token<? extends TokenIdentifier> token : creds.getAllTokens()) {
        ugi.addToken(token);
    }

    ExecutorService exec = null;
    try {
        exec = createDownloadThreadPool();
        CompletionService<Path> ecs = createCompletionService(exec);
        localizeFiles(nodeManager, ecs, ugi);
    } catch (Throwable e) {
        // Wrap everything so callers only have to handle IOException.
        throw new IOException(e);
    } finally {
        try {
            if (exec != null) {
                exec.shutdown();
                // Kill shell processes spawned by downloads before waiting for termination.
                destroyShellProcesses(getAllShells());
                exec.awaitTermination(10, TimeUnit.SECONDS);
            }
            LocalDirAllocator.removeContext(appCacheDirContextName);
        } finally {
            // Always release the cached filesystems, even if pool shutdown failed.
            closeFileSystems(ugi);
        }
    }
}
// Runs two localizers concurrently and verifies that sending DIE to one kills only its
// own download subprocess while the other localizer keeps running.
@Test
public void testMultipleLocalizers() throws Exception {
    FakeContainerLocalizerWrapper testA = new FakeContainerLocalizerWrapper();
    FakeContainerLocalizerWrapper testB = new FakeContainerLocalizerWrapper();
    FakeContainerLocalizer localizerA = testA.init();
    FakeContainerLocalizer localizerB = testB.init();

    // run localization
    Thread threadA = new Thread() {
        @Override
        public void run() {
            try {
                localizerA.runLocalization(nmAddr);
            } catch (Exception e) {
                LOG.warn(e.toString());
            }
        }
    };
    Thread threadB = new Thread() {
        @Override
        public void run() {
            try {
                localizerB.runLocalization(nmAddr);
            } catch (Exception e) {
                LOG.warn(e.toString());
            }
        }
    };
    ShellCommandExecutor shexcA = null;
    ShellCommandExecutor shexcB = null;
    try {
        threadA.start();
        threadB.start();
        // Wait until both localizers have actually spawned a download subprocess.
        GenericTestUtils.waitFor(new Supplier<Boolean>() {
            @Override
            public Boolean get() {
                FakeContainerLocalizer.FakeLongDownload downloader = localizerA.getDownloader();
                return downloader != null && downloader.getShexc() != null
                        && downloader.getShexc().getProcess() != null;
            }
        }, 10, 30000);

        GenericTestUtils.waitFor(new Supplier<Boolean>() {
            @Override
            public Boolean get() {
                FakeContainerLocalizer.FakeLongDownload downloader = localizerB.getDownloader();
                return downloader != null && downloader.getShexc() != null
                        && downloader.getShexc().getProcess() != null;
            }
        }, 10, 30000);

        shexcA = localizerA.getDownloader().getShexc();
        shexcB = localizerB.getDownloader().getShexc();

        assertTrue("Localizer A process not running, but should be", shexcA.getProcess().isAlive());
        assertTrue("Localizer B process not running, but should be", shexcB.getProcess().isAlive());

        // Stop heartbeat from giving anymore resources to download
        testA.heartbeatResponse++;
        testB.heartbeatResponse++;

        // Send DIE to localizerA. This should kill its subprocesses
        testA.heartbeatResponse++;
        threadA.join();
        shexcA.getProcess().waitFor(10000, TimeUnit.MILLISECONDS);
        assertFalse("Localizer A process is still running, but shouldn't be", shexcA.getProcess().isAlive());
        assertTrue("Localizer B process not running, but should be", shexcB.getProcess().isAlive());
    } finally {
        // Make sure everything gets cleaned up
        // Process A should already be dead
        shexcA.getProcess().destroy();
        shexcB.getProcess().destroy();
        shexcA.getProcess().waitFor(10000, TimeUnit.MILLISECONDS);
        shexcB.getProcess().waitFor(10000, TimeUnit.MILLISECONDS);
        threadA.join();
        // Send DIE to localizer B
        testB.heartbeatResponse++;
        threadB.join();
    }
}
/**
 * Fetches the report for the given application.
 *
 * @param appId the application to look up
 * @return the fetched application report
 * @throws YarnException on YARN-side failures
 * @throws IOException   on communication failures
 */
public abstract FetchedAppReport getApplicationReport(ApplicationId appId)
    throws YarnException, IOException;
// With the Application History Server enabled, a fetch consults both the history
// manager and the application manager exactly once.
@Test
void testFetchReportAHSEnabled() throws YarnException, IOException {
    testHelper(true);
    Mockito.verify(historyManager, Mockito.times(1))
            .getApplicationReport(Mockito.any(GetApplicationReportRequest.class));
    Mockito.verify(appManager, Mockito.times(1))
            .getApplicationReport(Mockito.any(GetApplicationReportRequest.class));
}
/**
 * Opens the named zip archive, returning a ZipFileRO wrapper on success or null
 * (after logging a warning) if the archive cannot be opened.
 */
static ZipFileRO open(final String zipFileName) {
    final Ref<ZipArchiveHandle> handle = new Ref<>(null);
    final int error = OpenArchive(zipFileName, handle);
    if (isTruthy(error)) {
        ALOGW("Error opening archive %s: %s", zipFileName, ErrorCodeString(error));
        // NOTE(review): CloseArchive is invoked without the handle here — confirm this
        // matches the native counterpart's cleanup contract.
        CloseArchive();
        return null;
    }

    return new ZipFileRO(handle.get(), zipFileName);
}
@Test
public void open_emptyZip() throws Exception {
    // ensure ZipFileRO can handle an empty zip file with no central directory
    File blob = File.createTempFile("prefix", "zip");
    // Opening and immediately closing the stream writes a zip with zero entries.
    try (ZipOutputStream zip = new ZipOutputStream(new FileOutputStream(blob))) {}

    ZipFileRO zipFile = ZipFileRO.open(blob.toString());
    assertThat(zipFile).isNotNull();
}
/**
 * Reacts to game/spam chat messages by starting or stopping the infobox timers the
 * plugin tracks: pickpocket stuns, abyssal sire stun, dwarf cannon state, prayer
 * enhance, staff of the dead, freezes, Arceuus spell durations/cooldowns, TzHaar
 * fight timers, and several consumables.
 */
@Subscribe
public void onChatMessage(ChatMessage event) {
    final String message = event.getMessage();

    // Only SPAM and GAMEMESSAGE lines carry the messages handled below.
    if (event.getType() != ChatMessageType.SPAM && event.getType() != ChatMessageType.GAMEMESSAGE) {
        return;
    }

    // A dodgy necklace / Shadow Veil absorbed the failed pickpocket, so no stun occurred.
    if (message.contains(DODGY_NECKLACE_PROTECTION_MESSAGE) || message.contains(SHADOW_VEIL_PROTECTION_MESSAGE)) {
        removeGameTimer(PICKPOCKET_STUN);
    }

    if (message.contains(PICKPOCKET_FAILURE_MESSAGE) && config.showPickpocketStun() && message.contains("pocket")) {
        // Heroes and elves stun for 6 seconds; all other targets stun for 5.
        if (message.contains("hero") || message.contains("elf")) {
            createGameTimer(PICKPOCKET_STUN, Duration.ofSeconds(6));
        } else {
            createGameTimer(PICKPOCKET_STUN, Duration.ofSeconds(5));
        }
    }

    if (message.equals(ABYSSAL_SIRE_STUN_MESSAGE) && config.showAbyssalSireStun()) {
        createGameTimer(ABYSSAL_SIRE_STUN);
    }

    if (config.showCannon()) {
        // Building or repairing the cannon (re)starts the decay timer; breaking starts
        // the repair timer; picking up or losing the cannon clears both.
        if (message.equals(CANNON_BASE_MESSAGE) || message.equals(CANNON_STAND_MESSAGE)
            || message.equals(CANNON_BARRELS_MESSAGE) || message.equals(CANNON_FURNACE_MESSAGE)
            || message.contains(CANNON_REPAIR_MESSAGE)) {
            removeGameTimer(CANNON_REPAIR);
            TimerTimer cannonTimer = createGameTimer(CANNON);
            cannonTimer.setTooltip(cannonTimer.getTooltip() + " - World " + client.getWorld());
        } else if (message.equals(CANNON_BROKEN_MESSAGE)) {
            removeGameTimer(CANNON);
            TimerTimer cannonTimer = createGameTimer(CANNON_REPAIR);
            cannonTimer.setTooltip(cannonTimer.getTooltip() + " - World " + client.getWorld());
        } else if (message.equals(CANNON_PICKUP_MESSAGE) || message.equals(CANNON_DESTROYED_MESSAGE)) {
            removeGameTimer(CANNON);
            removeGameTimer(CANNON_REPAIR);
        }
    }

    if (config.showPrayerEnhance() && message.startsWith("You drink some of your") && message.contains("prayer enhance")) {
        createGameTimer(PRAYER_ENHANCE);
    }

    if (config.showPrayerEnhance() && message.equals(PRAYER_ENHANCE_EXPIRED)) {
        removeGameTimer(PRAYER_ENHANCE);
    }

    if (config.showStaffOfTheDead() && message.contains(STAFF_OF_THE_DEAD_SPEC_MESSAGE)) {
        createGameTimer(STAFF_OF_THE_DEAD);
    }

    if (config.showStaffOfTheDead() && message.contains(STAFF_OF_THE_DEAD_SPEC_EXPIRED_MESSAGE)) {
        removeGameTimer(STAFF_OF_THE_DEAD);
    }

    if (config.showFreezes() && message.equals(FROZEN_MESSAGE)) {
        // Remember the timer and tick so the freeze can be adjusted later if needed.
        freezeTimer = createGameTimer(ICEBARRAGE);
        freezeTime = client.getTickCount();
    }

    if (config.showArceuus()) {
        // Arceuus spell durations scale with the real magic level (ticks).
        final int magicLevel = client.getRealSkillLevel(Skill.MAGIC);
        if (message.endsWith(SHADOW_VEIL_MESSAGE)) {
            createGameTimer(SHADOW_VEIL, Duration.of(magicLevel, RSTimeUnit.GAME_TICKS));
        } else if (message.endsWith(WARD_OF_ARCEUUS_MESSAGE)) {
            createGameTimer(WARD_OF_ARCEUUS, Duration.of(magicLevel, RSTimeUnit.GAME_TICKS));
        } else if (message.endsWith(MARK_OF_DARKNESS_MESSAGE)) {
            createGameTimer(MARK_OF_DARKNESS, Duration.of(magicLevel, RSTimeUnit.GAME_TICKS));
        } else if (message.contains(RESURRECT_THRALL_MESSAGE_START) && message.endsWith(RESURRECT_THRALL_MESSAGE_END)) {
            // by default the thrall lasts 1 tick per magic level
            int t = client.getBoostedSkillLevel(Skill.MAGIC);
            // ca tiers being completed boosts this
            if (client.getVarbitValue(Varbits.COMBAT_ACHIEVEMENT_TIER_GRANDMASTER) == 2) {
                t += t; // 100% boost
            } else if (client.getVarbitValue(Varbits.COMBAT_ACHIEVEMENT_TIER_MASTER) == 2) {
                t += t / 2; // 50% boost
            }
            createGameTimer(RESURRECT_THRALL, Duration.of(t, RSTimeUnit.GAME_TICKS));
        }
    }

    if (config.showArceuusCooldown()) {
        final int magicLevel = client.getRealSkillLevel(Skill.MAGIC);
        if (message.endsWith(MARK_OF_DARKNESS_MESSAGE)) {
            createGameTimer(MARK_OF_DARKNESS_COOLDOWN, Duration.of(magicLevel - 10, RSTimeUnit.GAME_TICKS));
        }
    }

    if (TZHAAR_PAUSED_MESSAGE.matcher(message).find()) {
        log.debug("Pausing tzhaar timer");
        config.tzhaarLastTime(Instant.now());
        if (config.showTzhaarTimers()) {
            createTzhaarTimer();
        }
        return;
    }

    Matcher matcher = TZHAAR_WAVE_MESSAGE.matcher(message);
    if (matcher.find()) {
        int wave = Integer.parseInt(matcher.group(1));
        if (wave == 1) {
            log.debug("Starting tzhaar timer");
            Instant now = Instant.now();
            if (isInInferno()) {
                // The first wave message of the inferno comes six seconds after the
                // ingame timer starts counting
                config.tzhaarStartTime(now.minus(Duration.ofSeconds(6)));
            } else {
                config.tzhaarStartTime(now);
            }
            config.tzhaarLastTime(null);
            if (config.showTzhaarTimers()) {
                createTzhaarTimer();
            }
        } else if (config.tzhaarStartTime() != null && config.tzhaarLastTime() != null) {
            log.debug("Unpausing tzhaar timer");
            // Advance start time by how long it has been paused
            Instant tzhaarStartTime = config.tzhaarStartTime();
            tzhaarStartTime = tzhaarStartTime.plus(Duration.between(config.tzhaarLastTime(), Instant.now()));
            config.tzhaarStartTime(tzhaarStartTime);
            config.tzhaarLastTime(null);
            if (config.showTzhaarTimers()) {
                createTzhaarTimer();
            }
        }
    }

    if (message.equals(SILK_DRESSING_MESSAGE) && config.showSilkDressing()) {
        createGameTimer(SILK_DRESSING);
    }

    if (message.equals(BLESSED_CRYSTAL_SCARAB_MESSAGE) && config.showBlessedCrystalScarab()) {
        createGameTimer(BLESSED_CRYSTAL_SCARAB);
    }

    if (message.equals(LIQUID_ADRENALINE_MESSAGE) && config.showLiquidAdrenaline()) {
        createGameTimer(LIQUID_ADRENALINE);
    }
}
// With no combat-achievement boost varbits set, a thrall at boosted magic level 60
// should produce a RESURRECT_THRALL timer lasting 60 game ticks (1 tick per level).
@Test
public void testThrall() {
    when(timersAndBuffsConfig.showArceuus()).thenReturn(true);
    when(client.getBoostedSkillLevel(Skill.MAGIC)).thenReturn(60);
    ChatMessage chatMessage = new ChatMessage(null, ChatMessageType.GAMEMESSAGE, "",
            "<col=ef0083>You resurrect a greater zombified thrall.</col>", "", 0);
    timersAndBuffsPlugin.onChatMessage(chatMessage);
    ArgumentCaptor<InfoBox> ibcaptor = ArgumentCaptor.forClass(InfoBox.class);
    verify(infoBoxManager).addInfoBox(ibcaptor.capture());
    TimerTimer infoBox = (TimerTimer) ibcaptor.getValue();
    assertEquals(GameTimer.RESURRECT_THRALL, infoBox.getTimer());
    assertEquals(Duration.of(60, RSTimeUnit.GAME_TICKS), infoBox.getDuration());
}
@Override public AuditReplayCommand parse(Text inputLine, Function<Long, Long> relativeToAbsolute) throws IOException { Matcher m = logLineParseRegex.matcher(inputLine.toString()); if (!m.find()) { throw new IOException( "Unable to find valid message pattern from audit log line: `" + inputLine + "` using regex `" + logLineParseRegex + "`"); } long relativeTimestamp; try { relativeTimestamp = dateFormat.parse(m.group("timestamp")).getTime() - startTimestamp; } catch (ParseException p) { throw new IOException( "Exception while parsing timestamp from audit log line: `" + inputLine + "`", p); } // Sanitize the = in the rename options field into a : so we can split on = String auditMessageSanitized = m.group("message").replace("(options=", "(options:"); Map<String, String> parameterMap = new HashMap<String, String>(); String[] auditMessageSanitizedList = auditMessageSanitized.split("\t"); for (String auditMessage : auditMessageSanitizedList) { String[] splitMessage = auditMessage.split("=", 2); try { parameterMap.put(splitMessage[0], splitMessage[1]); } catch (ArrayIndexOutOfBoundsException e) { throw new IOException( "Exception while parsing a message from audit log line: `" + inputLine + "`", e); } } return new AuditReplayCommand(relativeToAbsolute.apply(relativeTimestamp), // Split the UGI on space to remove the auth and proxy portions of it SPACE_SPLITTER.split(parameterMap.get("ugi")).iterator().next(), parameterMap.get("cmd").replace("(options:", "(options="), parameterMap.get("src"), parameterMap.get("dst"), parameterMap.get("ip")); }
// A well-formed audit line at t=11s parses into a command whose relative timestamp is
// 1000 ms past the parser's start timestamp (identity mapping leaves it unchanged).
@Test
public void testSimpleInput() throws Exception {
    Text in = getAuditString("1970-01-01 00:00:11,000", "fakeUser",
            "listStatus", "sourcePath", "null");
    AuditReplayCommand expected = new AuditReplayCommand(1000, "fakeUser",
            "listStatus", "sourcePath", "null", "0.0.0.0");
    assertEquals(expected, parser.parse(in, Function.identity()));
}
/**
 * Front-channel SAML entry point: resolves the SAML session for the given artifact,
 * loads the matching AD session, and redirects to wherever the authentication-status
 * check says the user should go next.
 *
 * @param artifact the SAMLart request parameter identifying the SAML session
 * @return a redirect to the next authentication step
 */
@Operation(summary = "Start Bvd session")
@GetMapping(value = "/frontchannel/saml/v4/entrance/start_bvd_session")
public RedirectView startBvdSession(@RequestParam(value = "SAMLart") String artifact)
        throws SamlSessionException, AdException, BvdException, UnsupportedEncodingException {
    SamlSession samlSession = samlSessionService.findSamlSessionByArtifact(artifact);
    AdSession adSession = adService.getAdSession(samlSession.getHttpSessionId());
    return new RedirectView(adService.checkAuthenticationStatus(adSession, samlSession, artifact));
}
// Verifies the controller wires artifact → SAML session → AD session → status check,
// each collaborator being consulted exactly once, and returns a non-null redirect.
@Test
public void startBvdSession() throws BvdException, SamlSessionException, AdException, UnsupportedEncodingException {
    SamlSession samlSession = new SamlSession(1L);
    samlSession.setHttpSessionId("httpSessionId");
    samlSession.setServiceEntityId("serviceEntityId");
    samlSession.setServiceUuid("serviceUuid");
    samlSession.setTransactionId("transactionId");
    adSession.setBsn("bsn");
    adSession.setAuthenticationLevel(10);

    when(adServiceMock.getAdSession(anyString())).thenReturn(adSession);
    when(samlSessionServiceMock.findSamlSessionByArtifact(anyString())).thenReturn(samlSession);

    RedirectView result = sessionController.startBvdSession("SAMLArtifact");
    assertNotNull(result);
    verify(samlSessionServiceMock, times(1)).findSamlSessionByArtifact(anyString());
    verify(adServiceMock, times(1)).getAdSession(anyString());
    verify(adServiceMock, times(1)).checkAuthenticationStatus(any(AdSession.class), any(SamlSession.class), anyString());
}
/**
 * Authenticates a client via an Athenz RoleToken taken from the binary command data or
 * the Athenz ZPE HTTP header. On any failure a per-error-code metric is incremented
 * before the AuthenticationException is propagated.
 *
 * @return the principal encoded in the validated role token
 * @throws AuthenticationException if the client address or token is missing, the token
 *                                 domain is not allowed, the ZTS public key is
 *                                 unavailable, or validation fails
 */
@Override
public String authenticate(AuthenticationDataSource authData) throws AuthenticationException {
    SocketAddress clientAddress;
    String roleToken;
    // Tracks the most specific failure reason seen so far, for the failure metric.
    ErrorCode errorCode = ErrorCode.UNKNOWN;
    try {
        if (authData.hasDataFromPeer()) {
            clientAddress = authData.getPeerAddress();
        } else {
            errorCode = ErrorCode.NO_CLIENT;
            throw new AuthenticationException("Authentication data source does not have a client address");
        }

        // The token may arrive via the binary protocol or an HTTP header.
        if (authData.hasDataFromCommand()) {
            roleToken = authData.getCommandData();
        } else if (authData.hasDataFromHttp()) {
            roleToken = authData.getHttpHeader(AuthZpeClient.ZPE_TOKEN_HDR);
        } else {
            errorCode = ErrorCode.NO_TOKEN;
            throw new AuthenticationException("Authentication data source does not have a role token");
        }

        if (roleToken == null) {
            errorCode = ErrorCode.NO_TOKEN;
            throw new AuthenticationException("Athenz token is null, can't authenticate");
        }
        if (roleToken.isEmpty()) {
            errorCode = ErrorCode.NO_TOKEN;
            throw new AuthenticationException("Athenz RoleToken is empty, Server is Using Athenz Authentication");
        }
        if (log.isDebugEnabled()) {
            log.debug("Athenz RoleToken : [{}] received from Client: {}", roleToken, clientAddress);
        }

        RoleToken token = new RoleToken(roleToken);

        if (!domainNameList.contains(token.getDomain())) {
            errorCode = ErrorCode.DOMAIN_MISMATCH;
            throw new AuthenticationException(
                    String.format("Athenz RoleToken Domain mismatch, Expected: %s, Found: %s",
                            domainNameList.toString(), token.getDomain()));
        }

        // Synchronize for non-thread safe static calls inside athenz library
        synchronized (this) {
            PublicKey ztsPublicKey = AuthZpeClient.getZtsPublicKey(token.getKeyId());

            if (ztsPublicKey == null) {
                errorCode = ErrorCode.NO_PUBLIC_KEY;
                throw new AuthenticationException("Unable to retrieve ZTS Public Key");
            }

            if (token.validate(ztsPublicKey, allowedOffset, false, null)) {
                log.debug("Athenz Role Token : {}, Authenticated for Client: {}", roleToken, clientAddress);
                AuthenticationMetrics.authenticateSuccess(getClass().getSimpleName(), getAuthMethodName());
                return token.getPrincipal();
            } else {
                errorCode = ErrorCode.INVALID_TOKEN;
                throw new AuthenticationException(
                        String.format("Athenz Role Token Not Authenticated from Client: %s", clientAddress));
            }
        }
    } catch (AuthenticationException exception) {
        // Record the failure reason before propagating.
        incrementFailureMetric(errorCode);
        throw exception;
    }
}
// An unsigned RoleToken must be rejected with an AuthenticationException.
@Test
public void testAuthenticateUnsignedToken() throws Exception {
    List<String> roles = new ArrayList<String>() {
        {
            add("test_role");
        }
    };
    RoleToken token = new RoleToken.Builder("Z1", "test_provider", roles).principal("test_app").build();
    AuthenticationDataSource authData = new AuthenticationDataCommand(
            token.getUnsignedToken(), new InetSocketAddress("localhost", 0), null);
    try {
        provider.authenticate(authData);
        fail("Unsigned token should not be authenticated");
    } catch (AuthenticationException e) {
        // OK, expected
    }
}
/**
 * Maps any uncaught Throwable to a JAX-RS Response: WebApplicationExceptions pass
 * through (5xx ones get logged), AuthenticationException → 401, ValidationException →
 * redirect or 400, FederationException → content-negotiated 500 with a federation
 * message, anything else → generic content-negotiated 500.
 */
@Override
public Response toResponse(Throwable exception) {
    debugLog(exception);

    if (exception instanceof WebApplicationException w) {
        var res = w.getResponse();
        // Log only server-side failures; 4xx responses are expected client errors.
        if (res.getStatus() >= 500) {
            log(w);
        }
        return res;
    }

    if (exception instanceof AuthenticationException) {
        return Response.status(Status.UNAUTHORIZED).build();
    }

    if (exception instanceof ValidationException ve) {
        // A validation failure may carry a redirect target instead of an error body.
        if (ve.seeOther() != null) {
            return Response.seeOther(ve.seeOther()).build();
        }
        return buildContentNegotiatedErrorResponse(ve.localizedMessage(), Status.BAD_REQUEST);
    }

    // the remaining exceptions are unexpected, let's log them
    log(exception);

    if (exception instanceof FederationException fe) {
        var errorMessage = new Message(FEDERATION_ERROR_MESSAGE, fe.reason().name());
        return buildContentNegotiatedErrorResponse(errorMessage, Status.INTERNAL_SERVER_ERROR);
    }

    var status = Status.INTERNAL_SERVER_ERROR;
    var errorMessage = new Message(SERVER_ERROR_MESSAGE, (String) null);
    return buildContentNegotiatedErrorResponse(errorMessage, status);
}
// An unexpected exception with a wildcard Accept header yields a 500 with an HTML body.
@Test
void toResponse_withBody() {
    when(uriInfo.getRequestUri()).thenReturn(REQUEST_URI);
    when(headers.getAcceptableMediaTypes()).thenReturn(List.of(MediaType.WILDCARD_TYPE));
    mockHeaders("de-DE");

    // when
    var res = mapper.toResponse(new IllegalArgumentException());

    // then
    assertEquals(500, res.getStatus());
    assertEquals(MediaType.TEXT_HTML_TYPE, res.getMediaType());
    assertNotNull(res.getEntity());
}
/**
 * Escapes HTML special characters in the input and then converts the resulting
 * Markdown text to HTML.
 */
public static String convertToHtml(String input) {
    String escaped = StringEscapeUtils.escapeHtml4(input);
    return new Markdown().convert(escaped);
}
// A Markdown link with a relative URL is rendered as a plain anchor tag.
@Test
public void shouldDecorateRelativeUrl() {
    assertThat(Markdown.convertToHtml("[Google](/google/com)"))
            .isEqualTo("<a href=\"/google/com\">Google</a>");
}
/**
 * SQL function ST_Polygon: parses WKT into a geometry, rejects anything that is not a
 * POLYGON, and returns the serialized geometry.
 */
@Description("Returns a Geometry type Polygon object from Well-Known Text representation (WKT)")
@ScalarFunction("ST_Polygon")
@SqlType(GEOMETRY_TYPE_NAME)
public static Slice stPolygon(@SqlType(VARCHAR) Slice input) {
    Geometry geometry = jtsGeometryFromWkt(input.toStringUtf8());
    validateType("ST_Polygon", geometry, EnumSet.of(POLYGON));
    return serialize(geometry);
}
// ST_Polygon round-trips valid polygons (including empty) and rejects both
// non-polygon geometries and unclosed rings.
@Test
public void testSTPolygon() {
    assertFunction("ST_AsText(ST_Polygon('POLYGON EMPTY'))", VARCHAR, "POLYGON EMPTY");
    assertFunction("ST_AsText(ST_Polygon('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))'))", VARCHAR, "POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))");
    assertInvalidFunction("ST_AsText(ST_Polygon('LINESTRING (1 1, 2 2, 1 3)'))",
            "ST_Polygon only applies to POLYGON. Input type is: LINE_STRING");
    assertInvalidFunction("ST_Polygon('POLYGON((-1 1, 1 -1))')", INVALID_FUNCTION_ARGUMENT,
            "Invalid WKT: Points of LinearRing do not form a closed linestring");
}
// Hashes only taskId and topicPartitions; mutable progress fields (e.g. committed
// offsets) are intentionally excluded, as pinned by
// shouldBeEqualsIfOnlyDifferInCommittedOffsets.
@Override
public int hashCode() {
    return Objects.hash(taskId, topicPartitions);
}
// Two metadata objects that differ only in committed offsets must still be equal and
// share the same hash code.
@Test
public void shouldBeEqualsIfOnlyDifferInCommittedOffsets() {
    // NOTE(review): both entries use TP_1, so the map collapses to a single entry
    // {TP_1=2}; presumably one of these was meant to be TP_0 — confirm.
    final TaskMetadataImpl stillSameDifferCommittedOffsets = new TaskMetadataImpl(
            TASK_ID,
            TOPIC_PARTITIONS,
            mkMap(mkEntry(TP_1, 1000000L), mkEntry(TP_1, 2L)),
            END_OFFSETS,
            TIME_CURRENT_IDLING_STARTED);

    assertThat(taskMetadata, equalTo(stillSameDifferCommittedOffsets));
    assertThat(taskMetadata.hashCode(), equalTo(stillSameDifferCommittedOffsets.hashCode()));
}
/**
 * Removes all null elements from the given collection in place and returns it.
 *
 * @param collection the collection to filter; modified in place
 * @return the same collection instance with nulls removed
 */
public static <T extends Collection<E>, E> T removeNull(T collection) {
    return filter(collection, element -> null != element);
}
// removeNull drops only null elements (not empty/blank strings) and filters in place,
// returning the same list instance.
@Test
public void removeNullTest() {
    final ArrayList<String> list = CollUtil.newArrayList("a", "b", "c", null, "", " ");
    final ArrayList<String> filtered = CollUtil.removeNull(list);
    // 原地过滤 (in-place filtering)
    assertSame(list, filtered);
    assertEquals(CollUtil.newArrayList("a", "b", "c", "", " "), filtered);
}
/**
 * Invokes {@code getSnapshotDiffReport} reflectively on the given filesystem and returns
 * the diff between the two snapshots.
 *
 * @throws IOException              if the reflective call itself threw (the cause is unwrapped)
 * @throws IllegalArgumentException if the filesystem does not expose the method or it is
 *                                  not accessible
 */
private static SnapshotDiffReport getSnapshotDiffReport(
        final FileSystem fs,
        final Path snapshotDir,
        final String fromSnapshot,
        final String toSnapshot) throws IOException {
    try {
        return (SnapshotDiffReport) getSnapshotDiffReportMethod(fs).invoke(
                fs, snapshotDir, fromSnapshot, toSnapshot);
    } catch (InvocationTargetException e) {
        // Unwrap the target exception so callers see the real filesystem error.
        throw new IOException(e.getCause());
    } catch (NoSuchMethodException | IllegalAccessException e) {
        throw new IllegalArgumentException(
                "Failed to invoke getSnapshotDiffReport.", e);
    }
}
// Syncs snapshot-based changes from source to target and verifies the result.
@Test
public void testSync4() throws Exception {
    initData4(source);
    initData4(target);
    enableAndCreateFirstSnapshot();

    // make changes under source
    changeData4(source);
    dfs.createSnapshot(source, "s2");

    // Printed for debugging only; not asserted.
    SnapshotDiffReport report = dfs.getSnapshotDiffReport(source, "s1", "s2");
    System.out.println(report);

    syncAndVerify();
}
/**
 * Updates this service instance's status in Nacos. Only "UP" and "DOWN"
 * (case-insensitive) are accepted; any other value is logged and ignored.
 * DOWN maps to enabled=false, UP to enabled=true.
 */
public void updateInstanceStatus(String status) {
    if (!STATUS_UP.equalsIgnoreCase(status) && !STATUS_DOWN.equalsIgnoreCase(status)) {
        LOGGER.warning(String.format(Locale.ENGLISH, "can't support status={%s},"
                + "please choose UP or DOWN", status));
        return;
    }
    String serviceId = RegisterContext.INSTANCE.getClientInfo().getServiceId();
    String group = nacosRegisterConfig.getGroup();
    Instance updateInstance = nacosServiceManager.buildNacosInstanceFromRegistration();
    updateInstance.setEnabled(!STATUS_DOWN.equalsIgnoreCase(status));
    try {
        nacosServiceManager.getNamingMaintainService().updateInstance(serviceId, group, updateInstance);
    } catch (NacosException e) {
        // Failure is logged rather than propagated; callers are not expected to recover.
        LOGGER.log(Level.SEVERE, String.format(Locale.ENGLISH, "update nacos instance status failed,"
                + "serviceId={%s}", serviceId), e);
    }
}
// Setting the status to DOWN against the mocked naming service should leave the
// client's "instance" field populated.
@Test
public void testUpdateInstanceStatus() throws NacosException {
    mockNamingService();
    nacosClient.updateInstanceStatus(STATUS_DOWN);
    Assert.assertNotNull(ReflectUtils.getFieldValue(nacosClient, "instance"));
}
/**
 * Delegates executor creation for the given statement context to the wrapped creator.
 */
@Override
public Optional<DatabaseAdminExecutor> create(final SQLStatementContext sqlStatementContext) {
    return delegated.create(sqlStatementContext);
}
// A SELECT against pg_database should yield the openGauss system-catalog executor.
@Test
void assertCreateExecutorForSelectDatabase() {
    SelectStatementContext selectStatementContext = mock(SelectStatementContext.class, RETURNS_DEEP_STUBS);
    when(selectStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singletonList("pg_database"));
    Optional<DatabaseAdminExecutor> actual = new OpenGaussAdminExecutorCreator()
            .create(selectStatementContext,
                    "select datname, datcompatibility from pg_database where datname = 'sharding_db'",
                    "postgres", Collections.emptyList());
    assertTrue(actual.isPresent());
    assertThat(actual.get(), instanceOf(OpenGaussSystemCatalogAdminQueryExecutor.class));
}
/**
 * Stores the value under the given key in the underlying cache.
 */
@Override
public void set(K key, V value) {
    cache.put(key, value);
}
// A value written through the adapter is readable directly from the backing cache.
@Test
public void testSet() {
    adapter.set(23, "test");
    assertEquals("test", cache.get(23));
}
/**
 * Reads a string-family binlog value (ENUM, SET, or STRING) from the payload, using the
 * column meta to recover the real column type and length.
 *
 * @throws UnsupportedSQLOperationException for any other column type
 */
@Override
public Serializable read(final MySQLBinlogColumnDef columnDef, final MySQLPacketPayload payload) {
    int type = columnDef.getColumnMeta() >> 8;
    int length = columnDef.getColumnMeta() & 0xff;
    // unpack type & length, see https://bugs.mysql.com/bug.php?id=37426.
    if (0x30 != (type & 0x30)) {
        length += ((type & 0x30) ^ 0x30) << 4;
        type |= 0x30;
    }
    switch (MySQLBinaryColumnType.valueOf(type)) {
        case ENUM:
            return readEnumValue(length, payload);
        case SET:
            // SET values here are read as a single byte bitmap.
            return payload.getByteBuf().readByte();
        case STRING:
            return new MySQLBinaryString(payload.readStringFixByBytes(readActualLength(length, payload)));
        default:
            throw new UnsupportedSQLOperationException(MySQLBinaryColumnType.valueOf(type).toString());
    }
}
// A SET column reads exactly one byte from the buffer and returns it unchanged.
@Test
void assertReadSetValue() {
    columnDef.setColumnMeta(MySQLBinaryColumnType.SET.getValue() << 8);
    when(payload.getByteBuf()).thenReturn(byteBuf);
    when(byteBuf.readByte()).thenReturn((byte) 0xff);
    assertThat(new MySQLStringBinlogProtocolValue().read(columnDef, payload), is((byte) 0xff));
}
/**
 * Deletes the given keys. Inside a transaction or pipeline the deletes are queued
 * individually and null is returned (per the Spring Data Redis contract for queued
 * commands); otherwise all deletes are executed as one batch and the summed count of
 * removed keys is returned.
 */
@Override
public Long del(byte[]... keys) {
    if (isQueueing() || isPipelined()) {
        for (byte[] key : keys) {
            write(key, LongCodec.INSTANCE, RedisCommands.DEL, key);
        }
        return null;
    }

    CommandBatchService es = new CommandBatchService(executorService);
    for (byte[] key : keys) {
        es.writeAsync(key, StringCodec.INSTANCE, RedisCommands.DEL, key);
    }
    BatchResult<Long> b = (BatchResult<Long>) es.execute();
    // Each response is 0 or 1 per key; sum them into the total delete count.
    return b.getResponses().stream().collect(Collectors.summarizingLong(v -> v)).getSum();
}
// Deleting ten existing keys reports a delete count of ten.
@Test
public void testDel() {
    List<byte[]> keys = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        byte[] key = ("test" + i).getBytes();
        keys.add(key);
        connection.set(key, ("test" + i).getBytes());
    }
    assertThat(connection.del(keys.toArray(new byte[0][]))).isEqualTo(10);
}
/**
 * Creates a Flink streaming environment from the pipeline options, defaulting the
 * files-to-stage list to empty when unset.
 */
@VisibleForTesting
static StreamExecutionEnvironment createStreamExecutionEnvironment(FlinkPipelineOptions options) {
    return createStreamExecutionEnvironment(
            options,
            MoreObjects.firstNonNull(options.getFilesToStage(), Collections.emptyList()),
            options.getFlinkConfDir());
}
// Requesting the "rocksDB" state backend with a storage path yields a RocksDBStateBackend.
@Test
public void shouldCreateRocksDbStateBackend() {
    FlinkPipelineOptions options = getDefaultPipelineOptions();
    options.setStreaming(true);
    options.setStateBackend("rocksDB");
    options.setStateBackendStoragePath(temporaryFolder.getRoot().toURI().toString());

    StreamExecutionEnvironment sev =
            FlinkExecutionEnvironments.createStreamExecutionEnvironment(options);
    assertThat(sev.getStateBackend(), instanceOf(RocksDBStateBackend.class));
}
/**
 * Reads the template file using the platform default charset and renders it.
 *
 * @throws IOException if the file cannot be read
 */
public String render(File templateFile) throws IOException {
    String template = FileUtils.readFileToString(templateFile, Charset.defaultCharset());
    return render(template);
}
// A template for-loop over a map renders one "key = …, value = …" line per entry,
// preserving the map's insertion order.
@Test
void testIterate() {
    // given
    K8sSpecTemplate template = new K8sSpecTemplate();
    template.put("dict", ImmutableMap.of(
            "k1", "v1",
            "k2", "v2"
    ));

    // when
    String spec = template.render(
            "{% for key, value in dict.items() %}"
                    + "key = {{key}}, value = {{value}}\n"
                    + "{% endfor %}"
    );

    // then
    assertEquals(
            "key = k1, value = v1\n"
                    + "key = k2, value = v2\n", spec);
}
/**
 * Returns the edges of the underlying path.
 */
@Override
public List<EDGE> getEdges() {
    return path.getEdges();
}
// The smallest non-trivial cycle (A -> B -> A) exposes exactly its two edges.
@Test
public void minimal_nontrivial_cycle() {
    String nodeA = "Node-A";
    String nodeB = "Node-B";
    CycleInternal<Edge<String>> cycle = new CycleInternal<>(
            asList(stringEdge(nodeA, nodeB), stringEdge(nodeB, nodeA)));
    assertThat(cycle.getEdges()).hasSize(2);
}
/**
 * Returns the NamespaceName for "tenant/namespace", validating both parts first.
 *
 * @throws IllegalArgumentException if the resulting name is not a valid namespace
 */
public static NamespaceName get(String tenant, String namespace) {
    validateNamespaceName(tenant, namespace);
    return get(tenant + '/' + namespace);
}
// A four-part name (property/cluster/namespace/topic) is not a valid namespace name.
@Test(expectedExceptions = IllegalArgumentException.class)
public void namespace_propertyClusterNamespaceTopic() {
    NamespaceName.get("property/cluster/namespace/topic");
}
/**
 * Writes the low 16 bits of {@code v} at the current position using this stream's
 * byte order, growing the buffer if necessary, and advances the position.
 */
@Override
public void writeShort(final int v) throws IOException {
    ensureAvailable(SHORT_SIZE_IN_BYTES);
    Bits.writeShort(buffer, pos, (short) v, isBigEndian);
    pos += SHORT_SIZE_IN_BYTES;
}
// A short written little-endian at offset 2 reads back identically via readShortL.
@Test
public void testWriteShortForVByteOrder() throws Exception {
    short expected = 100;
    out.writeShort(2, expected, LITTLE_ENDIAN);
    short actual = Bits.readShortL(out.buffer, 2);
    assertEquals(expected, actual);
}
/**
 * Creates a collector over the given name-to-container map, caching the node's
 * partition service for later partition lookups.
 *
 * @param nodeEngine the node engine providing services
 * @param containers the live container map this collector operates on
 */
protected AbstractNamedContainerCollector(NodeEngine nodeEngine, ConcurrentMap<String, C> containers) {
    super(nodeEngine);
    this.containers = containers;
    this.partitionService = nodeEngine.getPartitionService();
}
/**
 * Verifies that the collector starts with the single container registered by the test
 * setup, collects exactly one merging value from it when run, and removes the container
 * from the container map afterwards.
 */
@Test
public void testAbstractNamedContainerCollector() {
    TestNamedContainerCollector collector = new TestNamedContainerCollector(nodeEngine, true, true);

    // Fixed: assertion message previously read "Expected the to have %d containers".
    assertEqualsStringFormat("Expected to have %d containers, but found %d", 1, collector.containers.size());

    collector.run();

    assertEqualsStringFormat("Expected %d merging values, but found %d", 1L, collector.getMergingValueCount());
    assertEquals("Expected the collected containers to be removed from the container map",
            0, collector.containers.size());
}
/**
 * Decodes a Base58 string with checksum verification, first trying one checksum mode
 * and falling back to the other if validation fails.
 *
 * @throws ValidateException if both decode attempts fail
 */
public static byte[] decodeChecked(CharSequence encoded) throws ValidateException {
    try {
        return decodeChecked(encoded, true);
    } catch (ValidateException ignore) {
        // First mode failed validation; retry with the alternate mode.
        return decodeChecked(encoded, false);
    }
}
// Decoding succeeds both with and without a leading '1' (the Base58 digit for a
// zero byte), yielding the original "hello world" payload in each case.
@Test
public void decodeCheckedTest() {
    String a = "3vQB7B6MrGQZaxCuFg4oh";
    // 1 + a concatenates int 1 with the string, i.e. "13vQB7B6MrGQZaxCuFg4oh".
    byte[] decode = Base58.decodeChecked(1 + a);
    assertArrayEquals("hello world".getBytes(StandardCharsets.UTF_8), decode);
    decode = Base58.decodeChecked(a);
    assertArrayEquals("hello world".getBytes(StandardCharsets.UTF_8), decode);
}
/**
 * Returns a ServletInputStream backed by the cached request body, allowing the body
 * to be re-read. Asynchronous read listeners are not supported.
 */
@Override
public ServletInputStream getInputStream() throws IOException {
    final ByteArrayInputStream inputStream = new ByteArrayInputStream(body);
    return new ServletInputStream() {
        @Override
        public int read() throws IOException {
            return inputStream.read();
        }

        @Override
        public boolean isFinished() {
            // NOTE(review): always false even after the body is exhausted — confirm
            // callers only rely on read() returning -1.
            return false;
        }

        @Override
        public boolean isReady() {
            // Always false; async I/O is not supported by this wrapper.
            return false;
        }

        @Override
        public void setReadListener(ReadListener readListener) {
            // Intentionally a no-op: non-blocking reads are not supported.
        }
    };
}
// The re-readable input stream is non-null and can be drained to end-of-stream.
@Test
void testGetInputStream() throws IOException {
    ServletInputStream inputStream = reuseHttpServletRequest.getInputStream();
    assertNotNull(inputStream);
    int read = inputStream.read();
    while (read != -1) {
        read = inputStream.read();
    }
}
/**
 * Loads the byte range [offset, offset + lengthToLoad) of the given file from the
 * remote reader into the local page cache, one chunk at a time.
 *
 * @param filePath     file path; hashed to derive the cache file id
 * @param offset       starting offset within the file
 * @param lengthToLoad number of bytes to load
 * @param reader       positioned reader supplying the remote data
 * @param chunkSize    read granularity and size of the staging buffer
 * @throws IOException                        if the underlying reader fails
 * @throws FailedPreconditionRuntimeException if a read returns fewer bytes than requested
 */
@VisibleForTesting
public void loadDataFromRemote(String filePath, long offset, long lengthToLoad,
    PositionReader reader, int chunkSize) throws IOException {
  ByteBuffer buf = ByteBuffer.allocateDirect(chunkSize);
  // Fixed: fileId is already a String — the redundant toString() call is removed.
  String fileId = new AlluxioURI(filePath).hash();
  while (lengthToLoad > 0) {
    long currentPageIndex = offset / mPageSize;
    PageId pageId = new PageId(fileId, currentPageIndex);
    int lengthToRead = (int) Math.min(chunkSize, lengthToLoad);
    int lengthRead = reader.read(offset, buf, lengthToRead);
    if (lengthRead != lengthToRead) {
      // A short read means the remote data ended before the requested range did.
      throw new FailedPreconditionRuntimeException(
          "Read " + lengthRead + " bytes, expected to read " + lengthToRead + " bytes");
    }
    buf.flip(); // switch the buffer from write mode to read mode for the cache put
    mCacheManager.put(pageId, buf);
    offset += lengthRead;
    lengthToLoad -= lengthRead;
    buf.clear(); // reset for the next chunk
  }
}
@Test
public void testLoadFromReader() throws IOException {
    String ufsPath = "testLoadRemote";
    int length = 10;
    // Load 10 bytes from a synthetic reader into the cache.
    mWorker.loadDataFromRemote(ufsPath, 0, length, new TestDataReader(100), (int) mPageSize);

    String fileId = new AlluxioURI(ufsPath).hash();
    List<PageId> cachedPages = mCacheManager.getCachedPageIdsByFileId(fileId, length);
    assertEquals(1, cachedPages.size());

    // The cached page must contain the expected increasing byte pattern.
    byte[] buffer = new byte[length];
    mCacheManager.get(new PageId(fileId, 0), length, buffer, 0);
    assertTrue(BufferUtils.equalIncreasingByteArray(0, length, buffer));
}
/**
 * Fork-date detection is not supported by this SCM provider, so this
 * implementation always returns {@code null}.
 *
 * @param referenceBranch ignored
 * @param rootBaseDir ignored
 * @return always {@code null}
 */
@CheckForNull
@Override
public Instant forkDate(String referenceBranch, Path rootBaseDir) {
    return null;
}
@Test
public void forkDate_returns_null() throws SVNException {
    // forkDate is unsupported for SVN and must always yield null.
    SvnScmProvider underTest = new SvnScmProvider(config, new SvnBlameCommand(config));

    assertThat(underTest.forkDate("", Paths.get(""))).isNull();
}
/**
 * Creates an application package from the given application directory.
 * Delegates to the two-argument overload with {@code false} as the flag
 * (see that overload for the flag's meaning).
 *
 * @param appDir the application package root directory
 * @return the application package backed by {@code appDir}
 */
public static FilesApplicationPackage fromFile(File appDir) {
    return fromFile(appDir, false);
}
@Test
public void testLegacyOverrides() {
    // Load a fixture package that declares exactly two legacy overrides.
    ApplicationPackage pkg = FilesApplicationPackage.fromFile(new File("src/test/resources/app-legacy-overrides"));

    var legacyOverrides = pkg.legacyOverrides();

    assertEquals(2, legacyOverrides.size());
    assertEquals("something here", legacyOverrides.get("foo-bar"));
    assertEquals("false", legacyOverrides.get("v7-geo-positions"));
}
/**
 * Updates the permission checkboxes for the given permission set.
 * Delegates to the two-argument overload with {@code false} as the first flag
 * (see that overload for the flag's meaning).
 *
 * @param permissionEnumSet the permissions to reflect in the checkboxes
 */
public void updateCheckboxes( EnumSet<RepositoryFilePermission> permissionEnumSet ) {
    updateCheckboxes( false, permissionEnumSet );
}
@Test
public void testUpdateCheckboxesNoPermissionsAppropriateTrue() {
    permissionsCheckboxHandler.updateCheckboxes( true, EnumSet.noneOf( RepositoryFilePermission.class ) );

    // With no permissions granted, every checkbox must be unchecked.
    verify( readCheckbox, times( 1 ) ).setChecked( false );
    verify( writeCheckbox, times( 1 ) ).setChecked( false );
    verify( deleteCheckbox, times( 1 ) ).setChecked( false );
    verify( manageCheckbox, times( 1 ) ).setChecked( false );

    // All checkboxes are disabled when the "appropriate" flag is true.
    verify( readCheckbox, times( 1 ) ).setDisabled( true );
    verify( writeCheckbox, times( 1 ) ).setDisabled( true );
    verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
    verify( manageCheckbox, times( 1 ) ).setDisabled( true );

    // NOTE(review): readCheckbox is verified for BOTH setDisabled(true) and
    // setDisabled(false) — presumably the handler disables everything and then
    // re-enables read; confirm against the updateCheckboxes implementation.
    verify( readCheckbox, times( 1 ) ).setDisabled( false );
}
/**
 * Starts the KSQL server with the given (port-resolved) configuration.
 * Exposed for testing.
 *
 * @param ksqlConfigWithPort the server configuration with the listener port resolved
 */
@VisibleForTesting
void startKsql(final KsqlConfig ksqlConfigWithPort) {
    // Remove leftover state from previous runs before initializing with the new config.
    cleanupOldState();
    initialize(ksqlConfigWithPort);
}
@Test
public void shouldConfigureRocksDBConfigSetter() {
    // When: starting KSQL runs the startup configuration hooks
    app.startKsql(ksqlConfig);

    // Then: the RocksDB config setter handler is invoked with the server config
    verify(rocksDBConfigSetterHandler).accept(ksqlConfig);
}
/**
 * Validates the global Bitbucket Server configuration: both the URL and the
 * personal access token must be present, and the remote server must accept the
 * URL, the token, and grant read permission.
 *
 * @param almSettingDto the ALM setting to validate
 * @throws IllegalArgumentException if the URL or token is missing
 */
public void validate(AlmSettingDto almSettingDto) {
    String url = almSettingDto.getUrl();
    String personalAccessToken = almSettingDto.getDecryptedPersonalAccessToken(encryption);

    // Guard clause: both pieces of configuration are mandatory.
    if (url == null || personalAccessToken == null) {
        throw new IllegalArgumentException("Your global Bitbucket Server configuration is incomplete.");
    }

    bitbucketServerRestClient.validateUrl(url);
    bitbucketServerRestClient.validateToken(url, personalAccessToken);
    bitbucketServerRestClient.validateReadPermission(url, personalAccessToken);
}
@Test
public void validate_success() {
    String url = "http://abc.com";
    String token = "abc";
    AlmSettingDto settings = createNewBitbucketDto(url, token);
    when(encryption.isEncrypted(any())).thenReturn(false);

    underTest.validate(settings);

    // Each remote validation step is performed exactly once.
    verify(bitbucketServerRestClient, times(1)).validateUrl(url);
    verify(bitbucketServerRestClient, times(1)).validateToken(url, token);
    verify(bitbucketServerRestClient, times(1)).validateReadPermission(url, token);
}
/**
 * Looks up a resource across all plugin classloaders, in registration order,
 * falling back to this class's own classloader when no plugin provides it.
 *
 * @param name the resource name
 * @return the first matching resource URL, or {@code null} if none is found
 */
@Override
public URL getResource(String name) {
    // First plugin classloader that can resolve the resource wins.
    for (ClassLoader loader : pluginClassloaders) {
        URL resource = loader.getResource(name);
        if (resource != null) {
            return resource;
        }
    }
    // Not found in any plugin: fall back to our own classloader.
    return getClass().getClassLoader().getResource(name);
}
@Test
public void aggregate_plugin_classloaders() {
    URLClassLoader checkstyle = newCheckstyleClassloader();
    I18nClassloader underTest = new I18nClassloader(Lists.newArrayList(checkstyle));

    // The aggregated classloader resolves resources provided by plugin classloaders.
    URL resource = underTest.getResource("org/sonar/l10n/checkstyle.properties");

    assertThat(resource).isNotNull();
    assertThat(resource.getFile()).endsWith("checkstyle.properties");
}
/**
 * Entry point of the segment generation job: lists the matching input files,
 * submits one segment-generation task per file to the executor, and waits for
 * all tasks to finish, propagating the first recorded failure.
 *
 * @throws Exception the first task failure, or any listing/setup error
 */
@Override
public void run() throws Exception {
    // Get list of files to process.
    List<String> filteredFiles = SegmentGenerationUtils.listMatchedFilesWithRecursiveOption(_inputDirFS, _inputDirURI,
        _spec.getIncludeFileNamePattern(), _spec.getExcludeFileNamePattern(), _spec.isSearchRecursively());
    if (_consistentPushEnabled) {
        // Consistent push requires a unique segment name postfix per run.
        ConsistentDataPushUtils.configureSegmentPostfix(_spec);
    }
    File localTempDir = new File(FileUtils.getTempDirectory(), "pinot-" + UUID.randomUUID());
    try {
        int numInputFiles = filteredFiles.size();
        // One latch count per input file; tasks count down as they complete.
        _segmentCreationTaskCountDownLatch = new CountDownLatch(numInputFiles);

        if (!SegmentGenerationJobUtils.useGlobalDirectorySequenceId(_spec.getSegmentNameGeneratorSpec())) {
            // Per-directory sequence ids: group files by parent directory and number
            // them within each directory (sorted by name for determinism).
            Map<String, List<String>> localDirIndex = new HashMap<>();
            for (String filteredFile : filteredFiles) {
                java.nio.file.Path filteredParentPath = Paths.get(filteredFile).getParent();
                localDirIndex.computeIfAbsent(filteredParentPath.toString(), k -> new ArrayList<>()).add(filteredFile);
            }
            for (String parentPath : localDirIndex.keySet()) {
                List<String> siblingFiles = localDirIndex.get(parentPath);
                Collections.sort(siblingFiles);
                for (int i = 0; i < siblingFiles.size(); i++) {
                    URI inputFileURI = SegmentGenerationUtils
                        .getFileURI(siblingFiles.get(i), SegmentGenerationUtils.getDirectoryURI(parentPath));
                    submitSegmentGenTask(localTempDir, inputFileURI, i);
                }
            }
        } else {
            //iterate on the file list, for each
            // Global sequence ids: number files across the whole input list.
            for (int i = 0; i < numInputFiles; i++) {
                final URI inputFileURI = SegmentGenerationUtils.getFileURI(filteredFiles.get(i), _inputDirURI);
                submitSegmentGenTask(localTempDir, inputFileURI, i);
            }
        }
        _segmentCreationTaskCountDownLatch.await();
        if (_failure.get() != null) {
            // A task failed: stop any remaining work and surface the failure.
            _executorService.shutdownNow();
            throw _failure.get();
        }
    } finally {
        //clean up
        FileUtils.deleteQuietly(localTempDir);
        _executorService.shutdown();
    }
}
@Test
public void testFailureHandling() throws Exception {
    // Three CSV inputs; input2.csv has a non-integer value that must make
    // segment generation fail partway through the job.
    File testDir = makeTestDir();
    File inputDir = new File(testDir, "input");
    inputDir.mkdirs();
    File inputFile1 = new File(inputDir, "input1.csv");
    FileUtils.writeLines(inputFile1, Lists.newArrayList("col1,col2", "value11,11", "value12,12"));
    File inputFile2 = new File(inputDir, "input2.csv");
    FileUtils.writeLines(inputFile2, Lists.newArrayList("col1,col2", "value21,notanint", "value22,22"));
    File inputFile3 = new File(inputDir, "input3.csv");
    FileUtils.writeLines(inputFile3, Lists.newArrayList("col1,col2", "value31,31", "value32,32"));

    File outputDir = new File(testDir, "output");
    final String schemaName = "mySchema";
    File schemaFile = makeSchemaFile(testDir, schemaName);
    File tableConfigFile = makeTableConfigFile(testDir, schemaName);
    SegmentGenerationJobSpec jobSpec = makeJobSpec(inputDir, outputDir, schemaFile, tableConfigFile);

    // Set up for a segment name that matches our input filename, so we can validate
    // that only the first input file gets processed.
    SegmentNameGeneratorSpec nameSpec = new SegmentNameGeneratorSpec();
    nameSpec.setType(BatchConfigProperties.SegmentNameGeneratorType.INPUT_FILE);
    nameSpec.getConfigs().put(SegmentGenerationTaskRunner.FILE_PATH_PATTERN, ".+/(.+)\\.csv");
    nameSpec.getConfigs().put(SegmentGenerationTaskRunner.SEGMENT_NAME_TEMPLATE, "${filePathPattern:\\1}");
    jobSpec.setSegmentNameGeneratorSpec(nameSpec);

    try {
        SegmentGenerationJobRunner jobRunner = new SegmentGenerationJobRunner(jobSpec);
        jobRunner.run();
        fail("Job should have failed");
    } catch (Exception e) {
        // The failure must identify the offending input file.
        assertTrue(e.getMessage().contains("input2.csv"), "Didn't find filename in exception message");

        // We should only have one output file, since segment generation will
        // terminate after the second input file.
        File[] segments = outputDir.listFiles(new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                return name.endsWith(".tar.gz");
            }
        });
        // We rely on the SegmentGenerationJobRunner doing a sort by name, so "input1.csv" will be the
        // first file we process, and "input2.csv" (the bad file) will be the second one.
        assertEquals(segments.length, 1);
        assertTrue(segments[0].getName().endsWith("input1.tar.gz"));
    }
}
/**
 * Validates that the supplied argument values are assignment-compatible with the
 * declared parameter types of a UDF, throwing a {@link KsqlFunctionException}
 * describing the first mismatch. {@code null} argument values are accepted for
 * any parameter type.
 *
 * @param functionName the function being invoked (used in error messages)
 * @param args the actual argument values
 * @param argTypes the declared parameter types, positionally matched to {@code args}
 * @throws KsqlFunctionException if {@code args} is null, the arity differs, or a
 *         value is not assignable to its declared type
 */
public static void ensureCorrectArgs(
    final FunctionName functionName,
    final Object[] args,
    final Class<?>... argTypes
) {
  if (args == null) {
    throw new KsqlFunctionException("Null argument list for " + functionName.text() + ".");
  }
  if (args.length != argTypes.length) {
    throw new KsqlFunctionException("Incorrect arguments for " + functionName.text() + ".");
  }
  for (int index = 0; index < argTypes.length; index++) {
    final Object arg = args[index];
    // Nulls are compatible with every declared type.
    if (arg == null) {
      continue;
    }
    final Class<?> expectedType = argTypes[index];
    if (!expectedType.isAssignableFrom(arg.getClass())) {
      throw new KsqlFunctionException(String.format(
          "Incorrect arguments type for %s. "
              + "Expected %s for arg number %d but found %s.",
          functionName.text(),
          expectedType.getCanonicalName(),
          index,
          arg.getClass().getCanonicalName()));
    }
  }
}
@Test
public void shouldHandleSubTypes() {
    // Double and Integer are both subtypes of Number, so validation must pass.
    final Object[] args = {1.345, 55};
    UdfUtil.ensureCorrectArgs(FUNCTION_NAME, args, Number.class, Number.class);
}
/**
 * Converts a compact 31-character UUID string (as produced by the matching
 * encoder, which drops the dashes and the version digit and shuffles the time
 * fields) back into a canonical {@link UUID}.
 *
 * @param src the 31-character compact representation
 * @return the reconstructed UUID
 */
public static UUID fromString(String src) {
    // Reassemble the canonical 8-4-4-4-12 form from the shuffled compact layout:
    // time-low comes from chars [7,15), time-mid from [3,7), the version digit '1'
    // plus chars [0,3) form time-high, then clock-seq [15,19) and node [19,end).
    String canonical = new StringBuilder(36)
        .append(src, 7, 15).append('-')
        .append(src, 3, 7).append("-1")
        .append(src, 0, 3).append('-')
        .append(src, 15, 19).append('-')
        .append(src, 19, src.length())
        .toString();
    return UUID.fromString(canonical);
}
@Test
public void basicUuid() {
    // Fixed: the test previously only printed the result and asserted nothing,
    // so it could never fail. Assert the expected round-trip value instead
    // (framework-independent to avoid relying on a particular assert import).
    String actual = UUIDConverter.fromString("1e746126eaaefa6a91992ebcb67fe33").toString();
    if (!"6eaaefa6-4612-11e7-a919-92ebcb67fe33".equals(actual)) {
        throw new AssertionError("Unexpected UUID conversion result: " + actual);
    }
}
/**
 * Returns whether the given handler object is a handler method for the Spring MVC
 * version this strategy targets. Implemented by version-specific subclasses
 * (presumably because handler representations differ across MVC versions — see
 * the WebMvc25 test, where a HandlerMethod is NOT recognized).
 *
 * @param handler the handler object obtained from the dispatcher
 * @return {@code true} if {@code handler} is a handler method for this MVC version
 */
abstract boolean isHandlerMethod(Object handler);
@Test
void WebMvc25_isHandlerMethod_isFalse() {
    // A Spring HandlerMethod is not a handler method under the WebMvc25 strategy.
    HandlerMethod springHandlerMethod = mock(HandlerMethod.class);

    boolean result = new WebMvc25().isHandlerMethod(springHandlerMethod);

    assertThat(result).isFalse();
}
/**
 * Splits an expression string into tokens: characters in CHAR_OPERATORS become
 * standalone tokens, two consecutive operator characters (e.g. ">=") are combined
 * into one token (except when the second is a parenthesis), and everything else is
 * kept as-is, then the result is split on SPLIT_EXPRESSION and trimmed.
 *
 * @param in the raw expression
 * @return the non-empty tokens in order
 */
public List<String> split(String in) {
    final StringBuilder result = new StringBuilder();
    final char[] chars = in.toCharArray();
    for (int i = 0; i < chars.length; i++) {
        final char c = chars[i];
        if (CHAR_OPERATORS.contains(String.valueOf(c))) {
            // NOTE(review): the bound is `chars.length - 2`, so a two-character operator
            // ending at the very last character is NOT combined — this looks like an
            // off-by-one (`- 1` expected); confirm whether trailing operators are legal
            // inputs before changing.
            if (i < chars.length - 2 && CHAR_OPERATORS.contains(String.valueOf(chars[i + 1])) && !("(".equals(String.valueOf(chars[i + 1])) || ")".equals(String.valueOf(chars[i + 1])))) {
                // Emit the two-character operator as a single space-delimited token.
                result.append(" ").append(c).append(chars[i + 1]).append(" ");
                i++;
            } else {
                // Emit the single-character operator as its own token.
                result.append(" ").append(c).append(" ");
            }
        } else {
            result.append(c);
        }
    }
    // Split on the delimiter expression and drop empty/whitespace-only tokens.
    final String[] tokens = result.toString().split(SPLIT_EXPRESSION);
    final List<String> list = new ArrayList<>();
    for (int i = 0; i < tokens.length; i++) {
        tokens[i] = tokens[i].trim();
        if (!tokens[i].equals("")) {
            list.add(tokens[i]);
        }
    }
    return list;
}
@Test
public void split2() {
    // Parentheses become standalone tokens surrounding the operands and operator.
    assertEquals(Arrays.asList("(", "a", "and", "b", ")"), parser.split("(a and b)"));
}
/**
 * Returns Euler's number e raised to the power of the given integer exponent.
 * Delegates to the Double overload; a {@code null} exponent propagates as
 * {@code null} (per the overload's null handling).
 *
 * @param exponent the exponent to raise e to, may be {@code null}
 * @return e^exponent, or {@code null} if {@code exponent} is {@code null}
 */
@Udf(description = "Returns Euler's number e raised to the power of an INT value.")
public Double exp(
    @UdfParameter(
        value = "exponent",
        description = "the exponent to raise e to."
    ) final Integer exponent
) {
    // Widen to double and reuse the Double implementation; keep null as null.
    return exp(exponent == null ? null : exponent.doubleValue());
}
@Test
public void shouldHandleZero() {
    // e^0 == 1 regardless of which numeric overload is invoked.
    final double expected = 1.0;
    assertThat(udf.exp(0), is(expected));
    assertThat(udf.exp(0L), is(expected));
    assertThat(udf.exp(0.0), is(expected));
}
/**
 * Returns the set of all {@link PipelineOptions} subinterfaces currently registered
 * with this factory, as an unmodifiable view backed by the cache.
 *
 * @return the registered options interfaces (read-only)
 */
public static Set<Class<? extends PipelineOptions>> getRegisteredOptions() {
    return Collections.unmodifiableSet(CACHE.get().registeredOptions);
}
@Test
public void testAutomaticRegistrationOfPipelineOptions() {
    // RegisteredTestOptions should appear without an explicit register() call —
    // presumably discovered via automatic registration; confirm the mechanism
    // against PipelineOptionsFactory.
    assertTrue(PipelineOptionsFactory.getRegisteredOptions().contains(RegisteredTestOptions.class));
}
/**
 * Converts an infix predicate expression into its prefix token form using a
 * shunting-yard style operator stack.
 *
 * NOTE(review): isOperand(token) appears to select OPERATOR/parenthesis tokens
 * (its branch handles ")" and precedence), so the name looks inverted relative
 * to its meaning — confirm against isOperand's implementation.
 *
 * @param in the infix expression
 * @return the tokens in prefix order
 */
public List<String> toPrefix(String in) {
    List<String> tokens = buildTokens(alignINClause(in));
    List<String> output = new ArrayList<>();
    List<String> stack = new ArrayList<>();
    for (String token : tokens) {
        if (isOperand(token)) {
            if (token.equals(")")) {
                // Closing parenthesis: pop operators to the output until the matching "(".
                while (openParanthesesFound(stack)) {
                    output.add(stack.remove(stack.size() - 1));
                }
                if (!stack.isEmpty()) { // temporarily fix for issue #189
                    // Discard the matching "(" itself.
                    stack.remove(stack.size() - 1);
                }
            } else {
                // Operator: pop operators of higher/equal precedence before pushing.
                while (openParanthesesFound(stack) && !hasHigherPrecedence(token, stack.get(stack.size() - 1))) {
                    output.add(stack.remove(stack.size() - 1));
                }
                stack.add(token);
            }
        } else {
            // Plain value token goes straight to the output.
            output.add(token);
        }
    }
    // Flush any operators still on the stack.
    while (!stack.isEmpty()) {
        output.add(stack.remove(stack.size() - 1));
    }
    return output;
}
@Test(expected = NullPointerException.class)
public void parserShouldNotAcceptNull() {
    // A null input must throw NPE (declared via the annotation); fail() guards
    // against the call returning normally.
    parser.toPrefix(null);
    fail();
}