Dataset schema: each row pairs a `focal_method` column (string, lengths 13 to 60.9k characters) with a `test_case` column (string, lengths 25 to 109k characters).
/**
 * Parses the given character sequence into a {@link DateTime} using the supplied format.
 *
 * @param dateStr    the date text to parse
 * @param dateFormat the format describing {@code dateStr}
 * @return a new {@link DateTime} built from the parsed input
 */
public static DateTime parse(CharSequence dateStr, DateFormat dateFormat) {
    final DateTime parsed = new DateTime(dateStr, dateFormat);
    return parsed;
}
// Regression test for issue I8NMP7: a 13-digit epoch-millisecond string must parse to the expected datetime.
@Test public void issueI8NMP7Test() { final String str = "1702262524444"; final DateTime parse = DateUtil.parse(str); assertEquals("2023-12-11 10:42:04", Objects.requireNonNull(parse).toString()); }
/**
 * Resolves a backend to contact and reports its id through {@code backendIdRef}.
 *
 * @param backendMap   candidate compute nodes keyed by id
 * @param backendIdRef out-parameter that receives the chosen node's id
 * @return the chosen node's host/BE-port address, or {@code null} when no backend is available
 */
@Nullable
public static TNetworkAddress getBackendHost(ImmutableMap<Long, ComputeNode> backendMap, Reference<Long> backendIdRef) {
    final ComputeNode chosen = getBackend(backendMap);
    if (chosen == null) {
        return null;
    }
    backendIdRef.setRef(chosen.getId());
    return new TNetworkAddress(chosen.getHost(), chosen.getBePort());
}
// A null backend map and an empty backend map must both yield a null address (no backend chosen).
@Test public void testEmptyBackendList() throws InterruptedException { Reference<Long> idRef = new Reference<>(); TNetworkAddress address = SimpleScheduler.getBackendHost(null, idRef); Assert.assertNull(address); ImmutableMap.Builder<Long, ComputeNode> builder = ImmutableMap.builder(); address = SimpleScheduler.getBackendHost(builder.build(), idRef); Assert.assertNull(address); }
/**
 * Scans the term buffer between the rebuild position and the high-water mark looking for a
 * gap (missing data). When the scan stops before the limit, a gap that differs from the
 * currently active one is activated (lossFound) and timer expiry is checked. The result packs
 * the new rebuild offset together with the loss-found flag into a single long.
 * NOTE(review): relies on scanForGap updating scannedTermOffset/scannedTermId as a side
 * effect before the comparison against the active gap — do not reorder.
 */
public long scan( final UnsafeBuffer termBuffer, final long rebuildPosition, final long hwmPosition, final long nowNs, final int termLengthMask, final int positionBitsToShift, final int initialTermId) { boolean lossFound = false; int rebuildOffset = (int)rebuildPosition & termLengthMask; if (rebuildPosition < hwmPosition) { final int rebuildTermCount = (int)(rebuildPosition >>> positionBitsToShift); final int hwmTermCount = (int)(hwmPosition >>> positionBitsToShift); final int rebuildTermId = initialTermId + rebuildTermCount; final int hwmTermOffset = (int)hwmPosition & termLengthMask; final int limitOffset = rebuildTermCount == hwmTermCount ? hwmTermOffset : termLengthMask + 1; rebuildOffset = scanForGap(termBuffer, rebuildTermId, rebuildOffset, limitOffset, this); if (rebuildOffset < limitOffset) { if (scannedTermOffset != activeTermOffset || scannedTermId != activeTermId) { activateGap(nowNs); lossFound = true; } checkTimerExpiry(nowNs); } } return pack(rebuildOffset, lossFound); }
// With rebuild == hwm (nothing outstanding) repeated scans, even after time passes, must never invoke the loss handler.
@Test void shouldNotSendNakWhenBufferIsEmpty() { final long rebuildPosition = ACTIVE_TERM_POSITION; final long hwmPosition = ACTIVE_TERM_POSITION; lossDetector.scan(termBuffer, rebuildPosition, hwmPosition, currentTime, MASK, POSITION_BITS_TO_SHIFT, TERM_ID); currentTime = TimeUnit.MILLISECONDS.toNanos(100); lossDetector.scan(termBuffer, rebuildPosition, hwmPosition, currentTime, MASK, POSITION_BITS_TO_SHIFT, TERM_ID); verifyNoInteractions(lossHandler); }
/**
 * Returns a copy of this voter set in which {@code voter} replaces the existing entry
 * with the same id and a matching voter key; empty when no such voter exists.
 */
public Optional<VoterSet> updateVoter(VoterNode voter) {
    final int voterId = voter.voterKey().id();
    final VoterNode current = voters.get(voterId);
    if (current == null || !current.isVoter(voter.voterKey())) {
        return Optional.empty();
    }
    final HashMap<Integer, VoterNode> updated = new HashMap<>(voters);
    updated.put(voterId, voter);
    return Optional.of(new VoterSet(updated));
}
// updateVoter must reject unknown ids and unchanged voters, and accept a voter whose endpoints changed.
@Test void testUpdateVoter() { Map<Integer, VoterSet.VoterNode> aVoterMap = voterMap(IntStream.of(1, 2, 3), true); VoterSet voterSet = VoterSet.fromMap(new HashMap<>(aVoterMap)); assertEquals(Optional.empty(), voterSet.updateVoter(voterNode(4, true))); assertFalse(voterSet.voterNodeNeedsUpdate(voterNode(4, true))); assertEquals(Optional.empty(), voterSet.updateVoter(voterNode(3, true))); assertFalse(voterSet.voterNodeNeedsUpdate(voterNode(3, true))); VoterSet.VoterNode voter3 = aVoterMap.get(3); VoterSet.VoterNode newVoter3 = VoterSet.VoterNode.of( voter3.voterKey(), Endpoints.fromInetSocketAddresses( Collections.singletonMap( ListenerName.normalised("ABC"), InetSocketAddress.createUnresolved("abc", 1234) ) ), new SupportedVersionRange((short) 1, (short) 1) ); aVoterMap.put(3, newVoter3); assertTrue(voterSet.voterNodeNeedsUpdate(newVoter3)); assertEquals( Optional.of(VoterSet.fromMap(new HashMap<>(aVoterMap))), voterSet.updateVoter(newVoter3) ); }
Bytes prefixBytes(final KO key) { //The serialization format. Note that primaryKeySerialized is not required/used in this function. //{Integer.BYTES foreignKeyLength}{foreignKeySerialized}{Optional-primaryKeySerialized} final byte[] foreignKeySerializedData = foreignKeySerializer.serialize(foreignKeySerdeTopic, key); final ByteBuffer buf = ByteBuffer.allocate(Integer.BYTES + foreignKeySerializedData.length); buf.putInt(foreignKeySerializedData.length); buf.put(foreignKeySerializedData); return Bytes.wrap(buf.array()); }
// prefixBytes must produce {int length}{serialized foreign key}, built here independently for comparison.
@Test public void prefixKeySerdeTest() { final CombinedKeySchema<String, Integer> cks = new CombinedKeySchema<>( () -> "fkTopic", Serdes.String(), () -> "pkTopic", Serdes.Integer() ); final String foreignKey = "someForeignKey"; final byte[] foreignKeySerializedData = Serdes.String().serializer().serialize("fkTopic", foreignKey); final Bytes prefix = cks.prefixBytes(foreignKey); final ByteBuffer buf = ByteBuffer.allocate(Integer.BYTES + foreignKeySerializedData.length); buf.putInt(foreignKeySerializedData.length); buf.put(foreignKeySerializedData); final Bytes expectedPrefixBytes = Bytes.wrap(buf.array()); assertEquals(expectedPrefixBytes, prefix); }
// Inspects the record header at `offset` to classify the traffic (SSLv3/TLS/GMSSL, DTLS, or
// SSLv2) and returns the total packet length including the record header. Returns
// NOT_ENOUGH_DATA when the header is incomplete and NOT_ENCRYPTED when the bytes do not look
// like any supported record format. NOTE(review): branch order matters — the TLS checks can
// fall through to the SSLv2 path when the version/length fields are inconsistent.
static int getEncryptedPacketLength(ByteBuf buffer, int offset) { int packetLength = 0; // SSLv3 or TLS - Check ContentType boolean tls; switch (buffer.getUnsignedByte(offset)) { case SSL_CONTENT_TYPE_CHANGE_CIPHER_SPEC: case SSL_CONTENT_TYPE_ALERT: case SSL_CONTENT_TYPE_HANDSHAKE: case SSL_CONTENT_TYPE_APPLICATION_DATA: case SSL_CONTENT_TYPE_EXTENSION_HEARTBEAT: tls = true; break; default: // SSLv2 or bad data tls = false; } if (tls) { // SSLv3 or TLS or GMSSLv1.0 or GMSSLv1.1 - Check ProtocolVersion int majorVersion = buffer.getUnsignedByte(offset + 1); int version = buffer.getShort(offset + 1); if (majorVersion == 3 || version == GMSSL_PROTOCOL_VERSION) { // SSLv3 or TLS or GMSSLv1.0 or GMSSLv1.1 packetLength = unsignedShortBE(buffer, offset + 3) + SSL_RECORD_HEADER_LENGTH; if (packetLength <= SSL_RECORD_HEADER_LENGTH) { // Neither SSLv3 or TLSv1 (i.e. SSLv2 or bad data) tls = false; } } else if (version == DTLS_1_0 || version == DTLS_1_2 || version == DTLS_1_3) { if (buffer.readableBytes() < offset + DTLS_RECORD_HEADER_LENGTH) { return NOT_ENOUGH_DATA; } // length is the last 2 bytes in the 13 byte header. packetLength = unsignedShortBE(buffer, offset + DTLS_RECORD_HEADER_LENGTH - 2) + DTLS_RECORD_HEADER_LENGTH; } else { // Neither SSLv3 or TLSv1 (i.e. SSLv2 or bad data) tls = false; } } if (!tls) { // SSLv2 or bad data - Check the version int headerLength = (buffer.getUnsignedByte(offset) & 0x80) != 0 ? 2 : 3; int majorVersion = buffer.getUnsignedByte(offset + headerLength + 1); if (majorVersion == 2 || majorVersion == 3) { // SSLv2 packetLength = headerLength == 2 ? (shortBE(buffer, offset) & 0x7FFF) + 2 : (shortBE(buffer, offset) & 0x3FFF) + 3; if (packetLength <= headerLength) { return NOT_ENOUGH_DATA; } } else { return NOT_ENCRYPTED; } } return packetLength; }
// A GMSSL handshake record must be sized as body length plus the SSL record header length.
@Test public void shouldGetPacketLengthOfGmsslProtocolFromByteBuf() { int bodyLength = 65; ByteBuf buf = Unpooled.buffer() .writeByte(SslUtils.SSL_CONTENT_TYPE_HANDSHAKE) .writeShort(SslUtils.GMSSL_PROTOCOL_VERSION) .writeShort(bodyLength); int packetLength = getEncryptedPacketLength(buf, 0); assertEquals(bodyLength + SslUtils.SSL_RECORD_HEADER_LENGTH, packetLength); buf.release(); }
// Parses one audit-log line into an AuditReplayCommand: extracts the timestamp (made relative
// to startTimestamp, then mapped through relativeToAbsolute), splits the tab-separated
// key=value message fields — the rename "(options=" field is first sanitized to "(options:"
// so the '=' split stays unambiguous, then restored — and pulls ugi/cmd/src/dst/ip from the
// resulting map. Throws IOException on any regex, timestamp, or field-format mismatch.
@Override public AuditReplayCommand parse(Text inputLine, Function<Long, Long> relativeToAbsolute) throws IOException { Matcher m = logLineParseRegex.matcher(inputLine.toString()); if (!m.find()) { throw new IOException( "Unable to find valid message pattern from audit log line: `" + inputLine + "` using regex `" + logLineParseRegex + "`"); } long relativeTimestamp; try { relativeTimestamp = dateFormat.parse(m.group("timestamp")).getTime() - startTimestamp; } catch (ParseException p) { throw new IOException( "Exception while parsing timestamp from audit log line: `" + inputLine + "`", p); } // Sanitize the = in the rename options field into a : so we can split on = String auditMessageSanitized = m.group("message").replace("(options=", "(options:"); Map<String, String> parameterMap = new HashMap<String, String>(); String[] auditMessageSanitizedList = auditMessageSanitized.split("\t"); for (String auditMessage : auditMessageSanitizedList) { String[] splitMessage = auditMessage.split("=", 2); try { parameterMap.put(splitMessage[0], splitMessage[1]); } catch (ArrayIndexOutOfBoundsException e) { throw new IOException( "Exception while parsing a message from audit log line: `" + inputLine + "`", e); } } return new AuditReplayCommand(relativeToAbsolute.apply(relativeTimestamp), // Split the UGI on space to remove the auth and proxy portions of it SPACE_SPLITTER.split(parameterMap.get("ugi")).iterator().next(), parameterMap.get("cmd").replace("(options:", "(options="), parameterMap.get("src"), parameterMap.get("dst"), parameterMap.get("ip")); }
// A '=' inside a field value (src "day=1970") must survive the key=value split intact.
@Test public void testInputWithEquals() throws Exception { Text in = getAuditString("1970-01-01 00:00:11,000", "fakeUser", "listStatus", "day=1970", "null"); AuditReplayCommand expected = new AuditReplayCommand(1000, "fakeUser", "listStatus", "day=1970", "null", "0.0.0.0"); assertEquals(expected, parser.parse(in, Function.identity())); }
// Sets the record's last-update timestamp.
public void setUpdateTime(long updateTime) { this.updateTime = updateTime; }
// The update time set on the record must be read back unchanged.
@Test public void testSetUpdateTime() { replicatedRecord.setUpdateTime(2342); assertEquals(2342, replicatedRecord.getUpdateTime()); }
/**
 * Reads a MySQL binlog TIMESTAMP value (seconds since epoch, 4 bytes) and renders it with the
 * standard datetime formatter; a stored value of 0 is MySQL's "zero" datetime sentinel.
 */
@Override
public Serializable read(final MySQLBinlogColumnDef columnDef, final MySQLPacketPayload payload) {
    final int epochSeconds = payload.readInt4();
    if (0 == epochSeconds) {
        return MySQLTimeValueUtils.DATETIME_OF_ZERO;
    }
    return DateTimeFormatterFactory.getStandardFormatter().format(new Timestamp(epochSeconds * 1000L).toLocalDateTime());
}
// A non-zero seconds value must be formatted through the standard datetime formatter.
@Test void assertRead() { int currentSeconds = Long.valueOf(System.currentTimeMillis() / 1000L).intValue(); when(payload.readInt4()).thenReturn(currentSeconds); assertThat(new MySQLTimestampBinlogProtocolValue().read(columnDef, payload), is(DateTimeFormatterFactory.getStandardFormatter().format(new Timestamp(currentSeconds * 1000L).toLocalDateTime()))); }
/** Returns the HTTP method of the wrapped request. */
@Override
public final String method() {
    final String httpMethod = delegate.getMethod();
    return httpMethod;
}
// The wrapper must expose the delegate request's HTTP method verbatim.
@Test void method() { when(request.getMethod()).thenReturn("POST"); assertThat(wrapper.method()) .isEqualTo("POST"); }
/**
 * Returns whether the host extracted from {@code url} is a domain name: a non-empty
 * host that is not an IP address.
 */
public static boolean isDomain(String url) {
    final String host = getDomain(url);
    return !StringUtils.isEmpty(host) && !isIp(host);
}
// Host names (with or without scheme/port/query) are domains; dotted IPv4 hosts are not. IPv6 is unsupported for now.
@Test public void testIsDomain() { assertTrue(isDomain("bolt://alipay.com:80?a=b")); assertTrue(isDomain("alipay.com:80?a=b")); assertTrue(isDomain("bolt://alipay.com:80")); assertTrue(isDomain("alipay.com:80")); assertTrue(isDomain("bolt://alipay?a=b")); assertTrue(isDomain("alipay")); assertTrue(isDomain("sofagw-pool")); assertFalse(isDomain("bolt://1.1.1.1:80?a=b")); assertFalse(isDomain("1.1.1.1:80?a=b")); assertFalse(isDomain("bolt://1.1.1.1:80")); assertFalse(isDomain("1.1.1.1:80")); assertFalse(isDomain("1.1.1.1")); //todo now we do not support ipv6 // assertFalse(isDomain("bolt://FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF#12200?a=b")); // assertFalse(isDomain("FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF#12200?a=b")); // assertFalse(isDomain("bolt://FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF#12200")); // assertFalse(isDomain("bolt://FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF?a=b")); // assertFalse(isDomain("FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF")); }
/**
 * Instantiates {@code clazz} via the constructor matching {@code params}, or returns
 * {@code null} when no constructor matches or instantiation fails.
 * Exceptions thrown by {@code selectMatchingConstructor} itself (e.g. ambiguity) propagate.
 */
public static <T> T newInstanceOrNull(Class<? extends T> clazz, Object... params) {
    final Constructor<T> ctor = selectMatchingConstructor(clazz, params);
    if (ctor != null) {
        try {
            return ctor.newInstance(params);
        } catch (IllegalAccessException | InstantiationException | InvocationTargetException ignored) {
            // Instantiation problems are deliberately reported as a null result.
        }
    }
    return null;
}
// Constructor selection ambiguity must propagate as AmbiguousInstantiationException, not be swallowed into null.
@Test(expected = AmbiguousInstantiationException.class) public void newInstanceOrNull_ambigiousConstructor() { InstantiationUtils.newInstanceOrNull(ClassWithTwoConstructors.class, "foo"); }
/** Returns the transactional producer id derived from this worker's group id. */
public String transactionalProducerId() {
    final String group = groupId();
    return transactionalProducerId(group);
}
// The transactional id must be "connect-cluster-" + group id, for arbitrary, prefix-colliding, and non-ASCII group ids.
@Test public void shouldConstructExpectedTransactionalId() { Map<String, String> workerProps = configs(); workerProps.put(GROUP_ID_CONFIG, "why did i stay up all night writing unit tests"); assertEquals( "connect-cluster-why did i stay up all night writing unit tests", new DistributedConfig(workerProps).transactionalProducerId() ); workerProps.put(GROUP_ID_CONFIG, "connect-cluster"); assertEquals( "connect-cluster-connect-cluster", new DistributedConfig(workerProps).transactionalProducerId() ); workerProps.put(GROUP_ID_CONFIG, "\u2603"); assertEquals( "connect-cluster-\u2603", new DistributedConfig(workerProps).transactionalProducerId() ); }
/** Clamps the prediction from below by the target field's configured minimum, when one is set. */
Double applyMin(double predictionDouble) {
    final Double min = targetField.getMin();
    if (min == null) {
        return predictionDouble;
    }
    return Math.max(min, predictionDouble);
}
// Without a min the prediction passes through; with min=4.34 values below are raised and values above are untouched.
@Test void applyMin() { TargetField targetField = new TargetField(Collections.emptyList(), null, "string", null, null, null, null, null); KiePMMLTarget kiePMMLTarget = getBuilder(targetField).build(); assertThat(kiePMMLTarget.applyMin(4.33)).isCloseTo(4.33, Offset.offset(0.0)); targetField = new TargetField(Collections.emptyList(), null, "string", null, 4.34, null, null, null); kiePMMLTarget = getBuilder(targetField).build(); assertThat(kiePMMLTarget.applyMin(4.33)).isCloseTo(4.34, Offset.offset(0.0)); assertThat(kiePMMLTarget.applyMin(4.35)).isCloseTo(4.35, Offset.offset(0.0)); }
/**
 * Builds the ElasticAgentPluginInfo for the given plugin descriptor. Plugin-level settings
 * are only included for plugins that do not support cluster profiles.
 */
@Override
public ElasticAgentPluginInfo pluginInfoFor(GoPluginDescriptor descriptor) {
    final String pluginId = descriptor.id();
    final PluggableInstanceSettings pluginSettings =
        extension.supportsClusterProfiles(pluginId) ? null : getPluginSettingsAndView(descriptor, extension);
    return new ElasticAgentPluginInfo(descriptor,
        elasticElasticAgentProfileSettings(pluginId),
        elasticClusterProfileSettings(pluginId),
        image(pluginId),
        pluginSettings,
        capabilities(pluginId));
}
// When a plugin supports cluster profiles, profile/cluster settings are populated and plugin-level settings stay null.
@Test public void shouldBuildPluginInfoWithClusterProfileSettings() { GoPluginDescriptor descriptor = GoPluginDescriptor.builder().id("plugin1").build(); List<PluginConfiguration> elasticAgentProfileConfigurations = List.of(new PluginConfiguration("aws_password", new Metadata(true, false))); List<PluginConfiguration> clusterProfileConfigurations = List.of(new PluginConfiguration("aws_url", new Metadata(true, false))); PluginSettingsProperty property = new PluginSettingsProperty("ami-id", "ami-123"); PluginSettingsConfiguration pluginSettingsConfiguration = new PluginSettingsConfiguration(); pluginSettingsConfiguration.add(property); Image icon = new Image("content_type", "data", "hash"); when(pluginManager.resolveExtensionVersion("plugin1", ELASTIC_AGENT_EXTENSION, SUPPORTED_VERSIONS)).thenReturn("1.0"); when(extension.getPluginSettingsConfiguration(descriptor.id())).thenReturn(pluginSettingsConfiguration); when(extension.getPluginSettingsView(descriptor.id())).thenReturn("some html"); when(extension.getIcon(descriptor.id())).thenReturn(icon); when(extension.getClusterProfileMetadata(descriptor.id())).thenReturn(clusterProfileConfigurations); when(extension.getClusterProfileView(descriptor.id())).thenReturn("cluster_profile_view"); when(extension.getProfileMetadata(descriptor.id())).thenReturn(elasticAgentProfileConfigurations); when(extension.getProfileView(descriptor.id())).thenReturn("elastic_agent_profile_view"); when(extension.supportsClusterProfiles("plugin1")).thenReturn(true); ElasticAgentPluginInfoBuilder builder = new ElasticAgentPluginInfoBuilder(extension); ElasticAgentPluginInfo pluginInfo = builder.pluginInfoFor(descriptor); assertThat(pluginInfo.getDescriptor(), is(descriptor)); assertThat(pluginInfo.getExtensionName(), is("elastic-agent")); assertThat(pluginInfo.getImage(), is(icon)); assertThat(pluginInfo.getElasticAgentProfileSettings(), is(new PluggableInstanceSettings(elasticAgentProfileConfigurations, new 
PluginView("elastic_agent_profile_view")))); assertThat(pluginInfo.getClusterProfileSettings(), is(new PluggableInstanceSettings(clusterProfileConfigurations, new PluginView("cluster_profile_view")))); assertNull(pluginInfo.getPluginSettings()); assertFalse(pluginInfo.supportsStatusReport()); }
ClassicGroup getOrMaybeCreateClassicGroup( String groupId, boolean createIfNotExists ) throws GroupIdNotFoundException { Group group = groups.get(groupId); if (group == null && !createIfNotExists) { throw new GroupIdNotFoundException(String.format("Classic group %s not found.", groupId)); } if (group == null) { ClassicGroup classicGroup = new ClassicGroup(logContext, groupId, ClassicGroupState.EMPTY, time, metrics); groups.put(groupId, classicGroup); metrics.onClassicGroupStateTransition(null, classicGroup.currentState()); return classicGroup; } else { if (group.type() == CLASSIC) { return (ClassicGroup) group; } else { // We don't support upgrading/downgrading between protocols at the moment so // we throw an exception if a group exists with the wrong type. throw new GroupIdNotFoundException(String.format("Group %s is not a classic group.", groupId)); } } }
// A static follower rejoining with UNKNOWN_MEMBER_ID and a changed (but selectProtocol-preserving) protocol
// gets a new member id without triggering a rebalance; subsequent join/sync with the stale id are fenced,
// and sync with the new id succeeds with the original assignment.
@Test public void testStaticMemberRejoinWithUnknownMemberIdAndChangeOfProtocolWhileSelectProtocolUnchanged() throws Exception { GroupMetadataManagerTestContext context = new GroupMetadataManagerTestContext.Builder() .build(); GroupMetadataManagerTestContext.RebalanceResult rebalanceResult = context.staticMembersJoinAndRebalance( "group-id", "leader-instance-id", "follower-instance-id" ); ClassicGroup group = context.groupMetadataManager.getOrMaybeCreateClassicGroup("group-id", false); // A static follower rejoin with protocol changing to leader protocol subset won't trigger rebalance if updated // group's selectProtocol remain unchanged. JoinGroupRequestProtocolCollection protocols = GroupMetadataManagerTestContext.toProtocols(group.selectProtocol()); JoinGroupRequestData request = new GroupMetadataManagerTestContext.JoinGroupRequestBuilder() .withGroupId("group-id") .withGroupInstanceId("follower-instance-id") .withMemberId(UNKNOWN_MEMBER_ID) .withProtocols(protocols) .build(); GroupMetadataManagerTestContext.JoinResult followerJoinResult = context.sendClassicGroupJoin( request, true, true ); assertEquals( Collections.singletonList(GroupCoordinatorRecordHelpers.newGroupMetadataRecord(group, group.groupAssignment(), MetadataVersion.latestTesting())), followerJoinResult.records ); // Simulate a successful write to the log. 
followerJoinResult.appendFuture.complete(null); assertTrue(followerJoinResult.joinFuture.isDone()); JoinGroupResponseData expectedResponse = new JoinGroupResponseData() .setErrorCode(Errors.NONE.code()) .setGenerationId(rebalanceResult.generationId) .setMemberId(followerJoinResult.joinFuture.get().memberId()) .setLeader(rebalanceResult.leaderId) .setProtocolName("range") .setProtocolType("consumer") .setSkipAssignment(false) .setMembers(Collections.emptyList()); checkJoinGroupResponse( expectedResponse, followerJoinResult.joinFuture.get(), group, STABLE, Collections.emptySet() ); // Join with old member id will fail because the member id is updated String newFollowerId = followerJoinResult.joinFuture.get().memberId(); assertNotEquals(rebalanceResult.followerId, newFollowerId); followerJoinResult = context.sendClassicGroupJoin(request.setMemberId(rebalanceResult.followerId)); assertTrue(followerJoinResult.records.isEmpty()); assertEquals(Errors.FENCED_INSTANCE_ID.code(), followerJoinResult.joinFuture.get().errorCode()); // Sync with old member id will fail because the member id is updated SyncGroupRequestData syncRequest = new GroupMetadataManagerTestContext.SyncGroupRequestBuilder() .withGroupId("group-id") .withGroupInstanceId("follower-instance-id") .withGenerationId(rebalanceResult.generationId) .withMemberId(rebalanceResult.followerId) .withAssignment(Collections.emptyList()) .build(); GroupMetadataManagerTestContext.SyncResult syncResult = context.sendClassicGroupSync(syncRequest); assertTrue(syncResult.records.isEmpty()); assertTrue(syncResult.syncFuture.isDone()); assertEquals(Errors.FENCED_INSTANCE_ID.code(), syncResult.syncFuture.get().errorCode()); // Sync with new member id succeeds syncResult = context.sendClassicGroupSync(syncRequest.setMemberId(newFollowerId)); assertTrue(syncResult.records.isEmpty()); assertTrue(syncResult.syncFuture.isDone()); assertEquals(Errors.NONE.code(), syncResult.syncFuture.get().errorCode()); 
assertEquals(rebalanceResult.followerAssignment, syncResult.syncFuture.get().assignment()); }
public static byte[] unhex(final byte[] bytes) { final byte[] out = new byte[(bytes.length + 1) >> 1]; int i = bytes.length - 2; int j = out.length - 1; while (i >= 0) { int l = Character.digit(bytes[i], 16); int r = Character.digit(bytes[i + 1], 16); if (l == -1 || r == -1) { return null; } i -= 2; out[j--] = (byte) (((l << 4) | r) & 0xFF); } // length is odd and first byte is invalid if (i == -1 && Character.digit(bytes[0], 16) == -1) { return null; } return out; }
// Covers even/odd lengths, invalid digits (null result), multi-byte input, and a hex/unhex round trip.
@Test void testUnhex() { assertThat(EncodingUtils.unhex("".getBytes())).isEqualTo(new byte[0]); assertThat(EncodingUtils.unhex("1".getBytes())).isEqualTo(new byte[] {0}); assertThat(EncodingUtils.unhex("146".getBytes())).isEqualTo(new byte[] {0, 0x46}); assertThat(EncodingUtils.unhex("z".getBytes())).isEqualTo(null); assertThat(EncodingUtils.unhex("1-".getBytes())).isEqualTo(null); assertThat(EncodingUtils.unhex("466C696E6B".getBytes())) .isEqualTo(new byte[] {0x46, 0x6c, 0x69, 0x6E, 0x6B}); assertThat(EncodingUtils.unhex("4D7953514C".getBytes())) .isEqualTo(new byte[] {0x4D, 0x79, 0x53, 0x51, 0x4C}); assertThat(EncodingUtils.unhex("\uD83D\uDE00".getBytes())).isEqualTo(null); assertThat(EncodingUtils.unhex(EncodingUtils.hex("\uD83D\uDE00").getBytes())) .isEqualTo("\uD83D\uDE00".getBytes()); }
// Samples elements from the input iterator; concrete samplers define the selection strategy.
public abstract Iterator<T> sample(Iterator<T> input);
// With a reservoir larger than the source, the sample must contain exactly the full source size.
@TestTemplate @RetryOnFailure(times = 3) void testReservoirSamplerSampledSize2() { RandomSampler<Double> sampler = new ReservoirSamplerWithoutReplacement<>(20000); Iterator<Double> sampled = sampler.sample(source.iterator()); assertThat(getSize(sampled)) .as( "ReservoirSamplerWithoutReplacement sampled output size should not beyond the source size.") .isEqualTo(SOURCE_SIZE); }
// REST endpoint returning the application info as UTF-8 JSON or XML.
@GET @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) public AppInfo get() { return getAppInfo(); }
// The blacklistednodes endpoint must answer UTF-8 JSON with a single top-level element.
@Test public void testBlacklistedNodes() throws JSONException, Exception { WebResource r = resource(); ClientResponse response = r.path("ws").path("v1").path("mapreduce") .path("blacklistednodes").accept(MediaType.APPLICATION_JSON) .get(ClientResponse.class); assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, response.getType().toString()); JSONObject json = response.getEntity(JSONObject.class); assertEquals("incorrect number of elements", 1, json.length()); verifyBlacklistedNodesInfo(json, appContext); }
// Delegates the failover-switch state query to the failover reactor.
public boolean isFailoverSwitch() { return failoverReactor.isFailoverSwitch(); }
// The holder must surface the injected reactor's failover-switch value unchanged.
@Test void testIsFailoverSwitch() throws IllegalAccessException, NoSuchFieldException, NacosException { FailoverReactor mock = injectMockFailoverReactor(); when(mock.isFailoverSwitch()).thenReturn(true); assertTrue(holder.isFailoverSwitch()); }
/** Legacy {@link Date} overload: converts to an Instant (null-safe) and delegates. */
public List<Duration> calculatePreciseDuration(final Date then) {
    if (then == null) {
        return calculatePreciseDuration((java.time.Instant) null);
    }
    return calculatePreciseDuration(then.toInstant());
}
// A future instant 5h10m1s away must yield at least two duration components: 5 (hours) then 10 (minutes).
@Test public void testPreciseInTheFuture() throws Exception { PrettyTime t = new PrettyTime(); List<Duration> durations = t.calculatePreciseDuration(now.plusHours(5).plusMinutes(10).plusSeconds(1)); Assert.assertTrue(durations.size() >= 2); Assert.assertEquals(5, durations.get(0).getQuantity()); Assert.assertEquals(10, durations.get(1).getQuantity()); }
// Convenience overload: processes the resource class with no parent resource context.
public static ResourceModel processResource(final Class<?> resourceClass) { return processResource(resourceClass, null); }
// An @Action method that takes a Callback but also declares a non-void return must be rejected at processing time.
@Test(expectedExceptions = ResourceConfigException.class) public void failsOnInconsistentMethodWithCallbackAndNonVoidReturn() { @RestLiCollection(name = "callbackAndResult") class LocalClass extends CollectionResourceTemplate<Long, EmptyRecord> { @Action(name = "callbackAndResult") public List<EmptyRecord> callbackAndResult(@CallbackParam Callback<EmptyRecord> callback) { return Collections.emptyList(); } } RestLiAnnotationReader.processResource(LocalClass.class); Assert.fail("#getInterfaceType should fail throwing a ResourceConfigException"); }
// Synchronous wrapper around the async drain: blocks until the drained-element count is available.
@Override public int drainTo(Collection<? super V> c) { return get(drainToAsync(c)); }
// drainTo with a limit must move exactly that many elements; successive drains empty the queue.
@Test public void testDrainTo() { RTransferQueue<Integer> queue = redisson.getTransferQueue("queue"); for (int i = 0 ; i < 100; i++) { queue.offer(i); } Assertions.assertEquals(100, queue.size()); Set<Integer> batch = new HashSet<>(); int count = queue.drainTo(batch, 10); Assertions.assertEquals(10, count); Assertions.assertEquals(10, batch.size()); Assertions.assertEquals(90, queue.size()); queue.drainTo(batch, 10); queue.drainTo(batch, 20); queue.drainTo(batch, 60); Assertions.assertEquals(0, queue.size()); }
// Deletes the function's StatefulSet (Foreground propagation, graceful shutdown) and waits for
// both the StatefulSet and its pods to disappear. Delete and wait steps are retried via Actions;
// HTTP 404 is treated as "already deleted". The delete+wait pair runs up to twice before the
// method fails with a RuntimeException; only on success does it wait for pod termination so
// subscriptions can be cleaned up afterwards.
// NOTE(review): uses deleteNamespacedStatefulSetCall directly because of a kubernetes-client
// bug — see https://github.com/kubernetes-client/java/issues/86.
public void deleteStatefulSet() throws InterruptedException { String statefulSetName = createJobName(instanceConfig.getFunctionDetails(), this.jobName); final V1DeleteOptions options = new V1DeleteOptions(); options.setGracePeriodSeconds((long) gracePeriodSeconds); options.setPropagationPolicy("Foreground"); String fqfn = FunctionCommon.getFullyQualifiedName(instanceConfig.getFunctionDetails()); Actions.Action deleteStatefulSet = Actions.Action.builder() .actionName(String.format("Deleting statefulset for function %s", fqfn)) .numRetries(KubernetesRuntimeFactory.numRetries) .sleepBetweenInvocationsMs(KubernetesRuntimeFactory.sleepBetweenRetriesMs) .supplier(() -> { Response response; try { // cannot use deleteNamespacedStatefulSet because of bug in kuberenetes // https://github.com/kubernetes-client/java/issues/86 response = appsClient.deleteNamespacedStatefulSetCall( statefulSetName, jobNamespace, null, null, gracePeriodSeconds, null, "Foreground", options, null) .execute(); } catch (ApiException e) { // if already deleted if (e.getCode() == HTTP_NOT_FOUND) { log.warn("Statefulset for function {} does not exist", fqfn); return Actions.ActionResult.builder().success(true).build(); } String errorMsg = e.getResponseBody() != null ? 
e.getResponseBody() : e.getMessage(); return Actions.ActionResult.builder() .success(false) .errorMsg(errorMsg) .build(); } catch (IOException e) { return Actions.ActionResult.builder() .success(false) .errorMsg(e.getMessage()) .build(); } // if already deleted if (response.code() == HTTP_NOT_FOUND) { log.warn("Statefulset for function {} does not exist", fqfn); return Actions.ActionResult.builder().success(true).build(); } else { return Actions.ActionResult.builder() .success(response.isSuccessful()) .errorMsg(response.message()) .build(); } }) .build(); Actions.Action waitForStatefulSetDeletion = Actions.Action.builder() .actionName(String.format("Waiting for StatefulSet deletion to complete deletion of function %s", fqfn)) // set retry period to be about 2x the graceshutdown time .numRetries(KubernetesRuntimeFactory.numRetries * 2) .sleepBetweenInvocationsMs(KubernetesRuntimeFactory.sleepBetweenRetriesMs * 2) .supplier(() -> { V1StatefulSet response; try { response = appsClient.readNamespacedStatefulSet(statefulSetName, jobNamespace, null); } catch (ApiException e) { // statefulset is gone if (e.getCode() == HTTP_NOT_FOUND) { return Actions.ActionResult.builder().success(true).build(); } String errorMsg = e.getResponseBody() != null ? e.getResponseBody() : e.getMessage(); return Actions.ActionResult.builder() .success(false) .errorMsg(errorMsg) .build(); } return Actions.ActionResult.builder() .success(false) .errorMsg(response.getStatus().toString()) .build(); }) .build(); // Need to wait for all pods to die so we can cleanup subscriptions. 
Actions.Action waitForStatefulPodsToTerminate = Actions.Action.builder() .actionName(String.format("Waiting for pods for function %s to terminate", fqfn)) .numRetries(KubernetesRuntimeFactory.numRetries * 2) .sleepBetweenInvocationsMs(KubernetesRuntimeFactory.sleepBetweenRetriesMs * 2) .supplier(() -> { Map<String, String> validLabels = getLabels(instanceConfig.getFunctionDetails()); String labels = String.format("tenant=%s,namespace=%s,name=%s", validLabels.get("tenant"), validLabels.get("namespace"), validLabels.get("name")); V1PodList response; try { response = coreClient.listNamespacedPod(jobNamespace, null, null, null, null, labels, null, null, null, null, null); } catch (ApiException e) { String errorMsg = e.getResponseBody() != null ? e.getResponseBody() : e.getMessage(); return Actions.ActionResult.builder() .success(false) .errorMsg(errorMsg) .build(); } if (response.getItems().size() > 0) { return Actions.ActionResult.builder() .success(false) .errorMsg(response.getItems().size() + " pods still alive.") .build(); } else { return Actions.ActionResult.builder() .success(true) .build(); } }) .build(); AtomicBoolean success = new AtomicBoolean(false); Actions.newBuilder() .addAction(deleteStatefulSet.toBuilder() .continueOn(true) .build()) .addAction(waitForStatefulSetDeletion.toBuilder() .continueOn(false) .onSuccess((ignored) -> success.set(true)) .build()) .addAction(deleteStatefulSet.toBuilder() .continueOn(true) .build()) .addAction(waitForStatefulSetDeletion.toBuilder() .onSuccess((ignored) -> success.set(true)) .build()) .run(); if (!success.get()) { throw new RuntimeException(String.format("Failed to delete statefulset for function %s", fqfn)); } else { // wait for pods to terminate Actions.newBuilder() .addAction(waitForStatefulPodsToTerminate) .run(); } }
// Tenant/namespace/name containing ':' must be translated to '-' in both the StatefulSet name and the pod label selector.
@Test public void testDeleteStatefulSetWithTranslatedKubernetesLabelChars() throws Exception { InstanceConfig config = createJavaInstanceConfig(FunctionDetails.Runtime.JAVA, false); config.setFunctionDetails(createFunctionDetails(FunctionDetails.Runtime.JAVA, false, (fb) -> fb.setTenant("c:tenant").setNamespace("c:ns").setName("c:fn"))); CoreV1Api coreApi = mock(CoreV1Api.class); AppsV1Api appsApi = mock(AppsV1Api.class); Call successfulCall = mock(Call.class); Response okResponse = mock(Response.class); when(okResponse.code()).thenReturn(HttpURLConnection.HTTP_OK); when(okResponse.isSuccessful()).thenReturn(true); when(okResponse.message()).thenReturn(""); when(successfulCall.execute()).thenReturn(okResponse); final String expectedFunctionNamePrefix = String.format("pf-%s-%s-%s", "c-tenant", "c-ns", "c-fn"); factory = createKubernetesRuntimeFactory(null, 10, 1.0, 1.0); factory.setCoreClient(coreApi); factory.setAppsClient(appsApi); ArgumentMatcher<String> hasTranslatedFunctionName = (String t) -> t.startsWith(expectedFunctionNamePrefix); when(appsApi.deleteNamespacedStatefulSetCall( argThat(hasTranslatedFunctionName), anyString(), isNull(), isNull(), anyInt(), isNull(), anyString(), any(), isNull())).thenReturn(successfulCall); ApiException notFoundException = mock(ApiException.class); when(notFoundException.getCode()).thenReturn(HttpURLConnection.HTTP_NOT_FOUND); when(appsApi.readNamespacedStatefulSet( argThat(hasTranslatedFunctionName), anyString(), isNull())).thenThrow(notFoundException); V1PodList podList = mock(V1PodList.class); when(podList.getItems()).thenReturn(Collections.emptyList()); String expectedLabels = String.format("tenant=%s,namespace=%s,name=%s", "c-tenant", "c-ns", "c-fn"); when(coreApi.listNamespacedPod(anyString(), isNull(), isNull(), isNull(), isNull(), eq(expectedLabels), isNull(), isNull(), isNull(), isNull(), isNull())).thenReturn(podList); KubernetesRuntime kr = factory.createContainer(config, "/test/code", "code.yml", 
"/test/transforms", "transform.yml", Long.MIN_VALUE); kr.deleteStatefulSet(); verify(coreApi).listNamespacedPod(anyString(), isNull(), isNull(), isNull(), isNull(), eq(expectedLabels), isNull(), isNull(), isNull(), isNull(), isNull()); }
/**
 * Returns the task manager location future for the vertex, but only when that future has
 * already completed successfully; otherwise empty.
 */
@Override
public Optional<CompletableFuture<TaskManagerLocation>> getTaskManagerLocation(
        ExecutionVertexID executionVertexId) {
    final Optional<CompletableFuture<TaskManagerLocation>> location =
        inputsLocationsRetriever.getTaskManagerLocation(executionVertexId);
    return location.filter(f -> f.isDone() && !f.isCompletedExceptionally());
}
@Test void testNoInputLocationIfFailed() { TestingInputsLocationsRetriever originalLocationRetriever = getOriginalLocationRetriever(); originalLocationRetriever.failTaskManagerLocation(EV1, new Throwable()); InputsLocationsRetriever availableInputsLocationsRetriever = new AvailableInputsLocationsRetriever(originalLocationRetriever); assertThat(availableInputsLocationsRetriever.getTaskManagerLocation(EV1)).isNotPresent(); }
public String getSecretsConfigView(String pluginId) { return getVersionedSecretsExtension(pluginId).getSecretsConfigView(pluginId); }
@Test void getSecretsConfigView_shouldDelegateToVersionedExtension() { SecretsExtensionV1 secretsExtensionV1 = mock(SecretsExtensionV1.class); Map<String, VersionedSecretsExtension> secretsExtensionMap = Map.of("1.0", secretsExtensionV1); extension = new SecretsExtension(pluginManager, extensionsRegistry, secretsExtensionMap); when(pluginManager.resolveExtensionVersion(PLUGIN_ID, SECRETS_EXTENSION, SUPPORTED_VERSIONS)).thenReturn(SecretsExtensionV1.VERSION); this.extension.getSecretsConfigView(PLUGIN_ID); verify(secretsExtensionV1).getSecretsConfigView(PLUGIN_ID); }
@JsonIgnore public StepInstance.Status getTerminalStatus(String wfId, long instanceId) { Checks.checkTrue( action.isUsingUpstream(), "[%s][%s] cannot getTerminalStatus for action [%s], which does not support upstream mode", wfId, instanceId, this); if (Objects.equals(workflowId, wfId) && workflowInstanceId == instanceId) { return action.getStatus(); } return StepInstance.Status.STOPPED; }
@Test public void testGetTerminalStatus() { StepAction action = StepAction.builder() .workflowId("sample-dag-test-3") .workflowInstanceId(1) .stepId("job1") .action(Actions.StepInstanceAction.STOP) .build(); assertEquals(StepInstance.Status.STOPPED, action.getTerminalStatus("sample-dag-test-3", 1)); assertEquals(StepInstance.Status.STOPPED, action.getTerminalStatus("downstream-wf", 2)); action = StepAction.builder() .workflowId("sample-dag-test-3") .workflowInstanceId(1) .stepId("job1") .action(Actions.StepInstanceAction.KILL) .build(); assertEquals( StepInstance.Status.FATALLY_FAILED, action.getTerminalStatus("sample-dag-test-3", 1)); assertEquals(StepInstance.Status.STOPPED, action.getTerminalStatus("downstream-wf", 2)); action = StepAction.builder() .workflowId("sample-dag-test-3") .workflowInstanceId(1) .stepId("job1") .action(Actions.StepInstanceAction.SKIP) .build(); assertEquals(StepInstance.Status.SKIPPED, action.getTerminalStatus("sample-dag-test-3", 1)); assertEquals(StepInstance.Status.STOPPED, action.getTerminalStatus("downstream-wf", 2)); StepAction invalidAction = StepAction.builder() .workflowId("sample-dag-test-3") .workflowInstanceId(1) .stepId("job1") .action(Actions.StepInstanceAction.PAUSE) .build(); AssertHelper.assertThrows( "Cannot call getTerminalStatus for non-termination actions", IllegalArgumentException.class, "[sample-dag-test-3][1] cannot getTerminalStatus for action", () -> invalidAction.getTerminalStatus("sample-dag-test-3", 1)); }
@SuppressWarnings("unchecked") public static <S, F> S visit(final SqlType type, final SqlTypeWalker.Visitor<S, F> visitor) { final BiFunction<SqlTypeWalker.Visitor<?, ?>, SqlType, Object> handler = HANDLER .get(type.baseType()); if (handler == null) { throw new UnsupportedOperationException("Unsupported schema type: " + type.baseType()); } return (S) handler.apply(visitor, type); }
@Test public void shouldVisitDouble() { // Given: final SqlPrimitiveType type = SqlTypes.DOUBLE; when(visitor.visitDouble(any())).thenReturn("Expected"); // When: final String result = SqlTypeWalker.visit(type, visitor); // Then: verify(visitor).visitDouble(same(type)); assertThat(result, is("Expected")); }
byte[] getContent(String filename) { try { return Files.readAllBytes(Paths.get(filename)); } catch (IOException e) { throw new RuntimeException(format("Can't read content of %s", filename), e); } }
@Test public void readNonExistingContent() { RuntimeException ex = Assertions.assertThrows( RuntimeException.class, () -> render.getContent("__file_that_may_not_exists_else_it_will_fail") ); MatcherAssert.assertThat(ex, allOf( hasMessage(containsString("Can't read content of __file_that_may_not_exists_else_it_will_fail")), hasCause(instanceOf(IOException.class)) )); }
@Override public void trace(String msg) { logger.trace(msg); }
@Test public void testTrace() { Logger mockLogger = mock(Logger.class); when(mockLogger.getName()).thenReturn("foo"); InternalLogger logger = new Slf4JLogger(mockLogger); logger.trace("a"); verify(mockLogger).getName(); verify(mockLogger).trace("a"); }
public static boolean passCheck(ResourceWrapper resourceWrapper, /*@Valid*/ ParamFlowRule rule, /*@Valid*/ int count, Object... args) { if (args == null) { return true; } int paramIdx = rule.getParamIdx(); if (args.length <= paramIdx) { return true; } // Get parameter value. Object value = args[paramIdx]; // Assign value with the result of paramFlowKey method if (value instanceof ParamFlowArgument) { value = ((ParamFlowArgument) value).paramFlowKey(); } // If value is null, then pass if (value == null) { return true; } if (rule.isClusterMode() && rule.getGrade() == RuleConstant.FLOW_GRADE_QPS) { return passClusterCheck(resourceWrapper, rule, count, value); } return passLocalCheck(resourceWrapper, rule, count, value); }
@Test public void testPassLocalCheckForCollection() throws InterruptedException { final String resourceName = "testPassLocalCheckForCollection"; final ResourceWrapper resourceWrapper = new StringResourceWrapper(resourceName, EntryType.IN); int paramIdx = 0; double globalThreshold = 1; ParamFlowRule rule = new ParamFlowRule(resourceName).setParamIdx(paramIdx).setCount(globalThreshold); String v1 = "a", v2 = "B", v3 = "Cc"; List<String> list = Arrays.asList(v1, v2, v3); ParameterMetric metric = new ParameterMetric(); ParameterMetricStorage.getMetricsMap().put(resourceWrapper.getName(), metric); metric.getRuleTimeCounterMap().put(rule, new ConcurrentLinkedHashMapWrapper<Object, AtomicLong>(4000)); metric.getRuleTokenCounterMap().put(rule, new ConcurrentLinkedHashMapWrapper<>(4000)); assertTrue(ParamFlowChecker.passCheck(resourceWrapper, rule, 1, list)); assertFalse(ParamFlowChecker.passCheck(resourceWrapper, rule, 1, list)); }
public void isFalse() { if (actual == null) { isEqualTo(false); // fails } else if (actual) { failWithoutActual(simpleFact("expected to be false")); } }
@Test public void nullIsFalseFailing() { expectFailureWhenTestingThat(null).isFalse(); assertFailureKeys("expected", "but was"); assertFailureValue("expected", "false"); assertFailureValue("but was", "null"); }
@Operation(summary = "update the connection") @PatchMapping(value = "{id}", consumes = "application/json") public Connection update(@PathVariable("id") Long id, @RequestBody ConnectionDTO connectionDTO) { Connection connectionUpdated = connectionService.updateAttributes(id, connectionDTO); metadataProcessorService.startCollectMetadata(connectionUpdated, new HashMap<>()); connectionService.updateConnection(connectionUpdated); return connectionUpdated; }
@Test public void updateConnection() { when(connectionServiceMock.updateAttributes(anyLong(), any(ConnectionDTO.class))).thenReturn(getNewConnection()); Connection result = controllerMock.update(1L, getNewConnectionDTO()); verify(connectionServiceMock, times(1)).updateAttributes(anyLong(), any(ConnectionDTO.class)); verify(metadataProcessorServiceMock, times(1)).startCollectMetadata(any(Connection.class), anyMap()); verify(connectionServiceMock, times(1)).updateConnection(any(Connection.class)); assertNotNull(result); }
public static Connection fromHostList(String... brokers) { return fromHostList(Arrays.asList(brokers), getDefault()); }
@Test public void testBrokerListWithHeaders() { // Create the connection List<String> brokers = ImmutableList.of("127.0.0.1:1234", "localhost:2345"); Map<String, String> headers = ImmutableMap.of("Caller", "curl"); JsonAsyncHttpPinotClientTransportFactory factory = new JsonAsyncHttpPinotClientTransportFactory(); factory.setHeaders(headers); Connection connection = ConnectionFactory.fromHostList(brokers, factory.buildTransport()); // Check that the broker list has the right length and has the same servers Assert.assertEquals(connection.getBrokerList(), brokers); }
public LongValue increment(long increment) { this.value += increment; this.set = true; return this; }
@Test public void increment_LongVariationValue_has_no_effect_if_arg_is_null() { verifyUnsetVariationValue(new LongValue().increment(null)); }
@Override public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context) throws IOException, SAXException, TikaException { // Automatically detect the character encoding try (AutoDetectReader reader = new AutoDetectReader(CloseShieldInputStream.wrap(stream), metadata, getEncodingDetector(context))) { //try to get detected content type; could be a subclass of text/plain //such as vcal, etc. String incomingMime = metadata.get(Metadata.CONTENT_TYPE); MediaType mediaType = MediaType.TEXT_PLAIN; if (incomingMime != null) { MediaType tmpMediaType = MediaType.parse(incomingMime); if (tmpMediaType != null) { mediaType = tmpMediaType; } } Charset charset = reader.getCharset(); MediaType type = new MediaType(mediaType, charset); metadata.set(Metadata.CONTENT_TYPE, type.toString()); // deprecated, see TIKA-431 metadata.set(Metadata.CONTENT_ENCODING, charset.name()); XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata); xhtml.startDocument(); xhtml.startElement("p"); char[] buffer = new char[4096]; int n = reader.read(buffer); while (n != -1) { xhtml.characters(buffer, 0, n); n = reader.read(buffer); } xhtml.endElement("p"); xhtml.endDocument(); } }
@Test public void testCharsetDetectionWithShortSnipet() throws Exception { final String text = "Hello, World!"; Metadata metadata = new Metadata(); parser.parse(new ByteArrayInputStream(text.getBytes(UTF_8)), new BodyContentHandler(), metadata, new ParseContext()); assertEquals("text/plain; charset=ISO-8859-1", metadata.get(Metadata.CONTENT_TYPE)); // Now verify that if we tell the parser the encoding is UTF-8, that's what // we get back (see TIKA-868) metadata.set(Metadata.CONTENT_TYPE, "application/binary; charset=UTF-8"); parser.parse(new ByteArrayInputStream(text.getBytes(UTF_8)), new BodyContentHandler(), metadata, new ParseContext()); assertEquals("application/binary; charset=UTF-8", metadata.get(Metadata.CONTENT_TYPE)); }
@Override @MethodNotAvailable public void evictAll() { throw new MethodNotAvailableException(); }
@Test(expected = MethodNotAvailableException.class) public void testEvictAll() { adapter.evictAll(); }
public static MatchAll matchAll() { return new AutoValue_FileIO_MatchAll.Builder() .setConfiguration(MatchConfiguration.create(EmptyMatchTreatment.ALLOW_IF_WILDCARD)) .build(); }
@Test @Category(NeedsRunner.class) public void testMatchAllDisallowEmptyNonWildcard() throws IOException { p.apply(Create.of(tmpFolder.getRoot().getAbsolutePath() + "/blah")) .apply(FileIO.matchAll().withEmptyMatchTreatment(EmptyMatchTreatment.ALLOW_IF_WILDCARD)); thrown.expectCause(isA(FileNotFoundException.class)); p.run(); }
@Override public synchronized Iterable<ConnectorFactory> getConnectorFactories() { return ImmutableList.of(new KafkaConnectorFactory(extension)); }
@Test public void testSpinup() { KafkaPlugin plugin = new KafkaPlugin(); ConnectorFactory factory = getOnlyElement(plugin.getConnectorFactories()); assertInstanceOf(factory, KafkaConnectorFactory.class); Connector c = factory.create( "test-connector", ImmutableMap.<String, String>builder() .put("kafka.table-names", "test") .put("kafka.nodes", "localhost:9092") .build(), new TestingConnectorContext()); assertNotNull(c); }
@Override public CompletableFuture<JobManagerRunnerResult> getResultFuture() { return resultFuture; }
@Test void testInitializationFailureSetsFailureInfoProperly() throws ExecutionException, InterruptedException { final CompletableFuture<JobMasterService> jobMasterServiceFuture = new CompletableFuture<>(); DefaultJobMasterServiceProcess serviceProcess = createTestInstance(jobMasterServiceFuture); final RuntimeException originalCause = new RuntimeException("Expected RuntimeException"); long beforeFailureTimestamp = System.currentTimeMillis(); jobMasterServiceFuture.completeExceptionally(originalCause); long afterFailureTimestamp = System.currentTimeMillis(); final JobManagerRunnerResult result = serviceProcess.getResultFuture().get(); final ErrorInfo executionGraphFailure = result.getExecutionGraphInfo().getArchivedExecutionGraph().getFailureInfo(); assertThat(executionGraphFailure).isNotNull(); assertInitializationException( executionGraphFailure.getException(), originalCause, executionGraphFailure.getTimestamp(), beforeFailureTimestamp, afterFailureTimestamp); }
public Predicate convert(List<ScalarOperator> operators, DeltaLakeContext context) { DeltaLakeExprVisitor visitor = new DeltaLakeExprVisitor(); List<Predicate> predicates = Lists.newArrayList(); for (ScalarOperator operator : operators) { Predicate predicate = operator.accept(visitor, context); if (predicate != null) { predicates.add(predicate); } } Optional<Predicate> result = predicates.stream().reduce(And::new); return result.orElse(ALWAYS_TRUE); }
/** AND, OR, and NOT compound operators must translate to the matching Delta Lake And/Or/NOT predicates. */
@Test public void testConvertCompoundPredicate() {
    ScalarOperationToDeltaLakeExpr converter = new ScalarOperationToDeltaLakeExpr();
    ScalarOperationToDeltaLakeExpr.DeltaLakeContext context =
        new ScalarOperationToDeltaLakeExpr.DeltaLakeContext(schema, new HashSet<>());
    ConstantOperator value1 = ConstantOperator.createInt(5);
    ConstantOperator value2 = ConstantOperator.createInt(10);
    Literal literal1 = Literal.ofInt(5);
    Literal literal2 = Literal.ofInt(10);
    List<ScalarOperator> operators;

    // and
    ScalarOperator gtOperator = new BinaryPredicateOperator(BinaryType.GT, cIntCol, value1);
    ScalarOperator ltOperator = new BinaryPredicateOperator(BinaryType.LT, cIntCol, value2);
    CompoundPredicateOperator operator =
        new CompoundPredicateOperator(CompoundPredicateOperator.CompoundType.AND, gtOperator, ltOperator);
    operators = new ArrayList<>(List.of(operator));
    Predicate convertExpr = converter.convert(operators, context);
    Predicate expectedExpr = new And(new Predicate(">", cDeltaIntCol, literal1),
        new Predicate("<", cDeltaIntCol, literal2));
    Assert.assertEquals(convertExpr.toString(), expectedExpr.toString());

    // or
    ltOperator = new BinaryPredicateOperator(BinaryType.LT, cIntCol, value1);
    gtOperator = new BinaryPredicateOperator(BinaryType.GT, cIntCol, value2);
    operator = new CompoundPredicateOperator(CompoundPredicateOperator.CompoundType.OR, ltOperator, gtOperator);
    operators = new ArrayList<>(List.of(operator));
    convertExpr = converter.convert(operators, context);
    expectedExpr = new Or(new Predicate("<", cDeltaIntCol, literal1),
        new Predicate(">", cDeltaIntCol, literal2));
    Assert.assertEquals(convertExpr.toString(), expectedExpr.toString());

    // NOT
    ltOperator = new BinaryPredicateOperator(BinaryType.LT, cIntCol, value1);
    operator = new CompoundPredicateOperator(CompoundPredicateOperator.CompoundType.NOT, ltOperator);
    operators = new ArrayList<>(List.of(operator));
    convertExpr = converter.convert(operators, context);
    expectedExpr = new Predicate("NOT", new Predicate("<", cDeltaIntCol, literal1));
    Assert.assertEquals(convertExpr.toString(), expectedExpr.toString());
}
public static <T> T getBean(Class<T> interfaceClass, Class typeClass) { Object object = serviceMap.get(interfaceClass.getName() + "<" + typeClass.getName() + ">"); if(object == null) return null; if(object instanceof Object[]) { return (T)Array.get(object, 0); } else { return (T)object; } }
@Test public void testMultipleInterfaceOneBean() { D1 d1 = SingletonServiceFactory.getBean(D1.class); D2 d2 = SingletonServiceFactory.getBean(D2.class); Assert.assertEquals(d1, d2); }
@Override public List<ParamInfo> extractParam(HttpServletRequest request) throws NacosRuntimeException { ArrayList<ParamInfo> paramInfos = new ArrayList<>(); String listenConfigs = request.getParameter("Listening-Configs"); if (StringUtils.isBlank(listenConfigs)) { return paramInfos; } try { listenConfigs = URLDecoder.decode(listenConfigs, Constants.ENCODE); } catch (UnsupportedEncodingException e) { throw new NacosRuntimeException(ErrorCode.UnKnowError.getCode(), e); } if (StringUtils.isBlank(listenConfigs)) { return paramInfos; } String[] lines = listenConfigs.split(Character.toString(LINE_SEPARATOR_CHAR)); for (String line : lines) { ParamInfo paramInfo = new ParamInfo(); String[] words = line.split(Character.toString(WORD_SEPARATOR_CHAR)); if (words.length < 2 || words.length > 4) { throw new IllegalArgumentException("invalid probeModify"); } paramInfo.setDataId(words[0]); paramInfo.setGroup(words[1]); if (words.length == 4) { paramInfo.setNamespaceId(words[3]); } paramInfos.add(paramInfo); } return paramInfos; }
@Test void testNormal() { String listenerConfigsString = getListenerConfigsString(); Mockito.when(httpServletRequest.getParameter(eq("Listening-Configs"))).thenReturn(listenerConfigsString); configListenerHttpParamExtractor = new ConfigListenerHttpParamExtractor(); configListenerHttpParamExtractor.extractParam(httpServletRequest); }
@Override public Result invoke(Invocation invocation) throws RpcException { // When broadcasting, it should be called remotely. if (isBroadcast()) { if (logger.isDebugEnabled()) { logger.debug("Performing broadcast call for method: " + RpcUtils.getMethodName(invocation) + " of service: " + getUrl().getServiceKey()); } return invoker.invoke(invocation); } if (peerFlag) { if (logger.isDebugEnabled()) { logger.debug("Performing point-to-point call for method: " + RpcUtils.getMethodName(invocation) + " of service: " + getUrl().getServiceKey()); } // If it's a point-to-point direct connection, invoke the original Invoker return invoker.invoke(invocation); } if (isInjvmExported()) { if (logger.isDebugEnabled()) { logger.debug("Performing local JVM call for method: " + RpcUtils.getMethodName(invocation) + " of service: " + getUrl().getServiceKey()); } // If it's exported to the local JVM, invoke the corresponding Invoker return injvmInvoker.invoke(invocation); } if (logger.isDebugEnabled()) { logger.debug("Performing remote call for method: " + RpcUtils.getMethodName(invocation) + " of service: " + getUrl().getServiceKey()); } // Otherwise, delegate the invocation to the original Invoker return invoker.invoke(invocation); }
@Test void testBroadcast() { URL url = URL.valueOf("remote://1.2.3.4/" + DemoService.class.getName()); url = url.addParameter(REFER_KEY, URL.encode(PATH_KEY + "=" + DemoService.class.getName())); url = url.addParameter("cluster", "broadcast"); url = url.setScopeModel(ApplicationModel.defaultModel().getDefaultModule()); Invoker<DemoService> cluster = getClusterInvoker(url); invokers.add(cluster); RpcInvocation invocation = new RpcInvocation(); invocation.setMethodName("doSomething8"); invocation.setParameterTypes(new Class[] {}); Result ret = cluster.invoke(invocation); Assertions.assertEquals("doSomething8", ret.getValue()); }
@Override public int getMedium(int index) { int value = getUnsignedMedium(index); if ((value & 0x800000) != 0) { value |= 0xff000000; } return value; }
@Test public void getMediumBoundaryCheck2() { assertThrows(IndexOutOfBoundsException.class, new Executable() { @Override public void execute() { buffer.getMedium(buffer.capacity() - 2); } }); }
public boolean shouldLog(final Logger logger, final String path, final int responseCode) { if (rateLimitersByPath.containsKey(path)) { final RateLimiter rateLimiter = rateLimitersByPath.get(path); if (!rateLimiter.tryAcquire()) { if (pathLimitHit.tryAcquire()) { logger.info("Hit rate limit for path " + path + " with limit " + rateLimiter.getRate()); } return false; } } if (rateLimitersByResponseCode.containsKey(responseCode)) { final RateLimiter rateLimiter = rateLimitersByResponseCode.get(responseCode); if (!rateLimiter.tryAcquire()) { if (responseCodeLimitHit.tryAcquire()) { logger.info("Hit rate limit for response code " + responseCode + " with limit " + rateLimiter.getRate()); } return false; } } return true; }
@Test public void shouldLog_notRateLimited() { // When: assertThat(loggingRateLimiter.shouldLog(logger, "/foo", 200), is(true)); // Then: verify(rateLimiter, never()).tryAcquire(); verify(logger, never()).info(any()); }
@Override public List<Predicate> getOperands() { return operands; }
@Test void requireThatConstructorsWork() { Predicate foo = SimplePredicates.newString("foo"); Predicate bar = SimplePredicates.newString("bar"); Conjunction node = new Conjunction(foo, bar); assertEquals(List.of(foo, bar), node.getOperands()); node = new Conjunction(List.of(foo, bar)); assertEquals(List.of(foo, bar), node.getOperands()); }
public boolean matches(String comment) { for (String escapedMatcher : escapeMatchers()) { Pattern pattern = Pattern.compile(String.join(escapedMatcher, "\\B", "\\B|\\b", "\\b")); if (pattern.matcher(comment).find()) { return true; } } return false; }
@Test void shouldEscapeRegexes() throws Exception { assertThat(new Matcher("[").matches("[")).isTrue(); assertThat(new Matcher("]").matches("]]")).isTrue(); assertThat(new Matcher("\\").matches("\\\\")).isTrue(); assertThat(new Matcher("^^").matches("^^")).isTrue(); assertThat(new Matcher("$").matches("$$")).isTrue(); assertThat(new Matcher("..").matches("...")).isTrue(); assertThat(new Matcher("|||").matches("||||")).isTrue(); assertThat(new Matcher("??").matches("???")).isTrue(); assertThat(new Matcher("**").matches("**")).isTrue(); assertThat(new Matcher("++").matches("++")).isTrue(); assertThat(new Matcher("((").matches("(((")).isTrue(); assertThat(new Matcher("))").matches(")))")).isTrue(); }
public boolean isHealthy() { Optional<Boolean> operatorsAreReady = areOperatorsStarted(operators); if (operatorsAreReady.isEmpty() || !operatorsAreReady.get()) { return false; } Optional<Boolean> runtimeInfosAreHealthy = operators.stream() .map(operator -> checkInformersHealth(operator.getRuntimeInfo())) .reduce((a, b) -> a && b); if (runtimeInfosAreHealthy.isEmpty() || !runtimeInfosAreHealthy.get()) { return false; } for (SentinelManager<?> sentinelManager : sentinelManagers) { if (!sentinelManager.allSentinelsAreHealthy()) { log.error("One sentinel manager {} reported an unhealthy condition.", sentinelManager); return false; } } return true; }
@Test void testHealthProbeWithSentinelHealthWithMultiOperators() { var sentinelManager = mock(SentinelManager.class); HealthProbe healthyProbe = new HealthProbe(operators, Collections.singletonList(sentinelManager)); isRunning.set(true); isRunning2.set(true); when(sentinelManager.allSentinelsAreHealthy()).thenReturn(false); assertFalse( healthyProbe.isHealthy(), "Healthy Probe should fail when sentinels report failures"); when(sentinelManager.allSentinelsAreHealthy()).thenReturn(true); assertTrue(healthyProbe.isHealthy(), "Healthy Probe should pass"); }
@Override public RouteContext route(final ShardingRule shardingRule) { return new RouteContext(); }
@Test void assertRoute() { ShardingIgnoreRoutingEngine ignoreRoutingEngine = new ShardingIgnoreRoutingEngine(); RouteContext routeContext = ignoreRoutingEngine.route(mock(ShardingRule.class)); assertTrue(routeContext.getRouteUnits().isEmpty()); assertTrue(routeContext.getOriginalDataNodes().isEmpty()); assertTrue(routeContext.getRouteStageContexts().isEmpty()); }
@Override public boolean overlap(final Window other) throws IllegalArgumentException { if (getClass() != other.getClass()) { throw new IllegalArgumentException("Cannot compare windows of different type. Other window has type " + other.getClass() + "."); } final TimeWindow otherWindow = (TimeWindow) other; return startMs < otherWindow.endMs && otherWindow.startMs < endMs; }
@Test public void shouldNotOverlapIsOtherWindowIsAfterThisWindow() { /* * This: [-------) * Other: [------) */ assertFalse(window.overlap(new TimeWindow(end, end + 1))); assertFalse(window.overlap(new TimeWindow(end, 150))); assertFalse(window.overlap(new TimeWindow(end + 1, 150))); assertFalse(window.overlap(new TimeWindow(125, 150))); }
public static boolean isUnclosedQuote(final String line) { // CHECKSTYLE_RULES.ON: CyclomaticComplexity int quoteStart = -1; for (int i = 0; i < line.length(); ++i) { if (quoteStart < 0 && isQuoteChar(line, i)) { quoteStart = i; } else if (quoteStart >= 0 && isTwoQuoteStart(line, i) && !isEscaped(line, i)) { // Together, two quotes are effectively an escaped quote and don't act as a quote character. // Skip the next quote char, since it's coupled with the first. i++; } else if (quoteStart >= 0 && isQuoteChar(line, i) && !isEscaped(line, i)) { quoteStart = -1; } } final int commentInd = line.indexOf(COMMENT); if (commentInd < 0) { return quoteStart >= 0; } else if (quoteStart < 0) { return false; } else { return commentInd > quoteStart; } }
@Test public void shouldFindUnclosedQuote_escaped() { // Given: final String line = "some line 'this is in a quote\\'"; // Then: assertThat(UnclosedQuoteChecker.isUnclosedQuote(line), is(true)); }
public static <T> void chainFuture( CompletableFuture<? extends T> sourceFuture, CompletableFuture<T> destinationFuture ) { sourceFuture.whenComplete((BiConsumer<T, Throwable>) (val, throwable) -> { if (throwable != null) { destinationFuture.completeExceptionally(throwable); } else { destinationFuture.complete(val); } }); }
@Test public void testChainFuture() throws Throwable { CompletableFuture<Integer> sourceFuture = new CompletableFuture<>(); CompletableFuture<Number> destinationFuture = new CompletableFuture<>(); FutureUtils.chainFuture(sourceFuture, destinationFuture); assertFalse(sourceFuture.isDone()); assertFalse(destinationFuture.isDone()); assertFalse(sourceFuture.isCancelled()); assertFalse(destinationFuture.isCancelled()); assertFalse(sourceFuture.isCompletedExceptionally()); assertFalse(destinationFuture.isCompletedExceptionally()); sourceFuture.complete(123); assertEquals(123, destinationFuture.get()); }
public static AdminBootstrapAddresses fromConfig(AbstractConfig config) { List<String> bootstrapServers = config.getList(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG); if (bootstrapServers == null) { bootstrapServers = Collections.emptyList(); } List<String> controllerServers = config.getList(AdminClientConfig.BOOTSTRAP_CONTROLLERS_CONFIG); if (controllerServers == null) { controllerServers = Collections.emptyList(); } String clientDnsLookupConfig = config.getString(CommonClientConfigs.CLIENT_DNS_LOOKUP_CONFIG); if (bootstrapServers.isEmpty()) { if (controllerServers.isEmpty()) { throw new ConfigException("You must set either " + CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG + " or " + AdminClientConfig.BOOTSTRAP_CONTROLLERS_CONFIG); } else { return new AdminBootstrapAddresses(true, ClientUtils.parseAndValidateAddresses(controllerServers, clientDnsLookupConfig)); } } else { if (controllerServers.isEmpty()) { return new AdminBootstrapAddresses(false, ClientUtils.parseAndValidateAddresses(bootstrapServers, clientDnsLookupConfig)); } else { throw new ConfigException("You cannot set both " + CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG + " and " + AdminClientConfig.BOOTSTRAP_CONTROLLERS_CONFIG); } } }
@Test public void testTwoBootstrapsSet() { Map<String, Object> map = new HashMap<>(); map.put(AdminClientConfig.BOOTSTRAP_CONTROLLERS_CONFIG, "localhost:9092"); map.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); AdminClientConfig config = new AdminClientConfig(map); assertEquals("You cannot set both bootstrap.servers and bootstrap.controllers", assertThrows(ConfigException.class, () -> AdminBootstrapAddresses.fromConfig(config)). getMessage()); }
public static DataSourceProvider tryGetDataSourceProviderOrNull(Configuration hdpConfig) { final String configuredPoolingType = MetastoreConf.getVar(hdpConfig, MetastoreConf.ConfVars.CONNECTION_POOLING_TYPE); return Iterables.tryFind(FACTORIES, factory -> { String poolingType = factory.getPoolingType(); return poolingType != null && poolingType.equalsIgnoreCase(configuredPoolingType); }).orNull(); }
@Test public void testSetHikariCpStringProperty() throws SQLException { MetastoreConf.setVar(conf, ConfVars.CONNECTION_POOLING_TYPE, HikariCPDataSourceProvider.HIKARI); conf.set(HikariCPDataSourceProvider.HIKARI + ".connectionInitSql", "select 1 from dual"); conf.set(HikariCPDataSourceProvider.HIKARI + ".initializationFailTimeout", "-1"); DataSourceProvider dsp = DataSourceProviderFactory.tryGetDataSourceProviderOrNull(conf); Assert.assertNotNull(dsp); DataSource ds = dsp.create(conf); Assert.assertTrue(ds instanceof HikariDataSource); Assert.assertEquals("select 1 from dual", ((HikariDataSource)ds).getConnectionInitSql()); }
@Override public void configure(Map<String, ?> configs) { final SimpleConfig simpleConfig = new SimpleConfig(CONFIG_DEF, configs); final String field = simpleConfig.getString(FIELD_CONFIG); final String type = simpleConfig.getString(TARGET_TYPE_CONFIG); String formatPattern = simpleConfig.getString(FORMAT_CONFIG); final String unixPrecision = simpleConfig.getString(UNIX_PRECISION_CONFIG); schemaUpdateCache = new SynchronizedCache<>(new LRUCache<>(16)); replaceNullWithDefault = simpleConfig.getBoolean(REPLACE_NULL_WITH_DEFAULT_CONFIG); if (type.equals(TYPE_STRING) && Utils.isBlank(formatPattern)) { throw new ConfigException("TimestampConverter requires format option to be specified when using string timestamps"); } SimpleDateFormat format = null; if (!Utils.isBlank(formatPattern)) { try { format = new SimpleDateFormat(formatPattern); format.setTimeZone(UTC); } catch (IllegalArgumentException e) { throw new ConfigException("TimestampConverter requires a SimpleDateFormat-compatible pattern for string timestamps: " + formatPattern, e); } } config = new Config(field, type, format, unixPrecision); }
@Test public void testConfigInvalidUnixPrecision() { Map<String, String> config = new HashMap<>(); config.put(TimestampConverter.TARGET_TYPE_CONFIG, "unix"); config.put(TimestampConverter.UNIX_PRECISION_CONFIG, "invalid"); assertThrows(ConfigException.class, () -> xformValue.configure(config)); }
@Override @Nullable public <T> ParamConverter<T> getConverter(Class<T> rawType, @Nullable Type genericType, Annotation[] annotations) { if (!rawType.isEnum()) { return null; } @SuppressWarnings("unchecked") final Class<Enum<?>> type = (Class<Enum<?>>) rawType; final Enum<?>[] constants = type.getEnumConstants(); final String parameterName = getParameterNameFromAnnotations(annotations).orElse("Parameter"); final Method fromStringMethod = AccessController.doPrivileged(ReflectionHelper.getFromStringStringMethodPA(rawType)); return new FuzzyEnumParamConverter<>(rawType, fromStringMethod, constants, parameterName); }
@Test void testEnumViaExplicitFromStringNonStatic() { final ParamConverter<ExplicitFromStringNonStatic> converter = getConverter(ExplicitFromStringNonStatic.class); assertThatExceptionOfType(WebApplicationException.class) .isThrownBy(() -> converter.fromString("1")) .matches(e -> e.getResponse().getStatus() == 400) .extracting(Throwable::getMessage) .matches(msg -> msg.contains("A")) .matches(msg -> msg.contains("B")); assertThat(converter.fromString("A")).isSameAs(ExplicitFromStringNonStatic.A); }
@Override public Batch toBatch() { return new SparkBatch( sparkContext, table, readConf, groupingKeyType(), taskGroups(), expectedSchema, hashCode()); }
@TestTemplate public void testUnpartitionedIsNull() throws Exception { createUnpartitionedTable(spark, tableName); SparkScanBuilder builder = scanBuilder(); TruncateFunction.TruncateString function = new TruncateFunction.TruncateString(); UserDefinedScalarFunc udf = toUDF(function, expressions(intLit(4), fieldRef("data"))); Predicate predicate = new Predicate("IS_NULL", expressions(udf)); pushFilters(builder, predicate); Batch scan = builder.build().toBatch(); assertThat(scan.planInputPartitions().length).isEqualTo(10); // NOT IsNull builder = scanBuilder(); predicate = new Not(predicate); pushFilters(builder, predicate); scan = builder.build().toBatch(); assertThat(scan.planInputPartitions().length).isEqualTo(10); }
public void verify( Optional<String> expectedClusterId, OptionalInt expectedNodeId, EnumSet<VerificationFlag> verificationFlags ) { Map<Uuid, String> seenUuids = new HashMap<>(); if (verificationFlags.contains(VerificationFlag.REQUIRE_AT_LEAST_ONE_VALID)) { if (logDirProps.isEmpty()) { throw new RuntimeException("No readable meta.properties files found."); } } for (Entry<String, MetaProperties> entry : logDirProps.entrySet()) { String logDir = entry.getKey(); String path = new File(logDir, META_PROPERTIES_NAME).toString(); MetaProperties metaProps = entry.getValue(); if (verificationFlags.contains(VerificationFlag.REQUIRE_V0)) { if (!metaProps.version().equals(MetaPropertiesVersion.V0)) { throw new RuntimeException("Found unexpected version in " + path + ". " + "ZK-based brokers that are not migrating only support version 0 " + "(which is implicit when the `version` field is missing)."); } } if (!metaProps.clusterId().isPresent()) { if (metaProps.version().alwaysHasClusterId()) { throw new RuntimeException("cluster.id was not specified in the v1 file: " + path); } } else if (!expectedClusterId.isPresent()) { expectedClusterId = metaProps.clusterId(); } else if (!metaProps.clusterId().get().equals(expectedClusterId.get())) { throw new RuntimeException("Invalid cluster.id in: " + path + ". Expected " + expectedClusterId.get() + ", but read " + metaProps.clusterId().get()); } if (!metaProps.nodeId().isPresent()) { if (metaProps.version().alwaysHasNodeId()) { throw new RuntimeException("node.id was not specified in " + path); } } else if (!expectedNodeId.isPresent()) { expectedNodeId = metaProps.nodeId(); } else if (metaProps.nodeId().getAsInt() != expectedNodeId.getAsInt()) { throw new RuntimeException("Stored node id " + metaProps.nodeId().getAsInt() + " doesn't match previous node id " + expectedNodeId.getAsInt() + " in " + path + ". If you moved your data, make sure your configured node id matches. 
If you " + "intend to create a new node, you should remove all data in your data " + "directories."); } if (metaProps.directoryId().isPresent()) { if (DirectoryId.reserved(metaProps.directoryId().get())) { throw new RuntimeException("Invalid resrved directory ID " + metaProps.directoryId().get() + " found in " + logDir); } String prevLogDir = seenUuids.put(metaProps.directoryId().get(), logDir); if (prevLogDir != null) { throw new RuntimeException("Duplicate directory ID " + metaProps.directoryId() + " found. It was the ID of " + prevLogDir + ", " + "but also of " + logDir); } } } if (verificationFlags.contains(VerificationFlag.REQUIRE_METADATA_LOG_DIR)) { if (!metadataLogDir.isPresent()) { throw new RuntimeException("No metadata log directory was specified."); } } if (metadataLogDir.isPresent()) { if (errorLogDirs.contains(metadataLogDir.get())) { throw new RuntimeException("Encountered I/O error in metadata log directory " + metadataLogDir.get() + ". Cannot continue."); } } }
// Verifies that verify() fails when the configured metadata log directory is
// among the directories that hit an I/O error, with the exact error message.
@Test public void testVerificationFailureOnMetadataLogDirWithError() { MetaPropertiesEnsemble ensemble = new MetaPropertiesEnsemble( Collections.emptySet(), Collections.singleton("/tmp/foo1"), Collections.emptyMap(), Optional.of("/tmp/foo1")); assertEquals("Encountered I/O error in metadata log directory /tmp/foo1. Cannot continue.", assertThrows(RuntimeException.class, () -> ensemble.verify(Optional.empty(), OptionalInt.empty(), EnumSet.of(REQUIRE_METADATA_LOG_DIR))). getMessage()); }
// Deserializes a JSON string into the matching AbstractHealthChecker subtype,
// wrapping any I/O / parse failure in a NacosDeserializationException (cause
// preserved).
public static AbstractHealthChecker deserialize(String jsonString) { try { return MAPPER.readValue(jsonString, AbstractHealthChecker.class); } catch (IOException e) { throw new NacosDeserializationException(AbstractHealthChecker.class, e); } }
// Verifies that a {"type":"TCP"} payload deserializes to the Tcp checker subtype.
@Test void testDeserialize() { String tcpString = "{\"type\":\"TCP\"}"; AbstractHealthChecker actual = HealthCheckerFactory.deserialize(tcpString); assertEquals(Tcp.class, actual.getClass()); }
// Returns this component's stable identifier (the ID constant).
@Override @NonNull public String getId() { return ID; }
// Verifies that the SCM validate endpoint rejects a malformed repository URL
// ("htt") with HTTP 400 and an "invalid url" message, for a user with only
// READ and Item.CREATE permissions.
@Test public void shouldNotCreateCredentialsForBadUrl1() throws Exception { User user = login("Ken", "Create", "ken@create.item"); MockAuthorizationStrategy a = new MockAuthorizationStrategy(); a.grant(Jenkins.READ, Item.CREATE).everywhere().to(user.getId()); j.jenkins.setAuthorizationStrategy(a); String scmPath = "/organizations/" + getOrgName() + "/scm/git/"; // First create a credential String scmValidatePath = scmPath + "validate"; // We're relying on github letting you do a git-ls for repos with bad creds so long as they're public Map params = MapsHelper.of( "userName", "someguy", "password", "password", "repositoryUrl", "htt" ); Map resp = new RequestBuilder(baseUrl) .status(400) .jwtToken(getJwtToken(j.jenkins,user.getId(), user.getId())) .crumb( crumb ) .data(params) .post(scmValidatePath) .build(Map.class); assertTrue(resp.get("message").toString().toLowerCase().contains("invalid url")); }
// Builds the serde registry for one cluster: (1) instantiates serdes declared
// in config (rejecting missing or duplicate names), each with its own indexed
// property resolver; (2) auto-configures any remaining built-in serdes that
// can configure themselves from cluster/global properties; (3) registers
// topic-related serdes; and (4) resolves default key/value serdes (value falls
// back to SchemaRegistry, then ProtobufFile) plus a fallback serde.
public ClusterSerdes init(Environment env, ClustersProperties clustersProperties, int clusterIndex) { ClustersProperties.Cluster clusterProperties = clustersProperties.getClusters().get(clusterIndex); log.debug("Configuring serdes for cluster {}", clusterProperties.getName()); var globalPropertiesResolver = new PropertyResolverImpl(env); var clusterPropertiesResolver = new PropertyResolverImpl(env, "kafka.clusters." + clusterIndex); Map<String, SerdeInstance> registeredSerdes = new LinkedHashMap<>(); // initializing serdes from config if (clusterProperties.getSerde() != null) { for (int i = 0; i < clusterProperties.getSerde().size(); i++) { SerdeConfig serdeConfig = clusterProperties.getSerde().get(i); if (Strings.isNullOrEmpty(serdeConfig.getName())) { throw new ValidationException("'name' property not set for serde: " + serdeConfig); } if (registeredSerdes.containsKey(serdeConfig.getName())) { throw new ValidationException("Multiple serdes with same name: " + serdeConfig.getName()); } var instance = createSerdeFromConfig( serdeConfig, new PropertyResolverImpl(env, "kafka.clusters." + clusterIndex + ".serde." 
+ i + ".properties"), clusterPropertiesResolver, globalPropertiesResolver ); registeredSerdes.put(serdeConfig.getName(), instance); } } // initializing remaining built-in serdes with empty selection patterns builtInSerdeClasses.forEach((name, clazz) -> { if (!registeredSerdes.containsKey(name)) { BuiltInSerde serde = createSerdeInstance(clazz); if (autoConfigureSerde(serde, clusterPropertiesResolver, globalPropertiesResolver)) { registeredSerdes.put(name, new SerdeInstance(name, serde, null, null, null)); } } }); registerTopicRelatedSerde(registeredSerdes); return new ClusterSerdes( registeredSerdes, Optional.ofNullable(clusterProperties.getDefaultKeySerde()) .map(name -> Preconditions.checkNotNull(registeredSerdes.get(name), "Default key serde not found")) .orElse(null), Optional.ofNullable(clusterProperties.getDefaultValueSerde()) .map(name -> Preconditions.checkNotNull(registeredSerdes.get(name), "Default value serde not found")) .or(() -> Optional.ofNullable(registeredSerdes.get(SchemaRegistrySerde.name()))) .or(() -> Optional.ofNullable(registeredSerdes.get(ProtobufFileSerde.name()))) .orElse(null), createFallbackSerde() ); }
// Verifies that a config entry naming a built-in serde (with no explicit
// properties) is auto-configured and keeps the configured key/value topic
// patterns.
@Test void serdeWithBuiltInNameAndNoPropertiesIsAutoConfiguredIfPossible() { ClustersProperties.SerdeConfig serdeConfig = new ClustersProperties.SerdeConfig(); serdeConfig.setName("BuiltIn1"); // supports auto-configuration serdeConfig.setTopicKeysPattern("keys"); serdeConfig.setTopicValuesPattern("vals"); var serdes = init(serdeConfig); SerdeInstance autoConfiguredSerde = serdes.serdes.get("BuiltIn1"); verifyAutoConfigured(autoConfiguredSerde); verifyPatternsMatch(serdeConfig, autoConfiguredSerde); }
// Fetches the connections for the given id, delegating to the repository.
public List<Connection> listWithOneConnection(Long id) { return connectionRepository.findListById(id); }
// Verifies that the service delegates exactly once to findListById and
// returns the (non-null) repository result.
@Test void listWithOneConnection() { when(connectionRepositoryMock.findListById(anyLong())).thenReturn(new ArrayList<>()); List<Connection> result = connectionServiceMock.listWithOneConnection(1L); verify(connectionRepositoryMock, times(1)).findListById(anyLong()); assertNotNull(result); }
// Standard deviation of an exponential distribution, 1 / lambda (equal to its
// mean). NOTE(review): assumes the lambda field is a floating-point type so
// the division is not integer division — confirm against the field declaration.
@Override public double sd() { return 1 / lambda; }
// Verifies sd() == 1/lambda for several rates (rand() is called only to
// exercise the sampler; it does not affect sd()).
@Test public void testSd() { System.out.println("sd"); ExponentialDistribution instance = new ExponentialDistribution(1.0); instance.rand(); assertEquals(1.0, instance.sd(), 1E-7); instance = new ExponentialDistribution(2.0); instance.rand(); assertEquals(0.5, instance.sd(), 1E-7); instance = new ExponentialDistribution(3.0); instance.rand(); assertEquals(0.3333333, instance.sd(), 1E-7); instance = new ExponentialDistribution(4.0); instance.rand(); assertEquals(0.25, instance.sd(), 1E-7); }
/**
 * Returns Euler's number e raised to the power of an INT value.
 *
 * @param exponent the exponent to raise e to; may be null
 * @return e^exponent, or the overload's null-handling result when exponent is null
 */
@Udf(description = "Returns Euler's number e raised to the power of an INT value.")
public Double exp(
    @UdfParameter(
        value = "exponent",
        description = "the exponent to raise e to."
    ) final Integer exponent
) {
    // Delegate to the Double overload, propagating null rather than unboxing it.
    if (exponent == null) {
        return exp((Double) null);
    }
    return exp(exponent.doubleValue());
}
// Verifies e^-1 across the Integer, Long and Double overloads.
@Test public void shouldHandleNegative() { assertThat(udf.exp(-1), is(0.36787944117144233)); assertThat(udf.exp(-1L), is(0.36787944117144233)); assertThat(udf.exp(-1.0), is(0.36787944117144233)); }
// Wraps the given iterator so callers cannot mutate through it; an iterator
// that is already an UnmodifiableIterator is returned as-is to avoid
// double-wrapping.
public static <T> Iterator<T> asReadOnlyIterator(Iterator<T> iterator) { if (iterator instanceof UnmodifiableIterator) { return iterator; } return new UnmodifiableIterator<>() { @Override public boolean hasNext() { return iterator.hasNext(); } @Override public T next() { return iterator.next(); } }; }
// Verifies the no-double-wrap guarantee: passing an already-read-only iterator
// returns the same instance.
@Test public void test_asReadOnlyIterator_returns_same_iterator_when_given_iterator_is_read_only() { Iterator<Integer> iterator = IterableUtil.asReadOnlyIterator(numbers.iterator()); assertTrue(iterator == IterableUtil.asReadOnlyIterator(iterator)); }
// Returns the rate limiter registered under the given name, creating it with
// the registry's default configuration when absent.
@Override public RateLimiter rateLimiter(final String name) { return rateLimiter(name, getDefaultConfig()); }
// Verifies registry caching: the same name yields the same instance and
// different names yield different instances.
@Test public void rateLimiterPositive() throws Exception { RateLimiterRegistry registry = RateLimiterRegistry.of(config); RateLimiter firstRateLimiter = registry.rateLimiter("test"); RateLimiter anotherLimit = registry.rateLimiter("test1"); RateLimiter sameAsFirst = registry.rateLimiter("test"); then(firstRateLimiter).isEqualTo(sameAsFirst); then(firstRateLimiter).isNotEqualTo(anotherLimit); }
/**
 * Returns the array registered under {@code name}, lazily creating and
 * caching it on first access.
 *
 * @param name the array's config name
 * @return the cached or newly created {@code Array}
 */
public Array getArray(String name) {
    // computeIfAbsent replaces the manual get/null-check/put sequence;
    // validateArray still runs only the first time a name is seen, before
    // the Array is constructed (nothing is cached if validation throws).
    return arrayMap.computeIfAbsent(name, n -> {
        validateArray(n);
        return new Array(configDefinition, n);
    });
}
// Verifies that mixing append-style and index-style writes on the same array
// is rejected with IllegalStateException.
@Test(expected=IllegalStateException.class) public void require_that_index_conflicts_with_append() { ConfigPayloadBuilder builder = new ConfigPayloadBuilder(); ConfigPayloadBuilder.Array array = builder.getArray("foo"); array.append("baz"); array.set(0, "bar"); }
// Creates a multimap view transform; the false argument selects the
// non-"withSingletonValues" variant of AsMultimap.
public static <K, V> AsMultimap<K, V> asMultimap() { return new AsMultimap<>(false); }
// Verifies that a windowed multimap side input is matched to the main input by
// window: each main element looks up values for its first letter and only sees
// side-input entries from the same 10ms fixed window.
@Test @Category(ValidatesRunner.class) public void testWindowedMultimapSideInput() { final PCollectionView<Map<String, Iterable<Integer>>> view = pipeline .apply( "CreateSideInput", Create.timestamped( TimestampedValue.of(KV.of("a", 1), new Instant(1)), TimestampedValue.of(KV.of("a", 1), new Instant(2)), TimestampedValue.of(KV.of("a", 2), new Instant(7)), TimestampedValue.of(KV.of("b", 3), new Instant(14)))) .apply("SideWindowInto", Window.into(FixedWindows.of(Duration.millis(10)))) .apply(View.asMultimap()); PCollection<KV<String, Integer>> output = pipeline .apply( "CreateMainInput", Create.timestamped( TimestampedValue.of("apple", new Instant(5)), TimestampedValue.of("banana", new Instant(13)), TimestampedValue.of("blackberry", new Instant(16)))) .apply("MainWindowInto", Window.into(FixedWindows.of(Duration.millis(10)))) .apply( "OutputSideInputs", ParDo.of( new DoFn<String, KV<String, Integer>>() { @ProcessElement public void processElement(ProcessContext c) { for (Integer v : c.sideInput(view).get(c.element().substring(0, 1))) { c.output(KV.of(c.element(), v)); } } }) .withSideInputs(view)); PAssert.that(output) .containsInAnyOrder( KV.of("apple", 1), KV.of("apple", 1), KV.of("apple", 2), KV.of("banana", 3), KV.of("blackberry", 3)); pipeline.run(); }
// Creates a Select transform that flattens nested schemas, joining nested
// field names with the default concatenation function and no name overrides.
public static <T> Flattened<T> flattenedSchema() { return new AutoValue_Select_Flattened.Builder<T>() .setNameFn(CONCAT_FIELD_NAMES) .setNameOverrides(Collections.emptyMap()) .build(); }
// Verifies that flattening an already-flat schema is an identity transform.
@Test @Category(NeedsRunner.class) public void testFlatSchema() { List<Row> rows = IntStream.rangeClosed(0, 2) .mapToObj(i -> Row.withSchema(SIMPLE_SCHEMA).addValues(i, Integer.toString(i)).build()) .collect(Collectors.toList()); PCollection<Row> unnested = pipeline .apply(Create.of(rows).withRowSchema(SIMPLE_SCHEMA)) .apply(Select.flattenedSchema()); PAssert.that(unnested).containsInAnyOrder(rows); pipeline.run(); }
// Handles one uploaded jar part: refreshes the session timestamp (to avoid
// expiry), validates parameters / part ordering / part checksum, appends the
// part bytes to the session's jar file, and — once the final part arrives —
// validates the whole-jar checksum and returns the job metadata; returns null
// while parts are still outstanding.
public JobMetaDataParameterObject processJobMultipart(JobMultiPartParameterObject parameterObject) throws IOException, NoSuchAlgorithmException { // Change the timestamp in the beginning to avoid expiration changeLastUpdatedTime(); validateReceivedParameters(parameterObject); validateReceivedPartNumbersAreExpected(parameterObject); validatePartChecksum(parameterObject); // Parts numbers are good. Save them currentPart = parameterObject.getCurrentPartNumber(); totalPart = parameterObject.getTotalPartNumber(); Path jarPath = jobMetaDataParameterObject.getJarPath(); // Append data to file try (OutputStream outputStream = Files.newOutputStream(jarPath, StandardOpenOption.CREATE, StandardOpenOption.APPEND)) { outputStream.write(parameterObject.getPartData(), 0, parameterObject.getPartSize()); } if (LOGGER.isInfoEnabled()) { String message = String.format("Session : %s jarPath: %s PartNumber: %d/%d Total file size : %d bytes", parameterObject.getSessionId(), jarPath, currentPart, totalPart, Files.size(jarPath)); LOGGER.info(message); } JobMetaDataParameterObject result = null; // If parts are complete if (currentPart == totalPart) { validateJarChecksum(); result = jobMetaDataParameterObject; } return result; }
// Verifies that a total part number of 0 (with otherwise empty parameters) is
// rejected with a JetException during validation.
@Test public void testInvalidTotalPart() { JobMultiPartParameterObject jobMultiPartParameterObject = new JobMultiPartParameterObject(); jobMultiPartParameterObject.setSessionId(null); jobMultiPartParameterObject.setCurrentPartNumber(1); jobMultiPartParameterObject.setTotalPartNumber(0); jobMultiPartParameterObject.setPartData(null); jobMultiPartParameterObject.setPartSize(0); Assert.assertThrows(JetException.class, () -> jobUploadStatus.processJobMultipart(jobMultiPartParameterObject)); }
// Merges incoming timestamp column statistics into the aggregate: low/high
// bounds, null count, NDV (via the combined distinct-value estimators, keeping
// the merged estimator on the aggregate), and the KLL histogram estimator,
// then writes the merged data back into the aggregate stats object.
@Override public void merge(ColumnStatisticsObj aggregateColStats, ColumnStatisticsObj newColStats) { LOG.debug("Merging statistics: [aggregateColStats:{}, newColStats: {}]", aggregateColStats, newColStats); TimestampColumnStatsDataInspector aggregateData = timestampInspectorFromStats(aggregateColStats); TimestampColumnStatsDataInspector newData = timestampInspectorFromStats(newColStats); Timestamp lowValue = mergeLowValue(getLowValue(aggregateData), getLowValue(newData)); if (lowValue != null) { aggregateData.setLowValue(lowValue); } Timestamp highValue = mergeHighValue(getHighValue(aggregateData), getHighValue(newData)); if (highValue != null) { aggregateData.setHighValue(highValue); } aggregateData.setNumNulls(mergeNumNulls(aggregateData.getNumNulls(), newData.getNumNulls())); NumDistinctValueEstimator oldNDVEst = aggregateData.getNdvEstimator(); NumDistinctValueEstimator newNDVEst = newData.getNdvEstimator(); List<NumDistinctValueEstimator> ndvEstimatorsList = Arrays.asList(oldNDVEst, newNDVEst); aggregateData.setNumDVs(mergeNumDistinctValueEstimator(aggregateColStats.getColName(), ndvEstimatorsList, aggregateData.getNumDVs(), newData.getNumDVs())); aggregateData.setNdvEstimator(ndvEstimatorsList.get(0)); KllHistogramEstimator oldKllEst = aggregateData.getHistogramEstimator(); KllHistogramEstimator newKllEst = newData.getHistogramEstimator(); aggregateData.setHistogramEstimator(mergeHistogramEstimator(aggregateColStats.getColName(), oldKllEst, newKllEst)); aggregateColStats.getStatsData().setTimestampStats(aggregateData); }
// Verifies that merging an empty aggregate with populated stats adopts the
// new stats wholesale (bounds, nulls, NDV, HLL and KLL sketches).
@Test public void testMergeNullWithNonNullValues() { ColumnStatisticsObj aggrObj = createColumnStatisticsObj(new ColStatsBuilder<>(Timestamp.class) .low(null) .high(null) .numNulls(0) .numDVs(0) .build()); ColumnStatisticsObj newObj = createColumnStatisticsObj(new ColStatsBuilder<>(Timestamp.class) .low(TS_1) .high(TS_3) .numNulls(4) .numDVs(2) .hll(TS_1.getSecondsSinceEpoch(), TS_3.getSecondsSinceEpoch(), TS_3.getSecondsSinceEpoch()) .kll(TS_1.getSecondsSinceEpoch(), TS_3.getSecondsSinceEpoch(), TS_3.getSecondsSinceEpoch()) .build()); merger.merge(aggrObj, newObj); assertEquals(newObj.getStatsData(), aggrObj.getStatsData()); }
// Runs the project-measures support-statistics query against Elasticsearch
// and maps the response into a ProjectMeasuresStatistics aggregate.
public ProjectMeasuresStatistics searchSupportStatistics() { SearchRequest projectMeasuresSearchRequest = buildProjectMeasureSearchRequest(); SearchResponse projectMeasures = client.search(projectMeasuresSearchRequest); return buildProjectMeasuresStatistics(projectMeasures); }
// Verifies that application documents are excluded from the statistics: with
// only applications indexed, the project count and per-language breakdown are
// both empty.
@Test public void search_statistics_should_count_0_if_no_projects() { es.putDocuments(TYPE_PROJECT_MEASURES, // insert applications newDoc(ComponentTesting.newApplication(), "lines", 1000, "coverage", 70) .setLanguages(Arrays.asList("java", "python", "kotlin")) .setNclocLanguageDistributionFromMap(ImmutableMap.of("java", 300, "python", 100, "kotlin", 404)), newDoc(ComponentTesting.newApplication(), "lines", 20, "coverage", 80) .setLanguages(Arrays.asList("java", "python", "kotlin")) .setNclocLanguageDistributionFromMap(ImmutableMap.of("java", 300, "python", 100, "kotlin", 404))); ProjectMeasuresStatistics result = underTest.searchSupportStatistics(); assertThat(result.getProjectCount()).isZero(); assertThat(result.getProjectCountByLanguage()).isEmpty(); }
// Reads the on-disk failover switch file: returns FALSE when the file is
// missing; when the file has been modified since the last read, parses its
// lines for the on/off markers (turning failover on also kicks off a
// FailoverFileReader to load cached instances); otherwise returns the cached
// switch state. Any error logs and fails safe to FALSE.
@Override public FailoverSwitch getSwitch() { try { File switchFile = Paths.get(failoverDir, UtilAndComs.FAILOVER_SWITCH).toFile(); if (!switchFile.exists()) { NAMING_LOGGER.debug("failover switch is not found, {}", switchFile.getName()); switchParams.put(FAILOVER_MODE_PARAM, Boolean.FALSE.toString()); return FAILOVER_SWITCH_FALSE; } long modified = switchFile.lastModified(); if (lastModifiedMillis < modified) { lastModifiedMillis = modified; String failover = ConcurrentDiskUtil.getFileContent(switchFile.getPath(), Charset.defaultCharset().toString()); if (!StringUtils.isEmpty(failover)) { String[] lines = failover.split(DiskCache.getLineSeparator()); for (String line : lines) { String line1 = line.trim(); if (IS_FAILOVER_MODE.equals(line1)) { switchParams.put(FAILOVER_MODE_PARAM, Boolean.TRUE.toString()); NAMING_LOGGER.info("failover-mode is on"); new FailoverFileReader().run(); return FAILOVER_SWITCH_TRUE; } else if (NO_FAILOVER_MODE.equals(line1)) { switchParams.put(FAILOVER_MODE_PARAM, Boolean.FALSE.toString()); NAMING_LOGGER.info("failover-mode is off"); return FAILOVER_SWITCH_FALSE; } } } } return switchParams.get(FAILOVER_MODE_PARAM).equals(Boolean.TRUE.toString()) ? FAILOVER_SWITCH_TRUE : FAILOVER_SWITCH_FALSE; } catch (Throwable e) { NAMING_LOGGER.error("[NA] failed to read failover switch.", e); switchParams.put(FAILOVER_MODE_PARAM, Boolean.FALSE.toString()); return FAILOVER_SWITCH_FALSE; } }
// Verifies that a missing failover switch file yields a disabled switch.
@Test void testGetSwitchWithNonExistFailoverSwitchFile() { FailoverSwitch actual = dataSource.getSwitch(); assertFalse(actual.getEnabled()); }
// Updates a code-generation table definition and its columns in one
// transaction: validates the table exists; for SUB-template tables also
// validates the referenced master table and the sub-join column; then persists
// the table row and each column row.
@Override @Transactional(rollbackFor = Exception.class) public void updateCodegen(CodegenUpdateReqVO updateReqVO) { // 校验是否已经存在 if (codegenTableMapper.selectById(updateReqVO.getTable().getId()) == null) { throw exception(CODEGEN_TABLE_NOT_EXISTS); } // 校验主表字段存在 if (Objects.equals(updateReqVO.getTable().getTemplateType(), CodegenTemplateTypeEnum.SUB.getType())) { if (codegenTableMapper.selectById(updateReqVO.getTable().getMasterTableId()) == null) { throw exception(CODEGEN_MASTER_TABLE_NOT_EXISTS, updateReqVO.getTable().getMasterTableId()); } if (CollUtil.findOne(updateReqVO.getColumns(), // 关联主表的字段不存在 column -> column.getId().equals(updateReqVO.getTable().getSubJoinColumnId())) == null) { throw exception(CODEGEN_SUB_COLUMN_NOT_EXISTS, updateReqVO.getTable().getSubJoinColumnId()); } } // 更新 table 表定义 CodegenTableDO updateTableObj = BeanUtils.toBean(updateReqVO.getTable(), CodegenTableDO.class); codegenTableMapper.updateById(updateTableObj); // 更新 column 字段定义 List<CodegenColumnDO> updateColumnObjs = BeanUtils.toBean(updateReqVO.getColumns(), CodegenColumnDO.class); updateColumnObjs.forEach(updateColumnObj -> codegenColumnMapper.updateById(updateColumnObj)); }
// Verifies that updating a SUB-template table whose master table id does not
// exist fails with CODEGEN_MASTER_TABLE_NOT_EXISTS.
@Test public void testUpdateCodegen_sub_masterNotExists() { // mock 数据 CodegenTableDO table = randomPojo(CodegenTableDO.class, o -> o.setTemplateType(CodegenTemplateTypeEnum.SUB.getType()) .setScene(CodegenSceneEnum.ADMIN.getScene())); codegenTableMapper.insert(table); // 准备参数 CodegenUpdateReqVO updateReqVO = randomPojo(CodegenUpdateReqVO.class, o -> o.getTable().setId(table.getId()) .setTemplateType(CodegenTemplateTypeEnum.SUB.getType())); // 调用,并断言 assertServiceException(() -> codegenService.updateCodegen(updateReqVO), CODEGEN_MASTER_TABLE_NOT_EXISTS, updateReqVO.getTable().getMasterTableId()); }
// Every column in this metadata is read-only; only column index 1 is valid
// (rejected with IllegalArgumentException otherwise).
@Override public boolean isReadOnly(final int column) { Preconditions.checkArgument(1 == column); return true; }
// Verifies that column 1 reports read-only.
@Test void assertIsReadOnly() throws SQLException { assertTrue(actualMetaData.isReadOnly(1)); }
// Rewrites a Parquet file, encrypting the given column paths with the supplied
// encryption properties: builds RewriteOptions, streams all blocks through a
// ParquetRewriter, and closes it to finalize the output file.
public void encryptColumns( String inputFile, String outputFile, List<String> paths, FileEncryptionProperties fileEncryptionProperties) throws IOException { Path inPath = new Path(inputFile); Path outPath = new Path(outputFile); RewriteOptions options = new RewriteOptions.Builder(conf, inPath, outPath) .encrypt(paths) .encryptionProperties(fileEncryptionProperties) .build(); ParquetRewriter rewriter = new ParquetRewriter(options); rewriter.processBlocks(); rewriter.close(); }
// Verifies selective column encryption: after rewriting with AES_GCM_CTR_V1,
// exactly the requested columns report encrypted and all others do not.
@Test public void testEncryptSomeColumns() throws IOException { String[] encryptColumns = {"DocId", "Name", "Links.Forward"}; testSetup("GZIP"); columnEncryptor.encryptColumns( inputFile.getFileName(), outputFile, Arrays.asList(encryptColumns), EncDecProperties.getFileEncryptionProperties(encryptColumns, ParquetCipher.AES_GCM_CTR_V1, false)); ParquetMetadata metaData = getParquetMetadata(EncDecProperties.getFileDecryptionProperties()); assertFalse(metaData.getBlocks().isEmpty()); List<ColumnChunkMetaData> columns = metaData.getBlocks().get(0).getColumns(); Set<String> set = new HashSet<>(Arrays.asList(encryptColumns)); for (ColumnChunkMetaData column : columns) { if (set.contains(column.getPath().toDotString())) { assertTrue(column.isEncrypted()); } else { assertFalse(column.isEncrypted()); } } }
// Batch-enables/disables app auth records; a non-blank service result string
// is treated as an error message, otherwise a success result is returned.
@PostMapping("/batchEnabled") @RequiresPermissions("system:authen:disable") public ShenyuAdminResult batchEnabled(@Valid @RequestBody final BatchCommonDTO batchCommonDTO) { final String result = appAuthService.enabled(batchCommonDTO.getIds(), batchCommonDTO.getEnabled()); if (StringUtils.isNoneBlank(result)) { return ShenyuAdminResult.error(result); } return ShenyuAdminResult.success(ShenyuResultMessage.ENABLE_SUCCESS); }
// Verifies the happy path of POST /appAuth/batchEnabled: an empty service
// result maps to HTTP 200 with the ENABLE_SUCCESS message.
@Test public void testBatchEnabled() throws Exception { final BatchCommonDTO batchCommonDTO = new BatchCommonDTO(); batchCommonDTO.setIds(Arrays.asList("0001", "0002")); batchCommonDTO.setEnabled(true); given(this.appAuthService.enabled(batchCommonDTO.getIds(), batchCommonDTO.getEnabled())) .willReturn(StringUtils.EMPTY); this.mockMvc.perform(MockMvcRequestBuilders.post("/appAuth/batchEnabled") .contentType(MediaType.APPLICATION_JSON) .content(GsonUtils.getInstance().toJson(batchCommonDTO))) .andExpect(status().isOk()) .andExpect(jsonPath("$.message", is(ShenyuResultMessage.ENABLE_SUCCESS))) .andReturn(); }
// Builds a foreign-key table-table join: compiles the left-side key extraction
// expression (from either a column name or a join expression — exactly one
// must be present), derives join params and the physical result schema,
// constructs key/value serdes, performs the Kafka Streams FK join (INNER or
// LEFT), and wraps the result in an unmaterialized KTableHolder keyed like
// the left input.
public static <KLeftT, KRightT> KTableHolder<KLeftT> build( final KTableHolder<KLeftT> left, final KTableHolder<KRightT> right, final ForeignKeyTableTableJoin<KLeftT, KRightT> join, final RuntimeBuildContext buildContext ) { final LogicalSchema leftSchema = left.getSchema(); final LogicalSchema rightSchema = right.getSchema(); final ProcessingLogger logger = buildContext.getProcessingLogger( join.getProperties().getQueryContext() ); final ExpressionEvaluator expressionEvaluator; final CodeGenRunner codeGenRunner = new CodeGenRunner( leftSchema, buildContext.getKsqlConfig(), buildContext.getFunctionRegistry() ); final Optional<ColumnName> leftColumnName = join.getLeftJoinColumnName(); final Optional<Expression> leftJoinExpression = join.getLeftJoinExpression(); if (leftColumnName.isPresent()) { expressionEvaluator = codeGenRunner.buildCodeGenFromParseTree( new UnqualifiedColumnReferenceExp(leftColumnName.get()), "Left Join Expression" ); } else if (leftJoinExpression.isPresent()) { expressionEvaluator = codeGenRunner.buildCodeGenFromParseTree( leftJoinExpression.get(), "Left Join Expression" ); } else { throw new IllegalStateException("Both leftColumnName and leftJoinExpression are empty."); } final ForeignKeyJoinParams<KRightT> joinParams = ForeignKeyJoinParamsFactory .create(expressionEvaluator, leftSchema, rightSchema, logger); final Formats formats = join.getFormats(); final PhysicalSchema physicalSchema = PhysicalSchema.from( joinParams.getSchema(), formats.getKeyFeatures(), formats.getValueFeatures() ); final Serde<KLeftT> keySerde = left.getExecutionKeyFactory().buildKeySerde( formats.getKeyFormat(), physicalSchema, join.getProperties().getQueryContext() ); final Serde<GenericRow> valSerde = buildContext.buildValueSerde( formats.getValueFormat(), physicalSchema, join.getProperties().getQueryContext() ); final KTable<KLeftT, GenericRow> result; switch (join.getJoinType()) { case INNER: result = left.getTable().join( right.getTable(), 
joinParams.getKeyExtractor(), joinParams.getJoiner(), buildContext.getMaterializedFactory().create(keySerde, valSerde) ); break; case LEFT: result = left.getTable().leftJoin( right.getTable(), joinParams.getKeyExtractor(), joinParams.getJoiner(), buildContext.getMaterializedFactory().create(keySerde, valSerde) ); break; default: throw new IllegalStateException("invalid join type: " + join.getJoinType()); } return KTableHolder.unmaterialized( result, joinParams.getSchema(), left.getExecutionKeyFactory() ); }
// Verifies that an inner FK join on a non-key column wires KTable.join with
// a key extractor that pulls the foreign key from the left row, the expected
// value joiner, and returns an unmaterialized holder over the joined table.
@Test @SuppressWarnings({"unchecked", "rawtypes"}) public void shouldDoInnerJoinOnNonKey() { // Given: givenInnerJoin(left, JOIN_COLUMN); // When: final KTableHolder<Struct> result = join.build(planBuilder, planInfo); // Then: final ArgumentCaptor<KsqlKeyExtractor> ksqlKeyExtractor = ArgumentCaptor.forClass(KsqlKeyExtractor.class); verify(leftKTable).join( same(rightKTable), ksqlKeyExtractor.capture(), eq(new KsqlValueJoiner(LEFT_SCHEMA.value().size(), RIGHT_SCHEMA.value().size(), 0)), any(Materialized.class) ); verifyNoMoreInteractions(leftKTable, rightKTable, resultKTable); final GenericKey extractedKey = GenericKey.genericKey(FOREIGN_KEY); assertThat(ksqlKeyExtractor.getValue().apply(LEFT_ROW), is(extractedKey)); assertThat(result.getTable(), is(resultKTable)); assertThat(result.getExecutionKeyFactory(), is(executionKeyFactory)); }
/**
 * Converts a file to its {@code file:} URI string, normalising the scheme
 * prefix to the triple-slash form ({@code file:///...}).
 *
 * @param file the file to convert
 * @return the ASCII URI string with a {@code file:///} prefix
 */
public static String toFileURI(File file) {
    // File.toURI() typically yields "file:/..."; rewrite just the leading
    // scheme portion to the RFC-style "file:///...". The regex is anchored,
    // so replaceFirst is equivalent to the anchored replaceAll it replaces.
    final String asciiUri = file.toURI().toASCIIString();
    return asciiUri.replaceFirst("^file:/", "file:///");
}
// Windows-only: verifies drive-letter paths (including ones with spaces) are
// converted to file:///c:/... URIs with percent-encoded spaces.
@Test @EnabledOnOs(OS.WINDOWS) void shouldCreateFileURIForFileOnWindows() { assertThat(FileUtil.toFileURI(new File("c:\\foo")).startsWith("file:///c:/foo")).isTrue(); assertThat(FileUtil.toFileURI(new File("c:\\a dir with spaces\\foo")).startsWith("file:///c:/a%20dir%20with%20spaces/foo")).isTrue(); }
// Convenience overload: parses forwarded/non-forwarded/read-field strings into
// the given semantic properties, delegating with skipIncompatibleTypes=false.
public static void getSemanticPropsSingleFromString( SingleInputSemanticProperties result, String[] forwarded, String[] nonForwarded, String[] readSet, TypeInformation<?> inType, TypeInformation<?> outType) { getSemanticPropsSingleFromString( result, forwarded, nonForwarded, readSet, inType, outType, false); }
// Verifies non-forwarded declarations over nested tuple fields: excluded
// (sub)fields lose their forwarding targets while untouched fields keep
// forwarding to their own positions; also checks "f1.f0.*" and "f1.f0"
// behave identically.
@Test void testNonForwardedNestedTuple() { String[] nonForwardedFields = {"f1.f0.*; f1.f2; f0"}; SingleInputSemanticProperties sp = new SingleInputSemanticProperties(); SemanticPropUtil.getSemanticPropsSingleFromString( sp, null, nonForwardedFields, null, deepNestedTupleType, deepNestedTupleType); assertThat(sp.getForwardingTargetFields(0, 0)).isEmpty(); assertThat(sp.getForwardingTargetFields(0, 1)).isEmpty(); assertThat(sp.getForwardingTargetFields(0, 2)).isEmpty(); assertThat(sp.getForwardingTargetFields(0, 3)).isEmpty(); assertThat(sp.getForwardingTargetFields(0, 4)).contains(4); assertThat(sp.getForwardingTargetFields(0, 5)).isEmpty(); assertThat(sp.getForwardingTargetFields(0, 6)).contains(6); nonForwardedFields[0] = "f1.f0; f1.f2; f0"; sp = new SingleInputSemanticProperties(); SemanticPropUtil.getSemanticPropsSingleFromString( sp, null, nonForwardedFields, null, deepNestedTupleType, deepNestedTupleType); assertThat(sp.getForwardingTargetFields(0, 0)).isEmpty(); assertThat(sp.getForwardingTargetFields(0, 1)).isEmpty(); assertThat(sp.getForwardingTargetFields(0, 2)).isEmpty(); assertThat(sp.getForwardingTargetFields(0, 3)).isEmpty(); assertThat(sp.getForwardingTargetFields(0, 4)).contains(4); assertThat(sp.getForwardingTargetFields(0, 5)).isEmpty(); assertThat(sp.getForwardingTargetFields(0, 6)).contains(6); nonForwardedFields[0] = "f2; f1.f1"; sp = new SingleInputSemanticProperties(); SemanticPropUtil.getSemanticPropsSingleFromString( sp, null, nonForwardedFields, null, deepNestedTupleType, deepNestedTupleType); assertThat(sp.getForwardingTargetFields(0, 0)).contains(0); assertThat(sp.getForwardingTargetFields(0, 1)).contains(1); assertThat(sp.getForwardingTargetFields(0, 2)).contains(2); assertThat(sp.getForwardingTargetFields(0, 3)).contains(3); assertThat(sp.getForwardingTargetFields(0, 4)).isEmpty(); assertThat(sp.getForwardingTargetFields(0, 5)).contains(5); assertThat(sp.getForwardingTargetFields(0, 6)).isEmpty(); }
// Delegates character-stream retrieval to the underlying merged result.
@Override public Reader getCharacterStream(final int columnIndex) throws SQLException { return mergedResult.getCharacterStream(columnIndex); }
// Verifies that MaskMergedResult passes getCharacterStream straight through
// to the wrapped merged result.
@Test void assertGetCharacterStream() throws SQLException { Reader reader = mock(Reader.class); when(mergedResult.getCharacterStream(1)).thenReturn(reader); assertThat(new MaskMergedResult(mock(MaskRule.class), mock(SelectStatementContext.class), mergedResult).getCharacterStream(1), is(reader)); }
/**
 * Builds a JAAS configuration entry of the form
 * {@code <moduleName> required key1="value1" key2="value2";}.
 *
 * @param moduleName the login module name; must be non-empty and must not
 *                   contain {@code '='} or {@code ';'}
 * @param options    option key/value pairs; keys must not contain
 *                   {@code '='} or {@code ';'}
 * @return the formatted JAAS configuration string
 * @throws IllegalArgumentException if the module name or any option key is invalid
 * @throws NullPointerException     if any option key or value is null
 */
public static String jaasConfig(String moduleName, Map<String, String> options) {
    // Validate the module name up front. The original check lived inside the
    // options loop, so it was silently skipped whenever the map was empty.
    if (moduleName.isEmpty() || moduleName.contains(";") || moduleName.contains("=")) {
        throw new IllegalArgumentException("module name must be not empty and must not contain '=' or ';'");
    }
    StringJoiner joiner = new StringJoiner(" ");
    for (Entry<String, String> entry : options.entrySet()) {
        String key = Objects.requireNonNull(entry.getKey());
        String value = Objects.requireNonNull(entry.getValue());
        if (key.contains("=") || key.contains(";")) {
            throw new IllegalArgumentException("Keys must not contain '=' or ';'");
        }
        joiner.add(key + "=\"" + value + "\"");
    }
    return moduleName + " required " + joiner + ";";
}
// Verifies that an empty options map produces "<module> required ;" with no
// key/value pairs.
@Test public void testConfigWithEmptyOptions() { String moduleName = "ExampleModule"; Map<String, String> options = new HashMap<>(); String expectedOutput = "ExampleModule required ;"; String result = AuthenticationUtils.jaasConfig(moduleName, options); assertEquals(expectedOutput, result); }
// Extracts the team-name portion (regex capture group 2) from an
// "Org/team..." group name; empty when the name does not match.
public static Optional<String> extractTeamName(String groupName) { return extractRegexGroupIfMatches(groupName, 2); }
// Verifies team-name extraction keeps everything after the first '/',
// including nested separators.
@Test public void extractTeamName_whenNameIsCorrect_extractsTeamName() { assertThat(GithubTeamConverter.extractTeamName("Org1/team1")).isEqualTo(Optional.of("team1")); assertThat(GithubTeamConverter.extractTeamName("Org1/team1/team2")).isEqualTo(Optional.of("team1/team2")); }
public static String format(TemporalAccessor time, DateTimeFormatter formatter) { if (null == time) { return null; } if(time instanceof Month){ return time.toString(); } if(null == formatter){ formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME; } try { return formatter.format(time); } catch (UnsupportedTemporalTypeException e){ if(time instanceof LocalDate && e.getMessage().contains("HourOfDay")){ // 用户传入LocalDate,但是要求格式化带有时间部分,转换为LocalDateTime重试 return formatter.format(((LocalDate) time).atStartOfDay()); }else if(time instanceof LocalTime && e.getMessage().contains("YearOfEra")){ // 用户传入LocalTime,但是要求格式化带有日期部分,转换为LocalDateTime重试 return formatter.format(((LocalTime) time).atDate(LocalDate.now())); } else if(time instanceof Instant){ // 时间戳没有时区信息,赋予默认时区 return formatter.format(((Instant) time).atZone(ZoneId.systemDefault())); } throw e; } }
@Test
public void formatLocalTimeTest() {
    // LocalTime.MIN carries no date; format() should borrow today's date to satisfy the pattern.
    final String todayPart = TemporalAccessorUtil.format(LocalDate.now(), DatePattern.NORM_DATE_PATTERN);
    final String formatted = TemporalAccessorUtil.format(LocalTime.MIN, DatePattern.NORM_DATETIME_PATTERN);
    assertEquals(todayPart + " 00:00:00", formatted);
}
/**
 * Executes a DDL command, delegating to the five-argument overload with the
 * trailing flag hard-coded to {@code false}.
 *
 * <p>NOTE(review): the flag's exact meaning is defined by the overload
 * (presumably a restore/replay mode switch) — confirm against its signature.
 *
 * @param sql              the original SQL text, for error reporting
 * @param ddlCommand       the command to execute
 * @param withQuery        whether the DDL is part of a query
 * @param withQuerySources sources referenced by the accompanying query
 * @return the result of executing the command
 */
public DdlCommandResult execute(
    final String sql,
    final DdlCommand ddlCommand,
    final boolean withQuery,
    final Set<SourceName> withQuerySources
) {
  final boolean extendedModeFlag = false;
  return execute(sql, ddlCommand, withQuery, withQuerySources, extendedModeFlag);
}
@Test public void shouldAddSinkStream() { // Given: givenCreateStream(); // When: cmdExec.execute(SQL_TEXT, createStream, true, NO_QUERY_SOURCES); // Then: assertThat(metaStore.getSource(STREAM_NAME).isCasTarget(), is(true)); }
/**
 * Truncates this file record set to {@code targetSize} bytes.
 *
 * @param targetSize the desired size in bytes; must be in {@code [0, sizeInBytes()]}
 * @return the number of bytes removed
 * @throws KafkaException if {@code targetSize} is negative or larger than the current size
 * @throws IOException    if the underlying channel operations fail
 */
public int truncateTo(int targetSize) throws IOException {
    final int currentSize = sizeInBytes();
    if (targetSize > currentSize || targetSize < 0) {
        throw new KafkaException("Attempt to truncate log segment " + file + " to " + targetSize
            + " bytes failed, " + " size of this log segment is " + currentSize + " bytes.");
    }
    // Only touch the channel when it is actually longer than the target.
    if (targetSize < (int) channel.size()) {
        channel.truncate(targetSize);
        size.set(targetSize);
    }
    return currentSize - targetSize;
}
@Test
public void testTruncateIfSizeIsDifferentToTargetSize() throws IOException {
    final FileChannel channelMock = mock(FileChannel.class);
    when(channelMock.size()).thenReturn(42L);
    when(channelMock.truncate(anyLong())).thenReturn(channelMock);

    final FileRecords records = new FileRecords(tempFile(), channelMock, 0, Integer.MAX_VALUE, false);
    records.truncateTo(23);

    // The channel is longer (42) than the target (23), so a real truncate must happen.
    verify(channelMock, atLeastOnce()).size();
    verify(channelMock).truncate(23);
}
/**
 * Processes one input row. On the first row it caches the input-field indexes
 * and starts the PostgreSQL COPY (which sets up pgCopyOut); every row is then
 * streamed into the COPY channel and passed downstream unchanged. When the
 * input is exhausted the COPY stream is flushed and closed.
 *
 * @param smi the step metadata (cast to PGBulkLoaderMeta)
 * @param sdi the step data (cast to PGBulkLoaderData)
 * @return true while more rows are expected; false when the stream is finished
 *         or an error aborted the step
 * @throws KettleException on processing failure (most errors are caught,
 *         logged, and converted into a false return instead)
 */
public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
  meta = (PGBulkLoaderMeta) smi;
  data = (PGBulkLoaderData) sdi;

  try {
    Object[] r = getRow(); // Get row from input rowset & set row busy!
    if ( r == null ) { // no more input to be expected...

      setOutputDone();

      // Close the output stream...
      // will be null if no records (empty stream)
      if ( data != null && pgCopyOut != null ) {
        pgCopyOut.flush();
        pgCopyOut.endCopy();
      }

      return false;
    }

    if ( first ) {
      first = false;

      // Cache field indexes.
      //
      data.keynrs = new int[meta.getFieldStream().length];
      for ( int i = 0; i < data.keynrs.length; i++ ) {
        data.keynrs[i] = getInputRowMeta().indexOfValue( meta.getFieldStream()[i] );
      }

      // execute the copy statement... pgCopyOut is setup there
      //
      do_copy( meta, true );
      // Write rows of data hereafter...
      //
    }

    writeRowToPostgres( getInputRowMeta(), r );

    putRow( getInputRowMeta(), r );
    incrementLinesOutput();

    return true;
  } catch ( Exception e ) {
    // Best-effort abort: log, flag the error, stop all step threads, and
    // signal downstream receivers that no more rows will come.
    logError( BaseMessages.getString( PKG, "GPBulkLoader.Log.ErrorInStep" ), e );
    setErrors( 1 );
    stopAll();
    setOutputDone(); // signal end to receiver(s)
    return false;
  }
}
@Test
public void testProcessRow_StreamIsNull() throws Exception {
  // NOTE(review): pgBulkLoaderStreamIsNull is a plain Mockito mock, so
  // processRow() itself is stubbed and returns the boolean default (false)
  // without executing the real end-of-stream logic — only getRow() is stubbed
  // here. If the intent is to exercise the real null-stream path, a spy with
  // doCallRealMethod() on processRow() would be needed. Confirm intent.
  PGBulkLoader pgBulkLoaderStreamIsNull = mock( PGBulkLoader.class );
  doReturn( null ).when( pgBulkLoaderStreamIsNull ).getRow();
  PGBulkLoaderMeta meta = mock( PGBulkLoaderMeta.class );
  PGBulkLoaderData data = mock( PGBulkLoaderData.class );

  assertEquals( false, pgBulkLoaderStreamIsNull.processRow( meta, data ) );
}
/**
 * Static factory for a {@code PersistenceSchema}.
 *
 * @param columns  the columns persisted under this schema
 * @param features the serde features applied to the schema
 * @return a new instance wrapping the given columns and features
 */
public static PersistenceSchema from(
    final List<? extends SimpleColumn> columns,
    final SerdeFeatures features
) {
  return new PersistenceSchema(columns, features);
}
@SuppressWarnings("UnstableApiUsage")
@Test
public void shouldImplementEqualsProperly() {
  // Schemas are equal iff both the columns AND the serde features match.
  final EqualsTester tester = new EqualsTester();
  tester.addEqualityGroup(
      PersistenceSchema.from(SINGLE_COLUMN, SerdeFeatures.of()),
      PersistenceSchema.from(SINGLE_COLUMN, SerdeFeatures.of()));
  tester.addEqualityGroup(
      PersistenceSchema.from(MULTI_COLUMN, SerdeFeatures.of()));
  tester.addEqualityGroup(
      PersistenceSchema.from(SINGLE_COLUMN, SerdeFeatures.of(SerdeFeature.WRAP_SINGLES)));
  tester.testEquals();
}
/**
 * Renders the block descriptor fields for debugging, e.g.
 * {@code iniBlock:=..., startBlock:=..., endBlock:=..., startOffset:=..., endOffset:=<EOL>}.
 *
 * @return the formatted field dump, terminated by the platform line separator
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("iniBlock:=").append(getIniBlock()).append(", ");
    sb.append("startBlock:=").append(getStartBlock()).append(", ");
    sb.append("endBlock:=").append(getEndBlock()).append(", ");
    sb.append("startOffset:=").append(getStartOffset()).append(", ");
    // System.lineSeparator() (Java 7+) replaces the legacy
    // System.getProperty("line.separator") lookup; the value is identical.
    sb.append("endOffset:=").append(getEndOffset()).append(System.lineSeparator());
    return sb.toString();
}
@Test
public void testToString() throws ChmParsingException {
    // Lazily build the fixture if a previous test has not done so already.
    if (chmBlockInfo == null) {
        testGetChmBlockInfo();
    }
    final String dump = chmBlockInfo.toString();
    assertTrue(dump.length() > 0);
}
/**
 * Removes {@code prefix} from the front of {@code key} when present;
 * otherwise returns {@code key} unchanged.
 *
 * @param key    the string to strip
 * @param prefix the prefix to remove
 * @return {@code key} without its leading {@code prefix}
 */
public static String stripPrefixIfPresent(final String key, final String prefix) {
    return key.startsWith(prefix) ? key.substring(prefix.length()) : key;
}
@Test
public void stripPrefixIfPresent() throws Exception {
    // Each row: {input, prefix, expected}.
    final String[][] cases = {
        {"ufs://bucket/", "ufs://bucket/", ""},
        {"ufs://bucket", "ufs://bucket/", "ufs://bucket"},
        {"ufs://bucket/", "", "ufs://bucket/"},
        {"ufs://bucket-2/dir/file", "ufs://bucket-2/", "dir/file"},
        {"dir/file", "ufs://bucket", "dir/file"},
        {"/dir/file", "/", "dir/file"},
        {"ufs://bucket/file", "ufs://bucket/", "file"},
    };
    for (final String[] c : cases) {
        assertEquals(c[2], CommonUtils.stripPrefixIfPresent(c[0], c[1]));
    }
}
static byte[] adaptArray(byte[] ftdiData) { int length = ftdiData.length; if(length > 64) { int n = 1; int p = 64; // Precalculate length without FTDI headers while(p < length) { n++; p = n*64; } int realLength = length - n*2; byte[] data = new byte[realLength]; copyData(ftdiData, data); return data; } else if (length == 2) // special case optimization that returns the same instance. { return EMPTY_BYTE_ARRAY; } else { return Arrays.copyOfRange(ftdiData, 2, length); } }
@Test
public void withHeaders() {
    // A 6-byte buffer is a single packet: the leading 2 header bytes are dropped.
    final byte[] raw = {1, 2, 3, 4, 5, 6};
    final byte[] expectedPayload = {3, 4, 5, 6};
    Assert.assertArrayEquals(expectedPayload, FTDISerialDevice.adaptArray(raw));
}
/**
 * Records metrics for a successful push: total count, cost statistics, an
 * empty-push counter, and the per-transport (rpc vs. udp) TPS monitors.
 */
@Override
public void pushSuccess(PushResult result) {
    final long cost = result.getAllCost();
    MetricsMonitor.incrementPush();
    MetricsMonitor.incrementPushCost(cost);
    MetricsMonitor.compareAndSetMaxPushCost(cost);
    // A push with no hosts, or payload data that fails validation, counts as "empty".
    if (null == result.getData().getHosts() || !result.getData().validate()) {
        MetricsMonitor.incrementEmptyPush();
    }
    final String clientId = result.getSubscribeClientId();
    final String subscriberIp = result.getSubscriber().getIp();
    if (isRpc(result.getSubscriber())) {
        NamingTpsMonitor.rpcPushSuccess(clientId, subscriberIp);
    } else {
        NamingTpsMonitor.udpPushSuccess(clientId, subscriberIp);
    }
}
@Test
void testPushSuccessForEmptyPush() {
    new NacosMonitorPushResultHook().pushSuccess(pushResult);
    // One total push recorded, which also counted as empty, and its cost
    // became the observed maximum.
    assertEquals(1, MetricsMonitor.getTotalPushMonitor().get());
    assertEquals(1, MetricsMonitor.getEmptyPushMonitor().get());
    assertEquals(allCost, MetricsMonitor.getMaxPushCostMonitor().get());
}
public void isTrue() { if (actual == null) { isEqualTo(true); // fails } else if (!actual) { failWithoutActual(simpleFact("expected to be true")); } }
@Test
public void isTrue() {
    // Happy path: a literal true subject passes isTrue() without failing.
    assertThat(true).isTrue();
}
public FEELFnResult<Boolean> invoke(@ParameterName( "list" ) List list) { if ( list == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null")); } boolean result = false; boolean containsNull = false; // Spec. definition: return true if any item is true, else false if all items are false, else null for ( final Object element : list ) { if (element != null && !(element instanceof Boolean)) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "an element in the list is not a Boolean")); } else { if (element != null) { result |= (Boolean) element; } else if (!containsNull) { containsNull = true; } } } if (containsNull && !result) { return FEELFnResult.ofResult( null ); } else { return FEELFnResult.ofResult( result ); } }
@Test
void invokeArrayParamReturnNull() {
    // An all-false list containing a null: per the FEEL spec, any() yields null, not false.
    FunctionTestUtil.assertResultNull(anyFunction.invoke(new Object[]{Boolean.FALSE, null, Boolean.FALSE}));
}
public List<TStatisticData> queryStatisticSync(ConnectContext context, String tableUUID, Table table, List<String> columnNames) throws AnalysisException { if (table == null) { // Statistical information query is an unlocked operation, // so it is possible for the table to be deleted while the code is running return Collections.emptyList(); } List<Type> columnTypes = Lists.newArrayList(); for (String colName : columnNames) { columnTypes.add(StatisticUtils.getQueryStatisticsColumnType(table, colName)); } String sql = StatisticSQLBuilder.buildQueryExternalFullStatisticsSQL(tableUUID, columnNames, columnTypes); return executeStatisticDQL(context, sql); }
@Test
public void testDroppedDB() throws Exception {
    final StatisticExecutor statisticExecutor = new StatisticExecutor();
    final Database db = GlobalStateMgr.getCurrentState().getDb("test");
    final BasicStatsMeta statsMeta = new BasicStatsMeta(db.getId(), 1000, null,
            StatsConstants.AnalyzeType.FULL,
            LocalDateTime.of(2020, 1, 1, 1, 1, 1), Maps.newHashMap());
    GlobalStateMgr.getCurrentState().getAnalyzeMgr().addBasicStatsMeta(statsMeta);

    // Table id 1000 does not resolve (null table): the query must degrade to an empty result.
    final List<TStatisticData> stats = statisticExecutor.queryStatisticSync(
            StatisticUtils.buildConnectContext(), null, 1000L, Lists.newArrayList("foo", "bar"));
    Assert.assertEquals(0, stats.size());
}
/**
 * Returns a resource whose value for every countable resource type is the
 * larger of the corresponding values in {@code lhs} and {@code rhs}.
 * Resource types missing from either side are logged and skipped.
 *
 * @param lhs the first resource
 * @param rhs the second resource
 * @return a new resource holding the component-wise maximum
 */
public static Resource componentwiseMax(Resource lhs, Resource rhs) {
    final Resource result = createResource(0);
    final int typeCount = ResourceUtils.getNumberOfCountableResourceTypes();
    for (int idx = 0; idx < typeCount; idx++) {
        try {
            final ResourceInformation left = lhs.getResourceInformation(idx);
            final ResourceInformation right = rhs.getResourceInformation(idx);
            // Keep whichever side carries the larger value for this type.
            final ResourceInformation larger =
                left.getValue() > right.getValue() ? left : right;
            result.setResourceInformation(idx, larger);
        } catch (ResourceNotFoundException ye) {
            LOG.warn("Resource is missing:" + ye.getMessage());
        }
    }
    return result;
}
@Test
void testComponentwiseMax() {
    // Two-component resources: max is taken independently per component.
    assertEquals(createResource(2, 2), componentwiseMax(createResource(1, 1), createResource(2, 2)));
    assertEquals(createResource(2, 2), componentwiseMax(createResource(2, 2), createResource(1, 1)));
    assertEquals(createResource(2, 2), componentwiseMax(createResource(1, 2), createResource(2, 1)));
    // Three-component resources, including a side with fewer declared components.
    assertEquals(createResource(2, 2, 2), componentwiseMax(createResource(1, 1, 1), createResource(2, 2, 2)));
    assertEquals(createResource(2, 2, 2), componentwiseMax(createResource(2, 2, 2), createResource(1, 1)));
    assertEquals(createResource(2, 2, 3), componentwiseMax(createResource(1, 2, 2), createResource(2, 1, 3)));
    assertEquals(createResource(2, 2, 1), componentwiseMax(createResource(2, 2, 0), createResource(2, 1, 1)));
}
/**
 * Trailing stop-loss check. While a position is open, the stop level trails
 * the best price seen since entry by the threshold indicator's current value;
 * the rule fires when the reference price crosses that trailing level.
 *
 * @param index         the bar index being evaluated
 * @param tradingRecord the current trading record; rule is inert when null or closed
 * @return true when the trailing stop is breached at {@code index}
 */
@Override
public boolean isSatisfied(int index, TradingRecord tradingRecord) {
    // Only meaningful while a position is open.
    if (tradingRecord != null && !tradingRecord.isClosed()) {
        Num entryPrice = tradingRecord.getCurrentPosition().getEntry().getNetPrice();
        Num currentPrice = this.referencePrice.getValue(index);
        Num threshold = this.stopLossThreshold.getValue(index);
        // Number of bars the position has been open, inclusive of the entry bar;
        // used as the lookback window for the best-price-since-entry indicators.
        int barsSinceEntry = index - tradingRecord.getCurrentPosition().getEntry().getIndex() + 1;

        if (tradingRecord.getCurrentPosition().getEntry().isBuy()) {
            // Long position: trail from the highest price since entry (never
            // below the entry price) minus the threshold.
            HighestValueIndicator highestPrice = new HighestValueIndicator(this.referencePrice, barsSinceEntry);
            Num thresholdPrice = entryPrice.max(highestPrice.getValue(index)).minus(threshold);
            return currentPrice.isLessThan(thresholdPrice);
        } else {
            // Short position: trail from the lowest price since entry (never
            // above the entry price) plus the threshold.
            LowestValueIndicator lowestPrice = new LowestValueIndicator(this.referencePrice, barsSinceEntry);
            Num thresholdPrice = entryPrice.min(lowestPrice.getValue(index)).plus(threshold);
            return currentPrice.isGreaterThan(thresholdPrice);
        }
    }
    return false;
}
@Test
public void testStopLossNotTriggered() {
    final TradingRecord tradingRecord = new BaseTradingRecord();
    tradingRecord.enter(0, series.getBar(0).getClosePrice(), series.numOf(1));

    final AverageTrueRangeTrailingStopLossRule rule =
        new AverageTrueRangeTrailingStopLossRule(series, 3, 1.0);

    // Price never breaches the trailing threshold on these bars.
    for (int barIndex = 1; barIndex <= 3; barIndex++) {
        assertFalse(rule.isSatisfied(barIndex, tradingRecord));
    }
}
/**
 * Returns true when the (case-insensitively matched) service name is one of
 * the known plain-HTTP web services.
 */
public static boolean isWebService(Optional<String> serviceName) {
    return serviceName
        .map(name -> IS_PLAIN_HTTP_BY_KNOWN_WEB_SERVICE_NAME.containsKey(Ascii.toLowerCase(name)))
        .orElse(false);
}
@Test
public void isWebService_whenNonWebService_returnsFalse() {
    // "ssh" is not in the known web-service map, so it must not be classified as web.
    assertThat(
            NetworkServiceUtils.isWebService(
                NetworkService.newBuilder().setServiceName("ssh").build()))
        .isFalse();
}
/**
 * Submits the exchange as one or more SubmitSm PDUs (long messages are split
 * beforehand by createSubmitSm) and stores the SMSC-assigned message ids and
 * sent-segment count on the result message headers.
 *
 * @param exchange the exchange carrying the message body and SMPP headers
 * @throws SmppException wrapping any error raised during submission
 */
@Override
public void execute(Exchange exchange) throws SmppException {
    SubmitSm[] submitSms = createSubmitSm(exchange);
    List<String> messageIDs = new ArrayList<>(submitSms.length);

    String messageID = null;
    for (int i = 0; i < submitSms.length; i++) {
        SubmitSm submitSm = submitSms[i];
        messageID = null;
        if (log.isDebugEnabled()) {
            log.debug("Sending short message {} for exchange id '{}'...", i, exchange.getExchangeId());
        }

        try {
            // One synchronous submit per segment; the result (if any) carries
            // the SMSC-assigned message id for that segment.
            SubmitSmResult result = session.submitShortMessage(
                    submitSm.getServiceType(),
                    TypeOfNumber.valueOf(submitSm.getSourceAddrTon()),
                    NumberingPlanIndicator.valueOf(submitSm.getSourceAddrNpi()),
                    submitSm.getSourceAddr(),
                    TypeOfNumber.valueOf(submitSm.getDestAddrTon()),
                    NumberingPlanIndicator.valueOf(submitSm.getDestAddrNpi()),
                    submitSm.getDestAddress(),
                    new ESMClass(submitSm.getEsmClass()),
                    submitSm.getProtocolId(),
                    submitSm.getPriorityFlag(),
                    submitSm.getScheduleDeliveryTime(),
                    submitSm.getValidityPeriod(),
                    new RegisteredDelivery(submitSm.getRegisteredDelivery()),
                    submitSm.getReplaceIfPresent(),
                    DataCodings.newInstance(submitSm.getDataCoding()),
                    (byte) 0,
                    submitSm.getShortMessage(),
                    submitSm.getOptionalParameters());

            if (result != null) {
                messageID = result.getMessageId();
            }
        } catch (Exception e) {
            // Normalize any transport/protocol failure into the component exception type.
            throw new SmppException(e);
        }

        if (messageID != null) {
            messageIDs.add(messageID);
        }
    }

    if (log.isDebugEnabled()) {
        log.debug("Sent short message for exchange id '{}' and received message ids '{}'",
                exchange.getExchangeId(), messageIDs);
    }

    Message message = ExchangeHelper.getResultMessage(exchange);
    message.setHeader(SmppConstants.ID, messageIDs);
    message.setHeader(SmppConstants.SENT_MESSAGE_COUNT, messageIDs.size());
}
@Test
public void executeWithValidityPeriodAsString() throws Exception {
    // VALIDITY_PERIOD may be given directly as an SMPP relative-time string
    // instead of a Date; it must be passed through to the session untouched.
    Exchange exchange = new DefaultExchange(new DefaultCamelContext(), ExchangePattern.InOut);
    exchange.getIn().setHeader(SmppConstants.COMMAND, "SubmitSm");
    exchange.getIn().setHeader(SmppConstants.ID, "1");
    exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR_TON, TypeOfNumber.NATIONAL.value());
    exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR_NPI, NumberingPlanIndicator.NATIONAL.value());
    exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR, "1818");
    exchange.getIn().setHeader(SmppConstants.DEST_ADDR_TON, TypeOfNumber.INTERNATIONAL.value());
    exchange.getIn().setHeader(SmppConstants.DEST_ADDR_NPI, NumberingPlanIndicator.INTERNET.value());
    exchange.getIn().setHeader(SmppConstants.DEST_ADDR, "1919");
    exchange.getIn().setHeader(SmppConstants.SCHEDULE_DELIVERY_TIME, new Date(1111111));
    exchange.getIn().setHeader(SmppConstants.VALIDITY_PERIOD, "000003000000000R"); // three days
    exchange.getIn().setHeader(SmppConstants.PROTOCOL_ID, (byte) 1);
    exchange.getIn().setHeader(SmppConstants.PRIORITY_FLAG, (byte) 2);
    exchange.getIn().setHeader(SmppConstants.REGISTERED_DELIVERY, new RegisteredDelivery(SMSCDeliveryReceipt.FAILURE).value());
    exchange.getIn().setHeader(SmppConstants.REPLACE_IF_PRESENT_FLAG, ReplaceIfPresentFlag.REPLACE.value());
    exchange.getIn().setBody("short message body");
    // Stub the session call with the exact expected arguments, including the
    // absolute-format schedule time and the raw validity-period string.
    when(session.submitShortMessage(eq("CMT"), eq(TypeOfNumber.NATIONAL), eq(NumberingPlanIndicator.NATIONAL), eq("1818"),
            eq(TypeOfNumber.INTERNATIONAL), eq(NumberingPlanIndicator.INTERNET), eq("1919"),
            eq(new ESMClass()), eq((byte) 1), eq((byte) 2), eq("-300101001831100+"), eq("000003000000000R"),
            eq(new RegisteredDelivery(SMSCDeliveryReceipt.FAILURE)), eq(ReplaceIfPresentFlag.REPLACE.value()),
            eq(DataCodings.newInstance((byte) 0)), eq((byte) 0), eq("short message body".getBytes())))
            .thenReturn(new SubmitSmResult(new MessageId("1"), null));

    command.execute(exchange);

    // One segment sent, so exactly one id and a sent count of 1.
    assertEquals(Arrays.asList("1"),
            exchange.getMessage().getHeader(SmppConstants.ID));
    assertEquals(1, exchange.getMessage().getHeader(SmppConstants.SENT_MESSAGE_COUNT));
}