focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Handles a consumer-group heartbeat for one member and produces the records needed to
 * persist any resulting state change, plus the heartbeat response.
 *
 * The flow is: (1) create/update the member and bump the group epoch if subscriptions or
 * metadata changed; (2) recompute the target assignment when the group epoch moved past
 * the assignment epoch; (3) reconcile the member's current assignment toward the target.
 *
 * @param groupId              group to heartbeat against (created when memberEpoch == 0)
 * @param memberId             member id; empty on first join, in which case one is generated
 * @param memberEpoch          epoch reported by the member; 0 means joining
 * @param instanceId           static-membership instance id, or null for a dynamic member
 * @param rackId               client rack, or null if not provided
 * @param rebalanceTimeoutMs   rebalance timeout, or -1 sentinel if not provided
 * @param clientId             client id from the request context
 * @param clientHost           client host from the request context
 * @param subscribedTopicNames topics the member subscribes to, or null if unchanged
 * @param assignorName         requested server-side assignor, or null if unchanged
 * @param ownedTopicPartitions partitions the member claims to own, or null if not provided
 * @return the records to append and the heartbeat response
 * @throws ApiException on validation failures (e.g. group full, fenced member)
 */
private CoordinatorResult<ConsumerGroupHeartbeatResponseData, CoordinatorRecord> consumerGroupHeartbeat(
    String groupId,
    String memberId,
    int memberEpoch,
    String instanceId,
    String rackId,
    int rebalanceTimeoutMs,
    String clientId,
    String clientHost,
    List<String> subscribedTopicNames,
    String assignorName,
    List<ConsumerGroupHeartbeatRequestData.TopicPartitions> ownedTopicPartitions
) throws ApiException {
    final long currentTimeMs = time.milliseconds();
    final List<CoordinatorRecord> records = new ArrayList<>();

    // Get or create the consumer group.
    boolean createIfNotExists = memberEpoch == 0;
    final ConsumerGroup group = getOrMaybeCreateConsumerGroup(groupId, createIfNotExists, records);
    throwIfConsumerGroupIsFull(group, memberId);

    // Get or create the member.
    if (memberId.isEmpty()) memberId = Uuid.randomUuid().toString();
    final ConsumerGroupMember member;
    if (instanceId == null) {
        member = getOrMaybeSubscribeDynamicConsumerGroupMember(
            group,
            memberId,
            memberEpoch,
            ownedTopicPartitions,
            createIfNotExists,
            false
        );
    } else {
        member = getOrMaybeSubscribeStaticConsumerGroupMember(
            group,
            memberId,
            memberEpoch,
            instanceId,
            ownedTopicPartitions,
            createIfNotExists,
            false,
            records
        );
    }

    // 1. Create or update the member. If the member is new or has changed, a ConsumerGroupMemberMetadataValue
    // record is written to the __consumer_offsets partition to persist the change. If the subscriptions have
    // changed, the subscription metadata is updated and persisted by writing a ConsumerGroupPartitionMetadataValue
    // record to the __consumer_offsets partition. Finally, the group epoch is bumped if the subscriptions have
    // changed, and persisted by writing a ConsumerGroupMetadataValue record to the partition.
    ConsumerGroupMember updatedMember = new ConsumerGroupMember.Builder(member)
        .maybeUpdateInstanceId(Optional.ofNullable(instanceId))
        .maybeUpdateRackId(Optional.ofNullable(rackId))
        .maybeUpdateRebalanceTimeoutMs(ofSentinel(rebalanceTimeoutMs))
        .maybeUpdateServerAssignorName(Optional.ofNullable(assignorName))
        .maybeUpdateSubscribedTopicNames(Optional.ofNullable(subscribedTopicNames))
        .setClientId(clientId)
        .setClientHost(clientHost)
        .setClassicMemberMetadata(null)
        .build();

    boolean bumpGroupEpoch = hasMemberSubscriptionChanged(
        groupId,
        member,
        updatedMember,
        records
    );

    int groupEpoch = group.groupEpoch();
    Map<String, TopicMetadata> subscriptionMetadata = group.subscriptionMetadata();
    Map<String, Integer> subscribedTopicNamesMap = group.subscribedTopicNames();
    SubscriptionType subscriptionType = group.subscriptionType();

    if (bumpGroupEpoch || group.hasMetadataExpired(currentTimeMs)) {
        // The subscription metadata is updated in two cases:
        // 1) The member has updated its subscriptions;
        // 2) The refresh deadline has been reached.
        subscribedTopicNamesMap = group.computeSubscribedTopicNames(member, updatedMember);
        subscriptionMetadata = group.computeSubscriptionMetadata(
            subscribedTopicNamesMap,
            metadataImage.topics(),
            metadataImage.cluster()
        );

        // Count the joining member if it is not already known, so the subscription type
        // (homogeneous vs heterogeneous) is computed against the post-join membership.
        int numMembers = group.numMembers();
        if (!group.hasMember(updatedMember.memberId()) && !group.hasStaticMember(updatedMember.instanceId())) {
            numMembers++;
        }

        subscriptionType = ModernGroup.subscriptionType(
            subscribedTopicNamesMap,
            numMembers
        );

        if (!subscriptionMetadata.equals(group.subscriptionMetadata())) {
            log.info("[GroupId {}] Computed new subscription metadata: {}.", groupId, subscriptionMetadata);
            bumpGroupEpoch = true;
            records.add(newConsumerGroupSubscriptionMetadataRecord(groupId, subscriptionMetadata));
        }

        if (bumpGroupEpoch) {
            groupEpoch += 1;
            records.add(newConsumerGroupEpochRecord(groupId, groupEpoch));
            log.info("[GroupId {}] Bumped group epoch to {}.", groupId, groupEpoch);
            metrics.record(CONSUMER_GROUP_REBALANCES_SENSOR_NAME);
        }

        group.setMetadataRefreshDeadline(currentTimeMs + consumerGroupMetadataRefreshIntervalMs, groupEpoch);
    }

    // 2. Update the target assignment if the group epoch is larger than the target assignment epoch. The delta between
    // the existing and the new target assignment is persisted to the partition.
    final int targetAssignmentEpoch;
    final Assignment targetAssignment;

    if (groupEpoch > group.assignmentEpoch()) {
        targetAssignment = updateTargetAssignment(
            group,
            groupEpoch,
            member,
            updatedMember,
            subscriptionMetadata,
            subscriptionType,
            records
        );
        targetAssignmentEpoch = groupEpoch;
    } else {
        targetAssignmentEpoch = group.assignmentEpoch();
        targetAssignment = group.targetAssignment(updatedMember.memberId(), updatedMember.instanceId());
    }

    // 3. Reconcile the member's assignment with the target assignment if the member is not
    // fully reconciled yet.
    updatedMember = maybeReconcile(
        groupId,
        updatedMember,
        group::currentPartitionEpoch,
        targetAssignmentEpoch,
        targetAssignment,
        ownedTopicPartitions,
        records
    );

    scheduleConsumerGroupSessionTimeout(groupId, memberId);

    // Prepare the response.
    ConsumerGroupHeartbeatResponseData response = new ConsumerGroupHeartbeatResponseData()
        .setMemberId(updatedMember.memberId())
        .setMemberEpoch(updatedMember.memberEpoch())
        .setHeartbeatIntervalMs(consumerGroupHeartbeatIntervalMs(groupId));

    // The assignment is only provided in the following cases:
    // 1. The member sent a full request. It does so when joining or rejoining the group with zero
    // as the member epoch; or on any errors (e.g. timeout). We use all the non-optional fields
    // (rebalanceTimeoutMs, subscribedTopicNames and ownedTopicPartitions) to detect a full request
    // as those must be set in a full request.
    // 2. The member's assignment has been updated.
    boolean isFullRequest = memberEpoch == 0 || (rebalanceTimeoutMs != -1 && subscribedTopicNames != null && ownedTopicPartitions != null);
    if (isFullRequest || hasAssignedPartitionsChanged(member, updatedMember)) {
        response.setAssignment(createConsumerGroupResponseAssignment(updatedMember));
    }

    return new CoordinatorResult<>(records, response);
}
// A leave-group heartbeat that reuses an existing static member's instance id but carries a
// different member id must be fenced (FencedInstanceIdException) instead of being allowed to
// remove the static member from the group.
@Test
public void testShouldThrowFencedInstanceIdExceptionWhenStaticMemberWithDifferentMemberIdLeaves() {
    String groupId = "fooup";
    // Use a static member id as it makes the test easier.
    String memberId1 = Uuid.randomUuid().toString();

    Uuid fooTopicId = Uuid.randomUuid();
    String fooTopicName = "foo";

    MockPartitionAssignor assignor = new MockPartitionAssignor("range");

    // Consumer group with one static member.
    GroupMetadataManagerTestContext context = new GroupMetadataManagerTestContext.Builder()
        .withConsumerGroupAssignors(Collections.singletonList(assignor))
        .withMetadataImage(new MetadataImageBuilder()
            .addTopic(fooTopicId, fooTopicName, 6)
            .build())
        .withConsumerGroup(new ConsumerGroupBuilder(groupId, 10)
            .withMember(new ConsumerGroupMember.Builder(memberId1)
                .setState(MemberState.STABLE)
                .setInstanceId(memberId1)
                .setMemberEpoch(10)
                .setPreviousMemberEpoch(9)
                .setClientId(DEFAULT_CLIENT_ID)
                .setClientHost(DEFAULT_CLIENT_ADDRESS.toString())
                .setSubscribedTopicNames(Arrays.asList("foo", "bar"))
                .setServerAssignorName("range")
                .setAssignedPartitions(mkAssignment(
                    mkTopicAssignment(fooTopicId, 0, 1, 2)))
                .build())
            .withAssignment(memberId1, mkAssignment(
                mkTopicAssignment(fooTopicId, 0, 1, 2)))
            .withAssignmentEpoch(10))
        .build();

    // The member id does not match the one registered for this instance id.
    assertThrows(FencedInstanceIdException.class, () -> context.consumerGroupHeartbeat(
        new ConsumerGroupHeartbeatRequestData()
            .setGroupId(groupId)
            .setMemberId("unknown-" + memberId1)
            .setInstanceId(memberId1)
            .setMemberEpoch(LEAVE_GROUP_STATIC_MEMBER_EPOCH)
            .setRebalanceTimeoutMs(5000)
            .setSubscribedTopicNames(Arrays.asList("foo", "bar"))
            .setTopicPartitions(Collections.emptyList())));
}
/**
 * Maps a JMS destination to its STOMP destination name.
 *
 * Previously-created temporary destinations keep the name the remote peer used to create
 * them; everything else is prefixed by queue/topic kind, with distinct prefixes for
 * broker-created temporary destinations.
 *
 * @param converter the protocol converter tracking temp-destination names
 * @param d         the JMS destination, may be null
 * @return the STOMP destination string, or null when {@code d} is null
 */
@Override
public String convertDestination(ProtocolConverter converter, Destination d) {
    if (d == null) {
        return null;
    }

    final ActiveMQDestination amqDestination = (ActiveMQDestination) d;

    // Temp destinations created through this converter keep their original STOMP name.
    final String tempName = converter.getCreatedTempDestinationName(amqDestination);
    if (tempName != null) {
        return tempName;
    }

    final String prefix;
    if (amqDestination.isQueue()) {
        prefix = amqDestination.isTemporary() ? "/remote-temp-queue/" : "/queue/";
    } else {
        prefix = amqDestination.isTemporary() ? "/remote-temp-topic/" : "/topic/";
    }
    return prefix + amqDestination.getPhysicalName();
}
// Verifies that a comma-separated "/queue/..." STOMP destination is translated into a
// composite ActiveMQ queue with one physical queue per component.
@Test(timeout = 10000)
public void testConvertCompositeQueues() throws Exception {
    String destinationA = "destinationA";
    String destinationB = "destinationB";
    String composite = "/queue/" + destinationA + ",/queue/" + destinationB;

    ActiveMQDestination destination = translator.convertDestination(converter, composite, false);
    assertEquals(ActiveMQDestination.QUEUE_TYPE, destination.getDestinationType());
    assertTrue(destination.isComposite());

    ActiveMQDestination[] composites = destination.getCompositeDestinations();
    assertEquals(2, composites.length);
    // Sort so assertions do not depend on the order the parser emits components.
    Arrays.sort(composites);
    assertEquals(ActiveMQDestination.QUEUE_TYPE, composites[0].getDestinationType());
    assertEquals(ActiveMQDestination.QUEUE_TYPE, composites[1].getDestinationType());
    assertEquals(destinationA, composites[0].getPhysicalName());
    assertEquals(destinationB, composites[1].getPhysicalName());
}
/**
 * Returns the REST endpoint path template for job metrics:
 * {@code /jobs/:jobid/metrics}, where the job id is supplied as a path parameter.
 */
@Override
public String getTargetRestEndpointURL() {
    final StringBuilder url = new StringBuilder("/jobs/:");
    url.append(JobIDPathParameter.KEY);
    url.append("/metrics");
    return url.toString();
}
// The endpoint URL must embed the job id path parameter: /jobs/:jobid/metrics.
@Test
void testUrl() {
    assertThat(jobMetricsHeaders.getTargetRestEndpointURL())
        .isEqualTo("/jobs/:" + JobIDPathParameter.KEY + "/metrics");
}
/**
 * Returns the mean of the distribution, computed as the product of the
 * {@code k} and {@code theta} fields (shape times scale for a Gamma distribution).
 */
@Override
public double mean() {
    final double shape = k;
    final double scale = theta;
    return shape * scale;
}
// mean() of Gamma(k=3, theta=2.1) must be k * theta = 6.3, independent of sampler state.
@Test
public void testMean() {
    // Fixed: the label previously printed "var" — a copy-paste slip from another test.
    System.out.println("mean");
    GammaDistribution instance = new GammaDistribution(3, 2.1);
    // Draw a sample first; mean() must not be affected by prior sampling.
    instance.rand();
    assertEquals(6.3, instance.mean(), 1E-7);
}
/**
 * Publishes the given leader information for a component to ZooKeeper.
 *
 * Only effective while this driver currently holds leadership; otherwise the update is
 * silently dropped. Write failures are reported to the election listener rather than thrown.
 *
 * @param componentId        component whose leader information is being published
 * @param leaderInformation  the leader session/address information to write
 */
@Override
public void publishLeaderInformation(String componentId, LeaderInformation leaderInformation) {
    Preconditions.checkState(running.get());

    // Non-leaders must not write; drop the update silently.
    if (!leaderLatch.hasLeadership()) {
        return;
    }

    final String connectionInformationPath =
        ZooKeeperUtils.generateConnectionInformationPath(componentId);
    LOG.debug(
        "Write leader information {} for component '{}' to {}.",
        leaderInformation,
        componentId,
        ZooKeeperUtils.generateZookeeperPath(
            curatorFramework.getNamespace(), connectionInformationPath));

    try {
        // The hasLeadership supplier allows the write to be aborted if leadership
        // is lost while the operation is in flight.
        ZooKeeperUtils.writeLeaderInformationToZooKeeper(
            leaderInformation,
            curatorFramework,
            leaderLatch::hasLeadership,
            connectionInformationPath);
    } catch (Exception e) {
        // Surface failures through the listener instead of throwing from the driver.
        leaderElectionListener.onError(e);
    }
}
// End-to-end check: leader information published by the driver must be observable through a
// leader retrieval service watching the same component id in ZooKeeper.
@Test
void testPublishLeaderInformation() throws Exception {
    new Context() {
        {
            runTest(
                () -> {
                    // Wait until this driver has actually become leader before publishing.
                    leaderElectionListener.await(LeaderElectionEvent.IsLeaderEvent.class);

                    final String componentId = "retrieved-component";
                    final DefaultLeaderRetrievalService defaultLeaderRetrievalService =
                        new DefaultLeaderRetrievalService(
                            new ZooKeeperLeaderRetrievalDriverFactory(
                                curatorFramework.asCuratorFramework(),
                                componentId,
                                ZooKeeperLeaderRetrievalDriver
                                    .LeaderInformationClearancePolicy
                                    .ON_LOST_CONNECTION));

                    final TestingListener leaderRetrievalListener = new TestingListener();
                    defaultLeaderRetrievalService.start(leaderRetrievalListener);

                    final LeaderInformation leaderInformation =
                        LeaderInformation.known(UUID.randomUUID(), "foobar");
                    leaderElectionDriver.publishLeaderInformation(
                        componentId, leaderInformation);

                    // The retrieval side must observe exactly what was published.
                    leaderRetrievalListener.waitForNewLeader();
                    assertThat(leaderRetrievalListener.getLeader())
                        .isEqualTo(leaderInformation);
                });
        }
    };
}
/**
 * Creates a JPEG image XObject from the given image, delegating to the three-argument
 * overload with the default JPEG quality of 0.75.
 *
 * @param document the document the image belongs to
 * @param image    the source image
 * @return the created image XObject
 * @throws IOException if encoding the image fails
 */
public static PDImageXObject createFromImage(PDDocument document, BufferedImage image)
        throws IOException {
    final float defaultQuality = 0.75f;
    return createFromImage(document, image, defaultQuality);
}
// Creating a JPEG XObject from a TYPE_INT_ARGB image must split it into an RGB image plus a
// grayscale soft mask carrying the alpha channel.
@Test
void testCreateFromImageINT_ARGB() throws IOException {
    PDDocument document = new PDDocument();
    BufferedImage image = ImageIO.read(JPEGFactoryTest.class.getResourceAsStream("jpeg.jpg"));

    // create an ARGB image
    int width = image.getWidth();
    int height = image.getHeight();
    BufferedImage argbImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
    Graphics ag = argbImage.getGraphics();
    ag.drawImage(image, 0, 0, null);
    ag.dispose();

    // Paint a banded alpha gradient so the soft mask has more than a trivial number of levels.
    for (int x = 0; x < argbImage.getWidth(); ++x) {
        for (int y = 0; y < argbImage.getHeight(); ++y) {
            argbImage.setRGB(x, y, (argbImage.getRGB(x, y) & 0xFFFFFF) | ((y / 10 * 10) << 24));
        }
    }

    PDImageXObject ximage = JPEGFactory.createFromImage(document, argbImage);
    validate(ximage, 8, width, height, "jpg", PDDeviceRGB.INSTANCE.getName());
    assertNotNull(ximage.getSoftMask());
    validate(ximage.getSoftMask(), 8, width, height, "jpg", PDDeviceGray.INSTANCE.getName());
    // The banded gradient must survive into the mask: expect at least one level per 10-px band.
    assertTrue(colorCount(ximage.getSoftMask().getImage()) > image.getHeight() / 10);

    doWritePDF(document, ximage, TESTRESULTSDIR, "jpeg-intargb.pdf");
}
public static Map<String, Object> appendDeserializerToConfig(Map<String, Object> configs, Deserializer<?> keyDeserializer, Deserializer<?> valueDeserializer) { // validate deserializer configuration, if the passed deserializer instance is null, the user must explicitly set a valid deserializer configuration value Map<String, Object> newConfigs = new HashMap<>(configs); if (keyDeserializer != null) newConfigs.put(KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializer.getClass()); else if (newConfigs.get(KEY_DESERIALIZER_CLASS_CONFIG) == null) throw new ConfigException(KEY_DESERIALIZER_CLASS_CONFIG, null, "must be non-null."); if (valueDeserializer != null) newConfigs.put(VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializer.getClass()); else if (newConfigs.get(VALUE_DESERIALIZER_CLASS_CONFIG) == null) throw new ConfigException(VALUE_DESERIALIZER_CLASS_CONFIG, null, "must be non-null."); return newConfigs; }
// Covers all four combinations of explicit deserializer instances vs config entries:
// the merged config must always end up with both deserializer classes set.
@Test
public void testAppendDeserializerToConfig() {
    // Both come from the config map.
    Map<String, Object> configs = new HashMap<>();
    configs.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializerClass);
    configs.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializerClass);
    Map<String, Object> newConfigs = ConsumerConfig.appendDeserializerToConfig(configs, null, null);
    assertEquals(newConfigs.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG), keyDeserializerClass);
    assertEquals(newConfigs.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG), valueDeserializerClass);

    // Key from the instance, value from the config map.
    configs.clear();
    configs.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializerClass);
    newConfigs = ConsumerConfig.appendDeserializerToConfig(configs, keyDeserializer, null);
    assertEquals(newConfigs.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG), keyDeserializerClass);
    assertEquals(newConfigs.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG), valueDeserializerClass);

    // Key from the config map, value from the instance.
    configs.clear();
    configs.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializerClass);
    newConfigs = ConsumerConfig.appendDeserializerToConfig(configs, null, valueDeserializer);
    assertEquals(newConfigs.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG), keyDeserializerClass);
    assertEquals(newConfigs.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG), valueDeserializerClass);

    // Both from the instances.
    configs.clear();
    newConfigs = ConsumerConfig.appendDeserializerToConfig(configs, keyDeserializer, valueDeserializer);
    assertEquals(newConfigs.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG), keyDeserializerClass);
    assertEquals(newConfigs.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG), valueDeserializerClass);
}
public Set<Route> getRoutes() { ImmutableSet.Builder<Route> routes = ImmutableSet.builder(); array.forEach(route -> { try { IpPrefix prefix = IpPrefix.valueOf(route.path(PREFIX).asText()); IpAddress nextHop = IpAddress.valueOf(route.path(NEXTHOP).asText()); routes.add(new Route(Route.Source.STATIC, prefix, nextHop)); } catch (IllegalArgumentException e) { // Ignores routes that cannot be parsed correctly } }); return routes.build(); }
// getRoutes() must return exactly the configured routes and nothing else.
@Test
public void getRoutes() throws Exception {
    assertThat(config.getRoutes(), is(EXPECTED_ROUTES));
    assertThat(config.getRoutes(), not(UNEXPECTED_ROUTES));
}
/**
 * Serializes the subscription at the highest version of the embedded consumer
 * protocol subscription schema supported by this client.
 *
 * @param subscription the subscription to serialize
 * @return the serialized subscription bytes
 */
public static ByteBuffer serializeSubscription(final Subscription subscription) {
    return serializeSubscription(subscription, ConsumerProtocolSubscription.HIGHEST_SUPPORTED_VERSION);
}
// Serialization must be deterministic w.r.t. owned-partition order: the same subscription
// with its partitions listed in a different order must yield identical bytes.
@Test
public void serializeSubscriptionShouldOrderOwnedPartitions() {
    assertEquals(
        ConsumerProtocol.serializeSubscription(
            new Subscription(Arrays.asList("foo", "bar"), null, Arrays.asList(tp1, tp2))
        ),
        ConsumerProtocol.serializeSubscription(
            new Subscription(Arrays.asList("foo", "bar"), null, Arrays.asList(tp2, tp1))
        )
    );
}
/**
 * Renders the mandatory LLDP TLVs (chassis id, port id, ttl) as raw byte arrays
 * plus the Ethernet type.
 */
@Override
public String toString() {
    return toStringHelper(getClass())
        .add("chassisId", Arrays.toString(chassisId.getValue()))
        .add("portId", Arrays.toString(portId.getValue()))
        .add("ttl", Arrays.toString(ttl.getValue()))
        .add("ethType", Short.toString(ethType))
        .toString();
    // TODO: need to handle optionalTLVList
}
// Placeholder: only exercises toString() on a deserialized packet without asserting content.
@Test
public void testToStringLLDP() throws Exception {
    LLDP lldp = deserializer.deserialize(bytes, 0, bytes.length);
    String str = lldp.toString();

    // TODO: need to add LLDP toString unit test
}
/**
 * Decodes a Bigtable cell value into the Java object matching the given Beam schema type.
 * Fixed-width numeric types are validated to contain exactly the expected number of bytes.
 *
 * @param cell the Bigtable cell whose value is decoded
 * @param type the Beam field type to decode into
 * @return the decoded value (Boolean, Byte, Short, Integer, Long, Float, Double,
 *         DateTime, String, or byte[])
 * @throws IllegalArgumentException if the value length is wrong or the type is unsupported
 * @throws IllegalStateException    for logical types, which are not supported here
 */
Object getCellValue(Cell cell, Schema.FieldType type) {
    ByteString cellValue = cell.getValue();
    int valueSize = cellValue.size();
    switch (type.getTypeName()) {
        case BOOLEAN:
            checkArgument(valueSize == 1, message("Boolean", 1));
            // Any non-zero byte is treated as true.
            return cellValue.toByteArray()[0] != 0;
        case BYTE:
            checkArgument(valueSize == 1, message("Byte", 1));
            return cellValue.toByteArray()[0];
        case INT16:
            checkArgument(valueSize == 2, message("Int16", 2));
            return Shorts.fromByteArray(cellValue.toByteArray());
        case INT32:
            checkArgument(valueSize == 4, message("Int32", 4));
            return Ints.fromByteArray(cellValue.toByteArray());
        case INT64:
            checkArgument(valueSize == 8, message("Int64", 8));
            return Longs.fromByteArray(cellValue.toByteArray());
        case FLOAT:
            checkArgument(valueSize == 4, message("Float", 4));
            // Stored as raw IEEE-754 bits.
            return Float.intBitsToFloat(Ints.fromByteArray(cellValue.toByteArray()));
        case DOUBLE:
            checkArgument(valueSize == 8, message("Double", 8));
            return Double.longBitsToDouble(Longs.fromByteArray(cellValue.toByteArray()));
        case DATETIME:
            // Datetimes are stored as text, parsed by Joda DateTime.parse.
            return DateTime.parse(cellValue.toStringUtf8());
        case STRING:
            return cellValue.toStringUtf8();
        case BYTES:
            return cellValue.toByteArray();
        case LOGICAL_TYPE:
            String identifier = checkArgumentNotNull(type.getLogicalType()).getIdentifier();
            throw new IllegalStateException("Unsupported logical type: " + identifier);
        default:
            throw new IllegalArgumentException(
                String.format("Unsupported cell value type '%s'.", type.getTypeName()));
    }
}
// A 6-byte value must be rejected for INT16, which requires exactly 2 bytes.
@Test
public void shouldFailParseInt16TypeTooLong() {
    byte[] value = new byte[6];
    IllegalArgumentException exception =
        assertThrows(IllegalArgumentException.class, () -> PARSER.getCellValue(cell(value), INT16));
    checkMessage(exception.getMessage(), "Int16 has to be 2-bytes long bytearray");
}
/**
 * Processes the annotations on the given resource class into a {@link ResourceModel},
 * treating it as a root resource (no parent).
 *
 * @param resourceClass the annotated resource class
 * @return the resulting resource model
 */
public static ResourceModel processResource(final Class<?> resourceClass) {
    // Root resources have no parent resource model.
    final ResourceModel model = processResource(resourceClass, null);
    return model;
}
// An @Action whose returnTyperef cannot be instantiated must be rejected with a
// ResourceConfigException while the resource annotations are processed.
@Test(expectedExceptions = ResourceConfigException.class)
public void failsOnNonInstantiableActionReturnTypeRef() {
    @RestLiCollection(name = "invalidActionReturnType")
    class LocalClass extends CollectionResourceTemplate<Long, EmptyRecord> {
        @Action(name = "nonInstantiableTypeRef", returnTyperef = BrokenTypeRef.class)
        public BrokenTypeRef nonInstantiableTypeRef(@ActionParam(value = "someId") String someId) {
            return null;
        }
    }

    RestLiAnnotationReader.processResource(LocalClass.class);
    // Reached only if processing did not throw as expected.
    Assert.fail("#getActionTyperefDataSchema should fail throwing a ResourceConfigException");
}
/**
 * Starts the monitored process. Guards against double start via the lifecycle state
 * machine; on any startup failure the process is hard-stopped rather than left
 * half-initialized, and the failure is logged instead of propagated.
 *
 * @param mp the process to launch and monitor
 * @throws IllegalStateException if the entry point was already started
 */
public void launch(Monitored mp) {
    if (!lifecycle.tryToMoveTo(Lifecycle.State.STARTING)) {
        throw new IllegalStateException("Already started");
    }
    monitored = mp;

    Logger logger = LoggerFactory.getLogger(getClass());
    try {
        launch(logger);
    } catch (Exception e) {
        logger.warn("Fail to start {}", processId.getHumanReadableName(), e);
        hardStop();
    }
}
// A Monitored whose start() throws must cause the entry point to terminate cleanly
// (hard stop) instead of propagating the startup exception to the caller.
@Test
public void terminate_if_startup_error() throws IOException {
    Props props = createProps();
    final ProcessEntryPoint entryPoint = new ProcessEntryPoint(props, exit, commands, runtime);
    final Monitored process = mock(Monitored.class);
    doThrow(IllegalStateException.class).when(process).start();

    // Must not throw despite the failing start().
    entryPoint.launch(process);
}
/**
 * Creates a {@link KsqlTarget} for the given server with no additional request properties.
 *
 * @param server the server URI to target
 * @return the target bound to the server
 */
public KsqlTarget target(final URI server) {
    return target(server, Collections.emptyMap());
}
// A query request must be POSTed to /query with a JSON Accept header, carry the SQL and
// sequence number in the KsqlRequest body, and deserialize the streamed rows it gets back.
@Test
public void shouldPostQueryRequest() {
    // Given:
    List<StreamedRow> expectedResponse = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        GenericRow row = GenericRow.genericRow("foo", 123, true);
        StreamedRow sr = StreamedRow.pushRow(row);
        expectedResponse.add(sr);
    }
    server.setResponseBuffer(createResponseBuffer(expectedResponse));
    String sql = "some sql";

    // When:
    KsqlTarget target = ksqlClient.target(serverUri);
    RestResponse<List<StreamedRow>> response = target.postQueryRequest(
        sql, Collections.emptyMap(), Optional.of(321L));

    // Then:
    assertThat(server.getHttpMethod(), is(HttpMethod.POST));
    assertThat(server.getPath(), is("/query"));
    assertThat(server.getHeaders().get("Accept"), is("application/json"));
    assertThat(getKsqlRequest(), is(new KsqlRequest(sql, properties, Collections.emptyMap(), 321L)));
    assertThat(response.getResponse(), is(expectedResponse));
}
/**
 * Returns the string with its first character lower-cased (using {@link Locale#ROOT}
 * so the result is locale-independent).
 *
 * Fix: the previous implementation called {@code substring(0, 1)} unconditionally and
 * threw {@link StringIndexOutOfBoundsException} for the empty string; null and empty
 * inputs are now returned unchanged.
 *
 * @param string the input, may be null or empty
 * @return the decapitalized string, or the input itself when null or empty
 */
public static String decapitalize(String string) {
    if ( string == null || string.isEmpty() ) {
        return string;
    }
    return string.substring( 0, 1 ).toLowerCase( Locale.ROOT ) + string.substring( 1 );
}
// Covers null, single character, already-lowercase, Capitalized, and camelCase inputs.
// NOTE(review): the empty-string case is not covered here.
@Test
public void testDecapitalize() {
    assertThat( Strings.decapitalize( null ) ).isNull();
    assertThat( Strings.decapitalize( "c" ) ).isEqualTo( "c" );
    assertThat( Strings.decapitalize( "capitalize" ) ).isEqualTo( "capitalize" );
    assertThat( Strings.decapitalize( "AlreadyCapitalized" ) ).isEqualTo( "alreadyCapitalized" );
    assertThat( Strings.decapitalize( "notCapitalized" ) ).isEqualTo( "notCapitalized" );
}
/**
 * Builds the UI URL for the given execution:
 * {@code /ui/[{tenantId}/]executions/{namespace}/{flowId}/{executionId}}.
 * The tenant segment is included only when the execution carries a tenant id.
 *
 * @param execution the execution to link to
 * @return the absolute URI produced by the base URL builder
 */
public URI executionUrl(Execution execution) {
    final StringBuilder path = new StringBuilder("/ui/");
    if (execution.getTenantId() != null) {
        path.append(execution.getTenantId()).append("/");
    }
    path.append("executions/")
        .append(execution.getNamespace())
        .append("/")
        .append(execution.getFlowId())
        .append("/")
        .append(execution.getId());
    return this.build(path.toString());
}
// The generated execution URL must contain the configured base host/path and the
// namespace/flowId/executionId segments.
@Test
void executionUrl() {
    Flow flow = TestsUtils.mockFlow();
    Execution execution = TestsUtils.mockExecution(flow, ImmutableMap.of());

    assertThat(uriProvider.executionUrl(execution).toString(),
        containsString("mysuperhost.com/subpath/ui"));
    assertThat(uriProvider.executionUrl(execution).toString(),
        containsString(flow.getNamespace() + "/" + flow.getId() + "/" + execution.getId()));
}
/**
 * Handles the {@code dfsadmin -listOpenFiles} command: parses the optional
 * {@code -blockingDecommission} flag and {@code -path <path>} filter, then lists the
 * matching open files from the namenode and prints them.
 *
 * @param argv remaining command-line arguments (may be null)
 * @return 0 on success
 * @throws IOException if the listing RPC fails (an error line is printed first)
 */
public int listOpenFiles(String[] argv) throws IOException {
    String path = null;
    List<OpenFilesType> types = new ArrayList<>();
    if (argv != null) {
        List<String> args = new ArrayList<>(Arrays.asList(argv));
        if (StringUtils.popOption("-blockingDecommission", args)) {
            types.add(OpenFilesType.BLOCKING_DECOMMISSION);
        }
        path = StringUtils.popOptionWithArgument("-path", args);
    }
    // Default to listing all open files when no type flag was given.
    if (types.isEmpty()) {
        types.add(OpenFilesType.ALL_OPEN_FILES);
    }

    // Missing or blank path falls back to the default "match everything" filter.
    if (path != null) {
        path = path.trim();
        if (path.length() == 0) {
            path = OpenFilesIterator.FILTER_PATH_DEFAULT;
        }
    } else {
        path = OpenFilesIterator.FILTER_PATH_DEFAULT;
    }

    EnumSet<OpenFilesType> openFilesTypes = EnumSet.copyOf(types);

    DistributedFileSystem dfs = getDFS();
    RemoteIterator<OpenFileEntry> openFilesRemoteIterator;
    try {
        openFilesRemoteIterator = dfs.listOpenFiles(openFilesTypes, path);
        printOpenFiles(openFilesRemoteIterator);
    } catch (IOException ioe) {
        System.err.println("List open files failed.");
        throw ioe;
    }
    return 0;
}
// End-to-end test of dfsadmin -listOpenFiles: verifies listing while files are closed one by
// one, then exercises the -path option (valid dir, missing argument, empty path, invalid path).
@Test(timeout = 300000L)
public void testListOpenFiles() throws Exception {
    redirectStream();

    final Configuration dfsConf = new HdfsConfiguration();
    dfsConf.setInt( DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 500);
    dfsConf.setLong(DFS_HEARTBEAT_INTERVAL_KEY, 1);
    // Small page size forces the listing to iterate over multiple batches.
    dfsConf.setLong(DFSConfigKeys.DFS_NAMENODE_LIST_OPENFILES_NUM_RESPONSES, 5);
    final Path baseDir = new Path(
        PathUtils.getTestDir(getClass()).getAbsolutePath(),
        GenericTestUtils.getMethodName());
    dfsConf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.toString());

    final int numDataNodes = 3;
    final int numClosedFiles = 25;
    final int numOpenFiles = 15;

    try (MiniDFSCluster miniCluster = new MiniDFSCluster
        .Builder(dfsConf)
        .numDataNodes(numDataNodes).build()) {
        final short replFactor = 1;
        final long fileLength = 512L;
        final FileSystem fs = miniCluster.getFileSystem();
        final Path parentDir = new Path("/tmp/files/");
        fs.mkdirs(parentDir);

        HashSet<Path> closedFileSet = new HashSet<>();
        for (int i = 0; i < numClosedFiles; i++) {
            Path file = new Path(parentDir, "closed-file-" + i);
            DFSTestUtil.createFile(fs, file, fileLength, replFactor, 12345L);
            closedFileSet.add(file);
        }

        // Files re-opened for append count as "open".
        HashMap<Path, FSDataOutputStream> openFilesMap = new HashMap<>();
        for (int i = 0; i < numOpenFiles; i++) {
            Path file = new Path(parentDir, "open-file-" + i);
            DFSTestUtil.createFile(fs, file, fileLength, replFactor, 12345L);
            FSDataOutputStream outputStream = fs.append(file);
            openFilesMap.put(file, outputStream);
        }

        final DFSAdmin dfsAdmin = new DFSAdmin(dfsConf);
        assertEquals(0, ToolRunner.run(dfsAdmin, new String[]{"-listOpenFiles"}));
        verifyOpenFilesListing(closedFileSet, openFilesMap);

        // Close the open files one at a time; the listing must track each transition.
        for (int count = 0; count < numOpenFiles; count++) {
            closedFileSet.addAll(DFSTestUtil.closeOpenFiles(openFilesMap, 1));
            resetStream();
            assertEquals(0, ToolRunner.run(dfsAdmin, new String[]{"-listOpenFiles"}));
            verifyOpenFilesListing(closedFileSet, openFilesMap);
        }

        // test -listOpenFiles command with option <path>
        openFilesMap.clear();
        Path file;
        HashMap<Path, FSDataOutputStream> openFiles1 = new HashMap<>();
        HashMap<Path, FSDataOutputStream> openFiles2 = new HashMap<>();
        // Split new open files between /tmp/files/a (even) and /tmp/files/b (odd).
        for (int i = 0; i < numOpenFiles; i++) {
            if (i % 2 == 0) {
                file = new Path(new Path("/tmp/files/a"), "open-file-" + i);
            } else {
                file = new Path(new Path("/tmp/files/b"), "open-file-" + i);
            }
            DFSTestUtil.createFile(fs, file, fileLength, replFactor, 12345L);
            FSDataOutputStream outputStream = fs.append(file);
            if (i % 2 == 0) {
                openFiles1.put(file, outputStream);
            } else {
                openFiles2.put(file, outputStream);
            }
            openFilesMap.put(file, outputStream);
        }

        resetStream();
        // list all open files
        assertEquals(0, ToolRunner.run(dfsAdmin, new String[] {"-listOpenFiles"}));
        verifyOpenFilesListing(null, openFilesMap);

        resetStream();
        // list open files under directory path /tmp/files/a
        assertEquals(0, ToolRunner.run(dfsAdmin,
            new String[] {"-listOpenFiles", "-path", "/tmp/files/a"}));
        verifyOpenFilesListing(null, openFiles1);

        resetStream();
        // list open files without input path
        assertEquals(-1, ToolRunner.run(dfsAdmin,
            new String[] {"-listOpenFiles", "-path"}));
        // verify the error
        String outStr = scanIntoString(err);
        assertTrue(outStr.contains("listOpenFiles: option" + " -path requires 1 argument"));

        resetStream();
        // list open files with empty path
        assertEquals(0, ToolRunner.run(dfsAdmin,
            new String[] {"-listOpenFiles", "-path", ""}));
        // all the open files will be listed
        verifyOpenFilesListing(null, openFilesMap);

        resetStream();
        // list invalid path file
        assertEquals(0, ToolRunner.run(dfsAdmin,
            new String[] {"-listOpenFiles", "-path", "/invalid_path"}));
        outStr = scanIntoString(out);
        // No open file may be reported under a path that does not exist.
        for (Path openFilePath : openFilesMap.keySet()) {
            assertThat(outStr, not(containsString(openFilePath.toString())));
        }

        DFSTestUtil.closeOpenFiles(openFilesMap, openFilesMap.size());
    }
}
/**
 * Checks whether the given host string cannot serve as a usable local host address:
 * blank, the wildcard "any" address, or a localhost/loopback alias.
 *
 * @param host the host string to check, may be null
 * @return true if the host is unusable as a local address
 */
static boolean isInvalidLocalHost(String host) {
    if (StringUtils.isBlank(host)) {
        return true;
    }
    return isAnyHost(host) || isLocalHost(host);
}
// Wildcard, loopback, blank, whitespace-only, and null hosts must all be rejected.
@Test
public void isInvalidLocalHost() throws Exception {
    Assert.assertTrue(NetUtils.isInvalidLocalHost("0.0.0.0"));
    Assert.assertTrue(NetUtils.isInvalidLocalHost("127.0.0.1"));
    Assert.assertTrue(NetUtils.isInvalidLocalHost(""));
    Assert.assertTrue(NetUtils.isInvalidLocalHost(" "));
    Assert.assertTrue(NetUtils.isInvalidLocalHost(null));
}
/**
 * Constant-folds {@code date_format(date, fmt)} for DATE and DATETIME inputs.
 *
 * An empty format string yields SQL NULL. Formats not present in
 * SUPPORT_JAVA_STYLE_DATETIME_FORMATTER are treated as MySQL/unix-style specifiers
 * (%Y, %m, ...) via DateUtils; the whitelisted Java-style patterns go through
 * {@code DateTimeFormatter.ofPattern} directly.
 *
 * @param date       the date/datetime constant to format
 * @param fmtLiteral the format-string constant
 * @return a VARCHAR constant with the formatted value, or NULL for an empty format
 */
@ConstantFunction.List(list = {
    @ConstantFunction(name = "date_format", argTypes = {DATETIME, VARCHAR}, returnType = VARCHAR, isMonotonic = true),
    @ConstantFunction(name = "date_format", argTypes = {DATE, VARCHAR}, returnType = VARCHAR, isMonotonic = true)
})
public static ConstantOperator dateFormat(ConstantOperator date, ConstantOperator fmtLiteral) {
    String format = fmtLiteral.getVarchar();
    if (format.isEmpty()) {
        return ConstantOperator.createNull(Type.VARCHAR);
    }
    // unix style
    if (!SUPPORT_JAVA_STYLE_DATETIME_FORMATTER.contains(format.trim())) {
        DateTimeFormatter builder = DateUtils.unixDatetimeFormatter(fmtLiteral.getVarchar());
        return ConstantOperator.createVarchar(builder.format(date.getDatetime()));
    } else {
        String result = date.getDatetime().format(DateTimeFormatter.ofPattern(fmtLiteral.getVarchar()));
        return ConstantOperator.createVarchar(result);
    }
}
// Exercises dateFormat() across the supported MySQL-style specifiers
// (%c/%d/%e/%H/%h/%I/%i/%j/%k/%l/%m/%S/%s/%T/%v/%Y/%y/%%), literal
// pass-through, Java-style patterns, unsupported specifiers (expected to
// throw IllegalArgumentException), and the empty/blank format edge cases.
@Test public void dateFormat() { Locale.setDefault(Locale.ENGLISH); ConstantOperator testDate = ConstantOperator.createDatetime(LocalDateTime.of(2001, 1, 9, 13, 4, 5)); assertEquals("1", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%c")).getVarchar()); assertEquals("09", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%d")).getVarchar()); assertEquals("9", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%e")).getVarchar()); assertEquals("13", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%H")).getVarchar()); assertEquals("01", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%h")).getVarchar()); assertEquals("01", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%I")).getVarchar()); assertEquals("04", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%i")).getVarchar()); assertEquals("009", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%j")).getVarchar()); assertEquals("13", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%k")).getVarchar()); assertEquals("1", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%l")).getVarchar()); assertEquals("01", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%m")).getVarchar()); assertEquals("05", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%S")).getVarchar()); assertEquals("05", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%s")).getVarchar()); assertEquals("13:04:05", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%T")).getVarchar()); assertEquals("02", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%v")).getVarchar()); assertEquals("2001", ScalarOperatorFunctions.dateFormat(testDate,
// Year, two-digit year, escaped percent, and literal formats.
ConstantOperator.createVarchar("%Y")).getVarchar()); assertEquals("01", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%y")).getVarchar()); assertEquals("%", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%%")).getVarchar()); assertEquals("foo", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("foo")).getVarchar()); assertEquals("g", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%g")).getVarchar()); assertEquals("4", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%4")).getVarchar()); assertEquals("02", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%v")).getVarchar()); assertEquals("yyyy", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("yyyy")).getVarchar()); assertEquals("20010109", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("yyyyMMdd")).getVarchar()); assertEquals("yyyyMMdd HH:mm:ss", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("yyyyMMdd HH:mm:ss")) .getVarchar()); assertEquals("HH:mm:ss", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("HH:mm:ss")).getVarchar()); assertEquals("2001-01-09", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("yyyy-MM-dd")) .getVarchar()); assertEquals("2001-01-09 13:04:05", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("yyyy-MM-dd HH:mm:ss")) .getVarchar()); assertEquals("2001-01-09", ScalarOperatorFunctions.dateFormat(ConstantOperator.createDate(LocalDateTime.of(2001, 1, 9, 13, 4, 5)), ConstantOperator.createVarchar("%Y-%m-%d")) .getVarchar()); assertEquals("123000", ScalarOperatorFunctions .dateFormat(ConstantOperator.createDate(LocalDateTime.of(2022, 3, 13, 0, 0, 0, 123000000)), ConstantOperator.createVarchar("%f")).getVarchar()); assertEquals("asdfafdfsçv",
// ISO-week (%v) around a year boundary: 2024-12-31 falls in week 01 of the
// following ISO year, so "53" must NOT be returned.
ScalarOperatorFunctions.dateFormat(ConstantOperator.createDate(LocalDateTime.of(2020, 2, 21, 13, 4, 5)), ConstantOperator.createVarchar("asdfafdfsçv")).getVarchar()); Assert.assertNotEquals("53", ScalarOperatorFunctions.dateFormat(ConstantOperator.createDatetime(LocalDateTime.of(2024, 12, 31, 22, 0, 0)), ConstantOperator.createVarchar("%v")).getVarchar()); assertEquals("01", ScalarOperatorFunctions.dateFormat(ConstantOperator.createDatetime(LocalDateTime.of(2024, 12, 31, 22, 0, 0)), ConstantOperator.createVarchar("%v")).getVarchar()); Assert.assertThrows("%a not supported in date format string", IllegalArgumentException.class, () -> ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%a")).getVarchar()); Assert.assertThrows("%b not supported in date format string", IllegalArgumentException.class, () -> ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%b")).getVarchar()); Assert.assertThrows("%M not supported in date format string", IllegalArgumentException.class, () -> ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%M")).getVarchar()); Assert.assertThrows("%W not supported in date format string", IllegalArgumentException.class, () -> ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%W")).getVarchar()); Assert.assertThrows("%x not supported in date format string", IllegalArgumentException.class, () -> ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%x")).getVarchar()); Assert.assertThrows("%w not supported in date format string", IllegalArgumentException.class, () -> ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%w")).getVarchar()); Assert.assertThrows("%p not supported in date format string", IllegalArgumentException.class, () -> ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%p")).getVarchar()); Assert.assertThrows("%r not supported in date format string",
// Remaining unsupported specifiers, then the empty (-> NULL) and blank
// (-> literal " ") format edge cases.
IllegalArgumentException.class, () -> ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("%r")).getVarchar()); Assert.assertThrows(IllegalArgumentException.class, () -> ScalarOperatorFunctions .dateFormat(ConstantOperator.createDate(LocalDateTime.of(2020, 2, 21, 13, 4, 5)), ConstantOperator.createVarchar("%U")).getVarchar()); Assert.assertThrows(IllegalArgumentException.class, () -> ScalarOperatorFunctions .dateFormat(ConstantOperator.createDate(LocalDateTime.of(2020, 2, 21, 13, 4, 5)), ConstantOperator.createVarchar("%X")).getVarchar()); assertTrue(ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar("")) .isNull()); assertEquals(" ", ScalarOperatorFunctions.dateFormat(testDate, ConstantOperator.createVarchar(" ")) .getVarchar()); }
/**
 * Deregisters an instance from the given service using the default cluster.
 *
 * @throws NacosException if deregistration fails
 */
@Override
public void deregisterInstance(String serviceName, String ip, int port) throws NacosException {
    final String cluster = Constants.DEFAULT_CLUSTER_NAME;
    deregisterInstance(serviceName, ip, port, cluster);
}
// Verifies that deregistering by service/group/cluster forwards to the naming
// proxy with an Instance carrying the given ip, port, default weight 1.0 and
// the requested cluster name.
@Test void testDeregisterInstance4() throws NacosException { //given String serviceName = "service1"; String groupName = "group1"; String clusterName = "cluster1"; String ip = "1.1.1.1"; int port = 10000; //when client.deregisterInstance(serviceName, groupName, ip, port, clusterName); //then verify(proxy, times(1)).deregisterService(eq(serviceName), eq(groupName), argThat(instance -> instance.getIp().equals(ip) && instance.getPort() == port && Math.abs(instance.getWeight() - 1.0) < 0.01f && instance.getClusterName() .equals(clusterName))); }
/**
 * Builds a {@code ParameterizedType} representing {@code Map<keyType, valueType>}.
 */
public static ParameterizedType mapOf(Type keyType, Type valueType) {
    final ParameterizedType mapType = parameterizedType(Map.class, keyType, valueType);
    return mapType;
}
// Verifies that mapOf() produces a ParameterizedType with Map as the raw type
// and the key/value classes as its actual type arguments, in order.
@Test public void createMapType() { ParameterizedType type = Types.mapOf(String.class, Person.class); assertThat(type.getRawType()).isEqualTo(Map.class); assertThat(type.getActualTypeArguments()).isEqualTo(new Type[] {String.class, Person.class}); }
// Validates user-supplied offsets for this connector: null offset values
// (tombstones) are always accepted so garbage can be cleaned up; null
// partitions are rejected; otherwise each partition must contain the expected
// source-cluster/topic/partition keys and the offset must be well-formed.
// Returns true because no offsets are actually committed by this connector —
// validation is the only required work.
@Override public boolean alterOffsets(Map<String, String> connectorConfig, Map<Map<String, ?>, Map<String, ?>> offsets) { for (Map.Entry<Map<String, ?>, Map<String, ?>> offsetEntry : offsets.entrySet()) { Map<String, ?> sourceOffset = offsetEntry.getValue(); if (sourceOffset == null) { // We allow tombstones for anything; if there's garbage in the offsets for the connector, we don't // want to prevent users from being able to clean it up using the REST API continue; } Map<String, ?> sourcePartition = offsetEntry.getKey(); if (sourcePartition == null) { throw new ConnectException("Source partitions may not be null"); } MirrorUtils.validateSourcePartitionString(sourcePartition, SOURCE_CLUSTER_KEY); MirrorUtils.validateSourcePartitionString(sourcePartition, TOPIC_KEY); MirrorUtils.validateSourcePartitionPartition(sourcePartition); MirrorUtils.validateSourceOffset(sourcePartition, sourceOffset, false); } // We never commit offsets with our source consumer, so no additional effort is required beyond just validating // the format of the user-supplied offsets return true; }
// Verifies alterOffsets() rejects partitions with unrecognized keys and
// rejects a null partition map, both via ConnectException.
@Test public void testAlterOffsetsIncorrectPartitionKey() { MirrorSourceConnector connector = new MirrorSourceConnector(); assertThrows(ConnectException.class, () -> connector.alterOffsets(null, Collections.singletonMap( Collections.singletonMap("unused_partition_key", "unused_partition_value"), MirrorUtils.wrapOffset(10) ))); // null partitions are invalid assertThrows(ConnectException.class, () -> connector.alterOffsets(null, Collections.singletonMap( null, MirrorUtils.wrapOffset(10) ))); }
/**
 * Returns the DELETE statement targeting {@code n} ids, building and caching
 * it on first use via the delete-all factory.
 */
String deleteAll(int n) {
    final String query = deleteAllQueries.computeIfAbsent(n, deleteAllFactory);
    return query;
}
// Verifies that identifiers containing double quotes are escaped (doubled)
// in the generated DELETE ... WHERE ... IN (?, ?) statement.
@Test public void testDeleteAllIsEscaped() { Queries queries = new Queries(mappingEscape, idColumnEscape, columnMetadataEscape); String result = queries.deleteAll(2); assertEquals("DELETE FROM \"my\"\"mapping\" WHERE \"i\"\"d\" IN (?, ?)", result); }
/**
 * Prints (or writes to {@code outputPath}) the schema of the given Parquet
 * file(s). Multiple targets are allowed only when printing to the console.
 *
 * Fix: the original precondition required exactly one target
 * ({@code targets.size() == 1}), which made the {@code targets.size() > 1}
 * multi-schema branch below unreachable dead code. Relaxed to "at least one".
 *
 * @return 0 on success
 * @throws IOException if reading the source or writing the output fails
 */
@Override
@SuppressWarnings("unchecked")
public int run() throws IOException {
    Preconditions.checkArgument(targets != null && !targets.isEmpty(), "Parquet file is required.");
    if (targets.size() > 1) {
        // Writing several schemas to one output file would be ambiguous.
        Preconditions.checkArgument(outputPath == null, "Cannot output multiple schemas to file %s", outputPath);
        for (String source : targets) {
            console.info("{}: {}", source, getSchema(source));
        }
    } else {
        String source = targets.get(0);
        if (outputPath != null) {
            // Without --overwrite, createWithNoOverwrite fails if the file exists.
            try (OutputStream out = overwrite ? create(outputPath) : createWithNoOverwrite(outputPath)) {
                out.write(getSchema(source).getBytes(StandardCharsets.UTF_8));
            }
        } else {
            console.info(getSchema(source));
        }
    }
    return 0;
}
// Verifies that writing the schema to an existing output file without the
// overwrite option fails with FileAlreadyExistsException.
@Test(expected = FileAlreadyExistsException.class) public void testSchemaCommandOverwriteExistentFileWithoutOverwriteOption() throws IOException { File inputFile = parquetFile(); File outputFile = new File(getTempFolder(), getClass().getSimpleName() + ".avsc"); FileUtils.touch(outputFile); SchemaCommand command = new SchemaCommand(createLogger()); command.targets = Arrays.asList(inputFile.getAbsolutePath()); command.outputPath = outputFile.getAbsolutePath(); command.setConf(new Configuration()); command.run(); }
/**
 * Returns the configured left (opening) escape symbol.
 */
public char getLeftSymbol() {
    return this.leftSymbol;
}
// Verifies the left-symbol accessor returns the value passed at construction.
@Test public void testGetLeftSymbol() { char expectedLeftSymbol = '"'; EscapeSymbol escapeSymbol = new EscapeSymbol(expectedLeftSymbol, '"'); assertEquals(expectedLeftSymbol, escapeSymbol.getLeftSymbol(), "The left symbol should be '" + expectedLeftSymbol + "'"); }
// Builds a fully-wired FEEL 1.1 ANTLR parser for the given source: sets up
// lexer/token stream, registers additional functions into the built-in scope,
// installs the FEEL error handler/listener (replacing the console listener),
// pre-defines the input variables, and, when provided, attaches the type
// registry. Statement order matters: the helper must be set before variables
// are defined.
public static FEEL_1_1Parser parse(FEELEventListenersManager eventsManager, String source, Map<String, Type> inputVariableTypes, Map<String, Object> inputVariables, Collection<FEELFunction> additionalFunctions, List<FEELProfile> profiles, FEELTypeRegistry typeRegistry) { CharStream input = CharStreams.fromString(source); FEEL_1_1Lexer lexer = new FEEL_1_1Lexer( input ); CommonTokenStream tokens = new CommonTokenStream( lexer ); FEEL_1_1Parser parser = new FEEL_1_1Parser( tokens ); ParserHelper parserHelper = new ParserHelper(eventsManager); additionalFunctions.forEach(f -> parserHelper.getSymbolTable().getBuiltInScope().define(f.getSymbol())); parser.setHelper(parserHelper); parser.setErrorHandler( new FEELErrorHandler() ); parser.removeErrorListeners(); // removes the error listener that prints to the console parser.addErrorListener( new FEELParserErrorListener( eventsManager ) ); // pre-loads the parser with symbols defineVariables( inputVariableTypes, inputVariables, parser ); if (typeRegistry != null) { parserHelper.setTypeRegistry(typeRegistry); } return parser; }
// Verifies that a decimal literal parses to a NumberNode with NUMBER result
// type and a source location matching the input expression.
@Test void floatLiteral() { String inputExpression = "10.5"; BaseNode number = parse( inputExpression ); assertThat( number).isInstanceOf(NumberNode.class); assertThat( number.getResultType()).isEqualTo(BuiltInType.NUMBER); assertLocation( inputExpression, number ); }
/**
 * Wraps the given expression in an expression-statement template node.
 */
public static UExpressionStatement create(UExpression expression) {
    final UExpressionStatement statement = new AutoValue_UExpressionStatement(expression);
    return statement;
}
// Verifies that an expression-statement node (wrapping 5 + 2) survives a
// serialize/deserialize round trip intact.
@Test public void serialization() { SerializableTester.reserializeAndAssert( UExpressionStatement.create( UBinary.create(Kind.PLUS, ULiteral.intLit(5), ULiteral.intLit(2)))); }
/**
 * Forwards initialization to the wrapped condition so it sees the complete
 * set of objects under test.
 */
@Override
public void init(Collection<T> allObjectsToTest) {
    this.condition.init(allObjectsToTest);
}
// Verifies that never() delegates init() to the wrapped condition, passing
// the objects-under-test collection through unchanged.
@Test public void inits_inverted_condition() { ConditionWithInitAndFinish original = someCondition("anything"); ArchCondition<String> never = never(original); never.init(Collections.singleton("something")); assertThat(original.allObjectsToTest).containsExactly("something"); }
protected String buildPrefix(String pluginName, String apiVersion) { GroupVersion groupVersion = GroupVersion.parseAPIVersion(apiVersion); if (StringUtils.hasText(groupVersion.group())) { // apis/{group}/{version} return String.format("/apis/%s/%s", groupVersion.group(), groupVersion.version()); } // apis/api.plugin.halo.run/{version}/plugins/{pluginName} return String.format("/apis/api.plugin.halo.run/%s/plugins/%s", groupVersion.version(), pluginName); }
// Verifies both prefix forms: a group-less version yields the plugin-scoped
// prefix, while a grouped version yields /apis/{group}/{version}.
@Test void buildPrefix() { String s = handlerMapping.buildPrefix("fakePlugin", "v1"); assertThat(s).isEqualTo("/apis/api.plugin.halo.run/v1/plugins/fakePlugin"); s = handlerMapping.buildPrefix("fakePlugin", "fake.halo.run/v1alpha1"); assertThat(s).isEqualTo("/apis/fake.halo.run/v1alpha1"); }
/**
 * Examines consume stats for the given group across all topics (no topic
 * filter — delegates with a null topic).
 */
@Override
public ConsumeStats examineConsumeStats(
    String consumerGroup) throws RemotingException, MQClientException, InterruptedException, MQBrokerException {
    final String topic = null;
    return examineConsumeStats(consumerGroup, topic);
}
// Currently disabled (@Ignore). Exercises examineConsumeStats() and then
// stubs the client API to drive the CONSUMER_NOT_ONLINE and
// BROADCAST_CONSUMPTION error paths, asserting the MQClientException codes.
// NOTE(review): the stubbing happens AFTER the first real call — confirm the
// intended ordering before re-enabling.
@Ignore @Test public void testExamineConsumeStats() throws InterruptedException, RemotingException, MQClientException, MQBrokerException { ConsumeStats consumeStats = defaultMQAdminExt.examineConsumeStats("default-consumer-group", "unit-test"); assertThat(consumeStats.getConsumeTps()).isGreaterThanOrEqualTo(1234); ConsumerConnection connection = new ConsumerConnection(); connection.setMessageModel(MessageModel.BROADCASTING); HashSet<Connection> connections = new HashSet<>(); connections.add(new Connection()); connection.setConnectionSet(connections); when(mQClientAPIImpl.getConsumeStats(anyString(), anyString(), anyString(), anyLong())) .thenReturn(new ConsumeStats()); when(mQClientAPIImpl.getConsumerConnectionList(anyString(), anyString(), anyLong())) .thenReturn(new ConsumerConnection()).thenReturn(connection); // CONSUMER_NOT_ONLINE try { defaultMQAdminExt.examineConsumeStats("default-consumer-group", "unit-test"); } catch (Exception e) { assertThat(e instanceof MQClientException).isTrue(); assertThat(((MQClientException) e).getResponseCode()).isEqualTo(ResponseCode.CONSUMER_NOT_ONLINE); } // BROADCAST_CONSUMPTION try { defaultMQAdminExt.examineConsumeStats("default-consumer-group", "unit-test"); } catch (Exception e) { assertThat(e instanceof MQClientException).isTrue(); assertThat(((MQClientException) e).getResponseCode()).isEqualTo(ResponseCode.BROADCAST_CONSUMPTION); } }
// Handles a broker-side update-or-create-topic request. Flow: validate the
// topic name, optionally reject system topics, build the TopicConfig from the
// request header, reject MIXED message type when disabled, short-circuit as
// idempotent success when the config is unchanged, otherwise persist and
// register the topic. The finally block always records execution-time metrics
// tagged with success/failure, even on the early-return paths.
private synchronized RemotingCommand updateAndCreateTopic(ChannelHandlerContext ctx, RemotingCommand request) throws RemotingCommandException { long startTime = System.currentTimeMillis(); final RemotingCommand response = RemotingCommand.createResponseCommand(null); final CreateTopicRequestHeader requestHeader = (CreateTopicRequestHeader) request.decodeCommandCustomHeader(CreateTopicRequestHeader.class); LOGGER.info("Broker receive request to update or create topic={}, caller address={}", requestHeader.getTopic(), RemotingHelper.parseChannelRemoteAddr(ctx.channel())); String topic = requestHeader.getTopic(); long executionTime; try { TopicValidator.ValidateTopicResult result = TopicValidator.validateTopic(topic); if (!result.isValid()) { response.setCode(ResponseCode.SYSTEM_ERROR); response.setRemark(result.getRemark()); return response; } if (brokerController.getBrokerConfig().isValidateSystemTopicWhenUpdateTopic()) { if (TopicValidator.isSystemTopic(topic)) { response.setCode(ResponseCode.SYSTEM_ERROR); response.setRemark("The topic[" + topic + "] is conflict with system topic."); return response; } } TopicConfig topicConfig = new TopicConfig(topic); topicConfig.setReadQueueNums(requestHeader.getReadQueueNums()); topicConfig.setWriteQueueNums(requestHeader.getWriteQueueNums()); topicConfig.setTopicFilterType(requestHeader.getTopicFilterTypeEnum()); topicConfig.setPerm(requestHeader.getPerm()); topicConfig.setTopicSysFlag(requestHeader.getTopicSysFlag() == null ?
// Null sys flag defaults to 0; remaining attributes come from the header.
0 : requestHeader.getTopicSysFlag()); topicConfig.setOrder(requestHeader.getOrder()); String attributesModification = requestHeader.getAttributes(); topicConfig.setAttributes(AttributeParser.parseToMap(attributesModification)); if (topicConfig.getTopicMessageType() == TopicMessageType.MIXED && !brokerController.getBrokerConfig().isEnableMixedMessageType()) { response.setCode(ResponseCode.SYSTEM_ERROR); response.setRemark("MIXED message type is not supported."); return response; } if (topicConfig.equals(this.brokerController.getTopicConfigManager().getTopicConfigTable().get(topic))) { LOGGER.info("Broker receive request to update or create topic={}, but topicConfig has no changes , so idempotent, caller address={}", requestHeader.getTopic(), RemotingHelper.parseChannelRemoteAddr(ctx.channel())); response.setCode(ResponseCode.SUCCESS); return response; } this.brokerController.getTopicConfigManager().updateTopicConfig(topicConfig); if (brokerController.getBrokerConfig().isEnableSingleTopicRegister()) { this.brokerController.registerSingleTopicAll(topicConfig); } else { this.brokerController.registerIncrementBrokerData(topicConfig, this.brokerController.getTopicConfigManager().getDataVersion()); } response.setCode(ResponseCode.SUCCESS); } catch (Exception e) { LOGGER.error("Update / create topic failed for [{}]", request, e); response.setCode(ResponseCode.SYSTEM_ERROR); response.setRemark(e.getMessage()); return response; } finally { executionTime = System.currentTimeMillis() - startTime; InvocationStatus status = response.getCode() == ResponseCode.SUCCESS ? InvocationStatus.SUCCESS : InvocationStatus.FAILURE; Attributes attributes = BrokerMetricsManager.newAttributesBuilder() .put(LABEL_INVOCATION_STATUS, status.getName()) .put(LABEL_IS_SYSTEM, TopicValidator.isSystemTopic(topic)) .build(); BrokerMetricsManager.topicCreateExecuteTime.record(executionTime, attributes); } LOGGER.info("executionTime of create topic:{} is {} ms" , topic, executionTime); return response; }
// Covers three paths of updateAndCreateTopic: system-topic conflict (error
// with explanatory remark), invalid (empty) topic name (error), and a normal
// topic creation (success).
@Test public void testUpdateAndCreateTopic() throws Exception { //test system topic for (String topic : systemTopicSet) { RemotingCommand request = buildCreateTopicRequest(topic); RemotingCommand response = adminBrokerProcessor.processRequest(handlerContext, request); assertThat(response.getCode()).isEqualTo(ResponseCode.SYSTEM_ERROR); assertThat(response.getRemark()).isEqualTo("The topic[" + topic + "] is conflict with system topic."); } //test validate error topic String topic = ""; RemotingCommand request = buildCreateTopicRequest(topic); RemotingCommand response = adminBrokerProcessor.processRequest(handlerContext, request); assertThat(response.getCode()).isEqualTo(ResponseCode.SYSTEM_ERROR); topic = "TEST_CREATE_TOPIC"; request = buildCreateTopicRequest(topic); response = adminBrokerProcessor.processRequest(handlerContext, request); assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS); }
/**
 * Returns the next capacity by doubling, enforcing a floor of MIN_CAPACITY.
 *
 * @param current current capacity; must be a positive power of two
 * @return the doubled capacity (at least MIN_CAPACITY)
 * @throws RuntimeException if doubling overflows the int range
 */
public static int nextCapacity(int current) {
    // Fix: Integer.bitCount avoids the implicit int->long widening that
    // Long.bitCount forced; the power-of-two check is otherwise identical.
    assert current > 0 && Integer.bitCount(current) == 1 : "Capacity must be a power of two.";
    if (current < MIN_CAPACITY / 2) {
        // Jump to half the minimum so the shift below lands exactly on MIN_CAPACITY.
        current = MIN_CAPACITY / 2;
    }
    current <<= 1;
    if (current < 0) {
        // A left shift past Integer.MAX_VALUE wraps negative.
        throw new RuntimeException("Maximum capacity exceeded.");
    }
    return current;
}
// Verifies that a capacity of 1 is bumped to 4 (half the minimum, doubled).
// NOTE(review): this calls a long-typed nextCapacity overload that is not the
// int version shown above — confirm the overload exists and mirrors it.
@Test public void testNextCapacity_withLong_shouldIncreaseToHalfOfMinCapacity() { long capacity = 1; long nextCapacity = nextCapacity(capacity); assertEquals(4, nextCapacity); }
/**
 * Creates or resumes the SAML session (and, when a federation is named, the
 * federation session) associated with the incoming authentication request.
 *
 * @throws SamlSessionException when session initialization fails
 * @throws SharedServiceClientException when a shared-service lookup fails
 */
public void initializeSession(AuthenticationRequest authenticationRequest, SAMLBindingContext bindingContext) throws SamlSessionException, SharedServiceClientException {
    final String httpSessionId = authenticationRequest.getRequest().getSession().getId();
    // The federation session, when applicable, is resolved before the SAML session.
    final boolean federated = authenticationRequest.getFederationName() != null;
    if (federated) {
        findOrInitializeFederationSession(authenticationRequest, httpSessionId);
    }
    findOrInitializeSamlSession(authenticationRequest, httpSessionId, bindingContext);
}
// Builds an AuthnRequest whose Scoping lists only an unsupported IdP, then
// verifies the resulting SAML session carries no IdP entries and is marked
// with the invalid validation status.
@Test public void noSupportedIDPTest() throws SamlSessionException, SharedServiceClientException { IDPList idpList = OpenSAMLUtils.buildSAMLObject(IDPList.class); IDPEntry idpEntry = OpenSAMLUtils.buildSAMLObject(IDPEntry.class); idpEntry.setProviderID("OtherIdP"); Scoping scoping = OpenSAMLUtils.buildSAMLObject(Scoping.class); scoping.setIDPList(idpList); authnRequest.setScoping(scoping); authnRequest.getScoping().getIDPList().getIDPEntrys().add(idpEntry); samlSessionService.initializeSession(authenticationRequest, bindingContext); assertNull(authenticationRequest.getSamlSession().getIdpEntries()); assertEquals(authenticationRequest.getSamlSession().getValidationStatus(), STATUS_INVALID.label); }
public ValidationResult validateMessagesAndAssignOffsets(PrimitiveRef.LongRef offsetCounter, MetricsRecorder metricsRecorder, BufferSupplier bufferSupplier) { if (sourceCompressionType == CompressionType.NONE && targetCompression.type() == CompressionType.NONE) { // check the magic value if (!records.hasMatchingMagic(toMagic)) return convertAndAssignOffsetsNonCompressed(offsetCounter, metricsRecorder); else // Do in-place validation, offset assignment and maybe set timestamp return assignOffsetsNonCompressed(offsetCounter, metricsRecorder); } else return validateMessagesAndAssignOffsetsCompressed(offsetCounter, metricsRecorder, bufferSupplier); }
// Verifies that records produced with gzip are recompressed when the topic's
// configured codec is lz4: the validated records must equal a batch built
// directly with the lz4 target compression.
// NOTE(review): the lz4 builder uses CompressionType.GZIP.minLevel() — this
// looks like a copy-paste slip for CompressionType.LZ4.minLevel(); confirm
// whether the numeric level coincidence is intentional.
@Test public void testDifferentCodecCausesRecompression() { List<byte[]> records = Arrays.asList( "somedata".getBytes(), "moredata".getBytes() ); // Records from the producer were created with gzip max level Compression gzipMax = Compression.gzip().level(CompressionType.GZIP.maxLevel()).build(); MemoryRecords recordsGzipMax = createRecords(records, RecordBatch.MAGIC_VALUE_V2, RecordBatch.NO_TIMESTAMP, gzipMax); // The topic is configured with lz4 min level Compression lz4Min = Compression.lz4().level(CompressionType.GZIP.minLevel()).build(); MemoryRecords recordsLz4Min = createRecords(records, RecordBatch.MAGIC_VALUE_V2, RecordBatch.NO_TIMESTAMP, lz4Min); LogValidator validator = new LogValidator(recordsGzipMax, topicPartition, time, gzipMax.type(), lz4Min, false, RecordBatch.MAGIC_VALUE_V2, TimestampType.CREATE_TIME, 5000L, 5000L, RecordBatch.NO_PARTITION_LEADER_EPOCH, AppendOrigin.CLIENT, MetadataVersion.latestTesting() ); LogValidator.ValidationResult result = validator.validateMessagesAndAssignOffsets( PrimitiveRef.ofLong(0L), metricsRecorder, RequestLocal.withThreadConfinedCaching().bufferSupplier() ); // Ensure validated records have been recompressed and match lz4 min level assertEquals(recordsLz4Min, result.validatedRecords); }
/**
 * Exposes the internal write options; intended for tests only.
 */
@VisibleForTesting
WriteOptions getOptions() {
    return this.options;
}
// Verifies that the write-batch wrapper creates WriteOptions that own their
// native handle and disable the WAL, and that the handle is released once the
// wrapper (and DB) are closed.
@Test public void testDefaultWriteOptionsHaveDisabledWAL() throws Exception { WriteOptions options; try (RocksDB db = RocksDB.open(folder.newFolder().getAbsolutePath()); RocksDBWriteBatchWrapper writeBatchWrapper = new RocksDBWriteBatchWrapper(db, null, 200, 50)) { options = writeBatchWrapper.getOptions(); assertTrue(options.isOwningHandle()); assertTrue(options.disableWAL()); } assertFalse(options.isOwningHandle()); }
/**
 * Returns a register equal to the given one but without any leader
 * information for {@code componentId}. A {@code null} or empty register
 * collapses to the empty register.
 */
public static LeaderInformationRegister clear(
        @Nullable LeaderInformationRegister leaderInformationRegister,
        String componentId) {
    final boolean nothingRegistered = leaderInformationRegister == null
        || !leaderInformationRegister.getRegisteredComponentIds().iterator().hasNext();
    if (nothingRegistered) {
        return LeaderInformationRegister.empty();
    }
    // Merging an empty LeaderInformation removes the component's entry.
    return merge(leaderInformationRegister, componentId, LeaderInformation.empty());
}
// Verifies clear() returns a new register instance with the component's
// leader information removed and no remaining registered component ids.
@Test void testClear() { final String componentId = "component-id"; final LeaderInformation leaderInformation = LeaderInformation.known(UUID.randomUUID(), "address"); final LeaderInformationRegister initialRegister = LeaderInformationRegister.of(componentId, leaderInformation); final LeaderInformationRegister newRegister = LeaderInformationRegister.clear(initialRegister, componentId); assertThat(newRegister).isNotSameAs(initialRegister); assertThat(newRegister.getRegisteredComponentIds()).isEmpty(); assertThat(newRegister.forComponentId(componentId)).isNotPresent(); }
// Maps a SeaTunnel column definition back to a SAP HANA type definition.
// Each SQL type gets a HANA column/data type; DECIMAL precision/scale and
// TIMESTAMP scale are clamped into HANA's supported ranges (with warnings),
// STRING chooses NVARCHAR vs CLOB by length, and unsupported types raise a
// connector conversion error. Finally the column type string is expanded
// with its size/scale where needed.
@Override public BasicTypeDefine reconvert(Column column) { BasicTypeDefine.BasicTypeDefineBuilder builder = BasicTypeDefine.builder() .name(column.getName()) .nullable(column.isNullable()) .comment(column.getComment()) .defaultValue(column.getDefaultValue()); switch (column.getDataType().getSqlType()) { case BOOLEAN: builder.columnType(HANA_BOOLEAN); builder.dataType(HANA_BOOLEAN); builder.length(2L); break; case TINYINT: builder.columnType(HANA_TINYINT); builder.dataType(HANA_TINYINT); break; case SMALLINT: builder.columnType(HANA_SMALLINT); builder.dataType(HANA_SMALLINT); break; case INT: builder.columnType(HANA_INTEGER); builder.dataType(HANA_INTEGER); break; case BIGINT: builder.columnType(HANA_BIGINT); builder.dataType(HANA_BIGINT); break; case FLOAT: builder.columnType(HANA_REAL); builder.dataType(HANA_REAL); break; case DOUBLE: builder.columnType(HANA_DOUBLE); builder.dataType(HANA_DOUBLE); break; case DECIMAL: DecimalType decimalType = (DecimalType) column.getDataType(); long precision = decimalType.getPrecision(); int scale = decimalType.getScale(); if (precision <= 0) { precision = DEFAULT_PRECISION; scale = DEFAULT_SCALE; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which is precision less than 0, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), precision, scale); } else if (precision > MAX_PRECISION) { scale = (int) Math.max(0, scale - (precision - MAX_PRECISION)); precision = MAX_PRECISION; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which exceeds the maximum precision of {}, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), MAX_PRECISION, precision, scale); } if (scale < 0) { scale = 0; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which is scale less than 0, " + "it will be converted to decimal({},{})", column.getName(),
// Continuation: clamp negative/oversized scale, then the remaining types.
decimalType.getPrecision(), decimalType.getScale(), precision, scale); } else if (scale > MAX_SCALE) { scale = MAX_SCALE; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), MAX_SCALE, precision, scale); } builder.columnType(String.format("%s(%s,%s)", HANA_DECIMAL, precision, scale)); builder.dataType(HANA_DECIMAL); builder.precision(precision); builder.scale(scale); break; case BYTES: builder.columnType(HANA_BLOB); builder.dataType(HANA_BLOB); break; case STRING: if (column.getColumnLength() == null || column.getColumnLength() <= MAX_NVARCHAR_LENGTH) { builder.columnType(HANA_NVARCHAR); builder.dataType(HANA_NVARCHAR); builder.length( column.getColumnLength() == null ? MAX_NVARCHAR_LENGTH : column.getColumnLength()); } else { builder.columnType(HANA_CLOB); builder.dataType(HANA_CLOB); } break; case DATE: builder.columnType(HANA_DATE); builder.dataType(HANA_DATE); break; case TIME: builder.columnType(HANA_TIME); builder.dataType(HANA_TIME); break; case TIMESTAMP: if (column.getScale() == null || column.getScale() <= 0) { builder.columnType(HANA_SECONDDATE); builder.dataType(HANA_SECONDDATE); } else { int timestampScale = column.getScale(); if (column.getScale() > MAX_TIMESTAMP_SCALE) { timestampScale = MAX_TIMESTAMP_SCALE; log.warn( "The timestamp column {} type timestamp({}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be converted to timestamp({})", column.getName(), column.getScale(), MAX_TIMESTAMP_SCALE, timestampScale); } builder.columnType(HANA_TIMESTAMP); builder.dataType(HANA_TIMESTAMP); builder.scale(timestampScale); } break; default: throw CommonError.convertToConnectorTypeError( DatabaseIdentifier.SAP_HANA, column.getDataType().getSqlType().name(), column.getName()); } BasicTypeDefine typeDefine = builder.build(); typeDefine.setColumnType(
// Append "(length[,scale])" to the column type string where applicable.
appendColumnSizeIfNeed( typeDefine.getColumnType(), typeDefine.getLength(), typeDefine.getScale())); return typeDefine; }
// Verifies BOOLEAN columns reconvert to HANA's BOOLEAN column/data type,
// preserving the column name.
@Test public void testReconvertBoolean() { Column column = PhysicalColumn.builder().name("test").dataType(BasicType.BOOLEAN_TYPE).build(); BasicTypeDefine typeDefine = SapHanaTypeConverter.INSTANCE.reconvert(column); Assertions.assertEquals(column.getName(), typeDefine.getName()); Assertions.assertEquals(SapHanaTypeConverter.HANA_BOOLEAN, typeDefine.getColumnType()); Assertions.assertEquals(SapHanaTypeConverter.HANA_BOOLEAN, typeDefine.getDataType()); }
/**
 * Returns the JGroups channel backing this component.
 */
public JChannel getChannel() {
    return this.channel;
}
// Verifies that resolving a JGroups endpoint leaves its component holding a
// non-null channel.
@Test public void shouldCreateChannel() { // When JGroupsEndpoint endpoint = getMandatoryEndpoint("my-default-jgroups:" + CLUSTER_NAME, JGroupsEndpoint.class); JGroupsComponent component = (JGroupsComponent) endpoint.getComponent(); // Then assertNotNull(component.getChannel()); }
Span handleStart(HttpRequest request, Span span) { if (span.isNoop()) return span; try { parseRequest(request, span); } catch (Throwable t) { propagateIfFatal(t); Platform.get().log("error parsing request {0}", request, t); } finally { // all of the above parsing happened before a timestamp on the span long timestamp = request.startTimestamp(); if (timestamp == 0L) { span.start(); } else { span.start(timestamp); } } return span; }
// Verifies that a remote endpoint set during request parsing ends up applied
// to the span by handleStart().
@Test void handleStart_addsRemoteEndpointWhenParsed() { handler = new HttpHandler(HttpRequestParser.DEFAULT, HttpResponseParser.DEFAULT) { @Override void parseRequest(HttpRequest request, Span span) { span.remoteIpAndPort("1.2.3.4", 0); } }; handler.handleStart(request, span); verify(span).remoteIpAndPort("1.2.3.4", 0); }
// Returns a lazily-evaluated Iterable over all files (recursively) under the
// given prefix, mapped to FileInfo(path, length, modification time). The
// FileSystem listing happens on each iterator() call, and IOExceptions are
// rethrown unchecked since Iterable cannot declare them.
@Override public Iterable<FileInfo> listPrefix(String prefix) { Path prefixToList = new Path(prefix); FileSystem fs = Util.getFs(prefixToList, hadoopConf.get()); return () -> { try { return Streams.stream( new AdaptingIterator<>(fs.listFiles(prefixToList, true /* recursive */))) .map( fileStatus -> new FileInfo( fileStatus.getPath().toString(), fileStatus.getLen(), fileStatus.getModificationTime())) .iterator(); } catch (IOException e) { throw new UncheckedIOException(e); } }; }
// Creates several directories of random files (concurrently, at different
// scales) and verifies listPrefix() counts each directory's files and the
// grand total under the parent prefix.
@Test public void testListPrefix() { Path parent = new Path(tempDir.toURI()); List<Integer> scaleSizes = Lists.newArrayList(1, 1000, 2500); scaleSizes .parallelStream() .forEach( scale -> { Path scalePath = new Path(parent, Integer.toString(scale)); createRandomFiles(scalePath, scale); assertThat( Streams.stream(hadoopFileIO.listPrefix(scalePath.toUri().toString())).count()) .isEqualTo((long) scale); }); long totalFiles = scaleSizes.stream().mapToLong(Integer::longValue).sum(); assertThat(Streams.stream(hadoopFileIO.listPrefix(parent.toUri().toString())).count()) .isEqualTo(totalFiles); }
/**
 * Registers the divide plugin as a Spring bean.
 */
@Bean
public ShenyuPlugin dividePlugin() {
    final ShenyuPlugin plugin = new DividePlugin();
    return plugin;
}
// Verifies the "dividePlugin" bean is registered in the application context.
@Test public void testDividePlugin() { applicationContextRunner.run(context -> { ShenyuPlugin plugin = context.getBean("dividePlugin", ShenyuPlugin.class); assertNotNull(plugin); } ); }
/**
 * Determines existence by attempting to read the path's attributes: a
 * NotfoundException means absent; any other backend failure propagates.
 */
@Override
public boolean find(final Path file, final ListProgressListener listener) throws BackgroundException {
  try {
    new BrickAttributesFinderFeature(session).find(file);
  } catch(NotfoundException e) {
    return false;
  }
  return true;
}
/**
 * Creates a remote file and verifies find() reports it present as a file but
 * absent when the same path is queried as a directory; cleans up afterwards.
 */
@Test
public void testFindFile() throws Exception {
  final Path file = new Path(new DefaultHomeFinderService(session).find(),
      new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
  new BrickTouchFeature(session).touch(file, new TransferStatus());
  assertTrue(new BrickFindFeature(session).find(file));
  // Same absolute path but typed as a directory must not be found.
  assertFalse(new BrickFindFeature(session).find(new Path(file.getAbsolute(), EnumSet.of(Path.Type.directory))));
  new BrickDeleteFeature(session).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Checks the given credentials against the configured username and password
 * hash, consulting the positive/negative caches first.
 *
 * <p>Fix: the hash comparison previously used {@code String.equals}, whose
 * early-exit makes the duration depend on how many leading characters match —
 * a timing side channel. The hash is now compared in constant time.
 *
 * @param username supplied username; null is rejected
 * @param password supplied password; null is rejected
 * @return true if the credentials are valid
 */
@Override
public boolean checkCredentials(String username, String password) {
  if (username == null || password == null) {
    return false;
  }
  Credentials credentials = new Credentials(username, password);
  if (validCredentialsCache.contains(credentials)) {
    return true;
  } else if (invalidCredentialsCache.contains(credentials)) {
    return false;
  }
  boolean isValid =
      this.username.equals(username)
          && constantTimeEquals(
              this.passwordHash,
              generatePasswordHash(
                  algorithm,
                  salt,
                  iterations,
                  keyLength,
                  password));
  // NOTE(review): invalidCredentialsCache grows without bound on unique bad
  // credentials — confirm it is size-limited elsewhere, else this is a
  // memory-exhaustion vector.
  if (isValid) {
    validCredentialsCache.add(credentials);
  } else {
    invalidCredentialsCache.add(credentials);
  }
  return isValid;
}

/** Constant-time string equality: duration depends only on the lengths, not on content. */
private static boolean constantTimeEquals(String a, String b) {
  if (a.length() != b.length()) {
    return false;
  }
  int diff = 0;
  for (int i = 0; i < a.length(); i++) {
    diff |= a.charAt(i) ^ b.charAt(i);
  }
  return diff == 0;
}
/**
 * checkCredentials must accept exactly the valid user/password pair and reject
 * every other combination when the stored hash is given without ':' separators.
 */
@Test
public void testPBKDF2WithHmacSHA1_withoutColon() throws Exception {
  final String algorithm = "PBKDF2WithHmacSHA1";
  final int iterations = 1000;
  final int keyLength = 128;
  String hash =
      "17:87:CA:B9:14:73:60:36:8B:20:82:87:92:58:43:B8:A3:85:66:BC:C1:6D:C3:31:6C:1D:47:48:C7:F2:E4:1D:96"
          + ":00:11:F8:4D:94:63:2F:F2:7A:F0:3B:72:63:16:5D:EF:5C:97:CC:EC:59:CB:18:4A:AA:F5:23:63:0B:6E:3B:65"
          + ":E0:72:6E:69:7D:EB:83:05:05:E5:D6:F2:19:99:49:3F:89:DA:DE:83:D7:2B:5B:7D:C9:56:B4:F2:F6:A5:61:29"
          + ":29:ED:DF:4C:4E:8D:EA:DF:47:A2:B0:89:11:86:D4:77:A1:02:E9:0C:26:A4:1E:2A:C1:A8:71:E0:93:8F:A4";
  // Strip the separators: the authenticator must cope with a bare hex hash.
  hash = hash.replace(":", "");
  final PBKDF2Authenticator authenticator = new PBKDF2Authenticator(
      "/", VALID_USERNAME, hash, algorithm, SALT, iterations, keyLength);
  for (String username : TEST_USERNAMES) {
    for (String password : TEST_PASSWORDS) {
      final boolean shouldAuthenticate =
          VALID_USERNAME.equals(username) && VALID_PASSWORD.equals(password);
      assertEquals(shouldAuthenticate, authenticator.checkCredentials(username, password));
    }
  }
}
/**
 * Returns a {@code ProtoCoder} for the given protobuf message class, with no
 * extra serializable classes registered.
 *
 * @param protoMessageClass the protobuf message type to encode/decode
 */
public static <T extends Message> ProtoCoder<T> of(Class<T> protoMessageClass) {
  return new ProtoCoder<>(protoMessageClass, ImmutableSet.of());
}
/**
 * Round-trips a list of nested protobuf messages through ProtoCoder wrapped in
 * a ListCoder (i.e. nested encoding context) and checks decode(encode(x)) == x.
 */
@Test
public void testCoderEncodeDecodeEqualNestedContext() throws Exception {
  MessageA value1 =
      MessageA.newBuilder()
          .setField1("hello")
          .addField2(MessageB.newBuilder().setField1(true).build())
          .addField2(MessageB.newBuilder().setField1(false).build())
          .build();
  MessageA value2 =
      MessageA.newBuilder()
          .setField1("world")
          .addField2(MessageB.newBuilder().setField1(false).build())
          .addField2(MessageB.newBuilder().setField1(true).build())
          .build();
  CoderProperties.coderDecodeEncodeEqual(
      ListCoder.of(ProtoCoder.of(MessageA.class)), ImmutableList.of(value1, value2));
}
/**
 * Validates the "host:port" target against the allow-lists, resolves the host
 * and checks the resolved IP, completing the future exceptionally with
 * TargetAddressDeniedException on any rejection.
 *
 * <p>Fix: an input without a ':' previously crashed synchronously with
 * StringIndexOutOfBoundsException; it is now rejected via the returned future,
 * consistent with the other denial paths. Note: a non-numeric port still
 * throws NumberFormatException synchronously — presumably callers pass
 * proxy-protocol-validated strings; confirm upstream.
 *
 * @param hostAndPort target in "host:port" form (IPv6 hosts use the last ':')
 */
public CompletableFuture<InetSocketAddress> resolveAndCheckTargetAddress(String hostAndPort) {
  int pos = hostAndPort.lastIndexOf(':');
  if (pos < 0) {
    return FutureUtil.failedFuture(
        new TargetAddressDeniedException("Given address '" + hostAndPort + "' doesn't contain a port."));
  }
  String host = hostAndPort.substring(0, pos);
  int port = Integer.parseInt(hostAndPort.substring(pos + 1));
  if (!isPortAllowed(port)) {
    return FutureUtil.failedFuture(
        new TargetAddressDeniedException("Given port in '" + hostAndPort + "' isn't allowed."));
  } else if (!isHostAllowed(host)) {
    return FutureUtil.failedFuture(
        new TargetAddressDeniedException("Given host in '" + hostAndPort + "' isn't allowed."));
  } else {
    return NettyFutureUtil.toCompletableFuture(
        inetSocketAddressResolver.resolve(InetSocketAddress.createUnresolved(host, port)))
        .thenCompose(resolvedAddress -> {
          CompletableFuture<InetSocketAddress> result = new CompletableFuture<>();
          // The host may be allowed by name while its resolved IP is not.
          if (isIPAddressAllowed(resolvedAddress)) {
            result.complete(resolvedAddress);
          } else {
            result.completeExceptionally(new TargetAddressDeniedException(
                "The IP address of the given host and port '" + hostAndPort + "' isn't allowed."));
          }
          return result;
        });
  }
}
/**
 * A numeric IPv6 target whose resolved address falls inside the allowed
 * fd4d:801b:73fa:abcd::/64 range must pass validation. The target string ends
 * with ":6650", which the validator splits off as the port via the last ':'.
 */
@Test
public void shouldAllowIPv6AddressNumeric() throws Exception {
  BrokerProxyValidator brokerProxyValidator = new BrokerProxyValidator(
      createMockedAddressResolver("fd4d:801b:73fa:abcd:0000:0000:0000:0001"),
      "*"
      , "fd4d:801b:73fa:abcd::/64"
      , "6650");
  // get() propagates any TargetAddressDeniedException, which would fail the test.
  brokerProxyValidator.resolveAndCheckTargetAddress("fd4d:801b:73fa:abcd:0000:0000:0000:0001:6650").get();
}
/**
 * Activates the account with the user-supplied activation code and maps the
 * backend result map onto an app response: OK -> OkResponse (recording the
 * issuer type), NOK with a known error code -> a specific response, anything
 * else -> generic NokResponse.
 */
@Override
public AppResponse process(Flow flow, ActivateWithCodeRequest request) throws FlowNotDefinedException, IOException, NoSuchAlgorithmException {
  Map<String, Object> result = digidClient.activateAccountWithCode(appSession.getAccountId(), request.getActivationCode());
  if (result.get(lowerUnderscore(STATUS)).equals("OK")) {
    appAuthenticator.setIssuerType((String) result.get(lowerUnderscore(ISSUER_TYPE)));
    return new OkResponse();
  }
  if (result.get(lowerUnderscore(STATUS)).equals("NOK") && result.get(ERROR) != null ) {
    final var error = result.get(ERROR);
    if (ERROR_CODE_NOT_CORRECT.equals(error)) {
      // Logcode 88 is already logged in x, can be changed when switching to account microservice :
      // Wrong code: tell the user how many attempts remain.
      return new EnterActivationResponse(ERROR_CODE_NOT_CORRECT, Map.of(REMAINING_ATTEMPTS, result.get(lowerUnderscore(REMAINING_ATTEMPTS))));
    } else if (ERROR_CODE_BLOCKED.equals(error)) {
      digidClient.remoteLog("87", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId()));
      return new NokResponse((String) result.get(ERROR));
    } else if (ERROR_CODE_INVALID.equals(error)) {
      digidClient.remoteLog("90", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId()));
      // Expired/invalid code: report how long codes stay valid.
      return new EnterActivationResponse(ERROR_CODE_INVALID, Map.of(DAYS_VALID, result.get(lowerUnderscore(DAYS_VALID))));
    }
  }
  // NOK without a recognized error code, or any other status, falls through here.
  return new NokResponse();
}
/** A "NOK" backend status without any error code must map to a generic NokResponse. */
@Test
public void responseTestNOK() throws FlowNotDefinedException, IOException, NoSuchAlgorithmException {
  // given: the backend reports NOK with no ERROR entry
  when(digidClientMock.activateAccountWithCode(anyLong(), any())).thenReturn(Map.of(
      lowerUnderscore(STATUS), "NOK"
  ));
  // when
  AppResponse result = activationCodeChecked.process(mockedFlow, activateWithCodeRequest);
  // then
  assertTrue(result instanceof NokResponse);
}
/**
 * Parses the configured export file with a SAX parser, delivering repository
 * elements to the given listener (this object is the SAX handler).
 */
public void parse( RepositoryElementReadListener repositoryElementReadListener )
  throws SAXException, ParserConfigurationException, IOException {
  this.repositoryElementReadListener = repositoryElementReadListener;
  // The hardened factory disables external entity resolution (XXE protection).
  this.saxParser = XMLParserFactoryProducer.createSecureSAXParserFactory().newSAXParser();
  this.saxParser.parse( new File( filename ), this );
}
/**
 * Parsing a repository export whose path contains Japanese characters must not
 * throw; any exception fails the test explicitly with the cause in the message.
 */
@Test
public void testNoExceptionOccurs_WhenNameContainsJapaneseCharacters() throws Exception {
  repExpSAXParser = new RepositoryExportSaxParser( getRepositoryFile().getCanonicalPath(), repImpPgDlg );
  try {
    repExpSAXParser.parse( repImpMock );
  } catch ( Exception e ) {
    Assert.fail( "No exception is expected But occured: " + e );
  }
}
/**
 * Fetches a signal parameter on the extension executor, bounded by
 * TIMEOUT_IN_MILLIS, wrapping any failure in a MaestroInternalError.
 *
 * <p>Fix: InterruptedException was previously swallowed into the generic
 * catch without restoring the thread's interrupt flag; it is now re-asserted
 * before rethrowing so callers can observe the interruption.
 *
 * @param signalName name of the signal to read from
 * @param paramName parameter key within that signal
 * @return the evaluated parameter value
 */
Object getFromSignal(String signalName, String paramName) {
  try {
    return executor
        .submit(() -> fromSignal(signalName, paramName))
        .get(TIMEOUT_IN_MILLIS, TimeUnit.MILLISECONDS);
  } catch (InterruptedException e) {
    // Preserve the interrupt status for code further up the stack.
    Thread.currentThread().interrupt();
    throw new MaestroInternalError(
        e,
        "getFromSignal throws an exception for signalName=[%s], paramName=[%s]",
        signalName,
        paramName);
  } catch (Exception e) {
    throw new MaestroInternalError(
        e,
        "getFromSignal throws an exception for signalName=[%s], paramName=[%s]",
        signalName,
        paramName);
  }
}
/**
 * getFromSignal must read evaluated params from the signal initiator's payload:
 * a String value from one signal and a long value from another.
 */
@Test
public void testGetFromSignal() {
  SignalInitiator initiator = Mockito.mock(SignalInitiator.class);
  when(instanceWrapper.getInitiator()).thenReturn(initiator);
  when(initiator.getType()).thenReturn(Initiator.Type.SIGNAL);
  when(initiator.getParams())
      .thenReturn(
          twoItemMap(
              "signal-a",
              StringMapParameter.builder()
                  .evaluatedResult(singletonMap("param1", "value1"))
                  .build(),
              "signal-b",
              MapParameter.builder().evaluatedResult(singletonMap("param2", 123L)).build()));
  assertEquals("value1", paramExtension.getFromSignal("signal-a", "param1"));
  assertEquals(123L, paramExtension.getFromSignal("signal-b", "param2"));
}
/**
 * Computes the 32-bit CityHash of the given bytes.
 *
 * @param data input bytes to hash
 * @return the 32-bit CityHash value
 */
public static int cityHash32(byte[] data) {
  return CityHash.hash32(data);
}
/** cityHash32 over the UTF-8 bytes of a mixed CJK/ASCII string has a known value. */
@Test
public void cityHash32Test(){
  final String text = "Google发布的Hash计算算法:CityHash64 与 CityHash128";
  assertEquals(0xa8944fbe, HashUtil.cityHash32(StrUtil.utf8Bytes(text)));
}
/**
 * Sets whether this protocol is the default one.
 *
 * @param isDefault true if this protocol should be the default
 * @return this builder, for chaining
 */
public ProtocolBuilder isDefault(Boolean isDefault) {
  this.isDefault = isDefault;
  return getThis();
}
/** The isDefault flag set on the builder must survive into the built config. */
@Test
void isDefault() {
  ProtocolBuilder protocolBuilder = new ProtocolBuilder().isDefault(true);
  Assertions.assertTrue(protocolBuilder.build().isDefault());
}
/**
 * Drives the MySQL authentication state machine for one inbound packet.
 * A fast-path packet may leave authentication unfinished (e.g. an auth-method
 * switch is pending), in which case the partial result is returned; otherwise
 * credentials and database authorization are checked and an OK packet is
 * written on success.
 *
 * @throws AccessDeniedException if the user/password check fails
 * @throws DatabaseAccessDeniedException if the user may not use the requested database
 */
@Override
public AuthenticationResult authenticate(final ChannelHandlerContext context, final PacketPayload payload) {
  AuthorityRule rule = ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getGlobalRuleMetaData().getSingleRule(AuthorityRule.class);
  if (MySQLConnectionPhase.AUTH_PHASE_FAST_PATH == connectionPhase) {
    currentAuthResult = authenticatePhaseFastPath(context, payload, rule);
    // Unfinished means more packets are needed before credentials can be checked.
    if (!currentAuthResult.isFinished()) {
      return currentAuthResult;
    }
  } else if (MySQLConnectionPhase.AUTHENTICATION_METHOD_MISMATCH == connectionPhase) {
    authenticateMismatchedMethod((MySQLPacketPayload) payload);
  }
  Grantee grantee = new Grantee(currentAuthResult.getUsername(), getHostAddress(context));
  if (!login(rule, grantee, authResponse)) {
    // The third argument reports whether a password was supplied at all.
    throw new AccessDeniedException(currentAuthResult.getUsername(), grantee.getHostname(), 0 != authResponse.length);
  }
  if (!authorizeDatabase(rule, grantee, currentAuthResult.getDatabase())) {
    throw new DatabaseAccessDeniedException(currentAuthResult.getUsername(), grantee.getHostname(), currentAuthResult.getDatabase());
  }
  writeOKPacket(context);
  return AuthenticationResultBuilder.finished(grantee.getUsername(), grantee.getHostname(), currentAuthResult.getDatabase());
}
/**
 * Authenticating as a user unknown to the authority rule must be rejected with
 * AccessDeniedException, and the constructed MySQL error packet must carry the
 * expected arguments.
 */
@Test
void assertAuthenticateFailedWithAbsentUser() {
  setConnectionPhase(MySQLConnectionPhase.AUTH_PHASE_FAST_PATH);
  AuthorityRule rule = mock(AuthorityRule.class);
  when(rule.getAuthenticatorType(any())).thenReturn("");
  // No user "root"@"127.0.0.1" exists in the rule.
  when(rule.findUser(new Grantee("root", "127.0.0.1"))).thenReturn(Optional.empty());
  ChannelHandlerContext context = mockChannelHandlerContext();
  ContextManager contextManager = mockContextManager(rule);
  when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager);
  when(ProxyContext.getInstance().databaseExists("foo_db")).thenReturn(true);
  // Intercept the error packet's construction to validate its arguments.
  try (MockedConstruction<MySQLErrPacket> ignored = mockConstruction(MySQLErrPacket.class, (mock, mockContext) -> assertAuthenticationErrorPacket(mockContext.arguments()))) {
    assertThrows(AccessDeniedException.class, () -> authenticationEngine.authenticate(context, getPayload("root", "foo_db", authResponse)));
  }
}
/**
 * Records the (path, schema) pair in the job configuration and installs the
 * delegating input format. Entries are stored as comma-separated
 * "&lt;path&gt;;&lt;base64(schema)&gt;" mappings under SCHEMA_KEY.
 */
private static void addInputPath(JobConf conf, Path path, Schema inputSchema) {
  final String mapping = path.toString() + ";" + toBase64(inputSchema.toString());
  final String existing = conf.get(SCHEMA_KEY);
  if (existing == null) {
    conf.set(SCHEMA_KEY, mapping);
  } else {
    conf.set(SCHEMA_KEY, existing + "," + mapping);
  }
  conf.setInputFormat(DelegatingInputFormat.class);
}
/**
 * End-to-end join job over two Avro inputs: writes name and balance files,
 * wires per-path mappers via AvroMultipleInputs, runs the job locally with a
 * single reducer and validates the joined output file.
 */
@Test
void job() throws Exception {
  JobConf job = new JobConf();
  Path inputPath1 = new Path(INPUT_DIR_1.getPath());
  Path inputPath2 = new Path(INPUT_DIR_2.getPath());
  Path outputPath = new Path(OUTPUT_DIR.getPath());
  // Start from a clean output directory.
  outputPath.getFileSystem(job).delete(outputPath, true);
  writeNamesFiles(new File(inputPath1.toUri().getPath()));
  writeBalancesFiles(new File(inputPath2.toUri().getPath()));
  job.setJobName("multiple-inputs-join");
  // Each input path gets its own mapper and reflected schema.
  AvroMultipleInputs.addInputPath(job, inputPath1, NamesMapImpl.class, ReflectData.get().getSchema(NamesRecord.class));
  AvroMultipleInputs.addInputPath(job, inputPath2, BalancesMapImpl.class, ReflectData.get().getSchema(BalancesRecord.class));
  Schema keySchema = ReflectData.get().getSchema(KeyRecord.class);
  Schema valueSchema = ReflectData.get().getSchema(JoinableRecord.class);
  AvroJob.setMapOutputSchema(job, Pair.getPairSchema(keySchema, valueSchema));
  AvroJob.setOutputSchema(job, ReflectData.get().getSchema(CompleteRecord.class));
  AvroJob.setReducerClass(job, ReduceImpl.class);
  // One reducer so all joined records land in a single part file.
  job.setNumReduceTasks(1);
  FileOutputFormat.setOutputPath(job, outputPath);
  AvroJob.setReflect(job);
  JobClient.runJob(job);
  validateCompleteFile(new File(OUTPUT_DIR, "part-00000.avro"));
}
/**
 * Asserts that the optional under test is present and holds {@code expected}.
 * A null expectation is a caller error (optionals can never hold null).
 */
public void hasValue(@Nullable Object expected) {
  if (expected == null) {
    throw new NullPointerException("Optional cannot have a null value.");
  }
  if (actual == null) {
    // Subject was created over a null reference, not an absent optional.
    failWithActual("expected an optional with value", expected);
    return;
  }
  if (!actual.isPresent()) {
    failWithoutActual(fact("expected to have value", expected), simpleFact("but was absent"));
    return;
  }
  checkNoNeedToDisplayBothValues("get()").that(actual.get()).isEqualTo(expected);
}
/**
 * hasValue on an optional holding a different value must fail, and the failure
 * message must label the compared value as "optional.get()".
 */
@Test
public void hasValue_failingWithWrongValue() {
  expectFailureWhenTestingThat(Optional.of("foo")).hasValue("boo");
  assertFailureValue("value of", "optional.get()");
}
/** Creates and initializes an empty mount table record via the state store serializer. */
public static MountTable newInstance() {
  final MountTable mountTable = StateStoreSerializer.newRecord(MountTable.class);
  mountTable.init();
  return mountTable;
}
/**
 * Exercises MountTable.newInstance validation: the source path and every
 * destination path must start with '/', and the namespace id must be valid;
 * a well-formed entry is created successfully at the end.
 */
@Test
public void testValidation() throws IOException {
  Map<String, String> destinations = new HashMap<>();
  destinations.put("ns0", "/testValidate-dest");
  try {
    // Source path missing the leading slash.
    MountTable.newInstance("testValidate", destinations);
    fail("Mount table entry should be created failed.");
  } catch (Exception e) {
    GenericTestUtils.assertExceptionContains(
        MountTable.ERROR_MSG_MUST_START_WITH_BACK_SLASH, e);
  }
  destinations.clear();
  destinations.put("ns0", "testValidate-dest");
  try {
    // Destination path missing the leading slash.
    MountTable.newInstance("/testValidate", destinations);
    fail("Mount table entry should be created failed.");
  } catch (Exception e) {
    GenericTestUtils.assertExceptionContains(
        MountTable.ERROR_MSG_ALL_DEST_MUST_START_WITH_BACK_SLASH, e);
  }
  destinations.clear();
  destinations.put("", "/testValidate-dest");
  try {
    // Empty namespace id.
    MountTable.newInstance("/testValidate", destinations);
    fail("Mount table entry should be created failed.");
  } catch (Exception e) {
    GenericTestUtils.assertExceptionContains(
        MountTable.ERROR_MSG_INVALID_DEST_NS, e);
  }
  destinations.clear();
  destinations.put("ns0", "/testValidate-dest");
  // Valid source and destination: creation succeeds.
  MountTable record = MountTable.newInstance("/testValidate", destinations);
  assertNotNull(record);
}
public String format() { final StringBuilder sb = new StringBuilder(); if (betweenMs > 0) { long day = betweenMs / DateUnit.DAY.getMillis(); long hour = betweenMs / DateUnit.HOUR.getMillis() - day * 24; long minute = betweenMs / DateUnit.MINUTE.getMillis() - day * 24 * 60 - hour * 60; final long BetweenOfSecond = ((day * 24 + hour) * 60 + minute) * 60; long second = betweenMs / DateUnit.SECOND.getMillis() - BetweenOfSecond; long millisecond = betweenMs - (BetweenOfSecond + second) * 1000; final int level = this.level.ordinal(); int levelCount = 0; if (isLevelCountValid(levelCount) && day > 0) { sb.append(day).append(levelFormatter.apply(Level.DAY)).append(separator); levelCount++; } if (isLevelCountValid(levelCount) && 0 != hour && level >= Level.HOUR.ordinal()) { sb.append(hour).append(levelFormatter.apply(Level.HOUR)).append(separator); levelCount++; } if (isLevelCountValid(levelCount) && 0 != minute && level >= Level.MINUTE.ordinal()) { sb.append(minute).append(levelFormatter.apply(Level.MINUTE)).append(separator); levelCount++; } if (isLevelCountValid(levelCount) && 0 != second && level >= Level.SECOND.ordinal()) { sb.append(second).append(levelFormatter.apply(Level.SECOND)).append(separator); levelCount++; } if (isLevelCountValid(levelCount) && 0 != millisecond && level >= Level.MILLISECOND.ordinal()) { sb.append(millisecond).append(levelFormatter.apply(Level.MILLISECOND)).append(separator); // levelCount++; } } if (StrUtil.isEmpty(sb)) { sb.append(0).append(levelFormatter.apply(this.level)); } else { if (StrUtil.isNotEmpty(separator)) { sb.delete(sb.length() - separator.length(), sb.length()); } } return sb.toString(); }
/**
 * One day minus one second, limited to a single output unit, must render as "1天".
 *
 * <p>Fix: assertEquals takes (expected, actual); the original call had them
 * reversed, which produces misleading failure messages. The misspelled local
 * "formater" is also corrected.
 */
@Test
public void formatTest() {
  long betweenMs = DateUtil.betweenMs(DateUtil.parse("2017-01-01 22:59:59"), DateUtil.parse("2017-01-02 23:59:58"));
  BetweenFormatter formatter = new BetweenFormatter(betweenMs, Level.MILLISECOND, 1);
  assertEquals("1天", formatter.toString());
}
/**
 * Pulls up to {@code batchSize} messages from the subscription and converts
 * each JSON-API message into an internal {@link IncomingMessage} (proto form),
 * resolving the element timestamp and record id from the configured attributes.
 *
 * @param requestTimeMsSinceEpoch wall time of this pull, recorded on each message
 * @param subscription subscription to pull from
 * @param batchSize maximum number of messages to return
 * @param returnImmediately whether the server should answer without waiting
 */
@Override
@SuppressWarnings("ProtoFieldNullComparison")
public List<IncomingMessage> pull(
    long requestTimeMsSinceEpoch,
    SubscriptionPath subscription,
    int batchSize,
    boolean returnImmediately)
    throws IOException {
  PullRequest request =
      new PullRequest().setReturnImmediately(returnImmediately).setMaxMessages(batchSize);
  PullResponse response =
      pubsub.projects().subscriptions().pull(subscription.getPath(), request).execute();
  if (response.getReceivedMessages() == null || response.getReceivedMessages().isEmpty()) {
    return ImmutableList.of();
  }
  List<IncomingMessage> incomingMessages = new ArrayList<>(response.getReceivedMessages().size());
  for (ReceivedMessage message : response.getReceivedMessages()) {
    PubsubMessage pubsubMessage = message.getMessage();
    // Attributes may be absent in the JSON form; normalize to an empty map.
    Map<String, String> attributes;
    if (pubsubMessage.getAttributes() != null) {
      attributes = pubsubMessage.getAttributes();
    } else {
      attributes = new HashMap<>();
    }
    // Payload; normalize a missing body to an empty byte array.
    byte[] elementBytes = pubsubMessage.getData() == null ? null : pubsubMessage.decodeData();
    if (elementBytes == null) {
      elementBytes = new byte[0];
    }
    // Timestamp: either the server publish time or a user-specified attribute.
    long timestampMsSinceEpoch;
    if (Strings.isNullOrEmpty(timestampAttribute)) {
      timestampMsSinceEpoch = parseTimestampAsMsSinceEpoch(message.getMessage().getPublishTime());
    } else {
      timestampMsSinceEpoch = extractTimestampAttribute(timestampAttribute, attributes);
    }
    // Ack id must always be present.
    String ackId = message.getAckId();
    checkState(!Strings.isNullOrEmpty(ackId));
    // Record id, if any.
    @Nullable String recordId = null;
    if (idAttribute != null) {
      recordId = attributes.get(idAttribute);
    }
    if (Strings.isNullOrEmpty(recordId)) {
      // Fall back to the Pubsub provided message id.
      recordId = pubsubMessage.getMessageId();
    }
    com.google.pubsub.v1.PubsubMessage.Builder protoMessage =
        com.google.pubsub.v1.PubsubMessage.newBuilder();
    protoMessage.setData(ByteString.copyFrom(elementBytes));
    protoMessage.putAllAttributes(attributes);
    // PubsubMessage uses `null` to represent no ordering key where we want a default of "".
    if (pubsubMessage.getOrderingKey() != null) {
      protoMessage.setOrderingKey(pubsubMessage.getOrderingKey());
    } else {
      protoMessage.setOrderingKey("");
    }
    incomingMessages.add(
        IncomingMessage.of(
            protoMessage.build(),
            timestampMsSinceEpoch,
            requestTimeMsSinceEpoch,
            ackId,
            recordId));
  }
  return incomingMessages;
}
/**
 * Pulling a message that carries no attributes must still yield a well-formed
 * IncomingMessage with the expected ack id, payload, request time and publish time.
 */
@Test
public void pullOneMessageEmptyAttributes() throws IOException {
  client = new PubsubJsonClient(null, null, mockPubsub);
  String expectedSubscription = SUBSCRIPTION.getPath();
  PullRequest expectedRequest = new PullRequest().setReturnImmediately(true).setMaxMessages(10);
  PubsubMessage expectedPubsubMessage =
      new PubsubMessage()
          .setMessageId(MESSAGE_ID)
          .encodeData(DATA.getBytes(StandardCharsets.UTF_8))
          .setPublishTime(String.valueOf(PUB_TIME));
  ReceivedMessage expectedReceivedMessage =
      new ReceivedMessage().setMessage(expectedPubsubMessage).setAckId(ACK_ID);
  PullResponse expectedResponse =
      new PullResponse().setReceivedMessages(ImmutableList.of(expectedReceivedMessage));
  when((Object) mockPubsub
          .projects()
          .subscriptions()
          .pull(expectedSubscription, expectedRequest)
          .execute())
      .thenReturn(expectedResponse);
  List<IncomingMessage> actualMessages = client.pull(REQ_TIME, SUBSCRIPTION, 10, true);
  assertEquals(1, actualMessages.size());
  IncomingMessage actualMessage = actualMessages.get(0);
  assertEquals(ACK_ID, actualMessage.ackId());
  assertEquals(DATA, actualMessage.message().getData().toStringUtf8());
  assertEquals(REQ_TIME, actualMessage.requestTimeMsSinceEpoch());
  assertEquals(PUB_TIME, actualMessage.timestampMsSinceEpoch());
}
/**
 * @deprecated Use {@link #addDirectoryTrailingSlash()} instead; this method
 *             delegates to it unchanged.
 */
@Deprecated
public URLNormalizer addTrailingSlash() {
  return addDirectoryTrailingSlash();
}
/**
 * addDirectoryTrailingSlash must append "/" only to directory-like segments,
 * leaving file-like paths, bare hosts and already-slashed URLs untouched, and
 * must insert the slash before any query string.
 */
@Test
public void testAddTrailingSlash() {
  s = "http://www.example.com/alice";
  t = "http://www.example.com/alice/";
  assertEquals(t, n(s).addDirectoryTrailingSlash().toString());
  // A path with an extension is treated as a file: unchanged.
  s = "http://www.example.com/alice.html";
  t = "http://www.example.com/alice.html";
  assertEquals(t, n(s).addDirectoryTrailingSlash().toString());
  // Bare host with no path: unchanged.
  s = "http://www.example.com";
  t = "http://www.example.com";
  assertEquals(t, n(s).addDirectoryTrailingSlash().toString());
  // Already slashed before the query string: unchanged.
  s = "http://www.example.com/blah/?param=value";
  t = "http://www.example.com/blah/?param=value";
  assertEquals(t, n(s).addDirectoryTrailingSlash().toString());
  // Slash is inserted before the query string.
  s = "http://www.example.com/blah?param=value";
  t = "http://www.example.com/blah/?param=value";
  assertEquals(t, n(s).addDirectoryTrailingSlash().toString());
  // This one is for HTTP Collector GitHub issue #163:
  s = "http://www.example.com/";
  t = "http://www.example.com/";
  assertEquals(t, n(s).addDirectoryTrailingSlash().toString());
}
/**
 * Validates required alarm fields in order (type, originator, severity,
 * status, tenant), then confirms the referenced tenant actually exists.
 * The first violated rule wins.
 */
@Override
protected void validateDataImpl(TenantId tenantId, Alarm alarm) {
  validateString("Alarm type", alarm.getType());
  if (alarm.getOriginator() == null) {
    throw new DataValidationException("Alarm originator should be specified!");
  }
  if (alarm.getSeverity() == null) {
    throw new DataValidationException("Alarm severity should be specified!");
  }
  if (alarm.getStatus() == null) {
    throw new DataValidationException("Alarm status should be specified!");
  }
  if (alarm.getTenantId() == null) {
    throw new DataValidationException("Alarm should be assigned to tenant!");
  }
  if (!tenantService.tenantExists(alarm.getTenantId())) {
    throw new DataValidationException("Alarm is referencing to non-existent tenant!");
  }
}
/**
 * A fully populated, valid alarm must pass validation, and its type string
 * must be routed through validateString.
 */
@Test
void testValidateNameInvocation() {
  Alarm alarm = new Alarm();
  alarm.setType("overheating");
  alarm.setOriginator(tenantId);
  alarm.setSeverity(AlarmSeverity.CRITICAL);
  alarm.setCleared(false);
  alarm.setAcknowledged(false);
  alarm.setTenantId(tenantId);
  validator.validateDataImpl(tenantId, alarm);
  // validator is a spy: confirm the type check was delegated.
  verify(validator).validateString("Alarm type", alarm.getType());
}
/**
 * Entry point for the data generator: parses arguments, builds the producer,
 * then runs {@code numThreads} producer tasks to completion, optionally rate
 * limited. Exits with status 1 if interrupted; rethrows task failures.
 *
 * <p>Fix: the InterruptedException handler now restores the thread's
 * interrupt flag before exiting so callers (and the mocked exit used in
 * tests) can still observe the interruption.
 */
static void run(
    final SystemExit systemExit,
    final String... args
) throws Throwable {
  final Arguments arguments = new Arguments.Builder()
      .parseArgs(args)
      .build();
  if (arguments.help) {
    usage();
    return;
  }
  final Properties props = getProperties(arguments);
  final DataGenProducer dataProducer = ProducerFactory
      .getProducer(arguments.keyFormat, arguments.valueFormat, arguments.valueDelimiter, props);
  // msgRate == -1 means unlimited throughput.
  final Optional<RateLimiter> rateLimiter = arguments.msgRate != -1
      ? Optional.of(RateLimiter.create(arguments.msgRate))
      : Optional.empty();
  // Daemon threads so the JVM can exit even if producers linger.
  final Executor executor = Executors.newFixedThreadPool(
      arguments.numThreads,
      r -> {
        final Thread thread = new Thread(r);
        thread.setDaemon(true);
        return thread;
      }
  );
  final CompletionService<Void> service = new ExecutorCompletionService<>(executor);
  for (int i = 0; i < arguments.numThreads; i++) {
    service.submit(getProducerTask(arguments, dataProducer, props, rateLimiter));
  }
  for (int i = 0; i < arguments.numThreads; i++) {
    try {
      service.take().get();
    } catch (final InterruptedException e) {
      // Preserve the interrupt status before reporting and exiting.
      Thread.currentThread().interrupt();
      System.err.println("Interrupted waiting for threads to exit.");
      systemExit.exit(1);
    } catch (final ExecutionException e) {
      // Surface the task's real failure to the caller.
      throw e.getCause();
    }
  }
}
/**
 * Passing a key field that does not exist in the schema must make run() fail
 * fast with IllegalArgumentException.
 */
@Test(expected = IllegalArgumentException.class)
public void shouldThrowIfKeyFieldDoesNotExist() throws Throwable {
  DataGen.run(
      mockSystem,
      "key=not_a_field",
      "schema=./src/main/resources/purchase.avro",
      "format=avro",
      "topic=foo"
  );
}
/**
 * Creates a symbolic link at {@code linkname} pointing to {@code target} by
 * shelling out to the platform's symlink command.
 *
 * @return 0 on success; 1 for null arguments; otherwise the shell's exit code
 * @throws IOException if executing the shell command itself fails
 */
public static int symLink(String target, String linkname) throws IOException{
  if (target == null || linkname == null) {
    LOG.warn("Can not create a symLink with a target = " + target
        + " and link =" + linkname);
    return 1;
  }
  // Run the input paths through Java's File so that they are converted to the
  // native OS form
  File targetFile = new File(
      Path.getPathWithoutSchemeAndAuthority(new Path(target)).toString());
  File linkFile = new File(
      Path.getPathWithoutSchemeAndAuthority(new Path(linkname)).toString());
  String[] cmd = Shell.getSymlinkCommand(
      targetFile.toString(),
      linkFile.toString());
  ShellCommandExecutor shExec;
  try {
    if (Shell.WINDOWS &&
        linkFile.getParentFile() != null &&
        !new Path(target).isAbsolute()) {
      // Relative links on Windows must be resolvable at the time of
      // creation. To ensure this we run the shell command in the directory
      // of the link.
      //
      shExec = new ShellCommandExecutor(cmd, linkFile.getParentFile());
    } else {
      shExec = new ShellCommandExecutor(cmd);
    }
    shExec.execute();
  } catch (Shell.ExitCodeException ec) {
    int returnVal = ec.getExitCode();
    if (Shell.WINDOWS && returnVal == SYMLINK_NO_PRIVILEGE) {
      // Default Windows security policy requires elevation to create symlinks.
      LOG.warn("Fail to create symbolic links on Windows. "
          + "The default security settings in Windows disallow non-elevated "
          + "administrators and all non-administrators from creating symbolic links. "
          + "This behavior can be changed in the Local Security Policy management console");
    } else if (returnVal != 0) {
      LOG.warn("Command '" + StringUtils.join(" ", cmd) + "' failed "
          + returnVal + " with: " + ec.getMessage());
    }
    // Non-zero shell exit codes are reported to the caller, not thrown.
    return returnVal;
  } catch (IOException e) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Error while create symlink " + linkname + " to " + target
          + "." + " Exception: " + StringUtils.stringifyException(e));
    }
    throw e;
  }
  return shExec.getExitCode();
}
/**
 * A symlink can be renamed without affecting its target: after renameTo the
 * target file and the new link exist and the old link is gone.
 */
@Test (timeout = 30000)
public void testSymlinkRenameTo() throws Exception {
  File file = new File(del, FILE);
  file.createNewFile();
  File link = new File(del, "_link");
  // create the symlink
  FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath());
  Verify.exists(file);
  Verify.exists(link);
  File link2 = new File(del, "_link2");
  // Rename the symlink
  Assert.assertTrue(link.renameTo(link2));
  // Make sure the file still exists
  // (NOTE: this would fail on Java6 on Windows if we didn't
  // copy the file in FileUtil#symlink)
  Verify.exists(file);
  Verify.exists(link2);
  Verify.notExists(link);
}
/**
 * Resolves variable interpolation in the given string using the context's
 * secret source resolver.
 *
 * @deprecated call {@code context.getSecretSourceResolver().resolve(toInterpolate)} directly.
 */
@Deprecated
@Restricted(DoNotUse.class)
public static String resolve(ConfigurationContext context, String toInterpolate) {
  return context.getSecretSourceResolver().resolve(toInterpolate);
}
/**
 * ${readFile:...} interpolation must return the file's content, matching the
 * FILE lookup's own output and containing the known secret line.
 */
@Test
public void resolve_File() throws Exception {
  String input = getPath("secret.json").toAbsolutePath().toString();
  String output = resolve("${readFile:" + input + "}");
  assertThat(output, equalTo(FILE.lookup(input)));
  assertThat(output, containsString("\"Our secret\": \"Hello World\""));
}
/**
 * Builds a Jib container configuration for a WAR artifact: base image
 * (default "jetty"), exploded-WAR layers, entrypoint, and the common
 * container options (ports, volumes, env, labels, user, format, creation time).
 *
 * <p>Fix: the program-arguments emptiness check now reuses the local variable
 * instead of invoking the getter a second time.
 *
 * @return the configured container builder
 * @throws IOException if creating the layers fails
 * @throws InvalidImageReferenceException if the base image reference is invalid
 */
public static JibContainerBuilder toJibContainerBuilder(
    ArtifactProcessor processor,
    CommonCliOptions commonCliOptions,
    CommonContainerConfigCliOptions commonContainerConfigCliOptions,
    ConsoleLogger logger)
    throws IOException, InvalidImageReferenceException {
  String baseImage = commonContainerConfigCliOptions.getFrom().orElse("jetty");
  JibContainerBuilder containerBuilder =
      ContainerBuilders.create(baseImage, Collections.emptySet(), commonCliOptions, logger);
  List<String> programArguments = commonContainerConfigCliOptions.getProgramArguments();
  // Only override program arguments when some were actually supplied.
  if (!programArguments.isEmpty()) {
    containerBuilder.setProgramArguments(programArguments);
  }
  containerBuilder
      .setEntrypoint(computeEntrypoint(commonContainerConfigCliOptions))
      .setFileEntriesLayers(processor.createLayers())
      .setExposedPorts(commonContainerConfigCliOptions.getExposedPorts())
      .setVolumes(commonContainerConfigCliOptions.getVolumes())
      .setEnvironment(commonContainerConfigCliOptions.getEnvironment())
      .setLabels(commonContainerConfigCliOptions.getLabels());
  commonContainerConfigCliOptions.getUser().ifPresent(containerBuilder::setUser);
  commonContainerConfigCliOptions.getFormat().ifPresent(containerBuilder::setFormat);
  commonContainerConfigCliOptions.getCreationTime().ifPresent(containerBuilder::setCreationTime);
  return containerBuilder;
}
/**
 * When no program arguments are given on the CLI, the built container plan
 * must not carry a CMD at all (null, not an empty list).
 */
@Test
public void testToJibContainerBuilder_noProgramArgumentsSpecified()
    throws IOException, InvalidImageReferenceException {
  JibContainerBuilder containerBuilder =
      WarFiles.toJibContainerBuilder(
          mockStandardWarExplodedProcessor,
          mockCommonCliOptions,
          mockCommonContainerConfigCliOptions,
          mockLogger);
  ContainerBuildPlan buildPlan = containerBuilder.toContainerBuildPlan();
  assertThat(buildPlan.getCmd()).isNull();
}
/**
 * Handles a task's acknowledgement of a checkpoint barrier. An unknown
 * checkpoint id is ignored (already completed or aborted). Savepoint acks mark
 * the subtask as preparing to close; for non-final checkpoints, a task that
 * signalled prepare-close is completed as an idle task.
 */
protected void acknowledgeTask(TaskAcknowledgeOperation ackOperation) {
  final long checkpointId = ackOperation.getBarrier().getId();
  final PendingCheckpoint pendingCheckpoint = pendingCheckpoints.get(checkpointId);
  if (pendingCheckpoint == null) {
    // Late or duplicate ack for a checkpoint that is no longer pending.
    LOG.info("skip already ack checkpoint " + checkpointId);
    return;
  }
  TaskLocation location = ackOperation.getTaskLocation();
  LOG.debug(
      "task[{}]({}/{}) ack. {}",
      location.getTaskID(),
      location.getPipelineId(),
      location.getJobId(),
      ackOperation.getBarrier().toString());
  pendingCheckpoint.acknowledgeTask(
      location,
      ackOperation.getStates(),
      pendingCheckpoint.getCheckpointType().isSavepoint()
          ? SubtaskStatus.SAVEPOINT_PREPARE_CLOSE
          : SubtaskStatus.RUNNING);
  if (ackOperation.getBarrier().getCheckpointType().notFinalCheckpoint()
      && ackOperation.getBarrier().prepareClose(location)) {
    completedCloseIdleTask(location);
  }
}
/**
 * Acknowledging a checkpoint id (999) that has no pending checkpoint must be
 * ignored silently rather than throwing.
 */
@Test
void testACKNotExistPendingCheckpoint() throws CheckpointStorageException {
  CheckpointConfig checkpointConfig = new CheckpointConfig();
  checkpointConfig.setStorage(new CheckpointStorageConfig());
  Map<Integer, CheckpointPlan> planMap = new HashMap<>();
  planMap.put(1, CheckpointPlan.builder().pipelineId(1).build());
  CheckpointManager checkpointManager =
      new CheckpointManager(
          1L,
          false,
          nodeEngine,
          null,
          planMap,
          checkpointConfig,
          instance.getExecutorService("test"),
          nodeEngine.getHazelcastInstance().getMap(IMAP_RUNNING_JOB_STATE));
  // Barrier id 999 was never registered as a pending checkpoint.
  checkpointManager.acknowledgeTask(
      new TaskAcknowledgeOperation(
          new TaskLocation(new TaskGroupLocation(1L, 1, 1), 1, 1),
          new CheckpointBarrier(
              999, System.currentTimeMillis(), CheckpointType.CHECKPOINT_TYPE),
          new ArrayList<>()));
}
/**
 * Starts a "select * from sources *" query filtered on the given field.
 *
 * @param fieldName the field the first condition applies to
 * @return the field, ready to attach an operator (e.g. contains)
 */
public static Field p(String fieldName) {
  return SELECT_ALL_FROM_SOURCES_ALL.where(fieldName);
}
/**
 * Q.p without explicit select/sources must default to "select * from sources *".
 *
 * <p>Fix: assertEquals takes (expected, actual); the original call passed them
 * reversed, which produces misleading failure messages.
 */
@Test
void build_query_which_created_from_Q_b_without_select_and_sources() {
  String q = Q.p("f1").contains("v1")
      .build();
  assertEquals("yql=select * from sources * where f1 contains \"v1\"", q);
}
/**
 * Unzips the plugin jar and builds its exploded representation; any failure is
 * wrapped in an IllegalStateException naming the plugin key and jar path.
 */
@Override
public ExplodedPlugin explode(PluginInfo info) {
  final File jar = info.getNonNullJarFile();
  try {
    return explodeFromUnzippedDir(info, jar, unzipFile(jar));
  } catch (Exception e) {
    throw new IllegalStateException(
      String.format("Fail to open plugin [%s]: %s", info.getKey(), jar.getAbsolutePath()), e);
  }
}
/**
 * Exploding a plugin must keep the jar itself in place but must NOT extract
 * non-lib entries (manifest, classes) into the unzip directory.
 */
@Test
public void extract_only_libs() throws IOException {
  File jar = loadFile("sonar-checkstyle-plugin-2.8.jar");
  underTest.explode(PluginInfo.create(jar));
  assertThat(new File(jar.getParent(), "sonar-checkstyle-plugin-2.8.jar")).exists();
  // Only libs are extracted: neither the manifest nor classes may appear.
  assertThat(new File(jar.getParent(), "sonar-checkstyle-plugin-2.8.jar_unzip/META-INF/MANIFEST.MF")).doesNotExist();
  assertThat(new File(jar.getParent(), "sonar-checkstyle-plugin-2.8.jar_unzip/org/sonar/plugins/checkstyle/CheckstyleVersion.class")).doesNotExist();
}
/**
 * Demo entry point: activates the two superpowers in turn, logging each use.
 */
public static void main(String[] args) {
  LOGGER.info("Use superpower: sky launch");
  new SkyLaunch().activate();
  LOGGER.info("Use superpower: ground dive");
  new GroundDive().activate();
}
/** Smoke test: running the demo's main with no arguments must not throw. */
@Test
void shouldExecuteWithoutException() {
  assertDoesNotThrow(() -> App.main(new String[]{}));
}
public static String getSpaces( int SpacesCount ) { // if ( SpacesCount < 0 ) return "?"; // String Info = ""; // for ( int K = 1; K <= SpacesCount; K ++ ) { Info += " "; } // // return Info; }
/**
 * getSpaces: a negative count yields "?", zero yields the empty string, and a
 * positive count yields exactly that many spaces.
 */
@Test
public void testgetSpaces() throws Exception {
  //
  assertEquals( "?", BTools.getSpaces( -3 ) );
  assertEquals( "", BTools.getSpaces( 0 ) );
  assertEquals( "    ", BTools.getSpaces( 4 ) );
  //
}
/**
 * Recreates the required data/web/logs directories, resets the temp directory,
 * and clears any stale process commands persisted there.
 */
@Override
public void reset() throws IOException {
  for (String pathKey : new String[] {PATH_DATA.getKey(), PATH_WEB.getKey(), PATH_LOGS.getKey()}) {
    createDirectory(pathKey);
  }
  File tempDir = createOrCleanTempDirectory(PATH_TEMP.getKey());
  try (AllProcessesCommands commands = new AllProcessesCommands(tempDir)) {
    commands.clean();
  }
}
// When <home>/data exists as a regular file instead of a directory, reset()
// must fail with a message naming the offending property and path.
@Test
public void fail_if_required_directory_is_a_file() throws Exception {
    // <home>/data is missing
    FileUtils.forceMkdir(webDir);
    FileUtils.forceMkdir(logsDir);
    FileUtils.touch(dataDir);

    assertThatThrownBy(() -> underTest.reset())
        .isInstanceOf(IllegalStateException.class)
        .hasMessage("Property 'sonar.path.data' is not valid, not a directory: " + dataDir.getAbsolutePath());
}
/**
 * Records one enumeration's split count in the circular history buffer.
 *
 * @param splitCount number of splits discovered in this enumeration
 */
synchronized void add(int splitCount) {
    // Wrap around once the buffer is full, overwriting the oldest entry.
    int slot = count % history.length;
    history[slot] = splitCount;
    count++;
}
// With capacity 3 but only two entries added, the snapshot must contain
// exactly the two recorded values in insertion order.
@Test
public void testNotFullHistory() {
    EnumerationHistory history = new EnumerationHistory(3);
    history.add(1);
    history.add(2);
    int[] expectedHistorySnapshot = {1, 2};
    testHistory(history, expectedHistorySnapshot);
}
/**
 * REST endpoint updating an existing tenant's code, queue and description.
 *
 * @param loginUser   session user performing the update (injected, hidden)
 * @param id          tenant id to update
 * @param tenantCode  new tenant code
 * @param queueId     new queue id
 * @param description optional tenant description
 * @return success result wrapping {@code true}
 * @throws Exception if the service-layer update fails
 */
@Operation(summary = "updateTenant", description = "UPDATE_TENANT_NOTES")
@Parameters({
        @Parameter(name = "id", description = "TENANT_ID", required = true, schema = @Schema(implementation = int.class, example = "100")),
        @Parameter(name = "tenantCode", description = "TENANT_CODE", required = true, schema = @Schema(implementation = String.class)),
        @Parameter(name = "queueId", description = "QUEUE_ID", required = true, schema = @Schema(implementation = int.class, example = "100")),
        @Parameter(name = "description", description = "TENANT_DESC", schema = @Schema(implementation = String.class))
})
@PutMapping(value = "/{id}")
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_TENANT_ERROR)
public Result<Boolean> updateTenant(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                    @PathVariable(value = "id") int id,
                                    @RequestParam(value = "tenantCode") String tenantCode,
                                    @RequestParam(value = "queueId") int queueId,
                                    @RequestParam(value = "description", required = false) String description) throws Exception {
    tenantService.updateTenant(loginUser, id, tenantCode, queueId, description);
    return Result.success(true);
}
// Updating a tenant id that does not exist must return TENANT_NOT_EXIST
// (HTTP 200 with an error code in the body, per this API's convention).
@Test
public void testUpdateTenant() throws Exception {
    MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
    paramsMap.add("id", "9");
    paramsMap.add("tenantCode", "cxc_te");
    paramsMap.add("queueId", "1");
    paramsMap.add("description", "tenant description");

    MvcResult mvcResult = mockMvc.perform(put("/tenants/{id}", 9)
            .header(SESSION_ID, sessionId)
            .params(paramsMap))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON))
            .andReturn();
    Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
    Assertions.assertEquals(Status.TENANT_NOT_EXIST.getCode(), result.getCode().intValue());
    logger.info(mvcResult.getResponse().getContentAsString());
}
/**
 * Generates all Java codec stubs for the IR: package info, type stubs, the
 * message header stub, then an encoder/decoder pair for every message.
 *
 * @throws IOException if any generated source cannot be written
 */
public void generate() throws IOException {
    packageNameByTypes.clear();
    generatePackageInfo();
    generateTypeStubs();
    generateMessageHeaderStub();

    for (final List<Token> tokens : ir.messages()) {
        // The first token of a message carries its name and metadata.
        final Token msgToken = tokens.get(0);
        final List<Token> messageBody = getMessageBody(tokens);
        final boolean hasVarData = -1 != findSignal(messageBody, Signal.BEGIN_VAR_DATA);

        // Partition the body into fixed fields, repeating groups and var-data,
        // advancing the shared index through each collector in order.
        int i = 0;
        final List<Token> fields = new ArrayList<>();
        i = collectFields(messageBody, i, fields);
        final List<Token> groups = new ArrayList<>();
        i = collectGroups(messageBody, i, groups);
        final List<Token> varData = new ArrayList<>();
        collectVarData(messageBody, i, varData);

        final String decoderClassName = formatClassName(decoderName(msgToken.name()));
        final String decoderStateClassName = decoderClassName + "#CodecStates";
        final FieldPrecedenceModel decoderPrecedenceModel = precedenceChecks.createDecoderModel(
            decoderStateClassName, tokens);
        generateDecoder(decoderClassName, msgToken, fields, groups, varData, hasVarData, decoderPrecedenceModel);

        final String encoderClassName = formatClassName(encoderName(msgToken.name()));
        final String encoderStateClassName = encoderClassName + "#CodecStates";
        final FieldPrecedenceModel encoderPrecedenceModel = precedenceChecks.createEncoderModel(
            encoderStateClassName, tokens);
        generateEncoder(encoderClassName, msgToken, fields, groups, varData, hasVarData, encoderPrecedenceModel);
    }
}
// With explicit-package support disabled, all generated types (messages,
// shared types, enums, header) must land in the schema's applicable namespace.
@Test
void shouldCreateTypesInSamePackageIfSupportDisabled() throws Exception {
    try (InputStream in = Tests.getLocalResource("explicit-package-test-schema.xml")) {
        final ParserOptions options = ParserOptions.builder().stopOnError(true).build();
        final MessageSchema schema = parse(in, options);
        final IrGenerator irg = new IrGenerator();
        ir = irg.generate(schema);
        outputManager.clear();
        outputManager.setPackageName(ir.applicableNamespace());

        // Explicit-package support flag is the 6th boolean argument: disabled.
        final JavaGenerator generator = new JavaGenerator(
            ir, BUFFER_NAME, READ_ONLY_BUFFER_NAME, false, false, false, false, outputManager);

        generator.generate();

        // Generated encoder/decoder must compile under the default namespace.
        final String encoderFqcn = ir.applicableNamespace() + ".TestMessageEncoder";
        final Class<?> encoderClazz = compile(encoderFqcn);
        assertNotNull(encoderClazz);

        final String decoderFqcn = ir.applicableNamespace() + ".TestMessageDecoder";
        final Class<?> decoderClazz = compile(decoderFqcn);
        assertNotNull(decoderClazz);

        // All supporting types must also live in the same package.
        final Map<String, CharSequence> sources = outputManager.getSources();
        assertNotNull(sources.get(ir.applicableNamespace() + ".CarEncoder"));
        assertNotNull(sources.get(ir.applicableNamespace() + ".CarDecoder"));
        assertNotNull(sources.get(ir.applicableNamespace() + ".BooleanType"));
        assertNotNull(sources.get(ir.applicableNamespace() + ".DaysEncoder"));
        assertNotNull(sources.get(ir.applicableNamespace() + ".DaysDecoder"));
        assertNotNull(sources.get(ir.applicableNamespace() + ".MessageHeaderEncoder"));
    }
}
/**
 * Strips leading whitespace (as defined by isSpace) from a string.
 *
 * @param source the string to trim, may be null
 * @return the string without leading spaces, or null if source was null
 */
public static String ltrim( String source ) {
    if ( source == null ) {
        return null;
    }
    // Scan past the leading run of space characters, then cut once.
    int start = 0;
    int length = source.length();
    while ( start < length && isSpace( source.charAt( start ) ) ) {
        start++;
    }
    return source.substring( start );
}
// Covers null passthrough, empty/all-space inputs, and that only LEADING
// (not trailing) whitespace is removed.
@Test
public void testLtrim() {
    assertEquals( null, Const.ltrim( null ) );
    assertEquals( "", Const.ltrim( "" ) );
    assertEquals( "", Const.ltrim( "  " ) );
    assertEquals( "test ", Const.ltrim( "test " ) );
    assertEquals( "test ", Const.ltrim( "  test " ) );
}
/**
 * Creates a command for the statement, borrowing the service context from
 * the given execution context.
 *
 * @param statement the configured statement to build a command for
 * @param context   execution context supplying the service context
 * @return the created command
 */
public Command create(
    final ConfiguredStatement<? extends Statement> statement,
    final KsqlExecutionContext context) {
    // Delegates to the three-argument overload.
    return create(statement, context.getServiceContext(), context);
}
// A TERMINATE statement must be turned into a plain Command built directly
// from the configured statement.
@Test
public void shouldCreateCommandForTerminateQuery() {
    // Given:
    givenTerminate();

    // When:
    final Command command = commandFactory.create(configuredStatement, executionContext);

    // Then:
    assertThat(command, is(Command.of(configuredStatement)));
}
/**
 * @return the number of elements covered by this half-open range, i.e. to - from
 */
@Override
public int size() {
    return this.to - this.from;
}
// A [5, 10) range contains exactly 5 elements.
@Test
void testSize() {
    RangeSet rangeSet = new RangeSet(5, 10);
    assertEquals(5, rangeSet.size());
}
/**
 * Retrieves PON link configuration from the device over NETCONF.
 * With a target ponlink-id, a single link is queried; with null, all links.
 *
 * @param target ponlink-id as a positive integer string, or null for all links
 * @return the raw NETCONF reply, or null on invalid input / not master / error
 */
@Override
public String getPonLinks(String target) {
    DriverHandler handler = handler();
    NetconfController controller = handler.get(NetconfController.class);
    MastershipService mastershipService = handler.get(MastershipService.class);
    DeviceId ncDeviceId = handler.data().deviceId();
    checkNotNull(controller, "Netconf controller is null");
    String reply = null;
    // Only the mastership owner of this device may execute the command.
    if (!mastershipService.isLocalMaster(ncDeviceId)) {
        log.warn("Not master for {} Use {} to execute command",
                 ncDeviceId,
                 mastershipService.getMasterFor(ncDeviceId));
        return null;
    }
    try {
        StringBuilder request = new StringBuilder();
        request.append(VOLT_NE_OPEN + VOLT_NE_NAMESPACE);
        request.append(ANGLE_RIGHT + NEW_LINE);
        request.append(buildStartTag(VOLT_PORTS));
        if (target != null) {
            // Validate the ponlink-id: must parse as a positive integer.
            int pon;
            try {
                pon = Integer.parseInt(target);
                if (pon <= ZERO) {
                    log.error("Invalid integer for ponlink-id:{}", target);
                    return null;
                }
            } catch (NumberFormatException e) {
                log.error("Non-number input for ponlink-id:{}", target);
                return null;
            }
            // Query a single PON link by id.
            request.append(buildStartTag(GPON_PONLINK_PORTS))
                .append(buildStartTag(GPON_PONLINK_PORT))
                .append(buildStartTag(PONLINK_ID, false))
                .append(target)
                .append(buildEndTag(PONLINK_ID))
                .append(buildEndTag(GPON_PONLINK_PORT))
                .append(buildEndTag(GPON_PONLINK_PORTS));
        } else {
            // No target given: request the whole ponlink-ports subtree.
            request.append(buildEmptyTag(GPON_PONLINK_PORTS));
        }
        request.append(buildEndTag(VOLT_PORTS));
        request.append(VOLT_NE_CLOSE);
        reply = controller
                .getDevicesMap()
                .get(ncDeviceId)
                .getSession()
                .get(request.toString(), REPORT_ALL);
    } catch (NetconfException e) {
        log.error("Cannot communicate to device {} exception {}", ncDeviceId, e);
    }
    return reply;
}
// Every invalid ponlink-id in the table must be rejected with a null reply.
@Test
public void testInvalidGetPonLinksInput() throws Exception {
    for (final String target : INVALID_GET_TCS) {
        final String reply = voltConfig.getPonLinks(target);
        assertNull("Incorrect response for INVALID_GET_TCS", reply);
    }
}
/**
 * Identity filter: returns the requested ciphers verbatim; with a null
 * request it falls back to either the default or the supported set.
 *
 * @param ciphers          requested cipher suites, may be null
 * @param defaultCiphers   default cipher suites
 * @param supportedCiphers all supported cipher suites
 * @return the filtered cipher suite array
 */
@Override
public String[] filterCipherSuites(Iterable<String> ciphers, List<String> defaultCiphers,
        Set<String> supportedCiphers) {
    if (ciphers == null) {
        return defaultToDefaultCiphers
                ? defaultCiphers.toArray(EmptyArrays.EMPTY_STRINGS)
                : supportedCiphers.toArray(EmptyArrays.EMPTY_STRINGS);
    }
    // Copy until the first null entry, mirroring the original truncation behavior.
    List<String> copied = new ArrayList<String>(supportedCiphers.size());
    for (String cipher : ciphers) {
        if (cipher == null) {
            break;
        }
        copied.add(cipher);
    }
    return copied.toArray(EmptyArrays.EMPTY_STRINGS);
}
// With a null requested-cipher list, the shared INSTANCE must fall back to
// the default cipher list (not the supported set).
@Test
public void regularInstanceDefaultsToDefaultCiphers() {
    List<String> defaultCiphers = Arrays.asList("FOO", "BAR");
    Set<String> supportedCiphers = new HashSet<String>(Arrays.asList("BAZ", "QIX"));
    String[] filtered = IdentityCipherSuiteFilter.INSTANCE
            .filterCipherSuites(null, defaultCiphers, supportedCiphers);
    assertArrayEquals(defaultCiphers.toArray(), filtered);
}
/**
 * Replays a coordinator record from the __consumer_offsets log, dispatching
 * by the record key's version to the appropriate manager: versions 0-1 go to
 * the offset manager, all others to the group metadata manager. A null value
 * is passed through as a tombstone via Utils.messageOrNull.
 *
 * @param offset        log offset of the record
 * @param producerId    producer id of the batch containing the record
 * @param producerEpoch producer epoch of the batch containing the record
 * @param record        the record to replay
 * @throws RuntimeException      on replay failure
 * @throws IllegalStateException if the key version is unknown
 */
@Override
public void replay(
    long offset,
    long producerId,
    short producerEpoch,
    CoordinatorRecord record
) throws RuntimeException {
    ApiMessageAndVersion key = record.key();
    ApiMessageAndVersion value = record.value();

    switch (key.version()) {
        case 0:
        case 1:
            // Offset commits (both key versions share the same payload types).
            offsetMetadataManager.replay(
                offset,
                producerId,
                (OffsetCommitKey) key.message(),
                (OffsetCommitValue) Utils.messageOrNull(value)
            );
            break;

        case 2:
            // Classic group metadata.
            groupMetadataManager.replay(
                (GroupMetadataKey) key.message(),
                (GroupMetadataValue) Utils.messageOrNull(value)
            );
            break;

        case 3:
            groupMetadataManager.replay(
                (ConsumerGroupMetadataKey) key.message(),
                (ConsumerGroupMetadataValue) Utils.messageOrNull(value)
            );
            break;

        case 4:
            groupMetadataManager.replay(
                (ConsumerGroupPartitionMetadataKey) key.message(),
                (ConsumerGroupPartitionMetadataValue) Utils.messageOrNull(value)
            );
            break;

        case 5:
            groupMetadataManager.replay(
                (ConsumerGroupMemberMetadataKey) key.message(),
                (ConsumerGroupMemberMetadataValue) Utils.messageOrNull(value)
            );
            break;

        case 6:
            groupMetadataManager.replay(
                (ConsumerGroupTargetAssignmentMetadataKey) key.message(),
                (ConsumerGroupTargetAssignmentMetadataValue) Utils.messageOrNull(value)
            );
            break;

        case 7:
            groupMetadataManager.replay(
                (ConsumerGroupTargetAssignmentMemberKey) key.message(),
                (ConsumerGroupTargetAssignmentMemberValue) Utils.messageOrNull(value)
            );
            break;

        case 8:
            groupMetadataManager.replay(
                (ConsumerGroupCurrentMemberAssignmentKey) key.message(),
                (ConsumerGroupCurrentMemberAssignmentValue) Utils.messageOrNull(value)
            );
            break;

        case 9:
            // Share group records start here.
            groupMetadataManager.replay(
                (ShareGroupPartitionMetadataKey) key.message(),
                (ShareGroupPartitionMetadataValue) Utils.messageOrNull(value)
            );
            break;

        case 10:
            groupMetadataManager.replay(
                (ShareGroupMemberMetadataKey) key.message(),
                (ShareGroupMemberMetadataValue) Utils.messageOrNull(value)
            );
            break;

        case 11:
            groupMetadataManager.replay(
                (ShareGroupMetadataKey) key.message(),
                (ShareGroupMetadataValue) Utils.messageOrNull(value)
            );
            break;

        case 12:
            groupMetadataManager.replay(
                (ShareGroupTargetAssignmentMetadataKey) key.message(),
                (ShareGroupTargetAssignmentMetadataValue) Utils.messageOrNull(value)
            );
            break;

        case 13:
            groupMetadataManager.replay(
                (ShareGroupTargetAssignmentMemberKey) key.message(),
                (ShareGroupTargetAssignmentMemberValue) Utils.messageOrNull(value)
            );
            break;

        case 14:
            groupMetadataManager.replay(
                (ShareGroupCurrentMemberAssignmentKey) key.message(),
                (ShareGroupCurrentMemberAssignmentValue) Utils.messageOrNull(value)
            );
            break;

        default:
            throw new IllegalStateException("Received an unknown record type " + key.version()
                + " in " + record);
    }
}
// Replaying a ShareGroupMemberMetadata record (key version 10) with a null
// value (tombstone) must be forwarded to the group metadata manager as null.
@Test
public void testReplayShareGroupMemberMetadataWithNullValue() {
    GroupMetadataManager groupMetadataManager = mock(GroupMetadataManager.class);
    OffsetMetadataManager offsetMetadataManager = mock(OffsetMetadataManager.class);
    CoordinatorMetrics coordinatorMetrics = mock(CoordinatorMetrics.class);
    CoordinatorMetricsShard metricsShard = mock(CoordinatorMetricsShard.class);
    GroupCoordinatorShard coordinator = new GroupCoordinatorShard(
        new LogContext(),
        groupMetadataManager,
        offsetMetadataManager,
        Time.SYSTEM,
        new MockCoordinatorTimer<>(Time.SYSTEM),
        mock(GroupCoordinatorConfig.class),
        coordinatorMetrics,
        metricsShard
    );

    ShareGroupMemberMetadataKey key = new ShareGroupMemberMetadataKey();

    // Version 10 selects the ShareGroupMemberMetadata branch of replay().
    coordinator.replay(0L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, new CoordinatorRecord(
        new ApiMessageAndVersion(key, (short) 10),
        null
    ));

    verify(groupMetadataManager, times(1)).replay(key, null);
}
/**
 * Executes an eth_call and returns its result.
 *
 * @param to                    contract address to call
 * @param data                  encoded call data
 * @param defaultBlockParameter block at which to evaluate the call
 * @return the call value if present, otherwise the error's revert data, otherwise null
 * @throws IOException if the RPC request fails
 */
@Override
public String sendCall(String to, String data, DefaultBlockParameter defaultBlockParameter)
        throws IOException {
    Transaction callTransaction = Transaction.createEthCallTransaction(getFromAddress(), to, data);
    EthCall ethCall = web3j.ethCall(callTransaction, defaultBlockParameter).send();
    assertCallNotReverted(ethCall);
    // Prefer the call result; fall back to revert data carried on the error, else null.
    if (ethCall.getValue() != null) {
        return ethCall.getValue();
    }
    if (ethCall.getError() != null) {
        return ethCall.getError().getData();
    }
    return null;
}
// A call whose response carries an error object (revert signalled by code)
// must raise ContractCallException even though the transport succeeded.
@Test
void sendCallErrorRevertByCode() throws IOException {
    EthCall lookupDataHex = new EthCall();
    Response.Error error = new Response.Error();
    error.setCode(10);
    error.setData(responseData);
    lookupDataHex.setError(error);
    Request request = mock(Request.class);
    when(request.send()).thenReturn(lookupDataHex);
    when(web3j.ethCall(any(Transaction.class), any(DefaultBlockParameter.class)))
            .thenReturn(request);
    assertThrows(
            ContractCallException.class,
            () -> clientTransactionManager.sendCall(
                    "0xAdress", "data", DefaultBlockParameter.valueOf("latest")));
}
/**
 * Imports all classes reachable from the given URL.
 *
 * @param url location to import classes from
 * @return the imported classes
 */
@PublicAPI(usage = ACCESS)
public JavaClasses importUrl(URL url) {
    // Delegates to the collection-based overload with a single-element list.
    return importUrls(singletonList(url));
}
// Verifies superclass/subclass relations across a five-level hierarchy:
// direct subclasses, transitive subclasses, and raw superclass links.
@Test
public void creates_relations_between_super_and_subclasses() {
    JavaClasses classes = new ClassFileImporter().importUrl(getClass().getResource("testexamples/classhierarchyimport"));
    JavaClass baseClass = classes.get(BaseClass.class);
    JavaClass subclass = classes.get(Subclass.class);
    JavaClass otherSubclass = classes.get(OtherSubclass.class);
    JavaClass subSubclass = classes.get(SubSubclass.class);
    JavaClass subSubSubclass = classes.get(SubSubSubclass.class);
    JavaClass subSubSubSubclass = classes.get(SubSubSubSubclass.class);

    assertThat(baseClass.getRawSuperclass().get().reflect()).isEqualTo(Object.class);
    // The original repeated the next three assertions verbatim; the duplicates
    // added no coverage and have been removed.
    assertThat(baseClass.getSubclasses()).containsOnly(subclass, otherSubclass);
    assertThat(baseClass.getAllSubclasses()).containsOnly(subclass, otherSubclass, subSubclass, subSubSubclass, subSubSubSubclass);
    assertThat(subclass.getRawSuperclass()).contains(baseClass);
    assertThat(subclass.getAllSubclasses()).containsOnly(subSubclass, subSubSubclass, subSubSubSubclass);
    assertThat(subSubclass.getRawSuperclass()).contains(subclass);
}
/**
 * Estimates distance from the calibrated transmit power and observed RSSI.
 *
 * @param txPower calibrated transmit power (RSSI at the reference distance)
 * @param rssi    observed received signal strength
 * @return estimated distance, or -1.0 if no distance calculator is configured
 */
@Override
public double calculateDistance(int txPower, double rssi) {
    // Without a configured calculator no estimate is possible; -1 is the sentinel.
    if (mDistanceCalculator == null) {
        LogManager.w(TAG, "distance calculator has not been set");
        return -1.0;
    }
    return mDistanceCalculator.calculateDistance(txPower, rssi);
}
// Equal txPower and rssi means the device is at the reference distance,
// so the estimate should be ~1.0 meter.
@Test
public void testCalculatesDistance() {
    org.robolectric.shadows.ShadowLog.stream = System.err;
    ModelSpecificDistanceCalculator distanceCalculator = new ModelSpecificDistanceCalculator(null, null);
    Double distance = distanceCalculator.calculateDistance(-59, -59);
    assertEquals("Distance should be 1.0 for same power and rssi", 1.0, distance, 0.1);
}
/**
 * Renders the mode as the concatenated owner, group and other permission
 * triads, e.g. "rwxr-x---".
 */
@Override
public String toString() {
    StringBuilder rendered = new StringBuilder();
    rendered.append(mOwnerBits.toString());
    rendered.append(mGroupBits.toString());
    rendered.append(mOtherBits.toString());
    return rendered.toString();
}
// Octal permission shorts must render as the canonical rwx triads.
@Test
public void toStringTest() {
    assertEquals("rwxrwxrwx", new Mode((short) 0777).toString());
    assertEquals("rw-r-----", new Mode((short) 0640).toString());
    assertEquals("rw-------", new Mode((short) 0600).toString());
    assertEquals("---------", new Mode((short) 0000).toString());
}
/**
 * Unregisters the URL via the superclass, then fires the unregistered hook.
 *
 * @param url the URL to unregister
 */
@Override
public void unregister(URL url) {
    super.unregister(url);
    unregistered(url);
}
// Registers a URL, then unregisters it and checks it disappears.
// NOTE(review): the final assertion reads the set fetched BEFORE unregister()
// is called — it only passes if getRegistered() returns a live view; confirm.
@Test
void testUnregister() {
    Set<URL> registered;
    // register first
    registry.register(serviceUrl);
    registered = registry.getRegistered();
    assertTrue(registered.contains(serviceUrl));
    // then unregister
    registered = registry.getRegistered();
    registry.unregister(serviceUrl);
    assertFalse(registered.contains(serviceUrl));
}
/**
 * Drops all cached resource statistics for the given backend and invalidates
 * the affected cached averages.
 *
 * @param be id of the backend being removed
 */
public void removeBe(long be) {
    // Invalidate the cached average only when an entry was actually removed.
    boolean hadCores = numHardwareCoresPerBe.remove(be) != null;
    if (hadCores) {
        cachedAvgNumHardwareCores.set(-1);
    }
    LOG.info("remove numHardwareCores of be [{}], current cpuCores stats: {}", be, numHardwareCoresPerBe);

    boolean hadMemLimit = memLimitBytesPerBe.remove(be) != null;
    if (hadMemLimit) {
        cachedAvgMemLimitBytes.set(-1);
    }
}
// Averages must exclude removed backends; removing an unknown BE is a no-op.
@Test
public void testRemoveBe() {
    BackendResourceStat stat = BackendResourceStat.getInstance();
    stat.setNumHardwareCoresOfBe(0L, 8);
    stat.setNumHardwareCoresOfBe(1L, 4);
    assertThat(stat.getAvgNumHardwareCoresOfBe()).isEqualTo(6);
    stat.setMemLimitBytesOfBe(0L, 100);
    stat.setMemLimitBytesOfBe(1L, 50);
    assertThat(stat.getAvgMemLimitBytes()).isEqualTo(150 / 2);
    assertThat(stat.getAvgNumHardwareCoresOfBe()).isEqualTo(6);

    // Remove non-exist BE.
    stat.removeBe(3L);
    assertThat(stat.getAvgMemLimitBytes()).isEqualTo(150 / 2);
    assertThat(stat.getAvgNumHardwareCoresOfBe()).isEqualTo(6);

    // Remove exist BE.
    stat.removeBe(0L);
    assertThat(stat.getAvgMemLimitBytes()).isEqualTo(50);
    assertThat(stat.getAvgNumHardwareCoresOfBe()).isEqualTo(4);
}
/**
 * Logs an authentication failure at DEBUG level with cause, auth method,
 * provider, client IPs and a flood-protected login.
 *
 * @param request the HTTP request that failed authentication; validated first
 * @param e       the failure details; must not be null
 */
@Override
public void loginFailure(HttpRequest request, AuthenticationException e) {
    checkRequest(request);
    requireNonNull(e, "AuthenticationException can't be null");
    // Skip the string-building work entirely when DEBUG is disabled.
    if (!LOGGER.isDebugEnabled()) {
        return;
    }
    Source source = e.getSource();
    LOGGER.debug("login failure [cause|{}][method|{}][provider|{}|{}][IP|{}|{}][login|{}]",
        emptyIfNull(e.getMessage()),
        source.getMethod(),
        source.getProvider(),
        source.getProviderName(),
        request.getRemoteAddr(),
        getAllIps(request),
        // preventLogFlood truncates/limits attacker-controlled login strings.
        preventLogFlood(emptyIfNull(e.getLogin())));
}
// An AuthenticationException without a login must still be logged, with an
// empty [login|] field rather than a null.
@Test
public void login_failure_creates_DEBUG_log_with_empty_login_if_AuthenticationException_has_no_login() {
    AuthenticationException exception = newBuilder().setSource(Source.sso()).setMessage("message").build();
    underTest.loginFailure(mockRequest(), exception);

    verifyLog("login failure [cause|message][method|SSO][provider|SSO|sso][IP||][login|]", Set.of("logout", "login success"));
}
/**
 * Returns the node id sampled at the current monotonic clock reading.
 *
 * @return a positive node id, or the NODE_ID_OUT_OF_RANGE sentinel
 */
private int getNodeId() {
    int nodeId = getNodeId(System.nanoTime());
    // Only positive ids or the explicit out-of-range sentinel are valid.
    assert nodeId > 0 || nodeId == NODE_ID_OUT_OF_RANGE : "getNodeId() returned invalid value: " + nodeId;
    return nodeId;
}
// A changed node id must stay cached until the update interval elapses,
// then be picked up exactly at the interval boundary.
@Test
public void when_nodeIdUpdated_then_pickedUpAfterUpdateInterval() {
    when(clusterService.getMemberListJoinVersion()).thenReturn(20);
    assertEquals(20, gen.getNodeId(0));
    when(clusterService.getMemberListJoinVersion()).thenReturn(30);
    // Still the cached value until the interval passes.
    assertEquals(20, gen.getNodeId(0));
    assertEquals(20, gen.getNodeId(NODE_ID_UPDATE_INTERVAL_NS - 1));
    assertEquals(30, gen.getNodeId(NODE_ID_UPDATE_INTERVAL_NS));
}
/**
 * Checks whether a file may be created in the given working directory.
 * Creation is denied at the root, inside DeepBox/Box containers, and in
 * folders whose ACL lacks the CANADDCHILDREN permission.
 *
 * @param workdir  target directory
 * @param filename name of the file to create (used only in error messages)
 * @throws AccessDeniedException if creation is not permitted in workdir
 */
@Override
public void preflight(final Path workdir, final String filename) throws BackgroundException {
    // Container-level nodes never accept direct children.
    if(workdir.isRoot() || new DeepboxPathContainerService(session).isDeepbox(workdir) || new DeepboxPathContainerService(session).isBox(workdir)) {
        throw new AccessDeniedException(MessageFormat.format(LocaleFactory.localizedString("Cannot create {0}", "Error"), filename)).withFile(workdir);
    }
    final Acl acl = workdir.attributes().getAcl();
    if(Acl.EMPTY == acl) {
        // Missing initialization
        // An uninitialized ACL is treated permissively rather than denying.
        log.warn(String.format("Unknown ACLs on %s", workdir));
        return;
    }
    if(!acl.get(new Acl.CanonicalUser()).contains(CANADDCHILDREN)) {
        if(log.isWarnEnabled()) {
            log.warn(String.format("ACL %s for %s does not include %s", acl, workdir, CANADDCHILDREN));
        }
        throw new AccessDeniedException(MessageFormat.format(LocaleFactory.localizedString("Cannot create {0}", "Error"), filename)).withFile(workdir);
    }
}
// A folder whose ACL lacks canAddChildren must reject both the touch and
// the mkdir preflight checks.
@Test
public void testNoAddChildrenFolder() throws Exception {
    final DeepboxIdProvider nodeid = new DeepboxIdProvider(session);
    final Path folder = new Path("/ORG 1 - DeepBox Desktop App/ORG1:Box1/Documents/Bookkeeping", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final PathAttributes attributes = new DeepboxAttributesFinderFeature(session, nodeid).find(folder);
    // Server-side policy and cached ACL must agree that children cannot be added.
    assertFalse(new CoreRestControllerApi(session.getClient()).getNodeInfo(attributes.getFileId(), null, null, null).getNode().getPolicy().isCanAddChildren());
    assertFalse(attributes.getAcl().get(new Acl.CanonicalUser()).contains(CANADDCHILDREN));
    assertThrows(AccessDeniedException.class, () -> new DeepboxTouchFeature(session, nodeid).preflight(folder.withAttributes(attributes), new AlphanumericRandomStringService().random()));
    assertThrows(AccessDeniedException.class, () -> new DeepboxDirectoryFeature(session, nodeid).preflight(folder.withAttributes(attributes), new AlphanumericRandomStringService().random()));
}
/** Updates the giant's health in the underlying model. */
void setHealth(Health health) {
    model.setHealth(health);
}
// Each Health value set on the model must be reflected by the getter and
// rendered into the model's toString().
@Test
void testSetHealth() {
    final var model = new GiantModel("giant1", Health.HEALTHY, Fatigue.ALERT, Nourishment.SATURATED);
    assertEquals(Health.HEALTHY, model.getHealth());
    var messageFormat = "Giant giant1, The giant looks %s, alert and saturated.";
    for (final var health : Health.values()) {
        model.setHealth(health);
        assertEquals(health, model.getHealth());
        assertEquals(String.format(messageFormat, health), model.toString());
    }
}
/**
 * Validates worker location options: zone/workerRegion/workerZone are pairwise
 * mutually exclusive, the worker_region experiment conflicts with the explicit
 * options, and the deprecated --zone is migrated to --workerZone.
 *
 * @param workerOptions the worker pool options to validate and normalize
 */
@VisibleForTesting
static void validateWorkerSettings(DataflowPipelineWorkerPoolOptions workerOptions) {
    DataflowPipelineOptions dataflowOptions = workerOptions.as(DataflowPipelineOptions.class);
    validateSdkContainerImageOptions(workerOptions);

    GcpOptions gcpOptions = workerOptions.as(GcpOptions.class);
    Preconditions.checkArgument(
        gcpOptions.getZone() == null || gcpOptions.getWorkerRegion() == null,
        "Cannot use option zone with workerRegion. Prefer either workerZone or workerRegion.");
    Preconditions.checkArgument(
        gcpOptions.getZone() == null || gcpOptions.getWorkerZone() == null,
        "Cannot use option zone with workerZone. Prefer workerZone.");
    Preconditions.checkArgument(
        gcpOptions.getWorkerRegion() == null || gcpOptions.getWorkerZone() == null,
        "workerRegion and workerZone options are mutually exclusive.");

    // The worker_region experiment conflicts with the explicit location options.
    boolean hasExperimentWorkerRegion =
        dataflowOptions.getExperiments() != null
            && dataflowOptions.getExperiments().stream()
                .anyMatch(experiment -> experiment.startsWith("worker_region"));
    Preconditions.checkArgument(
        !hasExperimentWorkerRegion || gcpOptions.getWorkerRegion() == null,
        "Experiment worker_region and option workerRegion are mutually exclusive.");
    Preconditions.checkArgument(
        !hasExperimentWorkerRegion || gcpOptions.getWorkerZone() == null,
        "Experiment worker_region and option workerZone are mutually exclusive.");

    // Migrate the deprecated --zone flag to --workerZone.
    if (gcpOptions.getZone() != null) {
        LOG.warn("Option --zone is deprecated. Please use --workerZone instead.");
        gcpOptions.setWorkerZone(gcpOptions.getZone());
        gcpOptions.setZone(null);
    }
}
// Setting the legacy workerHarnessContainerImage option must also populate
// its replacement, sdkContainerImage, after validation.
@Test
public void testAliasForLegacyWorkerHarnessContainerImage() {
    DataflowPipelineWorkerPoolOptions options =
        PipelineOptionsFactory.as(DataflowPipelineWorkerPoolOptions.class);
    String testImage = "image.url:worker";
    options.setWorkerHarnessContainerImage(testImage);
    DataflowRunner.validateWorkerSettings(options);
    assertEquals(testImage, options.getWorkerHarnessContainerImage());
    assertEquals(testImage, options.getSdkContainerImage());
}
/**
 * Analyzes the statement, delegating to the two-argument overload with the
 * boolean flag disabled (flag semantics defined by that overload — TODO
 * confirm what the flag controls).
 *
 * @param statement the statement to analyze
 * @return the analysis result
 */
public Analysis analyze(Statement statement) {
    return analyze(statement, false);
}
// Non-equijoin conditions (arithmetic and OR-ed equalities) must be accepted
// for LEFT and RIGHT outer joins without analysis errors.
@Test
public void testNonEquiOuterJoin() {
    analyze("SELECT * FROM t1 LEFT JOIN t2 ON t1.a + t2.a = 1");
    analyze("SELECT * FROM t1 RIGHT JOIN t2 ON t1.a + t2.a = 1");
    analyze("SELECT * FROM t1 LEFT JOIN t2 ON t1.a = t2.a OR t1.b = t2.b");
}
/**
 * Assigns preferred executor locations to each scan task group.
 *
 * @param taskGroups        scan task groups to assign
 * @param executorLocations available executor locations
 * @return per-group arrays of assigned executor locations (parallel to taskGroups)
 */
public static String[][] assignExecutors(
    List<? extends ScanTaskGroup<?>> taskGroups,
    List<String> executorLocations) {
    // Partition hashes are shared across groups so equal partitions hash consistently.
    Map<Integer, JavaHash<StructLike>> partitionHashes = Maps.newHashMap();
    String[][] assignments = new String[taskGroups.size()][];

    for (int groupIndex = 0; groupIndex < taskGroups.size(); groupIndex++) {
        assignments[groupIndex] = assign(taskGroups.get(groupIndex), executorLocations, partitionHashes);
    }

    return assignments;
}
// Task groups without delete files must receive an empty executor assignment.
@TestTemplate
public void testFileScanTaskWithoutDeletes() {
    List<ScanTask> tasks =
        ImmutableList.of(
            new MockFileScanTask(mockDataFile(Row.of(1, "a")), SCHEMA, SPEC_1),
            new MockFileScanTask(mockDataFile(Row.of(2, "b")), SCHEMA, SPEC_1),
            new MockFileScanTask(mockDataFile(Row.of(3, "c")), SCHEMA, SPEC_1));
    ScanTaskGroup<ScanTask> taskGroup = new BaseScanTaskGroup<>(tasks);
    List<ScanTaskGroup<ScanTask>> taskGroups = ImmutableList.of(taskGroup);

    String[][] locations = SparkPlanningUtil.assignExecutors(taskGroups, EXECUTOR_LOCATIONS);

    // should not assign executors if there are no deletes
    assertThat(locations.length).isEqualTo(1);
    assertThat(locations[0]).isEmpty();
}
/**
 * Intentionally a no-op in this client implementation.
 * NOTE(review): the empty body silently drops updates — the corresponding
 * test hints an UnsupportedOperationException may be intended; confirm.
 *
 * @param serviceName service the instance belongs to
 * @param groupName   group of the service
 * @param instance    instance data to update
 * @throws NacosException declared for interface compatibility; never thrown here
 */
@Override
public void updateInstance(String serviceName, String groupName, Instance instance) throws NacosException {

}
// Currently only verifies the call completes; the expected-exception check
// is commented out below pending a decision on updateInstance's behavior.
@Test
void testUpdateInstance() throws Exception {
    //TODO thrown.expect(UnsupportedOperationException.class);
    client.updateInstance(SERVICE_NAME, GROUP_NAME, instance);
}
/**
 * Registers the redirect plugin bean.
 *
 * @param dispatcherHandler the WebFlux dispatcher used to re-dispatch requests
 * @return the redirect plugin
 */
@Bean
public ShenyuPlugin redirectPlugin(final DispatcherHandler dispatcherHandler) {
    return new RedirectPlugin(dispatcherHandler);
}
// The auto-configuration must expose a "redirectPlugin" bean whose name
// matches the REDIRECT plugin enum entry.
@Test
public void testRedirectPlugin() {
    new ApplicationContextRunner()
        .withConfiguration(AutoConfigurations.of(RedirectPluginConfiguration.class))
        .withBean(RedirectPluginConfigurationTest.class)
        .withBean(DispatcherHandler.class)
        .withPropertyValues("debug=true")
        .run(context -> {
            ShenyuPlugin shenyuPlugin = context.getBean("redirectPlugin", ShenyuPlugin.class);
            assertNotNull(shenyuPlugin);
            assertThat(shenyuPlugin.named()).isEqualTo(PluginEnum.REDIRECT.getName());
        });
}
/**
 * Validates a review answer request against its persisted question:
 * the question must exist, option answers are rejected (text questions),
 * required questions must be answered, and the answer length is checked.
 *
 * @param request the answer creation request to validate
 */
public void validate(CreateReviewAnswerRequest request) {
    // The referenced question must exist before any content validation runs.
    Question question = questionRepository.findById(request.questionId())
            .orElseThrow(() -> new SubmittedQuestionNotFoundException(request.questionId()));

    validateNotIncludingOptions(request);
    validateQuestionRequired(question, request);
    validateLength(request);
}
// A TEXT-type question answered with selected option ids must be rejected
// with TextAnswerIncludedOptionItemException.
@Test
void 텍스트형_질문에_선택형_응답을_하면_예외가_발생한다() {
    // given
    Question savedQuestion = questionRepository.save(new Question(true, QuestionType.TEXT, "질문", "가이드라인", 1));
    CreateReviewAnswerRequest request = new CreateReviewAnswerRequest(savedQuestion.getId(), List.of(1L), "응답");

    // when, then
    assertThatCode(() -> createTextAnswerRequestValidator.validate(request))
            .isInstanceOf(TextAnswerIncludedOptionItemException.class);
}
/**
 * Returns the flyweight Nazgul instance registered under the given name.
 *
 * @param name the Nazgul's name
 * @return the shared instance for that name (same object on every call)
 */
public static Nazgul getInstance(NazgulName name) {
    return nazguls.get(name);
}
// Every name must map to a non-null flyweight that is reused (same object)
// across calls and carries the matching name.
@Test
void testGetInstance() {
    for (final var name : NazgulName.values()) {
        final var nazgul = Nazgul.getInstance(name);
        assertNotNull(nazgul);
        assertSame(nazgul, Nazgul.getInstance(name));
        assertEquals(name, nazgul.getName());
    }
}
/**
 * Checks whether the given IP address falls within any configured priority
 * CIDR range.
 *
 * @param ip the IP address to test (IPv4 or IPv6); surrounding whitespace ignored
 * @return true if the address is contained in at least one priority CIDR
 */
@VisibleForTesting
static boolean isInPriorNetwork(String ip) {
    // Parse the address once; the original rebuilt it for every CIDR
    // (loop-invariant work inside the loop).
    IPAddressString address = new IPAddressString(ip.trim());
    for (String cidr : PRIORITY_CIDRS) {
        IPAddressString network = new IPAddressString(cidr.trim());
        if (network.contains(address)) {
            return true;
        }
    }
    return false;
}
// An IPv6 address inside a configured /48 priority CIDR must match.
// NOTE(review): this mutates the global PRIORITY_CIDRS list without restoring
// it, leaking state into other tests — consider cleaning up in an @After.
@Test
public void cidrTest2() {
    List<String> priorityCidrs = FrontendOptions.PRIORITY_CIDRS;
    priorityCidrs.add("2408:4001:258::/48");
    // isInPriorNetwork is static; call it via the class instead of an
    // unnecessary instance, and use assertTrue over assertEquals(true, ...).
    boolean inPriorNetwork = FrontendOptions.isInPriorNetwork("2408:4001:258:3780:f3f4:5acd:d53d:fa23");
    Assert.assertTrue(inPriorNetwork);
}
/**
 * Deep-clones the given object, choosing the cheapest safe strategy:
 * strings returned as-is; collections/maps with non-Serializable elements
 * JSON round-tripped with their parametrized type; JsonNodes deep-copied;
 * Serializable objects Java-serialized; everything else JSON-cloned.
 *
 * @param object the object to clone
 * @return a deep copy of {@code object}
 */
@Override
public <T> T clone(T object) {
    if (object instanceof String) {
        // Strings are immutable; no copy needed.
        return object;
    } else if (object instanceof Collection) {
        // Inspect one element to decide whether Java serialization is usable.
        Object firstElement = findFirstNonNullElement((Collection) object);
        if (firstElement != null && !(firstElement instanceof Serializable)) {
            JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), firstElement.getClass());
            return objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type);
        }
    } else if (object instanceof Map) {
        // Same probe for maps: both key and value must be Serializable.
        Map.Entry firstEntry = this.findFirstNonNullEntry((Map) object);
        if (firstEntry != null) {
            Object key = firstEntry.getKey();
            Object value = firstEntry.getValue();
            if (!(key instanceof Serializable) || !(value instanceof Serializable)) {
                JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), key.getClass(), value.getClass());
                return (T) objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type);
            }
        }
    } else if (object instanceof JsonNode) {
        return (T) ((JsonNode) object).deepCopy();
    }
    if (object instanceof Serializable) {
        try {
            return (T) SerializationHelper.clone((Serializable) object);
        } catch (SerializationException e) {
            //it is possible that object itself implements java.io.Serializable, but underlying structure does not
            //in this case we switch to the other JSON marshaling strategy which doesn't use the Java serialization
        }
    }
    // Fallback: JSON-based cloning for everything else.
    return jsonClone(object);
}
// Cloning a JsonNode must yield an equal but distinct instance (deepCopy path).
@Test
public void should_clone_jsonnode() {
    Object original = mapper.getObjectMapper().createArrayNode()
            .add(BigDecimal.ONE)
            .add(1.0)
            .add("string");
    Object cloned = serializer.clone(original);
    assertEquals(original, cloned);
    assertNotSame(original, cloned);
}
@Override public MetadataRequest.Builder buildRequest(Set<BrokerKey> keys) { validateLookupKeys(keys); // Send empty `Metadata` request. We are only interested in the brokers from the response return new MetadataRequest.Builder(new MetadataRequestData()); }
// buildRequest must reject any key set other than the canonical LOOKUP_KEYS,
// including subsets, broker-specific keys and supersets.
@Test
public void testBuildRequestWithInvalidLookupKeys() {
    AllBrokersStrategy strategy = new AllBrokersStrategy(logContext);
    AllBrokersStrategy.BrokerKey key1 = new AllBrokersStrategy.BrokerKey(OptionalInt.empty());
    AllBrokersStrategy.BrokerKey key2 = new AllBrokersStrategy.BrokerKey(OptionalInt.of(1));
    assertThrows(IllegalArgumentException.class, () -> strategy.buildRequest(mkSet(key1)));
    assertThrows(IllegalArgumentException.class, () -> strategy.buildRequest(mkSet(key2)));
    assertThrows(IllegalArgumentException.class, () -> strategy.buildRequest(mkSet(key1, key2)));

    // Even a strict superset of the valid lookup keys must be rejected.
    Set<AllBrokersStrategy.BrokerKey> keys = new HashSet<>(AllBrokersStrategy.LOOKUP_KEYS);
    keys.add(key2);
    assertThrows(IllegalArgumentException.class, () -> strategy.buildRequest(keys));
}
/**
 * Resolves mapping fields for a MongoDB collection (or change stream).
 * Without user-declared fields, every discovered document field is exposed;
 * otherwise each user field is matched against a discovered field and its
 * type validated.
 *
 * @param externalName       [database, collection] external name
 * @param dataConnectionName optional data connection name
 * @param options            mapping options (e.g. primary-key column)
 * @param userFields         user-declared fields; may be empty
 * @param stream             true when mapping a change stream (document fields
 *                           live under the "fullDocument." prefix)
 * @return the resolved mapping fields
 * @throws IllegalArgumentException if a user field cannot be resolved
 */
List<MappingField> resolveFields(
        @Nonnull String[] externalName,
        @Nullable String dataConnectionName,
        @Nonnull Map<String, String> options,
        @Nonnull List<MappingField> userFields,
        boolean stream
) {
    Predicate<MappingField> pkColumnName = Options.getPkColumnChecker(options, stream);
    Map<String, DocumentField> dbFields = readFields(externalName, dataConnectionName, options, stream);

    List<MappingField> resolvedFields = new ArrayList<>();
    if (userFields.isEmpty()) {
        // No user fields: expose every sampled document field as-is.
        for (DocumentField documentField : dbFields.values()) {
            MappingField mappingField = new MappingField(
                    documentField.columnName,
                    resolveType(documentField.columnType),
                    documentField.columnName,
                    documentField.columnType.name()
            );
            mappingField.setPrimaryKey(pkColumnName.test(mappingField));
            resolvedFields.add(mappingField);
        }
    } else {
        for (MappingField f : userFields) {
            // Stream documents are nested under the change event's fullDocument.
            String prefixIfStream = stream ? "fullDocument." : "";
            String nameInMongo = f.externalName() == null ? prefixIfStream + f.name() : f.externalName();

            DocumentField documentField = getField(dbFields, f, stream);
            if (documentField == null) {
                throw new IllegalArgumentException("Could not resolve field with name " + nameInMongo);
            }
            MappingField mappingField = new MappingField(f.name(), f.type(), documentField.columnName,
                    documentField.columnType.name());
            mappingField.setPrimaryKey(pkColumnName.test(mappingField));
            validateType(f, documentField);
            resolvedFields.add(mappingField);
        }
    }
    return resolvedFields;
}
// Sampling a collection for a STREAM mapping must resolve change-stream meta
// fields (resumeToken, operationType) plus the sampled document fields under
// the fullDocument.* prefix, with _id as the primary key.
@Test
public void testResolvesMappingFieldsViaSampleInStream() {
    try (MongoClient client = MongoClients.create(mongoContainer.getConnectionString())) {
        String databaseName = "testDatabase";
        String collectionName = "testResolvesMappingFieldsViaSampleInStream";
        MongoDatabase testDatabase = client.getDatabase(databaseName);
        MongoCollection<Document> collection = testDatabase.getCollection(collectionName);
        collection.insertOne(new Document("firstName", "Tomasz")
                .append("lastName", "Gawęda")
                .append("birthYear", 1992));

        FieldResolver resolver = new FieldResolver(null);
        Map<String, String> readOpts = new HashMap<>();
        readOpts.put("connectionString", mongoContainer.getConnectionString());
        // Empty user fields + stream=true: fields are discovered by sampling.
        List<MappingField> fields = resolver.resolveFields(new String[]{databaseName, collectionName},
                null, readOpts, emptyList(), true);
        assertThat(fields).contains(
                fieldWithSameExternal("resumeToken", VARCHAR, BsonType.STRING),
                fieldWithSameExternal("operationType", VARCHAR, BsonType.STRING),
                fieldWithSameExternal("fullDocument._id", OBJECT, BsonType.OBJECT_ID).setPrimaryKey(true),
                fieldWithSameExternal("fullDocument.firstName", VARCHAR, BsonType.STRING),
                fieldWithSameExternal("fullDocument.lastName", VARCHAR, BsonType.STRING),
                fieldWithSameExternal("fullDocument.birthYear", INT, BsonType.INT32)
        );
    }
}
/**
 * Decodes the given bytes into a DataMap using the codec selected from the
 * request headers' content type.
 *
 * @param headers request headers used to resolve the content type
 * @param bytes   the serialized payload
 * @return the decoded DataMap
 * @throws MimeTypeParseException if the content-type header cannot be parsed
 * @throws IOException            if decoding fails
 */
public static DataMap bytesToDataMap(Map<String, String> headers, ByteString bytes)
    throws MimeTypeParseException, IOException {
    return getContentType(headers).getCodec().readMap(bytes);
}
// An unsupported content type ("application/x-pson") must surface as an
// IOException while decoding the payload.
@Test(expectedExceptions = IOException.class)
public void testByteStringToDataMapWithInvalidContentType() throws MimeTypeParseException, IOException {
    DataMap dataMap = createTestDataMap();
    ByteString byteString = ByteString.copy(JACKSON_DATA_CODEC.mapToBytes(dataMap));
    bytesToDataMap("application/x-pson", byteString);
}
/**
 * @return 0, the JDBC convention for "no fixed limit" on cursor name length
 */
@Override
public int getMaxCursorNameLength() {
    return 0;
}
// The metadata must advertise "no fixed limit" (0) for cursor name length.
@Test
void assertGetMaxCursorNameLength() {
    assertThat(metaData.getMaxCursorNameLength(), is(0));
}