focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Initializes the Presto S3 filesystem from the Hadoop Configuration, falling back to
// HiveS3Config defaults for every setting. Validates mutually exclusive options
// (instance credentials vs. IAM role; explicit endpoint vs. region pinning), builds the
// AWS ClientConfiguration (retries, protocol, timeouts, connection pool, user agent),
// and finally creates the credentials provider and the AmazonS3 client.
// NOTE(review): assumes uri carries both a scheme and an authority — confirm callers guarantee this.
@Override public void initialize(URI uri, Configuration conf) throws IOException { requireNonNull(uri, "uri is null"); requireNonNull(conf, "conf is null"); super.initialize(uri, conf); setConf(conf); this.uri = URI.create(uri.getScheme() + "://" + uri.getAuthority()); this.workingDirectory = new Path(PATH_SEPARATOR).makeQualified(this.uri, new Path(PATH_SEPARATOR)); HiveS3Config defaults = new HiveS3Config(); this.stagingDirectory = new File(conf.get(S3_STAGING_DIRECTORY, defaults.getS3StagingDirectory().toString())); this.maxAttempts = conf.getInt(S3_MAX_CLIENT_RETRIES, defaults.getS3MaxClientRetries()) + 1; this.maxBackoffTime = Duration.valueOf(conf.get(S3_MAX_BACKOFF_TIME, defaults.getS3MaxBackoffTime().toString())); this.maxRetryTime = Duration.valueOf(conf.get(S3_MAX_RETRY_TIME, defaults.getS3MaxRetryTime().toString())); int maxErrorRetries = conf.getInt(S3_MAX_ERROR_RETRIES, defaults.getS3MaxErrorRetries()); boolean sslEnabled = conf.getBoolean(S3_SSL_ENABLED, defaults.isS3SslEnabled()); Duration connectTimeout = Duration.valueOf(conf.get(S3_CONNECT_TIMEOUT, defaults.getS3ConnectTimeout().toString())); Duration socketTimeout = Duration.valueOf(conf.get(S3_SOCKET_TIMEOUT, defaults.getS3SocketTimeout().toString())); int maxConnections = conf.getInt(S3_MAX_CONNECTIONS, defaults.getS3MaxConnections()); this.multiPartUploadMinFileSize = conf.getLong(S3_MULTIPART_MIN_FILE_SIZE, defaults.getS3MultipartMinFileSize().toBytes()); this.multiPartUploadMinPartSize = conf.getLong(S3_MULTIPART_MIN_PART_SIZE, defaults.getS3MultipartMinPartSize().toBytes()); this.isPathStyleAccess = conf.getBoolean(S3_PATH_STYLE_ACCESS, defaults.isS3PathStyleAccess()); this.useInstanceCredentials = conf.getBoolean(S3_USE_INSTANCE_CREDENTIALS, defaults.isS3UseInstanceCredentials()); this.pinS3ClientToCurrentRegion = conf.getBoolean(S3_PIN_CLIENT_TO_CURRENT_REGION, defaults.isPinS3ClientToCurrentRegion()); this.s3IamRole = conf.get(S3_IAM_ROLE, defaults.getS3IamRole()); 
this.s3IamRoleSessionName = conf.get(S3_IAM_ROLE_SESSION_NAME, defaults.getS3IamRoleSessionName()); verify(!(useInstanceCredentials && conf.get(S3_IAM_ROLE) != null), "Invalid configuration: either use instance credentials or specify an iam role"); verify((pinS3ClientToCurrentRegion && conf.get(S3_ENDPOINT) == null) || !pinS3ClientToCurrentRegion, "Invalid configuration: either endpoint can be set or S3 client can be pinned to the current region"); this.sseEnabled = conf.getBoolean(S3_SSE_ENABLED, defaults.isS3SseEnabled()); this.sseType = PrestoS3SseType.valueOf(conf.get(S3_SSE_TYPE, defaults.getS3SseType().name())); this.sseKmsKeyId = conf.get(S3_SSE_KMS_KEY_ID, defaults.getS3SseKmsKeyId()); this.s3AclType = PrestoS3AclType.valueOf(conf.get(S3_ACL_TYPE, defaults.getS3AclType().name())); String userAgentPrefix = conf.get(S3_USER_AGENT_PREFIX, defaults.getS3UserAgentPrefix()); this.skipGlacierObjects = conf.getBoolean(S3_SKIP_GLACIER_OBJECTS, defaults.isSkipGlacierObjects()); this.s3StorageClass = conf.getEnum(S3_STORAGE_CLASS, defaults.getS3StorageClass()); ClientConfiguration configuration = new ClientConfiguration() .withMaxErrorRetry(maxErrorRetries) .withProtocol(sslEnabled ? Protocol.HTTPS : Protocol.HTTP) .withConnectionTimeout(toIntExact(connectTimeout.toMillis())) .withSocketTimeout(toIntExact(socketTimeout.toMillis())) .withMaxConnections(maxConnections) .withUserAgentPrefix(userAgentPrefix) .withUserAgentSuffix(S3_USER_AGENT_SUFFIX); this.credentialsProvider = createAwsCredentialsProvider(uri, conf); this.s3 = createAmazonS3Client(conf, configuration); }
// Verifies that initialize() fails with a RuntimeException ("Error creating an instance of ...")
// when the configured custom credentials-provider class cannot be loaded.
@Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "Error creating an instance of .*") public void testCustomCredentialsClassCannotBeFound() throws Exception { Configuration config = new Configuration(); config.set(S3_USE_INSTANCE_CREDENTIALS, "false"); config.set(S3_CREDENTIALS_PROVIDER, "com.example.DoesNotExist"); try (PrestoS3FileSystem fs = new PrestoS3FileSystem()) { fs.initialize(new URI("s3n://test-bucket/"), config); } }
/**
 * Compares two non-null {@link Comparable} values. Values of the same runtime class use
 * natural ordering directly; mixed {@link Number} subtypes are bridged via
 * {@code Numbers.compare}; anything else falls back to natural ordering.
 */
@SuppressWarnings("unchecked")
public static int compare(Comparable lhs, Comparable rhs) {
    assert lhs != null;
    assert rhs != null;
    // Identical runtime classes: compareTo is guaranteed type-compatible.
    if (lhs.getClass() == rhs.getClass()) {
        return lhs.compareTo(rhs);
    }
    // Mixed numeric types (e.g. Integer vs Long) need a widening comparison.
    return (lhs instanceof Number && rhs instanceof Number)
            ? Numbers.compare(lhs, rhs)
            : lhs.compareTo(rhs);
}
// Verifies that compare() throws (assertion or NPE) when the right-hand operand is null.
@SuppressWarnings("ConstantConditions") @Test(expected = Throwable.class) public void testNullRhsInCompareThrows() { compare(1, null); }
// Synchronous wrapper: blocks on the async reverse-rank lookup and returns its result.
@Override public Integer revRank(V o) { return get(revRankAsync(o)); }
// Verifies revRank(): "f" (second-highest score) has reverse rank 1, and an absent
// element yields null.
@Test public void testRevRank() { RScoredSortedSet<String> set = redisson.getScoredSortedSet("simple"); set.add(0.1, "a"); set.add(0.2, "b"); set.add(0.3, "c"); set.add(0.4, "d"); set.add(0.5, "e"); set.add(0.6, "f"); set.add(0.7, "g"); assertThat(set.revRank("f")).isEqualTo(1); assertThat(set.revRank("abc")).isNull(); }
// Validates an Instance before registration: rejects null instances, heartbeat timeouts or
// ip-delete timeouts shorter than the heartbeat interval, and cluster names outside the
// allowed 0-9a-zA-Z-_ pattern. Throws NacosApiException(INVALID_PARAM) on any violation.
// NOTE(review): the error message "must less than" is grammatically off ("must be less than"),
// but changing it could break callers/tests matching the text — flag rather than fix here.
public static void checkInstanceIsLegal(Instance instance) throws NacosException { if (null == instance) { throw new NacosApiException(NacosException.INVALID_PARAM, ErrorCode.INSTANCE_ERROR, "Instance can not be null."); } if (instance.getInstanceHeartBeatTimeOut() < instance.getInstanceHeartBeatInterval() || instance.getIpDeleteTimeout() < instance.getInstanceHeartBeatInterval()) { throw new NacosApiException(NacosException.INVALID_PARAM, ErrorCode.INSTANCE_ERROR, "Instance 'heart beat interval' must less than 'heart beat timeout' and 'ip delete timeout'."); } if (!StringUtils.isEmpty(instance.getClusterName()) && !CLUSTER_NAME_PATTERN.matcher(instance.getClusterName()).matches()) { throw new NacosApiException(NacosException.INVALID_PARAM, ErrorCode.INSTANCE_ERROR, String.format("Instance 'clusterName' should be characters with only 0-9a-zA-Z-. (current: %s)", instance.getClusterName())); } }
// Verifies a valid instance passes and a null instance raises INVALID_PARAM.
@Test void testCheckInstanceIsNull() throws NacosException { Instance instance = new Instance(); instance.setIp("127.0.0.1"); instance.setPort(9089); NamingUtils.checkInstanceIsLegal(instance); try { NamingUtils.checkInstanceIsLegal(null); } catch (NacosException e) { assertEquals(NacosException.INVALID_PARAM, e.getErrCode()); } }
// Serializes an InstructionList into a GPX 1.1 document string. Optionally emits waypoints
// (start, via, finish), a route (<rte> with one rtept per instruction), and a track
// (<trk>/<trkseg> with per-point lat/lon, optional elevation, and absolute timestamps offset
// from startTimeMillis). Coordinates are formatted with up to 6 fraction digits in ROOT
// locale to avoid scientific notation and locale-dependent separators.
public static String createGPX(InstructionList instructions, String trackName, long startTimeMillis, boolean includeElevation, boolean withRoute, boolean withTrack, boolean withWayPoints, String version, Translation tr) { DateFormat formatter = Helper.createFormatter(); DecimalFormat decimalFormat = new DecimalFormat("#", DecimalFormatSymbols.getInstance(Locale.ROOT)); decimalFormat.setMinimumFractionDigits(1); decimalFormat.setMaximumFractionDigits(6); decimalFormat.setMinimumIntegerDigits(1); String header = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\" ?>" + "<gpx xmlns=\"http://www.topografix.com/GPX/1/1\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" + " creator=\"Graphhopper version " + version + "\" version=\"1.1\"" // This xmlns:gh acts only as ID, no valid URL necessary. // Use a separate namespace for custom extensions to make basecamp happy. + " xmlns:gh=\"https://graphhopper.com/public/schema/gpx/1.1\">" + "\n<metadata>" + "<copyright author=\"OpenStreetMap contributors\"/>" + "<link href=\"http://graphhopper.com\">" + "<text>GraphHopper GPX</text>" + "</link>" + "<time>" + formatter.format(startTimeMillis) + "</time>" + "</metadata>"; StringBuilder gpxOutput = new StringBuilder(header); if (!instructions.isEmpty()) { if (withWayPoints) { createWayPointBlock(gpxOutput, instructions.get(0), decimalFormat, tr); // Start for (Instruction currInstr : instructions) { if ((currInstr.getSign() == Instruction.REACHED_VIA) // Via || (currInstr.getSign() == Instruction.FINISH)) // End { createWayPointBlock(gpxOutput, currInstr, decimalFormat, tr); } } } if (withRoute) { gpxOutput.append("\n<rte>"); Instruction nextInstr = null; for (Instruction currInstr : instructions) { if (null != nextInstr) createRteptBlock(gpxOutput, nextInstr, currInstr, decimalFormat, tr); nextInstr = currInstr; } createRteptBlock(gpxOutput, nextInstr, null, decimalFormat, tr); gpxOutput.append("\n</rte>"); } } if (withTrack) { 
gpxOutput.append("\n<trk><name>").append(trackName).append("</name>"); gpxOutput.append("<trkseg>"); for (GPXEntry entry : createGPXList(instructions)) { gpxOutput.append("\n<trkpt lat=\"").append(decimalFormat.format(entry.getPoint().getLat())); gpxOutput.append("\" lon=\"").append(decimalFormat.format(entry.getPoint().getLon())).append("\">"); if (includeElevation) gpxOutput.append("<ele>").append(Helper.round2(((GHPoint3D) entry.getPoint()).getEle())).append("</ele>"); if (entry.getTime() != null) gpxOutput.append("<time>").append(formatter.format(startTimeMillis + entry.getTime())).append("</time>"); gpxOutput.append("</trkpt>"); } gpxOutput.append("\n</trkseg>"); gpxOutput.append("\n</trk>"); } // we could now use 'wpt' for via points gpxOutput.append("\n</gpx>"); return gpxOutput.toString(); }
// Verifies that very small coordinates are written as plain decimals (no "E-" scientific
// notation) and that the generated GPX validates.
@Test public void testCreateGPXCorrectFormattingSmallNumbers() { InstructionList instructions = new InstructionList(trMap.getWithFallBack(Locale.US)); PointList pl = new PointList(); pl.add(0.000001, 0.000001); pl.add(-0.000123, -0.000125); Instruction instruction = new Instruction(0, "do it", pl); instructions.add(instruction); instructions.add(new FinishInstruction(0.000852, 0.000852, 0)); String gpxStr = GpxConversions.createGPX(instructions, "test", 0, true, true, true, true, Constants.VERSION, trMap.getWithFallBack(Locale.US)); assertFalse(gpxStr.contains("E-"), gpxStr); assertTrue(gpxStr.contains("0.000001"), gpxStr); assertTrue(gpxStr.contains("-0.000125"), gpxStr); verifyGPX(gpxStr); }
// Resolves the local-path argument of a CLI transfer command into TransferItems. If the path
// contains shell glob characters (* or ?) that the shell did not expand, every file in the
// parent directory matching the wildcard is paired with a remote child path; otherwise the
// request is delegated to SingleTransferItemFinder.
@Override public Set<TransferItem> find(final CommandLine input, final TerminalAction action, final Path remote) throws AccessDeniedException { if(input.getOptionValues(action.name()).length == 2) { final String path = input.getOptionValues(action.name())[1]; // This only applies to a shell where the glob is not already expanded into multiple arguments if(StringUtils.containsAny(path, '*', '?')) { final Local directory = LocalFactory.get(FilenameUtils.getFullPath(path)); if(directory.isDirectory()) { final Set<TransferItem> items = new HashSet<TransferItem>(); for(Local file : directory.list(new NullFilter<String>() { @Override public boolean accept(final String file) { return FilenameUtils.wildcardMatch(file, PathNormalizer.name(path)); } })) { items.add(new TransferItem(new Path(remote, file.getName(), EnumSet.of(Path.Type.file)), file)); } return items; } } } return new SingleTransferItemFinder().find(input, action, remote); }
// Verifies that a *.duck glob matches only the .duck temp files and maps them to the
// expected remote/local TransferItem pairs.
@Test public void testFind() throws Exception { File.createTempFile("temp", ".duck"); final File f = File.createTempFile("temp", ".duck"); File.createTempFile("temp", ".false"); final CommandLineParser parser = new PosixParser(); final CommandLine input = parser.parse(TerminalOptionsBuilder.options(), new String[]{"--upload", "rackspace://cdn.cyberduck.ch/remote", f.getParent() + "/*.duck"}); final Set<TransferItem> found = new GlobTransferItemFinder().find(input, TerminalAction.upload, new Path("/cdn.cyberduck.ch/remote", EnumSet.of(Path.Type.file))); assertFalse(found.isEmpty()); assertTrue(found.contains(new TransferItem( new Path(new Path("/cdn.cyberduck.ch/remote", EnumSet.of(Path.Type.directory)), f.getName(), EnumSet.of(Path.Type.file)), new Local(f.getAbsolutePath())))); }
/**
 * Inserts the given user into the USERS table via a parameterized statement.
 *
 * @param user the user whose username and password are persisted
 * @return the number of rows inserted (1 on success)
 * @throws SQLException if obtaining a connection or executing the insert fails
 */
public int registerUser(final User user) throws SQLException {
  final var sql = "insert into USERS (username, password) values (?,?)";
  // try-with-resources guarantees both the connection and statement are closed.
  try (var connection = dataSource.getConnection();
       var preparedStatement = connection.prepareStatement(sql)) {
    preparedStatement.setString(1, user.getUsername());
    preparedStatement.setString(2, user.getPassword());
    final var rowsInserted = preparedStatement.executeUpdate();
    LOGGER.info("Register successfully!");
    return rowsInserted;
  }
}
// Verifies registerUser() reports exactly one inserted row for a fresh user.
@Test void registerShouldSucceed() throws SQLException { var dataSource = createDataSource(); var userTableModule = new UserTableModule(dataSource); var user = new User(1, "123456", "123456"); assertEquals(1, userTableModule.registerUser(user)); }
// Simple accessor for the controller's Configuration instance.
public Configuration getConfiguration() { return configuration; }
// Verifies the namesrv controller exposes a non-null Configuration.
@Test public void getConfiguration() { Configuration configuration = namesrvController.getConfiguration(); Assert.assertNotNull(configuration); }
// Two-argument UDF overload: searches from position 1 (the start of the string).
@Udf public int instr(final String str, final String substring) { return instr(str, substring, 1); }
// Verifies every instr() overload returns 0 when either the string or the substring is null.
@Test public void shouldReturnZeroOnNullValue() { assertThat(udf.instr(null, "OR"), is(0)); assertThat(udf.instr(null, "OR", 1), is(0)); assertThat(udf.instr(null, "OR", 1, 1), is(0)); assertThat(udf.instr("CORPORATE FLOOR", null, 1), is(0)); assertThat(udf.instr("CORPORATE FLOOR", null, 1, 1), is(0)); }
/**
 * Triggers an asynchronous reset of partition positions for any partitions that
 * currently require an offset reset; no-op when none do.
 */
public void resetPositionsIfNeeded() {
    final Map<TopicPartition, Long> resetTimestamps = offsetFetcherUtils.getOffsetResetTimestamp();
    if (!resetTimestamps.isEmpty()) {
        resetPositionsAsync(resetTimestamps);
    }
}
// Verifies that a paused partition awaiting an offset reset stays reset-pending,
// non-fetchable, and without a valid position after resetPositionsIfNeeded().
@Test public void testUpdateFetchPositionOfPausedPartitionsWithoutAValidPosition() { buildFetcher(); assignFromUser(singleton(tp0)); subscriptions.requestOffsetReset(tp0); subscriptions.pause(tp0); // paused partition does not have a valid position offsetFetcher.resetPositionsIfNeeded(); consumerClient.pollNoWakeup(); assertTrue(subscriptions.isOffsetResetNeeded(tp0)); assertFalse(subscriptions.isFetchable(tp0)); assertFalse(subscriptions.hasValidPosition(tp0)); }
// Factory for the push-initialization endpoint provider.
RegistryEndpointProvider<Optional<URL>> initializer() { return new Initializer(); }
// Verifies the initializer endpoint declares no Accept headers.
@Test public void testGetAccept() { Assert.assertEquals(0, testBlobPusher.initializer().getAccept().size()); }
// Completes printing: emits a "(no rows)" placeholder when nothing was printed,
// then flushes the underlying writer.
@Override public void finish() throws IOException { if (rowCount == 0) { writer.append("(no rows)\n"); } writer.flush(); }
// Verifies finish() on an empty result set writes the "(no rows)" placeholder.
@Test public void testVerticalPrintingNoRows() throws Exception { StringWriter writer = new StringWriter(); List<String> fieldNames = ImmutableList.of("none"); OutputPrinter printer = new VerticalRecordPrinter(fieldNames, writer); printer.finish(); assertEquals(writer.getBuffer().toString(), "(no rows)\n"); }
// Writes the ReadyForQuery packet body: the fixed prefix bytes followed by the
// single transaction-status byte.
@Override protected void write(final PostgreSQLPacketPayload payload) { payload.getByteBuf().writeBytes(PREFIX); payload.getByteBuf().writeByte(status); }
// Verifies the TRANSACTION_FAILED packet writes 6 bytes ending with status byte 'E'.
@Test void assertReadWriteWithTransactionFailed() { ByteBuf byteBuf = ByteBufTestUtils.createByteBuf(6); PostgreSQLPacketPayload payload = new PostgreSQLPacketPayload(byteBuf, StandardCharsets.UTF_8); PostgreSQLReadyForQueryPacket packet = PostgreSQLReadyForQueryPacket.TRANSACTION_FAILED; packet.write(payload); assertThat(byteBuf.writerIndex(), is(6)); assertThat(byteBuf.getByte(5), is((byte) 'E')); }
public static String repeat(final String str, final int repeat) { // Performance tuned for 2.0 (JDK1.4) if (str == null) { return null; } if (repeat <= 0) { return EMPTY; } final int inputLength = str.length(); if (repeat == 1 || inputLength == 0) { return str; } if (inputLength == 1 && repeat <= PAD_LIMIT) { return repeat(str.charAt(0), repeat); } final int outputLength = inputLength * repeat; switch (inputLength) { case 1: return repeat(str.charAt(0), repeat); case 2: final char ch0 = str.charAt(0); final char ch1 = str.charAt(1); final char[] output2 = new char[outputLength]; for (int i = repeat * 2 - 2; i >= 0; i--, i--) { output2[i] = ch0; output2[i + 1] = ch1; } return new String(output2); default: final StringBuilder buf = new StringBuilder(outputLength); for (int i = 0; i < repeat; i++) { buf.append(str); } return buf.toString(); } }
// Verifies repeat() concatenates the string the requested number of times.
@Test void testRepetition() { assertThat(EncodingUtils.repeat("we", 3)).isEqualTo("wewewe"); }
public static String getGetterMethodName(String property, FullyQualifiedJavaType fullyQualifiedJavaType) { StringBuilder sb = new StringBuilder(); sb.append(property); if (Character.isLowerCase(sb.charAt(0)) && (sb.length() == 1 || !Character.isUpperCase(sb.charAt(1)))) { sb.setCharAt(0, Character.toUpperCase(sb.charAt(0))); } if (fullyQualifiedJavaType.equals(FullyQualifiedJavaType .getBooleanPrimitiveInstance())) { sb.insert(0, "is"); //$NON-NLS-1$ } else { sb.insert(0, "get"); //$NON-NLS-1$ } return sb.toString(); }
// Verifies JavaBeans getter naming across edge cases: camelCase-preserving ("eMail"),
// acronyms, single chars, underscores, and the "is" prefix for primitive booleans.
@Test void testGetGetterMethodName() { assertEquals("geteMail", getGetterMethodName("eMail", FullyQualifiedJavaType.getStringInstance())); //$NON-NLS-1$ //$NON-NLS-2$ assertEquals("getFirstName", getGetterMethodName("firstName", FullyQualifiedJavaType.getStringInstance())); //$NON-NLS-1$ //$NON-NLS-2$ assertEquals("getURL", getGetterMethodName("URL", FullyQualifiedJavaType.getStringInstance())); //$NON-NLS-1$ //$NON-NLS-2$ assertEquals("getXAxis", getGetterMethodName("XAxis", FullyQualifiedJavaType.getStringInstance())); //$NON-NLS-1$ //$NON-NLS-2$ assertEquals("getA", getGetterMethodName("a", FullyQualifiedJavaType.getStringInstance())); //$NON-NLS-1$ //$NON-NLS-2$ assertEquals("isActive", getGetterMethodName("active", FullyQualifiedJavaType.getBooleanPrimitiveInstance())); //$NON-NLS-1$ //$NON-NLS-2$ assertEquals("getI_PARAM_INT_1", getGetterMethodName("i_PARAM_INT_1", FullyQualifiedJavaType.getStringInstance())); //$NON-NLS-1$ //$NON-NLS-2$ assertEquals("get_fred", getGetterMethodName("_fred", FullyQualifiedJavaType.getStringInstance())); //$NON-NLS-1$ //$NON-NLS-2$ assertEquals("getAccountType", getGetterMethodName("AccountType", FullyQualifiedJavaType.getStringInstance())); //$NON-NLS-1$ //$NON-NLS-2$ }
/**
 * Null-safe list accessor: returns the list itself when non-null, otherwise an
 * immutable empty list. Never returns {@code null}.
 */
public static <V> List<V> listOrEmpty(List<V> list) {
    if (list != null) {
        return list;
    }
    return ImmutableList.of();
}
// Verifies listOrEmpty() maps null to an empty list and returns non-null lists unchanged
// (same instance).
@Test public void testNullSafeList() { assertEquals(Collections.emptyList(), Apiary.listOrEmpty(null)); List<?> values = Arrays.asList("abc"); assertSame(values, Apiary.listOrEmpty(values)); }
// Wraps this cogrouped stream in a time-windowed view using the given window
// specification; rejects null windows up front.
@Override public <W extends Window> TimeWindowedCogroupedKStream<K, VOut> windowedBy(final Windows<W> windows) { Objects.requireNonNull(windows, "windows can't be null"); return new TimeWindowedCogroupedKStreamImpl<>( windows, builder, subTopologySourceNodes, name, aggregateBuilder, graphNode, groupPatterns); }
// Verifies windowedBy() rejects a null SlidingWindows argument with an NPE.
@Test public void shouldNotHaveNullWindowOnWindowedBySliding() { assertThrows(NullPointerException.class, () -> cogroupedStream.windowedBy((SlidingWindows) null)); }
// Asynchronously stores a key/value pair in the default state store after verifying
// that state access is enabled for this context.
@Override public CompletableFuture<Void> putStateAsync(String key, ByteBuffer value) { ensureStateEnabled(); return defaultStateStore.putAsync(key, value); }
// Verifies putStateAsync() delegates to the underlying state store exactly once with
// the same key and buffer.
@Test public void testPutStateStateEnabled() throws Exception { context.defaultStateStore = mock(BKStateStoreImpl.class); ByteBuffer buffer = ByteBuffer.wrap("test-value".getBytes(UTF_8)); context.putStateAsync("test-key", buffer); verify(context.defaultStateStore, times(1)).putAsync(eq("test-key"), same(buffer)); }
/**
 * Produces the final project key: builds the complete key from the project name and any
 * extra items, truncates it if necessary, and sanitizes the result.
 */
public String generateUniqueProjectKey(String projectName, String... extraProjectKeyItems) {
    String completeKey = generateCompleteProjectKey(projectName, extraProjectKeyItems);
    return sanitizeProjectKey(truncateProjectKeyIfNecessary(completeKey));
}
// Verifies that a short project name yields the expected generated key.
@Test public void generateUniqueProjectKey_shortProjectName_shouldAppendUuid() { String fullProjectName = RandomStringUtils.randomAlphanumeric(10); assertThat(projectKeyGenerator.generateUniqueProjectKey(fullProjectName)) .isEqualTo(generateExpectedKeyName(fullProjectName)); }
/**
 * Walks the cause chain of {@code source} and returns the first throwable that is an
 * instance of any of the given exception classes, or {@code null} when no level of the
 * chain matches (including when {@code source} itself is {@code null}).
 *
 * @param source  the starting throwable; may be {@code null}
 * @param clazzes candidate exception types, tested in the given order at each level
 * @return the first matching throwable in the chain, cast to {@link Exception}, or {@code null}
 */
@SafeVarargs // safe: the varargs array is only read, never stored or exposed
public static Exception lookupExceptionInCause(Throwable source, Class<? extends Exception>... clazzes) {
    while (source != null) {
        for (Class<? extends Exception> clazz : clazzes) {
            if (clazz.isAssignableFrom(source.getClass())) {
                return (Exception) source;
            }
        }
        source = source.getCause();
    }
    return null;
}
// Verifies that an exception with no cause chain yields null when the sought type
// does not match.
@Test void givenNoCause_whenLookupExceptionInCause_thenReturnNull() { assertThat(ExceptionUtil.lookupExceptionInCause(new Exception(), RuntimeException.class)).isNull(); }
// Adapter overload: lifts the key-less ValueJoiner into a ValueJoinerWithKey and
// delegates to the primary join implementation.
@Override public <VO, VR> KStream<K, VR> join(final KStream<K, VO> otherStream, final ValueJoiner<? super V, ? super VO, ? extends VR> joiner, final JoinWindows windows) { return join(otherStream, toValueJoinerWithKey(joiner), windows); }
// Verifies a stream-table join rejects a null table with a descriptive NPE.
@Test public void shouldNotAllowNullTableOnTableJoinWithJoiner() { final NullPointerException exception = assertThrows( NullPointerException.class, () -> testStream.join(null, MockValueJoiner.TOSTRING_JOINER, Joined.as("name"))); assertThat(exception.getMessage(), equalTo("table can't be null")); }
// Fetches a page of audit logs from the service and converts them to DTOs.
@Override public List<ApolloAuditLogDTO> queryLogs(int page, int size) { return ApolloAuditUtil.logListToDTOList(logService.findAll(page, size)); }
// Verifies queryLogs() delegates to logService.findAll(page, size) once and maps all
// returned entities to DTOs.
@Test public void testQueryLogs() { { List<ApolloAuditLog> logList = MockBeanFactory.mockAuditLogListByLength(size); Mockito.when(logService.findAll(Mockito.eq(page), Mockito.eq(size))) .thenReturn(logList); } List<ApolloAuditLogDTO> dtoList = api.queryLogs(page, size); Mockito.verify(logService, Mockito.times(1)) .findAll(Mockito.eq(page), Mockito.eq(size)); assertEquals(size, dtoList.size()); }
// Decodes an RPC result from the wire: records the content length, switches the thread
// context classloader to the invoked service's loader (so user types deserialize
// correctly), reads the response flag byte, then dispatches to value/exception/attachment
// handlers per the DubboCodec RESPONSE_* protocol constants. Unknown flags raise
// IOException. Cleanable deserializers are cleaned up before returning this result object.
// NOTE(review): the TCCL is switched but not restored here — presumably the caller
// manages restoration; confirm against the invoker pipeline.
@Override public Object decode(Channel channel, InputStream input) throws IOException { if (log.isDebugEnabled()) { Thread thread = Thread.currentThread(); log.debug("Decoding in thread -- [" + thread.getName() + "#" + thread.getId() + "]"); } int contentLength = input.available(); setAttribute(Constants.CONTENT_LENGTH_KEY, contentLength); // switch TCCL if (invocation != null && invocation.getServiceModel() != null) { Thread.currentThread() .setContextClassLoader(invocation.getServiceModel().getClassLoader()); } ObjectInput in = CodecSupport.getSerialization(serializationType).deserialize(channel.getUrl(), input); byte flag = in.readByte(); switch (flag) { case DubboCodec.RESPONSE_NULL_VALUE: break; case DubboCodec.RESPONSE_VALUE: handleValue(in); break; case DubboCodec.RESPONSE_WITH_EXCEPTION: handleException(in); break; case DubboCodec.RESPONSE_NULL_VALUE_WITH_ATTACHMENTS: handleAttachment(in); break; case DubboCodec.RESPONSE_VALUE_WITH_ATTACHMENTS: handleValue(in); handleAttachment(in); break; case DubboCodec.RESPONSE_WITH_EXCEPTION_WITH_ATTACHMENTS: handleException(in); handleAttachment(in); break; default: throw new IOException("Unknown result flag, expect '0' '1' '2' '3' '4' '5', but received: " + flag); } if (in instanceof Cleanable) { ((Cleanable) in).cleanup(); } return this; }
// Round-trip test for DecodeableRpcResult: encodes a normal response and an exception
// response server-side, decodes them client-side, and checks value, exception code, and
// attachments survive both the RESPONSE_VALUE_WITH_ATTACHMENTS and
// RESPONSE_WITH_EXCEPTION_WITH_ATTACHMENTS paths.
@Test void test() throws Exception { // Mock a rpcInvocation, this rpcInvocation is usually generated by the client request, and stored in // Request#data Byte proto = CodecSupport.getIDByName(DefaultSerializationSelector.getDefaultRemotingSerialization()); URL url = new ServiceConfigURL("dubbo", "127.0.0.1", 9103, DemoService.class.getName(), VERSION_KEY, "1.0.0"); ServiceDescriptor serviceDescriptor = repository.registerService(DemoService.class); ProviderModel providerModel = new ProviderModel(url.getServiceKey(), new DemoServiceImpl(), serviceDescriptor, null, null); RpcInvocation rpcInvocation = new RpcInvocation( providerModel, "echo", DemoService.class.getName(), "", new Class<?>[] {String.class}, new String[] { "yug" }); rpcInvocation.put(SERIALIZATION_ID_KEY, proto); // Mock a response result returned from the server and write to the buffer Channel channel = new MockChannel(); Response response = new Response(1); Result result = new AppResponse(); result.setValue("yug"); response.setResult(result); ChannelBuffer buffer = writeBuffer(url, response, proto, false); // The client reads and decode the buffer InputStream is = new ChannelBufferInputStream(buffer, buffer.readableBytes()); DecodeableRpcResult decodeableRpcResult = new DecodeableRpcResult(channel, response, is, rpcInvocation, proto); decodeableRpcResult.decode(); // Verify RESPONSE_VALUE_WITH_ATTACHMENTS // Verify that the decodeableRpcResult decoded by the client is consistent with the response returned by the // server Assertions.assertEquals(decodeableRpcResult.getValue(), result.getValue()); Assertions.assertTrue( CollectionUtils.mapEquals(decodeableRpcResult.getObjectAttachments(), result.getObjectAttachments())); // Verify RESPONSE_WITH_EXCEPTION_WITH_ATTACHMENTS Response exResponse = new Response(2); Result exResult = new AppResponse(); exResult.setException(new RpcException(BIZ_EXCEPTION)); exResponse.setResult(exResult); buffer = writeBuffer(url, exResponse, proto, true); is = new 
ChannelBufferInputStream(buffer, buffer.readableBytes()); decodeableRpcResult = new DecodeableRpcResult(channel, response, is, rpcInvocation, proto); decodeableRpcResult.decode(); Assertions.assertEquals( ((RpcException) decodeableRpcResult.getException()).getCode(), ((RpcException) exResult.getException()).getCode()); Assertions.assertTrue( CollectionUtils.mapEquals(decodeableRpcResult.getObjectAttachments(), exResult.getObjectAttachments())); }
// Assembles the credential retrievers for the "from" (base) image: applies whichever of
// the mutually exclusive CLI credential options was supplied (generic or from-specific
// username/password, generic or from-specific credential helper) to the default
// retrievers, then returns the resulting ordered list.
public static List<CredentialRetriever> getFromCredentialRetrievers( CommonCliOptions commonCliOptions, DefaultCredentialRetrievers defaultCredentialRetrievers) throws FileNotFoundException { // these are all mutually exclusive as enforced by the CLI commonCliOptions .getUsernamePassword() .ifPresent( credential -> defaultCredentialRetrievers.setKnownCredential( credential, "--username/--password")); commonCliOptions .getFromUsernamePassword() .ifPresent( credential -> defaultCredentialRetrievers.setKnownCredential( credential, "--from-username/--from-password")); commonCliOptions .getCredentialHelper() .ifPresent(defaultCredentialRetrievers::setCredentialHelper); commonCliOptions .getFromCredentialHelper() .ifPresent(defaultCredentialRetrievers::setCredentialHelper); return defaultCredentialRetrievers.asList(); }
// Parameterized test: verifies that each credential-helper CLI variant configures the
// helper "abc" on the default retrievers and triggers exactly one asList() call.
@Test @Parameters(method = "paramsFromCredHelper") public void testGetFromCredentialHelper(String[] args) throws FileNotFoundException { CommonCliOptions commonCliOptions = CommandLine.populateCommand(new CommonCliOptions(), ArrayUtils.addAll(DEFAULT_ARGS, args)); Credentials.getFromCredentialRetrievers(commonCliOptions, defaultCredentialRetrievers); verify(defaultCredentialRetrievers).setCredentialHelper("abc"); verify(defaultCredentialRetrievers).asList(); verifyNoMoreInteractions(defaultCredentialRetrievers); }
// Maps an ODPS TypeInfo to the engine's Type system, recursing into MAP/ARRAY/STRUCT
// element types. Parameterized types (DECIMAL, CHAR, VARCHAR) carry their
// precision/scale/length through; STRING and JSON map to the default catalog string;
// TIMESTAMP and DATETIME both map to DATETIME. Unrecognized ODPS types deliberately
// fall back to VARCHAR rather than failing.
public static Type convertType(TypeInfo typeInfo) { switch (typeInfo.getOdpsType()) { case BIGINT: return Type.BIGINT; case INT: return Type.INT; case SMALLINT: return Type.SMALLINT; case TINYINT: return Type.TINYINT; case FLOAT: return Type.FLOAT; case DECIMAL: DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo; return ScalarType.createUnifiedDecimalType(decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale()); case DOUBLE: return Type.DOUBLE; case CHAR: CharTypeInfo charTypeInfo = (CharTypeInfo) typeInfo; return ScalarType.createCharType(charTypeInfo.getLength()); case VARCHAR: VarcharTypeInfo varcharTypeInfo = (VarcharTypeInfo) typeInfo; return ScalarType.createVarcharType(varcharTypeInfo.getLength()); case STRING: case JSON: return ScalarType.createDefaultCatalogString(); case BINARY: return Type.VARBINARY; case BOOLEAN: return Type.BOOLEAN; case DATE: return Type.DATE; case TIMESTAMP: case DATETIME: return Type.DATETIME; case MAP: MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo; return new MapType(convertType(mapTypeInfo.getKeyTypeInfo()), convertType(mapTypeInfo.getValueTypeInfo())); case ARRAY: ArrayTypeInfo arrayTypeInfo = (ArrayTypeInfo) typeInfo; return new ArrayType(convertType(arrayTypeInfo.getElementTypeInfo())); case STRUCT: StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo; List<Type> fieldTypeList = structTypeInfo.getFieldTypeInfos().stream().map(EntityConvertUtils::convertType) .collect(Collectors.toList()); return new StructType(fieldTypeList); default: return Type.VARCHAR; } }
// Verifies a decimal(12,4) within DECIMAL64 precision converts to the expected
// DecimalV3 type.
@Test public void testConvertTypeCaseDecimalLessThanOrEqualMaxDecimal64Precision() { DecimalTypeInfo decimalTypeInfo = TypeInfoFactory.getDecimalTypeInfo(12, 4); Type result = EntityConvertUtils.convertType(decimalTypeInfo); Type expectedType = ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL64, 12, 4); assertEquals(expectedType, result); }
// Convenience overload: resolves the charset by name and delegates to the
// Charset-based implementation. Throws UnsupportedCharsetException for unknown names.
public static StringBuilder print_json_diff(LogBuffer buffer, long len, String columnName, int columnIndex, String charsetName) { return print_json_diff(buffer, len, columnName, columnIndex, Charset.forName(charsetName)); }
// Verifies print_json_diff() raises IllegalArgumentException for this crafted buffer
// and, on failure, has advanced position by exactly one byte while leaving the buffer
// contents, limit, and origin untouched.
@Test public void print_json_diffInputNotNullZeroNotNullZeroNotNullOutputIllegalArgumentException() throws InvocationTargetException { // Arrange final LogBuffer buffer = new LogBuffer(); buffer.position = 28; buffer.semival = 0; final byte[] myByteArray = { (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 3, (byte) 2 }; buffer.buffer = myByteArray; buffer.limit = 31; buffer.origin = 0; final long len = 0L; final String columnName = "foo"; final int columnIndex = 0; final String charsetName = "gbk"; try { // Act thrown.expect(IllegalArgumentException.class); JsonDiffConversion.print_json_diff(buffer, len, columnName, columnIndex, charsetName); } catch (IllegalArgumentException ex) { // Assert side effects Assert.assertNotNull(buffer); Assert.assertEquals(29, buffer.position); Assert.assertEquals(0, buffer.semival); Assert.assertArrayEquals(new byte[] { (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 2, (byte) 3, (byte) 2 }, buffer.buffer); Assert.assertEquals(31, buffer.limit); Assert.assertEquals(0, buffer.origin); throw ex; } }
// Two-argument overload: substitutes variables using a single property container
// (no secondary fallback container).
public static String substVars(String val, PropertyContainer pc1) throws ScanException { return substVars(val, pc1, null); }
// Verifies a string without variable references passes through substVars() unchanged.
@Test public void testLiteral() throws ScanException { String noSubst = "hello world"; String result = OptionHelper.substVars(noSubst, context); assertEquals(noSubst, result); }
// Builds the descending sequence end-1, end-1-step, ... down to (but excluding) start-1
// by delegating to sequence() with swapped, shifted endpoints and a negated step.
// (e.g. reverseSequence(1, 4, 1) -> [3, 2, 1])
public static List<Integer> reverseSequence(int start, int end, int step) { return sequence(end - 1, start - 1, -step); }
// Verifies reverseSequence(1, 4) yields exactly [3, 2, 1].
@Test public void reverseSequence() { List<Integer> lst = Iterators.reverseSequence(1, 4); assertEquals(3, (int) lst.get(0)); assertEquals(2, (int) lst.get(1)); assertEquals(1, (int) lst.get(2)); assertEquals(3, lst.size()); }
// Validates that a member level's experience fits strictly between its neighbors:
// it must exceed the experience of every lower level and stay below the experience of
// every higher level. The record matching the given id (the one being edited) is skipped.
// Throws LEVEL_EXPERIENCE_MIN / LEVEL_EXPERIENCE_MAX service exceptions on violation.
@VisibleForTesting void validateExperienceOutRange(List<MemberLevelDO> list, Long id, Integer level, Integer experience) { for (MemberLevelDO levelDO : list) { if (levelDO.getId().equals(id)) { continue; } if (levelDO.getLevel() < level) { // 经验大于前一个等级 if (experience <= levelDO.getExperience()) { throw exception(LEVEL_EXPERIENCE_MIN, levelDO.getName(), levelDO.getExperience()); } } else if (levelDO.getLevel() > level) { //小于下一个级别 if (experience >= levelDO.getExperience()) { throw exception(LEVEL_EXPERIENCE_MAX, levelDO.getName(), levelDO.getExperience()); } } } }
// Verifies both out-of-range directions: a higher level with too-little experience raises
// LEVEL_EXPERIENCE_MIN, and a lower level with too-much experience raises LEVEL_EXPERIENCE_MAX.
@Test public void testCreateLevel_experienceOutRange() { // 准备参数 int level = 10; int experience = 10; String name = randomString(); // mock 数据 memberlevelMapper.insert(randomLevelDO(o -> { o.setLevel(level); o.setExperience(experience); o.setName(name); })); List<MemberLevelDO> list = memberlevelMapper.selectList(); // 调用,校验异常 assertServiceException(() -> levelService.validateExperienceOutRange(list, null, level + 1, experience - 1), LEVEL_EXPERIENCE_MIN, name, level); // 调用,校验异常 assertServiceException(() -> levelService.validateExperienceOutRange(list, null, level - 1, experience + 1), LEVEL_EXPERIENCE_MAX, name, level); }
// Starts the server-list manager: retries fetching the NACOS server list (with growing
// back-off) up to initServerListRetryTimes, fails with SERVER_ERROR if still empty, then
// schedules a 30s refresh task. Idempotent: returns immediately if already started or if
// the server list is fixed.
// NOTE(review): this.wait() inside this synchronized method is used as a back-off sleep,
// but wait() releases the monitor and relies on spurious wakeups/timeouts — presumably
// intentional here; confirm, or consider Thread.sleep/Condition.
// NOTE(review): the broad catch (Exception) swallows InterruptedException without
// re-interrupting the thread.
public synchronized void start() throws NacosException { if (isStarted || isFixed) { return; } GetServerListTask getServersTask = new GetServerListTask(addressServerUrl); for (int i = 0; i < initServerListRetryTimes && serverUrls.isEmpty(); ++i) { getServersTask.run(); if (!serverUrls.isEmpty()) { break; } try { this.wait((i + 1) * 100L); } catch (Exception e) { LOGGER.warn("get serverlist fail,url: {}", addressServerUrl); } } if (serverUrls.isEmpty()) { LOGGER.error("[init-serverlist] fail to get NACOS-server serverlist! env: {}, url: {}", name, addressServerUrl); throw new NacosException(NacosException.SERVER_ERROR, "fail to get NACOS-server serverlist! env:" + name + ", not connnect url:" + addressServerUrl); } // executor schedules the timer task this.executorService.scheduleWithFixedDelay(getServersTask, 0L, 30L, TimeUnit.SECONDS); isStarted = true; }
@Test
void testStart() throws NacosException {
    // Port 0 is unreachable, so fetching the server list must fail.
    final ServerListManager mgr = new ServerListManager("localhost", 0);
    try {
        mgr.start();
        fail();
    } catch (NacosException e) {
        // The error message (including its "connnect" typo) is asserted verbatim.
        assertEquals(
                "fail to get NACOS-server serverlist! env:custom-localhost_0_nacos_serverlist, not connnect url:http://localhost:0/nacos/serverlist",
                e.getErrMsg());
    }
    mgr.shutdown();
}
@Override public ImportResult importItem( UUID jobId, IdempotentImportExecutor idempotentImportExecutor, TokensAndUrlAuthData authData, PhotosContainerResource resource) throws Exception { KoofrClient koofrClient = koofrClientFactory.create(authData); monitor.debug( () -> String.format( "%s: Importing %s albums and %s photos before transmogrification", jobId, resource.getAlbums().size(), resource.getPhotos().size())); // Make the data Koofr compatible resource.transmogrify(transmogrificationConfig); monitor.debug( () -> String.format( "%s: Importing %s albums and %s photos after transmogrification", jobId, resource.getAlbums().size(), resource.getPhotos().size())); for (PhotoAlbum album : resource.getAlbums()) { // Create a Koofr folder and then save the id with the mapping data idempotentImportExecutor.executeAndSwallowIOExceptions( album.getId(), album.getName(), () -> createAlbumFolder(album, koofrClient)); } final LongAdder totalImportedFilesSizes = new LongAdder(); for (PhotoModel photoModel : resource.getPhotos()) { idempotentImportExecutor.importAndSwallowIOExceptions( photoModel, photo -> { ItemImportResult<String> fileImportResult = importSinglePhoto(photoModel, jobId, idempotentImportExecutor, koofrClient); if (fileImportResult != null && fileImportResult.hasBytes()) { totalImportedFilesSizes.add(fileImportResult.getBytes()); } return fileImportResult; }); } return ImportResult.OK.copyWithBytes(totalImportedFilesSizes.longValue()); }
@Test
public void testImportItemFromJobStoreUserTimeZone() throws Exception {
    // Fake photo bytes served out of the job store
    ByteArrayInputStream inputStream = new ByteArrayInputStream(new byte[] {0, 1, 2, 3, 4});
    when(jobStore.getStream(any(), any())).thenReturn(new InputStreamWrapper(inputStream, 5L));
    UUID jobId = UUID.randomUUID();
    PortabilityJob job = mock(PortabilityJob.class);
    // The importing user's time zone is Europe/Rome (UTC+1 in winter)
    when(job.userTimeZone()).thenReturn(TimeZone.getTimeZone("Europe/Rome"));
    when(jobStore.findJob(jobId)).thenReturn(job);
    Collection<PhotoAlbum> albums =
        ImmutableList.of(new PhotoAlbum("id1", "Album 1", "This is a fake album"));
    // The photo's capture time is expressed in Europe/Kiev (UTC+2)
    DateFormat format = new SimpleDateFormat("yyyy:MM:dd HH:mm:ss");
    format.setTimeZone(TimeZone.getTimeZone("Europe/Kiev"));
    Collection<PhotoModel> photos =
        ImmutableList.of(
            new PhotoModel(
                "pic1.jpg",
                "http://fake.com/1.jpg",
                "A pic",
                "image/jpeg",
                "p1",
                "id1",
                true,
                format.parse("2021:02:16 11:55:00")));
    PhotosContainerResource resource = spy(new PhotosContainerResource(albums, photos));
    importer.importItem(jobId, executor, authData, resource);
    InOrder clientInOrder = Mockito.inOrder(client);
    // 11:55 Kiev == 10:55 Rome, so the uploaded name carries the user-local time
    clientInOrder
        .verify(client)
        .uploadFile(any(), eq("2021-02-16 10.55.00 pic1.jpg"), any(), any(), any(), any());
}
public void setName(String name) throws IllegalStateException { if (name != null && name.equals(this.name)) { return; // idempotent naming } if (this.name == null || CoreConstants.DEFAULT_CONTEXT_NAME.equals(this.name)) { this.name = name; } else { throw new IllegalStateException("Context has been already given a name"); } }
@Test
public void renameTest() {
    // Once a context has a real (non-default) name, renaming must fail.
    context.setName("hello");
    try {
        context.setName("x");
        Assertions.fail("renaming is not allowed");
    } catch (IllegalStateException ise) {
        // expected
    }
}
/**
 * Replaces every element selected by the iterator with the given constant value.
 *
 * @param root the root of the data structure being traversed
 * @param it iterator selecting the elements to replace
 * @param value the constant replacement value
 * @return the (possibly new) root after replacement
 */
public static Object replace(Object root, DataIterator it, Object value) {
    // Delegate to the generic transform with a constant-value transform.
    return transform(root, it, Transforms.constantValue(value));
}
@Test
public void testReplaceByPredicate() throws Exception {
    SimpleTestData data = IteratorTestData.createSimpleTestData();
    // Replace every value under the "id" path with the constant 100
    Builder.create(data.getDataElement(), IterationOrder.PRE_ORDER)
        .filterBy(Predicates.pathMatchesPathSpec(IteratorTestData.PATH_TO_ID))
        .replace(100);
    // All three "id" fields must now hold the replacement value
    assertEquals(data.getValue().getDataList("foo").getDataMap(0).getInteger("id").intValue(), 100);
    assertEquals(data.getValue().getDataList("foo").getDataMap(1).getInteger("id").intValue(), 100);
    assertEquals(data.getValue().getDataList("foo").getDataMap(2).getInteger("id").intValue(), 100);
}
@Override public Map<String, String> generationCodes(Long tableId) { // 校验是否已经存在 CodegenTableDO table = codegenTableMapper.selectById(tableId); if (table == null) { throw exception(CODEGEN_TABLE_NOT_EXISTS); } List<CodegenColumnDO> columns = codegenColumnMapper.selectListByTableId(tableId); if (CollUtil.isEmpty(columns)) { throw exception(CODEGEN_COLUMN_NOT_EXISTS); } // 如果是主子表,则加载对应的子表信息 List<CodegenTableDO> subTables = null; List<List<CodegenColumnDO>> subColumnsList = null; if (CodegenTemplateTypeEnum.isMaster(table.getTemplateType())) { // 校验子表存在 subTables = codegenTableMapper.selectListByTemplateTypeAndMasterTableId( CodegenTemplateTypeEnum.SUB.getType(), tableId); if (CollUtil.isEmpty(subTables)) { throw exception(CODEGEN_MASTER_GENERATION_FAIL_NO_SUB_TABLE); } // 校验子表的关联字段存在 subColumnsList = new ArrayList<>(); for (CodegenTableDO subTable : subTables) { List<CodegenColumnDO> subColumns = codegenColumnMapper.selectListByTableId(subTable.getId()); if (CollUtil.findOne(subColumns, column -> column.getId().equals(subTable.getSubJoinColumnId())) == null) { throw exception(CODEGEN_SUB_COLUMN_NOT_EXISTS, subTable.getId()); } subColumnsList.add(subColumns); } } // 执行生成 return codegenEngine.execute(table, columns, subTables, subColumnsList); }
@Test
public void testGenerationCodes_sub_columnNotExists() {
    // Mock data (master CodegenTableDO)
    CodegenTableDO table = randomPojo(CodegenTableDO.class,
            o -> o.setScene(CodegenSceneEnum.ADMIN.getScene())
                    .setTemplateType(CodegenTemplateTypeEnum.MASTER_NORMAL.getType()));
    codegenTableMapper.insert(table);
    // Mock data (CodegenColumnDO) for the master table
    CodegenColumnDO column01 = randomPojo(CodegenColumnDO.class, o -> o.setTableId(table.getId()));
    codegenColumnMapper.insert(column01);
    // Mock data (sub CodegenTableDO) — no columns inserted, so its join column is missing
    CodegenTableDO subTable = randomPojo(CodegenTableDO.class,
            o -> o.setScene(CodegenSceneEnum.ADMIN.getScene())
                    .setTemplateType(CodegenTemplateTypeEnum.SUB.getType())
                    .setMasterTableId(table.getId()));
    codegenTableMapper.insert(subTable);
    // Prepare parameters
    Long tableId = table.getId();
    // Invoke and assert the expected service exception
    assertServiceException(() -> codegenService.generationCodes(tableId),
            CODEGEN_SUB_COLUMN_NOT_EXISTS, subTable.getId());
}
@SuppressFBWarnings("NS_NON_SHORT_CIRCUIT") protected boolean isValidUtf8(final byte[] input) { int i = 0; // Check for BOM if (input.length >= 3 && (input[0] & 0xFF) == 0xEF && (input[1] & 0xFF) == 0xBB & (input[2] & 0xFF) == 0xBF) { i = 3; } int end; for (int j = input.length; i < j; ++i) { int octet = input[i]; if ((octet & 0x80) == 0) { continue; // ASCII } // Check for UTF-8 leading byte if ((octet & 0xE0) == 0xC0) { end = i + 1; } else if ((octet & 0xF0) == 0xE0) { end = i + 2; } else if ((octet & 0xF8) == 0xF0) { end = i + 3; } else { // Java only supports BMP so 3 is max return false; } while (i < end) { i++; octet = input[i]; if ((octet & 0xC0) != 0x80) { // Not a valid trailing byte return false; } } } return true; }
@Test
public void isValidUtf8_testNaughtyStrings_allShouldFail() {
    MockResponseWriter rw = new MockResponseWriter();
    for (int[] s : NAUGHTY_STRINGS) {
        // Worst case every code point expands to 4 bytes; the tail stays zero-padded
        byte[] buf = new byte[s.length * 4];
        int pos = 0;
        for (int v : s) {
            for (byte b : convert2Bytes(v)) {
                buf[pos] = b;
                pos++;
            }
        }
        // Each malformed sample must be rejected
        assertFalse(rw.isValidUtf8(buf));
    }
}
/**
 * Routes the record to a topic derived from the topic format, substituting
 * the original topic and the formatted record timestamp.
 *
 * @throws DataException if the record carries no timestamp
 */
@Override
public R apply(R record) {
    final Long timestamp = record.timestamp();
    if (timestamp == null) {
        throw new DataException("Timestamp missing on record: " + record);
    }
    // Render the record timestamp with the configured (per-thread) format.
    final String formattedTimestamp = timestampFormat.get().format(new Date(timestamp));
    // Substitute the topic placeholder first, then the timestamp placeholder,
    // quoting replacements so '$' or '\' in values are taken literally.
    String updatedTopic = TOPIC.matcher(topicFormat).replaceAll(Matcher.quoteReplacement(record.topic()));
    updatedTopic = TIMESTAMP.matcher(updatedTopic).replaceAll(Matcher.quoteReplacement(formattedTimestamp));
    return record.newRecord(
        updatedTopic,
        record.kafkaPartition(),
        record.keySchema(),
        record.key(),
        record.valueSchema(),
        record.value(),
        record.timestamp());
}
@Test
public void defaultConfiguration() {
    // 1483425001864L falls on 2017-01-03 (UTC); with the defaults the topic
    // becomes "<topic>-yyyyMMdd" — presumably the transform's default
    // topic/timestamp formats; confirm against its config definition.
    final SourceRecord record = new SourceRecord(
        null, null, "test", 0,
        null, null, null, null,
        1483425001864L
    );
    assertEquals("test-20170103", xform.apply(record).topic());
}
@Override public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException { try { final AttributedList<Path> children = new AttributedList<>(); ListFolderResult result; this.parse(directory, listener, children, result = new DbxUserFilesRequests(session.getClient(directory)).listFolder(containerService.getKey(directory))); // If true, then there are more entries available. Pass the cursor to list_folder/continue to retrieve the rest. while(result.getHasMore()) { this.parse(directory, listener, children, result = new DbxUserFilesRequests(session.getClient(directory)).listFolderContinue(result.getCursor())); } return children; } catch(DbxException e) { throw new DropboxExceptionMappingService().map("Listing directory {0} failed", e, directory); } }
@Test
public void testListHome() throws Exception {
    // List the Dropbox root
    final AttributedList<Path> list = new DropboxListService(session).list(
            new Path("/", EnumSet.of(Path.Type.directory, Path.Type.volume)),
            new DisabledListProgressListener());
    assertNotSame(AttributedList.emptyList(), list);
    // Attributes from the listing must agree with the attributes-finder feature
    for(Path f : list) {
        assertEquals(f.attributes(), new DropboxAttributesFinderFeature(session).find(f));
    }
}
/**
 * Issues a SENTINEL FAILOVER command for the given master, asking Sentinel to
 * start a failover for it.
 *
 * @param master the master node to fail over, identified by name
 */
@Override
public void failover(NamedNode master) {
    connection.sync(RedisCommands.SENTINEL_FAILOVER, master.getName());
}
@Test
public void testFailover() throws InterruptedException {
    Collection<RedisServer> masters = connection.masters();
    connection.failover(masters.iterator().next());
    // Give Sentinel time to promote a replica
    Thread.sleep(10000);
    RedisServer newMaster = connection.masters().iterator().next();
    // A different node (different port) must now be master
    assertThat(masters.iterator().next().getPort()).isNotEqualTo(newMaster.getPort());
}
/**
 * Delegates the push to the protocol-specific executor selected for this
 * client/subscriber pair (e.g. the UDP executor for UDP clients, per tests).
 */
@Override
public void doPush(String clientId, Subscriber subscriber, PushDataWrapper data) {
    getPushExecuteService(clientId, subscriber).doPush(clientId, subscriber, data);
}
@Test
void testDoPushForUdp() {
    // A UDP client id must be routed to the UDP push executor
    delegate.doPush(udpClientId, subscriber, pushdata);
    verify(pushExecutorUdp).doPush(udpClientId, subscriber, pushdata);
}
/**
 * Parses the given string as a finite double.
 *
 * @param str the text to parse
 * @return the parsed double value
 * @throws NumberFormatException if the text is not a number, or parses to
 *     NaN or an infinity (which {@link Double#parseDouble} would accept)
 */
public static double parseDouble(final String str) {
    final double parsed = Double.parseDouble(str);
    // Double.parseDouble accepts "NaN" and "Infinity"; SQL doubles must be finite.
    final boolean finite = !Double.isNaN(parsed) && !Double.isInfinite(parsed);
    if (finite) {
        return parsed;
    }
    throw new NumberFormatException("Invalid double value: " + str);
}
@Test
public void shouldThrowIfNotNumber() {
    // Non-numeric input must surface as NumberFormatException
    assertThrows(NumberFormatException.class, () -> SqlDoubles.parseDouble("What no number?"));
}
/** Returns the config context attached to this request. */
@Override
public IConfigContext getConfigContext() {
    return configContext;
}
@Test
void testGetConfigContext() {
    // A freshly constructed request must already carry a non-null config context
    ConfigRequest configRequest = new ConfigRequest();
    IConfigContext configContext = configRequest.getConfigContext();
    assertNotNull(configContext);
}
/**
 * Registers a source node consuming the given topics.
 *
 * <p>Validation order matters to callers/tests: empty topics, then null name,
 * then duplicate node name, then per-topic null/duplicate checks.
 *
 * @param offsetReset per-topic offset reset policy, may be null for the default
 * @param name unique node name within the topology
 * @param timestampExtractor optional per-source timestamp extractor
 * @param keyDeserializer optional key deserializer (null uses the default)
 * @param valDeserializer optional value deserializer (null uses the default)
 * @param topics one or more topic names, none null, none already registered
 * @throws TopologyException on empty topics or a duplicate node name/topic
 */
public final void addSource(final Topology.AutoOffsetReset offsetReset,
                            final String name,
                            final TimestampExtractor timestampExtractor,
                            final Deserializer<?> keyDeserializer,
                            final Deserializer<?> valDeserializer,
                            final String... topics) {
    if (topics.length == 0) {
        throw new TopologyException("You must provide at least one topic");
    }
    Objects.requireNonNull(name, "name must not be null");
    if (nodeFactories.containsKey(name)) {
        throw new TopologyException("Processor " + name + " is already added.");
    }
    for (final String topic : topics) {
        Objects.requireNonNull(topic, "topic names cannot be null");
        validateTopicNotAlreadyRegistered(topic);
        // Record the per-topic reset policy, if one was given.
        maybeAddToResetList(earliestResetTopics, latestResetTopics, offsetReset, topic);
        rawSourceTopicNames.add(topic);
    }
    nodeFactories.put(name, new SourceNodeFactory<>(name, topics, null, timestampExtractor, keyDeserializer, valDeserializer));
    nodeToSourceTopics.put(name, Arrays.asList(topics));
    nodeGrouper.add(name);
    // Invalidate any cached node grouping; it is recomputed on demand.
    nodeGroups = null;
}
@Test
public void shouldNotAllowOffsetResetSourceWithoutTopics() {
    // Zero topics must be rejected up front with a TopologyException
    assertThrows(TopologyException.class,
            () -> builder.addSource(Topology.AutoOffsetReset.EARLIEST, "source", null,
                    stringSerde.deserializer(), stringSerde.deserializer()));
}
/**
 * Convenience overload that delegates to the full variant with the trailing
 * boolean flag set to {@code false}.
 *
 * @param result semantic properties to populate (mutated in place)
 * @param forwarded forwarded-field declarations, may be null
 * @param nonForwarded non-forwarded-field declarations, may be null
 * @param readSet read-field declarations, may be null
 * @param inType input type information
 * @param outType output type information
 */
public static void getSemanticPropsSingleFromString(
        SingleInputSemanticProperties result,
        String[] forwarded,
        String[] nonForwarded,
        String[] readSet,
        TypeInformation<?> inType,
        TypeInformation<?> outType) {
    getSemanticPropsSingleFromString(
            result, forwarded, nonForwarded, readSet, inType, outType, false);
}
@Test
void testReadFieldsIndividualStrings() {
    String[] readFields = {"f1", "f2"};
    SingleInputSemanticProperties sp = new SingleInputSemanticProperties();
    SemanticPropUtil.getSemanticPropsSingleFromString(
            sp, null, null, readFields, threeIntTupleType, threeIntTupleType);
    // "f1" and "f2" must be recorded as read fields 1 and 2
    FieldSet fs = sp.getReadFields(0);
    assertThat(fs).containsExactly(1, 2);
}
/**
 * Validates the metadata of an extension version and collects all violations.
 *
 * <p>Runs under an observation span for monitoring. Checks are applied in a
 * fixed order; each appends zero or more issues without short-circuiting, so
 * the returned list reports every problem at once.
 *
 * @param extVersion the extension version whose metadata is validated
 * @return all detected issues, empty if the metadata is valid
 */
public List<Issue> validateMetadata(ExtensionVersion extVersion) {
    return Observation.createNotStarted("ExtensionValidator#validateMetadata", observations).observe(() -> {
        var issues = new ArrayList<Issue>();
        // Version string and target platform
        checkVersion(extVersion.getVersion(), issues);
        checkTargetPlatform(extVersion.getTargetPlatform(), issues);
        // Free-text fields: character set plus maximum length
        checkCharacters(extVersion.getDisplayName(), "displayName", issues);
        checkFieldSize(extVersion.getDisplayName(), DEFAULT_STRING_SIZE, "displayName", issues);
        checkCharacters(extVersion.getDescription(), "description", issues);
        checkFieldSize(extVersion.getDescription(), DESCRIPTION_SIZE, "description", issues);
        checkCharacters(extVersion.getCategories(), "categories", issues);
        checkFieldSize(extVersion.getCategories(), DEFAULT_STRING_SIZE, "categories", issues);
        checkCharacters(extVersion.getTags(), "keywords", issues);
        checkFieldSize(extVersion.getTags(), DEFAULT_STRING_SIZE, "keywords", issues);
        checkCharacters(extVersion.getLicense(), "license", issues);
        checkFieldSize(extVersion.getLicense(), DEFAULT_STRING_SIZE, "license", issues);
        // URL fields: well-formedness plus maximum length
        checkURL(extVersion.getHomepage(), "homepage", issues);
        checkFieldSize(extVersion.getHomepage(), DEFAULT_STRING_SIZE, "homepage", issues);
        checkURL(extVersion.getRepository(), "repository", issues);
        checkFieldSize(extVersion.getRepository(), DEFAULT_STRING_SIZE, "repository", issues);
        checkURL(extVersion.getBugs(), "bugs", issues);
        checkFieldSize(extVersion.getBugs(), DEFAULT_STRING_SIZE, "bugs", issues);
        // Enumerated fields: value must come from the allowed set
        checkInvalid(extVersion.getMarkdown(), s -> !MARKDOWN_VALUES.contains(s), "markdown", issues,
                MARKDOWN_VALUES.toString());
        checkCharacters(extVersion.getGalleryColor(), "galleryBanner.color", issues);
        checkFieldSize(extVersion.getGalleryColor(), GALLERY_COLOR_SIZE, "galleryBanner.color", issues);
        checkInvalid(extVersion.getGalleryTheme(), s -> !GALLERY_THEME_VALUES.contains(s), "galleryBanner.theme",
                issues, GALLERY_THEME_VALUES.toString());
        checkFieldSize(extVersion.getLocalizedLanguages(), DEFAULT_STRING_SIZE, "localizedLanguages", issues);
        // "qna" may be a known keyword or a URL
        checkInvalid(extVersion.getQna(), s -> !QNA_VALUES.contains(s) && isInvalidURL(s), "qna", issues,
                QNA_VALUES.toString() + " or a URL");
        checkFieldSize(extVersion.getQna(), DEFAULT_STRING_SIZE, "qna", issues);
        return issues;
    });
}
@Test
public void testInvalidTargetPlatform() {
    var extension = new ExtensionVersion();
    // "debian-x64" is not among the supported target platforms
    extension.setTargetPlatform("debian-x64");
    extension.setVersion("1.0.0");
    var issues = validator.validateMetadata(extension);
    // Exactly one issue, describing the unsupported platform
    assertThat(issues).hasSize(1);
    assertThat(issues.get(0))
        .isEqualTo(new ExtensionValidator.Issue("Unsupported target platform 'debian-x64'"));
}
/** Reads the underlying gauge and applies the transformation on every access. */
@Override
public T getValue() {
    return transform(base.getValue());
}
@Test
public void returnsATransformedValue() {
    // The derived gauge applies its transform to the underlying gauge's value
    assertThat(gauge2.getValue())
        .isEqualTo(3);
}
/**
 * Creates a provider from a previous serializer's snapshot; the restore
 * serializer is instantiated lazily, only when it is actually accessed.
 *
 * @param stateSerializerSnapshot snapshot of the previous serializer's schema
 * @return a lazily-registering state serializer provider
 */
public static <T> StateSerializerProvider<T> fromPreviousSerializerSnapshot(
        TypeSerializerSnapshot<T> stateSerializerSnapshot) {
    return new LazilyRegisteredStateSerializerProvider<>(stateSerializerSnapshot);
}
@Test
void testLazyInstantiationOfPreviousSchemaSerializer() {
    // create the provider with an exception throwing snapshot;
    // this would throw an exception if the restore serializer was eagerly accessed
    StateSerializerProvider<String> testProvider =
        StateSerializerProvider.fromPreviousSerializerSnapshot(
            new ExceptionThrowingSerializerSnapshot());
    // if we fail here, that means the restore serializer was indeed lazily accessed
    assertThatThrownBy(testProvider::previousSchemaSerializer)
        .withFailMessage("expected to fail when accessing the restore serializer.")
        .isInstanceOf(UnsupportedOperationException.class);
}
/**
 * Translates a broker's ListOffsets response into per-partition completed,
 * failed, and unmapped results, then sanity-checks that every requested
 * partition was accounted for.
 *
 * @param broker the broker that produced the response
 * @param keys the partitions this request asked about
 * @param abstractResponse the raw response, expected to be a ListOffsetsResponse
 * @return the classified per-partition results
 */
@Override
public ApiResult<TopicPartition, ListOffsetsResultInfo> handleResponse(
    Node broker,
    Set<TopicPartition> keys,
    AbstractResponse abstractResponse
) {
    ListOffsetsResponse response = (ListOffsetsResponse) abstractResponse;
    Map<TopicPartition, ListOffsetsResultInfo> completed = new HashMap<>();
    Map<TopicPartition, Throwable> failed = new HashMap<>();
    List<TopicPartition> unmapped = new ArrayList<>();
    Set<TopicPartition> retriable = new HashSet<>();
    for (ListOffsetsTopicResponse topic : response.topics()) {
        for (ListOffsetsPartitionResponse partition : topic.partitions()) {
            TopicPartition topicPartition = new TopicPartition(topic.name(), partition.partitionIndex());
            Errors error = Errors.forCode(partition.errorCode());
            if (!offsetTimestampsByPartition.containsKey(topicPartition)) {
                // The broker answered for a partition we never asked about; ignore it.
                log.warn("ListOffsets response includes unknown topic partition {}", topicPartition);
            } else if (error == Errors.NONE) {
                // Success: record offset/timestamp, normalizing the sentinel epoch to empty.
                Optional<Integer> leaderEpoch = (partition.leaderEpoch() == ListOffsetsResponse.UNKNOWN_EPOCH)
                    ? Optional.empty()
                    : Optional.of(partition.leaderEpoch());
                completed.put(
                    topicPartition,
                    new ListOffsetsResultInfo(partition.offset(), partition.timestamp(), leaderEpoch));
            } else {
                // Classify the error into failed / unmapped / retriable buckets.
                handlePartitionError(topicPartition, error, failed, unmapped, retriable);
            }
        }
    }
    // Sanity-check if the current leader for these partitions returned results for all of them
    for (TopicPartition topicPartition : keys) {
        if (unmapped.isEmpty()
            && !completed.containsKey(topicPartition)
            && !failed.containsKey(topicPartition)
            && !retriable.contains(topicPartition)
        ) {
            // A requested partition is missing from every bucket: treat as a failure.
            ApiException sanityCheckException = new ApiException(
                "The response from broker " + broker.id() +
                    " did not contain a result for topic partition " + topicPartition);
            log.error(
                "ListOffsets request for topic partition {} failed sanity check", topicPartition,
                sanityCheckException);
            failed.put(topicPartition, sanityCheckException);
        }
    }
    return new ApiResult<>(completed, failed, unmapped);
}
@Test
public void testHandleResponseSanityCheck() {
    TopicPartition errorPartition = t0p0;
    // Build a response that omits one requested partition
    Map<TopicPartition, Long> specsByPartition = new HashMap<>(offsetTimestampsByPartition);
    specsByPartition.remove(errorPartition);
    ApiResult<TopicPartition, ListOffsetsResultInfo> result =
        handleResponse(createResponse(emptyMap(), specsByPartition));
    // The missing partition must fail the sanity check; the rest complete normally
    assertEquals(offsetTimestampsByPartition.size() - 1, result.completedKeys.size());
    assertEquals(1, result.failedKeys.size());
    assertEquals(errorPartition, result.failedKeys.keySet().iterator().next());
    String sanityCheckMessage = result.failedKeys.get(errorPartition).getMessage();
    assertTrue(sanityCheckMessage.contains("did not contain a result for topic partition"));
    assertTrue(result.unmappedKeys.isEmpty());
}
/**
 * Compares two CPE "update" attribute values per the CPE Name Matching
 * standard (NISTIR 7696, Table 6-2), with extra leniency: '-'/'_' separators
 * are ignored and the abbreviations u/a/b are treated as equivalent to
 * update/alpha/beta (e.g. "u1" matches "update-1").
 *
 * @param left the left update value (may contain wildcard characters)
 * @param right the right update value
 * @return true if the two values are considered matching
 */
protected static boolean compareUpdateAttributes(String left, String right) {
    //the numbers below come from the CPE Matching standard
    //Table 6-2: Enumeration of Attribute Comparison Set Relations
    //https://nvlpubs.nist.gov/nistpubs/Legacy/IR/nistir7696.pdf
    if (left.equalsIgnoreCase(right)) {
        //1 6 9 - equals
        return true;
    } else if (LogicalValue.ANY.getAbbreviation().equals(left)) {
        //2 3 4 - superset (4 is undefined - treating as true)
        return true;
    } else if (LogicalValue.NA.getAbbreviation().equals(left) && LogicalValue.ANY.getAbbreviation().equals(right)) {
        //5 - subset
        return true;
    } else if (LogicalValue.NA.getAbbreviation().equals(left)) {
        //7 8 - disjoint, undefined
        return false;
    } else if (LogicalValue.NA.getAbbreviation().equals(right)) {
        //12 16 - disjoint
        return false;
    } else if (LogicalValue.ANY.getAbbreviation().equals(right)) {
        //13 15 - subset
        return true;
    }
    // Normalize by stripping '-' and '_' so e.g. "update-1" equals "update_1".
    final String leftValue = left.replace("-", "").replace("_", "");
    final String rightValue = right.replace("-", "").replace("_", "");
    if (leftValue.equalsIgnoreCase(rightValue)) {
        //1 6 9 - equals
        return true;
    }
    boolean results = false;
    //10 11 14 17
    if (containsSpecialCharacter(left)) {
        // Left side contains wildcards: convert to a regex and match against right.
        final Pattern p = Convert.wellFormedToPattern(left.toLowerCase());
        final Matcher m = p.matcher(right.toLowerCase());
        results = m.matches();
    }
    // Expand right-hand abbreviations ("u1" -> "update1", etc.) and retry recursively.
    if (!results && rightValue.matches("^[abu]\\d.*") && leftValue.matches("^(update|alpha|beta).*")) {
        switch (right.charAt(0)) {
            case 'u':
                results = compareUpdateAttributes(leftValue, "update" + rightValue.substring(1));
                break;
            case 'a':
                results = compareUpdateAttributes(leftValue, "alpha" + rightValue.substring(1));
                break;
            case 'b':
                results = compareUpdateAttributes(leftValue, "beta" + rightValue.substring(1));
                break;
            default:
                break;
        }
    }
    // Symmetric case: expand left-hand abbreviations and retry recursively.
    if (!results && leftValue.matches("^[abu]\\d.*") && rightValue.matches("^(update|alpha|beta).*")) {
        switch (left.charAt(0)) {
            case 'u':
                results = compareUpdateAttributes("update" + leftValue.substring(1), rightValue);
                break;
            case 'a':
                results = compareUpdateAttributes("alpha" + leftValue.substring(1), rightValue);
                break;
            case 'b':
                results = compareUpdateAttributes("beta" + leftValue.substring(1), rightValue);
                break;
            default:
                break;
        }
    }
    return results;
}
@Test
public void testcompareUpdateAttributes() throws CpeValidationException {
    // Abbreviated and separator-bearing forms of the same update must match both ways
    assertTrue(VulnerableSoftware.compareUpdateAttributes("update1", "u1"));
    assertTrue(VulnerableSoftware.compareUpdateAttributes("u1", "update1"));
    assertTrue(VulnerableSoftware.compareUpdateAttributes("u1", "update-1"));
    assertTrue(VulnerableSoftware.compareUpdateAttributes("a1", "alpha1"));
    assertTrue(VulnerableSoftware.compareUpdateAttributes("alpha-1", "alpha1"));
    assertTrue(VulnerableSoftware.compareUpdateAttributes("b-1", "beta1"));
    // Different update kinds (alpha vs beta) must not match
    assertFalse(VulnerableSoftware.compareUpdateAttributes("a1", "beta1"));
}
/**
 * Stores the given key/value pair in the backing store, replacing any value
 * previously associated with the key.
 */
public void addProperty(String key, String value) {
    store.put(key, value);
}
@Test
void getBoolean() {
    memConfig.addProperty("a", Boolean.TRUE.toString());
    // A present key parses to its stored boolean value
    Assertions.assertTrue(memConfig.getBoolean("a"));
    // A missing key falls back to the supplied default
    Assertions.assertFalse(memConfig.getBoolean("b", false));
    Assertions.assertTrue(memConfig.getBoolean("b", Boolean.TRUE));
}
/**
 * Resolves the instance assignment config for the given table and partitions
 * type: prefers an explicit config from the table config, otherwise derives a
 * default replica-group based config (kept for backward compatibility with
 * offline tables using replica-group segment assignment).
 *
 * @param tableConfig the table configuration
 * @param instancePartitionsType the partitions type to look up
 * @return the resolved instance assignment config
 */
public static InstanceAssignmentConfig getInstanceAssignmentConfig(TableConfig tableConfig,
    InstancePartitionsType instancePartitionsType) {
  Preconditions.checkState(allowInstanceAssignment(tableConfig, instancePartitionsType),
      "Instance assignment is not allowed for the given table config");
  // Use the instance assignment config from the table config if it exists
  Map<String, InstanceAssignmentConfig> instanceAssignmentConfigMap = tableConfig.getInstanceAssignmentConfigMap();
  if (instanceAssignmentConfigMap != null) {
    InstanceAssignmentConfig instanceAssignmentConfig =
        instanceAssignmentConfigMap.get(instancePartitionsType.toString());
    if (instanceAssignmentConfig != null) {
      return instanceAssignmentConfig;
    }
  }
  // Generate default instance assignment config if it does not exist
  // Only allow default config for offline table with replica-group segment assignment for backward-compatibility
  InstanceTagPoolConfig tagPoolConfig =
      new InstanceTagPoolConfig(TagNameUtils.extractOfflineServerTag(tableConfig.getTenantConfig()), false, 0, null);
  InstanceReplicaGroupPartitionConfig replicaGroupPartitionConfig;
  SegmentsValidationAndRetentionConfig segmentConfig = tableConfig.getValidationConfig();
  int numReplicaGroups = tableConfig.getReplication();
  ReplicaGroupStrategyConfig replicaGroupStrategyConfig = segmentConfig.getReplicaGroupStrategyConfig();
  Preconditions.checkState(replicaGroupStrategyConfig != null, "Failed to find the replica-group strategy config");
  String partitionColumn = replicaGroupStrategyConfig.getPartitionColumn();
  boolean minimizeDataMovement = segmentConfig.isMinimizeDataMovement();
  if (partitionColumn != null) {
    // Partition-column based assignment: the partition count must be configured.
    int numPartitions = tableConfig.getIndexingConfig().getSegmentPartitionConfig().getNumPartitions(partitionColumn);
    Preconditions.checkState(numPartitions > 0, "Number of partitions for column: %s is not properly configured",
        partitionColumn);
    replicaGroupPartitionConfig = new InstanceReplicaGroupPartitionConfig(true, 0, numReplicaGroups, 0,
        numPartitions, replicaGroupStrategyConfig.getNumInstancesPerPartition(), minimizeDataMovement,
        partitionColumn);
  } else {
    // If partition column is not configured, use replicaGroupStrategyConfig.getNumInstancesPerPartition() as
    // number of instances per replica-group for backward-compatibility
    replicaGroupPartitionConfig = new InstanceReplicaGroupPartitionConfig(true, 0, numReplicaGroups,
        replicaGroupStrategyConfig.getNumInstancesPerPartition(), 0, 0, minimizeDataMovement, null);
  }
  return new InstanceAssignmentConfig(tagPoolConfig, null, replicaGroupPartitionConfig, null, minimizeDataMovement);
}
@Test
public void testGetInstanceAssignmentConfigWhenInstanceAssignmentConfig() {
    // Register an explicit COMPLETED instance assignment config on the table
    Map<String, InstanceAssignmentConfig> instanceAssignmentConfigMap = new HashMap<>();
    instanceAssignmentConfigMap.put(InstancePartitionsType.COMPLETED.name(),
        getInstanceAssignmentConfig(InstanceAssignmentConfig.PartitionSelector.FD_AWARE_INSTANCE_PARTITION_SELECTOR));
    Map<InstancePartitionsType, String> instancePartitionsTypeStringMap = new HashMap<>();
    instancePartitionsTypeStringMap.put(InstancePartitionsType.COMPLETED, "testTable");
    TableConfig tableConfig = new TableConfigBuilder(TableType.REALTIME).setTableName("testTable")
        .setInstanceAssignmentConfigMap(instanceAssignmentConfigMap)
        .setInstancePartitionsMap(instancePartitionsTypeStringMap).build();
    // The explicit config (constraints and partition settings) must be returned as-is
    Assert.assertEquals(InstanceAssignmentConfigUtils
        .getInstanceAssignmentConfig(tableConfig, InstancePartitionsType.COMPLETED).getConstraintConfig()
        .getConstraints().get(0), "constraints1");
    Assert.assertEquals(InstanceAssignmentConfigUtils.getInstanceAssignmentConfig(tableConfig,
        InstancePartitionsType.COMPLETED).getReplicaGroupPartitionConfig().getNumInstancesPerPartition(), 1);
}
/**
 * Verifies the signature carried in the invocation attachments by recomputing
 * it from the provider's access key pair.
 *
 * @throws RpcAuthenticationException if required attachments are missing, the
 *     access key pair cannot be loaded, or the signatures do not match
 */
@Override
public void authenticate(Invocation invocation, URL url) throws RpcAuthenticationException {
    // NOTE(review): String.valueOf maps an absent attachment to the literal "null",
    // which is non-empty and therefore passes isAnyEmpty; genuinely missing
    // credentials are only caught later when the key pair fails to load or the
    // signatures differ — confirm this is the intended behavior.
    String accessKeyId = String.valueOf(invocation.getAttachment(Constants.AK_KEY));
    String requestTimestamp = String.valueOf(invocation.getAttachment(Constants.REQUEST_TIMESTAMP_KEY));
    String originSignature = String.valueOf(invocation.getAttachment(Constants.REQUEST_SIGNATURE_KEY));
    String consumer = String.valueOf(invocation.getAttachment(CommonConstants.CONSUMER));
    if (StringUtils.isAnyEmpty(accessKeyId, consumer, requestTimestamp, originSignature)) {
        throw new RpcAuthenticationException("Failed to authenticate, maybe consumer side did not enable the auth");
    }
    AccessKeyPair accessKeyPair;
    try {
        accessKeyPair = getAccessKeyPair(invocation, url);
    } catch (Exception e) {
        throw new RpcAuthenticationException("Failed to authenticate , can't load the accessKeyPair", e);
    }
    // Recompute the signature with the secret key and compare with the consumer's.
    String computeSignature = getSignature(url, invocation, accessKeyPair.getSecretKey(), requestTimestamp);
    boolean success = computeSignature.equals(originSignature);
    if (!success) {
        throw new RpcAuthenticationException("Failed to authenticate, signature is not correct");
    }
}
@Test
void testAuthenticateRequestNoSignature() {
    URL url = URL.valueOf("dubbo://10.10.10.10:2181")
        .addParameter(Constants.ACCESS_KEY_ID_KEY, "ak")
        .addParameter(CommonConstants.APPLICATION_KEY, "test")
        .addParameter(Constants.SECRET_ACCESS_KEY_KEY, "sk");
    // An invocation without the signature attachments must be rejected
    Invocation invocation = new RpcInvocation();
    AccessKeyAuthenticator helper = new AccessKeyAuthenticator(ApplicationModel.defaultModel());
    assertThrows(RpcAuthenticationException.class, () -> helper.authenticate(invocation, url));
}
// Hidden no-arg constructor: instances are obtained via the static factory
// (tests use DeviceKeyId.deviceKeyId(...)); presumably also kept for
// serialization frameworks — confirm.
private DeviceKeyId() {
    super();
}
@Test
public void testConstruction() {
    // The static factory must produce a non-null id carrying the given value
    DeviceKeyId deviceKeyId = DeviceKeyId.deviceKeyId(deviceKeyIdValue1);
    assertNotNull("The deviceKeyId should not be null.", deviceKeyId);
    assertEquals("The id should match the expected value.", deviceKeyIdValue1, deviceKeyId.id());
}
/**
 * Imports playlist items in batches, grouped by their owning playlist.
 * Items already imported in a previous run (cached in the executor) are
 * skipped; a null or empty input is a no-op.
 */
void importPlaylistItems(
    List<MusicPlaylistItem> playlistItems,
    IdempotentImportExecutor executor,
    UUID jobId,
    TokensAndUrlAuthData authData)
    throws Exception {
  if (playlistItems == null || playlistItems.isEmpty()) {
    return;
  }
  // Drop items that were already imported, then group the rest per playlist.
  Map<String, List<MusicPlaylistItem>> itemsByPlaylist =
      playlistItems.stream()
          .filter(item -> !executor.isKeyCached(item.toString()))
          .collect(Collectors.groupingBy(MusicPlaylistItem::getPlaylistId));
  for (Entry<String, List<MusicPlaylistItem>> playlistEntry : itemsByPlaylist.entrySet()) {
    String originalPlaylistId = playlistEntry.getKey();
    // Send each playlist's items in fixed-size batches.
    UnmodifiableIterator<List<MusicPlaylistItem>> batches =
        Iterators.partition(playlistEntry.getValue().iterator(), PLAYLIST_ITEM_BATCH_SIZE);
    while (batches.hasNext()) {
      importPlaylistItemBatch(jobId, authData, batches.next(), executor, originalPlaylistId);
    }
  }
}
@Test
public void failOnePlaylistItem() throws Exception {
    importPlaylistSetUp("p1_id", "p1_title");
    importPlaylistSetUp("p2_id", "p2_title");
    // Two items referencing the same recording, one per playlist
    MusicPlaylistItem playlistItem1 =
        new MusicPlaylistItem(
            new MusicRecording(
                "item1_isrc", null, 180000L, new MusicRelease("r1_icpn", null, null), null, false),
            "p1_id",
            1);
    MusicPlaylistItem playlistItem2 =
        new MusicPlaylistItem(
            new MusicRecording(
                "item1_isrc", null, 180000L, new MusicRelease("r1_icpn", null, null), null, false),
            "p2_id",
            1);
    GooglePlaylistItem googlePlaylistItem = buildGooglePlaylistItem("item1_isrc", "r1_icpn");
    BatchPlaylistItemRequest batchPlaylistItemRequest1 =
        new BatchPlaylistItemRequest(
            Lists.newArrayList(new ImportPlaylistItemRequest("p1_id", googlePlaylistItem)), "p1_id");
    BatchPlaylistItemRequest batchPlaylistItemRequest2 =
        new BatchPlaylistItemRequest(
            Lists.newArrayList(new ImportPlaylistItemRequest("p2_id", googlePlaylistItem)), "p2_id");
    // First batch succeeds; second batch reports INVALID_ARGUMENT
    BatchPlaylistItemResponse batchPlaylistItemResponse1 =
        new BatchPlaylistItemResponse(
            new NewPlaylistItemResult[] {
                buildPlaylistItemResult("item1_isrc", "r1_icpn", Code.OK_VALUE, null)
            });
    BatchPlaylistItemResponse batchPlaylistItemResponse2 =
        new BatchPlaylistItemResponse(
            new NewPlaylistItemResult[] {
                buildPlaylistItemResult("item1_isrc", "r1_icpn", Code.INVALID_ARGUMENT_VALUE, "")
            });
    when(googleMusicHttpApi.importPlaylistItems(eq(batchPlaylistItemRequest1)))
        .thenReturn(batchPlaylistItemResponse1);
    when(googleMusicHttpApi.importPlaylistItems(eq(batchPlaylistItemRequest2)))
        .thenReturn(batchPlaylistItemResponse2);
    // Run test
    googleMusicImporter.importPlaylistItems(
        Lists.newArrayList(playlistItem1, playlistItem2), executor, uuid, null);
    // One playlist item is imported
    assertTrue(executor.isKeyCached(String.valueOf(playlistItem1)));
    // Expected executor to have one error
    assertThat(executor.getErrors()).hasSize(1);
    ErrorDetail errorDetail = executor.getErrors().iterator().next();
    assertEquals(String.valueOf(playlistItem2), errorDetail.id());
    assertThat(errorDetail.exception()).contains("PlaylistItem could not be created.");
}
/**
 * Opens the destination the row data will be written to: either the running
 * sqlldr process's stdin (auto-concurrent load method) or the configured data
 * file, wrapped in a buffered writer with the configured encoding.
 *
 * @param space variable space for resolving the data file path
 * @param sqlldrProcess the running sqlldr process (used in auto-concurrent mode)
 * @throws KettleException wrapping any I/O failure while opening the output
 */
public void open( VariableSpace space, Process sqlldrProcess ) throws KettleException {
    String loadMethod = meta.getLoadMethod();
    try {
        OutputStream os;
        if ( OraBulkLoaderMeta.METHOD_AUTO_CONCURRENT.equals( loadMethod ) ) {
            // Stream rows straight into the sqlldr process.
            os = sqlldrProcess.getOutputStream();
        } else {
            // Else open the data file filled in.
            String dataFilePath = getFilename( getFileObject( space.environmentSubstitute( meta.getDataFile() ), space ) );
            File dataFile = new File( dataFilePath );
            // Make sure the parent directory exists
            dataFile.getParentFile().mkdirs();
            // Truncate any previous content (append == false).
            os = new FileOutputStream( dataFile, false );
        }
        String encoding = meta.getEncoding();
        if ( Utils.isEmpty( encoding ) ) {
            // Use the default encoding.
            output = new BufferedWriter( new OutputStreamWriter( os ) );
        } else {
            // Use the specified encoding
            output = new BufferedWriter( new OutputStreamWriter( os, encoding ) );
        }
    } catch ( IOException e ) {
        throw new KettleException( "IO exception occured: " + e.getMessage(), e );
    }
}
/**
 * Verifies open()/close() succeed against a real temp file resolved via the
 * mocked meta/VFS lookups.
 * Fixes: removed the unused {@code tmpDir} local, and replaced the
 * catch-and-fail anti-pattern with {@code throws Exception} so an unexpected
 * failure keeps its stack trace.
 */
@Test public void testOpen() throws Exception {
  File tempFile = File.createTempFile( "orafiles", "test" );
  tempFile.deleteOnExit();
  String tempFilePath = tempFile.getAbsolutePath();
  String dataFileVfsPath = "file:///" + tempFilePath;
  LocalFile tempFileObject = mock( LocalFile.class );
  // Route the configured data file through substitution and VFS resolution
  // back to the real temp file.
  doReturn( dataFileVfsPath ).when( oraBulkLoaderMeta ).getDataFile();
  doReturn( tempFilePath ).when( space ).environmentSubstitute( dataFileVfsPath );
  doReturn( tempFileObject ).when( oraBulkDataOutput ).getFileObject( tempFilePath, space );
  doReturn( tempFilePath ).when( oraBulkDataOutput ).getFilename( tempFileObject );

  oraBulkDataOutput.open( space, sqlldrProcess );
  oraBulkDataOutput.close();
}
/**
 * Decodes a Bigtable cell payload into the Java value for the requested Beam
 * field type. Fixed-width numeric types validate the raw byte length first;
 * logical and unknown types are rejected.
 */
Object getCellValue(Cell cell, Schema.FieldType type) {
  final ByteString cellValue = cell.getValue();
  final int valueSize = cellValue.size();
  // Materialize the bytes once; every numeric branch decodes from this copy.
  final byte[] raw = cellValue.toByteArray();
  switch (type.getTypeName()) {
    case BOOLEAN:
      checkArgument(valueSize == 1, message("Boolean", 1));
      return raw[0] != 0;
    case BYTE:
      checkArgument(valueSize == 1, message("Byte", 1));
      return raw[0];
    case INT16:
      checkArgument(valueSize == 2, message("Int16", 2));
      return Shorts.fromByteArray(raw);
    case INT32:
      checkArgument(valueSize == 4, message("Int32", 4));
      return Ints.fromByteArray(raw);
    case INT64:
      checkArgument(valueSize == 8, message("Int64", 8));
      return Longs.fromByteArray(raw);
    case FLOAT:
      checkArgument(valueSize == 4, message("Float", 4));
      return Float.intBitsToFloat(Ints.fromByteArray(raw));
    case DOUBLE:
      checkArgument(valueSize == 8, message("Double", 8));
      return Double.longBitsToDouble(Longs.fromByteArray(raw));
    case DATETIME:
      return DateTime.parse(cellValue.toStringUtf8());
    case STRING:
      return cellValue.toStringUtf8();
    case BYTES:
      return raw;
    case LOGICAL_TYPE:
      String identifier = checkArgumentNotNull(type.getLogicalType()).getIdentifier();
      throw new IllegalStateException("Unsupported logical type: " + identifier);
    default:
      throw new IllegalArgumentException(
          String.format("Unsupported cell value type '%s'.", type.getTypeName()));
  }
}
@Test
public void shouldParseInt16Type() {
  // Big-endian 0x0200 decodes to 512.
  final byte[] raw = new byte[] {2, 0};
  assertEquals((short) 512, PARSER.getCellValue(cell(raw), INT16));
}
/**
 * Factory for the generic collect_set UDAF.
 * Fix: the description claimed "a Bigint field" although this factory is the
 * generic variant applicable to any field type.
 */
@UdafFactory(description = "collect distinct values of a field into a single Array")
public static <T> Udaf<T, List<T>, List<T>> createCollectSetT() {
  return new Collect<>();
}
@Test
public void shouldRespectSizeLimit() {
  // Configure a collect_set capped at 1000 entries, then feed 2499 distinct values.
  final Udaf<Integer, List<Integer>, List<Integer>> collectSet = CollectSetUdaf.createCollectSetT();
  ((Configurable) collectSet).configure(ImmutableMap.of(CollectSetUdaf.LIMIT_CONFIG, 1000));
  List<Integer> accumulated = collectSet.initialize();
  for (int value = 1; value < 2500; value++) {
    accumulated = collectSet.aggregate(value, accumulated);
  }
  // Only the first 1000 values survive; later values are dropped.
  assertThat(accumulated, hasSize(1000));
  assertThat(accumulated, hasItem(1));
  assertThat(accumulated, hasItem(1000));
  assertThat(accumulated, not(hasItem(1001)));
}
static void parseServerIpAndPort(Connection connection, Span span) { try { URI url = URI.create(connection.getMetaData().getURL().substring(5)); // strip "jdbc:" String remoteServiceName = connection.getProperties().getProperty("zipkinServiceName"); if (remoteServiceName == null || "".equals(remoteServiceName)) { String databaseName = connection.getCatalog(); if (databaseName != null && !databaseName.isEmpty()) { remoteServiceName = "mysql-" + databaseName; } else { remoteServiceName = "mysql"; } } span.remoteServiceName(remoteServiceName); String host = connection.getHost(); if (host != null) { span.remoteIpAndPort(host, url.getPort() == -1 ? 3306 : url.getPort()); } } catch (Exception e) { // remote address is optional } }
@Test
void parseServerIpAndPort_serviceNameFromDatabaseName() throws SQLException {
  setupAndReturnPropertiesForHost("1.2.3.4");
  // No explicit zipkinServiceName property, so the catalog name is used.
  when(connection.getCatalog()).thenReturn("mydatabase");

  TracingStatementInterceptor.parseServerIpAndPort(connection, span);

  // Service name derives from the database; host/port come from the properties.
  verify(span).remoteServiceName("mysql-mydatabase");
  verify(span).remoteIpAndPort("1.2.3.4", 5555);
}
/**
 * Reads the value at {@code columnIndex} from the wrapped result set,
 * dispatching on the requested Java class to the matching typed getter;
 * any unlisted type falls back to {@code getObject}.
 *
 * @throws SQLException if the underlying getter fails
 */
@Override
public Object getValue(final int columnIndex, final Class<?> type) throws SQLException {
    if (boolean.class == type) {
        return resultSet.getBoolean(columnIndex);
    }
    if (byte.class == type) {
        return resultSet.getByte(columnIndex);
    }
    if (short.class == type) {
        return resultSet.getShort(columnIndex);
    }
    if (int.class == type) {
        return resultSet.getInt(columnIndex);
    }
    if (long.class == type) {
        return resultSet.getLong(columnIndex);
    }
    if (float.class == type) {
        return resultSet.getFloat(columnIndex);
    }
    if (double.class == type) {
        return resultSet.getDouble(columnIndex);
    }
    if (String.class == type) {
        return resultSet.getString(columnIndex);
    }
    if (BigDecimal.class == type) {
        return resultSet.getBigDecimal(columnIndex);
    }
    if (byte[].class == type) {
        return resultSet.getBytes(columnIndex);
    }
    if (Date.class == type) {
        return resultSet.getDate(columnIndex);
    }
    if (Time.class == type) {
        return resultSet.getTime(columnIndex);
    }
    if (Timestamp.class == type) {
        return resultSet.getTimestamp(columnIndex);
    }
    if (Blob.class == type) {
        return resultSet.getBlob(columnIndex);
    }
    if (Clob.class == type) {
        return resultSet.getClob(columnIndex);
    }
    if (Array.class == type) {
        return resultSet.getArray(columnIndex);
    }
    return resultSet.getObject(columnIndex);
}
@Test
void assertGetValueByByte() throws SQLException {
  ResultSet mockResultSet = mock(ResultSet.class);
  when(mockResultSet.getByte(1)).thenReturn((byte) 0x00);
  // byte.class requests must route to ResultSet.getByte.
  assertThat(new JDBCStreamQueryResult(mockResultSet).getValue(1, byte.class), is((byte) 0x00));
}
/**
 * Batch registration is not supported for persistent instances; this proxy
 * rejects every call.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public void batchRegisterService(String serviceName, String groupName, List<Instance> instances) {
    throw new UnsupportedOperationException(
            "Do not support persistent instances to perform batch registration methods.");
}
@Test
void testBatchRegisterService() {
  // The persistent client proxy must reject batch registration outright.
  assertThrows(UnsupportedOperationException.class,
      () -> clientProxy.batchRegisterService("a", "b", null));
}
/**
 * Fetches the pod list for the configured namespace and maps it to endpoints,
 * enriched with public addresses. Known REST failures degrade gracefully via
 * {@code handleKnownException}.
 */
List<Endpoint> endpoints() {
  final String podsUrl = String.format("%s/api/v1/namespaces/%s/pods", kubernetesMaster, namespace);
  try {
    final String response = callGet(podsUrl);
    return enrichWithPublicAddresses(parsePodsList(response));
  } catch (RestClientException e) {
    return handleKnownException(e);
  }
}
@Test public void wrongApiToken() { // given String unauthorizedBody = "\"reason\":\"Unauthorized\""; stub(String.format("/api/v1/namespaces/%s/pods", NAMESPACE), HttpURLConnection.HTTP_UNAUTHORIZED, unauthorizedBody); // when List<Endpoint> result = kubernetesClient.endpoints(); // then assertEquals(emptyList(), result); }
/**
 * Sends a request to the identified plugin and adapts the response through the
 * callback. The extension version is resolved first; the request body, params
 * and headers are then built by the callback for that version.
 *
 * @throws RecordNotFoundException if no plugin with the id implements this extension
 * @throws RuntimeException if the plugin returns null or a non-success response,
 *         or if any other failure occurs while interacting with the plugin
 */
public <T> T submitRequest(String pluginId, String requestName, PluginInteractionCallback<T> pluginInteractionCallback) {
    if (!pluginManager.isPluginOfType(extensionName, pluginId)) {
        throw new RecordNotFoundException(format("Did not find '%s' plugin with id '%s'. Looks like plugin is missing", extensionName, pluginId));
    }
    try {
        String resolvedExtensionVersion = pluginManager.resolveExtensionVersion(pluginId, extensionName, goSupportedVersions);
        DefaultGoPluginApiRequest apiRequest = new DefaultGoPluginApiRequest(extensionName, resolvedExtensionVersion, requestName);
        apiRequest.setRequestBody(pluginInteractionCallback.requestBody(resolvedExtensionVersion));
        apiRequest.setRequestParams(pluginInteractionCallback.requestParams(resolvedExtensionVersion));
        apiRequest.setRequestHeaders(pluginInteractionCallback.requestHeaders(resolvedExtensionVersion));
        GoPluginApiResponse response = pluginManager.submitTo(pluginId, extensionName, apiRequest);
        if (response == null) {
            throw new RuntimeException("The plugin sent a null response");
        }
        if (DefaultGoApiResponse.SUCCESS_RESPONSE_CODE == response.responseCode()) {
            return pluginInteractionCallback.onSuccess(response.responseBody(), response.responseHeaders(), resolvedExtensionVersion);
        }
        // Let the callback observe the failure before the exception is raised.
        pluginInteractionCallback.onFailure(response.responseCode(), response.responseBody(), resolvedExtensionVersion);
        throw new RuntimeException(format("The plugin sent a response that could not be understood by Go. Plugin returned with code '%s' and the following response: '%s'", response.responseCode(), response.responseBody()));
    } catch (RuntimeException e) {
        // Re-throw unchanged so the specific errors raised above are not re-wrapped.
        throw e;
    } catch (Exception e) {
        throw new RuntimeException(format("Interaction with plugin with id '%s' implementing '%s' extension failed while requesting for '%s'. Reason: [%s]", pluginId, extensionName, requestName, e.getMessage()), e);
    }
}
@Test
void shouldConstructTheRequest() {
  final String expectedBody = "request_body";
  when(response.responseCode()).thenReturn(DefaultGoApiResponse.SUCCESS_RESPONSE_CODE);

  // Capture the request the helper hands to the plugin manager.
  final GoPluginApiRequest[] captured = {null};
  doAnswer(invocation -> {
    captured[0] = (GoPluginApiRequest) invocation.getArguments()[2];
    return response;
  }).when(pluginManager).submitTo(eq(pluginId), eq(extensionName), any(GoPluginApiRequest.class));

  helper.submitRequest(pluginId, requestName, new DefaultPluginInteractionCallback<>() {
    @Override
    public String requestBody(String resolvedExtensionVersion) {
      return expectedBody;
    }
  });

  // The generated request carries the callback body, extension, name, and no params.
  assertThat(captured[0].requestBody()).isEqualTo(expectedBody);
  assertThat(captured[0].extension()).isEqualTo(extensionName);
  assertThat(captured[0].requestName()).isEqualTo(requestName);
  assertThat(captured[0].requestParameters().isEmpty()).isTrue();
}
/**
 * Routes the request to a Spring Cloud service instance: resolves the
 * selector's serviceId, picks an upstream via the configured load balancer,
 * rewrites the target domain and timeout on the exchange, then continues the
 * plugin chain. Missing serviceId or upstream short-circuits with an error payload.
 */
@Override
protected Mono<Void> doExecute(final ServerWebExchange exchange, final ShenyuPluginChain chain, final SelectorData selector, final RuleData rule) {
    if (Objects.isNull(rule)) {
        return Mono.empty();
    }
    final ShenyuContext shenyuContext = exchange.getAttribute(Constants.CONTEXT);
    assert shenyuContext != null;
    final SpringCloudSelectorHandle springCloudSelectorHandle = SpringCloudPluginDataHandler.SELECTOR_CACHED.get().obtainHandle(selector.getId());
    final SpringCloudRuleHandle ruleHandle = buildRuleHandle(rule);
    String serviceId = springCloudSelectorHandle.getServiceId();
    // A selector without a serviceId cannot be routed; answer with a config error.
    if (StringUtils.isBlank(serviceId)) {
        Object error = ShenyuResultWrap.error(exchange, ShenyuResultEnum.CANNOT_CONFIG_SPRINGCLOUD_SERVICEID);
        return WebFluxResultUtils.result(exchange, error);
    }
    // The remote client IP is handed to the service chooser alongside the
    // configured load-balance strategy.
    final String ip = Objects.requireNonNull(exchange.getRequest().getRemoteAddress()).getAddress().getHostAddress();
    final Upstream upstream = serviceChooser.choose(serviceId, selector.getId(), ip, ruleHandle.getLoadBalance());
    if (Objects.isNull(upstream)) {
        Object error = ShenyuResultWrap.error(exchange, ShenyuResultEnum.SPRINGCLOUD_SERVICEID_IS_ERROR);
        return WebFluxResultUtils.result(exchange, error);
    }
    final String domain = upstream.buildDomain();
    setDomain(URI.create(domain + shenyuContext.getRealUrl()), exchange);
    //set time out.
    exchange.getAttributes().put(Constants.HTTP_TIME_OUT, ruleHandle.getTimeout());
    return chain.execute(exchange);
}
@Test
public void testSpringCloudPluginNotConfigServiceId() {
  // Cache a selector handle that has no serviceId configured.
  final SelectorData selectorData = SelectorData.builder()
      .id("springcloud")
      .handle("[]")
      .build();
  final RuleData ruleData = RuleData.builder()
      .id("springcloud")
      .selectorId("springcloud")
      .handle("{}")
      .build();
  SpringCloudPluginDataHandler.SELECTOR_CACHED.get()
      .cachedHandle(selectorData.getId(), new SpringCloudSelectorHandle());
  SpringCloudRuleHandle ruleHandle =
      GsonUtils.getGson().fromJson(ruleData.getHandle(), SpringCloudRuleHandle.class);
  SpringCloudPluginDataHandler.RULE_CACHED.get()
      .cachedHandle(CacheKeyUtils.INST.getKey(ruleData), ruleHandle);

  // The plugin must complete normally (writing an error payload) rather than erroring.
  StepVerifier.create(springCloudPlugin.doExecute(exchange, chain, selectorData, ruleData))
      .expectSubscription()
      .verifyComplete();
}
// Reports this component's version, which tracks the Kafka build version
// as resolved by AppInfoParser.
@Override
public String version() {
    return AppInfoParser.getVersion();
}
@Test
public void testCastVersionRetrievedFromAppInfoParser() {
  // Both key and value transforms must report the AppInfoParser version,
  // and therefore agree with each other.
  final String expectedVersion = AppInfoParser.getVersion();
  assertEquals(expectedVersion, xformKey.version());
  assertEquals(expectedVersion, xformValue.version());
  assertEquals(xformKey.version(), xformValue.version());
}
/**
 * Builds one result row per storage unit matched by the statement, combining
 * live connection properties with configured pool and custom properties.
 */
@Override
public Collection<LocalDataQueryResultRow> getRows(final ShowStorageUnitsStatement sqlStatement, final ContextManager contextManager) {
    Collection<LocalDataQueryResultRow> result = new LinkedList<>();
    for (Entry<String, StorageUnit> entry : getToBeShownStorageUnits(sqlStatement).entrySet()) {
        ConnectionProperties connectionProps = entry.getValue().getConnectionProperties();
        DataSourcePoolProperties dataSourcePoolProps = getDataSourcePoolProperties(entry.getValue());
        Map<String, Object> poolProps = dataSourcePoolProps.getPoolPropertySynonyms().getStandardProperties();
        Map<String, Object> customProps = getCustomProperties(dataSourcePoolProps.getCustomProperties().getProperties(), connectionProps.getQueryProperties());
        // Column order: name, type, host, port, db, pool settings, custom props.
        result.add(new LocalDataQueryResultRow(entry.getKey(), entry.getValue().getStorageType().getType(), connectionProps.getHostname(), connectionProps.getPort(), connectionProps.getCatalog(),
                getStandardProperty(poolProps, "connectionTimeoutMilliseconds"),
                getStandardProperty(poolProps, "idleTimeoutMilliseconds"),
                getStandardProperty(poolProps, "maxLifetimeMilliseconds"),
                getStandardProperty(poolProps, "maxPoolSize"),
                getStandardProperty(poolProps, "minPoolSize"),
                getStandardProperty(poolProps, "readOnly"),
                customProps));
    }
    return result;
}
@Test
void assertGetRowsWithLikePattern() {
  Collection<LocalDataQueryResultRow> actual = executor.getRows(
      new ShowStorageUnitsStatement(mock(DatabaseSegment.class), "_0", null), mock(ContextManager.class));
  assertThat(actual.size(), is(1));
  LocalDataQueryResultRow row = actual.iterator().next();
  // Expected cells in column order: name, type, host, port, db,
  // connection/idle/lifetime timeouts, max/min pool size, readOnly, custom props.
  Object[] expectedCells = {
      "ds_0", "MySQL", "localhost", "3307", "ds_0",
      "", "", "", "100", "10", "",
      "{\"openedConnections\":[],\"closed\":false}"
  };
  for (int i = 0; i < expectedCells.length; i++) {
    assertThat(row.getCell(i + 1), is(expectedCells[i]));
  }
}
/**
 * Deletes a mail account by id and evicts its cache entry.
 * Deletion is refused while any mail template still references the account.
 */
@Override
@CacheEvict(value = RedisKeyConstants.MAIL_ACCOUNT, key = "#id")
public void deleteMailAccount(Long id) {
    // Validate that the account exists
    validateMailAccountExists(id);
    // Refuse deletion while mail templates still reference this account
    if (mailTemplateService.getMailTemplateCountByAccountId(id) > 0) {
        throw exception(MAIL_ACCOUNT_RELATE_TEMPLATE_EXISTS);
    }
    // Delete the account row
    mailAccountMapper.deleteById(id);
}
@Test
public void testDeleteMailAccount_success() {
    // mock data: insert an existing account row
    MailAccountDO dbMailAccount = randomPojo(MailAccountDO.class);
    mailAccountMapper.insert(dbMailAccount);
    // prepare arguments
    Long id = dbMailAccount.getId();
    // mock method (no templates reference this account)
    when(mailTemplateService.getMailTemplateCountByAccountId(eq(id))).thenReturn(0L);
    // invoke
    mailAccountService.deleteMailAccount(id);
    // verify the row is gone
    assertNull(mailAccountMapper.selectById(id));
}
/**
 * Returns a deep snapshot of the recorded patch as a {@link PatchTree};
 * the underlying data map is copied so later recording does not mutate the result.
 */
public PatchTree generatePatchTree() {
    try {
        return new PatchTree(
                _patchTree.getDataMap().copy());
    } catch (CloneNotSupportedException e) {
        // copy() declares this, but a data-map copy is expected to always succeed.
        throw new IllegalStateException("Error copying data map: " + _patchTree.getDataMap(), e);
    }
}
@Test
public void testEmptyPatch() {
  // A recorder with no recorded operations must produce an empty patch.
  PatchTreeRecorder<PatchTreeTestModel> recorder = makeOne();
  Assert.assertEquals(recorder.generatePatchTree().getDataMap(), new DataMap());
}
/**
 * Converts an Avro record schema into the equivalent Parquet message type.
 * Only record schemas can form a Parquet message; anything else is rejected.
 */
public MessageType convert(Schema avroSchema) {
  if (!avroSchema.getType().equals(Schema.Type.RECORD)) {
    throw new IllegalArgumentException("Avro schema must be a record.");
  }
  final String messageName = avroSchema.getFullName();
  return new MessageType(messageName, convertFields(avroSchema.getFields(), ""));
}
@Test(expected = IllegalArgumentException.class)
public void testParquetMapWithNonStringKeyFails() throws Exception {
  // Parquet permits int map keys, but Avro maps require string keys,
  // so the conversion must be rejected.
  String schemaText = "message myrecord {\n"
      + " required group mymap (MAP) {\n"
      + " repeated group map (MAP_KEY_VALUE) {\n"
      + " required int32 key;\n"
      + " required int32 value;\n"
      + " }\n"
      + " }\n"
      + "}\n";
  new AvroSchemaConverter().convert(MessageTypeParser.parseMessageType(schemaText));
}
/**
 * Creates (or replaces) a BigQuery external table described by a manifest file,
 * optionally with hive-style partition columns and a BigLake connection.
 */
public void createOrUpdateTableUsingBqManifestFile(String tableName, String bqManifestFileUri, String sourceUriPrefix, Schema schema) {
    try {
        // Explicit column list rendered from the sync schema.
        String withClauses = String.format("( %s )", BigQuerySchemaResolver.schemaToSqlString(schema));
        String extraOptions = "enable_list_inference=true,";
        if (!StringUtils.isNullOrEmpty(sourceUriPrefix)) {
            // Partitioned layout: partition columns are derived from the URI prefix.
            withClauses += " WITH PARTITION COLUMNS";
            extraOptions += String.format(" hive_partition_uri_prefix=\"%s\", require_hive_partition_filter=%s,", sourceUriPrefix, requirePartitionFilter);
        }
        if (!StringUtils.isNullOrEmpty(bigLakeConnectionId)) {
            withClauses += String.format(" WITH CONNECTION `%s`", bigLakeConnectionId);
        }
        String query = String.format(
            "CREATE OR REPLACE EXTERNAL TABLE `%s.%s.%s` %s OPTIONS (%s "
                + "uris=[\"%s\"], format=\"PARQUET\", file_set_spec_type=\"NEW_LINE_DELIMITED_MANIFEST\")",
            projectId, datasetName, tableName, withClauses, extraOptions, bqManifestFileUri);
        QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query)
            .setUseLegacySql(false)
            .build();
        JobId jobId = JobId.newBuilder().setProject(projectId).setRandomJob().build();
        Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build());
        queryJob = queryJob.waitFor();
        // NOTE(review): DDL failures below are only logged, never thrown —
        // confirm callers tolerate a silently failed table creation.
        if (queryJob == null) {
            LOG.error("Job for table creation no longer exists");
        } else if (queryJob.getStatus().getError() != null) {
            LOG.error("Job for table creation failed: {}", queryJob.getStatus().getError().toString());
        } else {
            LOG.info("External table created using manifest file.");
        }
    } catch (InterruptedException | BigQueryException e) {
        throw new HoodieBigQuerySyncException("Failed to create external table using manifest file. ", e);
    }
}
@Test
void createTableWithManifestFile_partitioned() throws Exception {
    // Configure a BigLake connection so the DDL gains a WITH CONNECTION clause.
    properties.setProperty(BigQuerySyncConfig.BIGQUERY_SYNC_BIG_LAKE_CONNECTION_ID.key(), "my-project.us.bl_connection");
    BigQuerySyncConfig config = new BigQuerySyncConfig(properties);
    client = new HoodieBigQuerySyncClient(config, mockBigQuery);
    Schema schema = Schema.of(Field.of("field", StandardSQLTypeName.STRING));
    // Capture the submitted job and simulate a successful run.
    ArgumentCaptor<JobInfo> jobInfoCaptor = ArgumentCaptor.forClass(JobInfo.class);
    Job mockJob = mock(Job.class);
    when(mockBigQuery.create(jobInfoCaptor.capture())).thenReturn(mockJob);
    Job mockJobFinished = mock(Job.class);
    when(mockJob.waitFor()).thenReturn(mockJobFinished);
    JobStatus mockJobStatus = mock(JobStatus.class);
    when(mockJobFinished.getStatus()).thenReturn(mockJobStatus);
    when(mockJobStatus.getError()).thenReturn(null);

    client.createOrUpdateTableUsingBqManifestFile(TEST_TABLE, MANIFEST_FILE_URI, SOURCE_PREFIX, schema);

    // The generated DDL must carry partition columns, the connection, and manifest options.
    QueryJobConfiguration configuration = jobInfoCaptor.getValue().getConfiguration();
    assertEquals(configuration.getQuery(),
        String.format("CREATE OR REPLACE EXTERNAL TABLE `%s.%s.%s` ( `field` STRING ) WITH PARTITION COLUMNS WITH CONNECTION `my-project.us.bl_connection` "
            + "OPTIONS (enable_list_inference=true, hive_partition_uri_prefix=\"%s\", "
            + "require_hive_partition_filter=true, uris=[\"%s\"], format=\"PARQUET\", file_set_spec_type=\"NEW_LINE_DELIMITED_MANIFEST\")",
            PROJECT_ID, TEST_DATASET, TEST_TABLE, SOURCE_PREFIX, MANIFEST_FILE_URI));
}
/**
 * Builds a FeatureRange for {@code key} from MIXED_IN partition strings.
 * Two encodings are accepted per partition:
 * plain "label=from-to" (a leading "=-" negates the range), and the edge form
 * "label=value,value,payload" where the payload packs from/to offsets with
 * flag bits 0x80000000 / 0x40000000 selecting the interpretation.
 */
public static FeatureRange buildFromMixedIn(String key, List<String> partitions, int arity) {
    Long fromInclusive = null;
    Long toInclusive = null;
    long from = 0;
    long to = 0;
    for (String p : partitions) {
        String[] parts = p.split(",");
        if (parts.length == 1) {
            // Plain form; a leading '-' after '=' yields a fourth split token.
            String[] subparts = parts[0].split("=|-");
            int offset = subparts.length == 3? 0 : 1;
            if (subparts.length < 3 || subparts.length > 4) {
                throw new IllegalArgumentException("MIXED_IN range partition must be on the form label=val-val");
            }
            from = Long.parseLong(subparts[offset + 1]);
            to = Long.parseLong(subparts[offset + 2]);
            if (parts[0].contains("=-")) {
                // "label=-a-b" denotes the negated range [-b, -a].
                long tmp = from;
                from = -to;
                to = -tmp;
            }
        } else {
            if (parts.length != 3) {
                throw new IllegalArgumentException("MIXED_IN range edge partition must be on the form label=val,val,payload");
            }
            long value = Long.parseLong(parts[1]);
            long payload = Long.parseLong(parts[2]);
            if ((payload & 0xc0000000) == 0x80000000L) {
                // High flag: range runs from value+offset to the top of the arity window.
                from = value + (payload & 0xffff);
                to = value + arity - 1;
            } else if ((payload & 0xc0000000) == 0x40000000L) {
                // Low flag: range starts at value; length in the low 16 bits.
                from = value;
                to = value + (payload & 0xffff);
            } else {
                // No flag: from/to offsets packed into the two low 16-bit fields.
                from = value + (payload >> 16);
                to = value + (payload & 0xffff);
            }
        }
        if (fromInclusive == null || fromInclusive > from) {
            fromInclusive = from;
        }
        // NOTE(review): this also keeps the MINIMUM upper bound; if the intent is
        // the union of partition ranges, the condition should be toInclusive < to
        // — confirm against callers with multiple partitions.
        if (toInclusive == null || toInclusive > to) {
            toInclusive = to;
        }
    }
    return new FeatureRange(key, fromInclusive, toInclusive);
}
@Test
void requireThatFeatureRangeCanBeBuiltFromMixedIn() {
    // Plain "label=from-to" form.
    assertEquals(new FeatureRange("foo", 10L, 19L), FeatureRange.buildFromMixedIn("foo", List.of("foo=10-19"), 10));
    // Leading "=-" negates the range: -19..-10.
    assertEquals(new FeatureRange("foo", -19L, -10L), FeatureRange.buildFromMixedIn("foo", List.of("foo=-10-19"), 10));
    // Edge form with an unflagged payload packing from/to offsets.
    assertEquals(new FeatureRange("foo", 10L, 19L), FeatureRange.buildFromMixedIn("foo", List.of("foo=10,10,9"), 10));
    // Payload with the 0x40000000 flag: range starts at value, length in low bits.
    assertEquals(new FeatureRange("foo", 10L, 19L), FeatureRange.buildFromMixedIn("foo", List.of("foo=10,10,1073741833"), 10));
    // Payload with the 0x80000000 flag: range ends at value + arity - 1.
    assertEquals(new FeatureRange("foo", 10L, 19L), FeatureRange.buildFromMixedIn("foo", List.of("foo=10,10,2147483648"), 10));
}
/**
 * Sends an alert e-mail with the given title and HTML content to the configured
 * receivers (plus CC receivers, if any).
 *
 * @return an AlertResult whose success flag is set only when the mail utility
 *         reports a non-blank send result
 */
public AlertResult send(String title, String content) {
    AlertResult alertResult = new AlertResult();
    // if there is no receivers && no receiversCc, no need to process
    // NOTE(review): only the receivers list is actually checked here — confirm
    // whether a CC-only configuration should be allowed.
    if (CollectionUtils.isEmpty(emailParams.getReceivers())) {
        logger.error("no receivers , you must set receivers");
        return alertResult;
    }
    try {
        String sendResult = MailUtil.send(
                getMailAccount(),
                emailParams.getReceivers(),
                emailParams.getReceiverCcs(),
                null,
                title,
                content,
                true);
        if (StringUtils.isNotBlank(sendResult)) {
            logger.info("Send email info: {}", sendResult);
            alertResult.setSuccess(true);
            alertResult.setMessage(sendResult);
            return alertResult;
        }
    } catch (GeneralSecurityException e) {
        handleException(alertResult, e);
    }
    return alertResult;
}
// NOTE(review): ignored — presumably requires a live, configured SMTP account
// to actually deliver mail; confirm before enabling.
@Ignore
@Test
public void testTextSendMails() throws GeneralSecurityException {
    AlertResult alertResult = emailSender.send(AlertBaseConstant.ALERT_TEMPLATE_TITLE, AlertBaseConstant.ALERT_TEMPLATE_MSG);
    Assert.assertEquals(true, alertResult.getSuccess());
}
public static List<String> parse(@Nullable String input, boolean escapeComma, boolean trim) { if (null == input || input.isEmpty()) { return Collections.emptyList(); } Stream<String> tokenStream; if (escapeComma) { // Use regular expression to split on "," unless it is "\," // Use a non-positive limit to apply the replacement as many times as possible and to ensure trailing empty // strings shall not be discarded tokenStream = Arrays.stream(input.split("(?<!\\\\),", -1)) .map(s -> s.replace("\\,", ",")); } else { tokenStream = Arrays.stream(input.split(",")); } if (trim) { tokenStream = tokenStream.map(String::trim); } return tokenStream.collect(Collectors.toList()); }
@Test
public void testEscapeTrueTrimTrue() {
  // The escaped comma ("\\,") must survive splitting and be unescaped,
  // while surrounding whitespace is stripped from each token.
  String rawInput = " \\,.\n\t()[]{}\"':=-_$\\?@&|#+/,:=[]$@&|#";
  List<String> expected = Arrays.asList(",.\n\t()[]{}\"':=-_$\\?@&|#+/", ":=[]$@&|#");
  Assert.assertEquals(CsvParser.parse(rawInput, true, true), expected);
}
/**
 * A path can only denote a container when the bucket is NOT already encoded
 * in the hostname; otherwise nothing below the root is a container.
 */
@Override
public boolean isContainer(final Path file) {
    final String bucketInHostname = RequestEntityRestStorageService.findBucketInHostname(host);
    // Short-circuits: with a bucket in the hostname the delegate is never consulted.
    return StringUtils.isEmpty(bucketInHostname) && super.isContainer(file);
}
@Test
public void testContainerVirtualHostInHostname() {
    // With bucket "b" encoded in the hostname, no path is treated as a container,
    // yet getContainer still resolves to the bucket path name "b".
    // NOTE(review): the "/" passed to assertFalse is the assertion message and
    // looks like a placeholder — confirm intent.
    assertFalse("/", new S3PathContainerService(new Host(new S3Protocol(), "b.s3.amazonaws.com")).isContainer(new Path("/b", EnumSet.of(Path.Type.directory))));
    assertFalse("/", new S3PathContainerService(new Host(new S3Protocol(), "b.s3.amazonaws.com")).isContainer(new Path("/b/f", EnumSet.of(Path.Type.file))));
    assertEquals("/", new S3PathContainerService(new Host(new S3Protocol(), "b.s3.amazonaws.com")).getContainer(new Path("/b/f", EnumSet.of(Path.Type.file))).getName());
}
/**
 * Realizes each element of {@code objs} into its corresponding target type,
 * delegating element conversion to the single-value realize overload.
 *
 * @throws IllegalArgumentException if the two arrays differ in length
 */
public static Object[] realize(Object[] objs, Class<?>[] types) {
    if (objs.length != types.length) {
        throw new IllegalArgumentException("args.length != types.length");
    }
    final Object[] realized = new Object[objs.length];
    int index = 0;
    for (Object obj : objs) {
        realized[index] = realize(obj, types[index]);
        index++;
    }
    return realized;
}
@Test
void test_realize_IntPararmter_IllegalArgumentException() throws Exception {
  Method setter = PojoUtilsTest.class.getMethod("setInt", int.class);
  assertNotNull(setter);
  // A String input must be coerced to the primitive int parameter type
  // and be accepted by the reflective setter call.
  Object realized = PojoUtils.realize("123", setter.getParameterTypes()[0], setter.getGenericParameterTypes()[0]);
  setter.invoke(new PojoUtilsTest(), realized);
}
// Factory for the no-op instruction.
public static NoActionInstruction createNoAction() {
    return new NoActionInstruction();
}
@Test
public void testCreateNoActionMethod() {
  // The factory must yield a NOACTION-typed instruction of the concrete class.
  Instructions.NoActionInstruction noAction = Instructions.createNoAction();
  checkAndConvert(noAction, Instruction.Type.NOACTION, Instructions.NoActionInstruction.class);
}
/**
 * CLI entry point: delegates to runSqlLine with the process stdout/stderr
 * and no custom input stream.
 *
 * @throws IOException if SqlLine fails while reading or writing
 */
public static void main(String[] args) throws IOException {
    runSqlLine(args, null, System.out, System.err);
}
@Test
public void testSqlLine_ddl() throws Exception {
  // Create and then drop an external table; main must process both -e commands.
  String[] commands = {
      "-e", "CREATE EXTERNAL TABLE test (id INTEGER) TYPE 'text';",
      "-e", "DROP TABLE test;"
  };
  BeamSqlLine.main(commands);
}
@Override public void createTable(Table table) { validateTableType(table); // first assert the table name is unique if (tables.containsKey(table.getName())) { throw new IllegalArgumentException("Duplicate table name: " + table.getName()); } // invoke the provider's create providers.get(table.getType()).createTable(table); // store to the global metastore tables.put(table.getName(), table); }
@Test(expected = IllegalArgumentException.class)
public void testCreateTable_invalidTableType() throws Exception {
  // No provider is registered for type "invalid", so creation must fail.
  store.createTable(mockTable("person", "invalid"));
}
/**
 * Schedules a retry for a job entering a failed state, using exponential
 * backoff seconds, unless the failure is non-retryable or the retry budget
 * is exhausted.
 */
@Override
public void onStateElection(Job job, JobState newState) {
    // Guard clauses: only failed, retryable jobs with budget left are rescheduled.
    if (isNotFailed(newState)) return;
    if (isJobNotFoundException(newState)) return;
    if (isProblematicExceptionAndMustNotRetry(newState)) return;
    if (maxAmountOfRetriesReached(job)) return;

    final String reason = String.format("Retry %d of %d", getFailureCount(job), getMaxNumberOfRetries(job));
    job.scheduleAt(now().plusSeconds(getSecondsToAdd(job)), reason);
}
@Test
void retryFilterSchedulesJobAgainIfStateIsFailed() {
  final Job failedJob = aFailedJob().build();
  applyDefaultJobFilter(failedJob);
  final int statesBefore = failedJob.getJobStates().size();

  retryFilter.onStateElection(failedJob, failedJob.getJobState());

  // Exactly one new state must have been appended, and it is SCHEDULED.
  assertThat(failedJob.getJobStates().size()).isEqualTo(statesBefore + 1);
  assertThat(failedJob.getState()).isEqualTo(SCHEDULED);
}
/**
 * Removes a stream thread, delegating with an effectively unbounded timeout
 * (Long.MAX_VALUE).
 *
 * @return the name of the removed thread, or empty when none could be removed
 */
public Optional<String> removeStreamThread() {
    return removeStreamThread(Long.MAX_VALUE);
}
@Test
public void shouldNotRemoveThreadWhenNotRunning() {
    prepareStreams();
    prepareStreamThread(streamThreadOne, 1);
    props.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, 1);
    try (final KafkaStreams streams = new KafkaStreams(getBuilderWithSource().build(), props, supplier, time)) {
        // The instance was never started, so removal must be a no-op returning
        // empty, and the single configured thread must remain registered.
        assertThat(streams.removeStreamThread(), equalTo(Optional.empty()));
        assertThat(streams.threads.size(), equalTo(1));
    }
}
// Number of queues backing this conveyor.
public final int queueCount() {
    return queues.length;
}
@Test
public void queueCount() {
    // The conveyor must report exactly the number of queues it was built with.
    assertEquals(queueCount, conveyor.queueCount());
}
/**
 * Returns the paths between two devices in the given topology snapshot,
 * delegating computation to the default topology implementation.
 *
 * @throws NullPointerException if src or dst is null
 */
@Override
public Set<Path> getPaths(Topology topology, DeviceId src, DeviceId dst) {
    checkNotNull(src, DEVICE_ID_NULL);
    checkNotNull(dst, DEVICE_ID_NULL);
    return defaultTopology(topology).getPaths(src, dst);
}
@Test
public void testGetPaths() {
    VirtualNetwork virtualNetwork = setupVirtualNetworkTopology();
    TopologyService topologyService = manager.get(virtualNetwork.id(), TopologyService.class);
    Topology topology = topologyService.currentTopology();
    VirtualDevice srcVirtualDevice = getVirtualDevice(virtualNetwork.id(), DID1);
    VirtualDevice dstVirtualDevice = getVirtualDevice(virtualNetwork.id(), DID2);

    // test the getPaths() method.
    Set<Path> paths = topologyService.getPaths(topology, srcVirtualDevice.id(), dstVirtualDevice.id());
    assertNotNull("The paths should not be null.", paths);
    assertEquals("The paths size did not match.", 1, paths.size());

    // test the getPaths() by weight method.
    LinkWeigher weight = new LinkWeigherAdapter(1.0);
    Set<Path> paths1 = topologyService.getPaths(topology, srcVirtualDevice.id(), dstVirtualDevice.id(), weight);
    assertNotNull("The paths should not be null.", paths1);
    assertEquals("The paths size did not match.", 1, paths1.size());
    // With a unit link weigher, the single-link path must cost exactly 1.0.
    Path path = paths1.iterator().next();
    assertEquals("wrong path length", 1, path.links().size());
    assertEquals("wrong path cost", ScalarWeight.toWeight(1.0), path.weight());
}
/**
 * Parses a string into a {@code java.sql.Time} using a cached
 * DateTimeFormatter for the given pattern. Patterns containing any
 * date-based field are rejected — TIME values carry no date component.
 * Fix: the null guard used the non-short-circuit bitwise {@code |};
 * replaced with the idiomatic {@code ||} (same result here, but it avoids
 * needless evaluation and reads as intended).
 *
 * @return the parsed time, or null when either argument is null
 */
@Udf(description = "Converts a string representation of a time in the given format"
    + " into the TIME value.")
public Time parseTime(
    @UdfParameter(
        description = "The string representation of a time.") final String formattedTime,
    @UdfParameter(
        description = "The format pattern should be in the format expected by"
            + " java.time.format.DateTimeFormatter.") final String formatPattern) {
  if (formattedTime == null || formatPattern == null) {
    return null;
  }
  try {
    final TemporalAccessor ta = formatters.get(formatPattern).parse(formattedTime);
    // Reject any pattern that resolved a date-based field.
    final Optional<ChronoField> dateField = Arrays.stream(ChronoField.values())
        .filter(ChronoField::isDateBased)
        .filter(ta::isSupported)
        .findFirst();
    if (dateField.isPresent()) {
      throw new KsqlFunctionException("Time format contains date field.");
    }
    // java.sql.Time expects milliseconds-of-day.
    return new Time(TimeUnit.NANOSECONDS.toMillis(LocalTime.from(ta).toNanoOfDay()));
  } catch (ExecutionException | RuntimeException e) {
    throw new KsqlFunctionException("Failed to parse time '" + formattedTime
        + "' with formatter '" + formatPattern
        + "': " + e.getMessage(), e);
  }
}
// NOTE(review): the name says "Date" but this exercises parseTime; consider
// renaming to shouldConvertStringToTime in a follow-up.
@Test
public void shouldConvertStringToDate() {
    // When: "000105" parsed with HHmmss is 00:01:05.
    final Time result = udf.parseTime("000105", "HHmmss");

    // Then: 65 seconds past midnight, expressed in milliseconds.
    assertThat(result.getTime(), is(65000L));
}
public static String getBaseUrl() { try { var requestAttrs = (ServletRequestAttributes) RequestContextHolder.currentRequestAttributes(); return getBaseUrl(requestAttrs.getRequest()); } catch (IllegalStateException e) { // method is called outside of web request context return ""; } }
@Test
public void testWithXForwarded() throws Exception {
    // basic request
    doReturn("http").when(request).getScheme();
    doReturn("localhost").when(request).getServerName();
    doReturn(8080).when(request).getServerPort();
    doReturn("/").when(request).getContextPath();
    // XForwarded content — proxy headers must override scheme, host and prefix.
    doReturn("https").when(request).getHeader("X-Forwarded-Proto");
    var items = new ArrayList<String>();
    items.add("open-vsx.org");
    doReturn(Collections.enumeration(items)).when(request).getHeaders("X-Forwarded-Host");
    doReturn("/openvsx").when(request).getHeader("X-Forwarded-Prefix");
    assertThat(UrlUtil.getBaseUrl(request)).isEqualTo("https://open-vsx.org/openvsx/");
}
/**
 * Reports whether the SNMP device associated with the id is reachable.
 * Unknown ids are logged and reported as unreachable.
 * Fix: switched to parameterized SLF4J logging — the original concatenated
 * the message eagerly even when the log level was disabled.
 */
@Override
public boolean isReachable(DeviceId deviceId) {
    SnmpDevice snmpDevice = controller.getDevice(deviceId);
    if (snmpDevice == null) {
        log.warn("BAD REQUEST: the requested device id: {} is not associated to any SNMP Device", deviceId);
        return false;
    }
    return snmpDevice.isReachable();
}
@Test
public void addDevice() {
    AbstractProjectableModel.setDriverService(null, new MockDriverService());
    // FIXME this needs sleep — device registration is asynchronous, so poll
    // within the DELAY/TEST_DURATION window until the controller knows it.
    assertAfter(DELAY, TEST_DURATION, () -> assertNotNull("Device should be added to controller", controller.getDevice(deviceId)));
    assertTrue("Device should be reachable", provider.isReachable(deviceId));
}
/**
 * Decides whether this blocking read must wait for more items.
 * May be invoked repeatedly by the wait/notify machinery; items read so far
 * accumulate in {@code resultSet} across invocations.
 *
 * @return true if the operation should park until the ring buffer advances
 */
@Override
public boolean shouldWait() {
    RingbufferContainer ringbuffer = getRingBufferContainerOrNull();
    // Lazily create the result set on the first invocation.
    if (resultSet == null) {
        resultSet = new ReadResultSetImpl<>(minSize, maxSize, getNodeEngine().getSerializationService(), filter);
        sequence = startSequence;
    }
    // No ring buffer yet: wait only if a minimum number of items was demanded.
    if (ringbuffer == null) {
        return minSize > 0;
    }
    sequence = ringbuffer.clampReadSequenceToBounds(sequence);
    if (minSize == 0) {
        // Non-blocking read: drain whatever is currently available and finish.
        if (sequence < ringbuffer.tailSequence() + 1) {
            readMany(ringbuffer);
        }
        return false;
    }
    if (resultSet.isMinSizeReached()) {
        // enough items have been read, we are done.
        return false;
    }
    if (sequence == ringbuffer.tailSequence() + 1) {
        // the sequence is not readable
        return true;
    }
    readMany(ringbuffer);
    return !resultSet.isMinSizeReached();
}
@Test
public void whenOneAfterTailAndBufferEmpty() {
    ReadManyOperation op = getReadManyOperation(ringbuffer.tailSequence() + 1, 1, 1, null);

    // The buffer is empty and minSize is 1, so the operation must wait
    // (the previous comment claiming an item was available was stale).
    boolean shouldWait = op.shouldWait();
    assertTrue(shouldWait);

    // Nothing was read yet, so the result set is empty and points at sequence 0.
    ReadResultSetImpl response = getReadResultSet(op);
    assertEquals(0, response.readCount());
    assertEquals(0, response.getNextSequenceToReadFrom());
    assertEquals(0, response.size());
}
/**
 * Creates a {@code ProtobufSchema} for the given generated protobuf class,
 * delegating to the two-argument overload with no extra properties.
 *
 * @param pojo the generated protobuf message class
 * @return the schema for that class
 */
public static <T extends com.google.protobuf.GeneratedMessageV3> ProtobufSchema<T> of(Class<T> pojo) {
    return of(pojo, new HashMap<>());
}
@Test
public void testSchema() {
    ProtobufSchema<org.apache.pulsar.client.schema.proto.Test.TestMessage> protobufSchema = ProtobufSchema.of(org.apache.pulsar.client.schema.proto.Test.TestMessage.class);
    Assert.assertEquals(protobufSchema.getSchemaInfo().getType(), SchemaType.PROTOBUF);
    // The schema payload is a JSON definition; parse it with the Avro parser
    // and compare the canonical string form against the expected constant.
    String schemaJson = new String(protobufSchema.getSchemaInfo().getSchema());
    Schema.Parser parser = new Schema.Parser();
    Schema schema = parser.parse(schemaJson);
    Assert.assertEquals(schema.toString(), EXPECTED_SCHEMA_JSON);
}
/** Delegates the operate cycle to the wrapped implementation. */
@Override
public void operate() {
    // Drain and create new autorelease pool
    impl.operate();
}
@Test
public void testOperate() {
    // A batcher of size one must complete a single operate() cycle cleanly.
    final AutoreleaseActionOperationBatcher batcher = new AutoreleaseActionOperationBatcher(1);
    batcher.operate();
}
/**
 * Handles a recording signal for this replication session.
 * Signals whose correlation id does not match this session are ignored.
 *
 * @param correlationId correlation id of the signal
 * @param recordingId   id of the recording the signal refers to
 * @param position      recording position reported with the signal, or NULL_POSITION
 * @param signal        the signal type
 * @throws ClusterException if the recording is deleted during replication
 */
void onSignal(final long correlationId, final long recordingId, final long position, final RecordingSignal signal) {
    if (correlationId == replicationId) {
        if (RecordingSignal.EXTEND == signal) {
            // Destination recording extended: locate its position counter so
            // replication progress can be tracked.
            final CountersReader counters = archive.context().aeron().countersReader();
            recordingPositionCounterId = RecordingPos.findCounterIdByRecording(counters, recordingId, archive.archiveId());
        } else if (RecordingSignal.SYNC == signal) {
            hasSynced = true;
        } else if (RecordingSignal.REPLICATE_END == signal) {
            hasReplicationEnded = true;
        } else if (RecordingSignal.STOP == signal) {
            // The position field is updated by the common code below, so the
            // duplicated assignment that used to live here was removed.
            hasStopped = true;
        } else if (RecordingSignal.DELETE == signal) {
            // Unrecoverable: fail fast before recording any state from this signal.
            throw new ClusterException("recording was deleted during replication: " + this);
        }

        this.lastRecordingSignal = signal;

        if (NULL_VALUE != recordingId) {
            this.recordingId = recordingId;
        }

        if (NULL_POSITION != position) {
            this.position = position;
        }
    }
}
@Test
void shouldFailIfRecordingLogIsDeletedDuringReplication() {
    // A DELETE signal arriving mid-replication must abort with a ClusterException.
    final RecordingSignal recordingSignal = RecordingSignal.DELETE;
    final long stopPosition = 982734;
    final long nowNs = 0;
    final ReplicationParams replicationParams = new ReplicationParams()
        .dstRecordingId(DST_RECORDING_ID)
        .stopPosition(stopPosition)
        .replicationChannel(REPLICATION_CHANNEL)
        .replicationSessionId(Aeron.NULL_VALUE);

    final RecordingReplication logReplication = new RecordingReplication(
        aeronArchive,
        SRC_RECORDING_ID,
        ENDPOINT,
        SRC_STREAM_ID,
        replicationParams,
        PROGRESS_CHECK_TIMEOUT_NS,
        PROGRESS_CHECK_INTERVAL_NS,
        nowNs);

    // Position below stopPosition shows the failure is signal-driven, not positional.
    assertThrows(
        ClusterException.class,
        () -> logReplication.onSignal(REPLICATION_ID, DST_RECORDING_ID, stopPosition - 1, recordingSignal));
}
/**
 * Returns the first part of this composite config that is locally defined,
 * or null when no part is local.
 */
@Override
public EnvironmentConfig getLocal() {
    for (EnvironmentConfig candidate : this) {
        if (candidate.isLocal()) {
            return candidate;
        }
    }
    return null;
}
@Test
void shouldGetLocalPartWhenOriginFile() {
    // getLocal() must pick the file-origin (local) part out of the composite config.
    assertThat(environmentConfig.getLocal()).isEqualTo(uatLocalPart2);
}
/**
 * Reads the next page of at most {@code maxCount} items.
 *
 * Calling again with the previous cursor id returns the stored last page
 * without advancing (presumably to allow a retry after a lost response —
 * verify against callers); calling with the current cursor id advances to a
 * fresh page; any other id is rejected.
 *
 * @param cursorId the current or the previous cursor id
 * @param maxCount maximum number of items in the returned page
 * @return the page, the new cursor id, and a cursor id that may now be forgotten (nullable)
 * @throws IllegalStateException when cursorId is neither current nor previous
 */
public IterationResult<T> iterate(@Nonnull UUID cursorId, int maxCount) {
    requireNonNull(cursorId);
    if (cursorId.equals(this.prevCursorId)) {
        access();
        // no progress, no need to forget a cursor id, so null
        return new IterationResult<>(this.page, this.cursorId, null);
    } else if (!cursorId.equals(this.cursorId)) {
        throw new IllegalStateException("The cursor id " + cursorId + " is not the current cursor id nor the previous cursor id.");
    }
    // Fill the next page from the backing iterator.
    List<T> currentPage = new ArrayList<>(maxCount);
    while (currentPage.size() < maxCount && iterator.hasNext()) {
        currentPage.add(iterator.next());
    }
    // Rotate cursor ids: the old previous id is now safe to forget.
    UUID cursorIdToForget = this.prevCursorId;
    this.prevCursorId = this.cursorId;
    this.cursorId = UuidUtil.newUnsecureUUID();
    this.page = currentPage;
    access();
    return new IterationResult<>(this.page, this.cursorId, cursorIdToForget);
}
@Test
public void testCursorIdOtherThan_PreviousOrCurrent_Throws() {
    // Advance twice so the initial cursor id is neither current nor previous.
    UUID cursorId = iterator.iterate(initialCursorId, 100).getCursorId();
    iterator.iterate(cursorId, 100); // prevCursorId == cursorId, cursorId == new id
    // Using the stale initial id must now be rejected.
    assertThatThrownBy(() -> iterator.iterate(initialCursorId, 100)).isInstanceOf(IllegalStateException.class)
        .hasMessageContaining("The cursor id " + initialCursorId + " is not the current cursor id nor the previous cursor id.");
}
@Override public void clear() { root = null; size = 0; modCount++; // Clear iteration order Node<K, V> header = this.header; header.next = header.prev = header; }
@Test
public void testClear() {
    LinkedTreeMap<String, String> map = new LinkedTreeMap<>();
    map.put("a", "android");
    map.put("c", "cola");
    map.put("b", "bbq");
    map.clear();
    // Both the key view and the map itself must be empty after clear().
    assertThat(map.keySet()).isEmpty();
    assertThat(map).isEmpty();
}
/**
 * OSGi lifecycle hook: registers this provider with the platform services,
 * wires its listeners, and starts the background polling/reconnection tasks.
 */
@Activate
public void activate(ComponentContext context) {
    active = true;
    // Register with the platform services.
    componentConfigService.registerProperties(getClass());
    providerService = providerRegistry.register(this);
    coreService.registerApplication(APP_NAME);
    cfgService.registerConfigFactory(factory);
    // Wire listeners for config, controller and device events.
    cfgService.addListener(cfgListener);
    controller.addDeviceListener(innerNodeListener);
    deviceService.addListener(deviceListener);
    // Start the periodic polling and device-reconnection tasks.
    scheduledTask = schedulePolling();
    scheduledReconnectionTask = scheduleConnectDevices();
    // Apply the initial component configuration.
    modified(context);
    log.info("Started");
}
@Test
public void activate() throws Exception {
    // After activation the provider must be registered and its services wired.
    assertTrue("Provider should be registered", deviceRegistry.getProviders().contains(provider.id()));
    assertEquals("Incorrect device service", deviceService, provider.deviceService);
    assertEquals("Incorrect provider service", providerService, provider.providerService);
    assertTrue("Incorrect config factories", cfgFactories.contains(provider.factory));
    assertNotNull("Device listener should be added", deviceService.listener);
    // Background executor and scheduled task started by activate() must be live.
    assertFalse("Thread to connect device should be running", provider.connectionExecutor.isShutdown() || provider.connectionExecutor.isTerminated());
    assertFalse("Scheduled task to update device should be running", provider.scheduledTask.isCancelled());
}
/**
 * Returns the first non-null tracking URI produced by the given plugins.
 *
 * @param id the application whose tracking URI is requested
 * @param trackingUriPlugins plugins queried in list order
 * @return the first non-null URI a plugin returns, or null when none does
 * @throws URISyntaxException if a plugin fails to build a valid URI
 */
public static URI getUriFromTrackingPlugins(ApplicationId id, List<TrackingUriPlugin> trackingUriPlugins)
    throws URISyntaxException {
    // Early return instead of a mutable null-initialized accumulator.
    for (TrackingUriPlugin plugin : trackingUriPlugins) {
        URI trackingUri = plugin.getTrackingUri(id);
        if (trackingUri != null) {
            return trackingUri;
        }
    }
    return null;
}
@Test
void testGetProxyUriFromPluginsReturnsValidUriWhenAble() throws URISyntaxException {
    ApplicationId id = BuilderUtils.newApplicationId(6384623l, 5);
    List<TrackingUriPlugin> list = Lists.newArrayListWithExpectedSize(2);
    // Insert a plugin that returns null.
    list.add(new TrackingUriPlugin() {
        public URI getTrackingUri(ApplicationId id) throws URISyntaxException {
            return null;
        }
    });
    // Insert a plugin that returns a valid URI.
    list.add(new TrackingUriPlugin() {
        public URI getTrackingUri(ApplicationId id) throws URISyntaxException {
            return new URI("http://history.server.net/");
        }
    });
    // The null result is skipped and the second plugin's URI is returned.
    URI result = ProxyUriUtils.getUriFromTrackingPlugins(id, list);
    assertNotNull(result);
}
/**
 * Stores the given ip address in the static {@code XID.ipAddress} field.
 *
 * @param ipAddress the ip address to store
 */
public static void setIpAddress(String ipAddress) {
    XID.ipAddress = ipAddress;
}
@Test
public void testSetIpAddress() {
    // The setter must store exactly the value later returned by the getter.
    XID.setIpAddress("127.0.0.1");
    assertThat(XID.getIpAddress()).isEqualTo("127.0.0.1");
}
/**
 * Checks that the given array reference is non-null and holds at least one element.
 *
 * @param array the array to check (may be null)
 * @return true when the array is non-null and non-empty
 */
public static <T> boolean isNotEmpty(T[] array) {
    if (array == null) {
        return false;
    }
    return array.length > 0;
}
@Test
public void isNotEmptyTest() {
    int[] a = {1, 2};
    assertTrue(ArrayUtil.isNotEmpty(a));
    String[] b = {"a", "b", "c"};
    assertTrue(ArrayUtil.isNotEmpty(b));
    // NOTE(review): 'c' is declared as Object, so this call resolves to an
    // Object-typed overload of isNotEmpty rather than the generic array
    // overload — presumably that overload inspects the runtime type; verify
    // against ArrayUtil.
    Object c = new Object[]{"1", "2", 3, 4D};
    assertTrue(ArrayUtil.isNotEmpty(c));
}
/**
 * Converts the given hosts to a list of HostAddress values, preserving the
 * collection's iteration order.
 */
public List<HostAddress> toHostAddressList(Collection<Host> hosts) {
    // Pre-size the result to avoid intermediate growth.
    final List<HostAddress> addresses = new ArrayList<>(hosts.size());
    hosts.forEach(host -> addresses.add(toHostAddress(host)));
    return addresses;
}
@Test
public void testToHostAddressList() throws Exception {
    // One IPv6 host and one IPv4 host.
    Set<Host> hosts = ImmutableSet.of(
        new TestHost(
            new InetSocketAddress(
                InetAddress.getByAddress(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 }),
                3000)),
        new TestHost(new InetSocketAddress(InetAddress.getByAddress(new byte[] {1, 2, 3, 4}), 3000)));
    HostAddressFactory hostAddressFactory = new HostAddressFactory();
    List<HostAddress> list = hostAddressFactory.toHostAddressList(hosts);
    // Expect a bracketed IPv6 literal and a dotted-quad IPv4, without ports.
    assertEquals(list.toString(), "[[102:304:506:708:90a:b0c:d0e:f10], 1.2.3.4]");
}
/**
 * Evaluates the factory's models against the input data and wraps the
 * result together with the originating model identifier.
 */
static EfestoOutputPMML getEfestoOutput(KiePMMLModelFactory kiePMMLModelFactory, EfestoInputPMML darInputPMML) {
    final List<KiePMMLModel> models = kiePMMLModelFactory.getKiePMMLModels();
    final PMML4Result pmml4Result = evaluate(models, darInputPMML.getInputData());
    return new EfestoOutputPMML(darInputPMML.getModelLocalUriId(), pmml4Result);
}
@Test
void getEfestoOutput() {
    // Load the model factory for the known test model, build the input, evaluate.
    modelLocalUriId = getModelLocalUriIdFromPmmlIdFactory(FILE_NAME, MODEL_NAME);
    PMMLRuntimeContext pmmlContext = getPMMLContext(FILE_NAME, MODEL_NAME, memoryCompilerClassLoader);
    KiePMMLModelFactory kiePmmlModelFactory = PMMLLoaderUtils.loadKiePMMLModelFactory(modelLocalUriId, pmmlContext);
    EfestoInputPMML efestoInputPMML = new EfestoInputPMML(modelLocalUriId, pmmlContext);
    EfestoOutputPMML retrieved = PMMLRuntimeHelper.getEfestoOutput(kiePmmlModelFactory, efestoInputPMML);
    assertThat(retrieved).isNotNull();
    // Shared helper validates the output against the input it was built from.
    commonEvaluateEfestoOutputPMML(retrieved, efestoInputPMML);
}
/**
 * Wraps the given iterator together with additional closeables in a
 * {@code CloseablesIteratorWrapper} (see that class for the closing contract).
 *
 * @param iterator the iterator to wrap
 * @param otherCloseables additional resources associated with the iterator
 * @return the wrapping iterator
 */
public static <T> CloseableIterator<T> wrap(CloseableIterator<T> iterator, AutoCloseable... otherCloseables) {
    return new CloseablesIteratorWrapper<>(iterator, otherCloseables);
}
@Test(expected = IllegalArgumentException.class)
public void wrap_fails_if_iterator_declared_in_other_closeables() {
    // Passing the iterator itself among the extra closeables must be rejected.
    CloseableIterator iterator = new SimpleCloseableIterator();
    CloseableIterator.wrap(iterator, iterator);
}
public void handleRequestBody(String requestBody, String userUuid, ProjectDto projectDto) { // parse anticipated transitions from request body List<AnticipatedTransition> anticipatedTransitions = anticipatedTransitionParser.parse(requestBody, userUuid, projectDto.getKey()); try (DbSession dbSession = dbClient.openSession(true)) { // delete previous anticipated transitions for the user and project deletePreviousAnticipatedTransitionsForUserAndProject(dbSession, userUuid, projectDto.getUuid()); // insert new anticipated transitions insertAnticipatedTransitions(dbSession, anticipatedTransitions, projectDto.getUuid()); dbSession.commit(); } }
@Test public void fivenRequestBodyWithTransitions_whenHandleRequestBody_thenTransitionsAreInserted() { // given ProjectDto projectDto = new ProjectDto() .setKey(PROJECT_KEY); String requestBody = "body_with_transitions"; doReturn(List.of(populateAnticipatedTransition(), populateAnticipatedTransition())) .when(anticipatedTransitionParser).parse(requestBody, USER_UUID, PROJECT_KEY); DbSession dbSession = mockDbSession(); // when underTest.handleRequestBody(requestBody, USER_UUID, projectDto); // then verify(dbClient).openSession(true); verify(anticipatedTransitionDao).deleteByProjectAndUser(dbSession, projectDto.getUuid(), USER_UUID); verify(anticipatedTransitionDao, times(2)).insert(eq(dbSession), any()); }
/**
 * Slices the source image into a rows x cols grid of tiles written to the
 * destination directory, delegating to the overload with an explicit image
 * type (JPEG by default).
 *
 * @param srcImageFile the image to slice
 * @param destDir directory receiving the tiles
 * @param rows number of tile rows
 * @param cols number of tile columns
 */
public static void sliceByRowsAndCols(File srcImageFile, File destDir, int rows, int cols) {
    sliceByRowsAndCols(srcImageFile, destDir, IMAGE_TYPE_JPEG, rows, cols);
}
@Test
@Disabled
public void sliceByRowsAndColsTest2() {
    // Manual test: slices a local PNG into a 1x5 grid. Disabled because it
    // depends on files present only on a developer machine.
    ImgUtil.sliceByRowsAndCols(
        FileUtil.file("d:/test/hutool.png"),
        FileUtil.file("d:/test/dest"),
        ImgUtil.IMAGE_TYPE_PNG,
        1, 5);
}
/**
 * Equality based on all selection fields. Qualifiers and resultType are
 * compared through the private {@code equals(...)} helper (presumably a
 * TypeMirror-aware comparison — see its definition), the remaining fields
 * through {@link Objects#equals}.
 */
@Override
public boolean equals(Object obj) {
    if ( this == obj ) {
        return true;
    }
    if ( obj == null ) {
        return false;
    }
    // Exact class match (not instanceof), so instances of subclasses are never equal.
    if ( getClass() != obj.getClass() ) {
        return false;
    }
    final SelectionParameters other = (SelectionParameters) obj;
    if ( !equals( this.qualifiers, other.qualifiers ) ) {
        return false;
    }
    if ( !Objects.equals( this.qualifyingNames, other.qualifyingNames ) ) {
        return false;
    }
    if ( !Objects.equals( this.conditionQualifiers, other.conditionQualifiers ) ) {
        return false;
    }
    if ( !Objects.equals( this.conditionQualifyingNames, other.conditionQualifyingNames ) ) {
        return false;
    }
    if ( !Objects.equals( this.sourceRHS, other.sourceRHS ) ) {
        return false;
    }
    return equals( this.resultType, other.resultType );
}
@Test
public void testEqualsResultTypeOneNull() {
    // Two parameter sets identical except that one has a null resultType must
    // be unequal in both comparison directions (symmetry of equals).
    List<String> qualifyingNames = Arrays.asList( "language", "german" );
    TypeMirror resultType = new TestTypeMirror( "resultType" );
    List<TypeMirror> qualifiers = new ArrayList<>();
    qualifiers.add( new TestTypeMirror( "org.mapstruct.test.SomeType" ) );
    qualifiers.add( new TestTypeMirror( "org.mapstruct.test.SomeOtherType" ) );
    SelectionParameters params = new SelectionParameters( qualifiers, qualifyingNames, resultType, typeUtils );

    List<String> qualifyingNames2 = Arrays.asList( "language", "german" );
    List<TypeMirror> qualifiers2 = new ArrayList<>();
    qualifiers2.add( new TestTypeMirror( "org.mapstruct.test.SomeType" ) );
    qualifiers2.add( new TestTypeMirror( "org.mapstruct.test.SomeOtherType" ) );
    SelectionParameters params2 = new SelectionParameters( qualifiers2, qualifyingNames2, null, typeUtils );

    assertThat( params.equals( params2 ) ).as( "Second null resultType" ).isFalse();
    assertThat( params2.equals( params ) ).as( "First null resultType" ).isFalse();
}