focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Resolves an {@code Archive} by matching the given file name against the
 * known archives' extensions (case-insensitive suffix match).
 *
 * @param name file name to inspect; may be blank or {@code null}
 * @return the first archive whose extension the name ends with, or
 *         {@code null} when the name is blank or nothing matches
 *         (a fatal message is logged in that case)
 */
public static Archive forName(final String name) {
    if(StringUtils.isNotBlank(name)) {
        // Hoisted loop invariant: the original lowercased the name once per extension.
        final String lowercase = name.toLowerCase(Locale.ROOT);
        for(Archive archive : getKnownArchives()) {
            for(String extension : archive.getExtensions()) {
                if(lowercase.endsWith(extension.toLowerCase(Locale.ROOT))) {
                    return archive;
                }
            }
        }
    }
    log.fatal(String.format("Unknown archive %s", name));
    return null;
}
// Verifies extension-suffix lookup: "tar", "tar.gz" and "zip" each map to their archive type.
@Test public void testForName() { assertEquals(Archive.TAR, Archive.forName("tar")); assertEquals(Archive.TARGZ, Archive.forName("tar.gz")); assertEquals(Archive.ZIP, Archive.forName("zip")); }
// UDF returning the number of whole days since the Unix epoch (1970-01-01) in UTC,
// truncated to int. NOTE(review): LocalDate.now() uses the system default zone, not
// UTC as the description claims — confirm whether that difference matters to callers.
@Udf(description = "Returns the current number of days for the system since " + "1970-01-01 00:00:00 UTC/GMT.") public int unixDate() { return ((int) LocalDate.now().toEpochDay()); }
// Verifies the UDF result matches an independently computed epoch-day count.
// Could flake if the date rolls over between the two now() calls (midnight boundary).
@Test public void shouldGetTheUnixDate() { // Given: final int now = ((int) LocalDate.now().toEpochDay()); // When: final int result = udf.unixDate(); // Then: assertEquals(now, result); }
// Decodes one stream message into a GenericRow, enriching it with the message key
// (UTF-8) plus prefixed header and record-metadata entries. Returns a result carrying
// either the row or the decode exception; never throws. Note: _reuse is a shared
// scratch row, so this method is not safe for concurrent callers — confirm single-
// threaded use at the call site.
@Override public StreamDataDecoderResult decode(StreamMessage message) { assert message.getValue() != null; try { _reuse.clear(); GenericRow row = _valueDecoder.decode(message.getValue(), 0, message.getLength(), _reuse); if (row != null) { if (message.getKey() != null) { row.putValue(KEY, new String(message.getKey(), StandardCharsets.UTF_8)); } StreamMessageMetadata metadata = message.getMetadata(); if (metadata != null) { if (metadata.getHeaders() != null) { metadata.getHeaders().getFieldToValueMap() .forEach((key, value) -> row.putValue(HEADER_KEY_PREFIX + key, value)); } if (metadata.getRecordMetadata() != null) { metadata.getRecordMetadata().forEach((key, value) -> row.putValue(METADATA_KEY_PREFIX + key, value)); } } return new StreamDataDecoderResult(row, null); } else { return new StreamDataDecoderResult(null, new RuntimeException("Encountered unknown exception when decoding a Stream message")); } } catch (Exception e) { LOGGER.error("Failed to decode StreamMessage", e); return new StreamDataDecoderResult(null, e); } }
// Verifies that a decoder which throws is converted into a result carrying the
// exception (null row) instead of propagating out of decode().
@Test public void testNoExceptionIsThrown() throws Exception { ThrowingDecoder messageDecoder = new ThrowingDecoder(); messageDecoder.init(Collections.emptyMap(), ImmutableSet.of(NAME_FIELD), ""); String value = "Alice"; BytesStreamMessage message = new BytesStreamMessage(value.getBytes(StandardCharsets.UTF_8)); StreamDataDecoderResult result = new StreamDataDecoderImpl(messageDecoder).decode(message); Assert.assertNotNull(result); Assert.assertNotNull(result.getException()); Assert.assertNull(result.getResult()); }
// JDBC DatabaseMetaData capability flag: this driver does not restrict
// transactions to data-manipulation statements only.
@Override public boolean supportsDataManipulationTransactionsOnly() { return false; }
// Pins the metadata capability flag to false.
@Test void assertSupportsDataManipulationTransactionsOnly() { assertFalse(metaData.supportsDataManipulationTransactionsOnly()); }
/**
 * Replaces the stored configuration for the given group, layering the supplied
 * properties over the default configuration.
 *
 * @param groupId        id of the group to update; must be non-null and non-empty
 * @param newGroupConfig property overrides for the group
 * @throws InvalidRequestException when the group id is null or empty
 */
public void updateGroupConfig(String groupId, Properties newGroupConfig) {
    if (groupId == null || groupId.isEmpty()) {
        throw new InvalidRequestException("Group name can't be empty.");
    }
    final GroupConfig updated = GroupConfig.fromProps(defaultConfig.originals(), newGroupConfig);
    configMap.put(groupId, updated);
}
// Verifies that an empty group id is rejected with InvalidRequestException.
@Test public void testUpdateConfigWithInvalidGroupId() { assertThrows(InvalidRequestException.class, () -> configManager.updateGroupConfig("", new Properties())); }
// Instruments a class's bytecode in two measured phases (analyze, then emit bytes),
// recording per-package stats. Returns the instrumented class bytes.
public byte[] instrument( ClassDetails classDetails, InstrumentationConfiguration config, ClassNodeProvider classNodeProvider) { PerfStatsCollector perfStats = PerfStatsCollector.getInstance(); MutableClass mutableClass = perfStats.measure( "analyze class", () -> analyzeClass(classDetails.getClassBytes(), config, classNodeProvider)); byte[] instrumentedBytes = perfStats.measure("instrument class", () -> instrumentToBytes(mutableClass)); recordPackageStats(perfStats, mutableClass); return instrumentedBytes; }
// Verifies that instrumenting a class with a native method produces a private,
// synthetic native binding method and wires in the ShadowedObject interface/field.
@Test public void instrumentNativeMethod_generatesNativeBindingMethod() { ClassNode classNode = createClassWithNativeMethod(); MutableClass clazz = new MutableClass( classNode, InstrumentationConfiguration.newBuilder().build(), classNodeProvider); instrumentor.instrument(clazz); String nativeMethodName = Shadow.directNativeMethodName("org.example.MyClass", "someFunction"); MethodNode methodNode = findMethodNode(classNode, nativeMethodName); assertThat(clazz.classNode.interfaces).contains(Type.getInternalName(ShadowedObject.class)); assertRoboDataField(clazz.getFields().get(0)); assertThat(methodNode.access & Opcodes.ACC_NATIVE).isNotEqualTo(0); assertThat(methodNode.access & Opcodes.ACC_PRIVATE).isNotEqualTo(0); assertThat(methodNode.access & Opcodes.ACC_SYNTHETIC).isNotEqualTo(0); }
/**
 * Converts the given boxed value according to this converter's configured
 * conversion mode. Unknown modes yield {@code null}; a value of the wrong
 * boxed type for the mode throws {@link ClassCastException}.
 */
public Object convert(Object obj) {
    switch (conversion) {
        case NO_CONVERSION:
            return obj;
        case DOUBLE_TO_FLOAT:
            return ((Double) obj).floatValue();
        case INT_TO_SHORT:
            return ((Integer) obj).shortValue();
        case INT_TO_BYTE:
            return ((Integer) obj).byteValue();
        case STRING_TO_CHAR:
            // Uses only the first character of the sequence.
            return ((CharSequence) obj).charAt(0);
        case NUM_TO_LONG:
            return Long.parseLong(obj.toString());
        default:
            return null;
    }
}
// Verifies INT_TO_SHORT and INT_TO_BYTE produce correctly valued Short/Byte boxes.
@Test public void testIntConversion() { TypeConverter converter = new TypeConverter(TypeConverter.INT_TO_SHORT); assertEquals((short) 100, converter.convert(100)); assertTrue(converter.convert(100) instanceof Short); converter = new TypeConverter(TypeConverter.INT_TO_BYTE); assertEquals((byte) 100, converter.convert(100)); assertTrue(converter.convert(102) instanceof Byte); }
/**
 * Validates that every role id in the collection exists and is enabled.
 * Throws ROLE_NOT_EXISTS for an unknown id and ROLE_IS_DISABLE for a disabled role.
 */
@Override
public void validateRoleList(Collection<Long> ids) {
    // Nothing to validate for a null/empty id collection.
    if (CollUtil.isEmpty(ids)) {
        return;
    }
    // Load the role records in one batch and index them by id.
    List<RoleDO> roles = roleMapper.selectBatchIds(ids);
    Map<Long, RoleDO> roleMap = convertMap(roles, RoleDO::getId);
    // Validate: every id must resolve to an enabled role.
    ids.forEach(id -> {
        RoleDO role = roleMap.get(id);
        if (role == null) {
            throw exception(ROLE_NOT_EXISTS);
        }
        if (!CommonStatusEnum.ENABLE.getStatus().equals(role.getStatus())) {
            throw exception(ROLE_IS_DISABLE, role.getName());
        }
    });
}
@Test
public void testValidateRoleList_notFound() {
    // Prepare parameters: a random id that does not exist in the store.
    List<Long> ids = singletonList(randomLongId());
    // Invoke and assert the expected service exception.
    assertServiceException(() -> roleService.validateRoleList(ids), ROLE_NOT_EXISTS);
}
// Thin public wrapper delegating the two-part-key lookup to the base implementation.
@Override public long get(long key1, long key2) { return super.get0(key1, key2); }
// Verifies clear() removes all entries: lookup returns NULL_ADDRESS and size is 0.
@Test public void testClear() { final long key1 = randomKey(); final long key2 = randomKey(); insert(key1, key2); hsa.clear(); assertEquals(NULL_ADDRESS, hsa.get(key1, key2)); assertEquals(0, hsa.size()); }
// Indexes one extension inside a write lock using a transaction: entries produced by
// doIndexRecord are committed atomically; any failure rolls the transaction back and
// rethrows. Throwable is caught deliberately so even Errors trigger the rollback.
@Override public <E extends Extension> void indexRecord(E extension) { writeLock.lock(); var transaction = new IndexerTransactionImpl(); try { transaction.begin(); doIndexRecord(extension).forEach(transaction::add); transaction.commit(); } catch (Throwable e) { transaction.rollback(); throw e; } finally { writeLock.unlock(); } }
// Verifies indexing a fake extension puts exactly one name entry into the name index.
@Test void indexRecord() { var nameIndex = getNameIndexSpec(); var indexContainer = new IndexEntryContainer(); var descriptor = new IndexDescriptor(nameIndex); descriptor.setReady(true); indexContainer.add(new IndexEntryImpl(descriptor)); var indexer = new DefaultIndexer(List.of(descriptor), indexContainer); indexer.indexRecord(createFakeExtension()); var iterator = indexer.allIndexesIterator(); assertThat(iterator.hasNext()).isTrue(); var indexEntry = iterator.next(); var entries = indexEntry.entries(); assertThat(entries).hasSize(1); assertThat(entries).contains(Map.entry("fake-extension", "fake-extension")); }
// Resolves the worker identity with the following precedence:
// 1) explicit UUID from configuration; 2) first non-comment line of the identity file;
// 3) a freshly generated UUID, which is then best-effort persisted to the identity
//    file (CREATE_NEW so an existing file is never overwritten) and the file is made
//    read-only. Persistence/permission failures are logged but do not fail startup;
//    other I/O errors while READING the file are fatal to avoid silently adopting a
//    new identity.
@Override public WorkerIdentity get() { // Look at configurations first if (mConf.isSetByUser(PropertyKey.WORKER_IDENTITY_UUID)) { String uuidStr = mConf.getString(PropertyKey.WORKER_IDENTITY_UUID); final WorkerIdentity workerIdentity = WorkerIdentity.ParserV1.INSTANCE.fromUUID(uuidStr); LOG.debug("Loaded worker identity from configuration: {}", workerIdentity); return workerIdentity; } // Try loading from the identity file String filePathStr = mConf.getString(PropertyKey.WORKER_IDENTITY_UUID_FILE_PATH); final Path idFile = Paths.get(filePathStr); try (BufferedReader reader = Files.newBufferedReader(idFile)) { List<String> nonCommentLines = reader.lines() .filter(line -> !line.startsWith("#")) .filter(line -> !line.trim().isEmpty()) .collect(Collectors.toList()); if (nonCommentLines.size() > 0) { if (nonCommentLines.size() > 1) { LOG.warn("Multiple worker identities configured in {}, only the first one will be used", idFile); } String uuidStr = nonCommentLines.get(0); final WorkerIdentity workerIdentity = WorkerIdentity.ParserV1.INSTANCE.fromUUID(uuidStr); LOG.debug("Loaded worker identity from file {}: {}", idFile, workerIdentity); return workerIdentity; } } catch (FileNotFoundException | NoSuchFileException ignored) { // if not existent, proceed to auto generate one LOG.debug("Worker identity file {} not found", idFile); } catch (IOException e) { // in case of other IO error, better stop worker from starting up than use a new identity throw new RuntimeException( String.format("Failed to read worker identity from identity file %s", idFile), e); } // No identity is supplied by the user // Assume this is the first time the worker starts up, and generate a new one LOG.debug("Auto generating new worker identity as no identity is supplied by the user"); UUID generatedId = mUUIDGenerator.get(); WorkerIdentity identity = WorkerIdentity.ParserV1.INSTANCE.fromUUID(generatedId); LOG.debug("Generated worker identity as {}", identity); try (BufferedWriter writer = 
Files.newBufferedWriter(idFile, StandardCharsets.UTF_8, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE)) { writer.write("# Worker identity automatically generated at "); writer.write(OffsetDateTime.now().format(DateTimeFormatter.RFC_1123_DATE_TIME)); writer.newLine(); writer.write(generatedId.toString()); writer.newLine(); } catch (Exception e) { LOG.warn("Failed to persist automatically generated worker identity ({}) to {}, " + "this worker will lose its identity after restart", identity, idFile, e); } try { // set the file to be read-only Set<PosixFilePermission> permSet = Files.getPosixFilePermissions(idFile); Set<PosixFilePermission> nonWritablePermSet = Sets.filter(permSet, perm -> perm != PosixFilePermission.OWNER_WRITE && perm != PosixFilePermission.GROUP_WRITE && perm != PosixFilePermission.OTHERS_WRITE); Files.setPosixFilePermissions(idFile, nonWritablePermSet); } catch (Exception e) { LOG.warn("Failed to set identity file to be read-only", e); } return identity; }
// Verifies that an auto-generated identity is persisted and the identity file is
// stripped of all write permissions (owner/group/others).
@Test public void autoGeneratedIdFileSetToReadOnly() throws Exception { AlluxioProperties props = new AlluxioProperties(); props.set(PropertyKey.WORKER_IDENTITY_UUID_FILE_PATH, mUuidFilePath); AlluxioConfiguration conf = new InstancedConfiguration(props); WorkerIdentityProvider provider = new WorkerIdentityProvider(conf, () -> mReferenceUuid); WorkerIdentity identity = provider.get(); assertEquals(mReferenceUuid, WorkerIdentity.ParserV1.INSTANCE.toUUID(identity)); assertTrue(Files.exists(mUuidFilePath)); Set<PosixFilePermission> filePerms = Files.getPosixFilePermissions(mUuidFilePath); Set<PosixFilePermission> writePerms = ImmutableSet.of(OWNER_WRITE, GROUP_WRITE, OTHERS_WRITE); assertTrue(Sets.intersection(filePerms, writePerms).isEmpty()); }
/**
 * Converts a String property value to the requested target type using the
 * registered converter for that type.
 *
 * @throws MissingFormatArgumentException when no converter is registered for
 *         the target class
 */
public <T> T convert(String property, Class<T> targetClass) {
    final AbstractPropertyConverter<?> registered = converterRegistry.get(targetClass);
    if (registered != null) {
        return (T) registered.convert(property);
    }
    throw new MissingFormatArgumentException("converter not found, can't convert from String to " + targetClass.getCanonicalName());
}
// Verifies the Boolean converter treats "true", "on", "yes" and "1" as true.
@Test void testConvertBooleanTrue() { assertTrue(compositeConverter.convert("true", Boolean.class)); assertTrue(compositeConverter.convert("on", Boolean.class)); assertTrue(compositeConverter.convert("yes", Boolean.class)); assertTrue(compositeConverter.convert("1", Boolean.class)); }
/**
 * Reconciles the set of watched peer clusters with the desired set: watches are
 * added for new peers and removed for peers no longer present. An empty desired
 * set tears down all peer watches.
 */
void addPeerClusterWatches(@Nonnull Set<String> newPeerClusters, @Nonnull FailoutConfig failoutConfig) {
    if (newPeerClusters.isEmpty()) {
        removePeerClusterWatches();
        return;
    }
    final Set<String> watchedPeers = _peerWatches.keySet();
    // Peers in the desired set but not yet watched.
    final Set<String> toAdd = new HashSet<>(newPeerClusters);
    toAdd.removeAll(watchedPeers);
    if (!toAdd.isEmpty()) {
        addClusterWatches(toAdd, failoutConfig);
    }
    // Watched peers absent from the desired set.
    final Set<String> toRemove = new HashSet<>(watchedPeers);
    toRemove.removeAll(newPeerClusters);
    if (!toRemove.isEmpty()) {
        removeClusterWatches(toRemove);
    }
}
// Verifies that adding two peer clusters registers listeners and warms up
// connections for both, without cancelling any pending requests.
@Test public void testAddPeerClusterWatches() { _manager.addPeerClusterWatches(new HashSet<>(Arrays.asList(PEER_CLUSTER_NAME1, PEER_CLUSTER_NAME2)), mock(FailoutConfig.class)); verify(_loadBalancerState).listenToCluster(eq(PEER_CLUSTER_NAME1), any()); verify(_loadBalancerState).listenToCluster(eq(PEER_CLUSTER_NAME2), any()); verify(_warmUpHandler, times(1)).warmUpConnections(eq(PEER_CLUSTER_NAME1), any()); verify(_warmUpHandler, times(1)).warmUpConnections(eq(PEER_CLUSTER_NAME2), any()); verify(_warmUpHandler, never()).cancelPendingRequests(any()); }
// Convenience overload: injects topic properties using a fresh default builder.
@Override public <T extends Statement> ConfiguredStatement<T> inject( final ConfiguredStatement<T> statement ) { return inject(statement, new TopicProperties.Builder()); }
// Verifies that a per-request override of the topic-name prefix takes precedence
// over the value configured in KsqlConfig ("prefix-" wins over "nope").
@Test public void shouldGenerateNameWithCorrectPrefixFromOverrides() { // Given: overrides.put(KsqlConfig.KSQL_OUTPUT_TOPIC_NAME_PREFIX_CONFIG, "prefix-"); givenStatement("CREATE STREAM x AS SELECT * FROM SOURCE;"); config = new KsqlConfig(ImmutableMap.of( KsqlConfig.KSQL_OUTPUT_TOPIC_NAME_PREFIX_CONFIG, "nope" )); // When: injector.inject(statement, builder); // Then: verify(builder).withName("prefix-X"); }
// Returns true when the whole string matches the compiled ADDRESS_PATTERN
// (host:port form, per the pattern's definition). NPE if address is null.
public static boolean isValidAddress(String address) { return ADDRESS_PATTERN.matcher(address).matches(); }
// Valid host:port accepted; missing port and out-of-range port rejected.
@Test void testValidAddress() { assertTrue(NetUtils.isValidAddress("10.20.130.230:20880")); assertFalse(NetUtils.isValidAddress("10.20.130.230")); assertFalse(NetUtils.isValidAddress("10.20.130.230:666666")); }
// Accessor for the configured hosts-file resolver; null when none was set.
@Nullable public HostsFileEntriesResolver hostsFileEntriesResolver() { return hostsFileEntriesResolver; }
// Default is null; once a resolver is supplied to the builder, the built
// instance exposes it.
@Test void hostsFileEntriesResolver() { assertThat(builder.build().hostsFileEntriesResolver()).isNull(); builder.hostsFileEntriesResolver((inetHost, resolvedAddressTypes) -> null); assertThat(builder.build().hostsFileEntriesResolver()).isNotNull(); }
// Materializes a new ConfigCenterConfig from the builder's accumulated fields.
// Each call produces a fresh instance; base fields are copied by super.build().
@Override public ConfigCenterConfig build() { ConfigCenterConfig configCenter = new ConfigCenterConfig(); super.build(configCenter); configCenter.setProtocol(protocol); configCenter.setAddress(address); configCenter.setCluster(cluster); configCenter.setNamespace(namespace); configCenter.setGroup(group); configCenter.setUsername(username); configCenter.setPassword(password); configCenter.setTimeout(timeout); configCenter.setHighestPriority(highestPriority); configCenter.setCheck(check); configCenter.setConfigFile(configFile); configCenter.setAppConfigFile(appConfigFile); configCenter.setParameters(parameters); return configCenter; }
// Verifies every builder setter is copied into the built config and that two
// build() calls return distinct instances.
@Test void build() { ConfigCenterBuilder builder = ConfigCenterBuilder.newBuilder(); builder.check(true) .protocol("protocol") .address("address") .appConfigFile("appConfigFile") .cluster("cluster") .configFile("configFile") .group("group") .highestPriority(false) .namespace("namespace") .password("password") .timeout(1000L) .username("usernama") .appendParameter("default.num", "one") .id("id"); ConfigCenterConfig config = builder.build(); ConfigCenterConfig config2 = builder.build(); Assertions.assertTrue(config.isCheck()); Assertions.assertFalse(config.isHighestPriority()); Assertions.assertEquals(1000L, config.getTimeout()); Assertions.assertEquals("protocol", config.getProtocol()); Assertions.assertEquals("address", config.getAddress()); Assertions.assertEquals("appConfigFile", config.getAppConfigFile()); Assertions.assertEquals("cluster", config.getCluster()); Assertions.assertEquals("configFile", config.getConfigFile()); Assertions.assertEquals("group", config.getGroup()); Assertions.assertEquals("namespace", config.getNamespace()); Assertions.assertEquals("password", config.getPassword()); Assertions.assertEquals("usernama", config.getUsername()); Assertions.assertTrue(config.getParameters().containsKey("default.num")); Assertions.assertEquals("one", config.getParameters().get("default.num")); Assertions.assertEquals("id", config.getId()); Assertions.assertNotSame(config, config2); }
// Boxed-Object bridge to the primitive contains(int) overload.
// NOTE(review): the (int) cast unboxes via Integer, so a null argument throws NPE
// and a non-Integer argument throws ClassCastException (permitted but optional per
// the Collection.contains contract) — confirm callers expect this.
public boolean contains(final Object value) { return contains((int)value); }
// A fresh set must not report the sentinel MISSING_VALUE as present.
@Test void shouldNotContainMissingValueInitially() { assertFalse(testSet.contains(MISSING_VALUE)); }
// True when the lowest bit is clear, i.e. the value is even. Works for the full
// int range including Integer.MIN_VALUE (unlike value % 2 sign quirks for odds).
public static boolean isEven(final int value) { return (value & LAST_DIGIT_MASK) == 0; }
// Covers 0, positive/negative odds, and both int extremes.
@Test void shouldDetectEvenAndOddNumbers() { assertTrue(BitUtil.isEven(0)); assertTrue(BitUtil.isEven(2)); assertTrue(BitUtil.isEven(MIN_VALUE)); assertFalse(BitUtil.isEven(1)); assertFalse(BitUtil.isEven(-1)); assertFalse(BitUtil.isEven(MAX_VALUE)); }
// Lists a local directory as protocol Path objects, streaming chunks to the
// listener. Entries whose attributes cannot be read are skipped with a warning
// (best-effort); a missing directory or a stream-level I/O error is mapped to a
// BackgroundException.
@Override public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException { final AttributedList<ch.cyberduck.core.Path> paths = new AttributedList<>(); final java.nio.file.Path p = session.toPath(directory); if(!Files.exists(p)) { throw new LocalExceptionMappingService().map("Listing directory {0} failed", new NoSuchFileException(directory.getAbsolute()), directory); } try (DirectoryStream<java.nio.file.Path> stream = Files.newDirectoryStream(p)) { for(java.nio.file.Path n : stream) { if(null == n.getFileName()) { continue; } try { final PathAttributes attributes = feature.toAttributes(n); final EnumSet<Path.Type> type = EnumSet.noneOf(Path.Type.class); if(Files.isDirectory(n)) { type.add(Path.Type.directory); } else { type.add(Path.Type.file); } final Path file = new Path(directory, n.getFileName().toString(), type, attributes); if(this.post(n, file)) { paths.add(file); listener.chunk(directory, paths); } } catch(IOException e) { log.warn(String.format("Failure reading attributes for %s", n)); } } } catch(IOException ex) { throw new LocalExceptionMappingService().map("Listing directory {0} failed", ex, directory); } return paths; }
// Integration test: creates a file and a directory in the local home, verifies
// both appear in the listing, then deletes them and verifies they are gone.
@Test public void testList() throws Exception { final LocalSession session = new LocalSession(new Host(new LocalProtocol(), new LocalProtocol().getDefaultHostname())); assertNotNull(session.open(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback())); assertTrue(session.isConnected()); assertNotNull(session.getClient()); session.login(new DisabledLoginCallback(), new DisabledCancelCallback()); final Path home = new LocalHomeFinderFeature().find(); final Path file = new Path(home, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); final Path directory = new Path(home, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)); new LocalDirectoryFeature(session).mkdir(directory, new TransferStatus()); new LocalTouchFeature(session).touch(file, new TransferStatus()); final AttributedList<Path> list = new LocalListService(session).list(home, new DisabledListProgressListener()); assertTrue(list.contains(file)); assertTrue(list.contains(directory)); new LocalDeleteFeature(session).delete(Arrays.asList(file, directory), new DisabledLoginCallback(), new Delete.DisabledCallback()); assertFalse(new LocalListService(session).list(home, new DisabledListProgressListener()).contains(file)); assertFalse(new LocalListService(session).list(home, new DisabledListProgressListener()).contains(directory)); session.close(); }
// Deprecated convenience overload: parses with default ParsingOptions.
// Callers should migrate to createStatement(sql, options).
@Deprecated public Statement createStatement(String sql) { return createStatement(sql, new ParsingOptions()); }
// Verifies the parser accepts '@' in identifiers when AT_SIGN is enabled;
// the test passes if parsing does not throw.
@SuppressWarnings("deprecation") @Test public void testAllowIdentifierAtSign() { SqlParser sqlParser = new SqlParser(new SqlParserOptions().allowIdentifierSymbol(AT_SIGN)); sqlParser.createStatement("select * from foo@bar"); }
/**
 * Advances the underlying enumerator and caches its current element as a row.
 * Arrays (other than byte[], which represents one binary value) are used as the
 * row directly; scalars are wrapped in a singleton array; exhaustion or a null
 * element yields a single-null row.
 *
 * @return whether the enumerator produced another element
 */
@Override
public boolean next() {
    boolean moved = enumerator.moveNext();
    // Hoisted: the original called enumerator.current() up to four times per
    // invocation, which is wasteful and fragile if current() is not idempotent.
    Object current = moved ? enumerator.current() : null;
    if (current != null) {
        currentRows = current.getClass().isArray() && !(current instanceof byte[])
                ? (Object[]) current
                : new Object[]{current};
    } else {
        currentRows = new Object[]{null};
    }
    return moved;
}
// The federation result set must report at least one row.
@Test void assertNext() { assertTrue(federationResultSet.next()); }
// Delegates snapshot retrieval to the wrapped histogram.
@Override public Snapshot getSnapshot() { return histogram.getSnapshot(); }
// Verifies the timer surfaces exactly the reservoir's snapshot instance.
@Test public void returnsTheSnapshotFromTheReservoir() { final Snapshot snapshot = mock(Snapshot.class); when(reservoir.getSnapshot()).thenReturn(snapshot); assertThat(timer.getSnapshot()) .isEqualTo(snapshot); }
public boolean hasRemainingEncodedBytes() { // We delete an array after fully consuming it. return encodedArrays.size() != 0; }
// Exercises hasRemainingEncodedBytes() around every field type (bytes, numbers,
// infinity, trailing bytes) and around back-to-back fields of the same type:
// true while undecoded data remains, false once fully consumed.
@Test public void testHasRemainingEncodedBytes() { byte[] bytes = {'a', 'b', 'c'}; long number = 12345; // Empty OrderedCode orderedCode = new OrderedCode(); assertFalse(orderedCode.hasRemainingEncodedBytes()); // First and only field of each type. orderedCode.writeBytes(bytes); assertTrue(orderedCode.hasRemainingEncodedBytes()); assertArrayEquals(orderedCode.readBytes(), bytes); assertFalse(orderedCode.hasRemainingEncodedBytes()); orderedCode.writeNumIncreasing(number); assertTrue(orderedCode.hasRemainingEncodedBytes()); assertEquals(orderedCode.readNumIncreasing(), number); assertFalse(orderedCode.hasRemainingEncodedBytes()); orderedCode.writeSignedNumIncreasing(number); assertTrue(orderedCode.hasRemainingEncodedBytes()); assertEquals(orderedCode.readSignedNumIncreasing(), number); assertFalse(orderedCode.hasRemainingEncodedBytes()); orderedCode.writeInfinity(); assertTrue(orderedCode.hasRemainingEncodedBytes()); assertTrue(orderedCode.readInfinity()); assertFalse(orderedCode.hasRemainingEncodedBytes()); orderedCode.writeTrailingBytes(bytes); assertTrue(orderedCode.hasRemainingEncodedBytes()); assertArrayEquals(orderedCode.readTrailingBytes(), bytes); assertFalse(orderedCode.hasRemainingEncodedBytes()); // Two fields of same type. orderedCode.writeBytes(bytes); orderedCode.writeBytes(bytes); assertTrue(orderedCode.hasRemainingEncodedBytes()); assertArrayEquals(orderedCode.readBytes(), bytes); assertArrayEquals(orderedCode.readBytes(), bytes); assertFalse(orderedCode.hasRemainingEncodedBytes()); }
@Override public void createRouter(Router osRouter) { checkNotNull(osRouter, ERR_NULL_ROUTER); checkArgument(!Strings.isNullOrEmpty(osRouter.getId()), ERR_NULL_ROUTER_ID); osRouterStore.createRouter(osRouter); log.info(String.format(MSG_ROUTER, deriveResourceName(osRouter), MSG_CREATED)); }
// A null router must be rejected with NullPointerException.
@Test(expected = NullPointerException.class) public void testCreateRouterWithNull() { target.createRouter(null); }
// Serializes the accumulated form parameters to the request body bytes.
public byte[] build() { return form.build(); }
// Verifies the PAR body serializes all parameters in insertion order with
// correct form URL-encoding (reserved characters escaped, scopes space-joined as '+').
@Test void build() { var body = ParBodyBuilder.create() .acrValues("very-very-high") .codeChallengeMethod("S256") .codeChallenge("myChallenge") .responseType("authorization_code") .redirectUri(URI.create("https://example.com/callback")) .state("#/myaccount") .nonce("bcff66cb-4f01-4129-82a9-0e27703db958") .scopes(List.of("email", "openid")) .clientId("https://fachdienst.example.com/auth/realms/main") .build(); var asString = new String(body, StandardCharsets.UTF_8); assertEquals( "acr_values=very-very-high&code_challenge_method=S256&code_challenge=myChallenge&response_type=authorization_code&redirect_uri=https%3A%2F%2Fexample.com%2Fcallback&state=%23%2Fmyaccount&nonce=bcff66cb-4f01-4129-82a9-0e27703db958&scope=email+openid&client_id=https%3A%2F%2Ffachdienst.example.com%2Fauth%2Frealms%2Fmain", asString); }
// Handles START TRANSACTION: rejects clients without transaction support and
// nested transactions, then begins a transaction with the statement's isolation
// level / read-only mode (falling back to manager defaults). The new transaction
// id is recorded on the state machine and immediately marked inactive because the
// current session does not yet carry it. Returns an already-completed future.
@Override public ListenableFuture<?> execute(StartTransaction statement, TransactionManager transactionManager, Metadata metadata, AccessControl accessControl, QueryStateMachine stateMachine, List<Expression> parameters) { Session session = stateMachine.getSession(); if (!session.isClientTransactionSupport()) { throw new PrestoException(StandardErrorCode.INCOMPATIBLE_CLIENT, "Client does not support transactions"); } if (session.getTransactionId().isPresent()) { throw new PrestoException(StandardErrorCode.NOT_SUPPORTED, "Nested transactions not supported"); } Optional<IsolationLevel> isolationLevel = extractIsolationLevel(statement); Optional<Boolean> readOnly = extractReadOnly(statement); TransactionId transactionId = transactionManager.beginTransaction( isolationLevel.orElse(TransactionManager.DEFAULT_ISOLATION), readOnly.orElse(TransactionManager.DEFAULT_READ_ONLY), false); stateMachine.setStartedTransactionId(transactionId); // Since the current session does not contain this new transaction ID, we need to manually mark it as inactive // when this statement completes. transactionManager.trySetInactive(transactionId); return immediateFuture(null); }
// Verifies a client without transaction support gets INCOMPATIBLE_CLIENT and
// that no transaction state leaks (no infos, no started/cleared transaction id).
@Test public void testNonTransactionalClient() { Session session = sessionBuilder().build(); TransactionManager transactionManager = createTestTransactionManager(); QueryStateMachine stateMachine = createQueryStateMachine("START TRANSACTION", session, true, transactionManager, executor, metadata); assertFalse(stateMachine.getSession().getTransactionId().isPresent()); StartTransactionTask startTransactionTask = new StartTransactionTask(); try { getFutureValue(startTransactionTask.execute(new StartTransaction(ImmutableList.of()), transactionManager, metadata, new AllowAllAccessControl(), stateMachine, emptyList())); fail(); } catch (PrestoException e) { assertEquals(e.getErrorCode(), INCOMPATIBLE_CLIENT.toErrorCode()); } assertTrue(transactionManager.getAllTransactionInfos().isEmpty()); assertFalse(stateMachine.getQueryInfo(Optional.empty()).isClearTransactionId()); assertFalse(stateMachine.getQueryInfo(Optional.empty()).getStartedTransactionId().isPresent()); }
// Headless-mode startup: loads UDFs, optionally creates the processing-log topic,
// applies RocksDB config, executes the queries file, then starts the version
// checker with stringified KSQL config properties. Any failure is logged with the
// query file path and rethrown so the caller can abort startup.
public void startAsync() { try { udfLoader.load(); ProcessingLogServerUtils.maybeCreateProcessingLogTopic( serviceContext.getTopicClient(), processingLogConfig, ksqlConfig); if (processingLogConfig.getBoolean(ProcessingLogConfig.STREAM_AUTO_CREATE)) { log.warn("processing log auto-create is enabled, but this is not supported " + "for headless mode."); } rocksDBConfigSetterHandler.accept(ksqlConfig); processesQueryFile(readQueriesFile(queriesFile)); showWelcomeMessage(); final Properties properties = new Properties(); ksqlConfig.originals().forEach((key, value) -> { if (nonNull(value)) { properties.put(key, value.toString()); } }); versionChecker.start(KsqlModuleType.SERVER, properties); } catch (final Exception e) { log.error("Failed to start KSQL Server with query file: " + queriesFile, e); throw e; } }
// Verifies startAsync() starts the version checker for the SERVER module.
@Test public void shouldStartTheVersionCheckerAgent() { // When: standaloneExecutor.startAsync(); verify(versionChecker).start(eq(KsqlModuleType.SERVER), any()); }
// Convenience overload: injects topic properties using a fresh default builder.
@Override public <T extends Statement> ConfiguredStatement<T> inject( final ConfiguredStatement<T> statement ) { return inject(statement, new TopicProperties.Builder()); }
// Verifies that for a join, the injector picks the left-most source's description
// when configuring the topic builder.
@Test public void shouldIdentifyAndUseCorrectSourceInJoin() { // Given: givenStatement("CREATE STREAM x WITH (kafka_topic='topic') AS SELECT * FROM SOURCE " + "JOIN J_SOURCE ON SOURCE.X = J_SOURCE.X;"); // When: injector.inject(statement, builder); // Then: verify(builder).withSource(argThat(supplierThatGets(sourceDescription)), any(Supplier.class)); }
/**
 * Equality is based on the application name and the extra-info map.
 *
 * <p>Fixed: the original compared {@code this.getApplicationName()} against
 * {@code that.applicationModel.getApplicationName()} directly and via
 * {@code String.equals}, which was asymmetric in expression and threw NPE when
 * this side's name was null. Both sides now go through the same accessor and
 * {@link Objects#equals} — assumes {@code getApplicationName()} reads
 * {@code applicationModel.getApplicationName()}; confirm against the class.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    ApplicationMetric that = (ApplicationMetric) o;
    return Objects.equals(getApplicationName(), that.getApplicationName())
            && Objects.equals(extraInfo, that.extraInfo);
}
// NOTE(review): empty placeholder — this test asserts nothing. It should cover the
// equals contract (reflexive, symmetric, null, different name/extraInfo) or be removed.
@Test void testEquals() {}
// Deserializes raft message bytes. New-format payloads carry a 2-byte header
// (REQUEST_TYPE_FIELD_TAG + read/write discriminator); failing that, the bytes are
// tried as the legacy GetRequest and Log formats in turn. The empty catches are
// deliberate best-effort fallthrough between formats; only when every format fails
// is a ConsistencyException thrown.
public static Message parse(byte[] bytes) { Message result; try { if (bytes[0] == REQUEST_TYPE_FIELD_TAG) { if (bytes[1] == REQUEST_TYPE_READ) { result = ReadRequest.parseFrom(bytes); } else { result = WriteRequest.parseFrom(bytes); } return result; } } catch (Throwable ignore) { } // old consistency entity, will be @Deprecated in future try { GetRequest request = GetRequest.parseFrom(bytes); return convertToReadRequest(request); } catch (Throwable ignore) { } try { Log log = Log.parseFrom(bytes); return convertToWriteRequest(log); } catch (Throwable ignore) { } throw new ConsistencyException("The current array cannot be serialized to the corresponding object"); }
/**
 * Verifies that bytes prefixed with the request-type header (tag + WRITE
 * discriminator) parse back into a WriteRequest carrying the original group and
 * data. Fixed: the original asserted {@code testCase.getClass()} against
 * {@code WriteRequest.class}, which is trivially true and never checked the
 * parse result's type — the assertion now targets {@code actual}.
 */
@Test
void testParseWriteRequestWithRequestTypeField() {
    String group = "test";
    ByteString data = ByteString.copyFrom("data".getBytes());
    WriteRequest testCase = WriteRequest.newBuilder().setGroup(group).setData(data).build();
    // 2-byte header: [REQUEST_TYPE_FIELD_TAG, REQUEST_TYPE_WRITE], then the payload.
    byte[] requestTypeFieldBytes = new byte[2];
    requestTypeFieldBytes[0] = ProtoMessageUtil.REQUEST_TYPE_FIELD_TAG;
    requestTypeFieldBytes[1] = ProtoMessageUtil.REQUEST_TYPE_WRITE;
    byte[] dataBytes = testCase.toByteArray();
    ByteBuffer byteBuffer = (ByteBuffer) ByteBuffer.allocate(requestTypeFieldBytes.length + dataBytes.length).put(requestTypeFieldBytes)
            .put(dataBytes).position(0);
    Object actual = ProtoMessageUtil.parse(byteBuffer.array());
    assertEquals(WriteRequest.class, actual.getClass());
    assertEquals(group, ((WriteRequest) actual).getGroup());
    assertEquals(data, ((WriteRequest) actual).getData());
}
// Routes a property name to the right resolver by prefix: request-config first,
// then ksql-config (excluding the streams sub-prefix), otherwise Kafka Streams
// config (where `strict` controls unknown-property handling).
@Override public Optional<ConfigItem> resolve(final String propertyName, final boolean strict) { if (propertyName.startsWith(KSQL_REQUEST_CONFIG_PROPERTY_PREFIX)) { return resolveRequestConfig(propertyName); } else if (propertyName.startsWith(KSQL_CONFIG_PROPERTY_PREFIX) && !propertyName.startsWith(KSQL_STREAMS_PREFIX)) { return resolveKsqlConfig(propertyName); } return resolveStreamsConfig(propertyName, strict); }
// In strict mode an unknown consumer-prefixed property resolves to empty.
@Test public void shouldNotFindUnknownConsumerPropertyIfStrict() { // Given: final String configName = StreamsConfig.CONSUMER_PREFIX + "custom.interceptor.config"; // Then: assertThat(resolver.resolve(configName, true), is(Optional.empty())); }
// Creates a builder pre-populated with every state field of this subtask state,
// so a copy built from it is equivalent to this instance.
public Builder toBuilder() { return builder() .setManagedKeyedState(managedKeyedState) .setManagedOperatorState(managedOperatorState) .setRawOperatorState(rawOperatorState) .setRawKeyedState(rawKeyedState) .setInputChannelState(inputChannelState) .setResultSubpartitionState(resultSubpartitionState) .setInputRescalingDescriptor(inputRescalingDescriptor) .setOutputRescalingDescriptor(outputRescalingDescriptor); }
// Round-trips a sample state through toBuilder().build() and checks field-level
// equality via reflection.
@Test void testToBuilderCorrectness() throws IOException { // given: Initialized operator subtask state. OperatorSubtaskState operatorSubtaskState = generateSampleOperatorSubtaskState().f1; // when: Copy the operator subtask state. OperatorSubtaskState operatorSubtaskStateCopy = operatorSubtaskState.toBuilder().build(); // then: It should be equal to original one. assertThat(reflectionEquals(operatorSubtaskState, operatorSubtaskStateCopy)).isTrue(); }
// Convenience overload: formats the amount in Chinese numerals without the
// money-amount mode (third argument false).
public static String format(double amount, boolean isUseTraditional) { return format(amount, isUseTraditional, false); }
// Covers decimal formatting edge cases: leading zero, trailing zero trimmed,
// multi-digit fraction, and whole numbers with/without fraction.
@Test public void singleNumberTest() { String format = NumberChineseFormatter.format(0.01, false, false); assertEquals("零点零一", format); format = NumberChineseFormatter.format(0.10, false, false); assertEquals("零点一", format); format = NumberChineseFormatter.format(0.12, false, false); assertEquals("零点一二", format); format = NumberChineseFormatter.format(1.00, false, false); assertEquals("一", format); format = NumberChineseFormatter.format(1.10, false, false); assertEquals("一点一", format); format = NumberChineseFormatter.format(1.02, false, false); assertEquals("一点零二", format); }
// Builds the telemetry payload for a user. Requires BOTH the global telemetry
// flag and the user's personal setting to be enabled; otherwise returns the
// telemetry-disabled response carrying only the user's settings.
public Map<String, Object> getTelemetryResponse(User currentUser) { TelemetryUserSettings telemetryUserSettings = getTelemetryUserSettings(currentUser); if (isTelemetryEnabled && telemetryUserSettings.telemetryEnabled()) { DateTime clusterCreationDate = telemetryClusterService.getClusterCreationDate().orElse(null); String clusterId = telemetryClusterService.getClusterId(); List<TelemetryLicenseStatus> licenseStatuses = enterpriseDataProvider.licenseStatus(); return telemetryResponseFactory.createTelemetryResponse( getClusterInfo(clusterId, clusterCreationDate, licenseStatuses), getUserInfo(currentUser, clusterId), getPluginInfo(), getSearchClusterInfo(), licenseStatuses, telemetryUserSettings, getDataNodeInfo()); } else { return telemetryResponseFactory.createTelemetryDisabledResponse(telemetryUserSettings); } }
// With both global and per-user telemetry enabled, the full payload is returned.
@Test void test_telemetry_enabled_for_user() { TelemetryService telemetryService = createTelemetryService(true); mockUserTelemetryEnabled(true); mockTrafficData(trafficCounterService); Map<String, Object> response = telemetryService.getTelemetryResponse(user); assertThatAllTelemetryDataIsPresent(response); }
/**
 * Creates a {@link KeyManagerFactory} for the configured algorithm, using the
 * configured security provider when one has been set.
 *
 * @return a new {@code KeyManagerFactory}
 * @throws NoSuchProviderException  if the configured provider is not registered
 * @throws NoSuchAlgorithmException if the algorithm is not supported
 */
public KeyManagerFactory createKeyManagerFactory() throws NoSuchProviderException, NoSuchAlgorithmException {
    if (getProvider() == null) {
        // No explicit provider configured: let the JCA pick one.
        return KeyManagerFactory.getInstance(getAlgorithm());
    }
    return KeyManagerFactory.getInstance(getAlgorithm(), getProvider());
}
// The factory bean must produce a KeyManagerFactory with its default configuration.
@Test
public void testDefaults() throws Exception {
    assertNotNull(factoryBean.createKeyManagerFactory());
}
/**
 * Returns the file's main name (file name without its extension).
 *
 * @param file the file to inspect
 * @return the name without extension; delegates to {@link FileNameUtil#mainName}
 */
public static String mainName(File file) {
    return FileNameUtil.mainName(file);
}
// mainName must strip trailing separators, directories and the extension alike.
@Test
public void mainNameTest() {
    String path = "d:\\aaa\\bbb\\cc\\ddd\\";
    String mainName = FileUtil.mainName(path);
    assertEquals("ddd", mainName);
    path = "d:\\aaa\\bbb\\cc\\ddd";
    mainName = FileUtil.mainName(path);
    assertEquals("ddd", mainName);
    path = "d:\\aaa\\bbb\\cc\\ddd.jpg";
    mainName = FileUtil.mainName(path);
    assertEquals("ddd", mainName);
}
/**
 * Starts the configured command asynchronously.
 *
 * @return the started {@link Process}; the instance is also cached in the
 *         {@code process} field
 * @throws IOException if the process cannot be started
 */
public Process executeAsync() throws IOException, InterruptedException, ExecutionException {
    logger.atInfo().log("Executing the following command: '%s'", COMMAND_ARGS_JOINER.join(args));
    // inheritIO() wires the child's stdin/stdout/stderr to this JVM's streams.
    process = processBuilder.inheritIO().start();
    return process;
}
// Executing a trivial shell command asynchronously must return a live Process
// that eventually exits with status 0.
@Test
public void executeAsync_always_startsProcessAndReturnsProcessInstance()
        throws IOException, InterruptedException, ExecutionException {
    CommandExecutor executor = new CommandExecutor("/bin/sh", "-c", "echo 1");
    Process process = executor.executeAsync();
    process.waitFor();
    assertThat(process.exitValue()).isEqualTo(0);
}
/**
 * Instantiates a sharding algorithm by class name and initializes it with the
 * given properties.
 *
 * @param shardingAlgorithmClassName fully-qualified class name to load
 * @param superShardingAlgorithmClass expected supertype of the loaded class
 * @param props initialization properties (converted to string-typed values)
 * @return the initialized algorithm instance
 * @throws ShardingAlgorithmClassImplementationException if the loaded class does
 *         not implement the expected supertype
 */
@SuppressWarnings("unchecked")
@SneakyThrows(ReflectiveOperationException.class)
public static <T extends ShardingAlgorithm> T newInstance(final String shardingAlgorithmClassName,
                                                          final Class<T> superShardingAlgorithmClass, final Properties props) {
    Class<?> algorithmClass = Class.forName(shardingAlgorithmClassName);
    if (!superShardingAlgorithmClass.isAssignableFrom(algorithmClass)) {
        throw new ShardingAlgorithmClassImplementationException(shardingAlgorithmClassName, superShardingAlgorithmClass);
    }
    // Requires a public no-arg constructor; ReflectiveOperationException is rethrown via @SneakyThrows.
    T result = (T) algorithmClass.getDeclaredConstructor().newInstance();
    result.init(convertToStringTypedProperties(props));
    return result;
}
// Loading a class that does not implement the expected algorithm supertype must fail fast.
@Test
void assertNewInstanceWithUnAssignableFrom() {
    assertThrows(ShardingAlgorithmClassImplementationException.class,
            () -> ClassBasedShardingAlgorithmFactory.newInstance(ClassBasedHintShardingAlgorithmFixture.class.getName(), StandardShardingAlgorithm.class, new Properties()));
}
// Presto scalar SQL function: 64-bit bitwise OR of two BIGINT values.
@Description("bitwise OR in 2's complement arithmetic")
@ScalarFunction
@SqlType(StandardTypes.BIGINT)
public static long bitwiseOr(@SqlType(StandardTypes.BIGINT) long left, @SqlType(StandardTypes.BIGINT) long right) {
    return left | right;
}
// bitwise_or must match Java's | operator, including negative (two's complement) operands.
@Test
public void testBitwiseOr() {
    assertFunction("bitwise_or(0, -1)", BIGINT, -1L);
    assertFunction("bitwise_or(3, 8)", BIGINT, 3L | 8L);
    assertFunction("bitwise_or(-4, 12)", BIGINT, -4L | 12L);
    assertFunction("bitwise_or(60, 21)", BIGINT, 60L | 21L);
}
/**
 * Removes and returns up to {@code count} earliest entries of the time series.
 * Blocking wrapper around the async variant.
 *
 * @param count maximum number of entries to poll
 * @return the removed entries
 */
@Override
public Collection<TimeSeriesEntry<V, L>> pollFirstEntries(int count) {
    return get(pollFirstEntriesAsync(count));
}
// Polling the first two entries must return them in timestamp order (with labels
// preserved) and remove them from the series.
@Test
public void testPollFirstEntries() {
    RTimeSeries<String, String> t = redisson.getTimeSeries("test");
    t.add(1, "10", "100");
    t.add(2, "20");
    t.add(3, "30");
    Collection<TimeSeriesEntry<String, String>> s = t.pollFirstEntries(2);
    assertThat(s).containsExactly(new TimeSeriesEntry<>(1, "10", "100"), new TimeSeriesEntry<>(2, "20"));
    assertThat(t.size()).isEqualTo(1);
}
/**
 * Binds the query parameters onto a new instance of {@code clazz} via reflection;
 * field names must match parameter names.
 *
 * @param clazz the target type to instantiate and populate
 * @return the populated instance
 */
public <T> T fromQueryParams(Class<T> clazz) {
    // Only the first value of each multi-valued query parameter is used.
    Map<String, String> fieldValues = queryParams.entrySet().stream()
            .collect(toMap(Entry::getKey, e -> e.getValue().get(0)));
    return ReflectionUtils.newInstanceAndSetFieldValues(clazz, fieldValues);
}
// Query parameters must be reflected onto the matching fields of the bound object.
@Test
void testToRequestUrlWithQueryParams() {
    RequestUrl requestUrl = new MatchUrl("/api/jobs/enqueued?offset=2&limit=2&order=updatedAt:DESC").toRequestUrl("/api/jobs/:state");
    OffsetBasedPageRequest pageRequest = requestUrl.fromQueryParams(OffsetBasedPageRequest.class);
    assertThat(pageRequest.getOffset()).isEqualTo(2);
    assertThat(pageRequest.getLimit()).isEqualTo(2);
    assertThat(pageRequest.getOrder()).isEqualTo("updatedAt:DESC");
}
/**
 * Extracts all subquery segments reachable from the given SELECT statement.
 *
 * @param selectStatement the statement to scan
 * @return all discovered subquery segments, in extraction order
 */
public static Collection<SubquerySegment> getSubquerySegments(final SelectStatement selectStatement) {
    List<SubquerySegment> result = new LinkedList<>();
    // Recursive walk populates the result list in place.
    extractSubquerySegments(result, selectStatement);
    return result;
}
// A UNION combine segment must contribute both its sides (and nested subqueries) to the result.
@Test
void assertGetSubquerySegmentsWithCombineSegment() {
    SelectStatement selectStatement = mock(SelectStatement.class);
    SubquerySegment left = new SubquerySegment(0, 0, mock(SelectStatement.class), "");
    SubquerySegment right = createSelectStatementForCombineSegment();
    when(selectStatement.getCombine()).thenReturn(Optional.of(new CombineSegment(0, 0, left, CombineType.UNION, right)));
    Collection<SubquerySegment> actual = SubqueryExtractUtils.getSubquerySegments(selectStatement);
    assertThat(actual.size(), is(3));
}
/**
 * Static factory for a set-queue instruction bound to a specific port.
 *
 * @param queueId the queue identifier
 * @param port    the port the queue applies to
 * @return a new {@link SetQueueInstruction}
 */
public static SetQueueInstruction setQueue(final long queueId, final PortNumber port) {
    return new SetQueueInstruction(queueId, port);
}
// setQueue must produce a QUEUE-typed instruction carrying the given queue id and port.
@Test
public void testSetQueueMethod() {
    final Instruction instruction = Instructions.setQueue(2, port2);
    final Instructions.SetQueueInstruction setQueueInstruction = checkAndConvert(instruction, Instruction.Type.QUEUE, Instructions.SetQueueInstruction.class);
    assertThat(setQueueInstruction.queueId(), is(2L));
    assertThat(setQueueInstruction.port(), is(port2));
}
/**
 * Parses a duration property value into nanoseconds.
 * <p>
 * Accepted forms: a bare number (returned as-is), or a number with suffix
 * {@code s}, {@code ns}, {@code us}, or {@code ms} (case-insensitive).
 *
 * @param propertyName  name used in exception messages
 * @param propertyValue value to parse
 * @return the duration in nanoseconds (bare numbers are returned unconverted)
 * @throws NumberFormatException if the suffix is not one of the accepted forms
 */
public static long parseDuration(final String propertyName, final String propertyValue) {
    final char lastCharacter = propertyValue.charAt(propertyValue.length() - 1);
    if (Character.isDigit(lastCharacter)) {
        // No suffix: raw long value.
        return Long.parseLong(propertyValue);
    }
    // Every valid suffix ends in 's' ("s", "ns", "us", "ms").
    if (lastCharacter != 's' && lastCharacter != 'S') {
        throw new NumberFormatException(
            propertyName + ": " + propertyValue + " should end with: s, ms, us, or ns.");
    }
    final char secondLastCharacter = propertyValue.charAt(propertyValue.length() - 2);
    if (Character.isDigit(secondLastCharacter)) {
        // Plain "s" suffix: seconds -> nanoseconds.
        final long value = AsciiEncoding.parseLongAscii(propertyValue, 0, propertyValue.length() - 1);
        return TimeUnit.SECONDS.toNanos(value);
    }
    // Two-letter suffix: the unit is determined by its first character.
    final long value = AsciiEncoding.parseLongAscii(propertyValue, 0, propertyValue.length() - 2);
    switch (secondLastCharacter) {
        case 'n':
        case 'N':
            return value;
        case 'u':
        case 'U':
            return TimeUnit.MICROSECONDS.toNanos(value);
        case 'm':
        case 'M':
            return TimeUnit.MILLISECONDS.toNanos(value);
        default:
            throw new NumberFormatException(
                propertyName + ": " + propertyValue + " should end with: s, ms, us, or ns.");
    }
}
// An unrecognized two-letter suffix ("zs") must be rejected with NumberFormatException.
@Test
void shouldThrowWhenParseTimeHasBadTwoLetterSuffix() {
    assertThrows(NumberFormatException.class, () -> parseDuration("", "1zs"));
}
/**
 * Two {@code PortInfo} instances are equal when they carry the same port number
 * and the same (possibly null) tag list.
 *
 * @param o the object to compare against
 * @return true if {@code o} is a PortInfo with equal port and tags
 */
@Override
public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    PortInfo portInfo = (PortInfo) o;
    // Objects.equals collapses the null-handling ternary of the original into one call.
    return port == portInfo.port && java.util.Objects.equals(tags, portInfo.tags);
}
// equals must distinguish instances by both port number and tag list.
@Test
public void testEquals() {
    PortInfo a = new PortInfo(1234, List.of("foo"));
    PortInfo b = new PortInfo(1234, List.of("foo"));
    PortInfo c = new PortInfo(1234, List.of("foo", "bar"));
    PortInfo d = new PortInfo(12345, List.of("foo"));
    assertEquals(a, b);
    assertNotEquals(a, c);
    assertNotEquals(a, d);
    assertNotEquals(c, d);
}
/**
 * Looks up all windows for {@code key} whose start and end times fall within the
 * given bounds, using a Kafka Streams interactive window-key query.
 *
 * @param key               the key to look up
 * @param partition         the store partition to query
 * @param windowStartBounds accepted range for window start instants
 * @param windowEndBounds   accepted range for window end instants
 * @param position          optional position bound for consistent reads
 * @return an iterator of matching rows plus the store position
 * @throws MaterializationException if the query fails or the store read errors
 */
@Override
public KsMaterializedQueryResult<WindowedRow> get(
    final GenericKey key,
    final int partition,
    final Range<Instant> windowStartBounds,
    final Range<Instant> windowEndBounds,
    final Optional<Position> position
) {
    try {
        // The store query can only bound window START times; end-time filtering happens below.
        final Instant lower = calculateLowerBound(windowStartBounds, windowEndBounds);
        final Instant upper = calculateUpperBound(windowStartBounds, windowEndBounds);
        final WindowKeyQuery<GenericKey, ValueAndTimestamp<GenericRow>> query =
            WindowKeyQuery.withKeyAndWindowStartRange(key, lower, upper);
        StateQueryRequest<WindowStoreIterator<ValueAndTimestamp<GenericRow>>> request =
            inStore(stateStore.getStateStoreName()).withQuery(query);
        if (position.isPresent()) {
            request = request.withPositionBound(PositionBound.at(position.get()));
        }
        final KafkaStreams streams = stateStore.getKafkaStreams();
        final StateQueryResult<WindowStoreIterator<ValueAndTimestamp<GenericRow>>> result =
            streams.query(request);
        final QueryResult<WindowStoreIterator<ValueAndTimestamp<GenericRow>>> queryResult =
            result.getPartitionResults().get(partition);
        if (queryResult.isFailure()) {
            throw failedQueryException(queryResult);
        }
        if (queryResult.getResult() == null) {
            // No data for this key/range: empty iterator but still report the position.
            return KsMaterializedQueryResult.rowIteratorWithPosition(
                Collections.emptyIterator(), queryResult.getPosition());
        }
        try (WindowStoreIterator<ValueAndTimestamp<GenericRow>> it = queryResult.getResult()) {
            final Builder<WindowedRow> builder = ImmutableList.builder();
            while (it.hasNext()) {
                final KeyValue<Long, ValueAndTimestamp<GenericRow>> next = it.next();
                final Instant windowStart = Instant.ofEpochMilli(next.key);
                // Re-check start bounds: the store range above may have been widened.
                if (!windowStartBounds.contains(windowStart)) {
                    continue;
                }
                final Instant windowEnd = windowStart.plus(windowSize);
                if (!windowEndBounds.contains(windowEnd)) {
                    continue;
                }
                final TimeWindow window =
                    new TimeWindow(windowStart.toEpochMilli(), windowEnd.toEpochMilli());
                final WindowedRow row = WindowedRow.of(
                    stateStore.schema(),
                    new Windowed<>(key, window),
                    next.value.value(),
                    next.value.timestamp()
                );
                builder.add(row);
            }
            return KsMaterializedQueryResult.rowIteratorWithPosition(
                builder.build().iterator(), queryResult.getPosition());
        }
    } catch (final NotUpToBoundException | MaterializationException e) {
        // Already meaningful; rethrow unchanged.
        throw e;
    } catch (final Exception e) {
        throw new MaterializationException("Failed to get value from materialized table", e);
    }
}
// An unbounded key and window range must be translated into a WindowRangeQuery (fetch-all).
@Test
@SuppressWarnings("unchecked")
public void shouldSupportRangeAll_fetchAll() {
    // When:
    final StateQueryResult<KeyValueIterator<Windowed<GenericKey>, ValueAndTimestamp<GenericRow>>> partitionResult = new StateQueryResult<>();
    final QueryResult<KeyValueIterator<Windowed<GenericKey>, ValueAndTimestamp<GenericRow>>> queryResult = QueryResult.forResult(keyValueIterator);
    queryResult.setPosition(POSITION);
    partitionResult.addResult(PARTITION, queryResult);
    when(kafkaStreams.query(any(StateQueryRequest.class))).thenReturn(partitionResult);
    table.get(PARTITION, Range.all(), Range.all());
    // Then:
    verify(kafkaStreams).query(queryTypeCaptor.capture());
    StateQueryRequest<?> request = queryTypeCaptor.getValue();
    assertThat(request.getQuery(), instanceOf(WindowRangeQuery.class));
}
/**
 * Validates an update of a function config against the existing config and
 * returns the merged result.
 * <p>
 * Identity fields (tenant/namespace/name) and structural fields (input topics,
 * output serde/schema, processing guarantees, ordering, runtime, auto-ack,
 * subscription name) must not change; mutable fields (class name, jar, log topic,
 * output, user config, secrets, retries, parallelism, resources, window config,
 * timeouts, runtime flags, producer config) are overwritten when set on
 * {@code newConfig}.
 *
 * @param existingConfig the currently deployed configuration
 * @param newConfig      the requested update
 * @return a merged configuration based on {@code existingConfig}
 * @throws IllegalArgumentException if an immutable field differs
 */
public static FunctionConfig validateUpdate(FunctionConfig existingConfig, FunctionConfig newConfig) {
    FunctionConfig mergedConfig = existingConfig.toBuilder().build();
    // Identity fields must match exactly.
    if (!existingConfig.getTenant().equals(newConfig.getTenant())) {
        throw new IllegalArgumentException("Tenants differ");
    }
    if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) {
        throw new IllegalArgumentException("Namespaces differ");
    }
    if (!existingConfig.getName().equals(newConfig.getName())) {
        throw new IllegalArgumentException("Function Names differ");
    }
    if (!StringUtils.isEmpty(newConfig.getClassName())) {
        mergedConfig.setClassName(newConfig.getClassName());
    }
    if (!StringUtils.isEmpty(newConfig.getJar())) {
        mergedConfig.setJar(newConfig.getJar());
    }
    if (newConfig.getInputSpecs() == null) {
        newConfig.setInputSpecs(new HashMap<>());
    }
    if (mergedConfig.getInputSpecs() == null) {
        mergedConfig.setInputSpecs(new HashMap<>());
    }
    // Normalize the various input declarations (inputs, topics pattern, custom
    // serde/schema inputs) into newConfig's inputSpecs map before comparing.
    if (newConfig.getInputs() != null) {
        newConfig.getInputs().forEach((topicName -> {
            newConfig.getInputSpecs().put(topicName,
                    ConsumerConfig.builder().isRegexPattern(false).build());
        }));
    }
    if (newConfig.getTopicsPattern() != null && !newConfig.getTopicsPattern().isEmpty()) {
        newConfig.getInputSpecs().put(newConfig.getTopicsPattern(),
                ConsumerConfig.builder()
                        .isRegexPattern(true)
                        .build());
    }
    if (newConfig.getCustomSerdeInputs() != null) {
        newConfig.getCustomSerdeInputs().forEach((topicName, serdeClassName) -> {
            newConfig.getInputSpecs().put(topicName,
                    ConsumerConfig.builder()
                            .serdeClassName(serdeClassName)
                            .isRegexPattern(false)
                            .build());
        });
    }
    if (newConfig.getCustomSchemaInputs() != null) {
        newConfig.getCustomSchemaInputs().forEach((topicName, schemaClassname) -> {
            newConfig.getInputSpecs().put(topicName,
                    ConsumerConfig.builder()
                            .schemaType(schemaClassname)
                            .isRegexPattern(false)
                            .build());
        });
    }
    // Input topics may only be refined, never added or switched between
    // literal-topic and regex-pattern form.
    if (!newConfig.getInputSpecs().isEmpty()) {
        newConfig.getInputSpecs().forEach((topicName, consumerConfig) -> {
            if (!existingConfig.getInputSpecs().containsKey(topicName)) {
                throw new IllegalArgumentException("Input Topics cannot be altered");
            }
            if (consumerConfig.isRegexPattern() != existingConfig.getInputSpecs().get(topicName).isRegexPattern()) {
                throw new IllegalArgumentException(
                        "isRegexPattern for input topic " + topicName + " cannot be altered");
            }
            mergedConfig.getInputSpecs().put(topicName, consumerConfig);
        });
    }
    if (!StringUtils.isEmpty(newConfig.getOutputSerdeClassName()) && !newConfig.getOutputSerdeClassName()
            .equals(existingConfig.getOutputSerdeClassName())) {
        throw new IllegalArgumentException("Output Serde mismatch");
    }
    if (!StringUtils.isEmpty(newConfig.getOutputSchemaType()) && !newConfig.getOutputSchemaType()
            .equals(existingConfig.getOutputSchemaType())) {
        throw new IllegalArgumentException("Output Schema mismatch");
    }
    if (!StringUtils.isEmpty(newConfig.getLogTopic())) {
        mergedConfig.setLogTopic(newConfig.getLogTopic());
    }
    if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees()
            .equals(existingConfig.getProcessingGuarantees())) {
        throw new IllegalArgumentException("Processing Guarantees cannot be altered");
    }
    if (newConfig.getRetainOrdering() != null && !newConfig.getRetainOrdering()
            .equals(existingConfig.getRetainOrdering())) {
        throw new IllegalArgumentException("Retain Ordering cannot be altered");
    }
    if (newConfig.getRetainKeyOrdering() != null && !newConfig.getRetainKeyOrdering()
            .equals(existingConfig.getRetainKeyOrdering())) {
        throw new IllegalArgumentException("Retain Key Ordering cannot be altered");
    }
    if (!StringUtils.isEmpty(newConfig.getOutput())) {
        mergedConfig.setOutput(newConfig.getOutput());
    }
    if (newConfig.getUserConfig() != null) {
        mergedConfig.setUserConfig(newConfig.getUserConfig());
    }
    if (newConfig.getSecrets() != null) {
        mergedConfig.setSecrets(newConfig.getSecrets());
    }
    if (newConfig.getRuntime() != null && !newConfig.getRuntime().equals(existingConfig.getRuntime())) {
        throw new IllegalArgumentException("Runtime cannot be altered");
    }
    if (newConfig.getAutoAck() != null && !newConfig.getAutoAck().equals(existingConfig.getAutoAck())) {
        throw new IllegalArgumentException("AutoAck cannot be altered");
    }
    if (newConfig.getMaxMessageRetries() != null) {
        mergedConfig.setMaxMessageRetries(newConfig.getMaxMessageRetries());
    }
    if (!StringUtils.isEmpty(newConfig.getDeadLetterTopic())) {
        mergedConfig.setDeadLetterTopic(newConfig.getDeadLetterTopic());
    }
    if (!StringUtils.isEmpty(newConfig.getSubName()) && !newConfig.getSubName()
            .equals(existingConfig.getSubName())) {
        throw new IllegalArgumentException("Subscription Name cannot be altered");
    }
    if (newConfig.getParallelism() != null) {
        mergedConfig.setParallelism(newConfig.getParallelism());
    }
    if (newConfig.getResources() != null) {
        // Resources are merged field-by-field rather than replaced wholesale.
        mergedConfig
                .setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources()));
    }
    if (newConfig.getWindowConfig() != null) {
        mergedConfig.setWindowConfig(newConfig.getWindowConfig());
    }
    if (newConfig.getTimeoutMs() != null) {
        mergedConfig.setTimeoutMs(newConfig.getTimeoutMs());
    }
    if (newConfig.getCleanupSubscription() != null) {
        mergedConfig.setCleanupSubscription(newConfig.getCleanupSubscription());
    }
    if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) {
        mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags());
    }
    if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) {
        mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions());
    }
    if (newConfig.getProducerConfig() != null) {
        mergedConfig.setProducerConfig(newConfig.getProducerConfig());
    }
    return mergedConfig;
}
// Updating secrets must carry the new secrets into the merged config, leaving
// every other field untouched.
@Test
public void testMergeDifferentSecrets() {
    FunctionConfig functionConfig = createFunctionConfig();
    Map<String, String> mySecrets = new HashMap<>();
    mySecrets.put("MyKey", "MyValue");
    FunctionConfig newFunctionConfig = createUpdatedFunctionConfig("secrets", mySecrets);
    FunctionConfig mergedConfig = FunctionConfigUtils.validateUpdate(functionConfig, newFunctionConfig);
    assertEquals( mergedConfig.getSecrets(), mySecrets );
    // Reset secrets and compare the remainder via JSON serialization.
    mergedConfig.setSecrets(functionConfig.getSecrets());
    assertEquals( new Gson().toJson(functionConfig), new Gson().toJson(mergedConfig) );
}
/**
 * Static factory creating a transformer that applies the given retry policy.
 *
 * @param retry the retry policy to wrap
 * @return a new {@code RetryTransformer}
 */
public static <T> RetryTransformer<T> of(Retry retry) {
    return new RetryTransformer<>(retry);
}
// Two failed Maybe subscriptions through the transformer must each exhaust the
// retry budget (3 calls each = 6) and be counted as failed-with-retry.
@Test
public void returnOnErrorUsingMaybe() throws InterruptedException {
    RetryConfig config = retryConfig();
    Retry retry = Retry.of("testName", config);
    given(helloWorldService.returnHelloWorld())
        .willThrow(new HelloWorldException());
    Maybe.fromCallable(helloWorldService::returnHelloWorld)
        .compose(RetryTransformer.of(retry))
        .test()
        .await()
        .assertError(HelloWorldException.class)
        .assertNotComplete();
    Maybe.fromCallable(helloWorldService::returnHelloWorld)
        .compose(RetryTransformer.of(retry))
        .test()
        .await()
        .assertError(HelloWorldException.class)
        .assertNotComplete();
    then(helloWorldService).should(times(6)).returnHelloWorld();
    Retry.Metrics metrics = retry.getMetrics();
    assertThat(metrics.getNumberOfFailedCallsWithRetryAttempt()).isEqualTo(2);
    assertThat(metrics.getNumberOfFailedCallsWithoutRetryAttempt()).isZero();
}
/**
 * Sends this request synchronously over the underlying web3j service.
 *
 * @return the deserialized response of type {@code T}
 * @throws IOException if the transport fails
 */
public T send() throws IOException {
    return web3jService.send(this, responseType);
}
// net_peerCount must serialize to the expected JSON-RPC request envelope.
@Test
public void testNetPeerCount() throws Exception {
    web3j.netPeerCount().send();
    verifyResult("{\"jsonrpc\":\"2.0\",\"method\":\"net_peerCount\",\"params\":[],\"id\":1}");
}
/**
 * Computes the request signature for access-key authentication.
 * <p>
 * The signed payload combines the service key, method name, secret key and
 * timestamp; when parameter signing is enabled on the URL, the invocation
 * arguments are folded into the signature as well.
 *
 * @param url        the service URL (provides key and the parameter-sign flag)
 * @param invocation the invocation being signed
 * @param secretKey  the caller's secret key
 * @param time       the request timestamp string
 * @return the computed signature
 */
String getSignature(URL url, Invocation invocation, String secretKey, String time) {
    String requestString = String.format(
            Constants.SIGNATURE_STRING_FORMAT,
            url.getColonSeparatedKey(),
            RpcUtils.getMethodName(invocation),
            secretKey,
            time);
    boolean parameterEncrypt = url.getParameter(Constants.PARAMETER_SIGNATURE_ENABLE_KEY, false);
    if (parameterEncrypt) {
        // Include the invocation arguments so tampered parameters invalidate the signature.
        return SignatureUtils.sign(invocation.getArguments(), requestString, secretKey);
    }
    return SignatureUtils.sign(requestString, secretKey);
}
// With parameter signing enabled, changing the invocation arguments must change the signature.
@Test
void testGetSignatureWithParameter() {
    URL url = mock(URL.class);
    when(url.getParameter(Constants.PARAMETER_SIGNATURE_ENABLE_KEY, false)).thenReturn(true);
    Invocation invocation = mock(Invocation.class);
    String secretKey = "123456";
    Object[] params = {"dubbo", new ArrayList()};
    when(invocation.getArguments()).thenReturn(params);
    AccessKeyAuthenticator helper = new AccessKeyAuthenticator(ApplicationModel.defaultModel());
    String signature = helper.getSignature(url, invocation, secretKey, String.valueOf(System.currentTimeMillis()));
    assertNotNull(signature);
    Object[] fakeParams = {"dubbo1", new ArrayList<>()};
    when(invocation.getArguments()).thenReturn(fakeParams);
    String signature1 = helper.getSignature(url, invocation, secretKey, String.valueOf(System.currentTimeMillis()));
    assertNotEquals(signature, signature1);
}
/**
 * Returns the JDBC URL prefixes recognized for the Presto database type.
 *
 * @return the accepted URL prefixes, in match order
 */
@Override
public Collection<String> getJdbcUrlPrefixes() {
    return Arrays.asList("jdbc:presto:", "presto:");
}
// The SPI-loaded Presto database type must advertise both URL prefix forms.
@Test
void assertGetJdbcUrlPrefixes() {
    assertThat(TypedSPILoader.getService(DatabaseType.class, "Presto").getJdbcUrlPrefixes(), is(Arrays.asList("jdbc:presto:", "presto:")));
}
/**
 * Returns the properties snapshot of a workflow.
 * <p>
 * Only the latest-instance-run snapshot id is supported; any other id is
 * rejected until versioned snapshots are implemented.
 *
 * @param workflowId the workflow id
 * @param snapshotId the snapshot id (must be the latest-run marker)
 * @return the latest properties snapshot
 * @throws UnsupportedOperationException for any specific snapshot version
 */
public PropertiesSnapshot getWorkflowPropertiesSnapshot(String workflowId, String snapshotId) {
    // Guard clause: reject anything other than the latest-run marker up front.
    if (!Constants.LATEST_INSTANCE_RUN.equalsIgnoreCase(snapshotId)) {
        throw new UnsupportedOperationException("Specific snapshot version is not implemented.");
    }
    return getLatestPropertiesSnapshot(workflowId);
}
// Missing workflows and specific (non-"latest") snapshot versions must each fail
// with their distinct exception types.
@Test
public void testInvalidGetWorkflowPropertiesSnapshot() {
    AssertHelper.assertThrows(
        "Cannot get non-existing workflow's properties-snapshot",
        MaestroNotFoundException.class,
        "Cannot find workflow [" + TEST_WORKFLOW_ID1 + "]'s current properties-snapshot.",
        () -> workflowDao.getWorkflowPropertiesSnapshot(TEST_WORKFLOW_ID1, "latest"));
    AssertHelper.assertThrows(
        "Cannot get a specific version of snapshot",
        UnsupportedOperationException.class,
        "Specific snapshot version is not implemented.",
        () -> workflowDao.getWorkflowPropertiesSnapshot(TEST_WORKFLOW_ID1, "12345"));
}
/**
 * Copies a single position from {@code block} into {@code blockBuilder},
 * dispatching on the builder's concrete type so nested map/array/row values are
 * copied element-wise.
 *
 * @param block        the source block (dictionary blocks are unwrapped first)
 * @param position     the position to copy
 * @param blockBuilder the destination builder
 */
public static void writePositionToBlockBuilder(Block block, int position, BlockBuilder blockBuilder) {
    if (block instanceof DictionaryBlock) {
        // Resolve through the dictionary indirection before copying.
        position = ((DictionaryBlock) block).getId(position);
        block = ((DictionaryBlock) block).getDictionary();
    }
    if (blockBuilder instanceof MapBlockBuilder) {
        writePositionToMapBuilder(block, position, (MapBlockBuilder) blockBuilder);
    } else if (blockBuilder instanceof ArrayBlockBuilder) {
        writePositionToArrayBuilder(block, position, (ArrayBlockBuilder) blockBuilder);
    } else if (blockBuilder instanceof RowBlockBuilder) {
        writePositionToRowBuilder(block, position, (RowBlockBuilder) blockBuilder);
    } else {
        // Flat (primitive/varlen) builders support direct position copy.
        block.writePositionTo(position, blockBuilder);
    }
}
// Copying a row position (containing a nested map) via writePositionToBlockBuilder
// must reproduce the original block exactly.
@Test
public void testRowBlockBuilder() {
    RowType rowType = rowType(VARCHAR, BIGINT, TEST_MAP_TYPE);
    BlockBuilder blockBuilder = rowType.createBlockBuilder(null, 1);
    BlockBuilder rowBlockBuilder = blockBuilder.beginBlockEntry();
    VARCHAR.writeString(rowBlockBuilder, "TEST_ROW");
    BIGINT.writeLong(rowBlockBuilder, 10L);
    BlockBuilder mapBlockBuilder = rowBlockBuilder.beginBlockEntry();
    writeValuesToMapBuilder(mapBlockBuilder);
    rowBlockBuilder.closeEntry();
    Block expectedBlock = blockBuilder.closeEntry().build();
    // write values to a new block using BlockBuilderUtil
    BlockBuilder blockBuilder2 = rowType.createBlockBuilder(null, 1);
    writePositionToBlockBuilder(expectedBlock, 0, blockBuilder2);
    Block newBlock = blockBuilder2.build();
    assertEquals(newBlock, expectedBlock);
}
/**
 * Adds the given value/score pairs, skipping values already present.
 * Blocking wrapper around the async variant.
 *
 * @param objects map of value to score
 * @return the number of values actually added
 */
@Override
public int addAllIfAbsent(Map<V, Double> objects) {
    return get(addAllIfAbsentAsync(objects));
}
// addAllIfAbsent must insert only the missing members and leave existing scores unchanged.
@Test
public void testAddAllIfAbsent() {
    RScoredSortedSet<String> set = redisson.getScoredSortedSet("simple");
    set.add(10, "1981");
    set.add(11, "1984");
    Map<String, Double> map = new HashMap<>();
    map.put("1981", 111D);
    map.put("1982", 112D);
    map.put("1983", 113D);
    map.put("1984", 114D);
    assertThat(set.addAllIfAbsent(map)).isEqualTo(2);
    assertThat(set.getScore("1981")).isEqualTo(10);
    assertThat(set.getScore("1984")).isEqualTo(11);
    assertThat(set).contains("1981", "1982", "1983", "1984");
}
/**
 * Maps a Java type to its KSQL parameter type using the varargs type mapping.
 *
 * @param type the Java type of a varargs parameter
 * @return the corresponding {@link ParamType}
 */
public static ParamType getVarArgsSchemaFromType(final Type type) {
    return getSchemaFromType(type, VARARGS_JAVA_TO_ARG_TYPE);
}
// The boxed Boolean class must map to the BOOLEAN param type in varargs position.
@Test
public void shouldGetBooleanSchemaForBooleanClassVariadic() {
    assertThat( UdfUtil.getVarArgsSchemaFromType(Boolean.class), equalTo(ParamTypes.BOOLEAN) );
}
/**
 * Unsupported mutation on this immutable-backed set.
 *
 * @throws UnsupportedOperationException always, thrown by the underlying delegate
 */
@Override
public boolean add(E e) {
    // will throw UnsupportedOperationException; delegate anyway for testability
    return underlying().add(e);
}
// add(e) must delegate to the underlying persistent set even though it is unsupported.
@Test
public void testDelegationOfUnsupportedFunctionAdd() {
    new PCollectionsHashSetWrapperDelegationChecker<>()
        .defineMockConfigurationForUnsupportedFunction(mock -> mock.add(eq(this)))
        .defineWrapperUnsupportedFunctionInvocation(wrapper -> wrapper.add(this))
        .doUnsupportedFunctionDelegationCheck();
}
/**
 * Enqueues a job for background execution, letting the scheduler generate the id.
 *
 * @param job the job lambda to run
 * @return the id of the enqueued job
 */
public JobId enqueue(JobLambda job) {
    // null id: delegate generates one.
    return enqueue(null, job);
}
// Enqueuing a stream of jobs must invoke both the onCreating and onCreated client filters.
@Test
void onStreamOfJobsCreatingAndCreatedAreCalled() {
    when(storageProvider.save(anyList())).thenAnswer(invocation -> invocation.getArgument(0));
    final Stream<Integer> range = IntStream.range(0, 1).boxed();
    jobScheduler.enqueue(range, (i) -> testService.doWork(i));
    assertThat(jobClientLogFilter.onCreating).isTrue();
    assertThat(jobClientLogFilter.onCreated).isTrue();
}
/**
 * Decorates source text with HTML using the given decoration data; convenience
 * overload with no line-range restriction.
 *
 * @param text                 the raw source text
 * @param decorationDataHolder syntax-highlighting/decoration offsets
 * @return the decorated lines
 */
List<String> decorateTextWithHtml(String text, DecorationDataHolder decorationDataHolder) {
    return decorateTextWithHtml(text, decorationDataHolder, null, null);
}
// HTML markup inside highlighted comment text must be escaped while the
// highlighting span wrapping is applied per line.
@Test
public void should_escape_markup_chars() {
    String javadocWithHtml = "/**\n" + " * Provides a basic framework to sequentially read any kind of character stream in order to feed a generic OUTPUT.\n" + " * \n" + " * This framework can used for instance in order to :\n" + " * <ul>\n" + " * <li>Create a lexer in charge to generate a list of tokens from a character stream</li>\n" + " * <li>Create a source code syntax highligther in charge to decorate a source code with HTML tags</li>\n" + " * <li>Create a javadoc generator</li>\n" + " * <li>...</li>\n" + " * </ul>\n" + " */\n";
    DecorationDataHolder decorationData = new DecorationDataHolder();
    decorationData.loadSyntaxHighlightingData("0,453,cppd;");
    HtmlTextDecorator htmlTextDecorator = new HtmlTextDecorator();
    List<String> htmlOutput = htmlTextDecorator.decorateTextWithHtml(javadocWithHtml, decorationData);
    assertThat(htmlOutput).containsExactly(
        "<span class=\"cppd\">/**</span>",
        "<span class=\"cppd\"> * Provides a basic framework to sequentially read any kind of character stream in order to feed a generic OUTPUT.</span>",
        "<span class=\"cppd\"> * </span>",
        "<span class=\"cppd\"> * This framework can used for instance in order to :</span>",
        "<span class=\"cppd\"> * &lt;ul&gt;</span>",
        "<span class=\"cppd\"> * &lt;li&gt;Create a lexer in charge to generate a list of tokens from a character stream&lt;/li&gt;</span>",
        "<span class=\"cppd\"> * &lt;li&gt;Create a source code syntax highligther in charge to decorate a source code with HTML tags&lt;/li&gt;</span>",
        "<span class=\"cppd\"> * &lt;li&gt;Create a javadoc generator&lt;/li&gt;</span>",
        "<span class=\"cppd\"> * &lt;li&gt;...&lt;/li&gt;</span>",
        "<span class=\"cppd\"> * &lt;/ul&gt;</span>",
        "<span class=\"cppd\"> */</span>",
        "");
}
/**
 * Translates an API Gateway/ALB proxy event into an {@link HttpServletRequest},
 * normalizing the content-type charset and attaching Lambda/gateway context as
 * request attributes.
 *
 * @param request         the incoming proxy event
 * @param securityContext the JAX-RS security context for this request
 * @param lambdaContext   the Lambda execution context
 * @param config          container configuration (base path, default charset)
 * @return the populated servlet request
 * @throws InvalidRequestEventException if the event lacks an HTTP method or request context
 */
@Override
public HttpServletRequest readRequest(AwsProxyRequest request, SecurityContext securityContext, Context lambdaContext, ContainerConfig config)
        throws InvalidRequestEventException {
    // Expect the HTTP method and context to be populated. If they are not, we are handling an
    // unsupported event type.
    if (request.getHttpMethod() == null || request.getHttpMethod().equals("") || request.getRequestContext() == null) {
        throw new InvalidRequestEventException(INVALID_REQUEST_ERROR);
    }
    request.setPath(stripBasePath(request.getPath(), config));
    if (request.getMultiValueHeaders() != null && request.getMultiValueHeaders().getFirst(HttpHeaders.CONTENT_TYPE) != null) {
        String contentType = request.getMultiValueHeaders().getFirst(HttpHeaders.CONTENT_TYPE);
        // put single as we always expect to have one and only one content type in a request.
        request.getMultiValueHeaders().putSingle(HttpHeaders.CONTENT_TYPE, getContentTypeWithCharset(contentType, config));
    }
    AwsProxyHttpServletRequest servletRequest = new AwsProxyHttpServletRequest(request, lambdaContext, securityContext, config);
    servletRequest.setServletContext(servletContext);
    // Expose gateway/Lambda metadata to downstream handlers via request attributes.
    servletRequest.setAttribute(API_GATEWAY_CONTEXT_PROPERTY, request.getRequestContext());
    servletRequest.setAttribute(API_GATEWAY_STAGE_VARS_PROPERTY, request.getStageVariables());
    servletRequest.setAttribute(API_GATEWAY_EVENT_PROPERTY, request);
    servletRequest.setAttribute(ALB_CONTEXT_PROPERTY, request.getRequestContext().getElb());
    servletRequest.setAttribute(LAMBDA_CONTEXT_PROPERTY, lambdaContext);
    servletRequest.setAttribute(JAX_SECURITY_CONTEXT_PROPERTY, securityContext);
    return servletRequest;
}
// A content type without an explicit charset must be completed with the container's default charset.
@Test
void readRequest_contentCharset_setsDefaultCharsetWhenNotSpecified() {
    String requestCharset = "application/json";
    AwsProxyRequest request = new AwsProxyRequestBuilder(ENCODED_REQUEST_PATH, "GET").header(HttpHeaders.CONTENT_TYPE, requestCharset).build();
    try {
        HttpServletRequest servletRequest = reader.readRequest(request, null, null, ContainerConfig.defaultConfig());
        assertNotNull(servletRequest);
        assertNotNull(servletRequest.getHeader(HttpHeaders.CONTENT_TYPE));
        String contentAndCharset = requestCharset + "; charset=" + LambdaContainerHandler.getContainerConfig().getDefaultContentCharset();
        assertEquals(contentAndCharset, servletRequest.getHeader(HttpHeaders.CONTENT_TYPE));
        assertEquals(LambdaContainerHandler.getContainerConfig().getDefaultContentCharset(), servletRequest.getCharacterEncoding());
    } catch (InvalidRequestEventException e) {
        e.printStackTrace();
        fail("Could not read request");
    }
}
/**
 * Static factory for a {@link MemberVersion}; the all-zero triple maps to the
 * shared {@code UNKNOWN} sentinel instead of a fresh instance.
 *
 * @param major major version component
 * @param minor minor version component
 * @param patch patch version component
 * @return the corresponding version, or {@code MemberVersion.UNKNOWN} for 0.0.0
 */
public static MemberVersion of(int major, int minor, int patch) {
    final boolean isUnknown = major == 0 && minor == 0 && patch == 0;
    return isUnknown ? MemberVersion.UNKNOWN : new MemberVersion(major, minor, patch);
}
// Parsing a SNAPSHOT version string must yield the plain numeric version (suffix ignored).
@Test
public void testVersionOf_whenVersionStringIsSnapshot() {
    MemberVersion expected = MemberVersion.of(3, 8, 0);
    assertEquals(expected, MemberVersion.of(VERSION_3_8_SNAPSHOT_STRING));
}
/**
 * Reactive RENAME that also works across Redis Cluster slots.
 * <p>
 * Same-slot renames delegate to the plain RENAME command; cross-slot renames are
 * emulated with DUMP + RESTORE (carrying over the remaining TTL) followed by a
 * DEL of the source key.
 *
 * @param commands the rename commands to execute
 * @return per-command boolean responses
 */
@Override
public Flux<BooleanResponse<RenameCommand>> rename(Publisher<RenameCommand> commands) {
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Key must not be null!");
        Assert.notNull(command.getNewName(), "New name must not be null!");
        byte[] keyBuf = toByteArray(command.getKey());
        byte[] newKeyBuf = toByteArray(command.getNewName());
        // Same slot: native RENAME is valid, no emulation needed.
        if (executorService.getConnectionManager().calcSlot(keyBuf) == executorService.getConnectionManager().calcSlot(newKeyBuf)) {
            return super.rename(commands);
        }
        return read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.DUMP, keyBuf)
                .filter(Objects::nonNull)
                .zipWith(
                    Mono.defer(() -> pTtl(command.getKey())
                            .filter(Objects::nonNull)
                            // Negative TTL (no expiry) is clamped to 0 = persist on RESTORE.
                            .map(ttl -> Math.max(0, ttl))
                            .switchIfEmpty(Mono.just(0L))
                    )
                )
                .flatMap(valueAndTtl -> {
                    return write(newKeyBuf, StringCodec.INSTANCE, RedisCommands.RESTORE, newKeyBuf, valueAndTtl.getT2(), valueAndTtl.getT1());
                })
                .thenReturn(new BooleanResponse<>(command, true))
                // Only delete the source after the restore has succeeded.
                .doOnSuccess((ignored) -> del(command.getKey()));
    });
}
// Renaming a key (optionally with TTL) must move the value to the new key and
// preserve or clear the TTL accordingly.
@Test
public void testRename() {
    connection.stringCommands().set(originalKey, value).block();
    if (hasTtl) {
        connection.keyCommands().expire(originalKey, Duration.ofSeconds(1000)).block();
    }
    Integer originalSlot = getSlotForKey(originalKey);
    // Target key is generated on a chosen slot to exercise same-/cross-slot paths.
    newKey = getNewKeyForSlot(new String(originalKey.array()), getTargetSlot(originalSlot));
    Boolean response = connection.keyCommands().rename(originalKey, newKey).block();
    assertThat(response).isTrue();
    final ByteBuffer newKeyValue = connection.stringCommands().get(newKey).block();
    assertThat(newKeyValue).isEqualTo(value);
    if (hasTtl) {
        assertThat(connection.keyCommands().ttl(newKey).block()).isGreaterThan(0);
    } else {
        assertThat(connection.keyCommands().ttl(newKey).block()).isEqualTo(-1);
    }
}
/**
 * Disabled OSGi framework: listing bundles is not available.
 *
 * @throws RuntimeException always, via {@code newException()}
 */
@Override
public List<Bundle> bundles() {
    throw newException();
}
// bundles() on the disabled framework must fail unconditionally.
@Test
void require_that_bundles_throws_exception() {
    assertThrows(RuntimeException.class, () -> { new DisableOsgiFramework().bundles(); });
}
/**
 * Wraps a finished buffer into a NettyPayload (tagged with the running buffer
 * index and subpartition id) and enqueues it for the netty writer.
 *
 * @param buffer the finished buffer to forward
 */
void addFinishedBuffer(Buffer buffer) {
    NettyPayload toAddBuffer = NettyPayload.newBuffer(buffer, finishedBufferIndex, subpartitionId);
    addFinishedBuffer(toAddBuffer);
}
// A finished buffer added after connection establishment must be forwarded
// unchanged to the netty connection writer.
@Test
void testAddFinishedBuffer() {
    MemoryTierSubpartitionProducerAgent subpartitionProducerAgent = createSubpartitionProducerAgent();
    AtomicReference<NettyPayload> received = new AtomicReference<>();
    TestingNettyConnectionWriter connectionWriter =
        new TestingNettyConnectionWriter.Builder()
            .setWriteBufferFunction(
                buffer -> {
                    received.set(buffer);
                    return null;
                })
            .build();
    subpartitionProducerAgent.connectionEstablished(connectionWriter);
    Buffer sentBuffer = BufferBuilderTestUtils.buildSomeBuffer();
    subpartitionProducerAgent.addFinishedBuffer(sentBuffer);
    Optional<Buffer> receivedBuffer = received.get().getBuffer();
    assertThat(receivedBuffer).isPresent();
    assertThat(receivedBuffer.get()).isEqualTo(sentBuffer);
}
/**
 * Prints version information: the JBang version (if resolvable), the Camel JBang
 * catalog version, and any relevant user-configured overrides.
 *
 * @return 0 (success) in all cases
 */
@Override
public Integer doCall() throws Exception {
    String jv = VersionHelper.getJBangVersion();
    if (jv != null) {
        printer().println("JBang version: " + jv);
    }
    CamelCatalog catalog = new DefaultCamelCatalog();
    String v = catalog.getCatalogVersion();
    printer().println("Camel JBang version: " + v);
    CommandLineHelper.loadProperties(properties -> {
        String uv = properties.getProperty("camel-version");
        String kv = properties.getProperty("kamelets-version");
        String repos = properties.getProperty("repos");
        String runtime = properties.getProperty("runtime");
        // Only print the "User configuration" section when at least one override is set.
        if (uv != null || repos != null || runtime != null) {
            printer().println("User configuration:");
            if (uv != null) {
                printer().println("    camel-version = " + uv);
            }
            if (kv != null) {
                printer().println("    kamelets-version = " + kv);
            }
            if (runtime != null) {
                printer().println("    runtime = " + runtime);
            }
            if (repos != null) {
                printer().println("    repos = " + repos);
            }
        }
    });
    return 0;
}
// User-config overrides (camel-version, kamelets-version) must be printed after the
// version lines; unrelated properties (foo) must be ignored.
@Test
public void shouldPrintUserProperties() throws Exception {
    UserConfigHelper.createUserConfig("""
            camel-version=latest
            foo=bar
            kamelets-version=greatest
            """);
    createJBangVersionFile("0.101");
    VersionGet command = createCommand();
    command.doCall();
    List<String> lines = printer.getLines();
    Assertions.assertEquals(5, lines.size());
    Assertions.assertTrue(lines.get(0).startsWith("JBang version:"));
    Assertions.assertTrue(lines.get(1).startsWith("Camel JBang version:"));
    Assertions.assertEquals("User configuration:", lines.get(2));
    Assertions.assertEquals("camel-version = latest", lines.get(3));
    Assertions.assertEquals("kamelets-version = greatest", lines.get(4));
}
/**
 * Encrypts {@code content} when {@code dataId} carries a cipher prefix declaring
 * an encryption algorithm.
 *
 * @param dataId  config data id, possibly prefixed with a cipher/algorithm marker
 * @param content plain-text configuration content
 * @return pair of (encrypted secret key, encrypted content); when no cipher applies
 *         or the algorithm is unknown, the key is empty and the content is returned as-is
 */
public static Pair<String, String> encryptHandler(String dataId, String content) {
    if (!checkCipher(dataId)) {
        return Pair.with("", content);
    }
    Optional<EncryptionPluginService> service = parseAlgorithmName(dataId)
            .flatMap(EncryptionPluginManager.instance()::findEncryptionService);
    if (!service.isPresent()) {
        LOGGER.warn("[EncryptionHandler] [encryptHandler] No encryption program with the corresponding name found");
        return Pair.with("", content);
    }
    EncryptionPluginService plugin = service.get();
    String secretKey = plugin.generateSecretKey();
    String encryptedContent = plugin.encrypt(secretKey, content);
    return Pair.with(plugin.encryptSecretKey(secretKey), encryptedContent);
}
// When the data id names an algorithm with no registered plugin, the content must
// come back unencrypted with an empty secret key.
@Test
void testUnknownAlgorithmNameEnc() {
    String dataId = "cipher-mySM4-application";
    String content = "content";
    Pair<String, String> pair = EncryptionHandler.encryptHandler(dataId, content);
    assertNotNull(pair);
    assertEquals(content, pair.getSecond(), "should return original content if algorithm is not defined.");
}
/**
 * Deletes the member level with the given id.
 *
 * @param id level id; validation fails if the level does not exist
 *           or still has users assigned to it
 */
@Override
public void deleteLevel(Long id) {
    // Ensure the level exists
    validateLevelExists(id);
    // Ensure no users are still assigned to this level
    validateLevelHasUser(id);
    // Perform the deletion
    memberLevelMapper.deleteById(id);
}
// Deleting a non-existent level must raise LEVEL_NOT_EXISTS.
@Test
public void testDeleteLevel_notExists() {
    // Prepare a random id that was never persisted
    Long id = randomLongId();
    // Invoke and assert the service exception
    assertServiceException(() -> levelService.deleteLevel(id), LEVEL_NOT_EXISTS);
}
/**
 * Fits an ordinary least squares model with default training properties.
 * Delegates to {@code fit(Formula, DataFrame, Properties)} with an empty
 * {@link Properties} instance.
 *
 * @param formula the model formula
 * @param data    the training data frame
 * @return the fitted linear model
 */
public static LinearModel fit(Formula formula, DataFrame data) {
    return fit(formula, data, new Properties());
}
// Fits OLS on the CPU dataset and checks the 10-fold cross-validated RMSE
// against a pinned value (seeded RNG for reproducibility).
@Test
public void testCPU() {
    System.out.println("CPU");
    MathEx.setSeed(19650218); // to get repeatable results.
    LinearModel model = OLS.fit(CPU.formula, CPU.data);
    System.out.println(model);
    RegressionValidations<LinearModel> result = CrossValidation.regression(10, CPU.formula, CPU.data, OLS::fit);
    System.out.println(result);
    assertEquals(51.0009, result.avg.rmse, 1E-4);
}
/**
 * Adds a JVM option after trimming and validation.
 *
 * @param str the option; must be non-null, non-blank and start with '-'
 * @return this (for chaining)
 * @throws IllegalArgumentException if the trimmed option is empty or does not start with '-'
 */
public T add(String str) {
    requireNonNull(str, JVM_OPTION_NOT_NULL_ERROR_MESSAGE);
    String option = str.trim();
    if (isInvalidOption(option)) {
        throw new IllegalArgumentException("a JVM option can't be empty and must start with '-'");
    }
    // Guard against silently overriding options that are mandatory for the process.
    checkMandatoryOptionOverwrite(option);
    options.add(option);
    return castThis();
}
// An option not starting with '-' must be rejected with an IllegalArgumentException.
@Test
public void add_throws_IAE_if_argument_does_not_start_with_dash() {
    expectJvmOptionNotEmptyAndStartByDashIAE(() -> underTest.add(randomAlphanumeric(3)));
}
/**
 * Configures the Tomcat instance: registers the static "/deploy" directory and the
 * main webapp context, then copies every raw property into the context parameters.
 *
 * @param tomcat the embedded Tomcat instance
 * @param props  server properties (context path, data dir, webapp dir, raw params)
 * @return the configured webapp context
 */
public StandardContext configure(Tomcat tomcat, Props props) {
    addStaticDir(tomcat, getContextPath(props) + "/deploy",
            new File(props.nonNullValueAsFile(PATH_DATA.getKey()), WEB_DEPLOY_PATH_RELATIVE_TO_DATA_DIR));
    StandardContext webapp = addContext(tomcat, getContextPath(props), webappDir(props));
    // Expose every raw property as a servlet context parameter.
    props.rawProperties().forEach((key, value) -> webapp.addParameter(key.toString(), value.toString()));
    return webapp;
}
// A context path without a leading slash must be rejected with a MessageException.
@Test
public void context_path_must_start_with_slash() {
    props.setProperty("sonar.web.context", "foo");
    assertThatThrownBy(() -> underTest.configure(tomcat, new Props(props)))
            .isInstanceOf(MessageException.class)
            .hasMessageContaining("Value of 'sonar.web.context' must start with a forward slash: 'foo'");
}
/**
 * Submits a JobGraph to the dispatcher.
 *
 * <p>Rejects the submission when the job already reached a globally-terminal state in a
 * previous execution (duplicate submission), when a runner for the same job id is already
 * registered or awaiting termination, or when only part of the vertices have resources
 * configured. Otherwise delegates to the internal submission path.
 *
 * @param jobGraph the job to submit
 * @param timeout RPC timeout (currently unused in this method body)
 * @return acknowledge future; completes exceptionally with
 *         {@code DuplicateJobSubmissionException} or {@code JobSubmissionException} on rejection
 */
@Override
public CompletableFuture<Acknowledge> submitJob(JobGraph jobGraph, Time timeout) {
    final JobID jobID = jobGraph.getJobID();
    // Log with the job id attached to the MDC so log lines are attributable to this job.
    try (MdcCloseable ignored = MdcUtils.withContext(MdcUtils.asContextData(jobID))) {
        log.info("Received JobGraph submission '{}' ({}).", jobGraph.getName(), jobID);
    }
    // The terminal-state check is asynchronous; the decision runs on the job's main thread
    // executor to keep dispatcher state access single-threaded.
    return isInGloballyTerminalState(jobID)
            .thenComposeAsync(
                    isTerminated -> {
                        if (isTerminated) {
                            log.warn(
                                    "Ignoring JobGraph submission '{}' ({}) because the job already "
                                            + "reached a globally-terminal state (i.e. {}) in a "
                                            + "previous execution.",
                                    jobGraph.getName(),
                                    jobID,
                                    Arrays.stream(JobStatus.values())
                                            .filter(JobStatus::isGloballyTerminalState)
                                            .map(JobStatus::name)
                                            .collect(Collectors.joining(", ")));
                            return FutureUtils.completedExceptionally(
                                    DuplicateJobSubmissionException.ofGloballyTerminated(
                                            jobID));
                        } else if (jobManagerRunnerRegistry.isRegistered(jobID)
                                || submittedAndWaitingTerminationJobIDs.contains(jobID)) {
                            // job with the given jobID is not terminated, yet
                            return FutureUtils.completedExceptionally(
                                    DuplicateJobSubmissionException.of(jobID));
                        } else if (isPartialResourceConfigured(jobGraph)) {
                            return FutureUtils.completedExceptionally(
                                    new JobSubmissionException(
                                            jobID,
                                            "Currently jobs is not supported if parts of the vertices "
                                                    + "have resources configured. The limitation will be "
                                                    + "removed in future versions."));
                        } else {
                            return internalSubmitJob(jobGraph);
                        }
                    },
                    getMainThreadExecutor(jobID));
}
// Submitting a job whose id is already registered as a recovered/running job must fail
// with a non-globally-terminated DuplicateJobSubmissionException.
@Test
public void testDuplicateJobSubmissionWithRunningJobId() throws Exception {
    dispatcher =
            createTestingDispatcherBuilder()
                    .setJobManagerRunnerFactory(new ExpectedJobIdJobManagerRunnerFactory(jobId))
                    .setRecoveredJobs(Collections.singleton(jobGraph))
                    .build(rpcService);
    dispatcher.start();
    final DispatcherGateway dispatcherGateway = dispatcher.getSelfGateway(DispatcherGateway.class);
    final CompletableFuture<Acknowledge> submitFuture = dispatcherGateway.submitJob(jobGraph, TIMEOUT);
    final ExecutionException executionException =
            assertThrows(ExecutionException.class, submitFuture::get);
    assertTrue(executionException.getCause() instanceof DuplicateJobSubmissionException);
    final DuplicateJobSubmissionException duplicateException =
            (DuplicateJobSubmissionException) executionException.getCause();
    assertFalse(duplicateException.isGloballyTerminated());
}
@SneakyThrows // compute() doesn't throw checked exceptions public static String sha512_256Hex(String data) { return sha512DigestCache.get(data, () -> compute(data, DigestObjectPools.SHA_512_256)); }
// Cached digest must match the reference implementation from commons-codec.
@Test
public void shouldComputeForAGivenStringUsingSHA_512_256() {
    String fingerprint = "Some String";
    String digest = sha512_256Hex(fingerprint);
    assertEquals(DigestUtils.sha512_256Hex(fingerprint), digest);
}
/**
 * Escapes CR and LF in the event's formatted message so a log entry always
 * occupies a single physical line (prevents log forging / split entries).
 *
 * @param event the logging event
 * @return the escaped message, or {@code null} when the event has no formatted message
 */
public String convert(ILoggingEvent event) {
    String message = event.getFormattedMessage();
    if (message == null) {
        return null;
    }
    // Replace literal carriage returns first, then line feeds, with their escaped forms.
    String escaped = CR_PATTERN.matcher(message).replaceAll("\\\\r");
    return LF_PATTERN.matcher(escaped).replaceAll("\\\\n");
}
// A null formatted message must convert to null, not an empty string or NPE.
@Test
public void convert_null_message() {
    ILoggingEvent event = createILoggingEvent(null);
    assertThat(underTest.convert(event)).isNull();
}
/**
 * Loads all entries into the wrapped map, optionally replacing values that
 * already exist in memory. Pure delegation to the underlying map.
 */
@Override
public void loadAll(boolean replaceExistingValues) {
    map.loadAll(replaceExistingValues);
}
// The listener-taking loadAll overload is unsupported by this adapter and must throw.
@Test(expected = MethodNotAvailableException.class)
public void testLoadAllWithListener() {
    adapter.loadAll(Collections.emptySet(), true, null);
}
/**
 * Renders this aggregation as its search-syntax literal,
 * e.g. {@code percentile(field,25.0)}.
 */
@Override
public String literal() {
    return String.format("%s(%s,%s)", type(), field(), percentile());
}
// The literal must render as "percentile(<field>,<percentile>)" for any field/percentile pair.
@Test
public void testLiteral() {
    final Percentile percentile1 = Percentile.builder()
            .percentile(25.0)
            .field("cloverfield")
            .id("dead-beef")
            .build();
    assertThat(percentile1.literal()).isEqualTo("percentile(cloverfield,25.0)");
    final Percentile percentile2 = Percentile.builder()
            .percentile(99.0)
            .field("nostromo")
            .id("dead-beef")
            .build();
    assertThat(percentile2.literal()).isEqualTo("percentile(nostromo,99.0)");
}
/**
 * Atomically increments and returns the ticket-id counter stored in MongoDB.
 *
 * <p>NOTE(review): {@code findOneAndUpdate} returns {@code null} when no document
 * with {@code _id == TICKET_ID} exists, which would cause an NPE here — presumably
 * the counter document is seeded elsewhere; verify against initialization code.
 *
 * @return the next sequence value
 */
public int getNextId() {
    var find = new Document("_id", TICKET_ID);
    var increase = new Document("seq", 1);
    // $inc atomically bumps the counter server-side, avoiding read-modify-write races.
    var update = new Document("$inc", increase);
    var result = countersCollection.findOneAndUpdate(find, update);
    return result.getInteger("seq");
}
// Consecutive calls must return a strictly increasing sequence starting at 1.
@Test
void testNextId() {
    assertEquals(1, repository.getNextId());
    assertEquals(2, repository.getNextId());
    assertEquals(3, repository.getNextId());
}
/**
 * Decodes a mppdb logical-replication payload into a WAL event and tags it with
 * the originating log sequence number.
 *
 * @param data raw replication payload (consumed fully; expected UTF-8 text)
 * @param logSequenceNumber LSN to attach to the decoded event
 * @return the decoded WAL event
 */
@Override
public AbstractWALEvent decode(final ByteBuffer data, final BaseLogSequenceNumber logSequenceNumber) {
    byte[] raw = new byte[data.remaining()];
    data.get(raw);
    String payload = new String(raw, StandardCharsets.UTF_8);
    // Transaction-aware decoding is optional; both paths yield a WAL event.
    AbstractWALEvent result = decodeWithTX ? decodeDataWithTX(payload) : decodeDataIgnoreTX(payload);
    result.setLogSequenceNumber(logSequenceNumber);
    return result;
}
// Decoding a DELETE MppTableData payload must yield a DeleteRowEvent carrying the
// table name, LSN, and the old primary-key values in order.
@Test
void assertDecodeDeleteRowEvent() {
    MppTableData tableData = new MppTableData();
    tableData.setTableName("public.test");
    tableData.setOpType("DELETE");
    String[] deleteTypes = new String[]{"tinyint", "smallint", "integer", "binary_integer", "bigint"};
    String[] deleteValues = new String[]{"46", "30000", "2147483645", "2147483646", "9223372036854775806"};
    tableData.setOldKeysType(deleteTypes);
    tableData.setOldKeysName(IntStream.range(0, deleteTypes.length).mapToObj(idx -> "data" + idx).toArray(String[]::new));
    tableData.setOldKeysVal(deleteValues);
    ByteBuffer data = ByteBuffer.wrap(JsonUtils.toJsonString(tableData).getBytes());
    DeleteRowEvent actual = (DeleteRowEvent) new MppdbDecodingPlugin(null, false, false).decode(data, logSequenceNumber);
    assertThat(actual.getLogSequenceNumber(), is(logSequenceNumber));
    assertThat(actual.getTableName(), is("test"));
    IntStream.range(0, deleteTypes.length).forEach(each -> assertThat(actual.getPrimaryKeys().get(each).toString(), is(deleteValues[each])));
}
/**
 * Starts the consumer: creates the Azure ServiceBus processor client wired to the
 * message and error callbacks, then begins processing.
 *
 * @throws Exception if the superclass start or client creation fails
 */
@Override
protected void doStart() throws Exception {
    super.doStart();
    LOG.debug("Creating connection to Azure ServiceBus");
    client = getEndpoint().getServiceBusClientFactory().createServiceBusProcessorClient(getConfiguration(),
            this::processMessage, this::processError);
    client.start();
}
// A client-level error delivered to the error callback must not reach the exchange
// processor — the consumer swallows it via its error handling path.
@Test
void consumerHandlesClientError() throws Exception {
    try (ServiceBusConsumer consumer = new ServiceBusConsumer(endpoint, processor)) {
        consumer.doStart();
        verify(client).start();
        verify(clientFactory).createServiceBusProcessorClient(any(), any(), any());
        ServiceBusErrorContext errorContext = mock();
        when(errorContext.getErrorSource()).thenReturn(ServiceBusErrorSource.UNKNOWN);
        when(errorContext.getException()).thenReturn(new Exception("Test exception"));
        // Drive the captured error callback directly with the mocked context.
        processErrorCaptor.getValue().accept(errorContext);
        verifyNoInteractions(processor);
    }
}
/**
 * Builds the audit-log context line for a batch user event: joins all affected
 * user names with commas and appends the lower-cased event type.
 *
 * @return e.g. {@code "the user[alice,bob] is delete"}
 */
@Override
public String buildContext() {
    final String userNames = ((Collection<?>) getSource()).stream()
            .map(DashboardUserDO.class::cast)
            .map(DashboardUserDO::getUserName)
            .collect(Collectors.joining(","));
    return String.format("the user[%s] is %s", userNames, StringUtils.lowerCase(getType().getType().toString()));
}
// The batch-delete event context must list both user names comma-joined with the
// lower-cased USER_DELETE type.
@Test
public void batchUserDeletedContextTest() {
    BatchUserDeletedEvent batchUserDeletedEvent = new BatchUserDeletedEvent(Arrays.asList(one, two), "test-operator");
    String context = String.format("the user[%s] is %s",
            one.getUserName() + "," + two.getUserName(),
            StringUtils.lowerCase(EventTypeEnum.USER_DELETE.getType().toString()));
    assertEquals(context, batchUserDeletedEvent.buildContext());
}
/**
 * Returns the median of the given duration samples, or {@code 0.0} for an empty list.
 *
 * <p>Fix: the previous implementation sorted the caller's list in place
 * ({@code Collections.sort(durationEntries)}), mutating the argument as a hidden
 * side effect (and throwing on unmodifiable lists). We now sort a defensive copy.
 *
 * @param durationEntries duration samples; left unmodified
 * @return the median (average of the two middle values for even-sized input)
 */
Double calculateMedian(List<Double> durationEntries) {
    if (durationEntries.isEmpty()) {
        return 0.0;
    }
    // Sort a copy so the caller's list is not reordered as a side effect.
    List<Double> sorted = durationEntries.stream().sorted().collect(java.util.stream.Collectors.toList());
    int middle = sorted.size() / 2;
    if (sorted.size() % 2 == 1) {
        return sorted.get(middle);
    }
    double total = sorted.get(middle - 1) + sorted.get(middle);
    return total / 2;
}
// Even-sized input: the median is the average of the two middle values (3 and 5 -> 4).
@Test
void calculateMedianOfEvenNumberOfEntries() {
    OutputStream out = new ByteArrayOutputStream();
    UsageFormatter usageFormatter = new UsageFormatter(out);
    Double result = usageFormatter.calculateMedian(
            asList(1.0, 3.0, 10.0, 5.0));
    assertThat(result, is(closeTo(4.0, EPSILON)));
}
@Override public double calcEdgeWeight(EdgeIteratorState edgeState, boolean reverse) { double priority = edgeToPriorityMapping.get(edgeState, reverse); if (priority == 0) return Double.POSITIVE_INFINITY; final double distance = edgeState.getDistance(); double seconds = calcSeconds(distance, edgeState, reverse); if (Double.isInfinite(seconds)) return Double.POSITIVE_INFINITY; // add penalty at start/stop/via points if (edgeState.get(EdgeIteratorState.UNFAVORED_EDGE)) seconds += headingPenaltySeconds; double distanceCosts = distance * distanceInfluence; if (Double.isInfinite(distanceCosts)) return Double.POSITIVE_INFINITY; return seconds / priority + distanceCosts; }
// An edge with speed 0 must be infinitely expensive, even when the distance is also 0
// (0/0 would be NaN — the weighting must still return infinity, not NaN).
@Test
public void testSpeed0() {
    EdgeIteratorState edge = graph.edge(0, 1).setDistance(10);
    CustomModel customModel = createSpeedCustomModel(avSpeedEnc);
    Weighting weighting = createWeighting(customModel);
    edge.set(avSpeedEnc, 0);
    assertEquals(1.0 / 0, weighting.calcEdgeWeight(edge, false), 1e-8);
    // 0 / 0 returns NaN but calcWeight should not return NaN!
    edge.setDistance(0);
    assertEquals(1.0 / 0, weighting.calcEdgeWeight(edge, false), 1e-8);
}
/**
 * Returns the more severe of the two health states. When both carry the same
 * severity level, the other state wins (i.e. the most recent one is kept).
 */
public ServerHealthState trump(ServerHealthState otherServerHealthState) {
    if (healthStateLevel.compareTo(otherServerHealthState.healthStateLevel) > 0) {
        return this;
    }
    return otherServerHealthState;
}
// ERROR severity must win over WARNING when trumping.
@Test
public void shouldtNotTrumpErrorIfCurrentIsWarning() {
    assertThat(ERROR_SERVER_HEALTH_STATE.trump(WARNING_SERVER_HEALTH_STATE), is(ERROR_SERVER_HEALTH_STATE));
}
/**
 * Executes a Telegram Bot API request synchronously by delegating to the
 * underlying API transport.
 *
 * @param request the request to send
 * @return the typed API response
 */
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) {
    return api.send(request);
}
// Integration test: setting a (unique, timestamped) chat description must succeed.
@Test
public void setChatDescription() {
    BaseResponse response = bot.execute(new SetChatDescription(groupId, "New desc " + System.currentTimeMillis()));
    assertTrue(response.isOk());
}
/**
 * Cancelling the dialog dismisses the keyboard via the configured action.
 */
@Override
public void onCancel() {
    mKeyboardDismissAction.run();
}
// onCancel must run the dismiss action exactly once and touch nothing else.
@Test
public void testOnCancel() {
    mUnderTest.onCancel();
    Mockito.verify(mMockKeyboardDismissAction).run();
    Mockito.verifyNoMoreInteractions(mMockKeyboardDismissAction);
    Mockito.verifyZeroInteractions(mMockParentListener);
}
/**
 * Builds the invocation argument array from a JSON body: each named parameter is
 * looked up in the parsed body and, for primitive parameter types, converted via
 * the registered primitive conversion function.
 *
 * @param paramTypes declared parameter types, positionally aligned with {@code paramNames}
 * @param paramNames declared parameter names (keys into the JSON body)
 * @param body       JSON request body
 * @return arguments in declaration order
 */
public static Object[] getParamArray(final Class<?>[] paramTypes, final String[] paramNames, final String body) {
    Map<String, Object> bodyMap = GsonUtils.getInstance().convertToMap(body);
    ParamCheckUtils.checkParamsLength(bodyMap.size(), paramNames.length);
    Object[] params = new Object[paramNames.length];
    for (int index = 0; index < paramNames.length; index++) {
        Object rawValue = bodyMap.get(paramNames[index]);
        String typeName = paramTypes[index].getName();
        // Primitive types need explicit coercion from the generic JSON value.
        params[index] = PRIMITIVE_TYPE.containsKey(typeName)
                ? PRIMITIVE_TYPE.get(typeName).getFunc().apply(rawValue)
                : rawValue;
    }
    return params;
}
// Each primitive parameter type must be coerced from the JSON value to its boxed form.
@Test
public void testGetParamArray() {
    assertArrayEquals(
            new Object[]{11, Double.valueOf("1.321321312"), Long.valueOf("131231312"), Short.valueOf("11"),
                Byte.valueOf("0"), false, 'a', 1.321321312F},
            PrxInfoUtil.getParamArray(
                    new Class<?>[]{int.class, double.class, long.class, short.class, byte.class, boolean.class, char.class, float.class},
                    new String[]{"int", "double", "long", "short", "byte", "boolean", "char", "float"},
                    "{\"int\":11,\"double\":1.321321312,\"long\":131231312,\"short\":11,\"byte\":0,\"boolean\":false,\"char\":'a',\"float\":1.321321312}"));
}
/**
 * Attempts to acquire the distributed lock identified by the definition's key,
 * waiting up to the given timeout.
 *
 * @param lockDefinition global lock definition supplying the lock key
 * @param timeoutMillis  maximum time to wait, in milliseconds
 * @return {@code true} if the lock was acquired within the timeout
 */
@Override
public boolean tryLock(final GlobalLockDefinition lockDefinition, final long timeoutMillis) {
    return repository.getDistributedLockHolder().getDistributedLock(lockDefinition.getLockKey()).tryLock(timeoutMillis);
}
// tryLock must delegate to the repository's distributed lock keyed by the lock path.
@Test
void assertTryLock() {
    when(repository.getDistributedLockHolder().getDistributedLock("/lock/exclusive/locks/foo_lock").tryLock(1000L)).thenReturn(true);
    GlobalLockDefinition lockDefinition = new GlobalLockDefinition("foo_lock");
    assertTrue(new GlobalLockPersistService(repository).tryLock(lockDefinition, 1000L));
}
/**
 * Sets the metric description after length validation (see
 * {@code checkMetricDescription}); {@code null} is accepted.
 *
 * @return this (for chaining)
 */
public MetricDto setDescription(@Nullable String description) {
    this.description = checkMetricDescription(description);
    return this;
}
// Descriptions over 255 characters must be rejected with a descriptive IAE.
@Test
void fail_if_description_longer_than_255_characters() {
    String a256 = repeat("a", 256);
    assertThatThrownBy(() -> underTest.setDescription(a256))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessage("Metric description length (256) is longer than the maximum authorized (255). '" + a256 + "' was provided.");
}
/**
 * Looks up a message annotation by key on the given AMQP message.
 *
 * @param key     annotation name (converted to an AMQP {@code Symbol})
 * @param message the message, may be {@code null}
 * @return the annotation value, or {@code null} when the message is null,
 *         has no annotations, or the key is absent
 */
public static Object getMessageAnnotation(String key, Message message) {
    if (message == null || message.getMessageAnnotations() == null) {
        return null;
    }
    Map<Symbol, Object> annotations = message.getMessageAnnotations().getValue();
    return annotations.get(AmqpMessageSupport.getSymbol(key));
}
// A message without an annotations map must yield null, not throw.
@Test
public void testGetMessageAnnotationWhenMessageHasNoAnnotationsMap() {
    Message message = Proton.message();
    assertNull(AmqpMessageSupport.getMessageAnnotation("x-opt-test", message));
}
/**
 * Invokes the user's acknowledgement-commit callback once per partition's
 * acknowledgements. Exceptions thrown by the callback are collected (not
 * propagated immediately) so every partition is still delivered; the first
 * collected exception is rethrown, wrapped as a KafkaException, at the end.
 */
void onComplete(List<Map<TopicIdPartition, Acknowledgements>> acknowledgementsMapList) {
    final ArrayList<Throwable> exceptions = new ArrayList<>();
    acknowledgementsMapList.forEach(acknowledgementsMap -> acknowledgementsMap.forEach((partition, acknowledgements) -> {
        Exception exception = null;
        if (acknowledgements.getAcknowledgeErrorCode() != null) {
            exception = acknowledgements.getAcknowledgeErrorCode().exception();
        }
        Set<Long> offsets = acknowledgements.getAcknowledgementsTypeMap().keySet();
        // Hand the callback an unmodifiable view so it cannot mutate internal state.
        Set<Long> offsetsCopy = Collections.unmodifiableSet(offsets);
        // Flag that we are inside user code; always cleared in the finally block.
        enteredCallback = true;
        try {
            acknowledgementCommitCallback.onComplete(Collections.singletonMap(partition, offsetsCopy), exception);
        } catch (Throwable e) {
            LOG.error("Exception thrown by acknowledgement commit callback", e);
            exceptions.add(e);
        } finally {
            enteredCallback = false;
        }
    }));
    if (!exceptions.isEmpty()) {
        throw ConsumerUtils.maybeWrapAsKafkaException(exceptions.get(0),
                "Exception thrown by acknowledgement commit callback");
    }
}
// When acknowledgements carry TOPIC_AUTHORIZATION_FAILED, every acknowledged offset's
// partition must see a TopicAuthorizationException in the callback.
@Test
public void testUnauthorizedTopic() throws Exception {
    Acknowledgements acknowledgements = Acknowledgements.empty();
    acknowledgements.add(0L, AcknowledgeType.ACCEPT);
    acknowledgements.add(1L, AcknowledgeType.REJECT);
    acknowledgements.setAcknowledgeErrorCode(Errors.TOPIC_AUTHORIZATION_FAILED);
    acknowledgementsMap.put(tip0, acknowledgements);
    acknowledgementCommitCallbackHandler.onComplete(Collections.singletonList(acknowledgementsMap));
    TestUtils.retryOnExceptionWithTimeout(() -> {
        assertInstanceOf(TopicAuthorizationException.class, exceptionMap.get(tpo00));
        assertInstanceOf(TopicAuthorizationException.class, exceptionMap.get(tpo01));
    });
}
/**
 * Generates a random identifier: 32 bytes from the secure random source,
 * encoded as a string via the configured encoder.
 */
public static String generateID() {
    final byte[] randomBytes = new byte[32];
    sr.nextBytes(randomBytes);
    return encoder.encodeToString(randomBytes);
}
// 100 consecutive ids must all be distinct (collision smoke test).
@Test
void generate_unique() {
    var previous = new HashSet<>();
    for (int i = 0; i < 100; i++) {
        var next = IdGenerator.generateID();
        if (previous.contains(next)) {
            fail();
        }
        previous.add(next);
    }
}
/**
 * Subscribes to a service: schedules the periodic update task, then serves cached
 * service info if present and already subscribed; otherwise performs a gRPC
 * subscribe. The result is always pushed through the holder so listeners fire.
 *
 * @return the (possibly freshly fetched) service info
 * @throws NacosException on gRPC subscription failure
 */
@Override
public ServiceInfo subscribe(String serviceName, String groupName, String clusters) throws NacosException {
    NAMING_LOGGER.info("[SUBSCRIBE-SERVICE] service:{}, group:{}, clusters:{} ", serviceName, groupName, clusters);
    String serviceNameWithGroup = NamingUtils.getGroupedName(serviceName, groupName);
    String serviceKey = ServiceInfo.getKey(serviceNameWithGroup, clusters);
    // Ensure a background refresh task exists for this service before reading the cache.
    serviceInfoUpdateService.scheduleUpdateIfAbsent(serviceName, groupName, clusters);
    ServiceInfo result = serviceInfoHolder.getServiceInfoMap().get(serviceKey);
    // Cache miss or not yet subscribed server-side: fall back to a gRPC subscribe.
    if (null == result || !isSubscribed(serviceName, groupName, clusters)) {
        result = grpcClientProxy.subscribe(serviceName, groupName, clusters);
    }
    serviceInfoHolder.processServiceInfo(result);
    return result;
}
// On a cache miss the delegate must subscribe via gRPC exactly once and process
// the returned service info through the holder.
@Test
void testSubscribe() throws NacosException {
    String serviceName = "service1";
    String groupName = "group1";
    String clusters = "cluster1";
    ServiceInfo info = new ServiceInfo();
    info.setName(serviceName);
    info.setGroupName(groupName);
    info.setClusters(clusters);
    when(mockGrpcClient.subscribe(serviceName, groupName, clusters)).thenReturn(info);
    ServiceInfo actual = delegate.subscribe(serviceName, groupName, clusters);
    assertEquals(info, actual);
    verify(mockGrpcClient, times(1)).subscribe(serviceName, groupName, clusters);
    verify(holder, times(1)).processServiceInfo(info);
}
@Override public int getPriorityLevel(Schedulable obj) { // First get the identity String identity = getIdentity(obj); // highest priority users may have a negative priority but their // calls will be priority 0. return Math.max(0, cachedOrComputedPriorityLevel(identity)); }
// With no prior requests recorded, a caller must start at priority level 0.
@Test
public void testUsingWeightedTimeCostProviderNoRequests() {
    scheduler = getSchedulerWithWeightedTimeCostProvider(2, "ipc.18");
    assertEquals(0, scheduler.getPriorityLevel(mockCall("A")));
}
/**
 * Describes the given consumer groups via the coordinator-aware admin API driver.
 * The per-coordinator futures are re-keyed from {@code CoordinatorKey} back to the
 * plain group-id string for the public result.
 */
@Override
public DescribeConsumerGroupsResult describeConsumerGroups(final Collection<String> groupIds,
                                                           final DescribeConsumerGroupsOptions options) {
    SimpleAdminApiFuture<CoordinatorKey, ConsumerGroupDescription> future =
            DescribeConsumerGroupsHandler.newFuture(groupIds);
    DescribeConsumerGroupsHandler handler =
            new DescribeConsumerGroupsHandler(options.includeAuthorizedOperations(), logContext);
    invokeDriver(handler, future, options.timeoutMs);
    return new DescribeConsumerGroupsResult(future.all().entrySet().stream()
            .collect(Collectors.toMap(entry -> entry.getKey().idValue, Map.Entry::getValue)));
}
// With retries=0, a NOT_COORDINATOR error on describe must surface as a TimeoutException
// instead of being retried against a re-discovered coordinator.
@Test
public void testDescribeConsumerGroupNumRetries() throws Exception {
    final Cluster cluster = mockCluster(3, 0);
    final Time time = new MockTime();
    try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(time, cluster, AdminClientConfig.RETRIES_CONFIG, "0")) {
        env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
        env.kafkaClient().prepareResponse(prepareFindCoordinatorResponse(Errors.NONE, env.cluster().controller()));
        DescribeGroupsResponseData data = new DescribeGroupsResponseData();
        data.groups().add(DescribeGroupsResponse.groupMetadata(
                GROUP_ID, Errors.NOT_COORDINATOR, "", "", "", Collections.emptyList(), Collections.emptySet()));
        env.kafkaClient().prepareResponse(new DescribeGroupsResponse(data));
        env.kafkaClient().prepareResponse(prepareFindCoordinatorResponse(Errors.NONE, env.cluster().controller()));
        final DescribeConsumerGroupsResult result = env.adminClient().describeConsumerGroups(singletonList(GROUP_ID));
        TestUtils.assertFutureError(result.all(), TimeoutException.class);
    }
}
/**
 * Factory for a "place to queue" mapping-rule action.
 *
 * @param queue       target queue path (may contain variable references)
 * @param allowCreate whether the queue may be created if absent
 * @return the action instance
 */
public static MappingRuleAction createPlaceToQueueAction(
    String queue, boolean allowCreate) {
    return new PlaceToQueueAction(queue, allowCreate);
}
// Exercises queue-path variable substitution in PlaceToQueueAction: static paths,
// single substitution, unresolvable double substitution, and null/empty targets.
@Test
public void testPlaceToQueueAction() {
    VariableContext variables = new VariableContext();
    variables.put("%default", "root.default");
    variables.put("%immutable", "immutable");
    variables.put("%empty", "");
    variables.put("%null", null);
    variables.put("%sub", "xxx");
    variables.setImmutables("%immutable");
    MappingRuleAction placeToStatic =
            new MappingRuleActions.PlaceToQueueAction("root.static.queue", true);
    MappingRuleAction placeToDynamic =
            new MappingRuleActions.PlaceToQueueAction("root.%sub.%immutable", true);
    // "%sub%sub" is not a known variable, so it must stay unsubstituted.
    MappingRuleAction placeToDynamicDoubleSub =
            MappingRuleActions.createPlaceToQueueAction("root.%sub%sub.%immutable", true);
    MappingRuleAction placeToNull = MappingRuleActions.createPlaceToQueueAction(null, true);
    MappingRuleAction placeToEmpty = MappingRuleActions.createPlaceToQueueAction("", true);
    MappingRuleAction placeToNulRef = new MappingRuleActions.PlaceToQueueAction("%null", true);
    MappingRuleAction placeToEmptyRef = new MappingRuleActions.PlaceToQueueAction("%empty", true);
    MappingRuleAction placeToDefaultRef = new MappingRuleActions.PlaceToQueueAction("%default", true);
    assertPlaceResult(placeToStatic.execute(variables), "root.static.queue");
    assertPlaceResult(placeToDynamic.execute(variables), "root.xxx.immutable");
    assertPlaceResult(placeToDynamicDoubleSub.execute(variables), "root.%sub%sub.immutable");
    // Null/empty targets and references all resolve to an empty placement.
    assertPlaceResult(placeToNull.execute(variables), "");
    assertPlaceResult(placeToEmpty.execute(variables), "");
    assertPlaceResult(placeToNulRef.execute(variables), "");
    assertPlaceResult(placeToEmptyRef.execute(variables), "");
    assertPlaceResult(placeToDefaultRef.execute(variables), "root.default");
}
/**
 * JDBC metadata: not all tables returned by getTables are guaranteed selectable
 * by the current user, so this reports {@code false}.
 */
@Override
public boolean allTablesAreSelectable() {
    return false;
}
// Metadata must report that not all tables are selectable.
@Test
void assertAllTablesAreSelectable() {
    assertFalse(metaData.allTablesAreSelectable());
}
/**
 * Returns the registered health check with the given name, or {@code null}
 * when no check is registered under that name.
 */
public HealthCheck getHealthCheck(String name) {
    return healthChecks.get(name);
}
// Registering an async health check must schedule its decorator on the executor
// with the configured initial delay and period.
@Test
public void asyncHealthCheckIsScheduledOnExecutor() {
    ArgumentCaptor<AsyncHealthCheckDecorator> decoratorCaptor = forClass(AsyncHealthCheckDecorator.class);
    verify(executorService).scheduleAtFixedRate(decoratorCaptor.capture(), eq(0L), eq(10L), eq(TimeUnit.SECONDS));
    assertThat(decoratorCaptor.getValue().getHealthCheck()).isEqualTo(ahc);
}
/**
 * Removes, in place, every username hold whose expiration is strictly before
 * the clock's current epoch second; relative order of the surviving holds is
 * preserved.
 *
 * @param holds mutable list of holds to prune
 * @return the number of holds removed
 */
@VisibleForTesting
int removeExpired(final List<Account.UsernameHold> holds) {
    final long nowEpochSeconds = this.clock.instant().getEpochSecond();
    final int sizeBefore = holds.size();
    holds.removeIf(hold -> hold.expirationSecs() < nowEpochSeconds);
    return sizeBefore - holds.size();
}
// Holds expiring before "now" are removed; the count matches, and the relative
// order of the survivors is preserved against the shuffled input.
@Test
public void removeHolds() {
    final List<Account.UsernameHold> holds = IntStream.range(0, 100)
            .mapToObj(i -> new Account.UsernameHold(TestRandomUtil.nextBytes(32), i)).toList();
    final List<Account.UsernameHold> shuffled = new ArrayList<>(holds);
    Collections.shuffle(shuffled);
    final int currentTime = ThreadLocalRandom.current().nextInt(0, 100);
    final Clock clock = TestClock.pinned(Instant.EPOCH.plus(Duration.ofSeconds(currentTime)));
    final RemoveExpiredUsernameHoldsCommand removeExpiredUsernameHoldsCommand =
            new TestRemoveExpiredUsernameHoldsCommand(clock, mock(AccountsManager.class), false);
    final List<Account.UsernameHold> actual = new ArrayList<>(shuffled);
    final int numRemoved = removeExpiredUsernameHoldsCommand.removeExpired(actual);
    assertThat(numRemoved).isEqualTo(currentTime);
    assertThat(actual).hasSize(100 - currentTime);
    // should preserve order
    final Iterator<Account.UsernameHold> expected = shuffled.iterator();
    for (Account.UsernameHold hold : actual) {
        while (!Arrays.equals(expected.next().usernameHash(), hold.usernameHash())) {
            assertThat(expected).as("expected should be in order").hasNext();
        }
    }
}
/**
 * For this criterion, a strictly greater value is better.
 */
@Override
public boolean betterThan(Num criterionValue1, Num criterionValue2) {
    return criterionValue1.isGreaterThan(criterionValue2);
}
// Greater values are better (-0.1 beats -0.2); equal or lower values are not.
@Test
public void betterThan() {
    AnalysisCriterion criterion = getCriterion();
    assertTrue(criterion.betterThan(numOf(-0.1), numOf(-0.2)));
    assertFalse(criterion.betterThan(numOf(-0.1), numOf(0.0)));
}
/**
 * Writes the object to the stream, flushes immediately so the receiver is not
 * kept waiting, and periodically resets the stream to avoid the reference-table
 * memory leak inherent to {@code ObjectOutputStream}.
 *
 * @throws IOException if serialization or flushing fails
 */
@Override
public void write(Object object) throws IOException {
    objectOutputStream.writeObject(object);
    objectOutputStream.flush();
    preventMemoryLeak();
}
// Each write must be immediately followed by a flush, in that order.
@Test
public void flushesAfterWrite() throws IOException {
    // given
    ObjectWriter objectWriter = new AutoFlushingObjectWriter(objectOutputStream, 2);
    String object = "foo";
    // when
    objectWriter.write(object);
    // then
    InOrder inOrder = inOrder(objectOutputStream);
    inOrder.verify(objectOutputStream).writeObjectOverride(object);
    inOrder.verify(objectOutputStream).flush();
}
/**
 * Picks the query to kill under memory pressure: the one with the largest
 * (strictly positive) reservation in the general pool. Ties keep the earliest
 * such query in iteration order; returns empty when no candidate qualifies.
 */
@Override
public Optional<QueryId> chooseQueryToKill(List<QueryMemoryInfo> runningQueries, List<MemoryInfo> nodes) {
    QueryId victim = null;
    long largestReservation = 0;
    for (QueryMemoryInfo candidate : runningQueries) {
        // Only queries in the general pool are eligible victims.
        if (!GENERAL_POOL.equals(candidate.getMemoryPoolId())) {
            continue;
        }
        long reservation = candidate.getMemoryReservation();
        if (reservation > largestReservation) {
            victim = candidate.getQueryId();
            largestReservation = reservation;
        }
    }
    return Optional.ofNullable(victim);
}
// The killer must select the query with the largest total general-pool reservation
// (q_2), not the one with the largest single-node reservation; also covers nodes
// lacking a general pool entirely.
@Test
public void testSkewedQuery() {
    int reservePool = 10;
    int generalPool = 12;
    // q2 is the query with the most total memory reservation, but not the query with the max memory reservation.
    // This also tests the corner case where a node doesn't have a general pool.
    Map<String, Map<String, Long>> queries = ImmutableMap.<String, Map<String, Long>>builder()
            .put("q_1", ImmutableMap.of("n1", 0L, "n2", 8L, "n3", 0L, "n4", 0L, "n5", 0L))
            .put("q_2", ImmutableMap.of("n1", 3L, "n2", 5L, "n3", 2L, "n4", 4L, "n5", 0L))
            .put("q_3", ImmutableMap.of("n1", 0L, "n2", 0L, "n3", 9L, "n4", 0L, "n5", 0L))
            .put("q_r", ImmutableMap.of("n1", 6L, "n2", 6L, "n3", 6L, "n4", 6L, "n5", 6L))
            .build();
    assertEquals(
            lowMemoryKiller.chooseQueryToKill(
                    toQueryMemoryInfoList("q_r", queries),
                    toNodeMemoryInfoList(reservePool, generalPool, "q_r", queries)),
            Optional.of(new QueryId("q_2")));
}