focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// FIFO scheduler allocate: validates the attempt (removed/previous attempts get
// EMPTY_ALLOCATION), normalizes asks, releases containers, then under the
// application lock updates resource requests and blacklist before returning the
// newly allocated containers plus headroom. Note the stopped-check happens inside
// the synchronized block so it cannot race with application stop/removal.
@Override public Allocation allocate(ApplicationAttemptId applicationAttemptId, List<ResourceRequest> ask, List<SchedulingRequest> schedulingRequests, List<ContainerId> release, List<String> blacklistAdditions, List<String> blacklistRemovals, ContainerUpdates updateRequests) { FifoAppAttempt application = getApplicationAttempt(applicationAttemptId); if (application == null) { LOG.error("Calling allocate on removed or non existent application " + applicationAttemptId.getApplicationId()); return EMPTY_ALLOCATION; } // The allocate may be the leftover from previous attempt, and it will // impact current attempt, such as confuse the request and allocation for // current attempt's AM container. // Note outside precondition check for the attempt id may be // outdated here, so double check it here is necessary. if (!application.getApplicationAttemptId().equals(applicationAttemptId)) { LOG.error("Calling allocate on previous or removed " + "or non existent application attempt " + applicationAttemptId); return EMPTY_ALLOCATION; } // Sanity check normalizeResourceRequests(ask); // Release containers releaseContainers(release, application); synchronized (application) { // make sure we aren't stopping/removing the application // when the allocate comes in if (application.isStopped()) { LOG.info("Calling allocate on a stopped " + "application " + applicationAttemptId); return EMPTY_ALLOCATION; } if (!ask.isEmpty()) { LOG.debug("allocate: pre-update" + " applicationId=" + applicationAttemptId + " application=" + application); application.showRequests(); // Update application requests application.updateResourceRequests(ask); LOG.debug("allocate: post-update" + " applicationId=" + applicationAttemptId + " application=" + application); application.showRequests(); LOG.debug("allocate:" + " applicationId=" + applicationAttemptId + " #ask=" + ask.size()); } application.updateBlacklist(blacklistAdditions, blacklistRemovals); Resource headroom = application.getHeadroom(); 
application.setApplicationHeadroomForMetrics(headroom); return new Allocation(application.pullNewlyAllocatedContainers(), headroom, null, null, null, application.pullUpdatedNMTokens()); } }
@Test(timeout = 60000) public void testAllocateContainerOnNodeWithoutOffSwitchSpecified() throws Exception { GenericTestUtils.setRootLogLevel(Level.DEBUG); MockRM rm = new MockRM(conf); rm.start(); MockNM nm1 = rm.registerNode("127.0.0.1:1234", 6 * GB); RMApp app1 = MockRMAppSubmitter.submitWithMemory(2048, rm); // kick the scheduling, 2 GB given to AM1, remaining 4GB on nm1 nm1.nodeHeartbeat(true); RMAppAttempt attempt1 = app1.getCurrentAppAttempt(); MockAM am1 = rm.sendAMLaunched(attempt1.getAppAttemptId()); am1.registerAppAttempt(); // add request for containers List<ResourceRequest> requests = new ArrayList<ResourceRequest>(); requests.add(am1.createResourceReq("127.0.0.1", 1 * GB, 1, 1)); requests.add(am1.createResourceReq("/default-rack", 1 * GB, 1, 1)); am1.allocate(requests, null); // send the request try { // kick the schedule nm1.nodeHeartbeat(true); } catch (NullPointerException e) { Assert.fail("NPE when allocating container on node but " + "forget to set off-switch request should be handled"); } rm.stop(); }
public static Pair<String, String> encryptHandler(String dataId, String content) { if (!checkCipher(dataId)) { return Pair.with("", content); } Optional<String> algorithmName = parseAlgorithmName(dataId); Optional<EncryptionPluginService> optional = algorithmName.flatMap( EncryptionPluginManager.instance()::findEncryptionService); if (!optional.isPresent()) { LOGGER.warn("[EncryptionHandler] [encryptHandler] No encryption program with the corresponding name found"); return Pair.with("", content); } EncryptionPluginService encryptionPluginService = optional.get(); String secretKey = encryptionPluginService.generateSecretKey(); String encryptContent = encryptionPluginService.encrypt(secretKey, content); return Pair.with(encryptionPluginService.encryptSecretKey(secretKey), encryptContent); }
@Test void testCornerCaseDataIdAlgoParse() { String dataId = "cipher-"; Pair<String, String> pair = EncryptionHandler.encryptHandler(dataId, "content"); assertNotNull(pair, "should not throw exception when parsing enc algo for dataId '" + dataId + "'"); }
@Override public Path copy(final Path source, final Path target, final TransferStatus status, final ConnectionCallback callback, final StreamListener listener) throws BackgroundException { return copy(source, segmentService.list(source), target, status, callback, listener); }
// Integration test: copies a segmented (large) Swift object within the same
// bucket, then verifies that deleting the source leaves every segment of the
// copied object intact before cleaning up the copy.
@Test public void testCopyLargeObjectSameBucket() throws Exception { final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume)); container.attributes().setRegion("IAD"); final Path originFolder = new Path(container, UUID.randomUUID().toString(), EnumSet.of(Path.Type.directory)); final Path sourceFile = new Path(originFolder, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file)); final SwiftRegionService regionService = new SwiftRegionService(session); final SwiftSegmentService segmentService = new SwiftSegmentService(session, ".segments-test/"); prepareFile(sourceFile, regionService, segmentService); final SwiftFindFeature findFeature = new SwiftFindFeature(session); assertTrue(findFeature.find(sourceFile)); final List<Path> sourceSegments = segmentService.list(sourceFile); final Path targetFolder = new Path(container, UUID.randomUUID().toString(), EnumSet.of(Path.Type.directory)); final Path targetFile = new Path(targetFolder, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file)); final Path copiedFile = new SwiftLargeObjectCopyFeature(session, regionService) .copy(sourceFile, targetFile, new TransferStatus(), new DisabledConnectionCallback(), new DisabledStreamListener()); // copied file exists assertTrue(findFeature.find(copiedFile)); final List<Path> targetSegments = segmentService.list(targetFile); // delete source, without deleting copied-segments new SwiftDeleteFeature(session, segmentService, regionService).delete( Collections.singletonMap(sourceFile, new TransferStatus()), new DisabledPasswordCallback(), new Delete.DisabledCallback(), true); assertFalse(findFeature.find(sourceFile)); assertTrue(targetSegments.stream().allMatch(p -> { try { return findFeature.find(p); } catch(BackgroundException e) { return false; } })); new SwiftDeleteFeature(session, segmentService, regionService).delete( Collections.singletonMap(copiedFile, new TransferStatus()), new DisabledPasswordCallback(), new 
Delete.DisabledCallback(), true); assertFalse(findFeature.find(copiedFile)); }
public List<String> toList(boolean trim) { return toList((str) -> trim ? StrUtil.trim(str) : str); }
@Test public void splitByCharIgnoreEmptyTest(){ String str1 = "a, ,,efedsfs, ddf,"; SplitIter splitIter = new SplitIter(str1, new CharFinder(',', false), Integer.MAX_VALUE, true ); final List<String> strings = splitIter.toList(false); assertEquals(4, strings.size()); }
@Nonnull public static <T> AggregateOperation1<T, LongDoubleAccumulator, Double> averagingDouble( @Nonnull ToDoubleFunctionEx<? super T> getDoubleValueFn ) { checkSerializable(getDoubleValueFn, "getDoubleValueFn"); // count == accumulator.value1 // sum == accumulator.value2 return AggregateOperation .withCreate(LongDoubleAccumulator::new) .andAccumulate((LongDoubleAccumulator a, T item) -> { // a bit faster check than in addExact, specialized for increment if (a.getLong() == Long.MAX_VALUE) { throw new ArithmeticException("Counter overflow"); } a.setLong(a.getLong() + 1); a.setDouble(a.getDouble() + getDoubleValueFn.applyAsDouble(item)); }) .andCombine((a1, a2) -> { a1.setLong(Math.addExact(a1.getLong(), a2.getLong())); a1.setDouble(a1.getDouble() + a2.getDouble()); }) .andDeduct((a1, a2) -> { a1.setLong(Math.subtractExact(a1.getLong(), a2.getLong())); a1.setDouble(a1.getDouble() - a2.getDouble()); }) .andExportFinish(a -> a.getDouble() / a.getLong()); }
@Test public void when_averagingDouble() { validateOp(averagingDouble(Double::doubleValue), identity(), 1.5, 2.5, new LongDoubleAccumulator(1, 1.5), new LongDoubleAccumulator(2, 4.0), 2.0); }
public static boolean canDrop(FilterPredicate pred, List<ColumnChunkMetaData> columns) { Objects.requireNonNull(pred, "pred cannot be null"); Objects.requireNonNull(columns, "columns cannot be null"); return pred.accept(new StatisticsFilter(columns)); }
@Test public void testGtEq() { assertFalse(canDrop(gtEq(intColumn, 9), columnMetas)); assertFalse(canDrop(gtEq(intColumn, 10), columnMetas)); assertFalse(canDrop(gtEq(intColumn, 100), columnMetas)); assertTrue(canDrop(gtEq(intColumn, 101), columnMetas)); assertTrue(canDrop(gtEq(intColumn, 0), nullColumnMetas)); assertTrue(canDrop(gtEq(intColumn, 7), nullColumnMetas)); assertTrue(canDrop(gtEq(missingColumn, fromString("any")), columnMetas)); assertFalse(canDrop(gtEq(intColumn, 1), missingMinMaxColumnMetas)); assertFalse(canDrop(gtEq(doubleColumn, 0.1), missingMinMaxColumnMetas)); }
public MetricName tagged(Map<String, String> add) { final Map<String, String> tags = new HashMap<>(add); tags.putAll(this.tags); return new MetricName(key, tags); }
@Test(expected=IllegalArgumentException.class) public void testTaggedNotPairs() { MetricName.EMPTY.tagged("foo"); }
@Override public void doFilter(HttpRequest request, HttpResponse response, FilterChain chain) throws IOException { boolean forceRedirect = config .getBoolean(SONAR_FORCE_REDIRECT_DEFAULT_ADMIN_CREDENTIALS) .orElse(true); if (forceRedirect && userSession.hasSession() && userSession.isLoggedIn() && userSession.isSystemAdministrator() && !"admin".equals(userSession.getLogin()) && defaultAdminCredentialsVerifier.hasDefaultCredentialUser()) { redirectTo(response, request.getContextPath() + CHANGE_ADMIN_PASSWORD_PATH); } chain.doFilter(request, response); }
@Test public void do_not_redirect_if_instance_does_not_use_default_admin_credentials() throws Exception { when(defaultAdminCredentialsVerifier.hasDefaultCredentialUser()).thenReturn(false); underTest.doFilter(request, response, chain); verify(response, never()).sendRedirect(any()); }
@Override public HttpHeaders add(HttpHeaders headers) { if (headers instanceof DefaultHttpHeaders) { this.headers.add(((DefaultHttpHeaders) headers).headers); return this; } else { return super.add(headers); } }
@Test public void testAsciiStringKeyRetrievedAsString() { final HttpHeaders headers = new DefaultHttpHeaders(false); // Test adding AsciiString key and retrieving it using a String key final String cacheControl = "no-cache"; headers.add(HttpHeaderNames.CACHE_CONTROL, cacheControl); final String value = headers.getAsString(HttpHeaderNames.CACHE_CONTROL); assertNotNull(value); assertEquals(cacheControl, value); final String value2 = headers.getAsString(HttpHeaderNames.CACHE_CONTROL.toString()); assertNotNull(value2); assertEquals(cacheControl, value2); }
@Override @SuppressFBWarnings(value = "EI_EXPOSE_REP") public KsqlConfig getKsqlConfig() { return ksqlConfig; }
@Test public void shouldPollToEndOfTopic() { // Given: addPollResult("foo", "val".getBytes(StandardCharsets.UTF_8)); addPollResult("bar", "baz".getBytes(StandardCharsets.UTF_8)); expectRead(consumerBefore); addPollResult(KafkaConfigStore.CONFIG_MSG_KEY, savedProperties); expectRead(consumerAfter); // When: getKsqlConfig(); // Then: verifyDrainLog(consumerBefore, 2); verifyProduce(); }
public boolean equalsExtended(Object obj) { if (equals(obj)) { RequestKey other = (RequestKey) obj; boolean headersEqual = other.headers == null || headers == null || headers.equals(other.headers); boolean charsetEqual = other.charset == null || charset == null || charset.equals(other.charset); boolean bodyEqual = other.body == null || body == null || Arrays.equals(other.body, body); return headersEqual && charsetEqual && bodyEqual; } return false; }
@Test void equalsExtended() { RequestKey requestKey2 = RequestKey.builder(HttpMethod.GET, "a").build(); assertThat(requestKey.hashCode()).isEqualTo(requestKey2.hashCode()); assertThat(requestKey.equalsExtended(requestKey2)).isEqualTo(true); }
static String generateHecToken() { return UUID.randomUUID().toString(); }
@Test public void testGenerateHecTokenMeetsRequirements() { for (int i = 0; i < 10000; i++) { String password = generateHecToken(); int lower = 0; int upper = 0; for (int j = 0; j < password.length(); j++) { char c = password.charAt(j); String s = String.valueOf(c); lower += s.toLowerCase().equals(s) ? 1 : 0; upper += s.toUpperCase().equals(s) ? 1 : 0; } assertThat(lower).isAtLeast(1); assertThat(upper).isAtLeast(1); } }
@Override public boolean isExisted(final String originalName) { return encryptTable.isCipherColumn(originalName) || !encryptTable.isAssistedQueryColumn(originalName) && !encryptTable.isLikeQueryColumn(originalName); }
@Test void assertIsExistedWithCipherColumn() { EncryptTable encryptTable = mock(EncryptTable.class); when(encryptTable.isCipherColumn("cipher_column")).thenReturn(true); EncryptColumnExistedReviser reviser = new EncryptColumnExistedReviser(encryptTable); assertTrue(reviser.isExisted("cipher_column")); }
// Service initialization: configures Hadoop UGI for either "kerberos"
// (login from keytab; keytab/principal resolved from config with user-home and
// server-name based defaults) or "simple" authentication, rejecting any other
// type. Then resolves the Hadoop conf dir (configured dir, falling back to the
// server config dir), loads the service Hadoop configuration, applies required
// overrides, and caches the lowercased NameNode whitelist.
@Override protected void init() throws ServiceException { LOG.info("Using FileSystemAccess JARs version [{}]", VersionInfo.getVersion()); String security = getServiceConfig().get(AUTHENTICATION_TYPE, "simple").trim(); if (security.equals("kerberos")) { String defaultName = getServer().getName(); String keytab = System.getProperty("user.home") + "/" + defaultName + ".keytab"; keytab = getServiceConfig().get(KERBEROS_KEYTAB, keytab).trim(); if (keytab.length() == 0) { throw new ServiceException(FileSystemAccessException.ERROR.H01, KERBEROS_KEYTAB); } String principal = defaultName + "/localhost@LOCALHOST"; principal = getServiceConfig().get(KERBEROS_PRINCIPAL, principal).trim(); if (principal.length() == 0) { throw new ServiceException(FileSystemAccessException.ERROR.H01, KERBEROS_PRINCIPAL); } Configuration conf = new Configuration(); conf.set(HADOOP_SECURITY_AUTHENTICATION, "kerberos"); UserGroupInformation.setConfiguration(conf); try { UserGroupInformation.loginUserFromKeytab(principal, keytab); } catch (IOException ex) { throw new ServiceException(FileSystemAccessException.ERROR.H02, ex.getMessage(), ex); } LOG.info("Using FileSystemAccess Kerberos authentication, principal [{}] keytab [{}]", principal, keytab); } else if (security.equals("simple")) { Configuration conf = new Configuration(); conf.set(HADOOP_SECURITY_AUTHENTICATION, "simple"); UserGroupInformation.setConfiguration(conf); LOG.info("Using FileSystemAccess simple/pseudo authentication, principal [{}]", System.getProperty("user.name")); } else { throw new ServiceException(FileSystemAccessException.ERROR.H09, security); } String hadoopConfDirProp = getServiceConfig().get(HADOOP_CONF_DIR, getServer().getConfigDir()); File hadoopConfDir = new File(hadoopConfDirProp).getAbsoluteFile(); if (!hadoopConfDir.exists()) { hadoopConfDir = new File(getServer().getConfigDir()).getAbsoluteFile(); } if (!hadoopConfDir.exists()) { throw new ServiceException(FileSystemAccessException.ERROR.H10, hadoopConfDir); } try 
{ serviceHadoopConf = loadHadoopConf(hadoopConfDir); fileSystemConf = getNewFileSystemConfiguration(); } catch (IOException ex) { throw new ServiceException(FileSystemAccessException.ERROR.H11, ex.toString(), ex); } if (LOG.isDebugEnabled()) { LOG.debug("FileSystemAccess FileSystem configuration:"); for (Map.Entry entry : serviceHadoopConf) { LOG.debug(" {} = {}", entry.getKey(), entry.getValue()); } } setRequiredServiceHadoopConf(serviceHadoopConf); nameNodeWhitelist = toLowerCase(getServiceConfig().getTrimmedStringCollection(NAME_NODE_WHITELIST)); }
@Test @TestException(exception = ServiceException.class, msgRegExp = "H02.*") @TestDir public void kerberosInitializationFailure() throws Exception { String dir = TestDirHelper.getTestDir().getAbsolutePath(); String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(), SchedulerService.class.getName(), FileSystemAccessService.class.getName())); Configuration conf = new Configuration(false); conf.set("server.services", services); conf.set("server.hadoop.authentication.type", "kerberos"); conf.set("server.hadoop.authentication.kerberos.keytab", "/tmp/foo"); conf.set("server.hadoop.authentication.kerberos.principal", "foo@FOO"); Server server = new Server("server", dir, dir, dir, dir, conf); server.init(); }
@Override public SchemaAndValue get(final ProcessingLogConfig config) { final Struct struct = new Struct(ProcessingLogMessageSchema.PROCESSING_LOG_SCHEMA) .put(ProcessingLogMessageSchema.TYPE, MessageType.SERIALIZATION_ERROR.getTypeId()) .put(ProcessingLogMessageSchema.SERIALIZATION_ERROR, serializationError(config)); return new SchemaAndValue(ProcessingLogMessageSchema.PROCESSING_LOG_SCHEMA, struct); }
@Test public void shouldSetEmptyRecordToNull() { // Given: final SerializationError<GenericRow> serError = new SerializationError<>(ERROR, Optional.empty(), TOPIC, false); // When: final SchemaAndValue msg = serError.get(LOGGING_CONFIG); // Then: final Struct struct = (Struct) msg.value(); assertThat( struct.get(ProcessingLogMessageSchema.TYPE), equalTo(ProcessingLogMessageSchema.MessageType.SERIALIZATION_ERROR.getTypeId())); final Struct serializationError = struct.getStruct(SERIALIZATION_ERROR); assertThat(serializationError.get(SERIALIZATION_ERROR_FIELD_RECORD), is(nullValue())); }
@Override public boolean remove(Object object) { return get(removeAsync(object)); }
@Test public void testRemove() { RScoredSortedSet<Integer> set = redisson.getScoredSortedSet("simple"); set.add(4, 5); set.add(2, 3); set.add(0, 1); set.add(1, 2); set.add(3, 4); Assertions.assertFalse(set.remove(0)); Assertions.assertTrue(set.remove(3)); assertThat(set).containsExactly(1, 2, 4, 5); }
public static <InputT> UsingBuilder<InputT> of(PCollection<InputT> input) { return named(null).of(input); }
@Test public void testBuild_ImplicitName() { final PCollection<String> dataset = TestUtils.createMockDataset(TypeDescriptors.strings()); final PCollection<String> mapped = MapElements.of(dataset).using(s -> s).output(); final MapElements map = (MapElements) TestUtils.getProducer(mapped); assertFalse(map.getName().isPresent()); }
public int getResourceProfilesFailedRetrieved() { return numGetResourceProfilesFailedRetrieved.value(); }
@Test public void testGetResourceProfilesRetrievedFailed() { long totalBadBefore = metrics.getResourceProfilesFailedRetrieved(); badSubCluster.getResourceProfilesFailed(); Assert.assertEquals(totalBadBefore + 1, metrics.getResourceProfilesFailedRetrieved()); }
@Override public ScalarOperator visitInPredicate(InPredicateOperator predicate, Void context) { return shuttleIfUpdate(predicate); }
@Test void visitInPredicate() { InPredicateOperator operator = new InPredicateOperator(true, ImmutableList.of()); { ScalarOperator newOperator = shuttle.visitInPredicate(operator, null); assertEquals(operator, newOperator); } { ScalarOperator newOperator = shuttle2.visitInPredicate(operator, null); assertEquals(operator, newOperator); } }
@Override public long tick() throws InterruptedException { long now = mClock.millis(); mSleeper.sleep( () -> Duration.ofMillis(mIntervalSupplier.getNextInterval(mPreviousTickedMs, now))); mPreviousTickedMs = mClock.millis(); return mIntervalSupplier.getRunLimit(mPreviousTickedMs); }
@Test public void maintainInterval() throws Exception { SleepingTimer stimer = new SleepingTimer(THREAD_NAME, mMockLogger, mFakeClock, new SteppingThreadSleeper(mMockSleeper, mFakeClock), () -> new FixedIntervalSupplier(INTERVAL_MS)); stimer.tick(); mFakeClock.addTimeMs(INTERVAL_MS / 3); stimer.tick(); verify(mMockSleeper).sleep(Duration.ofMillis(INTERVAL_MS - (INTERVAL_MS / 3))); }
public int getColumnSize() { return columnSize; }
@Test public void build_string_column_def_with_only_required_attributes() { VarcharColumnDef def = new VarcharColumnDef.Builder() .setColumnName("issues") .setLimit(10) .build(); assertThat(def.getName()).isEqualTo("issues"); assertThat(def.getColumnSize()).isEqualTo(10); assertThat(def.isNullable()).isTrue(); assertThat(def.getDefaultValue()).isNull(); }
public static InputStream markSupportedInputStream(final InputStream is, final int markBufferSize) { if (is.markSupported()) { return is; } return new InputStream() { byte[] mMarkBuffer; boolean mInMarked = false; boolean mInReset = false; boolean mDry = false; private int mPosition = 0; private int mCount = 0; @Override public int read() throws IOException { if (!mInMarked) { return is.read(); } else { if (mPosition < mCount) { byte b = mMarkBuffer[mPosition++]; return b & 0xFF; } if (!mInReset) { if (mDry) { return -1; } if (null == mMarkBuffer) { mMarkBuffer = new byte[markBufferSize]; } if (mPosition >= markBufferSize) { throw new IOException("Mark buffer is full!"); } int read = is.read(); if (-1 == read) { mDry = true; return -1; } mMarkBuffer[mPosition++] = (byte) read; mCount++; return read; } else { // mark buffer is used, exit mark status! mInMarked = false; mInReset = false; mPosition = 0; mCount = 0; return is.read(); } } } /** * NOTE: the <code>readlimit</code> argument for this class * has no meaning. */ @Override public synchronized void mark(int readlimit) { mInMarked = true; mInReset = false; // mark buffer is not empty int count = mCount - mPosition; if (count > 0) { System.arraycopy(mMarkBuffer, mPosition, mMarkBuffer, 0, count); mCount = count; mPosition = 0; } } @Override public synchronized void reset() throws IOException { if (!mInMarked) { throw new IOException("should mark before reset!"); } mInReset = true; mPosition = 0; } @Override public boolean markSupported() { return true; } @Override public int available() throws IOException { int available = is.available(); if (mInMarked && mInReset) { available += mCount - mPosition; } return available; } @Override public void close() throws IOException { is.close(); } }; }
@Test void testMarkInputSupport() { Assertions.assertThrows(IOException.class, () -> { InputStream is = StreamUtilsTest.class.getResourceAsStream("/StreamUtilsTest.txt"); try { is = StreamUtils.markSupportedInputStream(new PushbackInputStream(is), 1); is.mark(1); int read = is.read(); assertThat(read, is((int) '0')); is.skip(1); is.read(); } finally { if (is != null) { is.close(); } } }); }
protected void commitTransaction(final Map<TopicPartition, OffsetAndMetadata> offsets, final ConsumerGroupMetadata consumerGroupMetadata) { if (!eosEnabled()) { throw new IllegalStateException(formatException("Exactly-once is not enabled")); } maybeBeginTransaction(); try { // EOS-v2 assumes brokers are on version 2.5+ and thus can understand the full set of consumer group metadata // Thus if we are using EOS-v1 and can't make this assumption, we must downgrade the request to include only the group id metadata final ConsumerGroupMetadata maybeDowngradedGroupMetadata = processingMode == EXACTLY_ONCE_V2 ? consumerGroupMetadata : new ConsumerGroupMetadata(consumerGroupMetadata.groupId()); producer.sendOffsetsToTransaction(offsets, maybeDowngradedGroupMetadata); producer.commitTransaction(); transactionInFlight = false; } catch (final ProducerFencedException | InvalidProducerEpochException | CommitFailedException | InvalidPidMappingException error) { throw new TaskMigratedException( formatException("Producer got fenced trying to commit a transaction"), error ); } catch (final TimeoutException timeoutException) { // re-throw to trigger `task.timeout.ms` throw timeoutException; } catch (final KafkaException error) { throw new StreamsException( formatException("Error encountered trying to commit a transaction"), error ); } }
@Test public void shouldFailOnEosSendOffsetFatal() { eosAlphaMockProducer.sendOffsetsToTransactionException = new RuntimeException("KABOOM!"); final RuntimeException thrown = assertThrows( RuntimeException.class, // we pass in `null` to verify that `sendOffsetsToTransaction()` fails instead of `commitTransaction()` // `sendOffsetsToTransaction()` would throw an NPE on `null` offsets () -> eosAlphaStreamsProducer.commitTransaction(null, new ConsumerGroupMetadata("appId")) ); assertThat(thrown.getMessage(), is("KABOOM!")); }
// XEP-0280 (Message Carbons) eligibility: applies the MUC-related rules first
// (invitations are copied; outbound MUC-PMs from a local user are copied;
// inbound MUC-PMs to a local user are not), then the general rules (groupchat
// and <private/>-hinted messages are excluded; chat-typed, normal-with-body,
// and IM-payload messages — receipts, chat states, chat markers — are copied).
// NOTE(review): inside the first branch the muc#user child is re-fetched and
// re-checked for null even though the enclosing if already established it is
// non-null — the inner null-check is redundant (harmless, but could be hoisted
// into a local variable).
public static boolean isEligibleForCarbonsDelivery(final Message stanza) { // To properly handle messages exchanged with a MUC (or similar service), the server must be able to identify MUC-related messages. // This can be accomplished by tracking the clients' presence in MUCs, or by checking for the <x xmlns="http://jabber.org/protocol/muc#user"> // element in messages. The following rules apply to MUC-related messages: if (stanza.getChildElement("x", "http://jabber.org/protocol/muc#user") != null) { // A <message/> containing a Direct MUC Invitations (XEP-0249) SHOULD be carbon-copied. if (containsChildElement(stanza, Set.of("x"), "jabber:x:conference")) { return true; } // A <message/> containing a Mediated Invitation SHOULD be carbon-copied. if (stanza.getChildElement("x", "http://jabber.org/protocol/muc#user") != null && stanza.getChildElement("x", "http://jabber.org/protocol/muc#user").element("invite") != null) { return true; } // A private <message/> from a local user to a MUC participant (sent to a full JID) SHOULD be carbon-copied // The server SHOULD limit carbon-copying to the clients sharing a Multi-Session Nick in that MUC, and MAY // inject the <x/> element into such carbon copies. Clients can not respond to carbon-copies of MUC-PMs // related to a MUC they are not joined to. Therefore, they SHOULD either ignore such carbon copies, or // provide a way for the user to join the MUC before answering. if (stanza.getTo() != null && stanza.getTo().getResource() != null && stanza.getFrom() != null && stanza.getFrom().getNode() != null && XMPPServer.getInstance().isLocal(stanza.getFrom())) { return true; // TODO The server SHOULD limit carbon-copying to the clients sharing a Multi-Session Nick in that MUC (OF-2780). } // A private <message/> from a MUC participant (received from a full JID) to a local user SHOULD NOT be // carbon-copied (these messages are already replicated by the MUC service to all joined client instances). 
if (stanza.getFrom() != null && stanza.getFrom().getResource() != null && stanza.getTo() != null && stanza.getTo().getNode() != null && XMPPServer.getInstance().isLocal(stanza.getTo())) { return false; } } // A <message/> of type "groupchat" SHOULD NOT be carbon-copied. if (stanza.getType() == Message.Type.groupchat) { return false; } // A <message/> is eligible for carbons delivery if it does not contain a <private/> child element... if (containsChildElement(stanza, Set.of("private", "received"), "urn:xmpp:carbons")) { return false; } // and if at least one of the following is true: // ... it is of type "chat". if (stanza.getType() == Message.Type.chat) { return true; } // ... it is of type "normal" and contains a <body> element. if ((stanza.getType() == null || stanza.getType() == Message.Type.normal) && stanza.getBody() != null) { return true; } // ... it contains payload elements typically used in IM if (containsChildElement(stanza, Set.of("request", "received"), "urn:xmpp:receipts") // Message Delivery Receipts (XEP-0184) || containsChildElement(stanza, Set.of("active", "inactive", "gone", "composing", "paused"), "http://jabber.org/protocol/chatstates") // Chat State Notifications (XEP-0085) || (containsChildElement(stanza, Set.of("markable", "received", "displayed", "acknowledged"), "urn:xmpp:chat-markers")) // Chat Markers (XEP-0333)). ) { return true; } // ... it is of type "error" and it was sent in response to a <message/> that was eligible for carbons delivery. // TODO implement me (OF-2779) return false; }
@Test public void testMucGroupChatRawData() throws Exception { // Setup test fixture. final String raw = "<message xmlns=\"jabber:client\" to=\"john@example.org/barfoo\" type=\"groupchat\" id=\"7cb29947-fda2-4a44-b349-ec83fbbf062f\" from=\"room1@muc.example.org/Johnny\">\n" + " <active xmlns=\"http://jabber.org/protocol/chatstates\" />\n" + " <markable xmlns=\"urn:xmpp:chat-markers:0\" />\n" + " <origin-id xmlns=\"urn:xmpp:sid:0\" id=\"7cb29947-fda2-4a44-b349-ec83fbbf062f\" />\n" + " <encrypted xmlns=\"eu.siacs.conversations.axolotl\">\n" + " <header sid=\"12121212\">\n" + " <key rid=\"2334343434\">MOCK-TESTDATA</key>\n" + " <iv>TESTTEST</iv>\n" + "</header>\n" + " <payload>TEST</payload>\n" + "</encrypted>\n" + " <encryption xmlns=\"urn:xmpp:eme:0\" name=\"OMEMO\" namespace=\"eu.siacs.conversations.axolotl\" />\n" + " <body>You received a message encrypted with OMEMO but your client doesn't support OMEMO.</body>\n" + " <store xmlns=\"urn:xmpp:hints\" />\n" + " <stanza-id xmlns=\"urn:xmpp:sid:0\" id=\"d9c123d0-8738-40be-a1a3-497435e0761d\" by=\"room1@muc.example.org\" />\n" + " <addresses xmlns=\"http://jabber.org/protocol/address\">\n" + " <address type=\"ofrom\" jid=\"john@example.org\" />\n" + "</addresses>\n" + "</message>\n"; final Message input = new Message(DocumentHelper.parseText(raw).getRootElement()); // Execute system under test. final boolean result = Forwarded.isEligibleForCarbonsDelivery(input); // Verify results. assertFalse(result); }
public void upgrade() { for (final Document document : collection.find()) { LOG.debug("Migrate view sharing: {}", document); final ObjectId sharingId = document.getObjectId("_id"); final String sharingType = document.get("type", String.class); final String viewId = document.get("view_id", String.class); try { switch (sharingType) { case "users": //noinspection unchecked migrateUsers(viewId, (Collection<String>) document.get("users", Collection.class)); break; case "roles": //noinspection unchecked migrateRoles(viewId, (Collection<String>) document.get("roles", Collection.class)); break; case "all_of_instance": migrateAllOfInstance(viewId); break; default: LOG.warn("Skipping unknown view sharing type: {}", sharingType); continue; // Continue here so we don't delete the sharing document } // The view sharing document should be removed after successful migration deleteViewSharing(sharingId); } catch (Exception e) { LOG.error("Couldn't migrate view sharing: {}", document, e); } } }
@Test @DisplayName("migrate role shares") void migrateRoleShares() throws Exception { final User userJane = createUser("jane"); final User userJohn = createUser("john"); final Role role1 = createRole("role1"); final Role role2 = createRole("role2"); when(userService.loadAllForRole(role1)).thenReturn(ImmutableSet.of(userJane, userJohn)); when(userService.loadAllForRole(role2)).thenReturn(Collections.emptySet()); when(roleService.load(role1.getName())).thenReturn(role1); when(roleService.load(role2.getName())).thenReturn(role2); final GRN jane = GRNTypes.USER.toGRN(userJane.getName()); final GRN john = GRNTypes.USER.toGRN(userJohn.getName()); final GRN dashboard1 = GRNTypes.DASHBOARD.toGRN("54e3deadbeefdeadbeef0002"); assertThat(grantService.hasGrantFor(jane, Capability.VIEW, dashboard1)).isFalse(); assertThat(grantService.hasGrantFor(john, Capability.VIEW, dashboard1)).isFalse(); migration.upgrade(); assertThat(grantService.hasGrantFor(jane, Capability.VIEW, dashboard1)).isTrue(); assertThat(grantService.hasGrantFor(john, Capability.VIEW, dashboard1)).isTrue(); assertThat(grantService.hasGrantFor(jane, Capability.OWN, dashboard1)).isFalse(); assertThat(grantService.hasGrantFor(jane, Capability.MANAGE, dashboard1)).isFalse(); assertThat(grantService.hasGrantFor(john, Capability.OWN, dashboard1)).isFalse(); assertThat(grantService.hasGrantFor(john, Capability.MANAGE, dashboard1)).isFalse(); assertDeletedViewSharing("54e3deadbeefdeadbeef0002"); }
@Override public void putAll(Map<? extends K, ? extends V> m) { for (Map.Entry<? extends K, ? extends V> e : m.entrySet()) put(e.getKey(), e.getValue()); }
@Test
public void testPutAll() {
    // A plain HashMap accepts null keys/values; the safe map must silently
    // drop those entries and keep only the fully non-null one.
    HashMap<Object, Object> hashMap = new HashMap<>();
    hashMap.put(null, "value");
    hashMap.put("key", null);
    hashMap.put("key1", "value");
    Assert.assertEquals(3, hashMap.size());
    SafeConcurrentHashMap<Object, Object> safeConcurrentHashMap = new SafeConcurrentHashMap<>();
    safeConcurrentHashMap.putAll(hashMap);
    Assert.assertEquals(1, safeConcurrentHashMap.size());
    Assert.assertEquals("value", safeConcurrentHashMap.get("key1"));
}
// Returns the graph's nodes. NOTE(review): exposes the internal list directly —
// confirm callers do not mutate it.
List<ModelNode> getNodes() {
    return modelNodes;
}
@Test
void require_that_dependencies_are_correctly_set() {
    // Builders are added in reverse dependency order (BC, BB, BA); the built
    // graph must order them so each node depends only on the next one.
    ModelGraphBuilder builder = new ModelGraphBuilder();
    builder.addBuilder(new GraphMock.BC()).addBuilder(new GraphMock.BB()).addBuilder(new GraphMock.BA());
    ModelGraph graph = builder.build();
    List<ModelNode> nodes = graph.getNodes();
    assertEquals(3, graph.getNodes().size());
    assertTrue(nodes.get(0).hasDependencies());
    assertTrue(nodes.get(1).hasDependencies());
    assertFalse(nodes.get(2).hasDependencies());
    assertTrue(nodes.get(0).dependsOn(nodes.get(1)));
    assertTrue(nodes.get(1).dependsOn(nodes.get(2)));
    assertFalse(nodes.get(2).dependsOn(nodes.get(0)));
}
@Override
public void onSuccess(RestResponse restResponse) {
    // Decode the multiplexed envelope and fan it out to the per-request callbacks.
    Response<MultiplexedResponseContent> response;
    try {
        response = _decoder.decodeResponse(restResponse);
    } catch (RestLiDecodingException e) {
        // A decoding failure fails the whole multiplexed call.
        onError(e);
        return;
    }
    // individual callbacks are notified first
    notifyIndividualCallbacks(response);
    // aggregated callback is guaranteed to be called after all individual callbacks
    notifyAggregatedCallback(response);
}
@Test
public void testSuccess() throws Exception {
    // Two individual responses in one multiplexed payload: each individual
    // callback must receive its own entity, then the aggregated callback fires.
    FutureCallback<RestResponse> callback1 = new FutureCallback<>();
    FutureCallback<RestResponse> callback2 = new FutureCallback<>();
    ImmutableMap<Integer, Callback<RestResponse>> individualCallbacks = ImmutableMap.<Integer, Callback<RestResponse>>of(ID1, callback1, ID2, callback2);
    FutureCallback<MultiplexedResponse> aggregatedCallback = new FutureCallback<>();
    TestRecord entity1 = fakeEntity(ID1);
    IndividualResponse ir1 = fakeIndividualResponse(entity1);
    TestRecord entity2 = fakeEntity(ID2);
    IndividualResponse ir2 = fakeIndividualResponse(entity2);
    MultiplexedResponseContent responseContent = new MultiplexedResponseContent()
        .setResponses(new IndividualResponseMap(ImmutableMap.of(Integer.toString(ID1), ir1, Integer.toString(ID2), ir2)));
    MultiplexedCallback multiplexedCallback = new MultiplexedCallback(individualCallbacks, aggregatedCallback);
    multiplexedCallback.onSuccess(fakeRestResponse(responseContent));
    assertRestResponseEquals(callback1.get(), fakeRestResponse(entity1));
    assertRestResponseEquals(callback2.get(), fakeRestResponse(entity2));
    MultiplexedResponse multiplexedResponse = aggregatedCallback.get();
    Assert.assertEquals(multiplexedResponse.getStatus(), HttpStatus.S_200_OK.getCode());
    Assert.assertEquals(multiplexedResponse.getHeaders(), HEADERS);
}
@Transactional
@Cacheable(CACHE_DATABASE_SEARCH)
@CacheEvict(value = CACHE_AVERAGE_REVIEW_RATING, allEntries = true)
public SearchHits<ExtensionSearch> search(ISearchService.Options options) {
    // Database-backed search: filter, sort, and page all active extensions in memory.
    // grab all extensions
    var matchingExtensions = repositories.findAllActiveExtensions();
    // no extensions in the database
    if (matchingExtensions.isEmpty()) {
        return new SearchHitsImpl<>(0,TotalHitsRelation.OFF, 0f, null, null, Collections.emptyList(), null, null);
    }
    // exclude namespaces
    if(options.namespacesToExclude != null) {
        for(var namespaceToExclude : options.namespacesToExclude) {
            matchingExtensions = matchingExtensions.filter(extension -> !extension.getNamespace().getName().equals(namespaceToExclude));
        }
    }
    // filter target platform
    if(TargetPlatform.isValid(options.targetPlatform)) {
        matchingExtensions = matchingExtensions.filter(extension -> extension.getVersions().stream().anyMatch(ev -> ev.getTargetPlatform().equals(options.targetPlatform)));
    }
    // filter category (case-insensitive match against the latest version's categories)
    if (options.category != null) {
        matchingExtensions = matchingExtensions.filter(extension -> {
            var latest = repositories.findLatestVersion(extension, null, false, true);
            return latest.getCategories().stream().anyMatch(category -> category.equalsIgnoreCase(options.category));
        });
    }
    // filter text
    // NOTE(review): name/description/displayName are lowercased before the match,
    // but the namespace name is not — confirm whether namespace matching is
    // intentionally case-sensitive.
    if (options.queryString != null) {
        matchingExtensions = matchingExtensions.filter(extension -> {
            var latest = repositories.findLatestVersion(extension, null, false, true);
            return extension.getName().toLowerCase().contains(options.queryString.toLowerCase())
                || extension.getNamespace().getName().contains(options.queryString.toLowerCase())
                || (latest.getDescription() != null && latest.getDescription()
                    .toLowerCase().contains(options.queryString.toLowerCase()))
                || (latest.getDisplayName() != null && latest.getDisplayName()
                    .toLowerCase().contains(options.queryString.toLowerCase()));
        });
    }
    // need to perform the sortBy()
    // 'relevance' | 'timestamp' | 'rating' | 'downloadCount';
    Stream<ExtensionSearch> searchEntries;
    if("relevance".equals(options.sortBy) || "rating".equals(options.sortBy)) {
        // Relevance/rating sorting needs corpus-wide stats to score each entry.
        var searchStats = new SearchStats(repositories);
        searchEntries = matchingExtensions.stream().map(extension -> relevanceService.toSearchEntry(extension, searchStats));
    } else {
        searchEntries = matchingExtensions.stream().map(extension -> {
            var latest = repositories.findLatestVersion(extension, null, false, true);
            var targetPlatforms = repositories.findExtensionTargetPlatforms(extension);
            return extension.toSearch(latest, targetPlatforms);
        });
    }
    var comparators = new HashMap<>(Map.of(
            "relevance", new RelevanceComparator(),
            "timestamp", new TimestampComparator(),
            "rating", new RatingComparator(),
            "downloadCount", new DownloadedCountComparator()
    ));
    var comparator = comparators.get(options.sortBy);
    if(comparator != null) {
        searchEntries = searchEntries.sorted(comparator);
    }
    var sortedExtensions = searchEntries.collect(Collectors.toList());
    // need to do sortOrder
    // 'asc' | 'desc';
    if ("desc".equals(options.sortOrder)) {
        // reverse the order
        Collections.reverse(sortedExtensions);
    }
    // Paging: clamp both indices so out-of-range offsets yield an empty page
    // instead of throwing.
    var totalHits = sortedExtensions.size();
    var endIndex = Math.min(sortedExtensions.size(), options.requestedOffset + options.requestedSize);
    var startIndex = Math.min(endIndex, options.requestedOffset);
    sortedExtensions = sortedExtensions.subList(startIndex, endIndex);
    List<SearchHit<ExtensionSearch>> searchHits;
    if (sortedExtensions.isEmpty()) {
        searchHits = Collections.emptyList();
    } else {
        // client is interested only in the extension IDs
        searchHits = sortedExtensions.stream().map(extensionSearch -> new SearchHit<>(null, null, null, 0.0f, null, null, null, null, null, null, extensionSearch)).collect(Collectors.toList());
    }
    return new SearchHitsImpl<>(totalHits, TotalHitsRelation.OFF, 0f, null, null, searchHits, null, null);
}
@Test
public void testPages() {
    // Seven identical extensions, page size 2, offset 4: total hits stay 7 but
    // only the third page (ext5, ext6) is returned.
    var ext1 = mockExtension("ext1", 3.0, 100, 0, "redhat", List.of("Snippets", "Programming Languages"));
    var ext2 = mockExtension("ext2", 3.0, 100, 0, "redhat", List.of("Snippets", "Programming Languages"));
    var ext3 = mockExtension("ext3", 3.0, 100, 0, "redhat", List.of("Snippets", "Programming Languages"));
    var ext4 = mockExtension("ext4", 3.0, 100, 0, "redhat", List.of("Snippets", "Programming Languages"));
    var ext5 = mockExtension("ext5", 3.0, 100, 0, "redhat", List.of("Snippets", "Programming Languages"));
    var ext6 = mockExtension("ext6", 3.0, 100, 0, "redhat", List.of("Snippets", "Programming Languages"));
    var ext7 = mockExtension("ext7", 3.0, 100, 0, "redhat", List.of("Snippets", "Programming Languages"));
    Mockito.when(repositories.findAllActiveExtensions()).thenReturn(Streamable.of(List.of(ext1, ext2, ext3, ext4, ext5, ext6, ext7)));
    var pageSizeItems = 2;
    var searchOptions = new ISearchService.Options(null, null, TargetPlatform.NAME_UNIVERSAL, pageSizeItems, 4, null, null, false);
    var result = search.search(searchOptions);
    // 7 total hits
    assertThat(result.getTotalHits()).isEqualTo(7);
    // But it should only contains 2 search items as specified by the pageSize
    var hits = result.getSearchHits();
    assertThat(hits.size()).isEqualTo(pageSizeItems);
    assertThat(getIdFromExtensionHits(hits, 0)).isEqualTo(getIdFromExtensionName("ext5"));
    assertThat(getIdFromExtensionHits(hits, 1)).isEqualTo(getIdFromExtensionName("ext6"));
}
@Override
public void setMetadata(final Path file, final TransferStatus status) throws BackgroundException {
    // Write user metadata to an Azure container or blob. HTTP-header style keys
    // (Cache-Control, Content-Type) are routed into blob properties instead of
    // metadata, which is why they are pruned from the metadata map.
    try {
        final BlobRequestOptions options = new BlobRequestOptions();
        if(containerService.isContainer(file)) {
            final CloudBlobContainer container = session.getClient().getContainerReference(containerService.getContainer(file).getName());
            container.setMetadata(new HashMap<>(status.getMetadata()));
            container.uploadMetadata(AccessCondition.generateEmptyCondition(), options, context);
        }
        else {
            final CloudBlob blob = session.getClient().getContainerReference(containerService.getContainer(file).getName())
                .getBlobReferenceFromServer(containerService.getKey(file));
            // Populates the blob properties and metadata
            blob.downloadAttributes();
            // Replace metadata
            final HashMap<String, String> pruned = new HashMap<>();
            for(Map.Entry<String, String> m : status.getMetadata().entrySet()) {
                final BlobProperties properties = blob.getProperties();
                if(HttpHeaders.CACHE_CONTROL.equalsIgnoreCase(m.getKey())) {
                    // Update properties
                    properties.setCacheControl(m.getValue());
                    continue;
                }
                if(HttpHeaders.CONTENT_TYPE.equalsIgnoreCase(m.getKey())) {
                    // Update properties
                    properties.setContentType(m.getValue());
                    continue;
                }
                pruned.put(m.getKey(), m.getValue());
            }
            blob.setMetadata(pruned);
            // Upload metadata first, then the modified properties.
            blob.uploadMetadata(AccessCondition.generateEmptyCondition(), options, context);
            blob.uploadProperties();
        }
    }
    catch(URISyntaxException e) {
        throw new NotfoundException(e.getMessage(), e);
    }
    catch(StorageException e) {
        throw new AzureExceptionMappingService().map("Failure to write attributes of {0}", e, file);
    }
}
@Test
public void testSetMetadata() throws Exception {
    // Round-trip a single custom metadata entry on a freshly created blob,
    // then clean up the test file.
    final Path container = new Path("cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final Path test = new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    new AzureTouchFeature(session, null).touch(test, new TransferStatus());
    final String v = new AlphanumericRandomStringService().random();
    new AzureMetadataFeature(session, null).setMetadata(test, Collections.singletonMap("Test", v));
    final Map<String, String> metadata = new AzureMetadataFeature(session, null).getMetadata(test);
    assertFalse(metadata.isEmpty());
    assertTrue(metadata.containsKey("Test"));
    assertEquals(v, metadata.get("Test"));
    new AzureDeleteFeature(session, null).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Coerces a raw string into an instance of the requested target type.
 * Supports primitives and their wrappers, {@code String}, {@code ByteString}
 * (Avro-encoded), and enum types. Returns {@code null} for {@code null} input.
 *
 * @throws IllegalArgumentException when the target type is unsupported
 */
public static Object coerceString(String value, Class<?> clazz) {
    if (value == null) {
        return null;
    }
    if (String.class.equals(clazz)) {
        return value;
    }
    if (Boolean.class.equals(clazz) || Boolean.TYPE.equals(clazz)) {
        return Boolean.valueOf(value);
    }
    if (Integer.TYPE.equals(clazz) || Integer.class.equals(clazz)) {
        return Integer.valueOf(value);
    }
    if (Long.TYPE.equals(clazz) || Long.class.equals(clazz)) {
        return Long.valueOf(value);
    }
    if (Short.TYPE.equals(clazz) || Short.class.equals(clazz)) {
        return Short.valueOf(value);
    }
    if (Float.TYPE.equals(clazz) || Float.class.equals(clazz)) {
        return Float.valueOf(value);
    }
    if (Double.TYPE.equals(clazz) || Double.class.equals(clazz)) {
        return Double.valueOf(value);
    }
    if (ByteString.class.equals(clazz)) {
        return ByteString.copyAvroString(value, true);
    }
    if (clazz.isEnum()) {
        return DataTemplateUtil.coerceOutput(value, clazz);
    }
    throw new IllegalArgumentException("Cannot coerce String to type: " + value + " -> " + clazz.getName());
}
@Test
public void test() {
    // Exercises every supported coercion target, including enum values and the
    // $UNKNOWN fallback for unrecognized enum symbols.
    Assert.assertSame(ValueConverter.coerceString(null, Object.class), null);
    Assert.assertEquals(ValueConverter.coerceString("Test String", String.class), "Test String");
    Assert.assertSame(ValueConverter.coerceString("true", boolean.class), true);
    Assert.assertSame(ValueConverter.coerceString("false", Boolean.class), false);
    Assert.assertEquals(ValueConverter.coerceString("1", short.class), (short) 1);
    Assert.assertEquals(ValueConverter.coerceString("2", Short.class), (short) 2);
    Assert.assertEquals(ValueConverter.coerceString("3", int.class), 3);
    Assert.assertEquals(ValueConverter.coerceString("4", Integer.class), 4);
    Assert.assertEquals(ValueConverter.coerceString("5", long.class), 5L);
    Assert.assertEquals(ValueConverter.coerceString("6", Long.class), 6L);
    Assert.assertEquals(ValueConverter.coerceString("7.8", float.class), 7.8F);
    Assert.assertEquals(ValueConverter.coerceString("9.10", Float.class), 9.1F);
    Assert.assertEquals(ValueConverter.coerceString("11", Float.class), 11F);
    Assert.assertEquals(ValueConverter.coerceString("12.13", double.class), 12.13D);
    Assert.assertEquals(ValueConverter.coerceString("14.15", Double.class), 14.15D);
    Assert.assertEquals(ValueConverter.coerceString(_bytes16, ByteString.class), ByteString.copyAvroString(_bytes16, true));
    Assert.assertSame(ValueConverter.coerceString("APPLE", Fruits.class), Fruits.APPLE);
    Assert.assertSame(ValueConverter.coerceString("ORANGE", Fruits.class), Fruits.ORANGE);
    Assert.assertSame(ValueConverter.coerceString("BLUEBERRY", Fruits.class), Fruits.$UNKNOWN);
}
@Override
public void define(Context context) {
    // Register the webhooks web-service controller and all of its actions.
    NewController controller = context.createController(WEBHOOKS_CONTROLLER);
    controller.setDescription("Webhooks allow to notify external services when a project analysis is done");
    controller.setSince("6.2");
    for (WebhooksWsAction action : actions) {
        action.define(controller);
    }
    // done() finalizes the controller; no further actions may be added after this.
    controller.done();
}
@Test
public void test_definition() {
    // The controller must be registered under "api/webhooks" with a description
    // and the documented "since" version.
    WebhooksWsAction action = newFakeAction();
    WebhooksWs underTest = new WebhooksWs(action);
    WebService.Context context = new WebService.Context();
    underTest.define(context);
    WebService.Controller controller = context.controller("api/webhooks");
    assertThat(controller).isNotNull();
    assertThat(controller.description()).isNotEmpty();
    assertThat(controller.since()).isEqualTo("6.2");
}
/**
 * Proactive support is enabled only when configuration properties are present
 * and the metrics-enable flag is set.
 */
public boolean isProactiveSupportEnabled() {
    return properties != null && getMetricsEnabled();
}
@Test
public void isProactiveSupportEnabledFull() {
    // Given: the metrics-enable flag is explicitly set to true
    Properties serverProperties = new Properties();
    serverProperties.setProperty(BaseSupportConfig.CONFLUENT_SUPPORT_METRICS_ENABLE_CONFIG, "true");
    BaseSupportConfig supportConfig = new TestSupportConfig(serverProperties);
    // When/Then
    assertTrue(supportConfig.isProactiveSupportEnabled());
}
// Initializes the generated model template's default constructor, then rewrites
// the "algorithmName" argument of its super(...) invocation from the PMML model.
static void setConstructor(final DroolsCompilationDTO<TreeModel> compilationDTO,
                           final ClassOrInterfaceDeclaration modelTemplate) {
    KiePMMLModelFactoryUtils.init(compilationDTO, modelTemplate);
    final ConstructorDeclaration constructorDeclaration =
        modelTemplate.getDefaultConstructor().orElseThrow(() -> new KiePMMLInternalException(String.format(MISSING_DEFAULT_CONSTRUCTOR, modelTemplate.getName())));
    final BlockStmt body = constructorDeclaration.getBody();
    // The template is expected to start its constructor with an explicit super(...) call.
    final ExplicitConstructorInvocationStmt superStatement =
        CommonCodegenUtils.getExplicitConstructorInvocationStmt(body)
            .orElseThrow(() -> new KiePMMLException(String.format(MISSING_CONSTRUCTOR_IN_BODY, body)));
    CommonCodegenUtils.setExplicitConstructorInvocationStmtArgument(superStatement, "algorithmName",
                                                                    String.format("\"%s\"", compilationDTO.getModel().getAlgorithmName()));
}
@Test
void setConstructor() {
    // After setConstructor(...) the template's default constructor must carry the
    // expected super(...) arguments and field assignments for the tree model.
    final String targetField = "whatIdo";
    final ClassOrInterfaceDeclaration modelTemplate = classOrInterfaceDeclaration.clone();
    final CommonCompilationDTO<TreeModel> compilationDTO =
        CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME, pmml, treeModel, new PMMLCompilationContextMock(), "FILENAME");
    final DroolsCompilationDTO<TreeModel> droolsCompilationDTO =
        DroolsCompilationDTO.fromCompilationDTO(compilationDTO, new HashMap<>());
    KiePMMLTreeModelFactory.setConstructor(droolsCompilationDTO, modelTemplate);
    Map<Integer, Expression> superInvocationExpressionsMap = new HashMap<>();
    superInvocationExpressionsMap.put(0, new NameExpr(String.format("\"%s\"", "FILENAME")));
    superInvocationExpressionsMap.put(1, new NameExpr(String.format("\"%s\"", treeModel.getModelName())));
    superInvocationExpressionsMap.put(3, new NameExpr(String.format("\"%s\"", treeModel.getAlgorithmName())));
    MINING_FUNCTION miningFunction = MINING_FUNCTION.byName(treeModel.getMiningFunction().value());
    PMML_MODEL pmmlModel = PMML_MODEL.byName(treeModel.getClass().getSimpleName());
    Map<String, Expression> assignExpressionMap = new HashMap<>();
    assignExpressionMap.put("targetField", new StringLiteralExpr(targetField));
    assignExpressionMap.put("miningFunction", new NameExpr(miningFunction.getClass().getName() + "." + miningFunction.name()));
    assignExpressionMap.put("pmmlMODEL", new NameExpr(pmmlModel.getClass().getName() + "." + pmmlModel.name()));
    ConstructorDeclaration constructorDeclaration = modelTemplate.getDefaultConstructor().get();
    assertThat(commonEvaluateConstructor(constructorDeclaration, getSanitizedClassName(treeModel.getModelName()), superInvocationExpressionsMap, assignExpressionMap)).isTrue();
}
@Override
public Set<K> keySet() {
    // Decode every serialized key in the backing store into a new, detached set;
    // callers get a snapshot, not a live view.
    final Set<K> decodedKeys = Sets.newHashSet();
    items.keySet().forEach(encodedKey -> decodedKeys.add(serializer.decode(encodedKey)));
    return decodedKeys;
}
@Test
public void testKeySet() throws Exception {
    //Tests key set generation
    fillMap(10);
    Set<Integer> keys = map.keySet();
    for (int i = 0; i < 10; i++) {
        assertTrue("The key set doesn't contain all keys 0-9", keys.contains(i));
    }
    assertEquals("The key set has an incorrect number of entries", 10, keys.size());
}
/**
 * Checks that the given path, once normalized, stays confined to the working
 * directory: it must normalize cleanly (".." sequences escaping the root make
 * {@code FilenameUtils.normalize} return {@code null}) and carry no filesystem
 * prefix such as {@code C:\} or a leading {@code /}.
 *
 * NOTE(review): despite the "OutsideWorkingDir" name, this returns {@code true}
 * when the path stays INSIDE the working directory — confirm naming with callers.
 */
public static boolean isNormalizedPathOutsideWorkingDir(String path) {
    final String normalized = FilenameUtils.normalize(path);
    if (normalized == null) {
        // Un-normalizable paths (e.g. "../../etc") escape the working directory.
        return false;
    }
    // A blank prefix means the path is relative, i.e. confined to the sandbox.
    return StringUtils.isBlank(FilenameUtils.getPrefix(normalized));
}
@Test
public void shouldReturnFalseEvenIfAnAbsolutePathKeepsYouInsideSandbox() {
    // An absolute path is rejected (false) even when it points inside the
    // current folder, because it carries a filesystem prefix.
    File file = new File("somethingInsideCurrentFolder");
    assertThat(FilenameUtil.isNormalizedPathOutsideWorkingDir(file.getAbsolutePath()), is(false));
}
// True when this sensor's recording level is active under the configured
// metrics record level.
public boolean shouldRecord() {
    return this.recordingLevel.shouldRecord(config.recordLevel().id);
}
@Test
public void testShouldRecordForTraceLevelSensor() {
    // A TRACE-level sensor records only when the config level is TRACE;
    // INFO and DEBUG configs must suppress it.
    Sensor traceSensor = new Sensor(null, "traceSensor", null, INFO_CONFIG, Time.SYSTEM, 0, Sensor.RecordingLevel.TRACE);
    assertFalse(traceSensor.shouldRecord());
    traceSensor = new Sensor(null, "traceSensor", null, DEBUG_CONFIG, Time.SYSTEM, 0, Sensor.RecordingLevel.TRACE);
    assertFalse(traceSensor.shouldRecord());
    traceSensor = new Sensor(null, "traceSensor", null, TRACE_CONFIG, Time.SYSTEM, 0, Sensor.RecordingLevel.TRACE);
    assertTrue(traceSensor.shouldRecord());
}
@Override
public Optional<Object> removeCustomContextData(String key) {
    // Only remove when a value is currently mapped; otherwise report absence
    // without touching the underlying map.
    if (getCustomContextData(key).isPresent()) {
        return Optional.of(_customRequestContext.remove(key));
    }
    return Optional.empty();
}
@Test
public void testRemoveCustomContextData() throws RestLiSyntaxException {
    // Removing an existing entry returns the same instance and leaves the key absent.
    final ResourceContextImpl context = new ResourceContextImpl();
    String bar = "bar";
    context.putCustomContextData("foo", bar);
    Optional<Object> barRemove = context.removeCustomContextData("foo");
    Optional<Object> barAfterRemove = context.getCustomContextData("foo");
    Assert.assertSame(barRemove.get(), bar);
    Assert.assertFalse(barAfterRemove.isPresent());
}
// Fetches committed offsets for partitions still awaiting an initial position
// and seeds the subscription state with them. Returns false when the fetch did
// not complete within the timer.
public boolean initWithCommittedOffsetsIfNeeded(Timer timer) {
    final Set<TopicPartition> initializingPartitions = subscriptions.initializingPartitions();
    final Map<TopicPartition, OffsetAndMetadata> offsets = fetchCommittedOffsets(initializingPartitions, timer);
    // "offsets" will be null if the offset fetch requests did not receive responses within the given timeout
    if (offsets == null) return false;
    refreshCommittedOffsets(offsets, this.metadata, this.subscriptions);
    return true;
}
@Test
public void testRefreshOffsetWithValidation() {
    // An offset committed under an older leader epoch loads the position but
    // leaves the partition awaiting validation against the current leader.
    client.prepareResponse(groupCoordinatorResponse(node, Errors.NONE));
    coordinator.ensureCoordinatorReady(time.timer(Long.MAX_VALUE));
    subscriptions.assignFromUser(singleton(t1p));
    // Initial leader epoch of 4
    MetadataResponse metadataResponse = RequestTestUtils.metadataUpdateWith("kafka-cluster", 1,
        Collections.emptyMap(), singletonMap(topic1, 1), tp -> 4);
    client.updateMetadata(metadataResponse);
    // Load offsets from previous epoch
    client.prepareResponse(offsetFetchResponse(t1p, Errors.NONE, "", 100L, Optional.of(3)));
    coordinator.initWithCommittedOffsetsIfNeeded(time.timer(Long.MAX_VALUE));
    // Offset gets loaded, but requires validation
    assertEquals(Collections.emptySet(), subscriptions.initializingPartitions());
    assertFalse(subscriptions.hasAllFetchPositions());
    assertTrue(subscriptions.awaitingValidation(t1p));
    assertEquals(subscriptions.position(t1p).offset, 100L);
    assertNull(subscriptions.validPosition(t1p));
}
// Legacy setter that seeds the scan with target table, broker config and file
// groups. Deprecated — presumably superseded by a richer setLoadInfo overload;
// TODO confirm the intended replacement before removal.
@Deprecated
public void setLoadInfo(Table targetTable,
                        BrokerDesc brokerDesc,
                        List<BrokerFileGroup> fileGroups) {
    this.targetTable = targetTable;
    this.brokerDesc = brokerDesc;
    this.fileGroups = fileGroups;
}
@Test
public void testNoFilesFound() {
    // When no broker files match the requested paths, instance calculation must
    // fail with a message listing (a truncated sample of) the missing paths.
    Analyzer analyzer = new Analyzer(GlobalStateMgr.getCurrentState(), new ConnectContext());
    DescriptorTable descTable = analyzer.getDescTbl();
    TupleDescriptor tupleDesc = descTable.createTupleDescriptor("DestTableTuple");
    List<List<TBrokerFileStatus>> fileStatusesList = Lists.newArrayList();
    fileStatusesList.add(Lists.newArrayList());
    FileScanNode scanNode = new FileScanNode(new PlanNodeId(0), tupleDesc, "FileScanNode", fileStatusesList, 0,
                                             WarehouseManager.DEFAULT_WAREHOUSE_ID);
    List<String> files = Lists.newArrayList("hdfs://127.0.0.1:9001/file1", "hdfs://127.0.0.1:9001/file2",
                                            "hdfs://127.0.0.1:9001/file3", "hdfs://127.0.0.1:9001/file4");
    DataDescription desc = new DataDescription("testTable", null, files, null, null, null, "csv", false, null);
    BrokerFileGroup brokerFileGroup = new BrokerFileGroup(desc);
    Deencapsulation.setField(brokerFileGroup, "filePaths", files);
    List<BrokerFileGroup> fileGroups = Lists.newArrayList(brokerFileGroup);
    scanNode.setLoadInfo(jobId, txnId, null, brokerDesc, fileGroups, true, loadParallelInstanceNum);
    ExceptionChecker.expectThrowsWithMsg(UserException.class,
        "No files were found matching the pattern(s) or path(s): " +
        "'hdfs://127.0.0.1:9001/file1, hdfs://127.0.0.1:9001/file2, hdfs://127.0.0.1:9001/file3, ...'",
        () -> Deencapsulation.invoke(scanNode, "getFileStatusAndCalcInstance"));
}
// Converts this pre-join config back into a plain CacheConfig copy
// (the boolean flag controls resolution behavior in copy() — see copy()).
CacheConfig<K, V> asCacheConfig() {
    return this.copy(new CacheConfig<>(), false);
}
@Test
public void serializationSucceeds_cacheLoaderFactory() {
    // A PreJoinCacheConfig with a custom cache-loader factory must survive a
    // serialize/deserialize round trip, including the factory's concrete type.
    CacheConfig<String, Person> cacheConfig = newDefaultCacheConfig("test");
    cacheConfig.setCacheLoaderFactory(new PersonCacheLoaderFactory());
    PreJoinCacheConfig preJoinCacheConfig = new PreJoinCacheConfig(cacheConfig);
    Data data = serializationService.toData(preJoinCacheConfig);
    PreJoinCacheConfig deserialized = serializationService.toObject(data);
    assertEquals(preJoinCacheConfig, deserialized);
    assertEquals(cacheConfig, deserialized.asCacheConfig());
    assertTrue("Invalid Factory Class", deserialized.getCacheLoaderFactory() instanceof PersonCacheLoaderFactory);
}
@Override
protected void doUpdate(final List<PluginData> dataList) {
    // Push each updated plugin configuration to the subscriber, one at a time.
    for (final PluginData pluginData : dataList) {
        pluginDataSubscriber.onSubscribe(pluginData);
    }
}
@Test
public void testDoUpdate() {
    // Every element of the list must be forwarded to the subscriber exactly once.
    List<PluginData> pluginDataList = createFakePluginDataObjects(4);
    pluginDataHandler.doUpdate(pluginDataList);
    pluginDataList.forEach(verify(subscriber)::onSubscribe);
}
/**
 * Builds an L0 lambda-modification instruction. Only {@link OchSignal}
 * lambdas are supported; anything else is rejected.
 */
public static L0ModificationInstruction modL0Lambda(Lambda lambda) {
    checkNotNull(lambda, "L0 OCh signal cannot be null");
    if (!(lambda instanceof OchSignal)) {
        throw new UnsupportedOperationException(String.format("Unsupported type: %s", lambda));
    }
    return new ModOchSignalInstruction((OchSignal) lambda);
}
@Test
public void testModL0LambdaMethod() {
    // An OchSignal lambda must yield a ModOchSignalInstruction wrapping that signal.
    Instruction instruction = Instructions.modL0Lambda(och1);
    L0ModificationInstruction.ModOchSignalInstruction ochInstruction =
        checkAndConvert(instruction, Instruction.Type.L0MODIFICATION, L0ModificationInstruction.ModOchSignalInstruction.class);
    assertThat(ochInstruction.lambda(), is(och1));
}
@Override
public Iterable<Host> getHosts() {
    // Permission gate first, then delegate to the backing store.
    checkPermission(HOST_READ);
    return store.getHosts();
}
@Test
public void getHosts() {
    // Two detected hosts must be retrievable via every lookup dimension
    // (all, VLAN, MAC, IP, location), and unknown locations stay empty.
    detect(HID1, MAC1, VLAN1, LOC1, IP1);
    detect(HID2, MAC2, VLAN1, LOC2, IP2);
    validateHosts("host not properly stored", mgr.getHosts(), HID1, HID2);
    validateHosts("can't get hosts by VLAN", mgr.getHostsByVlan(VLAN1), HID1, HID2);
    validateHosts("can't get hosts by MAC", mgr.getHostsByMac(MAC1), HID1);
    validateHosts("can't get hosts by IP", mgr.getHostsByIp(IP1), HID1);
    validateHosts("can't get hosts by location", mgr.getConnectedHosts(LOC1), HID1);
    assertTrue("incorrect host location", mgr.getConnectedHosts(DID2).isEmpty());
}
// Returns the current lifecycle state (package-private accessor for tests/peers).
State getState() {
    return state;
}
@Test
public void verify_regular_start_restart_cycle() {
    // INIT -> STARTING -> OPERATIONAL -> RESTARTING -> STARTING -> OPERATIONAL
    // must all be legal transitions.
    assertThat(underTest.getState()).isEqualTo(INIT);
    verifyMoveTo(STARTING);
    verifyMoveTo(OPERATIONAL);
    verifyMoveTo(RESTARTING);
    verifyMoveTo(STARTING);
    verifyMoveTo(OPERATIONAL);
}
/**
 * Decides whether executing the statement requires opening a transaction:
 * DDL and DML statements do, except for table-less SELECTs (e.g. "SELECT 1").
 */
public static boolean needOpenTransaction(final SQLStatement sqlStatement) {
    if (sqlStatement instanceof SelectStatement) {
        final SelectStatement selectStatement = (SelectStatement) sqlStatement;
        if (!selectStatement.getFrom().isPresent()) {
            return false;
        }
    }
    return sqlStatement instanceof DDLStatement || sqlStatement instanceof DMLStatement;
}
@Test
void assertNeedOpenTransactionForSelectStatement() {
    // A SELECT without a FROM clause needs no transaction; adding one flips the result.
    SelectStatement selectStatement = new MySQLSelectStatement();
    assertFalse(AutoCommitUtils.needOpenTransaction(selectStatement));
    selectStatement.setFrom(mock(SimpleTableSegment.class));
    assertTrue(AutoCommitUtils.needOpenTransaction(selectStatement));
}
// Static factory for the trigger that fires once the watermark passes the
// end of the window.
public static FromEndOfWindow pastEndOfWindow() {
    return new FromEndOfWindow();
}
@Test
public void testLateFiringsToString() {
    // toString() must reflect the composed late-firings sub-trigger by name.
    TriggerStateMachine trigger = AfterWatermarkStateMachine.pastEndOfWindow()
        .withLateFirings(StubTriggerStateMachine.named("t1"));
    assertEquals("AfterWatermark.pastEndOfWindow().withLateFirings(t1)", trigger.toString());
}
// Maps a low-level IOException for the given directory into a BackgroundException
// with a generic "Connection failed" title.
public BackgroundException map(final IOException failure, final Path directory) {
    return super.map("Connection failed", failure, directory);
}
@Test
public void testPlaceholder() {
    // The "{0}" placeholder is replaced with the capitalized path name, and the
    // socket failure is expanded into a human-readable detail message.
    final BackgroundException e = new DefaultIOExceptionMappingService().map("{0} message",
        new SocketException("s"), new Path("/n", EnumSet.of(Path.Type.directory, Path.Type.volume)));
    assertEquals("N message.", e.getMessage());
    assertEquals("S. The connection attempt was rejected. The server may be down, or your network may not be properly configured.", e.getDetail());
}
// Static factory for the "release missing" error; the message embeds the
// offending release id.
public static NotFoundException releaseNotFound(Object releaseId) {
    return new NotFoundException("release not found for releaseId:%s", releaseId);
}
@Test
public void testReleaseNotFoundException() {
    // The factory must interpolate the release id into the message template.
    NotFoundException exception = NotFoundException.releaseNotFound(66);
    assertEquals(exception.getMessage(), "release not found for releaseId:66");
}
// FEEL "distinct values" function: drops duplicates from a list while
// preserving first-occurrence order; a non-collection argument is wrapped
// into a singleton list.
public FEELFnResult<List<Object>> invoke(@ParameterName( "list" ) Object list) {
    if ( list == null ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null"));
    }
    // spec requires us to return a new list
    final List<Object> result = new ArrayList<>();
    if ( list instanceof Collection ) {
        for (Object o : (Collection) list) {
            // contains() relies on equals(), so duplicates are detected by value equality
            if ( !result.contains( o ) ) {
                result.add(o);
            }
        }
    } else {
        result.add( list );
    }
    return FEELFnResult.ofResult( result );
}
@Test
void invokeParamArray() {
    // An array argument is treated as a scalar and wrapped into a singleton list.
    FunctionTestUtil.assertResultList(
        distinctValuesFunction.invoke(new Object[]{BigDecimal.valueOf(10.1)}),
        Collections.singletonList(new Object[]{BigDecimal.valueOf(10.1)}));
}
/**
 * Estimates the scanned byte size as rowCount multiplied by the fixed per-row
 * size, where the per-row size is the sum of the type sizes of all slots in
 * the tuple descriptor.
 *
 * @param rowCount        estimated number of rows in the split
 * @param tupleDescriptor descriptor whose slots define the row layout
 * @return estimated total bytes
 */
public long getEstimatedLength(long rowCount, TupleDescriptor tupleDescriptor) {
    // Sum as long (single pass, no intermediate list) to avoid int overflow
    // on very wide schemas.
    long rowSize = tupleDescriptor.getSlots().stream()
            .mapToLong(slot -> slot.getColumn().getType().getTypeSize())
            .sum();
    return rowCount * rowSize;
}
@Test
public void testEstimatedLength(@Mocked PaimonTable table) {
    // A split of 500 rows (200 + 300) with an INT + STRING schema must estimate
    // 500 * (4 + 16) = 10000 bytes.
    BinaryRow row1 = new BinaryRow(2);
    BinaryRowWriter writer = new BinaryRowWriter(row1, 10);
    writer.writeInt(0, 2000);
    writer.writeInt(1, 4444);
    writer.complete();
    List<DataFileMeta> meta1 = new ArrayList<>();
    meta1.add(new DataFileMeta("file1", 100, 200, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, null,
                               1, 1, 1, DUMMY_LEVEL, 0L, null));
    meta1.add(new DataFileMeta("file2", 100, 300, EMPTY_MIN_KEY, EMPTY_MAX_KEY, EMPTY_KEY_STATS, null,
                               1, 1, 1, DUMMY_LEVEL, 0L, null));
    DataSplit split = DataSplit.builder().withSnapshot(1L).withPartition(row1).withBucket(1)
        .withBucketPath("not used").withDataFiles(meta1).isStreaming(false).build();
    TupleDescriptor desc = new TupleDescriptor(new TupleId(0));
    desc.setTable(table);
    SlotDescriptor slot1 = new SlotDescriptor(new SlotId(1), "id", Type.INT, false);
    slot1.setColumn(new Column("id", Type.INT));
    SlotDescriptor slot2 = new SlotDescriptor(new SlotId(2), "name", Type.STRING, false);
    slot2.setColumn(new Column("name", Type.STRING));
    desc.addSlot(slot1);
    desc.addSlot(slot2);
    PaimonScanNode scanNode = new PaimonScanNode(new PlanNodeId(0), desc, "XXX");
    long totalFileLength = scanNode.getEstimatedLength(split.rowCount(), desc);
    Assert.assertEquals(10000, totalFileLength);
}
@Override
public OpenstackNode removeNode(String hostname) {
    // Reject null/empty hostnames up front; the store performs the actual removal.
    checkArgument(!Strings.isNullOrEmpty(hostname), ERR_NULL_HOSTNAME);
    OpenstackNode osNode = osNodeStore.removeNode(hostname);
    log.info(String.format(MSG_NODE, hostname, MSG_REMOVED));
    return osNode;
}
// A null hostname must be rejected with IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void testRemoveNullNode() {
    target.removeNode(null);
}
@Override
public CloseableIterator<ScannerReport.CpdTextBlock> readCpdTextBlocks(int componentRef) {
    // Lazily initialize the underlying report reader, then delegate.
    ensureInitialized();
    return delegate.readCpdTextBlocks(componentRef);
}
@Test
public void readComponentDuplicationBlocks_returns_empty_list_if_file_does_not_exist() {
    // A missing CPD file yields an exhausted (empty) iterator, not an error.
    assertThat(underTest.readCpdTextBlocks(COMPONENT_REF)).isExhausted();
}
public boolean awaitAllMessages(long timeout, TimeUnit timeUnit) { // Create a new message drain latch as a local variable to avoid SpotBugs warnings about inconsistent synchronization // on an instance variable when invoking CountDownLatch::await outside a synchronized block CountDownLatch messageDrainLatch; synchronized (this) { messageDrainLatch = new CountDownLatch(numUnackedMessages); this.messageDrainLatch = messageDrainLatch; } try { return messageDrainLatch.await(timeout, timeUnit); } catch (InterruptedException e) { return false; } }
@Test
public void testAwaitMessagesNoneSubmitted() {
    // With zero unacked messages the latch is already open, so even a
    // zero-millisecond wait succeeds immediately.
    assertTrue(submittedRecords.awaitAllMessages(0, TimeUnit.MILLISECONDS));
}
@Override
protected SideInputReader getSideInputReaderForViews(
    Iterable<? extends PCollectionView<?>> views) {
    // Wrap the requested views in a streaming-mode reader bound to this context.
    return StreamingModeSideInputReader.of(views, this);
}
@Test
public void testSideInputReaderReconstituted() {
    // The reader must recognize its views both by original reference and after a
    // serialization round trip (views are matched by identity-stable tags).
    Pipeline p = Pipeline.create();
    PCollectionView<String> preview1 = p.apply(Create.of("")).apply(View.asSingleton());
    PCollectionView<String> preview2 = p.apply(Create.of("")).apply(View.asSingleton());
    PCollectionView<String> preview3 = p.apply(Create.of("")).apply(View.asSingleton());
    SideInputReader sideInputReader = executionContext.getSideInputReaderForViews(Arrays.asList(preview1, preview2));
    assertTrue(sideInputReader.contains(preview1));
    assertTrue(sideInputReader.contains(preview2));
    assertFalse(sideInputReader.contains(preview3));
    PCollectionView<String> view1 = SerializableUtils.ensureSerializable(preview1);
    PCollectionView<String> view2 = SerializableUtils.ensureSerializable(preview2);
    PCollectionView<String> view3 = SerializableUtils.ensureSerializable(preview3);
    assertTrue(sideInputReader.contains(view1));
    assertTrue(sideInputReader.contains(view2));
    assertFalse(sideInputReader.contains(view3));
}
// Finalizes this index definition and returns the built, immutable index.
public abstract BuiltIndex<T> build();
@Test
@UseDataProvider("indexWithAndWithoutRelations")
public void fail_when_replica_customization_cant_be_parsed(Index index) {
    // A non-numeric replica setting must fail fast at index construction time
    // with a message naming the offending property and value.
    settings.setProperty(CLUSTER_ENABLED.getKey(), "true");
    settings.setProperty(SEARCH_REPLICAS.getKey(), "ꝱꝲꝳପ");
    SettingsConfiguration settingsConfiguration = newBuilder(settings.asConfig()).setDefaultNbOfShards(5).build();
    IndexMainType mainType = IndexType.main(index, "foo");
    assertThatThrownBy(() -> new SimplestNewIndex(mainType, settingsConfiguration))
        .isInstanceOf(IllegalStateException.class)
        .hasMessage("The property 'sonar.search.replicas' is not an int value: For input string: \"ꝱꝲꝳପ\"");
}
// Returns the headers of this request.
// NOTE(review): the internal map is exposed directly, so external mutations
// would be visible here — confirm callers treat it as read-only.
public Map<String, String> getHeaders() {
    return _headers;
}
// Header names are case-insensitive: setting "HEADER" after "header" must
// overwrite the earlier value, and lookup by the lower-case name sees it.
@Test
public void testHeadersCaseInsensitiveSet() {
    final long id = 42l;
    GetRequestBuilder<Long, TestRecord> builder = generateDummyRequestBuilder();
    Request<TestRecord> request = builder
        .id(id)
        .setHeader("header", "value1")
        .setHeader("HEADER", "value2")
        .build();
    Assert.assertEquals(request.getHeaders().get("header"), "value2");
}
void handleFinish(Resp response, Span span) { if (response == null) throw new NullPointerException("response == null"); if (span.isNoop()) return; if (response.error() != null) { span.error(response.error()); // Ensures MutableSpan.error() for SpanHandler } try { parseResponse(response, span); } catch (Throwable t) { propagateIfFatal(t); Platform.get().log("error parsing response {0}", response, t); } finally { long finishTimestamp = response.finishTimestamp(); if (finishTimestamp == 0L) { span.finish(); } else { span.finish(finishTimestamp); } } }
// The response parser must be invoked with the span's customizer so tags are
// applied through the customization path.
@Test
void handleFinish_parsesTagsWithCustomizer() {
    when(span.customizer()).thenReturn(spanCustomizer);
    handler.handleFinish(response, span);
    verify(responseParser).parse(response, context, spanCustomizer);
}
/**
 * Resolves the approver set as the leader of the department {@code level}
 * levels up from the process starter's own department.
 *
 * @param execution current process execution (used to find the instance)
 * @param level     how many department levels to climb; must be &gt; 0
 * @return the leader's user id as a singleton set, or an empty set when no
 *         department/leader can be resolved
 */
public Set<Long> calculateUsers(DelegateExecution execution, int level) {
    Assert.isTrue(level > 0, "level 必须大于 0");
    // Resolve the process starter (initiator)
    ProcessInstance processInstance = processInstanceService.getProcessInstance(execution.getProcessInstanceId());
    Long startUserId = NumberUtils.parseLong(processInstance.getStartUserId());
    // Walk up to the department at the requested level
    DeptRespDTO dept = null;
    for (int i = 0; i < level; i++) {
        // Department for the current level
        if (dept == null) {
            dept = getStartUserDept(startUserId);
            if (dept == null) { // The starter has no department, so this rule cannot apply
                return emptySet();
            }
        } else {
            DeptRespDTO parentDept = deptApi.getDept(dept.getParentId());
            if (parentDept == null) { // No parent department: stop climbing. Rationale: senior staff may sit in shallower hierarchies
                break;
            }
            dept = parentDept;
        }
    }
    return dept.getLeaderUserId() != null ? asSet(dept.getLeaderUserId()) : emptySet();
}
// When the requested level exceeds the department hierarchy (no parent dept),
// the walk stops early and the last reachable department's leader is used.
@Test
public void testCalculateUsers_noParentDept() {
    // Prepare parameters
    DelegateExecution execution = mockDelegateExecution(1L);
    // Mock the starter user
    AdminUserRespDTO startUser = randomPojo(AdminUserRespDTO.class, o -> o.setDeptId(10L));
    when(adminUserApi.getUser(eq(1L))).thenReturn(startUser);
    DeptRespDTO startUserDept = randomPojo(DeptRespDTO.class, o -> o.setId(10L).setParentId(100L)
            .setLeaderUserId(20L));
    // Mock getDept: the level-2 lookup finds no parent department
    when(deptApi.getDept(eq(10L))).thenReturn(startUserDept);
    when(deptApi.getDept(eq(100L))).thenReturn(null);
    // Invoke
    Set<Long> result = expression.calculateUsers(execution, 2);
    // Assert: falls back to the starter's own department leader
    assertEquals(asSet(20L), result);
}
/**
 * Initializes the provider by reading the signing secret from the file named
 * by {@code AuthenticationFilter.SIGNATURE_SECRET_FILE}, when configured. The
 * whole file content (decoded as UTF-8) is the secret; an empty file is
 * rejected.
 *
 * @throws RuntimeException if the file cannot be read or contains no secret
 */
@Override
public void init(Properties config, ServletContext servletContext,
    long tokenValidity) throws Exception {
  String signatureSecretFile = config.getProperty(
      AuthenticationFilter.SIGNATURE_SECRET_FILE, null);
  if (signatureSecretFile != null) {
    try (Reader reader = new InputStreamReader(Files.newInputStream(
        Paths.get(signatureSecretFile)), StandardCharsets.UTF_8)) {
      StringBuilder sb = new StringBuilder();
      int c = reader.read();
      while (c > -1) {
        sb.append((char) c);
        c = reader.read();
      }
      secret = sb.toString().getBytes(StandardCharsets.UTF_8);
      if (secret.length == 0) {
        throw new RuntimeException("No secret in signature secret file: "
            + signatureSecretFile);
      }
    } catch (IOException ex) {
      // Keep the original cause attached so the underlying I/O failure
      // (missing file, permissions, ...) remains diagnosable.
      throw new RuntimeException("Could not read signature secret file: "
          + signatureSecretFile, ex);
    }
  }
  secrets = new byte[][]{secret};
}
// An existing but empty secret file must be rejected with a RuntimeException
// whose message names the offending file.
@Test
public void testEmptySecretFileThrows() throws Exception {
    File secretFile = File.createTempFile("test_empty_secret", ".txt");
    assertTrue(secretFile.exists());
    FileSignerSecretProvider secretProvider = new FileSignerSecretProvider();
    Properties secretProviderProps = new Properties();
    secretProviderProps.setProperty(
        AuthenticationFilter.SIGNATURE_SECRET_FILE, secretFile.getAbsolutePath());
    Exception exception = assertThrows(RuntimeException.class, new ThrowingRunnable() {
        @Override
        public void run() throws Throwable {
            secretProvider.init(secretProviderProps, null, -1);
        }
    });
    assertTrue(exception.getMessage().startsWith(
        "No secret in signature secret file:"));
}
// Looks up a device by id; returns null when the device is not in the store.
@Override
public Device getDevice(DeviceId deviceId) {
    return devices.get(deviceId);
}
// A stored device is returned as-is; an unknown id yields null.
@Test
public final void testGetDevice() {
    putDevice(DID1, SW1);
    assertDevice(DID1, SW1, deviceStore.getDevice(DID1));
    assertNull("DID2 shouldn't be there", deviceStore.getDevice(DID2));
}
// Periodically re-probes the Elasticsearch version and raises (or clears) an
// ES_VERSION_MISMATCH notification when the currently running version is
// incompatible with the version Graylog was started against.
@Override
public void doRun() {
    if (versionOverride.isPresent()) {
        // A manual version override disables the check entirely.
        LOG.debug("Elasticsearch version is set manually. Not running check.");
        return;
    }
    // An empty probe result means the version could not be determined; nothing happens.
    final Optional<SearchVersion> probedVersion = this.versionProbe.probe(this.elasticsearchHosts);
    probedVersion.ifPresent(version -> {
        if (compatible(this.initialElasticsearchVersion, version)) {
            // Compatible (again): clear a previously raised mismatch notification, if any.
            notificationService.fixed(Notification.Type.ES_VERSION_MISMATCH);
        } else {
            LOG.warn("Elasticsearch version currently running ({}) is incompatible with the one Graylog was started " +
                    "with ({}) - a restart is required!", version, initialElasticsearchVersion);
            final Notification notification = notificationService.buildNow()
                    .addType(Notification.Type.ES_VERSION_MISMATCH)
                    .addSeverity(Notification.Severity.URGENT)
                    .addDetail("initial_version", initialElasticsearchVersion.toString())
                    .addDetail("current_version", version.toString());
            // publishIfFirst avoids duplicate notifications on repeated periodical runs.
            notificationService.publishIfFirst(notification);
        }
    });
}
// When the probe yields no version, the periodical must not touch the
// notification service at all.
@Test
void doesNotDoAnythingIfVersionWasNotProbed() {
    returnProbedVersion(null);
    createPeriodical(SearchVersion.elasticsearch(8, 0, 0)).doRun();
    verifyNoInteractions(notificationService);
}
/**
 * Resolves a DMN {@code <import>} element against a collection of candidate
 * models. Candidates are matched primarily by namespace; when the namespace is
 * ambiguous (several models share it), the optional {@code drools:modelName}
 * additional attribute disambiguates.
 *
 * @param importElement the import to resolve (its parent must be a Definitions)
 * @param dmns          candidate models to search
 * @param idExtractor   maps a candidate to its identifying QName
 *                      (namespace URI + model name as local part)
 * @return Either.right(resolved model) on success, Either.left(error message)
 *         when nothing matches or the match is ambiguous
 */
public static <T> Either<String, T> resolveImportDMN(Import importElement, Collection<T> dmns, Function<T, QName> idExtractor) {
    final String importerDMNNamespace = ((Definitions) importElement.getParent()).getNamespace();
    final String importerDMNName = ((Definitions) importElement.getParent()).getName();
    final String importNamespace = importElement.getNamespace();
    final String importName = importElement.getName();
    final String importLocationURI = importElement.getLocationURI(); // This is optional
    final String importModelName = importElement.getAdditionalAttributes().get(TImport.MODELNAME_QNAME);
    LOGGER.debug("Resolving an Import in DMN Model with name={} and namespace={}. " +
                    "Importing a DMN model with namespace={} name={} locationURI={}, modelName={}",
            importerDMNName, importerDMNNamespace, importNamespace, importName, importLocationURI, importModelName);
    // All candidates living in the imported namespace.
    List<T> matchingDMNList = dmns.stream()
            .filter(m -> idExtractor.apply(m).getNamespaceURI().equals(importNamespace))
            .toList();
    if (matchingDMNList.size() == 1) {
        T located = matchingDMNList.get(0);
        // Check if the located DMN Model in the NS, correspond for the import `drools:modelName`.
        if (importModelName == null || idExtractor.apply(located).getLocalPart().equals(importModelName)) {
            LOGGER.debug("DMN Model with name={} and namespace={} successfully imported a DMN " +
                            "with namespace={} name={} locationURI={}, modelName={}",
                    importerDMNName, importerDMNNamespace, importNamespace, importName, importLocationURI, importModelName);
            return Either.ofRight(located);
        } else {
            // Single namespace match, but its model name contradicts the requested modelName.
            LOGGER.error("DMN Model with name={} and namespace={} can't import a DMN with namespace={}, name={}, modelName={}, " +
                            "located within namespace only {} but does not match for the actual modelName",
                    importerDMNName, importerDMNNamespace, importNamespace, importName, importModelName, idExtractor.apply(located));
            return Either.ofLeft(String.format(
                    "DMN Model with name=%s and namespace=%s can't import a DMN with namespace=%s, name=%s, modelName=%s, " +
                            "located within namespace only %s but does not match for the actual modelName",
                    importerDMNName, importerDMNNamespace, importNamespace, importName, importModelName, idExtractor.apply(located)));
        }
    } else {
        // Zero or several namespace matches: narrow further by modelName.
        List<T> usingNSandName = matchingDMNList.stream()
                .filter(dmn -> idExtractor.apply(dmn).getLocalPart().equals(importModelName))
                .toList();
        if (usingNSandName.size() == 1) {
            LOGGER.debug("DMN Model with name={} and namespace={} successfully imported a DMN " +
                            "with namespace={} name={} locationURI={}, modelName={}",
                    importerDMNName, importerDMNNamespace, importNamespace, importName, importLocationURI, importModelName);
            return Either.ofRight(usingNSandName.get(0));
        } else if (usingNSandName.isEmpty()) {
            // Nothing matches at all.
            LOGGER.error("DMN Model with name={} and namespace={} failed to import a DMN with namespace={} name={} locationURI={}, modelName={}.",
                    importerDMNName, importerDMNNamespace, importNamespace, importName, importLocationURI, importModelName);
            return Either.ofLeft(String.format(
                    "DMN Model with name=%s and namespace=%s failed to import a DMN with namespace=%s name=%s locationURI=%s, modelName=%s. ",
                    importerDMNName, importerDMNNamespace, importNamespace, importName, importLocationURI, importModelName));
        } else {
            // Still ambiguous even with modelName: report the collision.
            LOGGER.error("DMN Model with name={} and namespace={} detected a collision ({} elements) trying to import a DMN with namespace={} name={} locationURI={}, modelName={}",
                    importerDMNName, importerDMNNamespace, usingNSandName.size(), importNamespace, importName, importLocationURI, importModelName);
            return Either.ofLeft(String.format(
                    "DMN Model with name=%s and namespace=%s detected a collision trying to import a DMN with %s namespace, " +
                            "%s name and modelName %s. There are %s DMN files with the same namespace in your project. " +
                            "Please change the DMN namespaces and make them unique to fix this issue.",
                    importerDMNName, importerDMNNamespace, importNamespace, importName, importModelName, usingNSandName.size()));
        }
    }
}
// Two models with the same name inside the same namespace cannot be resolved
// even with a modelName hint — the resolver must return a Left (error).
@Test
void locateInNSAliasedBadScenario() {
    // this is a BAD scenario are in namespace `nsA` there are 2 models with the same name.
    final Import i = makeImport("nsA", "aliased", "mA");
    final List<QName> available = Arrays.asList(new QName("nsA", "mA"),
                                                new QName("nsA", "mA"),
                                                new QName("nsB", "m3"));
    final Either<String, QName> result = ImportDMNResolverUtil.resolveImportDMN(i, available, Function.identity());
    assertThat(result.isLeft()).isTrue();
}
// Delegates to containsAtLeastElementsIn after accumulating the (at least two)
// expected elements together with the nullable varargs tail.
@CanIgnoreReturnValue
public final Ordered containsAtLeast(
    @Nullable Object firstExpected,
    @Nullable Object secondExpected,
    @Nullable Object @Nullable ... restOfExpected) {
  return containsAtLeastElementsIn(accumulate(firstExpected, secondExpected, restOfExpected));
}
// When expected and actual elements print identically (Integer 1 vs Long 1L),
// the failure message must disambiguate by type and report duplicate counts.
@Test
public void iterableContainsAtLeastFailsWithSameToStringAndHomogeneousListWithDuplicates() {
    expectFailureWhenTestingThat(asList(1L, 2L, 2L)).containsAtLeast(1, 1, 2);
    assertFailureValue("missing (3)", "1 [2 copies], 2 (java.lang.Integer)");
    assertFailureValue("though it did contain (3)", "1, 2 [2 copies] (java.lang.Long)");
}
/**
 * Locates the bundled Scanner Engine JAR under {@code <home>/lib/scanner}.
 *
 * @return the first visible {@code .jar} file found in the scanner directory
 * @throws NotFoundException when the directory does not exist or contains no JAR
 */
@Override
public File getScannerEngine() {
  File scannerHome = new File(fs.getHomeDir(), "lib/scanner");
  if (!scannerHome.exists()) {
    throw new NotFoundException(format("Scanner directory not found: %s", scannerHome.getAbsolutePath()));
  }
  // Same traversal as before (visible files, recursing into directories);
  // the first JAR encountered wins.
  for (File candidate : listFiles(scannerHome, VISIBLE, directoryFileFilter())) {
    if (candidate.getName().endsWith(".jar")) {
      return candidate;
    }
  }
  throw new NotFoundException(format("Scanner JAR not found in directory: %s", scannerHome.getAbsolutePath()));
}
// If the engine file cannot be read, checksum computation must surface as an
// UncheckedIOException with a descriptive message.
@Test
void getScannerEngineMetadata_shouldFail_whenHashComputingFailed() {
    ScannerEngineHandlerImpl spy = spy(new ScannerEngineHandlerImpl(serverFileSystem));
    doReturn(new File("no-file")).when(spy).getScannerEngine();
    assertThatThrownBy(spy::getScannerEngineMetadata)
      .isInstanceOf(UncheckedIOException.class)
      .hasMessageContaining("Unable to compute SHA-256 checksum of the Scanner Engine");
}
/**
 * Creates a kerning pair: the two adjacent glyph/character names and the x/y
 * kerning adjustment to apply between them.
 */
public KernPair(String firstKernCharacter, String secondKernCharacter, float x, float y) {
    this.firstKernCharacter = firstKernCharacter;
    this.secondKernCharacter = secondKernCharacter;
    this.x = x;
    this.y = y;
}
// Constructor arguments must be stored and exposed unchanged via the getters.
@Test
void testKernPair() {
    KernPair kernPair = new KernPair("firstKernCharacter", "secondKernCharacter", 10f, 20f);
    assertEquals("firstKernCharacter", kernPair.getFirstKernCharacter());
    assertEquals("secondKernCharacter", kernPair.getSecondKernCharacter());
    assertEquals(10f, kernPair.getX(), 0.0f);
    assertEquals(20f, kernPair.getY(), 0.0f);
}
// Asks the secrets plugin for its configuration metadata and converts the JSON
// response into PluginConfiguration objects via the v1 message converter.
@Override
public List<PluginConfiguration> getSecretsConfigMetadata(String pluginId) {
    return pluginRequestHelper.submitRequest(pluginId, REQUEST_GET_SECRETS_CONFIG_METADATA, new DefaultPluginInteractionCallback<>() {
        @Override
        public List<PluginConfiguration> onSuccess(String responseBody, Map<String, String> responseHeaders, String resolvedExtensionVersion) {
            return secretsMessageConverterV1.getSecretsConfigMetadataFromResponse(responseBody);
        }
    });
}
// The extension must issue the metadata request to the plugin and parse the
// JSON array response into PluginConfiguration entries.
@Test
void shouldTalkToPlugin_toFetchSecretsConfigMetadata() {
    String responseBody = "[{\"key\":\"Username\",\"metadata\":{\"required\":true,\"secure\":false}},{\"key\":\"Password\",\"metadata\":{\"required\":true,\"secure\":true}}]";
    when(pluginManager.submitTo(eq(PLUGIN_ID), eq(SECRETS_EXTENSION), requestArgumentCaptor.capture())).thenReturn(DefaultGoPluginApiResponse.success(responseBody));
    final List<PluginConfiguration> metadata = secretsExtensionV1.getSecretsConfigMetadata(PLUGIN_ID);
    assertThat(metadata).hasSize(2);
    assertThat(metadata).contains(new PluginConfiguration("Username", new Metadata(true, false)),
        new PluginConfiguration("Password", new Metadata(true, true)));
    assertExtensionRequest(REQUEST_GET_SECRETS_CONFIG_METADATA, null);
}
@Udf(description = "Splits a string into an array of substrings based on a delimiter.") public List<String> split( @UdfParameter( description = "The string to be split. If NULL, then function returns NULL.") final String string, @UdfParameter( description = "The delimiter to split a string by. If NULL, then function returns NULL.") final String delimiter) { if (string == null || delimiter == null) { return null; } // Java split() accepts regular expressions as a delimiter, but the behavior of this UDF split() // is to accept only literal strings. This method uses Guava Splitter instead, which does not // accept any regex pattern. This is to avoid a confusion to users when splitting by regex // special characters, such as '.' and '|'. try { // Guava Splitter does not accept empty delimiters. Use the Java split() method instead. if (delimiter.isEmpty()) { return Arrays.asList(EMPTY_DELIMITER.split(string)); } else { return Splitter.on(delimiter).splitToList(string); } } catch (final Exception e) { throw new KsqlFunctionException( String.format("Invalid delimiter '%s' in the split() function.", delimiter), e); } }
// Contiguous, leading, and trailing delimiter occurrences must each produce an
// empty element in the result (bytes overload of split()).
@Test
public void shouldSplitAndAddEmptySpacesIfDelimiterBytesIsFoundInContiguousPositions() {
    final ByteBuffer aBytes = ByteBuffer.wrap(new byte[]{'A'});
    final ByteBuffer zBytes = ByteBuffer.wrap(new byte[]{'z'});
    final ByteBuffer pipeBytes = ByteBuffer.wrap(new byte[]{'|'});
    assertThat(
        splitUdf.split(ByteBuffer.wrap(new byte[]{'A','|','|','A'}), pipeBytes),
        contains(aBytes, EMPTY_BYTES, aBytes));
    assertThat(
        splitUdf.split(ByteBuffer.wrap(new byte[]{'z','|','|','A','|','|','z'}), pipeBytes),
        contains(zBytes, EMPTY_BYTES, aBytes, EMPTY_BYTES, zBytes));
    assertThat(
        splitUdf.split(ByteBuffer.wrap(new byte[]{'|', '|', 'A','|','|','A'}), pipeBytes),
        contains(EMPTY_BYTES, EMPTY_BYTES, aBytes, EMPTY_BYTES, aBytes));
    assertThat(
        splitUdf.split(ByteBuffer.wrap(new byte[]{'A','|','|','A','|','|'}), pipeBytes),
        contains(aBytes, EMPTY_BYTES, aBytes, EMPTY_BYTES, EMPTY_BYTES));
}
// Scheduled-task entry point: delegates to cleanup() and never lets an
// exception escape (an escaping exception would cancel the scheduled task).
@Override
public void run() {
    try {
        cleanup();
    } catch (Exception e) {
        log.warn("Caught exception during Intent cleanup", e);
    }
}
// The cleanup poll must resubmit only intents that qualify for retry: the
// CORRUPT intent (via the delegate) is counted once, while the pending intent
// with a recent timestamp is skipped.
@Test
public void skipPoll() {
    IntentStoreDelegate mockDelegate = new IntentStoreDelegate() {
        @Override
        public void process(IntentData intentData) {
            // Mark the processed intent CORRUPT so cleanup considers it for resubmission.
            intentData.setState(CORRUPT);
            store.write(intentData);
        }
        @Override
        public void notify(IntentEvent event) {}
    };
    store.setDelegate(mockDelegate);
    Intent intent = new MockIntent(1L);
    IntentData data = new IntentData(intent, INSTALL_REQ, null);
    store.addPending(data);
    Intent intent2 = new MockIntent(2L);
    Timestamp version = new SystemClockTimestamp(1L);
    data = new IntentData(intent2, INSTALL_REQ, version);
    store.addPending(data);
    cleanup.run();
    assertEquals("Expect number of submits incorrect",
                 1, service.submitCounter());
}
/**
 * Point-giving price calculator: when the member point feature is enabled,
 * computes the points earned from the order's pay price and spreads them
 * across the selected order items.
 */
@Override
public void calculate(TradePriceCalculateReqBO param, TradePriceCalculateRespBO result) {
    // 1.1 Check whether the point feature is enabled (points given per yuan)
    int givePointPerYuan = Optional.ofNullable(memberConfigApi.getConfig())
            .filter(config -> BooleanUtil.isTrue(config.getPointTradeDeductEnable()))
            .map(MemberConfigRespDTO::getPointTradeGivePoint)
            .orElse(0);
    if (givePointPerYuan <= 0) {
        return;
    }
    // 1.2 Nothing to give when the pay price is not positive
    if (result.getPrice().getPayPrice() <= 0) {
        return;
    }
    // 2.1 Compute the total points to give (floor of payPrice * rate)
    int givePoint = MoneyUtils.calculateRatePriceFloor(result.getPrice().getPayPrice(), (double) givePointPerYuan);
    // 2.2 Split the points proportionally across the *selected* items only
    List<TradePriceCalculateRespBO.OrderItem> orderItems = filterList(result.getItems(), TradePriceCalculateRespBO.OrderItem::getSelected);
    List<Integer> dividePoints = TradePriceCalculatorHelper.dividePrice(orderItems, givePoint);
    // 3.2 Update each SKU's given points
    for (int i = 0; i < orderItems.size(); i++) {
        TradePriceCalculateRespBO.OrderItem orderItem = orderItems.get(i);
        // The item may already carry SKU-level gift points, so add instead of overwrite
        orderItem.setGivePoint(orderItem.getGivePoint() + dividePoints.get(i));
    }
    // 3.3 Recount the order-level given points from the items
    TradePriceCalculatorHelper.recountAllGivePoint(result);
}
// End-to-end check of the point-giving calculator: selected items receive
// pay-price-proportional points on top of any SKU-level gift points, while
// unselected items keep their original values.
@Test
public void testCalculate() {
    // Prepare parameters
    TradePriceCalculateReqBO param = new TradePriceCalculateReqBO()
            .setUserId(233L)
            .setItems(asList(
                    new TradePriceCalculateReqBO.Item().setSkuId(10L).setCount(2).setSelected(true), // global points
                    new TradePriceCalculateReqBO.Item().setSkuId(20L).setCount(3).setSelected(true), // global points + SKU points
                    new TradePriceCalculateReqBO.Item().setSkuId(30L).setCount(4).setSelected(false), // global points, but unselected
                    new TradePriceCalculateReqBO.Item().setSkuId(40L).setCount(5).setSelected(false) // global points + SKU points, but unselected
            ));
    TradePriceCalculateRespBO result = new TradePriceCalculateRespBO()
            .setType(TradeOrderTypeEnum.NORMAL.getType())
            .setPrice(new TradePriceCalculateRespBO.Price())
            .setPromotions(new ArrayList<>())
            .setItems(asList(
                    new TradePriceCalculateRespBO.OrderItem().setSkuId(10L).setCount(2).setSelected(true)
                            .setPrice(100).setSpuId(1L).setGivePoint(0),
                    new TradePriceCalculateRespBO.OrderItem().setSkuId(20L).setCount(3).setSelected(true)
                            .setPrice(50).setSpuId(2L).setGivePoint(100),
                    new TradePriceCalculateRespBO.OrderItem().setSkuId(30L).setCount(4).setSelected(false)
                            .setPrice(30).setSpuId(3L).setGivePoint(0),
                    new TradePriceCalculateRespBO.OrderItem().setSkuId(40L).setCount(5).setSelected(false)
                            .setPrice(60).setSpuId(1L).setGivePoint(100)
            ));
    // Make sure prices are initialized
    TradePriceCalculatorHelper.recountPayPrice(result.getItems());
    TradePriceCalculatorHelper.recountAllPrice(result);
    // Mock the member point configuration
    MemberConfigRespDTO memberConfig = randomPojo(MemberConfigRespDTO.class,
            o -> o.setPointTradeDeductEnable(true) // enable point deduction
                    .setPointTradeGivePoint(100)); // points given per yuan
    when(memberConfigApi.getConfig()).thenReturn(memberConfig);
    // Invoke
    tradePointGiveCalculator.calculate(param, result);
    // Assert: Price part
    assertEquals(result.getGivePoint(), 2 * 100 + 3 * 50 + 100);
    // Assert: SKU 1
    TradePriceCalculateRespBO.OrderItem orderItem01 = result.getItems().get(0);
    assertEquals(orderItem01.getSkuId(), 10L);
    assertEquals(orderItem01.getCount(), 2);
    assertEquals(orderItem01.getPrice(), 100);
    assertEquals(orderItem01.getGivePoint(), 2 * 100); // global points
    // Assert: SKU 2
    TradePriceCalculateRespBO.OrderItem orderItem02 = result.getItems().get(1);
    assertEquals(orderItem02.getSkuId(), 20L);
    assertEquals(orderItem02.getCount(), 3);
    assertEquals(orderItem02.getPrice(), 50);
    assertEquals(orderItem02.getGivePoint(), 3 * 50 + 100); // global points + SKU points
    // Assert: SKU 3
    TradePriceCalculateRespBO.OrderItem orderItem03 = result.getItems().get(2);
    assertEquals(orderItem03.getSkuId(), 30L);
    assertEquals(orderItem03.getCount(), 4);
    assertEquals(orderItem03.getPrice(), 30);
    assertEquals(orderItem03.getGivePoint(), 0); // global points, but unselected
    // Assert: SKU 4
    TradePriceCalculateRespBO.OrderItem orderItem04 = result.getItems().get(3);
    assertEquals(orderItem04.getSkuId(), 40L);
    assertEquals(orderItem04.getCount(), 5);
    assertEquals(orderItem04.getPrice(), 60);
    assertEquals(orderItem04.getGivePoint(), 100); // global points + SKU points, but unselected
}
// Derives the topic message type from the send-request's message properties.
// Precedence: transaction-prepared flag -> TRANSACTION; sharding key -> FIFO;
// any delay-related property (start-deliver-time, delay level, timer props)
// -> DELAY; otherwise NORMAL.
public static TopicMessageType getMessageType(SendMessageRequestHeader requestHeader) {
    Map<String, String> properties = MessageDecoder.string2messageProperties(requestHeader.getProperties());
    String traFlag = properties.get(MessageConst.PROPERTY_TRANSACTION_PREPARED);
    TopicMessageType topicMessageType = TopicMessageType.NORMAL;
    if (Boolean.parseBoolean(traFlag)) {
        topicMessageType = TopicMessageType.TRANSACTION;
    } else if (properties.containsKey(MessageConst.PROPERTY_SHARDING_KEY)) {
        topicMessageType = TopicMessageType.FIFO;
    } else if (properties.get("__STARTDELIVERTIME") != null
        || properties.get(MessageConst.PROPERTY_DELAY_TIME_LEVEL) != null
        || properties.get(MessageConst.PROPERTY_TIMER_DELIVER_MS) != null
        || properties.get(MessageConst.PROPERTY_TIMER_DELAY_SEC) != null
        || properties.get(MessageConst.PROPERTY_TIMER_DELAY_MS) != null) {
        topicMessageType = TopicMessageType.DELAY;
    }
    return topicMessageType;
}
// When both a sharding key and a delay level are present, the sharding key
// wins: the message type must be FIFO, not DELAY.
@Test
public void testGetMessageTypeWithMultipleProperties() {
    SendMessageRequestHeader requestHeader = new SendMessageRequestHeader();
    Map<String, String> map = new HashMap<>();
    map.put(MessageConst.PROPERTY_DELAY_TIME_LEVEL, "1");
    map.put(MessageConst.PROPERTY_SHARDING_KEY, "shardingKey");
    requestHeader.setProperties(MessageDecoder.messageProperties2String(map));
    TopicMessageType result = BrokerMetricsManager.getMessageType(requestHeader);
    assertThat(TopicMessageType.FIFO).isEqualTo(result);
}
/**
 * Member-level discount calculator: for NORMAL orders, applies the user's
 * membership-level discount percentage to each order item. Only selected
 * items contribute a promotion record; unselected items still get their
 * vipPrice updated for display purposes.
 */
@Override
public void calculate(TradePriceCalculateReqBO param, TradePriceCalculateRespBO result) {
    // 0. Only NORMAL orders get this discount
    if (ObjectUtil.notEqual(result.getType(), TradeOrderTypeEnum.NORMAL.getType())) {
        return;
    }
    // 1. Resolve the user's membership level
    MemberUserRespDTO user = memberUserApi.getUser(param.getUserId());
    if (user.getLevelId() == null || user.getLevelId() <= 0) {
        return;
    }
    MemberLevelRespDTO level = memberLevelApi.getMemberLevel(user.getLevelId());
    if (level == null || level.getDiscountPercent() == null) {
        return;
    }
    // 2. Compute the discount amount for each SKU
    result.getItems().forEach(orderItem -> {
        // 2.1 Compute the discount amount
        Integer vipPrice = calculateVipPrice(orderItem.getPayPrice(), level.getDiscountPercent());
        if (vipPrice <= 0) {
            return;
        }
        // 2.2 Record the promotion detail
        if (orderItem.getSelected()) {
            // Only selected items are recorded as promotions; unselected items merely
            // get their SKU discount updated for display.
            TradePriceCalculatorHelper.addPromotion(result, orderItem,
                    level.getId(), level.getName(), PromotionTypeEnum.MEMBER_LEVEL.getType(),
                    String.format("会员等级折扣:省 %s 元", formatPrice(vipPrice)),
                    vipPrice);
        }
        // 2.3 Update the SKU discount and recount its pay price
        orderItem.setVipPrice(vipPrice);
        TradePriceCalculatorHelper.recountPayPrice(orderItem);
    });
    TradePriceCalculatorHelper.recountAllPrice(result);
}
// End-to-end check of the member-level discount calculator: a 60% discount
// level reduces pay prices on all items, but only the selected item yields a
// promotion record.
@Test
public void testCalculate() {
    // Prepare parameters
    TradePriceCalculateReqBO param = new TradePriceCalculateReqBO()
            .setUserId(1024L)
            .setItems(asList(
                    new TradePriceCalculateReqBO.Item().setSkuId(10L).setCount(2).setSelected(true), // matches, selected
                    new TradePriceCalculateReqBO.Item().setSkuId(20L).setCount(3).setSelected(false) // matches, but unselected
            ));
    TradePriceCalculateRespBO result = new TradePriceCalculateRespBO()
            .setType(TradeOrderTypeEnum.NORMAL.getType())
            .setPrice(new TradePriceCalculateRespBO.Price())
            .setPromotions(new ArrayList<>())
            .setItems(asList(
                    new TradePriceCalculateRespBO.OrderItem().setSkuId(10L).setCount(2).setSelected(true)
                            .setPrice(100),
                    new TradePriceCalculateRespBO.OrderItem().setSkuId(20L).setCount(3).setSelected(false)
                            .setPrice(50)
            ));
    // Make sure prices are initialized
    TradePriceCalculatorHelper.recountPayPrice(result.getItems());
    TradePriceCalculatorHelper.recountAllPrice(result);
    // Mock the membership level (60 => pay 60%, i.e. 40% off)
    when(memberUserApi.getUser(eq(1024L))).thenReturn(new MemberUserRespDTO().setLevelId(2048L));
    when(memberLevelApi.getMemberLevel(eq(2048L))).thenReturn(
            new MemberLevelRespDTO().setId(2048L).setName("VIP 会员").setDiscountPercent(60));
    // Invoke
    memberLevelPriceCalculator.calculate(param, result);
    // Assert: Price part
    TradePriceCalculateRespBO.Price price = result.getPrice();
    assertEquals(price.getTotalPrice(), 200);
    assertEquals(price.getDiscountPrice(), 0);
    assertEquals(price.getPointPrice(), 0);
    assertEquals(price.getDeliveryPrice(), 0);
    assertEquals(price.getCouponPrice(), 0);
    assertEquals(price.getVipPrice(), 80);
    assertEquals(price.getPayPrice(), 120);
    assertNull(result.getCouponId());
    // Assert: SKU 1
    assertEquals(result.getItems().size(), 2);
    TradePriceCalculateRespBO.OrderItem orderItem01 = result.getItems().get(0);
    assertEquals(orderItem01.getSkuId(), 10L);
    assertEquals(orderItem01.getCount(), 2);
    assertEquals(orderItem01.getPrice(), 100);
    assertEquals(orderItem01.getDiscountPrice(), 0);
    assertEquals(orderItem01.getDeliveryPrice(), 0);
    assertEquals(orderItem01.getCouponPrice(), 0);
    assertEquals(orderItem01.getPointPrice(), 0);
    assertEquals(orderItem01.getVipPrice(), 80);
    assertEquals(orderItem01.getPayPrice(), 120);
    // Assert: SKU 2
    TradePriceCalculateRespBO.OrderItem orderItem02 = result.getItems().get(1);
    assertEquals(orderItem02.getSkuId(), 20L);
    assertEquals(orderItem02.getCount(), 3);
    assertEquals(orderItem02.getPrice(), 50);
    assertEquals(orderItem02.getDiscountPrice(), 0);
    assertEquals(orderItem02.getDeliveryPrice(), 0);
    assertEquals(orderItem02.getCouponPrice(), 0);
    assertEquals(orderItem02.getPointPrice(), 0);
    assertEquals(orderItem02.getVipPrice(), 60);
    assertEquals(orderItem02.getPayPrice(), 90);
    // Assert: Promotion part (only the selected item produces a record)
    assertEquals(result.getPromotions().size(), 1);
    TradePriceCalculateRespBO.Promotion promotion01 = result.getPromotions().get(0);
    assertEquals(promotion01.getId(), 2048L);
    assertEquals(promotion01.getName(), "VIP 会员");
    assertEquals(promotion01.getType(), PromotionTypeEnum.MEMBER_LEVEL.getType());
    assertEquals(promotion01.getTotalPrice(), 200);
    assertEquals(promotion01.getDiscountPrice(), 80);
    assertTrue(promotion01.getMatch());
    assertEquals(promotion01.getDescription(), "会员等级折扣:省 0.80 元");
    TradePriceCalculateRespBO.PromotionItem promotionItem01 = promotion01.getItems().get(0);
    assertEquals(promotion01.getItems().size(), 1);
    assertEquals(promotionItem01.getSkuId(), 10L);
    assertEquals(promotionItem01.getTotalPrice(), 200);
    assertEquals(promotionItem01.getDiscountPrice(), 80);
}
// Delegates to the underlying health service's terminal-state transition.
public void enterTerminalState() {
    healthService.enterTerminalState();
}
// After entering the terminal state, a previously SERVING service must report
// NOT_SERVING on subsequent health checks.
@Test
void enterTerminalState() {
    String service = "serv2";
    manager.setStatus(service, ServingStatus.SERVING);
    ServingStatus stored = manager.getHealthService()
        .check(HealthCheckRequest.newBuilder().setService(service).build())
        .getStatus();
    Assertions.assertEquals(ServingStatus.SERVING, stored);
    manager.enterTerminalState();
    ServingStatus stored2 = manager.getHealthService()
        .check(HealthCheckRequest.newBuilder().setService(service).build())
        .getStatus();
    Assertions.assertEquals(ServingStatus.NOT_SERVING, stored2);
}
/**
 * Logs an APPEND_SESSION_CLOSE cluster event into the event ring buffer.
 * The record is written only when {@code tryClaim} succeeds (buffer has
 * space); otherwise the event is dropped, which is the usual event-logger
 * behavior under back-pressure.
 */
public void logAppendSessionClose(
    final int memberId,
    final long sessionId,
    final CloseReason closeReason,
    final long leadershipTermId,
    final long timestamp,
    final TimeUnit timeUnit) {
    // Variable-length payload: close reason and time unit are encoded by name.
    final int length = appendSessionCloseLength(closeReason, timeUnit);
    final int captureLength = captureLength(length);
    final int encodedLength = encodedLength(captureLength);
    final ManyToOneRingBuffer ringBuffer = this.ringBuffer;
    final int index = ringBuffer.tryClaim(APPEND_SESSION_CLOSE.toEventCodeId(), encodedLength);
    if (index > 0) {
        try {
            ClusterEventEncoder.encodeAppendSessionClose(
                (UnsafeBuffer)ringBuffer.buffer(),
                index,
                captureLength,
                length,
                memberId,
                sessionId,
                closeReason,
                leadershipTermId,
                timestamp,
                timeUnit);
        } finally {
            // Commit must happen even if encoding throws, to release the claimed slot.
            ringBuffer.commit(index);
        }
    }
}
// Verifies the exact wire layout of the APPEND_SESSION_CLOSE event (field
// order: sessionId, leadershipTermId, timestamp, memberId, reason name,
// time-unit name) and the dissector's rendered message.
@Test
void logAppendSessionClose() {
    final int offset = ALIGNMENT + 4;
    logBuffer.putLong(CAPACITY + TAIL_POSITION_OFFSET, offset);
    final int memberId = 829374;
    final long sessionId = 289374L;
    final CloseReason closeReason = CloseReason.TIMEOUT;
    final long leadershipTermId = 2039842L;
    final long timestamp = 29384;
    final TimeUnit timeUnit = MILLISECONDS;
    logger.logAppendSessionClose(memberId, sessionId, closeReason, leadershipTermId, timestamp, timeUnit);
    final int length = 3 * SIZE_OF_LONG + SIZE_OF_INT +
        (SIZE_OF_INT + closeReason.name().length()) +
        (SIZE_OF_INT + timeUnit.name().length());
    verifyLogHeader(logBuffer, offset, APPEND_SESSION_CLOSE.toEventCodeId(), length, length);
    int index = encodedMsgOffset(offset) + LOG_HEADER_LENGTH;
    assertEquals(sessionId, logBuffer.getLong(index, LITTLE_ENDIAN));
    index += SIZE_OF_LONG;
    assertEquals(leadershipTermId, logBuffer.getLong(index, LITTLE_ENDIAN));
    index += SIZE_OF_LONG;
    assertEquals(timestamp, logBuffer.getLong(index, LITTLE_ENDIAN));
    index += SIZE_OF_LONG;
    assertEquals(memberId, logBuffer.getInt(index, LITTLE_ENDIAN));
    index += SIZE_OF_INT;
    assertEquals(closeReason.name(), logBuffer.getStringAscii(index, LITTLE_ENDIAN));
    index += SIZE_OF_INT + closeReason.name().length();
    assertEquals(timeUnit.name(), logBuffer.getStringAscii(index, LITTLE_ENDIAN));
    final StringBuilder sb = new StringBuilder();
    ClusterEventDissector.dissectAppendCloseSession(
        APPEND_SESSION_CLOSE, logBuffer, encodedMsgOffset(offset), sb);
    final String expectedMessagePattern = "\\[[0-9]+\\.[0-9]+] CLUSTER: APPEND_SESSION_CLOSE " +
        "\\[55/55]: memberId=829374 sessionId=289374 closeReason=TIMEOUT leadershipTermId=2039842 " +
        "timestamp=29384 timeUnit=MILLISECONDS";
    assertThat(sb.toString(), Matchers.matchesPattern(expectedMessagePattern));
}
/**
 * Parses `svn info --xml` output and maps the given query URL to the
 * repository UUID found in each entry.
 *
 * @throws RuntimeException wrapping any parse failure (invalid XML, missing elements)
 */
// NOTE(review): the map key is always queryURL, so with multiple <entry>
// elements later UUIDs overwrite earlier ones — presumably the output only
// ever contains one entry per query; confirm against callers.
public HashMap<String, String> parseInfoToGetUUID(String output, String queryURL, SAXBuilder builder) {
    HashMap<String, String> uidToUrlMap = new HashMap<>();
    try {
        Document document = builder.build(new StringReader(output));
        Element root = document.getRootElement();
        List<Element> entries = root.getChildren("entry");
        for (Element entry : entries) {
            uidToUrlMap.put(queryURL, entry.getChild("repository").getChild("uuid").getValue());
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return uidToUrlMap;
}
// Non-XML svn output must surface as a RuntimeException rather than being
// silently swallowed.
@Test
public void shouldThrowUpWhenSvnInfoOutputIsInvalidToMapUrlToUUID() {
    assertThatThrownBy(() -> new SvnLogXmlParser().parseInfoToGetUUID("Svn threw up and it's drunk", "does not matter", new SAXBuilder()))
        .isInstanceOf(RuntimeException.class);
}
// Tokenizes the input with a Lucene analyzer selected for the language,
// stem mode, and accent-removal flag. Empty input short-circuits to an
// empty token list.
@Override
public Iterable<Token> tokenize(String input, Language language, StemMode stemMode, boolean removeAccents) {
    if (input.isEmpty()) return List.of();
    List<Token> tokens = textToTokens(input, analyzerFactory.getAnalyzer(language, stemMode, removeAccents));
    log.log(Level.FINEST, () -> "Tokenized '" + language + "' text='" + input + "' into: n=" + tokens.size() + ", tokens=" + tokens);
    return tokens;
}
// A stopword file referenced via "classpath-..." must be loadable: the "stop"
// filter removes stopwords while the stemmer still normalizes remaining tokens.
@Test
public void testOptionalPathWithClasspathResources() {
    String languageCode = Language.ENGLISH.languageCode();
    LuceneAnalysisConfig enConfig = new LuceneAnalysisConfig.Builder()
        .analysis(
            Map.of(languageCode,
                new LuceneAnalysisConfig.Analysis.Builder().tokenFilters(List.of(
                    new LuceneAnalysisConfig
                        .Analysis
                        .TokenFilters
                        .Builder()
                        .name("englishMinimalStem"),
                    new LuceneAnalysisConfig
                        .Analysis
                        .TokenFilters
                        .Builder()
                        .name("stop")
                        .conf("words", "classpath-stopwords.txt"))))
        ).build();
    LuceneLinguistics linguistics = new LuceneLinguistics(enConfig, new ComponentRegistry<>());
    Iterable<Token> tokens = linguistics
        .getTokenizer()
        .tokenize("Dogs and Cats", Language.ENGLISH, StemMode.ALL, false);
    assertEquals(List.of("and", "Cat"), tokenStrings(tokens));
}
// Builds a RabbitMQ ConnectionFactory from this node's configuration,
// copying connection settings and merging configured client properties into
// the factory's defaults.
ConnectionFactory getConnectionFactory() {
    ConnectionFactory factory = new ConnectionFactory();
    factory.setHost(this.config.getHost());
    factory.setPort(this.config.getPort());
    factory.setVirtualHost(this.config.getVirtualHost());
    factory.setUsername(this.config.getUsername());
    factory.setPassword(this.config.getPassword());
    factory.setAutomaticRecoveryEnabled(this.config.isAutomaticRecoveryEnabled());
    factory.setConnectionTimeout(this.config.getConnectionTimeout());
    factory.setHandshakeTimeout(this.config.getHandshakeTimeout());
    // Configured client properties are added on top of the factory defaults.
    this.config.getClientProperties().forEach((k, v) -> factory.getClientProperties().put(k, v));
    return factory;
}
// Every configured field must be propagated to the factory, and client
// properties must be the factory defaults overlaid with the configured ones.
@Test
public void verifyGetConnectionFactoryMethod() {
    ReflectionTestUtils.setField(node, "config", config);
    ConnectionFactory connectionFactory = node.getConnectionFactory();
    assertThat(connectionFactory).isNotNull();
    assertThat(connectionFactory.getHost()).isEqualTo(config.getHost());
    assertThat(connectionFactory.getPort()).isEqualTo(config.getPort());
    assertThat(connectionFactory.getVirtualHost()).isEqualTo(config.getVirtualHost());
    assertThat(connectionFactory.getUsername()).isEqualTo(config.getUsername());
    assertThat(connectionFactory.getPassword()).isEqualTo(config.getPassword());
    assertThat(connectionFactory.isAutomaticRecoveryEnabled()).isEqualTo(config.isAutomaticRecoveryEnabled());
    assertThat(connectionFactory.getConnectionTimeout()).isEqualTo(config.getConnectionTimeout());
    assertThat(connectionFactory.getHandshakeTimeout()).isEqualTo(config.getHandshakeTimeout());
    Map<String, Object> expectedClientProperties = new ConnectionFactory().getClientProperties();
    expectedClientProperties.putAll(config.getClientProperties());
    assertThat(connectionFactory.getClientProperties()).isEqualTo(expectedClientProperties);
}
/**
 * Fetches the given side input, serving it from the cache when possible and loading it
 * from Windmill otherwise.
 *
 * <p>When the caller declares the state {@code KNOWN_READY}, a cached-but-not-ready entry
 * is considered stale and is invalidated before reloading; for any other state the cached
 * entry (or a fresh load on miss) is returned as-is.
 *
 * @throws RuntimeException wrapping any failure while loading the side input
 */
public <T> SideInput<T> fetchSideInput(
    PCollectionView<T> view,
    BoundedWindow sideWindow,
    String stateFamily,
    SideInputState state,
    Supplier<Closeable> scopedReadStateSupplier) {
  Callable<SideInput<T>> loadSideInputFromWindmill =
      () -> loadSideInputFromWindmill(view, sideWindow, stateFamily, scopedReadStateSupplier);
  SideInputCache.Key<T> sideInputCacheKey =
      SideInputCache.Key.create(
          getInternalTag(view), sideWindow, getViewFn(view).getTypeDescriptor());
  try {
    if (state == SideInputState.KNOWN_READY) {
      Optional<SideInput<T>> existingCacheEntry = sideInputCache.get(sideInputCacheKey);
      // Nothing cached yet: load and cache.
      if (!existingCacheEntry.isPresent()) {
        return sideInputCache.getOrLoad(sideInputCacheKey, loadSideInputFromWindmill);
      }
      // Cached entry is stale (not ready) even though the caller knows the data is
      // ready: drop it and load a fresh value.
      if (!existingCacheEntry.get().isReady()) {
        return sideInputCache.invalidateThenLoadNewEntry(
            sideInputCacheKey, loadSideInputFromWindmill);
      }
      return existingCacheEntry.get();
    }
    return sideInputCache.getOrLoad(sideInputCacheKey, loadSideInputFromWindmill);
  } catch (Exception e) {
    LOG.error("Fetch failed: ", e);
    throw new RuntimeException("Exception while fetching side input: ", e);
  }
}
// Verifies that a side input whose value is an encoded null round-trips correctly, and
// that once the data is ready subsequent KNOWN_READY fetches are served from the cache.
@Test
public void testFetchGlobalDataNull() throws Exception {
  SideInputStateFetcherFactory factory =
      SideInputStateFetcherFactory.fromOptions(
          PipelineOptionsFactory.as(DataflowStreamingPipelineOptions.class));
  SideInputStateFetcher fetcher = factory.createSideInputStateFetcher(server::getSideInputData);
  // Encode a singleton list containing only null as the side-input payload.
  ByteStringOutputStream stream = new ByteStringOutputStream();
  ListCoder.of(VoidCoder.of())
      .encode(Collections.singletonList(null), stream, Coder.Context.OUTER);
  ByteString encodedIterable = stream.toByteString();
  PCollectionView<Void> view =
      TestPipeline.create().apply(Create.empty(VoidCoder.of())).apply(View.asSingleton());
  String tag = view.getTagInternal().getId();
  // Test three calls in a row. First, data is not ready, then data is ready,
  // then the data is already cached. (NOTE(review): four fetches are issued below but the
  // server is only consulted twice — the later fetches are cache hits.)
  when(server.getSideInputData(any(Windmill.GlobalDataRequest.class)))
      .thenReturn(
          buildGlobalDataResponse(tag, false, null),
          buildGlobalDataResponse(tag, true, encodedIterable));
  assertFalse(
      fetcher
          .fetchSideInput(
              view, GlobalWindow.INSTANCE, STATE_FAMILY, SideInputState.UNKNOWN, readStateSupplier)
          .isReady());
  assertFalse(
      fetcher
          .fetchSideInput(
              view, GlobalWindow.INSTANCE, STATE_FAMILY, SideInputState.UNKNOWN, readStateSupplier)
          .isReady());
  assertNull(
      fetcher
          .fetchSideInput(
              view, GlobalWindow.INSTANCE, STATE_FAMILY, SideInputState.KNOWN_READY, readStateSupplier)
          .value()
          .orElse(null));
  assertNull(
      fetcher
          .fetchSideInput(
              view, GlobalWindow.INSTANCE, STATE_FAMILY, SideInputState.KNOWN_READY, readStateSupplier)
          .value()
          .orElse(null));
  // The server must have been hit exactly twice; later fetches were cache hits.
  verify(server, times(2)).getSideInputData(buildGlobalDataRequest(tag));
  verifyNoMoreInteractions(server);
}
/**
 * Deserializes a {@code DescribeAclsRequest} from the given buffer using the wire
 * format of the supplied API version.
 */
public static DescribeAclsRequest parse(ByteBuffer buffer, short version) {
    return new DescribeAclsRequest(new DescribeAclsRequestData(new ByteBufferAccessor(buffer), version), version);
}
// Round-trips an ANY-pattern filter through the v0 wire format. v0 cannot encode
// pattern types, so after serialize/parse the pattern filter degrades to LITERAL.
@Test
public void shouldRoundTripAnyV0AsLiteral() {
    final DescribeAclsRequest original = new DescribeAclsRequest.Builder(ANY_FILTER).build(V0);
    // Expected result: same filter, but with the pattern type pinned to LITERAL.
    final DescribeAclsRequest expected = new DescribeAclsRequest.Builder(
        new AclBindingFilter(new ResourcePatternFilter(
            ANY_FILTER.patternFilter().resourceType(),
            ANY_FILTER.patternFilter().name(),
            PatternType.LITERAL),
            ANY_FILTER.entryFilter())).build(V0);
    final DescribeAclsRequest result = DescribeAclsRequest.parse(original.serialize(), V0);
    assertRequestEquals(expected, result);
}
/**
 * Finds sessions for {@code key} within the given end/start time bounds by delegating to
 * the wrapped store, returning a metered iterator that records fetch latency, iterator
 * duration, and open-iterator counts.
 *
 * @throws NullPointerException if {@code key} is null
 */
@Override
public KeyValueIterator<Windowed<K>, V> findSessions(final K key,
                                                     final long earliestSessionEndTime,
                                                     final long latestSessionStartTime) {
    Objects.requireNonNull(key, "key cannot be null");
    // Serialize the key once; the inner store operates on raw bytes.
    final Bytes bytesKey = keyBytes(key);
    return new MeteredWindowedKeyValueIterator<>(
        wrapped().findSessions(
            bytesKey,
            earliestSessionEndTime,
            latestSessionStartTime),
        fetchSensor,
        iteratorDurationSensor,
        streamsMetrics,
        serdes::keyFrom,
        serdes::valueFrom,
        time,
        numOpenIterators,
        openIterators);
}
// findSessions must reject a null key up front with a NullPointerException.
@Test
public void shouldThrowNullPointerOnFindSessionsIfKeyIsNull() {
    setUpWithoutContext();
    assertThrows(NullPointerException.class, () -> store.findSessions(null, 0, 0));
}
/**
 * Initializes the history file manager: builds the serial-number directory name format,
 * waits for the history filesystem directories to become available, and configures the
 * job list cache, serial number index, and the move-to-done thread pool.
 */
@Override
protected void serviceInit(Configuration conf) throws Exception {
    this.conf = conf;
    // Serial subdirectory names carry 3 low digits beyond the configured directory digits.
    int serialNumberLowDigits = 3;
    serialNumberFormat = ("%0"
        + (JobHistoryUtils.SERIAL_NUMBER_DIRECTORY_DIGITS + serialNumberLowDigits) + "d");
    long maxFSWaitTime = conf.getLong(
        JHAdminConfig.MR_HISTORY_MAX_START_WAIT_TIME,
        JHAdminConfig.DEFAULT_MR_HISTORY_MAX_START_WAIT_TIME);
    // Retry directory creation every 10 seconds until maxFSWaitTime elapses.
    createHistoryDirs(SystemClock.getInstance(), 10 * 1000, maxFSWaitTime);
    maxTasksForLoadedJob = conf.getInt(
        JHAdminConfig.MR_HS_LOADED_JOBS_TASKS_MAX,
        JHAdminConfig.DEFAULT_MR_HS_LOADED_JOBS_TASKS_MAX);
    this.aclsMgr = new JobACLsManager(conf);
    maxHistoryAge = conf.getLong(JHAdminConfig.MR_HISTORY_MAX_AGE_MS,
        JHAdminConfig.DEFAULT_MR_HISTORY_MAX_AGE);
    jobListCache = createJobListCache();
    serialNumberIndex = new SerialNumberIndex(conf.getInt(
        JHAdminConfig.MR_HISTORY_DATESTRING_CACHE_SIZE,
        JHAdminConfig.DEFAULT_MR_HISTORY_DATESTRING_CACHE_SIZE));
    int numMoveThreads = conf.getInt(
        JHAdminConfig.MR_HISTORY_MOVE_THREAD_COUNT,
        JHAdminConfig.DEFAULT_MR_HISTORY_MOVE_THREAD_COUNT);
    moveToDoneExecutor = createMoveToDoneThreadPool(numMoveThreads);
    super.serviceInit(conf);
}
// moveToDone() must succeed even when the job's .summary file does not exist.
@Test
public void testHistoryFileInfoSummaryFileNotExist() throws Exception {
    HistoryFileManagerTest hmTest = new HistoryFileManagerTest();
    String job = "job_1410889000000_123456";
    Path summaryFile = new Path(job + ".summary");
    JobIndexInfo jobIndexInfo = new JobIndexInfo();
    jobIndexInfo.setJobId(TypeConverter.toYarn(JobID.forName(job)));
    Configuration conf = dfsCluster.getConfiguration(0);
    // Point the done/intermediate-done dirs at fresh random paths so nothing pre-exists.
    conf.set(JHAdminConfig.MR_HISTORY_DONE_DIR, "/" + UUID.randomUUID());
    conf.set(JHAdminConfig.MR_HISTORY_INTERMEDIATE_DONE_DIR, "/" + UUID.randomUUID());
    hmTest.serviceInit(conf);
    HistoryFileInfo info = hmTest.getHistoryFileInfo(null, null, summaryFile, jobIndexInfo, false);
    info.moveToDone();
    Assert.assertFalse(info.didMoveFail());
}
/** Sets this method's name. */
public void setName(String name) {
    this.name = name;
}
// setName() must overwrite the name supplied at construction time.
@Test
void testSetName() {
    Method method = new Method("bar");
    assertEquals("bar", method.getName());
    method.setName("foo");
    assertEquals("foo", method.getName());
}
/**
 * Updates a coupon template after validating that it exists, that the new total count
 * does not drop below the number of coupons already taken, and that the product scope
 * settings are valid.
 */
@Override
public void updateCouponTemplate(CouponTemplateUpdateReqVO updateReqVO) {
    // Ensure the template exists.
    CouponTemplateDO couponTemplate = validateCouponTemplateExists(updateReqVO.getId());
    // The new total count must not be smaller than the number of coupons already taken.
    if (updateReqVO.getTotalCount() < couponTemplate.getTakeCount()) {
        throw exception(COUPON_TEMPLATE_TOTAL_COUNT_TOO_SMALL, couponTemplate.getTakeCount());
    }
    // Validate the product scope configuration.
    validateProductScope(updateReqVO.getProductScope(), updateReqVO.getProductScopeValues());
    // Persist the update.
    CouponTemplateDO updateObj = CouponTemplateConvert.INSTANCE.convert(updateReqVO);
    couponTemplateMapper.updateById(updateObj);
}
// Updating a template that does not exist must fail with COUPON_TEMPLATE_NOT_EXISTS.
@Test
public void testUpdateCouponTemplate_notExists() {
    // Prepare a request whose id matches no stored template.
    CouponTemplateUpdateReqVO reqVO = randomPojo(CouponTemplateUpdateReqVO.class);
    // Invoke and assert the expected service exception.
    assertServiceException(() -> couponTemplateService.updateCouponTemplate(reqVO),
            COUPON_TEMPLATE_NOT_EXISTS);
}
/**
 * Applies the given attribute map to this material config: URL, username, domain,
 * project path, and — only when the PASSWORD_CHANGED flag is "1" — the password.
 * A null attribute object is ignored.
 */
@Override
public void setConfigAttributes(Object attributes) {
    if (attributes == null) {
        return;
    }
    super.setConfigAttributes(attributes);
    Map map = (Map) attributes;
    if (map.containsKey(URL)) {
        this.url = new UrlArgument((String) map.get(URL));
    }
    if (map.containsKey(USERNAME)) {
        this.userName = (String) map.get(USERNAME);
    }
    if (map.containsKey(DOMAIN)) {
        this.domain = (String) map.get(DOMAIN);
    }
    // Only reset the password when the form explicitly signals a change; otherwise an
    // unchanged (masked) password field would clobber the stored secret.
    if (map.containsKey(PASSWORD_CHANGED) && "1".equals(map.get(PASSWORD_CHANGED))) {
        String passwordToSet = (String) map.get(PASSWORD);
        resetPassword(passwordToSet);
    }
    if (map.containsKey(PROJECT_PATH)) {
        this.projectPath = (String) map.get(PROJECT_PATH);
    }
}
// The password is only updated when PASSWORD_CHANGED is "1"; an unchanged (masked)
// password field must not clobber the stored secret, and an explicitly-changed empty
// password clears it.
@Test
void setConfigAttributes_shouldUpdatePasswordWhenPasswordChangedBooleanChanged() throws Exception {
    TfsMaterialConfig tfsMaterialConfig = tfs(new GoCipher(), new UrlArgument("http://10.4.4.101:8080/tfs/Sample"), "loser", "CORPORATE", "passwd", "walk_this_path");
    Map<String, String> map = new HashMap<>();
    map.put(TfsMaterialConfig.PASSWORD, "secret");
    map.put(TfsMaterialConfig.PASSWORD_CHANGED, "1");
    tfsMaterialConfig.setConfigAttributes(map);
    tfsMaterialConfig.setConfigAttributes(map);
    // The plain-text field stays null; only the encrypted form is stored.
    assertThat((String)ReflectionUtil.getField(tfsMaterialConfig, "password")).isNull();
    assertThat(tfsMaterialConfig.getPassword()).isEqualTo("secret");
    assertThat(tfsMaterialConfig.getEncryptedPassword()).isEqualTo(new GoCipher().encrypt("secret"));
    // PASSWORD_CHANGED of "0" means the field was not edited: the stored secret survives.
    map.put(TfsMaterialConfig.PASSWORD, "Hehehe");
    map.put(TfsMaterialConfig.PASSWORD_CHANGED, "0");
    tfsMaterialConfig.setConfigAttributes(map);
    assertThat((String)ReflectionUtil.getField(tfsMaterialConfig, "password")).isNull();
    assertThat(tfsMaterialConfig.getPassword()).isEqualTo("secret");
    assertThat(tfsMaterialConfig.getEncryptedPassword()).isEqualTo(new GoCipher().encrypt("secret"));
    // An explicitly-changed empty password clears both the plain and encrypted forms.
    map.put(TfsMaterialConfig.PASSWORD, "");
    map.put(TfsMaterialConfig.PASSWORD_CHANGED, "1");
    tfsMaterialConfig.setConfigAttributes(map);
    assertThat(tfsMaterialConfig.getPassword()).isNull();
    assertThat(tfsMaterialConfig.getEncryptedPassword()).isNull();
}
/** Executes this step definition by invoking the underlying method with the given arguments. */
@Override
public void execute(Object[] args) {
    invokeMethod(args);
}
// Defining a step from a method and executing it must pass the captured argument
// through to that method.
@Test
void can_define_step() throws Throwable {
    Method method = JavaStepDefinitionTest.class.getMethod("one_string_argument", String.class);
    JavaStepDefinition definition = new JavaStepDefinition(method, "three (.*) mice", lookup);
    definition.execute(new Object[] { "one_string_argument" });
    assertThat(argument, is("one_string_argument"));
}
/**
 * Looks up a vectorized function matching {@code desc} under the given compare mode.
 *
 * <p>Resolution order: strict match among non-polymorphic overloads first, then
 * polymorphic overloads, and finally a non-polymorphic match that requires implicit
 * casts. When the descriptor uses named arguments, only overloads that support named
 * arguments are considered.
 *
 * @return the matching function, or {@code null} if none matches
 */
public Function getFunction(Function desc, Function.CompareMode mode) {
    List<Function> fns = vectorizedFunctions.get(desc.functionName());
    if (desc.hasNamedArg() && fns != null && !fns.isEmpty()) {
        fns = fns.stream().filter(Function::hasNamedArg).collect(Collectors.toList());
    }
    if (fns == null || fns.isEmpty()) {
        return null;
    }
    Function func;
    // To be back-compatible, we first choose the functions from the non-polymorphic functions, if we can't find
    // a suitable in non-polymorphic functions. We will try to search in the polymorphic functions.
    List<Function> standFns = fns.stream().filter(fn -> !fn.isPolymorphic()).collect(Collectors.toList());
    func = matchStrictFunction(desc, mode, standFns);
    if (func != null) {
        return func;
    }
    List<Function> polyFns = fns.stream().filter(Function::isPolymorphic).collect(Collectors.toList());
    func = matchPolymorphicFunction(desc, mode, polyFns);
    if (func != null) {
        return func;
    }
    // Last resort: allow implicit casts of the argument types.
    return matchCastFunction(desc, mode, standFns);
}
// When resolving LAG, the default-value argument (3rd arg) must be coerced to the type
// of the input column (1st arg) — checked for both decimal and varchar inputs.
@Test
public void testGetLagFunction() {
    Type[] argTypes1 = {ScalarType.DECIMALV2, ScalarType.TINYINT, ScalarType.TINYINT};
    Function lagDesc1 = new Function(new FunctionName(FunctionSet.LAG), argTypes1, ScalarType.INVALID, false);
    Function newFunction = functionSet.getFunction(lagDesc1, Function.CompareMode.IS_SUPERTYPE_OF);
    Type[] newArgTypes = newFunction.getArgs();
    Assert.assertTrue(newArgTypes[0].matchesType(newArgTypes[2]));
    Assert.assertTrue(newArgTypes[0].matchesType(ScalarType.DECIMALV2));
    Type[] argTypes2 = {ScalarType.VARCHAR, ScalarType.TINYINT, ScalarType.TINYINT};
    Function lagDesc2 = new Function(new FunctionName(FunctionSet.LAG), argTypes2, ScalarType.INVALID, false);
    newFunction = functionSet.getFunction(lagDesc2, Function.CompareMode.IS_SUPERTYPE_OF);
    newArgTypes = newFunction.getArgs();
    Assert.assertTrue(newArgTypes[0].matchesType(newArgTypes[2]));
    Assert.assertTrue(newArgTypes[0].matchesType(ScalarType.VARCHAR));
}
/**
 * Ternary-logic null test.
 *
 * @param value the value to inspect; may be {@code null}
 * @return {@code true} exactly when {@code value} is a null reference
 */
public static boolean isNull(Object value) {
    if (value == null) {
        return true;
    }
    return false;
}
// isNull() follows SQL-style ternary logic: only a true null reference is "null";
// false, true, and arbitrary objects are not.
@SuppressWarnings({"ConstantConditions", "SimplifiableJUnitAssertion"})
@Test
public void isNull() {
    assertEquals(false, TernaryLogic.isNull(false));
    assertEquals(false, TernaryLogic.isNull(true));
    assertEquals(true, TernaryLogic.isNull(null));
    assertEquals(false, TernaryLogic.isNull(new Object()));
}
/**
 * Processes the given task by blocking for the task's configured duration, then marking
 * it finished.
 *
 * @param task the task to process; its time is used as a sleep duration in milliseconds
 * @throws InterruptedException if the thread is interrupted while sleeping
 */
public void handleTask(Task task) throws InterruptedException {
    var time = task.getTime();
    // Simulate the work by sleeping for the task's duration.
    Thread.sleep(time);
    LOGGER.info("It takes " + time + " milliseconds to finish the task");
    task.setFinished(true);
}
// handleTask() must block for the task's duration and then mark it finished.
@Test
void testHandleTask() throws InterruptedException {
    var taskHandler = new TaskHandler();
    var handle = new Task(100);
    taskHandler.handleTask(handle);
    assertTrue(handle.isFinished());
}
/**
 * Authenticates a request carrying a signed JWT.
 *
 * <p>Returns {@code null} (login refused) when the credentials are not a
 * {@link SignedJWT}, the request is not HTTP, the token cannot be parsed or fails
 * validation, or the authorization service resolves no identity for the user. Otherwise
 * builds a {@link UserIdentity} from the validated token and the resolved roles.
 */
@Override
public UserIdentity login(String username, Object credentials, ServletRequest request) {
    if (!(credentials instanceof SignedJWT)) {
        return null;
    }
    if (!(request instanceof HttpServletRequest)) {
        return null;
    }
    SignedJWT jwtToken = (SignedJWT) credentials;
    JWTClaimsSet claimsSet;
    boolean valid;
    try {
        claimsSet = jwtToken.getJWTClaimsSet();
        valid = validateToken(jwtToken, claimsSet, username);
    } catch (ParseException e) {
        JWT_LOGGER.warn(String.format("%s: Couldn't parse a JWT token", username), e);
        return null;
    }
    if (valid) {
        String serializedToken = (String) request.getAttribute(JwtAuthenticator.JWT_TOKEN_REQUEST_ATTRIBUTE);
        // Resolve the user's roles; no identity means the user is unknown/unauthorized.
        UserIdentity rolesDelegate = _authorizationService.getUserIdentity((HttpServletRequest) request, username);
        if (rolesDelegate == null) {
            return null;
        } else {
            return getUserIdentity(jwtToken, claimsSet, serializedToken, username, rolesDelegate);
        }
    } else {
        return null;
    }
}
// A token issued for audiences {A, B} must be rejected by a login service that only
// accepts audiences {C, D}.
@Test
public void testFailAudienceValidation() throws Exception {
    UserStore testUserStore = new UserStore();
    testUserStore.addUser(TEST_USER, SecurityUtils.NO_CREDENTIAL, new String[] {"USER"});
    TokenGenerator.TokenAndKeys tokenAndKeys = TokenGenerator.generateToken(TEST_USER, Arrays.asList("A", "B"));
    JwtLoginService loginService = new JwtLoginService(
        new UserStoreAuthorizationService(testUserStore), tokenAndKeys.publicKey(), Arrays.asList("C", "D"));
    SignedJWT jwtToken = SignedJWT.parse(tokenAndKeys.token());
    HttpServletRequest request = mock(HttpServletRequest.class);
    UserIdentity identity = loginService.login(TEST_USER, jwtToken, request);
    // Audience mismatch: the login must be refused.
    assertNull(identity);
}
/**
 * Reads the file as one continuous stream by concatenating lazily-opened streams for
 * each chunk staged by the bulk service, sorted by offset. Each chunk's connection is
 * only opened once the {@link SequenceInputStream} reaches it.
 */
@Override
public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    // Make sure file is available in cache
    final List<TransferStatus> chunks = bulk.query(Transfer.Type.download, file, status);
    // Sort chunks by offset
    chunks.sort(Comparator.comparingLong(TransferStatus::getOffset));
    final List<LazyInputStream> streams = new ArrayList<>();
    for(TransferStatus chunk : chunks) {
        // Defer opening the chunk's connection until the stream is actually consumed.
        final LazyInputStream in = new LazyInputStream(new LazyInputStream.OpenCallback() {
            @Override
            public InputStream open() throws IOException {
                try {
                    return session.getClient().getObjectImpl(
                        false,
                        containerService.getContainer(file).getName(),
                        containerService.getKey(file),
                        null, null, null, null, null, null,
                        file.attributes().getVersionId(),
                        new HashMap<String, Object>(),
                        chunk.getParameters())
                        .getDataInputStream();
                }
                catch(ServiceException e) {
                    throw new IOException(e.getMessage(), e);
                }
            }
        });
        streams.add(in);
    }
    // Concatenate streams
    return new SequenceInputStream(Collections.enumeration(streams));
}
// Round-trips 1023 random bytes: write, stage via the bulk service, read back through
// the chunked read feature, and compare, then clean up the test objects.
@Test
public void testRead() throws Exception {
    final Path container = new SpectraDirectoryFeature(session, new SpectraWriteFeature(session)).mkdir(
        new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus());
    final Path test = new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final byte[] content = RandomUtils.nextBytes(1023);
    final TransferStatus status = new TransferStatus().withLength(content.length);
    status.setChecksum(new CRC32ChecksumCompute().compute(new ByteArrayInputStream(content), status));
    final OutputStream out = new SpectraWriteFeature(session).write(test, status, new DisabledConnectionCallback());
    assertNotNull(out);
    new StreamCopier(new TransferStatus(), new TransferStatus()).transfer(new ByteArrayInputStream(content), out);
    out.close();
    // Downloads must be staged with the bulk service before reading.
    new SpectraBulkService(session).pre(Transfer.Type.download, Collections.singletonMap(new TransferItem(test), status), new DisabledConnectionCallback());
    final InputStream in = new SpectraReadFeature(session).read(test, status, new DisabledConnectionCallback());
    assertNotNull(in);
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream(content.length);
    new StreamCopier(status, status).transfer(in, buffer);
    assertArrayEquals(content, buffer.toByteArray());
    in.close();
    new SpectraDeleteFeature(session).delete(Collections.<Path>singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback());
    new SpectraDeleteFeature(session).delete(Collections.<Path>singletonList(container), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Hash code consistent with equals: integral numbers of any boxed type hash by their
 * long value (so e.g. an Integer 10 and a Long 10L hash alike), other numbers by the
 * bit pattern of their double value, and non-numbers by their own hash code.
 */
@Override
public int hashCode() {
    // Fixed hash for a null value (kept as-is for compatibility with existing behavior).
    if (value == null) {
        return 31;
    }
    // Using recommended hashing algorithm from Effective Java for longs and doubles
    if (isIntegral(this)) {
        long value = getAsNumber().longValue();
        return (int) (value ^ (value >>> 32));
    }
    if (value instanceof Number) {
        long value = Double.doubleToLongBits(getAsNumber().doubleValue());
        return (int) (value ^ (value >>> 32));
    }
    return value.hashCode();
}
// An Integer and a Long primitive holding the same integral value must compare equal
// and produce identical hash codes.
@Test
public void testIntegerEqualsLong() {
    JsonPrimitive p1 = new JsonPrimitive(10);
    JsonPrimitive p2 = new JsonPrimitive(10L);
    assertThat(p1).isEqualTo(p2);
    assertThat(p1.hashCode()).isEqualTo(p2.hashCode());
}
/**
 * Maps a routing key to its metadata key: the version and zone keys have dedicated meta
 * keys; every other key is prefixed with the generic parameters prefix.
 */
public static String getMetaKey(String key) {
    final String metaKey;
    if (RouterConstant.VERSION.equals(key)) {
        metaKey = RouterConstant.META_VERSION_KEY;
    } else if (RouterConstant.ZONE.equals(key)) {
        metaKey = RouterConstant.META_ZONE_KEY;
    } else {
        metaKey = RouterConstant.PARAMETERS_KEY_PREFIX + key;
    }
    return metaKey;
}
// getMetaKey maps version/zone to their dedicated meta keys and prefixes any other key
// with the generic parameters prefix.
@Test
public void testGetMetaKey() {
    Assert.assertEquals(RouterConstant.META_VERSION_KEY, RuleUtils.getMetaKey(RouterConstant.VERSION));
    Assert.assertEquals(RouterConstant.META_ZONE_KEY, RuleUtils.getMetaKey(RouterConstant.ZONE));
    Assert.assertEquals(RouterConstant.PARAMETERS_KEY_PREFIX + "group", RuleUtils.getMetaKey("group"));
}
/** Returns the underlying {@link Transformation} backing this stream. Internal API. */
@Internal
public Transformation<T> getTransformation() {
    return transformation;
}
// sinkTo() with the v2 sink API must expose its transformation as a SinkTransformation.
@Test
void testGettingTransformationWithNewSinkAPI() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    final Transformation<?> transformation =
        env.fromData(1, 2)
            .sinkTo(TestSinkV2.<Integer>newBuilder().build())
            .getTransformation();
    assertThat(transformation).isInstanceOf(SinkTransformation.class);
}
/**
 * Returns the ids of all menus granting {@code permission}; the result is cached under
 * the PERMISSION_MENU_ID_LIST cache, keyed by the permission string.
 */
@Override
@Cacheable(value = RedisKeyConstants.PERMISSION_MENU_ID_LIST, key = "#permission")
public List<Long> getMenuIdListByPermissionFromCache(String permission) {
    List<MenuDO> menus = menuMapper.selectListByPermission(permission);
    return convertList(menus, MenuDO::getId);
}
// Only the menus whose permission matches the query must be returned.
@Test
public void testGetMenuIdListByPermissionFromCache() {
    // Mock data: two menus; only the first one matches the queried permission.
    MenuDO menu100 = randomPojo(MenuDO.class);
    menuMapper.insert(menu100);
    MenuDO menu101 = randomPojo(MenuDO.class);
    menuMapper.insert(menu101);
    // Prepare the parameter.
    String permission = menu100.getPermission();
    // Invoke.
    List<Long> ids = menuService.getMenuIdListByPermissionFromCache(permission);
    // Assert: exactly menu100's id is returned.
    assertEquals(1, ids.size());
    assertEquals(menu100.getId(), ids.get(0));
}
/**
 * Varargs convenience overload: parses command-line arguments by delegating to the
 * {@code List}-based parser.
 */
public RuntimeOptionsBuilder parse(String... args) {
    return parse(Arrays.asList(args));
}
// A --name value with no spaces must be kept verbatim as the name filter pattern.
@Test
void name_without_spaces_is_preserved() {
    RuntimeOptions options = parser
        .parse("--name", "someName")
        .build();
    Pattern actualPattern = options.getNameFilters().iterator().next();
    assertThat(actualPattern.pattern(), is("someName"));
}
/**
 * Resolves the output {@link Schema} for this sink from its configuration.
 *
 * <p>Returns {@code Schema.BYTES} when no output type is configured, and {@code null}
 * when the output type is {@code Void} (nothing is produced). Otherwise the schema is
 * looked up from the topic schema registry; when the output type is a
 * {@link GenericRecord}, the schema type is forced to {@code AUTO_CONSUME}, since the
 * other schema types cannot write generic records.
 *
 * @throws ClassNotFoundException if the configured type class cannot be loaded
 */
@SuppressWarnings("unchecked")
@VisibleForTesting
Schema<T> initializeSchema() throws ClassNotFoundException {
    if (StringUtils.isEmpty(this.pulsarSinkConfig.getTypeClassName())) {
        return (Schema<T>) Schema.BYTES;
    }
    Class<?> typeArg = Reflections.loadClass(this.pulsarSinkConfig.getTypeClassName(), functionClassLoader);
    if (Void.class.equals(typeArg)) {
        // return type is 'void', so there's no schema to check
        return null;
    }
    ConsumerConfig consumerConfig = new ConsumerConfig();
    consumerConfig.setSchemaProperties(pulsarSinkConfig.getSchemaProperties());
    if (!StringUtils.isEmpty(pulsarSinkConfig.getSchemaType())) {
        if (GenericRecord.class.isAssignableFrom(typeArg)) {
            consumerConfig.setSchemaType(SchemaType.AUTO_CONSUME.toString());
            SchemaType configuredSchemaType = SchemaType.valueOf(pulsarSinkConfig.getSchemaType());
            if (SchemaType.AUTO_CONSUME != configuredSchemaType) {
                log.info("The configured schema type {} is not able to write GenericRecords."
                    + " So overwrite the schema type to be {}", configuredSchemaType, SchemaType.AUTO_CONSUME);
            }
        } else {
            consumerConfig.setSchemaType(pulsarSinkConfig.getSchemaType());
        }
        return (Schema<T>) topicSchema.getSchema(pulsarSinkConfig.getTopic(), typeArg, consumerConfig, false);
    } else {
        // No schema type configured: fall back to the configured SerDe class.
        consumerConfig.setSchemaType(pulsarSinkConfig.getSerdeClassName());
        return (Schema<T>) topicSchema.getSchema(pulsarSinkConfig.getTopic(), typeArg, consumerConfig, false, functionClassLoader);
    }
}
// Schema initialization must succeed for a complex user-defined output type paired
// with a custom SerDe.
@Test
public void testComplexOuputType() throws PulsarClientException {
    PulsarSinkConfig pulsarConfig = getPulsarConfigs();
    // configure a complex user-defined output type with a custom SerDe
    pulsarConfig.setTypeClassName(ComplexUserDefinedType.class.getName());
    pulsarConfig.setSerdeClassName(ComplexSerDe.class.getName());
    PulsarSink pulsarSink = new PulsarSink(getPulsarClient(), pulsarConfig, new HashMap<>(), mock(ComponentStatsManager.class), Thread.currentThread().getContextClassLoader(), producerCache);
    try {
        pulsarSink.initializeSchema();
    } catch (Exception ex) {
        ex.printStackTrace();
        fail();
    }
}
/**
 * Removes the contents of {@code directory}. A non-existent directory is treated as
 * already clean and is a no-op.
 *
 * @throws NullPointerException if {@code directory} is null
 * @throws IOException if cleaning the directory fails
 */
public static void cleanDirectory(File directory) throws IOException {
    requireNonNull(directory, DIRECTORY_CAN_NOT_BE_NULL);
    if (directory.exists()) {
        cleanDirectoryImpl(directory.toPath());
    }
}
// cleanDirectory() must be a silent no-op (no exception) for a non-existent directory.
@Test
public void cleanDirectory_does_nothing_if_argument_does_not_exist() throws IOException {
    FileUtils2.cleanDirectory(new File("/a/b/ToDoSSS"));
}
/** Returns this transform function's name. */
@Override
public String getName() {
    return _name;
}
// sha256(bytes) must resolve to a scalar transform wrapper producing hex digests that
// match commons-codec's DigestUtils.sha256Hex for every row.
@Test
public void testSha256TransformFunction() {
    ExpressionContext expression = RequestContextUtils.getExpression(String.format("sha256(%s)", BYTES_SV_COLUMN));
    TransformFunction transformFunction = TransformFunctionFactory.get(expression, _dataSourceMap);
    assertTrue(transformFunction instanceof ScalarTransformFunctionWrapper);
    assertEquals(transformFunction.getName(), "sha256");
    String[] expectedValues = new String[NUM_ROWS];
    for (int i = 0; i < NUM_ROWS; i++) {
        expectedValues[i] = DigestUtils.sha256Hex(_bytesSVValues[i]);
    }
    testTransformFunction(transformFunction, expectedValues);
}
/**
 * Forwards a device-state message to the state service, recording it in the stats
 * collector first when stats are enabled.
 */
void forwardToStateService(DeviceStateServiceMsgProto deviceStateServiceMsg, TbCallback callback) {
    if (statsEnabled) {
        stats.log(deviceStateServiceMsg);
    }
    stateService.onQueueMsg(deviceStateServiceMsg, callback);
}
// With statsEnabled=false, forwarding a connect message must not record any stats.
@Test
public void givenStatsDisabled_whenForwardingConnectMsgToStateService_thenStatsAreNotRecorded() {
    // GIVEN
    ReflectionTestUtils.setField(defaultTbCoreConsumerServiceMock, "stats", statsMock);
    ReflectionTestUtils.setField(defaultTbCoreConsumerServiceMock, "statsEnabled", false);
    var connectMsg = TransportProtos.DeviceConnectProto.newBuilder()
            .setTenantIdMSB(tenantId.getId().getMostSignificantBits())
            .setTenantIdLSB(tenantId.getId().getLeastSignificantBits())
            .setDeviceIdMSB(deviceId.getId().getMostSignificantBits())
            .setDeviceIdLSB(deviceId.getId().getLeastSignificantBits())
            .setLastConnectTime(time)
            .build();
    // WHEN
    defaultTbCoreConsumerServiceMock.forwardToStateService(connectMsg, tbCallbackMock);
    // THEN
    then(statsMock).should(never()).log(connectMsg);
}
/**
 * Registers the users web-service controller, wires every registered action into it,
 * and finalizes the definition.
 */
@Override
public void define(Context context) {
    // NOTE(review): assumes setSince/setDescription are fluent setters returning the
    // same controller instance — confirm against NewController's API.
    NewController controller = context.createController(API_USERS);
    controller.setSince(SINCE_VERSION);
    controller.setDescription(DESCRIPTION);
    usersWsActions.forEach(action -> action.define(controller));
    controller.done();
}
// The users controller must be registered with its since-version and description.
@Test
public void define_ws() {
    WebService.Context context = new WebService.Context();
    underTest.define(context);
    WebService.Controller controller = context.controller(UsersWs.API_USERS);
    assertThat(controller).isNotNull();
    assertThat(controller.since()).isEqualTo(UsersWs.SINCE_VERSION);
    assertThat(controller.description()).isEqualTo(UsersWs.DESCRIPTION);
}