focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Appends {@code v} to the end of the deque, growing the backing array when it is full.
 * Growth multiplies the current capacity by {@code growFactor}.
 */
public void push(int v) {
    if (endIndexPlusOne >= arr.length) {
        // Guard against a growth factor whose product rounds down to the current
        // length (e.g. growFactor <= 1, or very small arrays), which would leave
        // the array full and trigger an ArrayIndexOutOfBoundsException below.
        int newCapacity = Math.max(arr.length + 1, (int) (arr.length * growFactor));
        arr = Arrays.copyOf(arr, newCapacity);
    }
    arr[endIndexPlusOne] = v;
    endIndexPlusOne++;
}
// Test: pushes 60 values into a SimpleIntDeque(8, 2f), checking size after each push,
// then interleaves pop/push to verify FIFO order, size bookkeeping, and that the
// capacity has been compacted to 39 after 50 pops.
@Test public void testPush() { SimpleIntDeque deque = new SimpleIntDeque(8, 2f); for (int i = 0; i < 60; i++) { deque.push(i); assertEquals(i + 1, deque.getSize()); } assertEquals(60, deque.getSize()); assertEquals(0, deque.pop()); assertEquals(59, deque.getSize()); assertEquals(1, deque.pop()); assertEquals(58, deque.getSize()); deque.push(2); assertEquals(59, deque.getSize()); deque.push(3); assertEquals(60, deque.getSize()); for (int i = 0; i < 50; i++) { assertEquals(i + 2, deque.pop()); } assertEquals(10, deque.getSize()); assertEquals(39, deque.getCapacity()); deque.push(123); assertEquals(11, deque.getSize()); assertEquals(52, deque.pop()); assertEquals(10, deque.getSize()); }
/**
 * Creates an empty file named {@code fileName} under {@code path} (or updates its
 * timestamp if it already exists), delegating to commons-io {@code FileUtils.touch}.
 *
 * @throws IOException if the file cannot be created or touched
 */
public static void touch(String path, String fileName) throws IOException {
    final java.nio.file.Path target = Paths.get(path, fileName);
    FileUtils.touch(target.toFile());
}
// Test: asserts the target file does not exist, touches it, asserts it now exists.
// NOTE(review): this calls DiskUtils.touch(File), not the (path, fileName) overload
// shown as the focal method — confirm which overload is under test.
@Test void testTouch() throws IOException { File file = Paths.get(EnvUtil.getNacosTmpDir(), "touch.ut").toFile(); assertFalse(file.exists()); DiskUtils.touch(file); assertTrue(file.exists()); file.deleteOnExit(); }
/**
 * Constant-folds SMALLINT modulo. Returns a typed NULL when the divisor is zero
 * (SQL semantics) instead of raising an ArithmeticException.
 */
@ConstantFunction(name = "mod", argTypes = {SMALLINT, SMALLINT}, returnType = SMALLINT)
public static ConstantOperator modSMALLINT(ConstantOperator first, ConstantOperator second) {
    final short divisor = second.getSmallint();
    if (divisor == 0) {
        return ConstantOperator.createNull(Type.SMALLINT);
    }
    // % promotes operands to int, so narrow the remainder back to short.
    return ConstantOperator.createSmallInt((short) (first.getSmallint() % divisor));
}
// Test: 10 % 10 == 0. Only covers the zero-remainder path; the divisor-zero
// (NULL result) branch of the focal method is not exercised here.
@Test public void modSMALLINT() { assertEquals(0, ScalarOperatorFunctions.modSMALLINT(O_SI_10, O_SI_10).getSmallint()); }
// REST endpoint: POST /{id}/stop — stops a task instance by id within a project.
// Pure delegation to taskInstanceService.stopTask; all behavior (auth, error mapping)
// is carried by the Spring/OpenAPI annotations, so the body is kept byte-identical.
@Operation(summary = "stop", description = "TASK_INSTANCE_STOP") @Parameters({ @Parameter(name = "id", description = "TASK_INSTANCE_ID", required = true, schema = @Schema(implementation = int.class, example = "12")) }) @PostMapping(value = "/{id}/stop") @ResponseStatus(HttpStatus.OK) @ApiException(TASK_STOP_ERROR) public Result<Object> stopTask(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @PathVariable(value = "id") Integer id) { return taskInstanceService.stopTask(loginUser, projectCode, id); }
// Test: stubs taskInstanceService.stopTask to return SUCCESS and asserts the
// controller forwards that Result unchanged (checked via its code field).
@Test public void testStopTask() { Result mockResult = new Result(); putMsg(mockResult, Status.SUCCESS); when(taskInstanceService.stopTask(any(), Mockito.anyLong(), Mockito.anyInt())).thenReturn(mockResult); Result taskResult = taskInstanceV2Controller.stopTask(null, 1L, 1); Assertions.assertEquals(Integer.valueOf(Status.SUCCESS.getCode()), taskResult.getCode()); }
/**
 * Convenience overload: converts the {@code aliasGroups} array — where each group's
 * first element is the canonical key and the remaining elements are its deprecated
 * aliases — into a map, then delegates to the map-based overload.
 */
public static <T> Map<String, T> translateDeprecatedConfigs(Map<String, T> configs, String[][] aliasGroups) {
    return translateDeprecatedConfigs(
            configs,
            Stream.of(aliasGroups).collect(Collectors.toMap(
                    group -> group[0],
                    group -> Stream.of(group).skip(1).collect(Collectors.toList()))));
}
// Test: when the deprecated key maps to null and the canonical key has a value,
// translation keeps the canonical value and leaves the deprecated key absent/null.
@Test public void testAllowDeprecatedNulls() { Map<String, String> config = new HashMap<>(); config.put("foo.bar.deprecated", null); config.put("foo.bar", "baz"); Map<String, String> newConfig = ConfigUtils.translateDeprecatedConfigs(config, new String[][]{ {"foo.bar", "foo.bar.deprecated"} }); assertNotNull(newConfig); assertEquals("baz", newConfig.get("foo.bar")); assertNull(newConfig.get("foo.bar.deprecated")); }
/**
 * Returns the configured Jenkins root URL as the host URL for this endpoint.
 * {@code getRootUrl()} is formally nullable, hence the suppressed constant-conditions
 * warning on the {@code @NonNull} contract.
 */
@NonNull
@Override
@SuppressWarnings("ConstantConditions")
public String getHostUrl() {
    final Jenkins jenkins = Jenkins.get();
    return jenkins.getRootUrl();
}
// Test: exactly one JwtTokenServiceEndpoint extension is registered, and its
// host URL equals the Jenkins root URL.
@Test public void verify(){ List<JwtTokenServiceEndpoint> jwtTokenServiceEndpoints = JwtTokenServiceEndpoint.all(); assertEquals(1, jwtTokenServiceEndpoints.size()); assertEquals(Jenkins.get().getRootUrl(), jwtTokenServiceEndpoints.get(0).getHostUrl()); }
// Delegates to doTransformValues with no materialization and an empty name.
// Kept byte-identical: the wildcard generics and null Materialized argument are
// sensitive to inference, so only documentation is added.
@Override public <VR> KTable<K, VR> transformValues(final ValueTransformerWithKeySupplier<? super K, ? super V, ? extends VR> transformerSupplier, final String... stateStoreNames) { return doTransformValues(transformerSupplier, null, NamedInternal.empty(), stateStoreNames); }
// Test: passing a null Materialized to the materialized transformValues overload
// must throw NullPointerException (note: this exercises the overload that takes
// a Materialized, not the varargs overload shown as the focal method).
@SuppressWarnings("unchecked") @Test public void shouldThrowNullPointerOnTransformValuesWithKeyWhenMaterializedIsNull() { final ValueTransformerWithKeySupplier<String, String, ?> valueTransformerSupplier = mock(ValueTransformerWithKeySupplier.class); assertThrows(NullPointerException.class, () -> table.transformValues(valueTransformerSupplier, (Materialized) null)); }
/**
 * Reads the multipart part registered under {@code key} and returns its input
 * stream, or {@code null} when no such part exists.
 */
@Override
protected InputStream readInputStreamParam(String key) {
    final Part part = readPart(key);
    if (part == null) {
        return null;
    }
    return part.getInputStream();
}
// Test: with a mocked multipart request, a present part returns its stream and a
// missing part ("param2") returns null.
@Test public void read_input_stream() throws Exception { when(source.getContentType()).thenReturn("multipart/form-data"); InputStream file = mock(InputStream.class); Part part = mock(Part.class); when(part.getInputStream()).thenReturn(file); when(part.getSize()).thenReturn(10L); when(source.getPart("param1")).thenReturn(part); assertThat(underTest.readInputStreamParam("param1")).isEqualTo(file); assertThat(underTest.readInputStreamParam("param2")).isNull(); }
@Override public void doRun() { // Point deflector to a new index if required. if (cluster.isConnected()) { indexSetRegistry.forEach((indexSet) -> { try { if (indexSet.getConfig().isWritable()) { checkAndRepair(indexSet); checkForRotation(indexSet); } else { LOG.debug("Skipping non-writable index set <{}> ({})", indexSet.getConfig().id(), indexSet.getConfig().title()); } } catch (Exception e) { LOG.error("Couldn't point deflector to a new index", e); } }); } else { LOG.warn("Elasticsearch cluster isn't healthy. Skipping index rotation."); } }
// Test: with cluster.isConnected() stubbed false, doRun must neither cycle the
// index set nor resolve the rotation strategy provider.
@Test public void testDoNotPerformRotationIfClusterIsDown() throws NoTargetIndexException { final Provider<RotationStrategy> provider = spy(new RotationStrategyProvider()); when(cluster.isConnected()).thenReturn(false); final IndexRotationThread rotationThread = new IndexRotationThread( notificationService, indices, indexSetRegistry, cluster, new NullActivityWriter(), nodeId, ImmutableMap.<String, Provider<RotationStrategy>>builder().put("strategy", provider).build(), dataTieringOrchestrator); rotationThread.doRun(); verify(indexSet, never()).cycle(); verify(provider, never()).get(); }
// Attempts to unblock a term buffer stalled at blockedOffset.
// Cases, driven by the volatile frame length at blockedOffset:
//  - negative length: a partially-committed frame; reset its header -> UNBLOCKED.
//  - zero length: scan forward in FRAME_ALIGNMENT steps looking for a later
//    non-zero frame; if the gap back to blockedOffset is confirmed zeroed, pad
//    it with a reset header -> UNBLOCKED. If the scan runs to the buffer's
//    capacity and the blocked slot is still zero, pad to the end -> UNBLOCKED_TO_END.
//  - otherwise NO_ACTION.
// Kept byte-identical: correctness depends on the exact order of volatile reads
// and the re-check before padding to the end; restyling is unsafe.
public static Status unblock( final UnsafeBuffer logMetaDataBuffer, final UnsafeBuffer termBuffer, final int blockedOffset, final int tailOffset, final int termId) { Status status = NO_ACTION; int frameLength = frameLengthVolatile(termBuffer, blockedOffset); if (frameLength < 0) { resetHeader(logMetaDataBuffer, termBuffer, blockedOffset, termId, -frameLength); status = UNBLOCKED; } else if (0 == frameLength) { int currentOffset = blockedOffset + FRAME_ALIGNMENT; while (currentOffset < tailOffset) { frameLength = frameLengthVolatile(termBuffer, currentOffset); if (frameLength != 0) { if (scanBackToConfirmZeroed(termBuffer, currentOffset, blockedOffset)) { final int length = currentOffset - blockedOffset; resetHeader(logMetaDataBuffer, termBuffer, blockedOffset, termId, length); status = UNBLOCKED; } break; } currentOffset += FRAME_ALIGNMENT; } if (currentOffset == termBuffer.capacity()) { if (0 == frameLengthVolatile(termBuffer, blockedOffset)) { final int length = currentOffset - blockedOffset; resetHeader(logMetaDataBuffer, termBuffer, blockedOffset, termId, length); status = UNBLOCKED_TO_END; } } } return status; }
// Test: a positive (complete) frame length at the blocked offset must yield NO_ACTION.
@Test void shouldTakeNoActionWhenMessageIsComplete() { final int termOffset = 0; final int tailOffset = TERM_BUFFER_CAPACITY; when(mockTermBuffer.getIntVolatile(termOffset)).thenReturn(HEADER_LENGTH); assertEquals( NO_ACTION, TermUnblocker.unblock(mockLogMetaDataBuffer, mockTermBuffer, termOffset, tailOffset, TERM_ID)); }
// Convenience overload: aggregates with default (null/null) serdes via
// Materialized.with. Kept byte-identical — the generic inference around the
// Materialized argument is fragile, so only documentation is added.
@Override public KTable<Windowed<K>, V> aggregate(final Initializer<V> initializer, final Merger<? super K, V> sessionMerger) { return aggregate(initializer, sessionMerger, Materialized.with(null, null)); }
// Test: passing Named.as("foo") to a session-windowed cogroup aggregate must
// prefix the generated processor node names ("foo-cogroup-agg-0",
// "foo-cogroup-merge") in the topology description.
@Test public void namedParamShouldSetName() { final StreamsBuilder builder = new StreamsBuilder(); final KStream<String, String> stream = builder.stream(TOPIC, Consumed .with(Serdes.String(), Serdes.String())); groupedStream = stream.groupByKey(Grouped.with(Serdes.String(), Serdes.String())); groupedStream.cogroup(MockAggregator.TOSTRING_ADDER) .windowedBy(SessionWindows.with(ofMillis(1))) .aggregate(MockInitializer.STRING_INIT, sessionMerger, Named.as("foo")); assertThat(builder.build().describe().toString(), equalTo( "Topologies:\n" + "   Sub-topology: 0\n" + "    Source: KSTREAM-SOURCE-0000000000 (topics: [topic])\n" + "      --> foo-cogroup-agg-0\n" + "    Processor: foo-cogroup-agg-0 (stores: [COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000001])\n" + "      --> foo-cogroup-merge\n" + "      <-- KSTREAM-SOURCE-0000000000\n" + "    Processor: foo-cogroup-merge (stores: [])\n" + "      --> none\n" + "      <-- foo-cogroup-agg-0\n\n")); }
// Materializes the memoized get-work / get-data / commit-work streams, starts the
// work committer, then flips the started flag. Kept byte-identical: the calls are
// order-sensitive side effects (started must be set last) and each *stream.get()
// is memoized in a threadsafe manner per the original comment.
@SuppressWarnings("ReturnValueIgnored") void startStreams() { getWorkStream.get(); getDataStream.get(); commitWorkStream.get(); workCommitter.get().start(); // *stream.get() is all memoized in a threadsafe manner. started.set(true); }
// Test: calling startStreams three times must create each underlying stream
// exactly once (memoization), verified on the stream factory mock.
@Test public void testStartStream_onlyStartsStreamsOnce() { long itemBudget = 1L; long byteBudget = 1L; WindmillStreamSender windmillStreamSender = newWindmillStreamSender( GetWorkBudget.builder().setBytes(byteBudget).setItems(itemBudget).build()); windmillStreamSender.startStreams(); windmillStreamSender.startStreams(); windmillStreamSender.startStreams(); verify(streamFactory, times(1)) .createDirectGetWorkStream( eq(connection), eq( GET_WORK_REQUEST .toBuilder() .setMaxItems(itemBudget) .setMaxBytes(byteBudget) .build()), any(ThrottleTimer.class), any(), any(), any(), eq(workItemScheduler)); verify(streamFactory, times(1)) .createGetDataStream(eq(connection.stub()), any(ThrottleTimer.class)); verify(streamFactory, times(1)) .createCommitWorkStream(eq(connection.stub()), any(ThrottleTimer.class)); }
public static URL appendTrailingSlash(URL originalURL) { try { return originalURL.getPath().endsWith("/") ? originalURL : new URL(originalURL.getProtocol(), originalURL.getHost(), originalURL.getPort(), originalURL.getFile() + '/'); } catch (MalformedURLException ignored) { // shouldn't happen throw new IllegalArgumentException("Invalid resource URL: " + originalURL); } }
// Test: a classpath resource URL without a trailing slash gains one after
// appendTrailingSlash.
@Test void appendTrailingSlashAddsASlash() { final URL url = getClass().getResource("/META-INF"); assertThat(url.toExternalForm()) .doesNotMatch(".*/$"); assertThat(ResourceURL.appendTrailingSlash(url).toExternalForm()) .endsWith("/"); }
@Override public boolean alterDatabase(String catName, String dbName, Database db) throws NoSuchObjectException, MetaException { boolean succ = rawStore.alterDatabase(catName, dbName, db); if (succ && !canUseEvents) { // in case of event based cache update, cache will be updated during commit. sharedCache .alterDatabaseInCache(StringUtils.normalizeIdentifier(catName), StringUtils.normalizeIdentifier(dbName), db); } return succ; }
// Test: prewarms a CachedStore, alters a database's owner via CachedStore and
// verifies the ObjectStore sees it; then alters via ObjectStore, refreshes the
// cache, and verifies CachedStore reads the new state.
@Test public void testAlterDatabase() throws Exception { Configuration conf = MetastoreConf.newMetastoreConf(); MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.HIVE_IN_TEST, true); MetastoreConf.setVar(conf, MetastoreConf.ConfVars.CACHED_RAW_STORE_MAX_CACHE_MEMORY, "-1Kb"); MetaStoreTestUtils.setConfForStandloneMode(conf); CachedStore cachedStore = new CachedStore(); CachedStore.clearSharedCache(); cachedStore.setConfForTest(conf); ObjectStore objectStore = (ObjectStore) cachedStore.getRawStore(); // Prewarm CachedStore CachedStore.setCachePrewarmedState(false); CachedStore.prewarm(objectStore); // Read database via CachedStore List<String> allDatabases = cachedStore.getAllDatabases(DEFAULT_CATALOG_NAME); Assert.assertEquals(2, allDatabases.size()); // Alter the db via CachedStore (can only alter owner or parameters) String dbOwner = "user2"; Database db = new Database(db1); db.setOwnerName(dbOwner); String dbName = db1.getName(); cachedStore.alterDatabase(DEFAULT_CATALOG_NAME, dbName, db); db = cachedStore.getDatabase(DEFAULT_CATALOG_NAME, dbName); // Read db via ObjectStore Database dbRead = objectStore.getDatabase(DEFAULT_CATALOG_NAME, dbName); Assert.assertEquals(db, dbRead); // Alter db via ObjectStore dbOwner = "user3"; db = new Database(db1); db.setOwnerName(dbOwner); objectStore.alterDatabase(DEFAULT_CATALOG_NAME, dbName, db); db = objectStore.getDatabase(DEFAULT_CATALOG_NAME, dbName); updateCache(cachedStore); updateCache(cachedStore); // Read db via CachedStore dbRead = cachedStore.getDatabase(DEFAULT_CATALOG_NAME, dbName); Assert.assertEquals(db, dbRead); cachedStore.shutdown(); }
/**
 * Decodes a hex-encoded raw transaction, dispatching on the detected transaction
 * type (EIP-1559, EIP-4844, EIP-2930) and falling back to legacy decoding.
 */
public static RawTransaction decode(final String hexTransaction) {
    final byte[] encoded = Numeric.hexStringToByteArray(hexTransaction);
    final TransactionType type = getTransactionType(encoded);
    switch (type) {
        case EIP1559:
            return decodeEIP1559Transaction(encoded);
        case EIP4844:
            return decodeEIP4844Transaction(encoded);
        case EIP2930:
            return decodeEIP2930Transaction(encoded);
        default:
            return decodeLegacyTransaction(encoded);
    }
}
// Test: signs an EIP-4844 transaction, decodes the hex, and asserts a full
// round-trip — blob data, chain id, nonce, fee fields, gas limit, to, value,
// data, max blob fee, and the recovered sender address all match.
@Test public void testDecodingSigned4844() throws SignatureException { final RawTransaction rawTransaction = createEip4844RawTransaction(); final Transaction4844 transaction4844 = (Transaction4844) rawTransaction.getTransaction(); final byte[] signedMessage = TransactionEncoder.signMessage(rawTransaction, SampleKeys.CREDENTIALS); final String signedHexMessage = Numeric.toHexString(signedMessage); final SignedRawTransaction result = (SignedRawTransaction) TransactionDecoder.decode(signedHexMessage); assertTrue(result.getTransaction() instanceof Transaction4844); final Transaction4844 resultTransaction4844 = (Transaction4844) result.getTransaction(); assertNotNull(result); assertTrue( range(0, transaction4844.getBlobs().get().size()) .allMatch( i -> transaction4844 .getBlobs() .get() .get(i) .getData() .equals( resultTransaction4844 .getBlobs() .get() .get(i) .getData()))); assertEquals(transaction4844.getChainId(), resultTransaction4844.getChainId()); assertEquals(transaction4844.getNonce(), resultTransaction4844.getNonce()); assertEquals(transaction4844.getMaxFeePerGas(), resultTransaction4844.getMaxFeePerGas()); assertEquals( transaction4844.getMaxPriorityFeePerGas(), resultTransaction4844.getMaxPriorityFeePerGas()); assertEquals(transaction4844.getGasLimit(), resultTransaction4844.getGasLimit()); assertEquals(transaction4844.getTo(), resultTransaction4844.getTo()); assertEquals(transaction4844.getValue(), resultTransaction4844.getValue()); assertEquals(transaction4844.getData(), resultTransaction4844.getData()); assertEquals( transaction4844.getMaxFeePerBlobGas(), resultTransaction4844.getMaxFeePerBlobGas()); assertEquals(result.getFrom(), SampleKeys.CREDENTIALS.getAddress()); }
/**
 * Handles a task-manager profiling request: validates that the requested duration
 * lies in (0, maxDurationInSeconds], then forwards the request to the resource
 * manager gateway for the addressed task manager.
 */
@Override
protected CompletableFuture<ProfilingInfo> handleRequest(
        @Nonnull HandlerRequest<ProfilingRequestBody> request,
        @Nonnull ResourceManagerGateway gateway)
        throws RestHandlerException {
    final ProfilingRequestBody profilingRequest = request.getRequestBody();
    final int duration = profilingRequest.getDuration();
    if (duration <= 0 || duration > maxDurationInSeconds) {
        final String message =
                String.format("`duration` must be set between (0s, %ds].", maxDurationInSeconds);
        return FutureUtils.completedExceptionally(new IllegalArgumentException(message));
    }
    final ResourceID taskManagerId = request.getPathParameter(TaskManagerIdPathParameter.class);
    return gateway.requestProfiling(
            taskManagerId, duration, profilingRequest.getMode(), getTimeout());
}
// Test: when the gateway completes exceptionally with UnknownTaskExecutorException,
// the handler surfaces it as the ExecutionException cause with the expected message.
// NOTE(review): the try/catch has no fail() after get(), so the test would pass
// silently if no exception were thrown — confirm this is intended.
@Test void testGetTaskManagerProfilingForUnknownTaskExecutorException() throws Exception { resourceManagerGateway.setRequestProfilingListFunction( EXPECTED_TASK_MANAGER_ID -> FutureUtils.completedExceptionally( new UnknownTaskExecutorException(EXPECTED_TASK_MANAGER_ID))); try { taskManagerProfilingHandler.handleRequest(handlerRequest, resourceManagerGateway).get(); } catch (ExecutionException e) { final Throwable cause = e.getCause(); assertThat(cause).isInstanceOf(UnknownTaskExecutorException.class); final UnknownTaskExecutorException unknownTaskExecutorException = (UnknownTaskExecutorException) cause; assertThat(unknownTaskExecutorException.getMessage()) .contains("No TaskExecutor registered under " + EXPECTED_TASK_MANAGER_ID); } }
@Override public void execute(ComputationStep.Context context) { // no notification on pull requests as there is no real Quality Gate on those if (analysisMetadataHolder.isPullRequest()) { return; } executeForProject(treeRootHolder.getRoot()); }
// Test: on the main branch, a quality-gate status change (ERROR -> OK) delivers an
// "alerts" notification whose "branch" field is null while the project fields are set.
@Test public void verify_branch_name_is_not_set_in_notification_when_main() { analysisMetadataHolder.setBranch(new DefaultBranchImpl(DEFAULT_MAIN_BRANCH_NAME)); when(measureRepository.getRawMeasure(PROJECT_COMPONENT, alertStatusMetric)) .thenReturn(of(Measure.newMeasureBuilder().setQualityGateStatus(OK_QUALITY_GATE_STATUS).createNoValue())); when(measureRepository.getBaseMeasure(PROJECT_COMPONENT, alertStatusMetric)).thenReturn( of(Measure.newMeasureBuilder().setQualityGateStatus(new QualityGateStatus(ERROR)).createNoValue())); underTest.execute(new TestComputationStepContext()); verify(notificationService).deliver(notificationArgumentCaptor.capture()); Notification notification = notificationArgumentCaptor.getValue(); assertThat(notification.getType()).isEqualTo("alerts"); assertThat(notification.getFieldValue("projectKey")).isEqualTo(PROJECT_COMPONENT.getKey()); assertThat(notification.getFieldValue("projectName")).isEqualTo(PROJECT_COMPONENT.getName()); assertThat(notification.getFieldValue("projectVersion")).isEqualTo(PROJECT_COMPONENT.getProjectAttributes().getProjectVersion()); assertThat(notification.getFieldValue("branch")).isNull(); reset(measureRepository, eventRepository, notificationService); }
// Static factory for a type variable with the given name and lower/upper bounds.
// One-line constructor delegation; nothing to restyle, documentation only.
public static UTypeVar create(String name, UType lowerBound, UType upperBound) { return new UTypeVar(name, lowerBound, upperBound); }
// Test: UTypeVar equality depends on all three of name, lower bound, and upper
// bound — four groups differing in exactly one component must be unequal.
@Test public void equality() { UType nullType = UPrimitiveType.create(TypeKind.NULL); UType objectType = UClassType.create("java.lang.Object", ImmutableList.<UType>of()); UType charSequenceType = UClassType.create("java.lang.CharSequence", ImmutableList.<UType>of()); UType stringType = UClassType.create("java.lang.String", ImmutableList.<UType>of()); new EqualsTester() .addEqualityGroup(UTypeVar.create("T", nullType, charSequenceType)) // T extends CharSequence .addEqualityGroup(UTypeVar.create("T", stringType, charSequenceType)) // T extends CharSequence super String .addEqualityGroup(UTypeVar.create("T", nullType, objectType)) // T extends Object .addEqualityGroup(UTypeVar.create("E", nullType, charSequenceType)) // E extends CharSequence .testEquals(); }
/**
 * Evaluates the compiled expression against {@code row}. On any failure the error
 * is reported to the processing logger (unwrapping InvocationTargetException to
 * its cause) and {@code defaultValue} is returned instead of propagating.
 */
public Object evaluate(
    final GenericRow row,
    final Object defaultValue,
    final ProcessingLogger logger,
    final Supplier<String> errorMsg
) {
    try {
        return expressionEvaluator.evaluate(
            new Object[]{spec.resolveArguments(row), defaultValue, logger, row});
    } catch (final Exception e) {
        final Throwable cause;
        if (e instanceof InvocationTargetException) {
            cause = e.getCause();
        } else {
            cause = e;
        }
        logger.error(RecordProcessingError.recordProcessingError(errorMsg.get(), cause, row));
        return defaultValue;
    }
}
// Test: two concurrent evaluations must each receive their own resolved argument
// map. A background thread is parked inside the first evaluate() via latches while
// the main thread evaluates with different arguments; both invocations are then
// verified with their respective argument arrays.
@Test public void shouldPerformThreadSafeParameterEvaluation() throws Exception { // Given: spec.addParameter( ColumnName.of("foo1"), Integer.class, 0 ); spec.addParameter( ColumnName.of("foo2"), Integer.class, 1 ); final CountDownLatch threadLatch = new CountDownLatch(1); final CountDownLatch mainLatch = new CountDownLatch(1); final Map<String, Object> arguments1 = new HashMap<String, Object>() {{ put("var0", 123); put("var1", 456); }}; final Map<String, Object> arguments2 = new HashMap<String, Object>() {{ put("var0", 100); put("var1", 200); }}; when(expressionEvaluator.evaluate(new Object[]{arguments1, DEFAULT_VAL, processingLogger, genericRow(123, 456)})) .thenAnswer( invocation -> { threadLatch.countDown(); assertThat(mainLatch.await(10, TimeUnit.SECONDS), is(true)); return RETURN_VALUE; }); compiledExpression = new CompiledExpression( expressionEvaluator, spec.build(), EXPRESSION_TYPE, expression ); final Thread thread = new Thread( () -> compiledExpression .evaluate(genericRow(123, 456), DEFAULT_VAL, processingLogger, errorMsgSupplier) ); // When: thread.start(); // Then: assertThat(threadLatch.await(10, TimeUnit.SECONDS), is(true)); // When: compiledExpression .evaluate(genericRow(100, 200), DEFAULT_VAL, processingLogger, errorMsgSupplier); mainLatch.countDown(); // Then: thread.join(); verify(expressionEvaluator, times(1)) .evaluate(new Object[]{arguments1, DEFAULT_VAL, processingLogger, genericRow(123, 456)}); verify(expressionEvaluator, times(1)) .evaluate(new Object[]{arguments2, DEFAULT_VAL, processingLogger, genericRow(100, 200)}); }
// Resets the consumer group's offsets for the given topic partitions to the
// earliest available offsets, after verifying the group is in a resettable state.
// A null/empty partitions collection semantics is delegated to offsets(...).
// Kept byte-identical: the reactive chain's inferred generic types make a
// restyle risky without the surrounding class.
public Mono<Void> resetToEarliest( KafkaCluster cluster, String group, String topic, Collection<Integer> partitions) { return checkGroupCondition(cluster, group) .flatMap(ac -> offsets(ac, topic, partitions, OffsetSpec.earliest()) .flatMap(offsets -> resetOffsets(ac, group, offsets))); }
// Test: resetting partitions [0, 1] moves only those committed offsets to 0;
// resetting with null partitions moves every partition of the topic to 0.
@Test void resetToEarliest() { sendMsgsToPartition(Map.of(0, 10, 1, 10, 2, 10)); commit(Map.of(0, 5L, 1, 5L, 2, 5L)); offsetsResetService.resetToEarliest(cluster, groupId, topic, List.of(0, 1)).block(); assertOffsets(Map.of(0, 0L, 1, 0L, 2, 5L)); commit(Map.of(0, 5L, 1, 5L, 2, 5L)); offsetsResetService.resetToEarliest(cluster, groupId, topic, null).block(); assertOffsets(Map.of(0, 0L, 1, 0L, 2, 0L, 3, 0L, 4, 0L)); }
public static boolean isInvalidStanzaSentPriorToResourceBinding(final Packet stanza, final ClientSession session) { // Openfire sets 'authenticated' only after resource binding. if (session.getStatus() == Session.Status.AUTHENTICATED) { return false; } // Beware, the 'to' address in the stanza will have been overwritten by the final JID intendedRecipient = stanza.getTo(); final JID serverDomain = new JID(XMPPServer.getInstance().getServerInfo().getXMPPDomain()); // If there's no 'to' address, then the stanza is implicitly addressed at the user itself. if (intendedRecipient == null) { return false; } // TODO: after authentication (but prior to resource binding), it should be possible to verify that the // intended recipient's bare JID corresponds with the authorized user. Openfire currently does not have an API // that can be used to obtain the authorized username, prior to resource binding. if (intendedRecipient.equals(serverDomain)) { return false; } return true; }
// Test: a stanza with no 'to' address from a CONNECTED (pre-binding) session is
// not considered invalid.
@Test public void testIsInvalid_noToAddress_unauthenticated() throws Exception { // Setup test fixture. final Packet stanza = new Message(); final LocalClientSession session = mock(LocalClientSession.class, withSettings().strictness(Strictness.LENIENT)); when(session.getStatus()).thenReturn(Session.Status.CONNECTED); // Openfire sets 'AUTHENTICATED' only after resource binding has been done. // Execute system under test. final boolean result = SessionPacketRouter.isInvalidStanzaSentPriorToResourceBinding(stanza, session); // Verify results. assertFalse(result); }
/**
 * Renders the MDC for a logging event: the whole map when no key is configured,
 * the key's value when present, and {@code defaultValue} when the map is absent
 * or the key is missing.
 */
@Override
public String convert(ILoggingEvent event) {
    final Map<String, String> mdcPropertyMap = event.getMDCPropertyMap();

    if (mdcPropertyMap == null) {
        return defaultValue;
    }

    if (key == null) {
        return outputMDCForAllKeys(mdcPropertyMap);
    }

    final String value = mdcPropertyMap.get(key);
    return value != null ? value : defaultValue;
}
// Test: with two MDC entries and no configured key, the converter outputs both
// pairs; the regex accepts either iteration order of the map.
@Test public void testConvertWithMultipleEntries() { MDC.put("testKey", "testValue"); MDC.put("testKey2", "testValue2"); ILoggingEvent le = createLoggingEvent(); String result = converter.convert(le); boolean isConform = result.matches("testKey2?=testValue2?, testKey2?=testValue2?"); assertTrue(result + " is not conform", isConform); }
/**
 * Encodes a byte array as a lowercase hexadecimal string, two characters per
 * byte (high nibble first).
 */
public static String encodeHexString(byte[] bytes) {
    final char[] out = new char[bytes.length << 1];
    int j = 0;
    for (final byte b : bytes) {
        out[j++] = DIGITS_LOWER[(XF0 & b) >>> DISPLACEMENT];
        out[j++] = DIGITS_LOWER[XF & b];
    }
    return new String(out);
}
// Test: a fixed 12-byte array (including negative byte -128) encodes to the
// expected lowercase hex string.
@Test public void assetEncodeHexString() { String encodeHexString = "00010f107f80203040506070"; byte[] bytes = {0, 1, 15, 16, 127, -128, 32, 48, 64, 80, 96, 112}; Assert.isTrue(encodeHexString.equals(Md5Util.encodeHexString(bytes))); }
/**
 * Type-tolerant {@code Range.contains}: when the endpoint's type cannot be cast
 * against the range's bounds, both the range and the endpoint are coerced to a
 * common numeric type before retrying; the original ClassCastException is
 * rethrown when no common numeric type exists.
 */
public static boolean safeContains(final Range<Comparable<?>> range, final Comparable<?> endpoint) {
    try {
        return range.contains(endpoint);
    } catch (final ClassCastException ex) {
        final Comparable<?> lower = range.hasLowerBound() ? range.lowerEndpoint() : null;
        final Comparable<?> upper = range.hasUpperBound() ? range.upperEndpoint() : null;
        final Optional<Class<?>> targetType = getTargetNumericType(Arrays.asList(lower, upper, endpoint));
        if (!targetType.isPresent()) {
            throw ex;
        }
        final Range<Comparable<?>> converted = createTargetNumericTypeRange(range, targetType.get());
        return converted.contains(parseNumberByClazz(endpoint.toString(), targetType.get()));
    }
}
// Test: an Integer endpoint checked against a Long-bounded range triggers the
// numeric-coercion fallback and still reports containment.
@Test void assertSafeContainsForLong() { Range<Comparable<?>> range = Range.closed(12L, 1000L); assertTrue(SafeNumberOperationUtils.safeContains(range, 500)); }
// Logs an outgoing HTTP request: increments the request counter, applies the
// configured log modifier to mask the URI and headers, and appends the body
// unless the content type resolves to a binary resource type (multipart bodies
// use getBodyForDisplay() instead of the raw bytes). Kept byte-identical: the
// exact log output format depends on the statement order of the builder calls.
public void logRequest(Config config, HttpRequest request) { requestCount++; String uri = request.getUrl(); HttpLogModifier requestModifier = logModifier(config, uri); String maskedUri = requestModifier == null ? uri : requestModifier.uri(uri); StringBuilder sb = new StringBuilder(); sb.append("request:\n").append(requestCount).append(" > ") .append(request.getMethod()).append(' ').append(maskedUri); logHeaders(requestCount, " > ", sb, requestModifier, request.getHeaders()); ResourceType rt = ResourceType.fromContentType(request.getContentType()); if (rt == null || rt.isBinary()) { // don't log body } else { byte[] body; if (rt == ResourceType.MULTIPART) { body = request.getBodyForDisplay() == null ? null : request.getBodyForDisplay().getBytes(); } else { body = request.getBody(); } logBody(config, requestModifier, sb, uri, body, true, rt); } sb.append('\n'); logger.debug("{}", sb); }
// Test: a text/plain request body and its Content-Type header must both appear
// in the captured log output.
@Test void testRequestLoggingPlain() { HttpRequest httpRequest = httpRequestBuilder.body("hello").contentType("text/plain").path("/plain").build(); httpLogger.logRequest(config, httpRequest); String logs = logAppender.collect(); assertTrue(logs.contains("hello")); assertTrue(logs.contains("Content-Type: text/plain")); }
// Converts a Flink CallExpression into an Iceberg filter Expression, when the
// function is one of the supported operators (null checks, comparisons with
// operand-order flipping, NaN-aware (in)equality, NOT/AND/OR composition, and
// LIKE-as-startsWith). Returns Optional.empty() for anything unsupported.
// Kept byte-identical: each case must map to the exact Iceberg expression
// (including the flipped comparator for literal-first operand order).
public static Optional<Expression> convert( org.apache.flink.table.expressions.Expression flinkExpression) { if (!(flinkExpression instanceof CallExpression)) { return Optional.empty(); } CallExpression call = (CallExpression) flinkExpression; Operation op = FILTERS.get(call.getFunctionDefinition()); if (op != null) { switch (op) { case IS_NULL: return onlyChildAs(call, FieldReferenceExpression.class) .map(FieldReferenceExpression::getName) .map(Expressions::isNull); case NOT_NULL: return onlyChildAs(call, FieldReferenceExpression.class) .map(FieldReferenceExpression::getName) .map(Expressions::notNull); case LT: return convertFieldAndLiteral(Expressions::lessThan, Expressions::greaterThan, call); case LT_EQ: return convertFieldAndLiteral( Expressions::lessThanOrEqual, Expressions::greaterThanOrEqual, call); case GT: return convertFieldAndLiteral(Expressions::greaterThan, Expressions::lessThan, call); case GT_EQ: return convertFieldAndLiteral( Expressions::greaterThanOrEqual, Expressions::lessThanOrEqual, call); case EQ: return convertFieldAndLiteral( (ref, lit) -> { if (NaNUtil.isNaN(lit)) { return Expressions.isNaN(ref); } else { return Expressions.equal(ref, lit); } }, call); case NOT_EQ: return convertFieldAndLiteral( (ref, lit) -> { if (NaNUtil.isNaN(lit)) { return Expressions.notNaN(ref); } else { return Expressions.notEqual(ref, lit); } }, call); case NOT: return onlyChildAs(call, CallExpression.class) .flatMap(FlinkFilters::convert) .map(Expressions::not); case AND: return convertLogicExpression(Expressions::and, call); case OR: return convertLogicExpression(Expressions::or, call); case STARTS_WITH: return convertLike(call); } } return Optional.empty(); }
// Test: field >= literal converts to greaterThanOrEqual, and the flipped form
// literal <= field converts to the same predicate (operand-order normalization).
@Test public void testGreaterThanEquals() { UnboundPredicate<Integer> expected = org.apache.iceberg.expressions.Expressions.greaterThanOrEqual("field1", 1); Optional<org.apache.iceberg.expressions.Expression> actual = FlinkFilters.convert(resolve(Expressions.$("field1").isGreaterOrEqual(Expressions.lit(1)))); assertThat(actual).isPresent(); assertPredicatesMatch(expected, actual.get()); Optional<org.apache.iceberg.expressions.Expression> actual1 = FlinkFilters.convert(resolve(Expressions.lit(1).isLessOrEqual(Expressions.$("field1")))); assertThat(actual1).isPresent(); assertPredicatesMatch(expected, actual1.get()); }
// Executes a SHOW statement by dispatching it through the global ShowExecutor's
// visitor. One-line delegation; documentation only.
public static ShowResultSet execute(ShowStmt statement, ConnectContext context) { return GlobalStateMgr.getCurrentState().getShowExecutor().showExecutorVisitor.visit(statement, context); }
// Test: builds a SHOW ALTER TABLE (OPTIMIZE) statement against testDb and runs it
// through ShowExecutor.execute; passes if no exception is thrown (no assertions).
@Test public void testShowAlterTable() throws AnalysisException, DdlException { ShowAlterStmt stmt = new ShowAlterStmt(ShowAlterStmt.AlterType.OPTIMIZE, "testDb", null, null, null); stmt.setNode(new OptimizeProcDir(globalStateMgr.getSchemaChangeHandler(), globalStateMgr.getDb("testDb"))); ShowExecutor.execute(stmt, ctx); }
// Accessor for the parsed connection properties. Trivial getter; documentation only.
// NOTE(review): exposes the internal mutable Properties object directly — confirm
// callers are not expected to receive a defensive copy.
public Properties getProperties() { return properties; }
// Test: URL-encoded extraCredentials in the JDBC URI round-trip back to the
// decoded value in the parsed Properties.
@Test public void testUriWithExtraCredentials() throws SQLException, UnsupportedEncodingException { String extraCredentials = "test.token.foo:bar;test.token.abc:xyz;test.scopes:read_only|read_write"; String encodedExtraCredentials = URLEncoder.encode(extraCredentials, StandardCharsets.UTF_8.toString()); PrestoDriverUri parameters = createDriverUri("presto://localhost:8080?extraCredentials=" + encodedExtraCredentials); Properties properties = parameters.getProperties(); assertEquals(properties.getProperty(EXTRA_CREDENTIALS.getKey()), extraCredentials); }
/**
 * Inserts a group expression into the memo. If an equivalent expression already
 * exists, the target group is merged into the existing one when required and the
 * existing expression is returned with {@code false}; otherwise the expression is
 * registered (creating a new group when {@code targetGroup} is null) and returned
 * with {@code true}.
 */
public Pair<Boolean, GroupExpression> insertGroupExpression(GroupExpression groupExpression, Group targetGroup) {
    final GroupExpression existing = groupExpressions.get(groupExpression);
    if (existing != null) {
        final Group existingGroup = existing.getGroup();
        if (needMerge(targetGroup, existingGroup)) {
            mergeGroup(existingGroup, targetGroup);
        }
        return new Pair<>(false, existing);
    }

    if (targetGroup == null) {
        targetGroup = newGroup();
        groups.add(targetGroup);
    }
    groupExpressions.put(groupExpression, groupExpression);
    targetGroup.addExpression(groupExpression);
    return new Pair<>(true, groupExpression);
}
// Test: inserting a new limit group expression into an existing memo group adds
// one logical expression to that group without creating a new group.
@Test public void testInsertGroupExpression(@Mocked OlapTable olapTable1, @Mocked OlapTable olapTable2) { new Expectations() { { olapTable1.getId(); result = 0; minTimes = 0; olapTable2.getId(); result = 1; minTimes = 0; } }; OptExpression expr = OptExpression.create(new LogicalProjectOperator(Maps.newHashMap()), OptExpression.create(new LogicalJoinOperator(), OptExpression.create(new LogicalOlapScanOperator(olapTable1)), OptExpression.create(new LogicalOlapScanOperator(olapTable2)))); Memo memo = new Memo(); memo.init(expr); Operator projectOperator = LogicalLimitOperator.init(1, 1); GroupExpression newGroupExpression = new GroupExpression(projectOperator, Lists.newArrayList()); memo.insertGroupExpression(newGroupExpression, memo.getGroups().get(3)); assertEquals(memo.getGroups().size(), 4); assertEquals(memo.getGroupExpressions().size(), 5); assertEquals(memo.getGroups().get(3).getLogicalExpressions().size(), 2); assertEquals(memo.getGroups().get(3).getPhysicalExpressions().size(), 0); }
/**
 * An issue is a taint vulnerability when its rule comes from a taint-analysis
 * repository, it carries location data, and it is not a security hotspot.
 * Short-circuit order of the original checks is preserved.
 */
public boolean isTaintVulnerability(DefaultIssue issue) {
    if (!taintRepositories.contains(issue.getRuleKey().repository())) {
        return false;
    }
    if (issue.getLocations() == null) {
        return false;
    }
    return !RuleType.SECURITY_HOTSPOT.equals(issue.type());
}
// Taint classification requires all three conditions at once: a taint rule
// repository, present locations, and not being a security hotspot. Each case
// below drops exactly one condition.
@Test
public void test_isTaintVulnerability() {
    DefaultIssue taintWithoutLocation = createIssueWithRepository("noTaintIssue", "roslyn.sonaranalyzer.security.cs")
            .toDefaultIssue();
    DefaultIssue taint = createIssueWithRepository("taintIssue", "roslyn.sonaranalyzer.security.cs")
            .setLocations(DbIssues.Locations.newBuilder()
                    .setTextRange(DbCommons.TextRange.newBuilder().build())
                    .build())
            .toDefaultIssue();
    DefaultIssue issue = createIssueWithRepository("standardIssue", "java")
            .setLocations(DbIssues.Locations.newBuilder()
                    .setTextRange(DbCommons.TextRange.newBuilder().build())
                    .build())
            .toDefaultIssue();
    DefaultIssue hotspot = createIssueWithRepository("hotspot", "roslyn.sonaranalyzer.security.cs",
            RuleType.SECURITY_HOTSPOT).toDefaultIssue();
    assertThat(underTest.isTaintVulnerability(taintWithoutLocation)).isFalse();
    assertThat(underTest.isTaintVulnerability(taint)).isTrue();
    assertThat(underTest.isTaintVulnerability(issue)).isFalse();
    assertThat(underTest.isTaintVulnerability(hotspot)).isFalse();
}
/**
 * Applies the wrapped user function with retries controlled by the configured
 * {@link BackOff}. Retries stop when the back-off signals {@code BackOff.STOP}
 * or the thrown exception declares it should not be repeated.
 *
 * @throws UserCodeExecutionException the last repeatable error (or a generic
 *         failure) once back-off is exhausted, or immediately for
 *         non-repeatable errors
 */
OutputT apply(InputT input) throws UserCodeExecutionException {
    Optional<UserCodeExecutionException> latestError = Optional.empty();
    long waitFor = 0L;
    while (waitFor != BackOff.STOP) {
        try {
            sleepIfNeeded(waitFor);
            incIfPresent(getCallCounter());
            return getThrowableFunction().apply(input);
        } catch (UserCodeExecutionException e) {
            if (!e.shouldRepeat()) {
                // Non-repeatable errors propagate immediately.
                throw e;
            }
            latestError = Optional.of(e);
        } catch (InterruptedException e) {
            // Fix: restore the interrupt flag instead of silently swallowing it,
            // so the owning executor can still observe the interruption.
            Thread.currentThread().interrupt();
        }
        try {
            incIfPresent(getBackoffCounter());
            waitFor = getBackOff().nextBackOffMillis();
        } catch (IOException e) {
            throw new UserCodeExecutionException(e);
        }
    }
    // Back-off exhausted: surface the most recent repeatable error if any.
    throw latestError.orElse(
        new UserCodeExecutionException("failed to process for input: " + input));
}
// Setup throws UserCodeQuotaException more times than the retry limit allows;
// the pipeline must fail and surface the quota exception as the root cause.
@Test
public void givenSetupQuotaErrorsExceedsLimit_throws() {
    pipeline
        .apply(Create.of(1))
        .apply(
            ParDo.of(
                    new DoFnWithRepeaters(
                        new CallerImpl(0), new SetupTeardownImpl(LIMIT + 1, UserCodeQuotaException.class)))
                .withOutputTags(OUTPUT_TAG, TupleTagList.of(FAILURE_TAG)));
    UncheckedExecutionException thrown = assertThrows(UncheckedExecutionException.class, pipeline::run);
    // Exception chain: UncheckedExecutionException -> UserCodeException -> UserCodeQuotaException.
    assertThat(thrown.getCause(), allOf(notNullValue(), instanceOf(UserCodeException.class)));
    assertThat(
        thrown.getCause().getCause(),
        allOf(notNullValue(), instanceOf(UserCodeQuotaException.class)));
}
/**
 * Declares a KiePMML drools rule from the given scorecard {@code Attribute}.
 * Delegates rule construction to {@link KiePMMLPredicateASTFactory} after
 * computing the rule path and the reason-code/value pair.
 *
 * @param attribute the scorecard attribute providing predicate and partial score
 * @param parentPath path of the enclosing characteristic rule
 * @param attributeIndex position of this attribute, used to build the rule path
 * @param rules accumulator the generated rule(s) are added to
 * @param statusToSet status written by the generated rule
 * @param characteristicReasonCode reason code inherited from the characteristic
 * @param characteristicBaselineScore baseline score inherited from the characteristic
 * @param isLastCharacteristic whether the owning characteristic is the last one
 */
protected void declareRuleFromAttribute(final Attribute attribute,
                                        final String parentPath,
                                        final int attributeIndex,
                                        final List<KiePMMLDroolsRule> rules,
                                        final String statusToSet,
                                        final String characteristicReasonCode,
                                        final Number characteristicBaselineScore,
                                        final boolean isLastCharacteristic) {
    logger.trace("declareRuleFromAttribute {} {}", attribute, parentPath);
    final Predicate predicate = attribute.getPredicate();
    // This means the rule should not be created at all.
    // Different semantics has to be implemented if the "False"/"True" predicates are declared inside
    // an XOR compound predicate
    if (predicate instanceof False) {
        return;
    }
    String currentRule = String.format(PATH_PATTERN, parentPath, attributeIndex);
    KiePMMLReasonCodeAndValue reasonCodeAndValue = getKiePMMLReasonCodeAndValue(attribute,
            characteristicReasonCode, characteristicBaselineScore);
    PredicateASTFactoryData predicateASTFactoryData = new PredicateASTFactoryData(predicate, outputFields, rules,
            parentPath, currentRule, fieldTypeMap);
    KiePMMLPredicateASTFactory.factory(predicateASTFactoryData).declareRuleFromPredicate(attribute.getPartialScore(),
            statusToSet, reasonCodeAndValue, isLastCharacteristic);
}
// A simple-predicate attribute on a non-last characteristic must yield exactly
// one rule with the expected path, status, constraint and operator.
@Test
void declareRuleFromAttributeWithSimplePredicateNotLastCharacteristic() {
    Attribute attribute = getSimplePredicateAttribute();
    final String parentPath = "parent_path";
    final int attributeIndex = 2;
    final List<KiePMMLDroolsRule> rules = new ArrayList<>();
    final String statusToSet = "status_to_set";
    final String characteristicReasonCode = "REASON_CODE";
    final double characteristicBaselineScore = 12;
    final boolean isLastCharacteristic = false;
    getKiePMMLScorecardModelCharacteristicASTFactory()
            .declareRuleFromAttribute(attribute, parentPath, attributeIndex, rules, statusToSet,
                    characteristicReasonCode, characteristicBaselineScore, isLastCharacteristic);
    assertThat(rules).hasSize(1);
    commonValidateRule(rules.get(0), attribute, statusToSet, parentPath, attributeIndex, isLastCharacteristic,
            1, null, BOOLEAN_OPERATOR.AND, "value <= 5.0", 1);
}
// Builds the session-authentication cookie for a successful login, carrying
// the session token and its expiry; remaining cookie attributes (path, secure,
// etc.) are derived from the request context by makeCookie().
NewCookie createAuthenticationCookie(SessionResponse token, ContainerRequestContext requestContext) {
    return makeCookie(token.getAuthenticationToken(), token.validUntil(), requestContext);
}
// A configured external URI with a sub-path must be propagated into the
// authentication cookie's path attribute.
@Test
void pathFromConfig() throws Exception {
    final HttpConfiguration httpConfiguration = new HttpConfiguration();
    new JadConfig(new InMemoryRepository(
            Map.of("http_external_uri", "http://graylog.local/path/from/config/")), httpConfiguration)
            .process();
    // NOTE(review): leftover debug output; consider removing.
    System.out.println(httpConfiguration.getHttpExternalUri());
    final CookieFactory cookieFactory = new CookieFactory(httpConfiguration);
    final NewCookie cookie = cookieFactory.createAuthenticationCookie(sessionResponse, containerRequest);
    assertThat(cookie.getPath()).isEqualTo("/path/from/config/");
}
/**
 * Returns a matcher that accepts a {@code CreatePCollectionView} application
 * whose view's {@link ViewFn} is exactly of the given class (no subclasses).
 */
public static PTransformMatcher createViewWithViewFn(final Class<? extends ViewFn> viewFnType) {
    return application -> {
        // Only CreatePCollectionView applications can possibly match.
        if (application.getTransform() instanceof CreatePCollectionView) {
            CreatePCollectionView<?, ?> createView =
                (CreatePCollectionView<?, ?>) application.getTransform();
            // Exact class comparison on purpose: subclasses do not match.
            return viewFnType.equals(createView.getView().getViewFn().getClass());
        }
        return false;
    };
}
// The matcher is built for an anonymous ViewFn subclass, so the transform
// carrying the base IterableViewFn must NOT match (exact-class comparison).
@Test
public void createViewWithViewFnDifferentViewFn() {
    PCollection<Integer> input = p.apply(Create.of(1));
    PCollectionView<Iterable<Integer>> view = input.apply(View.asIterable());
    // Purposely create a subclass to get a different class then what was expected.
    IterableViewFn<Integer> viewFn =
        new PCollectionViews.IterableViewFn<Integer>(() -> TypeDescriptors.integers()) {};
    CreatePCollectionView<?, ?> createView = CreatePCollectionView.of(view);
    PTransformMatcher matcher = PTransformMatchers.createViewWithViewFn(viewFn.getClass());
    assertThat(matcher.matches(getAppliedTransform(createView)), is(false));
}
/**
 * Computes a group assignment, dispatching on the group's subscription type.
 * An empty member list short-circuits to an empty assignment.
 */
@Override
public GroupAssignment assign(
    GroupSpec groupSpec,
    SubscribedTopicDescriber subscribedTopicDescriber
) throws PartitionAssignorException {
    // No members: nothing to assign.
    if (groupSpec.memberIds().isEmpty()) {
        return new GroupAssignment(Collections.emptyMap());
    }
    // Homogeneous groups (all members share one subscription) take the
    // dedicated path; anything else is handled per member.
    final boolean homogeneous = groupSpec.subscriptionType() == SubscriptionType.HOMOGENEOUS;
    return homogeneous
        ? assignHomogeneousGroup(groupSpec, subscribedTopicDescriber)
        : assignHeterogeneousGroup(groupSpec, subscribedTopicDescriber);
}
// After member A leaves, the sole remaining member B must absorb all three
// partitions of both topics.
@Test
public void testReassignmentWhenOneMemberRemovedAfterInitialAssignmentWithTwoMembersTwoTopics() {
    Map<Uuid, TopicMetadata> topicMetadata = new HashMap<>();
    topicMetadata.put(topic1Uuid, new TopicMetadata(
        topic1Uuid,
        topic1Name,
        3,
        Collections.emptyMap()
    ));
    topicMetadata.put(topic2Uuid, new TopicMetadata(
        topic2Uuid,
        topic2Name,
        3,
        Collections.emptyMap()
    ));
    Map<String, MemberSubscriptionAndAssignmentImpl> members = new TreeMap<>();
    // Member A was removed
    members.put(memberB, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        mkSet(topic1Uuid, topic2Uuid),
        new Assignment(mkAssignment(
            mkTopicAssignment(topic1Uuid, 2),
            mkTopicAssignment(topic2Uuid, 2)
        ))
    ));
    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HOMOGENEOUS,
        invertedTargetAssignment(members)
    );
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(topicMetadata);
    GroupAssignment computedAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );
    Map<String, Map<Uuid, Set<Integer>>> expectedAssignment = new HashMap<>();
    expectedAssignment.put(memberB, mkAssignment(
        mkTopicAssignment(topic1Uuid, 0, 1, 2),
        mkTopicAssignment(topic2Uuid, 0, 1, 2)
    ));
    assertAssignment(expectedAssignment, computedAssignment);
}
/**
 * Logs a cluster TERMINATION_POSITION event into the event ring buffer.
 * Uses tryClaim/commit so the write is atomic with respect to readers; the
 * event is silently dropped when the claim fails (index <= 0, e.g. buffer full).
 *
 * @param memberId            id of the cluster member logging the event
 * @param logLeadershipTermId leadership term the position belongs to
 * @param logPosition         log position at which termination occurs
 */
public void logTerminationPosition(
    final int memberId,
    final long logLeadershipTermId,
    final long logPosition)
{
    final int length = terminationPositionLength();
    final int captureLength = captureLength(length);
    final int encodedLength = encodedLength(captureLength);
    final ManyToOneRingBuffer ringBuffer = this.ringBuffer;
    final int index = ringBuffer.tryClaim(TERMINATION_POSITION.toEventCodeId(), encodedLength);
    if (index > 0)
    {
        try
        {
            ClusterEventEncoder.encodeTerminationPosition(
                (UnsafeBuffer)ringBuffer.buffer(),
                index,
                captureLength,
                length,
                memberId,
                logLeadershipTermId,
                logPosition);
        }
        finally
        {
            // Always commit the claimed slot, even if encoding throws,
            // so the ring buffer cannot dead-lock on an unreleased claim.
            ringBuffer.commit(index);
        }
    }
}
// Encodes a termination-position event at a non-zero ring-buffer offset, then
// verifies the frame header, each encoded field, and the dissected text.
@Test
void logTerminationPosition()
{
    final long logLeadershipTermId = 96;
    final long logPosition = 128L;
    final int memberId = 222;
    final int offset = 64;
    logBuffer.putLong(CAPACITY + TAIL_POSITION_OFFSET, offset);
    logger.logTerminationPosition(memberId, logLeadershipTermId, logPosition);
    verifyLogHeader(
        logBuffer, offset, TERMINATION_POSITION.toEventCodeId(), terminationPositionLength(),
        terminationPositionLength());
    // Field layout: two longs (term id, position) followed by an int (member id).
    final int index = encodedMsgOffset(offset) + LOG_HEADER_LENGTH;
    assertEquals(logLeadershipTermId, logBuffer.getLong(index, LITTLE_ENDIAN));
    assertEquals(logPosition, logBuffer.getLong(index + SIZE_OF_LONG, LITTLE_ENDIAN));
    assertEquals(memberId, logBuffer.getInt(index + 2 * SIZE_OF_LONG, LITTLE_ENDIAN));
    final StringBuilder sb = new StringBuilder();
    ClusterEventDissector.dissectTerminationPosition(
        TERMINATION_POSITION, logBuffer, encodedMsgOffset(offset), sb);
    final String expectedMessagePattern = "\\[[0-9]+\\.[0-9]+] CLUSTER: TERMINATION_POSITION " +
        "\\[20/20]: memberId=222 logLeadershipTermId=96 logPosition=128";
    assertThat(sb.toString(), Matchers.matchesPattern(expectedMessagePattern));
}
/**
 * Asserts that this publisher has been initialized/started.
 *
 * @throws IllegalStateException if the publisher was never started
 */
void checkIsStart() {
    if (!initialized) {
        // Fix: grammatical error in the original message ("does not start").
        throw new IllegalStateException("Publisher has not started");
    }
}
// A freshly constructed (never started) publisher must fail the start check
// with an IllegalStateException.
@Test
void testCheckIsStart() {
    assertThrows(IllegalStateException.class, () -> {
        // Shut down the shared publisher, then replace it with a brand-new,
        // uninitialized instance before invoking the check.
        publisher.shutdown();
        publisher = new DefaultPublisher();
        publisher.checkIsStart();
    });
}
// Static factory: wraps the identifier in a StringName and delegates to the
// AutoValue-generated implementation.
public static ULocalVarIdent create(CharSequence identifier) {
    return new AutoValue_ULocalVarIdent(StringName.of(identifier));
}
// ULocalVarIdent must survive a Java serialization round-trip unchanged
// (equals/hashCode preserved).
@Test
public void serialization() {
    SerializableTester.reserializeAndAssert(ULocalVarIdent.create("foo"));
}
/**
 * Decides whether a remote symlink should be recreated locally as a symbolic
 * link ({@code true}) or downloaded as a regular file ({@code false}). A link
 * is only created when the preference allows it, the local filesystem supports
 * symlinks ({@code feature != null}), and the link target is itself part of
 * the transfer.
 */
@Override
public boolean resolve(final Path file) {
    if(PreferencesFactory.get().getBoolean("path.symboliclink.resolve")) {
        // Follow links instead
        return false;
    }
    // Create symbolic link only if supported by the local file system
    if(feature != null) {
        final Path target = file.getSymlinkTarget();
        // Only create symbolic link if target is included in the download
        for(TransferItem root : files) {
            if(this.findTarget(target, root.remote)) {
                if(log.isDebugEnabled()) {
                    log.debug(String.format("Resolved target %s for %s", target, file));
                }
                // Create symbolic link
                return true;
            }
        }
    }
    // Otherwise download target file
    return false;
}
// A symlink whose target lies inside a transfer root ("/a") is recreated as a
// link; a target outside the roots ("/b/c") is downloaded as a regular file.
@Test
public void testResolve() {
    final ArrayList<TransferItem> files = new ArrayList<>();
    files.add(new TransferItem(new Path("/a", EnumSet.of(Path.Type.directory))));
    DownloadSymlinkResolver resolver = new DownloadSymlinkResolver(files);
    Path p = new Path("/a/b", EnumSet.of(Path.Type.file, AbstractPath.Type.symboliclink));
    p.setSymlinkTarget(new Path("/a/c", EnumSet.of(Path.Type.file)));
    assertTrue(resolver.resolve(p));
    p.setSymlinkTarget(new Path("/b/c", EnumSet.of(Path.Type.file)));
    assertFalse(resolver.resolve(p));
}
/**
 * Reads a single byte from the underlying UFS stream.
 *
 * @return the next byte as an unsigned int, or -1 at end of file
 * @throws IOException if the underlying stream fails
 */
@Override
public int read() throws IOException {
    if (mPosition == mLength) {
        // at end of file
        return -1;
    }
    // Lazily (re)open or reposition the UFS stream before reading.
    updateStreamIfNeeded();
    int res = mUfsInStream.get().read();
    if (res == -1) {
        // Underlying stream exhausted: do not advance position or count metrics.
        return -1;
    }
    mPosition++;
    // One byte served from the under filesystem.
    Metrics.BYTES_READ_FROM_UFS.inc(1);
    return res;
}
// Reads a 5-chunk file chunk by chunk; each read must fill the buffer with
// the expected increasing-byte pattern starting at the chunk offset.
@Test
public void readAll() throws IOException, AlluxioException {
    int len = CHUNK_SIZE * 5;
    int start = 0;
    AlluxioURI ufsPath = getUfsPath();
    createFile(ufsPath, CHUNK_SIZE * 5);
    byte[] res = new byte[CHUNK_SIZE];
    try (FileInStream inStream = getStream(ufsPath)) {
        while (start < len) {
            assertEquals(CHUNK_SIZE, inStream.read(res));
            assertTrue(BufferUtils.equalIncreasingByteArray(start, CHUNK_SIZE, res));
            start += CHUNK_SIZE;
        }
    }
}
/**
 * @return the under-filesystem type identifier, "cos" (Tencent Cloud Object Storage)
 */
@Override
public String getUnderFSType() {
    return "cos";
}
// The COS under-filesystem must report its type identifier as "cos".
@Test
public void getUnderFSType() {
    Assert.assertEquals("cos", mCOSUnderFileSystem.getUnderFSType());
}
static String indent(String item) { // '([^']|'')*': Matches the escape sequence "'...'" where the content between "'" // characters can contain anything except "'" unless its doubled (''). // // Then each match is checked. If it starts with "'", it's left unchanged // (escaped sequence). Otherwise, it replaces newlines within the match with indent. Pattern pattern = Pattern.compile("('([^']|'')*')|\\n"); Matcher matcher = pattern.matcher(item); StringBuffer output = new StringBuffer(); while (matcher.find()) { final String group = matcher.group(); if (group.startsWith("'")) { matcher.appendReplacement(output, Matcher.quoteReplacement(group)); } else { String replaced = group.replaceAll("\n", "\n" + OPERATION_INDENT); matcher.appendReplacement(output, Matcher.quoteReplacement(replaced)); } } matcher.appendTail(output); return "\n" + OPERATION_INDENT + output; }
// A newline inside a quoted literal ('\n') must NOT be indented; only the
// structural newline introduced by indent() receives the operation indent.
@Test
void testIndentChildWithLiteralWithNewline() {
    String sourceQuery = "SELECT *, '\n' FROM source_t";
    String s =
        String.format(
            "SELECT * FROM (%s\n) WHERE a > 5", OperationUtils.indent(sourceQuery));
    assertThat(s)
        .isEqualTo(
            "SELECT * FROM (\n"
                + "    SELECT *, '\n' FROM source_t\n"
                + ") WHERE a > 5");
}
/**
 * Records one output element: increments the element counter and, for sampled
 * elements (or when byte-size observation is cheap), feeds the element's
 * encoded size into the byte-count and mean-byte-count observers.
 */
@Override
public void update(Object elem) throws Exception {
    // Increment object counter.
    if (objectCount != null) {
        objectCount.addValue(1L);
    }
    // Increment byte counter.
    if ((byteCountObserver != null || meanByteCountObserver != null)
        && (sampleElement() || elementByteSizeObservable.isRegisterByteSizeObserverCheap(elem))) {
        if (byteCountObserver != null) {
            // Scale the observed size up to compensate for sampling, so the
            // byte counter approximates the total across unsampled elements.
            byteCountObserver.setScalingFactor(
                Math.max(samplingToken, SAMPLING_CUTOFF) / (double) SAMPLING_CUTOFF);
            elementByteSizeObservable.registerByteSizeObserver(elem, byteCountObserver);
        }
        if (meanByteCountObserver != null) {
            elementByteSizeObservable.registerByteSizeObserver(elem, meanByteCountObserver);
        }
        // Eagerly commit observations for non-lazy observers; lazy ones are
        // advanced later (see finishLazyUpdate in the callers).
        if (byteCountObserver != null && !byteCountObserver.getIsLazy()) {
            byteCountObserver.advance();
        }
        if (meanByteCountObserver != null && !meanByteCountObserver.getIsLazy()) {
            meanByteCountObserver.advance();
        }
    }
}
// Two elements flow through the counter; the mean byte count must total 7
// (presumably the combined encoded sizes of "hi" and "bob") over 2 elements.
@Test
public void testUpdate() throws Exception {
    TestOutputCounter outputCounter =
        new TestOutputCounter(NameContextsForTests.nameContextForTest());
    outputCounter.update("hi");
    outputCounter.finishLazyUpdate("hi");
    outputCounter.update("bob");
    outputCounter.finishLazyUpdate("bob");
    CounterMean<Long> meanByteCount = outputCounter.getMeanByteCount().getAggregate();
    assertEquals(7, (long) meanByteCount.getAggregate());
    assertEquals(2, meanByteCount.getCount());
}
/**
 * Stops the managed resources. The session factory is closed first; the data
 * source is stopped in a finally block so a failure while closing the factory
 * can no longer leak the data source (the original code skipped the second
 * cleanup when the first threw).
 */
@Override
public void stop() throws Exception {
    try {
        factory.close();
    } finally {
        dataSource.stop();
    }
}
// Stopping the manager must close the underlying session factory.
@Test
void closesTheFactoryOnStopping() throws Exception {
    manager.stop();
    verify(factory).close();
}
/**
 * Appends the given conditions to this group. When no conditions exist yet,
 * the supplied array becomes the initial condition set.
 *
 * @param conditions conditions to add, in order
 */
public void addConditions(Condition... conditions) {
    if (null != this.conditions) {
        // Existing conditions: concatenate while preserving order.
        this.conditions = ArrayUtil.addAll(this.conditions, conditions);
    } else {
        // First call: adopt the provided array directly.
        this.conditions = conditions;
    }
}
// Nested condition groups must render with correct parentheses and operator
// precedence, and collect bound parameter values in encounter order.
@Test
public void ConditionGroupToStringTest() {
    Condition condition1 = new Condition("a", "A");
    Condition condition2 = new Condition("b", "B");
    condition2.setLinkOperator(LogicalOperator.OR);
    Condition condition3 = new Condition("c", "C");
    Condition condition4 = new Condition("d", "D");
    ConditionGroup cg = new ConditionGroup();
    cg.addConditions(condition1, condition2);
    // 条件组嵌套情况 (nested condition-group case)
    ConditionGroup cg2 = new ConditionGroup();
    cg2.addConditions(cg, condition3);
    final ConditionBuilder conditionBuilder = ConditionBuilder.of(cg2, condition4);
    assertEquals("((a = ? OR b = ?) AND c = ?) AND d = ?", conditionBuilder.build());
    assertEquals(ListUtil.of("A", "B", "C", "D"), conditionBuilder.getParamValues());
}
// Factory for a multimap side-input view transform over KV pairs. The boolean
// constructor argument selects a view variant; false is the default used here
// (NOTE(review): confirm the flag's exact meaning on AsMultimap).
public static <K, V> AsMultimap<K, V> asMultimap() {
    return new AsMultimap<>(false);
}
// A windowed multimap side input whose keys use a non-deterministic coder:
// lookups from the main input must still resolve against the side-input
// entries of the matching fixed window.
@Test
@Category({ValidatesRunner.class})
public void testWindowedMultimapSideInputWithNonDeterministicKeyCoder() {
    final PCollectionView<Map<String, Iterable<Integer>>> view =
        pipeline
            .apply(
                "CreateSideInput",
                Create.timestamped(
                        TimestampedValue.of(KV.of("a", 1), new Instant(1)),
                        TimestampedValue.of(KV.of("a", 1), new Instant(2)),
                        TimestampedValue.of(KV.of("a", 2), new Instant(7)),
                        TimestampedValue.of(KV.of("b", 3), new Instant(14)))
                    .withCoder(KvCoder.of(new NonDeterministicStringCoder(), VarIntCoder.of())))
            .apply("SideWindowInto", Window.into(FixedWindows.of(Duration.millis(10))))
            .apply(View.asMultimap());
    PCollection<KV<String, Integer>> output =
        pipeline
            .apply(
                "CreateMainInput",
                Create.timestamped(
                    TimestampedValue.of("apple", new Instant(5)),
                    TimestampedValue.of("banana", new Instant(13)),
                    TimestampedValue.of("blackberry", new Instant(16))))
            .apply("MainWindowInto", Window.into(FixedWindows.of(Duration.millis(10))))
            .apply(
                "OutputSideInputs",
                ParDo.of(
                        new DoFn<String, KV<String, Integer>>() {
                            @ProcessElement
                            public void processElement(ProcessContext c) {
                                // Look up by the element's first letter; emit one KV per value.
                                for (Integer v : c.sideInput(view).get(c.element().substring(0, 1))) {
                                    c.output(KV.of(c.element(), v));
                                }
                            }
                        })
                    .withSideInputs(view));
    PAssert.that(output)
        .containsInAnyOrder(
            KV.of("apple", 1),
            KV.of("apple", 1),
            KV.of("apple", 2),
            KV.of("banana", 3),
            KV.of("blackberry", 3));
    pipeline.run();
}
/**
 * Expands this tokenizer's node chain into its final string form, resolving
 * variable references recursively.
 *
 * @throws ScanException if a reference cannot be resolved during compilation
 */
public String transform() throws ScanException {
    final StringBuilder sb = new StringBuilder();
    // The stack tracks nodes currently being expanded (cycle detection).
    compileNode(node, sb, new Stack<Node>());
    return sb.toString();
}
// Cascaded substitution: ${${x}} resolves the inner reference first
// (x -> "${a}" -> "b"), then the outer one (${b} -> "c").
@Test
public void cascadedTransformation() throws ScanException {
    propertyContainer0.putProperty("x", "${a}");
    propertyContainer0.putProperty("a", "b");
    propertyContainer0.putProperty("b", "c");
    String result = transform("${${x}}");
    Assertions.assertEquals("c", result);
}
/**
 * Two-argument overload: formats using the GMT time zone.
 */
@Udf(description = "Converts a TIMESTAMP value into the"
    + " string representation of the timestamp in the given format. Single quotes in the"
    + " timestamp format can be escaped with '', for example: 'yyyy-MM-dd''T''HH:mm:ssX'"
    + " The system default time zone is used when no time zone is explicitly provided."
    + " The format pattern should be in the format expected"
    + " by java.time.format.DateTimeFormatter")
public String formatTimestamp(
    @UdfParameter(
        description = "TIMESTAMP value.") final Timestamp timestamp,
    @UdfParameter(
        description = "The format pattern should be in the format expected by"
            + " java.time.format.DateTimeFormatter.") final String formatPattern) {
  // Delegate to the zone-aware overload. Fix: pass the "GMT" zone id directly
  // instead of the original ZoneId.of("GMT").getId() round-trip, which parsed
  // and re-printed the same constant on every call.
  return formatTimestamp(timestamp, formatPattern, "GMT");
}
// format -> parse must round-trip across a wide range of timestamps, and the
// formatted text must match SimpleDateFormat (GMT) output for the same pattern.
@Test
public void shouldRoundTripWithParseTimestamp() {
    final String pattern = "yyyy-MM-dd HH:mm:ss.SSS'Freya'";
    final ParseTimestamp parseTimestamp = new ParseTimestamp();
    IntStream.range(-10_000, 20_000)
        .parallel()
        .forEach(idx -> {
            final Timestamp timestamp = new Timestamp(1538361611123L + idx);
            final String result = udf.formatTimestamp(timestamp, pattern);
            // SimpleDateFormat is not thread-safe, hence a fresh instance per iteration.
            final SimpleDateFormat sdf = new SimpleDateFormat(pattern);
            sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
            final String expectedResult = sdf.format(timestamp);
            assertThat(result, is(expectedResult));
            final Timestamp roundtripTimestamp = parseTimestamp.parseTimestamp(result, pattern);
            assertThat(roundtripTimestamp, is(timestamp));
        });
}
/**
 * Deletes a group after validating it is neither the default group nor the
 * last group holding admin permissions. Every reference to the group
 * (permissions, templates, memberships, quality profile/gate edit rights,
 * SCIM and external mappings, GitHub organization links) is removed before
 * the group row itself.
 */
public void delete(DbSession dbSession, GroupDto group) {
    // Guard clauses: fail fast before touching any data.
    checkGroupIsNotDefault(dbSession, group);
    checkNotTryingToDeleteLastAdminGroup(dbSession, group);
    removeGroupPermissions(dbSession, group);
    removeGroupFromPermissionTemplates(dbSession, group);
    removeGroupMembers(dbSession, group);
    removeGroupFromQualityProfileEdit(dbSession, group);
    removeGroupFromQualityGateEdit(dbSession, group);
    removeGroupScimLink(dbSession, group);
    removeExternalGroupMapping(dbSession, group);
    removeGithubOrganizationGroup(dbSession, group);
    // Delete the group itself last, once nothing references it any more.
    removeGroup(dbSession, group);
}
// Deleting the default group must be rejected with an IllegalArgumentException
// and must not delete anything.
@Test
public void delete_whenDefaultGroup_throwAndDontDeleteGroup() {
    GroupDto groupDto = mockGroupDto();
    when(dbClient.groupDao().selectByName(dbSession, DefaultGroups.USERS))
        .thenReturn(Optional.of(groupDto));
    assertThatThrownBy(() -> groupService.delete(dbSession, groupDto))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage(format("Default group '%s' cannot be used to perform this action", GROUP_NAME));
    verifyNoGroupDelete(dbSession, groupDto);
}
// Removes all permission grants that target the given event definition,
// addressed by its GRN.
public void unregisterEventDefinition(String id) {
    removeGrantsForTarget(grnRegistry.newGRN(GRNTypes.EVENT_DEFINITION, id));
}
// Unregistering an event definition must remove the grants targeting its GRN.
@Test
void unregisterEventDefinition() {
    entityOwnershipService.unregisterEventDefinition("1234");
    assertGrantRemoval(GRNTypes.EVENT_DEFINITION, "1234");
}
/**
 * Converts an array of Spark predicates into a single Iceberg expression by
 * AND-ing the individual conversions together.
 *
 * @throws IllegalArgumentException if any predicate cannot be converted
 */
public static Expression convert(Predicate[] predicates) {
    // Identity element for AND; an empty array yields alwaysTrue().
    Expression result = Expressions.alwaysTrue();
    for (Predicate predicate : predicates) {
        Expression converted = convert(predicate);
        Preconditions.checkArgument(
            converted != null, "Cannot convert Spark predicate to Iceberg expression: %s", predicate);
        result = Expressions.and(result, converted);
    }
    return result;
}
// Null literals inside an IN list are dropped during conversion: a null-only
// list becomes an empty in(), and nulls mixed with values are filtered out.
@Test
public void testInValuesContainNull() {
    String col = "strCol";
    NamedReference namedReference = FieldReference.apply(col);
    LiteralValue nullValue = new LiteralValue(null, DataTypes.StringType);
    LiteralValue value1 = new LiteralValue("value1", DataTypes.StringType);
    LiteralValue value2 = new LiteralValue("value2", DataTypes.StringType);
    // Values only contains null
    Predicate inNull = new Predicate("IN", expressions(namedReference, nullValue));
    Expression expectedInNull = Expressions.in(col);
    Expression actualInNull = SparkV2Filters.convert(inNull);
    assertEquals(expectedInNull, actualInNull);
    Predicate in = new Predicate("IN", expressions(namedReference, nullValue, value1, value2));
    Expression expectedIn = Expressions.in(col, "value1", "value2");
    Expression actualIn = SparkV2Filters.convert(in);
    assertEquals(expectedIn, actualIn);
}
/**
 * Executes an INSERT VALUES statement: resolves the target data source,
 * validates the supplied columns, builds the serialized Kafka record and
 * produces it. Authorization failures are mapped to {@link KsqlException}s
 * with causes that carry actionable ACL details.
 */
@SuppressWarnings("unused") // Part of required API.
public void execute(
    final ConfiguredStatement<InsertValues> statement,
    final SessionProperties sessionProperties,
    final KsqlExecutionContext executionContext,
    final ServiceContext serviceContext
) {
  final InsertValues insertValues = statement.getStatement();
  final MetaStore metaStore = executionContext.getMetaStore();
  final KsqlConfig config = statement.getSessionConfig().getConfig(true);
  final DataSource dataSource = getDataSource(config, metaStore, insertValues);
  validateInsert(insertValues.getColumns(), dataSource);
  final ProducerRecord<byte[], byte[]> record =
      buildRecord(statement, metaStore, dataSource, serviceContext);
  try {
    producer.sendRecord(record, serviceContext, config.getProducerClientConfigProps());
  } catch (final TopicAuthorizationException e) {
    // TopicAuthorizationException does not give much detailed information about why it failed,
    // except which topics are denied. Here we just add the ACL to make the error message
    // consistent with other authorization error messages.
    final Exception rootCause = new KsqlTopicAuthorizationException(
        AclOperation.WRITE,
        e.unauthorizedTopics()
    );
    throw new KsqlException(createInsertFailedExceptionMessage(insertValues), rootCause);
  } catch (final ClusterAuthorizationException e) {
    // ClusterAuthorizationException is thrown when using idempotent producers
    // and either a topic write permission or a cluster-level idempotent write
    // permission (only applicable for broker versions no later than 2.8) is
    // missing. In this case, we include additional context to help the user
    // distinguish this type of failure from other permissions exceptions
    // such as the ones thrown above when TopicAuthorizationException is caught.
    throw new KsqlException(
        createInsertFailedExceptionMessage(insertValues),
        createClusterAuthorizationExceptionRootCause(dataSource)
    );
  } catch (final KafkaException e) {
    if (e.getCause() != null && e.getCause() instanceof ClusterAuthorizationException) {
      // The error message thrown when an idempotent producer is missing permissions
      // is (nondeterministically) inconsistent: it is either a raw ClusterAuthorizationException,
      // as checked for above, or a ClusterAuthorizationException wrapped inside a KafkaException.
      // ksqlDB handles these two the same way, accordingly.
      // See https://issues.apache.org/jira/browse/KAFKA-14138 for more.
      throw new KsqlException(
          createInsertFailedExceptionMessage(insertValues),
          createClusterAuthorizationExceptionRootCause(dataSource)
      );
    } else {
      throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e);
    }
  } catch (final Exception e) {
    // Catch-all: wrap with a message naming the failed INSERT, preserving the cause.
    throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e);
  }
}
// Inserting only (K0, COL0) must leave the remaining value column null in the
// serialized row.
@Test
public void shouldFillInMissingColumnsWithNulls() {
    // Given:
    final ConfiguredStatement<InsertValues> statement = givenInsertValues(
        ImmutableList.of(K0, COL0),
        ImmutableList.of(
            new StringLiteral("str"),
            new StringLiteral("str"))
    );
    // When:
    executor.execute(statement, mock(SessionProperties.class), engine, serviceContext);
    // Then:
    verify(keySerializer).serialize(TOPIC_NAME, genericKey("str"));
    verify(valueSerializer).serialize(TOPIC_NAME, genericRow("str", null));
    verify(producer).send(new ProducerRecord<>(TOPIC_NAME, null, 1L, KEY, VALUE));
}
/**
 * Revises a constraint's metadata for sharding: finds the first actual data
 * node whose table yields a logic index name for the constraint, and rewrites
 * the referenced table back to its logic table name when one exists.
 *
 * @return revised constraint metadata, or empty when no data node matches
 */
@Override
public Optional<ConstraintMetaData> revise(final String tableName, final ConstraintMetaData originalMetaData, final ShardingRule rule) {
    // Fix: loop-invariant lookup hoisted out of the per-data-node loop.
    String referencedTableName = originalMetaData.getReferencedTableName();
    for (DataNode each : shardingTable.getActualDataNodes()) {
        Optional<String> logicIndexName = getLogicIndex(originalMetaData.getName(), each.getTableName());
        if (logicIndexName.isPresent()) {
            // Map the referenced actual table back to its logic table when known.
            return Optional.of(new ConstraintMetaData(
                    logicIndexName.get(),
                    rule.getAttributes().getAttribute(DataNodeRuleAttribute.class).findLogicTableByActualTable(referencedTableName).orElse(referencedTableName)));
        }
    }
    return Optional.empty();
}
// When an actual data node matches, the revised constraint must carry the
// logic index name while keeping the referenced table name.
@Test
void assertReviseWhenTableMatches() {
    ConstraintMetaData originalMetaData = new ConstraintMetaData("test_table_name_1", "referenced_table_name");
    Optional<ConstraintMetaData> actual = reviser.revise("table_name_1", originalMetaData, shardingRule);
    assertTrue(actual.isPresent());
    assertThat(actual.get().getName(), is("test"));
    assertThat(actual.get().getReferencedTableName(), is("referenced_table_name"));
}
/**
 * @return the configured query-parameter rewrite rules.
 *     NOTE(review): this exposes the internal mutable map directly; callers
 *     must not modify it (consider Collections.unmodifiableMap if callers allow).
 */
public Map<String, List<QueryHeaderRewriteRule>> getQueryParamRewriteRules() {
    return queryParamRewriteRules;
}
// The router config fixture must load all four query-param rewrite rules.
@Test
public void testQueryParamRewriteRules() {
    Assert.assertNotNull(routerConfig.getQueryParamRewriteRules());
    Assert.assertEquals(routerConfig.getQueryParamRewriteRules().size(), 4);
}
/**
 * Validates that a materialized view's partitioning is usable for refresh:
 * the view must have at least one directly mapped column, must be
 * partitioned, every base table must share at least one partition column with
 * the view, and outer-join base tables additionally need an indirectly mapped
 * partition (a common partition equality constraint).
 *
 * @throws PrestoException (NOT_SUPPORTED) when any of the above is violated
 * @throws TableNotFoundException when a base table cannot be resolved
 */
public static void validateMaterializedViewPartitionColumns(
        SemiTransactionalHiveMetastore metastore,
        MetastoreContext metastoreContext,
        Table viewTable,
        MaterializedViewDefinition viewDefinition)
{
    SchemaTableName viewName = new SchemaTableName(viewTable.getDatabaseName(), viewTable.getTableName());
    Map<String, Map<SchemaTableName, String>> viewToBaseDirectColumnMap = viewDefinition.getDirectColumnMappingsAsMap();
    if (viewToBaseDirectColumnMap.isEmpty()) {
        throw new PrestoException(
                NOT_SUPPORTED,
                format("Materialized view %s must have at least one column directly defined by a base table column.", viewName));
    }
    List<Column> viewPartitions = viewTable.getPartitionColumns();
    if (viewPartitions.isEmpty()) {
        throw new PrestoException(NOT_SUPPORTED, "Unpartitioned materialized view is not supported.");
    }
    // Resolve every base table referenced by the view definition.
    List<Table> baseTables = viewDefinition.getBaseTables().stream()
            .map(baseTableName -> metastore.getTable(metastoreContext, baseTableName.getSchemaName(), baseTableName.getTableName())
                    .orElseThrow(() -> new TableNotFoundException(baseTableName)))
            .collect(toImmutableList());
    Map<Table, List<Column>> baseTablePartitions = baseTables.stream()
            .collect(toImmutableMap(
                    table -> table,
                    Table::getPartitionColumns));
    for (Table baseTable : baseTablePartitions.keySet()) {
        SchemaTableName schemaBaseTable = new SchemaTableName(baseTable.getDatabaseName(), baseTable.getTableName());
        // Each base table must share at least one partition column with the view.
        if (!isCommonPartitionFound(schemaBaseTable, baseTablePartitions.get(baseTable), viewPartitions, viewToBaseDirectColumnMap)) {
            throw new PrestoException(
                    NOT_SUPPORTED,
                    format("Materialized view %s must have at least one partition column that exists in %s as well", viewName, baseTable.getTableName()));
        }
        // Outer-join base tables additionally require an indirectly mapped partition.
        if (viewDefinition.getBaseTablesOnOuterJoinSide().contains(schemaBaseTable) && viewToBaseTableOnOuterJoinSideIndirectMappedPartitions(viewDefinition, baseTable).get().isEmpty()) {
            throw new PrestoException(
                    NOT_SUPPORTED,
                    format("Outer join conditions in Materialized view %s must have at least one common partition equality constraint", viewName));
        }
    }
}
// The view is partitioned by "ds" while the base table is partitioned only by
// "shipmode": validation must fail with a NOT_SUPPORTED PrestoException.
@Test(expectedExceptions = PrestoException.class, expectedExceptionsMessageRegExp = "Materialized view schema.table must have at least one partition column that exists in table as well")
public void testValidateMaterializedViewPartitionColumnsNoneCommonPartition() {
    TestingSemiTransactionalHiveMetastore testMetastore = TestingSemiTransactionalHiveMetastore.create();
    Column dsColumn = new Column("ds", HIVE_STRING, Optional.empty(), Optional.empty());
    Column shipmodeColumn = new Column("shipmode", HIVE_STRING, Optional.empty(), Optional.empty());
    List<Column> partitionColumns = ImmutableList.of(shipmodeColumn);
    SchemaTableName tableName = new SchemaTableName(SCHEMA_NAME, TABLE_NAME);
    Map<String, Map<SchemaTableName, String>> originalColumnMapping =
            ImmutableMap.of(dsColumn.getName(), ImmutableMap.of(tableName, dsColumn.getName()));
    testMetastore.addTable(SCHEMA_NAME, TABLE_NAME, getTable(partitionColumns), ImmutableList.of());
    List<Column> viewPartitionColumns = ImmutableList.of(dsColumn);
    validateMaterializedViewPartitionColumns(testMetastore, metastoreContext, getTable(viewPartitionColumns),
            getConnectorMaterializedViewDefinition(ImmutableList.of(tableName), originalColumnMapping));
}
/**
 * Deregisters this instance from the ServiceComb register center. When the
 * register-center service is unavailable, the failure is logged and the call
 * becomes a no-op.
 */
@Override
public void deregister(ServiceCombRegistration registration) {
    final RegisterCenterService registerService = getRegisterCenterService();
    if (registerService != null) {
        registerService.unRegister();
    } else {
        // Nothing to unregister against; report and bail out.
        LOGGER.severe("registerCenterService is null, fail to unRegister!");
    }
}
// Deregistering must delegate exactly once to the register-center service.
@Test
public void deregister() {
    registry.deregister(Mockito.mock(ServiceCombRegistration.class));
    Mockito.verify(spyService, Mockito.times(1)).unRegister();
}
/**
 * Merges connection-, pool- and custom properties into one insertion-ordered
 * map. Later sources overwrite earlier ones on key collisions
 * (custom > pool > connection).
 */
public Map<String, Object> getAllLocalProperties() {
    // Pre-size with load factor 1 so the merge never rehashes.
    final int expectedSize = connectionPropertySynonyms.getLocalProperties().size()
            + poolPropertySynonyms.getLocalProperties().size()
            + customProperties.getProperties().size();
    Map<String, Object> merged = new LinkedHashMap<>(expectedSize, 1F);
    merged.putAll(connectionPropertySynonyms.getLocalProperties());
    merged.putAll(poolPropertySynonyms.getLocalProperties());
    merged.putAll(customProperties.getProperties());
    return merged;
}
// Pool properties derived from a data source must carry over connection
// settings, omit unset ones (loginTimeout) and preserve connectionInitSqls
// as a list containing both statements.
@SuppressWarnings("unchecked")
@Test
void assertGetDataSourceConfigurationWithConnectionInitSqls() {
    MockedDataSource actualDataSource = new MockedDataSource();
    actualDataSource.setDriverClassName(MockedDataSource.class.getName());
    actualDataSource.setUrl("jdbc:mock://127.0.0.1/foo_ds");
    actualDataSource.setUsername("root");
    actualDataSource.setPassword("root");
    actualDataSource.setConnectionInitSqls(Arrays.asList("set names utf8mb4;", "set names utf8;"));
    DataSourcePoolProperties actual = DataSourcePoolPropertiesCreator.create(actualDataSource);
    assertThat(actual.getPoolClassName(), is(MockedDataSource.class.getName()));
    assertThat(actual.getAllLocalProperties().get("driverClassName").toString(), is(MockedDataSource.class.getName()));
    assertThat(actual.getAllLocalProperties().get("url").toString(), is("jdbc:mock://127.0.0.1/foo_ds"));
    assertThat(actual.getAllLocalProperties().get("username").toString(), is("root"));
    assertThat(actual.getAllLocalProperties().get("password").toString(), is("root"));
    assertNull(actual.getAllLocalProperties().get("loginTimeout"));
    assertThat(actual.getAllLocalProperties().get("connectionInitSqls"), instanceOf(List.class));
    List<String> actualConnectionInitSql = (List<String>) actual.getAllLocalProperties().get("connectionInitSqls");
    assertThat(actualConnectionInitSql, hasItem("set names utf8mb4;"));
    assertThat(actualConnectionInitSql, hasItem("set names utf8;"));
}
/**
 * Computes the next buffer size as an exponential moving average of the
 * desirable per-buffer size, clamped to [minBufferSize, maxBufferSize].
 *
 * @param totalBufferSizeInBytes total bytes available across all buffers (>= 0)
 * @param totalBuffers           number of buffers sharing that total (> 0)
 * @return the updated (and remembered) buffer size
 */
public int calculateBufferSize(long totalBufferSizeInBytes, int totalBuffers) {
    checkArgument(totalBufferSizeInBytes >= 0, "Size of buffer should be non negative");
    checkArgument(totalBuffers > 0, "Number of buffers should be positive");
    // Since the result value is always limited by max buffer size while the instant value is
    // potentially unlimited. It can lead to an instant change from min to max value in case
    // when the instant value is significantly larger than the possible max value.
    // The solution is to limit the instant buffer size by twice of current buffer size in order
    // to have the same growth and shrink speeds. for example if the instant value is equal to 0
    // and the current value is 16000 we can decrease it at maximum by 1600(suppose alfa=0.1) .
    // The idea is to allow increase and decrease size by the same number. So if the instant
    // value would be large(for example 100000) it will be possible to increase the current
    // value by 1600(the same as decreasing) because the limit will be 2 * currentValue = 32000.
    // Example of change speed:
    // growing = 32768, 29647, 26823, 24268, 21956, 19864
    // shrinking = 19864, 21755, 23826, 26095, 28580, 31301, 32768
    long desirableBufferSize =
            Math.min(totalBufferSizeInBytes / totalBuffers, 2L * lastBufferSize);
    // EMA update: move lastBufferSize a fraction alpha toward the desired size.
    lastBufferSize += alpha * (desirableBufferSize - lastBufferSize);
    // Clamp into [minBufferSize, maxBufferSize] and remember for the next call.
    return lastBufferSize = Math.max(minBufferSize, Math.min(lastBufferSize, maxBufferSize));
}
@Test
void testSizeLessThanMinSize() {
    BufferSizeEMA calculator = new BufferSizeEMA(200, 10, 3);

    // The computed size halves each step but can never drop below the configured minimum.
    assertThat(calculator.calculateBufferSize(0, 1)).isEqualTo(100);
    assertThat(calculator.calculateBufferSize(0, 1)).isEqualTo(50);
    assertThat(calculator.calculateBufferSize(0, 1)).isEqualTo(25);
    assertThat(calculator.calculateBufferSize(0, 1)).isEqualTo(12);
    assertThat(calculator.calculateBufferSize(0, 1)).isEqualTo(10);
    assertThat(calculator.calculateBufferSize(0, 1)).isEqualTo(10);
}
/**
 * Runs a prefix scan across all underlying stores for this store name and
 * returns a single iterator over the combined results.
 * <p>
 * Each store's InvalidStateStoreException is rethrown with a rediscovery hint,
 * because the store may have migrated to another instance mid-query.
 * NOTE(review): the original exception is intentionally not chained as a cause here
 * — confirm this matches the surrounding codebase's convention.
 */
@Override
public <PS extends Serializer<P>, P> KeyValueIterator<K, V> prefixScan(final P prefix, final PS prefixKeySerializer) {
    Objects.requireNonNull(prefix);
    Objects.requireNonNull(prefixKeySerializer);

    // Lazily produces one iterator per store; iterators are consumed in sequence
    // by CompositeKeyValueIterator below.
    final NextIteratorFunction<K, V, ReadOnlyKeyValueStore<K, V>> nextIteratorFunction = new NextIteratorFunction<K, V, ReadOnlyKeyValueStore<K, V>>() {
        @Override
        public KeyValueIterator<K, V> apply(final ReadOnlyKeyValueStore<K, V> store) {
            try {
                return store.prefixScan(prefix, prefixKeySerializer);
            } catch (final InvalidStateStoreException e) {
                throw new InvalidStateStoreException("State store is not available anymore and may have been migrated to another instance; please re-discover its location from the state metadata.");
            }
        }
    };

    final List<ReadOnlyKeyValueStore<K, V>> stores = storeProvider.stores(storeName, storeType);
    return new DelegatingPeekingKeyValueIterator<>(
        storeName,
        new CompositeKeyValueIterator<>(stores.iterator(), nextIteratorFunction));
}
@Test
public void shouldThrowInvalidStoreExceptionOnPrefixScanDuringRebalance() {
    // While the store is rebalancing, prefixScan must surface an InvalidStateStoreException.
    assertThrows(InvalidStateStoreException.class, () -> rebalancing().prefixScan("anything", new StringSerializer()));
}
/**
 * Deep-copies a Data object graph, reusing previously-copied complex values so
 * shared references (and cycles) are preserved in the copy.
 *
 * @param object the value to copy; may be null, a primitive Data value, or a DataComplex.
 * @param alreadyCopied identity map from source DataComplex to its clone.
 * @return the copy (primitives and null are returned as-is).
 * @throws CloneNotSupportedException if the value is neither null, primitive, nor complex.
 */
static <T> T copy(T object, DataComplexTable alreadyCopied) throws CloneNotSupportedException {
    if (object == null) {
        return null;
    } else if (isComplex(object)) {
        DataComplex src = (DataComplex) object;
        @SuppressWarnings("unchecked")
        T found = (T) alreadyCopied.get(src);
        if (found != null) {
            // Already copied — return the existing clone to preserve sharing.
            return found;
        } else {
            DataComplex clone = src.clone();
            // Register the clone BEFORE recursing so cyclic references resolve to it.
            alreadyCopied.put(src, clone);
            if (clone instanceof DataMap) {
                ((DataMap)clone).copyReferencedObjects(alreadyCopied);
            } else if (clone instanceof DataList) {
                ((DataList)clone).copyReferencedObjects(alreadyCopied);
            }
            @SuppressWarnings("unchecked")
            T converted = (T) clone;
            return converted;
        }
    } else if (isPrimitive(object)) {
        // Primitive Data values are immutable, so they are shared rather than copied.
        return object;
    } else {
        throw new CloneNotSupportedException("Illegal value encountered: " + object);
    }
}
@Test
public void mapClonesHaveDifferentHashValues() throws CloneNotSupportedException {
    DataMap originalMap = new DataMap();
    originalMap.put("key", "value");
    DataMap copyMap = originalMap.copy();

    // The objects should be "equal," but not identical: the identity-based
    // dataComplexHashCode must differ between original and copy.
    assertTrue(copyMap.equals(originalMap));
    assertFalse(copyMap.dataComplexHashCode() == originalMap.dataComplexHashCode());
}
/**
 * Cheap local pre-check of the query result size limit: samples up to
 * {@code maxLocalPartitionsLimitForPreCheck} owned partitions and fails fast when
 * the extrapolated result size would exceed the configured limit.
 *
 * @param mapName name of the map being queried.
 * @throws QueryResultSizeExceededException if the local sample already exceeds the limit.
 */
void precheckMaxResultLimitOnLocalPartitions(String mapName) {
    // check if feature is enabled
    if (!isPreCheckEnabled) {
        return;
    }

    // limit number of local partitions to check to keep runtime constant
    PartitionIdSet localPartitions = mapServiceContext.getCachedOwnedPartitions();
    int partitionsToCheck = min(localPartitions.size(), maxLocalPartitionsLimitForPreCheck);
    if (partitionsToCheck == 0) {
        return;
    }

    // calculate size of local partitions
    int localPartitionSize = getLocalPartitionSize(mapName, localPartitions, partitionsToCheck);
    if (localPartitionSize == 0) {
        return;
    }

    // check local result size; the factor gives headroom for sampling error
    long localResultLimit = getNodeResultLimit(partitionsToCheck);
    if (localPartitionSize > localResultLimit * MAX_RESULT_LIMIT_FACTOR_FOR_PRECHECK) {
        var localMapStatsProvider = mapServiceContext.getLocalMapStatsProvider();
        if (localMapStatsProvider != null && localMapStatsProvider.hasLocalMapStatsImpl(mapName)) {
            // Record the rejection in the map's local statistics before failing.
            localMapStatsProvider.getLocalMapStatsImpl(mapName).incrementQueryResultSizeExceededCount();
        }
        throw new QueryResultSizeExceededException(maxResultLimit, " Result size exceeded in local pre-check.");
    }
}
@Test(expected = QueryResultSizeExceededException.class)
public void testLocalPreCheckEnabledWitDifferentPartitionSizesOverLimit() {
    // Mixed partition sizes (including 0 and a negative sentinel) whose sampled
    // total exceeds the configured limit must trigger the pre-check exception.
    int[] partitionSizes = {0, 2200, Integer.MIN_VALUE};
    populatePartitions(partitionSizes);

    initMocksWithConfiguration(200000, 2);
    limiter.precheckMaxResultLimitOnLocalPartitions(ANY_MAP_NAME);
}
/**
 * Converts a PMML TransformationDictionary into its KiePMML counterpart,
 * translating derived fields (resolved against the given fields) and define-functions.
 *
 * @param toConvert the PMML dictionary to convert; must not be null.
 * @param fields fields used to resolve derived-field references.
 * @return a new KiePMMLTransformationDictionary with a random UUID as name.
 */
public static KiePMMLTransformationDictionary getKiePMMLTransformationDictionary(final TransformationDictionary toConvert,
                                                                                 final List<Field<?>> fields) {
    final List<KiePMMLDerivedField> kiePMMLDerivedFields = getKiePMMLDerivedFields(toConvert.getDerivedFields(), fields);
    final List<KiePMMLDefineFunction> kiePMMLDefineFunctions = getKiePMMLDefineFunctions(toConvert.getDefineFunctions());
    return KiePMMLTransformationDictionary.builder(UUID.randomUUID().toString(), getKiePMMLExtensions(toConvert.getExtensions()))
            .withDefineFunctions(kiePMMLDefineFunctions)
            .withDerivedFields(kiePMMLDerivedFields)
            .build();
}
@Test
void getKiePMMLTransformationDictionary() {
    // Round-trip a random TransformationDictionary and verify each derived field
    // and define-function has a matching, equivalent KiePMML counterpart.
    final TransformationDictionary toConvert = getRandomTransformationDictionary();
    KiePMMLTransformationDictionary retrieved = KiePMMLTransformationDictionaryInstanceFactory.getKiePMMLTransformationDictionary(toConvert, Collections.emptyList());
    assertThat(retrieved).isNotNull();
    List<DerivedField> derivedFields = toConvert.getDerivedFields();
    List<KiePMMLDerivedField> derivedFieldsToVerify = retrieved.getDerivedFields();
    assertThat(derivedFieldsToVerify).hasSameSizeAs(derivedFields);
    derivedFields.forEach(derivedFieldSource -> {
        // Match by name, then verify the converted field in detail.
        Optional<KiePMMLDerivedField> derivedFieldToVerify =
            derivedFieldsToVerify.stream().filter(param -> param.getName().equals(derivedFieldSource.getName()))
                .findFirst();
        assertThat(derivedFieldToVerify).isPresent();
        commonVerifyKiePMMLDerivedField(derivedFieldToVerify.get(), derivedFieldSource);
    });
    List<DefineFunction> defineFunctions = toConvert.getDefineFunctions();
    List<KiePMMLDefineFunction> defineFunctionsToVerify = retrieved.getDefineFunctions();
    assertThat(defineFunctionsToVerify).hasSameSizeAs(defineFunctions);
    defineFunctions.forEach(defineFunctionSource -> {
        // Match by name, then verify the converted function in detail.
        Optional<KiePMMLDefineFunction> defineFunctionToVerify =
            defineFunctionsToVerify.stream().filter(param -> param.getName().equals(defineFunctionSource.getName()))
                .findFirst();
        assertThat(defineFunctionToVerify).isPresent();
        commonVerifyKiePMMLDefineFunction(defineFunctionToVerify.get(), defineFunctionSource);
    });
}
/**
 * Creates a mutation detector for the given value using its coder to take a
 * snapshot for later comparison.
 *
 * @param value the value to guard against mutation; may be null.
 * @param coder the coder used to encode the value for comparison.
 * @return a no-op detector for null values, otherwise a coder-based detector.
 * @throws CoderException if encoding the value fails.
 */
public static <T> MutationDetector forValueWithCoder(T value, Coder<T> coder) throws CoderException {
    // A null value cannot be mutated, so no detection is needed.
    if (value != null) {
        return new CodedValueMutationDetector<>(value, coder);
    }
    return noopMutationDetector();
}
@Test
public void testEquivalentListOfArrays() throws Exception {
    // Replacing an element with a byte array that encodes identically must NOT
    // be reported as a mutation, even though the array instance differs.
    List<byte[]> value = Arrays.asList(new byte[] {0x1}, new byte[] {0x2, 0x3}, new byte[] {0x4});
    MutationDetector detector =
        MutationDetectors.forValueWithCoder(value, ListCoder.of(ByteArrayCoder.of()));
    value.set(0, new byte[] {0x1});
    detector.verifyUnmodified();
}
/**
 * Confirms an app authentication: validates the session and authenticator,
 * logs the action, copies authenticator details into the app session, and
 * notifies OIDC/SAML back-ends when applicable.
 * <p>
 * Returns NokResponse on any validation failure; otherwise a ConfirmationResponse
 * indicating whether the confirming authenticator is the one scheduled for destruction.
 */
@Override
public AppResponse process(Flow flow, ConfirmRequest request) throws FlowNotDefinedException, IOException, NoSuchAlgorithmException {
    var authAppSession = appSessionService.getSession(request.getAuthSessionId());

    // Reject if the auth session is not authenticated or belongs to another app instance.
    if (!isAppSessionAuthenticated(authAppSession) || !request.getUserAppId().equals(authAppSession.getUserAppId())){
        return new NokResponse();
    }

    appAuthenticator = appAuthenticatorService.findByUserAppId(authAppSession.getUserAppId());
    // The authenticator must be activated and still present in the store.
    if (!isAppAuthenticatorActivated(appAuthenticator) || !appAuthenticatorService.exists(appAuthenticator))
        return new NokResponse();

    // eIDAS flows additionally require a valid PIP signature.
    if (appSession.getEidasUit()){
        var response = validatePipSignature(request.getSignatureOfPip());
        if (response != null) return response;
    }

    if (appSession.getAction() != null ) {
        var result = digidClient.getAccountStatus(appAuthenticator.getAccountId());
        if (ERROR_DECEASED.equals(result.get("error"))) return deceasedResponse();

        // Remote-log an action-specific audit event.
        switch(appSession.getAction()){
            case "activate_with_app" -> digidClient.remoteLog("1366",
                Map.of(lowerUnderscore(ACCOUNT_ID), appAuthenticator.getAccountId(), lowerUnderscore(HIDDEN), true));
            case "upgrade_rda_widchecker" -> digidClient.remoteLog("1318", getAppDetails());
            default -> digidClient.remoteLog("1344", getAppDetails());
        }
    }

    // Copy authenticator/account details onto the current app session.
    appSession.setAppAuthenticationLevel(appAuthenticator.getAuthenticationLevel());
    appSession.setAccountId(authAppSession.getAccountId());
    appSession.setSubstantialActivatedAt(appAuthenticator.getSubstantieelActivatedAt());
    appSession.setSubstantialDocumentType(appAuthenticator.getSubstantieelDocumentType());
    appSession.setUserAppId(authAppSession.getUserAppId());

    // Propagate the confirmation to OIDC when an OIDC session is attached.
    if (appSession.getOidcSessionId() != null && authAppSession.getState().equals(State.AUTHENTICATED.name())) {
        oidcClient.confirmOidc(appSession.getAccountId(), appAuthenticator.getAuthenticationLevel(), appSession.getOidcSessionId());
    }

    // Propagate the confirmation to the SAML AD session when attached.
    if (appSession.getAdSessionId() != null && authAppSession.getState().equals(State.AUTHENTICATED.name())) {
        var bsn = digidClient.getBsn(appSession.getAccountId());
        samlClient.updateAdSession(appSession.getAdSessionId(), appAuthenticator.getAuthenticationLevel(), bsn.get(BSN));
    }

    // True when this authenticator is the one marked for destruction in the session.
    return new ConfirmationResponse(appAuthenticator.getId().equals(appSession.getAppToDestroy()));
}
@Test
public void processReturnsNokResponseIfNotExisting() throws FlowNotDefinedException, IOException, NoSuchAlgorithmException {
    // given: the session resolves, but the authenticator no longer exists in the store
    when(appAuthenticatorService.exists(mockedAppAuthenticator)).thenReturn(false);
    when(appSessionService.getSession(confirmRequest.getAuthSessionId())).thenReturn(authAppSession);

    // when
    AppResponse appResponse = confirmed.process(mockedFlow, confirmRequest);

    // then: a missing authenticator must yield a NOK response
    assertTrue(appResponse instanceof NokResponse);
}
/**
 * Delegates to the key's hash code, keeping hashCode consistent with equals
 * (which presumably also compares by {@code key} — confirm against the class's equals).
 */
@Override
public int hashCode() {
    return key.hashCode();
}
@Test
public void test_equals_and_hashcode() {
    // Rules with the same engineId + ruleId are equal and hash alike;
    // a different ruleId yields inequality and (here) a different hash.
    NewAdHocRule adHocRule1 = new NewAdHocRule(ScannerReport.ExternalIssue.newBuilder().setEngineId("eslint").setRuleId("no-cond-assign").build());
    NewAdHocRule adHocRule2 = new NewAdHocRule(ScannerReport.ExternalIssue.newBuilder().setEngineId("eslint").setRuleId("no-cond-assign").build());
    NewAdHocRule anotherAdHocRule = new NewAdHocRule(ScannerReport.ExternalIssue.newBuilder().setEngineId("eslint").setRuleId("another").build());

    assertThat(adHocRule1)
        .isEqualTo(adHocRule1)
        .isEqualTo(adHocRule2)
        .isNotNull()
        .isNotEqualTo(anotherAdHocRule)
        .hasSameHashCodeAs(adHocRule1)
        .hasSameHashCodeAs(adHocRule2);
    assertThat(adHocRule1.hashCode()).isNotEqualTo(anotherAdHocRule.hashCode());
}
/**
 * Returns the configured mount point, or empty when none was set.
 */
public Optional<Path> getMountPoint() {
    // Absent when no mount point was supplied on the command line.
    if (mMountPoint == null) {
        return Optional.empty();
    }
    return Optional.of(mMountPoint);
}
@Test
public void testGetMountPoint() {
    // The -m flag populates the mount point, exposed as a Path wrapped in Optional.
    mJCommander.parse("-m", "/tmp/fuse-mp");
    assertEquals(Optional.of(Paths.get("/tmp/fuse-mp")), mOptions.getMountPoint());
}
/**
 * Loads all content packs and returns only the highest revision of each pack id.
 *
 * @return one ContentPack per id — the one with the maximum revision number.
 */
public Set<ContentPack> loadAllLatest() {
    final Set<ContentPack> allContentPacks = loadAll();
    // Group all revisions by pack id.
    final ImmutableMultimap.Builder<ModelId, ContentPack> byIdBuilder = ImmutableMultimap.builder();
    for (ContentPack contentPack : allContentPacks) {
        byIdBuilder.put(contentPack.id(), contentPack);
    }

    final ImmutableMultimap<ModelId, ContentPack> contentPacksById = byIdBuilder.build();
    final ImmutableSet.Builder<ContentPack> latestContentPacks = ImmutableSet.builderWithExpectedSize(contentPacksById.keySet().size());
    // Pick the revision with the highest revision() value per id.
    for (ModelId id : contentPacksById.keySet()) {
        final ImmutableCollection<ContentPack> contentPacks = contentPacksById.get(id);
        final ContentPack latestContentPackRevision = Collections.max(contentPacks, Comparator.comparingInt(Revisioned::revision));
        latestContentPacks.add(latestContentPackRevision);
    }

    return latestContentPacks.build();
}
@Test
@MongoDBFixtures("ContentPackPersistenceServiceTest.json")
public void loadAllLatest() {
    // The fixture contains multiple revisions; only the latest (revision 3)
    // of the duplicated pack id must be returned.
    final Set<ContentPack> contentPacks = contentPackPersistenceService.loadAllLatest();

    assertThat(contentPacks)
        .hasSize(3)
        .anyMatch(contentPack -> contentPack.id().equals(ModelId.of("dcd74ede-6832-4ef7-9f69-deadbeef0000")) && contentPack.revision() == 3);
}
/**
 * Loads a HuggingFace tokenizer.json file into a {@code TokenizerConfig}.
 * <p>
 * Parses the normalizer (lowercase / strip-accents flags), the post-processor
 * (classification and separator special tokens, supporting both the
 * "TemplateProcessing" and "BertProcessing" layouts) and the model section
 * (unknown token, max input chars per word, and the vocab token-to-id map).
 *
 * @param tokenizerPath path to the tokenizer json file.
 * @return the parsed tokenizer configuration.
 * @throws IOException if the file cannot be read or parsed as JSON.
 * @throws IllegalStateException if a required element is missing or unrecognized.
 */
static TokenizerConfig loadTokenizer(Path tokenizerPath) throws IOException {
    ObjectMapper mapper = new ObjectMapper(new JsonFactory());

    JsonNode rootNode = mapper.readTree(tokenizerPath.toFile());

    // The tokenizer file is a JSON object with the following schema
    /*
     * {
     *   "version": "1.0",
     *   "truncation": null,
     *   "padding": null,
     *   "added_tokens": [
     *     {
     *       "id": 0,
     *       "special": true,
     *       "content": "[PAD]",
     *       "single_word": false,
     *       "lstrip": false,
     *       "rstrip": false,
     *       "normalized": false
     *     }
     *   ],
     *   "normalizer": {
     *     "type": "BertNormalizer",
     *     "clean_text": true,
     *     "handle_chinese_chars": true,
     *     "strip_accents": null,
     *     "lowercase": false
     *   },
     *   "pre_tokenizer": {
     *     "type": "BertPreTokenizer"
     *   },
     *   "post_processor": {
     *     "type": "TemplateProcessing",
     *     "single": [
     *       {
     *         "SpecialToken": {
     *           "id": "[CLS]",
     *           "type_id": 0
     *         }
     *       },
     *       {
     *         "Sequence": {
     *           "id": "A",
     *           "type_id": 0
     *         }
     *       },
     *       {
     *         "SpecialToken": {
     *           "id": "[SEP]",
     *           "type_id": 0
     *         }
     *       }
     *     ],
     *     "pair": [
     *       {
     *         "SpecialToken": {
     *           "id": "[CLS]",
     *           "type_id": 0
     *         }
     *       },
     *       {
     *         "Sequence": {
     *           "id": "A",
     *           "type_id": 0
     *         }
     *       },
     *       {
     *         "SpecialToken": {
     *           "id": "[SEP]",
     *           "type_id": 0
     *         }
     *       },
     *       {
     *         "Sequence": {
     *           "id": "B",
     *           "type_id": 1
     *         }
     *       },
     *       {
     *         "SpecialToken": {
     *           "id": "[SEP]",
     *           "type_id": 1
     *         }
     *       }
     *     ],
     *     "special_tokens": {
     *       "[SEP]": {
     *         "id": "[SEP]",
     *         "ids": [
     *           102
     *         ],
     *         "tokens": [
     *           "[SEP]"
     *         ]
     *       },
     *       "[CLS]": {
     *         "id": "[CLS]",
     *         "ids": [
     *           101
     *         ],
     *         "tokens": [
     *           "[CLS]"
     *         ]
     *       }
     *     }
     *   },
     *   "decoder": {
     *     "type": "WordPiece",
     *     "prefix": "##",
     *     "cleanup": true
     *   },
     *   "model": {
     *     "unk_token": "[UNK]",
     *     "continuing_subword_prefix": "##",
     *     "max_input_chars_per_word": 100,
     *     "vocab": {
     *       "[PAD]": 0,
     *       ...
     *     }
     *   }
     * }
     */
    Map<String,Integer> vocabMap = new HashMap<>();
    String unknownToken;
    String classificationToken;
    String separatorToken;
    boolean lowercase = false;
    boolean stripAccents = false;
    int maxInputCharsPerWord = 100;

    // Parse out token normalization settings.
    // NOTE(review): a JSON null (as "strip_accents" often is) yields false from
    // asBoolean(), so a null strip_accents disables accent stripping — confirm intended.
    JsonNode normalizer = rootNode.get("normalizer");
    if (normalizer != null) {
        lowercase = normalizer.get("lowercase").asBoolean();
        stripAccents = normalizer.get("strip_accents").asBoolean();
    } else {
        throw new IllegalStateException("Failed to parse tokenizer json, did not find the normalizer");
    }

    // Parse out classification and separator tokens from the post processor;
    // the two supported layouts store them in different shapes.
    JsonNode postProcessor = rootNode.get("post_processor");
    if (postProcessor != null) {
        String processorType = postProcessor.get("type").asText();
        if (processorType != null && processorType.equals("TemplateProcessing")) {
            // TemplateProcessing: tokens live under special_tokens.<name>.tokens[0].
            JsonNode specialTokens = postProcessor.get("special_tokens");
            if (specialTokens != null) {
                JsonNode sepNode = specialTokens.get(SEPARATOR_TOKEN);
                if (sepNode != null) {
                    separatorToken = sepNode.get("tokens").get(0).asText();
                } else {
                    throw new IllegalStateException("Failed to parse tokenizer json, did not find separator token.");
                }
                JsonNode classificationNode = specialTokens.get(CLASSIFICATION_TOKEN);
                if (classificationNode != null) {
                    classificationToken = classificationNode.get("tokens").get(0).asText();
                } else {
                    throw new IllegalStateException("Failed to parse tokenizer json, did not find classification token.");
                }
            } else {
                throw new IllegalStateException("Failed to parse tokenizer json, did not find the special tokens.");
            }
        } else if (processorType != null && processorType.equals("BertProcessing")) {
            // BertProcessing: tokens live in the "sep"/"cls" arrays, element 0.
            JsonNode sepNode = postProcessor.get("sep");
            if (sepNode != null) {
                separatorToken = sepNode.get(0).asText();
            } else {
                throw new IllegalStateException("Failed to parse tokenizer json, did not find separator token.");
            }
            JsonNode clsNode = postProcessor.get("cls");
            if (clsNode != null) {
                classificationToken = clsNode.get(0).asText();
            } else {
                throw new IllegalStateException("Failed to parse tokenizer json, did not find classification token.");
            }
        } else {
            throw new IllegalStateException("Failed to parse tokenizer json, did not recognise post_processor:type " + processorType);
        }
    } else {
        throw new IllegalStateException("Failed to parse tokenizer json, did not find the post processor");
    }

    // Parse out tokens and ids from the model section.
    JsonNode model = rootNode.get("model");
    if (model != null) {
        unknownToken = model.get("unk_token").asText();
        if (unknownToken == null || unknownToken.isEmpty()) {
            throw new IllegalStateException("Failed to parse tokenizer json, did not extract unknown token");
        }
        maxInputCharsPerWord = model.get("max_input_chars_per_word").asInt();
        if (maxInputCharsPerWord == 0) {
            throw new IllegalStateException("Failed to parse tokenizer json, did not extract max_input_chars_per_word");
        }
        JsonNode vocab = model.get("vocab");
        if (vocab != null) {
            // Each vocab entry maps token text -> integer id; -1 signals a non-numeric value.
            for (Iterator<Map.Entry<String,JsonNode>> termItr = vocab.fields(); termItr.hasNext();) {
                Map.Entry<String,JsonNode> term = termItr.next();
                int value = term.getValue().asInt(-1);
                if (value == -1) {
                    throw new IllegalStateException("Failed to parse tokenizer json, could not extract vocab item '" + term.getKey() + "'");
                } else {
                    vocabMap.put(term.getKey(),value);
                }
            }
        } else {
            throw new IllegalStateException("Failed to parse tokenizer json, did not extract vocab");
        }
    } else {
        throw new IllegalStateException("Failed to parse tokenizer json, did not find the model");
    }

    return new TokenizerConfig(vocabMap,unknownToken,classificationToken,separatorToken,lowercase,stripAccents,maxInputCharsPerWord);
}
@Test
public void testTokenizerLoading() throws URISyntaxException, IOException {
    Path vocabPath = Paths.get(BERTFeatureExtractorTest.class.getResource("bert-base-cased-vocab.txt").toURI());
    Path tokenizerPath = Paths.get(BERTFeatureExtractorTest.class.getResource("bert-base-cased-tokenizer.json").toURI());

    // The parsed vocab must exactly cover the reference vocab file.
    BERTFeatureExtractor.TokenizerConfig config = BERTFeatureExtractor.loadTokenizer(tokenizerPath);
    List<String> vocabList = Files.readAllLines(vocabPath, StandardCharsets.UTF_8);
    Assertions.assertEquals(config.tokenIDs.size(),vocabList.size());
    for (String vocabElement : vocabList) {
        Assertions.assertTrue(config.tokenIDs.containsKey(vocabElement));
    }
    // Cased model: no lowercasing, no accent stripping.
    Assertions.assertEquals(100, config.maxInputCharsPerWord);
    Assertions.assertEquals(false, config.lowercase);
    Assertions.assertEquals(false, config.stripAccents);
    Assertions.assertEquals("[UNK]",config.unknownToken);
    Assertions.assertEquals("[CLS]",config.classificationToken);
    Assertions.assertEquals("[SEP]",config.separatorToken);

    // Uncased model: lowercasing and accent stripping enabled.
    tokenizerPath = Paths.get(BERTFeatureExtractorTest.class.getResource("tinybert-tokenizer.json").toURI());
    config = BERTFeatureExtractor.loadTokenizer(tokenizerPath);
    Assertions.assertEquals(100, config.maxInputCharsPerWord);
    Assertions.assertEquals(true, config.lowercase);
    Assertions.assertEquals(true, config.stripAccents);
    Assertions.assertEquals("[UNK]",config.unknownToken);
    Assertions.assertEquals("[CLS]",config.classificationToken);
    Assertions.assertEquals("[SEP]",config.separatorToken);
}
/**
 * Provides the global temp folder: resolves the working directory (relative
 * paths are resolved against the Sonar user home), cleans up stale temp
 * folders from previous runs, and creates a fresh one.
 *
 * @param scannerProps scanner properties holding the working-directory setting.
 * @param userHome the Sonar user home used to resolve relative paths.
 * @return a DefaultTempFolder that deletes its directory on shutdown.
 */
@Bean("GlobalTempFolder")
public TempFolder provide(ScannerProperties scannerProps, SonarUserHome userHome) {
    var workingPathName = StringUtils.defaultIfBlank(scannerProps.property(CoreProperties.GLOBAL_WORKING_DIRECTORY), CoreProperties.GLOBAL_WORKING_DIRECTORY_DEFAULT_VALUE);
    var workingPath = Paths.get(workingPathName);

    if (!workingPath.isAbsolute()) {
        var home = userHome.getPath();
        workingPath = home.resolve(workingPath).normalize();
    }

    try {
        // Best-effort cleanup of old temp folders; a failure must not abort startup.
        cleanTempFolders(workingPath);
    } catch (IOException e) {
        LOG.error(String.format("failed to clean global working directory: %s", workingPath), e);
    }
    var tempDir = createTempFolder(workingPath);
    // 'true' enables deletion of the folder when the TempFolder is disposed.
    return new DefaultTempFolder(tempDir.toFile(), true);
}
@Test
void cleanUpOld(@TempDir Path workingDir) throws IOException {
    // Create three temp folders back-dated 100 days so they qualify as stale.
    long creationTime = System.currentTimeMillis() - TimeUnit.DAYS.toMillis(100);
    for (int i = 0; i < 3; i++) {
        Path tmp = workingDir.resolve(".sonartmp_" + i);
        Files.createDirectories(tmp);
        setFileCreationDate(tmp, creationTime);
        assumeCorrectFileCreationDate(tmp, creationTime);
    }

    underTest.provide(
        new ScannerProperties(Map.of(CoreProperties.GLOBAL_WORKING_DIRECTORY, workingDir.toAbsolutePath().toString())),
        sonarUserHome);

    // this also checks that all other temps were deleted: only the newly
    // created temp folder should remain.
    assertThat(workingDir.toFile().list()).hasSize(1);
}
/**
 * Static factory for a ZoneTime combining a local time with a zone.
 *
 * @param localTime the wall-clock time.
 * @param zoneId the time zone.
 * @param hasSeconds whether the seconds component is significant.
 * @return a new ZoneTime instance.
 */
public static ZoneTime of(LocalTime localTime, ZoneId zoneId, boolean hasSeconds) {
    return new ZoneTime(localTime, zoneId, hasSeconds);
}
@Test
void of() {
    // The factory must retain the zone and derive the expected offset time.
    ZoneTime retrieved = ZoneTime.of(localTime, zoneId, true);
    assertNotNull(retrieved);
    assertEquals(offsetTime, retrieved.getOffsetTime());
    assertEquals(zoneId, retrieved.getZoneId());
}
/**
 * Matches a message against an exact-value stream rule: the (trimmed) field
 * value must equal the rule's value, with the result flipped when the rule
 * is inverted.
 *
 * @param msg the message to inspect.
 * @param rule the rule carrying the field name, expected value, and inversion flag.
 * @return true when the rule matches (respecting inversion).
 */
@Override
public boolean match(Message msg, StreamRule rule) {
    // Look the field up once instead of twice (the original called getField twice).
    final Object field = msg.getField(rule.getField());
    if (field == null) {
        // A missing field matches only when the rule is inverted.
        return rule.getInverted();
    }

    final String value = field.toString();
    // XOR flips the equality result when the rule is inverted.
    return rule.getInverted() ^ value.trim().equals(rule.getValue());
}
@Test
public void testSuccessfulMatch() {
    // A message whose field exactly equals the rule's value must match.
    StreamRule rule = getSampleRule();
    Message msg = getSampleMessage();
    msg.addField("something", "foo");

    StreamRuleMatcher matcher = getMatcher(rule);
    assertTrue(matcher.match(msg, rule));
}
/**
 * Sends a (non-streaming) chat completion request to the OpenAI-compatible
 * endpoint and parses the response into a message response object.
 *
 * @param prompt the prompt; a FunctionPrompt yields a FunctionMessageResponse,
 *               any other prompt an AiMessageResponse.
 * @param options chat options merged into the request payload.
 * @return the parsed response (with error fields populated on API errors),
 *         or null when the HTTP response body is blank.
 */
@Override
public <R extends MessageResponse<?>> R chat(Prompt<R> prompt, ChatOptions options) {
    Map<String, String> headers = new HashMap<>();
    headers.put("Content-Type", "application/json");
    headers.put("Authorization", "Bearer " + getConfig().getApiKey());

    // Allow callers to add or override headers via the configured customizer.
    Consumer<Map<String, String>> headersConfig = config.getHeadersConfig();
    if (headersConfig != null) {
        headersConfig.accept(headers);
    }

    String payload = OpenAiLLmUtil.promptToPayload(prompt, config, options, false);
    String endpoint = config.getEndpoint();
    String response = httpClient.post(endpoint + "/v1/chat/completions", headers, payload);

    if (StringUtil.noText(response)) {
        return null;
    }

    if (config.isDebug()) {
        // NOTE(review): raw stdout debug output — consider routing through a logger.
        System.out.println(">>>>receive payload:" + response);
    }

    JSONObject jsonObject = JSON.parseObject(response);
    JSONObject error = jsonObject.getJSONObject("error");

    // Function prompts get a function-call response; everything else a plain AI message.
    AbstractBaseMessageResponse<?> messageResponse;
    if (prompt instanceof FunctionPrompt) {
        messageResponse = new FunctionMessageResponse(((FunctionPrompt) prompt).getFunctions()
            , functionMessageParser.parse(jsonObject));
    } else {
        messageResponse = new AiMessageResponse(aiMessageParser.parse(jsonObject));
    }

    // Copy API error details onto the response when the server reported an error.
    if (error != null && !error.isEmpty()) {
        messageResponse.setError(true);
        messageResponse.setErrorMessage(error.getString("message"));
        messageResponse.setErrorType(error.getString("type"));
        messageResponse.setErrorCode(error.getString("code"));
    }

    //noinspection unchecked
    return (R) messageResponse;
}
@Test(expected = LlmException.class)
public void testChat() {
    // An invalid API key must surface as an LlmException from the chat call.
    OpenAiLlmConfig config = new OpenAiLlmConfig();
    config.setApiKey("sk-rts5NF6n*******");

    Llm llm = new OpenAiLlm(config);
    String response = llm.chat("请问你叫什么名字");
    System.out.println(response);
}
/**
 * Builds a CreateTableCommand from the output node, optionally overriding the
 * window's output refinement with the supplied emit strategy.
 *
 * @param outputNode the sink node describing schema, topic, and formats.
 * @param emitStrategy optional emit refinement applied to windowed key formats.
 * @return the command used to create the table.
 */
public CreateTableCommand createTableCommand(
    final KsqlStructuredDataOutputNode outputNode,
    final Optional<RefinementInfo> emitStrategy
) {
    Optional<WindowInfo> windowInfo = outputNode.getKsqlTopic().getKeyFormat().getWindowInfo();

    // When both a window and an emit strategy are present, rebuild the window
    // info with the strategy's output refinement attached.
    if (windowInfo.isPresent() && emitStrategy.isPresent()) {
        final WindowInfo info = windowInfo.get();
        windowInfo = Optional.of(WindowInfo.of(
            info.getType(),
            info.getSize(),
            Optional.of(emitStrategy.get().getOutputRefinement())
        ));
    }

    return new CreateTableCommand(
        outputNode.getSinkName().get(),
        outputNode.getSchema(),
        outputNode.getTimestampColumn(),
        outputNode.getKsqlTopic().getKafkaTopicName(),
        Formats.from(outputNode.getKsqlTopic()),
        windowInfo,
        Optional.of(outputNode.getOrReplace()),
        Optional.of(false)
    );
}
@Test
public void shouldBuildTimestampColumnForTable() {
    // Given: a TIMESTAMP property naming one of the table's columns
    givenProperty(
        CommonCreateConfigs.TIMESTAMP_NAME_PROPERTY,
        new StringLiteral(quote(ELEMENT2.getName().text()))
    );
    final CreateTable statement =
        new CreateTable(SOME_NAME, TABLE_ELEMENTS, false, true, withProperties, false);

    // When:
    final CreateTableCommand cmd = createSourceFactory.createTableCommand(
        statement,
        ksqlConfig
    );

    // Then: the command's timestamp column points at that element (no format)
    assertThat(
        cmd.getTimestampColumn(),
        is(Optional.of(
            new TimestampColumn(ELEMENT2.getName(), Optional.empty()))
        )
    );
}
/**
 * Applies the user's coupon to the price calculation: validates the coupon,
 * filters the items it applies to, checks the minimum spend, computes and
 * splits the discount, and updates both item-level and overall prices.
 * <p>
 * Runtime error/log strings are left in their original locale.
 */
@Override
public void calculate(TradePriceCalculateReqBO param, TradePriceCalculateRespBO result) {
    // 1.1 Validate the coupon; nothing to do when no coupon was supplied.
    if (param.getCouponId() == null) {
        return;
    }
    CouponRespDTO coupon = couponApi.validateCoupon(new CouponValidReqDTO()
            .setId(param.getCouponId()).setUserId(param.getUserId()));
    Assert.notNull(coupon, "校验通过的优惠劵({}),不能为空", param.getCouponId());
    // 1.2 Only NORMAL orders may use a coupon.
    if (ObjectUtil.notEqual(result.getType(), TradeOrderTypeEnum.NORMAL.getType())) {
        throw exception(PRICE_CALCULATE_COUPON_NOT_MATCH_NORMAL_ORDER);
    }

    // 2.1 Collect the order items matching the coupon's product scope.
    List<TradePriceCalculateRespBO.OrderItem> orderItems = filterMatchCouponOrderItems(result, coupon);
    if (CollUtil.isEmpty(orderItems)) {
        throw exception(COUPON_NO_MATCH_SPU);
    }
    // 2.2 Check that the matched total reaches the coupon's minimum spend.
    Integer totalPayPrice = TradePriceCalculatorHelper.calculateTotalPayPrice(orderItems);
    if (totalPayPrice < coupon.getUsePrice()) {
        throw exception(COUPON_NO_MATCH_MIN_PRICE);
    }

    // 3.1 Compute the discount amount; it must be strictly less than the total.
    Integer couponPrice = getCouponPrice(coupon, totalPayPrice);
    Assert.isTrue(couponPrice < totalPayPrice,
            "优惠劵({}) 的优惠金额({}),不能大于订单总金额({})", coupon.getId(), couponPrice, totalPayPrice);
    // 3.2 Split the discount proportionally across the matched items.
    List<Integer> divideCouponPrices = TradePriceCalculatorHelper.dividePrice(orderItems, couponPrice);

    // 4.1 Record the coupon used on the result.
    result.setCouponId(param.getCouponId());
    // 4.2 Record the promotion detail for this coupon.
    TradePriceCalculatorHelper.addPromotion(result, orderItems,
            param.getCouponId(), coupon.getName(), PromotionTypeEnum.COUPON.getType(),
            StrUtil.format("优惠劵:省 {} 元", TradePriceCalculatorHelper.formatPrice(couponPrice)),
            divideCouponPrices);
    // 4.3 Apply each item's share of the discount, then recompute item and overall prices.
    for (int i = 0; i < orderItems.size(); i++) {
        TradePriceCalculateRespBO.OrderItem orderItem = orderItems.get(i);
        orderItem.setCouponPrice(divideCouponPrices.get(i));
        TradePriceCalculatorHelper.recountPayPrice(orderItem);
    }
    TradePriceCalculatorHelper.recountAllPrice(result);
}
@Test
public void testCalculate() {
    // given: a request with a coupon and four items covering the match matrix
    TradePriceCalculateReqBO param = new TradePriceCalculateReqBO()
            .setUserId(233L).setCouponId(1024L)
            .setItems(asList(
                    new TradePriceCalculateReqBO.Item().setSkuId(10L).setCount(2).setSelected(true), // matches the coupon
                    new TradePriceCalculateReqBO.Item().setSkuId(20L).setCount(3).setSelected(true), // matches the coupon
                    new TradePriceCalculateReqBO.Item().setSkuId(30L).setCount(4).setSelected(true), // does not match the coupon
                    new TradePriceCalculateReqBO.Item().setSkuId(40L).setCount(5).setSelected(false) // matches the coupon, but not selected
            ));
    TradePriceCalculateRespBO result = new TradePriceCalculateRespBO()
            .setType(TradeOrderTypeEnum.NORMAL.getType())
            .setPrice(new TradePriceCalculateRespBO.Price())
            .setPromotions(new ArrayList<>())
            .setItems(asList(
                    new TradePriceCalculateRespBO.OrderItem().setSkuId(10L).setCount(2).setSelected(true)
                            .setPrice(100).setSpuId(1L),
                    new TradePriceCalculateRespBO.OrderItem().setSkuId(20L).setCount(3).setSelected(true)
                            .setPrice(50).setSpuId(2L),
                    new TradePriceCalculateRespBO.OrderItem().setSkuId(30L).setCount(4).setSelected(true)
                            .setPrice(30).setSpuId(3L),
                    new TradePriceCalculateRespBO.OrderItem().setSkuId(40L).setCount(5).setSelected(false)
                            .setPrice(60).setSpuId(1L)
            ));
    // make sure prices are initialized before the calculator runs
    TradePriceCalculatorHelper.recountPayPrice(result.getItems());
    TradePriceCalculatorHelper.recountAllPrice(result);
    // mock the coupon returned by validateCoupon: 50% off, capped at 70, min spend 350, SPU scope {1, 2}
    CouponRespDTO coupon = randomPojo(CouponRespDTO.class, o -> o.setId(1024L).setName("程序员节")
            .setProductScope(PromotionProductScopeEnum.SPU.getScope()).setProductScopeValues(asList(1L, 2L))
            .setUsePrice(350).setDiscountType(PromotionDiscountTypeEnum.PERCENT.getType())
            .setDiscountPercent(50).setDiscountLimitPrice(70));
    when(couponApi.validateCoupon(eq(new CouponValidReqDTO().setId(1024L).setUserId(233L)))).thenReturn(coupon);

    // when
    tradeCouponPriceCalculator.calculate(param, result);

    // then
    assertEquals(result.getCouponId(), 1024L);
    // assert: price section
    TradePriceCalculateRespBO.Price price = result.getPrice();
    assertEquals(price.getTotalPrice(), 470);
    assertEquals(price.getDiscountPrice(), 0);
    assertEquals(price.getPointPrice(), 0);
    assertEquals(price.getDeliveryPrice(), 0);
    assertEquals(price.getCouponPrice(), 70);
    assertEquals(price.getPayPrice(), 400);
    // assert: SKU 1 (matched; carries its share of the discount)
    TradePriceCalculateRespBO.OrderItem orderItem01 = result.getItems().get(0);
    assertEquals(orderItem01.getSkuId(), 10L);
    assertEquals(orderItem01.getCount(), 2);
    assertEquals(orderItem01.getPrice(), 100);
    assertEquals(orderItem01.getDiscountPrice(), 0);
    assertEquals(orderItem01.getDeliveryPrice(), 0);
    assertEquals(orderItem01.getCouponPrice(), 40);
    assertEquals(orderItem01.getPointPrice(), 0);
    assertEquals(orderItem01.getPayPrice(), 160);
    // assert: SKU 2 (matched; carries the remaining discount)
    TradePriceCalculateRespBO.OrderItem orderItem02 = result.getItems().get(1);
    assertEquals(orderItem02.getSkuId(), 20L);
    assertEquals(orderItem02.getCount(), 3);
    assertEquals(orderItem02.getPrice(), 50);
    assertEquals(orderItem02.getDiscountPrice(), 0);
    assertEquals(orderItem02.getDeliveryPrice(), 0);
    assertEquals(orderItem02.getCouponPrice(), 30);
    assertEquals(orderItem02.getPointPrice(), 0);
    assertEquals(orderItem02.getPayPrice(), 120);
    // assert: SKU 3 (not matched; no coupon discount)
    TradePriceCalculateRespBO.OrderItem orderItem03 = result.getItems().get(2);
    assertEquals(orderItem03.getSkuId(), 30L);
    assertEquals(orderItem03.getCount(), 4);
    assertEquals(orderItem03.getPrice(), 30);
    assertEquals(orderItem03.getDiscountPrice(), 0);
    assertEquals(orderItem03.getCouponPrice(), 0);
    assertEquals(orderItem03.getPointPrice(), 0);
    assertEquals(orderItem03.getPayPrice(), 120);
    // assert: SKU 4 (not selected; untouched by the coupon)
    TradePriceCalculateRespBO.OrderItem orderItem04 = result.getItems().get(3);
    assertEquals(orderItem04.getSkuId(), 40L);
    assertEquals(orderItem04.getCount(), 5);
    assertEquals(orderItem04.getPrice(), 60);
    assertEquals(orderItem04.getDiscountPrice(), 0);
    assertEquals(orderItem04.getCouponPrice(), 0);
    assertEquals(orderItem04.getPointPrice(), 0);
    assertEquals(orderItem04.getPayPrice(), 300);
    // assert: promotion section
    assertEquals(result.getPromotions().size(), 1);
    TradePriceCalculateRespBO.Promotion promotion01 = result.getPromotions().get(0);
    assertEquals(promotion01.getId(), 1024L);
    assertEquals(promotion01.getName(), "程序员节");
    assertEquals(promotion01.getType(), PromotionTypeEnum.COUPON.getType());
    assertEquals(promotion01.getTotalPrice(), 350);
    assertEquals(promotion01.getDiscountPrice(), 70);
    assertTrue(promotion01.getMatch());
    assertEquals(promotion01.getDescription(), "优惠劵:省 0.70 元");
    assertEquals(promotion01.getItems().size(), 2);
    TradePriceCalculateRespBO.PromotionItem promotionItem011 = promotion01.getItems().get(0);
    assertEquals(promotionItem011.getSkuId(), 10L);
    assertEquals(promotionItem011.getTotalPrice(), 200);
    assertEquals(promotionItem011.getDiscountPrice(), 40);
    TradePriceCalculateRespBO.PromotionItem promotionItem012 = promotion01.getItems().get(1);
    assertEquals(promotionItem012.getSkuId(), 20L);
    assertEquals(promotionItem012.getTotalPrice(), 150);
    assertEquals(promotionItem012.getDiscountPrice(), 30);
}
@Override public Workspace duplicateWorkspace(Workspace workspace) { synchronized (this) { DuplicateTask duplicateTask = new DuplicateTask(workspace); Future<WorkspaceImpl> res = longTaskExecutor.execute(duplicateTask, () -> { WorkspaceImpl newWorkspace = duplicateTask.run(); // Null if cancelled if (newWorkspace != null) { newWorkspace.getLookup().lookup(WorkspaceInformationImpl.class).setName( NbBundle.getMessage(ProjectControllerImpl.class, "Workspace.duplicated.name", workspace.getName())); fireWorkspaceEvent(EventType.INITIALIZE, newWorkspace); openWorkspace(newWorkspace); } return newWorkspace; }, "", t -> handleException(workspace.getProject(), t)); try { return res.get(); } catch (InterruptedException | ExecutionException e) { throw new RuntimeException(e); } } }
@Test
public void testDuplicateWorkspace() {
    MockServices.setServices(MockController.class);
    ProjectControllerImpl pc = new ProjectControllerImpl();
    pc.addWorkspaceListener(workspaceListener);
    pc.newProject();

    // Duplicating must open and select the copy, grow the project to two
    // workspaces, fire initialize/select events, and carry over the model.
    Workspace duplicate = pc.duplicateWorkspace(pc.getCurrentWorkspace());
    Assert.assertNotNull(duplicate);
    Assert.assertTrue(duplicate.isOpen());
    Assert.assertSame(duplicate, pc.getCurrentWorkspace());
    Assert.assertEquals(2, pc.getCurrentProject().getWorkspaces().size());
    Mockito.verify(workspaceListener).initialize(duplicate);
    Mockito.verify(workspaceListener).select(duplicate);
    Assert.assertNotNull(duplicate.getLookup().lookup(MockModel.class));
}
/**
 * Returns the "numSegmentsProcessed" stat from the broker response,
 * or -1 when the field is absent.
 */
@Override
public long getNumSegmentsProcessed() {
    // Guard first: missing stat maps to the -1 sentinel.
    if (!_brokerResponse.has(NUM_SEGMENTS_PROCESSED)) {
        return -1L;
    }
    return _brokerResponse.get(NUM_SEGMENTS_PROCESSED).asLong();
}
// The stat must be read straight from the stubbed broker response (fixture value 10).
@Test
public void testGetNumSegmentsProcessed() {
    // Run the test
    final long result = _executionStatsUnderTest.getNumSegmentsProcessed();
    // Verify the results
    assertEquals(10L, result);
}
/**
 * Resolves a bundle's symbolic name to the id of its first plugin,
 * which by convention identifies the bundle's primary plugin.
 */
@Override
public String getPluginIDOfFirstPluginInBundle(String bundleSymbolicName) {
    final var bundleDescriptor = pluginRegistry.getBundleDescriptor(bundleSymbolicName);
    final var firstPlugin = bundleDescriptor.descriptors().get(0);
    return firstPlugin.id();
}
// Given a bundle containing two plugins, the service must report the id of the
// first descriptor ("plugin.1"), not any other plugin in the bundle.
@Test
void shouldGetIDOfFirstPluginInBundle() {
    final GoPluginDescriptor pluginDescriptor1 = GoPluginDescriptor.builder().id("plugin.1").build();
    final GoPluginDescriptor pluginDescriptor2 = GoPluginDescriptor.builder().id("plugin.2").build();
    final GoPluginBundleDescriptor bundleDescriptor = new GoPluginBundleDescriptor(pluginDescriptor1, pluginDescriptor2);
    when(pluginRegistry.getBundleDescriptor(bundleDescriptor.bundleSymbolicName())).thenReturn(bundleDescriptor);
    final String pluginIDOfFirstPluginInBundle = serviceDefault.getPluginIDOfFirstPluginInBundle(bundleDescriptor.bundleSymbolicName());
    assertThat(pluginIDOfFirstPluginInBundle).isEqualTo("plugin.1");
}
/**
 * Validates the database schema version on startup.
 * <p>
 * Fails hard (MessageException) when the DB is newer than this SonarQube
 * (REQUIRES_DOWNGRADE) or too old to be upgraded directly. When a regular
 * upgrade is required, only warnings are logged: once on this class's logger
 * and once, highlighted, on the dedicated startup logger.
 */
@Override
public void start() {
    DatabaseVersion.Status status = version.getStatus();
    if (status == DatabaseVersion.Status.REQUIRES_DOWNGRADE) {
        throw MessageException.of("Database was upgraded to a more recent version of SonarQube. "
            + "A backup must probably be restored or the DB settings are incorrect.");
    }
    if (status == DatabaseVersion.Status.REQUIRES_UPGRADE) {
        Optional<Long> currentVersion = this.version.getVersion();
        // Versions strictly below MIN_UPGRADE_VERSION must first move to the LTA release.
        if (currentVersion.isPresent() && currentVersion.get() < DatabaseVersion.MIN_UPGRADE_VERSION) {
            throw MessageException.of("The version of SonarQube you are trying to upgrade from is too old. Please upgrade to the "
                + MIN_UPGRADE_VERSION_HUMAN_READABLE + " Long-Term Active version first.");
        }
        String msg = "The database must be manually upgraded. Please backup the database and browse /setup. "
            + "For more information: https://docs.sonarsource.com/sonarqube/latest/setup/upgrading";
        LoggerFactory.getLogger(DatabaseServerCompatibility.class).warn(msg);
        // Repeat on the startup logger, framed by highlighter lines for visibility.
        Logger logger = LoggerFactory.getLogger(STARTUP_LOGGER_NAME);
        logger.warn(HIGHLIGHTER);
        logger.warn(msg);
        logger.warn(HIGHLIGHTER);
    }
}
// REQUIRES_UPGRADE at exactly MIN_UPGRADE_VERSION must not throw; it must emit the
// manual-upgrade warning twice (plain + highlighted block), i.e. 4 log lines total.
@Test
public void log_warning_if_requires_upgrade() {
    DatabaseVersion version = mock(DatabaseVersion.class);
    when(version.getStatus()).thenReturn(DatabaseVersion.Status.REQUIRES_UPGRADE);
    when(version.getVersion()).thenReturn(Optional.of(DatabaseVersion.MIN_UPGRADE_VERSION));
    new DatabaseServerCompatibility(version).start();
    assertThat(logTester.logs()).hasSize(4);
    assertThat(logTester.logs(Level.WARN)).contains(
        "The database must be manually upgraded. Please backup the database and browse /setup. "
            + "For more information: https://docs.sonarsource.com/sonarqube/latest/setup/upgrading",
        "################################################################################",
        "The database must be manually upgraded. Please backup the database and browse /setup. "
            + "For more information: https://docs.sonarsource.com/sonarqube/latest/setup/upgrading",
        "################################################################################");
}
/**
 * Parses two properties-format payloads and returns the per-key change set.
 * Blank content on either side is treated as an empty property set.
 *
 * @throws IOException if either payload cannot be parsed as properties
 */
@Override
public Map<String, ConfigChangeItem> doParse(String oldContent, String newContent, String type) throws IOException {
    return filterChangeData(toProperties(oldContent), toProperties(newContent));
}

/** Loads {@code content} into a fresh Properties instance; blank input yields an empty set. */
private static Properties toProperties(String content) throws IOException {
    Properties props = new Properties();
    if (StringUtils.isNotBlank(content)) {
        props.load(new StringReader(content));
    }
    return props;
}
// A key deleted between versions must surface as a change item whose old value is
// preserved and whose new value is null.
@Test
void testRemoveKey() throws IOException {
    Map<String, ConfigChangeItem> map = parser.doParse("app.name = nacos", "", type);
    assertEquals("nacos", map.get("app.name").getOldValue());
    assertNull(map.get("app.name").getNewValue());
}
/**
 * "server" CLI command: with no argument, prints the current server address;
 * with one argument, repoints the REST client at it, resets the CLI state for
 * the new server and validates connectivity.
 */
@Override
public void execute(final List<String> args, final PrintWriter terminal) {
    CliCmdUtil.ensureArgCountBounds(args, 0, 1, HELP);
    // Query form: just report the current address and stop.
    if (args.isEmpty()) {
        terminal.println(restClient.getServerAddress());
        return;
    }
    // Set form: switch address, announce, reset, then sanity-check the client.
    final String serverAddress = args.get(0);
    restClient.setServerAddress(serverAddress);
    terminal.println("Server now: " + serverAddress);
    resetCliForNewServer.fire();
    validateClient(terminal, restClient);
}
// If the REST client rejects the new server address, the exception must propagate
// out of execute() unchanged.
@Test(expected = KsqlRestClientException.class)
public void shouldThrowIfRestClientThrowsOnSet() {
    // Given:
    doThrow(new KsqlRestClientException("Boom")).when(restClient).setServerAddress("localhost:8088");
    // When:
    command.execute(ImmutableList.of("localhost:8088"), terminal);
}
/**
 * Creates a {@link JibContainerBuilder} from a base-image reference.
 * <p>
 * The reference scheme selects the source: {@code docker://} uses the local
 * Docker daemon, {@code tar://} a tarball on disk, anything else (optionally
 * prefixed {@code registry://}) a remote registry with credential retrievers
 * wired from the CLI options.
 *
 * @param baseImageReference scheme-prefixed (or bare registry) image reference
 * @param platforms target platforms; only applied when non-empty
 * @param commonCliOptions source of registry credentials
 * @param logger sink for credential-retrieval log events
 * @return a builder rooted at the requested base image
 * @throws InvalidImageReferenceException if the registry reference cannot be parsed
 * @throws FileNotFoundException if a credential helper/file is missing
 */
public static JibContainerBuilder create(
    String baseImageReference,
    Set<Platform> platforms,
    CommonCliOptions commonCliOptions,
    ConsoleLogger logger)
    throws InvalidImageReferenceException, FileNotFoundException {
    if (baseImageReference.startsWith(DOCKER_DAEMON_IMAGE_PREFIX)) {
        return Jib.from(
            DockerDaemonImage.named(baseImageReference.replaceFirst(DOCKER_DAEMON_IMAGE_PREFIX, "")));
    }
    if (baseImageReference.startsWith(TAR_IMAGE_PREFIX)) {
        return Jib.from(
            TarImage.at(Paths.get(baseImageReference.replaceFirst(TAR_IMAGE_PREFIX, ""))));
    }
    // Registry path: strip the optional prefix, then attach credential retrievers.
    ImageReference imageReference =
        ImageReference.parse(baseImageReference.replaceFirst(REGISTRY_IMAGE_PREFIX, ""));
    RegistryImage registryImage = RegistryImage.named(imageReference);
    DefaultCredentialRetrievers defaultCredentialRetrievers =
        DefaultCredentialRetrievers.init(
            CredentialRetrieverFactory.forImage(
                imageReference,
                logEvent -> logger.log(logEvent.getLevel(), logEvent.getMessage())));
    Credentials.getFromCredentialRetrievers(commonCliOptions, defaultCredentialRetrievers)
        .forEach(registryImage::addCredentialRetriever);
    JibContainerBuilder containerBuilder = Jib.from(registryImage);
    // Only override the builder's default platform set when the caller specified any.
    if (!platforms.isEmpty()) {
        containerBuilder.setPlatforms(platforms);
    }
    return containerBuilder;
}
// A "docker://" reference must configure a Docker-daemon base image: the docker
// client is set, no tar path, and the prefix is stripped from the image name.
@Test
public void testCreate_dockerBaseImage()
    throws IOException, InvalidImageReferenceException, CacheDirectoryCreationException {
    JibContainerBuilder containerBuilder =
        ContainerBuilders.create(
            "docker://docker-image-ref", Collections.emptySet(), mockCommonCliOptions, mockLogger);
    BuildContext buildContext =
        JibContainerBuilderTestHelper.toBuildContext(
            containerBuilder, Containerizer.to(RegistryImage.named("ignored")));
    ImageConfiguration imageConfiguration = buildContext.getBaseImageConfiguration();
    assertThat(imageConfiguration.getImage().toString()).isEqualTo("docker-image-ref");
    assertThat(imageConfiguration.getDockerClient().isPresent()).isTrue();
    assertThat(imageConfiguration.getTarPath().isPresent()).isFalse();
}
/**
 * Checks whether the current GitHub user may scan the repository backing the
 * DevOps project, either directly or through one of their teams.
 * <p>
 * Requires an auth-app installation token; the project full name is expected as
 * {@code organization/repository}.
 *
 * @return true if the user holds scan permission directly or via a group
 */
@Override
public boolean isScanAllowedUsingPermissionsFromDevopsPlatform() {
    checkState(authAppInstallationToken != null, "An auth app token is required in case repository permissions checking is necessary.");
    String[] orgaAndRepoTokenified = devOpsProjectCreationContext.fullName().split("/");
    String organization = orgaAndRepoTokenified[0];
    String repository = orgaAndRepoTokenified[1];
    // FIX: the DbSession was previously opened inline and never closed (leak);
    // try-with-resources guarantees it is released once the mappings are loaded.
    Set<DevOpsPermissionsMappingDto> permissionsMappingDtos;
    try (var dbSession = dbClient.openSession(false)) {
        permissionsMappingDtos = dbClient.githubPermissionsMappingDao()
            .findAll(dbSession, devOpsPlatformSettings.getDevOpsPlatform());
    }
    // Direct repository permission wins; otherwise fall back to team membership.
    if (doesUserHaveScanPermission(organization, repository, permissionsMappingDtos)) {
        return true;
    }
    return doesUserBelongToAGroupWithScanPermission(organization, repository, permissionsMappingDtos);
}
// A user with no GitHub identity must never be granted scan permission.
@Test
void isScanAllowedUsingPermissionsFromDevopsPlatform_whenUserIsNotAGitHubUser_returnsFalse() {
    assertThat(githubProjectCreator.isScanAllowedUsingPermissionsFromDevopsPlatform()).isFalse();
}
/**
 * Returns the boolean preference for the given key resource, seeded with the
 * default value resolved from the given boolean resource.
 */
public Preference<Boolean> getBoolean(@StringRes int prefKey, @BoolRes int defaultValue) {
    // Resolve both resource ids up front, then delegate to RxSharedPreferences.
    final String key = mResources.getString(prefKey);
    final boolean fallback = mResources.getBoolean(defaultValue);
    return mRxSharedPreferences.getBoolean(key, fallback);
}
// Boolean preferences must report the resource default, and both get() and the
// observable stream must track subsequent writes to the underlying prefs.
@Test
public void testBooleanHappyPath() {
    RxSharedPrefs impl = new RxSharedPrefs(getApplicationContext(), this::testRestoreFunction);
    final Preference<Boolean> preference =
        impl.getBoolean(R.string.pref_test_key, R.bool.pref_test_value);
    Assert.assertTrue(preference.get());
    final AtomicReference<Boolean> observedValue = new AtomicReference<>(null);
    mCompositeDisposable.add(preference.asObservable().subscribe(observedValue::set));
    Assert.assertTrue(observedValue.get());
    // Flipping the stored value must be visible through both access paths.
    SharedPrefsHelper.setPrefsValue(R.string.pref_test_key, false);
    Assert.assertFalse(preference.get());
    Assert.assertFalse(observedValue.get());
}
/**
 * Builds a PaginationContext from a LIMIT segment. Either the offset or the
 * row count may be absent; absent parts are passed through as null.
 */
public PaginationContext createPaginationContext(final LimitSegment limitSegment, final List<Object> params) {
    final var offsetSegment = limitSegment.getOffset().orElse(null);
    final var rowCountSegment = limitSegment.getRowCount().orElse(null);
    return new PaginationContext(offsetSegment, rowCountSegment, params);
}
// A LIMIT with numeric-literal offset and row count must yield a context that
// reports pagination as present.
@Test
void assertPaginationContextCreatedProperlyWhenPaginationValueSegmentIsNumberLiteralPaginationValueSegment() {
    LimitSegment limitSegment = new LimitSegment(0, 10,
        new NumberLiteralLimitValueSegment(0, 10, 1L), new NumberLiteralLimitValueSegment(10, 20, 2L));
    PaginationContext paginationContext = new LimitPaginationContextEngine().createPaginationContext(limitSegment, Collections.emptyList());
    assertTrue(paginationContext.isHasPagination());
}
/**
 * Resolves a JSON-schema node to the Java type it should generate.
 * <p>
 * Precedence: explicit object schemas, then "existingJavaType", then the JSON
 * "type" keyword (string/number/integer/boolean/array), falling back to Object.
 * Unless an explicit Java type was requested, "format" and (for strings)
 * "media" may further refine the result.
 */
@Override
public JType apply(String nodeName, JsonNode node, JsonNode parent, JClassContainer jClassContainer, Schema schema) {
    String propertyTypeName = getTypeName(node);
    JType type;
    // "object", explicit or implied by a non-empty "properties" node.
    if (propertyTypeName.equals("object") || node.has("properties") && node.path("properties").size() > 0) {
        type = ruleFactory.getObjectRule().apply(nodeName, node, parent, jClassContainer.getPackage(), schema);
    } else if (node.has("existingJavaType")) {
        // An explicitly requested Java type wins over the JSON "type" keyword.
        String typeName = node.path("existingJavaType").asText();
        if (isPrimitive(typeName, jClassContainer.owner())) {
            type = primitiveType(typeName, jClassContainer.owner());
        } else {
            type = resolveType(jClassContainer, typeName);
        }
    } else if (propertyTypeName.equals("string")) {
        type = jClassContainer.owner().ref(String.class);
    } else if (propertyTypeName.equals("number")) {
        type = getNumberType(jClassContainer.owner(), ruleFactory.getGenerationConfig());
    } else if (propertyTypeName.equals("integer")) {
        type = getIntegerType(jClassContainer.owner(), node, ruleFactory.getGenerationConfig());
    } else if (propertyTypeName.equals("boolean")) {
        type = unboxIfNecessary(jClassContainer.owner().ref(Boolean.class), ruleFactory.getGenerationConfig());
    } else if (propertyTypeName.equals("array")) {
        type = ruleFactory.getArrayRule().apply(nodeName, node, parent, jClassContainer.getPackage(), schema);
    } else {
        // Unknown or absent type keyword falls back to java.lang.Object.
        type = jClassContainer.owner().ref(Object.class);
    }
    // Refinement applies only when no explicit Java type was requested.
    if (!node.has("javaType") && !node.has("existingJavaType") && node.has("format")) {
        type = ruleFactory.getFormatRule().apply(nodeName, node.get("format"), node, type, schema);
    } else if (!node.has("javaType") && !node.has("existingJavaType") && propertyTypeName.equals("string") && node.has("media")) {
        type = ruleFactory.getMediaRule().apply(nodeName, node.get("media"), node, type, schema);
    }
    return type;
}
// When both flags are enabled, isUseBigIntegers must take precedence over
// isUseLongIntegers for "integer" schema nodes.
@Test
public void applyGeneratesBigIntegerOverridingLong() {
    JPackage jpackage = new JCodeModel()._package(getClass().getPackage().getName());
    ObjectNode objectNode = new ObjectMapper().createObjectNode();
    objectNode.put("type", "integer");
    // isUseBigIntegers should override isUseLongIntegers
    when(config.isUseBigIntegers()).thenReturn(true);
    when(config.isUseLongIntegers()).thenReturn(true);
    JType result = rule.apply("fooBar", objectNode, null, jpackage, null);
    assertThat(result.fullName(), is(BigInteger.class.getName()));
}
/**
 * Returns the source identifier this event data was captured from.
 */
@Override
public String getSource() {
    return source;
}
// getSource must reflect only the source attribute: identical across instances that
// differ in partition id or event payload, different for a distinct source.
@Test
public void testGetSource() {
    assertEquals("source", batchEventData.getSource());
    assertEquals("source", batchEventDataSameAttribute.getSource());
    assertEquals("otherSource", batchEventDataOtherSource.getSource());
    assertEquals("source", batchEventDataOtherPartitionId.getSource());
    assertEquals("source", batchEventDataOtherEvent.getSource());
    assertEquals("source", batchEventDataNoEvent.getSource());
}
/**
 * Computes the list of changes turning {@code source} into {@code target}
 * using the Myers diff algorithm.
 *
 * @param source   original sequence, never null
 * @param target   revised sequence, never null
 * @param progress optional listener notified of start/end (may be null)
 * @return the ordered change list
 */
@Override
public List<Change> computeDiff(final List<T> source, final List<T> target, DiffAlgorithmListener progress) {
    Objects.requireNonNull(source, "source list must not be null");
    Objects.requireNonNull(target, "target list must not be null");
    if (progress != null) {
        progress.diffStart();
    }
    // Build the Myers edit path, then translate it into concrete Change entries.
    final PathNode editPath = buildPath(source, target, progress);
    final List<Change> changes = buildRevision(editPath, source, target);
    if (progress != null) {
        progress.diffEnd();
    }
    return changes;
}
// Myers' canonical example must produce the expected 4-delta patch, and the
// listener must see exactly start + 6 steps + end (8 callbacks).
@Test
public void testDiffMyersExample1ForwardWithListener() {
    List<String> original = Arrays.asList("A", "B", "C", "A", "B", "B", "A");
    List<String> revised = Arrays.asList("C", "B", "A", "B", "A", "C");
    List<String> logdata = new ArrayList<>();
    final Patch<String> patch = Patch.generate(original, revised,
        new MyersDiff<String>().computeDiff(original, revised, new DiffAlgorithmListener() {
            @Override
            public void diffStart() {
                logdata.add("start");
            }
            @Override
            public void diffStep(int value, int max) {
                logdata.add(value + " - " + max);
            }
            @Override
            public void diffEnd() {
                logdata.add("end");
            }
        }));
    assertNotNull(patch);
    assertEquals(4, patch.getDeltas().size());
    assertEquals("Patch{deltas=[[DeleteDelta, position: 0, lines: [A, B]], [InsertDelta, position: 3, lines: [B]], [DeleteDelta, position: 5, lines: [B]], [InsertDelta, position: 7, lines: [C]]]}", patch.toString());
    System.out.println(logdata);
    assertEquals(8, logdata.size());
}
/**
 * Formats a "changes on my issues" notification into an email.
 * <p>
 * Returns null for unrelated notification types. Analysis-triggered changes
 * (single project, non-empty issue set) get the single-project layout; all
 * other changes get the multi-project layout.
 */
@Override
@CheckForNull
public EmailMessage format(Notification notif) {
    if (!(notif instanceof ChangesOnMyIssuesNotification)) {
        return null;
    }
    ChangesOnMyIssuesNotification notification = (ChangesOnMyIssuesNotification) notif;
    if (notification.getChange() instanceof AnalysisChange) {
        // An analysis change always concerns exactly one project, so any issue's
        // project key can stand in for the whole notification.
        checkState(!notification.getChangedIssues().isEmpty(), "changedIssues can't be empty");
        return formatAnalysisNotification(notification.getChangedIssues().keySet().iterator().next(), notification);
    }
    return formatMultiProject(notification);
}
// Analysis-change emails must group issues into a "Closed issue" section (single
// closed status) and an "Open issues" section covering every other status, each
// section carrying the right links.
@Test
public void format_set_html_message_with_issues_grouped_by_status_closed_or_any_other_when_change_from_analysis() {
    Project project = newProject("foo");
    Rule rule = newRandomNotAHotspotRule("bar");
    // One issue per known status, all on the same project and rule.
    Set<ChangedIssue> changedIssues = Arrays.stream(ISSUE_STATUSES)
        .map(status -> newChangedIssue(status + "", status, project, rule))
        .collect(toSet());
    AnalysisChange analysisChange = newAnalysisChange();
    EmailMessage emailMessage = underTest.format(new ChangesOnMyIssuesNotification(analysisChange, changedIssues));
    HtmlListAssert htmlListAssert = HtmlFragmentAssert.assertThat(emailMessage.getMessage())
        .hasParagraph().hasParagraph() // skip header
        .hasParagraph("Closed issue:")
        .withoutLink()
        .hasList("Rule " + rule.getName() + " - See the single issue")
        .withLinkOn("See the single issue")
        .hasParagraph("Open issues:")
        .withoutLink()
        .hasList("Rule " + rule.getName() + " - See all " + (ISSUE_STATUSES.length - 1) + " issues")
        .withLinkOn("See all " + (ISSUE_STATUSES.length - 1) + " issues");
    verifyEnd(htmlListAssert);
}
/**
 * Deletes counter files older than the obsolete-stats window (plus one day of
 * slack) for the given application.
 *
 * @param application application whose stored counters are purged
 * @return total size in bytes of the .ser.gz files that remain after the purge
 *         (i.e. files still current, plus any obsolete file that failed to delete)
 */
static long deleteObsoleteCounterFiles(String application) {
    // Cut-off timestamp: now minus the obsolete-stats window, minus one extra day.
    final Calendar nowMinusOneYearAndADay = Calendar.getInstance();
    nowMinusOneYearAndADay.add(Calendar.DAY_OF_YEAR, -getObsoleteStatsDays());
    nowMinusOneYearAndADay.add(Calendar.DAY_OF_YEAR, -1);
    // Only .ser.gz files are listed, to avoid instantiating needless File objects.
    long diskUsage = 0;
    for (final File file : listSerGzFiles(application)) {
        boolean deleted = false;
        if (file.lastModified() < nowMinusOneYearAndADay.getTimeInMillis()) {
            deleted = file.delete();
        }
        // Anything not deleted (current, or deletion failed) counts toward usage.
        if (!deleted) {
            diskUsage += file.length();
        }
    }
    // NOTE(review): the original (French) comment claimed a boolean result; the
    // method actually returns the remaining disk usage in bytes, as documented above.
    return diskUsage;
}
// A counter file backdated beyond the retention window must be deleted by the
// purge, while a recent file survives; a second run with a 1-day window is then
// exercised for coverage.
@Test
public void testDeleteObsoleteCounterFiles() throws IOException {
    final Counter counter = new Counter("http", null);
    counter.setApplication("test counter");
    final File storageDir = Parameters.getStorageDirectory(counter.getApplication());
    final File obsoleteFile = new File(storageDir, "obsolete.ser.gz");
    final File notObsoleteFile = new File(storageDir, "notobsolete.ser.gz");
    checkSetup(storageDir, obsoleteFile, notObsoleteFile);
    // Backdate the obsolete file to just past the default one-year(+slack) cut-off.
    final Calendar nowMinus1YearAnd2Days = Calendar.getInstance();
    nowMinus1YearAnd2Days.add(Calendar.YEAR, -1);
    nowMinus1YearAnd2Days.add(Calendar.DAY_OF_YEAR, -2);
    if (!obsoleteFile.setLastModified(nowMinus1YearAnd2Days.getTimeInMillis())) {
        fail("setLastModified");
    }
    CounterStorage.deleteObsoleteCounterFiles(counter.getApplication());
    // The backdated file must have been deleted.
    if (obsoleteFile.exists()) {
        fail("obsolete file still exists");
    }
    if (!notObsoleteFile.delete()) {
        notObsoleteFile.deleteOnExit();
    }
    // Rerun with a 1-day retention window configured.
    Utils.setProperty(Parameter.OBSOLETE_STATS_DAYS, "1");
    CounterStorage.deleteObsoleteCounterFiles(counter.getApplication());
}
/**
 * Returns the email recipients subscribed to the given dispatcher for the given
 * project, filtered down to subscribers authorized on that project.
 *
 * @throws NullPointerException if projectKey fails verification
 */
@Override
public Set<EmailRecipient> findSubscribedEmailRecipients(String dispatcherKey, String projectKey, SubscriberPermissionsOnProject subscriberPermissionsOnProject) {
    verifyProjectKey(projectKey);
    try (DbSession dbSession = dbClient.openSession(false)) {
        // Load every subscriber of this dispatcher on the email channel...
        Set<EmailSubscriberDto> subscribers = dbClient.propertiesDao().findEmailSubscribersForNotification(
            dbSession, dispatcherKey, EmailNotificationChannel.class.getSimpleName(), projectKey);
        // ...then keep only those holding the required permission on the project.
        return keepAuthorizedEmailSubscribers(dbSession, projectKey, subscriberPermissionsOnProject, subscribers);
    }
}
// The logins-based overload must reject a null logins collection with an NPE
// carrying an explicit message.
@Test
public void findSubscribedEmailRecipients_with_logins_fails_with_NPE_if_logins_is_null() {
    String dispatcherKey = randomAlphabetic(12);
    String projectKey = randomAlphabetic(6);
    assertThatThrownBy(() -> underTest.findSubscribedEmailRecipients(dispatcherKey, projectKey, null, ALL_MUST_HAVE_ROLE_USER))
        .isInstanceOf(NullPointerException.class)
        .hasMessage("logins can't be null");
}
/**
 * Appends a dissected string event to {@code builder}: the standard log header
 * followed by ": " and the ASCII payload stored after the header in the buffer.
 */
static void dissectString(
    final DriverEventCode code,
    final MutableDirectBuffer buffer,
    final int offset,
    final StringBuilder builder) {
    final int headerLength = dissectLogHeader(CONTEXT, code, buffer, offset, builder);
    final String payload = buffer.getStringAscii(offset + headerLength, LITTLE_ENDIAN);
    builder.append(": ").append(payload);
}
// The dissected output must combine the formatted header (timestamp, context,
// event name, sizes) with the ASCII payload written after the header.
@Test
void dissectString() {
    internalEncodeLogHeader(buffer, 0, 1, 1, () -> 1_100_000_000L);
    buffer.putStringAscii(LOG_HEADER_LENGTH, "Hello, World!");
    DriverEventDissector.dissectString(CMD_IN_CLIENT_CLOSE, buffer, 0, builder);
    assertEquals("[1.100000000] " + CONTEXT + ": " + CMD_IN_CLIENT_CLOSE.name() + " [1/1]: Hello, World!",
        builder.toString());
}
/**
 * Checks whether a newer version of the tool is available.
 * <p>
 * At most one check per day: a timestamp file in {@code configDir} records the
 * last attempt and short-circuits the network call. The timestamp is written
 * atomically (temp file + move). Any failure is logged at debug level and
 * swallowed, so the update check can never break the caller.
 *
 * @param configDir directory holding the last-update-check timestamp file
 * @param currentVersion the running tool version
 * @param versionUrl endpoint serving the latest-version JSON
 * @param toolName reported in the User-Agent header
 * @param log sink for debug log events
 * @return the latest version string if it differs from {@code currentVersion},
 *         empty otherwise (up to date, checked recently, or check failed)
 */
@VisibleForTesting
static Optional<String> performUpdateCheck(
    Path configDir,
    String currentVersion,
    String versionUrl,
    String toolName,
    Consumer<LogEvent> log) {
    Path lastUpdateCheck = configDir.resolve(LAST_UPDATE_CHECK_FILENAME);
    try {
        // Check time of last update check
        if (Files.exists(lastUpdateCheck)) {
            try {
                String fileContents =
                    new String(Files.readAllBytes(lastUpdateCheck), StandardCharsets.UTF_8);
                Instant modifiedTime = Instant.parse(fileContents);
                // Checked within the last 24h: skip the network round-trip entirely.
                if (modifiedTime.plus(Duration.ofDays(1)).isAfter(Instant.now())) {
                    return Optional.empty();
                }
            } catch (DateTimeParseException | IOException ex) {
                // If reading update time failed, file might be corrupt, so delete it
                log.accept(LogEvent.debug("Failed to read lastUpdateCheck; " + ex.getMessage()));
                Files.delete(lastUpdateCheck);
            }
        }
        // Check for update
        FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});
        try {
            Response response =
                httpClient.get(
                    new URL(versionUrl),
                    Request.builder()
                        .setHttpTimeout(3000)
                        .setUserAgent("jib " + currentVersion + " " + toolName)
                        .build());
            VersionJsonTemplate version =
                JsonTemplateMapper.readJson(response.getBody(), VersionJsonTemplate.class);
            // Record the check time atomically: write a temp file, then move over.
            Path lastUpdateCheckTemp =
                Files.createTempFile(configDir, LAST_UPDATE_CHECK_FILENAME, null);
            lastUpdateCheckTemp.toFile().deleteOnExit();
            Files.write(lastUpdateCheckTemp, Instant.now().toString().getBytes(StandardCharsets.UTF_8));
            Files.move(lastUpdateCheckTemp, lastUpdateCheck, StandardCopyOption.REPLACE_EXISTING);
            if (currentVersion.equals(version.latest)) {
                return Optional.empty();
            }
            return Optional.of(version.latest);
        } finally {
            httpClient.shutDown();
        }
    } catch (IOException ex) {
        // Update checks are best-effort; never propagate failures to the caller.
        log.accept(LogEvent.debug("Update check failed; " + ex.getMessage()));
    }
    return Optional.empty();
}
// A check performed within the last 24 hours must be skipped entirely: no message
// is returned and the timestamp file is left untouched.
@Test
public void testPerformUpdateCheck_lastUpdateCheckTooSoon() throws IOException {
    FileTime modifiedTime = FileTime.from(Instant.now().minusSeconds(12));
    setupLastUpdateCheck();
    Files.write(
        configDir.resolve("lastUpdateCheck"),
        modifiedTime.toString().getBytes(StandardCharsets.UTF_8));
    Optional<String> message =
        UpdateChecker.performUpdateCheck(
            configDir, "1.0.2", testWebServer.getEndpoint(), "tool-name", ignored -> {});
    assertThat(message).isEmpty();
    // lastUpdateCheck should not have changed
    String lastUpdateTime =
        new String(
            Files.readAllBytes(configDir.resolve("lastUpdateCheck")), StandardCharsets.UTF_8);
    assertThat(modifiedTime.toInstant()).isEqualTo(Instant.parse(lastUpdateTime));
}
/**
 * Returns true when the table's instance assignment is pool based AND replica-group
 * based for every configured assignment. A table with no instance assignment map at
 * all is not considered pool/replica-group based; an empty map vacuously qualifies
 * (matching the historical behavior).
 */
static boolean isTableUsingInstancePoolAndReplicaGroup(@Nonnull TableConfig tableConfig) {
    Map<String, InstanceAssignmentConfig> instanceAssignmentConfigMap = tableConfig.getInstanceAssignmentConfigMap();
    if (instanceAssignmentConfigMap == null) {
        return false;
    }
    for (InstanceAssignmentConfig instanceAssignmentConfig : instanceAssignmentConfigMap.values()) {
        // Any missing or non-conforming assignment disqualifies the whole table.
        if (instanceAssignmentConfig == null
            || !instanceAssignmentConfig.getTagPoolConfig().isPoolBased()
            || !instanceAssignmentConfig.getReplicaGroupPartitionConfig().isReplicaGroupBased()) {
            return false;
        }
    }
    return true;
}
// A realtime table whose CONSUMING assignment is both pool based and replica-group
// based must be recognized as using instance pool + replica group.
@Test
public void testValidIGnRGRealtimeTable() {
    InstanceAssignmentConfig config =
        new InstanceAssignmentConfig(new InstanceTagPoolConfig("DefaultTenant", true, 0, null), null,
            new InstanceReplicaGroupPartitionConfig(true, 0, 0, 0, 0, 0, false, null), null, false);
    TableConfig tableConfig =
        new TableConfig("table", TableType.REALTIME.name(), new SegmentsValidationAndRetentionConfig(),
            new TenantConfig("DefaultTenant", "DefaultTenant", null), new IndexingConfig(), new TableCustomConfig(null),
            null, null, null, null, Map.of("CONSUMING", config), null, null, null, null, null, null, false, null, null,
            null);
    Assert.assertTrue(TableConfigUtils.isTableUsingInstancePoolAndReplicaGroup(tableConfig));
}
/**
 * Starts a new scoped span with the given name, parented on whatever trace
 * context is currently in scope (a root span when none is).
 */
public ScopedSpan startScopedSpan(String name) {
    return startScopedSpanWithParent(name, currentTraceContext.get());
}
// Both overloads must produce a real root span that is in scope while open.
@Test
void startScopedSpan_isInScope() {
    assertRealRoot(tracer.startScopedSpan("foo"));
    assertRealRoot(tracer.startScopedSpan("foo", deferDecision(), false));
}
/**
 * Adds a CV certificate to the store after validating it.
 * <p>
 * Duplicates (same issuer + subject) are rejected. The very first CVCA
 * certificate for a document type is self-verified against its own public key
 * (trust must then be flagged manually); every later certificate is verified
 * against the existing chain, and AT certificates additionally have their
 * public key checked.
 *
 * @param cert the certificate to add
 * @return the persisted entity
 * @throws ClientException if an issuer/subject duplicate already exists
 */
public Certificate add(CvCertificate cert) {
    final Certificate db = Certificate.from(cert);
    if (repository.countByIssuerAndSubject(db.getIssuer(), db.getSubject()) > 0) {
        throw new ClientException(String.format(
            "Certificate of subject %s and issuer %s already exists", db.getSubject(), db.getIssuer()));
    }
    // Special case for first CVCA certificate for this document type
    if (db.getType() == Certificate.Type.CVCA
        && repository.countByDocumentTypeAndType(db.getDocumentType(), db.getType()) == 0) {
        // Self-signed trust anchor: verify against its own key, trust is set manually.
        signatureService.verify(cert, cert.getBody().getPublicKey(), cert.getBody().getPublicKey().getParams());
        logger.warn("Added first CVCA certificate for {}, set trusted flag manually", db.getDocumentType());
    } else {
        verify(cert);
        if (db.getType() == Certificate.Type.AT) {
            verifyPublicKey(cert);
        }
    }
    return repository.saveAndFlush(db);
}
// Adding an AT certificate must look up the HSM key info for the AT slot and
// succeed when the HSM public key matches the certificate's key.
@Test
public void shouldCheckIfPublicKeyExistAndIsEqualIfWhenAddingAT() throws Exception {
    final HsmClient.KeyInfo keyInfo = new HsmClient.KeyInfo();
    keyInfo.setPublicKey(Hex.decode("04"
        + "SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS"
        + "SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS"
    ));
    Mockito.doReturn(keyInfo).when(hsmClient).keyInfo(Mockito.eq("AT"), Mockito.eq("SSSSSSSSSSSSSSSS"));
    // Seed the trust chain: CVCA (trusted) and DVCA, then add the AT certificate.
    certificateRepo.save(loadCvCertificate("rdw/acc/cvca.cvcert", true));
    certificateRepo.save(loadCvCertificate("rdw/acc/dvca.cvcert", false));
    assertDoesNotThrow(() -> service.add(readCvCertificate("rdw/acc/at001.cvcert")));
}
/**
 * Validates a config publish request: dataId/group must pass the key checks and
 * the content must be non-blank.
 *
 * @throws NacosException CLIENT_INVALID_PARAM when any part is invalid
 */
public static void checkParam(String dataId, String group, String content) throws NacosException {
    checkKeyParam(dataId, group);
    if (!StringUtils.isBlank(content)) {
        return;
    }
    throw new NacosException(NacosException.CLIENT_INVALID_PARAM, CONTENT_INVALID_MSG);
}
// Empty content must be rejected by the four-argument overload with a
// NacosException whose message mentions the invalid content.
@Test
void testCheckParam2Fail() throws NacosException {
    Throwable exception = assertThrows(NacosException.class, () -> {
        String dataId = "b";
        String group = "c";
        String datumId = "d";
        String content = "";
        ParamUtils.checkParam(dataId, group, datumId, content);
    });
    assertTrue(exception.getMessage().contains("content invalid"));
}
/**
 * Accepts a file only when the parent filter accepts it AND the jar is not on
 * the exclusion list.
 */
@Override
public boolean accept(File pathname) {
    if (!super.accept(pathname)) {
        return false;
    }
    return !isExcludedJar(pathname);
}
// The analyzer, once prepared and enabled, must accept both .jar and .war files.
@Test
public void testAcceptSupportedExtensions() throws Exception {
    JarAnalyzer instance = new JarAnalyzer();
    instance.initialize(getSettings());
    instance.prepare(null);
    instance.setEnabled(true);
    String[] files = {"test.jar", "test.war"};
    for (String name : files) {
        assertTrue(name, instance.accept(new File(name)));
    }
}
/**
 * Returns the configured value for {@code key} as a list of strings.
 * <p>
 * The cast is unchecked by necessity: values are stored type-erased, and the
 * ConfigDef parser guarantees LIST-typed keys are stored as {@code List<String>}.
 */
@SuppressWarnings("unchecked")
public List<String> getList(String key) {
    return (List<String>) get(key);
}
// LIST-typed configs must parse: absent key -> default (empty), empty string ->
// empty list, comma-separated string -> element list.
@Test
public void testEmptyList() {
    AbstractConfig conf;
    ConfigDef configDef = new ConfigDef().define("a", Type.LIST, "", new ConfigDef.NonNullValidator(), Importance.HIGH, "doc");
    conf = new AbstractConfig(configDef, Collections.emptyMap());
    assertEquals(Collections.emptyList(), conf.getList("a"));
    conf = new AbstractConfig(configDef, Collections.singletonMap("a", ""));
    assertEquals(Collections.emptyList(), conf.getList("a"));
    conf = new AbstractConfig(configDef, Collections.singletonMap("a", "b,c,d"));
    assertEquals(Arrays.asList("b", "c", "d"), conf.getList("a"));
}
/**
 * SQL function url_decode: unescapes a URL-encoded varchar using UTF-8.
 * Malformed percent-escapes surface as INVALID_FUNCTION_ARGUMENT.
 */
@Description("unescape a URL-encoded string")
@ScalarFunction
@LiteralParameters("x")
@SqlType("varchar(x)")
public static Slice urlDecode(@SqlType("varchar(x)") Slice value) {
    try {
        return slice(URLDecoder.decode(value.toStringUtf8(), UTF_8.name()));
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is guaranteed by the JVM spec; this branch is unreachable.
        throw new AssertionError(e);
    } catch (IllegalArgumentException e) {
        // Bad escape sequence in the input: report as a user error, not a bug.
        throw new PrestoException(INVALID_FUNCTION_ARGUMENT, e);
    }
}
// url_decode must handle plain text, reserved characters, multi-byte UTF-8 and
// '+'-encoded spaces; the return type tracks the input's varchar length.
@Test
public void testUrlDecode() {
    String[][] inputOutputPairs = {
        {"http%3A%2F%2Ftest", "http://test"},
        {"http%3A%2F%2Ftest%3Fa%3Db%26c%3Dd", "http://test?a=b&c=d"},
        {"http%3A%2F%2F%E3%83%86%E3%82%B9%E3%83%88", "http://\u30c6\u30b9\u30c8"},
        {"%7E%40%3A.-*_%2B+%E2%98%83", "~@:.-*_+ \u2603"},
        {"test", "test"},
    };
    for (String[] inputOutputPair : inputOutputPairs) {
        String input = inputOutputPair[0];
        String output = inputOutputPair[1];
        assertFunction("url_decode('" + input + "')", createVarcharType(input.length()), output);
    }
}
/**
 * Creates a new product from the validated payload.
 * <p>
 * Validation errors are rethrown as BindException (wrapping the BindingResult
 * if needed). On success returns 201 Created with a Location header pointing
 * at the new product and the product as the body.
 */
@PostMapping
@Operation(
        security = @SecurityRequirement(name = "keycloak"),
        requestBody = @io.swagger.v3.oas.annotations.parameters.RequestBody(
                content = @Content(
                        mediaType = MediaType.APPLICATION_JSON_VALUE,
                        schema = @Schema(
                                type = "object",
                                properties = {
                                        @StringToClassMapItem(key = "title", value = String.class),
                                        @StringToClassMapItem(key = "details", value = String.class)
                                }
                        )
                )
        ),
        responses = {
                @ApiResponse(
                        responseCode = "201",
                        headers = @Header(name = "Content-Type", description = "Тип данных"),
                        content = {
                                @Content(
                                        mediaType = MediaType.APPLICATION_JSON_VALUE,
                                        schema = @Schema(
                                                type = "object",
                                                properties = {
                                                        @StringToClassMapItem(key = "id", value = Integer.class),
                                                        @StringToClassMapItem(key = "title", value = String.class),
                                                        @StringToClassMapItem(key = "details", value = String.class)
                                                }
                                        )
                                )
                        }
                )
        })
public ResponseEntity<?> createProduct(@Valid @RequestBody NewProductPayload payload,
                                       BindingResult bindingResult,
                                       UriComponentsBuilder uriComponentsBuilder)
        throws BindException {
    if (bindingResult.hasErrors()) {
        // Rethrow as-is when the result already is a BindException; otherwise wrap it
        // so callers always see a uniform exception type.
        if (bindingResult instanceof BindException exception) {
            throw exception;
        } else {
            throw new BindException(bindingResult);
        }
    } else {
        Product product = this.productService.createProduct(payload.title(), payload.details());
        // 201 + Location: /catalogue-api/products/{productId}, body = created product.
        return ResponseEntity
                .created(uriComponentsBuilder
                        .replacePath("/catalogue-api/products/{productId}")
                        .build(Map.of("productId", product.getId())))
                .body(product);
    }
}
// An invalid payload must surface as a BindException carrying the field errors,
// and the service layer must never be invoked.
@Test
void createProduct_RequestIsInvalid_ReturnsBadRequest() {
    // given
    var payload = new NewProductPayload("   ", null);
    var bindingResult = new MapBindingResult(Map.of(), "payload");
    bindingResult.addError(new FieldError("payload", "title", "error"));
    var uriComponentsBuilder = UriComponentsBuilder.fromUriString("http://localhost");
    // when
    var exception = assertThrows(BindException.class,
        () -> this.controller.createProduct(payload, bindingResult, uriComponentsBuilder));
    // then
    assertEquals(List.of(new FieldError("payload", "title", "error")), exception.getAllErrors());
    verifyNoInteractions(this.productService);
}