focal_method: string (length 13 – 60.9k characters)
test_case: string (length 25 – 109k characters)
/**
 * Left-pads {@code input} with {@code padding} so the result is exactly
 * {@code targetLen} characters long. Inputs longer than the target are
 * truncated; the final repetition of the padding may be partial.
 * Returns null for a null input, null/empty padding, or a null/negative target.
 */
@Udf
public String lpad(
    @UdfParameter(description = "String to be padded") final String input,
    @UdfParameter(description = "Target length") final Integer targetLen,
    @UdfParameter(description = "Padding string") final String padding) {
  if (input == null
      || padding == null || padding.isEmpty()
      || targetLen == null || targetLen < 0) {
    return null;
  }

  // Number of characters of padding that must precede the input.
  final int padChars = Math.max(targetLen - input.length(), 0);

  final StringBuilder result = new StringBuilder(targetLen + padding.length());
  while (result.length() < padChars) {
    result.append(padding);
  }
  result.setLength(padChars);   // trim overshoot from the last padding repetition
  result.append(input);
  result.setLength(targetLen);  // truncate inputs longer than the target
  return result.toString();
}
// A null padding string makes lpad return null rather than throw.
@Test
public void shouldReturnNullForNullPaddingString() {
    final String result = udf.lpad("foo", 4, null);
    assertThat(result, is(nullValue()));
}
/**
 * Returns a reactive stream over all elements, backed by a cursor-based scan.
 */
public Flowable<V> iterator() {
    // null = no match pattern (all elements); 10 = batch (count) size per scan round trip
    return scanIteratorReactive(null, 10);
}
// Iterating a two-element scored set yields both values in score order, then terminates.
@Test
public void testIteratorNextNext() {
    RScoredSortedSetRx<String> set = redisson.getScoredSortedSet("simple");
    sync(set.add(1, "1"));
    sync(set.add(2, "4"));
    Iterator<String> iter = toIterator(set.iterator());
    Assertions.assertEquals("1", iter.next());
    Assertions.assertEquals("4", iter.next());
    Assertions.assertFalse(iter.hasNext());
}
/**
 * Access-logging handler: registers an end handler that emits one
 * Apache-style log line per completed request, then immediately passes
 * control to the next handler in the chain.
 */
@Override
public void handle(final RoutingContext routingContext) {
    routingContext.addEndHandler(ar -> {
        // After the response is complete, log results here.
        final int status = routingContext.request().response().getStatusCode();
        // The rate limiter may suppress repeated path/status combinations.
        if (!loggingRateLimiter.shouldLog(logger, routingContext.request().path(), status)) {
            return;
        }
        final long contentLength = routingContext.request().response().bytesWritten();
        final HttpVersion version = routingContext.request().version();
        final HttpMethod method = routingContext.request().method();
        // Query strings may carry sensitive data, so only log them when enabled.
        final String uri = enableQueryLogging
            ? routingContext.request().uri() : routingContext.request().path();
        // Optional filter to exclude whole endpoints from the access log.
        if (endpointFilter.isPresent() && endpointFilter.get().matcher(uri).matches()) {
            return;
        }
        final long requestBodyLength = routingContext.request().bytesRead();
        // Map the protocol enum to its log representation; unknown versions log "-".
        final String versionFormatted;
        switch (version) {
            case HTTP_1_0:
                versionFormatted = "HTTP/1.0";
                break;
            case HTTP_1_1:
                versionFormatted = "HTTP/1.1";
                break;
            case HTTP_2:
                versionFormatted = "HTTP/2.0";
                break;
            default:
                versionFormatted = "-";
        }
        // Principal name of the authenticated user, "-" when unauthenticated.
        final String name = Optional.ofNullable((ApiUser) routingContext.user())
            .map(u -> u.getPrincipal().getName())
            .orElse("-");
        final String userAgent = Optional.ofNullable(
            routingContext.request().getHeader(HTTP_HEADER_USER_AGENT)).orElse("-");
        final String timestamp = Utils.formatRFC1123DateTime(clock.millis());
        final SocketAddress socketAddress = routingContext.request().remoteAddress();
        final String message = String.format(
            "%s - %s [%s] \"%s %s %s\" %d %d \"-\" \"%s\" %d",
            socketAddress == null ? "null" : socketAddress.host(),
            name,
            timestamp,
            method,
            uri,
            versionFormatted,
            status,
            contentLength,
            userAgent,
            requestBodyLength);
        doLog(status, message);
    });
    routingContext.next();
}
// When the rate limiter declines, no log line is emitted at any level.
@Test
public void shouldSkipLog() {
    // Given:
    when(response.getStatusCode()).thenReturn(401);
    when(loggingRateLimiter.shouldLog(logger, "/query", 401)).thenReturn(false);
    // When:
    loggingHandler.handle(routingContext);
    verify(routingContext).addEndHandler(endCallback.capture());
    endCallback.getValue().handle(null);
    // Then:
    verify(logger, never()).info(any());
    verify(logger, never()).warn(any());
    verify(logger, never()).error(any());
}
/**
 * Notifies the host provider service that a host has been detected.
 * A null {@code ips} set is treated as "no IP addresses known".
 */
protected void addHost(MacAddress mac, VlanId vlan, Set<HostLocation> locations, Set<IpAddress> ips) {
    HostId hostId = HostId.hostId(mac, vlan);
    // Substitute an empty set when no IP addresses were supplied.
    Set<IpAddress> ipAddresses = (ips != null) ? ips : Collections.emptySet();
    HostDescription description =
            new DefaultHostDescription(mac, vlan, locations, ipAddresses, true);
    providerService.hostDetected(hostId, description, true);
}
// Detecting a host forwards id, description and event to the provider service.
// NOTE(review): this calls a 7-argument addHost overload (with aux locations,
// inner VLAN and TPID), not the 4-argument variant — confirm both exist.
@Test
public void testAddHost() throws Exception {
    provider.addHost(mac, vlan, locations, auxLocations, ips, innerVlan, outerTpid);
    assertThat(providerService.hostId, is(hostId));
    assertThat(providerService.hostDescription, is(hostDescription));
    assertThat(providerService.event, is("hostDetected"));
    providerService.clear();
}
/**
 * Accepts a value, enforcing that every non-null value observed equals the
 * first one recorded. Null handling depends on {@code permitsNull}:
 * permitted nulls are silently ignored, otherwise nulls are rejected.
 *
 * @throws IllegalArgumentException if {@code t} is null and nulls are not
 *         permitted, or if {@code t} differs from the first recorded value
 */
@Override
public void accept(T t) {
    // Evaluate isNull(t) once instead of twice (original checked it in two branches).
    if (isNull(t)) {
        if (permitsNull) {
            return;
        }
        throw new IllegalArgumentException("Null is an illegal input");
    }
    if (hasValueToMatch()) {
        checkArgument(t.equals(mustMatchThisValue));
    } else {
        // The first value seen becomes the reference for all later calls.
        mustMatchThisValue = t;
    }
}
// A default-configured verifier (nulls not permitted) rejects a null input.
@Test
public void rejectsNullWhenConfigured() {
    SingleValueVerifier<String> svv = new SingleValueVerifier<>();
    assertThrows(IllegalArgumentException.class, () -> svv.accept(null));
}
/**
 * GET /{id}: looks up a single post by its id.
 * NOTE(review): an empty Mono (no such id) presumably yields an empty 200 body
 * rather than 404 — confirm desired behavior.
 */
@GetMapping(value = "/{id}")
public Mono<Post> get(@PathVariable(value = "id") Long id) {
    return this.posts.findById(id);
}
// Fetching posts 1 and 2 by id returns their respective titles as JSON.
@Test
public void getPostById() throws Exception {
    this.rest
        .get()
        .uri("/posts/1")
        .accept(APPLICATION_JSON)
        .exchange()
        .expectBody()
        .jsonPath("$.title")
        .isEqualTo("post one");
    this.rest
        .get()
        .uri("/posts/2")
        .accept(APPLICATION_JSON)
        .exchange()
        .expectBody()
        .jsonPath("$.title")
        .isEqualTo("post two");
}
/**
 * Throws if {@code size} exceeds the channel's configured payload limit.
 *
 * @throws IOException (as ExceedPayloadLimitException) when over the limit
 */
protected static void checkPayload(Channel channel, long size) throws IOException {
    int payload = getPayload(channel);
    if (isOverPayload(payload, size)) {
        ExceedPayloadLimitException e = new ExceedPayloadLimitException(
                "Data length too large: " + size + ", max payload: " + payload
                        + ", channel: " + channel);
        // Log before throwing so transport-level limit violations are always recorded.
        logger.error(TRANSPORT_EXCEED_PAYLOAD_LIMIT, "", "", e.getMessage(), e);
        throw e;
    }
}
/**
 * The default 8 MiB payload limit admits 1 MiB and rejects 15 MiB.
 */
@Test
void testCheckPayloadDefault8M() throws Exception {
    Channel channel = mock(Channel.class);
    given(channel.getUrl()).willReturn(URL.valueOf("dubbo://1.1.1.1"));
    // Within the default limit: must not throw.
    AbstractCodec.checkPayload(channel, 1 * 1024 * 1024);
    // Over the limit: must throw. The original test passed silently when no
    // exception was thrown at all, so track and assert that it actually happened.
    boolean thrown = false;
    try {
        AbstractCodec.checkPayload(channel, 15 * 1024 * 1024);
    } catch (IOException expected) {
        thrown = true;
        assertThat(
                expected.getMessage(),
                allOf(
                        containsString("Data length too large: "),
                        containsString("max payload: " + 8 * 1024 * 1024)));
    }
    org.junit.jupiter.api.Assertions.assertTrue(thrown, "expected payload limit to be exceeded");
    verify(channel, VerificationModeFactory.atLeastOnce()).getUrl();
}
/**
 * Serializes this OAUTHBEARER client initial response to its UTF-8 wire form:
 * {@code n,<authzid>,<SEP>auth=Bearer <token><extensions><SEP><SEP>}.
 */
public byte[] toBytes() {
    String authzid = authorizationId.isEmpty() ? "" : "a=" + authorizationId;
    String extensionPart = extensionsMessage();
    if (!extensionPart.isEmpty()) {
        // Extensions, when present, are prefixed by the SASL separator.
        extensionPart = SEPARATOR + extensionPart;
    }
    String message = "n," + authzid + "," + SEPARATOR + "auth=Bearer " + tokenValue
            + extensionPart + SEPARATOR + SEPARATOR;
    return message.getBytes(StandardCharsets.UTF_8);
}
// Round trip: a parsed client initial response re-serializes to the identical message.
@Test
public void testBuildServerResponseToBytes() throws Exception {
    String serverMessage = "n,,\u0001auth=Bearer 123.345.567\u0001nineteen=42\u0001\u0001";
    OAuthBearerClientInitialResponse response =
            new OAuthBearerClientInitialResponse(serverMessage.getBytes(StandardCharsets.UTF_8));
    String message = new String(response.toBytes(), StandardCharsets.UTF_8);
    assertEquals(serverMessage, message);
}
/**
 * Validates that a cookie key has the form {@code namespace:key} and that
 * both parts contain only legal characters.
 *
 * @throws IllegalArgumentException if the key is null, malformed, or contains
 *         illegal characters in either part
 */
public static void ensureCookieFormat(final String cookieKey) {
    if (cookieKey == null) {
        throw new IllegalArgumentException("Cookie key must not be null");
    }
    // The namespace must be at least one character, so ':' may not be first or absent.
    final int separatorIndex = cookieKey.indexOf(':');
    if (separatorIndex < 1) {
        throw new IllegalArgumentException("Cookie key format must be: namespace:key");
    }
    final String namespace = cookieKey.substring(0, separatorIndex);
    final String value = cookieKey.substring(separatorIndex + 1);
    if (checkNamespace(namespace).isPresent()) {
        throw new IllegalArgumentException("Invalid characters in cookie namespace: " + cookieKey);
    }
    if (checkValue(value).isPresent()) {
        throw new IllegalArgumentException("Invalid characters in cookie value: " + cookieKey);
    }
}
// A null cookie key is rejected with IllegalArgumentException.
@Test
void testCookieFormatWithNullValue() {
    assertThrows(IllegalArgumentException.class, () -> CookieUtil.ensureCookieFormat(null));
}
/**
 * Creates the client-level sensor tracking the number of stream threads that
 * have failed since the Kafka Streams client started.
 */
public static Sensor failedStreamThreadSensor(final StreamsMetricsImpl streamsMetrics) {
    final Sensor sensor = streamsMetrics.clientLevelSensor(FAILED_STREAM_THREADS, RecordingLevel.INFO);
    // Cumulative sum metric. NOTE(review): 'false' flag presumably suppresses
    // the companion rate metric — confirm against addSumMetricToSensor.
    addSumMetricToSensor(
        sensor,
        CLIENT_LEVEL_GROUP,
        streamsMetrics.clientLevelTagMap(),
        FAILED_STREAM_THREADS,
        false,
        FAILED_STREAM_THREADS_DESCRIPTION
    );
    return sensor;
}
// The factory wires the failed-stream-threads sum metric and returns the client-level sensor.
@Test
public void shouldGetFailedStreamThreadsSensor() {
    final String name = "failed-stream-threads";
    final String description = "The number of failed stream threads since the start of the Kafka Streams client";
    when(streamsMetrics.clientLevelSensor(name, RecordingLevel.INFO)).thenReturn(expectedSensor);
    when(streamsMetrics.clientLevelTagMap()).thenReturn(tagMap);
    StreamsMetricsImpl.addSumMetricToSensor(
        expectedSensor,
        CLIENT_LEVEL_GROUP,
        tagMap,
        name,
        false,
        description
    );
    final Sensor sensor = ClientMetrics.failedStreamThreadSensor(streamsMetrics);
    assertThat(sensor, is(expectedSensor));
}
/**
 * Renders this id by appending its components after the TASK prefix,
 * e.g. {@code task_<jobid>_<type>_<sequence>}.
 */
@Override
public String toString() {
    return appendTo(new StringBuilder(TASK)).toString();
}
// toString uses the task_<job>_<type-char>_<seq> layout for every task type.
@Test
public void testToString() {
    JobID jobId = new JobID("1234", 1);
    for (TaskType type : TaskType.values()) {
        TaskID taskId = new TaskID(jobId, type, 0);
        String str = String.format("task_1234_0001_%c_000000", TaskID.getRepresentingCharacter(type));
        assertEquals("The toString() method returned the wrong value", str, taskId.toString());
    }
}
/**
 * Lists schemas visible under {@code scope}, which must be either the account
 * root or a database identifier.
 *
 * @throws IllegalArgumentException for unsupported scope types
 */
@Override
public List<SnowflakeIdentifier> listSchemas(SnowflakeIdentifier scope) {
    StringBuilder baseQuery = new StringBuilder("SHOW SCHEMAS");
    String[] queryParams = null;
    switch (scope.type()) {
        case ROOT:
            // account-level listing
            baseQuery.append(" IN ACCOUNT");
            break;
        case DATABASE:
            // database-level listing; the database name is bound as a parameter
            baseQuery.append(" IN DATABASE IDENTIFIER(?)");
            queryParams = new String[] {scope.toIdentifierString()};
            break;
        default:
            throw new IllegalArgumentException(
                String.format("Unsupported scope type for listSchemas: %s", scope));
    }
    // Effectively-final copies so the lambda below can capture them.
    final String finalQuery = baseQuery.toString();
    final String[] finalQueryParams = queryParams;
    List<SnowflakeIdentifier> schemas;
    try {
        schemas =
            connectionPool.run(
                conn ->
                    queryHarness.query(
                        conn, finalQuery, SCHEMA_RESULT_SET_HANDLER, finalQueryParams));
    } catch (SQLException e) {
        // Translated into an Iceberg-specific exception type.
        throw snowflakeExceptionToIcebergException(
            scope, e, String.format("Failed to list schemas for scope '%s'", scope));
    } catch (InterruptedException e) {
        throw new UncheckedInterruptedException(
            e, "Interrupted while listing schemas for scope '%s'", scope);
    }
    // Sanity check: the result-set handler must only ever produce SCHEMA identifiers.
    schemas.forEach(
        schema ->
            Preconditions.checkState(
                schema.type() == SnowflakeIdentifier.Type.SCHEMA,
                "Expected SCHEMA, got identifier '%s' for scope '%s'",
                schema,
                scope));
    return schemas;
}
// A SQLException without an error code surfaces as UncheckedSQLException with the original cause.
@SuppressWarnings("unchecked")
@Test
public void testListSchemasSQLExceptionWithoutErrorCode() throws SQLException, InterruptedException {
    Exception injectedException = new SQLException("Fake SQL exception");
    when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
    assertThatExceptionOfType(UncheckedSQLException.class)
        .isThrownBy(() -> snowflakeClient.listSchemas(SnowflakeIdentifier.ofDatabase("DB_1")))
        .withMessageContaining("Failed to list schemas for scope 'DATABASE: 'DB_1''")
        .withCause(injectedException);
}
/**
 * Rewrites the expression tree by applying the OperatorPlugin's process
 * rule at every node; nodes the plugin does not handle are left unchanged.
 */
public Expression rewrite(final Expression expression) {
    return new ExpressionTreeRewriter<>(new OperatorPlugin()::process)
        .rewrite(expression, null);
}
// Only ROWTIME comparisons are rewritten to timestamps; other columns are untouched.
@Test
public void shouldNotReplaceUnsupportedColumns() {
    // Given:
    final Expression predicate = getPredicate(
        "SELECT * FROM orders where ROWTIME > '2017-01-01' AND ORDERTIME = '2017-01-01';");
    // When:
    final Expression rewritten = rewriter.rewrite(predicate);
    // Then:
    assertThat(rewritten.toString(),
        is(String.format("((ORDERS.ROWTIME > %d) AND (ORDERS.ORDERTIME = '2017-01-01'))", A_TIMESTAMP)));
}
/**
 * Parses command-line arguments and runs the FS-to-CS configuration
 * conversion, optionally validating the produced configuration.
 *
 * @return 0 on success (or when help was printed), -1 on any handled error
 */
int parseAndConvert(String[] args) throws Exception {
    Options opts = createOptions();
    int retVal = 0;
    try {
        if (args.length == 0) {
            // No arguments: show usage and exit successfully.
            LOG.info("Missing command line arguments");
            printHelp(opts);
            return 0;
        }
        CommandLine cliParser = new GnuParser().parse(opts, args);
        if (cliParser.hasOption(CliOption.HELP.shortSwitch)) {
            printHelp(opts);
            return 0;
        }
        FSConfigToCSConfigConverter converter = prepareAndGetConverter(cliParser);
        converter.convert(converterParams);
        // Validate the result unless verification was explicitly skipped or
        // there is no output directory to validate.
        String outputDir = converterParams.getOutputDirectory();
        boolean skipVerification = cliParser.hasOption(CliOption.SKIP_VERIFICATION.shortSwitch);
        if (outputDir != null && !skipVerification) {
            validator.validateConvertedConfig(
                converterParams.getOutputDirectory());
        }
    } catch (ParseException e) {
        String msg = "Options parsing failed: " + e.getMessage();
        logAndStdErr(e, msg);
        printHelp(opts);
        retVal = -1;
    } catch (PreconditionException e) {
        String msg = "Cannot start FS config conversion due to the following" + " precondition error: " + e.getMessage();
        handleException(e, msg);
        retVal = -1;
    } catch (UnsupportedPropertyException e) {
        String msg = "Unsupported property/setting encountered during FS config " + "conversion: " + e.getMessage();
        handleException(e, msg);
        retVal = -1;
    } catch (ConversionException | IllegalArgumentException e) {
        String msg = "Fatal error during FS config conversion: " + e.getMessage();
        handleException(e, msg);
        retVal = -1;
    } catch (VerificationException e) {
        // NOTE(review): e.getCause() is fetched twice and may be null,
        // which would NPE on getMessage() — confirm the cause is always set.
        Throwable cause = e.getCause();
        String msg = "Verification failed: " + e.getCause().getMessage();
        conversionOptions.handleVerificationFailure(cause, msg);
        retVal = -1;
    }
    // Always runs, on success and failure alike.
    conversionOptions.handleParsingFinished();
    return retVal;
}
// Default arguments propagate the expected file paths and console flag to the converter.
@Test
public void testConvertFSConfigurationDefaults() throws Exception {
    setupFSConfigConversionFiles(true);
    ArgumentCaptor<FSConfigToCSConfigConverterParams> conversionParams =
        ArgumentCaptor.forClass(FSConfigToCSConfigConverterParams.class);
    FSConfigToCSConfigArgumentHandler argumentHandler = createArgumentHandler();
    String[] args = getArgumentsAsArrayWithDefaults("-f",
        FSConfigConverterTestCommons.FS_ALLOC_FILE,
        "-r", FSConfigConverterTestCommons.CONVERSION_RULES_FILE);
    argumentHandler.parseAndConvert(args);
    // validate params
    verify(mockConverter).convert(conversionParams.capture());
    FSConfigToCSConfigConverterParams params = conversionParams.getValue();
    LOG.info("FS config converter parameters: " + params);
    assertEquals("Yarn site config",
        FSConfigConverterTestCommons.YARN_SITE_XML,
        params.getYarnSiteXmlConfig());
    assertEquals("FS xml",
        FSConfigConverterTestCommons.FS_ALLOC_FILE,
        params.getFairSchedulerXmlConfig());
    assertEquals("Conversion rules config",
        FSConfigConverterTestCommons.CONVERSION_RULES_FILE,
        params.getConversionRulesConfig());
    assertFalse("Console mode", params.isConsole());
}
/**
 * Compares two ranges for equality after widening both endpoints to a common
 * numeric type, so e.g. an Integer range can equal its Long counterpart.
 */
public static boolean safeRangeEquals(final Range<Comparable<?>> sourceRange, final Range<Comparable<?>> targetRange) {
    Class<?> clazz = getRangeTargetNumericType(sourceRange, targetRange);
    if (null == clazz) {
        // No common numeric target type: fall back to plain equality.
        return sourceRange.equals(targetRange);
    }
    Range<Comparable<?>> newSourceRange = createTargetNumericTypeRange(sourceRange, clazz);
    Range<Comparable<?>> newTargetRange = createTargetNumericTypeRange(targetRange, clazz);
    return newSourceRange.equals(newTargetRange);
}
// An Integer-bound range equals the same range with a Long bound.
@Test
void assertSafeRangeEqualsForInteger() {
    assertTrue(SafeNumberOperationUtils.safeRangeEquals(Range.greaterThan(1), Range.greaterThan(1L)));
}
/**
 * Display form of the URL: the superclass masks the password, and single
 * hashes are re-expanded to the double-hash form.
 * NOTE(review): presumably '##' is the Hg branch separator that the
 * superclass collapses — confirm against HgUrlArgument.
 */
@Override
public String forDisplay() {
    return super.forDisplay().replace(SINGLE_HASH, DOUBLE_HASH);
}
// The password is masked while the '##branch' suffix is preserved.
@Test
void shouldMaskThePasswordInDisplayName() {
    HgUrlArgument hgUrlArgument = new HgUrlArgument("http://user:pwd@url##branch");
    assertThat(hgUrlArgument.forDisplay(), is("http://user:******@url##branch"));
}
/**
 * Compares resource values; only defined between resources of the same
 * concrete class and with the same name.
 * NOTE(review): throws IllegalArgumentException (via checkArgument) rather
 * than NPE/ClassCastException for invalid arguments, which deviates from the
 * Comparable contract — confirm this is intended.
 */
@Override
public int compareTo(Resource other) {
    checkArgument(other != null && getClass() == other.getClass() && name.equals(other.name));
    return value.compareTo(other.value);
}
// compareTo is reflexive, symmetric for equal values, and orders by value.
@Test
void testCompareTo() {
    final Resource resource1 = new TestResource(0.0);
    final Resource resource2 = new TestResource(0.0);
    final Resource resource3 = new TestResource(1.0);
    assertThatComparable(resource1).isEqualByComparingTo(resource1);
    assertThatComparable(resource2).isEqualByComparingTo(resource1);
    assertThatComparable(resource1).isLessThan(resource3);
    assertThatComparable(resource3).isGreaterThan(resource1);
}
/**
 * Builds publication parameters from a channel URI, validating that the
 * initial-term-id / term-id / term-offset triple is supplied either
 * completely or not at all, and that the offset is in range and aligned.
 */
static PublicationParams getPublicationParams(
    final ChannelUri channelUri,
    final MediaDriver.Context ctx,
    final DriverConductor driverConductor,
    final boolean isIpc)
{
    final PublicationParams params = new PublicationParams(ctx, isIpc);

    // Each getter parses its URI parameter and stores the result on params.
    params.getEntityTag(channelUri, driverConductor);
    params.getSessionId(channelUri, driverConductor);
    params.getTermBufferLength(channelUri);
    params.getMtuLength(channelUri);
    params.getLingerTimeoutNs(channelUri);
    params.getEos(channelUri);
    params.getSparse(channelUri, ctx);
    params.getSpiesSimulateConnection(channelUri, ctx);
    params.getUntetheredWindowLimitTimeout(channelUri, ctx);
    params.getUntetheredRestingTimeout(channelUri, ctx);
    params.getMaxResend(channelUri);

    // Count how many of the position-defining parameters are present;
    // they must be supplied as a complete set of three.
    int count = 0;

    final String initialTermIdStr = channelUri.get(INITIAL_TERM_ID_PARAM_NAME);
    count = initialTermIdStr != null ? count + 1 : count;

    final String termIdStr = channelUri.get(TERM_ID_PARAM_NAME);
    count = termIdStr != null ? count + 1 : count;

    final String termOffsetStr = channelUri.get(TERM_OFFSET_PARAM_NAME);
    count = termOffsetStr != null ? count + 1 : count;

    if (count > 0)
    {
        if (count < 3)
        {
            throw new IllegalArgumentException("params must be used as a complete set: " +
                INITIAL_TERM_ID_PARAM_NAME + " " + TERM_ID_PARAM_NAME + " " + TERM_OFFSET_PARAM_NAME +
                " channel=" + channelUri);
        }

        params.initialTermId = Integer.parseInt(initialTermIdStr);
        params.termId = Integer.parseInt(termIdStr);
        params.termOffset = Integer.parseInt(termOffsetStr);

        if (params.termOffset > params.termLength)
        {
            throw new IllegalArgumentException(
                TERM_OFFSET_PARAM_NAME + "=" + params.termOffset + " > " +
                TERM_LENGTH_PARAM_NAME + "=" + params.termLength + ": channel=" + channelUri);
        }

        if (params.termOffset < 0 || params.termOffset > LogBufferDescriptor.TERM_MAX_LENGTH)
        {
            throw new IllegalArgumentException(
                TERM_OFFSET_PARAM_NAME + "=" + params.termOffset + " out of range: channel=" + channelUri);
        }

        // Offsets must land on a frame boundary.
        if ((params.termOffset & (FrameDescriptor.FRAME_ALIGNMENT - 1)) != 0)
        {
            throw new IllegalArgumentException(
                TERM_OFFSET_PARAM_NAME + "=" + params.termOffset +
                " must be a multiple of FRAME_ALIGNMENT: channel=" + channelUri);
        }

        // Subtraction going negative indicates the term-id difference wrapped past 2^31 - 1.
        if (params.termId - params.initialTermId < 0)
        {
            throw new IllegalStateException(
                "difference greater than 2^31 - 1: " + INITIAL_TERM_ID_PARAM_NAME + "=" +
                params.initialTermId + " when " + TERM_ID_PARAM_NAME + "=" + params.termId +
                " channel=" + channelUri);
        }

        params.hasPosition = true;
    }

    params.isResponse = CONTROL_MODE_RESPONSE.equals(channelUri.get(MDC_CONTROL_MODE_PARAM_NAME));
    // Defaults to -1 when no response correlation id is present on the URI.
    params.responseCorrelationId = Long.parseLong(channelUri.get(RESPONSE_CORRELATION_ID_PARAM_NAME, "-1"));

    return params;
}
// A negative max-resend URI parameter is rejected with a "must be > 0" error.
@Test
void hasNegativeMaxRetransmits() {
    final ChannelUri uri = ChannelUri.parse("aeron:udp?endpoint=localhost:1010|" +
        CommonContext.MAX_RESEND_PARAM_NAME + "=-1234");
    final IllegalArgumentException exception = assertThrows(
        IllegalArgumentException.class,
        () -> PublicationParams.getPublicationParams(uri, ctx, conductor, false));
    assertTrue(exception.getMessage().contains("must be > 0"));
}
/**
 * Static factory for a wildcard type with the given bound kind and bound,
 * delegating to the AutoValue-generated implementation.
 */
public static UWildcardType create(BoundKind boundKind, UType bound) {
    return new AutoValue_UWildcardType(boundKind, bound);
}
// A wildcard type survives Java serialization round-tripping intact.
@Test
public void serialization() {
    UType numberType = UClassType.create("java.lang.Number", ImmutableList.<UType>of());
    SerializableTester.reserializeAndAssert(UWildcardType.create(BoundKind.EXTENDS, numberType));
}
/**
 * Serializes an error message into the standard error JSON shape.
 * Falls back to a hand-built JSON object if Jackson serialization fails.
 */
protected String getErrorJson(String message) {
    try {
        return LambdaContainerHandler.getObjectMapper().writeValueAsString(new ErrorModel(message));
    } catch (JsonProcessingException e) {
        log.error("Could not produce error JSON", e);
        // Bug fix: escape backslashes and quotes so the fallback stays valid JSON
        // even when the message contains them. String.valueOf keeps the original
        // behavior of rendering a null message as the text "null".
        String escaped = String.valueOf(message).replace("\\", "\\\\").replace("\"", "\\\"");
        return "{ \"message\": \"" + escaped + "\" }";
    }
}
// The error output remains parseable JSON carrying the original message.
// NOTE(review): mockMapper is stubbed to throw but never injected into
// exceptionHandler, so the exception path may not actually be exercised — confirm wiring.
@Test
void getErrorJson_JsonParsinException_validJson() throws IOException {
    ObjectMapper mockMapper = mock(ObjectMapper.class);
    JsonProcessingException exception = mock(JsonProcessingException.class);
    when(mockMapper.writeValueAsString(any(Object.class))).thenThrow(exception);
    String output = exceptionHandler.getErrorJson(INVALID_RESPONSE_MESSAGE);
    assertNotNull(output);
    ErrorModel error = objectMapper.readValue(output, ErrorModel.class);
    assertNotNull(error);
    assertEquals(INVALID_RESPONSE_MESSAGE, error.getMessage());
}
/**
 * Builds the MySQL checksum query for a column: a BIT_XOR over per-row CRC32
 * values plus a row count, aliased as {@code checksum} and {@code cnt}.
 */
@Override
public Optional<String> buildCRC32SQL(final String qualifiedTableName, final String columnName) {
    final String sql = "SELECT BIT_XOR(CAST(CRC32(" + columnName
            + ") AS UNSIGNED)) AS checksum, COUNT(1) AS cnt FROM " + qualifiedTableName;
    return Optional.of(sql);
}
// The CRC32 checksum SQL is generated with the column and table interpolated.
@Test
void assertBuildSumCrc32SQL() {
    Optional<String> actual = sqlBuilder.buildCRC32SQL("t2", "id");
    assertTrue(actual.isPresent());
    assertThat(actual.get(), is("SELECT BIT_XOR(CAST(CRC32(id) AS UNSIGNED)) AS checksum, COUNT(1) AS cnt FROM t2"));
}
/**
 * Allocates slots for the given execution attempts, reusing any physical slot
 * already requested for an attempt and issuing new physical slot requests for
 * the rest.
 */
@Override
public Map<ExecutionAttemptID, ExecutionSlotAssignment> allocateSlotsFor(
        List<ExecutionAttemptID> executionAttemptIds) {
    Map<ExecutionAttemptID, ExecutionSlotAssignment> result = new HashMap<>();
    // Maps the new physical slot requests back to their execution attempts.
    Map<SlotRequestId, ExecutionAttemptID> remainingExecutionsToSlotRequest =
            new HashMap<>(executionAttemptIds.size());
    List<PhysicalSlotRequest> physicalSlotRequests =
            new ArrayList<>(executionAttemptIds.size());
    for (ExecutionAttemptID executionAttemptId : executionAttemptIds) {
        if (requestedPhysicalSlots.containsKeyA(executionAttemptId)) {
            // A slot was already requested for this attempt: reuse it.
            result.put(
                    executionAttemptId,
                    new ExecutionSlotAssignment(
                            executionAttemptId,
                            requestedPhysicalSlots.getValueByKeyA(executionAttemptId)));
        } else {
            final SlotRequestId slotRequestId = new SlotRequestId();
            final ResourceProfile resourceProfile =
                    resourceProfileRetriever.apply(executionAttemptId);
            Collection<TaskManagerLocation> preferredLocations =
                    preferredLocationsRetriever.getPreferredLocations(
                            executionAttemptId.getExecutionVertexId(), Collections.emptySet());
            final SlotProfile slotProfile =
                    SlotProfile.priorAllocation(
                            resourceProfile,
                            resourceProfile,
                            preferredLocations,
                            Collections.emptyList(),
                            Collections.emptySet());
            final PhysicalSlotRequest request =
                    new PhysicalSlotRequest(
                            slotRequestId, slotProfile, slotWillBeOccupiedIndefinitely);
            physicalSlotRequests.add(request);
            remainingExecutionsToSlotRequest.put(slotRequestId, executionAttemptId);
        }
    }
    // Issue all new physical slot requests in one batch and merge the assignments.
    result.putAll(
            allocatePhysicalSlotsFor(remainingExecutionsToSlotRequest, physicalSlotRequests));
    return result;
}
// A failed physical slot request fails the logical slot future, and a
// subsequent allocation issues a fresh physical request.
@Test
void testFailedPhysicalSlotRequestFailsLogicalSlotFuture() {
    final AllocationContext context =
        new AllocationContext(
            TestingPhysicalSlotProvider.createWithoutImmediatePhysicalSlotCreation(),
            false);
    final CompletableFuture<LogicalSlot> slotFuture =
        context.allocateSlotsFor(EXECUTION_ATTEMPT_ID);
    final SlotRequestId slotRequestId =
        context.getSlotProvider().getFirstRequestOrFail().getSlotRequestId();
    assertThat(slotFuture).isNotDone();
    context.getSlotProvider()
        .getResponses()
        .get(slotRequestId)
        .completeExceptionally(new Throwable());
    assertThat(slotFuture).isCompletedExceptionally();
    // next allocation allocates new slot
    context.allocateSlotsFor(EXECUTION_ATTEMPT_ID);
    assertThat(context.getSlotProvider().getRequests()).hasSize(2);
}
/**
 * Builds a QueryDescription from query metadata. Persistent queries carry
 * their sink name and the sink topic's window type; transient queries have
 * neither.
 */
public static QueryDescription forQueryMetadata(
    final QueryMetadata queryMetadata,
    final Map<KsqlHostInfoEntity, KsqlQueryStatus> ksqlHostQueryStatus
) {
    if (queryMetadata instanceof PersistentQueryMetadata) {
        final PersistentQueryMetadata persistentQuery = (PersistentQueryMetadata) queryMetadata;
        return create(
            persistentQuery,
            // Window type of the sink's key format, when a result topic exists.
            persistentQuery.getResultTopic().map(t -> t.getKeyFormat().getWindowType())
                .orElse(Optional.empty()),
            persistentQuery.getSinkName(),
            ksqlHostQueryStatus
        );
    }
    // Transient query: no window type and no sink.
    return create(
        queryMetadata,
        Optional.empty(),
        Optional.empty(),
        ksqlHostQueryStatus
    );
}
// A value column literally named ROWKEY is reported like any other field.
@Test
public void shouldHandleRowKeyInValueSchemaForTransientQuery() {
    // Given:
    final LogicalSchema schema = LogicalSchema.builder()
        .valueColumn(ColumnName.of("field1"), SqlTypes.INTEGER)
        .valueColumn(ColumnName.of("ROWKEY"), SqlTypes.STRING)
        .valueColumn(ColumnName.of("field2"), SqlTypes.STRING)
        .build();
    transientQuery = new TransientQueryMetadata(
        SQL_TEXT,
        schema,
        SOURCE_NAMES,
        "execution plan",
        queryQueue,
        QUERY_ID,
        "app id",
        topology,
        kafkaStreamsBuilder,
        STREAMS_PROPS,
        PROP_OVERRIDES,
        closeTimeout,
        10,
        ResultType.STREAM,
        0L,
        0L,
        listener,
        processingLoggerFactory
    );
    transientQuery.initialize();
    // When:
    transientQueryDescription = QueryDescriptionFactory.forQueryMetadata(transientQuery, Collections.emptyMap());
    // Then:
    assertThat(transientQueryDescription.getFields(), contains(
        new FieldInfo("field1", new SchemaInfo(SqlBaseType.INTEGER, null, null), Optional.empty()),
        new FieldInfo("ROWKEY", new SchemaInfo(SqlBaseType.STRING, null, null), Optional.empty()),
        new FieldInfo("field2", new SchemaInfo(SqlBaseType.STRING, null, null), Optional.empty())));
}
/**
 * Matches a single pattern item against a value: an empty/null pattern only
 * matches a null value; "*" matches anything; otherwise exact equality.
 */
static boolean isItemMatch(String pattern, String value) {
    if (StringUtils.isEmpty(pattern)) {
        // No pattern means only the absence of a value matches.
        return value == null;
    }
    return "*".equals(pattern) || pattern.equals(value);
}
// Covers null/null, one-sided nulls, exact match, and wildcard behavior.
@Test
void testIsItemMatch() throws Exception {
    assertTrue(UrlUtils.isItemMatch(null, null));
    assertTrue(!UrlUtils.isItemMatch("1", null));
    assertTrue(!UrlUtils.isItemMatch(null, "1"));
    assertTrue(UrlUtils.isItemMatch("1", "1"));
    assertTrue(UrlUtils.isItemMatch("*", null));
    assertTrue(UrlUtils.isItemMatch("*", "*"));
    assertTrue(UrlUtils.isItemMatch("*", "1234"));
    assertTrue(!UrlUtils.isItemMatch(null, "*"));
}
/**
 * Resets every pixel in the frame buffer to white.
 */
@Override
public void clearAll() {
    Arrays.fill(pixels, Pixel.WHITE);
}
// Injects a buffer with one black pixel via reflection and verifies clearAll whitens it.
@Test
void testClearAll() {
    try {
        var field = FrameBuffer.class.getDeclaredField("pixels");
        var pixels = new Pixel[FrameBuffer.HEIGHT * FrameBuffer.WIDTH];
        Arrays.fill(pixels, Pixel.WHITE);
        pixels[0] = Pixel.BLACK;
        var frameBuffer = new FrameBuffer();
        field.setAccessible(true);
        field.set(frameBuffer, pixels);
        frameBuffer.clearAll();
        assertEquals(Pixel.WHITE, frameBuffer.getPixels()[0]);
    } catch (NoSuchFieldException | IllegalAccessException e) {
        fail("Fail to modify field access.");
    }
}
/**
 * Converts a variety of date/time representations to a Joda DateTime.
 * NOTE(review): zone handling is inconsistent across branches — Date and
 * Instant get UTC, ZonedDateTime keeps its zone, while LocalDateTime and
 * LocalDate use the system default zone and the DateTime default zone —
 * confirm this mix is intentional.
 *
 * @throws IllegalArgumentException for unsupported types or unparseable strings
 */
public static DateTime convertToDateTime(@Nonnull Object value) {
    if (value instanceof DateTime) {
        return (DateTime) value;
    }
    if (value instanceof Date) {
        return new DateTime(value, DateTimeZone.UTC);
    } else if (value instanceof ZonedDateTime) {
        // Preserve the original zone by translating it to a Joda zone.
        final DateTimeZone dateTimeZone =
            DateTimeZone.forTimeZone(TimeZone.getTimeZone(((ZonedDateTime) value).getZone()));
        return new DateTime(Date.from(((ZonedDateTime) value).toInstant()), dateTimeZone);
    } else if (value instanceof OffsetDateTime) {
        return new DateTime(Date.from(((OffsetDateTime) value).toInstant()), DateTimeZone.UTC);
    } else if (value instanceof LocalDateTime) {
        // No zone information: interpret in the system default zone.
        final LocalDateTime localDateTime = (LocalDateTime) value;
        final ZoneId defaultZoneId = ZoneId.systemDefault();
        final ZoneOffset offset = defaultZoneId.getRules().getOffset(localDateTime);
        return new DateTime(Date.from(localDateTime.toInstant(offset)));
    } else if (value instanceof LocalDate) {
        // Dates are anchored at midnight in the system default zone.
        final LocalDate localDate = (LocalDate) value;
        final LocalDateTime localDateTime = localDate.atStartOfDay();
        final ZoneId defaultZoneId = ZoneId.systemDefault();
        final ZoneOffset offset = defaultZoneId.getRules().getOffset(localDateTime);
        return new DateTime(Date.from(localDateTime.toInstant(offset)));
    } else if (value instanceof Instant) {
        return new DateTime(Date.from((Instant) value), DateTimeZone.UTC);
    } else if (value instanceof String) {
        // Strings are parsed with the Elasticsearch date format.
        return ES_DATE_FORMAT_FORMATTER.parseDateTime((String) value);
    } else {
        throw new IllegalArgumentException("Value of invalid type <" + value.getClass().getSimpleName() + "> provided");
    }
}
// An out-of-range month in the date string fails parsing with IllegalArgumentException.
@Test
void convertFromInvalidDateString() {
    assertThatThrownBy(() -> DateTimeConverter.convertToDateTime("2031-14-14 13:14:10.123"))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessageStartingWith("Cannot parse \"2031-14-14");
}
/**
 * JSON-serialized accessor for the index name this summary belongs to.
 */
@JsonProperty
public String getIndex() {
    return index;
}
// The summary exposes the index name it was constructed with.
@Test
public void testGetIndex() throws Exception {
    assertEquals(INDEX_NAME, messageSummary.getIndex());
}
/**
 * Parses a serialized job data node line of the form {@code entry|entry|...},
 * delegating each '|'-separated segment to JobDataNodeEntry.unmarshal.
 * Empty segments are ignored.
 */
public static JobDataNodeLine unmarshal(final String text) {
    return new JobDataNodeLine(Splitter.on('|').omitEmptyStrings().splitToList(text).stream().map(JobDataNodeEntry::unmarshal).collect(Collectors.toList()));
}
// Two '|'-separated entries unmarshal into two logic tables in order.
@Test
void assertUnmarshal() {
    String expected = "t_order:ds_0.t_order_0,ds_0.t_order_1|t_order_item:ds_0.t_order_item_0,ds_0.t_order_item_1";
    JobDataNodeLine actual = JobDataNodeLine.unmarshal(expected);
    assertThat(actual.getEntries().size(), is(2));
    List<JobDataNodeEntry> entries = new ArrayList<>(actual.getEntries());
    assertThat(entries.get(0).getLogicTableName(), is("t_order"));
    assertThat(entries.get(1).getLogicTableName(), is("t_order_item"));
}
@VisibleForTesting static void selectHandler( final RoutingContext context, final Pattern skipPathsPattern, final boolean jaasAvailable, final boolean pluginAvailable ) { if (skipPathsPattern.matcher(context.normalizedPath()).matches()) { context.data().put(PROVIDER_KEY, Provider.SKIP); context.next(); return; } if (SystemAuthenticationHandler.isAuthenticatedAsSystemUser(context)) { context.data().put(PROVIDER_KEY, Provider.SYSTEM); context.next(); return; } final String authHeader = context.request().getHeader("Authorization"); if (jaasAvailable && authHeader != null && authHeader.toLowerCase().startsWith("basic ")) { context.data().put(PROVIDER_KEY, Provider.JAAS); } else if (pluginAvailable) { context.data().put(PROVIDER_KEY, Provider.PLUGIN); } else { // Fail the request as unauthorized - this will occur if no auth plugin but Jaas handler // is configured, but auth header is not basic auth context.fail( UNAUTHORIZED.code(), new KsqlApiException("Unauthorized", ERROR_CODE_UNAUTHORIZED)); } context.next(); }
// A path outside the skip pattern goes through the plugin provider, not SKIP.
@Test
public void shouldNotSelectSkipProviderForAuthorizedPaths() {
    // Given:
    Mockito.when(routingContext.normalizedPath()).thenReturn("/authorized_path");
    Map<String, Object> data = mock(Map.class);
    Mockito.when(routingContext.data()).thenReturn(data);
    HttpServerRequest request = mock(HttpServerRequest.class);
    Mockito.when(routingContext.request()).thenReturn(request);
    Pattern p = Pattern.compile("/unauthorized.*");
    // When:
    AuthHandlers.selectHandler(routingContext, p, false, true);
    // Then (make sure the authorization "work" is not skipped):
    Mockito.verify(data).put("provider", AuthHandlers.Provider.PLUGIN);
    Mockito.verify(routingContext).next();
}
/**
 * Validates a collection name against length, character, first-character and
 * reserved-prefix rules. The length limit applies to the fully qualified
 * {@code database.collection} name.
 *
 * @throws IllegalArgumentException on any rule violation
 */
static void checkValidCollectionName(String databaseName, String collectionName) {
    String fullCollectionName = databaseName + "." + collectionName;
    if (collectionName.length() < MIN_COLLECTION_NAME_LENGTH) {
        throw new IllegalArgumentException("Collection name cannot be empty.");
    }
    if (fullCollectionName.length() > MAX_COLLECTION_NAME_LENGTH) {
        throw new IllegalArgumentException(
            "Collection name "
                + fullCollectionName
                + " cannot be longer than "
                + MAX_COLLECTION_NAME_LENGTH
                + " characters, including the database name and dot.");
    }
    if (ILLEGAL_COLLECTION_CHARS.matcher(collectionName).find()) {
        throw new IllegalArgumentException(
            "Collection name "
                + collectionName
                + " is not a valid name. Only letters, numbers, hyphens, underscores and exclamation points are allowed.");
    }
    // Safe to index: the minimum-length check above guarantees a non-empty name.
    if (collectionName.charAt(0) != '_' && !Character.isLetter(collectionName.charAt(0))) {
        throw new IllegalArgumentException(
            "Collection name " + collectionName + " must start with a letter or an underscore.");
    }
    // Names in the reserved "system." namespace are not allowed.
    String illegalKeyword = "system.";
    if (collectionName.startsWith(illegalKeyword)) {
        throw new IllegalArgumentException(
            "Collection name "
                + collectionName
                + " cannot start with the prefix \""
                + illegalKeyword
                + "\".");
    }
}
// An empty collection name is rejected.
@Test
public void testCheckValidCollectionNameThrowsErrorWhenNameIsTooShort() {
    assertThrows(
        IllegalArgumentException.class, () -> checkValidCollectionName("test-database", ""));
}
/**
 * Copies a file server-side over SMB using a remote copy between two open
 * file handles, translating SMB failures into background exceptions and
 * always releasing the share.
 */
@Override
public Path copy(final Path source, final Path target, final TransferStatus status, final ConnectionCallback prompt, final StreamListener listener) throws BackgroundException {
    final SMBSession.DiskShareWrapper share = session.openShare(source);
    try {
        // Both handles are opened in one try-with-resources so they always close together.
        try (final File sourceFile = share.get().openFile(new SMBPathContainerService(session).getKey(source),
                new HashSet<>(Arrays.asList(AccessMask.FILE_READ_DATA, AccessMask.FILE_READ_ATTRIBUTES)),
                Collections.singleton(FileAttributes.FILE_ATTRIBUTE_NORMAL),
                Collections.singleton(SMB2ShareAccess.FILE_SHARE_READ),
                SMB2CreateDisposition.FILE_OPEN,
                Collections.singleton(SMB2CreateOptions.FILE_NON_DIRECTORY_FILE));
             // Overwrite when the target already exists, otherwise create it.
             final File targetFile = share.get().openFile(new SMBPathContainerService(session).getKey(target),
                Collections.singleton(AccessMask.MAXIMUM_ALLOWED),
                Collections.singleton(FileAttributes.FILE_ATTRIBUTE_NORMAL),
                Collections.singleton(SMB2ShareAccess.FILE_SHARE_READ),
                status.isExists() ? SMB2CreateDisposition.FILE_OVERWRITE : SMB2CreateDisposition.FILE_CREATE,
                Collections.singleton(SMB2CreateOptions.FILE_NON_DIRECTORY_FILE))) {
            // Server-side copy: the data never passes through this client.
            sourceFile.remoteCopyTo(targetFile);
        }
        listener.sent(status.getLength());
    }
    catch(IOException e) {
        throw new SMBTransportExceptionMappingService().map("Cannot copy {0}", e, source);
    }
    catch(SMBRuntimeException e) {
        throw new SMBExceptionMappingService().map("Cannot copy {0}", e, source);
    }
    catch(BufferException e) {
        throw new BackgroundException(e);
    }
    finally {
        // The share must be released even when the copy fails.
        session.releaseShare(share);
    }
    return target;
}
/**
 * Integration test: copying a file into another directory must produce a
 * distinct file (different attributes) while both source and copy remain
 * listable in their respective folders.
 */
@Test
public void testCopyFile() throws Exception {
    final Path home = new DefaultHomeFinderService(session).find();
    final Path file = new SMBTouchFeature(session).touch(new Path(home, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus());
    final PathAttributes attr = new SMBAttributesFinderFeature(session).find(file);
    final Path destinationFolder = new SMBDirectoryFeature(session).mkdir(
            new Path(home, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus());
    final Path copy = new Path(destinationFolder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final Path fileCopied = new SMBCopyFeature(session).copy(file, copy, new TransferStatus(), new DisabledConnectionCallback(), new DisabledStreamListener());
    // The copy is a new file, so its attributes must differ from the source's.
    assertNotEquals(attr, new SMBAttributesFinderFeature(session).find(fileCopied));
    ListService list = new SMBListService(session);
    assertTrue(list.list(home, new DisabledListProgressListener()).contains(file));
    assertTrue(list.list(destinationFolder, new DisabledListProgressListener()).contains(copy));
    // Cleanup of all created paths.
    new SMBDeleteFeature(session).delete(Arrays.asList(file, copy, destinationFolder), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Converts the given annotations into a DataMap representation.
 * The second argument presumably marks top-level processing — confirm
 * against annotationsToData.
 */
public static DataMap getAnnotationsMap(Annotation[] as) {
    return annotationsToData(as, true);
}
/**
 * An annotation with no members must map to an entry whose value is an
 * empty nested DataMap (not null, not absent).
 */
@Test(description = "Empty annotation: data map with annotation + no members")
public void succeedsOnRestSpecAnnotationWithoutMembers() {
    // Local class exists only to carry the member-less annotation.
    @SupportedEmpty
    class LocalClass {
    }
    final Annotation[] annotations = LocalClass.class.getAnnotations();
    final DataMap actual = ResourceModelAnnotation.getAnnotationsMap(annotations);
    Assert.assertNotNull(actual);
    Assert.assertTrue(actual.get(SUPPORTED_EMPTY) instanceof DataMap);
    final DataMap dataMap = ((DataMap) actual.get(SUPPORTED_EMPTY));
    Assert.assertTrue(dataMap.isEmpty());
}
/**
 * Creates an OAuth2 access token (and its paired refresh token) for the
 * given user and client, after validating the client against the cache.
 *
 * @param userId   id of the authenticated user
 * @param userType user type (e.g. admin / member)
 * @param clientId OAuth2 client identifier
 * @param scopes   granted scopes
 * @return the persisted access token
 */
@Override
@Transactional
public OAuth2AccessTokenDO createAccessToken(Long userId, Integer userType, String clientId, List<String> scopes) {
    OAuth2ClientDO clientDO = oauth2ClientService.validOAuthClientFromCache(clientId);
    // Create the refresh token first; the access token is derived from it.
    OAuth2RefreshTokenDO refreshTokenDO = createOAuth2RefreshToken(userId, userType, clientDO, scopes);
    // Create the access token.
    return createOAuth2AccessToken(refreshTokenDO, clientDO);
}
/**
 * createAccessToken must persist the access token to DB and Redis, embed
 * user info (nickname, deptId), and create a matching refresh token.
 */
@Test
public void testCreateAccessToken() {
    TenantContextHolder.setTenantId(0L);
    // Prepare parameters
    Long userId = randomLongId();
    Integer userType = UserTypeEnum.ADMIN.getValue();
    String clientId = randomString();
    List<String> scopes = Lists.newArrayList("read", "write");
    // Mock the client lookup
    OAuth2ClientDO clientDO = randomPojo(OAuth2ClientDO.class).setClientId(clientId)
            .setAccessTokenValiditySeconds(30).setRefreshTokenValiditySeconds(60);
    when(oauth2ClientService.validOAuthClientFromCache(eq(clientId))).thenReturn(clientDO);
    // Mock data (user)
    AdminUserDO user = randomPojo(AdminUserDO.class);
    when(adminUserService.getUser(userId)).thenReturn(user);
    // Invoke
    OAuth2AccessTokenDO accessTokenDO = oauth2TokenService.createAccessToken(userId, userType, clientId, scopes);
    // Assert the access token persisted in the database
    OAuth2AccessTokenDO dbAccessTokenDO = oauth2AccessTokenMapper.selectByAccessToken(accessTokenDO.getAccessToken());
    assertPojoEquals(accessTokenDO, dbAccessTokenDO, "createTime", "updateTime", "deleted");
    assertEquals(userId, accessTokenDO.getUserId());
    assertEquals(userType, accessTokenDO.getUserType());
    assertEquals(2, accessTokenDO.getUserInfo().size());
    assertEquals(user.getNickname(), accessTokenDO.getUserInfo().get("nickname"));
    assertEquals(user.getDeptId().toString(), accessTokenDO.getUserInfo().get("deptId"));
    assertEquals(clientId, accessTokenDO.getClientId());
    assertEquals(scopes, accessTokenDO.getScopes());
    assertFalse(DateUtils.isExpired(accessTokenDO.getExpiresTime()));
    // Assert the access token cached in Redis
    OAuth2AccessTokenDO redisAccessTokenDO = oauth2AccessTokenRedisDAO.get(accessTokenDO.getAccessToken());
    assertPojoEquals(accessTokenDO, redisAccessTokenDO, "createTime", "updateTime", "deleted");
    // Assert the refresh token
    OAuth2RefreshTokenDO refreshTokenDO = oauth2RefreshTokenMapper.selectList().get(0);
    assertPojoEquals(accessTokenDO, refreshTokenDO, "id", "expiresTime", "createTime", "updateTime", "deleted");
    assertFalse(DateUtils.isExpired(refreshTokenDO.getExpiresTime()));
}
/**
 * Encodes a flow Criterion to its JSON representation by delegating to the
 * per-criterion-type encode helper.
 */
@Override
public ObjectNode encode(Criterion criterion, CodecContext context) {
    EncodeCriterionCodecHelper encoder = new EncodeCriterionCodecHelper(criterion, context);
    return encoder.encode();
}
/** A VLAN-id criterion must round-trip through the JSON encoder. */
@Test
public void matchVlanIdTest() {
    final Criterion vlanCriterion = Criteria.matchVlanId(VlanId.ANY);
    final ObjectNode json = criterionCodec.encode(vlanCriterion, context);
    assertThat(json, matchesCriterion(vlanCriterion));
}
/**
 * CLI entry point: parses arguments and runs the FS-to-CS scheduler
 * configuration conversion, then exits via the injectable exitFunction
 * (replaceable in tests so System.exit is not called).
 */
public static void main(String[] args) {
    try {
        FSConfigToCSConfigArgumentHandler fsConfigConversionArgumentHandler = new FSConfigToCSConfigArgumentHandler();
        int exitCode = fsConfigConversionArgumentHandler.parseAndConvert(args);
        if (exitCode != 0) {
            // Details were already logged by the converter; add a fatal marker.
            LOG.error(FATAL, "Error while starting FS configuration conversion, " + "see previous error messages for details!");
        }
        exitFunction.accept(exitCode);
    } catch (Throwable t) {
        // Catch-all so unexpected failures still log and exit with -1.
        LOG.error(FATAL, "Error while starting FS configuration conversion!", t);
        exitFunction.accept(-1);
    }
}
/**
 * Conversion invoked with long-form CLI switches must print all three
 * converted artifacts (yarn-site.xml, capacity-scheduler.xml,
 * mapping-rules.json) to stdout and exit with code 0.
 */
@Test
public void testConvertFSConfigurationWithLongSwitches() throws IOException {
    setupFSConfigConversionFiles();
    FSConfigToCSConfigConverterMain.main(new String[] {
        "--print",
        "--rules-to-file",
        "--percentage",
        "--yarnsiteconfig", YARN_SITE_XML,
        "--fsconfig", FS_ALLOC_FILE,
        "--rulesconfig", CONVERSION_RULES_FILE});
    String stdout = converterTestCommons.getStdOutContent().toString();
    assertTrue("Stdout doesn't contain yarn-site.xml",
        stdout.contains("======= yarn-site.xml ======="));
    assertTrue("Stdout doesn't contain capacity-scheduler.xml",
        stdout.contains("======= capacity-scheduler.xml ======="));
    assertTrue("Stdout doesn't contain mapping-rules.json",
        stdout.contains("======= mapping-rules.json ======="));
    // exitFunc is the test double capturing the would-be System.exit code.
    assertEquals("Exit code", 0, exitFunc.exitCode);
}
/**
 * Adds a completed cron sample to the named timer, lazily creating the
 * timer (under timerLock) on first use.
 */
@Override
public void addCron(String group, String name, Instrumentation.Cron cron) {
    Timer timer = getToAdd(group, name, Timer.class, timerLock, timers);
    // Cast assumes this service only receives its own Cron implementation —
    // TODO confirm no foreign Cron instances reach this method.
    timer.addCron((Cron) cron);
}
/**
 * Exercises Timer/Cron accounting over a 2-slot averaging window:
 * three crons with growing active spans (100/200/300 ms) verify LAST_* and
 * AVG_* values, then the JSON serialization paths are checked against the
 * same four values. All timing asserts use a 20 ms tolerance for jitter.
 */
@Test
public void timer() throws Exception {
    InstrumentationService.Timer timer = new InstrumentationService.Timer(2);
    InstrumentationService.Cron cron = new InstrumentationService.Cron();
    long ownStart;
    long ownEnd;
    long totalStart;
    long totalEnd;
    long ownDelta;
    long totalDelta;
    long avgTotal;
    long avgOwn;
    // --- First cron: two 100 ms active spans separated by a 100 ms pause. ---
    cron.start();
    ownStart = Time.now();
    totalStart = ownStart;
    ownDelta = 0;
    sleep(100);
    cron.stop();
    ownEnd = Time.now();
    ownDelta += ownEnd - ownStart;
    sleep(100);
    cron.start();
    ownStart = Time.now();
    sleep(100);
    cron.stop();
    ownEnd = Time.now();
    ownDelta += ownEnd - ownStart;
    totalEnd = ownEnd;
    totalDelta = totalEnd - totalStart;
    avgTotal = totalDelta;
    avgOwn = ownDelta;
    timer.addCron(cron);
    long[] values = timer.getValues();
    assertEquals(values[InstrumentationService.Timer.LAST_TOTAL], totalDelta, 20);
    assertEquals(values[InstrumentationService.Timer.LAST_OWN], ownDelta, 20);
    assertEquals(values[InstrumentationService.Timer.AVG_TOTAL], avgTotal, 20);
    assertEquals(values[InstrumentationService.Timer.AVG_OWN], avgOwn, 20);
    // --- Second cron: 200 ms spans; averages now cover two samples. ---
    cron = new InstrumentationService.Cron();
    cron.start();
    ownStart = Time.now();
    totalStart = ownStart;
    ownDelta = 0;
    sleep(200);
    cron.stop();
    ownEnd = Time.now();
    ownDelta += ownEnd - ownStart;
    sleep(200);
    cron.start();
    ownStart = Time.now();
    sleep(200);
    cron.stop();
    ownEnd = Time.now();
    ownDelta += ownEnd - ownStart;
    totalEnd = ownEnd;
    totalDelta = totalEnd - totalStart;
    avgTotal = (avgTotal * 1 + totalDelta) / 2;
    avgOwn = (avgOwn * 1 + ownDelta) / 2;
    timer.addCron(cron);
    values = timer.getValues();
    assertEquals(values[InstrumentationService.Timer.LAST_TOTAL], totalDelta, 20);
    assertEquals(values[InstrumentationService.Timer.LAST_OWN], ownDelta, 20);
    assertEquals(values[InstrumentationService.Timer.AVG_TOTAL], avgTotal, 20);
    assertEquals(values[InstrumentationService.Timer.AVG_OWN], avgOwn, 20);
    // Window size is 2, so the oldest sample drops out of the next averages.
    avgTotal = totalDelta;
    avgOwn = ownDelta;
    // --- Third cron: 300 ms spans. ---
    cron = new InstrumentationService.Cron();
    cron.start();
    ownStart = Time.now();
    totalStart = ownStart;
    ownDelta = 0;
    sleep(300);
    cron.stop();
    ownEnd = Time.now();
    ownDelta += ownEnd - ownStart;
    sleep(300);
    cron.start();
    ownStart = Time.now();
    sleep(300);
    cron.stop();
    ownEnd = Time.now();
    ownDelta += ownEnd - ownStart;
    totalEnd = ownEnd;
    totalDelta = totalEnd - totalStart;
    avgTotal = (avgTotal * 1 + totalDelta) / 2;
    avgOwn = (avgOwn * 1 + ownDelta) / 2;
    // Second stop() on an already-stopped cron — presumably exercises stop()
    // idempotence; TODO confirm intent.
    cron.stop();
    timer.addCron(cron);
    values = timer.getValues();
    assertEquals(values[InstrumentationService.Timer.LAST_TOTAL], totalDelta, 20);
    assertEquals(values[InstrumentationService.Timer.LAST_OWN], ownDelta, 20);
    assertEquals(values[InstrumentationService.Timer.AVG_TOTAL], avgTotal, 20);
    assertEquals(values[InstrumentationService.Timer.AVG_OWN], avgOwn, 20);
    // JSON string form must expose exactly the same four values.
    JSONObject json = (JSONObject) new JSONParser().parse(timer.toJSONString());
    assertEquals(json.size(), 4);
    assertEquals(json.get("lastTotal"), values[InstrumentationService.Timer.LAST_TOTAL]);
    assertEquals(json.get("lastOwn"), values[InstrumentationService.Timer.LAST_OWN]);
    assertEquals(json.get("avgTotal"), values[InstrumentationService.Timer.AVG_TOTAL]);
    assertEquals(json.get("avgOwn"), values[InstrumentationService.Timer.AVG_OWN]);
    // Writer-based serialization must agree with the string form.
    StringWriter writer = new StringWriter();
    timer.writeJSONString(writer);
    writer.close();
    json = (JSONObject) new JSONParser().parse(writer.toString());
    assertEquals(json.size(), 4);
    assertEquals(json.get("lastTotal"), values[InstrumentationService.Timer.LAST_TOTAL]);
    assertEquals(json.get("lastOwn"), values[InstrumentationService.Timer.LAST_OWN]);
    assertEquals(json.get("avgTotal"), values[InstrumentationService.Timer.AVG_TOTAL]);
    assertEquals(json.get("avgOwn"), values[InstrumentationService.Timer.AVG_OWN]);
}
/**
 * Creates a port definition bound to the UDP protocol.
 *
 * @param port the numeric port
 * @return a new UDP Port
 */
public static Port udp(int port) {
    final Port udpPort = new Port(port, UDP_PROTOCOL);
    return udpPort;
}
/** A UDP port must expose its number and render as "port/udp". */
@Test
public void testUdp() {
    final int portNumber = 6666;
    final Port port = Port.udp(portNumber);
    Assert.assertEquals(portNumber, port.getPort());
    Assert.assertEquals(portNumber + "/udp", port.toString());
}
/**
 * Resolves implicit casts for the given expression against a target type.
 * Only decimal targets require rewriting; all other types pass through
 * unchanged.
 *
 * @param expression the expression to (possibly) cast
 * @param sqlType    the target SQL type
 * @return the original expression, or a decimal-cast form of it
 */
public static Expression resolve(final Expression expression, final SqlType sqlType) {
    if (!(sqlType instanceof SqlDecimal)) {
        return expression;
    }
    return resolveToDecimal(expression, (SqlDecimal) sqlType);
}
/**
 * Every numeric literal kind must resolve to a DecimalLiteral rescaled to
 * DECIMAL(5,2).
 */
@Test
public void shouldCastToDecimal() {
    // Given: literal -> expected value after casting to DECIMAL(5,2)
    final Map<Literal, BigDecimal> fromLiterals = ImmutableMap.of(
        new IntegerLiteral(5), new BigDecimal("5.00"),
        new LongLiteral(5), new BigDecimal("5.00"),
        new DoubleLiteral(5), new BigDecimal("5.00"),
        new DecimalLiteral(BigDecimal.TEN), new BigDecimal("10.00"),
        new DecimalLiteral(new BigDecimal("10.1")), new BigDecimal("10.10")
    );
    for (final Map.Entry<Literal, BigDecimal> entry : fromLiterals.entrySet()) {
        final Literal literal = entry.getKey();
        final BigDecimal expected = entry.getValue();
        // When
        final Expression expression = ImplicitlyCastResolver.resolve(literal, DECIMAL_5_2);
        // Then
        assertThat("Should cast " + literal.getClass().getSimpleName() + " to " + DECIMAL_5_2,
            expression, instanceOf(DecimalLiteral.class));
        assertThat("Should cast " + literal.getClass().getSimpleName() + " to " + DECIMAL_5_2,
            ((DecimalLiteral)expression).getValue(),
            is(expected)
        );
    }
}
/**
 * Computes numPartitions - 1 range-boundary keys from the given samples.
 *
 * The samples array is sorted in place; boundary candidates are then picked
 * at regular strides, linearly probing past duplicates so consecutive
 * boundaries are distinct. If the samples run out, trailing entries of the
 * returned array remain null.
 *
 * @param numPartitions target partition count
 * @param comparator    sort-order comparator for the keys
 * @param samples       sampled sort keys (mutated: sorted in place)
 * @return array of length numPartitions - 1 holding the chosen boundaries
 */
static SortKey[] rangeBounds(int numPartitions, Comparator<StructLike> comparator, SortKey[] samples) {
    // sort the keys first
    Arrays.sort(samples, comparator);
    int numCandidates = numPartitions - 1;
    SortKey[] candidates = new SortKey[numCandidates];
    // Stride between picks; ceil so every partition gets at least one sample.
    int step = (int) Math.ceil((double) samples.length / numPartitions);
    int position = step - 1;
    int numChosen = 0;
    while (position < samples.length && numChosen < numCandidates) {
        SortKey candidate = samples[position];
        // skip duplicate values
        if (numChosen > 0 && candidate.equals(candidates[numChosen - 1])) {
            // linear probe for the next distinct value
            position += 1;
        } else {
            candidates[numChosen] = candidate;
            position += step;
            numChosen += 1;
        }
    }
    return candidates;
}
/**
 * Six distinct samples split evenly over three partitions must yield
 * boundaries after every second key ("b" and "d").
 */
@Test
public void testRangeBoundsDivisible() {
    final SortKey[] sortedSamples = new SortKey[] {
        CHAR_KEYS.get("a"), CHAR_KEYS.get("b"), CHAR_KEYS.get("c"),
        CHAR_KEYS.get("d"), CHAR_KEYS.get("e"), CHAR_KEYS.get("f")
    };
    assertThat(SketchUtil.rangeBounds(3, SORT_ORDER_COMPARTOR, sortedSamples))
        .containsExactly(CHAR_KEYS.get("b"), CHAR_KEYS.get("d"));
}
/**
 * Factory for a table UDAF that sums lists of ints into a running int total.
 * Null input lists leave the aggregate unchanged; undo subtracts a list's
 * sum. Null elements inside a list are handled by the shared sum(...)
 * helper — confirm its null semantics there.
 */
@UdafFactory(description = "sum int values in a list into a single int")
public static TableUdaf<List<Integer>, Integer, Integer> sumIntList() {
    return new TableUdaf<List<Integer>, Integer, Integer>() {
        // Aggregation starts from zero.
        @Override
        public Integer initialize() {
            return 0;
        }
        @Override
        public Integer aggregate(final List<Integer> valueToAdd, final Integer aggregateValue) {
            // Null rows contribute nothing.
            if (valueToAdd == null) {
                return aggregateValue;
            }
            return aggregateValue + sumList(valueToAdd);
        }
        @Override
        public Integer merge(final Integer aggOne, final Integer aggTwo) {
            return aggOne + aggTwo;
        }
        @Override
        public Integer map(final Integer agg) {
            return agg;
        }
        @Override
        public Integer undo(final List<Integer> valueToUndo, final Integer aggregateValue) {
            // Undoing a null row is a no-op, mirroring aggregate().
            if (valueToUndo == null) {
                return aggregateValue;
            }
            return aggregateValue - sumList(valueToUndo);
        }
        // Sums the list elements starting from the initial value (0).
        private int sumList(final List<Integer> list) {
            return sum(list, initialize(), Integer::sum);
        }
    };
}
/**
 * Aggregating an empty list must leave the aggregate at zero.
 * Fix: assertThat takes (actual, matcher) — the arguments were previously
 * reversed as assertThat(0, equalTo(sum)).
 */
@Test
public void shouldSumEmpty() {
    final TableUdaf<List<Integer>, Integer, Integer> udaf = ListSumUdaf.sumIntList();
    final int sum = udaf.aggregate(Collections.emptyList(), 0);
    assertThat(sum, equalTo(0));
}
/**
 * Returns the analyzers registered for every analysis phase.
 */
public List<Analyzer> getAnalyzers() {
    return getAnalyzers(AnalysisPhase.values());
}
/**
 * Requesting analyzers for specific phases must return only analyzers
 * belonging to those phases.
 */
@Test
public void testGetAnalyzers_SpecificPhases() throws Exception {
    final AnalyzerService service =
        new AnalyzerService(Thread.currentThread().getContextClassLoader(), getSettings());
    final List<Analyzer> analyzers = service.getAnalyzers(INITIAL, FINAL);
    for (final Analyzer analyzer : analyzers) {
        if (analyzer.getAnalysisPhase() != INITIAL && analyzer.getAnalysisPhase() != FINAL) {
            fail("Only expecting analyzers for phases " + INITIAL + " and " + FINAL);
        }
    }
}
/**
 * Creates a new temp file with no name prefix or suffix; delegates to
 * {@link #newFile(String, String)} with nulls.
 */
@Override
public File newFile() {
    return newFile(null, null);
}
/**
 * newFile must wrap filesystem failures (here: an over-long file name) in
 * an IllegalStateException with a stable message.
 */
@Test
public void newFile_throws_ISE_if_name_is_not_valid() throws Exception {
    File rootTempFolder = temp.newFolder();
    DefaultTempFolder underTest = new DefaultTempFolder(rootTempFolder);
    // 'tooLong' is a shared fixture name exceeding the OS filename limit.
    assertThatThrownBy(() -> underTest.newFile(tooLong.toString(), ".txt"))
        .isInstanceOf(IllegalStateException.class)
        .hasMessage("Failed to create temp file");
}
/**
 * Evaluates a template against the API-level event by downcasting to the
 * concrete Event implementation.
 *
 * @throws IllegalStateException if the event is not the expected concrete type
 * @throws JsonProcessingException if template evaluation fails to serialize
 */
public static String evaluate(final co.elastic.logstash.api.Event event, final String template) throws JsonProcessingException {
    if (!(event instanceof Event)) {
        throw new IllegalStateException("Unknown event concrete class: " + event.getClass().getName());
    }
    return evaluate((Event) event, template);
}
/** A template without interpolation markers must come back unchanged. */
@Test
public void testCompletelyStaticTemplate() throws IOException {
    final Event event = getTestEvent();
    final String template = "/full/path/awesome";
    assertEquals(template, StringInterpolation.evaluate(event, template));
}
/**
 * RENAME with Redis Cluster cross-slot support.
 *
 * When source and target hash to the same slot, the standard RENAME is used.
 * Otherwise the rename is emulated as DUMP (source) + RESTORE (target,
 * preserving a non-negative TTL) followed by DEL of the source.
 *
 * NOTE(review): the source DEL runs in doOnSuccess, i.e. after the 'true'
 * response is emitted; and 'true' is emitted even when DUMP found no value
 * (the pipeline is simply empty before thenReturn) — confirm both match the
 * intended RENAME semantics.
 */
@Override
public Flux<BooleanResponse<RenameCommand>> rename(Publisher<RenameCommand> commands) {
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Key must not be null!");
        Assert.notNull(command.getNewName(), "New name must not be null!");
        byte[] keyBuf = toByteArray(command.getKey());
        byte[] newKeyBuf = toByteArray(command.getNewName());
        // Same slot: native RENAME is safe and atomic.
        if (executorService.getConnectionManager().calcSlot(keyBuf) == executorService.getConnectionManager().calcSlot(newKeyBuf)) {
            return super.rename(commands);
        }
        return read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.DUMP, keyBuf)
            .filter(Objects::nonNull)
            .zipWith(
                // Pair the dumped value with its TTL; absent/negative TTL -> 0
                // (RESTORE treats 0 as "no expiry").
                Mono.defer(() -> pTtl(command.getKey())
                    .filter(Objects::nonNull)
                    .map(ttl -> Math.max(0, ttl))
                    .switchIfEmpty(Mono.just(0L))
                )
            )
            .flatMap(valueAndTtl -> {
                return write(newKeyBuf, StringCodec.INSTANCE, RedisCommands.RESTORE, newKeyBuf, valueAndTtl.getT2(), valueAndTtl.getT1());
            })
            .thenReturn(new BooleanResponse<>(command, true))
            .doOnSuccess((ignored) -> del(command.getKey()));
    });
}
/**
 * Renaming must move the value (and a positive TTL when present) to the
 * target key, which — depending on the parameterized target slot — may live
 * in a different cluster slot than the source.
 */
@Test
public void testRename() {
    connection.stringCommands().set(originalKey, value).block();
    if (hasTtl) {
        connection.keyCommands().expire(originalKey, Duration.ofSeconds(1000)).block();
    }
    // Derive a target key in the desired (same or different) slot.
    Integer originalSlot = getSlotForKey(originalKey);
    newKey = getNewKeyForSlot(new String(originalKey.array()), getTargetSlot(originalSlot));
    Boolean response = connection.keyCommands().rename(originalKey, newKey).block();
    assertThat(response).isTrue();
    final ByteBuffer newKeyValue = connection.stringCommands().get(newKey).block();
    assertThat(newKeyValue).isEqualTo(value);
    // TTL must survive the rename; -1 means "no expiry" in the no-TTL case.
    if (hasTtl) {
        assertThat(connection.keyCommands().ttl(newKey).block()).isGreaterThan(0);
    } else {
        assertThat(connection.keyCommands().ttl(newKey).block()).isEqualTo(-1);
    }
}
@Override public BasicTypeDefine reconvert(Column column) { BasicTypeDefine.BasicTypeDefineBuilder builder = BasicTypeDefine.builder() .name(column.getName()) .nullable(column.isNullable()) .comment(column.getComment()) .defaultValue(column.getDefaultValue()); switch (column.getDataType().getSqlType()) { case BOOLEAN: builder.columnType(SQLSERVER_BIT); builder.dataType(SQLSERVER_BIT); break; case TINYINT: builder.columnType(SQLSERVER_TINYINT); builder.dataType(SQLSERVER_TINYINT); break; case SMALLINT: builder.columnType(SQLSERVER_SMALLINT); builder.dataType(SQLSERVER_SMALLINT); break; case INT: builder.columnType(SQLSERVER_INT); builder.dataType(SQLSERVER_INT); break; case BIGINT: builder.columnType(SQLSERVER_BIGINT); builder.dataType(SQLSERVER_BIGINT); break; case FLOAT: builder.columnType(SQLSERVER_REAL); builder.dataType(SQLSERVER_REAL); break; case DOUBLE: builder.columnType(SQLSERVER_FLOAT); builder.dataType(SQLSERVER_FLOAT); break; case DECIMAL: DecimalType decimalType = (DecimalType) column.getDataType(); long precision = decimalType.getPrecision(); int scale = decimalType.getScale(); if (precision <= 0) { precision = DEFAULT_PRECISION; scale = DEFAULT_SCALE; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which is precision less than 0, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), precision, scale); } else if (precision > MAX_PRECISION) { scale = (int) Math.max(0, scale - (precision - MAX_PRECISION)); precision = MAX_PRECISION; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which exceeds the maximum precision of {}, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), MAX_PRECISION, precision, scale); } if (scale < 0) { scale = 0; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which is scale less than 0, " + "it will be converted to 
decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), precision, scale); } else if (scale > MAX_SCALE) { scale = MAX_SCALE; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), MAX_SCALE, precision, scale); } builder.columnType(String.format("%s(%s,%s)", SQLSERVER_DECIMAL, precision, scale)); builder.dataType(SQLSERVER_DECIMAL); builder.precision(precision); builder.scale(scale); break; case STRING: if (column.getColumnLength() == null || column.getColumnLength() <= 0) { builder.columnType(MAX_NVARCHAR); builder.dataType(MAX_NVARCHAR); } else if (column.getColumnLength() <= MAX_NVARCHAR_LENGTH) { builder.columnType( String.format("%s(%s)", SQLSERVER_NVARCHAR, column.getColumnLength())); builder.dataType(SQLSERVER_NVARCHAR); builder.length(column.getColumnLength()); } else { builder.columnType(MAX_NVARCHAR); builder.dataType(MAX_NVARCHAR); builder.length(column.getColumnLength()); } break; case BYTES: if (column.getColumnLength() == null || column.getColumnLength() <= 0) { builder.columnType(MAX_VARBINARY); builder.dataType(SQLSERVER_VARBINARY); } else if (column.getColumnLength() <= MAX_BINARY_LENGTH) { builder.columnType( String.format("%s(%s)", SQLSERVER_VARBINARY, column.getColumnLength())); builder.dataType(SQLSERVER_VARBINARY); builder.length(column.getColumnLength()); } else { builder.columnType(MAX_VARBINARY); builder.dataType(SQLSERVER_VARBINARY); builder.length(column.getColumnLength()); } break; case DATE: builder.columnType(SQLSERVER_DATE); builder.dataType(SQLSERVER_DATE); break; case TIME: if (column.getScale() != null && column.getScale() > 0) { int timeScale = column.getScale(); if (timeScale > MAX_TIME_SCALE) { timeScale = MAX_TIME_SCALE; log.warn( "The time column {} type time({}) is out of range, " + "which exceeds the maximum scale of {}, " 
+ "it will be converted to time({})", column.getName(), column.getScale(), MAX_SCALE, timeScale); } builder.columnType(String.format("%s(%s)", SQLSERVER_TIME, timeScale)); builder.scale(timeScale); } else { builder.columnType(SQLSERVER_TIME); } builder.dataType(SQLSERVER_TIME); break; case TIMESTAMP: if (column.getScale() != null && column.getScale() > 0) { int timestampScale = column.getScale(); if (timestampScale > MAX_TIMESTAMP_SCALE) { timestampScale = MAX_TIMESTAMP_SCALE; log.warn( "The timestamp column {} type timestamp({}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be converted to timestamp({})", column.getName(), column.getScale(), MAX_TIMESTAMP_SCALE, timestampScale); } builder.columnType( String.format("%s(%s)", SQLSERVER_DATETIME2, timestampScale)); builder.scale(timestampScale); } else { builder.columnType(SQLSERVER_DATETIME2); } builder.dataType(SQLSERVER_DATETIME2); break; default: throw CommonError.convertToConnectorTypeError( DatabaseIdentifier.SQLSERVER, column.getDataType().getSqlType().name(), column.getName()); } return builder.build(); }
/** SeaTunnel FLOAT must reconvert to SQL Server REAL (column and data type). */
@Test
public void testReconvertFloat() {
    final Column column =
        PhysicalColumn.builder().name("test").dataType(BasicType.FLOAT_TYPE).build();
    final BasicTypeDefine typeDefine = SqlServerTypeConverter.INSTANCE.reconvert(column);
    final String expectedType = SqlServerTypeConverter.SQLSERVER_REAL;
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(expectedType, typeDefine.getColumnType());
    Assertions.assertEquals(expectedType, typeDefine.getDataType());
}
/**
 * Imports albums first, then photos, into SmugMug, with idempotent retries
 * handled by the executor (IO exceptions per item are swallowed and
 * recorded). The container is transmogrified up front so names/sizes meet
 * SmugMug constraints. A failure to obtain the SmugMug interface aborts the
 * whole import with an error result.
 */
@Override
public ImportResult importItem(
    UUID jobId,
    IdempotentImportExecutor idempotentExecutor,
    TokenSecretAuthData authData,
    PhotosContainerResource data)
    throws Exception {
    // Make the data smugmug compatible
    data.transmogrify(transmogrificationConfig);
    try {
        SmugMugInterface smugMugInterface = getOrCreateSmugMugInterface(authData);
        // Albums must exist before their photos are imported.
        for (PhotoAlbum album : data.getAlbums()) {
            idempotentExecutor.executeAndSwallowIOExceptions(
                album.getId(),
                album.getName(),
                () -> importSingleAlbum(jobId, album, smugMugInterface));
        }
        for (PhotoModel photo : data.getPhotos()) {
            idempotentExecutor.executeAndSwallowIOExceptions(
                photo.getIdempotentId(),
                photo.getTitle(),
                () -> importSinglePhoto(jobId, idempotentExecutor, photo, smugMugInterface));
        }
    } catch (IOException e) {
        monitor.severe(() -> "Error importing", e);
        return new ImportResult(e);
    }
    return ImportResult.OK;
}
/**
 * An album with an empty name must still be created — the importer is
 * expected to substitute a non-empty title (here "Untitled Album") before
 * calling SmugMug.
 */
@Test
public void importEmptyAlbumName() throws Exception{
    UUID jobId = UUID.randomUUID();
    PhotoAlbum photoAlbum = new PhotoAlbum("albumid", "", "albumDescription");
    PhotosContainerResource photosContainerResource =
        new PhotosContainerResource(Collections.singletonList(photoAlbum), ImmutableList.of());
    SmugMugAlbum smugMugAlbum = new SmugMugAlbum(
        "date", photoAlbum.getDescription(), "Untitled Album", "privacy", "albumUri1", "urlname", "weburi");
    SmugMugAlbumResponse mockAlbumResponse =
        new SmugMugAlbumResponse(smugMugAlbum.getUri(), "Locator", "LocatorType", smugMugAlbum);
    when(smugMugInterface.createAlbum(eq(smugMugAlbum.getName()))).thenReturn(mockAlbumResponse);
    // Run test
    SmugMugPhotosImporter importer = new SmugMugPhotosImporter(
        smugMugInterface, config, jobStore, new AppCredentials("key", "secret"),
        mock(ObjectMapper.class), monitor);
    ImportResult result = importer.importItem(
        jobId, EXECUTOR, new TokenSecretAuthData("token", "secret"), photosContainerResource);
    // Verify createAlbum was invoked (with the substituted, non-empty name).
    verify(smugMugInterface, atLeastOnce()).createAlbum(ArgumentCaptor.forClass(String.class).capture());
}
/** Returns the block-aligned Z coordinate of this location's position. */
public int getBlockZ() {
    final int blockZ = position.blockZ();
    return blockZ;
}
/**
 * getBlockZ must return the Z component the location was constructed with.
 */
@Test
public void testGetBlockZ() throws Exception {
    World world = mock(World.class);
    Location location = new Location(world, Vector3.at(0, 0, TEST_VALUE));
    assertEquals(TEST_VALUE, location.getBlockZ());
}
/**
 * Sets a complex (non-primitive) property on the wrapped object by locating
 * and invoking its setter via reflection. Missing setters and sanity-check
 * failures are reported as warnings; invocation failures as errors.
 *
 * Fixes: the error message previously concatenated {@code obj} twice
 * instead of naming the component being set, and the no-setter warning had
 * a typo ("Not setter").
 *
 * @param name            property name (camelCase) to set
 * @param complexProperty value to assign via the setter
 */
public void setComplexProperty(String name, Object complexProperty) {
    Method setter = aggregationAssessor.findSetterMethod(name);
    if (setter == null) {
        addWarn("No setter method for property [" + name + "] in " + obj.getClass().getName());
        return;
    }
    Class<?>[] paramTypes = setter.getParameterTypes();
    // Type/arity checks; failures are reported inside the helper.
    if (!isSanityCheckSuccessful(name, setter, paramTypes, complexProperty)) {
        return;
    }
    try {
        invokeMethodWithSingleParameterOnThisObject(setter, complexProperty);
    } catch (Exception e) {
        addError("Could not set component " + complexProperty + " for parent component " + obj, e);
    }
}
/** The camelCase property name must resolve to setSwimmingPool(...). */
@Test
public void testSetComplexWithCamelCaseName() {
    final SwimmingPool newPool = new SwimmingPoolImpl();
    setter.setComplexProperty("swimmingPool", newPool);
    assertEquals(newPool, house.getSwimmingPool());
}
/**
 * Matches the final path component of the item against the compiled glob,
 * lower-casing first when the filter is case-insensitive.
 *
 * @return PASS when the name matches the glob, FAIL otherwise
 */
@Override
public Result apply(PathData item, int depth) throws IOException {
    String fileName = getPath(item).getName();
    if (!caseSensitive) {
        fileName = StringUtils.toLowerCase(fileName);
    }
    return globPattern.matches(fileName) ? Result.PASS : Result.FAIL;
}
/** A path whose final component equals the configured glob must PASS. */
@Test
public void applyMatch() throws IOException {
    setup("name");
    final PathData matchingItem = new PathData("/directory/path/name", mockFs.getConf());
    assertEquals(Result.PASS, name.apply(matchingItem, -1));
}
/**
 * Fetches a notify template by its primary key; returns {@code null} when
 * no row exists (MyBatis-Plus selectById semantics).
 */
@Override
public NotifyTemplateDO getNotifyTemplate(Long id) {
    return notifyTemplateMapper.selectById(id);
}
/**
 * getNotifyTemplate must return the persisted row, field-for-field.
 */
@Test
public void testGetNotifyTemplate() {
    // Mock data
    NotifyTemplateDO dbNotifyTemplate = randomPojo(NotifyTemplateDO.class);
    notifyTemplateMapper.insert(dbNotifyTemplate);
    // Prepare parameters
    Long id = dbNotifyTemplate.getId();
    // Invoke
    NotifyTemplateDO notifyTemplate = notifyTemplateService.getNotifyTemplate(id);
    // Assert
    assertPojoEquals(dbNotifyTemplate, notifyTemplate);
}
/**
 * Returns the delay before the next retry attempt.
 *
 * The first attempt (reconsumeTimes == 0) maps to the third entry of the
 * delay table; the index is clamped to the last entry for large retry
 * counts, and negative retry counts are treated as zero.
 */
@Override
public long nextDelayDuration(int reconsumeTimes) {
    final int attempts = Math.max(reconsumeTimes, 0);
    final int index = Math.min(attempts + 2, next.length - 1);
    return next[index];
}
/**
 * First retry must wait 10 seconds; a high retry count must cap at the last
 * table entry (9 minutes). Expected values depend on CustomizedRetryPolicy's
 * default delay table — confirm there if this fails.
 */
@Test
public void testNextDelayDuration() {
    CustomizedRetryPolicy customizedRetryPolicy = new CustomizedRetryPolicy();
    long actual = customizedRetryPolicy.nextDelayDuration(0);
    assertThat(actual).isEqualTo(TimeUnit.SECONDS.toMillis(10));
    actual = customizedRetryPolicy.nextDelayDuration(10);
    assertThat(actual).isEqualTo(TimeUnit.MINUTES.toMillis(9));
}
/**
 * Translates a Pegasus DataSchema to Avro schema JSON using default
 * translation options.
 */
public static String dataToAvroSchemaJson(DataSchema dataSchema) {
    return dataToAvroSchemaJson(dataSchema, new DataToAvroSchemaTranslationOptions());
}
/**
 * Data-driven test: in DO_NOT_TRANSLATE mode, Pegasus default fields must
 * translate to the expected Avro schema, both with and without a wrapping
 * typeref (##T_START/##T_END markers expand into both variants). The
 * resulting Avro text must also parse cleanly under avro-union validation.
 */
@Test(dataProvider = "pegasusDefaultToAvroOptionalSchemaTranslationProvider",
    description = "Test schemaTranslator for default fields to optional fields translation, in different schema translation modes")
public void testPegasusDefaultToAvroOptionalSchemaTranslation(String... testSchemaTextAndExpected)
    throws IOException {
    String schemaText = null;
    String expectedAvroSchema = null;
    DataMap resultAvroDataMap = null;
    DataMap expectedAvroDataMap = null;
    schemaText = testSchemaTextAndExpected[0];
    expectedAvroSchema = testSchemaTextAndExpected[1];
    List<String> schemaTextForTesting = null;
    // Expand the ##T_START/##T_END markers into a plain and a typeref variant.
    if (schemaText.contains("##T_START")) {
        String noTyperefSchemaText = schemaText.replace("##T_START", "").replace("##T_END", "");
        String typerefSchemaText = schemaText
            .replace("##T_START", "{ \"type\" : \"typeref\", \"name\" : \"Ref\", \"ref\" : ")
            .replace("##T_END", "}");
        schemaTextForTesting = Arrays.asList(noTyperefSchemaText, typerefSchemaText);
    } else {
        schemaTextForTesting = Arrays.asList(schemaText);
    }
    for (String schemaStringText: schemaTextForTesting) {
        DataSchema schema = TestUtil.dataSchemaFromString(schemaStringText);
        String avroTextFromSchema = null;
        avroTextFromSchema = SchemaTranslator.dataToAvroSchemaJson(
            schema,
            new DataToAvroSchemaTranslationOptions(PegasusToAvroDefaultFieldTranslationMode.DO_NOT_TRANSLATE)
        );
        // Compare as DataMaps so field ordering in the JSON is irrelevant.
        resultAvroDataMap = TestUtil.dataMapFromString(avroTextFromSchema);
        expectedAvroDataMap = TestUtil.dataMapFromString(expectedAvroSchema);
        assertEquals(resultAvroDataMap, expectedAvroDataMap);
        // Test avro Schema
        Schema avroSchema = AvroCompatibilityHelper.parse(avroTextFromSchema);
        // Test validation parsing
        SchemaParser parser = new SchemaParser();
        ValidationOptions options = new ValidationOptions();
        options.setAvroUnionMode(true);
        parser.setValidationOptions(options);
        parser.parse(avroTextFromSchema);
        assertFalse(parser.hasError(), parser.errorMessage());
    }
}
/**
 * Updates an admin user (never the password), refreshes the user's post
 * assignments, and records an operation-log diff of old vs. new state.
 */
@Override
@Transactional(rollbackFor = Exception.class)
@LogRecord(type = SYSTEM_USER_TYPE, subType = SYSTEM_USER_UPDATE_SUB_TYPE, bizNo = "{{#updateReqVO.id}}",
        success = SYSTEM_USER_UPDATE_SUCCESS)
public void updateUser(UserSaveReqVO updateReqVO) {
    updateReqVO.setPassword(null); // Special case: the password is never updated here
    // 1. Validate correctness (username/mobile/email uniqueness, dept, posts)
    AdminUserDO oldUser = validateUserForCreateOrUpdate(updateReqVO.getId(), updateReqVO.getUsername(),
            updateReqVO.getMobile(), updateReqVO.getEmail(), updateReqVO.getDeptId(), updateReqVO.getPostIds());
    // 2.1 Update the user record
    AdminUserDO updateObj = BeanUtils.toBean(updateReqVO, AdminUserDO.class);
    userMapper.updateById(updateObj);
    // 2.2 Update post assignments
    updateUserPost(updateReqVO, updateObj);
    // 3. Record operation-log context (old state for the diff)
    LogRecordContext.putVariable(DiffParseFunction.OLD_OBJECT, BeanUtils.toBean(oldUser, UserSaveReqVO.class));
    LogRecordContext.putVariable("user", oldUser);
}
/**
 * updateUser must persist all fields except the password and replace the
 * user's post assignments with the requested set.
 */
@Test
public void testUpdateUser_success() {
    // Mock data: existing user with posts 1 and 2
    AdminUserDO dbUser = randomAdminUserDO(o -> o.setPostIds(asSet(1L, 2L)));
    userMapper.insert(dbUser);
    userPostMapper.insert(new UserPostDO().setUserId(dbUser.getId()).setPostId(1L));
    userPostMapper.insert(new UserPostDO().setUserId(dbUser.getId()).setPostId(2L));
    // Prepare parameters: change posts to {2, 3}
    UserSaveReqVO reqVO = randomPojo(UserSaveReqVO.class, o -> {
        o.setId(dbUser.getId());
        o.setSex(RandomUtil.randomEle(SexEnum.values()).getSex());
        o.setMobile(randomString());
        o.setPostIds(asSet(2L, 3L));
    });
    // Mock deptService
    DeptDO dept = randomPojo(DeptDO.class, o -> {
        o.setId(reqVO.getDeptId());
        o.setStatus(CommonStatusEnum.ENABLE.getStatus());
    });
    when(deptService.getDept(eq(dept.getId()))).thenReturn(dept);
    // Mock postService
    List<PostDO> posts = CollectionUtils.convertList(reqVO.getPostIds(),
        postId -> randomPojo(PostDO.class, o -> {
            o.setId(postId);
            o.setStatus(CommonStatusEnum.ENABLE.getStatus());
        }));
    when(postService.getPostList(eq(reqVO.getPostIds()), isNull())).thenReturn(posts);
    // Invoke
    userService.updateUser(reqVO);
    // Assert: all fields updated except password
    AdminUserDO user = userMapper.selectById(reqVO.getId());
    assertPojoEquals(reqVO, user, "password");
    // Assert: post assignments replaced
    List<UserPostDO> userPosts = userPostMapper.selectListByUserId(user.getId());
    assertEquals(2L, userPosts.get(0).getPostId());
    assertEquals(3L, userPosts.get(1).getPostId());
}
/**
 * Adapts an Iterator to a Traverser. The traverser returns null once the
 * iterator is exhausted, and fails fast with a NullPointerException if the
 * iterator itself yields a null item.
 */
@Nonnull
public static <T> Traverser<T> traverseIterator(@Nonnull Iterator<? extends T> iterator) {
    return () -> {
        if (!iterator.hasNext()) {
            return null;
        }
        return requireNonNull(iterator.next(), "Iterator returned a null item");
    };
}
/**
 * A null item produced by the underlying iterator must fail fast with NPE.
 */
@Test(expected = NullPointerException.class)
public void when_traverseIteratorWithNull_then_failure() {
    Traverser<Integer> trav = traverseIterator(asList(1, null).iterator());
    trav.next(); // returns 1
    trav.next(); // hits the null item -> NullPointerException
}
/**
 * Extracts candidate mod/package keywords from a Minecraft crash report's
 * stack trace.
 *
 * For each stack line, the package components of the frame's fully
 * qualified method (excluding the last two segments — presumably the class
 * and method names; confirm against STACK_TRACE_LINE_PATTERN) are collected,
 * minus blacklisted common package names. Additionally, "xf:" tokens from
 * per-line module annotations contribute their second segment.
 *
 * @param crashReport raw crash report text
 * @return distinct keyword set (empty if no stack trace section is found)
 */
public static Set<String> findKeywordsFromCrashReport(String crashReport) {
    Matcher matcher = CRASH_REPORT_STACK_TRACE_PATTERN.matcher(crashReport);
    Set<String> result = new HashSet<>();
    if (matcher.find()) {
        for (String line : matcher.group("stacktrace").split("\\n")) {
            Matcher lineMatcher = STACK_TRACE_LINE_PATTERN.matcher(line);
            if (lineMatcher.find()) {
                String[] method = lineMatcher.group("method").split("\\.");
                // Skip the final two segments of the qualified name.
                for (int i = 0; i < method.length - 2; i++) {
                    if (PACKAGE_KEYWORD_BLACK_LIST.contains(method[i])) {
                        continue;
                    }
                    result.add(method[i]);
                }
                // Module annotation tokens like "xf:modid:..." also yield keywords.
                Matcher moduleMatcher = STACK_TRACE_LINE_MODULE_PATTERN.matcher(line);
                if (moduleMatcher.find()) {
                    for (String module : moduleMatcher.group("tokens").split(",")) {
                        String[] split = module.split(":");
                        if (split.length >= 2 && "xf".equals(split[0])) {
                            if (PACKAGE_KEYWORD_BLACK_LIST.contains(split[1])) {
                                continue;
                            }
                            result.add(split[1]);
                        }
                    }
                }
            }
        }
    }
    return result;
}
// A NetEase-launcher crash log should surface exactly the "netease" and
// "battergaming" keywords from its stack trace.
@Test
public void netease() throws IOException {
    assertEquals(
        new HashSet<>(Arrays.asList("netease", "battergaming")),
        CrashReportAnalyzer.findKeywordsFromCrashReport(loadLog("/crash-report/mod/netease.txt")));
}
/**
 * Promotes pending reindexing entries to ready once config has converged.
 *
 * <p>A pending (cluster, document type) entry becomes ready when the oldest
 * config generation has reached the entry's required generation. The generation
 * supplier is deliberately invoked inside the loop so it is never called when
 * there are no pending entries (the unit test relies on this).
 *
 * @param reindexing       current reindexing state
 * @param oldestGeneration supplier of the oldest config generation across nodes
 * @param now              timestamp recorded for newly ready entries
 * @return the updated reindexing state
 */
static ApplicationReindexing withNewReady(ApplicationReindexing reindexing, Supplier<Long> oldestGeneration, Instant now) {
    // Config convergence means reindexing of detected reindex actions may begin.
    for (var cluster : reindexing.clusters().entrySet())
        for (var pending : cluster.getValue().pending().entrySet())
            if (pending.getValue() <= oldestGeneration.get()) {
                reindexing = reindexing.withReady(cluster.getKey(), pending.getKey(), now, SPEED, CAUSE)
                                       .withoutPending(cluster.getKey(), pending.getKey());
            }
    return reindexing;
}
@Test
public void testReadyComputation() {
    ApplicationReindexing reindexing = ApplicationReindexing.empty()
        .withPending("one", "a", 10)
        .withPending("two", "b", 20)
        .withReady("one", "a", Instant.ofEpochMilli(3), SPEED, CAUSE)
        .withReady("two", "b", Instant.ofEpochMilli(2), SPEED, CAUSE)
        .withReady("two", "c", Instant.ofEpochMilli(3), SPEED, CAUSE);
    // Nothing happens without convergence.
    assertEquals(reindexing, withNewReady(reindexing, () -> -1L, Instant.EPOCH));
    Instant later = Instant.ofEpochMilli(3 << 10);
    // At generation 19 only (one, a) has converged: no longer pending, now
    // ready; (two, b) still needs generation 20 and is untouched.
    assertEquals(reindexing.withoutPending("one", "a").withReady("one", "a", later, SPEED, CAUSE),
                 withNewReady(reindexing, () -> 19L, later));
    // At generation 20 both (one, a) and (two, b) converge and become ready.
    assertEquals(reindexing.withoutPending("one", "a").withReady("one", "a", later, SPEED, CAUSE)
                           .withoutPending("two", "b").withReady("two", "b", later, SPEED, CAUSE),
                 withNewReady(reindexing, () -> 20L, later));
    // Verify generation supplier isn't called when no pending document types.
    withNewReady(reindexing.withoutPending("one", "a").withoutPending("two", "b"),
                 () -> { throw new AssertionError("not supposed to run"); },
                 later);
}
/**
 * Maps a lineage DTO to its API model; the DTO's {@code dataEntityDto} becomes
 * the lineage's root node. Implementation is generated by MapStruct.
 */
@Mapping(target = "root", source = "dataEntityDto")
public abstract DataEntityLineage mapLineageDto(final DataEntityLineageDto dataEntityLineageDto);
// Mapping a lineage DTO must carry over the root id, the downstream node ids,
// each node's group memberships, and valid edge endpoints.
@Test
void mapLineageDto() {
    final var dto = DataEntityLineageDto.builder()
        .dataEntityDto(EASY_RANDOM.nextObject(DataEntityDimensionsDto.class))
        .downstream(generateStreamDto())
        .build();
    final var result = mapper.mapLineageDto(dto);
    // Root id comes from the dataEntityDto.
    assertThat(result.getRoot().getId()).isEqualTo(dto.getDataEntityDto().getDataEntity().getId());
    // Downstream node ids mirror the DTO's node entities, in order.
    assertThat(
        result.getDownstream().getNodes().stream().map(DataEntityLineageNode::getId).collect(Collectors.toList()))
        .isEqualTo(dto.getDownstream().nodes().stream().map(node -> node.entity().getDataEntity().getId()).collect(
            Collectors.toList()));
    // All nodes reference the single generated group.
    assertThat(
        result.getDownstream().getNodes().stream().flatMap(i -> i.getGroupIdList().stream()).distinct().collect(
            Collectors.toList()))
        .isEqualTo(
            List.of(dto.getDownstream().groups().stream().findFirst().orElseThrow().getDataEntity().getId()));
    // Edge sources must point at known node ids.
    assertThat(result.getDownstream().getEdges().stream().findFirst().map(DataEntityLineageEdge::getSourceId)
        .orElseThrow())
        .isIn(result.getDownstream().getNodes().stream().map(DataEntityLineageNode::getId)
            .collect(Collectors.toList()));
}
@Override public Long sendSingleSms(String mobile, Long userId, Integer userType, String templateCode, Map<String, Object> templateParams) { // 校验短信模板是否合法 SmsTemplateDO template = validateSmsTemplate(templateCode); // 校验短信渠道是否合法 SmsChannelDO smsChannel = validateSmsChannel(template.getChannelId()); // 校验手机号码是否存在 mobile = validateMobile(mobile); // 构建有序的模板参数。为什么放在这个位置,是提前保证模板参数的正确性,而不是到了插入发送日志 List<KeyValue<String, Object>> newTemplateParams = buildTemplateParams(template, templateParams); // 创建发送日志。如果模板被禁用,则不发送短信,只记录日志 Boolean isSend = CommonStatusEnum.ENABLE.getStatus().equals(template.getStatus()) && CommonStatusEnum.ENABLE.getStatus().equals(smsChannel.getStatus()); String content = smsTemplateService.formatSmsTemplateContent(template.getContent(), templateParams); Long sendLogId = smsLogService.createSmsLog(mobile, userId, userType, isSend, template, content, templateParams); // 发送 MQ 消息,异步执行发送短信 if (isSend) { smsProducer.sendSmsSendMessage(sendLogId, mobile, template.getChannelId(), template.getApiTemplateId(), newTemplateParams); } return sendLogId; }
// With both template and channel enabled, a log must be created with
// isSend=true and the async send message published with ordered parameters.
@Test
public void testSendSingleSms_successWhenSmsTemplateEnable() {
    // Prepare arguments.
    String mobile = randomString();
    Long userId = randomLongId();
    Integer userType = randomEle(UserTypeEnum.values()).getValue();
    String templateCode = randomString();
    Map<String, Object> templateParams = MapUtil.<String, Object>builder().put("code", "1234")
            .put("op", "login").build();
    // Mock SmsTemplateService: an enabled template with two parameters.
    SmsTemplateDO template = randomPojo(SmsTemplateDO.class, o -> {
        o.setStatus(CommonStatusEnum.ENABLE.getStatus());
        o.setContent("验证码为{code}, 操作为{op}");
        o.setParams(Lists.newArrayList("code", "op"));
    });
    when(smsTemplateService.getSmsTemplateByCodeFromCache(eq(templateCode))).thenReturn(template);
    String content = randomString();
    when(smsTemplateService.formatSmsTemplateContent(eq(template.getContent()), eq(templateParams)))
            .thenReturn(content);
    // Mock SmsChannelService: an enabled channel.
    SmsChannelDO smsChannel = randomPojo(SmsChannelDO.class, o -> o.setStatus(CommonStatusEnum.ENABLE.getStatus()));
    when(smsChannelService.getSmsChannel(eq(template.getChannelId()))).thenReturn(smsChannel);
    // Mock SmsLogService: return a known log id.
    Long smsLogId = randomLongId();
    when(smsLogService.createSmsLog(eq(mobile), eq(userId), eq(userType), eq(Boolean.TRUE), eq(template),
            eq(content), eq(templateParams))).thenReturn(smsLogId);
    // Call the method under test.
    Long resultSmsLogId = smsSendService.sendSingleSms(mobile, userId, userType, templateCode, templateParams);
    // Assert the returned log id.
    assertEquals(smsLogId, resultSmsLogId);
    // Assert the MQ message was published with the ordered template parameters.
    verify(smsProducer).sendSmsSendMessage(eq(smsLogId), eq(mobile),
            eq(template.getChannelId()), eq(template.getApiTemplateId()),
            eq(Lists.newArrayList(new KeyValue<>("code", "1234"), new KeyValue<>("op", "login"))));
}
/**
 * Maps a traffic treatment to a {@code PiAction} for the given fabric pipeline
 * table, dispatching to the treatment interpreter specific to the table's
 * control block (forwarding, pre-next, ACL, next, or egress-next).
 *
 * @throws PiInterpreterException if the table belongs to none of the supported
 *         control blocks
 */
@Override
public PiAction mapTreatment(TrafficTreatment treatment, PiTableId piTableId)
        throws PiInterpreterException {
    if (FORWARDING_CTRL_TBLS.contains(piTableId)) {
        return treatmentInterpreter.mapForwardingTreatment(treatment, piTableId);
    } else if (PRE_NEXT_CTRL_TBLS.contains(piTableId)) {
        return treatmentInterpreter.mapPreNextTreatment(treatment, piTableId);
    } else if (ACL_CTRL_TBLS.contains(piTableId)) {
        return treatmentInterpreter.mapAclTreatment(treatment, piTableId);
    } else if (NEXT_CTRL_TBLS.contains(piTableId)) {
        return treatmentInterpreter.mapNextTreatment(treatment, piTableId);
    } else if (E_NEXT_CTRL_TBLS.contains(piTableId)) {
        return treatmentInterpreter.mapEgressNextTreatment(treatment, piTableId);
    } else {
        throw new PiInterpreterException(format(
                "Treatment mapping not supported for table '%s'", piTableId));
    }
}
// An empty treatment on the ACL table must map to the nop_acl action.
@Test
public void testAclTreatmentEmpty() throws Exception {
    TrafficTreatment treatment = DefaultTrafficTreatment.emptyTreatment();
    PiAction mappedAction = interpreter.mapTreatment(
        treatment, FabricConstants.FABRIC_INGRESS_ACL_ACL);
    PiAction expectedAction = PiAction.builder()
        .withId(FabricConstants.FABRIC_INGRESS_ACL_NOP_ACL)
        .build();
    assertEquals(expectedAction, mappedAction);
}
/**
 * Lists remote files for the table; delegates to the overload that accepts a
 * {@code TableReadSessionBuilder} so tests can inject a mock scan builder.
 */
@Override
public List<RemoteFileInfo> getRemoteFiles(Table table, GetRemoteFilesParams params) {
    // add scanBuilder param for mock
    return getRemoteFiles(table, params, new TableReadSessionBuilder());
}
// Fetching remote files for one partition key must yield exactly one file info.
@Test
public void testGetRemoteFiles() throws AnalysisException, IOException {
    Table odpsTable = odpsMetadata.getTable("project", "tableName");
    PartitionKey partitionKey =
        PartitionKey.createPartitionKey(ImmutableList.of(new PartitionValue("a"), new PartitionValue("b")),
            odpsTable.getPartitionColumns());
    GetRemoteFilesParams params =
        GetRemoteFilesParams.newBuilder().setFieldNames(odpsTable.getPartitionColumnNames())
            .setPartitionKeys(ImmutableList.of(partitionKey)).build();
    List<RemoteFileInfo> remoteFileInfos = odpsMetadata.getRemoteFiles(odpsTable, params, mockTableReadSessionBuilder);
    Assert.assertEquals(1, remoteFileInfos.size());
}
/**
 * Decides whether the SELECT involving single tables must go through SQL
 * federation.
 *
 * <p>Returns {@code true} (federation required) when a view is involved, when
 * data nodes from other deciders are already present and the join is not a
 * plain inner comma join, or when the single tables do not all reside on the
 * same compute node. Note the ordering: the same-compute-node check runs
 * against {@code includedDataNodes} as gathered so far, and only afterwards are
 * this decider's own data nodes added to the shared collection.
 */
@Override
public boolean decide(final SelectStatementContext selectStatementContext, final List<Object> parameters,
                      final RuleMetaData globalRuleMetaData, final ShardingSphereDatabase database,
                      final SingleRule rule, final Collection<DataNode> includedDataNodes) {
    Collection<QualifiedTable> singleTables = getSingleTables(selectStatementContext, database, rule);
    if (singleTables.isEmpty()) {
        return false;
    }
    if (containsView(database, singleTables)) {
        return true;
    }
    if (!includedDataNodes.isEmpty() && !isInnerCommaJoin(selectStatementContext.getSqlStatement())) {
        return true;
    }
    boolean result = rule.isAllTablesInSameComputeNode(includedDataNodes, singleTables);
    includedDataNodes.addAll(getTableDataNodes(rule, singleTables));
    return !result;
}
// When all single tables share one compute node, federation is not needed, but
// the decider must still contribute the tables' data nodes to the collection.
@Test
void assertDecideWhenAllSingleTablesInSameComputeNode() {
    Collection<QualifiedTable> qualifiedTables = Arrays.asList(new QualifiedTable(DefaultDatabase.LOGIC_NAME, "t_order"),
        new QualifiedTable(DefaultDatabase.LOGIC_NAME, "t_order_item"));
    SingleRule rule = createSingleRule(qualifiedTables);
    SelectStatementContext select = createStatementContext();
    Collection<DataNode> includedDataNodes = new HashSet<>();
    when(rule.isAllTablesInSameComputeNode(includedDataNodes, qualifiedTables)).thenReturn(true);
    assertFalse(new SingleSQLFederationDecider().decide(select, Collections.emptyList(),
        mock(RuleMetaData.class), createDatabase(), rule, includedDataNodes));
    assertThat(includedDataNodes.size(), is(2));
}
/**
 * Builds a generic key/value record for an INSERT from the supplied column
 * names and value expressions.
 *
 * <p>When {@code columnNames} is empty, the schema's implicit column order is
 * used. Pseudo columns are resolvable (e.g. ROWTIME supplies the record
 * timestamp) but disallowed columns are rejected. For tables, every primary key
 * column must be given a value.
 *
 * @throws KsqlException on arity mismatch, unknown or disallowed columns, or a
 *         missing table primary-key value
 */
public KsqlGenericRecord build(
    final List<ColumnName> columnNames,
    final List<Expression> expressions,
    final LogicalSchema schema,
    final DataSourceType dataSourceType
) {
  // Empty column list means "all columns in implicit order".
  final List<ColumnName> columns = columnNames.isEmpty()
      ? implicitColumns(schema)
      : columnNames;
  if (columns.size() != expressions.size()) {
    // NOTE(review): the message prints the raw columnNames, which is empty when
    // implicit columns were resolved — arguably it should print `columns`.
    throw new KsqlException(
        "Expected a value for each column."
            + " Expected Columns: " + columnNames
            + ". Got " + expressions);
  }
  final LogicalSchema schemaWithPseudoColumns = withPseudoColumns(schema);
  // Validate every target column exists and is allowed for INSERT VALUES.
  for (ColumnName col : columns) {
    if (!schemaWithPseudoColumns.findColumn(col).isPresent()) {
      throw new KsqlException("Column name " + col + " does not exist.");
    }
    if (SystemColumns.isDisallowedForInsertValues(col)) {
      throw new KsqlException("Inserting into column " + col + " is not allowed.");
    }
  }
  final Map<ColumnName, Object> values = resolveValues(
      columns, expressions, schemaWithPseudoColumns, functionRegistry, config
  );
  // Tables require a value for every primary key column.
  if (dataSourceType == DataSourceType.KTABLE) {
    final String noValue = schemaWithPseudoColumns.key().stream()
        .map(Column::name)
        .filter(colName -> !values.containsKey(colName))
        .map(ColumnName::text)
        .collect(Collectors.joining(", "));
    if (!noValue.isEmpty()) {
      throw new KsqlException("Value for primary key column(s) "
          + noValue + " is required for tables");
    }
  }
  // ROWTIME, when provided, overrides the clock-supplied timestamp.
  final long ts = (long) values.getOrDefault(SystemColumns.ROWTIME_NAME, clock.getAsLong());
  final GenericKey key = buildKey(schema, values);
  final GenericRow value = buildValue(schema, values);
  return KsqlGenericRecord.of(key, value, ts);
}
// Supplying only a subset of columns must leave the unspecified value columns
// null; the timestamp falls back to the (fixed) clock value of 0.
@Test
public void shouldBuildPartialColumns() {
    // Given:
    final LogicalSchema schema = LogicalSchema.builder()
        .keyColumn(KEY, SqlTypes.STRING)
        .valueColumn(COL0, SqlTypes.STRING)
        .valueColumn(COL1, SqlTypes.STRING)
        .build();
    final List<ColumnName> names = ImmutableList.of(KEY, COL0);
    final Expression exp = new StringLiteral("a");
    // When:
    final KsqlGenericRecord record = recordFactory.build(
        names, ImmutableList.of(exp, exp), schema, DataSourceType.KSTREAM
    );
    // Then:
    assertThat(record, is(KsqlGenericRecord.of(
        GenericKey.genericKey("a"),
        GenericRow.genericRow("a", null),
        0
    )));
}
/**
 * Returns the index of the most significant set bit, counted from the least
 * significant bit (bit 0), or -1 when all bytes are zero.
 */
public int msbIndex() {
    // Start at the highest possible bit position and walk downwards.
    int index = (size() * 8) - 1;
    byteLoop:
    for (int i = 0; i < size(); i++) {
        byte b = value.get(i);
        if (b != 0) {
            // Scan this byte from its MSB; the first set bit ends the search.
            for (int j = 7; j >= 0; j--) {
                byte mask = (byte) ((1 << j) - 1);
                // ~mask keeps bits j..7; higher bits were already ruled out,
                // so a non-zero result means bit j is set and `index` is it.
                if ((b & ~mask) != 0) {
                    break byteLoop;
                }
                index--;
            }
        }
        // Whole byte was zero (a non-zero byte always breaks out above):
        // skip its 8 bit positions.
        index -= 8;
    }
    return index;
}
// msbIndex must return -1 for zero and i for every power of two 2^i.
@Test
public void testMsbIndex() {
    assertThat("Value 0 should have MSB index -1",
               ImmutableByteSequence.copyFrom(0).msbIndex(), is(-1));
    for (int i = 0; i < 63; i++) {
        long value = (long) Math.pow(2, i);
        assertThat(format("Value %d should have MSB index %d", value, i),
                   ImmutableByteSequence.copyFrom(value).msbIndex(), is(i));
    }
}
/**
 * Resolves {@code SELECT *} to the ordered stream of this node's column names.
 *
 * <p>If a source name is supplied it must match this node's own source name
 * (NOTE(review): the comparison is {@code sourceName.equals(getSourceName())} —
 * assumes getSourceName() also returns an Optional; confirm).
 *
 * @throws IllegalArgumentException on a mismatched source name
 */
@Override
public Stream<ColumnName> resolveSelectStar(
    final Optional<SourceName> sourceName
) {
  if (sourceName.isPresent() && !sourceName.equals(getSourceName())) {
    throw new IllegalArgumentException("Expected sourceName of " + getSourceName()
        + ", but was " + sourceName.get());
  }
  // Note: the 'value' columns include the key columns at this point:
  return orderColumns(getSchema().value(), getSchema());
}
// With no source name supplied, SELECT * must expand to key then value columns.
@Test
public void shouldResolveSelectStarIfSourceNotProvidedAndValuesOnly() {
    // When:
    final Stream<ColumnName> result = repartitionNode.resolveSelectStar(
        Optional.empty()
    );
    // Then:
    final List<ColumnName> names = result.collect(Collectors.toList());
    assertThat(names, contains(K0, V0, V1, V2));
}
/**
 * Creates a {@link PackageMatcher} for the given package identifier.
 *
 * @param packageIdentifier the pattern to match packages against; character
 *        validation happens in the {@code PackageMatcher} constructor
 * @return a matcher for the identifier
 */
@PublicAPI(usage = ACCESS)
public static PackageMatcher of(String packageIdentifier) {
    return new PackageMatcher(packageIdentifier);
}
// An identifier containing the internal two-star regex marker is not a legal
// package identifier and must be rejected with a descriptive message.
@Test
public void should_reject_illegal_characters() {
    String illegalPackageIdentifier = "some" + PackageMatcher.TWO_STAR_REGEX_MARKER + "package";
    assertThatThrownBy(() -> PackageMatcher.of(illegalPackageIdentifier))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Package Identifier '%s' may only consist of valid java identifier parts or the symbols '.)(*'",
            illegalPackageIdentifier);
}
/**
 * Builds the metric reporter setups declared in the given configuration.
 *
 * <p>Reporters are discovered from the {@code metrics.reporter.*} config
 * namespace, optionally filtered by the configured reporter list; factories are
 * loaded from the classpath and the plugin manager.
 *
 * <p>NOTE(review): the helper is named findEnabledTraceReportersInConfiguration
 * although this path configures metric reporters — looks like a copy-paste
 * name; confirm against the helper's definition.
 *
 * @param pluginManager optional plugin manager for factory discovery, may be null
 * @return the configured reporter setups; empty when none are enabled
 */
public static List<ReporterSetup> fromConfiguration(
        final Configuration configuration, @Nullable final PluginManager pluginManager) {
    String includedReportersString = configuration.get(MetricOptions.REPORTERS_LIST, "");
    Set<String> namedReporters =
            findEnabledTraceReportersInConfiguration(
                    configuration,
                    includedReportersString,
                    metricReporterListPattern,
                    metricReporterClassPattern,
                    ConfigConstants.METRICS_REPORTER_PREFIX);
    if (namedReporters.isEmpty()) {
        return Collections.emptyList();
    }
    final List<Tuple2<String, Configuration>> reporterConfigurations =
            loadReporterConfigurations(
                    configuration, namedReporters, ConfigConstants.METRICS_REPORTER_PREFIX);
    final Map<String, MetricReporterFactory> reporterFactories =
            loadAvailableReporterFactories(pluginManager);
    return setupReporters(reporterFactories, reporterConfigurations);
}
// When two reporters are declared but the reporter list names only one, only
// that reporter must be set up.
@Test
void testActivateOneReporterAmongTwoDeclared() {
    final Configuration config = new Configuration();
    configureReporter1(config);
    configureReporter2(config);
    config.set(MetricOptions.REPORTERS_LIST, "reporter2");
    final List<ReporterSetup> reporterSetups = ReporterSetup.fromConfiguration(config, null);
    assertThat(reporterSetups).hasSize(1);
    final ReporterSetup setup = reporterSetups.get(0);
    assertReporter2Configured(setup);
}
/**
 * Submits an application to one of the federated sub-clusters, retrying on
 * other sub-clusters (tracked via a blacklist) when a submission attempt fails.
 *
 * <p>The retry count is capped by the number of currently active sub-clusters,
 * so a high configured retry value cannot exceed the clusters available.
 *
 * @throws YarnException if the request is malformed or every retry fails
 */
@Override
public SubmitApplicationResponse submitApplication(
    SubmitApplicationRequest request) throws YarnException, IOException {
  // Reject requests missing the submission context or the application id.
  if (request == null || request.getApplicationSubmissionContext() == null
      || request.getApplicationSubmissionContext().getApplicationId() == null) {
    routerMetrics.incrAppsFailedSubmitted();
    String errMsg =
        "Missing submitApplication request or applicationSubmissionContext information.";
    RouterAuditLogger.logFailure(user.getShortUserName(), SUBMIT_NEW_APP, UNKNOWN,
        TARGET_CLIENT_RM_SERVICE, errMsg);
    RouterServerUtil.logAndThrowException(errMsg, null);
  }
  long startTime = clock.getTime();
  ApplicationId applicationId =
      request.getApplicationSubmissionContext().getApplicationId();
  // Sub-clusters that already failed for this submission.
  List<SubClusterId> blacklist = new ArrayList<>();
  try {
    // We need to handle this situation,
    // the user will provide us with an expected submitRetries,
    // but if the number of Active SubClusters is less than this number at this time,
    // we should provide a high number of retry according to the number of Active SubClusters.
    int activeSubClustersCount = federationFacade.getActiveSubClustersCount();
    int actualRetryNums = Math.min(activeSubClustersCount, numSubmitRetries);
    // Try calling the SubmitApplication method with retries.
    SubmitApplicationResponse response =
        ((FederationActionRetry<SubmitApplicationResponse>) (retryCount) ->
        invokeSubmitApplication(blacklist, request, retryCount)).
        runWithRetries(actualRetryNums, submitIntervalTime);
    if (response != null) {
      long stopTime = clock.getTime();
      routerMetrics.succeededAppsSubmitted(stopTime - startTime);
      return response;
    }
  } catch (Exception e) {
    routerMetrics.incrAppsFailedSubmitted();
    RouterAuditLogger.logFailure(user.getShortUserName(), SUBMIT_NEW_APP, UNKNOWN,
        TARGET_CLIENT_RM_SERVICE, e.getMessage(), applicationId);
    RouterServerUtil.logAndThrowException(e.getMessage(), e);
  }
  // Retries exhausted without a usable response.
  routerMetrics.incrAppsFailedSubmitted();
  String msg = String.format("Application %s with appId %s failed to be submitted.",
      request.getApplicationSubmissionContext().getApplicationName(), applicationId);
  RouterAuditLogger.logFailure(user.getShortUserName(), SUBMIT_NEW_APP, UNKNOWN,
      TARGET_CLIENT_RM_SERVICE, msg, applicationId);
  throw new YarnException(msg);
}
// Submitting null, a context-less request, or a context without an application
// id must all fail with the same "missing information" YarnException.
@Test
public void testSubmitApplicationEmptyRequest() throws Exception {
    LOG.info("Test FederationClientInterceptor: Submit Application - Empty.");
    // null request1
    LambdaTestUtils.intercept(YarnException.class,
        "Missing submitApplication request or applicationSubmissionContext information.",
        () -> interceptor.submitApplication(null));
    // null request2
    LambdaTestUtils.intercept(YarnException.class,
        "Missing submitApplication request or applicationSubmissionContext information.",
        () -> interceptor.submitApplication(SubmitApplicationRequest.newInstance(null)));
    // null request3
    ApplicationSubmissionContext context = ApplicationSubmissionContext
        .newInstance(null, "", "", null, null, false, false, -1, null, null);
    SubmitApplicationRequest request = SubmitApplicationRequest.newInstance(context);
    LambdaTestUtils.intercept(YarnException.class,
        "Missing submitApplication request or applicationSubmissionContext information.",
        () -> interceptor.submitApplication(request));
}
/**
 * Returns the configured number of JobManager replicas.
 *
 * <p>The value must be at least one; more than one replica (standby
 * JobManagers) is only permitted when high availability is enabled.
 *
 * @throws IllegalConfigurationException if the value is below one, or above
 *         one while high availability is not activated
 */
public int getReplicas() {
    final int replicas = flinkConfig.get(KubernetesConfigOptions.KUBERNETES_JOBMANAGER_REPLICAS);
    if (replicas < 1) {
        throw new IllegalConfigurationException(
                String.format(
                        "'%s' should not be configured less than one.",
                        KubernetesConfigOptions.KUBERNETES_JOBMANAGER_REPLICAS.key()));
    }
    if (replicas > 1 && !HighAvailabilityMode.isHighAvailabilityModeActivated(flinkConfig)) {
        throw new IllegalConfigurationException(
                "High availability should be enabled when starting standby JobManagers.");
    }
    return replicas;
}
// Two replicas without HA enabled must be rejected as an illegal configuration.
@Test
public void testGetReplicasWithTwoShouldFailWhenHAIsNotEnabled() {
    flinkConfig.set(KubernetesConfigOptions.KUBERNETES_JOBMANAGER_REPLICAS, 2);
    assertThatThrownBy(() -> kubernetesJobManagerParameters.getReplicas())
        .isInstanceOf(IllegalConfigurationException.class);
}
public static short translateBucketAcl(AccessControlList acl, String userId) { short mode = (short) 0; for (Grant grant : acl.getGrantsAsList()) { Permission perm = grant.getPermission(); Grantee grantee = grant.getGrantee(); if (perm.equals(Permission.Read)) { if (isUserIdInGrantee(grantee, userId)) { // If the bucket is readable by the user, add r and x to the owner mode. mode |= (short) 0500; } } else if (perm.equals(Permission.Write)) { if (isUserIdInGrantee(grantee, userId)) { // If the bucket is writable by the user, +w to the owner mode. mode |= (short) 0200; } } else if (perm.equals(Permission.FullControl)) { if (isUserIdInGrantee(grantee, userId)) { // If the user has full control to the bucket, +rwx to the owner mode. mode |= (short) 0700; } } } return mode; }
// A Read grant yields 0500 for the grantee and 0000 for anyone else; adding
// ReadAcp (not mapped) must not change either result.
@Test
public void translateUserReadPermission() {
    mAcl.grantPermission(mUserGrantee, Permission.Read);
    Assert.assertEquals((short) 0500, S3AUtils.translateBucketAcl(mAcl, ID));
    Assert.assertEquals((short) 0000, S3AUtils.translateBucketAcl(mAcl, OTHER_ID));
    mAcl.grantPermission(mUserGrantee, Permission.ReadAcp);
    Assert.assertEquals((short) 0500, S3AUtils.translateBucketAcl(mAcl, ID));
    Assert.assertEquals((short) 0000, S3AUtils.translateBucketAcl(mAcl, OTHER_ID));
}
/**
 * Returns the earliest scheduled time at or after the given instant.
 *
 * <p>Wraps the epoch-millisecond timestamp in a US-locale Gregorian calendar
 * and delegates to the calendar-based overload.
 *
 * @param t instant in epoch milliseconds
 * @return the ceiling as a calendar
 */
public Calendar ceil(long t) {
    Calendar instant = new GregorianCalendar(Locale.US);
    instant.setTimeInMillis(t);
    return ceil(instant);
}
// H/15 with seed "stuff" hashes to a fixed offset inside each 15-minute
// window; ceil() must land on the next hashed slot, rolling over the hour
// boundary. A range narrower than the step (H(0-3)/15) must be rejected.
@Test
public void hashSkips() throws Exception {
    compare(new GregorianCalendar(2013, Calendar.MARCH, 21, 16, 26), new CronTab("H/15 * * * *", Hash.from("stuff")).ceil(new GregorianCalendar(2013, Calendar.MARCH, 21, 16, 21)));
    compare(new GregorianCalendar(2013, Calendar.MARCH, 21, 16, 41), new CronTab("H/15 * * * *", Hash.from("stuff")).ceil(new GregorianCalendar(2013, Calendar.MARCH, 21, 16, 31)));
    compare(new GregorianCalendar(2013, Calendar.MARCH, 21, 16, 56), new CronTab("H/15 * * * *", Hash.from("stuff")).ceil(new GregorianCalendar(2013, Calendar.MARCH, 21, 16, 42)));
    // Rolls over into the next hour.
    compare(new GregorianCalendar(2013, Calendar.MARCH, 21, 17, 11), new CronTab("H/15 * * * *", Hash.from("stuff")).ceil(new GregorianCalendar(2013, Calendar.MARCH, 21, 16, 59)));
    compare(new GregorianCalendar(2013, Calendar.MARCH, 21, 0, 2), new CronTab("H(0-15)/3 * * * *", Hash.from("junk")).ceil(new GregorianCalendar(2013, Calendar.MARCH, 21, 0, 0)));
    compare(new GregorianCalendar(2013, Calendar.MARCH, 21, 0, 2), new CronTab("H(0-3)/4 * * * *", Hash.from("junk")).ceil(new GregorianCalendar(2013, Calendar.MARCH, 21, 0, 0)));
    compare(new GregorianCalendar(2013, Calendar.MARCH, 21, 1, 2), new CronTab("H(0-3)/4 * * * *", Hash.from("junk")).ceil(new GregorianCalendar(2013, Calendar.MARCH, 21, 0, 5)));
    // Step 15 does not fit within range 0-3: parsing must fail with a
    // locale-stable (English) message.
    Locale saveLocale = Locale.getDefault();
    Locale.setDefault(Locale.ENGLISH);
    try {
        ANTLRException e = assertThrows(ANTLRException.class,
            () -> compare(new GregorianCalendar(2013, Calendar.MARCH, 21, 0, 0),
                new CronTab("H(0-3)/15 * * * *", Hash.from("junk")).ceil(new GregorianCalendar(2013, Calendar.MARCH, 21, 0, 0))));
        assertThat(e, instanceOf(IllegalArgumentException.class));
        assertEquals("line 1:9: 15 is an invalid value. Must be within 1 and 4", e.getMessage());
    } finally {
        Locale.setDefault(saveLocale);
    }
}
/**
 * True while word prediction is enabled and a non-empty word is currently
 * being composed (tracked in {@code mWord}).
 */
protected boolean isCurrentlyPredicting() {
    return isPredictionOn() && !mWord.isEmpty();
}
// With suggestion-restart disabled, an external composing-text replacement
// ends prediction, and subsequent backspaces delete characters without
// restarting word composition.
@Test
public void testHandleCompleteCandidateUpdateFromExternalAndBackSpaceWithoutRestart() {
    simulateFinishInputFlow();
    SharedPrefsHelper.setPrefsValue(R.string.settings_key_allow_suggestions_restart, false);
    simulateOnStartInputFlow();
    // Typing starts composing: candidate region spans the typed word.
    mAnySoftKeyboardUnderTest.simulateTextTyping("he");
    Assert.assertEquals("he", getCurrentTestInputConnection().getCurrentTextInInputConnection());
    var currentState = getCurrentTestInputConnection().getCurrentState();
    Assert.assertEquals(2, currentState.selectionStart);
    Assert.assertEquals(2, currentState.selectionEnd);
    Assert.assertEquals(0, currentState.candidateStart);
    Assert.assertEquals(2, currentState.candidateEnd);
    Assert.assertTrue(mAnySoftKeyboardUnderTest.isCurrentlyPredicting());
    // Simulating an external change: the app replaces the composing text.
    getCurrentTestInputConnection().setComposingText("hell is here ", 1);
    TestRxSchedulers.foregroundAdvanceBy(100);
    Assert.assertEquals(
        "hell is here ", getCurrentTestInputConnection().getCurrentTextInInputConnection());
    currentState = getCurrentTestInputConnection().getCurrentState();
    Assert.assertEquals(13, currentState.selectionStart);
    Assert.assertEquals(13, currentState.selectionEnd);
    Assert.assertEquals(13, currentState.candidateStart);
    Assert.assertEquals(13, currentState.candidateEnd);
    Assert.assertFalse(mAnySoftKeyboardUnderTest.isCurrentlyPredicting());
    // First backspace deletes the trailing space; still not predicting.
    mAnySoftKeyboardUnderTest.simulateKeyPress(KeyCodes.DELETE);
    TestRxSchedulers.drainAllTasksUntilEnd();
    Assert.assertEquals(
        "hell is here", getCurrentTestInputConnection().getCurrentTextInInputConnection());
    currentState = getCurrentTestInputConnection().getCurrentState();
    Assert.assertEquals(12, currentState.selectionStart);
    Assert.assertEquals(12, currentState.selectionEnd);
    Assert.assertEquals(12, currentState.candidateStart);
    Assert.assertEquals(12, currentState.candidateEnd);
    Assert.assertFalse(mAnySoftKeyboardUnderTest.isCurrentlyPredicting());
    // Second backspace deletes into the last word; prediction must NOT restart
    // because the restart preference is off.
    mAnySoftKeyboardUnderTest.simulateKeyPress(KeyCodes.DELETE);
    TestRxSchedulers.drainAllTasksUntilEnd();
    Assert.assertEquals(
        "hell is her", getCurrentTestInputConnection().getCurrentTextInInputConnection());
    Assert.assertFalse(mAnySoftKeyboardUnderTest.isCurrentlyPredicting());
}
/**
 * Loads a user by username and adapts it to Spring Security's UserDetails.
 *
 * <p>A missing user is surfaced as {@link BadCredentialsException} so callers
 * cannot distinguish "unknown user" from "wrong password". Every user receives
 * the "authenticated" and "anonymous" roles in addition to their own; all role
 * names are prefixed with {@code ROLE_}. Two-factor settings from the user are
 * honored unless two-factor auth is globally disabled.
 */
@Override
public Mono<UserDetails> findByUsername(String username) {
    return userService.getUser(username)
        .onErrorMap(UserNotFoundException.class,
            e -> new BadCredentialsException("Invalid Credentials"))
        .flatMap(user -> {
            var name = user.getMetadata().getName();
            var userBuilder = User.withUsername(name)
                .password(user.getSpec().getPassword())
                .disabled(requireNonNullElse(user.getSpec().getDisabled(), false));
            var setAuthorities = roleService.getRolesByUsername(name)
                // every authenticated user should have authenticated and anonymous roles.
                .concatWithValues(AUTHENTICATED_ROLE_NAME, ANONYMOUS_ROLE_NAME)
                .map(roleName -> new SimpleGrantedAuthority(ROLE_PREFIX + roleName))
                .distinct()
                .collectList()
                .doOnNext(userBuilder::authorities);
            return setAuthorities.then(Mono.fromSupplier(() -> {
                var twoFactorAuthSettings = TwoFactorUtils.getTwoFactorAuthSettings(user);
                return new HaloUser.Builder(userBuilder.build())
                    .twoFactorAuthEnabled(
                        (!twoFactorAuthDisabled) && twoFactorAuthSettings.isAvailable()
                    )
                    .totpEncryptedSecret(user.getSpec().getTotpEncryptedSecret())
                    .build();
            }));
        });
}
// A user with no roles of their own must still receive the implicit
// "anonymous" and "authenticated" authorities.
@Test
void shouldFindUserDetailsByExistingUsernameButWithoutAnyRoles() {
    var foundUser = createFakeUser();
    when(userService.getUser("faker")).thenReturn(Mono.just(foundUser));
    when(roleService.getRolesByUsername("faker")).thenReturn(Flux.empty());
    StepVerifier.create(userDetailService.findByUsername("faker"))
        .expectSubscription()
        .assertNext(gotUser -> {
            assertEquals(foundUser.getMetadata().getName(), gotUser.getUsername());
            assertEquals(foundUser.getSpec().getPassword(), gotUser.getPassword());
            assertEquals(
                Set.of("ROLE_anonymous", "ROLE_authenticated"),
                authorityListToSet(gotUser.getAuthorities()));
        })
        .verifyComplete();
}
/**
 * Begins an LRA saga step for the given exchange by joining the step to this
 * coordinator's LRA.
 *
 * <p>If translating the Camel step into an LRA step fails, the returned future
 * completes exceptionally with that error instead of throwing synchronously,
 * preserving the asynchronous contract of this method.
 *
 * @param exchange the exchange the step belongs to
 * @param step     the Camel saga step to join
 * @return a future completing when the join succeeds, or failing with the
 *         translation or join error
 */
@Override
public CompletableFuture<Void> beginStep(Exchange exchange, CamelSagaStep step) {
    LRASagaStep sagaStep;
    try {
        sagaStep = LRASagaStep.fromCamelSagaStep(step, exchange);
    } catch (RuntimeException ex) {
        // Fail the future directly instead of scheduling a common-pool task
        // whose only job is to throw (what supplyAsync previously did); the
        // caller still observes the same exceptional completion.
        CompletableFuture<Void> failed = new CompletableFuture<>();
        failed.completeExceptionally(ex);
        return failed;
    }
    return sagaService.getClient().join(this.lraURL, sagaStep, exchange);
}
// beginStep must delegate to LRAClient.join and return its future unchanged.
@DisplayName("Tests whether join is called on LRAClient")
@Test
void testBeginStep() throws Exception {
    CamelSagaStep step = new CamelSagaStep(null, null, Collections.emptyMap(), null);
    CompletableFuture<Void> expected = CompletableFuture.completedFuture(null);
    Mockito.when(client.join(Mockito.eq(url), Mockito.any(LRASagaStep.class), Mockito.eq(exchange))).thenReturn(expected);
    CompletableFuture<Void> actual = coordinator.beginStep(exchange, step);
    // The client's future must be passed through without wrapping.
    Assertions.assertSame(expected, actual);
    Mockito.verify(sagaService).getClient();
    Mockito.verify(client).join(Mockito.eq(url), Mockito.any(LRASagaStep.class), Mockito.eq(exchange));
}
/**
 * Imports a project from a DevOps platform and binds it to the ALM setting.
 *
 * <p>Resolves the ALM setting from the request, delegates project creation to
 * the platform-specific {@code DevOpsProjectCreator}, optionally applies a
 * new-code definition to the main branch, and commits/indexes everything in a
 * single DB session.
 *
 * @throws IllegalArgumentException if the ALM setting is unknown or the ALM
 *         platform has no registered project creator
 * @throws IllegalStateException if the project's ALM binding was not created
 */
public ImportedProject importProject(ImportProjectRequest request) {
    try (DbSession dbSession = dbClient.openSession(false)) {
        checkNewCodeDefinitionParam(request.newCodeDefinitionType(), request.newCodeDefinitionValue());
        AlmSettingDto almSetting = dbClient.almSettingDao().selectByUuid(dbSession, request.almSettingId())
            .orElseThrow(() -> new IllegalArgumentException("ALM setting not found"));
        DevOpsProjectDescriptor projectDescriptor = new DevOpsProjectDescriptor(almSetting.getAlm(), almSetting.getUrl(),
            request.repositoryIdentifier(), request.projectIdentifier());
        DevOpsProjectCreator projectCreator = devOpsProjectCreatorFactory.getDevOpsProjectCreator(almSetting, projectDescriptor)
            .orElseThrow(() -> new IllegalArgumentException(format("Platform %s not supported", almSetting.getAlm().name())));
        CreationMethod creationMethod = getCreationMethod(request.monorepo());
        ComponentCreationData componentCreationData = projectCreator.createProjectAndBindToDevOpsPlatform(
            dbSession, creationMethod, request.monorepo(), request.projectKey(), request.projectName());
        ProjectDto projectDto = Optional.ofNullable(componentCreationData.projectDto()).orElseThrow();
        BranchDto mainBranchDto = Optional.ofNullable(componentCreationData.mainBranchDto()).orElseThrow();
        // A new-code definition is optional; apply it to the main branch when given.
        if (request.newCodeDefinitionType() != null) {
            newCodeDefinitionResolver.createNewCodeDefinition(dbSession, projectDto.getUuid(), mainBranchDto.getUuid(),
                mainBranchDto.getKey(), request.newCodeDefinitionType(), request.newCodeDefinitionValue());
        }
        componentUpdater.commitAndIndex(dbSession, componentCreationData);
        ProjectAlmSettingDto projectAlmSettingDto = dbClient.projectAlmSettingDao().selectByProject(dbSession, projectDto)
            .orElseThrow(() -> new IllegalStateException("Project ALM setting was not created"));
        dbSession.commit();
        return new ImportedProject(projectDto, projectAlmSettingDto);
    }
}
// A supported ALM plus a new-code definition must create the project, apply
// the definition to the main branch, and commit/index the components.
@Test
void createImportedProject_whenAlmIsSupportedAndNewCodeDefinitionDefined_shouldCreateProjectAndNewCodeDefinition() {
    userSession.logIn().addPermission(PROVISION_PROJECTS);
    DbSession dbSession = mockDbSession();
    AlmSettingDto almSetting = mockAlmSetting(dbSession);
    DevOpsProjectCreator devOpsProjectCreator = mockDevOpsProjectCreator(almSetting);
    ComponentCreationData componentCreationData = mockProjectCreation(devOpsProjectCreator, ALM_IMPORT_API, false, dbSession);
    ProjectDto projectDto = mockProjectDto(componentCreationData);
    mockBranchDto(componentCreationData);
    ProjectAlmSettingDto projectAlmSettingDto = mockProjectAlmSetting(dbSession, projectDto);
    ImportProjectRequest request = new ImportProjectRequest(PROJECT_KEY, PROJECT_NAME, ALM_SETTING_ID,
        DOP_REPOSITORY_ID, DOP_PROJECT_ID, "NUMBER_OF_DAYS", "10", false);
    ImportedProject importedProject = importProjectService.importProject(request);
    assertThat(importedProject.projectDto()).isEqualTo(projectDto);
    assertThat(importedProject.projectAlmSettingDto()).isEqualTo(projectAlmSettingDto);
    // The new-code definition must target the main branch.
    verify(newCodeDefinitionResolver).createNewCodeDefinition(
        dbSession, PROJECT_UUID, MAIN_BRANCH_UUID, MAIN_BRANCH_KEY, "NUMBER_OF_DAYS", "10");
    verify(componentUpdater).commitAndIndex(dbSession, componentCreationData);
}
/**
 * Renders this view as {@code ListView{data=<data>, count=<count>}}.
 */
@Override
public String toString() {
    StringBuilder builder = new StringBuilder("ListView{");
    builder.append("data=").append(data);
    builder.append(", count=").append(count);
    builder.append('}');
    return builder.toString();
}
// toString must render the data list and count in the documented format.
@Test
void testToString() {
    List<String> data = new LinkedList<>();
    data.add("1");
    data.add("2");
    data.add("3");
    ListView<String> listView = new ListView<>();
    listView.setData(data);
    listView.setCount(data.size());
    String actual = listView.toString();
    assertEquals("ListView{data=[1, 2, 3], count=3}", actual);
}
/**
 * Resolves the storage policy of {@code src}, translating the path into the
 * chroot-ed namespace before delegating to the underlying file system.
 */
@Override
public BlockStoragePolicySpi getStoragePolicy(Path src) throws IOException {
    return super.getStoragePolicy(fullPath(src));
}
// getStoragePolicy must forward to the raw file system with the path prefixed
// by the chroot ("/storagePolicy" -> "/a/b/storagePolicy").
@Test(timeout = 30000)
public void testGetStoragePolicy() throws Exception {
    Path storagePolicyPath = new Path("/storagePolicy");
    Path chRootedStoragePolicyPath = new Path("/a/b/storagePolicy");
    Configuration conf = new Configuration();
    conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
    URI chrootUri = URI.create("mockfs://foo/a/b");
    ChRootedFileSystem chrootFs = new ChRootedFileSystem(chrootUri, conf);
    FileSystem mockFs = ((FilterFileSystem) chrootFs.getRawFileSystem())
        .getRawFileSystem();
    chrootFs.getStoragePolicy(storagePolicyPath);
    verify(mockFs).getStoragePolicy(chRootedStoragePolicyPath);
}
/**
 * Returns an approximate simple name for the class by delegating to the
 * string-based overload. Anonymous classes are rejected because they have no
 * usable simple name.
 *
 * @throws IllegalArgumentException if {@code clazz} is anonymous
 */
private static String approximateSimpleName(Class<?> clazz, boolean dropOuterClassNames) {
    checkArgument(!clazz.isAnonymousClass(), "Attempted to get simple name of anonymous class");
    return approximateSimpleName(clazz.getName(), dropOuterClassNames);
}
// "DoFn" and "Fn" suffixes are stripped regardless of the outer-class flag.
@Test
public void testDropsStandardSuffixes() {
    assertEquals("Embedded", NameUtils.approximateSimpleName("EmbeddedDoFn", true));
    assertEquals("Embedded", NameUtils.approximateSimpleName("EmbeddedFn", true));
    assertEquals("Embedded", NameUtils.approximateSimpleName("EmbeddedDoFn", false));
    assertEquals("Embedded", NameUtils.approximateSimpleName("EmbeddedFn", false));
}
/**
 * Composes a finder matching lines that contain {@code query} or
 * {@code orQuery} while excluding lines containing {@code notQuery}.
 *
 * @param query    primary substring to match
 * @param orQuery  alternative substring to match
 * @param notQuery substring whose presence excludes a line
 * @return the composed finder
 */
public static Finder advancedFinder(String query, String orQuery, String notQuery) {
    Finder primary = Finder.contains(query);
    Finder either = primary.or(Finder.contains(orQuery));
    return either.not(Finder.contains(notQuery));
}
// Against the text() fixture, exactly one line matches
// ("it was" OR "kingdom") AND NOT "sea".
@Test void advancedFinderTest() { var res = advancedFinder("it was", "kingdom", "sea").find(text()); assertEquals(1, res.size()); assertEquals("It was many and many a year ago,", res.get(0)); }
// Delegates to the congestion-control helper using the channel/context
// outcome captured at construction time.
public int initialWindowLength() { return CongestionControl.receiverWindowLength(ccOutcome); }
// The channel URI's rcv-wnd=8192 must take precedence over the driver
// context's initialWindowLength(16536).
@Test void shouldSetWindowLengthFromChannel() { final UdpChannel channelWithWindow = UdpChannel.parse("aeron:udp?endpoint=127.0.0.1:9999|rcv-wnd=8192"); final MediaDriver.Context context = new MediaDriver.Context().initialWindowLength(16536); final int termLength = 1_000_000; final StaticWindowCongestionControl staticWindowCongestionControl = new StaticWindowCongestionControl( 0, channelWithWindow, 0, 0, termLength, 0, null, null, null, context, null); assertEquals(8192, staticWindowCongestionControl.initialWindowLength()); }
// Creates a delay measurement on the device hosting the given MEP.
// Validation order: MEP must resolve to a device, the device must exist,
// and the device must support SoamDmProgrammable; each failure throws a
// CfmConfigException with a specific message.
// NOTE(review): deviceService.getDevice(mepDeviceId) is invoked up to three
// times; consider hoisting it into a local variable.
@Override public Optional<SoamId> createDm(MdId mdName, MaIdShort maName, MepId mepId, DelayMeasurementCreate dmNew) throws CfmConfigException, SoamConfigException { DeviceId mepDeviceId = cfmMepService.getMep(mdName, maName, mepId).deviceId(); if (mepDeviceId == null) { throw new CfmConfigException("Unable to create DM. MEP :" + mdName + "/" + maName + "/" + mepId + " does not exist"); } else if (deviceService.getDevice(mepDeviceId) == null) { throw new CfmConfigException("Device " + mepDeviceId + " from MEP :" + mdName + "/" + maName + "/" + mepId + " does not exist"); } else if (!deviceService.getDevice(mepDeviceId).is(SoamDmProgrammable.class)) { throw new CfmConfigException("Device " + mepDeviceId + " from MEP :" + mdName + "/" + maName + "/" + mepId + " does not implement SoamDmProgrammable"); } log.debug("Creating new DM in MD {}, MA {}, MEP {} on Device {}", mdName, maName, mepId, mepDeviceId); return deviceService.getDevice(mepDeviceId) .as(SoamDmProgrammable.class).createDm(mdName, maName, mepId, dmNew); }
// Happy path: with device, MEP and driver mocks wired up, createDm must
// return the SoamId produced by the programmable behaviour (value 1000).
@Test public void testCreateDm() throws CfmConfigException, SoamConfigException { expect(deviceService.getDevice(DEVICE_ID1)).andReturn(device1).anyTimes(); replay(deviceService); expect(mepService.getMep(MDNAME1, MANAME1, MEPID1)).andReturn(mep1).anyTimes(); replay(mepService); expect(driverService.getDriver(DEVICE_ID1)).andReturn(testDriver).anyTimes(); replay(driverService); DelayMeasurementCreate dmCreate1 = DefaultDelayMeasurementCreate .builder(DelayMeasurementCreate.DmType.DM1DMTX, DelayMeasurementCreate.Version.Y17312011, MepId.valueOf((short) 11), Mep.Priority.PRIO3) .binsPerFdInterval((short) 4) .binsPerFdrInterval((short) 5) .binsPerIfdvInterval((short) 6) .build(); assertEquals(1000, soamManager.createDm( MDNAME1, MANAME1, MEPID1, dmCreate1).get().value()); }
// Convenience overload: expands the regex field mapping against the
// model's feature ID map.
public void expandRegexMapping(Model<T> model) { expandRegexMapping(model.getFeatureIDMap()); }
@Test public void testInvalidRegexMapping() { List<String> fieldNames = Arrays.asList("Armadillos", "Armadas", "Archery", "Battleship", "Battles", "Carrots", "Label"); Map<String, FieldProcessor> fixed = new HashMap<>(); fixed.put("Battles", new IdentityProcessor("Battles")); Map<String, FieldProcessor> regex = new HashMap<>(); try { regex.put("Arma*", new IdentityProcessor("Arma*")); regex.put("Monkeys", new IdentityProcessor("Monkeys")); RowProcessor<MockOutput> rowProcessor = new RowProcessor<>(Collections.emptyList(), null, new MockResponseProcessor("Label"), fixed, regex, new HashSet<>()); rowProcessor.expandRegexMapping(fieldNames); fail("Should have thrown an IllegalArgumentException"); } catch (IllegalArgumentException e) { // pass } catch (Exception e) { fail("Incorrect exception thrown."); } regex.clear(); try { regex.put("Battle*", new IdentityProcessor("Battle*")); RowProcessor<MockOutput> rowProcessor = new RowProcessor<>(Collections.emptyList(), null, new MockResponseProcessor("Label"), fixed, regex, new HashSet<>()); rowProcessor.expandRegexMapping(fieldNames); fail("Should have thrown an IllegalArgumentException"); } catch (IllegalArgumentException e) { // pass } catch (Exception e) { fail("Incorrect exception thrown."); } regex.clear(); try { regex.put("Arm*", new IdentityProcessor("Arm*")); regex.put("Armadil*", new IdentityProcessor("Armadil*")); RowProcessor<MockOutput> rowProcessor = new RowProcessor<>(Collections.emptyList(), null, new MockResponseProcessor("Label"), fixed, regex, new HashSet<>()); rowProcessor.expandRegexMapping(fieldNames); fail("Should have thrown an IllegalArgumentException"); } catch (IllegalArgumentException e) { // pass } catch (Exception e) { fail("Incorrect exception thrown."); } }
// Returns the shared singleton instance.
public static SpecificData get() { return INSTANCE; }
@Test
void specificRecordBase() {
    // put/get by field name round-trips both an int and a String value.
    final TestRecord rec = new TestRecord();
    rec.put("x", 1);
    rec.put("y", "str");
    assertEquals(1, rec.get("x"));
    assertEquals("str", rec.get("y"));
}
/**
 * Normalizes a URL path: maps null to "/", guarantees a leading slash,
 * strips a single trailing slash (except for the root path), and
 * percent-encodes whitespace via VAR_FIXPATH_PATTERN.
 */
public static String fixPath(String path) {
    if (null == path) {
        return SLASH;
    }
    if (path.charAt(0) != '/') {
        path = SLASH + path;
    }
    if (path.length() > 1 && path.endsWith(SLASH)) {
        path = path.substring(0, path.length() - 1);
    }
    // Bug fix: the previous check was path.contains("\\s"), which looks for the
    // literal two-character sequence "\s" (String.contains takes a literal
    // CharSequence, not a regex), so paths containing spaces were never
    // encoded. Scan for actual whitespace characters instead.
    boolean hasWhitespace = false;
    for (int i = 0; i < path.length(); i++) {
        if (Character.isWhitespace(path.charAt(i))) {
            hasWhitespace = true;
            break;
        }
    }
    if (!hasWhitespace) {
        return path;
    }
    return VAR_FIXPATH_PATTERN.matcher(path).replaceAll("%20");
}
// fixPath strips the trailing slash and guarantees a leading one.
// NOTE(review): the second assertion exercises PathKit.cleanPath, not
// fixPath — it arguably belongs in a separate test method.
@Test public void testFixPath() { String path = PathKit.fixPath("/a/b/"); Assert.assertEquals("/a/b", path); String path2 = PathKit.cleanPath("/a//b//c//"); Assert.assertEquals("/a/b/c/", path2); }
// Records that the drainer thread terminated abnormally: clears the drainer
// reference and stores the failure cause. Null causes are rejected up front.
// NOTE(review): the write order (drainer before cause) may be visible to
// concurrent readers — do not reorder without checking the readers.
public final void drainerFailed(Throwable t) { if (t == null) { throw new NullPointerException("ConcurrentConveyor.drainerFailed(null)"); } drainer = null; drainerDepartureCause = t; }
// Passing null to drainerFailed must throw NullPointerException.
@Test(expected = NullPointerException.class) public void when_callDrainerFailedWithNull_then_throwNPE() { conveyor.drainerFailed(null); }
// Resolves an attribute value across synthesized annotations with a
// double-checked-locking cache keyed by (attributeName, attributeType):
// on a cache miss it picks, among annotations exposing a matching attribute,
// the one ranked first by annotationComparator.
// NOTE(review): null is used as the "missing" sentinel, so an attribute whose
// resolved value is legitimately null is re-resolved (and re-cached) on every call.
@SuppressWarnings("unchecked") @Override public <T> T getAttributeValue(String attributeName, Class<T> attributeType, Collection<? extends SynthesizedAnnotation> synthesizedAnnotations) { Object value = valueCaches.get(attributeName, attributeType); if (Objects.isNull(value)) { synchronized (valueCaches) { value = valueCaches.get(attributeName, attributeType); if (Objects.isNull(value)) { value = synthesizedAnnotations.stream() .filter(ma -> ma.hasAttribute(attributeName, attributeType)) .min(annotationComparator) .map(ma -> ma.getAttributeValue(attributeName)) .orElse(null); valueCaches.put(attributeName, attributeType, value); } } } return (T)value; }
// "name" as String resolves from the annotation the comparator ranks first;
// "value" requested as Integer can only come from annotation1 (annotation2's
// "value" is a String, so its type filter excludes it).
@Test public void getAttributeValueTest() { CacheableSynthesizedAnnotationAttributeProcessor processor = new CacheableSynthesizedAnnotationAttributeProcessor(); Map<String, Object> values1 = MapBuilder.<String, Object> create().put("name", "name1").put("value", 111).build(); SynthesizedAnnotation annotation1 = new TestSynthesizedAnnotation(1, 0, values1); Map<String, Object> values2 = MapBuilder.<String, Object> create().put("name", "name2").put("value", "value2").build(); SynthesizedAnnotation annotation2 = new TestSynthesizedAnnotation(0, 0, values2); assertEquals("name2", processor.getAttributeValue("name", String.class, Arrays.asList(annotation1, annotation2))); assertEquals(Integer.valueOf(111), processor.getAttributeValue("value", Integer.class, Arrays.asList(annotation1, annotation2))); }
public String getShare() {
    // Plain accessor for the configured share name.
    return this.share;
}
// The share segment is extracted from the endpoint URI even with a
// trailing slash and empty query string.
@Test void shareForValidURIShouldBeExtracted3() { var remoteConf = context.getEndpoint("azure-files://account/share/?", FilesEndpoint.class).getConfiguration(); assertEquals("share", remoteConf.getShare()); }
// Returns the precomputed erasure of this type variable.
@Override @PublicAPI(usage = ACCESS) public JavaClass toErasure() { return erasure; }
// For T extends HashMap<String, String> & Iterable<String> & Serializable,
// the erasure must be the erasure of the leftmost bound: HashMap.
@Test public void erased_type_variable_bound_by_multiple_generic_classes_and_interfaces_is_the_erasure_of_the_leftmost_bound() { @SuppressWarnings("unused") class ClassWithBoundTypeParameterWithMultipleGenericClassAndInterfaceBounds<T extends HashMap<String, String> & Iterable<String> & Serializable> { } JavaTypeVariable<JavaClass> type = new ClassFileImporter().importClass(ClassWithBoundTypeParameterWithMultipleGenericClassAndInterfaceBounds.class).getTypeParameters().get(0); assertThatType(type.toErasure()).matches(HashMap.class); }
public static boolean isScannerSide(Object extension) {
    // An extension is scanner-side when @ScannerSide is resolvable on it.
    return null != AnnotationUtils.getAnnotation(extension, ScannerSide.class);
}
// NOTE(review): despite its name, this test exercises
// ExtensionUtils.isDeprecatedScannerSide, not isScannerSide.
@Test public void testIsScannerSide() { assertThat(ExtensionUtils.isDeprecatedScannerSide(ScannerService.class)).isTrue(); assertThat(ExtensionUtils.isDeprecatedScannerSide(ServerService.class)).isFalse(); assertThat(ExtensionUtils.isDeprecatedScannerSide(new ServerService())).isFalse(); assertThat(ExtensionUtils.isDeprecatedScannerSide(new WebServerService())).isFalse(); assertThat(ExtensionUtils.isDeprecatedScannerSide(new ComputeEngineService())).isFalse(); }
// Parses the delete-records JSON into per-partition offset lists without
// deduplicating entries. A missing "version" field falls back to
// EARLIEST_VERSION; an unparseable string raises AdminOperationException.
static Map<TopicPartition, List<Long>> parseOffsetJsonStringWithoutDedup(String jsonData) throws JsonProcessingException { JsonValue js = Json.parseFull(jsonData) .orElseThrow(() -> new AdminOperationException("The input string is not a valid JSON")); Optional<JsonValue> version = js.asJsonObject().get("version"); return parseJsonData(version.isPresent() ? version.get().to(INT) : EARLIEST_VERSION, js); }
// Duplicates are preserved (the "WithoutDedup" contract) and unknown JSON
// fields ("ignored") are tolerated; entries group per TopicPartition in order.
@Test public void testParse() throws Exception { Map<TopicPartition, List<Long>> res = DeleteRecordsCommand.parseOffsetJsonStringWithoutDedup( "{\"partitions\":[" + "{\"topic\":\"t\", \"partition\":0, \"offset\":0}," + "{\"topic\":\"t\", \"partition\":1, \"offset\":1, \"ignored\":\"field\"}," + "{\"topic\":\"t\", \"partition\":0, \"offset\":2}," + "{\"topic\":\"t\", \"partition\":0, \"offset\":0}" + "]}" ); assertEquals(2, res.size()); assertEquals(Arrays.asList(0L, 2L, 0L), res.get(new TopicPartition("t", 0))); assertEquals(Collections.singletonList(1L), res.get(new TopicPartition("t", 1))); }
/**
 * Builds a bulkhead configuration from the given Commons Configuration,
 * populating the shared configs and per-instance sections from their
 * respective property prefixes. Any failure is wrapped in a ConfigParseException.
 */
public static CommonsConfigurationBulkHeadConfiguration of(final Configuration configuration) throws ConfigParseException {
    final CommonsConfigurationBulkHeadConfiguration result = new CommonsConfigurationBulkHeadConfiguration();
    try {
        result.getConfigs().putAll(result.getProperties(configuration.subset(BULK_HEAD_CONFIGS_PREFIX)));
        result.getInstances().putAll(result.getProperties(configuration.subset(BULK_HEAD_INSTANCES_PREFIX)));
        return result;
    } catch (Exception ex) {
        throw new ConfigParseException("Error creating bulkhead configuration", ex);
    }
}
// Loads the YAML test resource and checks both the shared configs and
// the per-instance sections against the expected fixtures.
@Test public void testFromYamlFile() throws ConfigurationException { Configuration config = CommonsConfigurationUtil.getConfiguration(YAMLConfiguration.class, TestConstants.RESILIENCE_CONFIG_YAML_FILE_NAME); CommonsConfigurationBulkHeadConfiguration bulkHeadConfiguration = CommonsConfigurationBulkHeadConfiguration.of(config); assertConfigs(bulkHeadConfiguration.getConfigs()); assertInstances(bulkHeadConfiguration.getInstances()); }
@Override protected void processOptions(LinkedList<String> args) throws IOException { CommandFormat cf = new CommandFormat(0, Integer.MAX_VALUE, OPTION_PATHONLY, OPTION_DIRECTORY, OPTION_HUMAN, OPTION_HIDENONPRINTABLE, OPTION_RECURSIVE, OPTION_REVERSE, OPTION_MTIME, OPTION_SIZE, OPTION_ATIME, OPTION_ECPOLICY); cf.parse(args); pathOnly = cf.getOpt(OPTION_PATHONLY); dirRecurse = !cf.getOpt(OPTION_DIRECTORY); setRecursive(cf.getOpt(OPTION_RECURSIVE) && dirRecurse); humanReadable = cf.getOpt(OPTION_HUMAN); hideNonPrintable = cf.getOpt(OPTION_HIDENONPRINTABLE); orderReverse = cf.getOpt(OPTION_REVERSE); orderTime = cf.getOpt(OPTION_MTIME); orderSize = !orderTime && cf.getOpt(OPTION_SIZE); useAtime = cf.getOpt(OPTION_ATIME); displayECPolicy = cf.getOpt(OPTION_ECPOLICY); if (args.isEmpty()) args.add(Path.CUR_DIR); initialiseOrderComparator(); }
// With "-t", each directory's entries must print newest-first (by the mtimes
// set below, deliberately out of name order), verified per directory via
// Mockito InOrder on the output stream.
@Test public void processPathDirsOrderMtime() throws IOException { TestFile testfile01 = new TestFile("testDirectory01", "testFile01"); TestFile testfile02 = new TestFile("testDirectory01", "testFile02"); TestFile testfile03 = new TestFile("testDirectory01", "testFile03"); TestFile testfile04 = new TestFile("testDirectory02", "testFile04"); TestFile testfile05 = new TestFile("testDirectory02", "testFile05"); TestFile testfile06 = new TestFile("testDirectory02", "testFile06"); // set file mtime in different order to file names testfile01.setMtime(NOW.getTime() + 10); testfile02.setMtime(NOW.getTime() + 30); testfile03.setMtime(NOW.getTime() + 20); testfile04.setMtime(NOW.getTime() + 60); testfile05.setMtime(NOW.getTime() + 40); testfile06.setMtime(NOW.getTime() + 50); TestFile testDir01 = new TestFile("", "testDirectory01"); testDir01.setIsDir(true); testDir01.addContents(testfile01, testfile02, testfile03); TestFile testDir02 = new TestFile("", "testDirectory02"); testDir02.setIsDir(true); testDir02.addContents(testfile04, testfile05, testfile06); LinkedList<PathData> pathData = new LinkedList<PathData>(); pathData.add(testDir01.getPathData()); pathData.add(testDir02.getPathData()); PrintStream out = mock(PrintStream.class); Ls ls = new Ls(); ls.out = out; LinkedList<String> options = new LinkedList<String>(); options.add("-t"); ls.processOptions(options); String lineFormat = TestFile.computeLineFormat(pathData); ls.processArguments(pathData); InOrder inOrder = inOrder(out); inOrder.verify(out).println("Found 3 items"); inOrder.verify(out).println(testfile02.formatLineMtime(lineFormat)); inOrder.verify(out).println(testfile03.formatLineMtime(lineFormat)); inOrder.verify(out).println(testfile01.formatLineMtime(lineFormat)); inOrder.verify(out).println("Found 3 items"); inOrder.verify(out).println(testfile04.formatLineMtime(lineFormat)); inOrder.verify(out).println(testfile06.formatLineMtime(lineFormat)); 
inOrder.verify(out).println(testfile05.formatLineMtime(lineFormat)); verifyNoMoreInteractions(out); }
// Unmarshals the YAML file into a YamlJDBCConfiguration and delegates to
// the (dataSources, rootConfig) overload after swapping the data-source maps.
public static DataSource createDataSource(final File yamlFile) throws SQLException, IOException { YamlJDBCConfiguration rootConfig = YamlEngine.unmarshal(yamlFile, YamlJDBCConfiguration.class); return createDataSource(new YamlDataSourceConfigurationSwapper().swapToDataSources(rootConfig.getDataSources()), rootConfig); }
// The File-based factory overload must produce the same data source as
// asserted by the shared assertDataSource helper.
@Test void assertCreateDataSourceWithFile() throws Exception { assertDataSource(YamlShardingSphereDataSourceFactory.createDataSource(new File(getYamlFileUrl().toURI()))); }
public static boolean withinDateRange(long date, long compareTo, int dayRange) { // ms = dayRange x 24 hours/day x 60 min/hour x 60 sec/min x 1000 ms/sec final long msRange = dayRange * 24L * 60L * 60L; return (compareTo - date) < msRange; }
// 2 days apart is within a 5-day range; 6 days apart is not.
// NOTE(review): this exercises the ZonedDateTime overload of
// withinDateRange, not the epoch-millisecond (long) overload.
@Test public void testWithinZonedDateRange() { ZonedDateTime lastRun = ZonedDateTime.parse("2023-11-15T11:15:03Z"); ZonedDateTime current = ZonedDateTime.parse("2023-11-17T11:15:03Z"); int range = 5; boolean expResult = true; boolean result = DateUtil.withinDateRange(lastRun, current, range); assertEquals(expResult, result); current = ZonedDateTime.parse("2023-11-21T11:15:03Z"); expResult = false; result = DateUtil.withinDateRange(lastRun, current, range); assertEquals(expResult, result); }
// Injector entry point: statements other than CREATE <source> / CREATE AS
// SELECT pass through untouched. For the two create forms, delegates to the
// matching handler and falls back to the original statement when the handler
// returns empty. KsqlStatementException is rethrown as-is (it already carries
// statement context); any other KsqlException is rewrapped with the full
// recursive message chain and the masked statement text.
@SuppressWarnings("unchecked") @Override public <T extends Statement> ConfiguredStatement<T> inject( final ConfiguredStatement<T> statement ) { if (!(statement.getStatement() instanceof CreateSource) && !(statement.getStatement() instanceof CreateAsSelect)) { return statement; } try { if (statement.getStatement() instanceof CreateSource) { final ConfiguredStatement<CreateSource> createStatement = (ConfiguredStatement<CreateSource>) statement; return (ConfiguredStatement<T>) forCreateStatement(createStatement).orElse(createStatement); } else { final ConfiguredStatement<CreateAsSelect> createStatement = (ConfiguredStatement<CreateAsSelect>) statement; return (ConfiguredStatement<T>) forCreateAsStatement(createStatement).orElse( createStatement); } } catch (final KsqlStatementException e) { throw e; } catch (final KsqlException e) { throw new KsqlStatementException( ErrorMessageUtil.buildErrorMessage(e), statement.getMaskedStatementText(), e.getCause()); } }
// When the schema fetched by KEY_SCHEMA_ID has a STRING key but the query's
// key column is INTEGER, injection must fail with a message naming the
// incompatible key column.
@Test public void shouldThrowIfCtasKeyTableElementsNotCompatibleWrongKeyType() { // Given: givenFormatsAndProps("protobuf", null, ImmutableMap.of("KEY_SCHEMA_ID", new IntegerLiteral(42))); givenDDLSchemaAndFormats(LOGICAL_SCHEMA_INT_KEY, "protobuf", "avro", SerdeFeature.WRAP_SINGLES, SerdeFeature.UNWRAP_SINGLES); // When: final Exception e = assertThrows( KsqlException.class, () -> injector.inject(ctasStatement) ); // Then: assertThat(e.getMessage(), containsString("The following key columns are changed, missing or reordered: " + "[`key` INTEGER KEY]. Schema from schema registry is [`key` STRING KEY]")); }
// Expands a composite DataType into a ResolvedSchema: modern FieldsDataType
// directly, legacy structured types via the legacy path. Anything else is a
// caller error and raises IllegalArgumentException.
public static ResolvedSchema expandCompositeTypeToSchema(DataType dataType) { if (dataType instanceof FieldsDataType) { return expandCompositeType((FieldsDataType) dataType); } else if (dataType.getLogicalType() instanceof LegacyTypeInformationType && dataType.getLogicalType().getTypeRoot() == STRUCTURED_TYPE) { return expandLegacyCompositeType(dataType); } throw new IllegalArgumentException("Expected a composite type"); }
// A DistinctType wrapping a ROW must expand to the same columns as the
// underlying row; note f3 picks up the default LocalDateTime bridging while
// f2 keeps its explicit java.sql.Timestamp bridge.
@Test void testExpandDistinctType() { FieldsDataType dataType = (FieldsDataType) ROW( FIELD("f0", INT()), FIELD("f1", STRING()), FIELD("f2", TIMESTAMP(5).bridgedTo(Timestamp.class)), FIELD("f3", TIMESTAMP(3))); LogicalType originalLogicalType = dataType.getLogicalType(); DistinctType distinctLogicalType = DistinctType.newBuilder( ObjectIdentifier.of("catalog", "database", "type"), originalLogicalType) .build(); DataType distinctDataType = new FieldsDataType(distinctLogicalType, dataType.getChildren()); ResolvedSchema schema = DataTypeUtils.expandCompositeTypeToSchema(distinctDataType); assertThat(schema) .isEqualTo( ResolvedSchema.of( Column.physical("f0", INT()), Column.physical("f1", STRING()), Column.physical("f2", TIMESTAMP(5).bridgedTo(Timestamp.class)), Column.physical( "f3", TIMESTAMP(3).bridgedTo(LocalDateTime.class)))); }
// Loads the user's notification preference from their per-user ConfigMap.
// Falls back to a default UserNotificationPreference at every missing layer:
// no ConfigMap, null data map, or blank preference entry.
@Override public Mono<UserNotificationPreference> getByUser(String username) { var configName = buildUserPreferenceConfigMapName(username); return client.fetch(ConfigMap.class, configName) .map(config -> { if (config.getData() == null) { return new UserNotificationPreference(); } String s = config.getData().get(NOTIFICATION_PREFERENCE); if (StringUtils.isNotBlank(s)) { return JsonUtils.jsonToObject(s, UserNotificationPreference.class); } return new UserNotificationPreference(); }) .defaultIfEmpty(new UserNotificationPreference()); }
// A ConfigMap with null data must still yield a default preference whose
// reason-type notifier is non-null.
@Test void getByUserWhenConfigDataNotFound() { when(client.fetch(ConfigMap.class, "user-preferences-guqing")) .thenReturn(Mono.just(new ConfigMap())); userNotificationPreferenceService.getByUser("guqing") .as(StepVerifier::create) .consumeNextWith(preference -> assertThat(preference.getReasonTypeNotifier()).isNotNull() ) .verifyComplete(); verify(client).fetch(ConfigMap.class, "user-preferences-guqing"); }
// Serializes a K8sApiConfig to JSON. Scheme drives the credential fields:
// HTTPS always writes the CA/client cert and key data (token only if set),
// whereas non-HTTPS writes each credential field only when present. The
// external network CIDR is optional, and host-node infos are encoded via
// their own codec into an array.
@Override public ObjectNode encode(K8sApiConfig entity, CodecContext context) { ObjectNode node = context.mapper().createObjectNode() .put(CLUSTER_NAME, entity.clusterName()) .put(SEGMENT_ID, entity.segmentId()) .put(MODE, entity.mode().name()) .put(SCHEME, entity.scheme().name()) .put(IP_ADDRESS, entity.ipAddress().toString()) .put(PORT, entity.port()) .put(STATE, entity.state().name()) .put(DVR, entity.dvr()); if (entity.scheme() == HTTPS) { node.put(CA_CERT_DATA, entity.caCertData()) .put(CLIENT_CERT_DATA, entity.clientCertData()) .put(CLIENT_KEY_DATA, entity.clientKeyData()); if (entity.token() != null) { node.put(TOKEN, entity.token()); } } else { if (entity.token() != null) { node.put(TOKEN, entity.token()); } if (entity.caCertData() != null) { node.put(CA_CERT_DATA, entity.caCertData()); } if (entity.clientCertData() != null) { node.put(CLIENT_CERT_DATA, entity.clientCertData()); } if (entity.clientKeyData() != null) { node.put(CLIENT_KEY_DATA, entity.clientKeyData()); } } if (entity.extNetworkCidr() != null) { node.put(EXT_NETWORK_CIDR, entity.extNetworkCidr().toString()); } ArrayNode infos = context.mapper().createArrayNode(); entity.infos().forEach(info -> { ObjectNode infoJson = context.codec(HostNodesInfo.class).encode(info, context); infos.add(infoJson); }); node.set(HOST_NODES_INFO, infos); return node; }
// Round-trip check: a fully-populated HTTPS config (including host-node
// infos and external CIDR) must encode to JSON matching the custom matcher.
@Test public void testK8sApiConfigEncode() { HostNodesInfo info = new DefaultHostNodesInfo.Builder() .hostIp(IpAddress.valueOf("192.168.10.10")) .nodes(ImmutableSet.of("master", "worker")) .build(); K8sApiConfig config = DefaultK8sApiConfig.builder() .clusterName("kubernetes") .segmentId(1) .extNetworkCidr(IpPrefix.valueOf("192.168.200.0/24")) .mode(K8sApiConfig.Mode.NORMAL) .scheme(K8sApiConfig.Scheme.HTTPS) .ipAddress(IpAddress.valueOf("10.10.10.23")) .port(6443) .state(CONNECTED) .token("token") .caCertData("caCertData") .clientCertData("clientCertData") .clientKeyData("clientKeyData") .infos(ImmutableSet.of(info)) .dvr(true) .build(); ObjectNode configJson = k8sApiConfigCodec.encode(config, context); assertThat(configJson, matchesK8sApiConfig(config)); }
// Fluent setter; checkPositive rejects values < 1 with IllegalArgumentException.
@Nonnull public TpcConfig setEventloopCount(int eventloopCount) { this.eventloopCount = checkPositive("eventloopCount", eventloopCount); return this; }
// Zero eventloops is invalid and must be rejected.
@Test public void testConfigValidation() { TpcConfig tpcConfig = config.getTpcConfig(); assertThrows(IllegalArgumentException.class, () -> tpcConfig.setEventloopCount(0)); }
// REST: fetch one index set by id. Requires INDEXSETS_READ on the id;
// the default index set is loaded so the summary can flag whether this
// one is the default; a missing id maps to HTTP 404.
@GET @Path("{id}") @Timed @ApiOperation(value = "Get index set") @ApiResponses(value = { @ApiResponse(code = 403, message = "Unauthorized"), @ApiResponse(code = 404, message = "Index set not found"), }) public IndexSetSummary get(@ApiParam(name = "id", required = true) @PathParam("id") String id) { checkPermission(RestPermissions.INDEXSETS_READ, id); final IndexSetConfig defaultIndexSet = indexSetService.getDefault(); return indexSetService.get(id) .map(config -> IndexSetSummary.fromIndexSetConfig(config, config.equals(defaultIndexSet))) .orElseThrow(() -> new NotFoundException("Couldn't load index set with ID <" + id + ">")); }
// A known id returns its summary (not flagged default, since the mocked
// default differs) and hits the service exactly once for get and getDefault.
@Test public void get() { final IndexSetConfig indexSetConfig = createTestConfig("id", "title"); when(indexSetService.get("id")).thenReturn(Optional.of(indexSetConfig)); final IndexSetSummary summary = indexSetsResource.get("id"); verify(indexSetService, times(1)).get("id"); verify(indexSetService, times(1)).getDefault(); verifyNoMoreInteractions(indexSetService); assertThat(summary).isEqualTo(IndexSetSummary.fromIndexSetConfig(indexSetConfig, false)); }
// Backpressure guard: if the per-device message queue has reached the
// configured limit, close the session with MQTT QUOTA_EXCEEDED instead of
// enqueueing; otherwise add the message and drain the queue.
void enqueueRegularSessionMsg(ChannelHandlerContext ctx, MqttMessage msg) { final int queueSize = deviceSessionCtx.getMsgQueueSize(); if (queueSize >= context.getMessageQueueSizePerDeviceLimit()) { log.info("Closing current session because msq queue size for device {} exceed limit {} with msgQueueSize counter {} and actual queue size {}", deviceSessionCtx.getDeviceId(), context.getMessageQueueSizePerDeviceLimit(), queueSize, deviceSessionCtx.getMsgQueueSize()); closeCtx(ctx, MqttReasonCodes.Disconnect.QUOTA_EXCEEDED); return; } deviceSessionCtx.addToQueue(msg); processMsgQueue(ctx); //Under the normal conditions the msg queue will contain 0 messages. Many messages will be processed on device connect event in separate thread pool }
// Filling the queue exactly to MSG_QUEUE_LIMIT must keep every message
// (in order) without triggering the quota-exceeded close.
@Test public void givenQueueLimit_whenEnqueueRegularSessionMsgOverLimit_thenOK() { List<MqttPublishMessage> messages = Stream.generate(this::getMqttPublishMessage).limit(MSG_QUEUE_LIMIT).collect(Collectors.toList()); messages.forEach(msg -> handler.enqueueRegularSessionMsg(ctx, msg)); assertThat(handler.deviceSessionCtx.getMsgQueueSize(), is(MSG_QUEUE_LIMIT)); assertThat(handler.deviceSessionCtx.getMsgQueueSnapshot(), contains(messages.toArray())); }