focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
public int completeName(String buffer, int cursor, List<InterpreterCompletion> candidates, Map<String, String> aliases) { CursorArgument cursorArgument = parseCursorArgument(buffer, cursor); // find schema and table name if they are String schema; String table; String column; if (cursorArgument.getSchema() == null) { // process all List<CharSequence> keywordsCandidates = new ArrayList<>(); List<CharSequence> schemaCandidates = new ArrayList<>(); int keywordsRes = completeKeyword(buffer, cursor, keywordsCandidates); int schemaRes = completeSchema(buffer, cursor, schemaCandidates); addCompletions(candidates, keywordsCandidates, CompletionType.keyword.name()); addCompletions(candidates, schemaCandidates, CompletionType.schema.name()); return NumberUtils.max(keywordsRes, schemaRes); } else { schema = cursorArgument.getSchema(); if (aliases.containsKey(schema)) { // process alias case String alias = aliases.get(schema); int pointPos = alias.indexOf('.'); schema = alias.substring(0, pointPos); table = alias.substring(pointPos + 1); column = cursorArgument.getColumn(); List<CharSequence> columnCandidates = new ArrayList<>(); int columnRes = completeColumn(schema, table, column, cursorArgument.getCursorPosition(), columnCandidates); addCompletions(candidates, columnCandidates, CompletionType.column.name()); // process schema.table case } else if (cursorArgument.getTable() != null && cursorArgument.getColumn() == null) { List<CharSequence> tableCandidates = new ArrayList<>(); table = cursorArgument.getTable(); int tableRes = completeTable(schema, table, cursorArgument.getCursorPosition(), tableCandidates); addCompletions(candidates, tableCandidates, CompletionType.table.name()); return tableRes; } else { List<CharSequence> columnCandidates = new ArrayList<>(); table = cursorArgument.getTable(); column = cursorArgument.getColumn(); int columnRes = completeColumn(schema, table, column, cursorArgument.getCursorPosition(), columnCandidates); addCompletions(candidates, 
columnCandidates, CompletionType.column.name()); } } return -1; }
// Verifies that completing "prod_dds.fin" (cursor inside the table segment)
// yields exactly one candidate: the table "financial_account".
@Test
void testCompleteName_SimpleTable() {
  String buffer = "prod_dds.fin";
  int cursor = 11; // just after "fin"
  List<InterpreterCompletion> candidates = new ArrayList<>();
  Map<String, String> aliases = new HashMap<>();
  sqlCompleter.completeName(buffer, cursor, candidates, aliases);
  assertEquals(1, candidates.size());
  assertTrue(candidates.contains(
      new InterpreterCompletion("financial_account", "financial_account", CompletionType.table.name())));
}
/**
 * Static factory for a binary expression template.
 *
 * @param binaryOp the AST kind of the operator; must be present in {@code OP_CODES}
 * @param lhs      left operand
 * @param rhs      right operand
 * @throws IllegalArgumentException if {@code binaryOp} is not a supported binary operation
 */
public static UBinary create(Kind binaryOp, UExpression lhs, UExpression rhs) {
  final boolean supported = OP_CODES.containsKey(binaryOp);
  checkArgument(supported, "%s is not a supported binary operation", binaryOp);
  return new AutoValue_UBinary(binaryOp, lhs, rhs);
}
// Verifies that the template for "4 & 17" unifies and inlines a bitwise-AND
// of two int literals.
@Test
public void bitwiseAnd() {
  assertUnifiesAndInlines(
      "4 & 17", UBinary.create(Kind.AND, ULiteral.intLit(4), ULiteral.intLit(17)));
}
public void listenToUris(String clusterName) { // if cluster name is a symlink, watch for D2SymlinkNode instead String resourceName = D2_URI_NODE_PREFIX + clusterName; if (SymlinkUtil.isSymlinkNodeOrPath(clusterName)) { listenToSymlink(clusterName, resourceName); } else { _watchedUriResources.computeIfAbsent(clusterName, k -> { XdsClient.D2URIMapResourceWatcher watcher = getUriResourceWatcher(clusterName); _xdsClient.watchXdsResource(resourceName, watcher); return watcher; }); } }
// Exercises the normal (non-symlink) URI watch path end to end:
// initial update publishes an initialize event; subsequent updates emit
// status-receipt events only for added/removed hosts, never for pure updates.
@Test
public void testListenToNormalUri() throws PropertySerializationException {
  XdsToD2PropertiesAdaptorFixture fixture = new XdsToD2PropertiesAdaptorFixture();
  fixture.getSpiedAdaptor().listenToUris(PRIMARY_CLUSTER_NAME);
  verify(fixture._xdsClient).watchXdsResource(eq(PRIMARY_URI_RESOURCE_NAME), anyMapWatcher());
  XdsD2.D2URI protoUri = getD2URI(PRIMARY_CLUSTER_NAME, URI_NAME, VERSION);
  Map<String, XdsD2.D2URI> uriMap = new HashMap<>(Collections.singletonMap(URI_NAME, protoUri));
  fixture._uriMapWatcher.onChanged(new XdsClient.D2URIMapUpdate(uriMap));
  verify(fixture._uriEventBus).publishInitialize(PRIMARY_CLUSTER_NAME, _uriSerializer.fromProto(protoUri));
  verify(fixture._eventEmitter).emitSDStatusInitialRequestEvent(
      eq(PRIMARY_CLUSTER_NAME), eq(true), anyLong(), eq(true));
  // no status update receipt event emitted for initial update
  verify(fixture._eventEmitter, never()).emitSDStatusUpdateReceiptEvent(
      any(), any(), anyInt(), any(), anyBoolean(), any(), any(), any(), any(), any(), anyLong());
  // add uri 2
  uriMap.put(URI_NAME_2, getD2URI(PRIMARY_CLUSTER_NAME, URI_NAME_2, VERSION));
  fixture._uriMapWatcher.onChanged(new XdsClient.D2URIMapUpdate(uriMap));
  verify(fixture._eventEmitter).emitSDStatusInitialRequestEvent(
      eq(PRIMARY_CLUSTER_NAME), eq(true), anyLong(), eq(true)); // no more initial request event emitted
  verify(fixture._eventEmitter).emitSDStatusUpdateReceiptEvent( // status update receipt event emitted for added uri
      any(), eq(HOST_2), anyInt(), eq(ServiceDiscoveryEventEmitter.StatusUpdateActionType.MARK_READY),
      anyBoolean(), any(), any(), any(), eq((int) VERSION), any(), anyLong());
  // update uri 1, remove uri2, add uri3
  uriMap.clear();
  uriMap.put(URI_NAME, getD2URI(PRIMARY_CLUSTER_NAME, URI_NAME, VERSION_2));
  uriMap.put(URI_NAME_3, getD2URI(PRIMARY_CLUSTER_NAME, URI_NAME_3, VERSION));
  fixture._uriMapWatcher.onChanged(new XdsClient.D2URIMapUpdate(uriMap));
  // events should be emitted only for remove/add, but not update
  verify(fixture._eventEmitter, never()).emitSDStatusUpdateReceiptEvent(
      any(), eq(HOST_1), anyInt(), eq(ServiceDiscoveryEventEmitter.StatusUpdateActionType.MARK_READY),
      anyBoolean(), any(), any(), any(), eq((int) VERSION_2), any(), anyLong());
  verify(fixture._eventEmitter).emitSDStatusUpdateReceiptEvent(
      any(), eq(HOST_2), anyInt(), eq(StatusUpdateActionType.MARK_DOWN),
      anyBoolean(), any(), any(), any(), eq((int) VERSION), any(), anyLong());
  verify(fixture._eventEmitter).emitSDStatusUpdateReceiptEvent(
      any(), eq(HOST_3), anyInt(), eq(ServiceDiscoveryEventEmitter.StatusUpdateActionType.MARK_READY),
      anyBoolean(), any(), any(), any(), eq((int) VERSION), any(), anyLong());
}
/**
 * Blocks until the group is active, retrying indefinitely and logging a
 * warning for every attempt that times out without success.
 */
public void ensureActiveGroup() {
  for (;;) {
    if (ensureActiveGroup(time.timer(Long.MAX_VALUE))) {
      return;
    }
    log.warn("still waiting to ensure active group");
  }
}
// Simulates a wakeup() arriving right after the JoinGroup response:
// ensureActiveGroup() must throw WakeupException, and a later poll() plus
// retry must complete the join without repeating onJoinPrepare.
@Test
public void testWakeupAfterJoinGroupReceivedExternalCompletion() throws Exception {
  setupCoordinator();
  mockClient.prepareResponse(groupCoordinatorResponse(node, Errors.NONE));
  mockClient.prepareResponse(body -> {
    boolean isJoinGroupRequest = body instanceof JoinGroupRequest;
    if (isJoinGroupRequest)
      // wakeup after the request returns
      consumerClient.wakeup();
    return isJoinGroupRequest;
  }, joinGroupFollowerResponse(1, memberId, leaderId, Errors.NONE));
  mockClient.prepareResponse(syncGroupResponse(Errors.NONE));
  AtomicBoolean heartbeatReceived = prepareFirstHeartbeat();
  assertThrows(WakeupException.class, () -> coordinator.ensureActiveGroup(),
      "Should have woken up from ensureActiveGroup()");
  assertEquals(1, coordinator.onJoinPrepareInvokes);
  assertEquals(0, coordinator.onJoinCompleteInvokes);
  assertFalse(heartbeatReceived.get());
  // the join group completes in this poll()
  consumerClient.poll(mockTime.timer(0));
  coordinator.ensureActiveGroup();
  assertEquals(1, coordinator.onJoinPrepareInvokes);
  assertEquals(1, coordinator.onJoinCompleteInvokes);
  awaitFirstHeartbeat(heartbeatReceived);
}
/**
 * Returns the configured plain access configs.
 * Note: this exposes the internal list reference, not a defensive copy.
 */
public List<PlainAccessConfig> getPlainAccessConfigs() {
  return this.plainAccessConfigs;
}
// Verifies the getter returns exactly the list passed to the setter.
@Test
public void testGetPlainAccessConfigs() {
  AclConfig aclConfig = new AclConfig();
  List<PlainAccessConfig> expected = Arrays.asList(new PlainAccessConfig(), new PlainAccessConfig());
  aclConfig.setPlainAccessConfigs(expected);
  assertEquals("Plain access configs should match", expected, aclConfig.getPlainAccessConfigs());
  assertEquals("The plainAccessConfigs list should be equal to 2", 2, aclConfig.getPlainAccessConfigs().size());
}
/**
 * Returns the set of property names this implementation supports.
 * The SpotBugs EI_EXPOSE_REP warning is suppressed because the returned
 * ImmutableSet cannot be mutated by callers.
 */
@Override
@SuppressFBWarnings(value = "EI_EXPOSE_REP")
public ImmutableSet<String> getSupportedProperties() {
  return SUPPORTED_PROPERTIES;
}
// Verifies the getter returns the class-level SUPPORTED_PROPERTIES constant.
@Test
public void shouldGetSupportedProperties() {
  // Given:
  final JsonProperties properties = new JsonProperties(ImmutableMap.of());
  // When:
  final ImmutableSet<String> supportedProperties = properties.getSupportedProperties();
  // Then:
  assertThat(supportedProperties, is(JsonProperties.SUPPORTED_PROPERTIES));
}
public static BigDecimal jsToBigNumber( Object value, String classType ) { if ( classType.equalsIgnoreCase( JS_UNDEFINED ) ) { return null; } else if ( classType.equalsIgnoreCase( JS_NATIVE_NUM ) ) { Number nb = Context.toNumber( value ); return BigDecimal.valueOf( nb.doubleValue() ); } else if ( classType.equalsIgnoreCase( JS_NATIVE_JAVA_OBJ ) ) { // Is it a BigDecimal class ? return convertNativeJavaToBigDecimal( value ); } else if ( classType.equalsIgnoreCase( "java.lang.Byte" ) ) { return BigDecimal.valueOf( ( (Byte) value ).longValue() ); } else if ( classType.equalsIgnoreCase( "java.lang.Short" ) ) { return BigDecimal.valueOf( ( (Short) value ).longValue() ); } else if ( classType.equalsIgnoreCase( "java.lang.Integer" ) ) { return BigDecimal.valueOf( ( (Integer) value ).longValue() ); } else if ( classType.equalsIgnoreCase( "java.lang.Long" ) ) { return BigDecimal.valueOf( ( (Long) value ).longValue() ); } else if ( classType.equalsIgnoreCase( "java.lang.Double" ) ) { return BigDecimal.valueOf( ( (Double) value ).doubleValue() ); } else if ( classType.equalsIgnoreCase( "java.lang.String" ) ) { return BigDecimal.valueOf( ( new Long( (String) value ) ).longValue() ); } else { throw new UnsupportedOperationException( "JavaScript conversion to BigNumber not implemented for " + classType ); } }
// The undefined class type must yield null regardless of the value argument.
@Test
public void jsToBigNumber_Undefined() throws Exception {
  assertNull( JavaScriptUtils.jsToBigNumber( null, UNDEFINED ) );
  Object objMock = mock( Object.class );
  assertNull( JavaScriptUtils.jsToBigNumber( objMock, UNDEFINED ) );
}
/**
 * Builds a Kubernetes ObjectReference (kind "Pod") pointing at the given pod's
 * namespace and name.
 */
ObjectReference createPodReference(Pod pod) {
  final ObjectMeta meta = pod.getMetadata();
  return new ObjectReferenceBuilder()
      .withKind("Pod")
      .withNamespace(meta.getNamespace())
      .withName(meta.getName())
      .build();
}
// Verifies that a Pod's metadata (name, namespace) is mirrored into the
// ObjectReference with kind "Pod".
@Test
void testObjectReferenceFromPod() {
  ObjectMeta podMeta = new ObjectMeta();
  podMeta.setName("cluster-kafka-0");
  podMeta.setNamespace("strimzi-kafka");
  Pod mockPod = Mockito.mock(Pod.class);
  when(mockPod.getMetadata()).thenReturn(podMeta);
  ObjectReference podRef = publisher.createPodReference(mockPod);
  assertThat(podRef.getName(), is("cluster-kafka-0"));
  assertThat(podRef.getNamespace(), is("strimzi-kafka"));
  assertThat(podRef.getKind(), is("Pod"));
}
/**
 * Creates a composite consumer over the given delegates.
 * A defensive copy is taken so later mutation of the caller's list has no effect.
 *
 * @param delegates the consumers to fan events out to; must not be null
 */
public CompositeRegistryEventConsumer(List<RegistryEventConsumer<E>> delegates) {
  this.delegates = new ArrayList<>(requireNonNull(delegates));
}
// Registers two delegate consumers, performs add/remove/replace on the
// registry, then asserts both delegates observed the same events.
@Test
public void testCompositeRegistryEventConsumer() {
  List<RegistryEventConsumer<String>> consumers = new ArrayList<>();
  TestRegistryEventConsumer registryEventConsumer1 = new TestRegistryEventConsumer();
  TestRegistryEventConsumer registryEventConsumer2 = new TestRegistryEventConsumer();
  consumers.add(registryEventConsumer1);
  consumers.add(registryEventConsumer2);
  CompositeRegistryEventConsumer<String> compositeRegistryEventConsumer = new CompositeRegistryEventConsumer<>(
      consumers);
  TestRegistry testRegistry = new TestRegistry(compositeRegistryEventConsumer);
  String addedEntry1 = testRegistry.computeIfAbsent("name", () -> "entry1");
  assertThat(addedEntry1).isEqualTo("entry1");
  String addedEntry2 = testRegistry.computeIfAbsent("name2", () -> "entry2");
  assertThat(addedEntry2).isEqualTo("entry2");
  Optional<String> removedEntry = testRegistry.remove("name");
  assertThat(removedEntry).isNotEmpty().hasValue("entry1");
  Optional<String> replacedEntry = testRegistry.replace("name2", "entry3");
  assertThat(replacedEntry).isNotEmpty().hasValue("entry2");
  assertConsumer(registryEventConsumer1);
  assertConsumer(registryEventConsumer2);
}
/**
 * Runs the strongly-connected-components search over every vertex of the
 * graph, starting a traversal only from vertices not yet assigned to a
 * component, and returns the built result.
 */
@Override
public SccResult<V, E> search(Graph<V, E> graph, EdgeWeigher<V, E> weigher) {
  SccResult<V, E> result = new SccResult<>(graph);
  for (V vertex : graph.getVertexes()) {
    // Skip vertices already visited by an earlier connect() call.
    if (result.data(vertex) == null) {
      connect(graph, vertex, weigher, result);
    }
  }
  return result.build();
}
// Two disjoint 4-vertex cycles (A-B-C-D and E-F-G-H) must yield exactly two
// strongly connected components, each with 4 vertices and 4 edges.
@Test
public void twoUnconnectedCluster() {
  graph = new AdjacencyListsGraph<>(vertexes(), of(new TestEdge(A, B),
      new TestEdge(B, C),
      new TestEdge(C, D),
      new TestEdge(D, A),
      new TestEdge(E, F),
      new TestEdge(F, G),
      new TestEdge(G, H),
      new TestEdge(H, E)));
  TarjanGraphSearch<TestVertex, TestEdge> gs = new TarjanGraphSearch<>();
  SccResult<TestVertex, TestEdge> result = gs.search(graph, null);
  validate(result, 2);
  validate(result, 0, 4, 4);
  validate(result, 1, 4, 4);
}
/**
 * Renders the favourites page: loads the current user's favourite product ids,
 * fetches all products matching the optional filter, keeps only favourites,
 * stores them in the model, and resolves the favourites view name.
 *
 * @param model  view model; receives "filter" and "products" attributes
 * @param filter optional substring filter forwarded to the products service
 * @return the view name, emitted after the model has been populated
 */
@GetMapping("favourites")
public Mono<String> getFavouriteProductsPage(Model model,
    @RequestParam(name = "filter", required = false) String filter) {
  model.addAttribute("filter", filter);
  return this.favouriteProductsClient.findFavouriteProducts()
      .map(FavouriteProduct::productId)
      .collectList()
      .flatMap(favouriteProducts -> this.productsClient.findAllProducts(filter)
          // Keep only products whose id appears in the favourites list.
          .filter(product -> favouriteProducts.contains(product.id()))
          .collectList()
          .doOnNext(products -> model.addAttribute("products", products)))
      .thenReturn("customer/products/favourites");
}
// Verifies the favourites page intersects the filtered product list with the
// user's favourite ids (products 1 and 3 here) and populates the model.
// Fixture strings are intentionally non-ASCII (Russian) and must stay as-is.
@Test
void getFavouriteProductsPage_ReturnsFavouriteProductsPage() {
  // given
  var model = new ConcurrentModel();
  doReturn(Flux.fromIterable(List.of(
      new Product(1, "Отфильтрованный товар №1", "Описание отфильтрованного товара №1"),
      new Product(2, "Отфильтрованный товар №2", "Описание отфильтрованного товара №2"),
      new Product(3, "Отфильтрованный товар №3", "Описание отфильтрованного товара №3")
  ))).when(this.productsClient).findAllProducts("фильтр");
  doReturn(Flux.fromIterable(List.of(
      new FavouriteProduct(UUID.fromString("a16f0218-cbaf-11ee-9e6c-6b0fa3631587"), 1),
      new FavouriteProduct(UUID.fromString("a42ff37c-cbaf-11ee-8b1d-cb00912914b5"), 3)
  ))).when(this.favouriteProductsClient).findFavouriteProducts();
  // when
  StepVerifier.create(this.controller.getFavouriteProductsPage(model, "фильтр"))
      // then
      .expectNext("customer/products/favourites")
      .verifyComplete();
  assertEquals("фильтр", model.getAttribute("filter"));
  assertEquals(List.of(
      new Product(1, "Отфильтрованный товар №1", "Описание отфильтрованного товара №1"),
      new Product(3, "Отфильтрованный товар №3", "Описание отфильтрованного товара №3")),
      model.getAttribute("products"));
  verify(this.productsClient).findAllProducts("фильтр");
  verify(this.favouriteProductsClient).findFavouriteProducts();
  verifyNoMoreInteractions(this.productsClient, this.favouriteProductsClient);
}
/**
 * Returns command completions whose names start with the last whitespace- or
 * newline-separated token of the buffer; an empty buffer matches every keyword.
 */
@Override
public List<InterpreterCompletion> completion(String buf, int cursor,
    InterpreterContext interpreterContext) {
  String[] tokens = splitAndRemoveEmpty(splitAndRemoveEmpty(buf, "\n"), " ");
  String prefix = tokens.length == 0 ? "" : tokens[tokens.length - 1];
  List<InterpreterCompletion> matches = new LinkedList<>();
  for (String keyword : keywords) {
    if (keyword.startsWith(prefix)) {
      matches.add(new InterpreterCompletion(keyword, keyword, CompletionType.command.name()));
    }
  }
  return matches;
}
// Checks prefix matching of completion(): progressively longer prefixes
// ("c", "co", "copy") narrow the candidate set, a non-matching prefix yields
// nothing, and an empty buffer returns every keyword.
@Test
void testCompletion() {
  List<InterpreterCompletion> expectedResultOne = Arrays.asList(
      new InterpreterCompletion("cat", "cat", CompletionType.command.name()),
      new InterpreterCompletion("chgrp", "chgrp", CompletionType.command.name()),
      new InterpreterCompletion("chmod", "chmod", CompletionType.command.name()),
      new InterpreterCompletion("chown", "chown", CompletionType.command.name()),
      new InterpreterCompletion("copyFromLocal", "copyFromLocal", CompletionType.command.name()),
      new InterpreterCompletion("copyToLocal", "copyToLocal", CompletionType.command.name()),
      new InterpreterCompletion("count", "count", CompletionType.command.name()),
      new InterpreterCompletion("createLineage", "createLineage", CompletionType.command.name()));
  List<InterpreterCompletion> expectedResultTwo = Arrays.asList(
      new InterpreterCompletion("copyFromLocal", "copyFromLocal", CompletionType.command.name()),
      new InterpreterCompletion("copyToLocal", "copyToLocal", CompletionType.command.name()),
      new InterpreterCompletion("count", "count", CompletionType.command.name()));
  List<InterpreterCompletion> expectedResultThree = Arrays.asList(
      new InterpreterCompletion("copyFromLocal", "copyFromLocal", CompletionType.command.name()),
      new InterpreterCompletion("copyToLocal", "copyToLocal", CompletionType.command.name()));
  List<InterpreterCompletion> expectedResultNone = new ArrayList<>();
  List<InterpreterCompletion> resultOne = alluxioInterpreter.completion("c", 0, null);
  List<InterpreterCompletion> resultTwo = alluxioInterpreter.completion("co", 0, null);
  List<InterpreterCompletion> resultThree = alluxioInterpreter.completion("copy", 0, null);
  List<InterpreterCompletion> resultNotMatch = alluxioInterpreter.completion("notMatch", 0, null);
  List<InterpreterCompletion> resultAll = alluxioInterpreter.completion("", 0, null);
  assertEquals(expectedResultOne, resultOne);
  assertEquals(expectedResultTwo, resultTwo);
  assertEquals(expectedResultThree, resultThree);
  assertEquals(expectedResultNone, resultNotMatch);
  List<String> allCompletionList = new ArrayList<>();
  for (InterpreterCompletion ic : resultAll) {
    allCompletionList.add(ic.getName());
  }
  assertEquals(alluxioInterpreter.keywords, allCompletionList);
}
/**
 * Processes an uploaded (base64-encoded) CSV of services: resets the
 * per-invocation accumulators, delegates row parsing to the superclass, then —
 * if any services were collected — resolves legacy ids, persists them, and
 * wires up parent/child relations. With {@code dryRun} the save/relation steps
 * are expected to skip persistence (delegated to saveAll/processServiceParentChildren).
 *
 * @param encodedCsvData base64-encoded CSV payload
 * @param dryRun         when true, validate without persisting
 * @return the result map produced by the superclass parser
 */
@Override
public Map<String, Object> processCsvFile(String encodedCsvData, boolean dryRun)
    throws JsonProcessingException {
  // NOTE(review): these are instance fields reset per call — this method is
  // not safe for concurrent invocation on the same bean; confirm usage.
  services = new HashMap<>();
  serviceParentChildren = new HashMap<>();
  Map<String, Object> result = super.processCsvFile(encodedCsvData, dryRun);
  if (!services.isEmpty()) {
    retrieveLegacyServiceIds();
    saveAll(dryRun);
    processServiceParentChildren(serviceParentChildren, dryRun);
  }
  return result;
}
// Happy-path update: a CSV row whose service UUID already exists must be
// reported as succeeded (no failures) and receive legacy id 13.
// NOTE(review): the text-block fixture appears flattened onto one line by
// formatting; a Java text block normally opens with a newline after """ —
// confirm against the original source before editing the fixture.
@Test
void processCsvFileSuccessUpdatedServiceTest() throws IOException {
  String csvData = """SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS""";
  Service service = new Service();
  Optional<Service> optService = Optional.of(service);
  when(serviceRepositoryMock.findFirstByServiceUuid("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS")).thenReturn(optService);
  mockAdmin();
  mockconnection();
  Map<String, Object> resultMap = csvService.processCsvFile(encodeCsv(csvData), false);
  assertEquals("Bestand verwerkt", resultMap.get("result"));
  assertTrue(((ArrayList) resultMap.get("failed")).isEmpty());
  List<String> succeededArray = new ArrayList<>();
  succeededArray.add("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS");
  assertArrayEquals(((ArrayList) resultMap.get("succeeded")).toArray(), succeededArray.toArray());
  assertEquals((Long)13L, service.getLegacyServiceId());
}
/**
 * Fires the given (url, body) pairs as parallel HTTP POSTs by delegating to
 * the generic execute() with the POST method and an HttpPost factory.
 *
 * @param urlsAndRequestBodies target URL and request-body pairs
 * @param requestHeaders       optional headers applied to every request
 * @param timeoutMs            per-request timeout in milliseconds
 * @return a CompletionService yielding one response per submitted request
 */
public CompletionService<MultiHttpRequestResponse> executePost(
    List<Pair<String, String>> urlsAndRequestBodies,
    @Nullable Map<String, String> requestHeaders, int timeoutMs) {
  return execute(urlsAndRequestBodies, requestHeaders, timeoutMs, "POST", HttpPost::new);
}
// Issues 5 POSTs across 4 servers (one server hit twice) and expects
// 3 successes, 1 error, and 1 timeout under a 1s request timeout.
@Test
public void testMultiPost() {
  List<Pair<String, String>> urlsAndRequestBodies =
      List.of(Pair.of("http://localhost:" + String.valueOf(_portStart) + URI_PATH, "b0"),
          Pair.of("http://localhost:" + String.valueOf(_portStart + 1) + URI_PATH, "b1"),
          Pair.of("http://localhost:" + String.valueOf(_portStart + 2) + URI_PATH, "b2"),
          // 2nd request to the same server
          Pair.of("http://localhost:" + String.valueOf(_portStart) + URI_PATH, "b3"),
          Pair.of("http://localhost:" + String.valueOf(_portStart + 3) + URI_PATH, "b4"));
  MultiHttpRequest mpost =
      new MultiHttpRequest(Executors.newCachedThreadPool(), new PoolingHttpClientConnectionManager());
  // timeout value needs to be less than 5000ms set above for
  // third server
  final int requestTimeoutMs = 1000;
  CompletionService<MultiHttpRequestResponse> completionService =
      mpost.executePost(urlsAndRequestBodies, null, requestTimeoutMs);
  TestResult result = collectResult(completionService, urlsAndRequestBodies.size());
  Assert.assertEquals(result.getSuccess(), 3);
  Assert.assertEquals(result.getErrors(), 1);
  Assert.assertEquals(result.getTimeouts(), 1);
}
/**
 * Checks that the arena block size does not exceed the mutable limit.
 *
 * @param arenaBlockSize configured arena block size
 * @param mutableLimit   maximum allowed size
 * @return true when the block size is within the limit
 */
@VisibleForTesting
static boolean validateArenaBlockSize(long arenaBlockSize, long mutableLimit) {
  return !(arenaBlockSize > mutableLimit);
}
// An 8 MiB arena block is invalid against a 0.5x limit and valid against a
// 1.5x limit.
@Test
public void testValidateArenaBlockSize() {
  long arenaBlockSize = 8 * 1024 * 1024;
  assertFalse(
      ForStMemoryControllerUtils.validateArenaBlockSize(
          arenaBlockSize, (long) (arenaBlockSize * 0.5)));
  assertTrue(
      ForStMemoryControllerUtils.validateArenaBlockSize(
          arenaBlockSize, (long) (arenaBlockSize * 1.5)));
}
/** Returns the configured connector factory (serialized as the "connector" JSON property). */
@JsonProperty
public ConnectorFactory getConnector() {
  return this.connector;
}
// The connector must be an HttpConnectorFactory whose port is 0 (ephemeral).
@Test
void testGetPort() {
  assertThat(http.getConnector())
      .isInstanceOfSatisfying(HttpConnectorFactory.class,
          httpConnectorFactory -> assertThat(httpConnectorFactory.getPort()).isZero());
}
/**
 * Asserts the subject contains at least the given elements; the two required
 * parameters plus varargs force callers to pass two or more expectations.
 * Delegates to {@code containsAtLeastElementsIn} over the accumulated varargs.
 *
 * @return an Ordered allowing a follow-up in-order assertion
 */
@CanIgnoreReturnValue
public final Ordered containsAtLeast(
    @Nullable Object firstExpected,
    @Nullable Object secondExpected,
    @Nullable Object @Nullable ... restOfExpected) {
  return containsAtLeastElementsIn(accumulate(firstExpected, secondExpected, restOfExpected));
}
// Duplicated missing expectations must be reported with a copy count
// ("4 [3 copies]") rather than listed three times.
@Test
public void iterableContainsAtLeastWithDuplicateMissingElements() {
  expectFailureWhenTestingThat(asList(1, 2)).containsAtLeast(4, 4, 4);
  assertFailureValue("missing (3)", "4 [3 copies]");
}
/**
 * Renames a group after verifying it is not the default group, and returns the
 * updated group information.
 *
 * @param newName the new name, or null to keep the current one (passed through to updateName)
 */
public GroupInformation updateGroup(DbSession dbSession, GroupDto group, @Nullable String newName) {
  checkGroupIsNotDefault(dbSession, group);
  GroupDto renamed = updateName(dbSession, group, newName);
  return groupDtoToGroupInformation(renamed, dbSession);
}
// Passing the current name must succeed without triggering a DB update.
@Test
public void updateGroup_whenGroupNameDoesntChange_succeeds() {
  GroupDto group = mockGroupDto();
  mockDefaultGroup();
  assertThatNoException()
      .isThrownBy(() -> groupService.updateGroup(dbSession, group, group.getName()));
  verify(dbClient.groupDao(), never()).update(dbSession, group);
}
/**
 * Applies the given function to the pub/sub connection under retry and timing.
 * RedisExceptions propagate unchanged; any other failure is wrapped in a
 * RedisException.
 */
public <T> T withPubSubConnection(final Function<StatefulRedisClusterPubSubConnection<K, V>, T> function) {
  try {
    return retry.executeCallable(() -> executeTimer.record(() -> function.apply(pubSubConnection)));
  } catch (final RedisException redisException) {
    // Preserve the original exception type for callers that catch it.
    throw redisException;
  } catch (final Throwable throwable) {
    throw new RedisException(throwable);
  }
}
// Two timeouts then success must be retried to the value; three consecutive
// timeouts must exhaust the retry budget and rethrow the timeout exception.
@Test
void testRetry() {
  when(pubSubCommands.get(anyString()))
      .thenThrow(new RedisCommandTimeoutException())
      .thenThrow(new RedisCommandTimeoutException())
      .thenReturn("value");
  assertEquals("value",
      faultTolerantPubSubConnection.withPubSubConnection(connection -> connection.sync().get("key")));
  when(pubSubCommands.get(anyString()))
      .thenThrow(new RedisCommandTimeoutException())
      .thenThrow(new RedisCommandTimeoutException())
      .thenThrow(new RedisCommandTimeoutException())
      .thenReturn("value");
  assertThrows(RedisCommandTimeoutException.class,
      () -> faultTolerantPubSubConnection.withPubSubConnection(connection -> connection.sync().get("key")));
}
/**
 * Builds the final parameter map for a step by merging, in precedence order:
 * global step defaults, runtime-injected template params, step-type defaults,
 * system-injected workflow/step info, user restart/run params, and finally the
 * step definition itself. Merge order is significant — later merges override
 * earlier ones per ParamsMergeHelper's mode rules.
 *
 * @param workflowSummary workflow-level context (run params, restart config)
 * @param stepDefinition  the step being instantiated
 * @param stepRuntime     runtime used to inject template-schema params
 * @param runtimeSummary  step runtime state (restart config, identity)
 * @return merged, cleaned parameters keyed by name
 */
public Map<String, Parameter> generateMergedStepParams(
    WorkflowSummary workflowSummary,
    Step stepDefinition,
    StepRuntime stepRuntime,
    StepRuntimeSummary runtimeSummary) {
  Map<String, ParamDefinition> allParamDefs = new LinkedHashMap<>();
  // Start with default step level params if present
  Map<String, ParamDefinition> globalDefault = defaultParamManager.getDefaultStepParams();
  if (globalDefault != null) {
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        globalDefault,
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.SYSTEM_DEFAULT));
  }
  // Merge in injected params returned by step if present (template schema)
  Map<String, ParamDefinition> injectedParams =
      stepRuntime.injectRuntimeParams(workflowSummary, stepDefinition);
  maybeOverrideParamType(allParamDefs);
  if (injectedParams != null) {
    maybeOverrideParamType(injectedParams);
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        injectedParams,
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.TEMPLATE_SCHEMA));
  }
  // Merge in params applicable to step type
  Optional<Map<String, ParamDefinition>> defaultStepTypeParams =
      defaultParamManager.getDefaultParamsForType(stepDefinition.getType());
  if (defaultStepTypeParams.isPresent()) {
    LOG.debug("Merging step level default for {}", stepDefinition.getType());
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        defaultStepTypeParams.get(),
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.SYSTEM_DEFAULT));
  }
  // Merge in workflow and step info
  ParamsMergeHelper.mergeParams(
      allParamDefs,
      injectWorkflowAndStepInfoParams(workflowSummary, runtimeSummary),
      ParamsMergeHelper.MergeContext.stepCreate(ParamSource.SYSTEM_INJECTED));
  // merge step run param and user provided restart step run params
  // first to get undefined params from both run param and restart params
  Map<String, ParamDefinition> undefinedRestartParams = new LinkedHashMap<>();
  Optional<Map<String, ParamDefinition>> stepRestartParams =
      getUserStepRestartParam(workflowSummary, runtimeSummary);
  stepRestartParams.ifPresent(undefinedRestartParams::putAll);
  Optional<Map<String, ParamDefinition>> stepRunParams =
      getStepRunParams(workflowSummary, runtimeSummary);
  // Pull out reserved, system-injected CONSTANT params during a restart so they
  // can be re-applied last with source RESTART.
  Map<String, ParamDefinition> systemInjectedRestartRunParams = new LinkedHashMap<>();
  stepRunParams.ifPresent(
      params -> {
        params.forEach(
            (key, val) -> {
              if (runtimeSummary.getRestartConfig() != null
                  && Constants.RESERVED_PARAM_NAMES.contains(key)
                  && val.getMode() == ParamMode.CONSTANT
                  && val.getSource() == ParamSource.SYSTEM_INJECTED) {
                ((AbstractParamDefinition) val)
                    .getMeta()
                    .put(Constants.METADATA_SOURCE_KEY, ParamSource.RESTART.name());
                systemInjectedRestartRunParams.put(key, val);
              }
            });
        systemInjectedRestartRunParams.keySet().forEach(params::remove);
      });
  stepRunParams.ifPresent(undefinedRestartParams::putAll);
  // Params already present in the step definition are not "undefined".
  Optional.ofNullable(stepDefinition.getParams())
      .ifPresent(
          stepDefParams ->
              stepDefParams.keySet().stream()
                  .filter(undefinedRestartParams::containsKey)
                  .forEach(undefinedRestartParams::remove));
  // Then merge undefined restart params
  if (!undefinedRestartParams.isEmpty()) {
    mergeUserProvidedStepParams(allParamDefs, undefinedRestartParams, workflowSummary);
  }
  // Final merge from step definition
  if (stepDefinition.getParams() != null) {
    maybeOverrideParamType(stepDefinition.getParams());
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        stepDefinition.getParams(),
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.DEFINITION));
  }
  // merge step run params
  stepRunParams.ifPresent(
      stepParams -> mergeUserProvidedStepParams(allParamDefs, stepParams, workflowSummary));
  // merge all user provided restart step run params
  stepRestartParams.ifPresent(
      stepParams -> mergeUserProvidedStepParams(allParamDefs, stepParams, workflowSummary));
  // merge all system injected restart step run params with mode and source already set.
  allParamDefs.putAll(systemInjectedRestartRunParams);
  // Cleanup any params that are missing and convert to params
  return ParamsMergeHelper.convertToParameters(ParamsMergeHelper.cleanupParams(allParamDefs));
}
// During a RESTART_FROM_SPECIFIC of a FOREACH step, attempting to change the
// MUTABLE_ON_START loop param "a" via restart params must be rejected with a
// MaestroValidationException.
@Test
public void testRestartForeachStepRunParamMerge() throws IOException {
  DefaultParamManager defaultParamManager = new DefaultParamManager(JsonHelper.objectMapperWithYaml());
  defaultParamManager.init();
  ParamsManager paramsManager = new ParamsManager(defaultParamManager);
  Map<String, ParamDefinition> loopParamsDef = new HashMap<>();
  loopParamsDef.put("a", ParamDefinition.buildParamDefinition("i", new long[] {1, 2}));
  Map<String, ParamDefinition> loopParamsRestart = new HashMap<>();
  loopParamsRestart.put("a", ParamDefinition.buildParamDefinition("i", new long[] {1, 2, 3}));
  Map<String, Map<String, ParamDefinition>> stepRunParams =
      singletonMap(
          "stepid",
          singletonMap(
              "loop_params",
              MapParamDefinition.builder().name("loop_params").value(loopParamsDef).build()));
  Map<String, Map<String, ParamDefinition>> stepRestartParams =
      singletonMap(
          "stepid",
          singletonMap(
              "loop_params",
              MapParamDefinition.builder().name("loop_params").value(loopParamsRestart).build()));
  ManualInitiator manualInitiator = new ManualInitiator();
  workflowSummary.setInitiator(manualInitiator);
  workflowSummary.setStepRunParams(stepRunParams);
  workflowSummary.setRestartConfig(
      RestartConfig.builder()
          .addRestartNode("sample-wf-map-params", 1, "stepid")
          .stepRestartParams(stepRestartParams)
          .build());
  workflowSummary.setRunPolicy(RunPolicy.RESTART_FROM_SPECIFIC);
  Step step = Mockito.mock(Step.class);
  when(step.getType()).thenReturn(StepType.FOREACH);
  when(step.getParams()).thenReturn(null);
  AssertHelper.assertThrows(
      "Cannot modify param with MUTABLE_ON_START during restart",
      MaestroValidationException.class,
      "Cannot modify param with mode [MUTABLE_ON_START] for parameter [a]",
      () -> paramsManager.generateMergedStepParams(
          workflowSummary, step, stepRuntime, runtimeSummary));
}
/**
 * Returns a copy of {@code source} suitable for DriverManager: identical to
 * the input except the pool-specific "jdbcUrl" key is removed.
 */
public static Properties translate(Properties source) {
  final Properties driverProps = PropertiesUtil.clone(source);
  driverProps.remove("jdbcUrl");
  return driverProps;
}
// translate() must drop only the jdbcUrl key, passing user/password and
// arbitrary custom keys through, leaving exactly 3 of the 4 entries.
@Test
public void testTranslatableProperties() {
  Properties hzProperties = new Properties();
  String jdbcUrl = "jdbcUrl";
  String user = "user";
  String password = "password";
  String myProperty = "5000";
  hzProperties.setProperty(JDBC_URL, jdbcUrl);
  hzProperties.setProperty(USER, user);
  hzProperties.setProperty(PASSWORD, password);
  hzProperties.setProperty("myProperty", myProperty);
  Properties driverManagerProperties = translate(hzProperties);
  assertThat(driverManagerProperties).doesNotContainKey(JDBC_URL);
  assertThat(driverManagerProperties.getProperty("user")).isEqualTo(user);
  assertThat(driverManagerProperties.getProperty("password")).isEqualTo(password);
  assertThat(driverManagerProperties.getProperty("myProperty")).isEqualTo(myProperty);
  assertThat(driverManagerProperties).hasSize(3);
}
/**
 * Netty entry point: delegates to the four-argument overload with both boolean
 * flags set to true. NOTE(review): the flags' meanings are defined by the
 * overload's signature, which is not visible here — consult it before changing.
 */
@Override
public RemotingCommand processRequest(final ChannelHandlerContext ctx, RemotingCommand request)
    throws RemotingCommandException {
  return this.processRequest(ctx.channel(), request, true, true);
}
// With litePullMessageEnable=false, a LITE_PULL_MESSAGE request must be
// rejected with NO_PERMISSION.
@Test
public void test_LitePullRequestForbidden() throws Exception {
  brokerController.getBrokerConfig().setLitePullMessageEnable(false);
  RemotingCommand remotingCommand = createPullMsgCommand(RequestCode.LITE_PULL_MESSAGE);
  RemotingCommand response = pullMessageProcessor.processRequest(handlerContext, remotingCommand);
  assertThat(response).isNotNull();
  assertThat(response.getCode()).isEqualTo(ResponseCode.NO_PERMISSION);
}
/**
 * Resolves this path against a (possibly nested) schema: walks every segment
 * except the last through nested struct schemas, then looks up the final
 * segment's field. Returns null when the schema is null or any intermediate
 * segment is missing.
 */
public Field fieldFrom(Schema schema) {
  if (schema == null) {
    return null;
  }
  Schema current = schema;
  for (String pathSegment : stepsWithoutLast()) {
    Field next = current.field(pathSegment);
    if (next == null) {
      return null; // intermediate segment not present in this schema
    }
    current = next.schema();
  }
  return current.field(lastStep());
}
// Resolves both a nested path ("foo.bar") and a top-level path ("foo")
// against a two-level struct schema.
@Test
void shouldFindField() {
  Schema barSchema = SchemaBuilder.struct().field("bar", Schema.INT32_SCHEMA).build();
  Schema schema = SchemaBuilder.struct().field("foo", barSchema).build();
  assertEquals(barSchema.field("bar"), pathV2("foo.bar").fieldFrom(schema));
  assertEquals(schema.field("foo"), pathV2("foo").fieldFrom(schema));
}
/** Returns the configured number of I/O threads. */
public int getIoThreadCount() {
  return this.ioThreadCount;
}
// The builder's setIoThreadCount value must round-trip through the getter.
@Test
void testGetIoThreadCount() {
  HttpClientConfig config = HttpClientConfig.builder().setIoThreadCount(90).build();
  assertEquals(90, config.getIoThreadCount());
}
/**
 * Handles a config-publish RPC: validates parameters, persists the config via
 * the normal / tag / beta path (each with an optional CAS variant keyed on the
 * request's casMd5), fires a ConfigDataChangeEvent on success, and records a
 * persistence trace entry. Any exception is converted into a failure response
 * rather than propagated.
 */
@Override
@TpsControl(pointName = "ConfigPublish")
@Secured(action = ActionTypes.WRITE, signType = SignType.CONFIG)
@ExtractorManager.Extractor(rpcExtractor = ConfigRequestParamExtractor.class)
public ConfigPublishResponse handle(ConfigPublishRequest request, RequestMeta meta) throws NacosException {
    try {
        String dataId = request.getDataId();
        String group = request.getGroup();
        String content = request.getContent();
        final String tenant = request.getTenant();
        final String srcIp = meta.getClientIp();
        // Optional metadata carried in the request's addition-param map.
        final String requestIpApp = request.getAdditionParam("requestIpApp");
        final String tag = request.getAdditionParam("tag");
        final String appName = request.getAdditionParam("appName");
        final String type = request.getAdditionParam("type");
        final String srcUser = request.getAdditionParam("src_user");
        final String encryptedDataKey = request.getAdditionParam("encryptedDataKey");
        // check tenant
        // NOTE(review): "datumId" is passed as a literal where the other args are
        // values — presumably a label used in validation error messages; confirm
        // against ParamUtils.checkParam's signature.
        ParamUtils.checkParam(dataId, group, "datumId", content);
        ParamUtils.checkParam(tag);
        Map<String, Object> configAdvanceInfo = new HashMap<>(10);
        MapUtil.putIfValNoNull(configAdvanceInfo, "config_tags", request.getAdditionParam("config_tags"));
        MapUtil.putIfValNoNull(configAdvanceInfo, "desc", request.getAdditionParam("desc"));
        MapUtil.putIfValNoNull(configAdvanceInfo, "use", request.getAdditionParam("use"));
        MapUtil.putIfValNoNull(configAdvanceInfo, "effect", request.getAdditionParam("effect"));
        MapUtil.putIfValNoNull(configAdvanceInfo, "type", type);
        MapUtil.putIfValNoNull(configAdvanceInfo, "schema", request.getAdditionParam("schema"));
        ParamUtils.checkParam(configAdvanceInfo);
        // Aggregated data ids may not be published as single entries.
        if (AggrWhitelist.isAggrDataId(dataId)) {
            Loggers.REMOTE_DIGEST.warn("[aggr-conflict] {} attempt to publish single data, {}, {}", srcIp, dataId, group);
            throw new NacosException(NacosException.NO_RIGHT, "dataId:" + dataId + " is aggr");
        }
        ConfigInfo configInfo = new ConfigInfo(dataId, group, tenant, appName, content);
        // casMd5, when present, selects the compare-and-swap persistence path below.
        configInfo.setMd5(request.getCasMd5());
        configInfo.setType(type);
        configInfo.setEncryptedDataKey(encryptedDataKey);
        String betaIps = request.getAdditionParam("betaIps");
        ConfigOperateResult configOperateResult = null;
        String persistEvent = ConfigTraceService.PERSISTENCE_EVENT;
        if (StringUtils.isBlank(betaIps)) {
            if (StringUtils.isBlank(tag)) {
                // Normal (non-tag, non-beta) publish.
                if (StringUtils.isNotBlank(request.getCasMd5())) {
                    configOperateResult = configInfoPersistService.insertOrUpdateCas(srcIp, srcUser, configInfo, configAdvanceInfo);
                    if (!configOperateResult.isSuccess()) {
                        return ConfigPublishResponse.buildFailResponse(ResponseCode.FAIL.getCode(),
                                "Cas publish fail,server md5 may have changed.");
                    }
                } else {
                    configOperateResult = configInfoPersistService.insertOrUpdate(srcIp, srcUser, configInfo, configAdvanceInfo);
                }
                ConfigChangePublisher.notifyConfigChange(new ConfigDataChangeEvent(false, dataId, group, tenant, configOperateResult.getLastModified()));
            } else {
                // Tag publish.
                if (StringUtils.isNotBlank(request.getCasMd5())) {
                    configOperateResult = configInfoTagPersistService.insertOrUpdateTagCas(configInfo, tag, srcIp, srcUser);
                    if (!configOperateResult.isSuccess()) {
                        return ConfigPublishResponse.buildFailResponse(ResponseCode.FAIL.getCode(),
                                "Cas publish tag config fail,server md5 may have changed.");
                    }
                } else {
                    configOperateResult = configInfoTagPersistService.insertOrUpdateTag(configInfo, tag, srcIp, srcUser);
                }
                persistEvent = ConfigTraceService.PERSISTENCE_EVENT_TAG + "-" + tag;
                ConfigChangePublisher.notifyConfigChange(
                        new ConfigDataChangeEvent(false, dataId, group, tenant, tag, configOperateResult.getLastModified()));
            }
        } else {
            // beta publish
            if (StringUtils.isNotBlank(request.getCasMd5())) {
                configOperateResult = configInfoBetaPersistService.insertOrUpdateBetaCas(configInfo, betaIps, srcIp, srcUser);
                if (!configOperateResult.isSuccess()) {
                    return ConfigPublishResponse.buildFailResponse(ResponseCode.FAIL.getCode(),
                            "Cas publish beta config fail,server md5 may have changed.");
                }
            } else {
                configOperateResult = configInfoBetaPersistService.insertOrUpdateBeta(configInfo, betaIps, srcIp, srcUser);
            }
            persistEvent = ConfigTraceService.PERSISTENCE_EVENT_BETA;
            ConfigChangePublisher.notifyConfigChange(
                    new ConfigDataChangeEvent(true, dataId, group, tenant, configOperateResult.getLastModified()));
        }
        // Record the persistence trace regardless of which path was taken.
        ConfigTraceService.logPersistenceEvent(dataId, group, tenant, requestIpApp, configOperateResult.getLastModified(),
                srcIp, persistEvent, ConfigTraceService.PERSISTENCE_TYPE_PUB, content);
        return ConfigPublishResponse.buildSuccessResponse();
    } catch (Exception e) {
        Loggers.REMOTE_DIGEST.error("[ConfigPublishRequestHandler] publish config error ,request ={}", request, e);
        // NacosExceptions carry their own error code; everything else maps to FAIL.
        return ConfigPublishResponse.buildFailResponse(
                (e instanceof NacosException) ? ((NacosException) e).getErrCode() : ResponseCode.FAIL.getCode(),
                e.getMessage());
    }
}
/**
 * When the CAS persistence call throws, handle() must return a FAIL response
 * carrying the error message, and no ConfigDataChangeEvent may be published.
 */
@Test
void testNormalPublishConfigCasError() throws Exception {
    String dataId = "testNormalPublishConfigCasError";
    String group = "group";
    String tenant = "tenant";
    String content = "content";
    ConfigPublishRequest configPublishRequest = new ConfigPublishRequest();
    configPublishRequest.setDataId(dataId);
    configPublishRequest.setGroup(group);
    configPublishRequest.setTenant(tenant);
    configPublishRequest.setContent(content);
    // Non-blank casMd5 routes handle() onto the insertOrUpdateCas path.
    configPublishRequest.setCasMd5("12314532");
    Map<String, String> keyMap = new HashMap<>();
    String srcUser = "src_user111";
    keyMap.put("src_user", srcUser);
    configPublishRequest.setAdditionMap(keyMap);
    RequestMeta requestMeta = new RequestMeta();
    String clientIp = "127.0.0.1";
    requestMeta.setClientIp(clientIp);
    ConfigInfoStateWrapper configInfoStateWrapper = new ConfigInfoStateWrapper();
    configInfoStateWrapper.setId(12345678);
    long timeStamp = System.currentTimeMillis();
    configInfoStateWrapper.setLastModified(timeStamp);
    // Capture any change event published for this dataId so we can assert none was fired.
    AtomicReference<ConfigDataChangeEvent> reference = new AtomicReference<>();
    NotifyCenter.registerSubscriber(new Subscriber() {
        @Override
        public void onEvent(Event event) {
            ConfigDataChangeEvent event1 = (ConfigDataChangeEvent) event;
            if (event1.dataId.equals(dataId)) {
                reference.set((ConfigDataChangeEvent) event);
            }
        }

        @Override
        public Class<? extends Event> subscribeType() {
            return ConfigDataChangeEvent.class;
        }
    });
    ConfigOperateResult configOperateResult = new ConfigOperateResult(true);
    long timestamp = System.currentTimeMillis();
    long id = timestamp / 1000;
    configOperateResult.setId(id);
    configOperateResult.setLastModified(timestamp);
    // Make the CAS persistence call blow up.
    when(configInfoPersistService.insertOrUpdateCas(eq(requestMeta.getClientIp()), eq(srcUser), any(ConfigInfo.class),
            any(Map.class))).thenThrow(new NacosRuntimeException(502, "mock error"));
    ConfigPublishResponse response = configPublishRequestHandler.handle(configPublishRequest, requestMeta);
    assertEquals(ResponseCode.FAIL.getCode(), response.getResultCode());
    assertTrue(response.getMessage().contains("mock error"));
    // Give the async event pipeline time to deliver, then assert nothing arrived.
    Thread.sleep(500L);
    assertTrue(reference.get() == null);
}
/**
 * Starts the underlying reporter on the configured interval.
 *
 * @throws IllegalStateException if the reporter was never prepared (is null)
 */
@Override
public void start() {
    if (reporter == null) {
        // Guard clause: start() before prepare() is a programming error.
        throw new IllegalStateException("Attempt to start without preparing " + getClass().getSimpleName());
    }
    LOG.debug("Starting...");
    reporter.start(reportingIntervalSecs, TimeUnit.SECONDS);
}
/**
 * End-to-end TLS test: runs a pushgateway container with an HTTPS web config
 * and verifies the reporter can push over https when TLS validation is skipped.
 */
@Test
public void testTls() throws IOException {
    // Mount the TLS web config into the container and start it with HTTPS enabled.
    pushGatewayContainer
            .withCopyFileToContainer(MountableFile.forClasspathResource("/pushgateway-ssl.yaml"), "/pushgateway/pushgateway-ssl.yaml")
            .withCommand("--web.config.file", "pushgateway-ssl.yaml")
            .start();
    final PrometheusPreparableReporter sut = new PrometheusPreparableReporter();
    // Point the reporter at the container's mapped HTTPS port; the container's
    // cert is self-signed, so TLS validation must be skipped.
    final Map<String, Object> daemonConf = Map.of(
            "storm.daemon.metrics.reporter.plugin.prometheus.job", "test_simple",
            "storm.daemon.metrics.reporter.plugin.prometheus.endpoint", "localhost:" + pushGatewayContainer.getMappedPort(9091),
            "storm.daemon.metrics.reporter.plugin.prometheus.scheme", "https",
            "storm.daemon.metrics.reporter.plugin.prometheus.skip_tls_validation", true
    );
    runTest(sut, daemonConf);
}
/**
 * Starts polling for a job result: seeds the recursive overload with a fresh
 * result future and an attempt counter of zero.
 */
@VisibleForTesting
static CompletableFuture<JobResult> pollJobResultAsync(
        final Supplier<CompletableFuture<JobStatus>> jobStatusSupplier,
        final Supplier<CompletableFuture<JobResult>> jobResultSupplier,
        final ScheduledExecutor scheduledExecutor,
        final long retryMsTimeout) {
    return pollJobResultAsync(
            jobStatusSupplier,
            jobResultSupplier,
            scheduledExecutor,
            new CompletableFuture<>(),
            retryMsTimeout,
            0);
}
/**
 * A job that reaches a terminal state after one status poll must yield a
 * completed result future whose JobResult carries the serialized failure.
 */
@Test
void testFailedJobResult() throws ExecutionException, InterruptedException {
    final int maxAttemptCounter = 1;
    final ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
    try {
        final ScheduledExecutor scheduledExecutor = new ScheduledExecutorServiceAdapter(executor);
        // Counts status polls and reports terminal after maxAttemptCounter calls.
        final CallCountingJobStatusSupplier jobStatusSupplier = new CallCountingJobStatusSupplier(maxAttemptCounter);

        final CompletableFuture<JobResult> result =
                JobStatusPollingUtils.pollJobResultAsync(
                        jobStatusSupplier,
                        () -> CompletableFuture.completedFuture(createFailedJobResult(new JobID(0, 0))),
                        scheduledExecutor,
                        10);

        result.join();

        assertThat(jobStatusSupplier.getAttemptCounter()).isEqualTo(maxAttemptCounter);
        assertThat(result)
                .isCompletedWithValueMatching(jobResult -> jobResult.getSerializedThrowable().isPresent());
    } finally {
        ExecutorUtils.gracefulShutdown(5, TimeUnit.SECONDS, executor);
    }
}
@Override public void terminate() throws Exception { isRunning = false; // wait for all containers to stop trackerOfReleasedResources.register(); trackerOfReleasedResources.arriveAndAwaitAdvance(); // shut down all components Exception exception = null; if (resourceManagerClient != null) { try { resourceManagerClient.stop(); } catch (Exception e) { exception = e; } } if (nodeManagerClient != null) { try { nodeManagerClient.stop(); } catch (Exception e) { exception = ExceptionUtils.firstOrSuppressed(e, exception); } } if (exception != null) { throw exception; } }
/**
 * An onError callback arriving after terminate() must be ignored: the error
 * consumer is never invoked, so waiting on it times out.
 */
@Test
void testOnErrorAfterTerminationIgnored() throws Exception {
    new Context() {
        {
            final CompletableFuture<Throwable> throwableCompletableFuture = new CompletableFuture<>();
            resourceEventHandlerBuilder.setOnErrorConsumer(throwableCompletableFuture::complete);
            Throwable expectedThrowable = new Exception("test");
            runTest(
                    () -> {
                        getDriver().terminate();
                        // Error delivered after termination — must be dropped.
                        resourceManagerClientCallbackHandler.onError(expectedThrowable);
                        assertThatThrownBy(
                                () -> throwableCompletableFuture.get(
                                        TIMEOUT_SHOULD_NOT_HAPPEN_MS,
                                        TimeUnit.MILLISECONDS))
                                .isInstanceOf(TimeoutException.class);
                    });
        }
    };
}
/**
 * Derives a tuple domain for a column from its file-level statistics.
 * Returns Domain.none for empty files, Domain.all when stats are absent,
 * onlyNull when the column has no non-null values, and otherwise a range
 * domain built from the type-specific statistics (boolean, decimal, char,
 * varchar, date, integral, double, real). Falls back to an all-values
 * domain (with the null flag) when no matching statistics exist.
 */
@VisibleForTesting
public static Domain getDomain(Type type, long rowCount, ColumnStatistics columnStatistics) {
    if (rowCount == 0) {
        return Domain.none(type);
    }

    if (columnStatistics == null) {
        return Domain.all(type);
    }

    if (columnStatistics.hasNumberOfValues() && columnStatistics.getNumberOfValues() == 0) {
        return Domain.onlyNull(type);
    }

    // Any gap between non-null value count and row count implies nulls exist.
    boolean hasNullValue = columnStatistics.getNumberOfValues() != rowCount;

    if (type.getJavaType() == boolean.class && columnStatistics.getBooleanStatistics() != null) {
        BooleanStatistics booleanStatistics = columnStatistics.getBooleanStatistics();

        boolean hasTrueValues = (booleanStatistics.getTrueValueCount() != 0);
        // False count is implied: non-null values minus true values.
        boolean hasFalseValues = (columnStatistics.getNumberOfValues() != booleanStatistics.getTrueValueCount());
        if (hasTrueValues && hasFalseValues) {
            return Domain.all(BOOLEAN);
        }
        if (hasTrueValues) {
            return Domain.create(ValueSet.of(BOOLEAN, true), hasNullValue);
        }
        if (hasFalseValues) {
            return Domain.create(ValueSet.of(BOOLEAN, false), hasNullValue);
        }
    }
    else if (isShortDecimal(type) && columnStatistics.getDecimalStatistics() != null) {
        // Rescale stats to the column's decimal scale before extracting the unscaled long.
        return createDomain(type, hasNullValue, columnStatistics.getDecimalStatistics(), value -> rescale(value, (DecimalType) type).unscaledValue().longValue());
    }
    else if (isLongDecimal(type) && columnStatistics.getDecimalStatistics() != null) {
        return createDomain(type, hasNullValue, columnStatistics.getDecimalStatistics(), value -> encodeUnscaledValue(rescale(value, (DecimalType) type).unscaledValue()));
    }
    else if (isCharType(type) && columnStatistics.getStringStatistics() != null) {
        // Char comparison semantics ignore trailing spaces and bounded length.
        return createDomain(type, hasNullValue, columnStatistics.getStringStatistics(), value -> truncateToLengthAndTrimSpaces(value, type));
    }
    else if (isVarcharType(type) && columnStatistics.getStringStatistics() != null) {
        return createDomain(type, hasNullValue, columnStatistics.getStringStatistics());
    }
    else if (type.getTypeSignature().getBase().equals(StandardTypes.DATE) && columnStatistics.getDateStatistics() != null) {
        return createDomain(type, hasNullValue, columnStatistics.getDateStatistics(), value -> (long) value);
    }
    else if (type.getJavaType() == long.class && columnStatistics.getIntegerStatistics() != null) {
        return createDomain(type, hasNullValue, columnStatistics.getIntegerStatistics());
    }
    else if (type.getJavaType() == double.class && columnStatistics.getDoubleStatistics() != null) {
        return createDomain(type, hasNullValue, columnStatistics.getDoubleStatistics());
    }
    else if (REAL.equals(type) && columnStatistics.getDoubleStatistics() != null) {
        // REAL is stored as the raw int bits of the float inside a long.
        return createDomain(type, hasNullValue, columnStatistics.getDoubleStatistics(), value -> (long) floatToRawIntBits(value.floatValue()));
    }
    return Domain.create(ValueSet.all(type), hasNullValue);
}
/**
 * Exercises getDomain() for VARCHAR across the full matrix of row counts and
 * string statistics: empty file, missing stats, all-null, no-null, single
 * value, bounded/half-bounded ranges, each with and without nulls.
 */
@Test
public void testString() {
    // Zero rows: always Domain.none, regardless of stats.
    assertEquals(getDomain(VARCHAR, 0, null), Domain.none(VARCHAR));
    assertEquals(getDomain(VARCHAR, 10, null), Domain.all(VARCHAR));

    assertEquals(getDomain(VARCHAR, 0, stringColumnStats(null, null, null)), Domain.none(VARCHAR));
    assertEquals(getDomain(VARCHAR, 0, stringColumnStats(0L, null, null)), Domain.none(VARCHAR));
    assertEquals(getDomain(VARCHAR, 0, stringColumnStats(0L, "taco", "taco")), Domain.none(VARCHAR));

    // Zero non-null values among ten rows: only nulls; all non-null: no nulls.
    assertEquals(getDomain(VARCHAR, 10, stringColumnStats(0L, null, null)), onlyNull(VARCHAR));
    assertEquals(getDomain(VARCHAR, 10, stringColumnStats(10L, null, null)), notNull(VARCHAR));

    // min == max collapses to a single value.
    assertEquals(getDomain(VARCHAR, 10, stringColumnStats(10L, "taco", "taco")), singleValue(VARCHAR, utf8Slice("taco")));
    assertEquals(getDomain(VARCHAR, 10, stringColumnStats(10L, "apple", "taco")), create(ValueSet.ofRanges(range(VARCHAR, utf8Slice("apple"), true, utf8Slice("taco"), true)), false));

    // Half-bounded ranges when only one of min/max is present.
    assertEquals(getDomain(VARCHAR, 10, stringColumnStats(10L, null, "taco")), create(ValueSet.ofRanges(lessThanOrEqual(VARCHAR, utf8Slice("taco"))), false));
    assertEquals(getDomain(VARCHAR, 10, stringColumnStats(10L, "apple", null)), create(ValueSet.ofRanges(greaterThanOrEqual(VARCHAR, utf8Slice("apple"))), false));

    // Fewer non-null values than rows: the null flag flips to true.
    assertEquals(getDomain(VARCHAR, 10, stringColumnStats(5L, "apple", "taco")), create(ValueSet.ofRanges(range(VARCHAR, utf8Slice("apple"), true, utf8Slice("taco"), true)), true));
    assertEquals(getDomain(VARCHAR, 10, stringColumnStats(5L, null, "taco")), create(ValueSet.ofRanges(lessThanOrEqual(VARCHAR, utf8Slice("taco"))), true));
    assertEquals(getDomain(VARCHAR, 10, stringColumnStats(5L, "apple", null)), create(ValueSet.ofRanges(greaterThanOrEqual(VARCHAR, utf8Slice("apple"))), true));
}
/** Fetches (or lazily creates) the addressed state cell from the work-item state table, using a null state context. */
@Override
public <T extends State> T state(StateNamespace namespace, StateTag<T> address) {
    return workItemState.get(namespace, address, StateContexts.nullContext());
}
/**
 * entries() on a MapState must merge locally-written entries with entries
 * read from the backing store, both for writes made before the read resolves
 * and for writes made after a first read.
 */
@Test
public void testMapAddBeforeRead() throws Exception {
    StateTag<MapState<String, Integer>> addr =
            StateTags.map("map", StringUtf8Coder.of(), VarIntCoder.of());
    MapState<String, Integer> mapState = underTest.state(NAMESPACE, addr);

    final String tag1 = "tag1";
    final String tag2 = "tag2";
    final String tag3 = "tag3";

    // Backing-store read is controlled manually via this future.
    SettableFuture<Iterable<Map.Entry<ByteString, Integer>>> prefixFuture = SettableFuture.create();
    when(mockReader.valuePrefixFuture(
            protoKeyFromUserKey(null, StringUtf8Coder.of()), STATE_FAMILY, VarIntCoder.of()))
            .thenReturn(prefixFuture);

    ReadableState<Iterable<Map.Entry<String, Integer>>> result = mapState.entries();
    result = result.readLater();

    // Local write issued before the store read completes.
    mapState.put(tag1, 1);

    // Complete the store read (after 200ms) with an entry the local cache doesn't have.
    waitAndSet(
            prefixFuture,
            ImmutableList.of(
                    new AbstractMap.SimpleEntry<>(protoKeyFromUserKey(tag2, StringUtf8Coder.of()), 2)),
            200);
    Iterable<Map.Entry<String, Integer>> readData = result.read();
    // Read must contain both the local write and the store entry.
    assertThat(
            readData,
            Matchers.containsInAnyOrder(
                    new AbstractMap.SimpleEntry<>(tag1, 1), new AbstractMap.SimpleEntry<>(tag2, 2)));

    // A write after the first read must be visible on subsequent reads.
    mapState.put(tag3, 3);
    assertThat(
            result.read(),
            Matchers.containsInAnyOrder(
                    new AbstractMap.SimpleEntry<>(tag1, 1),
                    new AbstractMap.SimpleEntry<>(tag2, 2),
                    new AbstractMap.SimpleEntry<>(tag3, 3)));
}
/**
 * Drives segment generation: lists matching input files, optionally configures
 * a consistent-push segment postfix, submits one generation task per file
 * (sequence ids assigned either per-directory or globally), waits for all
 * tasks, and rethrows the first recorded failure. The temp dir and executor
 * are cleaned up in all cases.
 */
@Override
public void run() throws Exception {
    // Get list of files to process.
    List<String> filteredFiles = SegmentGenerationUtils.listMatchedFilesWithRecursiveOption(_inputDirFS, _inputDirURI,
            _spec.getIncludeFileNamePattern(), _spec.getExcludeFileNamePattern(), _spec.isSearchRecursively());

    if (_consistentPushEnabled) {
        ConsistentDataPushUtils.configureSegmentPostfix(_spec);
    }

    File localTempDir = new File(FileUtils.getTempDirectory(), "pinot-" + UUID.randomUUID());
    try {
        int numInputFiles = filteredFiles.size();
        // Each submitted task counts this latch down when it finishes.
        _segmentCreationTaskCountDownLatch = new CountDownLatch(numInputFiles);

        if (!SegmentGenerationJobUtils.useGlobalDirectorySequenceId(_spec.getSegmentNameGeneratorSpec())) {
            // Per-directory sequence ids: group files by parent dir, sort siblings,
            // and number each file within its own directory.
            Map<String, List<String>> localDirIndex = new HashMap<>();
            for (String filteredFile : filteredFiles) {
                java.nio.file.Path filteredParentPath = Paths.get(filteredFile).getParent();
                localDirIndex.computeIfAbsent(filteredParentPath.toString(), k -> new ArrayList<>()).add(filteredFile);
            }
            for (String parentPath : localDirIndex.keySet()) {
                List<String> siblingFiles = localDirIndex.get(parentPath);
                Collections.sort(siblingFiles);
                for (int i = 0; i < siblingFiles.size(); i++) {
                    URI inputFileURI = SegmentGenerationUtils
                            .getFileURI(siblingFiles.get(i), SegmentGenerationUtils.getDirectoryURI(parentPath));
                    submitSegmentGenTask(localTempDir, inputFileURI, i);
                }
            }
        } else {
            //iterate on the file list, for each
            for (int i = 0; i < numInputFiles; i++) {
                final URI inputFileURI = SegmentGenerationUtils.getFileURI(filteredFiles.get(i), _inputDirURI);
                submitSegmentGenTask(localTempDir, inputFileURI, i);
            }
        }
        _segmentCreationTaskCountDownLatch.await();
        if (_failure.get() != null) {
            // A task failed: stop outstanding work and surface the recorded failure.
            _executorService.shutdownNow();
            throw _failure.get();
        }
    } finally {
        //clean up
        FileUtils.deleteQuietly(localTempDir);
        _executorService.shutdown();
    }
}
/**
 * With consistent push enabled in the table config, the generated segment tar
 * must carry a 13-digit timestamp postfix in its name.
 */
@Test
public void testSegmentGenerationWithConsistentPush() throws Exception {
    File testDir = makeTestDir();
    File inputDir = new File(testDir, "input");
    inputDir.mkdirs();
    File inputFile = new File(inputDir, "input.csv");
    FileUtils.writeLines(inputFile, Lists.newArrayList("col1,col2", "value1,1", "value2,2"));

    // Create an output directory
    File outputDir = new File(testDir, "output");

    final String schemaName = "mySchema";
    File schemaFile = makeSchemaFile(testDir, schemaName);
    File tableConfigFile = makeTableConfigFileWithConsistentPush(testDir, schemaName);
    SegmentGenerationJobSpec jobSpec = makeJobSpec(inputDir, outputDir, schemaFile, tableConfigFile);
    jobSpec.setOverwriteOutput(false);
    SegmentGenerationJobRunner jobRunner = new SegmentGenerationJobRunner(jobSpec);
    jobRunner.run();

    // There should be a tar file generated with timestamp (13 digits)
    String[] list = outputDir.list((dir, name) -> name.matches("myTable_OFFLINE_\\d{13}_0.tar.gz"));
    assertEquals(list.length, 1);
}
@Override public void generateLedgerId(BookkeeperInternalCallbacks.GenericCallback<Long> genericCallback) { ledgerIdGenPathPresent() .thenCompose(isIdGenPathPresent -> { if (isIdGenPathPresent) { // We've already started generating 63-bit ledger IDs. // Keep doing that. return generateLongLedgerId(); } else { // We've not moved onto 63-bit ledgers yet. return generateShortLedgerId(); } }).thenAccept(ledgerId -> genericCallback.operationComplete(BKException.Code.OK, ledgerId) ).exceptionally(ex -> { log.error("Error generating ledger id: {}", ex.getMessage()); genericCallback.operationComplete(BKException.Code.MetaStoreException, -1L); return null; }); }
/**
 * Concurrency test for ledger-id generation under a chrooted metadata store:
 * generates ids from multiple threads first in the legacy (short) id space,
 * then — after creating the long-id path — in the 63-bit space, asserting no
 * generation errors, cleaned-up generator nodes, and globally unique ids.
 */
@Test
public void testGenerateLedgerIdWithZkPrefix() throws Exception {
    @Cleanup
    MetadataStoreExtended store = MetadataStoreExtended.create(zks.getConnectionString() + "/test",
            MetadataStoreConfig.builder().build());

    @Cleanup
    PulsarLedgerIdGenerator ledgerIdGenerator = new PulsarLedgerIdGenerator(store, "/ledgers");

    // Create *nThread* threads each generate *nLedgers* ledger id,
    // and then check there is no identical ledger id.
    final int nThread = 2;
    final int nLedgers = 2000;
    // Multiply by two. We're going to do half in the old legacy space and half in the new.
    CountDownLatch countDownLatch1 = new CountDownLatch(nThread * nLedgers);

    final AtomicInteger errCount = new AtomicInteger(0);
    final ConcurrentLinkedQueue<Long> shortLedgerIds = new ConcurrentLinkedQueue<Long>();
    final ConcurrentLinkedQueue<Long> longLedgerIds = new ConcurrentLinkedQueue<Long>();

    long start = System.currentTimeMillis();

    @Cleanup(value = "shutdownNow")
    ExecutorService executor = Executors.newCachedThreadPool();
    // Phase 1: legacy (short) id space.
    for (int i = 0; i < nThread; i++) {
        executor.submit(() -> {
            for (int j = 0; j < nLedgers; j++) {
                ledgerIdGenerator.generateLedgerId((rc, result) -> {
                    if (KeeperException.Code.OK.intValue() == rc) {
                        shortLedgerIds.add(result);
                    } else {
                        errCount.incrementAndGet();
                    }
                    countDownLatch1.countDown();
                });
            }
        });
    }

    countDownLatch1.await();

    // Generator nodes must be deleted once the id has been handed out.
    for (Long ledgerId : shortLedgerIds) {
        assertFalse(store.exists("/ledgers/idgen/ID-" + String.format("%010d", ledgerId)).get(),
                "Exception during deleting node for id generation : ");
    }

    CountDownLatch countDownLatch2 = new CountDownLatch(nThread * nLedgers);
    // Go and create the long-id directory in zookeeper. This should cause the id generator to generate ids with the
    // new algo once we clear it's stored status.
    store.put("/ledgers/idgen-long", new byte[0], Optional.empty()).join();

    // Phase 2: 63-bit (long) id space.
    for (int i = 0; i < nThread; i++) {
        executor.submit(() -> {
            for (int j = 0; j < nLedgers; j++) {
                ledgerIdGenerator.generateLedgerId((rc, result) -> {
                    if (KeeperException.Code.OK.intValue() == rc) {
                        longLedgerIds.add(result);
                    } else {
                        errCount.incrementAndGet();
                    }
                    countDownLatch2.countDown();
                });
            }
        });
    }

    assertTrue(countDownLatch2.await(120, TimeUnit.SECONDS),
            "Wait ledger id generation threads to stop timeout : ");

    ///test/ledgers/idgen-long/HOB-0000000001/ID-0000000000
    // Long ids encode a high-order bucket in the upper 32 bits.
    for (Long ledgerId : longLedgerIds) {
        assertFalse(store.exists("/ledgers/idgen-long/HOB-0000000001/ID-" + String.format("%010d", ledgerId >> 32)).get(),
                "Exception during deleting node for id generation : ");
    }

    log.info("Number of generated ledger id: {}, time used: {}", shortLedgerIds.size() + longLedgerIds.size(),
            System.currentTimeMillis() - start);
    assertEquals(errCount.get(), 0, "Error occur during ledger id generation : ");

    // Uniqueness across BOTH id spaces.
    Set<Long> ledgers = new HashSet<>();
    while (!shortLedgerIds.isEmpty()) {
        Long ledger = shortLedgerIds.poll();
        assertNotNull(ledger, "Generated ledger id is null");
        assertFalse(ledgers.contains(ledger), "Ledger id [" + ledger + "] conflict : ");
        ledgers.add(ledger);
    }
    while (!longLedgerIds.isEmpty()) {
        Long ledger = longLedgerIds.poll();
        assertNotNull(ledger, "Generated ledger id is null");
        assertFalse(ledgers.contains(ledger), "Ledger id [" + ledger + "] conflict : ");
        ledgers.add(ledger);
    }
}
/** Persists one operate-log record: maps the request DTO onto the entity and inserts it. */
@Override
public void createOperateLog(OperateLogCreateReqDTO createReqDTO) {
    final OperateLogDO entity = BeanUtils.toBean(createReqDTO, OperateLogDO.class);
    operateLogMapper.insert(entity);
}
@Test public void testCreateOperateLog() { OperateLogCreateReqDTO reqVO = RandomUtils.randomPojo(OperateLogCreateReqDTO.class); // 调研 operateLogServiceImpl.createOperateLog(reqVO); // 断言 OperateLogDO operateLogDO = operateLogMapper.selectOne(null); assertPojoEquals(reqVO, operateLogDO); }
/**
 * Creates the thread-level commit-ratio sensor and attaches its value metric.
 *
 * @param threadId       id of the stream thread the sensor belongs to
 * @param streamsMetrics metrics registry used to create the sensor and tags
 * @return the registered sensor
 */
public static Sensor commitRatioSensor(final String threadId, final StreamsMetricsImpl streamsMetrics) {
    final String sensorName = COMMIT + RATIO_SUFFIX;
    final Sensor sensor = streamsMetrics.threadLevelSensor(threadId, sensorName, Sensor.RecordingLevel.INFO);
    addValueMetricToSensor(
        sensor,
        THREAD_LEVEL_GROUP,
        streamsMetrics.threadLevelTagMap(threadId),
        sensorName,
        COMMIT_RATIO_DESCRIPTION
    );
    return sensor;
}
/**
 * commitRatioSensor() must register a value metric with the commit-ratio name
 * and description on the thread-level sensor, and return that sensor.
 */
@Test
public void shouldGetCommitRatioSensor() {
    final String operation = "commit-ratio";
    final String ratioDescription = "The fraction of time the thread spent on committing all tasks";
    when(streamsMetrics.threadLevelSensor(THREAD_ID, operation, RecordingLevel.INFO)).thenReturn(expectedSensor);
    when(streamsMetrics.threadLevelTagMap(THREAD_ID)).thenReturn(tagMap);

    // Static helper is mocked so the metric registration can be verified.
    try (final MockedStatic<StreamsMetricsImpl> streamsMetricsStaticMock = mockStatic(StreamsMetricsImpl.class)) {
        final Sensor sensor = ThreadMetrics.commitRatioSensor(THREAD_ID, streamsMetrics);
        streamsMetricsStaticMock.verify(
            () -> StreamsMetricsImpl.addValueMetricToSensor(
                expectedSensor,
                THREAD_LEVEL_GROUP,
                tagMap,
                operation,
                ratioDescription
            )
        );
        assertThat(sensor, is(expectedSensor));
    }
}
/**
 * Reads the entire source, formats it, and writes the result to the sink.
 *
 * @throws FormatterException if the source cannot be formatted
 * @throws IOException        on read or write failure
 */
public void formatSource(CharSource input, CharSink output) throws FormatterException, IOException {
    // TODO(cushon): proper support for streaming input/output. Input may
    // not be feasible (parsing) but output should be easier.
    output.write(formatSource(input.read()));
}
@Test public void lineCommentTrailingThinSpace() throws FormatterException { // The Unicode thin space is matched by CharMatcher.whitespace() but not trim(). String input = "class T {\n // asd\u2009\n}\n"; String output = new Formatter().formatSource(input); String expect = "class T {\n // asd\n}\n"; assertThat(output).isEqualTo(expect); }
/**
 * Builds a step expression from a step definition. Parameterless definitions
 * get an expression that rejects table/doc-string arguments; otherwise the
 * LAST parameter supplies the type resolver and transposition flag used for
 * converting such arguments.
 */
public StepExpression createExpression(StepDefinition stepDefinition) {
    final List<ParameterInfo> parameterInfos = stepDefinition.parameterInfos();
    if (parameterInfos.isEmpty()) {
        return createExpression(
            stepDefinition.getPattern(),
            stepDefinitionDoesNotTakeAnyParameter(stepDefinition),
            false);
    }
    // Only the trailing parameter can receive a data table or doc string.
    final ParameterInfo lastParameter = parameterInfos.get(parameterInfos.size() - 1);
    return createExpression(
        stepDefinition.getPattern(),
        lastParameter.getTypeResolver()::resolve,
        lastParameter.isTransposed());
}
/**
 * With a registered list-bean table type, matching a step that takes a data
 * table must yield an argument converted to List&lt;Ingredient&gt;.
 */
@SuppressWarnings("unchecked")
@Test
void table_expression_with_list_type_creates_list_of_ingredients_from_table() {
    // Register a table type so the data table can convert to List<Ingredient>.
    registry.defineDataTableType(new DataTableType(Ingredient.class, listBeanMapper(registry)));

    StepDefinition stepDefinition = new StubStepDefinition("Given some stuff:", getTypeFromStepDefinition());

    StepExpression expression = stepExpressionFactory.createExpression(stepDefinition);

    List<Argument> match = expression.match("Given some stuff:", table);

    // The single matched argument is the converted table; check the first row.
    List<Ingredient> ingredients = (List<Ingredient>) match.get(0).getValue();
    Ingredient ingredient = ingredients.get(0);
    assertThat(ingredient.amount, is(equalTo(2)));
}
/**
 * No-arg constructor — presumably for reflection-based instantiation, with the
 * wrapped file system supplied later (e.g. via initialize); TODO confirm
 * against the class's other constructors.
 */
public FilterFileSystem() {
}
/**
 * Verifies by reflection that FilterFileSystem overrides exactly the right
 * FileSystem methods: every overridable method must be declared on
 * FilterFileSystem, EXCEPT those listed in MustNotImplement, which must not be.
 */
@Test
public void testFilterFileSystem() throws Exception {
    int errors = 0;
    for (Method m : FileSystem.class.getDeclaredMethods()) {
        // Static, private, and final methods cannot (or need not) be overridden.
        if (Modifier.isStatic(m.getModifiers())
            || Modifier.isPrivate(m.getModifiers())
            || Modifier.isFinal(m.getModifiers())) {
            continue;
        }
        try {
            MustNotImplement.class.getMethod(m.getName(), m.getParameterTypes());
            // Listed in MustNotImplement: FilterFileSystem must NOT declare it.
            try {
                FilterFileSystem.class.getDeclaredMethod(m.getName(), m.getParameterTypes());
                LOG.error("FilterFileSystem MUST NOT implement " + m);
                errors++;
            } catch (NoSuchMethodException ex) {
                // Expected
            }
        } catch (NoSuchMethodException exc) {
            // Not listed: FilterFileSystem MUST declare (override) it.
            try {
                FilterFileSystem.class.getDeclaredMethod(m.getName(), m.getParameterTypes());
            } catch (NoSuchMethodException exc2) {
                LOG.error("FilterFileSystem MUST implement " + m);
                errors++;
            }
        }
    }
    assertTrue((errors + " methods were not overridden correctly - see" + " log"), errors <= 0);
}
/**
 * Signals a listing pause when a chunk limit is reached: throws
 * ListCanceledException carrying the partial list, and raises the relevant
 * threshold so the next call allows another chunk. Root listings use the
 * container limit; all listings are also subject to the directory limit.
 */
@Override
public void chunk(final Path directory, final AttributedList<Path> list) throws ListCanceledException {
    if(directory.isRoot()) {
        if(list.size() >= container) {
            // Allow another chunk until limit is reached again
            container += preferences.getInteger("browser.list.limit.container");
            throw new ListCanceledException(list);
        }
    }
    // NOTE(review): the Path parameter `directory` shadows the int threshold
    // field `this.directory` used below — the qualified access is intentional.
    if(list.size() >= this.directory) {
        // Allow another chunk until limit is reached again
        this.directory += preferences.getInteger("browser.list.limit.directory");
        throw new ListCanceledException(list);
    }
}
/** An empty root listing is below every limit, so chunk() must not throw. */
@Test
public void testChunk() throws Exception {
    final Path root = new Path("/", EnumSet.of(Path.Type.volume, Path.Type.directory));
    final LimitedListProgressListener listener = new LimitedListProgressListener(new DisabledProgressListener());
    listener.chunk(root, AttributedList.emptyList());
}
/**
 * Adapts a Netty {@link ChannelFuture} to a {@link CompletableFuture} of its
 * channel. Already-completed futures are adapted immediately (no listener
 * allocation); pending futures complete the adapter from a listener.
 *
 * @throws NullPointerException if {@code channelFuture} is null
 */
public static CompletableFuture<Channel> toCompletableFuture(ChannelFuture channelFuture) {
    Objects.requireNonNull(channelFuture, "channelFuture cannot be null");
    final CompletableFuture<Channel> result = new CompletableFuture<>();
    if (!channelFuture.isDone()) {
        channelFuture.addListener((ChannelFuture cf) -> {
            if (cf.isSuccess()) {
                result.complete(cf.channel());
            } else {
                result.completeExceptionally(cf.cause());
            }
        });
    } else if (channelFuture.isSuccess()) {
        result.complete(channelFuture.channel());
    } else {
        result.completeExceptionally(channelFuture.cause());
    }
    return result;
}
/** The adapter must stay pending until the source future succeeds, then yield the channel. */
@Test
public void toCompletableFuture_shouldCompleteSuccessfully_channelFutureCompletedAfter() throws Exception {
    final CompletableFuture<Channel> adapted = ChannelFutures.toCompletableFuture(channelFuture);

    // Source has not completed yet, so the adapter must not be done.
    Assert.assertFalse(adapted.isDone());

    channelFuture.setSuccess();
    Assert.assertEquals(adapted.get(1, TimeUnit.SECONDS), channel);
}
/** Returns the model-context properties for this deploy state. */
public ModelContext.Properties getProperties() {
    return properties;
}
/**
 * DeployState must surface the default application id when no properties are
 * set, and the configured id when custom properties are supplied.
 */
@Test
void testProperties() {
    DeployState.Builder builder = new DeployState.Builder();

    // Default build: the application id falls back to the default id.
    DeployState state = builder.build();
    assertEquals(ApplicationId.defaultId(), state.getProperties().applicationId());

    // Custom properties: the configured application id is surfaced.
    ApplicationId customId = new ApplicationId.Builder()
            .tenant("bar")
            .applicationName("foo")
            .instanceName("quux")
            .build();
    ModelContext.Properties properties = new TestProperties().setApplicationId(customId);
    builder.properties(properties);
    state = builder.build();
    assertEquals(customId, state.getProperties().applicationId());
}
/**
 * Copies bean properties from {@code source} into a new instance of
 * {@code tClass}, skipping the named properties.
 *
 * @return the populated target instance, or {@code null} when source is null
 */
public static <T> T copyProperties(Object source, Class<T> tClass, String... ignoreProperties) {
    if (source == null) {
        return null;
    }
    final T target = ReflectUtil.newInstanceIfPossible(tClass);
    copyProperties(source, target, CopyOptions.create().setIgnoreProperties(ignoreProperties));
    return target;
}
@Test public void copyPropertiesHasBooleanTest() { final SubPerson p1 = new SubPerson(); p1.setSlow(true); // 测试boolean参数值isXXX形式 final SubPerson p2 = new SubPerson(); BeanUtil.copyProperties(p1, p2); assertTrue(p2.getSlow()); // 测试boolean参数值非isXXX形式 final SubPerson2 p3 = new SubPerson2(); BeanUtil.copyProperties(p1, p3); assertTrue(p3.getSlow()); }
/** Serializes the update requirement to JSON in compact (non-pretty) form. */
public static String toJson(UpdateRequirement updateRequirement) {
    return toJson(updateRequirement, false);
}
/** An AssertRefSnapshotID with a null snapshot id must serialize "snapshot-id" as JSON null. */
@Test
public void testAssertRefSnapshotIdFromJsonWithNullSnapshotId() {
    String requirementType = UpdateRequirementParser.ASSERT_REF_SNAPSHOT_ID;
    String refName = "snapshot-name";
    Long snapshotId = null;

    // %d renders the null boxed Long as the literal "null".
    String expected = String.format(
        "{\"type\":\"%s\",\"ref\":\"%s\",\"snapshot-id\":%d}", requirementType, refName, snapshotId);

    UpdateRequirement actual = new UpdateRequirement.AssertRefSnapshotID(refName, snapshotId);
    assertThat(UpdateRequirementParser.toJson(actual))
        .as("AssertRefSnapshotId should convert to the correct JSON value")
        .isEqualTo(expected);
}
/**
 * Tokenizes a "key=value"-style string into a field map.
 * Returns the value unchanged when null/empty, an empty map when no '=' is
 * present, and otherwise a map of matched key/value pairs with surrounding
 * quotes stripped.
 */
@Override
public Object convert(String value) {
    if (isNullOrEmpty(value)) {
        return value;
    }

    if (value.contains("=")) {
        final Map<String, String> fields = new HashMap<>();

        Matcher m = PATTERN.matcher(value);
        while (m.find()) {
            // NOTE(review): groupCount() describes the pattern, not the current
            // match, so this guard is constant across iterations — presumably a
            // defensive check that PATTERN defines exactly two groups; confirm.
            if (m.groupCount() != 2) {
                continue;
            }

            fields.put(removeQuotes(m.group(1)), removeQuotes(m.group(2)));
        }

        return fields;
    } else {
        return Collections.emptyMap();
    }
}
/** Quoted values keep leading/trailing whitespace; unquoted values do not. */
@Test
public void testFilterRetainsWhitespaceInQuotedValues() {
    final TokenizerConverter converter = new TokenizerConverter(new HashMap<String, Object>());

    @SuppressWarnings("unchecked")
    final Map<String, String> result =
            (Map<String, String>) converter.convert("otters in k1= v1 k2=\" v2\" k3=\" v3 \" more otters");

    assertThat(result)
            .hasSize(3)
            .containsEntry("k1", "v1")
            .containsEntry("k2", " v2")
            .containsEntry("k3", " v3 ");
}
/**
 * No-op override — WebView bridging is presumably disabled in this
 * implementation variant; TODO confirm against the interface contract.
 */
@Override
public void showUpWebView(WebView webView, boolean isSupportJellyBean) {
}
/** showUpWebView must accept a real WebView without throwing (smoke test of the no-op path). */
@Test
public void testShowUpWebView1() {
    final WebView webView = new WebView(mApplication);
    mSensorsAPI.showUpWebView(webView, false);
}
/** Loads a department row by primary key; returns null when no row matches. */
@Override
public DeptDO getDept(Long id) {
    return deptMapper.selectById(id);
}
@Test public void testGetDept() { // mock 数据 DeptDO deptDO = randomPojo(DeptDO.class); deptMapper.insert(deptDO); // 准备参数 Long id = deptDO.getId(); // 调用 DeptDO dbDept = deptService.getDept(id); // 断言 assertEquals(deptDO, dbDept); }
/**
 * Static factory for a RuntimeTypeAdapterFactory using the given type field name.
 * When {@code maintainType} is true the type field is kept in the deserialized output.
 */
public static <T> RuntimeTypeAdapterFactory<T> of(Class<T> baseType, String typeFieldName, boolean maintainType) { return new RuntimeTypeAdapterFactory<>(baseType, typeFieldName, maintainType); }
/** Verifies the factory rejects a null base type with a NullPointerException. */
@Test public void testNullBaseType() { assertThatThrownBy(() -> RuntimeTypeAdapterFactory.of(null)) .isInstanceOf(NullPointerException.class); }
/** @return the application group name held by this instance config. */
public String getAppGroupName() { return appGroupName; }
/** Verifies the NETFLIX_APP_GROUP override property is reflected by getAppGroupName(). */
@Test public void testAppGroupNameSystemProp() throws Exception {
    String appGroup = "testAppGroupSystemProp";
    ((ConcurrentCompositeConfiguration) ConfigurationManager.getConfigInstance()).setOverrideProperty("NETFLIX_APP_GROUP", appGroup);
    MyDataCenterInstanceConfig config = new MyDataCenterInstanceConfig();
    Assert.assertEquals("Unexpected app group name", appGroup, config.getAppGroupName());
}
/**
 * Handles a gRPC SendMessage request asynchronously.
 * Rejects an empty batch with MESSAGE_CORRUPTED, validates the topic taken from the
 * first message (the whole batch shares it), then delegates to the messaging
 * processor; any failure completes the returned future exceptionally.
 */
public CompletableFuture<SendMessageResponse> sendMessage(ProxyContext ctx, SendMessageRequest request) {
    CompletableFuture<SendMessageResponse> future = new CompletableFuture<>();
    try {
        if (request.getMessagesCount() <= 0) {
            throw new GrpcProxyException(Code.MESSAGE_CORRUPTED, "no message to send");
        }
        List<apache.rocketmq.v2.Message> messageList = request.getMessagesList();
        // Only the first message's topic is validated; all messages in the batch use it.
        apache.rocketmq.v2.Message message = messageList.get(0);
        Resource topic = message.getTopic();
        validateTopic(topic);
        future = this.messagingProcessor.sendMessage( ctx, new SendMessageQueueSelector(request), topic.getName(), buildSysFlag(message), buildMessage(ctx, request.getMessagesList(), topic) ).thenApply(result -> convertToSendMessageResponse(ctx, request, result));
    } catch (Throwable t) {
        // Surface validation/build errors through the future rather than throwing.
        future.completeExceptionally(t);
    }
    return future;
}
/**
 * Exhaustively verifies sendMessage parameter validation: each case builds a request
 * violating one rule and asserts the expected GrpcProxyException code surfaces
 * through the ExecutionException cause.
 */
@Test public void testParameterValidate() {
    // too large message body
    assertThrows(GrpcProxyException.class, () -> { try { this.sendMessageActivity.sendMessage( createContext(), SendMessageRequest.newBuilder() .addMessages(Message.newBuilder() .setTopic(Resource.newBuilder() .setName(TOPIC) .build()) .setSystemProperties(SystemProperties.newBuilder() .setMessageId("msgId") .setQueueId(0) .setMessageType(MessageType.NORMAL) .setBornTimestamp(Timestamps.fromMillis(System.currentTimeMillis())) .setBornHost(StringUtils.defaultString(NetworkUtil.getLocalAddress(), "127.0.0.1:1234")) .build()) .setBody(ByteString.copyFrom(new byte[4 * 1024 * 1024 + 1])) .build()) .build() ).get(); } catch (ExecutionException t) { GrpcProxyException e = (GrpcProxyException) t.getCause(); assertEquals(Code.MESSAGE_BODY_TOO_LARGE, e.getCode()); throw e; } });
    // black tag
    assertThrows(GrpcProxyException.class, () -> { try { this.sendMessageActivity.sendMessage( createContext(), SendMessageRequest.newBuilder() .addMessages(Message.newBuilder() .setTopic(Resource.newBuilder() .setName(TOPIC) .build()) .setSystemProperties(SystemProperties.newBuilder() .setMessageId("msgId") .setQueueId(0) .setTag(" ") .setMessageType(MessageType.NORMAL) .setBornTimestamp(Timestamps.fromMillis(System.currentTimeMillis())) .setBornHost(StringUtils.defaultString(NetworkUtil.getLocalAddress(), "127.0.0.1:1234")) .build()) .setBody(ByteString.copyFrom(new byte[3])) .build()) .build() ).get(); } catch (ExecutionException t) { GrpcProxyException e = (GrpcProxyException) t.getCause(); assertEquals(Code.ILLEGAL_MESSAGE_TAG, e.getCode()); throw e; } });
    // tag with '|'
    assertThrows(GrpcProxyException.class, () -> { try { this.sendMessageActivity.sendMessage( createContext(), SendMessageRequest.newBuilder() .addMessages(Message.newBuilder() .setTopic(Resource.newBuilder() .setName(TOPIC) .build()) .setSystemProperties(SystemProperties.newBuilder() .setMessageId("msgId") .setQueueId(0) .setTag("|") .setMessageType(MessageType.NORMAL) .setBornTimestamp(Timestamps.fromMillis(System.currentTimeMillis())) .setBornHost(StringUtils.defaultString(NetworkUtil.getLocalAddress(), "127.0.0.1:1234")) .build()) .setBody(ByteString.copyFrom(new byte[3])) .build()) .build() ).get(); } catch (ExecutionException t) { GrpcProxyException e = (GrpcProxyException) t.getCause(); assertEquals(Code.ILLEGAL_MESSAGE_TAG, e.getCode()); throw e; } });
    // tag with \t
    assertThrows(GrpcProxyException.class, () -> { try { this.sendMessageActivity.sendMessage( createContext(), SendMessageRequest.newBuilder() .addMessages(Message.newBuilder() .setTopic(Resource.newBuilder() .setName(TOPIC) .build()) .setSystemProperties(SystemProperties.newBuilder() .setMessageId("msgId") .setQueueId(0) .setTag("\t") .setMessageType(MessageType.NORMAL) .setBornTimestamp(Timestamps.fromMillis(System.currentTimeMillis())) .setBornHost(StringUtils.defaultString(NetworkUtil.getLocalAddress(), "127.0.0.1:1234")) .build()) .setBody(ByteString.copyFrom(new byte[3])) .build()) .build() ).get(); } catch (ExecutionException t) { GrpcProxyException e = (GrpcProxyException) t.getCause(); assertEquals(Code.ILLEGAL_MESSAGE_TAG, e.getCode()); throw e; } });
    // blank message key
    assertThrows(GrpcProxyException.class, () -> { try { this.sendMessageActivity.sendMessage( createContext(), SendMessageRequest.newBuilder() .addMessages(Message.newBuilder() .setTopic(Resource.newBuilder() .setName(TOPIC) .build()) .setSystemProperties(SystemProperties.newBuilder() .setMessageId("msgId") .setQueueId(0) .addKeys(" ") .setMessageType(MessageType.NORMAL) .setBornTimestamp(Timestamps.fromMillis(System.currentTimeMillis())) .setBornHost(StringUtils.defaultString(NetworkUtil.getLocalAddress(), "127.0.0.1:1234")) .build()) .setBody(ByteString.copyFrom(new byte[3])) .build()) .build() ).get(); } catch (ExecutionException t) { GrpcProxyException e = (GrpcProxyException) t.getCause(); assertEquals(Code.ILLEGAL_MESSAGE_KEY, e.getCode()); throw e; } });
    // blank message with \t
    assertThrows(GrpcProxyException.class, () -> { try { this.sendMessageActivity.sendMessage( createContext(), SendMessageRequest.newBuilder() .addMessages(Message.newBuilder() .setTopic(Resource.newBuilder() .setName(TOPIC) .build()) .setSystemProperties(SystemProperties.newBuilder() .setMessageId("msgId") .setQueueId(0) .addKeys("\t") .setMessageType(MessageType.NORMAL) .setBornTimestamp(Timestamps.fromMillis(System.currentTimeMillis())) .setBornHost(StringUtils.defaultString(NetworkUtil.getLocalAddress(), "127.0.0.1:1234")) .build()) .setBody(ByteString.copyFrom(new byte[3])) .build()) .build() ).get(); } catch (ExecutionException t) { GrpcProxyException e = (GrpcProxyException) t.getCause(); assertEquals(Code.ILLEGAL_MESSAGE_KEY, e.getCode()); throw e; } });
    // blank message group
    assertThrows(GrpcProxyException.class, () -> { try { this.sendMessageActivity.sendMessage( createContext(), SendMessageRequest.newBuilder() .addMessages(Message.newBuilder() .setTopic(Resource.newBuilder() .setName(TOPIC) .build()) .setSystemProperties(SystemProperties.newBuilder() .setMessageId("msgId") .setQueueId(0) .setMessageGroup(" ") .setMessageType(MessageType.NORMAL) .setBornTimestamp(Timestamps.fromMillis(System.currentTimeMillis())) .setBornHost(StringUtils.defaultString(NetworkUtil.getLocalAddress(), "127.0.0.1:1234")) .build()) .setBody(ByteString.copyFrom(new byte[3])) .build()) .build() ).get(); } catch (ExecutionException t) { GrpcProxyException e = (GrpcProxyException) t.getCause(); assertEquals(Code.ILLEGAL_MESSAGE_GROUP, e.getCode()); throw e; } });
    // long message group
    assertThrows(GrpcProxyException.class, () -> { try { this.sendMessageActivity.sendMessage( createContext(), SendMessageRequest.newBuilder() .addMessages(Message.newBuilder() .setTopic(Resource.newBuilder() .setName(TOPIC) .build()) .setSystemProperties(SystemProperties.newBuilder() .setMessageId("msgId") .setQueueId(0) .setMessageGroup(createStr(ConfigurationManager.getProxyConfig().getMaxMessageGroupSize() + 1)) .setMessageType(MessageType.NORMAL) .setBornTimestamp(Timestamps.fromMillis(System.currentTimeMillis())) .setBornHost(StringUtils.defaultString(NetworkUtil.getLocalAddress(), "127.0.0.1:1234")) .build()) .setBody(ByteString.copyFrom(new byte[3])) .build()) .build() ).get(); } catch (ExecutionException t) { GrpcProxyException e = (GrpcProxyException) t.getCause(); assertEquals(Code.ILLEGAL_MESSAGE_GROUP, e.getCode()); throw e; } });
    // message group with \t
    assertThrows(GrpcProxyException.class, () -> { try { this.sendMessageActivity.sendMessage( createContext(), SendMessageRequest.newBuilder() .addMessages(Message.newBuilder() .setTopic(Resource.newBuilder() .setName(TOPIC) .build()) .setSystemProperties(SystemProperties.newBuilder() .setMessageId("msgId") .setQueueId(0) .setMessageGroup("\t") .setMessageType(MessageType.NORMAL) .setBornTimestamp(Timestamps.fromMillis(System.currentTimeMillis())) .setBornHost(StringUtils.defaultString(NetworkUtil.getLocalAddress(), "127.0.0.1:1234")) .build()) .setBody(ByteString.copyFrom(new byte[3])) .build()) .build() ).get(); } catch (ExecutionException t) { GrpcProxyException e = (GrpcProxyException) t.getCause(); assertEquals(Code.ILLEGAL_MESSAGE_GROUP, e.getCode()); throw e; } });
    // too large message property
    assertThrows(GrpcProxyException.class, () -> { try { this.sendMessageActivity.sendMessage( createContext(), SendMessageRequest.newBuilder() .addMessages(Message.newBuilder() .setTopic(Resource.newBuilder() .setName(TOPIC) .build()) .setSystemProperties(SystemProperties.newBuilder() .setMessageId("msgId") .setQueueId(0) .setMessageType(MessageType.NORMAL) .setBornTimestamp(Timestamps.fromMillis(System.currentTimeMillis())) .setBornHost(StringUtils.defaultString(NetworkUtil.getLocalAddress(), "127.0.0.1:1234")) .build()) .putUserProperties("key", createStr(16 * 1024 + 1)) .setBody(ByteString.copyFrom(new byte[3])) .build()) .build() ).get(); } catch (ExecutionException t) { GrpcProxyException e = (GrpcProxyException) t.getCause(); assertEquals(Code.MESSAGE_PROPERTIES_TOO_LARGE, e.getCode()); throw e; } });
    // too large message property
    assertThrows(GrpcProxyException.class, () -> { Map<String, String> p = new HashMap<>(); for (int i = 0; i <= ConfigurationManager.getProxyConfig().getUserPropertyMaxNum(); i++) { p.put(String.valueOf(i), String.valueOf(i)); } try { this.sendMessageActivity.sendMessage( createContext(), SendMessageRequest.newBuilder() .addMessages(Message.newBuilder() .setTopic(Resource.newBuilder() .setName(TOPIC) .build()) .setSystemProperties(SystemProperties.newBuilder() .setMessageId("msgId") .setQueueId(0) .setMessageType(MessageType.NORMAL) .setBornTimestamp(Timestamps.fromMillis(System.currentTimeMillis())) .setBornHost(StringUtils.defaultString(NetworkUtil.getLocalAddress(), "127.0.0.1:1234")) .build()) .putAllUserProperties(p) .setBody(ByteString.copyFrom(new byte[3])) .build()) .build() ).get(); } catch (ExecutionException t) { GrpcProxyException e = (GrpcProxyException) t.getCause(); assertEquals(Code.MESSAGE_PROPERTIES_TOO_LARGE, e.getCode()); throw e; } });
    // set system properties
    assertThrows(GrpcProxyException.class, () -> { try { this.sendMessageActivity.sendMessage( createContext(), SendMessageRequest.newBuilder() .addMessages(Message.newBuilder() .setTopic(Resource.newBuilder() .setName(TOPIC) .build()) .setSystemProperties(SystemProperties.newBuilder() .setMessageId("msgId") .setQueueId(0) .setMessageType(MessageType.NORMAL) .setBornTimestamp(Timestamps.fromMillis(System.currentTimeMillis())) .setBornHost(StringUtils.defaultString(NetworkUtil.getLocalAddress(), "127.0.0.1:1234")) .build()) .putUserProperties(MessageConst.PROPERTY_TRACE_SWITCH, "false") .setBody(ByteString.copyFrom(new byte[3])) .build()) .build() ).get(); } catch (ExecutionException t) { GrpcProxyException e = (GrpcProxyException) t.getCause(); assertEquals(Code.ILLEGAL_MESSAGE_PROPERTY_KEY, e.getCode()); throw e; } });
    // set the key of user property with control character
    assertThrows(GrpcProxyException.class, () -> { try { this.sendMessageActivity.sendMessage( createContext(), SendMessageRequest.newBuilder() .addMessages(Message.newBuilder() .setTopic(Resource.newBuilder() .setName(TOPIC) .build()) .setSystemProperties(SystemProperties.newBuilder() .setMessageId("msgId") .setQueueId(0) .setMessageType(MessageType.NORMAL) .setBornTimestamp(Timestamps.fromMillis(System.currentTimeMillis())) .setBornHost(StringUtils.defaultString(NetworkUtil.getLocalAddress(), "127.0.0.1:1234")) .build()) .putUserProperties("\u0000", "hello") .setBody(ByteString.copyFrom(new byte[3])) .build()) .build() ).get(); } catch (ExecutionException t) { GrpcProxyException e = (GrpcProxyException) t.getCause(); assertEquals(Code.ILLEGAL_MESSAGE_PROPERTY_KEY, e.getCode()); throw e; } });
    // set the value of user property with control character
    assertThrows(GrpcProxyException.class, () -> { try { this.sendMessageActivity.sendMessage( createContext(), SendMessageRequest.newBuilder() .addMessages(Message.newBuilder() .setTopic(Resource.newBuilder() .setName(TOPIC) .build()) .setSystemProperties(SystemProperties.newBuilder() .setMessageId("msgId") .setQueueId(0) .setMessageType(MessageType.NORMAL) .setBornTimestamp(Timestamps.fromMillis(System.currentTimeMillis())) .setBornHost(StringUtils.defaultString(NetworkUtil.getLocalAddress(), "127.0.0.1:1234")) .build()) .putUserProperties("p", "\u0000") .setBody(ByteString.copyFrom(new byte[3])) .build()) .build() ).get(); } catch (ExecutionException t) { GrpcProxyException e = (GrpcProxyException) t.getCause(); assertEquals(Code.ILLEGAL_MESSAGE_PROPERTY_KEY, e.getCode()); throw e; } });
    // empty message id
    assertThrows(GrpcProxyException.class, () -> { try { this.sendMessageActivity.sendMessage( createContext(), SendMessageRequest.newBuilder() .addMessages(Message.newBuilder() .setTopic(Resource.newBuilder() .setName(TOPIC) .build()) .setSystemProperties(SystemProperties.newBuilder() .setMessageId(" ") .setQueueId(0) .setMessageType(MessageType.NORMAL) .setBornTimestamp(Timestamps.fromMillis(System.currentTimeMillis())) .setBornHost(StringUtils.defaultString(NetworkUtil.getLocalAddress(), "127.0.0.1:1234")) .build()) .setBody(ByteString.copyFrom(new byte[3])) .build()) .build() ).get(); } catch (ExecutionException t) { GrpcProxyException e = (GrpcProxyException) t.getCause(); assertEquals(Code.ILLEGAL_MESSAGE_ID, e.getCode()); throw e; } });
    // delay time
    assertThrows(GrpcProxyException.class, () -> { try { this.sendMessageActivity.sendMessage( createContext(), SendMessageRequest.newBuilder() .addMessages(Message.newBuilder() .setTopic(Resource.newBuilder() .setName(TOPIC) .build()) .setSystemProperties(SystemProperties.newBuilder() .setMessageId("id") .setDeliveryTimestamp( Timestamps.fromMillis(System.currentTimeMillis() + Duration.ofDays(1).toMillis() + Duration.ofSeconds(10).toMillis())) .setQueueId(0) .setMessageType(MessageType.NORMAL) .setBornTimestamp(Timestamps.fromMillis(System.currentTimeMillis())) .setBornHost(StringUtils.defaultString(NetworkUtil.getLocalAddress(), "127.0.0.1:1234")) .build()) .setBody(ByteString.copyFrom(new byte[3])) .build()) .build() ).get(); } catch (ExecutionException t) { GrpcProxyException e = (GrpcProxyException) t.getCause(); assertEquals(Code.ILLEGAL_DELIVERY_TIME, e.getCode()); throw e; } });
    // transactionRecoverySecond
    assertThrows(GrpcProxyException.class, () -> { try { this.sendMessageActivity.sendMessage( createContext(), SendMessageRequest.newBuilder() .addMessages(Message.newBuilder() .setTopic(Resource.newBuilder() .setName(TOPIC) .build()) .setSystemProperties(SystemProperties.newBuilder() .setMessageId("id") .setQueueId(0) .setMessageType(MessageType.NORMAL) .setBornTimestamp(Timestamps.fromMillis(System.currentTimeMillis())) .setBornHost(StringUtils.defaultString(NetworkUtil.getLocalAddress(), "127.0.0.1:1234")) .setOrphanedTransactionRecoveryDuration(Durations.fromHours(2)) .setMessageType(MessageType.TRANSACTION) .build()) .setBody(ByteString.copyFrom(new byte[3])) .build()) .build() ).get(); } catch (ExecutionException t) { GrpcProxyException e = (GrpcProxyException) t.getCause(); assertEquals(Code.BAD_REQUEST, e.getCode()); throw e; } });
}
/**
 * Reads a NUL-terminated byte sequence from the buffer and consumes the trailing 0x00.
 * NOTE(review): ByteBuf.bytesBefore returns -1 when no terminator is present, which
 * would make the array allocation throw NegativeArraySizeException — confirm callers
 * guarantee a terminator exists.
 */
public byte[] readStringNulByBytes() {
    byte[] result = new byte[byteBuf.bytesBefore((byte) 0)];
    byteBuf.readBytes(result);
    // Skip the NUL terminator itself.
    byteBuf.skipBytes(1);
    return result;
}
/** Verifies an immediate NUL yields an empty array and that the terminator byte is skipped. */
@Test void assertReadStringNulByBytes() { when(byteBuf.bytesBefore((byte) 0)).thenReturn(0); assertThat(new MySQLPacketPayload(byteBuf, StandardCharsets.UTF_8).readStringNulByBytes(), is(new byte[]{})); verify(byteBuf).skipBytes(1); }
/**
 * Returns {@code true} when exactly one of the given expressions is {@code true}.
 * Never throws: an empty argument list (or all-false input) simply yields {@code false}.
 */
public static boolean onlyOneIsTrueNonThrow(final boolean... expressions) {
    int trueSeen = 0;
    for (int i = 0; i < expressions.length; i++) {
        if (!expressions[i]) {
            continue;
        }
        trueSeen++;
        if (trueSeen > 1) {
            // Short-circuit: a second true value can never satisfy "exactly one".
            return false;
        }
    }
    return trueSeen == 1;
}
/** Covers exactly-one, multiple-true, and all-false inputs for onlyOneIsTrueNonThrow. */
@Test public void testOnlyOneIsTrueNonThrow() { Assertions.assertTrue(Utils.onlyOneIsTrueNonThrow(true)); Assertions.assertFalse(Utils.onlyOneIsTrueNonThrow(true, true, true)); Assertions.assertFalse(Utils.onlyOneIsTrueNonThrow(true, true, false)); Assertions.assertFalse(Utils.onlyOneIsTrueNonThrow(false, false, false)); }
/** Returns true when any projection's exact expression equals LAST_INSERT_ID() (case-insensitive). */
private boolean isContainsLastInsertIdProjection(final Collection<Projection> projections) {
    for (Projection each : projections) {
        if (LAST_INSERT_ID_FUNCTION_EXPRESSION.equalsIgnoreCase(SQLUtils.getExactlyExpression(each.getExpression()))) {
            return true;
        }
    }
    return false;
}
/** Positive case: LAST_INSERT_ID() projection detected; negative case: MAX(id) is not. */
@Test void assertIsContainsLastInsertIdProjection() {
    ProjectionsContext lastInsertIdProjection = new ProjectionsContext(0, 0, false, Collections.singletonList(new ExpressionProjection(new ExpressionProjectionSegment(0, 0, "LAST_INSERT_ID()"), new IdentifierValue("id"), new MySQLDatabaseType())));
    assertTrue(lastInsertIdProjection.isContainsLastInsertIdProjection());
    ProjectionsContext maxProjection = new ProjectionsContext(0, 0, false, Collections.singletonList(new ExpressionProjection(new ExpressionProjectionSegment(0, 0, "MAX(id)"), new IdentifierValue("max"), new MySQLDatabaseType())));
    assertFalse(maxProjection.isContainsLastInsertIdProjection());
}
/**
 * flatMap variant that accepts a mapper returning {@link Optional}, bridging
 * Optional-based APIs into Opt. An empty Opt (or an empty mapped Optional)
 * yields an empty Opt.
 *
 * @throws NullPointerException when mapper is null
 */
public <U> Opt<U> flattedMap(Function<? super T, ? extends Optional<? extends U>> mapper) {
    Objects.requireNonNull(mapper);
    if (isEmpty()) {
        return empty();
    } else {
        // Unwrap the Optional so its emptiness maps onto Opt's emptiness.
        return ofNullable(mapper.apply(value).orElse(null));
    }
}
/** Shows flattedMap bridging an Optional-returning mapper where plain flatMap cannot. */
@Test public void flattedMapTest() {
    // flatMap that is compatible with Optional
    List<User> userList = new ArrayList<>();
    // before: incompatible
    // Opt.ofNullable(userList).map(List::stream).flatMap(Stream::findFirst);
    // now: compatible
    User user = Opt.ofNullable(userList).map(List::stream) .flattedMap(Stream::findFirst).orElseGet(User.builder()::build);
    assertNull(user.getUsername());
    assertNull(user.getNickname());
}
/**
 * Converts a message id object to its base AMQP string form, prefixing
 * non-String types (UUID, UnsignedLong, Binary) with their encoding markers.
 * Returns null for null input; rejects any other type.
 *
 * @throws IllegalArgumentException for unsupported message id types
 */
public String toBaseMessageIdString(Object messageId) {
    if (messageId == null) {
        return null;
    } else if (messageId instanceof String) {
        String stringId = (String) messageId;
        // If the given string has a type encoding prefix,
        // we need to escape it as an encoded string (even if
        // the existing encoding prefix was also for string)
        if (hasTypeEncodingPrefix(stringId)) {
            return AMQP_STRING_PREFIX + stringId;
        } else {
            return stringId;
        }
    } else if (messageId instanceof UUID) {
        return AMQP_UUID_PREFIX + messageId.toString();
    } else if (messageId instanceof UnsignedLong) {
        return AMQP_ULONG_PREFIX + messageId.toString();
    } else if (messageId instanceof Binary) {
        // Copy the Binary's bytes (via a duplicate view) and hex-encode them.
        ByteBuffer dup = ((Binary) messageId).asByteBuffer();
        byte[] bytes = new byte[dup.remaining()];
        dup.get(bytes);
        String hex = convertBinaryToHexString(bytes);
        return AMQP_BINARY_PREFIX + hex;
    } else {
        throw new IllegalArgumentException("Unsupported type provided: " + messageId.getClass());
    }
}
/** Verifies Binary message ids hex-encode with the AMQP binary prefix. */
@Test public void testToBaseMessageIdStringWithBinary() {
    byte[] bytes = new byte[] { (byte) 0x00, (byte) 0xAB, (byte) 0x09, (byte) 0xFF };
    Binary binary = new Binary(bytes);
    String expected = AMQPMessageIdHelper.AMQP_BINARY_PREFIX + "00AB09FF";
    String baseMessageIdString = messageIdHelper.toBaseMessageIdString(binary);
    assertNotNull("null string should not have been returned", baseMessageIdString);
    assertEquals("expected base id string was not returned", expected, baseMessageIdString);
}
/** Int overload: widens to long and delegates to the long variant. */
public final void isGreaterThan(int other) { isGreaterThan((long) other); }
/** Checks the int overload: equal value fails, strictly greater value passes. */
@Test public void isGreaterThan_int() { expectFailureWhenTestingThat(2L).isGreaterThan(2); assertThat(2L).isGreaterThan(1); }
/**
 * Imports albums and photos into Koofr: albums become folders (ids recorded in the
 * idempotent executor), then each photo is uploaded with its byte size accumulated
 * into the returned ImportResult.
 */
@Override public ImportResult importItem( UUID jobId, IdempotentImportExecutor idempotentImportExecutor, TokensAndUrlAuthData authData, PhotosContainerResource resource) throws Exception {
    KoofrClient koofrClient = koofrClientFactory.create(authData);
    monitor.debug( () -> String.format( "%s: Importing %s albums and %s photos before transmogrification", jobId, resource.getAlbums().size(), resource.getPhotos().size()));
    // Make the data Koofr compatible
    resource.transmogrify(transmogrificationConfig);
    monitor.debug( () -> String.format( "%s: Importing %s albums and %s photos after transmogrification", jobId, resource.getAlbums().size(), resource.getPhotos().size()));
    for (PhotoAlbum album : resource.getAlbums()) {
        // Create a Koofr folder and then save the id with the mapping data
        idempotentImportExecutor.executeAndSwallowIOExceptions( album.getId(), album.getName(), () -> createAlbumFolder(album, koofrClient));
    }
    // LongAdder tolerates accumulation from the per-photo callbacks below.
    final LongAdder totalImportedFilesSizes = new LongAdder();
    for (PhotoModel photoModel : resource.getPhotos()) {
        idempotentImportExecutor.importAndSwallowIOExceptions( photoModel, photo -> { ItemImportResult<String> fileImportResult = importSinglePhoto(photoModel, jobId, idempotentImportExecutor, koofrClient); if (fileImportResult != null && fileImportResult.hasBytes()) { totalImportedFilesSizes.add(fileImportResult.getBytes()); } return fileImportResult; });
    }
    return ImportResult.OK.copyWithBytes(totalImportedFilesSizes.longValue());
}
/** When the album folder upload fails with a Koofr error, the photo is recorded as skipped. */
@Test public void testSkipNotFoundAlbum() throws Exception {
    ByteArrayInputStream inputStream = new ByteArrayInputStream(new byte[] {0, 1, 2, 3, 4});
    when(jobStore.getStream(any(), any())).thenReturn(new InputStreamWrapper(inputStream, 5L));
    when(client.uploadFile( eq("/root/Album 1"), anyString(), any(), anyString(), any(), anyString())) .thenThrow(new KoofrClientIOException(buildErrorResponse()));
    UUID jobId = UUID.randomUUID();
    when(executor.getCachedValue(eq("id1"))).thenReturn("/root/Album 1");
    Collection<PhotoAlbum> albums = ImmutableList.of(new PhotoAlbum("id1", "Album 1", "This is a fake album"));
    Collection<PhotoModel> photos = ImmutableList.of( new PhotoModel( "pic1.jpg", "http://fake.com/1.jpg", "A pic", "image/jpeg", "p1", "id1", true));
    PhotosContainerResource resource = spy(new PhotosContainerResource(albums, photos));
    importer.importItem(jobId, executor, authData, resource);
    String importResult = capturedResult.get();
    assertEquals(importResult, "skipped-p1");
}
/**
 * Returns the value mapped to {@code key}, or {@code defaultValue} when the map
 * contains no mapping for it. A key explicitly mapped to {@code null} yields
 * {@code null}, matching the original containsKey/get behavior and
 * {@link Map#getOrDefault}.
 *
 * @param configMap map of configuration entries (must not be null)
 * @param key configuration key to look up
 * @param defaultValue value returned when the key is absent
 * @return the mapped value, or {@code defaultValue} if absent
 */
protected static Object getOrDefault(Map<String, Object> configMap, String key, Object defaultValue) {
    // Map.getOrDefault performs exactly the containsKey/get dance in one call.
    return configMap.getOrDefault(key, defaultValue);
}
/** Covers both branches of getOrDefault: existing key returns value, missing key returns default. */
@Test public void testGetOrDefault() {
    String existingKey = "exists";
    String missingKey = "missing";
    String value = "value";
    String defaultValue = "default";
    Map<String, Object> map = new HashMap<>();
    map.put("exists", "value");
    assertEquals(SSLUtils.getOrDefault(map, existingKey, defaultValue), value);
    assertEquals(SSLUtils.getOrDefault(map, missingKey, defaultValue), defaultValue);
}
/** Convenience overload: runs the command with no timeout (-1 means unbounded). */
public static String execCommand(String... cmd) throws IOException { return execCommand(cmd, -1); }
/** A failing command must raise ExitCodeException whose message reflects the OS error. */
@Test public void testRunProgramWithErrorReturn() {
    Shell.ExitCodeException e = assertThrows(Shell.ExitCodeException.class, () -> Shell.execCommand("head", "-c", "0", NONEXISTENT_PATH));
    String message = e.getMessage();
    // Message text varies by platform, so accept either known form.
    assertTrue(message.contains("No such file") || message.contains("illegal byte count"), "Unexpected error message '" + message + "'");
}
/**
 * Returns the next date in the range: the start value on the first call, then
 * steps by {@code increment} days (a negative increment iterates backwards).
 *
 * @throws NoSuchElementException when the range is exhausted
 */
@Override public ZonedDateTime next() {
    if (!hasNext()) {
        throw new NoSuchElementException();
    }
    if (cursor == null) {
        // First call: emit the range start itself.
        cursor = start;
    } else {
        cursor = cursor.plusDays(increment);
    }
    return cursor;
}
/** Iterating from a later date to an earlier one yields dates in descending order. */
@Test void nextDescendantTest() {
    List<ZonedDateTime> expected = Arrays.asList(getZonedDateTime(2021, 1, 3, 10, 15), getZonedDateTime(2021, 1, 2, 10, 15), getZonedDateTime(2021, 1, 1, 10, 15));
    ZonedDateTimeRangeIterator iterator = new ZonedDateTimeRangeIterator(after, before);
    IntStream.range(0, 3).forEach(i -> assertEquals(expected.get(i), iterator.next()));
}
/**
 * Deep-clones this meta: key arrays are copied and the StepIOMeta is rebuilt with
 * copied streams so the clone shares no mutable state with the original.
 */
public Object clone() {
    MergeJoinMeta retval = (MergeJoinMeta) super.clone();
    int nrKeys1 = keyFields1.length;
    int nrKeys2 = keyFields2.length;
    retval.allocate( nrKeys1, nrKeys2 );
    System.arraycopy( keyFields1, 0, retval.keyFields1, 0, nrKeys1 );
    System.arraycopy( keyFields2, 0, retval.keyFields2, 0, nrKeys2 );
    // Rebuild the IO meta with copied streams instead of sharing the original's.
    StepIOMetaInterface stepIOMeta = new StepIOMeta( true, true, false, false, false, false );
    List<StreamInterface> infoStreams = getStepIOMeta().getInfoStreams();
    for ( StreamInterface infoStream : infoStreams ) {
        stepIOMeta.addStream( new Stream( infoStream ) );
    }
    retval.setStepIOMeta( stepIOMeta );
    return retval;
}
/** Verifies clone() copies key arrays, scalars, and produces independent StepIOMeta/streams. */
@Test public void cloneTest() throws Exception {
    MergeJoinMeta meta = new MergeJoinMeta();
    meta.allocate( 2, 3 );
    meta.setKeyFields1( new String[] { "kf1-1", "kf1-2" } );
    meta.setKeyFields2( new String[] { "kf2-1", "kf2-2", "kf2-3" } );
    // scalars should be cloned using super.clone() - makes sure they're calling super.clone()
    meta.setJoinType( "INNER" );
    MergeJoinMeta aClone = (MergeJoinMeta) meta.clone();
    assertFalse( aClone == meta );
    // Not same object returned by clone
    assertTrue( Arrays.equals( meta.getKeyFields1(), aClone.getKeyFields1() ) );
    assertTrue( Arrays.equals( meta.getKeyFields2(), aClone.getKeyFields2() ) );
    assertEquals( meta.getJoinType(), aClone.getJoinType() );
    assertNotNull( aClone.getStepIOMeta() );
    assertFalse( meta.getStepIOMeta() == aClone.getStepIOMeta() );
    List<StreamInterface> infoStreams = meta.getStepIOMeta().getInfoStreams();
    List<StreamInterface> cloneInfoStreams = aClone.getStepIOMeta().getInfoStreams();
    assertFalse( infoStreams == cloneInfoStreams );
    int streamSize = infoStreams.size();
    assertTrue( streamSize == cloneInfoStreams.size() );
    for ( int i = 0; i < streamSize; i++ ) {
        assertFalse( infoStreams.get( i ) == cloneInfoStreams.get( i ) );
    }
}
/**
 * Reads (and caches per bucket) the S3 versioning configuration for the file's
 * container. Falls back to an empty configuration on access-denied, unsupported,
 * or not-found errors, and when the service returns no status.
 */
@Override public VersioningConfiguration getConfiguration(final Path file) throws BackgroundException {
    final Path bucket = containerService.getContainer(file);
    if(cache.contains(bucket)) {
        return cache.get(bucket);
    }
    try {
        final S3BucketVersioningStatus status = session.getClient().getBucketVersioningStatus(bucket.isRoot() ? StringUtils.EMPTY : bucket.getName());
        if(null == status) {
            log.warn(String.format("Failure parsing versioning status for %s", bucket));
            return VersioningConfiguration.empty();
        }
        final VersioningConfiguration configuration = new VersioningConfiguration(status.isVersioningEnabled(), status.isMultiFactorAuthDeleteRequired());
        cache.put(bucket, configuration);
        return configuration;
    } catch(ServiceException e) {
        // Map the service error; certain mapped failures are tolerated with an empty result.
        try {
            throw new S3ExceptionMappingService().map("Cannot read container configuration", e);
        } catch(AccessDeniedException l) {
            log.warn(String.format("Missing permission to read versioning configuration for %s %s", bucket, e.getMessage()));
            return VersioningConfiguration.empty();
        } catch(InteroperabilityException | NotfoundException i) {
            log.warn(String.format("Not supported to read versioning configuration for %s %s", bucket, e.getMessage()));
            return VersioningConfiguration.empty();
        }
    }
}
/** Without credentials, access-denied must degrade to an empty versioning configuration. */
@Test public void testForbidden() throws Exception {
    session.getHost().getCredentials().setPassword(StringUtils.EMPTY);
    assertEquals(VersioningConfiguration.empty(), new S3VersioningFeature(session, new S3AccessControlListFeature(session)).getConfiguration(new Path("/dist.springframework.org", EnumSet.of(Path.Type.directory))));
}
/**
 * Loads a Rate from the Consul KV store for the given key.
 * Returns null when the key is absent, has no decodable value, or the stored
 * JSON cannot be deserialized (the IOException is logged, not rethrown).
 */
@Override protected Rate getRate(final String key) {
    Rate rate = null;
    GetValue value = this.consulClient.getKVValue(buildValidConsulKey(key)).getValue();
    if (value != null && value.getDecodedValue() != null) {
        try {
            rate = this.objectMapper.readValue(value.getDecodedValue(), Rate.class);
        } catch (IOException e) {
            log.error("Failed to deserialize Rate", e);
        }
    }
    return rate;
}
/** A deserialization IOException must be swallowed and surface as a null Rate. */
@Test public void testGetRateException() throws IOException {
    GetValue getValue = new GetValue();
    getValue.setValue("");
    when(consulClient.getKVValue(any())).thenReturn(new Response<>(getValue, 1L, true, 1L));
    when(objectMapper.readValue(anyString(), eq(Rate.class))).thenAnswer(invocation -> { throw new IOException(); });
    ConsulRateLimiter consulRateLimiter = new ConsulRateLimiter(rateLimiterErrorHandler, consulClient, objectMapper);
    Rate rate = consulRateLimiter.getRate("");
    assertThat(rate).isNull();
}
/**
 * Factory mapping a MySQL command packet type to its packet implementation.
 * COM_STMT_EXECUTE peeks the statement id (little-endian int at the current reader
 * index) to look up the prepared statement's parameter count; unrecognized types
 * yield a MySQLUnsupportedCommandPacket.
 */
public static MySQLCommandPacket newInstance(final MySQLCommandPacketType commandPacketType, final MySQLPacketPayload payload, final ConnectionSession connectionSession) {
    switch (commandPacketType) {
        case COM_QUIT:
            return new MySQLComQuitPacket();
        case COM_INIT_DB:
            return new MySQLComInitDbPacket(payload);
        case COM_FIELD_LIST:
            return new MySQLComFieldListPacket(payload);
        case COM_QUERY:
            return new MySQLComQueryPacket(payload);
        case COM_STMT_PREPARE:
            return new MySQLComStmtPreparePacket(payload);
        case COM_STMT_EXECUTE:
            // Peek (not consume) the statement id to resolve the prepared statement.
            MySQLServerPreparedStatement serverPreparedStatement = connectionSession.getServerPreparedStatementRegistry().getPreparedStatement(payload.getByteBuf().getIntLE(payload.getByteBuf().readerIndex()));
            return new MySQLComStmtExecutePacket(payload, serverPreparedStatement.getSqlStatementContext().getSqlStatement().getParameterCount());
        case COM_STMT_SEND_LONG_DATA:
            return new MySQLComStmtSendLongDataPacket(payload);
        case COM_STMT_RESET:
            return new MySQLComStmtResetPacket(payload);
        case COM_STMT_CLOSE:
            return new MySQLComStmtClosePacket(payload);
        case COM_SET_OPTION:
            return new MySQLComSetOptionPacket(payload);
        case COM_PING:
            return new MySQLComPingPacket();
        case COM_RESET_CONNECTION:
            return new MySQLComResetConnectionPacket();
        default:
            return new MySQLUnsupportedCommandPacket(commandPacketType);
    }
}
/** COM_STATISTICS has no dedicated packet class and must fall through to unsupported. */
@Test void assertNewInstanceWithComStatisticsPacket() { assertThat(MySQLCommandPacketFactory.newInstance(MySQLCommandPacketType.COM_STATISTICS, payload, connectionSession), instanceOf(MySQLUnsupportedCommandPacket.class)); }
/** Wraps the bulk-insert helper's accumulated write statuses into the Spark commit message. */
@Override public WriterCommitMessage commit() throws IOException { return new HoodieWriterCommitMessage(bulkInsertWriterHelper.getWriteStatuses()); }
/**
 * End-to-end check of the bulk-insert internal writer with hive-style partitioning:
 * writes sorted batches across partitions, commits, then validates write statuses
 * and the parquet output against the input rows.
 */
@Test public void testDataInternalWriterHiveStylePartitioning() throws Exception {
    boolean sorted = true;
    boolean populateMetaFields = false;
    // init config and table
    HoodieWriteConfig cfg = getWriteConfig(populateMetaFields, "true");
    HoodieTable table = HoodieSparkTable.create(cfg, context, metaClient);
    for (int i = 0; i < 1; i++) {
        String instantTime = "00" + i;
        // init writer
        HoodieBulkInsertDataInternalWriter writer = new HoodieBulkInsertDataInternalWriter(table, cfg, instantTime, RANDOM.nextInt(100000), RANDOM.nextLong(), RANDOM.nextLong(), STRUCT_TYPE, populateMetaFields, sorted);
        int size = 10 + RANDOM.nextInt(1000);
        // write N rows to partition1, N rows to partition2 and N rows to partition3 ... Each batch should create a new RowCreateHandle and a new file
        int batches = 3;
        Dataset<Row> totalInputRows = null;
        for (int j = 0; j < batches; j++) {
            String partitionPath = HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[j % 3];
            Dataset<Row> inputRows = getRandomRows(sqlContext, size, partitionPath, false);
            writeRows(inputRows, writer);
            if (totalInputRows == null) { totalInputRows = inputRows; } else { totalInputRows = totalInputRows.union(inputRows); }
        }
        BaseWriterCommitMessage commitMetadata = (BaseWriterCommitMessage) writer.commit();
        Option<List<String>> fileAbsPaths = Option.of(new ArrayList<>());
        Option<List<String>> fileNames = Option.of(new ArrayList<>());
        // verify write statuses
        assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames, true);
        // verify rows
        Dataset<Row> result = sqlContext.read().parquet(fileAbsPaths.get().toArray(new String[0]));
        assertOutput(totalInputRows, result, instantTime, fileNames, populateMetaFields);
    }
}
/**
 * Returns the monotonic time (ns) at which the longest-quiet unfenced broker's
 * session would expire, or Long.MAX_VALUE when no unfenced brokers exist.
 */
long nextCheckTimeNs() {
    // "first" is the unfenced broker with the earliest last contact.
    BrokerHeartbeatState broker = unfenced.first();
    if (broker == null) {
        return Long.MAX_VALUE;
    } else {
        return broker.lastContactNs + sessionTimeoutNs;
    }
}
/** Exercises nextCheckTimeNs as brokers are touched, go stale, and get fenced in order. */
@Test public void testNextCheckTimeNs() {
    BrokerHeartbeatManager manager = newBrokerHeartbeatManager();
    MockTime time = (MockTime) manager.time();
    assertEquals(Long.MAX_VALUE, manager.nextCheckTimeNs());
    for (int brokerId = 0; brokerId < 4; brokerId++) {
        manager.register(brokerId, true);
    }
    manager.touch(0, false, 0);
    time.sleep(2);
    manager.touch(1, false, 0);
    time.sleep(1);
    manager.touch(2, false, 0);
    time.sleep(1);
    manager.touch(3, false, 0);
    assertEquals(Optional.empty(), manager.findOneStaleBroker());
    assertEquals(10_000_000, manager.nextCheckTimeNs());
    time.sleep(7);
    assertEquals(10_000_000, manager.nextCheckTimeNs());
    assertEquals(Optional.of(0), manager.findOneStaleBroker());
    manager.fence(0);
    assertEquals(12_000_000, manager.nextCheckTimeNs());
    time.sleep(3);
    assertEquals(Optional.of(1), manager.findOneStaleBroker());
    manager.fence(1);
    assertEquals(Optional.of(2), manager.findOneStaleBroker());
    manager.fence(2);
    assertEquals(14_000_000, manager.nextCheckTimeNs());
}
/**
 * Returns the configured input path(s), never {@code null}.
 *
 * <p>Formats that support multiple paths return the {@code filePaths} array; single-path
 * formats wrap {@code filePath} in a one-element array. When nothing has been configured
 * yet, an empty array is returned.
 */
public Path[] getFilePaths() {
    if (supportsMultiPaths()) {
        return this.filePaths == null ? new Path[0] : this.filePaths;
    }
    return this.filePath == null ? new Path[0] : new Path[] {this.filePath};
}
// Verifies the "no paths configured" contract of getFilePaths(): an empty array is
// returned rather than null.
@Test
void testGetPathsWithoutSettingFirst() {
    final DummyFileInputFormat format = new DummyFileInputFormat();
    Path[] paths = format.getFilePaths();
    assertThat(paths).as("Paths should not be null.").isNotNull();
    assertThat(paths).as("Paths should be empty.").isEmpty();
}
// Atomically attempts a state transition to `to`. The transition is applied only when
// the TRANSITIONS table allows it from the current state; otherwise the state is left
// unchanged. Returns true only when the state actually changed (a "transition" from
// `to` to itself reports false, via the lastFrom check).
// lastFrom captures the pre-update state for logging; note that updateAndGet may retry
// the lambda under contention, so lastFrom reflects the final successful attempt.
public boolean tryToMoveTo(State to) {
    AtomicReference<State> lastFrom = new AtomicReference<>();
    State newState = this.state.updateAndGet(from -> {
        lastFrom.set(from);
        if (TRANSITIONS.get(from).contains(to)) {
            return to;
        }
        return from;
    });
    boolean updated = newState == to && lastFrom.get() != to;
    LOG.trace("tryToMoveTo from {} to {} => {}", lastFrom.get(), to, updated);
    return updated;
}
// Verifies that OPERATIONAL is reachable only from STARTED: for every lifecycle state,
// the transition attempt must succeed exactly when starting from STARTED.
@Test
public void can_move_to_OPERATIONAL_from_STARTED_only() {
    for (State state : values()) {
        boolean tryToMoveTo = newLifeCycle(state).tryToMoveTo(OPERATIONAL);
        if (state == STARTED) {
            assertThat(tryToMoveTo).describedAs("from state " + state).isTrue();
        } else {
            assertThat(tryToMoveTo).describedAs("from state " + state).isFalse();
        }
    }
}
// Builds a "successful" RestResult: code 200 with no message and no payload.
public static <T> RestResult<T> success() {
    return RestResult.<T>builder().withCode(200).build();
}
// Verifies RestResultUtils.success(msg, data): code 200 with message and payload set.
@Test
void testSuccessWithMsg() {
    RestResult<String> restResult = RestResultUtils.success("test", "content");
    assertRestResult(restResult, 200, "test", "content", true);
}
// Builds a DeviceDescription from the Junos "get-system-information" reply plus the
// chassis MAC response. Missing hardware model / serial number fall back to UNKNOWN.
// NOTE(review): when only one of OS_NAME/OS_VER is present, the concatenation below
// embeds the literal string "null" in the software version — confirm this is intended.
public static DeviceDescription parseJuniperDescription(DeviceId deviceId, HierarchicalConfiguration sysInfoCfg, String chassisMacAddresses) {
    HierarchicalConfiguration info = sysInfoCfg.configurationAt(SYS_INFO);
    String hw = info.getString(HW_MODEL) == null ? UNKNOWN : info.getString(HW_MODEL);
    String sw = UNKNOWN;
    if (info.getString(OS_NAME) != null || info.getString(OS_VER) != null) {
        sw = info.getString(OS_NAME) + " " + info.getString(OS_VER);
    }
    String serial = info.getString(SER_NUM) == null ? UNKNOWN : info.getString(SER_NUM);
    return new DefaultDeviceDescription(deviceId.uri(), ROUTER, JUNIPER, hw, sw, serial,
            extractChassisId(chassisMacAddresses), DefaultAnnotations.EMPTY);
}
// Parses canned Junos 15.1 XML replies (system info + chassis MAC) and checks the
// resulting DeviceDescription fields against expected values.
@Test
public void testDeviceDescriptionParsedFromJunos15() throws IOException {
    HierarchicalConfiguration getSystemInfoResp = XmlConfigParser.loadXml(
            getClass().getResourceAsStream("/Junos_get-system-information_response_15.1.xml"));
    String chassisText = CharStreams.toString(
            new InputStreamReader(
                    getClass().getResourceAsStream("/Junos_get-chassis-mac-addresses_response_15.1.xml")));
    DeviceDescription expected = new DefaultDeviceDescription(URI.create(DEVICE_ID), ROUTER, "JUNIPER", "mx240", "junos 15.1R5.5", "JN11AC665AFC", new ChassisId("8418889983c0"));
    assertEquals(expected, JuniperUtils.parseJuniperDescription(deviceId, getSystemInfoResp, chassisText));
}
// Applies UI-submitted attributes to this Hg material config. A null map is a no-op.
// The password is only reset when the PASSWORD_CHANGED flag is "1", so an untouched
// password form field does not clobber the stored credential.
@Override
public void setConfigAttributes(Object attributes) {
    if (attributes == null) {
        return;
    }
    super.setConfigAttributes(attributes);
    Map map = (Map) attributes;
    if (map.containsKey(URL)) {
        this.url = new HgUrlArgument((String) map.get(URL));
    }
    if (map.containsKey("userName")) {
        this.userName = (String) map.get("userName");
    }
    if (map.containsKey(PASSWORD_CHANGED) && "1".equals(map.get(PASSWORD_CHANGED))) {
        String passwordToSet = (String) map.get(PASSWORD);
        resetPassword(passwordToSet);
    }
    if (map.containsKey(BRANCH)) {
        setBranchAttribute((String) map.get(BRANCH));
    }
}
// setConfigAttributes(null) must be a no-op and leave the config unchanged.
@Test
void shouldReturnIfAttributeMapIsNull() {
    HgMaterialConfig hgMaterialConfig = hg("", null);
    hgMaterialConfig.setConfigAttributes(null);
    assertThat(hgMaterialConfig).isEqualTo(hg("", null));
}
// Sends a chat-completion request to the OpenAI-compatible endpoint and maps the JSON
// reply onto a message response. Returns null when the HTTP reply is blank. For a
// FunctionPrompt the reply is parsed into a FunctionMessageResponse, otherwise into an
// AiMessageResponse; any non-empty "error" object in the reply is copied onto the response.
@Override
public <R extends MessageResponse<?>> R chat(Prompt<R> prompt, ChatOptions options) {
    Map<String, String> headers = new HashMap<>();
    headers.put("Content-Type", "application/json");
    headers.put("Authorization", "Bearer " + getConfig().getApiKey());
    // user-supplied header customization may override/extend the defaults
    Consumer<Map<String, String>> headersConfig = config.getHeadersConfig();
    if (headersConfig != null) {
        headersConfig.accept(headers);
    }
    String payload = OpenAiLLmUtil.promptToPayload(prompt, config, options, false);
    String endpoint = config.getEndpoint();
    String response = httpClient.post(endpoint + "/v1/chat/completions", headers, payload);
    if (StringUtil.noText(response)) {
        return null;
    }
    if (config.isDebug()) {
        System.out.println(">>>>receive payload:" + response);
    }
    JSONObject jsonObject = JSON.parseObject(response);
    JSONObject error = jsonObject.getJSONObject("error");
    AbstractBaseMessageResponse<?> messageResponse;
    if (prompt instanceof FunctionPrompt) {
        messageResponse = new FunctionMessageResponse(((FunctionPrompt) prompt).getFunctions()
                , functionMessageParser.parse(jsonObject));
    } else {
        messageResponse = new AiMessageResponse(aiMessageParser.parse(jsonObject));
    }
    if (error != null && !error.isEmpty()) {
        messageResponse.setError(true);
        messageResponse.setErrorMessage(error.getString("message"));
        messageResponse.setErrorType(error.getString("type"));
        messageResponse.setErrorCode(error.getString("code"));
    }
    //noinspection unchecked
    return (R) messageResponse;
}
// Manual/integration test: issues a function-calling prompt against the live OpenAI API
// (requires a real API key; the one below is redacted) and prints the resolved result.
@Test()
public void testFunctionCalling() throws InterruptedException {
    OpenAiLlmConfig config = new OpenAiLlmConfig();
    config.setApiKey("sk-rts5NF6n*******");
    OpenAiLlm llm = new OpenAiLlm(config);
    FunctionPrompt prompt = new FunctionPrompt("今天北京的天气怎么样", WeatherFunctions.class);
    FunctionMessageResponse response = llm.chat(prompt);
    Object result = response.getFunctionResult();
    System.out.println(result);
    // "Today it will be dull and overcast in 北京"
}
// Lazily computes and caches the SHA-256 digest of this dependency's file.
// NOTE(review): the lazy initialization is not synchronized; concurrent callers may
// compute the hash more than once (harmless only if determineHashes is idempotent) —
// confirm the threading expectations of this class.
public String getSha256sum() {
    if (sha256sum == null) {
        this.sha256sum = determineHashes(SHA256_HASHING_FUNCTION);
    }
    return sha256sum;
}
// Checks getSha256sum() against the known digest of a bundled jar fixture.
@Test
public void testGetSha256sum() {
    File file = BaseTest.getResourceAsFile(this, "struts2-core-2.1.2.jar");
    Dependency instance = new Dependency(file);
    String expResult = "5c1847a10800027254fcd0073385cceb46b1dacee061f3cd465e314bec592e81";
    String result = instance.getSha256sum();
    assertEquals(expResult, result);
}
// Mock-aware cluster invocation: depending on the method-level "mock" URL parameter this
// either invokes normally (no mock), short-circuits to the mock ("force:..."), or falls
// back to the mock when the real invocation fails with a non-business RpcException.
// Business exceptions (isBiz) are always rethrown to the caller, never mocked away.
@Override
public Result invoke(Invocation invocation) throws RpcException {
    Result result;
    String value = getUrl().getMethodParameter(
                    RpcUtils.getMethodName(invocation), MOCK_KEY, Boolean.FALSE.toString())
            .trim();
    if (ConfigUtils.isEmpty(value)) {
        // no mock
        result = this.invoker.invoke(invocation);
    } else if (value.startsWith(FORCE_KEY)) {
        if (logger.isWarnEnabled()) {
            logger.warn(
                    CLUSTER_FAILED_MOCK_REQUEST,
                    "force mock",
                    "",
                    "force-mock: " + RpcUtils.getMethodName(invocation) + " force-mock enabled , url : " + getUrl());
        }
        // force:direct mock
        result = doMockInvoke(invocation, null);
    } else {
        // fail-mock
        try {
            result = this.invoker.invoke(invocation);
            // fix:#4585 — an RpcException returned inside the Result (not thrown) must be
            // handled the same way as a thrown one
            if (result.getException() != null && result.getException() instanceof RpcException) {
                RpcException rpcException = (RpcException) result.getException();
                if (rpcException.isBiz()) {
                    throw rpcException;
                } else {
                    result = doMockInvoke(invocation, rpcException);
                }
            }
        } catch (RpcException e) {
            if (e.isBiz()) {
                throw e;
            }
            if (logger.isWarnEnabled()) {
                logger.warn(
                        CLUSTER_FAILED_MOCK_REQUEST,
                        "failed to mock invoke",
                        "",
                        "fail-mock: " + RpcUtils.getMethodName(invocation) + " fail-mock enabled , url : " + getUrl(),
                        e);
            }
            result = doMockInvoke(invocation, e);
        }
    }
    return result;
}
// A "force:throw <custom exception>" mock configuration must make the cluster invoker
// throw the configured exception type instead of calling the remote service.
@Test
void testMockInvokerFromOverride_Invoke_force_throwCustemException() throws Throwable {
    URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
            .addParameter(
                    REFER_KEY,
                    URL.encode(
                            PATH_KEY + "=" + IHelloService.class.getName() + "&" + "getBoolean2.mock=force:throw org.apache.dubbo.rpc.cluster.support.wrapper.MyMockException"))
            .addParameter("invoke_return_error", "true");
    Invoker<IHelloService> cluster = getClusterInvoker(url);
    // Configured with mock
    RpcInvocation invocation = new RpcInvocation();
    invocation.setMethodName("getBoolean2");
    try {
        cluster.invoke(invocation).recreate();
        Assertions.fail();
    } catch (MyMockException e) {
        // expected: the forced mock throws the custom exception
    }
}
// Runs a single scenario: builds a runner bound to the KIE classpath container and
// delegates JUnit notification handling to it.
@Override
protected void runChild(ScenarioRunnerDTO child, RunNotifier notifier) {
    KieContainer kieClasspathContainer = getKieContainer();
    AbstractScenarioRunner scenarioRunner = newRunner(kieClasspathContainer, child);
    scenarioRunner.run(notifier);
}
// The activator must delegate the run to the runner created for the scenario DTO.
@Test
public void runChildTest() throws InitializationError {
    getScenarioJunitActivator().runChild(scenarioRunnerDTOMock, runNotifierMock);
    verify(runnerMock, times(1)).run(runNotifierMock);
}
// EJB monitoring interceptor: measures each intercepted invocation with EJB_COUNTER,
// unless monitoring is disabled or the counter is not displayed (then it is a pass-through).
@AroundInvoke
public Object intercept(InvocationContext context) throws Exception { // NOPMD
    // this method is called by the EJB container thanks to the AroundInvoke annotation
    if (DISABLED || !EJB_COUNTER.isDisplayed()) {
        return context.proceed();
    }
    // name identifying the request
    final String requestName = getRequestName(context);
    boolean systemError = false;
    try {
        EJB_COUNTER.bindContextIncludingCpu(requestName);
        return context.proceed();
    } catch (final Error e) {
        // Error is caught to record system errors,
        // but not Exception, which are generally business failures
        systemError = true;
        throw e;
    } finally {
        // record the request in the statistics
        EJB_COUNTER.addRequestForCurrentContext(systemError);
    }
}
// Exercises the interceptor with monitoring hidden (no count recorded), visible (one
// count recorded), and a throwing invocation (count recorded as a system error).
@Test
public void testInvoke() throws Exception {
    final Counter ejbCounter = MonitoringProxy.getEjbCounter();
    ejbCounter.clear();
    final MonitoringInterceptor interceptor = new MonitoringInterceptor();
    ejbCounter.setDisplayed(false);
    interceptor.intercept(new InvokeContext(false));
    assertSame("requestsCount", 0, ejbCounter.getRequestsCount());
    ejbCounter.setDisplayed(true);
    interceptor.intercept(new InvokeContext(false));
    assertSame("requestsCount", 1, ejbCounter.getRequestsCount());
    ejbCounter.clear();
    try {
        interceptor.intercept(new InvokeContext(true));
    } catch (final Error e) {
        assertSame("requestsCount", 1, ejbCounter.getRequestsCount());
    }
}
/**
 * Returns {@code true} when {@code s} is {@code null} or blank per
 * {@link String#isBlank()} (empty or only whitespace code points).
 *
 * @param s the string to check, may be {@code null}
 * @return {@code true} for {@code null}, empty, or whitespace-only input
 */
public static boolean isNullOrEmptyAfterTrim(String s) {
    return s == null || s.isBlank();
}
// Boundary cases for isNullOrEmptyAfterTrim: NUL and the non-breaking space are NOT
// whitespace per String#isBlank, while space/tab/newline/em-quad and null/empty are blank.
@Test
void isNotBlank() {
    assertFalse(StringUtil.isNullOrEmptyAfterTrim("string"));
    // null unicode character
    assertFalse(StringUtil.isNullOrEmptyAfterTrim("\u0000"));
    // Non-breaking space
    assertFalse(StringUtil.isNullOrEmptyAfterTrim("\u00A0"));
    assertTrue(StringUtil.isNullOrEmptyAfterTrim(" "));
    assertTrue(StringUtil.isNullOrEmptyAfterTrim(""));
    // Em quad
    assertTrue(StringUtil.isNullOrEmptyAfterTrim("\u2001"));
    // Tab
    assertTrue(StringUtil.isNullOrEmptyAfterTrim("\t"));
    assertTrue(StringUtil.isNullOrEmptyAfterTrim("\n\r "));
    assertTrue(StringUtil.isNullOrEmptyAfterTrim(null));
}
// Seeds the logback context with Android environment properties (data dir, optional
// external storage dir, package name, version code/name) so configuration files can
// reference them. The external-storage property is only set when a mount is present.
public void setupProperties(Context context) {
    // legacy properties
    context.putProperty(CoreConstants.DATA_DIR_KEY, getFilesDirectoryPath());
    final String extDir = getMountedExternalStorageDirectoryPath();
    if (extDir != null) {
        context.putProperty(CoreConstants.EXT_DIR_KEY, extDir);
    }
    context.putProperty(CoreConstants.PACKAGE_NAME_KEY, getPackageName());
    context.putProperty(CoreConstants.VERSION_CODE_KEY, getVersionCode());
    context.putProperty(CoreConstants.VERSION_NAME_KEY, getVersionName());
}
// All context properties must be unset before setupProperties and populated from the
// ContextUtil getters afterwards.
@Test
public void setupProperties() {
    LoggerContext loggerContext = new LoggerContext();
    assertThat(loggerContext.getProperty(CoreConstants.DATA_DIR_KEY), is(nullValue()));
    assertThat(loggerContext.getProperty(CoreConstants.EXT_DIR_KEY), is(nullValue()));
    assertThat(loggerContext.getProperty(CoreConstants.VERSION_CODE_KEY), is(nullValue()));
    assertThat(loggerContext.getProperty(CoreConstants.VERSION_NAME_KEY), is(nullValue()));
    assertThat(loggerContext.getProperty(CoreConstants.PACKAGE_NAME_KEY), is(nullValue()));
    contextUtil.setupProperties(loggerContext);
    assertThat(loggerContext.getProperty(CoreConstants.DATA_DIR_KEY), is(contextUtil.getFilesDirectoryPath()));
    assertThat(loggerContext.getProperty(CoreConstants.EXT_DIR_KEY), is(contextUtil.getMountedExternalStorageDirectoryPath()));
    assertThat(loggerContext.getProperty(CoreConstants.VERSION_CODE_KEY), is(contextUtil.getVersionCode()));
    assertThat(loggerContext.getProperty(CoreConstants.VERSION_NAME_KEY), is(contextUtil.getVersionName()));
    assertThat(loggerContext.getProperty(CoreConstants.PACKAGE_NAME_KEY), is(contextUtil.getPackageName()));
}
/**
 * @return the plugin's configured image file path
 */
@Override
public String getImageFile() {
    return this.imageFile;
}
// merge(null) must keep existing values; merging a fragment overrides the image file,
// but a fragment returning null must not clear the previously merged value.
@Test
public void testFragmentMergeWithNull() {
    PluginInterface plugin = new Plugin(
            new String[] {"plugintest"}, BasePluginType.class, String.class, "", "plugin test",
            "", "a", false, null, false, new HashMap<>(), Collections.emptyList(), null, null,
            null, null, null );
    plugin.merge( null );
    assertEquals( "a", plugin.getImageFile() );
    PluginInterface fragment = mock( PluginInterface.class );
    when( fragment.getImageFile() ).thenReturn( "b" );
    plugin.merge( fragment );
    assertEquals( "b", plugin.getImageFile() );
    when( fragment.getImageFile() ).thenReturn( null );
    plugin.merge( fragment );
    assertEquals( "b", plugin.getImageFile() );
}
/**
 * Returns a reverse-range iterator over all underlying stores for this store name.
 * Each store's iterator is obtained lazily; an {@link InvalidStateStoreException} from a
 * store is rethrown with a hint that the store may have migrated during a rebalance.
 */
@Override
public KeyValueIterator<K, V> reverseRange(final K from, final K to) {
    final NextIteratorFunction<K, V, ReadOnlyKeyValueStore<K, V>> nextIteratorFunction = store -> {
        try {
            return store.reverseRange(from, to);
        } catch (final InvalidStateStoreException e) {
            throw new InvalidStateStoreException("State store is not available anymore and may have been migrated to another instance; please re-discover its location from the state metadata.");
        }
    };
    final List<ReadOnlyKeyValueStore<K, V>> stores = storeProvider.stores(storeName, storeType);
    return new DelegatingPeekingKeyValueIterator<>(
            storeName,
            new CompositeKeyValueIterator<>(stores.iterator(), nextIteratorFunction));
}
// reverseRange must surface InvalidStateStoreException while the store is rebalancing.
@Test
public void shouldThrowInvalidStoreExceptionOnReverseRangeDuringRebalance() {
    assertThrows(InvalidStateStoreException.class, () -> rebalancing().reverseRange("anything", "something"));
}
/**
 * Fetches one page of voucher summaries, delegating directly to the repository.
 */
public Page<VoucherSimpleResponse> findAllWithPaging(final Pageable pageable) {
    final Page<VoucherSimpleResponse> page = voucherRepository.findAllWithPaging(pageable);
    return page;
}
// Paged voucher lookup: a single stored voucher yields one page element carrying the
// voucher/coupon ids and no further pages.
@Test
void 바우처를_페이징_조회한다() {
    // given
    PageRequest pageRequest = PageRequest.of(0, 10);
    // when
    Page<VoucherSimpleResponse> result = voucherRepository.findAllWithPaging(pageRequest);
    // then
    assertSoftly(softly -> {
        softly.assertThat(result.getContent()).hasSize(1);
        softly.assertThat(result.getContent().get(0).id()).isEqualTo(voucher.getId());
        softly.assertThat(result.getContent().get(0).couponId()).isEqualTo(coupon.getId());
        softly.assertThat(result.hasNext()).isEqualTo(false);
    });
}
// Replaces the user's custom checklist in one transaction: validates the selected
// question ids (non-empty, no duplicates), deletes all existing custom questions for
// the user, then persists the new selection.
@Transactional
public void updateCustomChecklist(User user, CustomChecklistUpdateRequest request) {
    List<Integer> questionIds = request.questionIds();
    validateCustomChecklistQuestionsIsNotEmpty(questionIds);
    validateCustomChecklistQuestionsDuplication(questionIds);
    customChecklistQuestionRepository.deleteAllByUser(user);
    List<CustomChecklistQuestion> customChecklistQuestions = questionIds.stream()
            .map(Question::fromId)
            .map(question -> new CustomChecklistQuestion(user, question))
            .toList();
    customChecklistQuestionRepository.saveAll(customChecklistQuestions);
}
// Updating with an empty question selection must fail with CUSTOM_CHECKLIST_QUESTION_EMPTY.
@DisplayName("커스텀 체크리스트 업데이트 실패 : 선택한 질문 개수가 0개일 때")
@Test
void updateCustomChecklist_empty_exception() {
    // given
    CustomChecklistUpdateRequest request = CUSTOM_CHECKLIST_UPDATE_REQUEST_EMPTY;
    // when & then
    assertThatThrownBy(() -> checklistService.updateCustomChecklist(USER1, request))
            .isInstanceOf(BangggoodException.class)
            .hasMessage(ExceptionCode.CUSTOM_CHECKLIST_QUESTION_EMPTY.getMessage());
}
// Routes the invoker list for `targetService`: when there is nothing to filter, returns
// the original list; otherwise narrows it either by request-based routing or by the
// configured flow rules, then delegates to the next handler in the chain.
@Override
public Object handle(String targetService, List<Object> invokers, Object invocation, Map<String, String> queryMap, String serviceInterface) {
    if (!shouldHandle(invokers)) {
        return invokers;
    }
    List<Object> targetInvokers;
    if (routerConfig.isUseRequestRouter()) {
        targetInvokers = getTargetInvokersByRequest(targetService, invokers, invocation);
    } else {
        targetInvokers = getTargetInvokersByRules(invokers, invocation, queryMap, targetService, serviceInterface);
    }
    return super.handle(targetService, targetInvokers, invocation, queryMap, serviceInterface);
}
// Flow routing with both global and service-level match rules: the "bar=bar1"
// attachment must select only the 1.0.1 invoker; rules are reset afterwards.
@Test
public void testGetTargetInvokersByFlowRulesWithGlobalRules() {
    // initialize the routing rule
    RuleInitializationUtils.initGlobalAndServiceFlowMatchRules();
    List<Object> invokers = new ArrayList<>();
    ApacheInvoker<Object> invoker1 = new ApacheInvoker<>("1.0.0");
    invokers.add(invoker1);
    ApacheInvoker<Object> invoker2 = new ApacheInvoker<>("1.0.1");
    invokers.add(invoker2);
    Invocation invocation = new ApacheInvocation();
    invocation.setAttachment("bar", "bar1");
    Map<String, String> queryMap = new HashMap<>();
    queryMap.put("side", "consumer");
    queryMap.put("group", "fooGroup");
    queryMap.put("version", "0.0.1");
    queryMap.put("interface", "io.sermant.foo.FooTest");
    DubboCache.INSTANCE.putApplication("io.sermant.foo.FooTest", "foo");
    List<Object> targetInvokers = (List<Object>) flowRouteHandler.handle(
            DubboCache.INSTANCE.getApplication("io.sermant.foo.FooTest")
            , invokers, invocation, queryMap, "io.sermant.foo.FooTest");
    Assert.assertEquals(1, targetInvokers.size());
    Assert.assertEquals(invoker2, targetInvokers.get(0));
    // clean up shared rule state so other tests are unaffected
    ConfigCache.getLabel(RouterConstant.DUBBO_CACHE_NAME).resetRouteRule(Collections.emptyMap());
    ConfigCache.getLabel(RouterConstant.DUBBO_CACHE_NAME).resetGlobalRule(Collections.emptyList());
}
// Parses the interpreter properties into per-prefix Properties buckets
// ("<prefix>.<key>=<value>"), drops any prefix missing the mandatory driver/url keys
// (except the shared "common" prefix), and initializes result limits and the SQL splitter.
@Override
public void open() {
    super.open();
    for (String propertyKey : properties.stringPropertyNames()) {
        LOGGER.debug("propertyKey: {}", propertyKey);
        // split only on the first '.', so keys may themselves contain dots
        String[] keyValue = propertyKey.split("\\.", 2);
        if (2 == keyValue.length) {
            LOGGER.debug("key: {}, value: {}", keyValue[0], keyValue[1]);
            Properties prefixProperties;
            if (basePropertiesMap.containsKey(keyValue[0])) {
                prefixProperties = basePropertiesMap.get(keyValue[0]);
            } else {
                prefixProperties = new Properties();
                basePropertiesMap.put(keyValue[0].trim(), prefixProperties);
            }
            prefixProperties.put(keyValue[1].trim(), getProperty(propertyKey));
        }
    }
    // validate each prefix bucket; incomplete ones are collected and removed afterwards
    // to avoid modifying the map while iterating it
    Set<String> removeKeySet = new HashSet<>();
    for (String key : basePropertiesMap.keySet()) {
        if (!COMMON_KEY.equals(key)) {
            Properties properties = basePropertiesMap.get(key);
            if (!properties.containsKey(DRIVER_KEY) || !properties.containsKey(URL_KEY)) {
                LOGGER.error("{} will be ignored. {}.{} and {}.{} is mandatory.",
                        key, DRIVER_KEY, key, key, URL_KEY);
                removeKeySet.add(key);
            }
        }
    }
    for (String key : removeKeySet) {
        basePropertiesMap.remove(key);
    }
    LOGGER.debug("JDBC PropertiesMap: {}", basePropertiesMap);
    setMaxLineResults();
    setMaxRows();
    //TODO(zjffdu) Set different sql splitter for different sql dialects.
    this.sqlSplitter = new SqlSplitter();
}
// A row whose NAME column is NULL must be rendered as the literal "null" in the
// interpreter's TABLE output.
@Test
void testSelectQueryWithNull() throws IOException, InterpreterException {
    Properties properties = new Properties();
    properties.setProperty("common.max_count", "1000");
    properties.setProperty("common.max_retry", "3");
    properties.setProperty("default.driver", "org.h2.Driver");
    properties.setProperty("default.url", getJdbcConnection());
    properties.setProperty("default.user", "");
    properties.setProperty("default.password", "");
    JDBCInterpreter t = new JDBCInterpreter(properties);
    t.open();
    String sqlQuery = "select * from test_table WHERE ID = 'c'";
    InterpreterResult interpreterResult = t.interpret(sqlQuery, context);
    List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
    assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
    assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType());
    assertEquals("ID\tNAME\nc\tnull\n", resultMessages.get(0).getData());
}
/**
 * Notifies every registered {@link NewUserHandler} that a user was created.
 */
public void onNewUser(NewUserHandler.Context context) {
    LOG.debug("User created: {}. Notifying {} handlers...", context.getLogin(), NewUserHandler.class.getSimpleName());
    for (NewUserHandler registeredHandler : handlers) {
        registeredHandler.doOnNewUser(context);
    }
}
// Every registered handler must be invoked exactly once for a new-user event.
@Test
public void execute_handlers_on_new_user() {
    NewUserHandler handler1 = mock(NewUserHandler.class);
    NewUserHandler handler2 = mock(NewUserHandler.class);
    NewUserNotifier notifier = new NewUserNotifier(new NewUserHandler[]{handler1, handler2});
    notifier.onNewUser(context);
    verify(handler1).doOnNewUser(context);
    verify(handler2).doOnNewUser(context);
}
/**
 * SMALLINT multiplication operator. Computes the product in 64-bit space and narrows it
 * to the 16-bit range via {@link Shorts#checkedCast}; an out-of-range product is
 * reported as NUMERIC_VALUE_OUT_OF_RANGE.
 */
@ScalarOperator(MULTIPLY)
@SqlType(StandardTypes.SMALLINT)
public static long multiply(@SqlType(StandardTypes.SMALLINT) long left, @SqlType(StandardTypes.SMALLINT) long right)
{
    final long product = left * right;
    try {
        return Shorts.checkedCast(product);
    }
    catch (IllegalArgumentException e) {
        throw new PrestoException(NUMERIC_VALUE_OUT_OF_RANGE, format("smallint multiplication overflow: %s * %s", left, right), e);
    }
}
// In-range SMALLINT products succeed; Short.MAX_VALUE * 2 must raise a numeric overflow.
@Test
public void testMultiply() {
    assertFunction("SMALLINT'37' * SMALLINT'37'", SMALLINT, (short) (37 * 37));
    assertFunction("SMALLINT'37' * SMALLINT'17'", SMALLINT, (short) (37 * 17));
    assertFunction("SMALLINT'17' * SMALLINT'37'", SMALLINT, (short) (17 * 37));
    assertFunction("SMALLINT'17' * SMALLINT'17'", SMALLINT, (short) (17 * 17));
    assertNumericOverflow(format("SMALLINT'%s' * SMALLINT'2'", Short.MAX_VALUE), "smallint multiplication overflow: 32767 * 2");
}
/**
 * Evaluates any configured disrupt context for this request, then delegates the send to
 * the wrapped client.
 */
@Override
public <T> ResponseFuture<T> sendRequest(Request<T> request, RequestContext requestContext) {
    doEvaluateDisruptContext(request, requestContext);
    final ResponseFuture<T> responseFuture = _client.sendRequest(request, requestContext);
    return responseFuture;
}
// With a disrupt context present, sendRequest must still delegate to the underlying
// client exactly once.
@Test
public void testSendRequest5() {
    when(_controller.getDisruptContext(any(String.class), any(ResourceMethod.class))).thenReturn(_disrupt);
    _client.sendRequest(_request, _callback);
    verify(_underlying, times(1)).sendRequest(eq(_request), any(RequestContext.class), eq(_callback));
}
// Runs the configured on-cancel task, bracketing it with tagged console output; any
// failure is reported to the console and logged, never rethrown to the caller.
public void cancel(DefaultGoPublisher publisher, EnvironmentVariableContext environmentVariableContext, TaskExtension taskExtension, ArtifactExtension artifactExtension, Charset consoleLogCharset) {
    publisher.taggedConsumeLineWithPrefix(DefaultGoPublisher.CANCEL_TASK_START, "On Cancel Task: " + cancelBuilder.getDescription()); // odd capitalization, but consistent with UI
    try {
        cancelBuilder.build(publisher, environmentVariableContext, taskExtension, artifactExtension, null, consoleLogCharset);
        // As this message will output before the running task outputs its task status, do not use the same
        // wording (i.e. "Task status: %s") as the order of outputted lines may be confusing
        publisher.taggedConsumeLineWithPrefix(DefaultGoPublisher.CANCEL_TASK_PASS, "On Cancel Task completed");
    } catch (Exception e) {
        publisher.taggedConsumeLineWithPrefix(DefaultGoPublisher.CANCEL_TASK_FAIL, "On Cancel Task failed");
        LOGGER.error("", e);
    }
}
// A nonexistent cancel executable must surface an execution error on the console rather
// than fail silently. Disabled on Windows, where command resolution differs.
@Test
@DisabledOnOs(OS.WINDOWS)
void shouldReportErrorWhenCancelCommandDoesNotExist() {
    StubBuilder stubBuilder = new StubBuilder();
    CommandBuilder cancelBuilder = new CommandBuilder("echo2", "cancel task", new File("."), new RunIfConfigs(FAILED), stubBuilder, "");
    CommandBuilder builder = new CommandBuilder("echo", "normal task", new File("."), new RunIfConfigs(FAILED), cancelBuilder, "");
    builder.cancel(goPublisher, new EnvironmentVariableContext(), null, null, UTF_8);
    assertThat(goPublisher.getMessage()).contains("Error happened while attempting to execute 'echo2 cancel task'");
}
/**
 * Assembles the full {@link ArtifactPluginInfo} for a plugin descriptor by querying the
 * extension for its store-config, publish-artifact and fetch-artifact settings, its
 * icon image, and its capabilities.
 */
@Override
public ArtifactPluginInfo pluginInfoFor(GoPluginDescriptor descriptor) {
    PluggableInstanceSettings storeConfig = storeConfigMetadata(descriptor.id());
    PluggableInstanceSettings publishConfig = publishArtifactMetadata(descriptor.id());
    PluggableInstanceSettings fetchConfig = fetchArtifactMetadata(descriptor.id());
    Image icon = image(descriptor.id());
    return new ArtifactPluginInfo(descriptor, storeConfig, publishConfig, fetchConfig, icon, getCapabilities(descriptor.id()));
}
// The built plugin info must carry fetch-artifact settings composed of the extension's
// metadata and view template.
@Test
public void shouldBuildPluginInfoWithFetchArtifactConfigSettings() {
    GoPluginDescriptor descriptor = GoPluginDescriptor.builder().id("plugin1").build();
    List<PluginConfiguration> pluginConfigurations = List.of(
            new PluginConfiguration("FILENAME", new Metadata(true, false)),
            new PluginConfiguration("SECURE", new Metadata(true, true))
    );
    when(extension.getFetchArtifactMetadata(descriptor.id())).thenReturn(pluginConfigurations);
    when(extension.getFetchArtifactView(descriptor.id())).thenReturn("fetch_artifact_view");
    ArtifactPluginInfo pluginInfo = new ArtifactPluginInfoBuilder(extension).pluginInfoFor(descriptor);
    assertThat(pluginInfo.getFetchArtifactSettings(), is(new PluggableInstanceSettings(pluginConfigurations, new PluginView("fetch_artifact_view"))));
}
/**
 * Executes a SHOW statement by dispatching it to the global state manager's show
 * executor visitor and returns the produced result set.
 */
public static ShowResultSet execute(ShowStmt statement, ConnectContext context) {
    ShowExecutor executor = GlobalStateMgr.getCurrentState().getShowExecutor();
    return executor.showExecutorVisitor.visit(statement, context);
}
// SHOW COLUMNS against an unknown database must raise SemanticException.
// NOTE(review): everything after the first ShowExecutor.execute(...) is unreachable —
// the expected exception aborts the method, so the "unknown table" half never runs;
// consider splitting into two test methods.
@Test
public void testShowColumnFromUnknownTable() throws AnalysisException, DdlException {
    ctx.setGlobalStateMgr(globalStateMgr);
    ctx.setQualifiedUser("testUser");
    ShowColumnStmt stmt = new ShowColumnStmt(new TableName("emptyDb", "testTable"), null, null, false);
    com.starrocks.sql.analyzer.Analyzer.analyze(stmt, ctx);
    expectedEx.expect(SemanticException.class);
    expectedEx.expectMessage("Unknown database 'emptyDb'");
    ShowExecutor.execute(stmt, ctx);
    // empty table
    stmt = new ShowColumnStmt(new TableName("testDb", "emptyTable"), null, null, true);
    com.starrocks.sql.analyzer.Analyzer.analyze(stmt, ctx);
    expectedEx.expect(SemanticException.class);
    expectedEx.expectMessage("Unknown table 'testDb.emptyTable'");
    ShowExecutor.execute(stmt, ctx);
}
// Releases this shared slot: transitions ALLOCATED -> RELEASING -> RELEASED while
// releasing every allocated logical slot. Must only be called in the ALLOCATED state.
@Override
public void release(Throwable cause) {
    LOG.debug("Release shared slot ({})", physicalSlotRequestId);
    Preconditions.checkState(
            state == State.ALLOCATED, "The shared slot has already been released.");
    // ensures that we won't call the external release callback if there are still logical slots
    // to release
    state = State.RELEASING;
    // copy the logical slot collection to avoid ConcurrentModificationException
    // if logical slot releases cause cancellation of other executions
    // which will try to call returnLogicalSlot and modify allocatedLogicalSlots collection
    final List<LogicalSlot> logicalSlotsToRelease =
            new ArrayList<>(allocatedLogicalSlots.values());
    for (LogicalSlot allocatedLogicalSlot : logicalSlotsToRelease) {
        // this will also cause the logical slot to be returned
        allocatedLogicalSlot.releaseSlot(cause);
    }
    allocatedLogicalSlots.clear();
    state = State.RELEASED;
}
// Releasing a shared slot with no allocated logical slots must not invoke the external
// release callback.
@Test
void testReleaseDoesNotTriggersExternalRelease() {
    final TestingPhysicalSlot physicalSlot = TestingPhysicalSlot.builder().build();
    final AtomicBoolean externalReleaseInitiated = new AtomicBoolean(false);
    final SharedSlot sharedSlot =
            new SharedSlot(
                    new SlotRequestId(),
                    physicalSlot,
                    false,
                    () -> externalReleaseInitiated.set(true));
    sharedSlot.release(new Exception("test"));
    assertThat(externalReleaseInitiated).isFalse();
}
// Maps a Flink LogicalType to the equivalent ORC TypeDescription, recursing into
// ARRAY/MAP/ROW element types. The type is first made nullable (copy(true)) because ORC
// schemas carry no nullability. Unsupported roots raise UnsupportedOperationException.
public static TypeDescription logicalTypeToOrcType(LogicalType type) {
    type = type.copy(true);
    switch (type.getTypeRoot()) {
        case CHAR:
            return TypeDescription.createChar().withMaxLength(((CharType) type).getLength());
        case VARCHAR:
            int len = ((VarCharType) type).getLength();
            // an unbounded VARCHAR maps to ORC's plain string type
            if (len == VarCharType.MAX_LENGTH) {
                return TypeDescription.createString();
            } else {
                return TypeDescription.createVarchar().withMaxLength(len);
            }
        case BOOLEAN:
            return TypeDescription.createBoolean();
        case VARBINARY:
            // only the unbounded BYTES form is representable in ORC
            if (type.equals(DataTypes.BYTES().getLogicalType())) {
                return TypeDescription.createBinary();
            } else {
                throw new UnsupportedOperationException(
                        "Not support other binary type: " + type);
            }
        case DECIMAL:
            DecimalType decimalType = (DecimalType) type;
            return TypeDescription.createDecimal()
                    .withScale(decimalType.getScale())
                    .withPrecision(decimalType.getPrecision());
        case TINYINT:
            return TypeDescription.createByte();
        case SMALLINT:
            return TypeDescription.createShort();
        case INTEGER:
            return TypeDescription.createInt();
        case BIGINT:
            return TypeDescription.createLong();
        case FLOAT:
            return TypeDescription.createFloat();
        case DOUBLE:
            return TypeDescription.createDouble();
        case DATE:
            return TypeDescription.createDate();
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            return TypeDescription.createTimestamp();
        case ARRAY:
            ArrayType arrayType = (ArrayType) type;
            return TypeDescription.createList(logicalTypeToOrcType(arrayType.getElementType()));
        case MAP:
            MapType mapType = (MapType) type;
            return TypeDescription.createMap(
                    logicalTypeToOrcType(mapType.getKeyType()),
                    logicalTypeToOrcType(mapType.getValueType()));
        case ROW:
            RowType rowType = (RowType) type;
            TypeDescription struct = TypeDescription.createStruct();
            for (int i = 0; i < rowType.getFieldCount(); i++) {
                struct.addField(
                        rowType.getFieldNames().get(i),
                        logicalTypeToOrcType(rowType.getChildren().get(i)));
            }
            return struct;
        default:
            throw new UnsupportedOperationException("Unsupported type: " + type);
    }
}
// Round-trips each supported Flink DataType through logicalTypeToOrcType and compares
// against the expected ORC type string, including nested array/map/row types.
@Test
void testLogicalTypeToOrcType() {
    test("boolean", DataTypes.BOOLEAN());
    test("char(123)", DataTypes.CHAR(123));
    test("varchar(123)", DataTypes.VARCHAR(123));
    test("string", DataTypes.STRING());
    test("binary", DataTypes.BYTES());
    test("tinyint", DataTypes.TINYINT());
    test("smallint", DataTypes.SMALLINT());
    test("int", DataTypes.INT());
    test("bigint", DataTypes.BIGINT());
    test("float", DataTypes.FLOAT());
    test("double", DataTypes.DOUBLE());
    test("date", DataTypes.DATE());
    test("timestamp", DataTypes.TIMESTAMP());
    test("array<float>", DataTypes.ARRAY(DataTypes.FLOAT()));
    test("map<float,bigint>", DataTypes.MAP(DataTypes.FLOAT(), DataTypes.BIGINT()));
    test(
            "struct<int0:int,str1:string,double2:double,row3:struct<int0:int,int1:int>>",
            DataTypes.ROW(
                    DataTypes.FIELD("int0", DataTypes.INT()),
                    DataTypes.FIELD("str1", DataTypes.STRING()),
                    DataTypes.FIELD("double2", DataTypes.DOUBLE()),
                    DataTypes.FIELD(
                            "row3",
                            DataTypes.ROW(
                                    DataTypes.FIELD("int0", DataTypes.INT()),
                                    DataTypes.FIELD("int1", DataTypes.INT())))));
    test("decimal(4,2)", DataTypes.DECIMAL(4, 2));
}
// Launches a new input on this node: validates global/local scope, builds the
// MessageInput (rejecting cloud-incompatible types in cloud mode), checks its
// configuration, persists it, and returns 201 with the new input's location URI.
@POST
@Timed
@ApiOperation(
        value = "Launch input on this node",
        response = InputCreated.class
)
@ApiResponses(value = {
        @ApiResponse(code = 404, message = "No such input type registered"),
        @ApiResponse(code = 400, message = "Missing or invalid configuration"),
        @ApiResponse(code = 400, message = "Type is exclusive and already has input running")
})
@RequiresPermissions(RestPermissions.INPUTS_CREATE)
@AuditEvent(type = AuditEventTypes.MESSAGE_INPUT_CREATE)
public Response create(@ApiParam(name = "JSON body", required = true) @Valid @NotNull InputCreateRequest lr) throws ValidationException {
    try {
        throwBadRequestIfNotGlobal(lr);
        // TODO Configuration type values need to be checked. See ConfigurationMapConverter.convertValues()
        final MessageInput messageInput = messageInputFactory.create(lr, getCurrentUser().getName(), lr.node());
        if (config.isCloud() && !messageInput.isCloudCompatible()) {
            throw new BadRequestException(String.format(Locale.ENGLISH,
                    "The input type <%s> is not allowed in the cloud environment!", lr.type()));
        }
        messageInput.checkConfiguration();
        final Input input = this.inputService.create(messageInput.asMap());
        final String newId = inputService.save(input);
        final URI inputUri = getUriBuilderToSelf().path(InputsResource.class)
                .path("{inputId}")
                .build(newId);
        return Response.created(inputUri).entity(InputCreated.create(newId)).build();
    } catch (NoSuchInputTypeException e) {
        LOG.error("There is no such input type registered.", e);
        throw new NotFoundException("There is no such input type registered.", e);
    } catch (ConfigurationException e) {
        LOG.error("Missing or invalid input configuration.", e);
        throw new BadRequestException("Missing or invalid input configuration.", e);
    }
}
// A cloud-compatible input created while running in cloud mode must return HTTP 201.
@Test
public void testCreateCloudCompatibleInputInCloud() throws Exception {
    when(configuration.isCloud()).thenReturn(true);
    when(inputCreateRequest.global()).thenReturn(true);
    when(messageInput.isCloudCompatible()).thenReturn(true);
    when(messageInputFactory.create(any(), any(), any())).thenReturn(messageInput);
    when(inputService.save(any())).thenReturn("id");
    assertThat(inputsResource.create(inputCreateRequest).getStatus()).isEqualTo(201);
}
/**
 * Extracts the port from a {@code "<host>:<port>"} string.
 *
 * @param addr the address string, expected to contain exactly one ':' separator
 * @return the parsed port number
 * @throws IllegalArgumentException if the string is not exactly {@code <host>:<port>}
 *         or the port is not a valid integer (the {@link NumberFormatException} is
 *         preserved as the cause, keeping the documented exception contract)
 */
public static int getPortFromHostPortString(String addr)
    throws IllegalArgumentException {
  String[] hostport = addr.split(":");
  if (hostport.length != 2) {
    String errorMsg = "Address should be <host>:<port>, but it is " + addr;
    throw new IllegalArgumentException(errorMsg);
  }
  try {
    return Integer.parseInt(hostport[1]);
  } catch (NumberFormatException e) {
    // Re-throw with a message that names the offending input instead of the bare
    // NumberFormatException text; still an IllegalArgumentException for callers.
    throw new IllegalArgumentException(
        "Port must be a number in <host>:<port>, but it is " + addr, e);
  }
}
// A valid host:port parses the port; a missing port or a non-numeric port raises
// IllegalArgumentException (NumberFormatException is an IllegalArgumentException subtype).
@Test
public void testGetPortFromHostPortString() throws Exception {
    assertEquals(1002, NetUtils.getPortFromHostPortString("testHost:1002"));
    LambdaTestUtils.intercept(IllegalArgumentException.class,
            () -> NetUtils.getPortFromHostPortString("testHost"));
    LambdaTestUtils.intercept(IllegalArgumentException.class,
            () -> NetUtils.getPortFromHostPortString("testHost:randomString"));
}
/**
 * Determines whether a message stanza should be carbon-copied to other
 * connected resources of the user, following the eligibility rules of
 * XEP-0280 (Message Carbons), including the special-casing of MUC traffic.
 *
 * The order of the checks matters: MUC-related rules are evaluated first,
 * then the groupchat/private exclusions, then the positive eligibility rules.
 */
public static boolean isEligibleForCarbonsDelivery(final Message stanza) {
    // To properly handle messages exchanged with a MUC (or similar service), the server must be able to identify MUC-related messages.
    // This can be accomplished by tracking the clients' presence in MUCs, or by checking for the <x xmlns="http://jabber.org/protocol/muc#user">
    // element in messages. The following rules apply to MUC-related messages:
    if (stanza.getChildElement("x", "http://jabber.org/protocol/muc#user") != null) {
        // A <message/> containing a Direct MUC Invitations (XEP-0249) SHOULD be carbon-copied.
        // NOTE(review): this XEP-0249 check is nested inside the muc#user guard, so it only
        // fires when the stanza carries BOTH elements — confirm that a bare direct invitation
        // (jabber:x:conference without muc#user) is handled as intended.
        if (containsChildElement(stanza, Set.of("x"), "jabber:x:conference")) {
            return true;
        }
        // A <message/> containing a Mediated Invitation SHOULD be carbon-copied.
        if (stanza.getChildElement("x", "http://jabber.org/protocol/muc#user") != null && stanza.getChildElement("x", "http://jabber.org/protocol/muc#user").element("invite") != null) {
            return true;
        }
        // A private <message/> from a local user to a MUC participant (sent to a full JID) SHOULD be carbon-copied
        // The server SHOULD limit carbon-copying to the clients sharing a Multi-Session Nick in that MUC, and MAY
        // inject the <x/> element into such carbon copies. Clients can not respond to carbon-copies of MUC-PMs
        // related to a MUC they are not joined to. Therefore, they SHOULD either ignore such carbon copies, or
        // provide a way for the user to join the MUC before answering.
        if (stanza.getTo() != null && stanza.getTo().getResource() != null && stanza.getFrom() != null && stanza.getFrom().getNode() != null && XMPPServer.getInstance().isLocal(stanza.getFrom())) {
            return true; // TODO The server SHOULD limit carbon-copying to the clients sharing a Multi-Session Nick in that MUC (OF-2780).
        }
        // A private <message/> from a MUC participant (received from a full JID) to a local user SHOULD NOT be
        // carbon-copied (these messages are already replicated by the MUC service to all joined client instances).
        if (stanza.getFrom() != null && stanza.getFrom().getResource() != null && stanza.getTo() != null && stanza.getTo().getNode() != null && XMPPServer.getInstance().isLocal(stanza.getTo())) {
            return false;
        }
    }
    // A <message/> of type "groupchat" SHOULD NOT be carbon-copied.
    if (stanza.getType() == Message.Type.groupchat) {
        return false;
    }
    // A <message/> is eligible for carbons delivery if it does not contain a <private/> child element...
    // (the <private/> / <received/> hints take precedence over all positive rules below)
    if (containsChildElement(stanza, Set.of("private", "received"), "urn:xmpp:carbons")) {
        return false;
    }
    // and if at least one of the following is true:
    // ... it is of type "chat".
    if (stanza.getType() == Message.Type.chat) {
        return true;
    }
    // ... it is of type "normal" and contains a <body> element.
    // (a null type is treated as "normal", per the XMPP default)
    if ((stanza.getType() == null || stanza.getType() == Message.Type.normal) && stanza.getBody() != null) {
        return true;
    }
    // ... it contains payload elements typically used in IM
    if (containsChildElement(stanza, Set.of("request", "received"), "urn:xmpp:receipts") // Message Delivery Receipts (XEP-0184)
        || containsChildElement(stanza, Set.of("active", "inactive", "gone", "composing", "paused"), "http://jabber.org/protocol/chatstates") // Chat State Notifications (XEP-0085)
        || (containsChildElement(stanza, Set.of("markable", "received", "displayed", "acknowledged"), "urn:xmpp:chat-markers")) // Chat Markers (XEP-0333)).
    ) {
        return true;
    }
    // ... it is of type "error" and it was sent in response to a <message/> that was eligible for carbons delivery.
    // TODO implement me (OF-2779)
    return false;
}
@Test public void testChatPrivate() throws Exception { // Setup test fixture. final Message input = new Message(); input.setType(Message.Type.chat); input.getElement().addElement("private", "urn:xmpp:carbons:2"); // Execute system under test. final boolean result = Forwarded.isEligibleForCarbonsDelivery(input); // Verify results. assertFalse(result); }
/**
 * Looks up a database by name through the cache helper.
 */
@Override
public Database getDb(String dbName) {
    final Database database = get(databaseCache, dbName);
    return database;
}
@Test
public void testGetDb() {
    // Build a catalog-level caching metastore over the delegate metastore.
    final CachingDeltaLakeMetastore cachingMetastore = CachingDeltaLakeMetastore.createCatalogLevelInstance(
            metastore, executor, expireAfterWriteSec, refreshAfterWriteSec, 100);

    // The cached lookup must resolve the database by its name.
    final Database db = cachingMetastore.getDb("db1");
    Assert.assertEquals("db1", db.getFullName());
}
boolean peerChange(JRaftMaintainService maintainService, Set<String> newPeers) { // This is only dealing with node deletion, the Raft protocol, where the node adds itself to the cluster when it starts up Set<String> oldPeers = new HashSet<>(this.raftConfig.getMembers()); this.raftConfig.setMembers(localPeerId.toString(), newPeers); oldPeers.removeAll(newPeers); if (oldPeers.isEmpty()) { return true; } Set<String> waitRemove = oldPeers; AtomicInteger successCnt = new AtomicInteger(0); multiRaftGroup.forEach(new BiConsumer<String, RaftGroupTuple>() { @Override public void accept(String group, RaftGroupTuple tuple) { Map<String, String> params = new HashMap<>(); params.put(JRaftConstants.GROUP_ID, group); params.put(JRaftConstants.COMMAND_NAME, JRaftConstants.REMOVE_PEERS); params.put(JRaftConstants.COMMAND_VALUE, StringUtils.join(waitRemove, StringUtils.COMMA)); RestResult<String> result = maintainService.execute(params); if (result.ok()) { successCnt.incrementAndGet(); } else { Loggers.RAFT.error("Node removal failed : {}", result); } } }); return successCnt.get() == multiRaftGroup.size(); }
@Test
void testPeerChange() {
    // Records whether the maintain service was asked to remove peers.
    AtomicBoolean changed = new AtomicBoolean(false);
    // Stubbed maintain service: flips the flag instead of issuing a real command.
    JRaftMaintainService service = new JRaftMaintainService(server) {
        @Override
        public RestResult<String> execute(Map<String, String> args) {
            changed.set(true);
            return RestResultUtils.success();
        }
    };

    // Baseline membership: establishing the initial peer set must not trigger a removal.
    Collection<Member> firstEvent = Arrays.asList(Member.builder().ip("1.1.1.1").port(7848).build(), Member.builder().ip("127.0.0.1").port(80).build(), Member.builder().ip("127.0.0.2").port(81).build(), Member.builder().ip("127.0.0.3").port(82).build());
    server.peerChange(service, ProtocolManager.toCPMembersInfo(firstEvent));
    assertFalse(changed.get());
    changed.set(false);

    // 127.0.0.3 replaced by 127.0.0.4: the departed peer must be removed.
    Collection<Member> secondEvent = Arrays.asList(Member.builder().ip("1.1.1.1").port(7848).build(), Member.builder().ip("127.0.0.1").port(80).build(), Member.builder().ip("127.0.0.2").port(81).build(), Member.builder().ip("127.0.0.4").port(83).build());
    server.peerChange(service, ProtocolManager.toCPMembersInfo(secondEvent));
    assertTrue(changed.get());
    changed.set(false);

    // Shrinking membership with substitutions: removals must again be issued.
    Collection<Member> thirdEvent = Arrays.asList(Member.builder().ip("1.1.1.1").port(7848).build(), Member.builder().ip("127.0.0.2").port(81).build(), Member.builder().ip("127.0.0.5").port(82).build());
    server.peerChange(service, ProtocolManager.toCPMembersInfo(thirdEvent));
    assertTrue(changed.get());
    changed.set(false);

    // Pure shrink relative to the previous event: departed peers must be removed.
    Collection<Member> fourEvent = Arrays.asList(Member.builder().ip("1.1.1.1").port(7848).build(), Member.builder().ip("127.0.0.1").port(80).build());
    server.peerChange(service, ProtocolManager.toCPMembersInfo(fourEvent));
    assertTrue(changed.get());
    changed.set(false);

    // Pure growth (new peer joins, none leaves): no removal command expected.
    Collection<Member> fiveEvent = Arrays.asList(Member.builder().ip("1.1.1.1").port(7848).build(), Member.builder().ip("127.0.0.1").port(80).build(), Member.builder().ip("127.0.0.3").port(81).build());
    server.peerChange(service, ProtocolManager.toCPMembersInfo(fiveEvent));
    assertFalse(changed.get());
    changed.set(false);
}
/**
 * Checks whether this version and {@code version} compare as equal under the
 * {@code isAtLeast} ordering, i.e. each is at least the other.
 * Per the accompanying tests, this ordering ignores the patch component, so
 * e.g. 5.1.1 and 5.1.2 are considered the same.
 */
public boolean same(final KsqlVersion version) {
    // Guard clause preserves the original short-circuit: the reverse
    // comparison only runs when the forward one holds.
    if (!isAtLeast(version)) {
        return false;
    }
    return version.isAtLeast(this);
}
@Test
public void shouldOnlyCompareMajorMinorVersionsInSame() {
    // The patch (third) component is ignored by same().
    assertThat(new KsqlVersion("5.1.1").same(new KsqlVersion("5.1.0")), is(true));
    assertThat(new KsqlVersion("5.1.1").same(new KsqlVersion("5.1.1")), is(true));
    assertThat(new KsqlVersion("5.1.1").same(new KsqlVersion("5.1.2")), is(true));
    // Any difference in major or minor makes the versions not the same.
    assertThat(new KsqlVersion("5.1.0").same(new KsqlVersion("5.0.0")), is(false));
    assertThat(new KsqlVersion("5.1.0").same(new KsqlVersion("5.2.0")), is(false));
    assertThat(new KsqlVersion("5.1.0").same(new KsqlVersion("6.1.0")), is(false));
    // Check the reverse (lower-major) direction too; the original test repeated
    // the previous assertion verbatim instead of covering this case.
    assertThat(new KsqlVersion("6.1.0").same(new KsqlVersion("5.1.0")), is(false));
}
/**
 * Executes the given SQL with no overriding properties.
 *
 * @param sql the statement(s) to execute
 * @return the query metadata produced by execution
 */
public List<QueryMetadata> sql(final String sql) {
    // Delegate to the two-argument overload with an empty property map.
    return this.sql(sql, Collections.emptyMap());
}
@Test public void shouldThrowIfFailedToInferTopic() { // Given: when(topicInjector.inject(any())) .thenThrow(new RuntimeException("Boom")); // When: final Exception e = assertThrows( RuntimeException.class, () -> ksqlContext.sql("Some SQL", SOME_PROPERTIES) ); // Then: assertThat(e.getMessage(), containsString("Boom")); }
/**
 * Filters the sniffed node list by the configured attribute/value pair.
 * When no filter is configured (either part is null), the input list is
 * returned unchanged.
 */
@Override
public List<Node> sniff(List<Node> nodes) {
    final boolean filterConfigured = attribute != null && value != null;
    if (!filterConfigured) {
        // Nothing to filter on: pass the list through untouched.
        return nodes;
    }
    return nodes.stream()
        .filter(n -> nodeMatchesFilter(n, attribute, value))
        .collect(Collectors.toList());
}
@Test
void doesNotFilterNodesIfNoFilterIsSet() throws Exception {
    // A sniffer constructed without an attribute/value filter must be a no-op.
    final List<Node> nodes = mockNodes();
    final NodesSniffer unfilteredSniffer = new FilteredOpenSearchNodesSniffer(null, null);

    final List<Node> result = unfilteredSniffer.sniff(nodes);
    assertThat(result).isEqualTo(nodes);
}
// Transitions the tracer from the START stage into PORT_SCANNING and records
// which port scanners were selected. May only be called once, from the START
// stage; a repeated call fails the checkState precondition.
// NOTE(review): statement order matters in failure paths — the timer is
// started and the stage advanced before checkNotNull runs, so a null argument
// leaves the tracer already in PORT_SCANNING with the timer running.
void startPortScanning(ImmutableList<PluginMatchingResult<PortScanner>> selectedPortScanners) {
    checkState(currentExecutionStage.equals(ExecutionStage.START));
    this.portScanningTimer.start();
    this.currentExecutionStage = ExecutionStage.PORT_SCANNING;
    this.selectedPortScanners = checkNotNull(selectedPortScanners);
}
@Test
public void startPortScanning_whenExecutionStageIsNotStart_throwsException() {
    final ExecutionTracer tracer =
        new ExecutionTracer(portScanningTimer, serviceFingerprintingTimer, vulnerabilityDetectingTimer);
    final ImmutableList<PluginMatchingResult<PortScanner>> portScanners =
        pluginManager.getPortScanners();

    // First call legitimately moves the tracer out of the START stage...
    tracer.startPortScanning(portScanners);

    // ...so a second call must be rejected with an IllegalStateException.
    assertThrows(IllegalStateException.class, () -> tracer.startPortScanning(portScanners));
}
/**
 * Runs native {@code git blame} (line-porcelain format) for a single file and
 * parses the output into blame lines.
 *
 * @param baseDir  repository root, also used as the git working directory
 * @param fileName path of the file to blame, relative to {@code baseDir}
 * @return one {@link BlameLine} per line of the file, or an empty list if the
 *         file has uncommitted changes (blame is then not meaningful)
 * @throws Exception if the underlying git process fails
 */
public List<BlameLine> blame(Path baseDir, String fileName) throws Exception {
    BlameOutputProcessor outputProcessor = new BlameOutputProcessor();
    try {
        // Stream each stdout line of the git process into the output processor.
        this.processWrapperFactory.create(
            baseDir,
            outputProcessor::process,
            gitCommand,
            GIT_DIR_FLAG,
            String.format(GIT_DIR_ARGUMENT, baseDir),
            GIT_DIR_FORCE_FLAG,
            baseDir.toString(),
            BLAME_COMMAND,
            BLAME_LINE_PORCELAIN_FLAG,
            IGNORE_WHITESPACES,
            FILENAME_SEPARATOR_FLAG,
            fileName)
        .execute();
    } catch (UncommittedLineException e) {
        // Deliberate best-effort: uncommitted files simply yield no blame data.
        LOG.debug("Unable to blame file '{}' - it has uncommitted changes", fileName);
        return emptyList();
    }
    return outputProcessor.getBlameLines();
}
@Test
public void throw_exception_if_symlink_found() throws Exception {
    // Symlinks behave differently on Windows; skip there.
    assumeTrue(!System2.INSTANCE.isOsWindows());
    File projectDir = temp.newFolder();
    javaUnzip("dummy-git.zip", projectDir);
    Path baseDir = projectDir.toPath().resolve("dummy-git");
    String relativePath2 = "src/main/java/org/dummy/Dummy2.java";
    // Create symlink
    Files.createSymbolicLink(baseDir.resolve(relativePath2), baseDir.resolve(DUMMY_JAVA));
    // NOTE(review): this first call's result is not asserted — presumably a
    // sanity check that blaming a regular file still works; confirm intent.
    blameCommand.blame(baseDir, DUMMY_JAVA);
    // Blaming the symlink itself must be rejected.
    assertThatThrownBy(() -> blameCommand.blame(baseDir, relativePath2)).isInstanceOf(IllegalStateException.class);
}