focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
@VisibleForTesting public boolean syncTableMetaInternal(Database db, OlapTable table, boolean forceDeleteData) throws DdlException { StarOSAgent starOSAgent = GlobalStateMgr.getCurrentState().getStarOSAgent(); HashMap<Long, Set<Long>> redundantGroupToShards = new HashMap<>(); List<PhysicalPartition> physicalPartitions = new ArrayList<>(); Locker locker = new Locker(); locker.lockDatabase(db, LockType.READ); try { if (db.getTable(table.getId()) == null) { return false; // table might be dropped } GlobalStateMgr.getCurrentState().getLocalMetastore() .getAllPartitionsIncludeRecycleBin(table) .stream() .map(Partition::getSubPartitions) .forEach(physicalPartitions::addAll); table.setShardGroupChanged(false); } finally { locker.unLockDatabase(db, LockType.READ); } for (PhysicalPartition physicalPartition : physicalPartitions) { locker.lockDatabase(db, LockType.READ); try { // schema change might replace the shards in the original shard group if (table.getState() != OlapTable.OlapTableState.NORMAL) { return false; } // automatic bucketing will create new shards in the original shard group if (table.isAutomaticBucketing()) { return false; } // automatic bucketing will change physicalPartitions make shard group changed even after it's done if (table.hasShardGroupChanged()) { return false; } // no need to check db/table/partition again, everything still works long groupId = physicalPartition.getShardGroupId(); Set<Long> starmgrShardIdsSet = null; if (redundantGroupToShards.get(groupId) != null) { starmgrShardIdsSet = redundantGroupToShards.get(groupId); } else { List<Long> starmgrShardIds = starOSAgent.listShard(groupId); starmgrShardIdsSet = new HashSet<>(starmgrShardIds); } for (MaterializedIndex materializedIndex : physicalPartition.getMaterializedIndices(MaterializedIndex.IndexExtState.ALL)) { for (Tablet tablet : materializedIndex.getTablets()) { starmgrShardIdsSet.remove(tablet.getId()); } } // collect shard in starmgr but not in fe redundantGroupToShards.put(groupId, 
starmgrShardIdsSet); } finally { locker.unLockDatabase(db, LockType.READ); } } // try to delete data, if fail, still delete redundant shard meta in starmgr Set<Long> shardToDelete = new HashSet<>(); for (Map.Entry<Long, Set<Long>> entry : redundantGroupToShards.entrySet()) { if (forceDeleteData) { try { List<Long> shardIds = new ArrayList<>(); shardIds.addAll(entry.getValue()); dropTabletAndDeleteShard(shardIds, starOSAgent); } catch (Exception e) { // ignore exception LOG.info(e.getMessage()); } } shardToDelete.addAll(entry.getValue()); } // do final meta delete, regardless whether above tablet deleted or not if (!shardToDelete.isEmpty()) { starOSAgent.deleteShards(shardToDelete); } return !shardToDelete.isEmpty(); }
// Verifies that syncTableMetaInternal does NOT delete StarMgr shards that are still referenced by
// FE tablets: every mocked tablet id (111/222/333) matches a shard id, so after the sync all three
// shards must survive. JMockit MockUps stub out the metastore, partitions, and StarOSAgent.
@Test public void testSyncTableMetaInternal() throws Exception { long dbId = 100; long tableId = 1000; List<Long> shards = new ArrayList<>(); Database db = new Database(dbId, "db"); new MockUp<GlobalStateMgr>() { @Mock public Database getDb(String dbName) { return db; } @Mock public Database getDb(long id) { return db; } @Mock public List<Long> getDbIds() { return Lists.newArrayList(dbId); } }; List<Column> baseSchema = new ArrayList<>(); KeysType keysType = KeysType.AGG_KEYS; PartitionInfo partitionInfo = new PartitionInfo(PartitionType.RANGE); DistributionInfo defaultDistributionInfo = new HashDistributionInfo(); Table table = new LakeTable(tableId, "bbb", baseSchema, keysType, partitionInfo, defaultDistributionInfo); new MockUp<Database>() { @Mock public Table getTable(String tableName) { return table; } @Mock public Table getTable(long tableId) { return table; } @Mock public List<Table> getTables() { return Lists.newArrayList(table); } }; new MockUp<MaterializedIndex>() { @Mock public List<Tablet> getTablets() { List<Tablet> tablets = new ArrayList<>(); tablets.add(new LakeTablet(111)); tablets.add(new LakeTablet(222)); tablets.add(new LakeTablet(333)); return tablets; } }; new MockUp<PhysicalPartition>() { @Mock public long getShardGroupId() { return 444; } }; new MockUp<StarOSAgent>() { @Mock public List<Long> listShard(long groupId) throws DdlException { return shards; } @Mock public void deleteShards(Set<Long> shardIds) throws DdlException { shards.removeAll(shardIds); } }; new MockUp<ColocateTableIndex>() { @Mock public boolean isLakeColocateTable(long tableId) { return true; } @Mock public void updateLakeTableColocationInfo(OlapTable olapTable, boolean isJoin, GroupId expectGroupId) throws DdlException { return; } }; new MockUp<SystemInfoService>() { @Mock public ComputeNode getBackendOrComputeNode(long nodeId) { return null; } }; shards.clear(); shards.add(111L); shards.add(222L); shards.add(333L); starMgrMetaSyncer.syncTableMetaInternal(db, (OlapTable) 
table, true); Assert.assertEquals(3, shards.size()); }
@Override public void configServerProtocolHandler(URL url, ChannelOperator operator) { List<ChannelHandler> handlers = new ArrayList<>(); // operator(for now nettyOperator)'s duties // 1. config codec2 for the protocol(load by extension loader) // 2. config handlers passed by wire protocol // ( for triple, some h2 netty handler and logic handler to handle connection; // for dubbo, nothing, an empty handlers is used to trigger operator logic) // 3. config Dubbo Inner handler(for dubbo protocol, this handler handles connection) operator.configChannelHandler(handlers); }
@Test void testConfigServerProtocolHandler() { URL url = URL.valueOf("dubbo://localhost:20880"); List<ChannelHandler> handlers = new ArrayList<>(); dubboWireProtocol.configServerProtocolHandler(url, channelOperator); verify(channelOperator).configChannelHandler(handlers); }
@Override public Object[] toArray() { return underlying().toArray(); }
@Test public void testDelegationOfToArrayIntoGivenDestination() { Object[] destinationArray = new Object[0]; new PCollectionsHashSetWrapperDelegationChecker<>() .defineMockConfigurationForFunctionInvocation(mock -> mock.toArray(eq(destinationArray)), new Object[0]) .defineWrapperFunctionInvocationAndMockReturnValueTransformation(wrapper -> wrapper.toArray(destinationArray), identity()) .doFunctionDelegationCheck(); }
@Override public Map<String, String> getSourcesMap() { return Collections.unmodifiableMap(sourcesMap); }
@Test void addToGetSourcesMap() { assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> { Map<String, String> retrieved = kiePMMLModelWithSources.getSourcesMap(); retrieved.put("KEY", "VALUE"); }); }
@Override public Output load(String streamOutputId) throws NotFoundException { final Output output = coll.findOneById(streamOutputId); if (output == null) { throw new NotFoundException("Couldn't find output with id " + streamOutputId); } return output; }
@Test @MongoDBFixtures("OutputServiceImplTest.json") public void loadThrowsNotFoundExceptionForNonExistingOutput() { assertThatThrownBy(() -> outputService.load("54e300000000000000000000")) .isInstanceOf(NotFoundException.class); }
public static boolean pathPattern(final String pathPattern, final String realPath) { PathPattern pattern = PathPatternParser.defaultInstance.parse(pathPattern); return pattern.matches(PathContainer.parsePath(realPath)); }
@Test public void testPathPattern() { // test matching with *'s assertTrue(PathMatchUtils.pathPattern("*", "test")); assertTrue(PathMatchUtils.pathPattern("test*", "test")); assertTrue(PathMatchUtils.pathPattern("test*", "testTest")); assertFalse(PathMatchUtils.pathPattern("test*aaa", "testblaaab")); // test matching with **'s assertTrue(PathMatchUtils.pathPattern("/**", "/testing/testing")); assertTrue(PathMatchUtils.pathPattern("/test/**", "/test/test")); }
public void updateView(GiantModel giantModel) { giantView.displayGiant(giantModel); }
@Test void testUpdateView() { final var model = new GiantModel("giant1", Health.HEALTHY, Fatigue.ALERT, Nourishment.SATURATED); GiantView giantView = new GiantView(); Dispatcher dispatcher = new Dispatcher(giantView); assertDoesNotThrow(() -> dispatcher.updateView(model)); }
@Override public Integer clusterGetSlotForKey(byte[] key) { RFuture<Integer> f = executorService.readAsync((String)null, StringCodec.INSTANCE, RedisCommands.KEYSLOT, key); return syncFuture(f); }
@Test public void testClusterGetSlotForKey() { Integer slot = connection.clusterGetSlotForKey("123".getBytes()); assertThat(slot).isNotNull(); }
@Override public UpdateSchema updateColumn(String name, Type.PrimitiveType newType) { Types.NestedField field = findField(name); Preconditions.checkArgument(field != null, "Cannot update missing column: %s", name); Preconditions.checkArgument( !deletes.contains(field.fieldId()), "Cannot update a column that will be deleted: %s", field.name()); if (field.type().equals(newType)) { return this; } Preconditions.checkArgument( TypeUtil.isPromotionAllowed(field.type(), newType), "Cannot change column type: %s: %s -> %s", name, field.type(), newType); // merge with a rename, if present int fieldId = field.fieldId(); Types.NestedField update = updates.get(fieldId); if (update != null) { updates.put( fieldId, Types.NestedField.of(fieldId, update.isOptional(), update.name(), newType, update.doc())); } else { updates.put( fieldId, Types.NestedField.of(fieldId, field.isOptional(), field.name(), newType, field.doc())); } return this; }
@Test public void testUpdateMissingColumn() { assertThatThrownBy( () -> { UpdateSchema update = new SchemaUpdate(SCHEMA, SCHEMA_LAST_COLUMN_ID); update.updateColumn("col", Types.DateType.get()); }) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Cannot update missing column: col"); }
public T send() throws IOException { return web3jService.send(this, responseType); }
@Test public void testDbPutHex() throws Exception { web3j.dbPutHex("testDB", "myKey", "0x68656c6c6f20776f726c64").send(); verifyResult( "{\"jsonrpc\":\"2.0\",\"method\":\"db_putHex\"," + "\"params\":[\"testDB\",\"myKey\",\"0x68656c6c6f20776f726c64\"],\"id\":1}"); }
public static String builderData(final String paramType, final String paramName, final ServerWebExchange exchange) { return newInstance(paramType).builder(paramName, exchange); }
@Test public void testBuildURIData() { ServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get("/uri/path") .remoteAddress(new InetSocketAddress("localhost", 8080)) .build()); assertEquals("/uri/path", ParameterDataFactory.builderData("uri", null, exchange)); }
public static Object parseMockValue(String mock) throws Exception { return parseMockValue(mock, null); }
@Test void testParseMockValue() throws Exception { Assertions.assertNull(MockInvoker.parseMockValue("null")); Assertions.assertNull(MockInvoker.parseMockValue("empty")); Assertions.assertTrue((Boolean) MockInvoker.parseMockValue("true")); Assertions.assertFalse((Boolean) MockInvoker.parseMockValue("false")); Assertions.assertEquals(123, MockInvoker.parseMockValue("123")); Assertions.assertEquals("foo", MockInvoker.parseMockValue("foo")); Assertions.assertEquals("foo", MockInvoker.parseMockValue("\"foo\"")); Assertions.assertEquals("foo", MockInvoker.parseMockValue("\'foo\'")); Assertions.assertEquals(new HashMap<>(), MockInvoker.parseMockValue("{}")); Assertions.assertEquals(new ArrayList<>(), MockInvoker.parseMockValue("[]")); Assertions.assertEquals("foo", MockInvoker.parseMockValue("foo", new Type[] {String.class})); }
public boolean isScheduled() { return (future != null) && !future.isDone(); }
@Test public void isScheduled_nullFuture() { pacer.future = null; assertThat(pacer.isScheduled()).isFalse(); }
private KsqlScalarFunction createFunction( final Class theClass, final UdfDescription udfDescriptionAnnotation, final Udf udfAnnotation, final Method method, final String path, final String sensorName, final Class<? extends Kudf> udfClass ) { // sanity check FunctionLoaderUtils .instantiateFunctionInstance(method.getDeclaringClass(), udfDescriptionAnnotation.name()); final FunctionInvoker invoker = FunctionLoaderUtils.createFunctionInvoker(method); final String functionName = udfDescriptionAnnotation.name(); LOGGER.info("Adding function " + functionName + " for method " + method); final List<ParameterInfo> parameters = FunctionLoaderUtils .createParameters(method, functionName, typeParser); final ParamType javaReturnSchema = FunctionLoaderUtils .getReturnType(method, udfAnnotation.schema(), typeParser); final SchemaProvider schemaProviderFunction = FunctionLoaderUtils .handleUdfReturnSchema( theClass, javaReturnSchema, udfAnnotation.schema(), typeParser, udfAnnotation.schemaProvider(), udfDescriptionAnnotation.name(), method.isVarArgs() ); return KsqlScalarFunction.create( schemaProviderFunction, javaReturnSchema, parameters, FunctionName.of(functionName.toUpperCase()), udfClass, getUdfFactory(method, udfDescriptionAnnotation, functionName, invoker, sensorName), udfAnnotation.description(), path, method.isVarArgs() ); }
@Test public void shouldThrowIfMissingAggregateTypeSchema() throws Exception { // When: UdafFactoryInvoker invoker = createUdafLoader().createUdafFactoryInvoker( UdfLoaderTest.class.getMethod("missingAggregateSchemaAnnotationUdaf"), of("test"), "desc", new String[]{""}, "", "" ); final Exception e = assertThrows( KsqlException.class, () -> invoker.createFunction(AggregateFunctionInitArguments.EMPTY_ARGS, Collections.emptyList()) ); // Then: assertThat(e.getCause().getMessage(), containsString("Must specify 'aggregateSchema' for STRUCT" + " parameter in @UdafFactory or implement getAggregateSqlType()/getReturnSqlType().")); }
public long getNumSegmentsQueried() { return _brokerResponse.has(NUM_SEGMENTS_QUERIED) ? _brokerResponse.get(NUM_SEGMENTS_QUERIED).asLong() : -1L; }
@Test public void testGetNumSegmentsQueried() { // Run the test final long result = _executionStatsUnderTest.getNumSegmentsQueried(); // Verify the results assertEquals(10L, result); }
@Override public <T> TransformEvaluator<T> forApplication( AppliedPTransform<?, ?, ?> application, CommittedBundle<?> inputBundle) throws Exception { @SuppressWarnings({"unchecked", "rawtypes"}) TransformEvaluator<T> evaluator = (TransformEvaluator<T>) createEvaluator((AppliedPTransform) application, (CommittedBundle) inputBundle); return evaluator; }
// Verifies that when the side input is not ready, the stateful ParDo evaluator pushes back every
// input element (values 1, 13, 15) as unprocessed, keyed and windowed correctly. The pipeline is
// built only to fabricate a realistic AppliedPTransform and input bundle; execution is mocked.
@Test public void testUnprocessedElements() throws Exception { // To test the factory, first we set up a pipeline and then we use the constructed // pipeline to create the right parameters to pass to the factory final String stateId = "my-state-id"; // For consistency, window it into FixedWindows. Actually we will fabricate an input bundle. PCollection<KV<String, Integer>> mainInput = pipeline .apply(Create.of(KV.of("hello", 1), KV.of("hello", 2))) .apply(Window.into(FixedWindows.of(Duration.millis(10)))); final PCollectionView<List<Integer>> sideInput = pipeline .apply("Create side input", Create.of(42)) .apply("Window side input", Window.into(FixedWindows.of(Duration.millis(10)))) .apply("View side input", View.asList()); TupleTag<Integer> mainOutput = new TupleTag<>(); PCollection<Integer> produced = mainInput .apply( new ParDoMultiOverrideFactory.GbkThenStatefulParDo<>( new DoFn<KV<String, Integer>, Integer>() { @StateId(stateId) private final StateSpec<ValueState<String>> spec = StateSpecs.value(StringUtf8Coder.of()); @ProcessElement public void process(ProcessContext c) {} }, mainOutput, TupleTagList.empty(), Collections.singletonList(sideInput), DoFnSchemaInformation.create(), Collections.emptyMap())) .get(mainOutput) .setCoder(VarIntCoder.of()); StatefulParDoEvaluatorFactory<String, Integer, Integer> factory = new StatefulParDoEvaluatorFactory<>(mockEvaluationContext, options); // This will be the stateful ParDo from the expansion AppliedPTransform< PCollection<KeyedWorkItem<String, KV<String, Integer>>>, PCollectionTuple, StatefulParDo<String, Integer, Integer>> producingTransform = (AppliedPTransform) DirectGraphs.getProducer(produced); // Then there will be a digging down to the step context to get the state internals when(mockEvaluationContext.getExecutionContext( eq(producingTransform), Mockito.<StructuralKey>any())) .thenReturn(mockExecutionContext); when(mockExecutionContext.getStepContext(any())).thenReturn(mockStepContext); 
when(mockEvaluationContext.createBundle(Matchers.<PCollection<Integer>>any())) .thenReturn(mockUncommittedBundle); when(mockStepContext.getTimerUpdate()).thenReturn(TimerUpdate.empty()); // And digging to check whether the window is ready when(mockEvaluationContext.createSideInputReader(anyList())).thenReturn(mockSideInputReader); when(mockSideInputReader.isReady(Matchers.any(), Matchers.any())).thenReturn(false); IntervalWindow firstWindow = new IntervalWindow(new Instant(0), new Instant(9)); // A single bundle with some elements in the global window; it should register cleanup for the // global window state merely by having the evaluator created. The cleanup logic does not // depend on the window. String key = "hello"; WindowedValue<KV<String, Integer>> firstKv = WindowedValue.of(KV.of(key, 1), new Instant(3), firstWindow, PaneInfo.NO_FIRING); WindowedValue<KeyedWorkItem<String, KV<String, Integer>>> gbkOutputElement = firstKv.withValue( KeyedWorkItems.elementsWorkItem( "hello", ImmutableList.of( firstKv, firstKv.withValue(KV.of(key, 13)), firstKv.withValue(KV.of(key, 15))))); CommittedBundle<KeyedWorkItem<String, KV<String, Integer>>> inputBundle = BUNDLE_FACTORY .createBundle( (PCollection<KeyedWorkItem<String, KV<String, Integer>>>) Iterables.getOnlyElement( TransformInputs.nonAdditionalInputs(producingTransform))) .add(gbkOutputElement) .commit(Instant.now()); TransformEvaluator<KeyedWorkItem<String, KV<String, Integer>>> evaluator = factory.forApplication(producingTransform, inputBundle); evaluator.processElement(gbkOutputElement); // This should push back every element as a KV<String, Iterable<Integer>> // in the appropriate window. Since the keys are equal they are single-threaded TransformResult<KeyedWorkItem<String, KV<String, Integer>>> result = evaluator.finishBundle(); List<Integer> pushedBackInts = new ArrayList<>(); for (WindowedValue<? 
extends KeyedWorkItem<String, KV<String, Integer>>> unprocessedElement : result.getUnprocessedElements()) { assertThat( Iterables.getOnlyElement(unprocessedElement.getWindows()), equalTo((BoundedWindow) firstWindow)); assertThat(unprocessedElement.getValue().key(), equalTo("hello")); for (WindowedValue<KV<String, Integer>> windowedKv : unprocessedElement.getValue().elementsIterable()) { pushedBackInts.add(windowedKv.getValue().getValue()); } } assertThat(pushedBackInts, containsInAnyOrder(1, 13, 15)); }
@Override public X process(T input, Context context) throws Exception { if (!this.initialized) { initialize(context); } // record must be PulsarFunctionRecord. Record<T> record = (Record<T>) context.getCurrentRecord(); // windows function processing semantics requires separate processing if (windowConfig.getProcessingGuarantees() == WindowConfig.ProcessingGuarantees.ATMOST_ONCE) { record.ack(); } if (isEventTime()) { long ts = this.timestampExtractor.extractTimestamp(record.getValue()); if (this.waterMarkEventGenerator.track(record.getTopicName().get(), ts)) { this.windowManager.add(record, ts, record); } else { if (this.windowConfig.getLateDataTopic() != null) { context.newOutputMessage(this.windowConfig.getLateDataTopic(), null).value(input).sendAsync(); } else { log.info(String.format( "Received a late tuple %s with ts %d. This will not be " + "processed" + ".", input, ts)); } } } else { this.windowManager.add(record, System.currentTimeMillis(), record); } return null; }
@Test public void testExecuteWithTs() throws Exception { long[] timestamps = {603, 605, 607, 618, 626, 636}; for (long ts : timestamps) { Record<?> record = mock(Record.class); doReturn(Optional.of("test-topic")).when(record).getTopicName(); doReturn(record).when(context).getCurrentRecord(); doReturn(ts).when(record).getValue(); testWindowedPulsarFunction.process(ts, context); } testWindowedPulsarFunction.waterMarkEventGenerator.run(); assertEquals(3, testWindowedPulsarFunction.windows.size()); Window<Record<Long>> first = testWindowedPulsarFunction.windows.get(0); assertArrayEquals( new long[]{603, 605, 607}, new long[]{first.get().get(0).getValue(), first.get().get(1).getValue(), first.get().get(2).getValue()}); Window<Record<Long>> second = testWindowedPulsarFunction.windows.get(1); assertArrayEquals( new long[]{603, 605, 607, 618}, new long[]{second.get().get(0).getValue(), second.get().get(1).getValue(), second.get().get(2).getValue(), second.get().get(3).getValue()}); Window<Record<Long>> third = testWindowedPulsarFunction.windows.get(2); assertArrayEquals(new long[]{618, 626}, new long[]{third.get().get(0).getValue(), third.get().get(1).getValue()}); }
@Override @Nullable protected HttpHost determineProxy(HttpHost target, HttpContext context) throws HttpException { for (Pattern nonProxyHostPattern : nonProxyHostPatterns) { if (nonProxyHostPattern.matcher(target.getHostName()).matches()) { return null; } } return super.determineProxy(target, context); }
@Test void testHostNotInBlackList() throws Exception { assertThat(routePlanner.determineProxy(new HttpHost("dropwizard.io"), httpContext)) .isEqualTo(proxy); }
public String getUserInformation() { StringBuilder ret = new StringBuilder(); if (StringUtils.isEmpty(getUsername()) && StringUtils.isEmpty(getPassword())) { return ret.toString(); } if (StringUtils.isNotEmpty(getUsername())) { ret.append(getUsername()); } ret.append(':'); if (StringUtils.isNotEmpty(getPassword())) { ret.append(getPassword()); } return ret.length() == 0 ? null : ret.toString(); }
@Test void testGetUserInformation() { URL url = URL.valueOf("admin1:hello1234@10.20.130.230:20880/context/path?version=1.0.0&application=app1"); assertEquals("admin1:hello1234", url.getUserInformation()); URL urlWithoutUsername = URL.valueOf(":hello1234@10.20.130.230:20880/context/path?version=1.0.0&application=app1"); assertEquals(":hello1234@10.20.130.230:20880", urlWithoutUsername.getAuthority()); URL urlWithoutPassword = URL.valueOf("admin1:@10.20.130.230:20880/context/path?version=1.0.0&application=app1"); assertEquals("admin1:@10.20.130.230:20880", urlWithoutPassword.getAuthority()); URL urlWithoutUserInformation = URL.valueOf("10.20.130.230:20880/context/path?version=1.0.0&application=app1"); assertEquals("10.20.130.230:20880", urlWithoutUserInformation.getAuthority()); }
public static Validator validUrl() { return (name, val) -> { if (!(val instanceof String)) { throw new IllegalArgumentException("validator should only be used with STRING defs"); } try { new URL((String)val); } catch (final Exception e) { throw new ConfigException(name, val, "Not valid URL: " + e.getMessage()); } }; }
@Test public void shouldThrowOnInvalidURL() { // Given: final Validator validator = ConfigValidators.validUrl(); // When: final Exception e = assertThrows( ConfigException.class, () -> validator.ensureValid("propName", "INVALID") ); // Then: assertThat(e.getMessage(), containsString("Invalid value INVALID for configuration propName: Not valid URL: no protocol: INVALID")); }
public static HazelcastInstance newHazelcastInstance(Config config) { if (config == null) { config = Config.load(); } return newHazelcastInstance( config, config.getInstanceName(), new DefaultNodeContext() ); }
@Test public void fixedNameGeneratedIfPropertyDisabled() { Config config = new Config(); config.getProperties().put(ClusterProperty.MOBY_NAMING_ENABLED.getName(), "false"); hazelcastInstance = HazelcastInstanceFactory.newHazelcastInstance(config); String name = hazelcastInstance.getName(); assertNotNull(name); assertContains(name, "_hzInstance_"); }
@Override public long get() { return kubeClient .getConfigMap(configMapName) .map(this::getCurrentCounter) .orElseThrow( () -> new FlinkRuntimeException( new KubernetesException( "ConfigMap " + configMapName + " does not exist."))); }
@Test void testGetWhenConfigMapNotExist() throws Exception { new Context() { { runTest( () -> { final KubernetesCheckpointIDCounter checkpointIDCounter = new KubernetesCheckpointIDCounter( flinkKubeClient, LEADER_CONFIGMAP_NAME, LOCK_IDENTITY); final String errMsg = "ConfigMap " + LEADER_CONFIGMAP_NAME + " does not exist."; assertThatThrownBy( () -> checkpointIDCounter.get(), "We should get an exception when trying to get checkpoint id counter but ConfigMap does not exist.") .satisfies(anyCauseMatches(errMsg)); }); } }; }
@Override public List<String> retrieveAll(GroupVersionKind type, ListOptions options, Sort sort) { return doRetrieve(type, options, sort); }
@Test void retrieveAll() { var spyIndexedQueryEngine = spy(indexedQueryEngine); doReturn(List.of()).when(spyIndexedQueryEngine) .doRetrieve(any(), any(), eq(Sort.unsorted())); var gvk = GroupVersionKind.fromExtension(DemoExtension.class); var result = spyIndexedQueryEngine.retrieveAll(gvk, new ListOptions(), Sort.unsorted()); assertThat(result).isEmpty(); verify(spyIndexedQueryEngine).doRetrieve(eq(gvk), any(), eq(Sort.unsorted())); }
public static int MCRF4XX(@NonNull final byte[] data, final int offset, final int length) { return CRC(0x1021, 0xFFFF, data, offset, length, true, true, 0x0000); }
@Test public void MCRF4XX_A() { final byte[] data = new byte[] { 'A' }; assertEquals(0x5C0A, CRC16.MCRF4XX(data, 0, 1)); }
public static HazelcastSqlOperatorTable instance() { return INSTANCE; }
@Test public void testReturnTypeInference() { for (SqlOperator operator : HazelcastSqlOperatorTable.instance().getOperatorList()) { if (operator instanceof HazelcastTableFunction || operator == HazelcastSqlOperatorTable.IN || operator == HazelcastSqlOperatorTable.NOT_IN || operator == HazelcastSqlOperatorTable.UNION || operator == HazelcastSqlOperatorTable.UNION_ALL || operator == HazelcastSqlOperatorTable.ARGUMENT_ASSIGNMENT || operator == HazelcastSqlOperatorTable.DOT) { continue; } boolean valid = operator.getReturnTypeInference() instanceof HazelcastReturnTypeInference; assertTrue("Operator must have " + HazelcastReturnTypeInference.class.getSimpleName() + ": " + operator.getClass().getSimpleName(), valid); } }
public static <T> T readStaticFieldOrNull(String className, String fieldName) { try { Class<?> clazz = Class.forName(className); return readStaticField(clazz, fieldName); } catch (ClassNotFoundException | NoSuchFieldException | IllegalAccessException | SecurityException e) { return null; } }
@Test public void readStaticFieldOrNull_readFromPublicField() { String field = ReflectionUtils.readStaticFieldOrNull(MyClass.class.getName(), "staticPublicField"); assertEquals("staticPublicFieldContent", field); }
@Override public void execute(GraphModel graphModel) { Graph graph; if (isDirected) { graph = graphModel.getDirectedGraphVisible(); } else { graph = graphModel.getUndirectedGraphVisible(); } execute(graph); }
@Test public void testColumnReplace() { GraphModel graphModel = GraphGenerator.generateNullUndirectedGraph(1); graphModel.getNodeTable().addColumn(PageRank.PAGERANK, String.class); PageRank pr = new PageRank(); pr.execute(graphModel); }
public Predicate convert(ScalarOperator operator) { if (operator == null) { return null; } return operator.accept(this, null); }
@Test public void testLike() { ConstantOperator value = ConstantOperator.createVarchar("ttt%"); ScalarOperator op = new LikePredicateOperator(LikePredicateOperator.LikeType.LIKE, F1, value); Predicate result = CONVERTER.convert(op); Assert.assertTrue(result instanceof LeafPredicate); LeafPredicate leafPredicate = (LeafPredicate) result; Assert.assertTrue(leafPredicate.function() instanceof StartsWith); Assert.assertEquals("ttt", leafPredicate.literals().get(0).toString()); }
public WorkflowSummary createWorkflowSummaryFromInstance(WorkflowInstance instance) { WorkflowSummary summary = new WorkflowSummary(); summary.setWorkflowId(instance.getWorkflowId()); summary.setInternalId(instance.getInternalId()); summary.setWorkflowVersionId(instance.getWorkflowVersionId()); summary.setWorkflowName(getWorkflowNameOrDefault(instance.getRuntimeWorkflow())); summary.setWorkflowInstanceId(instance.getWorkflowInstanceId()); summary.setCreationTime(instance.getCreateTime()); summary.setWorkflowRunId(instance.getWorkflowRunId()); summary.setCorrelationId(instance.getCorrelationId()); summary.setWorkflowUuid(instance.getWorkflowUuid()); if (instance.getRunConfig() != null) { summary.setRunPolicy(instance.getRunConfig().getPolicy()); summary.setRestartConfig(instance.getRunConfig().getRestartConfig()); } summary.setRunProperties(instance.getRunProperties()); summary.setInitiator(instance.getInitiator()); summary.setParams(instance.getParams()); summary.setStepRunParams(instance.getStepRunParams()); summary.setTags(instance.getRuntimeWorkflow().getTags()); summary.setRuntimeDag(instance.getRuntimeDag()); summary.setCriticality(instance.getRuntimeWorkflow().getCriticality()); summary.setInstanceStepConcurrency(instance.getRuntimeWorkflow().getInstanceStepConcurrency()); return summary; }
@Test public void testCreateWorkflowSummaryFromInstanceWithInstanceStepConcurrency() { WorkflowSummary summary = workflowHelper.createWorkflowSummaryFromInstance(instance); assertEquals(instance.getWorkflowId(), summary.getWorkflowId()); assertEquals(instance.getWorkflowVersionId(), summary.getWorkflowVersionId()); assertEquals(instance.getCreateTime(), summary.getCreationTime()); assertEquals(instance.getInternalId(), summary.getInternalId()); assertNull(summary.getInstanceStepConcurrency()); instance.getRunProperties().setStepConcurrency(10L); summary = workflowHelper.createWorkflowSummaryFromInstance(instance); assertNull(summary.getInstanceStepConcurrency()); // use runtime workflow instance_step_concurrency instance.setRuntimeWorkflow( instance.getRuntimeWorkflow().toBuilder().instanceStepConcurrency(20L).build()); summary = workflowHelper.createWorkflowSummaryFromInstance(instance); assertEquals(20L, summary.getInstanceStepConcurrency().longValue()); }
List<MappingField> resolveAndValidateFields(List<MappingField> userFields, Map<String, ?> options) { if (options.get(OPTION_FORMAT) == null) { throw QueryException.error("Missing '" + OPTION_FORMAT + "' option"); } if (options.get(OPTION_PATH) == null) { throw QueryException.error("Missing '" + OPTION_PATH + "' option"); } List<MappingField> fields = findMetadataResolver(options).resolveAndValidateFields(userFields, options); if (fields.isEmpty()) { throw QueryException.error("The resolved field list is empty"); } return fields; }
@Test public void when_pathIsMissing_then_throws() { assertThatThrownBy(() -> resolvers.resolveAndValidateFields(emptyList(), singletonMap(OPTION_FORMAT, FORMAT))) .isInstanceOf(QueryException.class) .hasMessageContaining("Missing 'path"); }
public static UriTemplate create(String template, Charset charset) { return new UriTemplate(template, true, charset); }
@Test void simpleTemplateWithRegularExpressions() { String template = "https://www.example.com/{foo:[0-9]{4}}/{bar}"; UriTemplate uriTemplate = UriTemplate.create(template, Util.UTF_8); assertThat(uriTemplate.getVariables()).contains("foo", "bar").hasSize(2); Map<String, Object> variables = new LinkedHashMap<>(); variables.put("foo", 1234); variables.put("bar", "stuff"); String expandedTemplate = uriTemplate.expand(variables); assertThat(expandedTemplate).isEqualToIgnoringCase("https://www.example.com/1234/stuff"); assertThat(URI.create(expandedTemplate)).isNotNull(); }
protected String putFieldInMsgMapCode(String fieldNameInProto, String getFieldMethodName, String optionalDecodeMethod, String optionalAdditionalCalls) { if (StringUtils.isBlank(optionalAdditionalCalls)) { optionalAdditionalCalls = ""; } if (!StringUtils.isBlank(optionalDecodeMethod)) { return String.format("msgMap.put(\"%s\", %s(msg.%s()%s))", fieldNameInProto, optionalDecodeMethod, getFieldMethodName, optionalAdditionalCalls); } return String.format("msgMap.put(\"%s\", msg.%s()%s)", fieldNameInProto, getFieldMethodName, optionalAdditionalCalls); }
@Test
public void testPutFieldInMsgMapCode() {
    MessageCodeGen messageCodeGen = new MessageCodeGen();
    // NOTE(review): every call below passes the actual value first and the expected literal
    // second. If the imported assertEquals is JUnit's (expected, actual), the arguments are
    // swapped and failure messages will be misleading; TestNG's (actual, expected) order
    // would be correct. Verify which framework this file imports.
    assertEquals(
        messageCodeGen.putFieldInMsgMapCode("field1", "getField1", null, null),
        "msgMap.put(\"field1\", msg.getField1())");
    assertEquals(
        messageCodeGen.putFieldInMsgMapCode("field1", "getField1", "", ""),
        "msgMap.put(\"field1\", msg.getField1())");
    assertEquals(
        messageCodeGen.putFieldInMsgMapCode("field1", "getField1", "decodeField1", null),
        "msgMap.put(\"field1\", decodeField1(msg.getField1()))");
    assertEquals(
        messageCodeGen.putFieldInMsgMapCode("field1", "getField1", "decodeField1", ""),
        "msgMap.put(\"field1\", decodeField1(msg.getField1()))");
    assertEquals(
        messageCodeGen.putFieldInMsgMapCode("field1", "getField1", null, ".toByteArray()"),
        "msgMap.put(\"field1\", msg.getField1().toByteArray())");
    assertEquals(
        messageCodeGen.putFieldInMsgMapCode("field1", "getField1", "", ".toByteArray()"),
        "msgMap.put(\"field1\", msg.getField1().toByteArray())");
    assertEquals(
        messageCodeGen.putFieldInMsgMapCode("field1", "getField1", "decodeField1", ".toString()"),
        "msgMap.put(\"field1\", decodeField1(msg.getField1().toString()))");
}
/**
 * Inserts or updates a key/value pair in this open-addressed int-to-int hash map.
 *
 * <p>Keys are stored at even indices of {@code entries} and values at the following odd
 * index; a value equal to {@code initialValue} marks an empty slot, which is why that
 * value can never be stored.
 *
 * @param key   the key to insert or update
 * @param value the value to store; must not equal the map's initial (missing) value
 * @return the previous value for the key, or {@code initialValue} if the key was absent
 * @throws IllegalArgumentException if {@code value} equals the initial value sentinel
 */
public int put(final int key, final int value) {
    final int initialValue = this.initialValue;
    // The sentinel that marks an empty slot can never be stored as a real value.
    if (initialValue == value) {
        throw new IllegalArgumentException("cannot accept initialValue");
    }

    final int[] entries = this.entries;
    @DoNotSub final int mask = entries.length - 1;
    @DoNotSub int index = Hashing.evenHash(key, mask);

    int oldValue;
    // Linear probe until an empty slot (value == sentinel) or the matching key is found.
    while (initialValue != (oldValue = entries[index + 1])) {
        if (key == entries[index]) {
            break;
        }
        index = next(index, mask);
    }

    // Landed on an empty slot: this is a brand-new key.
    if (initialValue == oldValue) {
        ++size;
        entries[index] = key;
    }

    entries[index + 1] = value;

    // May resize/rehash once the load threshold is exceeded.
    increaseCapacity();

    return oldValue;
}
@Test
void shouldNotAllowInitialValueAsValue() {
    // The map's initial value is the sentinel for empty slots, so storing it must be rejected.
    assertThrows(IllegalArgumentException.class, () -> map.put(1, INITIAL_VALUE));
}
/**
 * Returns the shared {@link Gson} instance held by the {@code SingletonHolder}
 * (holder-class idiom: initialized on first access, thread-safe without locking).
 */
public static Gson instance() {
    return SingletonHolder.INSTANCE;
}
@Test
void rejectsDeserializationOfDESCipherProvider() {
    // The payload content is irrelevant; the target type alone must be refused.
    final String json = "{ \"whatever\": \"actual payload doesn't matter\" }";
    final IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class,
            () -> Serialization.instance().fromJson(json, DESCipherProvider.class));
    assertEquals(
            format("Refusing to deserialize a %s in the JSON stream!", DESCipherProvider.class.getName()),
            thrown.getMessage());
}
public static ConfigDefinitionKey createConfigDefinitionKeyFromZKString(String nodeName) { final String name; final String namespace; String tempName = ConfigUtils.getNameFromSerializedString(nodeName); // includes namespace Tuple2<String, String> tuple = ConfigUtils.getNameAndNamespaceFromString(tempName); name = tuple.first; namespace = tuple.second; return new ConfigDefinitionKey(name, namespace); }
@Test
public void testCreateConfigDefinitionKeyFromZKString() {
    // With a version suffix, with a dangling comma, and with no suffix at all:
    // each variant must yield the same name/namespace split.
    for (String nodeName : new String[]{"bar.foo,1", "bar.foo,", "bar.foo"}) {
        ConfigDefinitionKey def = ConfigUtils.createConfigDefinitionKeyFromZKString(nodeName);
        assertEquals("foo", def.getName());
        assertEquals("bar", def.getNamespace());
    }
}
/**
 * FEEL starts-with(string, match): true iff {@code string} begins with {@code match}.
 * A null argument produces an invalid-parameters error rather than a result; the
 * {@code string} parameter is checked first.
 */
public FEELFnResult<Boolean> invoke(@ParameterName("string") String string, @ParameterName("match") String match) {
    if (string == null) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "string", "cannot be null"));
    }
    if (match == null) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "match", "cannot be null"));
    }
    boolean startsWith = string.startsWith(match);
    return FEELFnResult.ofResult(startsWith);
}
@Test
void invokeNotStartsWith() {
    // None of these candidates is a prefix of "test".
    for (String notAPrefix : new String[]{"tte", "tt", "ttest"}) {
        FunctionTestUtil.assertResult(startsWithFunction.invoke("test", notAPrefix), false);
    }
}
/**
 * Converts a Graylog {@link Message} into a {@link GelfMessage}, falling back to the
 * current UTC time when the message's timestamp field is not a {@link DateTime}.
 */
protected GelfMessage toGELFMessage(final Message message) {
    final Object fieldTimeStamp = message.getField(Message.FIELD_TIMESTAMP);
    final DateTime timestamp = (fieldTimeStamp instanceof DateTime)
            ? (DateTime) fieldTimeStamp
            : Tools.nowUTC();

    final GelfMessageLevel messageLevel = extractLevel(message.getField(Message.FIELD_LEVEL));
    final String fullMessage = (String) message.getField(Message.FIELD_FULL_MESSAGE);

    final GelfMessageBuilder builder = new GelfMessageBuilder(message.getMessage(), message.getSource())
            // GELF timestamps are seconds (with fractional millis).
            .timestamp(timestamp.getMillis() / 1000.0d)
            // Record which component forwarded the message.
            .additionalField("_forwarder", GelfOutput.class.getCanonicalName())
            .additionalFields(message.getFields());

    if (messageLevel != null) {
        builder.level(messageLevel);
    }
    if (fullMessage != null) {
        builder.fullMessage(fullMessage);
    }

    return builder.build();
}
@Test
public void testToGELFMessageTimestamp() throws Exception {
    final GelfTransport transport = mock(GelfTransport.class);
    final GelfOutput gelfOutput = new GelfOutput(transport);

    // A message carrying an explicit DateTime timestamp must keep it (converted to seconds).
    final DateTime now = DateTime.now(DateTimeZone.UTC);
    final Message message = messageFactory.createMessage("Test", "Source", now);

    final GelfMessage gelfMessage = gelfOutput.toGELFMessage(message);

    assertEquals(gelfMessage.getTimestamp(), now.getMillis() / 1000.0d, 0.0d);
}
/**
 * Opens a cache rooted at {@code cacheDirectory}, creating the directory (and any
 * missing parents) first.
 *
 * @param cacheDirectory root directory for the cache storage
 * @return the cache backed by files under {@code cacheDirectory}
 * @throws CacheDirectoryCreationException if the directory cannot be created, for
 *     example because the path already exists as a regular file
 */
public static Cache withDirectory(Path cacheDirectory) throws CacheDirectoryCreationException {
    try {
        Files.createDirectories(cacheDirectory);
    } catch (IOException ex) {
        // Wrap so callers only have to handle a single cache-specific exception type.
        throw new CacheDirectoryCreationException(ex);
    }
    CacheStorageFiles storage = new CacheStorageFiles(cacheDirectory);
    return new Cache(storage);
}
@Test
public void testWithDirectory_existsButNotDirectory() throws IOException {
    // A regular file already occupies the path, so directory creation must fail.
    Path notADirectory = temporaryFolder.newFile().toPath();
    try {
        Cache.withDirectory(notADirectory);
        Assert.fail();
    } catch (CacheDirectoryCreationException ex) {
        // The wrapped cause should reveal the underlying filesystem conflict.
        MatcherAssert.assertThat(
            ex.getCause(), CoreMatchers.instanceOf(FileAlreadyExistsException.class));
    }
}
/**
 * Pages through app-auth records, optionally filtered by appKey and phone.
 *
 * @param appKey      optional app key filter
 * @param phone       optional phone filter
 * @param currentPage required 1-based page index
 * @param pageSize    required page size
 * @return a successful result wrapping the requested page
 */
@GetMapping("/findPageByQuery")
@RequiresPermissions("system:authen:list")
public ShenyuAdminResult findPageByQuery(final String appKey, final String phone,
                                         @RequestParam @NotNull(message = "currentPage not null") final Integer currentPage,
                                         @RequestParam @NotNull(message = "pageSize not null") final Integer pageSize) {
    final AppAuthQuery query = new AppAuthQuery();
    query.setAppKey(appKey);
    query.setPhone(phone);
    query.setPageParameter(new PageParameter(currentPage, pageSize));
    final CommonPager<AppAuthVO> pager = appAuthService.listByPage(query);
    return ShenyuAdminResult.success(ShenyuResultMessage.QUERY_SUCCESS, pager);
}
@Test
public void testFindPageByQuery() throws Exception {
    final PageParameter pageParameter = new PageParameter();
    final AppAuthQuery expectedQuery = new AppAuthQuery("testAppKey", "18600000000", pageParameter);
    final CommonPager<AppAuthVO> stubbedPage =
            new CommonPager<>(pageParameter, Collections.singletonList(appAuthVO));
    given(this.appAuthService.listByPage(expectedQuery)).willReturn(stubbedPage);

    // The controller must translate the request parameters into exactly the stubbed query.
    this.mockMvc.perform(MockMvcRequestBuilders.get("/appAuth/findPageByQuery")
                    .param("appKey", "testAppKey")
                    .param("phone", "18600000000")
                    .param("currentPage", String.valueOf(pageParameter.getCurrentPage()))
                    .param("pageSize", String.valueOf(pageParameter.getPageSize())))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$.message", is(ShenyuResultMessage.QUERY_SUCCESS)))
            .andExpect(jsonPath("$.data.dataList[0].appKey", is(appAuthVO.getAppKey())))
            .andReturn();
}
/**
 * Returns the {@link ObjectMapper} this codec uses for map-typed (de)serialization.
 */
public ObjectMapper getObjectMapper() {
    return mapObjectMapper;
}
@Test public void shouldSerializeAndDeserializeThrowable() throws JsonProcessingException { //given ObjectMapper objectMapper = JsonJacksonCodec.INSTANCE.getObjectMapper(); //when String serialized = objectMapper.writeValueAsString(new RuntimeException("Example message")); RuntimeException deserialized = objectMapper.readValue(serialized, RuntimeException.class); //then Assertions.assertEquals("Example message", deserialized.getMessage()); }
/**
 * Builds the drop command for a DROP STREAM statement; streams are KSTREAM data sources.
 */
public DropSourceCommand create(final DropStream statement) {
    return create(
        statement.getName(),
        statement.getIfExists(),
        statement.isDeleteTopic(),
        DataSourceType.KSTREAM
    );
}
@Test public void shouldFailDeleteTopicForSourceTable() { // Given: final DropTable dropTable = new DropTable(TABLE_NAME, false, true); when(ksqlTable.isSource()).thenReturn(true); // When: final Exception e = assertThrows( KsqlException.class, () -> dropSourceFactory.create(dropTable) ); // Then: assertThat(e.getMessage(), containsString("Cannot delete topic for read-only source: tablename")); }
/**
 * Decodes a replication message buffer into a WAL event (with or without transaction
 * context, depending on {@code decodeWithTX}) and stamps it with the log sequence number.
 */
@Override
public AbstractWALEvent decode(final ByteBuffer data, final BaseLogSequenceNumber logSequenceNumber) {
    // Drain the remaining buffer bytes and interpret them as UTF-8 text.
    byte[] bytes = new byte[data.remaining()];
    data.get(bytes);
    String dataText = new String(bytes, StandardCharsets.UTF_8);
    AbstractWALEvent result = decodeWithTX ? decodeDataWithTX(dataText) : decodeDataIgnoreTX(dataText);
    result.setLogSequenceNumber(logSequenceNumber);
    return result;
}
@Test
void assertDecodeWithDaterange() {
    // Build an INSERT row whose single column is a daterange value.
    MppTableData tableData = new MppTableData();
    tableData.setTableName("public.test");
    tableData.setOpType("INSERT");
    tableData.setColumnsName(new String[]{"data"});
    tableData.setColumnsType(new String[]{"daterange"});
    tableData.setColumnsVal(new String[]{"'[2020-01-02,2021-01-02)'"});
    ByteBuffer data = ByteBuffer.wrap(JsonUtils.toJsonString(tableData).getBytes());

    WriteRowEvent actual = (WriteRowEvent) new MppdbDecodingPlugin(null, false, false).decode(data, logSequenceNumber);

    // A daterange column decodes into a PGobject whose text keeps the range notation.
    Object rangeValue = actual.getAfterRow().get(0);
    assertThat(rangeValue, instanceOf(PGobject.class));
    assertThat(rangeValue.toString(), is("[2020-01-02,2021-01-02)"));
}
/**
 * Parses a single JSON-serialized metadata update into its {@link MetadataUpdate} form.
 */
public static MetadataUpdate fromJson(String json) {
    return JsonUtil.parse(json, MetadataUpdateParser::fromJson);
}
@Test
public void testSetSnapshotRefTagFromJsonDefault_ExplicitNullValues() {
    // Explicit JSON nulls for the optional retention fields must parse the same as absent ones.
    String action = MetadataUpdateParser.SET_SNAPSHOT_REF;
    String json =
        "{\"action\":\"set-snapshot-ref\",\"ref-name\":\"hank\",\"snapshot-id\":1,\"type\":\"tag\","
            + "\"min-snapshots-to-keep\":null,\"max-snapshot-age-ms\":null,\"max-ref-age-ms\":null}";

    // Typed null locals keep the constructor call unambiguous.
    Integer minSnapshotsToKeep = null;
    Long maxSnapshotAgeMs = null;
    Long maxRefAgeMs = null;
    MetadataUpdate expected = new MetadataUpdate.SetSnapshotRef(
        "hank", 1L, SnapshotRefType.TAG, minSnapshotsToKeep, maxSnapshotAgeMs, maxRefAgeMs);

    assertEquals(action, expected, MetadataUpdateParser.fromJson(json));
}
/**
 * Delegates the enabled flag to the underlying settings.
 */
@Override
public boolean isEnabled() {
    return settings.isEnabled();
}
@Test
public void is_enabled() {
    // Credentials are configured; only the 'enabled' flag should flip the result.
    settings.setProperty("sonar.auth.github.clientId.secured", "id");
    settings.setProperty("sonar.auth.github.clientSecret.secured", "secret");

    settings.setProperty("sonar.auth.github.enabled", true);
    assertThat(underTest.isEnabled()).isTrue();

    settings.setProperty("sonar.auth.github.enabled", false);
    assertThat(underTest.isEnabled()).isFalse();
}
/**
 * Completely replaces the ACL, scope by scope: if the spec contains any entries of a
 * scope (ACCESS or DEFAULT), every existing entry of that scope is discarded; scopes
 * untouched by the spec are carried over from the existing ACL. Mask entries are held
 * aside so the masks can be (re)calculated and validated at the end.
 *
 * @param existingAcl the current ACL entries
 * @param inAclSpec   the requested replacement entries
 * @return the new, validated ACL
 * @throws AclException if the spec or the resulting ACL is invalid
 */
public static List<AclEntry> replaceAclEntries(List<AclEntry> existingAcl,
    List<AclEntry> inAclSpec) throws AclException {
  ValidatedAclSpec aclSpec = new ValidatedAclSpec(inAclSpec);
  ArrayList<AclEntry> aclBuilder = Lists.newArrayListWithCapacity(MAX_ENTRIES);
  // Replacement is done separately for each scope: access and default.
  EnumMap<AclEntryScope, AclEntry> providedMask = Maps.newEnumMap(AclEntryScope.class);
  EnumSet<AclEntryScope> maskDirty = EnumSet.noneOf(AclEntryScope.class);
  EnumSet<AclEntryScope> scopeDirty = EnumSet.noneOf(AclEntryScope.class);
  for (AclEntry aclSpecEntry: aclSpec) {
    // Record which scopes the spec touches; those are fully replaced below.
    scopeDirty.add(aclSpecEntry.getScope());
    if (aclSpecEntry.getType() == MASK) {
      // Masks are tracked separately for recalculation/validation.
      providedMask.put(aclSpecEntry.getScope(), aclSpecEntry);
      maskDirty.add(aclSpecEntry.getScope());
    } else {
      aclBuilder.add(aclSpecEntry);
    }
  }
  // Copy existing entries if the scope was not replaced.
  for (AclEntry existingEntry: existingAcl) {
    if (!scopeDirty.contains(existingEntry.getScope())) {
      if (existingEntry.getType() == MASK) {
        providedMask.put(existingEntry.getScope(), existingEntry);
      } else {
        aclBuilder.add(existingEntry);
      }
    }
  }
  // Fill in any default-scope entries implied by the access scope, then finalize masks.
  copyDefaultsIfNeeded(aclBuilder);
  calculateMasks(aclBuilder, providedMask, maskDirty, scopeDirty);
  return buildAndValidateAcl(aclBuilder);
}
@Test
public void testReplaceAclEntriesAutomaticDefaultOther() throws AclException {
    List<AclEntry> existing = ImmutableList.of(
        aclEntry(ACCESS, USER, ALL),
        aclEntry(ACCESS, GROUP, READ),
        aclEntry(ACCESS, OTHER, NONE));
    // The spec omits a DEFAULT OTHER entry on purpose.
    List<AclEntry> aclSpec = Lists.newArrayList(
        aclEntry(ACCESS, USER, ALL),
        aclEntry(ACCESS, GROUP, READ),
        aclEntry(ACCESS, OTHER, NONE),
        aclEntry(DEFAULT, USER, READ_WRITE),
        aclEntry(DEFAULT, USER, "bruce", READ),
        aclEntry(DEFAULT, GROUP, READ_WRITE),
        aclEntry(DEFAULT, MASK, READ_WRITE));
    // The missing DEFAULT OTHER entry is filled in automatically from the access scope.
    List<AclEntry> expected = ImmutableList.of(
        aclEntry(ACCESS, USER, ALL),
        aclEntry(ACCESS, GROUP, READ),
        aclEntry(ACCESS, OTHER, NONE),
        aclEntry(DEFAULT, USER, READ_WRITE),
        aclEntry(DEFAULT, USER, "bruce", READ),
        aclEntry(DEFAULT, GROUP, READ_WRITE),
        aclEntry(DEFAULT, MASK, READ_WRITE),
        aclEntry(DEFAULT, OTHER, NONE));
    assertEquals(expected, replaceAclEntries(existing, aclSpec));
}
/**
 * Always returns {@code null} — this implementation never has a time-millis value to
 * remove (NOTE(review): appears to be an empty/stub headers implementation; confirm
 * against the enclosing class).
 */
@Override
public Long getTimeMillisAndRemove(K name) {
    return null;
}
@Test
public void testGetTimeMillisAndRemove() {
    // The headers instance stores nothing, so removal of any name yields null.
    assertNull(HEADERS.getTimeMillisAndRemove("name1"));
}
/**
 * Scans the log text against every known {@link Rule} pattern and returns one
 * {@link Result} per rule whose pattern matches.
 *
 * @param log the full log/crash-report text to scan
 * @return the (possibly empty) set of matched rules with their matchers
 */
public static Set<Result> analyze(String log) {
    Set<Result> results = new HashSet<>();
    for (Rule rule : Rule.values()) {
        Matcher matcher = rule.pattern.matcher(log);
        if (matcher.find()) {
            results.add(new Result(rule, log, matcher));
        }
    }
    return results;
}

/**
 * @deprecated misspelled name kept for backward compatibility; use {@link #analyze(String)}.
 */
@Deprecated
public static Set<Result> anaylze(String log) {
    return analyze(log);
}
@Test
public void jadeForestOptifine() throws IOException {
    // NOTE(review): the result is captured but never asserted — if findResultByRule returns
    // rather than throws on a miss, this test cannot fail on a missed detection. Consider
    // asserting the result is non-null.
    CrashReportAnalyzer.Result result = findResultByRule(
            CrashReportAnalyzer.anaylze(loadLog("/logs/jade_forest_optifine.txt")),
            CrashReportAnalyzer.Rule.JADE_FOREST_OPTIFINE);
}
/**
 * Computes the container entrypoint from the raw plugin configuration.
 *
 * <p>Precedence: an explicit user entrypoint wins (with {@code ["INHERIT"]} meaning
 * "inherit from the base image", returned as {@code null}); WAR projects get a Jetty
 * launcher (or inherit when a custom base image is set); otherwise a
 * {@code java -cp <classpath> <main-class>} entrypoint is assembled.
 *
 * @return the entrypoint token list, or {@code null} to inherit from the base image
 */
@Nullable
@VisibleForTesting
static List<String> computeEntrypoint(
    RawConfiguration rawConfiguration,
    ProjectProperties projectProperties,
    JibContainerBuilder jibContainerBuilder)
    throws MainClassInferenceException, InvalidAppRootException, IOException,
        InvalidContainerizingModeException {
  Optional<List<String>> rawEntrypoint = rawConfiguration.getEntrypoint();
  List<String> rawExtraClasspath = rawConfiguration.getExtraClasspath();
  boolean entrypointDefined = rawEntrypoint.isPresent() && !rawEntrypoint.get().isEmpty();
  // Warn when the user sets options that an explicit entrypoint makes irrelevant.
  if (entrypointDefined
      && (rawConfiguration.getMainClass().isPresent()
          || !rawConfiguration.getJvmFlags().isEmpty()
          || !rawExtraClasspath.isEmpty()
          || rawConfiguration.getExpandClasspathDependencies())) {
    projectProperties.log(
        LogEvent.info(
            "mainClass, extraClasspath, jvmFlags, and expandClasspathDependencies are ignored "
                + "when entrypoint is specified"));
  }

  if (projectProperties.isWarProject()) {
    if (entrypointDefined) {
      // A sole "INHERIT" token means: use the base image's entrypoint (null).
      return rawEntrypoint.get().size() == 1 && "INHERIT".equals(rawEntrypoint.get().get(0))
          ? null
          : rawEntrypoint.get();
    }
    if (rawConfiguration.getMainClass().isPresent()
        || !rawConfiguration.getJvmFlags().isEmpty()
        || !rawExtraClasspath.isEmpty()
        || rawConfiguration.getExpandClasspathDependencies()) {
      projectProperties.log(
          LogEvent.warn(
              "mainClass, extraClasspath, jvmFlags, and expandClasspathDependencies are ignored "
                  + "for WAR projects"));
    }
    return rawConfiguration.getFromImage().isPresent()
        ? null // Inherit if a custom base image.
        : Arrays.asList("java", "-jar", "/usr/local/jetty/start.jar", "--module=ee10-deploy");
  }

  // Non-WAR path: assemble the classpath according to the containerizing mode.
  List<String> classpath = new ArrayList<>(rawExtraClasspath);
  AbsoluteUnixPath appRoot = getAppRootChecked(rawConfiguration, projectProperties);
  ContainerizingMode mode = getContainerizingModeChecked(rawConfiguration, projectProperties);
  switch (mode) {
    case EXPLODED:
      classpath.add(appRoot.resolve("resources").toString());
      classpath.add(appRoot.resolve("classes").toString());
      break;
    case PACKAGED:
      classpath.add(appRoot.resolve("classpath/*").toString());
      break;
    default:
      throw new IllegalStateException("unknown containerizing mode: " + mode);
  }

  // Java 9+ (or explicit opt-in) lists each dependency jar individually instead of libs/*.
  if (projectProperties.getMajorJavaVersion() >= 9
      || rawConfiguration.getExpandClasspathDependencies()) {
    List<Path> jars = projectProperties.getDependencies();
    Map<String, Long> occurrences =
        jars.stream()
            .map(path -> path.getFileName().toString())
            .collect(Collectors.groupingBy(filename -> filename, Collectors.counting()));
    List<String> duplicates =
        occurrences.entrySet().stream()
            .filter(entry -> entry.getValue() > 1)
            .map(Map.Entry::getKey)
            .collect(Collectors.toList());
    for (Path jar : jars) {
      // Handle duplicates by appending filesize to the end of the file. This renaming logic
      // must be in sync with the code that does the same in the other place. See
      // https://github.com/GoogleContainerTools/jib/issues/3331
      String jarName = jar.getFileName().toString();
      if (duplicates.contains(jarName)) {
        jarName = jarName.replaceFirst("\\.jar$", "-" + Files.size(jar)) + ".jar";
      }
      classpath.add(appRoot.resolve("libs").resolve(jarName).toString());
    }
  } else {
    classpath.add(appRoot.resolve("libs/*").toString());
  }

  String classpathString = String.join(":", classpath);
  String mainClass;
  try {
    mainClass =
        MainClassResolver.resolveMainClass(
            rawConfiguration.getMainClass().orElse(null), projectProperties);
  } catch (MainClassInferenceException ex) {
    if (entrypointDefined) {
      // We will use the user-given entrypoint, so don't fail.
      mainClass = "could-not-infer-a-main-class";
    } else {
      throw ex;
    }
  }

  addJvmArgFilesLayer(
      rawConfiguration, projectProperties, jibContainerBuilder, classpathString, mainClass);
  // On Java 9+ the classpath is passed via an argument file baked into the image.
  if (projectProperties.getMajorJavaVersion() >= 9) {
    classpathString = "@" + appRoot.resolve(JIB_CLASSPATH_FILE);
  }
  if (entrypointDefined) {
    return rawEntrypoint.get().size() == 1 && "INHERIT".equals(rawEntrypoint.get().get(0))
        ? null
        : rawEntrypoint.get();
  }

  List<String> entrypoint = new ArrayList<>(4 + rawConfiguration.getJvmFlags().size());
  entrypoint.add("java");
  entrypoint.addAll(rawConfiguration.getJvmFlags());
  entrypoint.add("-cp");
  entrypoint.add(classpathString);
  entrypoint.add(mainClass);
  return entrypoint;
}
@Test
public void testComputeEntrypoint_packaged() throws MainClassInferenceException,
    InvalidAppRootException, IOException, InvalidContainerizingModeException {
  // In PACKAGED mode the classpath uses classpath/* (plus libs/*) instead of resources+classes.
  when(rawConfiguration.getContainerizingMode()).thenReturn("packaged");
  assertThat(
      PluginConfigurationProcessor.computeEntrypoint(
          rawConfiguration, projectProperties, jibContainerBuilder))
      .containsExactly("java", "-cp", "/app/classpath/*:/app/libs/*", "java.lang.Object")
      .inOrder();
}
/**
 * Returns the live entry set of the backing map.
 *
 * <p>NOTE(review): this exposes the internal mutable map directly, so callers can mutate
 * this multimap through the returned set. Confirm that is intended; otherwise consider
 * returning an unmodifiable view.
 */
public Set<Map.Entry<K, Set<V>>> entrySet() {
    return backingMap.entrySet();
}
@Test
public void entrySet_whenEmpty_thenReturnEmptySet() {
    // A fresh multimap exposes no entries at all.
    assertThat(multiMap.entrySet()).isEmpty();
}
// Creates a new device profile or updates an existing one (when an id is supplied).
@ApiOperation(value = "Create Or Update Device Profile (saveDeviceProfile)",
        notes = "Create or update the Device Profile. When creating device profile, platform generates device profile id as " + UUID_WIKI_LINK +
                "The newly created device profile id will be present in the response. " +
                "Specify existing device profile id to update the device profile. " +
                "Referencing non-existing device profile Id will cause 'Not Found' error. " + NEW_LINE +
                "Device profile name is unique in the scope of tenant. Only one 'default' device profile may exist in scope of tenant." +
                DEVICE_PROFILE_DATA +
                "Remove 'id', 'tenantId' from the request body example (below) to create new Device Profile entity. " +
                TENANT_AUTHORITY_PARAGRAPH)
@PreAuthorize("hasAuthority('TENANT_ADMIN')")
@RequestMapping(value = "/deviceProfile", method = RequestMethod.POST)
@ResponseBody
public DeviceProfile saveDeviceProfile(
        @Parameter(description = "A JSON value representing the device profile.")
        @RequestBody DeviceProfile deviceProfile) throws Exception {
    // The tenant always comes from the authenticated user, overriding whatever the client sent.
    deviceProfile.setTenantId(getTenantId());
    // checkEntity presumably enforces access for the referenced id — confirm its contract.
    checkEntity(deviceProfile.getId(), deviceProfile, Resource.DEVICE_PROFILE);
    return tbDeviceProfileService.save(deviceProfile, getCurrentUser());
}
@Test
public void testSaveDeviceProfile() throws Exception {
    DeviceProfile deviceProfile = this.createDeviceProfile("Device Profile");

    Mockito.reset(tbClusterService, auditLogService);

    // Create: the saved profile must echo the request fields and get server-side id/createdTime.
    DeviceProfile savedDeviceProfile = doPost("/api/deviceProfile", deviceProfile, DeviceProfile.class);
    Assert.assertNotNull(savedDeviceProfile);
    Assert.assertNotNull(savedDeviceProfile.getId());
    Assert.assertTrue(savedDeviceProfile.getCreatedTime() > 0);
    Assert.assertEquals(deviceProfile.getName(), savedDeviceProfile.getName());
    Assert.assertEquals(deviceProfile.getDescription(), savedDeviceProfile.getDescription());
    Assert.assertEquals(deviceProfile.getProfileData(), savedDeviceProfile.getProfileData());
    Assert.assertEquals(deviceProfile.isDefault(), savedDeviceProfile.isDefault());
    Assert.assertEquals(deviceProfile.getDefaultRuleChainId(), savedDeviceProfile.getDefaultRuleChainId());
    Assert.assertEquals(DeviceProfileProvisionType.DISABLED, savedDeviceProfile.getProvisionType());

    testNotifyEntityBroadcastEntityStateChangeEventOneTime(savedDeviceProfile, savedDeviceProfile.getId(),
            savedDeviceProfile.getId(), savedTenant.getId(), tenantAdmin.getCustomerId(),
            tenantAdmin.getId(), tenantAdmin.getEmail(), ActionType.ADDED);

    // Update: renaming via the same endpoint must persist and fire an UPDATED notification.
    savedDeviceProfile.setName("New device profile");
    doPost("/api/deviceProfile", savedDeviceProfile, DeviceProfile.class);
    DeviceProfile foundDeviceProfile = doGet("/api/deviceProfile/" + savedDeviceProfile.getId().getId().toString(), DeviceProfile.class);
    Assert.assertEquals(savedDeviceProfile.getName(), foundDeviceProfile.getName());

    testNotifyEntityBroadcastEntityStateChangeEventOneTime(foundDeviceProfile, foundDeviceProfile.getId(),
            foundDeviceProfile.getId(), savedTenant.getId(), tenantAdmin.getCustomerId(),
            tenantAdmin.getId(), tenantAdmin.getEmail(), ActionType.UPDATED);
}
/**
 * Runs one duty cycle: timers, client commands, queued commands, stream-position
 * tracking, name resolution, and end-of-life resource cleanup.
 *
 * @return the amount of work done this cycle (0 means the cycle was idle)
 */
public int doWork() {
    final long nowNs = nanoClock.nanoTime();
    trackTime(nowNs);

    int workCount = 0;
    workCount += processTimers(nowNs);
    // Do not poll for new client commands while an async client command is still in flight.
    if (!asyncClientCommandInFlight) {
        workCount += clientCommandAdapter.receive();
    }
    workCount += drainCommandQueue();
    workCount += trackStreamPositions(workCount, nowNs);
    workCount += nameResolver.doWork(cachedEpochClock.time());
    workCount += freeEndOfLifeResources(ctx.resourceFreeLimit());

    return workCount;
}
@Test
void shouldErrorWhenOriginalPublicationHasNoDistinguishingCharacteristicBeyondTag() {
    final String expectedMessage =
        "URI must have explicit control, endpoint, or be manual control-mode when original:";

    // A tag-only channel gives the driver nothing to distinguish the publication by,
    // so the conductor must surface an InvalidChannelException via the error handler.
    driverProxy.addPublication("aeron:udp?tags=1001", STREAM_ID_1);
    driverConductor.doWork();

    verify(mockErrorHandler).onError(argThat(
        (ex) ->
        {
            assertThat(ex, instanceOf(InvalidChannelException.class));
            assertThat(ex.getMessage(), containsString(expectedMessage));
            return true;
        }));
}
/**
 * Blocking wrapper around {@link #rankEntryAsync}: returns the element's rank/score
 * entry, or {@code null} if the element is not in the set.
 */
@Override
public RankedEntry<V> rankEntry(V o) {
    return get(rankEntryAsync(o));
}
@Test
public void testRankEntry() {
    RScoredSortedSet<String> set = redisson.getScoredSortedSet("test");
    set.add(1.1, "v1");
    set.add(1.2, "v2");
    set.add(1.3, "v3");

    // Ranks are zero-based and ordered by ascending score.
    RankedEntry<String> first = set.rankEntry("v1");
    assertThat(first.getRank()).isZero();
    assertThat(first.getScore()).isEqualTo(1.1);

    RankedEntry<String> last = set.rankEntry("v3");
    assertThat(last.getRank()).isEqualTo(2);
    assertThat(last.getScore()).isEqualTo(1.3);

    // An element that was never added yields null rather than an exception.
    assertThat(set.rankEntry("v4")).isNull();
}
@Override public DataSourceConfigDO getDataSourceConfig(Long id) { // 如果 id 为 0,默认为 master 的数据源 if (Objects.equals(id, DataSourceConfigDO.ID_MASTER)) { return buildMasterDataSourceConfig(); } // 从 DB 中读取 return dataSourceConfigMapper.selectById(id); }
@Test public void testGetDataSourceConfig_master() { // 准备参数 Long id = 0L; // mock 方法 // 调用 DataSourceConfigDO dataSourceConfig = dataSourceConfigService.getDataSourceConfig(id); // 断言 assertEquals(id, dataSourceConfig.getId()); assertEquals("primary", dataSourceConfig.getName()); assertEquals("http://localhost:3306", dataSourceConfig.getUrl()); assertEquals("yunai", dataSourceConfig.getUsername()); assertEquals("tudou", dataSourceConfig.getPassword()); }
public void setIssueComponent(DefaultIssue issue, String newComponentUuid, String newComponentKey, Date updateDate) { if (!Objects.equals(newComponentUuid, issue.componentUuid())) { issue.setComponentUuid(newComponentUuid); issue.setUpdateDate(updateDate); issue.setChanged(true); } // other fields (such as module, modulePath, componentKey) are read-only and set/reset for consistency only issue.setComponentKey(newComponentKey); }
@Test
void setIssueComponent_changes_component_uuid() {
    String componentKey = "key";
    issue.setComponentUuid("a");

    // Moving to a different uuid must stamp the update date and mark the issue changed.
    underTest.setIssueComponent(issue, "b", componentKey, context.date());

    assertThat(issue.componentUuid()).isEqualTo("b");
    assertThat(issue.componentKey()).isEqualTo(componentKey);
    assertThat(issue.isChanged()).isTrue();
    assertThat(issue.updateDate()).isEqualTo(DateUtils.truncate(context.date(), Calendar.SECOND));
}
/**
 * Moves the current read position to the given absolute position.
 *
 * @param position the new position; must lie in {@code [0, length()]}
 * @throws IllegalArgumentException if the position is outside the file bounds
 */
@Override
public void moveTo(long position) {
    if (position < 0 || length() < position) {
        // Include the offending values so the failure is diagnosable from the message alone.
        throw new IllegalArgumentException(
                "Position " + position + " out of the bounds of the file (length " + length() + ")!");
    }
    fp = position;
}
@Test
public void available() throws IOException {
    // After seeking to 12 bytes before the end, exactly 12 bytes must remain available.
    int amount = 12;
    cs.moveTo((int) len - amount);
    assertEquals(amount, cs.availableExact());
}
/**
 * Validates the connector configuration and delivers the result via {@code callback};
 * delegates to the three-argument overload with the trailing flag set to {@code true}
 * (NOTE(review): confirm the flag's semantics against the overload's declaration).
 */
@Override
public void validateConnectorConfig(Map<String, String> connectorProps, Callback<ConfigInfos> callback) {
    validateConnectorConfig(connectorProps, callback, true);
}
@Test
public void testConfigValidationInvalidTopics() {
    final Class<? extends Connector> connectorClass = SampleSinkConnector.class;
    AbstractHerder herder = createConfigValidationHerder(connectorClass, noneConnectorClientConfigOverridePolicy);

    // Supplying BOTH a topics list and a topics regex for a sink connector is invalid.
    Map<String, String> config = new HashMap<>();
    config.put(ConnectorConfig.CONNECTOR_CLASS_CONFIG, connectorClass.getName());
    config.put(SinkConnectorConfig.TOPICS_CONFIG, "topic1,topic2");
    config.put(SinkConnectorConfig.TOPICS_REGEX_CONFIG, "topic.*");

    ConfigInfos validation = herder.validateConnectorConfig(config, s -> null, false);

    // Each of the two conflicting settings should carry exactly one validation error.
    ConfigInfo topicsListInfo = findInfo(validation, SinkConnectorConfig.TOPICS_CONFIG);
    assertNotNull(topicsListInfo);
    assertEquals(1, topicsListInfo.configValue().errors().size());
    ConfigInfo topicsRegexInfo = findInfo(validation, SinkConnectorConfig.TOPICS_REGEX_CONFIG);
    assertNotNull(topicsRegexInfo);
    assertEquals(1, topicsRegexInfo.configValue().errors().size());

    verifyValidationIsolation();
}
/**
 * Identifiers in this dialect are quoted with backquotes ({@code `...`}).
 */
@Override
public QuoteCharacter getQuoteCharacter() {
    return QuoteCharacter.BACK_QUOTE;
}
@Test
void assertGetQuoteCharacter() {
    // The dialect metadata must report the backquote as its identifier quote character.
    assertThat(dialectDatabaseMetaData.getQuoteCharacter(), is(QuoteCharacter.BACK_QUOTE));
}
/**
 * Validates the request's query: an empty query is trivially OK; otherwise the query is
 * parsed and every registered validator is run, collecting their messages. Any parser or
 * validator failure is converted into an error response instead of propagating.
 */
@Override
public ValidationResponse validate(ValidationRequest req) {
    if (req.isEmptyQuery()) {
        return ValidationResponse.ok();
    }
    try {
        final ParsedQuery parsedQuery = luceneQueryParser.parse(req.rawQuery());
        final ValidationContext context = ValidationContext.builder()
                .request(req)
                .query(parsedQuery)
                .availableFields(fields.fieldTypesByStreamIds(req.streams(), req.timerange()))
                .build();
        final List<ValidationMessage> explanations = validators.stream()
                .map(validator -> validator.validate(context))
                .flatMap(List::stream)
                .collect(Collectors.toList());
        return ValidationResponse.withDetectedStatus(explanations);
    } catch (Exception e) {
        // Broad catch by design: any failure becomes an error response, never an exception.
        return ValidationResponse.error(ValidationErrors.create(e));
    }
}
@Test void validateWithWarning() { // validator returns one warning final QueryValidator queryValidator = context -> Collections.singletonList( ValidationMessage.builder(ValidationStatus.WARNING, ValidationType.INVALID_OPERATOR) .errorMessage("Invalid operator detected") .build()); final QueryValidationServiceImpl service = new QueryValidationServiceImpl( LUCENE_QUERY_PARSER, FIELD_TYPES_SERVICE, Collections.singleton(queryValidator)); final ValidationResponse validationResponse = service.validate(req()); assertThat(validationResponse.status()).isEqualTo(ValidationStatus.WARNING); assertThat(validationResponse.explanations()) .hasOnlyOneElementSatisfying(message -> { assertThat(message.validationType()).isEqualTo(ValidationType.INVALID_OPERATOR); assertThat(message.validationStatus()).isEqualTo(ValidationStatus.WARNING); }); }
/**
 * Validates that all given internal topics exist with the expected partition counts,
 * creating any that are missing, and retrying (with back-off) until everything is ready
 * or the retry deadline passes.
 *
 * @param topics internal topic name to its expected configuration
 * @return the names of topics that had to be newly created
 * @throws TimeoutException if the topics could not be made ready within the retry timeout
 * @throws StreamsException on fatal, non-retriable topic-creation errors
 */
public Set<String> makeReady(final Map<String, InternalTopicConfig> topics) {
    // we will do the validation / topic-creation in a loop, until we have confirmed all topics
    // have existed with the expected number of partitions, or some create topic returns fatal errors.
    log.debug("Starting to validate internal topics {} in partition assignor.", topics);

    long currentWallClockMs = time.milliseconds();
    final long deadlineMs = currentWallClockMs + retryTimeoutMs;

    Set<String> topicsNotReady = new HashSet<>(topics.keySet());
    final Set<String> newlyCreatedTopics = new HashSet<>();

    while (!topicsNotReady.isEmpty()) {
        final Set<String> tempUnknownTopics = new HashSet<>();
        topicsNotReady = validateTopics(topicsNotReady, topics, tempUnknownTopics);
        newlyCreatedTopics.addAll(topicsNotReady);

        if (!topicsNotReady.isEmpty()) {
            final Set<NewTopic> newTopics = new HashSet<>();

            for (final String topicName : topicsNotReady) {
                if (tempUnknownTopics.contains(topicName)) {
                    // for the tempUnknownTopics, don't create topic for them
                    // we'll check again later if remaining retries > 0
                    continue;
                }
                final InternalTopicConfig internalTopicConfig = Objects.requireNonNull(topics.get(topicName));
                final Map<String, String> topicConfig = internalTopicConfig.properties(defaultTopicConfigs, windowChangeLogAdditionalRetention);

                log.debug("Going to create topic {} with {} partitions and config {}.",
                    internalTopicConfig.name(),
                    internalTopicConfig.numberOfPartitions(),
                    topicConfig);

                newTopics.add(
                    new NewTopic(
                        internalTopicConfig.name(),
                        internalTopicConfig.numberOfPartitions(),
                        Optional.of(replicationFactor))
                        .configs(topicConfig));
            }

            // it's possible that although some topics are not ready yet because they
            // are temporarily not available, not that they do not exist; in this case
            // the new topics to create may be empty and hence we can skip here
            if (!newTopics.isEmpty()) {
                final CreateTopicsResult createTopicsResult = adminClient.createTopics(newTopics);

                for (final Map.Entry<String, KafkaFuture<Void>> createTopicResult : createTopicsResult.values().entrySet()) {
                    final String topicName = createTopicResult.getKey();
                    try {
                        createTopicResult.getValue().get();
                        // Successfully created: drop from the not-ready set for this round.
                        topicsNotReady.remove(topicName);
                    } catch (final InterruptedException fatalException) {
                        // this should not happen; if it ever happens it indicate a bug
                        Thread.currentThread().interrupt();
                        log.error(INTERRUPTED_ERROR_MESSAGE, fatalException);
                        throw new IllegalStateException(INTERRUPTED_ERROR_MESSAGE, fatalException);
                    } catch (final ExecutionException executionException) {
                        final Throwable cause = executionException.getCause();
                        if (cause instanceof TopicExistsException) {
                            // This topic didn't exist earlier or its leader not known before; just retain it for next round of validation.
                            log.info(
                                "Could not create topic {}. Topic is probably marked for deletion (number of partitions is unknown).\n"
                                    + "Will retry to create this topic in {} ms (to let broker finish async delete operation first).\n"
                                    + "Error message was: {}", topicName, retryBackOffMs, cause.toString());
                        } else {
                            log.error("Unexpected error during topic creation for {}.\n"
                                + "Error message was: {}", topicName, cause.toString());

                            if (cause instanceof UnsupportedVersionException) {
                                // Brokers older than 2.4 reject default partitions/replication factor.
                                final String errorMessage = cause.getMessage();
                                if (errorMessage != null
                                    && errorMessage.startsWith("Creating topics with default partitions/replication factor are only supported in CreateTopicRequest version 4+")) {
                                    throw new StreamsException(String.format(
                                        "Could not create topic %s, because brokers don't support configuration replication.factor=-1."
                                            + " You can change the replication.factor config or upgrade your brokers to version 2.4 or newer to avoid this error.", topicName)
                                    );
                                }
                            } else if (cause instanceof TimeoutException) {
                                // Transient: the topic stays in topicsNotReady and is retried.
                                log.error("Creating topic {} timed out.\n"
                                    + "Error message was: {}", topicName, cause.toString());
                            } else {
                                throw new StreamsException(
                                    String.format("Could not create topic %s.", topicName),
                                    cause
                                );
                            }
                        }
                    }
                }
            }
        }

        if (!topicsNotReady.isEmpty()) {
            currentWallClockMs = time.milliseconds();

            if (currentWallClockMs >= deadlineMs) {
                final String timeoutError = String.format("Could not create topics within %d milliseconds. "
                    + "This can happen if the Kafka cluster is temporarily not available.", retryTimeoutMs);
                log.error(timeoutError);
                throw new TimeoutException(timeoutError);
            }
            log.info(
                "Topics {} could not be made ready. Will retry in {} milliseconds. Remaining time in milliseconds: {}",
                topicsNotReady,
                retryBackOffMs,
                deadlineMs - currentWallClockMs
            );
            Utils.sleep(retryBackOffMs);
        }
    }
    log.debug("Completed validating internal topics and created {}", newlyCreatedTopics);

    return newlyCreatedTopics;
}
/**
 * Verifies that makeReady() retries topic validation: the first describeTopics call
 * succeeds for topic1 but fails for topic2 with UnknownTopicOrPartitionException, the
 * follow-up create of topic2 reports TopicExistsException, and the retry round's
 * describe of topic2 then succeeds, so makeReady() completes without throwing.
 */
@Test
public void shouldCompleteTopicValidationOnRetry() {
    final AdminClient admin = mock(AdminClient.class);
    final InternalTopicManager topicManager = new InternalTopicManager(
        time,
        admin,
        new StreamsConfig(config)
    );
    // Single-partition topic fully hosted on broker1 (leader, replica, and ISR).
    final TopicPartitionInfo partitionInfo = new TopicPartitionInfo(0, broker1, Collections.singletonList(broker1), Collections.singletonList(broker1));

    final KafkaFutureImpl<TopicDescription> topicDescriptionSuccessFuture = new KafkaFutureImpl<>();
    final KafkaFutureImpl<TopicDescription> topicDescriptionFailFuture = new KafkaFutureImpl<>();
    topicDescriptionSuccessFuture.complete(
        new TopicDescription(topic1, false, Collections.singletonList(partitionInfo), Collections.emptySet())
    );
    topicDescriptionFailFuture.completeExceptionally(new UnknownTopicOrPartitionException("KABOOM!"));

    final KafkaFutureImpl<CreateTopicsResult.TopicMetadataAndConfig> topicCreationFuture = new KafkaFutureImpl<>();
    topicCreationFuture.completeExceptionally(new TopicExistsException("KABOOM!"));

    // let the first describe succeed on topic, and fail on topic2, and then let creation throws topics-existed;
    // it should retry with just topic2 and then let it succeed
    when(admin.describeTopics(mkSet(topic1, topic2)))
        .thenAnswer(answer -> new MockDescribeTopicsResult(mkMap(
            mkEntry(topic1, topicDescriptionSuccessFuture),
            mkEntry(topic2, topicDescriptionFailFuture)
        )));
    when(admin.createTopics(Collections.singleton(new NewTopic(topic2, Optional.of(1), Optional.of((short) 1))
        .configs(mkMap(mkEntry(TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT),
            mkEntry(TopicConfig.MESSAGE_TIMESTAMP_TYPE_CONFIG, "CreateTime"))))))
        .thenAnswer(answer -> new MockCreateTopicsResult(Collections.singletonMap(topic2, topicCreationFuture)));
    when(admin.describeTopics(Collections.singleton(topic2)))
        .thenAnswer(answer -> new MockDescribeTopicsResult(Collections.singletonMap(topic2, topicDescriptionSuccessFuture)));

    final InternalTopicConfig topicConfig = new UnwindowedUnversionedChangelogTopicConfig(topic1, Collections.emptyMap());
    topicConfig.setNumberOfPartitions(1);
    final InternalTopicConfig topic2Config = new UnwindowedUnversionedChangelogTopicConfig(topic2, Collections.emptyMap());
    topic2Config.setNumberOfPartitions(1);
    // Must not throw: the retry loop eventually sees both topics as ready.
    topicManager.makeReady(mkMap(
        mkEntry(topic1, topicConfig),
        mkEntry(topic2, topic2Config)
    ));
}
@Override public String getResourceInputNodeType() { return DictionaryConst.NODE_TYPE_FILE_FIELD; }
@Test public void testGetResourceInputNodeType() throws Exception { assertEquals( DictionaryConst.NODE_TYPE_FILE_FIELD, analyzer.getResourceInputNodeType() ); }
String upload(File report) { LOG.debug("Upload report"); long startTime = System.currentTimeMillis(); Part filePart = new Part(MediaTypes.ZIP, report); PostRequest post = new PostRequest("api/ce/submit") .setMediaType(MediaTypes.PROTOBUF) .setParam("projectKey", moduleHierarchy.root().key()) .setParam("projectName", moduleHierarchy.root().getOriginalName()) .setPart("report", filePart); ciConfiguration.getDevOpsPlatformInfo().ifPresent(devOpsPlatformInfo -> { post.setParam(CHARACTERISTIC, buildCharacteristicParam(DEVOPS_PLATFORM_URL ,devOpsPlatformInfo.getUrl())); post.setParam(CHARACTERISTIC, buildCharacteristicParam(DEVOPS_PLATFORM_PROJECT_IDENTIFIER, devOpsPlatformInfo.getProjectIdentifier())); }); String branchName = branchConfiguration.branchName(); if (branchName != null) { if (branchConfiguration.branchType() != PULL_REQUEST) { post.setParam(CHARACTERISTIC, buildCharacteristicParam(CeTaskCharacteristics.BRANCH, branchName)); post.setParam(CHARACTERISTIC, buildCharacteristicParam(BRANCH_TYPE, branchConfiguration.branchType().name())); } else { post.setParam(CHARACTERISTIC, buildCharacteristicParam(CeTaskCharacteristics.PULL_REQUEST, branchConfiguration.pullRequestKey())); } } WsResponse response; try { post.setWriteTimeOutInMs(properties.reportPublishTimeout() * 1000); response = wsClient.call(post); } catch (Exception e) { throw new IllegalStateException("Failed to upload report: " + e.getMessage(), e); } try { response.failIfNotSuccessful(); } catch (HttpException e) { throw MessageException.of(String.format("Server failed to process report. Please check server logs: %s", DefaultScannerWsClient.createErrorMessage(e))); } try (InputStream protobuf = response.contentStream()) { return Ce.SubmitResponse.parser().parseFrom(protobuf).getTaskId(); } catch (Exception e) { throw new RuntimeException(e); } finally { long stopTime = System.currentTimeMillis(); LOG.info("Analysis report uploaded in " + (stopTime - startTime) + "ms"); } }
@Test public void upload_whenDevOpsPlatformInformationPresentInCiConfiguration_shouldUploadDevOpsPlatformInfoAsCharacteristic() throws Exception { String branchName = "feature"; String pullRequestId = "pr-123"; DevOpsPlatformInfo devOpsPlatformInfo = new DevOpsPlatformInfo("https://devops.example.com", "projectId"); when(branchConfiguration.branchName()).thenReturn(branchName); when(branchConfiguration.branchType()).thenReturn(PULL_REQUEST); when(branchConfiguration.pullRequestKey()).thenReturn(pullRequestId); when(ciConfiguration.getDevOpsPlatformInfo()).thenReturn(Optional.of(devOpsPlatformInfo)); WsResponse response = mock(WsResponse.class); PipedOutputStream out = new PipedOutputStream(); PipedInputStream in = new PipedInputStream(out); Ce.SubmitResponse.newBuilder().build().writeTo(out); out.close(); when(response.failIfNotSuccessful()).thenReturn(response); when(response.contentStream()).thenReturn(in); when(wsClient.call(any(WsRequest.class))).thenReturn(response); underTest.upload(reportTempFolder.newFile()); ArgumentCaptor<WsRequest> capture = ArgumentCaptor.forClass(WsRequest.class); verify(wsClient).call(capture.capture()); WsRequest wsRequest = capture.getValue(); assertThat(wsRequest.getParameters().getValues("characteristic")) .contains( "devOpsPlatformUrl=" + devOpsPlatformInfo.getUrl(), "devOpsPlatformProjectIdentifier=" + devOpsPlatformInfo.getProjectIdentifier()); }
public LoggerContext configure() { LoggerContext ctx = helper.getRootContext(); ctx.reset(); helper.enableJulChangePropagation(ctx); configureConsole(ctx); configureWithLogbackWritingToFile(ctx); helper.apply( LogLevelConfig.newBuilder(helper.getRootLoggerName()) .rootLevelFor(ProcessId.APP) .immutableLevel("com.hazelcast", Level.toLevel("WARN")) .build(), appSettings.getProps()); return ctx; }
@Test public void configure_no_rotation_on_sonar_file() { settings.getProps().set("sonar.log.rollingPolicy", "none"); LoggerContext ctx = underTest.configure(); Logger rootLogger = ctx.getLogger(ROOT_LOGGER_NAME); Appender<ILoggingEvent> appender = rootLogger.getAppender("file_sonar"); assertThat(appender) .isNotInstanceOf(RollingFileAppender.class) .isInstanceOf(FileAppender.class); }
public FontMetrics parse() throws IOException { return parseFontMetric(false); }
@Test void testHelveticaKernPairsReducedDataset() throws IOException { AFMParser parser = new AFMParser( new FileInputStream("src/test/resources/afm/Helvetica.afm")); FontMetrics fontMetrics = parser.parse(true); // KernPairs, empty due to reducedDataset == true assertTrue(fontMetrics.getKernPairs().isEmpty()); // KernPairs0 assertTrue(fontMetrics.getKernPairs0().isEmpty()); // KernPairs1 assertTrue(fontMetrics.getKernPairs1().isEmpty()); // composite data assertTrue(fontMetrics.getComposites().isEmpty()); }
@Override public ConfigData get(String path) { return get(path, Files::isRegularFile); }
@Test public void testNoSubdirs() { // Only regular files directly in the path directory are allowed, not in subdirs Set<String> keys = toSet(asList(subdir, String.join(File.separator, subdir, subdirFileName))); ConfigData configData = provider.get(dir, keys); assertTrue(configData.data().isEmpty()); assertNull(configData.ttl()); }
public static <T, S> T convert(S source, Class<T> clazz) { return Optional.ofNullable(source) .map(each -> BEAN_MAPPER_BUILDER.map(each, clazz)) .orElse(null); }
@Test public void SetToSetConvertTest() { final Set<Person> sets = new HashSet<>(); sets.add(Person.builder().name("one").age(1).build()); sets.add(Person.builder().name("two").age(2).build()); sets.add(Person.builder().name("three").age(3).build()); final Set<PersonVo> persons = BeanUtil.convert(sets, PersonVo.class); Assert.assertEquals(sets.size(), persons.size()); }
public Collection<ConstraintMetaData> revise(final String tableName, final Collection<ConstraintMetaData> originalMetaDataList) { Optional<? extends ConstraintReviser<T>> reviser = reviseEntry.getConstraintReviser(rule, tableName); return reviser.isPresent() ? originalMetaDataList.stream() .map(each -> reviser.get().revise(tableName, each, rule)).filter(Optional::isPresent).map(Optional::get).collect(Collectors.toCollection(LinkedHashSet::new)) : originalMetaDataList; }
@SuppressWarnings("unchecked") @Test void assertReviseWithReturnsRevisedConstraints() { String tableName = "tableName"; ConstraintReviser<ShardingSphereRule> reviser = mock(ConstraintReviser.class); doReturn(Optional.of(reviser)).when(mockMetaDataReviseEntry).getConstraintReviser(mockRule, tableName); ConstraintMetaData constraint1 = new ConstraintMetaData("constraint1", tableName); ConstraintMetaData constraint2 = new ConstraintMetaData("constraint2", tableName); ConstraintMetaData constraint3 = new ConstraintMetaData("constraint3", tableName); when(reviser.revise(tableName, constraint1, mockRule)).thenReturn(Optional.of(constraint3)); when(reviser.revise(tableName, constraint2, mockRule)).thenReturn(Optional.empty()); assertThat(engine.revise(tableName, Arrays.asList(constraint1, constraint2)), is(Collections.singleton(constraint3))); }
@Override public boolean handleResult(int returncode, GoPublisher goPublisher) { if (returncode == HttpURLConnection.HTTP_NOT_FOUND) { deleteQuietly(checksumFile); goPublisher.taggedConsumeLineWithPrefix(GoPublisher.ERR, "[WARN] The md5checksum property file was not found on the server. Hence, Go can not verify the integrity of the artifacts."); return true; } if (returncode == HttpURLConnection.HTTP_NOT_MODIFIED) { LOG.info("[Agent Fetch Artifact] Not downloading checksum file as it has not changed"); return true; } if (returncode == HttpURLConnection.HTTP_OK) { LOG.info("[Agent Fetch Artifact] Saved checksum property file [{}]", checksumFile); return true; } return returncode < HttpURLConnection.HTTP_BAD_REQUEST; }
@Test public void shouldHandleResultIfHttpCodeIsSuccessful() { StubGoPublisher goPublisher = new StubGoPublisher(); assertThat(checksumFileHandler.handleResult(HttpServletResponse.SC_OK, goPublisher), is(true)); }
static Optional<RawMetric> parse(String s) { Matcher matcher = PATTERN.matcher(s); if (matcher.matches()) { String value = matcher.group("value"); String metricName = matcher.group("metricName"); if (metricName == null || !NumberUtils.isCreatable(value)) { return Optional.empty(); } var labels = Arrays.stream(matcher.group("properties").split(",")) .filter(str -> !"".equals(str)) .map(str -> str.split("=")) .filter(spit -> spit.length == 2) .collect(Collectors.toUnmodifiableMap( str -> str[0].trim(), str -> str[1].trim().replace("\"", ""))); return Optional.of(RawMetric.create(metricName, labels, new BigDecimal(value))); } return Optional.empty(); }
@Test void test() { String metricsString = "kafka_server_BrokerTopicMetrics_FifteenMinuteRate" + "{name=\"BytesOutPerSec\",topic=\"__confluent.support.metrics\",} 123.1234"; Optional<RawMetric> parsedOpt = PrometheusEndpointMetricsParser.parse(metricsString); assertThat(parsedOpt).hasValueSatisfying(metric -> { assertThat(metric.name()).isEqualTo("kafka_server_BrokerTopicMetrics_FifteenMinuteRate"); assertThat(metric.value()).isEqualTo("123.1234"); assertThat(metric.labels()).containsExactlyEntriesOf( Map.of( "name", "BytesOutPerSec", "topic", "__confluent.support.metrics" )); }); }
public static String extName(File file) { return FileNameUtil.extName(file); }
@Test public void extNameTest() { String path = FileUtil.isWindows() ? "d:\\aaa\\bbb\\cc\\ddd\\" : "~/Desktop/hutool/ddd/"; String mainName = FileUtil.extName(path); assertEquals("", mainName); path = FileUtil.isWindows() ? "d:\\aaa\\bbb\\cc\\ddd" : "~/Desktop/hutool/ddd"; mainName = FileUtil.extName(path); assertEquals("", mainName); path = FileUtil.isWindows() ? "d:\\aaa\\bbb\\cc\\ddd.jpg" : "~/Desktop/hutool/ddd.jpg"; mainName = FileUtil.extName(path); assertEquals("jpg", mainName); path = FileUtil.isWindows() ? "d:\\aaa\\bbb\\cc\\fff.xlsx" : "~/Desktop/hutool/fff.xlsx"; mainName = FileUtil.extName(path); assertEquals("xlsx", mainName); path = FileUtil.isWindows() ? "d:\\aaa\\bbb\\cc\\fff.tar.gz" : "~/Desktop/hutool/fff.tar.gz"; mainName = FileUtil.extName(path); assertEquals("tar.gz", mainName); path = FileUtil.isWindows() ? "d:\\aaa\\bbb\\cc\\fff.tar.Z" : "~/Desktop/hutool/fff.tar.Z"; mainName = FileUtil.extName(path); assertEquals("tar.Z", mainName); path = FileUtil.isWindows() ? "d:\\aaa\\bbb\\cc\\fff.tar.bz2" : "~/Desktop/hutool/fff.tar.bz2"; mainName = FileUtil.extName(path); assertEquals("tar.bz2", mainName); path = FileUtil.isWindows() ? "d:\\aaa\\bbb\\cc\\fff.tar.xz" : "~/Desktop/hutool/fff.tar.xz"; mainName = FileUtil.extName(path); assertEquals("tar.xz", mainName); }
@Override public int hashCode() { return Objects.hash(issueKey, issueURL); }
@Test public void issueEqualityAndHashCode() { BlueJiraIssue issue1 = new BlueJiraIssue("TEST-123", "http://jira.example.com/browse/TEST-123"); BlueJiraIssue issue2 = new BlueJiraIssue("TEST-124", "http://jira.example.com/browse/TEST-124"); Assert.assertEquals(issue1, issue1); Assert.assertNotEquals(issue1, issue2); Assert.assertNotEquals(issue1, new Object()); Assert.assertNotEquals(issue1.hashCode(), issue2.hashCode()); }
@Override public String environmentSubstitute( String aString ) { if ( aString == null || aString.length() == 0 ) { return aString; } return StringUtil.environmentSubstitute( aString, properties ); }
@Test public void testEnvironmentSubstitute() { Variables vars = new Variables(); vars.setVariable( "VarOne", "DataOne" ); vars.setVariable( "VarTwo", "DataTwo" ); assertNull( vars.environmentSubstitute( (String) null ) ); assertEquals( "", vars.environmentSubstitute( "" ) ); assertEquals( "DataTwo", vars.environmentSubstitute( "${VarTwo}" ) ); assertEquals( "DataTwoEnd", vars.environmentSubstitute( "${VarTwo}End" ) ); assertEquals( 0, vars.environmentSubstitute( new String[0] ).length ); assertArrayEquals( new String[]{ "DataOne", "TheDataOne" }, vars.environmentSubstitute( new String[]{ "${VarOne}", "The${VarOne}" } ) ); }
/**
 * {@inheritDoc}
 *
 * <p>Maps an InterSystems IRIS column type definition onto a SeaTunnel
 * {@link Column}. Numeric/decimal types get precision and scale normalized
 * (defaulting when precision is absent), char/binary types get a minimum
 * length of 1, and LOB-like types are capped at {@code Integer.MAX_VALUE}.
 *
 * @param typeDefine the IRIS column definition to convert
 * @return the converted physical column
 * @throws RuntimeException (via {@link CommonError#convertToSeaTunnelTypeError})
 *         when the IRIS data type is not recognized
 */
@Override
public Column convert(BasicTypeDefine typeDefine) {
    Long typeDefineLength = typeDefine.getLength();
    PhysicalColumn.PhysicalColumnBuilder builder =
        PhysicalColumn.builder()
            .name(typeDefine.getName())
            .sourceType(typeDefine.getColumnType())
            .columnLength(typeDefineLength)
            .scale(typeDefine.getScale())
            .nullable(typeDefine.isNullable())
            .defaultValue(typeDefine.getDefaultValue())
            .comment(typeDefine.getComment());
    String irisDataType = typeDefine.getDataType().toUpperCase();
    // CHAR/BINARY lengths of null or <= 0 are normalized to 1.
    long charOrBinaryLength =
        Objects.nonNull(typeDefineLength) && typeDefineLength > 0 ? typeDefineLength : 1;
    switch (irisDataType) {
        case IRIS_NULL:
            builder.dataType(BasicType.VOID_TYPE);
            break;
        case IRIS_BIT:
            builder.dataType(BasicType.BOOLEAN_TYPE);
            break;
        case IRIS_NUMERIC:
        case IRIS_MONEY:
        case IRIS_SMALLMONEY:
        case IRIS_NUMBER:
        case IRIS_DEC:
        case IRIS_DECIMAL:
            // Fall back to the converter defaults when precision is missing/non-positive.
            DecimalType decimalType;
            if (typeDefine.getPrecision() != null && typeDefine.getPrecision() > 0) {
                decimalType =
                    new DecimalType(
                        typeDefine.getPrecision().intValue(), typeDefine.getScale());
            } else {
                decimalType = new DecimalType(DEFAULT_PRECISION, DEFAULT_SCALE);
            }
            builder.dataType(decimalType);
            builder.columnLength(Long.valueOf(decimalType.getPrecision()));
            builder.scale(decimalType.getScale());
            break;
        case IRIS_INT:
        case IRIS_INTEGER:
        case IRIS_MEDIUMINT:
            builder.dataType(BasicType.INT_TYPE);
            break;
        case IRIS_ROWVERSION:
        case IRIS_BIGINT:
        case IRIS_SERIAL:
            builder.dataType(BasicType.LONG_TYPE);
            break;
        case IRIS_TINYINT:
            builder.dataType(BasicType.BYTE_TYPE);
            break;
        case IRIS_SMALLINT:
            builder.dataType(BasicType.SHORT_TYPE);
            break;
        case IRIS_FLOAT:
            builder.dataType(BasicType.FLOAT_TYPE);
            break;
        case IRIS_DOUBLE:
        case IRIS_REAL:
        case IRIS_DOUBLE_PRECISION:
            builder.dataType(BasicType.DOUBLE_TYPE);
            break;
        // Fixed/variable character types: length bounded by the declared length (min 1).
        case IRIS_CHAR:
        case IRIS_CHAR_VARYING:
        case IRIS_CHARACTER_VARYING:
        case IRIS_NATIONAL_CHAR:
        case IRIS_NATIONAL_CHAR_VARYING:
        case IRIS_NATIONAL_CHARACTER:
        case IRIS_NATIONAL_CHARACTER_VARYING:
        case IRIS_NATIONAL_VARCHAR:
        case IRIS_NCHAR:
        case IRIS_SYSNAME:
        case IRIS_VARCHAR2:
        case IRIS_VARCHAR:
        case IRIS_NVARCHAR:
        case IRIS_UNIQUEIDENTIFIER:
        case IRIS_GUID:
        case IRIS_CHARACTER:
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(charOrBinaryLength);
            break;
        // Text LOB types: unbounded on the IRIS side, capped at Integer.MAX_VALUE here.
        case IRIS_NTEXT:
        case IRIS_CLOB:
        case IRIS_LONG_VARCHAR:
        case IRIS_LONG:
        case IRIS_LONGTEXT:
        case IRIS_MEDIUMTEXT:
        case IRIS_TEXT:
        case IRIS_LONGVARCHAR:
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(Long.valueOf(Integer.MAX_VALUE));
            break;
        case IRIS_DATE:
            builder.dataType(LocalTimeType.LOCAL_DATE_TYPE);
            break;
        case IRIS_TIME:
            builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
            break;
        case IRIS_DATETIME:
        case IRIS_DATETIME2:
        case IRIS_SMALLDATETIME:
        case IRIS_TIMESTAMP:
        case IRIS_TIMESTAMP2:
        case IRIS_POSIXTIME:
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            break;
        // Bounded binary types: same length normalization as character types.
        case IRIS_BINARY:
        case IRIS_BINARY_VARYING:
        case IRIS_RAW:
        case IRIS_VARBINARY:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(charOrBinaryLength);
            break;
        // Binary LOB types: capped at Integer.MAX_VALUE.
        case IRIS_LONGVARBINARY:
        case IRIS_BLOB:
        case IRIS_IMAGE:
        case IRIS_LONG_BINARY:
        case IRIS_LONG_RAW:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(Long.valueOf(Integer.MAX_VALUE));
            break;
        default:
            throw CommonError.convertToSeaTunnelTypeError(
                DatabaseIdentifier.IRIS, irisDataType, typeDefine.getName());
    }
    return builder.build();
}
@Test public void testConvertChar() { BasicTypeDefine<Object> typeDefine = BasicTypeDefine.builder().name("test").columnType("char").dataType("char").build(); Column column = IrisTypeConverter.INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType()); Assertions.assertEquals(1, column.getColumnLength()); Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType()); typeDefine = BasicTypeDefine.builder() .name("test") .columnType("char(10)") .dataType("char") .length(10L) .build(); column = IrisTypeConverter.INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType()); Assertions.assertEquals(10, column.getColumnLength()); Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType()); }
@Override public void transform(Message message, DataType fromType, DataType toType) { final Optional<ValueRange> valueRange = getValueRangeBody(message); String range = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "range", "A:A").toString(); String majorDimension = message .getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "majorDimension", RangeCoordinate.DIMENSION_ROWS).toString(); String spreadsheetId = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "spreadsheetId", "").toString(); String[] columnNames = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "columnNames", "A").toString().split(","); boolean splitResults = Boolean .parseBoolean(message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "splitResults", "false").toString()); if (valueRange.isPresent()) { message.setBody( transformFromValueRangeModel(message, valueRange.get(), spreadsheetId, range, majorDimension, columnNames)); } else if (splitResults) { message.setBody(transformFromSplitValuesModel(message, spreadsheetId, range, majorDimension, columnNames)); } else { String valueInputOption = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "valueInputOption", "USER_ENTERED").toString(); message.setBody( transformToValueRangeModel(message, spreadsheetId, range, majorDimension, valueInputOption, columnNames)); } }
@Test public void testTransformToEmptyValueRange() throws Exception { Exchange inbound = new DefaultExchange(camelContext); inbound.getMessage().setHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "spreadsheetId", spreadsheetId); inbound.getMessage().setHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "range", "A1"); inbound.getMessage().setHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "valueInputOption", "RAW"); transformer.transform(inbound.getMessage(), DataType.ANY, DataType.ANY); Assertions.assertEquals(spreadsheetId, inbound.getMessage().getHeader(GoogleSheetsStreamConstants.SPREADSHEET_ID)); Assertions.assertEquals("A1", inbound.getMessage().getHeader(GoogleSheetsStreamConstants.RANGE)); Assertions.assertEquals(RangeCoordinate.DIMENSION_ROWS, inbound.getMessage().getHeader(GoogleSheetsStreamConstants.MAJOR_DIMENSION)); Assertions.assertEquals("RAW", inbound.getMessage().getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "valueInputOption")); ValueRange valueRange = (ValueRange) inbound.getMessage().getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "values"); Assertions.assertEquals(0L, valueRange.getValues().size()); }
@SuppressWarnings( "fallthrough" ) public static Object truncDate( Context actualContext, Scriptable actualObject, Object[] ArgList, Function FunctionContext ) { // 2 arguments: truncation of dates to a certain precision // if ( ArgList.length == 2 ) { if ( isNull( ArgList[0] ) ) { return null; } else if ( isUndefined( ArgList[0] ) ) { return Context.getUndefinedValue(); } // This is the truncation of a date... // The second argument specifies the level: ms, s, min, hour, day, month, year // Date dArg1 = null; Integer level = null; try { dArg1 = (java.util.Date) Context.jsToJava( ArgList[0], java.util.Date.class ); level = (Integer) Context.jsToJava( ArgList[1], Integer.class ); } catch ( Exception e ) { throw Context.reportRuntimeError( e.toString() ); } return truncDate( dArg1, level ); } else { throw Context .reportRuntimeError( "The function call truncDate requires 2 arguments: a date and a level (int)" ); } }
@Test public void testTruncDate() { Date dateBase = new Date( 118, Calendar.FEBRUARY, 15, 11, 11, 11 ); // 2018-02-15 11:11:11 Calendar c = Calendar.getInstance(); c.set( 2011, Calendar.NOVEMBER, 11, 11, 11, 11 ); // 2011-11-11 11:11:11 c.set( Calendar.MILLISECOND, 11 ); Date rtn = null; Calendar c2 = Calendar.getInstance(); rtn = ScriptValuesAddedFunctions.truncDate( dateBase, 5 ); c2.setTime( rtn ); Assert.assertEquals( Calendar.JANUARY, c2.get( Calendar.MONTH ) ); rtn = ScriptValuesAddedFunctions.truncDate( dateBase, 4 ); c2.setTime( rtn ); Assert.assertEquals( 1, c2.get( Calendar.DAY_OF_MONTH ) ); rtn = ScriptValuesAddedFunctions.truncDate( dateBase, 3 ); c2.setTime( rtn ); Assert.assertEquals( 0, c2.get( Calendar.HOUR_OF_DAY ) ); rtn = ScriptValuesAddedFunctions.truncDate( dateBase, 2 ); c2.setTime( rtn ); Assert.assertEquals( 0, c2.get( Calendar.MINUTE ) ); rtn = ScriptValuesAddedFunctions.truncDate( dateBase, 1 ); c2.setTime( rtn ); Assert.assertEquals( 0, c2.get( Calendar.SECOND ) ); rtn = ScriptValuesAddedFunctions.truncDate( dateBase, 0 ); c2.setTime( rtn ); Assert.assertEquals( 0, c2.get( Calendar.MILLISECOND ) ); try { ScriptValuesAddedFunctions.truncDate( rtn, 6 ); // Should throw exception Assert.fail( "Expected exception - passed in level > 5 to truncDate" ); } catch ( Exception expected ) { // Should get here } try { ScriptValuesAddedFunctions.truncDate( rtn, -7 ); // Should throw exception Assert.fail( "Expected exception - passed in level < 0 to truncDate" ); } catch ( Exception expected ) { // Should get here } }
public Path getLogPath() { return logPath; }
@Test public void testLogPath() { final DistCpOptions.Builder builder = new DistCpOptions.Builder( Collections.singletonList(new Path("hdfs://localhost:8020/source")), new Path("hdfs://localhost:8020/target/")); Assert.assertNull(builder.build().getLogPath()); final Path logPath = new Path("hdfs://localhost:8020/logs"); builder.withLogPath(logPath); Assert.assertEquals(logPath, builder.build().getLogPath()); }
public static void scale(File srcImageFile, File destImageFile, float scale) { BufferedImage image = null; try { image = read(srcImageFile); scale(image, destImageFile, scale); } finally { flush(image); } }
@Test @Disabled public void scaleTest2() { ImgUtil.scale( FileUtil.file("d:/test/2.png"), FileUtil.file("d:/test/2_result.jpg"), 600, 337, null); }
public static PostgreSQLCommandPacket newInstance(final PostgreSQLCommandPacketType commandPacketType, final PostgreSQLPacketPayload payload) { if (!PostgreSQLCommandPacketType.isExtendedProtocolPacketType(commandPacketType)) { payload.getByteBuf().skipBytes(1); return getPostgreSQLCommandPacket(commandPacketType, payload); } List<PostgreSQLCommandPacket> result = new ArrayList<>(); while (payload.hasCompletePacket()) { PostgreSQLCommandPacketType type = PostgreSQLCommandPacketType.valueOf(payload.readInt1()); int length = payload.getByteBuf().getInt(payload.getByteBuf().readerIndex()); PostgreSQLPacketPayload slicedPayload = new PostgreSQLPacketPayload(payload.getByteBuf().readSlice(length), payload.getCharset()); result.add(getPostgreSQLCommandPacket(type, slicedPayload)); } return new PostgreSQLAggregatedCommandPacket(result); }
@Test void assertNewInstanceWithDescribeComPacket() { assertThat(PostgreSQLCommandPacketFactory.newInstance(PostgreSQLCommandPacketType.DESCRIBE_COMMAND, payload), instanceOf(PostgreSQLAggregatedCommandPacket.class)); }
public static Catalog loadIcebergCatalog(SparkSession spark, String catalogName) { CatalogPlugin catalogPlugin = spark.sessionState().catalogManager().catalog(catalogName); Preconditions.checkArgument( catalogPlugin instanceof HasIcebergCatalog, String.format( "Cannot load Iceberg catalog from catalog %s because it does not contain an Iceberg Catalog. " + "Actual Class: %s", catalogName, catalogPlugin.getClass().getName())); return ((HasIcebergCatalog) catalogPlugin).icebergCatalog(); }
@Test public void testLoadIcebergCatalog() throws Exception { spark.conf().set("spark.sql.catalog.test_cat", SparkCatalog.class.getName()); spark.conf().set("spark.sql.catalog.test_cat.type", "hive"); Catalog catalog = Spark3Util.loadIcebergCatalog(spark, "test_cat"); Assert.assertTrue( "Should retrieve underlying catalog class", catalog instanceof CachingCatalog); }
@Override public void doFilter(HttpRequest request, HttpResponse response, FilterChain chain) throws IOException { if (userSession.hasSession() && userSession.isLoggedIn() && userSession.shouldResetPassword()) { redirectTo(response, request.getContextPath() + RESET_PASSWORD_PATH); } chain.doFilter(request, response); }
@Test public void do_not_redirect_if_no_session() throws Exception { when(session.hasSession()).thenReturn(false); underTest.doFilter(request, response, chain); verify(response, never()).sendRedirect(any()); }
/**
 * Greedily fuses transforms reachable from a gRPC-port-read PCollection into a
 * single {@link ExecutableStage}. Starting from the initial consumer transforms,
 * each downstream PCollection is either fused into the stage or marked as
 * materialized (a stage boundary), as decided by {@code canFuse}.
 *
 * @param pipeline         the pipeline being fused
 * @param inputPCollection the PCollection read over the gRPC port into this stage
 * @param initialNodes     the transforms that consume the input; must be non-empty
 * @return the fused executable stage
 * @throws IllegalArgumentException if {@code initialNodes} is empty
 */
public static ExecutableStage forGrpcPortRead(
    QueryablePipeline pipeline,
    PipelineNode.PCollectionNode inputPCollection,
    Set<PipelineNode.PTransformNode> initialNodes) {
    checkArgument(
        !initialNodes.isEmpty(),
        "%s must contain at least one %s.",
        GreedyStageFuser.class.getSimpleName(),
        PipelineNode.PTransformNode.class.getSimpleName());

    // Choose the environment from an arbitrary node. The initial nodes may not be empty for this
    // subgraph to make any sense, there has to be at least one processor node
    // (otherwise the stage is gRPC Read -> gRPC Write, which doesn't do anything).
    Environment environment = getStageEnvironment(pipeline, initialNodes);

    ImmutableSet.Builder<PipelineNode.PTransformNode> fusedTransforms = ImmutableSet.builder();
    fusedTransforms.addAll(initialNodes);

    Set<SideInputReference> sideInputs = new LinkedHashSet<>();
    Set<UserStateReference> userStates = new LinkedHashSet<>();
    Set<TimerReference> timers = new LinkedHashSet<>();
    Set<PipelineNode.PCollectionNode> fusedCollections = new LinkedHashSet<>();
    Set<PipelineNode.PCollectionNode> materializedPCollections = new LinkedHashSet<>();

    // Seed the worklist with every output of the initial consumers, and collect
    // their side inputs, user state, and timers up front.
    Queue<PipelineNode.PCollectionNode> fusionCandidates = new ArrayDeque<>();
    for (PipelineNode.PTransformNode initialConsumer : initialNodes) {
        fusionCandidates.addAll(pipeline.getOutputPCollections(initialConsumer));
        sideInputs.addAll(pipeline.getSideInputs(initialConsumer));
        userStates.addAll(pipeline.getUserStates(initialConsumer));
        timers.addAll(pipeline.getTimers(initialConsumer));
    }
    while (!fusionCandidates.isEmpty()) {
        PipelineNode.PCollectionNode candidate = fusionCandidates.poll();
        if (fusedCollections.contains(candidate) || materializedPCollections.contains(candidate)) {
            // This should generally mean we get to a Flatten via multiple paths through the graph and
            // we've already determined what to do with the output.
            LOG.debug(
                "Skipping fusion candidate {} because it is {} in this {}",
                candidate,
                fusedCollections.contains(candidate) ? "fused" : "materialized",
                ExecutableStage.class.getSimpleName());
            continue;
        }
        PCollectionFusibility fusibility = canFuse(pipeline, candidate, environment, fusedCollections);
        switch (fusibility) {
            case MATERIALIZE:
                materializedPCollections.add(candidate);
                break;
            case FUSE:
                // All of the consumers of the candidate PCollection can be fused into this stage. Do so.
                fusedCollections.add(candidate);
                fusedTransforms.addAll(pipeline.getPerElementConsumers(candidate));
                for (PipelineNode.PTransformNode consumer : pipeline.getPerElementConsumers(candidate)) {
                    // The outputs of every transform fused into this stage must be either materialized or
                    // themselves fused away, so add them to the set of candidates.
                    fusionCandidates.addAll(pipeline.getOutputPCollections(consumer));
                    sideInputs.addAll(pipeline.getSideInputs(consumer));
                }
                break;
            default:
                throw new IllegalStateException(
                    String.format(
                        "Unknown type of %s %s", PCollectionFusibility.class.getSimpleName(), fusibility));
        }
    }

    return ImmutableExecutableStage.ofFullComponents(
        pipeline.getComponents(),
        environment,
        inputPCollection,
        sideInputs,
        userStates,
        timers,
        fusedTransforms.build(),
        materializedPCollections,
        ExecutableStage.DEFAULT_WIRE_CODER_SETTINGS);
}
@Test public void noInitialConsumersThrows() { // (impulse.out) -> () is not a meaningful stage, so it should never be called QueryablePipeline p = QueryablePipeline.forPrimitivesIn(partialComponents); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("at least one PTransform"); GreedyStageFuser.forGrpcPortRead(p, impulseOutputNode, Collections.emptySet()); }
public PrepareResult prepare(HostValidator hostValidator, DeployLogger logger, PrepareParams params, Optional<ApplicationVersions> activeApplicationVersions, Instant now, File serverDbSessionDir, ApplicationPackage applicationPackage, SessionZooKeeperClient sessionZooKeeperClient) { ApplicationId applicationId = params.getApplicationId(); Preparation preparation = new Preparation(hostValidator, logger, params, activeApplicationVersions, TenantRepository.getTenantPath(applicationId.tenant()), serverDbSessionDir, applicationPackage, sessionZooKeeperClient, onnxModelCost, endpointCertificateSecretStores); preparation.preprocess(); try { AllocatedHosts allocatedHosts = preparation.buildModels(now); preparation.makeResult(allocatedHosts); if ( ! params.isDryRun()) { FileReference fileReference = preparation.triggerDistributionOfApplicationPackage(); preparation.writeStateZK(fileReference); preparation.writeEndpointCertificateMetadataZK(); preparation.writeContainerEndpointsZK(); } log.log(Level.FINE, () -> "time used " + params.getTimeoutBudget().timesUsed() + " : " + applicationId); return preparation.result(); } catch (IllegalArgumentException e) { if (e instanceof InvalidApplicationException) throw e; throw new InvalidApplicationException("Invalid application package", e); } }
@Test public void require_that_application_validation_exception_is_ignored_if_forced() throws IOException { prepare(invalidTestApp, new PrepareParams.Builder() .applicationId(applicationId()) .ignoreValidationErrors(true) .timeoutBudget(TimeoutBudgetTest.day()) .build(), 1); }
/**
 * Extracts the JMS message id of the inbound message.
 *
 * @param exchange the exchange carrying the inbound message
 * @return the {@code JMS_MESSAGE_ID} header value, or {@code null} when the
 *         header is not present
 */
@Override
protected String getMessageId(Exchange exchange) {
    // The id lives in a well-known header on the "in" message.
    final String messageId = exchange.getIn().getHeader(JMS_MESSAGE_ID, String.class);
    return messageId;
}
// The span decorator should surface the JMS message id header of the inbound message.
@Test
public void testGetMessageId() {
    String messageId = "abcd";

    Exchange exchange = Mockito.mock(Exchange.class);
    Message message = Mockito.mock(Message.class);
    Mockito.when(exchange.getIn()).thenReturn(message);
    Mockito.when(message.getHeader(JmsSpanDecorator.JMS_MESSAGE_ID, String.class)).thenReturn(messageId);

    AbstractMessagingSpanDecorator decorator = new JmsSpanDecorator();
    assertEquals(messageId, decorator.getMessageId(exchange));
}
/**
 * Describes the given consumer groups.
 *
 * @param groupIds ids of the groups to describe
 * @param options request options (authorized-operations flag, timeout)
 * @return a result whose per-group futures are keyed by group id string
 */
@Override
public DescribeConsumerGroupsResult describeConsumerGroups(final Collection<String> groupIds,
                                                           final DescribeConsumerGroupsOptions options) {
    // One pending future per requested group, keyed by its coordinator lookup key.
    final SimpleAdminApiFuture<CoordinatorKey, ConsumerGroupDescription> groupFutures =
        DescribeConsumerGroupsHandler.newFuture(groupIds);
    final DescribeConsumerGroupsHandler describeHandler =
        new DescribeConsumerGroupsHandler(options.includeAuthorizedOperations(), logContext);
    invokeDriver(describeHandler, groupFutures, options.timeoutMs);
    // Internally the futures are keyed by CoordinatorKey; re-key them by the
    // plain group id string before handing them back to the caller.
    return new DescribeConsumerGroupsResult(
        groupFutures.all().entrySet().stream()
            .collect(Collectors.toMap(e -> e.getKey().idValue, Map.Entry::getValue)));
}
// Describing a group whose protocol type is not "consumer" must fail the
// per-group future with IllegalArgumentException, after falling back from the
// new ConsumerGroupDescribe API to the classic DescribeGroups API.
@Test
public void testDescribeNonConsumerGroups() throws Exception {
    try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(mockCluster(1, 0))) {
        env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());

        env.kafkaClient().prepareResponse(prepareFindCoordinatorResponse(Errors.NONE, env.cluster().controller()));

        // The first request sent will be a ConsumerGroupDescribe request. Let's
        // fail it in order to fail back to using the classic version.
        env.kafkaClient().prepareUnsupportedVersionResponse(
            request -> request instanceof ConsumerGroupDescribeRequest);

        DescribeGroupsResponseData data = new DescribeGroupsResponseData();

        // "non-consumer" protocol type marks this group as not a consumer group.
        data.groups().add(DescribeGroupsResponse.groupMetadata(
            GROUP_ID,
            Errors.NONE,
            "",
            "non-consumer",
            "",
            emptyList(),
            Collections.emptySet()));

        env.kafkaClient().prepareResponse(new DescribeGroupsResponse(data));

        final DescribeConsumerGroupsResult result = env.adminClient().describeConsumerGroups(singletonList(GROUP_ID));

        TestUtils.assertFutureError(result.describedGroups().get(GROUP_ID), IllegalArgumentException.class);
    }
}
/**
 * Loads a CSP resource table from a properties file on the classpath.
 *
 * <p>Property names must have the form {@code <group>.<key>}; each value is a
 * space-separated list of sources. Malformed names are logged and skipped.
 *
 * @param path classpath location of the properties file
 * @return table keyed by (group, key) with the parsed source sets as values
 * @throws IOException if the resource is missing or cannot be read
 */
private Table<String, String, Set<String>> loadProperties(String path) throws IOException {
    Properties properties = new Properties();
    // try-with-resources: the original leaked the stream, and a missing
    // resource produced an NPE from load(null) instead of a clear error.
    try (InputStream inputStream = CSPResources.class.getResourceAsStream(path)) {
        if (inputStream == null) {
            throw new IOException("CSP resource not found on classpath: " + path);
        }
        properties.load(inputStream);
    }

    Table<String, String, Set<String>> resources = HashBasedTable.create();
    for (String propertyName : properties.stringPropertyNames()) {
        String[] substrings = propertyName.split("[.]");
        if (substrings.length != 2) {
            LOG.warn("Skipping malformed property {}: expecting format <group>.<key>", propertyName);
        } else {
            // Values are space-separated source lists, e.g. "'self' https://cdn".
            String[] valueArray = properties.getProperty(propertyName).split(" ");
            resources.put(substrings[0], substrings[1], new HashSet<>(Arrays.asList(valueArray)));
        }
    }
    return resources;
}
// Loaded properties should flatten into "directive src1 src2;..." CSP strings
// (directives appear in sorted order in the expected values) — check both the
// "default" and "swagger" groups.
@Test
void loadPropertiesTest() {
    assertThat(cspResources.cspString("default")).isEqualTo(
        "connect-src url1.com:9999 url2.com;default-src 'self';img-src https://url3.com:9999 https://url4.com:9999;script-src 'self' 'unsafe-eval';style-src 'self' 'unsafe-inline'");
    assertThat(cspResources.cspString("swagger")).isEqualTo(
        "connect-src url4.com;img-src https://url5.com:9999;script-src 'self' 'unsafe-eval' 'unsafe-inline';style-src 'self' 'unsafe-inline'");
}
/**
 * Applies WAF (web application firewall) filtering to the request.
 *
 * <p>Decision table: no selector/rule matched → pass in BLACK (blacklist) mode,
 * otherwise reject with 403; matched but handle misconfigured → fail open;
 * handle permission is REJECT → respond with the handle's configured status
 * code; anything else → continue the chain.
 */
@Override
protected Mono<Void> doExecute(final ServerWebExchange exchange, final ShenyuPluginChain chain,
                               final SelectorData selector, final RuleData rule) {
    WafConfig wafConfig = Singleton.INST.get(WafConfig.class);
    // Nothing matched this request: blacklist mode lets unmatched traffic
    // through; any other mode rejects it.
    if (Objects.isNull(selector) && Objects.isNull(rule)) {
        if (WafModelEnum.BLACK.getName().equals(wafConfig.getModel())) {
            return chain.execute(exchange);
        }
        exchange.getResponse().setStatusCode(HttpStatus.FORBIDDEN);
        Object error = ShenyuResultWrap.error(exchange, HttpStatus.FORBIDDEN.value(), Constants.REJECT_MSG, null);
        return WebFluxResultUtils.result(exchange, error);
    }
    WafHandle wafHandle = buildRuleHandle(rule);
    // Misconfigured handle: log and fail open rather than blocking traffic.
    if (Objects.isNull(wafHandle) || StringUtils.isBlank(wafHandle.getPermission())) {
        LOG.error("waf handler can not configuration:{}", wafHandle);
        return chain.execute(exchange);
    }
    if (WafEnum.REJECT.getName().equals(wafHandle.getPermission())) {
        exchange.getResponse().setStatusCode(HttpStatus.FORBIDDEN);
        // The response body carries the handle's configured status code,
        // while the HTTP status line is always 403.
        Object error = ShenyuResultWrap.error(exchange, Integer.parseInt(wafHandle.getStatusCode()), Constants.REJECT_MSG, null);
        return WebFluxResultUtils.result(exchange, error);
    }
    return chain.execute(exchange);
}
// With no usable WAF handle configured, the plugin should pass the request
// down the chain and the returned Mono should complete without error.
@Test
public void testWafPluginNotConfiguration() {
    Mono<Void> execute = wafPluginUnderTest.doExecute(exchange, chain, selectorData, ruleData);
    StepVerifier.create(execute).expectSubscription().verifyComplete();
}
/**
 * Registers a mapping from a file extension (without the leading dot, e.g.
 * {@code "docx"}) to the content type reported for files with that extension.
 *
 * @param fileExtension the file extension
 * @param contentType the content type to associate with the extension
 */
public void addFileExtensionMapping(String fileExtension, String contentType) {
    fileExtensionToContentType.put(fileExtension, contentType);
}
// A registered extension resolves to its mapped type; an unmapped extension
// falls back to the default "application/octet-stream".
@Test
void addFileExtensionMapping() {
    DefaultContentTypeResolver contentTypeResolver = new DefaultContentTypeResolver();
    contentTypeResolver.addFileExtensionMapping("docx",
            "application/vnd.openxmlformats-officedocument.wordprocessingml.document");
    assertThat(contentTypeResolver.resolveContentType("test.docx"))
            .isEqualTo("application/vnd.openxmlformats-officedocument.wordprocessingml.document");
    assertThat(contentTypeResolver.resolveContentType("test.doc"))
            .isEqualTo("application/octet-stream");
}
public static String sanitizeUri(String uri) { // use xxxxx as replacement as that works well with JMX also String sanitized = uri; if (uri != null) { sanitized = ALL_SECRETS.matcher(sanitized).replaceAll("$1=xxxxxx"); sanitized = USERINFO_PASSWORD.matcher(sanitized).replaceFirst("$1xxxxxx$3"); } return sanitized; }
// saslJaasConfig embeds credentials (username/password) and must be fully
// masked by sanitizeUri.
@Test
public void testSanitizeSaslJaasConfig() {
    String out1 = URISupport.sanitizeUri(
            "kafka://MY-TOPIC-NAME?saslJaasConfig=org.apache.kafka.common.security.plain.PlainLoginModule required username=scott password=tiger");
    assertEquals("kafka://MY-TOPIC-NAME?saslJaasConfig=xxxxxx", out1);
}
/**
 * Reads the system-wide CPU load from the operating system MXBean.
 *
 * @return the {@code SystemCpuLoad} MXBean attribute as a {@code Double}
 */
public Double getSystemCpuLoad() {
    final String attribute = "SystemCpuLoad";
    return getMXBeanValueAsDouble(attribute);
}
// When the MXBean reports NaN, the previously cached reading should be
// returned; a later real reading replaces the cached value.
@Test
void ifOperatingSystemMXBeanReturnsNaNForSystemCpuLoadOnLaterCalls_CachedValueIsReturned() throws JMException {
    // Consecutive attribute reads: real value, NaN, real value.
    when(mBeanServer.getAttribute(objectName, "SystemCpuLoad")).thenReturn(0.7, Double.NaN, 0.5);

    assertThat(jobServerStats.getSystemCpuLoad()).isEqualTo(0.7);
    assertThat(jobServerStats.getSystemCpuLoad()).isEqualTo(0.7);
    assertThat(jobServerStats.getSystemCpuLoad()).isEqualTo(0.5);
}
/**
 * Looks up the select-table range matching the given field values.
 *
 * <p>Lock-free with respect to concurrent index updates: the lookup is retried
 * whenever the volatile hash state is swapped out mid-probe.
 *
 * @param query one value per indexed match field; nulls are not supported
 * @return the matching result, or {@code null} when no entry matches
 * @throws IllegalStateException if the index was never initialized
 * @throws IllegalArgumentException if any query value is {@code null}
 */
public HollowHashIndexResult findMatches(Object... query) {
    if (hashStateVolatile == null) {
        throw new IllegalStateException(this + " wasn't initialized");
    }

    // Combine per-field hashes into a single bucket hash.
    int hashCode = 0;

    for(int i=0;i<query.length;i++) {
        if(query[i] == null)
            throw new IllegalArgumentException("querying by null unsupported; i=" + i);
        hashCode ^= HashCodes.hashInt(keyHashCode(query[i], i));
    }

    HollowHashIndexResult result;
    HollowHashIndexState hashState;
    do {
        result = null;
        hashState = hashStateVolatile;
        // Open-addressing probe: start at the hashed bucket and walk forward
        // until a matching entry or an empty bucket is found.
        long bucket = hashCode & hashState.getMatchHashMask();
        long hashBucketBit = bucket * hashState.getBitsPerMatchHashEntry();
        // A zero traverser field marks an empty bucket.
        boolean bucketIsEmpty = hashState.getMatchHashTable().getElementValue(hashBucketBit, hashState.getBitsPerTraverserField()[0]) == 0;
        while (!bucketIsEmpty) {
            if (matchIsEqual(hashState.getMatchHashTable(), hashBucketBit, query)) {
                // Entry layout: [match key][select size][select pointer].
                int selectSize = (int) hashState.getMatchHashTable().getElementValue(hashBucketBit + hashState.getBitsPerMatchHashKey(), hashState.getBitsPerSelectTableSize());
                long selectBucketPointer = hashState.getMatchHashTable().getElementValue(hashBucketBit + hashState.getBitsPerMatchHashKey() + hashState.getBitsPerSelectTableSize(), hashState.getBitsPerSelectTablePointer());
                result = new HollowHashIndexResult(hashState, selectBucketPointer, selectSize);
                break;
            }
            bucket = (bucket + 1) & hashState.getMatchHashMask();
            hashBucketBit = bucket * hashState.getBitsPerMatchHashEntry();
            bucketIsEmpty = hashState.getMatchHashTable().getElementValue(hashBucketBit, hashState.getBitsPerTraverserField()[0]) == 0;
        }
        // If a concurrent update swapped in a new hash state while probing,
        // discard the result and retry against the fresh state.
    } while (hashState != hashStateVolatile);

    return result;
}
// Null field values must not be indexed: querying a value that never occurs
// yields null, while a real value still matches its ordinal.
@Test
public void testIndexingLongTypeFieldWithNullValues() throws Exception {
    mapper.add(new TypeLong(null));
    mapper.add(new TypeLong(3L));

    roundTripSnapshot();
    HollowHashIndex index = new HollowHashIndex(readStateEngine, "TypeLong", "", "data.value");

    Assert.assertNull(index.findMatches(2L));
    assertIteratorContainsAll(index.findMatches(3L).iterator(), 1);
}
/**
 * Returns a description of this topology's structure.
 * Synchronized, presumably to stay consistent with other synchronized methods
 * that mutate the underlying builder — confirm against the class's other members.
 */
public synchronized TopologyDescription describe() {
    return internalTopologyBuilder.describe();
}
// A time-windowed cogrouped aggregation with a named in-memory materialized
// store should produce the expected source → aggregate → merge topology, and
// the in-memory store must not register as a persistent local store.
@Test
public void timeWindowedCogroupedNamedMaterializedCountShouldPreserveTopologyStructure() {
    final StreamsBuilder builder = new StreamsBuilder();
    builder.stream("input-topic")
        .groupByKey()
        .cogroup((key, value, aggregate) -> value)
        .windowedBy(TimeWindows.ofSizeWithNoGrace(ofMillis(1)))
        .aggregate(() -> "",
            Materialized.<Object, Object, WindowStore<Bytes, byte[]>>as("aggregate-store")
                .withStoreType(Materialized.StoreType.IN_MEMORY));
    final Topology topology = builder.build();

    final TopologyDescription describe = topology.describe();

    assertEquals(
        "Topologies:\n" +
            " Sub-topology: 0\n" +
            " Source: KSTREAM-SOURCE-0000000000 (topics: [input-topic])\n" +
            " --> COGROUPKSTREAM-AGGREGATE-0000000001\n" +
            " Processor: COGROUPKSTREAM-AGGREGATE-0000000001 (stores: [aggregate-store])\n" +
            " --> COGROUPKSTREAM-MERGE-0000000002\n" +
            " <-- KSTREAM-SOURCE-0000000000\n" +
            " Processor: COGROUPKSTREAM-MERGE-0000000002 (stores: [])\n" +
            " --> none\n" +
            " <-- COGROUPKSTREAM-AGGREGATE-0000000001\n\n",
        describe.toString()
    );

    topology.internalTopologyBuilder.setStreamsConfig(streamsConfig);
    assertThat(topology.internalTopologyBuilder.setApplicationId("test").buildTopology().hasPersistentLocalStore(), is(false));
}
/**
 * Rewrites a Parquet file, encrypting the given column paths with the supplied
 * encryption properties.
 *
 * @param inputFile path of the plaintext input file
 * @param outputFile path of the encrypted output file
 * @param paths dot-separated column paths to encrypt
 * @param fileEncryptionProperties keys/algorithm configuration for the rewrite
 * @throws IOException if reading, rewriting, or writing fails
 */
public void encryptColumns(
        String inputFile,
        String outputFile,
        List<String> paths,
        FileEncryptionProperties fileEncryptionProperties)
        throws IOException {
    Path inPath = new Path(inputFile);
    Path outPath = new Path(outputFile);

    RewriteOptions options = new RewriteOptions.Builder(conf, inPath, outPath)
            .encrypt(paths)
            .encryptionProperties(fileEncryptionProperties)
            .build();
    // try-with-resources: the original leaked the rewriter (and its underlying
    // output stream) whenever processBlocks() threw before close() was reached.
    try (ParquetRewriter rewriter = new ParquetRewriter(options)) {
        rewriter.processBlocks();
    }
}
// Encrypting the DocId column (with footer encryption, AES_GCM_CTR_V1) should
// produce a file that decrypts and verifies correctly with the valid key.
@Test
public void testFooterEncryption() throws IOException {
    String[] encryptColumns = {"DocId"};
    testSetup("GZIP");
    columnEncryptor.encryptColumns(
        inputFile.getFileName(),
        outputFile,
        Arrays.asList(encryptColumns),
        EncDecProperties.getFileEncryptionProperties(encryptColumns, ParquetCipher.AES_GCM_CTR_V1, true));

    verifyResultDecryptionWithValidKey();
}
/** Returns whether the query result limit feature is enabled. */
public boolean isQueryResultLimitEnabled() {
    return isQueryResultLimitEnabled;
}
// A configured limit of -1 disables the query result limit feature.
@Test
public void testNodeResultFeatureDisabled() {
    initMocksWithConfiguration(-1);
    assertFalse(limiter.isQueryResultLimitEnabled());
}
// Creates the endpoint provider used to write blob content to the given
// upload location; writtenByteCountListener receives progress byte counts.
RegistryEndpointProvider<URL> writer(URL location, Consumer<Long> writtenByteCountListener) {
    return new Writer(location, writtenByteCountListener);
}
// The writer endpoint should not restrict accepted response content types.
@Test
public void testWriter_GetAccept() {
    Assert.assertEquals(0, testBlobPusher.writer(mockUrl, ignored -> {}).getAccept().size());
}
@VisibleForTesting static String toString(@Nullable TaskManagerLocation location) { // '(unassigned)' being the default value is added to support backward-compatibility for the // deprecated fields return location != null ? location.getEndpoint() : "(unassigned)"; }
// A null TaskManagerLocation must map to the "(unassigned)" placeholder.
@Test
void testTaskManagerLocationFallbackHandling() {
    assertThat(JobExceptionsHandler.toString((TaskManagerLocation) null))
            .isEqualTo("(unassigned)");
}
// Convenience overload: updates the permission checkboxes with the boolean
// flag defaulted to false — see the two-argument overload for its meaning.
public void updateCheckboxes( EnumSet<RepositoryFilePermission> permissionEnumSet ) {
    updateCheckboxes( false, permissionEnumSet );
}
// With only READ permission and the flag set to true, only the read box is
// checked, and every box except write is disabled.
@Test
public void testUpdateCheckboxesReadPermissionsAppropriateTrue() {
    permissionsCheckboxHandler.updateCheckboxes( true, EnumSet.of( RepositoryFilePermission.READ ) );
    verify( readCheckbox, times( 1 ) ).setChecked( true );
    verify( writeCheckbox, times( 1 ) ).setChecked( false );
    verify( deleteCheckbox, times( 1 ) ).setChecked( false );
    verify( manageCheckbox, times( 1 ) ).setChecked( false );
    verify( readCheckbox, times( 1 ) ).setDisabled( true );
    verify( writeCheckbox, times( 1 ) ).setDisabled( false );
    verify( deleteCheckbox, times( 1 ) ).setDisabled( true );
    verify( manageCheckbox, times( 1 ) ).setDisabled( true );
}
/**
 * Renders this key as {@code ClassName{src=..., dst=...}} using Guava's
 * toString helper.
 */
@Override
public String toString() {
    return MoreObjects.toStringHelper(getClass())
            .add("src", src)
            .add("dst", dst)
            .toString();
}
// toString() should include the class name and both endpoints.
@Test
public void testToString() {
    LinkKey k1 = LinkKey.linkKey(SRC1, DST1);

    String k1String = k1.toString();
    assertThat(k1String, allOf(containsString("LinkKey{"),
                               containsString("src=1/1"),
                               containsString("dst=2/1}")));
}
/**
 * Validates the captcha of a login request when captcha checking is enabled.
 * Records a failed-login log entry and throws when verification fails.
 *
 * @param reqVO login request carrying the captcha verification payload
 */
@VisibleForTesting
void validateCaptcha(AuthLoginReqVO reqVO) {
    // If captcha checking is disabled, skip validation entirely
    if (!captchaEnable) {
        return;
    }
    // Validate the captcha fields on the request
    ValidationUtils.validate(validator, reqVO, AuthLoginReqVO.CodeEnableGroup.class);
    CaptchaVO captchaVO = new CaptchaVO();
    captchaVO.setCaptchaVerification(reqVO.getCaptchaVerification());
    ResponseModel response = captchaService.verification(captchaVO);
    // Verification did not pass
    if (!response.isSuccess()) {
        // Record a failed-login log entry (incorrect captcha)
        createLoginLog(null, reqVO.getUsername(), LoginLogTypeEnum.LOGIN_USERNAME, LoginResultEnum.CAPTCHA_CODE_ERROR);
        throw exception(AUTH_LOGIN_CAPTCHA_CODE_ERROR, response.getRepMsg());
    }
}
// With captcha enabled and the captcha service verifying successfully,
// validateCaptcha should complete without throwing.
@Test
public void testValidateCaptcha_successWithEnable() {
    // Prepare parameters
    AuthLoginReqVO reqVO = randomPojo(AuthLoginReqVO.class);

    // Mock: captcha checking enabled
    ReflectUtil.setFieldValue(authService, "captchaEnable", true);
    // Mock: verification passes
    when(captchaService.verification(argThat(captchaVO -> {
        assertEquals(reqVO.getCaptchaVerification(), captchaVO.getCaptchaVerification());
        return true;
    }))).thenReturn(ResponseModel.success());

    // Invoke; no assertion needed
    authService.validateCaptcha(reqVO);
}
/**
 * Triggers a savepoint at the given target location in the given format.
 *
 * @param targetLocation target directory for the savepoint; may be {@code null}
 * @param formatType binary format of the savepoint
 * @return future completing with the savepoint once it has been taken
 */
public CompletableFuture<CompletedCheckpoint> triggerSavepoint(
        @Nullable final String targetLocation, final SavepointFormatType formatType) {
    // The first argument forces alignment unless unaligned checkpoints are
    // enabled — presumably a "forceAligned" flag; confirm against forSavepoint.
    return triggerSavepointInternal(
            CheckpointProperties.forSavepoint(!unalignedCheckpointsEnabled, formatType),
            targetLocation);
}
// The min-pause-between-checkpoints setting applies to periodic checkpoints
// only; savepoints must remain triggerable back-to-back regardless.
@Test
void testMinDelayBetweenSavepoints() throws Exception {
    CheckpointCoordinatorConfiguration chkConfig =
            new CheckpointCoordinatorConfiguration.CheckpointCoordinatorConfigurationBuilder()
                    .setMinPauseBetweenCheckpoints(
                            100000000L) // very long min delay => should not affect savepoints
                    .setMaxConcurrentCheckpoints(1)
                    .build();
    CheckpointCoordinator checkpointCoordinator =
            new CheckpointCoordinatorBuilder()
                    .setCheckpointCoordinatorConfiguration(chkConfig)
                    .setCompletedCheckpointStore(new StandaloneCompletedCheckpointStore(2))
                    .setTimer(manuallyTriggeredScheduledExecutor)
                    .build(EXECUTOR_RESOURCE.getExecutor());

    String savepointDir = TempDirUtils.newFolder(tmpFolder).getAbsolutePath();

    // Both savepoints should be accepted (pending, not failed) despite the
    // huge min pause configured above.
    CompletableFuture<CompletedCheckpoint> savepoint0 =
            checkpointCoordinator.triggerSavepoint(savepointDir, SavepointFormatType.CANONICAL);
    assertThat(savepoint0).as("Did not trigger savepoint").isNotDone();

    CompletableFuture<CompletedCheckpoint> savepoint1 =
            checkpointCoordinator.triggerSavepoint(savepointDir, SavepointFormatType.CANONICAL);
    assertThat(savepoint1).as("Did not trigger savepoint").isNotDone();
}
/** Returns the key unchanged; this strategy performs no decoding. */
@Override
public String decodeKey(String key) {
    return key;
}
// Keys pass through decodeKey unchanged, since the strategy does no decoding.
// NOTE(review): method name has a typo ("Deccode" -> "Decode"); renaming is a
// safe follow-up since JUnit discovers tests by annotation, not by name.
@Test
public void testDeccodeValidKeys() {
    assertEquals("foo", strategy.decodeKey("foo"));
    assertEquals("foo123bar", strategy.decodeKey("foo123bar"));
    assertEquals("CamelFileName", strategy.decodeKey("CamelFileName"));
    assertEquals("Content-Type", strategy.decodeKey("Content-Type"));
    assertEquals("My-Header.You", strategy.decodeKey("My-Header.You"));
}
/** Returns the application name associated with this collector. */
public String getApplication() {
    return application;
}
// The collector should report the application name it was created with.
@Test
public void testGetApplication() {
    assertEquals("getApplication", "test collector", createCollectorWithOneCounter().getApplication());
}