focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Returns a transform computing the distinct set intersection of the input
 * collection with {@code rightCollection}.
 *
 * @param rightCollection the right-hand side collection; must not be null
 * @throws NullPointerException (via checkNotNull) if {@code rightCollection} is null
 */
public static <T> PTransform<PCollection<T>, PCollection<T>> intersectDistinct( PCollection<T> rightCollection) {
    checkNotNull(rightCollection, "rightCollection argument is null");
    // Delegates the actual set semantics to the no-arg intersectDistinct() overload.
    return new SetImpl<>(rightCollection, intersectDistinct());
}
@Test @Category(NeedsRunner.class) public void testIntersection() { PAssert.that(first.apply("strings", Sets.intersectDistinct(second))) .containsInAnyOrder("a", "b", "c", "d"); PCollection<Row> results = firstRows.apply("rows", Sets.intersectDistinct(secondRows)); PAssert.that(results).containsInAnyOrder(toRows("a", "b", "c", "d")); assertEquals(schema, results.getSchema()); p.run(); }
/**
 * Value-based equality: two records are equal iff their keys and values are
 * equal (null-safe). Uses exact-class comparison, matching the original contract.
 */
@Override
public boolean equals(Object o) {
    // Reflexive fast path.
    if (this == o) {
        return true;
    }
    // Exact class match (not instanceof), so subclasses never compare equal.
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    CoordinatorRecord other = (CoordinatorRecord) o;
    return Objects.equals(key, other.key) && Objects.equals(value, other.value);
}
@Test
public void testEquals() {
    // Records built from the same key/value pair must compare equal (value semantics).
    ApiMessageAndVersion key = new ApiMessageAndVersion(mock(ApiMessage.class), (short) 0);
    ApiMessageAndVersion value = new ApiMessageAndVersion(mock(ApiMessage.class), (short) 0);
    CoordinatorRecord record1 = new CoordinatorRecord(key, value);
    CoordinatorRecord record2 = new CoordinatorRecord(key, value);
    assertEquals(record1, record2);
}
/**
 * Evaluates the compiled term against the given row.
 * Exposed (package-private) for testing only.
 */
@VisibleForTesting
Object evaluate(final GenericRow row) {
    return term.getValue(new TermEvaluationContext(row));
}
@Test public void shouldEvaluateComparisons_int() { // Given: final Expression expression1 = new ComparisonExpression( ComparisonExpression.Type.GREATER_THAN, COL7, new IntegerLiteral(10) ); final Expression expression2 = new ComparisonExpression( ComparisonExpression.Type.LESS_THAN, COL7, new IntegerLiteral(20) ); final Expression expression3 = new ComparisonExpression( ComparisonExpression.Type.EQUAL, COL7, new DoubleLiteral(30) ); // When: InterpretedExpression interpreter1 = interpreter(expression1); InterpretedExpression interpreter2 = interpreter(expression2); InterpretedExpression interpreter3 = interpreter(expression3); // Then: assertThat(interpreter1.evaluate(make(7, 30)), is(true)); assertThat(interpreter1.evaluate(make(7, 4)), is(false)); assertThat(interpreter2.evaluate(make(7, 13)), is(true)); assertThat(interpreter2.evaluate(make(7, 20)), is(false)); assertThat(interpreter3.evaluate(make(7, 30)), is(true)); assertThat(interpreter3.evaluate(make(7, 31)), is(false)); }
/**
 * Builds a dotted {@link MetricName} from the class's fully-qualified name
 * followed by the given name parts.
 *
 * @param klass class whose {@code getName()} forms the prefix — works even for
 *              classes without canonical names (lambdas, anonymous classes)
 * @param names additional segments appended to the prefix
 */
public static MetricName name(Class<?> klass, String... names) {
    return name(klass.getName(), names);
}
@Test
public void concatenatesClassesWithoutCanonicalNamesWithStrings() throws Exception {
    // A lambda class has no canonical name; name() must fall back to getName().
    final Gauge<String> g = () -> null;
    assertThat(name(g.getClass(), "one", "two"))
        .isEqualTo(MetricName.build(g.getClass().getName() + ".one.two"));
}
/**
 * Writes a row to the configured destination: the servlet stream when servlet
 * output is enabled in the meta, otherwise the regular output file.
 *
 * @param row the row to write
 * @return the result of the underlying write call
 * @throws KettleException propagated from the underlying writer
 */
protected boolean writeRowTo( Object[] row ) throws KettleException {
    return meta.isServletOutput() ? writeRowToServlet( row ) : writeRowToFile( row );
}
@Test public void testWriteRowToFile_NoinitServletStreamWriter() throws Exception { // SETUP textFileOutput = new TextFileOutputTestHandler( stepMockHelper.stepMeta, stepMockHelper.stepDataInterface, 0, stepMockHelper.transMeta, stepMockHelper.trans ); TextFileOutputMeta mockTFOMeta = mock( TextFileOutputMeta.class ); when( mockTFOMeta.isServletOutput() ).thenReturn( true ); textFileOutput.meta = mockTFOMeta; TextFileOutputData data = new TextFileOutputData(); data.binarySeparator = " ".getBytes(); data.binaryEnclosure = "\"".getBytes(); data.binaryNewline = "\n".getBytes(); textFileOutput.data = data; data.outputRowMeta = new RowMeta(); data.outputRowMeta.addValueMeta( new ValueMetaString( "name" ) ); OutputStream originalWriter = mock( BufferedOutputStream.class ); // variable set textFileOutput.data.writer = originalWriter; // EXECUTE textFileOutput.writeRowTo( secondRow ); // VERIFY assertEquals( originalWriter, textFileOutput.data.writer ); }
/**
 * Returns the amount of memory reserved for the system (never null).
 */
@NotNull
public DataSize getReservedSystemMemory() {
    return reservedSystemMemory;
}
@Test public void testDefaults() { // This can't use assertRecordedDefaults because the default value is dependent on the current max heap size, which varies based on the current size of the survivor space. for (int i = 0; i < 1_000; i++) { DataSize expected = new DataSize(Runtime.getRuntime().maxMemory() * 0.4, BYTE); ReservedSystemMemoryConfig config = new ReservedSystemMemoryConfig(); if (expected.equals(config.getReservedSystemMemory())) { return; } } // We can't make this 100% deterministic, since we don't know when the survivor space will change sizes, but assume that something is broken if we got the wrong answer 1000 times fail(); }
/**
 * Loads the migrations config and delegates to the three-arg {@code command}
 * overload that does the real work.
 *
 * @return 0 on success; 1 if the config file is absent or fails to load
 */
@Override
protected int command() {
    // Fail fast when no config file was supplied/found.
    if (!validateConfigFilePresent()) {
        return 1;
    }
    final MigrationConfig config;
    try {
        config = MigrationConfig.load(getConfigFile());
    } catch (KsqlException | MigrationException e) {
        // Log and return an error code rather than propagating.
        LOGGER.error(e.getMessage());
        return 1;
    }
    return command(
        config,
        MigrationsUtil::getKsqlClient,
        getMigrationsDir(getConfigFile(), config)
    );
}
@Test public void shouldValidateSingleMigration() throws Exception { // Given: final List<String> versions = ImmutableList.of("1"); final List<String> checksums = givenExistingMigrationFiles(versions); givenAppliedMigrations(versions, checksums); // When: final int result = command.command(config, cfg -> ksqlClient, migrationsDir); // Then: assertThat(result, is(0)); verifyClientCallsForVersions(versions); }
/**
 * Opens a positional reader over the given OSS object.
 *
 * @param path       object path; any configured prefix is stripped first
 * @param fileLength total object length in bytes
 */
@Override
public PositionReader openPositionRead(String path, long fileLength) {
    return new OSSPositionReader(mClient, mBucketName, stripPrefixIfPresent(path), fileLength);
}
@Test
public void testOpenPositionRead() {
    // The UFS must return its OSS-specific PositionReader implementation.
    PositionReader result = mOSSUnderFileSystem.openPositionRead(KEY, 1L);
    Assert.assertTrue(result instanceof OSSPositionReader);
}
/**
 * Caches auth data for a function by storing its token in a Kubernetes secret.
 *
 * <p>If no token is present or secret creation fails, the error is logged and
 * an empty Optional is returned so processing can continue without auth.
 *
 * @return the created secret id wrapped as FunctionAuthData, or empty when no
 *         token was available or the secret could not be created
 */
@Override
public Optional<FunctionAuthData> cacheAuthData(Function.FunctionDetails funcDetails, AuthenticationDataSource authenticationDataSource) {
    String id = null;
    String tenant = funcDetails.getTenant();
    String namespace = funcDetails.getNamespace();
    String name = funcDetails.getName();
    try {
        String token = getToken(authenticationDataSource);
        if (token != null) {
            id = createSecret(token, funcDetails);
        }
    } catch (Exception e) {
        log.warn("Failed to get token for function {}", FunctionCommon.getFullyQualifiedName(tenant, namespace, name), e);
        // Ignore the exception and continue, since an anonymous user might be in use.
    }
    if (id != null) {
        // NOTE(review): id.getBytes() uses the platform default charset — consider
        // specifying StandardCharsets.UTF_8 explicitly; confirm decode side first.
        return Optional.of(FunctionAuthData.builder().data(id.getBytes()).build());
    }
    return Optional.empty();
}
@Test public void testCacheAuthData() throws ApiException { CoreV1Api coreV1Api = mock(CoreV1Api.class); doReturn(new V1Secret()).when(coreV1Api).createNamespacedSecret(anyString(), any(), anyString(), anyString(), anyString(), anyString()); KubernetesSecretsTokenAuthProvider kubernetesSecretsTokenAuthProvider = new KubernetesSecretsTokenAuthProvider(); kubernetesSecretsTokenAuthProvider.initialize(coreV1Api, null, (fd) -> "default"); Function.FunctionDetails funcDetails = Function.FunctionDetails.newBuilder().setTenant("test-tenant").setNamespace("test-ns").setName("test-func").build(); Optional<FunctionAuthData> functionAuthData = kubernetesSecretsTokenAuthProvider.cacheAuthData(funcDetails, new AuthenticationDataSource() { @Override public boolean hasDataFromCommand() { return true; } @Override public String getCommandData() { return "test-token"; } }); Assert.assertTrue(functionAuthData.isPresent()); Assert.assertTrue(StringUtils.isNotBlank(new String(functionAuthData.get().getData()))); }
/**
 * Returns true when this member's role is administrator.
 */
public boolean isAdmin() {
    return this.memberRole.isAdministrator();
}
// Test name (Korean): "returns true when the member is an admin".
@Test
void 어드민인_경우에_true를_반환한다() {
    // given: a member created with the admin role
    Member admin = 어드민_유저_생성();

    // when
    boolean result = admin.isAdmin();

    // then
    assertThat(result).isTrue();
}
/**
 * Triggers a master election for the given broker via the controller leader.
 *
 * <p>First resolves the controller leader address from {@code controllerAddr},
 * then sends a CONTROLLER_ELECT_MASTER request to that leader with a 3s timeout.
 *
 * <p>NOTE(review): the {@code assert} statements only fire with {@code -ea};
 * in production a null metadata/leader address would surface later as an NPE —
 * consider explicit null checks.
 *
 * @return the election response header paired with the decoded broker member group
 * @throws MQBrokerException when the controller responds with a non-success code
 */
public Pair<ElectMasterResponseHeader, BrokerMemberGroup> electMaster(String controllerAddr, String clusterName, String brokerName, Long brokerId) throws MQBrokerException, RemotingConnectException, RemotingSendRequestException, RemotingTimeoutException, InterruptedException, RemotingCommandException {
    //get controller leader address
    final GetMetaDataResponseHeader controllerMetaData = this.getControllerMetaData(controllerAddr);
    assert controllerMetaData != null;
    assert controllerMetaData.getControllerLeaderAddress() != null;
    final String leaderAddress = controllerMetaData.getControllerLeaderAddress();
    ElectMasterRequestHeader electRequestHeader = ElectMasterRequestHeader.ofAdminTrigger(clusterName, brokerName, brokerId);
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.CONTROLLER_ELECT_MASTER, electRequestHeader);
    // 3000 ms synchronous invocation timeout.
    final RemotingCommand response = this.remotingClient.invokeSync(leaderAddress, request, 3000);
    assert response != null;
    switch (response.getCode()) {
        case ResponseCode.SUCCESS: {
            BrokerMemberGroup brokerMemberGroup = RemotingSerializable.decode(response.getBody(), BrokerMemberGroup.class);
            ElectMasterResponseHeader responseHeader = (ElectMasterResponseHeader) response.decodeCommandCustomHeader(ElectMasterResponseHeader.class);
            return new Pair<>(responseHeader, brokerMemberGroup);
        }
        default:
            break;
    }
    // Any non-success response code becomes a broker exception carrying the remark.
    throw new MQBrokerException(response.getCode(), response.getRemark());
}
@Test public void assertElectMaster() throws RemotingException, InterruptedException, MQBrokerException { mockInvokeSync(); BrokerMemberGroup responseBody = new BrokerMemberGroup(); setResponseBody(responseBody); GetMetaDataResponseHeader getMetaDataResponseHeader = new GetMetaDataResponseHeader(); getMetaDataResponseHeader.setControllerLeaderAddress(defaultBrokerAddr); when(response.decodeCommandCustomHeader(GetMetaDataResponseHeader.class)).thenReturn(getMetaDataResponseHeader); ElectMasterResponseHeader responseHeader = new ElectMasterResponseHeader(); when(response.decodeCommandCustomHeader(ElectMasterResponseHeader.class)).thenReturn(responseHeader); Pair<ElectMasterResponseHeader, BrokerMemberGroup> actual = mqClientAPI.electMaster(defaultBrokerAddr, clusterName, brokerName, MixAll.MASTER_ID); assertNotNull(actual); assertEquals(responseHeader, actual.getObject1()); assertEquals(responseBody, actual.getObject2()); }
/**
 * Flattens a servlet-style parameter map ({@code name -> values[]}) into a
 * single-valued map, keeping only the first value of each parameter.
 *
 * @param parameterMap source map of parameter names to value arrays
 * @return a new mutable map containing each parameter's first value; entries
 *         whose value array is null or empty are skipped
 * @throws Exception never thrown; declared for interface compatibility
 */
public static Map<String, String> translateParameterMap(Map<String, String[]> parameterMap) throws Exception {
    Map<String, String> map = new HashMap<>(16);
    for (Map.Entry<String, String[]> entry : parameterMap.entrySet()) {
        String[] values = entry.getValue();
        // Guard against null/empty value arrays, which previously threw
        // NullPointerException / ArrayIndexOutOfBoundsException.
        if (values != null && values.length > 0) {
            map.put(entry.getKey(), values[0]);
        }
    }
    return map;
}
@Test
void testTranslateParameterMap() throws Exception {
    // Only the first value of each multi-valued parameter is retained.
    Map<String, String[]> map = Collections.singletonMap("K", new String[] {"V1", "V2"});
    Map<String, String> resultMap = HttpUtils.translateParameterMap(map);
    assertEquals(Collections.singletonMap("K", "V1"), resultMap);
}
/**
 * Decides whether a new checkpoint trigger request should be executed now,
 * queued, or rejected.
 *
 * <p>If the queue is already full and its lowest-priority entry is non-periodic
 * (user-submitted), the NEW request is dropped. Otherwise the new request is
 * enqueued and, if the limit is then exceeded, the lowest-priority queued
 * request is evicted. Either victim is completed exceptionally with
 * TOO_MANY_CHECKPOINT_REQUESTS.
 *
 * @return the request to execute now, if any
 */
Optional<CheckpointTriggerRequest> chooseRequestToExecute( CheckpointTriggerRequest newRequest, boolean isTriggering, long lastCompletionMs) {
    if (queuedRequests.size() >= maxQueuedRequests && !queuedRequests.last().isPeriodic) {
        // there are only non-periodic (ie user-submitted) requests enqueued - retain them and
        // drop the new one
        newRequest.completeExceptionally(new CheckpointException(TOO_MANY_CHECKPOINT_REQUESTS));
        return Optional.empty();
    } else {
        queuedRequests.add(newRequest);
        if (queuedRequests.size() > maxQueuedRequests) {
            // Evict the lowest-priority queued request to respect the limit.
            queuedRequests
                .pollLast()
                .completeExceptionally(
                    new CheckpointException(TOO_MANY_CHECKPOINT_REQUESTS));
        }
        // Delegate the actual "execute now?" decision to the two-arg overload.
        Optional<CheckpointTriggerRequest> request = chooseRequestToExecute(isTriggering, lastCompletionMs);
        request.ifPresent(CheckpointRequestDecider::logInQueueTime);
        return request;
    }
}
@Test void testQueueSizeLimit() { final int maxQueuedRequests = 10; final boolean isTriggering = true; CheckpointRequestDecider decider = decider(maxQueuedRequests); List<CheckpointTriggerRequest> requests = rangeClosed(0, maxQueuedRequests) .mapToObj(i -> regularCheckpoint()) .collect(toList()); int numAdded = 0; for (CheckpointTriggerRequest request : requests) { assertThat(decider.chooseRequestToExecute(request, isTriggering, 0)).isNotPresent(); List<CheckpointTriggerRequest> completed = requests.stream() .filter(r1 -> r1.getOnCompletionFuture().isDone()) .collect(toList()); completed.forEach(r -> assertFailed(r, TOO_MANY_CHECKPOINT_REQUESTS)); assertThat(completed).hasSize(Math.max(++numAdded - maxQueuedRequests, 0)); } }
/**
 * Splits plain-text lines into paragraphs of at most
 * {@code maxTokensPerParagraph} tokens, using the plain-text split options for
 * the inner line-splitting step.
 */
public static List<String> splitPlainTextParagraphs( List<String> lines, int maxTokensPerParagraph) {
    return internalSplitTextParagraphs(
        lines,
        maxTokensPerParagraph,
        (text) -> internalSplitLines(
            text, maxTokensPerParagraph, false, s_plaintextSplitOptions));
}
@Test public void canSplitTextParagraphsOnHyphens() { List<String> input = Arrays.asList( "This is a test of the emergency broadcast system-This is only a test", "We repeat-this is only a test-A unit test", "A small note-And another-And once again-Seriously, this is the end-We're" + " finished-All set-Bye.", "Done."); List<String> expected = Arrays.asList( "This is a test of the emergency", "broadcast system-This is only a test", "We repeat-this is only a test-A unit test", "A small note-And another-And once again-Seriously,", "this is the end-We're finished-All set-Bye." + "\n" + "Done."); List<String> result = TextChunker.splitPlainTextParagraphs(input, 15); Assertions.assertEquals(expected, result); }
/**
 * Creates a PubsubIO read of Avro {@code GenericRecord}s with the given Avro
 * schema, attaching a SchemaCoder so downstream transforms see a Beam schema
 * (with Row conversion functions derived from the Avro schema).
 */
public static Read<GenericRecord> readAvroGenericRecords(org.apache.avro.Schema avroSchema) {
    AvroCoder<GenericRecord> coder = AvroCoder.of(avroSchema);
    Schema schema = AvroUtils.getSchema(GenericRecord.class, avroSchema);
    return Read.newBuilder(parsePayloadUsingCoder(coder))
        .setCoder(
            SchemaCoder.of(
                schema,
                TypeDescriptor.of(GenericRecord.class),
                AvroUtils.getToRowFunction(GenericRecord.class, avroSchema),
                AvroUtils.getFromRowFunction(GenericRecord.class)))
        .build();
}
@Test public void testAvroGenericRecords() { AvroCoder<GenericRecord> coder = AvroCoder.of(SCHEMA); List<GenericRecord> inputs = ImmutableList.of( new AvroGeneratedUser("Bob", 256, null), new AvroGeneratedUser("Alice", 128, null), new AvroGeneratedUser("Ted", null, "white")); setupTestClient(inputs, coder); PCollection<GenericRecord> read = pipeline.apply( PubsubIO.readAvroGenericRecords(SCHEMA) .fromSubscription(SUBSCRIPTION.getPath()) .withClock(CLOCK) .withClientFactory(clientFactory)); PAssert.that(read).containsInAnyOrder(inputs); pipeline.run(); }
/**
 * QoS command that republishes metadata for every application model — either
 * immediately (no args) or after a delay in seconds given as {@code args[0]}.
 *
 * @return a per-application status report, or an error string when the delay
 *         argument is not a valid integer
 */
@Override
public String execute(CommandContext commandContext, String[] args) {
    logger.info("received publishMetadata command.");
    StringBuilder stringBuilder = new StringBuilder();
    List<ApplicationModel> applicationModels = frameworkModel.getApplicationModels();
    for (ApplicationModel applicationModel : applicationModels) {
        if (ArrayUtils.isEmpty(args)) {
            // No delay requested: refresh synchronously.
            ServiceInstanceMetadataUtils.refreshMetadataAndInstance(applicationModel);
            stringBuilder
                .append("publish metadata succeeded. App:")
                .append(applicationModel.getApplicationName())
                .append("\n");
        } else {
            try {
                int delay = Integer.parseInt(args[0]);
                FrameworkExecutorRepository frameworkExecutorRepository = applicationModel
                    .getFrameworkModel()
                    .getBeanFactory()
                    .getBean(FrameworkExecutorRepository.class);
                // Schedule the refresh on the shared scheduled executor.
                frameworkExecutorRepository
                    .nextScheduledExecutor()
                    .schedule(
                        () -> ServiceInstanceMetadataUtils.refreshMetadataAndInstance(applicationModel),
                        delay,
                        TimeUnit.SECONDS);
            } catch (NumberFormatException e) {
                // A malformed delay aborts the whole command, even mid-loop.
                logger.error(CONFIG_PARAMETER_FORMAT_ERROR, "", "", "Wrong delay param", e);
                return "publishMetadata failed! Wrong delay param!";
            }
            stringBuilder
                .append("publish task submitted, will publish in ")
                .append(args[0])
                .append(" seconds. App:")
                .append(applicationModel.getApplicationName())
                .append("\n");
        }
    }
    return stringBuilder.toString();
}
@Test void testExecute() { PublishMetadata publishMetadata = new PublishMetadata(frameworkModel); String result = publishMetadata.execute(Mockito.mock(CommandContext.class), new String[0]); String expectResult = "publish metadata succeeded. App:APP_0\n" + "publish metadata succeeded. App:APP_1\n" + "publish metadata succeeded. App:APP_2\n"; Assertions.assertEquals(result, expectResult); // delay 5s result = publishMetadata.execute(Mockito.mock(CommandContext.class), new String[] {"5"}); expectResult = "publish task submitted, will publish in 5 seconds. App:APP_0\n" + "publish task submitted, will publish in 5 seconds. App:APP_1\n" + "publish task submitted, will publish in 5 seconds. App:APP_2\n"; Assertions.assertEquals(result, expectResult); // wrong delay param result = publishMetadata.execute(Mockito.mock(CommandContext.class), new String[] {"A"}); expectResult = "publishMetadata failed! Wrong delay param!"; Assertions.assertEquals(result, expectResult); }
/**
 * HTTP endpoint removing a selector from the local cache by unsubscribing it.
 *
 * @param pluginName plugin the selector belongs to
 * @param id         selector id
 * @return the success constant
 */
@GetMapping("/plugin/selector/delete")
public Mono<String> deleteSelector(@RequestParam("pluginName") final String pluginName, @RequestParam("id") final String id) {
    // Only pluginName and id are needed to key the cache entry.
    SelectorData selectorData = SelectorData.builder().pluginName(pluginName).id(id).build();
    subscriber.unSelectorSubscribe(selectorData);
    return Mono.just(Constants.SUCCESS);
}
@Test public void testDeleteSelector() throws Exception { final String selectorPluginName = "testSaveSelector"; final String testSelectorId = "id"; final SelectorData selectorData = new SelectorData(); selectorData.setId(testSelectorId); selectorData.setPluginName(selectorPluginName); subscriber.onSelectorSubscribe(selectorData); this.mockMvc .perform(MockMvcRequestBuilders.get("/shenyu/plugin/selector/delete") .param("pluginName", selectorPluginName) .param("id", testSelectorId)) .andExpect(status().isOk()) .andReturn(); assertThat(baseDataCache.obtainSelectorData(selectorPluginName)).isEmpty(); }
/**
 * Updates an existing plugin handle identified by the path id.
 *
 * <p>NOTE(review): the validation message "rule not exited" looks like a typo
 * for "plugin handle not existed" — it is a user-facing runtime string, so
 * confirm with consumers before changing it.
 *
 * @param id              existing plugin handle id (validated via @Existed)
 * @param pluginHandleDTO updated handle payload; its id is overwritten by the path id
 */
@PutMapping("/{id}")
@RequiresPermissions("system:pluginHandler:edit")
public ShenyuAdminResult updatePluginHandle(@PathVariable("id") @Valid @Existed(provider = PluginHandleMapper.class, message = "rule not exited") final String id, @Valid @RequestBody final PluginHandleDTO pluginHandleDTO) {
    // The path id wins over any id in the body.
    pluginHandleDTO.setId(id);
    return ShenyuAdminResult.success(ShenyuResultMessage.UPDATE_SUCCESS, pluginHandleService.createOrUpdate(pluginHandleDTO));
}
@Test public void testUpdatePluginHandle() throws Exception { PluginHandleDTO pluginHandleDTO = new PluginHandleDTO(); pluginHandleDTO.setId("1"); pluginHandleDTO.setPluginId("1213"); pluginHandleDTO.setDataType(1); pluginHandleDTO.setField("f"); pluginHandleDTO.setType(1); pluginHandleDTO.setSort(1); SpringBeanUtils.getInstance().setApplicationContext(mock(ConfigurableApplicationContext.class)); when(SpringBeanUtils.getInstance().getBean(PluginHandleMapper.class)).thenReturn(handleMapper); when(handleMapper.existed(pluginHandleDTO.getId())).thenReturn(true); given(this.pluginHandleService.createOrUpdate(pluginHandleDTO)).willReturn(1); this.mockMvc.perform(MockMvcRequestBuilders.put("/plugin-handle/{id}", "1") .contentType(MediaType.APPLICATION_JSON) .content(GsonUtils.getInstance().toJson(pluginHandleDTO))) .andExpect(status().isOk()) .andExpect(jsonPath("$.message", is(ShenyuResultMessage.UPDATE_SUCCESS))) .andReturn(); }
/**
 * Destroys all cached registries and removes them from the cache.
 *
 * <p>Iterates over a snapshot of the keys so removing entries during iteration
 * is safe even if {@code ALL_REGISTRIES} is not a concurrent map (the original
 * removed from the map while iterating its entry set, which risks
 * ConcurrentModificationException on a plain HashMap).
 *
 * <p>Per-registry failures are logged and do not stop the loop; a registry
 * whose destroy() throws stays in the cache, matching the original behavior.
 */
public static void destroyAll() {
    for (RegistryConfig config : new java.util.ArrayList<>(ALL_REGISTRIES.keySet())) {
        Registry registry = ALL_REGISTRIES.get(config);
        if (registry == null) {
            // Entry vanished since the snapshot was taken; nothing to do.
            continue;
        }
        try {
            registry.destroy();
            ALL_REGISTRIES.remove(config);
        } catch (Exception e) {
            LOGGER.error(LogCodes.getLog(LogCodes.ERROR_DESTRORY_REGISTRY, config), e);
        }
    }
}
@Test
public void destroyAll() {
    // TODO(review): empty test body — this verifies nothing. Add assertions
    // that destroyAll() destroys every cached registry and empties the cache.
}
/**
 * Deserializes a coordinator record from its key and (optional) value buffers.
 *
 * <p>The key buffer begins with a short record type, which selects both the
 * key and value message classes. A null value buffer yields a tombstone
 * record (null value).
 *
 * @throws RuntimeException if either buffer cannot be parsed
 */
@Override
public CoordinatorRecord deserialize( ByteBuffer keyBuffer, ByteBuffer valueBuffer ) throws RuntimeException {
    // The key's leading short doubles as the record type.
    final short recordType = readVersion(keyBuffer, "key");
    final ApiMessage keyMessage = apiMessageKeyFor(recordType);
    readMessage(keyMessage, keyBuffer, recordType, "key");
    if (valueBuffer == null) {
        // Tombstone: key only, null value.
        return new CoordinatorRecord(new ApiMessageAndVersion(keyMessage, recordType), null);
    }
    final ApiMessage valueMessage = apiMessageValueFor(recordType);
    final short valueVersion = readVersion(valueBuffer, "value");
    readMessage(valueMessage, valueBuffer, valueVersion, "value");
    return new CoordinatorRecord(
        new ApiMessageAndVersion(keyMessage, recordType),
        new ApiMessageAndVersion(valueMessage, valueVersion)
    );
}
@Test
public void testDeserializeWithInvalidKeyBytes() {
    GroupCoordinatorRecordSerde serde = new GroupCoordinatorRecordSerde();
    // Key declares record type 3 but carries no message payload after the short.
    ByteBuffer keyBuffer = ByteBuffer.allocate(2);
    keyBuffer.putShort((short) 3);
    keyBuffer.rewind();
    ByteBuffer valueBuffer = ByteBuffer.allocate(2);
    valueBuffer.putShort((short) 0);
    valueBuffer.rewind();
    RuntimeException ex = assertThrows(RuntimeException.class,
        () -> serde.deserialize(keyBuffer, valueBuffer));
    // The error message must identify the version and the failing buffer.
    assertTrue(ex.getMessage().startsWith("Could not read record with version 3 from key's buffer due to"),
        ex.getMessage());
}
/**
 * Returns true when the counter's SCENE tag is present (non-blank) and equals
 * the downvote scene value.
 */
public static boolean isDownvoteCounter(Counter counter) {
    String scene = counter.getId().getTag(SCENE);
    // A missing/blank scene tag can never classify as a downvote counter.
    return !StringUtils.isBlank(scene) && DOWNVOTE_SCENE.equals(scene);
}
@Test
void isDownvoteCounter() {
    MeterRegistry meterRegistry = new SimpleMeterRegistry();
    // Build a real downvote counter and verify the scene-tag classification.
    Counter downvoteCounter = MeterUtils.downvoteCounter(meterRegistry, "posts.content.halo.run/fake-post");
    assertThat(MeterUtils.isDownvoteCounter(downvoteCounter)).isTrue();
    // A downvote counter must not also classify as a visit counter.
    assertThat(MeterUtils.isVisitCounter(downvoteCounter)).isFalse();
}
/**
 * Accepts a point, delegating to doAccept under a parallelism guard.
 */
@Override
public void accept(Point newPoint) {
    //ensure this method is never called by multiple threads at the same time.
    parallelismDetector.run(
        () -> doAccept(newPoint)
    );
}
@Test public void testTrackClosure_multipleTracks() { Duration TIME_LIMIT = Duration.ofSeconds(5); TestConsumer consumer = new TestConsumer(); TrackMaker maker = new TrackMaker(TIME_LIMIT, consumer); assertTrue( consumer.numCallsToAccept == 0, "The consumer has not been access yet" ); maker.accept(newPoint("track1", Instant.EPOCH)); maker.accept(newPoint("track1", Instant.EPOCH.plus(TIME_LIMIT.plusSeconds(1)))); maker.accept(newPoint("differentTrack", Instant.EPOCH.plus(TIME_LIMIT.multipliedBy(5)))); assertTrue( consumer.numCallsToAccept == 2, "there should be two \"track1\" tracks, both contain exactly 1 point" ); }
/**
 * Registers the given UDF factory if no compatible one exists, returning
 * whichever factory is (now) registered under the name.
 *
 * @throws KsqlException if the name is already taken by an aggregate or table
 *         function, or by an incompatible scalar factory
 */
@Override
public synchronized UdfFactory ensureFunctionFactory(final UdfFactory factory) {
    validateFunctionName(factory.getName());
    // NOTE(review): toUpperCase() uses the default locale — consider
    // Locale.ROOT to avoid Turkish-i surprises; confirm against other lookups.
    final String functionName = factory.getName().toUpperCase();
    // A name may be registered in at most one of the three function kinds.
    if (udafs.containsKey(functionName)) {
        throw new KsqlException("UdfFactory already registered as aggregate: " + functionName);
    }
    if (udtfs.containsKey(functionName)) {
        throw new KsqlException("UdfFactory already registered as table function: " + functionName);
    }
    final UdfFactory existing = udfs.putIfAbsent(functionName, factory);
    if (existing != null && !existing.matches(factory)) {
        throw new KsqlException("UdfFactory not compatible with existing factory."
            + " function: " + functionName
            + " existing: " + existing + ", factory: " + factory);
    }
    // Keep the already-registered factory when one was present.
    return existing == null ? factory : existing;
}
@Test
public void shouldThrowOnEnsureUdfFactoryOnDifferentX() {
    // Given: a factory already registered under the name...
    functionRegistry.ensureFunctionFactory(udfFactory);
    // ...that reports itself incompatible with the newcomer.
    when(udfFactory.matches(udfFactory1)).thenReturn(false);

    // When:
    final Exception e = assertThrows(
        KsqlException.class,
        () -> functionRegistry.ensureFunctionFactory(udfFactory1)
    );

    // Then:
    assertThat(e.getMessage(), containsString("UdfFactory not compatible with existing factory"));
}
/**
 * Returns true when the string is null, empty, or consists entirely of
 * whitespace characters (per Character.isWhitespace).
 */
public static boolean isBlank(String str) {
    // Null and empty strings are blank by definition.
    if (str == null) {
        return true;
    }
    // Blank iff every character is whitespace.
    for (int i = 0, n = str.length(); i < n; i++) {
        if (!Character.isWhitespace(str.charAt(i))) {
            return false;
        }
    }
    return true;
}
@Test
public void testIsBlank() {
    // Non-whitespace content is not blank; null/whitespace-only/empty are.
    Assert.assertFalse(StringUtil.isBlank("!!!!"));
    Assert.assertTrue(StringUtil.isBlank(null));
    Assert.assertTrue(StringUtil.isBlank("\n\n"));
    Assert.assertTrue(StringUtil.isBlank(""));
}
/**
 * Deletes a gateway flow rule by id and republishes the remaining rules to the
 * owning app/machine.
 *
 * @param id rule id; null is rejected, an unknown id is treated as success
 * @return the deleted id on success, a failure Result otherwise
 */
@PostMapping("/delete.json")
@AuthAction(AuthService.PrivilegeType.DELETE_RULE)
public Result<Long> deleteFlowRule(Long id) {
    if (id == null) {
        return Result.ofFail(-1, "id can't be null");
    }
    GatewayFlowRuleEntity oldEntity = repository.findById(id);
    if (oldEntity == null) {
        // Idempotent delete: nothing to remove.
        return Result.ofSuccess(null);
    }
    try {
        repository.delete(id);
    } catch (Throwable throwable) {
        logger.error("delete gateway flow rule error:", throwable);
        return Result.ofThrowable(-1, throwable);
    }
    // Best-effort push of the updated rule set; failure is only logged.
    if (!publishRules(oldEntity.getApp(), oldEntity.getIp(), oldEntity.getPort())) {
        logger.warn("publish gateway flow rules fail after delete");
    }
    return Result.ofSuccess(id);
}
@Test public void testDeleteFlowRule() throws Exception { String path = "/gateway/flow/delete.json"; // Add one entity into memory repository for delete GatewayFlowRuleEntity addEntity = new GatewayFlowRuleEntity(); addEntity.setId(1L); addEntity.setApp(TEST_APP); addEntity.setIp(TEST_IP); addEntity.setPort(TEST_PORT); addEntity.setResource("httpbin_route"); addEntity.setResourceMode(RESOURCE_MODE_ROUTE_ID); addEntity.setGrade(FLOW_GRADE_QPS); addEntity.setCount(5D); addEntity.setInterval(30L); addEntity.setIntervalUnit(GatewayFlowRuleEntity.INTERVAL_UNIT_SECOND); addEntity.setControlBehavior(CONTROL_BEHAVIOR_DEFAULT); addEntity.setBurst(0); addEntity.setMaxQueueingTimeoutMs(0); Date date = new Date(); date = DateUtils.addSeconds(date, -1); addEntity.setGmtCreate(date); addEntity.setGmtModified(date); GatewayParamFlowItemEntity addItemEntity = new GatewayParamFlowItemEntity(); addEntity.setParamItem(addItemEntity); addItemEntity.setParseStrategy(PARAM_PARSE_STRATEGY_CLIENT_IP); repository.save(addEntity); given(sentinelApiClient.modifyGatewayFlowRules(eq(TEST_APP), eq(TEST_IP), eq(TEST_PORT), any())).willReturn(true); MockHttpServletRequestBuilder requestBuilder = MockMvcRequestBuilders.post(path); requestBuilder.param("id", String.valueOf(addEntity.getId())); // Do controller logic MvcResult mvcResult = mockMvc.perform(requestBuilder) .andExpect(MockMvcResultMatchers.status().isOk()).andDo(MockMvcResultHandlers.print()).andReturn(); // Verify the modifyGatewayFlowRules method has been called verify(sentinelApiClient).modifyGatewayFlowRules(eq(TEST_APP), eq(TEST_IP), eq(TEST_PORT), any()); // Verify the result Result<Long> result = JSONObject.parseObject(mvcResult.getResponse().getContentAsString(), new TypeReference<Result<Long>>() {}); assertTrue(result.isSuccess()); assertEquals(addEntity.getId(), result.getData()); // Now no entities in memory List<GatewayFlowRuleEntity> entitiesInMem = repository.findAllByApp(TEST_APP); assertEquals(0, entitiesInMem.size()); }
/**
 * Deserializes the raw API answer into a SentWebAppMessage, delegating to the
 * typed overload (which raises TelegramApiRequestException on error payloads).
 */
@Override
public SentWebAppMessage deserializeResponse(String answer) throws TelegramApiRequestException {
    return deserializeResponse(answer, SentWebAppMessage.class);
}
@Test public void testAnswerWebAppQueryDeserializeErrorResponse() { String responseText = "{\"ok\":false,\"error_code\": 404,\"description\": \"Error message\"}"; AnswerWebAppQuery answerWebAppQuery = AnswerWebAppQuery .builder() .webAppQueryId("123456789") .queryResult(InlineQueryResultArticle .builder() .id("") .title("Text") .inputMessageContent(InputTextMessageContent .builder() .messageText("My own text") .build()) .build()) .build(); try { answerWebAppQuery.deserializeResponse(responseText); fail(); } catch (TelegramApiRequestException e) { assertEquals(404, e.getErrorCode()); assertEquals("Error message", e.getApiResponse()); } }
/**
 * Tears this helper down: disposes the in-flight local proxy work and
 * unregisters the media-insertion broadcast receiver from the context.
 */
@Override
public void destroy() {
    mCurrentRunningLocalProxy.dispose();
    mContext.unregisterReceiver(mMediaInsertionAvailableReceiver);
}
@Test
public void testReceiverLifeCycle() {
    // Exactly one media-insertion receiver is registered after construction...
    Assert.assertEquals(
        1,
        mShadowApplication.getRegisteredReceivers().stream()
            .filter(
                wrapper -> wrapper.broadcastReceiver
                    instanceof RemoteInsertionImpl.MediaInsertionAvailableReceiver)
            .count());
    mUnderTest.destroy();
    // ...and none after destroy() unregisters it.
    Assert.assertEquals(
        0,
        mShadowApplication.getRegisteredReceivers().stream()
            .filter(
                wrapper -> wrapper.broadcastReceiver
                    instanceof RemoteInsertionImpl.MediaInsertionAvailableReceiver)
            .count());
}
/**
 * Returns true when parsing produced at least one error; when true,
 * errorMessage() describes the failure.
 */
public abstract boolean hasError();
@Test public void testPretty() throws UnsupportedEncodingException, IOException { String schemaText = "{ " + " \"type\": \"record\"," + " \"name\": \"LongList\"," + " \"fields\" : [" + " {\"name\": \"value\", \"type\": \"long\"}, " + " {\"name\": \"next\", \"type\": [\"LongList\", \"null\"]} " + " ]" + "}"; String br = System.getProperty("line.separator"); Object[][] testData = { { JsonBuilder.Pretty.COMPACT, "{\"type\":\"record\",\"name\":\"LongList\",\"fields\":[{\"name\":\"value\",\"type\":\"long\"},{\"name\":\"next\",\"type\":[\"LongList\",\"null\"]}]}" }, { JsonBuilder.Pretty.SPACES, "{ \"type\" : \"record\", \"name\" : \"LongList\", \"fields\" : [ { \"name\" : \"value\", \"type\" : \"long\" }, { \"name\" : \"next\", \"type\" : [ \"LongList\", \"null\" ] } ] }" }, { JsonBuilder.Pretty.INDENTED, "{" + br + " \"type\" : \"record\"," + br + " \"name\" : \"LongList\"," + br + " \"fields\" : [ {" + br + " \"name\" : \"value\"," + br + " \"type\" : \"long\"" + br + " }, {" + br + " \"name\" : \"next\"," + br + " \"type\" : [ \"LongList\", \"null\" ]" + br + " } ]" + br + "}" }, }; for (Object[] input : testData) { PegasusSchemaParser parser = schemaParserFromString(schemaText); String result; if (parser.hasError()) { result = "ERROR: " + parser.errorMessage(); } else { result = SchemaToJsonEncoder.schemasToJson(parser.topLevelDataSchemas(), (JsonBuilder.Pretty) input[0]); } if (input[1] != null) { assertEquals(result, input[1]); } else { out.println(result); } } }
/**
 * Returns a view of this double array as an iterable whose element comparisons
 * use the given tolerance.
 */
public DoubleArrayAsIterable usingTolerance(double tolerance) {
    return new DoubleArrayAsIterable(tolerance(tolerance), iterableSubject());
}
@Test
public void usingTolerance_containsExactly_primitiveDoubleArray_failure() {
    // 3.3 has no expected counterpart within tolerance, so the assertion must
    // fail and report it as the single unexpected element.
    expectFailureWhenTestingThat(array(1.1, TOLERABLE_2POINT2, 3.3))
        .usingTolerance(DEFAULT_TOLERANCE)
        .containsExactly(array(2.2, 1.1));
    assertFailureKeys(
        "value of", "unexpected (1)", "---", "expected", "testing whether", "but was");
    assertFailureValue("unexpected (1)", "3.3");
}
/**
 * Builds the physical KStream for a StreamSelect step.
 *
 * <p>When the step selects only a strict subset of the source key columns, the
 * key itself must be rewritten, so a full key+value transform is used;
 * otherwise a value-only transform suffices.
 *
 * @return the stream holder carrying the projected schema (and possibly a new key)
 */
public static <K> KStreamHolder<K> build(
    final KStreamHolder<K> stream,
    final StreamSelect<K> step,
    final RuntimeBuildContext buildContext
) {
  final QueryContext queryContext = step.getProperties().getQueryContext();
  final LogicalSchema sourceSchema = stream.getSchema();
  final Optional<ImmutableList<ColumnName>> selectedKeys = step.getSelectedKeys();
  final Selection<K> selection = Selection.of(
      sourceSchema,
      step.getKeyColumnNames(),
      selectedKeys,
      step.getSelectExpressions(),
      buildContext.getKsqlConfig(),
      buildContext.getFunctionRegistry()
  );
  // Precompute, for each selected key column, its index in the source key.
  final ImmutableList.Builder<Integer> keyIndexBuilder = ImmutableList.builder();
  if (selectedKeys.isPresent()) {
    final ImmutableList<ColumnName> keyNames = sourceSchema.key().stream()
        .map(Column::name)
        .collect(ImmutableList.toImmutableList());
    for (final ColumnName keyName : selectedKeys.get()) {
      keyIndexBuilder.add(keyNames.indexOf(keyName));
    }
  }
  final ImmutableList<Integer> keyIndices = keyIndexBuilder.build();
  final SelectValueMapper<K> selectMapper = selection.getMapper();
  final ProcessingLogger logger = buildContext.getProcessingLogger(queryContext);
  final Named selectName = Named.as(StreamsUtil.buildOpName(queryContext));
  if (selectedKeys.isPresent() && !selectedKeys.get().containsAll(
      sourceSchema.key().stream().map(Column::name).collect(ImmutableList.toImmutableList())
  )) {
    // Some source key columns are dropped: rebuild the key from the kept columns.
    return stream.withStream(
        stream.getStream().transform(
            () -> new KsTransformer<>(
                (readOnlyKey, value, ctx) -> {
                  if (keyIndices.isEmpty()) {
                    // Nothing selected from the key: emit a null key.
                    return null;
                  }
                  if (readOnlyKey instanceof GenericKey) {
                    final GenericKey keys = (GenericKey) readOnlyKey;
                    final Builder resultKeys = GenericKey.builder(keyIndices.size());
                    for (final int keyIndex : keyIndices) {
                      resultKeys.append(keys.get(keyIndex));
                    }
                    // Unchecked cast: valid only for GenericKey-keyed streams,
                    // which the instanceof check above guarantees.
                    return (K) resultKeys.build();
                  } else {
                    // Key re-selection is only implemented for GenericKey keys.
                    throw new UnsupportedOperationException();
                  }
                },
                selectMapper.getTransformer(logger)
            ),
            selectName
        ),
        selection.getSchema()
    );
  } else {
    // Key unchanged: a value-only transform is cheaper and avoids repartitioning.
    return stream.withStream(
        stream.getStream().transformValues(
            () -> new KsValueTransformer<>(selectMapper.getTransformer(logger)),
            selectName
        ),
        selection.getSchema()
    );
  }
}
@Test
public void shouldReturnCorrectSchema() {
    // When:
    final KStreamHolder<Struct> result = step.build(planBuilder, planInfo);

    // Then: the key column plus the two selected value columns.
    assertThat(
        result.getSchema(),
        is(LogicalSchema.builder()
            .keyColumn(SystemColumns.ROWKEY_NAME, SqlTypes.STRING)
            .valueColumn(ColumnName.of("expr1"), SqlTypes.STRING)
            .valueColumn(ColumnName.of("expr2"), SqlTypes.INTEGER)
            .build())
    );
}
/**
 * Assembles the built-in default value for every proactive-support setting.
 *
 * @return a fresh {@link Properties} instance populated with the defaults
 */
protected Properties getDefaultProps() {
  final Properties defaults = new Properties();
  defaults.setProperty(
      CONFLUENT_SUPPORT_METRICS_ENABLE_CONFIG,
      CONFLUENT_SUPPORT_METRICS_ENABLE_DEFAULT);
  defaults.setProperty(
      CONFLUENT_SUPPORT_CUSTOMER_ID_CONFIG,
      CONFLUENT_SUPPORT_CUSTOMER_ID_DEFAULT);
  defaults.setProperty(
      CONFLUENT_SUPPORT_METRICS_REPORT_INTERVAL_HOURS_CONFIG,
      CONFLUENT_SUPPORT_METRICS_REPORT_INTERVAL_HOURS_DEFAULT);
  defaults.setProperty(
      CONFLUENT_SUPPORT_METRICS_ENDPOINT_INSECURE_ENABLE_CONFIG,
      CONFLUENT_SUPPORT_METRICS_ENDPOINT_INSECURE_ENABLE_DEFAULT);
  defaults.setProperty(
      CONFLUENT_SUPPORT_METRICS_ENDPOINT_SECURE_ENABLE_CONFIG,
      CONFLUENT_SUPPORT_METRICS_ENDPOINT_SECURE_ENABLE_DEFAULT);
  defaults.setProperty(
      CONFLUENT_SUPPORT_PROXY_CONFIG,
      CONFLUENT_SUPPORT_PROXY_DEFAULT);
  return defaults;
}
@Test public void testGetDefaultProps() { // Given Properties overrideProps = new Properties(); // When BaseSupportConfig supportConfig = new TestSupportConfig(overrideProps); // Then assertTrue(supportConfig.getMetricsEnabled()); assertEquals("anonymous", supportConfig.getCustomerId()); assertEquals(24 * 60 * 60 * 1000, supportConfig.getReportIntervalMs()); assertTrue(supportConfig.isHttpEnabled()); assertTrue(supportConfig.isHttpsEnabled()); assertTrue(supportConfig.isProactiveSupportEnabled()); assertEquals("", supportConfig.getProxy()); assertEquals("http://support-metrics.confluent.io/anon", supportConfig.getEndpointHttp()); assertEquals("https://support-metrics.confluent.io/anon", supportConfig.getEndpointHttps()); }
/**
 * Migrates legacy alert conditions and alarm callbacks into the new event-definition /
 * notification model, stream by stream.
 *
 * <p>Already-migrated ids (passed in the two "completed" sets) are skipped but still
 * recorded in the result so the caller sees a complete picture. Individual failures are
 * logged and do not abort the rest of the migration (best-effort semantics).
 *
 * @param completedAlertConditions ids of alert conditions migrated in a previous run
 * @param completedAlarmCallbacks  ids of alarm callbacks migrated in a previous run
 * @return the accumulated migration result (all completed condition and callback ids)
 */
public MigrationResult run(Set<String> completedAlertConditions, Set<String> completedAlarmCallbacks) {
    final MigrationResult.Builder result = MigrationResult.builder();

    streamsCollection.find().forEach(stream -> {
        final String streamId = stream.getObjectId("_id").toHexString();
        final String streamTitle = stream.getString("title");

        // First migrate this stream's alarm callbacks into notifications, because the
        // alert-condition migration below attaches these notifications to each condition.
        final FindIterable<Document> iterable = alarmCallbacksCollection.find(Filters.eq("stream_id", streamId));
        final Set<NotificationDto> notifications = Streams.stream(iterable)
                .map(alarmCallback -> {
                    final String callbackId = alarmCallback.getObjectId("_id").toHexString();

                    if (completedAlarmCallbacks.contains(callbackId)) {
                        // Already migrated earlier: record it and load the existing notification.
                        result.addCompletedAlarmCallback(callbackId);
                        return dbNotificationService.get(callbackId).orElse(null);
                    }

                    try {
                        final NotificationDto notificationDto = migrateAlarmCallback(alarmCallback);
                        result.addCompletedAlarmCallback(callbackId);
                        return notificationDto;
                    } catch (Exception e) {
                        // Best-effort: keep migrating the remaining callbacks.
                        LOG.error("Couldn't migrate legacy alarm callback on stream <{}/{}>: {}", streamTitle, streamId, alarmCallback, e);
                        return null;
                    }
                })
                .filter(Objects::nonNull)
                .collect(Collectors.toSet());

        if (!stream.containsKey("alert_conditions")) {
            // Stream has no legacy alert conditions: nothing more to do for it.
            return;
        }

        @SuppressWarnings("unchecked")
        final List<Document> list = (List<Document>) stream.get("alert_conditions");

        list.forEach(alertCondition -> {
            final String conditionId = alertCondition.getString("id");
            final String conditionType = alertCondition.getString("type");

            if (completedAlertConditions.contains(conditionId)) {
                result.addCompletedAlertCondition(conditionId);
                return;
            }

            try {
                // Dispatch on the legacy condition type; unknown types are logged and skipped.
                switch (conditionType) {
                    case "message_count":
                        migrateMessageCount(new Helper(stream, alertCondition, notifications));
                        result.addCompletedAlertCondition(conditionId);
                        break;
                    case "field_value":
                        migrateFieldValue(new Helper(stream, alertCondition, notifications));
                        result.addCompletedAlertCondition(conditionId);
                        break;
                    case "field_content_value":
                        migrateFieldContentValue(new Helper(stream, alertCondition, notifications));
                        result.addCompletedAlertCondition(conditionId);
                        break;
                    default:
                        LOG.warn("Couldn't migrate unknown legacy alert condition type: {}", conditionType);
                }
            } catch (Exception e) {
                // Best-effort: a broken condition must not block the others.
                LOG.error("Couldn't migrate legacy alert condition on stream <{}/{}>: {}", streamTitle, streamId, alertCondition, e);
            }
        });
    });

    return result.build();
}
// End-to-end migration test: loads the legacy fixture, runs the migration once with no
// previously-completed ids, then verifies every migrated notification and event definition.
@Test
@MongoDBFixtures("legacy-alert-conditions.json")
public void run() {
    final int migratedConditions = 10;
    final int migratedCallbacks = 4;

    // All fixture condition/callback ids must be reported as completed.
    assertThat(migrator.run(Collections.emptySet(), Collections.emptySet())).satisfies(result -> {
        assertThat(result.completedAlertConditions()).containsOnly(
                "00000000-0000-0000-0000-000000000001",
                "00000000-0000-0000-0000-000000000002",
                "00000000-0000-0000-0000-000000000003",
                "00000000-0000-0000-0000-000000000004",
                "00000000-0000-0000-0000-000000000005",
                "00000000-0000-0000-0000-000000000006",
                "00000000-0000-0000-0000-000000000007",
                "00000000-0000-0000-0000-000000000008",
                "00000000-0000-0000-0000-000000000009",
                "00000000-0000-0000-0000-000000000010"
        );
        assertThat(result.completedAlarmCallbacks()).containsOnly(
                "54e3deadbeefdeadbeef0001",
                "54e3deadbeefdeadbeef0002",
                "54e3deadbeefdeadbeef0003",
                "54e3deadbeefdeadbeef0004"
        );
    });

    // Make sure we use the EventDefinitionHandler to create the event definitions
    verify(eventDefinitionHandler, times(migratedConditions)).create(any(EventDefinitionDto.class), any(Optional.class));

    // Make sure we use the NotificationResourceHandler to create the notifications
    verify(notificationResourceHandler, times(migratedCallbacks)).create(any(NotificationDto.class), any(Optional.class));

    assertThat(eventDefinitionService.streamAll().count()).isEqualTo(migratedConditions);
    assertThat(notificationService.streamAll().count()).isEqualTo(migratedCallbacks);

    // --- Migrated notifications ------------------------------------------------------

    final NotificationDto httpNotification = notificationService.streamAll()
            .filter(n -> n.title().equals("HTTP Callback Test"))
            .findFirst()
            .orElse(null);

    assertThat(httpNotification).isNotNull();
    assertThat(httpNotification.title()).isEqualTo("HTTP Callback Test");
    assertThat(httpNotification.description()).isEqualTo("Migrated legacy alarm callback");
    assertThat(httpNotification.config()).isInstanceOf(LegacyAlarmCallbackEventNotificationConfig.class);
    assertThat((LegacyAlarmCallbackEventNotificationConfig) httpNotification.config()).satisfies(config -> {
        assertThat(config.callbackType()).isEqualTo("org.graylog2.alarmcallbacks.HTTPAlarmCallback");
        assertThat(config.configuration().get("url")).isEqualTo("http://localhost:11000/");
    });

    // A callback without a title gets the "Untitled" fallback.
    final NotificationDto httpNotificationWithoutTitle = notificationService.streamAll()
            .filter(n -> n.title().equals("Untitled"))
            .findFirst()
            .orElse(null);

    assertThat(httpNotificationWithoutTitle).isNotNull();
    assertThat(httpNotificationWithoutTitle.title()).isEqualTo("Untitled");
    assertThat(httpNotificationWithoutTitle.description()).isEqualTo("Migrated legacy alarm callback");
    assertThat(httpNotificationWithoutTitle.config()).isInstanceOf(LegacyAlarmCallbackEventNotificationConfig.class);
    assertThat((LegacyAlarmCallbackEventNotificationConfig) httpNotificationWithoutTitle.config()).satisfies(config -> {
        assertThat(config.callbackType()).isEqualTo("org.graylog2.alarmcallbacks.HTTPAlarmCallback");
        assertThat(config.configuration().get("url")).isEqualTo("http://localhost:11000/");
    });

    final NotificationDto emailNotification = notificationService.streamAll()
            .filter(n -> n.title().equals("Email Callback Test"))
            .findFirst()
            .orElse(null);

    assertThat(emailNotification).isNotNull();
    assertThat(emailNotification.title()).isEqualTo("Email Callback Test");
    assertThat(emailNotification.description()).isEqualTo("Migrated legacy alarm callback");
    assertThat(emailNotification.config()).isInstanceOf(LegacyAlarmCallbackEventNotificationConfig.class);
    assertThat((LegacyAlarmCallbackEventNotificationConfig) emailNotification.config()).satisfies(config -> {
        assertThat(config.callbackType()).isEqualTo("org.graylog2.alarmcallbacks.EmailAlarmCallback");
        assertThat(config.configuration().get("sender")).isEqualTo("graylog@example.org");
        assertThat(config.configuration().get("subject")).isEqualTo("Graylog alert for stream: ${stream.title}: ${check_result.resultDescription}");
        assertThat((String) config.configuration().get("body")).contains("Alert Description: ${check_result.resultDescription}\nDate: ");
        assertThat(config.configuration().get("user_receivers")).isEqualTo(Collections.emptyList());
        assertThat(config.configuration().get("email_receivers")).isEqualTo(Collections.singletonList("jane@example.org"));
    });

    final NotificationDto slackNotification = notificationService.streamAll()
            .filter(n -> n.title().equals("Slack Callback Test"))
            .findFirst()
            .orElse(null);

    assertThat(slackNotification).isNotNull();
    assertThat(slackNotification.title()).isEqualTo("Slack Callback Test");
    assertThat(slackNotification.description()).isEqualTo("Migrated legacy alarm callback");
    assertThat(slackNotification.config()).isInstanceOf(LegacyAlarmCallbackEventNotificationConfig.class);
    assertThat((LegacyAlarmCallbackEventNotificationConfig) slackNotification.config()).satisfies(config -> {
        assertThat(config.callbackType()).isEqualTo("org.graylog2.plugins.slack.callback.SlackAlarmCallback");
        assertThat(config.configuration().get("icon_url")).isEqualTo("");
        assertThat(config.configuration().get("graylog2_url")).isEqualTo("");
        assertThat(config.configuration().get("link_names")).isEqualTo(true);
        assertThat(config.configuration().get("webhook_url")).isEqualTo("http://example.com/slack-hook");
        assertThat(config.configuration().get("color")).isEqualTo("#FF0000");
        assertThat(config.configuration().get("icon_emoji")).isEqualTo("");
        assertThat(config.configuration().get("user_name")).isEqualTo("Graylog");
        assertThat(config.configuration().get("backlog_items")).isEqualTo(5);
        assertThat(config.configuration().get("custom_fields")).isEqualTo("");
        assertThat(config.configuration().get("proxy_address")).isEqualTo("");
        assertThat(config.configuration().get("channel")).isEqualTo("#channel");
        assertThat(config.configuration().get("notify_channel")).isEqualTo(false);
        assertThat(config.configuration().get("add_attachment")).isEqualTo(true);
        assertThat(config.configuration().get("short_mode")).isEqualTo(false);
    });

    // --- Migrated event definitions --------------------------------------------------

    // message_count condition, MORE threshold.
    assertThat(eventDefinitionService.streamAll().filter(ed -> ed.title().equals("Message Count - MORE")).findFirst())
            .get()
            .satisfies(eventDefinition -> {
                assertThat(eventDefinition.alert()).isTrue();
                assertThat(eventDefinition.priority()).isEqualTo(2);
                assertThat(eventDefinition.keySpec()).isEmpty();
                assertThat(eventDefinition.notificationSettings().gracePeriodMs()).isEqualTo(120000);
                assertThat(eventDefinition.notificationSettings().backlogSize()).isEqualTo(10);
                assertThat(eventDefinition.notifications()).hasSize(2);
                assertThat(eventDefinition.notifications().stream().map(EventNotificationHandler.Config::notificationId).collect(Collectors.toList()))
                        .containsOnly(httpNotification.id(), httpNotificationWithoutTitle.id());
                assertThat((AggregationEventProcessorConfig) eventDefinition.config()).satisfies(config -> {
                    assertThat(config.streams()).containsExactly("54e3deadbeefdeadbeef0001");
                    assertThat(config.query()).isEqualTo("hello:world");
                    assertThat(config.groupBy()).isEmpty();
                    assertThat(config.searchWithinMs()).isEqualTo(10 * 60 * 1000);
                    assertThat(config.executeEveryMs()).isEqualTo(CHECK_INTERVAL * 1000);
                    assertThat(config.series()).hasSize(1);
                    assertThat(config.series().get(0).id()).isNotBlank();
                    assertThat(config.series().get(0).type()).isEqualTo(Count.NAME);
                    assertThat(((HasOptionalField) config.series().get(0)).field()).isEmpty();
                    assertThat(config.conditions()).get().satisfies(conditions -> {
                        assertThat(conditions.expression()).get().satisfies(expression -> {
                            assertThat(expression).isInstanceOf(Expr.Greater.class);
                            final Expr.Greater greater = (Expr.Greater) expression;
                            assertThat(greater.left()).isEqualTo(Expr.NumberReference.create(config.series().get(0).id()));
                            assertThat(greater.right()).isEqualTo(Expr.NumberValue.create(1));
                        });
                    });
                });
            });

    // message_count condition, LESS threshold.
    assertThat(eventDefinitionService.streamAll().filter(ed -> ed.title().equals("Message Count - LESS")).findFirst())
            .get()
            .satisfies(eventDefinition -> {
                assertThat(eventDefinition.alert()).isTrue();
                assertThat(eventDefinition.priority()).isEqualTo(2);
                assertThat(eventDefinition.keySpec()).isEmpty();
                assertThat(eventDefinition.notificationSettings().gracePeriodMs()).isEqualTo(0);
                assertThat(eventDefinition.notificationSettings().backlogSize()).isEqualTo(0);
                assertThat(eventDefinition.notifications()).hasSize(2);
                assertThat(eventDefinition.notifications().stream().map(EventNotificationHandler.Config::notificationId).collect(Collectors.toList()))
                        .containsOnly(httpNotification.id(), httpNotificationWithoutTitle.id());
                assertThat((AggregationEventProcessorConfig) eventDefinition.config()).satisfies(config -> {
                    assertThat(config.streams()).containsExactly("54e3deadbeefdeadbeef0001");
                    assertThat(config.query()).isEmpty();
                    assertThat(config.groupBy()).isEmpty();
                    assertThat(config.searchWithinMs()).isEqualTo(4 * 60 * 1000);
                    assertThat(config.executeEveryMs()).isEqualTo(CHECK_INTERVAL * 1000);
                    assertThat(config.series()).hasSize(1);
                    assertThat(config.series().get(0).id()).isNotBlank();
                    assertThat(config.series().get(0).type()).isEqualTo(Count.NAME);
                    assertThat(((HasOptionalField) config.series().get(0)).field()).isEmpty();
                    assertThat(config.conditions()).get().satisfies(conditions -> {
                        assertThat(conditions.expression()).get().satisfies(expression -> {
                            assertThat(expression).isInstanceOf(Expr.Lesser.class);
                            final Expr.Lesser lesser = (Expr.Lesser) expression;
                            assertThat(lesser.left()).isEqualTo(Expr.NumberReference.create(config.series().get(0).id()));
                            assertThat(lesser.right()).isEqualTo(Expr.NumberValue.create(42));
                        });
                    });
                });
            });

    // field_value condition, HIGHER with MEAN aggregation.
    assertThat(eventDefinitionService.streamAll().filter(ed -> ed.title().equals("Field Value - HIGHER - MEAN")).findFirst())
            .get()
            .satisfies(eventDefinition -> {
                assertThat(eventDefinition.alert()).isTrue();
                assertThat(eventDefinition.priority()).isEqualTo(2);
                assertThat(eventDefinition.keySpec()).isEmpty();
                assertThat(eventDefinition.notificationSettings().gracePeriodMs()).isEqualTo(60000);
                assertThat(eventDefinition.notificationSettings().backlogSize()).isEqualTo(15);
                assertThat(eventDefinition.notifications()).isEmpty();
                assertThat((AggregationEventProcessorConfig) eventDefinition.config()).satisfies(config -> {
                    assertThat(config.streams()).containsExactly("54e3deadbeefdeadbeef0002");
                    assertThat(config.query()).isEqualTo("*");
                    assertThat(config.groupBy()).isEmpty();
                    assertThat(config.searchWithinMs()).isEqualTo(5 * 60 * 1000);
                    assertThat(config.executeEveryMs()).isEqualTo(CHECK_INTERVAL * 1000);
                    assertThat(config.series()).hasSize(1);
                    assertThat(config.series().get(0).id()).isNotBlank();
                    assertThat(config.series().get(0).type()).isEqualTo(Average.NAME);
                    assertThat(((HasField) config.series().get(0)).field()).isEqualTo("test_field_1");
                    assertThat(config.conditions()).get().satisfies(conditions -> {
                        assertThat(conditions.expression()).get().satisfies(expression -> {
                            assertThat(expression).isInstanceOf(Expr.Greater.class);
                            final Expr.Greater greater = (Expr.Greater) expression;
                            assertThat(greater.left()).isEqualTo(Expr.NumberReference.create(config.series().get(0).id()));
                            assertThat(greater.right()).isEqualTo(Expr.NumberValue.create(23));
                        });
                    });
                });
            });

    // field_value condition, LOWER with SUM aggregation.
    assertThat(eventDefinitionService.streamAll().filter(ed -> ed.title().equals("Field Value - LOWER - SUM")).findFirst())
            .get()
            .satisfies(eventDefinition -> {
                assertThat(eventDefinition.alert()).isTrue();
                assertThat(eventDefinition.priority()).isEqualTo(2);
                assertThat(eventDefinition.keySpec()).isEmpty();
                assertThat(eventDefinition.notificationSettings().gracePeriodMs()).isEqualTo(60000);
                assertThat(eventDefinition.notificationSettings().backlogSize()).isEqualTo(15);
                assertThat(eventDefinition.notifications()).isEmpty();
                assertThat((AggregationEventProcessorConfig) eventDefinition.config()).satisfies(config -> {
                    assertThat(config.streams()).containsExactly("54e3deadbeefdeadbeef0002");
                    assertThat(config.query()).isEqualTo("*");
                    assertThat(config.groupBy()).isEmpty();
                    assertThat(config.searchWithinMs()).isEqualTo(5 * 60 * 1000);
                    assertThat(config.executeEveryMs()).isEqualTo(CHECK_INTERVAL * 1000);
                    assertThat(config.series()).hasSize(1);
                    assertThat(config.series().get(0).id()).isNotBlank();
                    assertThat(config.series().get(0).type()).isEqualTo(Sum.NAME);
                    assertThat(((HasField) config.series().get(0)).field()).isEqualTo("test_field_1");
                    assertThat(config.conditions()).get().satisfies(conditions -> {
                        assertThat(conditions.expression()).get().satisfies(expression -> {
                            assertThat(expression).isInstanceOf(Expr.Lesser.class);
                            final Expr.Lesser lesser = (Expr.Lesser) expression;
                            assertThat(lesser.left()).isEqualTo(Expr.NumberReference.create(config.series().get(0).id()));
                            assertThat(lesser.right()).isEqualTo(Expr.NumberValue.create(23));
                        });
                    });
                });
            });

    // field_value condition, LOWER with MIN aggregation.
    assertThat(eventDefinitionService.streamAll().filter(ed -> ed.title().equals("Field Value - LOWER - MIN")).findFirst())
            .get()
            .satisfies(eventDefinition -> {
                assertThat(eventDefinition.alert()).isTrue();
                assertThat(eventDefinition.priority()).isEqualTo(2);
                assertThat(eventDefinition.keySpec()).isEmpty();
                assertThat(eventDefinition.notificationSettings().gracePeriodMs()).isEqualTo(60000);
                assertThat(eventDefinition.notificationSettings().backlogSize()).isEqualTo(15);
                assertThat(eventDefinition.notifications()).isEmpty();
                assertThat((AggregationEventProcessorConfig) eventDefinition.config()).satisfies(config -> {
                    assertThat(config.streams()).containsExactly("54e3deadbeefdeadbeef0002");
                    assertThat(config.query()).isEqualTo("*");
                    assertThat(config.groupBy()).isEmpty();
                    assertThat(config.searchWithinMs()).isEqualTo(5 * 60 * 1000);
                    assertThat(config.executeEveryMs()).isEqualTo(CHECK_INTERVAL * 1000);
                    assertThat(config.series()).hasSize(1);
                    assertThat(config.series().get(0).id()).isNotBlank();
                    assertThat(config.series().get(0).type()).isEqualTo(Min.NAME);
                    assertThat(((HasField) config.series().get(0)).field()).isEqualTo("test_field_1");
                    assertThat(config.conditions()).get().satisfies(conditions -> {
                        assertThat(conditions.expression()).get().satisfies(expression -> {
                            assertThat(expression).isInstanceOf(Expr.Lesser.class);
                            final Expr.Lesser lesser = (Expr.Lesser) expression;
                            assertThat(lesser.left()).isEqualTo(Expr.NumberReference.create(config.series().get(0).id()));
                            assertThat(lesser.right()).isEqualTo(Expr.NumberValue.create(23));
                        });
                    });
                });
            });

    // field_value condition, LOWER with MAX aggregation.
    assertThat(eventDefinitionService.streamAll().filter(ed -> ed.title().equals("Field Value - LOWER - MAX")).findFirst())
            .get()
            .satisfies(eventDefinition -> {
                assertThat(eventDefinition.alert()).isTrue();
                assertThat(eventDefinition.priority()).isEqualTo(2);
                assertThat(eventDefinition.keySpec()).isEmpty();
                assertThat(eventDefinition.notificationSettings().gracePeriodMs()).isEqualTo(60000);
                assertThat(eventDefinition.notificationSettings().backlogSize()).isEqualTo(15);
                assertThat(eventDefinition.notifications()).isEmpty();
                assertThat((AggregationEventProcessorConfig) eventDefinition.config()).satisfies(config -> {
                    assertThat(config.streams()).containsExactly("54e3deadbeefdeadbeef0002");
                    assertThat(config.query()).isEqualTo("*");
                    assertThat(config.groupBy()).isEmpty();
                    assertThat(config.searchWithinMs()).isEqualTo(5 * 60 * 1000);
                    assertThat(config.executeEveryMs()).isEqualTo(CHECK_INTERVAL * 1000);
                    assertThat(config.series()).hasSize(1);
                    assertThat(config.series().get(0).id()).isNotBlank();
                    assertThat(config.series().get(0).type()).isEqualTo(Max.NAME);
                    assertThat(((HasField) config.series().get(0)).field()).isEqualTo("test_field_1");
                    assertThat(config.conditions()).get().satisfies(conditions -> {
                        assertThat(conditions.expression()).get().satisfies(expression -> {
                            assertThat(expression).isInstanceOf(Expr.Lesser.class);
                            final Expr.Lesser lesser = (Expr.Lesser) expression;
                            assertThat(lesser.left()).isEqualTo(Expr.NumberReference.create(config.series().get(0).id()));
                            assertThat(lesser.right()).isEqualTo(Expr.NumberValue.create(23));
                        });
                    });
                });
            });

    // field_value condition, LOWER with STDDEV aggregation (legacy STDDEV maps to Greater).
    assertThat(eventDefinitionService.streamAll().filter(ed -> ed.title().equals("Field Value - LOWER - STDDEV")).findFirst())
            .get()
            .satisfies(eventDefinition -> {
                assertThat(eventDefinition.alert()).isTrue();
                assertThat(eventDefinition.priority()).isEqualTo(2);
                assertThat(eventDefinition.keySpec()).isEmpty();
                assertThat(eventDefinition.notificationSettings().gracePeriodMs()).isEqualTo(60000);
                assertThat(eventDefinition.notificationSettings().backlogSize()).isEqualTo(15);
                assertThat(eventDefinition.notifications()).isEmpty();
                assertThat((AggregationEventProcessorConfig) eventDefinition.config()).satisfies(config -> {
                    assertThat(config.streams()).containsExactly("54e3deadbeefdeadbeef0002");
                    assertThat(config.query()).isEqualTo("*");
                    assertThat(config.groupBy()).isEmpty();
                    assertThat(config.searchWithinMs()).isEqualTo(5 * 60 * 1000);
                    assertThat(config.executeEveryMs()).isEqualTo(CHECK_INTERVAL * 1000);
                    assertThat(config.series()).hasSize(1);
                    assertThat(config.series().get(0).id()).isNotBlank();
                    assertThat(config.series().get(0).type()).isEqualTo(StdDev.NAME);
                    assertThat(((HasField) config.series().get(0)).field()).isEqualTo("test_field_1");
                    assertThat(config.conditions()).get().satisfies(conditions -> {
                        assertThat(conditions.expression()).get().satisfies(expression -> {
                            assertThat(expression).isInstanceOf(Expr.Greater.class);
                            final Expr.Greater greater = (Expr.Greater) expression;
                            assertThat(greater.left()).isEqualTo(Expr.NumberReference.create(config.series().get(0).id()));
                            assertThat(greater.right()).isEqualTo(Expr.NumberValue.create(23));
                        });
                    });
                });
            });

    // field_content_value condition without an extra query.
    assertThat(eventDefinitionService.streamAll().filter(ed -> ed.title().equals("Field Content - WITHOUT QUERY")).findFirst())
            .get()
            .satisfies(eventDefinition -> {
                assertThat(eventDefinition.alert()).isTrue();
                assertThat(eventDefinition.priority()).isEqualTo(2);
                assertThat(eventDefinition.keySpec()).isEmpty();
                assertThat(eventDefinition.notificationSettings().gracePeriodMs()).isEqualTo(120000);
                assertThat(eventDefinition.notificationSettings().backlogSize()).isEqualTo(100);
                assertThat(eventDefinition.notifications()).hasSize(2);
                assertThat(eventDefinition.notifications().stream().map(EventNotificationHandler.Config::notificationId).collect(Collectors.toSet()))
                        .containsOnly(emailNotification.id(), slackNotification.id());
                assertThat((AggregationEventProcessorConfig) eventDefinition.config()).satisfies(config -> {
                    assertThat(config.streams()).containsExactly("54e3deadbeefdeadbeef0003");
                    assertThat(config.query()).isEqualTo("test_field_2:\"hello\"");
                    assertThat(config.groupBy()).isEmpty();
                    assertThat(config.searchWithinMs()).isEqualTo(CHECK_INTERVAL * 1000);
                    assertThat(config.executeEveryMs()).isEqualTo(CHECK_INTERVAL * 1000);
                    assertThat(config.series()).hasSize(1);
                    assertThat(config.series().get(0).id()).isNotBlank();
                    assertThat(config.series().get(0).type()).isEqualTo(Count.NAME);
                    assertThat(((HasOptionalField) config.series().get(0)).field()).isEmpty();
                    assertThat(config.conditions()).get().satisfies(conditions -> {
                        assertThat(conditions.expression()).get().satisfies(expression -> {
                            assertThat(expression).isInstanceOf(Expr.Greater.class);
                            final Expr.Greater greater = (Expr.Greater) expression;
                            assertThat(greater.left()).isEqualTo(Expr.NumberReference.create(config.series().get(0).id()));
                            assertThat(greater.right()).isEqualTo(Expr.NumberValue.create(0));
                        });
                    });
                });
            });

    // field_content_value condition with an extra query appended.
    assertThat(eventDefinitionService.streamAll().filter(ed -> ed.title().equals("Field Content - WITH QUERY")).findFirst())
            .get()
            .satisfies(eventDefinition -> {
                assertThat(eventDefinition.alert()).isTrue();
                assertThat(eventDefinition.priority()).isEqualTo(2);
                assertThat(eventDefinition.keySpec()).isEmpty();
                assertThat(eventDefinition.notificationSettings().gracePeriodMs()).isEqualTo(0);
                assertThat(eventDefinition.notificationSettings().backlogSize()).isEqualTo(0);
                assertThat(eventDefinition.notifications()).hasSize(2);
                assertThat(eventDefinition.notifications().stream().map(EventNotificationHandler.Config::notificationId).collect(Collectors.toSet()))
                        .containsOnly(emailNotification.id(), slackNotification.id());
                assertThat((AggregationEventProcessorConfig) eventDefinition.config()).satisfies(config -> {
                    assertThat(config.streams()).containsExactly("54e3deadbeefdeadbeef0003");
                    assertThat(config.query()).isEqualTo("test_field_3:\"foo\" AND foo:bar");
                    assertThat(config.groupBy()).isEmpty();
                    assertThat(config.searchWithinMs()).isEqualTo(CHECK_INTERVAL * 1000);
                    assertThat(config.executeEveryMs()).isEqualTo(CHECK_INTERVAL * 1000);
                    assertThat(config.series()).hasSize(1);
                    assertThat(config.series().get(0).id()).isNotBlank();
                    assertThat(config.series().get(0).type()).isEqualTo(Count.NAME);
                    assertThat(((HasOptionalField) config.series().get(0)).field()).isEmpty();
                    assertThat(config.conditions()).get().satisfies(conditions -> {
                        assertThat(conditions.expression()).get().satisfies(expression -> {
                            assertThat(expression).isInstanceOf(Expr.Greater.class);
                            final Expr.Greater greater = (Expr.Greater) expression;
                            assertThat(greater.left()).isEqualTo(Expr.NumberReference.create(config.series().get(0).id()));
                            assertThat(greater.right()).isEqualTo(Expr.NumberValue.create(0));
                        });
                    });
                });
            });

    // Condition without a title gets the "Untitled" fallback as well.
    assertThat(eventDefinitionService.streamAll().filter(ed -> ed.title().equals("Untitled")).findFirst())
            .get()
            .satisfies(eventDefinition -> {
                assertThat(eventDefinition.alert()).isTrue();
                assertThat(eventDefinition.priority()).isEqualTo(2);
                assertThat(eventDefinition.keySpec()).isEmpty();
                assertThat(eventDefinition.notificationSettings().gracePeriodMs()).isEqualTo(0);
                assertThat(eventDefinition.notificationSettings().backlogSize()).isEqualTo(0);
                assertThat(eventDefinition.notifications()).hasSize(2);
                assertThat(eventDefinition.notifications().stream().map(EventNotificationHandler.Config::notificationId).collect(Collectors.toSet()))
                        .containsOnly(emailNotification.id(), slackNotification.id());
                assertThat((AggregationEventProcessorConfig) eventDefinition.config()).satisfies(config -> {
                    assertThat(config.streams()).containsExactly("54e3deadbeefdeadbeef0003");
                    assertThat(config.query()).isEqualTo("test_field_3:\"foo\" AND foo:bar");
                    assertThat(config.groupBy()).isEmpty();
                    assertThat(config.searchWithinMs()).isEqualTo(CHECK_INTERVAL * 1000);
                    assertThat(config.executeEveryMs()).isEqualTo(CHECK_INTERVAL * 1000);
                    assertThat(config.series()).hasSize(1);
                    assertThat(config.series().get(0).id()).isNotBlank();
                    assertThat(config.series().get(0).type()).isEqualTo(Count.NAME);
                    assertThat(((HasOptionalField) config.series().get(0)).field()).isEmpty();
                    assertThat(config.conditions()).get().satisfies(conditions -> {
                        assertThat(conditions.expression()).get().satisfies(expression -> {
                            assertThat(expression).isInstanceOf(Expr.Greater.class);
                            final Expr.Greater greater = (Expr.Greater) expression;
                            assertThat(greater.left()).isEqualTo(Expr.NumberReference.create(config.series().get(0).id()));
                            assertThat(greater.right()).isEqualTo(Expr.NumberValue.create(0));
                        });
                    });
                });
            });
}
/**
 * Decodes a Base64 Basic-Auth credential into its name/password parts.
 *
 * <p>The split is limited to {@code BASIC_AUTH_LENGTH} parts, so only the first
 * separator splits the string; any further separators remain in the password.
 *
 * @param credential the Base64-encoded "name:password" credential
 * @return the decoded parts, validated by {@code validateBasicAuth}
 */
public static String[] decodeBasicAuth(String credential) {
    String decoded = decodeBase64(credential);
    String[] credentials = decoded.split(BASIC_AUTH_SEPARATOR, BASIC_AUTH_LENGTH);
    validateBasicAuth(credentials);
    return credentials;
}
@Test
@DisplayName("BasicAuth 인증 정보 디코딩 성공: 여러 개의 구분자 포함일 경우 첫 구분자 이후 문자열을 비밀번호로 인식")
void decodeValidAuthWithMultipleSeparators() {
    // Build a credential whose password itself contains the separator character.
    String username = "codezap";
    String passwordWithSeparators = "pass:word:123";
    String encodedCredential = HttpHeaders.encodeBasicAuth(username, passwordWithSeparators, StandardCharsets.UTF_8);

    String[] decoded = BasicAuthDecoder.decodeBasicAuth(encodedCredential);

    // Only the first separator splits; everything after it belongs to the password.
    assertThat(decoded)
            .hasSize(2)
            .containsExactly(username, passwordWithSeparators);
}
@Override
public Row poll() {
  // Delegate with a zero timeout — see poll(Duration) for the actual semantics.
  return poll(Duration.ZERO);
}
@Test public void shouldNotPollIfFailed() throws Exception { // Given handleQueryResultError(); // When final Exception e = assertThrows(IllegalStateException.class, () -> queryResult.poll()); // Then assertThat(e.getMessage(), containsString("Cannot poll on StreamedQueryResult that has failed")); }
/**
 * Ensures no other member level in {@code list} already uses {@code level}.
 *
 * <p>A row with the same level value is allowed only when it is the record being
 * updated itself (matched by {@code id}); otherwise the uniqueness exception is thrown.
 *
 * @param list  all existing member levels to check against
 * @param id    id of the record being saved, or {@code null} when creating a new one
 * @param level the level value to validate
 */
@VisibleForTesting
void validateLevelUnique(List<MemberLevelDO> list, Long id, Integer level) {
    for (MemberLevelDO existing : list) {
        boolean sameLevel = !ObjUtil.notEqual(existing.getLevel(), level);
        // On create (id == null) any match conflicts; on update, only a different record does.
        boolean isSelf = id != null && id.equals(existing.getId());
        if (sameLevel && !isSelf) {
            throw exception(LEVEL_VALUE_EXISTS, existing.getLevel(), existing.getName());
        }
    }
}
@Test public void testUpdateLevel_levelUnique() { // 准备参数 Long id = randomLongId(); Integer level = randomInteger(); String name = randomString(); // mock 数据 memberlevelMapper.insert(randomLevelDO(o -> { o.setLevel(level); o.setName(name); })); // 调用,校验异常 List<MemberLevelDO> list = memberlevelMapper.selectList(); assertServiceException(() -> levelService.validateLevelUnique(list, id, level), LEVEL_VALUE_EXISTS, level, name); }
@Override
public String getNext() {
  // Atomically reserve the next id and render it as a decimal string,
  // so concurrent callers never receive the same id.
  return String.valueOf(queryIdCounter.getAndIncrement());
}
@Test
public void shouldGenerateMonotonicallyIncrementingIds() {
  // Successive calls must yield "0", "1", "2" in order.
  for (int expected = 0; expected < 3; expected++) {
    assertThat(generator.getNext(), is(String.valueOf(expected)));
  }
}
/**
 * Prepares and sends fetch requests to the relevant broker nodes.
 *
 * <p>The method itself is {@code synchronized}, and both response callbacks re-acquire
 * the {@code Fetcher} monitor before touching shared state, because the network thread
 * may invoke them after this method has returned.
 *
 * @return the number of fetch requests that were sent (one per target node)
 */
public synchronized int sendFetches() {
    final Map<Node, FetchSessionHandler.FetchRequestData> fetchRequests = prepareFetchRequests();
    sendFetchesInternal(
            fetchRequests,
            (fetchTarget, data, clientResponse) -> {
                // Callback runs on the network thread: serialize with other Fetcher work.
                synchronized (Fetcher.this) {
                    handleFetchSuccess(fetchTarget, data, clientResponse);
                }
            },
            (fetchTarget, data, error) -> {
                synchronized (Fetcher.this) {
                    handleFetchFailure(fetchTarget, data, error);
                }
            });
    return fetchRequests.size();
}
// Verifies that the leader epoch of the batch a record came from is exposed on the
// ConsumerRecord. Each record's value encodes the epoch it was produced with, so the
// consumer-side value can be checked directly.
@Test
public void testLeaderEpochInConsumerRecord() {
    buildFetcher();

    assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);

    // Batch 1: epoch 1, two records at offsets 0-1.
    int partitionLeaderEpoch = 1;
    ByteBuffer buffer = ByteBuffer.allocate(1024);
    MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, RecordBatch.CURRENT_MAGIC_VALUE, Compression.NONE,
            TimestampType.CREATE_TIME, 0L, System.currentTimeMillis(), partitionLeaderEpoch);
    builder.append(0L, "key".getBytes(), Integer.toString(partitionLeaderEpoch).getBytes());
    builder.append(0L, "key".getBytes(), Integer.toString(partitionLeaderEpoch).getBytes());
    builder.close();

    // Batch 2: epoch bumped to 8, one record at offset 2.
    partitionLeaderEpoch += 7;
    builder = MemoryRecords.builder(buffer, RecordBatch.CURRENT_MAGIC_VALUE, Compression.NONE, TimestampType.CREATE_TIME,
            2L, System.currentTimeMillis(), partitionLeaderEpoch);
    builder.append(0L, "key".getBytes(), Integer.toString(partitionLeaderEpoch).getBytes());
    builder.close();

    // Batch 3: epoch bumped to 13, three records at offsets 3-5.
    partitionLeaderEpoch += 5;
    builder = MemoryRecords.builder(buffer, RecordBatch.CURRENT_MAGIC_VALUE, Compression.NONE, TimestampType.CREATE_TIME,
            3L, System.currentTimeMillis(), partitionLeaderEpoch);
    builder.append(0L, "key".getBytes(), Integer.toString(partitionLeaderEpoch).getBytes());
    builder.append(0L, "key".getBytes(), Integer.toString(partitionLeaderEpoch).getBytes());
    builder.append(0L, "key".getBytes(), Integer.toString(partitionLeaderEpoch).getBytes());
    builder.close();

    buffer.flip();
    MemoryRecords records = MemoryRecords.readableRecords(buffer);

    assertEquals(1, sendFetches());
    assertFalse(fetcher.hasCompletedFetches());

    client.prepareResponse(fullFetchResponse(tidp0, records, Errors.NONE, 100L, 0));
    consumerClient.poll(time.timer(0));
    assertTrue(fetcher.hasCompletedFetches());

    Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> partitionRecords = fetchRecords();
    assertTrue(partitionRecords.containsKey(tp0));
    assertEquals(6, partitionRecords.get(tp0).size());

    for (ConsumerRecord<byte[], byte[]> record : partitionRecords.get(tp0)) {
        // The epoch surfaced on the record must match the epoch encoded in its value.
        int expectedLeaderEpoch = Integer.parseInt(Utils.utf8(record.value()));
        assertEquals(Optional.of(expectedLeaderEpoch), record.leaderEpoch());
    }
}
// Factory shortcut for a coder of Avro GenericRecords with the given schema;
// delegates to AvroGenericCoder's own factory.
public static AvroGenericCoder of(Schema schema) {
  return AvroGenericCoder.of(schema);
}
@Test
public void testDeterministicSimple() {
  // A coder for SimpleDeterministicClass must advertise itself as deterministic.
  assertDeterministic(AvroCoder.of(SimpleDeterministicClass.class));
}
/**
 * Adds a mapping from the given (normalized) hostname to {@code output}.
 *
 * <p>Both arguments are null-checked eagerly; the hostname is normalized before
 * being stored as the lookup key.
 *
 * @param hostname the hostname to map; must not be {@code null}
 * @param output   the value to associate; must not be {@code null}
 * @return this mapping, for call chaining
 * @deprecated presumably superseded by the immutable builder API
 *     ({@code DomainNameMappingBuilder}) — confirm the intended replacement.
 */
@Deprecated
public DomainNameMapping<V> add(String hostname, V output) {
    map.put(normalizeHostname(checkNotNull(hostname, "hostname")), checkNotNull(output, "output"));
    return this;
}
@Test
public void testNullValuesAreForbidden() {
    // A null mapping value must be rejected eagerly with a NullPointerException.
    // Executable is a functional interface, so a lambda replaces the verbose
    // anonymous class without changing behavior.
    assertThrows(
            NullPointerException.class,
            () -> new DomainNameMappingBuilder<String>("NotFound").add("Some key", null));
}
/**
 * Creates the sharding route engine appropriate for the statement being executed.
 *
 * <p>Dispatch order matters: TCL statements always broadcast to every database;
 * DDL (with a cursor-aware special case) comes next, then DAL, then DCL; anything
 * else falls through to DQL routing.
 *
 * @param shardingRule       the sharding rule in effect
 * @param database           the target logical database
 * @param queryContext       the query context (statement, hints, parameters)
 * @param shardingConditions extracted sharding conditions for the query
 * @param props              configuration properties
 * @param connectionContext  the current connection context
 * @return the route engine to use for this statement
 */
public static ShardingRouteEngine newInstance(final ShardingRule shardingRule, final ShardingSphereDatabase database, final QueryContext queryContext,
                                              final ShardingConditions shardingConditions, final ConfigurationProperties props, final ConnectionContext connectionContext) {
    SQLStatementContext sqlStatementContext = queryContext.getSqlStatementContext();
    SQLStatement sqlStatement = sqlStatementContext.getSqlStatement();
    if (sqlStatement instanceof TCLStatement) {
        // Transaction control must reach every shard.
        return new ShardingDatabaseBroadcastRoutingEngine();
    }
    if (sqlStatement instanceof DDLStatement) {
        if (sqlStatementContext instanceof CursorAvailable) {
            // Cursor-related DDL gets dedicated routing.
            return getCursorRouteEngine(shardingRule, database, sqlStatementContext, queryContext.getHintValueContext(), shardingConditions, props);
        }
        return getDDLRoutingEngine(shardingRule, database, sqlStatementContext);
    }
    if (sqlStatement instanceof DALStatement) {
        return getDALRoutingEngine(shardingRule, database, sqlStatementContext, connectionContext);
    }
    if (sqlStatement instanceof DCLStatement) {
        return getDCLRoutingEngine(shardingRule, database, sqlStatementContext);
    }
    // DML/queries: route by sharding conditions and hints.
    return getDQLRoutingEngine(shardingRule, database, sqlStatementContext, queryContext.getHintValueContext(), shardingConditions, props, connectionContext);
}
// Multiple sharded logic tables in one statement should select the complex routing engine.
@Test
void assertNewInstanceForComplex() {
    SQLStatement sqlStatement = mock(SQLStatement.class);
    when(sqlStatementContext.getSqlStatement()).thenReturn(sqlStatement);
    tableNames.add("1");
    tableNames.add("2");
    when(shardingRule.getShardingLogicTableNames(tableNames)).thenReturn(tableNames);
    QueryContext queryContext = new QueryContext(sqlStatementContext, "", Collections.emptyList(), new HintValueContext(), mockConnectionContext(), mock(ShardingSphereMetaData.class));
    ShardingRouteEngine actual = ShardingRouteEngineFactory.newInstance(shardingRule, database, queryContext, shardingConditions, props, new ConnectionContext(Collections::emptySet));
    assertThat(actual, instanceOf(ShardingComplexRoutingEngine.class));
}
// Sets which outer-join variant (e.g. LEFT, RIGHT, FULL) this operator performs.
public void setOuterJoinType(OuterJoinType outerJoinType) {
    this.outerJoinType = outerJoinType;
}
// A FULL outer join over disjoint inputs must pad every element with null on the other side.
@Test
void testFullOuterJoinWithoutMatchingPartners() throws Exception {
    final List<String> leftInput = Arrays.asList("foo", "bar", "foobar");
    final List<String> rightInput = Arrays.asList("oof", "rab", "raboof");
    baseOperator.setOuterJoinType(OuterJoinOperatorBase.OuterJoinType.FULL);
    List<String> expected = Arrays.asList("bar,null", "foo,null", "foobar,null", "null,oof", "null,rab", "null,raboof");
    testOuterJoin(leftInput, rightInput, expected);
}
/**
 * Builds and sends fetch requests for the currently fetchable partitions.
 * Success/failure handling is delegated to the handler callbacks.
 */
@Override
public PollResult poll(long currentTimeMs) {
    return pollInternal(
        prepareFetchRequests(),
        this::handleFetchSuccess,
        this::handleFetchFailure
    );
}
// Corrupting a batch's CRC must surface as a KafkaException on every collect attempt,
// without ever advancing the consumer position.
@Test
public void testInvalidDefaultRecordBatch() {
    buildFetcher();
    ByteBuffer buffer = ByteBuffer.allocate(1024);
    ByteBufferOutputStream out = new ByteBufferOutputStream(buffer);
    MemoryRecordsBuilder builder = new MemoryRecordsBuilder(out, DefaultRecordBatch.CURRENT_MAGIC_VALUE, Compression.NONE,
        TimestampType.CREATE_TIME, 0L, 10L, 0L, (short) 0, 0, false, false, 0, 1024);
    builder.append(10L, "key".getBytes(), "value".getBytes());
    builder.close();
    buffer.flip();

    // Garble the CRC
    buffer.position(17);
    buffer.put("beef".getBytes());
    buffer.position(0);

    assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);

    // normal fetch
    assertEquals(1, sendFetches());
    client.prepareResponse(fullFetchResponse(tidp0, MemoryRecords.readableRecords(buffer), Errors.NONE, 100L, 0));
    networkClientDelegate.poll(time.timer(0));
    for (int i = 0; i < 2; i++) {
        // the fetchRecords() should always throw exception due to the bad batch.
        assertThrows(KafkaException.class, this::collectFetch);
        assertEquals(0, subscriptions.position(tp0).offset);
    }
}
@Override
public V compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
    // A null key is tolerated here (treated as "no mapping") instead of delegating,
    // so callers never trigger the superclass's null-key behavior.
    return key == null ? null : super.compute(key, remappingFunction);
}
// compute(null, ...) must be a no-op returning null, leaving existing entries intact.
@Test
public void testCompute() {
    Assert.assertEquals(VALUE, map.get(KEY));
    Assert.assertEquals(null, map.compute(null, (key, value) -> ""));
    Assert.assertEquals(VALUE, map.get(KEY));
}
/**
 * Builds the single-table rule from configuration and the database's data sources.
 * Note: computeNodeInstanceContext is currently unused by the construction.
 */
@Override
public SingleRule build(final SingleRuleConfiguration ruleConfig, final String databaseName, final DatabaseType protocolType,
        final ResourceMetaData resourceMetaData, final Collection<ShardingSphereRule> builtRules,
        final ComputeNodeInstanceContext computeNodeInstanceContext) {
    return new SingleRule(ruleConfig, databaseName, protocolType, resourceMetaData.getDataSourceMap(), builtRules);
}
// The SPI-loaded builder should produce a SingleRule instance.
@SuppressWarnings({"rawtypes", "unchecked"})
@Test
void assertBuild() {
    DatabaseRuleBuilder builder = OrderedSPILoader.getServices(DatabaseRuleBuilder.class).iterator().next();
    DatabaseRule actual = builder.build(mock(SingleRuleConfiguration.class), "", new MySQLDatabaseType(),
        mock(ResourceMetaData.class), Collections.singleton(mock(ShardingSphereRule.class, RETURNS_DEEP_STUBS)), mock(ComputeNodeInstanceContext.class));
    assertThat(actual, instanceOf(SingleRule.class));
}
/**
 * Reacts to server chat messages by creating or removing timer infoboxes:
 * pickpocket stuns, cannon lifecycle, prayer enhance, staff of the dead, freezes,
 * Arceuus spells and their cooldowns, Tzhaar wave timers, and potion buffs.
 */
@Subscribe
public void onChatMessage(ChatMessage event) {
    final String message = event.getMessage();
    // Only SPAM/GAMEMESSAGE types carry the server texts matched below.
    if (event.getType() != ChatMessageType.SPAM && event.getType() != ChatMessageType.GAMEMESSAGE) {
        return;
    }
    if (message.contains(DODGY_NECKLACE_PROTECTION_MESSAGE) || message.contains(SHADOW_VEIL_PROTECTION_MESSAGE)) {
        removeGameTimer(PICKPOCKET_STUN);
    }
    if (message.contains(PICKPOCKET_FAILURE_MESSAGE) && config.showPickpocketStun() && message.contains("pocket")) {
        // Heroes and elves stun for 6 seconds; everyone else for 5.
        if (message.contains("hero") || message.contains("elf")) {
            createGameTimer(PICKPOCKET_STUN, Duration.ofSeconds(6));
        } else {
            createGameTimer(PICKPOCKET_STUN, Duration.ofSeconds(5));
        }
    }
    if (message.equals(ABYSSAL_SIRE_STUN_MESSAGE) && config.showAbyssalSireStun()) {
        createGameTimer(ABYSSAL_SIRE_STUN);
    }
    if (config.showCannon()) {
        if (message.equals(CANNON_BASE_MESSAGE) || message.equals(CANNON_STAND_MESSAGE)
            || message.equals(CANNON_BARRELS_MESSAGE) || message.equals(CANNON_FURNACE_MESSAGE)
            || message.contains(CANNON_REPAIR_MESSAGE)) {
            // Cannon (re)built or repaired: restart the cannon timer, tagged with the world.
            removeGameTimer(CANNON_REPAIR);
            TimerTimer cannonTimer = createGameTimer(CANNON);
            cannonTimer.setTooltip(cannonTimer.getTooltip() + " - World " + client.getWorld());
        } else if (message.equals(CANNON_BROKEN_MESSAGE)) {
            removeGameTimer(CANNON);
            TimerTimer cannonTimer = createGameTimer(CANNON_REPAIR);
            cannonTimer.setTooltip(cannonTimer.getTooltip() + " - World " + client.getWorld());
        } else if (message.equals(CANNON_PICKUP_MESSAGE) || message.equals(CANNON_DESTROYED_MESSAGE)) {
            removeGameTimer(CANNON);
            removeGameTimer(CANNON_REPAIR);
        }
    }
    if (config.showPrayerEnhance() && message.startsWith("You drink some of your") && message.contains("prayer enhance")) {
        createGameTimer(PRAYER_ENHANCE);
    }
    if (config.showPrayerEnhance() && message.equals(PRAYER_ENHANCE_EXPIRED)) {
        removeGameTimer(PRAYER_ENHANCE);
    }
    if (config.showStaffOfTheDead() && message.contains(STAFF_OF_THE_DEAD_SPEC_MESSAGE)) {
        createGameTimer(STAFF_OF_THE_DEAD);
    }
    if (config.showStaffOfTheDead() && message.contains(STAFF_OF_THE_DEAD_SPEC_EXPIRED_MESSAGE)) {
        removeGameTimer(STAFF_OF_THE_DEAD);
    }
    if (config.showFreezes() && message.equals(FROZEN_MESSAGE)) {
        // Record when the freeze landed so later logic can work out remaining time.
        freezeTimer = createGameTimer(ICEBARRAGE);
        freezeTime = client.getTickCount();
    }
    if (config.showArceuus()) {
        // Arceuus spell durations scale 1 game tick per real Magic level.
        final int magicLevel = client.getRealSkillLevel(Skill.MAGIC);
        if (message.endsWith(SHADOW_VEIL_MESSAGE)) {
            createGameTimer(SHADOW_VEIL, Duration.of(magicLevel, RSTimeUnit.GAME_TICKS));
        } else if (message.endsWith(WARD_OF_ARCEUUS_MESSAGE)) {
            createGameTimer(WARD_OF_ARCEUUS, Duration.of(magicLevel, RSTimeUnit.GAME_TICKS));
        } else if (message.endsWith(MARK_OF_DARKNESS_MESSAGE)) {
            createGameTimer(MARK_OF_DARKNESS, Duration.of(magicLevel, RSTimeUnit.GAME_TICKS));
        } else if (message.contains(RESURRECT_THRALL_MESSAGE_START) && message.endsWith(RESURRECT_THRALL_MESSAGE_END)) {
            // by default the thrall lasts 1 tick per magic level
            int t = client.getBoostedSkillLevel(Skill.MAGIC);
            // ca tiers being completed boosts this
            if (client.getVarbitValue(Varbits.COMBAT_ACHIEVEMENT_TIER_GRANDMASTER) == 2) {
                t += t; // 100% boost
            } else if (client.getVarbitValue(Varbits.COMBAT_ACHIEVEMENT_TIER_MASTER) == 2) {
                t += t / 2; // 50% boost
            }
            createGameTimer(RESURRECT_THRALL, Duration.of(t, RSTimeUnit.GAME_TICKS));
        }
    }
    if (config.showArceuusCooldown()) {
        final int magicLevel = client.getRealSkillLevel(Skill.MAGIC);
        if (message.endsWith(MARK_OF_DARKNESS_MESSAGE)) {
            createGameTimer(MARK_OF_DARKNESS_COOLDOWN, Duration.of(magicLevel - 10, RSTimeUnit.GAME_TICKS));
        }
    }
    if (TZHAAR_PAUSED_MESSAGE.matcher(message).find()) {
        log.debug("Pausing tzhaar timer");
        config.tzhaarLastTime(Instant.now());
        if (config.showTzhaarTimers()) {
            createTzhaarTimer();
        }
        return;
    }
    Matcher matcher = TZHAAR_WAVE_MESSAGE.matcher(message);
    if (matcher.find()) {
        int wave = Integer.parseInt(matcher.group(1));
        if (wave == 1) {
            log.debug("Starting tzhaar timer");
            Instant now = Instant.now();
            if (isInInferno()) {
                // The first wave message of the inferno comes six seconds after the
                // ingame timer starts counting, so backdate the start accordingly.
                config.tzhaarStartTime(now.minus(Duration.ofSeconds(6)));
            } else {
                config.tzhaarStartTime(now);
            }
            config.tzhaarLastTime(null);
            if (config.showTzhaarTimers()) {
                createTzhaarTimer();
            }
        } else if (config.tzhaarStartTime() != null && config.tzhaarLastTime() != null) {
            log.debug("Unpausing tzhaar timer");
            // Advance start time by how long it has been paused
            Instant tzhaarStartTime = config.tzhaarStartTime();
            tzhaarStartTime = tzhaarStartTime.plus(Duration.between(config.tzhaarLastTime(), Instant.now()));
            config.tzhaarStartTime(tzhaarStartTime);
            config.tzhaarLastTime(null);
            if (config.showTzhaarTimers()) {
                createTzhaarTimer();
            }
        }
    }
    if (message.equals(SILK_DRESSING_MESSAGE) && config.showSilkDressing()) {
        createGameTimer(SILK_DRESSING);
    }
    if (message.equals(BLESSED_CRYSTAL_SCARAB_MESSAGE) && config.showBlessedCrystalScarab()) {
        createGameTimer(BLESSED_CRYSTAL_SCARAB);
    }
    if (message.equals(LIQUID_ADRENALINE_MESSAGE) && config.showLiquidAdrenaline()) {
        createGameTimer(LIQUID_ADRENALINE);
    }
}
// A Shadow Veil cast message should create a SHADOW_VEIL timer infobox.
@Test
public void testShadowVeil() {
    when(timersAndBuffsConfig.showArceuus()).thenReturn(true);
    when(client.getRealSkillLevel(Skill.MAGIC)).thenReturn(57);
    ChatMessage chatMessage = new ChatMessage(null, ChatMessageType.GAMEMESSAGE, "", "<col=6800bf>Your thieving abilities have been enhanced.</col>", "", 0);
    timersAndBuffsPlugin.onChatMessage(chatMessage);
    ArgumentCaptor<InfoBox> captor = ArgumentCaptor.forClass(InfoBox.class);
    verify(infoBoxManager).addInfoBox(captor.capture());
    TimerTimer infoBox = (TimerTimer) captor.getValue();
    assertEquals(GameTimer.SHADOW_VEIL, infoBox.getTimer());
}
/**
 * Converts an array of Spark predicates into a single conjunctive Iceberg expression.
 * Fails fast when any individual predicate cannot be translated.
 */
public static Expression convert(Predicate[] predicates) {
    Expression result = Expressions.alwaysTrue();
    for (Predicate sparkPredicate : predicates) {
        Expression icebergExpression = convert(sparkPredicate);
        Preconditions.checkArgument(
            icebergExpression != null,
            "Cannot convert Spark predicate to Iceberg expression: %s",
            sparkPredicate);
        result = Expressions.and(result, icebergExpression);
    }
    return result;
}
// Converting NOT over (= AND IN) should yield null, i.e. the filter is not pushed down.
@Test
public void testNestedInInsideNot() {
    NamedReference namedReference1 = FieldReference.apply("col1");
    LiteralValue v1 = new LiteralValue(1, DataTypes.IntegerType);
    LiteralValue v2 = new LiteralValue(2, DataTypes.IntegerType);
    org.apache.spark.sql.connector.expressions.Expression[] attrAndValue1 =
        new org.apache.spark.sql.connector.expressions.Expression[] {namedReference1, v1};
    Predicate equal = new Predicate("=", attrAndValue1);
    NamedReference namedReference2 = FieldReference.apply("col2");
    org.apache.spark.sql.connector.expressions.Expression[] attrAndValue2 =
        new org.apache.spark.sql.connector.expressions.Expression[] {namedReference2, v1, v2};
    Predicate in = new Predicate("IN", attrAndValue2);
    Not filter = new Not(new And(equal, in));
    Expression converted = SparkV2Filters.convert(filter);
    Assert.assertNull("Expression should not be converted", converted);
}
// Intentionally empty.
// NOTE(review): query registration appears to happen elsewhere (the test asserts the
// query is already in getQueries() after this call) — confirm this no-op is deliberate.
@Override
public void start(final QueryId queryId) {
}
// After start(), the query must be registered with the runtime.
@Test
public void shouldStartQuery() {
    //When:
    validationSharedKafkaStreamsRuntime.start(queryId);
    //Then:
    assertThat("Query was not added", validationSharedKafkaStreamsRuntime.getQueries().contains(queryId));
}
/**
 * Converts a BTC amount to satoshis exactly.
 *
 * @throws ArithmeticException if the amount has a fractional satoshi
 *         or does not fit in a {@code long}
 */
public static long btcToSatoshi(BigDecimal coins) throws ArithmeticException {
    // Scale by the smallest-unit exponent (BTC -> satoshi); the numeric value is
    // identical to shifting the decimal point right, then convert exactly.
    final BigDecimal satoshis = coins.scaleByPowerOfTen(SMALLEST_UNIT_EXPONENT);
    return satoshis.longValueExact();
}
// An amount just above the representable maximum must overflow with ArithmeticException.
@Test(expected = ArithmeticException.class)
public void testBtcToSatoshi_tooBig() {
    btcToSatoshi(new BigDecimal("92233720368.54775808")); // .00000001 more than maximum value
}
@Override
void handle(Connection connection, DatabaseCharsetChecker.State state) throws SQLException {
    // PostgreSQL does not have concept of case-sensitive collation. Only charset ("encoding" in postgresql terminology)
    // must be verified.
    expectUtf8AsDefault(connection);
    if (state == DatabaseCharsetChecker.State.UPGRADE || state == DatabaseCharsetChecker.State.STARTUP) {
        // no need to check columns on fresh installs... as they are not supposed to exist!
        expectUtf8Columns(connection);
    }
}
// On a fresh install only the default charset is checked; no per-column SQL should run.
@Test
public void fresh_install_verifies_that_default_charset_is_utf8() throws SQLException {
    answerDefaultCharset("utf8");
    underTest.handle(connection, DatabaseCharsetChecker.State.FRESH_INSTALL);
    // no errors, charset has been verified
    verify(metadata).getDefaultCharset(same(connection));
    verifyNoInteractions(sqlExecutor);
}
/**
 * Writes the dump metadata exactly once.
 * <p>
 * Uses {@link java.util.concurrent.atomic.AtomicBoolean#compareAndSet} to claim the
 * "metadata written" slot atomically; the original get()-then-set() pair left a
 * check-then-act window in which two writers could both pass the guard.
 *
 * @throws IllegalStateException if metadata was already written, or the file cannot be written
 */
@Override
public void write(ProjectDump.Metadata metadata) {
    checkNotPublished();
    if (!metadataWritten.compareAndSet(false, true)) {
        throw new IllegalStateException("Metadata has already been written");
    }
    File file = new File(rootDir, METADATA.filename());
    try (FileOutputStream output = FILES2.openOutputStream(file, false)) {
        PROTOBUF2.writeTo(metadata, output);
    } catch (IOException e) {
        // Release the claim so a retry after a transient I/O failure stays possible,
        // matching the original behavior of only marking success.
        metadataWritten.set(false);
        throw new IllegalStateException("Can not write to file " + file, e);
    }
}
// Round-trip: metadata written by the writer must be readable back from the dump.
@Test
public void writeMetadata_writes_to_file() {
    underTest.write(newMetadata());
    assertThat(dumpReader.metadata().getProjectKey()).isEqualTo("foo");
}
/**
 * Defers GeTui push-click tracking by 200 ms so the notification payload can be
 * correlated with the click via its message id. Failures are logged, never thrown.
 */
public void trackGTClickDelayed(String messageId, String title, String content) {
    try {
        Message message = Message.obtain();
        message.what = GT_PUSH_MSG;
        message.obj = messageId;
        // Stash the notification content keyed by message id for the handler to consume.
        mGeTuiPushInfoMap.put(messageId, new NotificationInfo(title, content, System.currentTimeMillis()));
        mPushHandler.sendMessageDelayed(message, 200);
        SALog.i(TAG, "sendMessageDelayed,msgId = " + messageId);
    } catch (Exception e) {
        SALog.printStackTrace(e);
    }
}
// Smoke test: a delayed GeTui click track must not throw after SDK initialization.
@Test
public void trackGTClickDelayed() {
    SAHelper.initSensors(mApplication);
    PushProcess.getInstance().trackGTClickDelayed("sdajh-asdjfhjas", "mock_title", "mock_content");
}
/**
 * Ensures the group may be deleted: only an EMPTY group is deletable.
 *
 * @throws ApiException (NON_EMPTY_GROUP) if the group still has members
 */
@Override
public void validateDeleteGroup() throws ApiException {
    if (state() == ShareGroupState.EMPTY) {
        return;
    }
    throw Errors.NON_EMPTY_GROUP.exception();
}
// Deletion is only allowed while the group is EMPTY; any member makes it STABLE
// and undeletable, regardless of epoch bumps.
@Test
public void testValidateDeleteGroup() {
    ShareGroup shareGroup = createShareGroup("foo");
    assertEquals(ShareGroupState.EMPTY, shareGroup.state());
    assertDoesNotThrow(shareGroup::validateDeleteGroup);
    ShareGroupMember member1 = new ShareGroupMember.Builder("member1")
        .setMemberEpoch(1)
        .setPreviousMemberEpoch(0)
        .build();
    shareGroup.updateMember(member1);
    assertEquals(ShareGroupState.STABLE, shareGroup.state());
    assertThrows(GroupNotEmptyException.class, shareGroup::validateDeleteGroup);
    shareGroup.setGroupEpoch(1);
    assertEquals(ShareGroupState.STABLE, shareGroup.state());
    assertThrows(GroupNotEmptyException.class, shareGroup::validateDeleteGroup);
    shareGroup.setTargetAssignmentEpoch(1);
    assertEquals(ShareGroupState.STABLE, shareGroup.state());
    assertThrows(GroupNotEmptyException.class, shareGroup::validateDeleteGroup);
}
// Returns the JobManager's configured Kubernetes service account.
public String getServiceAccount() {
    return flinkConfig.get(KubernetesConfigOptions.JOB_MANAGER_SERVICE_ACCOUNT);
}
// Without an explicit setting, the "default" service account is used.
@Test
void testGetServiceAccountShouldReturnDefaultIfNotExplicitlySet() {
    assertThat(kubernetesJobManagerParameters.getServiceAccount()).isEqualTo("default");
}
/**
 * Creates temp partitions mirroring the given source partitions: each new partition
 * copies the source's per-partition properties and takes the source name plus the
 * postfix. Names that already exist are skipped so a failover replay does not create
 * duplicates.
 */
@VisibleForTesting
public List<Partition> getNewPartitionsFromPartitions(Database db, OlapTable olapTable, List<Long> sourcePartitionIds,
        Map<Long, String> origPartitions, OlapTable copiedTbl, String namePostfix, Set<Long> tabletIdSet,
        List<Long> tmpPartitionIds, DistributionDesc distributionDesc, long warehouseId) throws DdlException {
    List<Partition> newPartitions = Lists.newArrayListWithCapacity(sourcePartitionIds.size());
    for (int i = 0; i < sourcePartitionIds.size(); ++i) {
        long newPartitionId = tmpPartitionIds.get(i);
        long sourcePartitionId = sourcePartitionIds.get(i);
        String newPartitionName = origPartitions.get(sourcePartitionId) + namePostfix;
        if (olapTable.checkPartitionNameExist(newPartitionName, true)) {
            // to prevent creating the same partitions when failover
            // this will happen when OverwriteJob crashed after created temp partitions,
            // but before changing to PREPARED state
            LOG.warn("partition:{} already exists in table:{}", newPartitionName, olapTable.getName());
            continue;
        }
        // Copy per-partition properties from the source partition onto the new id.
        PartitionInfo partitionInfo = copiedTbl.getPartitionInfo();
        partitionInfo.setTabletType(newPartitionId, partitionInfo.getTabletType(sourcePartitionId));
        partitionInfo.setIsInMemory(newPartitionId, partitionInfo.getIsInMemory(sourcePartitionId));
        partitionInfo.setReplicationNum(newPartitionId, partitionInfo.getReplicationNum(sourcePartitionId));
        partitionInfo.setDataProperty(newPartitionId, partitionInfo.getDataProperty(sourcePartitionId));
        if (copiedTbl.isCloudNativeTableOrMaterializedView()) {
            partitionInfo.setDataCacheInfo(newPartitionId, partitionInfo.getDataCacheInfo(sourcePartitionId));
        }
        Partition newPartition = null;
        if (distributionDesc != null) {
            // Explicit distribution requested; a zero bucket count means "auto" and
            // is derived from the source partition via optimizeDistribution.
            DistributionInfo distributionInfo = distributionDesc.toDistributionInfo(olapTable.getColumns());
            if (distributionInfo.getBucketNum() == 0) {
                Partition sourcePartition = olapTable.getPartition(sourcePartitionId);
                olapTable.optimizeDistribution(distributionInfo, sourcePartition);
            }
            newPartition = createPartition(db, copiedTbl, newPartitionId, newPartitionName, null, tabletIdSet, distributionInfo, warehouseId);
        } else {
            newPartition = createPartition(db, copiedTbl, newPartitionId, newPartitionName, null, tabletIdSet, warehouseId);
        }
        newPartitions.add(newPartition);
    }
    return newPartitions;
}
// End-to-end: the new partition copies source properties, gets the name postfix,
// and can replace the original partition in place.
@Test
public void testGetNewPartitionsFromPartitions() throws DdlException {
    Database db = connectContext.getGlobalStateMgr().getDb("test");
    Table table = db.getTable("t1");
    Assert.assertTrue(table instanceof OlapTable);
    OlapTable olapTable = (OlapTable) table;
    Partition sourcePartition = olapTable.getPartition("t1");
    List<Long> sourcePartitionIds = Lists.newArrayList(sourcePartition.getId());
    List<Long> tmpPartitionIds = Lists.newArrayList(connectContext.getGlobalStateMgr().getNextId());
    LocalMetastore localMetastore = connectContext.getGlobalStateMgr().getLocalMetastore();
    Map<Long, String> origPartitions = Maps.newHashMap();
    OlapTable copiedTable = localMetastore.getCopiedTable(db, olapTable, sourcePartitionIds, origPartitions);
    Assert.assertEquals(olapTable.getName(), copiedTable.getName());
    Set<Long> tabletIdSet = Sets.newHashSet();
    List<Partition> newPartitions = localMetastore.getNewPartitionsFromPartitions(db, olapTable, sourcePartitionIds,
        origPartitions, copiedTable, "_100", tabletIdSet, tmpPartitionIds, null, WarehouseManager.DEFAULT_WAREHOUSE_ID);
    Assert.assertEquals(sourcePartitionIds.size(), newPartitions.size());
    Assert.assertEquals(1, newPartitions.size());
    Partition newPartition = newPartitions.get(0);
    Assert.assertEquals("t1_100", newPartition.getName());
    olapTable.addTempPartition(newPartition);
    PartitionInfo partitionInfo = olapTable.getPartitionInfo();
    partitionInfo.addPartition(newPartition.getId(), partitionInfo.getDataProperty(sourcePartition.getId()),
        partitionInfo.getReplicationNum(sourcePartition.getId()), partitionInfo.getIsInMemory(sourcePartition.getId()));
    olapTable.replacePartition("t1", "t1_100");
    Assert.assertEquals(newPartition.getId(), olapTable.getPartition("t1").getId());
}
/**
 * Publishes pluggable artifacts first, then the built-in artifact plans. When the
 * pluggable-artifact metadata folder is non-empty, its upload plan is inserted at the
 * front so metadata goes up before everything else. Per-plan upload failures are
 * collected and reported together in one RuntimeException.
 * NOTE(review): failedArtifact appears to be an instance field, so failures accumulate
 * across invocations — confirm this publisher is single-use.
 */
public void publishArtifacts(List<ArtifactPlan> artifactPlans, EnvironmentVariableContext environmentVariableContext) {
    final File pluggableArtifactFolder = publishPluggableArtifacts(artifactPlans, environmentVariableContext);
    try {
        final List<ArtifactPlan> mergedPlans = artifactPlanFilter.getBuiltInMergedArtifactPlans(artifactPlans);
        if (isMetadataFolderEmpty(pluggableArtifactFolder)) {
            LOGGER.info("Pluggable metadata folder is empty.");
        } else if (pluggableArtifactFolder != null) {
            mergedPlans.add(0, new ArtifactPlan(ArtifactPlanType.file, format("%s%s*", pluggableArtifactFolder.getName(), File.separator), PLUGGABLE_ARTIFACT_METADATA_FOLDER));
        }
        for (ArtifactPlan artifactPlan : mergedPlans) {
            try {
                artifactPlan.publishBuiltInArtifacts(goPublisher, workingDirectory);
            } catch (Exception e) {
                // Keep uploading the remaining plans; report all failures at the end.
                failedArtifact.add(artifactPlan);
            }
        }
        if (!failedArtifact.isEmpty()) {
            StringBuilder builder = new StringBuilder();
            for (ArtifactPlan artifactPlan : failedArtifact) {
                artifactPlan.printArtifactInfo(builder);
            }
            throw new RuntimeException(format("[%s] Uploading finished. Failed to upload %s.", PRODUCT_NAME, builder));
        }
    } finally {
        // The temp metadata folder is always cleaned up, even on failure.
        FileUtils.deleteQuietly(pluggableArtifactFolder);
    }
}
// The pluggable-artifact metadata folder must be uploaded before any built-in artifact.
@Test
public void shouldAddPluggableArtifactMetadataFileArtifactPlanAtTop() throws Exception {
    TestFileUtil.createTestFile(workingFolder, "installer.zip");
    TestFileUtil.createTestFile(workingFolder, "testreports.xml");
    final ArtifactStore artifactStore = new ArtifactStore("s3", "cd.go.s3", create("Foo", false, "Bar"));
    final ArtifactStores artifactStores = new ArtifactStores(artifactStore);
    final ArtifactPlan artifactPlan = new ArtifactPlan(new PluggableArtifactConfig("installers", "s3", create("Baz", true, "Car")));
    List<ArtifactPlan> artifactPlans = List.of(
        new ArtifactPlan(ArtifactPlanType.file, "installer.zip", "dist"),
        new ArtifactPlan(ArtifactPlanType.unit, "testreports.xml", "testreports"),
        artifactPlan
    );
    when(artifactExtension.publishArtifact(eq("cd.go.s3"), eq(artifactPlan), eq(artifactStore), anyString(), eq(env)))
        .thenReturn(new PublishArtifactResponse(Collections.singletonMap("Foo", "Bar")));
    final GoPublisher publisher = mock(GoPublisher.class);
    new ArtifactsPublisher(publisher, artifactExtension, artifactStores, registry, workingFolder)
        .publishArtifacts(artifactPlans, env);
    // Upload order matters: metadata first, then the built-in plans.
    InOrder inOrder = inOrder(publisher);
    inOrder.verify(publisher).upload(any(), eq("pluggable-artifact-metadata"));
    inOrder.verify(publisher).upload(any(), eq("dist"));
    inOrder.verify(publisher).upload(any(), eq("testreports"));
}
public static Long fromHeaders(final String header, final Map<String, String> response) { final Map<String, String> headers = new HashMap<>(response.entrySet() .stream() .map(entry -> Maps.immutableEntry(StringUtils.lowerCase(entry.getKey()), entry.getValue())) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); if(headers.containsKey(StringUtils.lowerCase(header))) { try { return normalizeToMilliseconds(Double.valueOf(headers.get(StringUtils.lowerCase(header))).longValue()); } catch(NumberFormatException ignored) { // ignore } } return -1L; }
// Timestamps may arrive as seconds or milliseconds; both must normalize to milliseconds.
@Test
public void testFindMillisecondsTimestamp() throws Exception {
    final Map<String, String> headers = new HashMap<>();
    headers.put("Mtime", "1530305150672");
    // milliseconds
    assertEquals(1530305150672L, S3TimestampFeature.fromHeaders(S3TimestampFeature.METADATA_MODIFICATION_DATE, headers).longValue());
    headers.put("Mtime", "1530305150");
    // seconds
    assertEquals(1530305150000L, S3TimestampFeature.fromHeaders(S3TimestampFeature.METADATA_MODIFICATION_DATE, headers).longValue());
}
/**
 * Computes the group assignment, choosing the homogeneous fast path when every
 * member shares one subscription; an empty group yields an empty assignment.
 */
@Override
public GroupAssignment assign(
    GroupSpec groupSpec,
    SubscribedTopicDescriber subscribedTopicDescriber
) throws PartitionAssignorException {
    if (groupSpec.memberIds().isEmpty()) {
        return new GroupAssignment(Collections.emptyMap());
    }
    return groupSpec.subscriptionType() == SubscriptionType.HOMOGENEOUS
        ? assignHomogeneousGroup(groupSpec, subscribedTopicDescriber)
        : assignHeterogeneousGroup(groupSpec, subscribedTopicDescriber);
}
// Heterogeneous subscriptions over three topics: every subscribed topic's partitions
// are covered exactly once across the members.
@Test
public void testFirstAssignmentThreeMembersThreeTopicsDifferentSubscriptions() {
    Map<Uuid, TopicMetadata> topicMetadata = new HashMap<>();
    topicMetadata.put(topic1Uuid, new TopicMetadata(topic1Uuid, topic1Name, 3, Collections.emptyMap()));
    topicMetadata.put(topic2Uuid, new TopicMetadata(topic2Uuid, topic2Name, 3, Collections.emptyMap()));
    topicMetadata.put(topic3Uuid, new TopicMetadata(topic3Uuid, topic3Name, 2, Collections.emptyMap()));
    // TreeMap keeps member iteration order deterministic for the expected assignment.
    Map<String, MemberSubscriptionAndAssignmentImpl> members = new TreeMap<>();
    members.put(memberA, new MemberSubscriptionAndAssignmentImpl(Optional.empty(), Optional.empty(), mkSet(topic1Uuid, topic2Uuid), Assignment.EMPTY));
    members.put(memberB, new MemberSubscriptionAndAssignmentImpl(Optional.empty(), Optional.empty(), mkSet(topic3Uuid), Assignment.EMPTY));
    members.put(memberC, new MemberSubscriptionAndAssignmentImpl(Optional.empty(), Optional.empty(), mkSet(topic2Uuid, topic3Uuid), Assignment.EMPTY));
    GroupSpec groupSpec = new GroupSpecImpl(members, HETEROGENEOUS, invertedTargetAssignment(members));
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(topicMetadata);
    GroupAssignment computedAssignment = assignor.assign(groupSpec, subscribedTopicMetadata);
    Map<String, Map<Uuid, Set<Integer>>> expectedAssignment = new HashMap<>();
    expectedAssignment.put(memberA, mkAssignment(mkTopicAssignment(topic1Uuid, 0, 1, 2), mkTopicAssignment(topic2Uuid, 0, 1)));
    expectedAssignment.put(memberB, mkAssignment(mkTopicAssignment(topic3Uuid, 0)));
    expectedAssignment.put(memberC, mkAssignment(mkTopicAssignment(topic2Uuid, 2), mkTopicAssignment(topic3Uuid, 1)));
    assertAssignment(expectedAssignment, computedAssignment);
}
/**
 * Binds a guard expression to a GuardedByExpression, rejecting unresolvable
 * expressions and bare type literals (a type alone is not a valid lock).
 */
private static GuardedByExpression bind(JCTree.JCExpression exp, BinderContext context) {
    GuardedByExpression expr = BINDER.visit(exp, context);
    checkGuardedBy(expr != null, String.valueOf(exp));
    checkGuardedBy(expr.kind() != Kind.TYPE_LITERAL, "Raw type literal: %s", exp);
    return expr;
}
// A static field accessed via the class name binds to (SELECT (TYPE_LITERAL ...) lock).
@Test
public void staticOnStatic() {
    assertThat(
        bind(
            "Test",
            "Test.lock",
            forSourceLines(
                "threadsafety/Test.java",
                "package threadsafety;",
                "class Test {",
                "  static final Object lock = new Object();",
                "}")))
        .isEqualTo("(SELECT (TYPE_LITERAL threadsafety.Test) lock)");
}
/**
 * Decodes the Avro payload into a GenericAvroRecord.
 * Decode errors — including truncated input (IndexOutOfBoundsException) — surface
 * as SchemaSerializationException.
 */
@Override
public GenericAvroRecord read(byte[] bytes, int offset, int length) {
    try {
        // A caller-supplied zero offset is overridden by the reader's configured
        // offset — presumably to skip a header prefix; confirm against callers.
        if (offset == 0 && this.offset > 0) {
            offset = this.offset;
        }
        Decoder decoder = DecoderFactory.get().binaryDecoder(bytes, offset, length - offset, null);
        org.apache.avro.generic.GenericRecord avroRecord = (org.apache.avro.generic.GenericRecord) reader.read(null, decoder);
        return new GenericAvroRecord(schemaVersion, schema, fields, avroRecord);
    } catch (IOException | IndexOutOfBoundsException e) {
        throw new SchemaSerializationException(e);
    }
}
// Reading with the exact writer schema should decode every field verbatim.
@Test
public void testGenericAvroReaderByWriterSchema() {
    byte[] fooBytes = fooSchema.encode(foo);
    GenericAvroReader genericAvroSchemaByWriterSchema = new GenericAvroReader(fooSchema.getAvroSchema());
    GenericRecord genericRecordByWriterSchema = genericAvroSchemaByWriterSchema.read(fooBytes);
    assertEquals(genericRecordByWriterSchema.getField("field1"), "foo1");
    assertEquals(genericRecordByWriterSchema.getField("field2"), "bar1");
    assertEquals(genericRecordByWriterSchema.getField("fieldUnableNull"), "notNull");
}
// Atomically adds delta and returns the UPDATED value:
// getAndAddVal returns the previous value, so add the delta back on top.
public long addAndGet(long delta) {
    final long previous = getAndAddVal(delta);
    return previous + delta;
}
// addAndGet must return the updated value and leave the counter at it.
@Test
public void testAddAndGet() {
    PaddedAtomicLong counter = new PaddedAtomicLong();
    long value = counter.addAndGet(1);
    assertEquals(1, value);
    assertEquals(1, counter.get());
}
/**
 * Looks up a file store by key from StarOS.
 *
 * @return the file store info, or null when the key does not exist
 * @throws DdlException for any other StarClient failure.
 *         NOTE(review): the StarClientException cause is dropped — chain it if
 *         DdlException offers a cause-accepting constructor.
 */
public FileStoreInfo getFileStore(String fsKey) throws DdlException {
    try {
        return client.getFileStore(fsKey, serviceId);
    } catch (StarClientException e) {
        // Absence is an expected outcome, not an error.
        if (e.getCode() == StatusCode.NOT_EXIST) {
            return null;
        }
        throw new DdlException("Failed to get file store, error: " + e.getMessage());
    }
}
// serviceId "1" resolves the store; serviceId "2" triggers a client error that
// must be wrapped in a DdlException with the client's message.
@Test
public void testGetFileStore() throws StarClientException, DdlException {
    S3FileStoreInfo s3FsInfo = S3FileStoreInfo.newBuilder()
        .setRegion("region").setEndpoint("endpoint").build();
    FileStoreInfo fsInfo = FileStoreInfo.newBuilder().setFsKey("test-fskey")
        .setFsName("test-fsname").setFsType(FileStoreType.S3).setS3FsInfo(s3FsInfo).build();
    new Expectations() {
        {
            client.getFileStore("test-fskey", "1");
            result = fsInfo;
            minTimes = 0;
            client.getFileStore("test-fskey", "2");
            result = new StarClientException(StatusCode.INVALID_ARGUMENT, "mocked exception");
        }
    };
    Deencapsulation.setField(starosAgent, "serviceId", "1");
    Assert.assertEquals("test-fskey", starosAgent.getFileStore("test-fskey").getFsKey());
    Deencapsulation.setField(starosAgent, "serviceId", "2");
    ExceptionChecker.expectThrowsWithMsg(DdlException.class,
        "Failed to get file store, error: INVALID_ARGUMENT:mocked exception",
        () -> starosAgent.getFileStore("test-fskey"));
}
/**
 * Runs aggregate analysis over the final projection.
 *
 * @throws IllegalArgumentException if the analysis has no GROUP BY,
 *         i.e. the query is not an aggregate query
 */
public AggregateAnalysisResult analyze(
    final ImmutableAnalysis analysis,
    final List<SelectExpression> finalProjection
) {
    if (!analysis.getGroupBy().isPresent()) {
        throw new IllegalArgumentException("Not an aggregate query");
    }
    final AggAnalyzer aggregateAnalyzer = new AggAnalyzer(analysis, functionRegistry);
    aggregateAnalyzer.process(finalProjection);
    return aggregateAnalyzer.result();
}
// Arbitrary (non-aggregate, non-column) expressions in the projection must be accepted.
@Test
public void shouldNotThrowOnOtherExpressionTypesInProjection() {
    // Given:
    final Expression someExpression = mock(Expression.class);
    givenSelectExpression(someExpression);
    // When:
    analyzer.analyze(analysis, selects);
    // Then: did not throw.
}
/**
 * Registers an exported service URL in this metadata: records service-level info,
 * extracts instance-level params, and marks the metadata as updated.
 * Synchronized because services/exportedServiceURLs are mutated by concurrent exporters.
 */
public synchronized void addService(URL url) {
    // fixme, pass in application mode context during initialization of MetadataInfo.
    if (this.loader == null) {
        this.loader = url.getOrDefaultApplicationModel().getExtensionLoader(MetadataParamsFilter.class);
    }
    List<MetadataParamsFilter> filters = loader.getActivateExtension(url, "params-filter");
    // generate service level metadata
    ServiceInfo serviceInfo = new ServiceInfo(url, filters);
    this.services.put(serviceInfo.getMatchKey(), serviceInfo);
    // extract common instance level params
    extractInstanceParams(url, filters);
    // Lazily created; the sorted concurrent map keeps URLs in deterministic order.
    if (exportedServiceURLs == null) {
        exportedServiceURLs = new ConcurrentSkipListMap<>();
    }
    addURL(exportedServiceURLs, url);
    updated = true;
}
// Smoke test printing the JSON form of metadata with one and with two services.
// NOTE(review): no assertions — output is only inspected manually; consider asserting
// on the serialized JSON instead of printing.
@Test
void testJsonFormat() {
    MetadataInfo metadataInfo = new MetadataInfo("demo");
    // export normal url again
    metadataInfo.addService(url);
    System.out.println(JsonUtils.toJson(metadataInfo));
    MetadataInfo metadataInfo2 = new MetadataInfo("demo");
    // export normal url again
    metadataInfo2.addService(url);
    metadataInfo2.addService(url2);
    System.out.println(JsonUtils.toJson(metadataInfo2));
}
/**
 * Visitor entry point: only visitors that implement ZoomVisitor are dispatched;
 * anything else is logged and ignored.
 */
@Override
public void accept(ModemVisitor modemVisitor) {
    if (!(modemVisitor instanceof ZoomVisitor)) {
        LOGGER.info("Only ZoomVisitor is allowed to visit Zoom modem");
        return;
    }
    ZoomVisitor zoomVisitor = (ZoomVisitor) modemVisitor;
    zoomVisitor.visit(this);
}
// Any ZoomVisitor subtype (here a DOS-configuring visitor) must be dispatched to visit(zoom).
@Test
void testAcceptForDos() {
    var zoom = new Zoom();
    var mockVisitor = mock(ConfigureForDosVisitor.class);
    zoom.accept(mockVisitor);
    verify((ZoomVisitor) mockVisitor).visit(eq(zoom));
}
/**
 * Encodes an outgoing PostgreSQL packet. Identifier packets get their type byte and
 * a length placeholder up front, with the real length patched in afterwards. A
 * failure while writing the body is converted into an ErrorResponse packet so the
 * client still receives a well-formed message.
 */
@Override
public void encode(final ChannelHandlerContext context, final DatabasePacket message, final ByteBuf out) {
    boolean isIdentifierPacket = message instanceof PostgreSQLIdentifierPacket;
    if (isIdentifierPacket) {
        prepareMessageHeader(out, ((PostgreSQLIdentifierPacket) message).getIdentifier().getValue());
    }
    PostgreSQLPacketPayload payload = new PostgreSQLPacketPayload(out, context.channel().attr(CommonConstants.CHARSET_ATTRIBUTE_KEY).get());
    try {
        message.write(payload);
        // CHECKSTYLE:OFF
    } catch (final RuntimeException ex) {
        // CHECKSTYLE:ON
        // Roll back whatever the failed packet wrote and replace it with an error response.
        payload.getByteBuf().resetWriterIndex();
        // TODO consider what severity to use
        PostgreSQLErrorResponsePacket errorResponsePacket = PostgreSQLErrorResponsePacket.newBuilder(
                PostgreSQLMessageSeverityLevel.ERROR, PostgreSQLVendorError.SYSTEM_ERROR, ex.getMessage()).build();
        isIdentifierPacket = true;
        prepareMessageHeader(out, errorResponsePacket.getIdentifier().getValue());
        errorResponsePacket.write(payload);
    } finally {
        // Identifier packets carry a length field that can only be filled in at the end.
        if (isIdentifierPacket) {
            updateMessageLength(out);
        }
    }
}
// An identifier packet is written as: type byte, 4-byte length placeholder, body,
// then the patched length (8) at offset 1.
@Test
void assertEncodePostgreSQLIdentifierPacket() {
    PostgreSQLIdentifierPacket packet = mock(PostgreSQLIdentifierPacket.class);
    when(packet.getIdentifier()).thenReturn(PostgreSQLMessagePacketType.AUTHENTICATION_REQUEST);
    when(byteBuf.readableBytes()).thenReturn(9);
    new PostgreSQLPacketCodecEngine().encode(context, packet, byteBuf);
    verify(byteBuf).writeByte(PostgreSQLMessagePacketType.AUTHENTICATION_REQUEST.getValue());
    verify(byteBuf).writeInt(0);
    verify(packet).write(any(PostgreSQLPacketPayload.class));
    verify(byteBuf).setInt(1, 8);
}
/**
 * Converts a prefixed base id string back to its original AMQP id object
 * (UUID, UnsignedLong, String or Binary). Unprefixed values pass through as-is;
 * null maps to null.
 *
 * @throws AmqpProtocolException when the value after the prefix cannot be parsed.
 *         NOTE(review): the IllegalArgumentException cause is dropped — chain it if
 *         AmqpProtocolException offers a cause-accepting constructor.
 */
public Object toIdObject(String baseId) throws AmqpProtocolException {
    if (baseId == null) {
        return null;
    }
    try {
        if (hasAmqpUuidPrefix(baseId)) {
            String uuidString = strip(baseId, AMQP_UUID_PREFIX_LENGTH);
            return UUID.fromString(uuidString);
        } else if (hasAmqpUlongPrefix(baseId)) {
            String longString = strip(baseId, AMQP_ULONG_PREFIX_LENGTH);
            return UnsignedLong.valueOf(longString);
        } else if (hasAmqpStringPrefix(baseId)) {
            return strip(baseId, AMQP_STRING_PREFIX_LENGTH);
        } else if (hasAmqpBinaryPrefix(baseId)) {
            String hexString = strip(baseId, AMQP_BINARY_PREFIX_LENGTH);
            byte[] bytes = convertHexStringToBinary(hexString);
            return new Binary(bytes);
        } else {
            // We have a string without any type prefix, transmit it as-is.
            return baseId;
        }
    } catch (IllegalArgumentException e) {
        throw new AmqpProtocolException("Unable to convert ID value");
    }
}
// null input must map to null output rather than throwing.
@Test
public void testToIdObjectWithNull() throws Exception {
    assertNull("null object should have been returned", messageIdHelper.toIdObject(null));
}
/**
 * Evaluates this predicate against the given input values.  The predicate
 * only fires when the map contains an entry for this predicate's field name;
 * otherwise it is vacuously false.
 *
 * @param values input values keyed by field name
 * @return the result of {@code evaluation} on the matching value, or
 *         {@code false} when no matching parameter is present
 */
@Override
public boolean evaluate(Map<String, Object> values) {
    if (!values.containsKey(name)) {
        return false;
    }
    logger.debug("found matching parameter, evaluating... ");
    return evaluation(values.get(name));
}
// NOT_IN semantics: missing parameter -> false; value inside the set -> false;
// value outside the set -> true.
@Test
void evaluateStringNotIn() {
    ARRAY_TYPE arrayType = ARRAY_TYPE.STRING;
    List<Object> values = getObjects(arrayType, 4);
    KiePMMLSimpleSetPredicate kiePMMLSimpleSetPredicate = getKiePMMLSimpleSetPredicate(values, arrayType, IN_NOTIN.NOT_IN);
    Map<String, Object> inputData = new HashMap<>();
    // No entry for the predicate's field name -> predicate does not fire.
    inputData.put("FAKE", "NOT");
    assertThat(kiePMMLSimpleSetPredicate.evaluate(inputData)).isFalse();
    inputData.put(SIMPLE_SET_PREDICATE_NAME, values.get(0));
    assertThat(kiePMMLSimpleSetPredicate.evaluate(inputData)).isFalse();
    inputData.put(SIMPLE_SET_PREDICATE_NAME, "NOT");
    assertThat(kiePMMLSimpleSetPredicate.evaluate(inputData)).isTrue();
}
// Intentional no-op: element removal is unsupported by this collection, so
// the method always reports false and leaves the contents untouched (even
// for elements that were previously added).
@Override
public boolean remove(Object o) {
    return false;
}
// remove() must be a no-op returning false, whether or not the key was added.
@Test
public void remove() {
    SelectedSelectionKeySet set = new SelectedSelectionKeySet();
    assertTrue(set.add(mockKey));
    assertFalse(set.remove(mockKey));
    assertFalse(set.remove(mockKey2));
}
// Builds the single pending task for this broker load job, registers it under
// its signature, and submits it to the global pending-load task scheduler.
@Override
protected void unprotectedExecuteJob() throws LoadException {
    LoadTask task = new BrokerLoadPendingTask(this, fileGroupAggInfo.getAggKeyToFileGroups(), brokerDesc);
    idToTasks.put(task.getSignature(), task);
    submitTask(GlobalStateMgr.getCurrentState().getPendingLoadTaskScheduler(), task);
}
// Executing the job must register exactly one pending task in idToTasks.
@Test
public void testExecuteJob(@Mocked LeaderTaskExecutor leaderTaskExecutor) throws LoadException {
    new Expectations() {
        {
            leaderTaskExecutor.submit((LeaderTask) any);
            minTimes = 0;
            result = true;
        }
    };
    GlobalStateMgr.getCurrentState().setEditLog(new EditLog(new ArrayBlockingQueue<>(100)));
    new MockUp<EditLog>() {
        @Mock
        public void logSaveNextId(long nextId) {
        }
    };
    BrokerLoadJob brokerLoadJob = new BrokerLoadJob();
    brokerLoadJob.unprotectedExecuteJob();
    Map<Long, LoadTask> idToTasks = Deencapsulation.getField(brokerLoadJob, "idToTasks");
    Assert.assertEquals(1, idToTasks.size());
}
// Translates outbound HTTP/1.x messages into HTTP/2 frames on the current
// stream: HttpMessage -> HEADERS, HttpContent -> DATA, and any trailing
// headers -> a final HEADERS frame with END_STREAM.  Non-HTTP messages pass
// through untouched.  The promise aggregator collects the per-frame promises
// back into the caller's single promise.
@Override
public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) {
    if (!(msg instanceof HttpMessage || msg instanceof HttpContent)) {
        ctx.write(msg, promise);
        return;
    }
    // Ownership of msg transfers to writeData below; release it ourselves on any earlier exit.
    boolean release = true;
    SimpleChannelPromiseAggregator promiseAggregator = new SimpleChannelPromiseAggregator(promise, ctx.channel(), ctx.executor());
    try {
        Http2ConnectionEncoder encoder = encoder();
        boolean endStream = false;
        if (msg instanceof HttpMessage) {
            final HttpMessage httpMsg = (HttpMessage) msg;
            // Provide the user the opportunity to specify the streamId
            currentStreamId = getStreamId(httpMsg.headers());
            // Add HttpScheme if it's defined in constructor and header does not contain it.
            if (httpScheme != null && !httpMsg.headers().contains(HttpConversionUtil.ExtensionHeaderNames.SCHEME.text())) {
                httpMsg.headers().set(HttpConversionUtil.ExtensionHeaderNames.SCHEME.text(), httpScheme.name());
            }
            // Convert and write the headers.
            Http2Headers http2Headers = HttpConversionUtil.toHttp2Headers(httpMsg, validateHeaders);
            // A full message with an empty body can end the stream on the HEADERS frame itself.
            endStream = msg instanceof FullHttpMessage && !((FullHttpMessage) msg).content().isReadable();
            writeHeaders(ctx, encoder, currentStreamId, httpMsg.headers(), http2Headers, endStream, promiseAggregator);
        }
        if (!endStream && msg instanceof HttpContent) {
            boolean isLastContent = false;
            HttpHeaders trailers = EmptyHttpHeaders.INSTANCE;
            Http2Headers http2Trailers = EmptyHttp2Headers.INSTANCE;
            if (msg instanceof LastHttpContent) {
                isLastContent = true;
                // Convert any trailing headers.
                final LastHttpContent lastContent = (LastHttpContent) msg;
                trailers = lastContent.trailingHeaders();
                http2Trailers = HttpConversionUtil.toHttp2Headers(trailers, validateHeaders);
            }
            // Write the data
            final ByteBuf content = ((HttpContent) msg).content();
            // With trailers pending, the DATA frame must not carry END_STREAM.
            endStream = isLastContent && trailers.isEmpty();
            encoder.writeData(ctx, currentStreamId, content, 0, endStream, promiseAggregator.newPromise());
            release = false;
            if (!trailers.isEmpty()) {
                // Write trailing headers.
                writeHeaders(ctx, encoder, currentStreamId, trailers, http2Trailers, true, promiseAggregator);
            }
        }
    } catch (Throwable t) {
        onError(ctx, true, t);
        promiseAggregator.setFailure(t);
    } finally {
        if (release) {
            ReferenceCountUtil.release(msg);
        }
        promiseAggregator.doneAllocatingPromises();
    }
}
// A chunked request with body chunks and trailing headers must be written as
// HEADERS (no END_STREAM), DATA (no END_STREAM), then trailing HEADERS with
// END_STREAM; the server should see the concatenated body exactly once.
@Test
public void testChunkedRequestWithBodyAndTrailingHeaders() throws Exception {
    final String text = "foooooo";
    final String text2 = "goooo";
    final List<String> receivedBuffers = Collections.synchronizedList(new ArrayList<String>());
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock in) throws Throwable {
            receivedBuffers.add(((ByteBuf) in.getArguments()[2]).toString(UTF_8));
            return null;
        }
    }).when(serverListener).onDataRead(any(ChannelHandlerContext.class), eq(3), any(ByteBuf.class), eq(0), eq(false));
    bootstrapEnv(4, 1, 1);
    final HttpRequest request = new DefaultHttpRequest(HTTP_1_1, POST, "http://your_user-name123@www.example.org:5555/example");
    final HttpHeaders httpHeaders = request.headers();
    httpHeaders.set(HttpHeaderNames.HOST, "www.example.org:5555");
    httpHeaders.add(HttpHeaderNames.TRANSFER_ENCODING, "chunked");
    httpHeaders.add(of("foo"), of("goo"));
    httpHeaders.add(of("foo"), of("goo2"));
    httpHeaders.add(of("foo2"), of("goo2"));
    final Http2Headers http2Headers = new DefaultHttp2Headers().method(new AsciiString("POST")).path(new AsciiString("/example"))
            .authority(new AsciiString("www.example.org:5555")).scheme(new AsciiString("http"))
            .add(new AsciiString("foo"), new AsciiString("goo"))
            .add(new AsciiString("foo"), new AsciiString("goo2"))
            .add(new AsciiString("foo2"), new AsciiString("goo2"));
    final DefaultHttpContent httpContent = new DefaultHttpContent(Unpooled.copiedBuffer(text, UTF_8));
    final LastHttpContent lastHttpContent = new DefaultLastHttpContent(Unpooled.copiedBuffer(text2, UTF_8));
    lastHttpContent.trailingHeaders().add(of("trailing"), of("bar"));
    final Http2Headers http2TrailingHeaders = new DefaultHttp2Headers()
            .add(new AsciiString("trailing"), new AsciiString("bar"));
    ChannelPromise writePromise = newPromise();
    ChannelFuture writeFuture = clientChannel.write(request, writePromise);
    ChannelPromise contentPromise = newPromise();
    ChannelFuture contentFuture = clientChannel.write(httpContent, contentPromise);
    ChannelPromise lastContentPromise = newPromise();
    ChannelFuture lastContentFuture = clientChannel.write(lastHttpContent, lastContentPromise);
    clientChannel.flush();
    assertTrue(writePromise.awaitUninterruptibly(WAIT_TIME_SECONDS, SECONDS));
    assertTrue(writePromise.isSuccess());
    assertTrue(writeFuture.awaitUninterruptibly(WAIT_TIME_SECONDS, SECONDS));
    assertTrue(writeFuture.isSuccess());
    assertTrue(contentPromise.awaitUninterruptibly(WAIT_TIME_SECONDS, SECONDS));
    assertTrue(contentPromise.isSuccess());
    assertTrue(contentFuture.awaitUninterruptibly(WAIT_TIME_SECONDS, SECONDS));
    assertTrue(contentFuture.isSuccess());
    assertTrue(lastContentPromise.awaitUninterruptibly(WAIT_TIME_SECONDS, SECONDS));
    assertTrue(lastContentPromise.isSuccess());
    assertTrue(lastContentFuture.awaitUninterruptibly(WAIT_TIME_SECONDS, SECONDS));
    assertTrue(lastContentFuture.isSuccess());
    awaitRequests();
    verify(serverListener).onHeadersRead(any(ChannelHandlerContext.class), eq(3), eq(http2Headers), eq(0),
            anyShort(), anyBoolean(), eq(0), eq(false));
    verify(serverListener).onDataRead(any(ChannelHandlerContext.class), eq(3), any(ByteBuf.class), eq(0), eq(false));
    verify(serverListener).onHeadersRead(any(ChannelHandlerContext.class), eq(3), eq(http2TrailingHeaders), eq(0),
            anyShort(), anyBoolean(), eq(0), eq(true));
    assertEquals(1, receivedBuffers.size());
    assertEquals(text + text2, receivedBuffers.get(0));
}
// Thrift endpoint for automatic partition creation.  Requests are throttled:
// once the in-flight count reaches a quarter of the thrift worker threads the
// request is rejected with SERVICE_UNAVAILABLE so the thrift pool is not
// starved.  Any exception is mapped to RUNTIME_ERROR rather than propagated.
@Override
public TCreatePartitionResult createPartition(TCreatePartitionRequest request) throws TException {
    LOG.info("Receive create partition: {}", request);
    TCreatePartitionResult result;
    try {
        // incrementAndGet happens inside try so the finally block always balances it.
        if (partitionRequestNum.incrementAndGet() >= Config.thrift_server_max_worker_threads / 4) {
            result = new TCreatePartitionResult();
            TStatus errorStatus = new TStatus(SERVICE_UNAVAILABLE);
            errorStatus.setError_msgs(Lists.newArrayList(
                    String.format("Too many create partition requests, please try again later txn_id=%d",
                            request.getTxn_id())));
            result.setStatus(errorStatus);
            return result;
        }
        result = createPartitionProcess(request);
    } catch (Exception t) {
        LOG.warn(DebugUtil.getStackTrace(t));
        result = new TCreatePartitionResult();
        TStatus errorStatus = new TStatus(RUNTIME_ERROR);
        errorStatus.setError_msgs(Lists.newArrayList(String.format("txn_id=%d failed. %s", request.getTxn_id(), t.getMessage())));
        result.setStatus(errorStatus);
    } finally {
        partitionRequestNum.decrementAndGet();
    }
    return result;
}
// Exceeding max_partitions_in_one_batch must turn a previously-OK request
// into a RUNTIME_ERROR whose message names the violated config knob.
@Test
public void testAutomaticPartitionPerLoadLimitExceed() throws TException {
    TransactionState state = new TransactionState();
    new MockUp<GlobalTransactionMgr>() {
        @Mock
        public TransactionState getTransactionState(long dbId, long transactionId) {
            return state;
        }
    };
    Database db = GlobalStateMgr.getCurrentState().getDb("test");
    Table table = db.getTable("site_access_month");
    List<List<String>> partitionValues = Lists.newArrayList();
    List<String> values = Lists.newArrayList();
    values.add("1999-04-29");
    partitionValues.add(values);
    List<String> values2 = Lists.newArrayList();
    values2.add("1999-03-28");
    partitionValues.add(values2);
    FrontendServiceImpl impl = new FrontendServiceImpl(exeEnv);
    TCreatePartitionRequest request = new TCreatePartitionRequest();
    request.setDb_id(db.getId());
    request.setTable_id(table.getId());
    request.setPartition_values(partitionValues);
    TCreatePartitionResult partition = impl.createPartition(request);
    Assert.assertEquals(TStatusCode.OK, partition.getStatus().getStatus_code());
    // Lower the batch limit below the request size, then restore the default.
    Config.max_partitions_in_one_batch = 1;
    partition = impl.createPartition(request);
    Assert.assertEquals(partition.getStatus().getStatus_code(), TStatusCode.RUNTIME_ERROR);
    Assert.assertTrue(partition.getStatus().getError_msgs().get(0).contains("max_partitions_in_one_batch"));
    Config.max_partitions_in_one_batch = 4096;
}
// Jenkins @Initializer hook: relocates old log files, delegating to the
// File-based overload with the Jenkins root directory as the base.
@Initializer
public static void relocateOldLogs() {
    relocateOldLogs(Jenkins.get().getRootDir());
}
// Legacy slave-*.log files must be moved into the logs/slaves/<name>/ layout,
// leaving only the logs/ directory behind at the top level.
@Test
public void testRelocate() throws Exception {
    File d = File.createTempFile("jenkins", "test");
    FilePath dir = new FilePath(d);
    try {
        dir.delete();
        dir.mkdirs();
        dir.child("slave-abc.log").touch(0);
        dir.child("slave-def.log.5").touch(0);
        Computer.relocateOldLogs(d);
        assertEquals(1, dir.list().size()); // asserting later that this one child is the logs/ directory
        assertTrue(dir.child("logs/slaves/abc/slave.log").exists());
        assertTrue(dir.child("logs/slaves/def/slave.log.5").exists());
    } finally {
        dir.deleteRecursive();
    }
}
public static Object convertValue(String className, Object cleanValue, ClassLoader classLoader) { // "null" string is converted to null cleanValue = "null".equals(cleanValue) ? null : cleanValue; if (!isPrimitive(className) && cleanValue == null) { return null; } Class<?> clazz = loadClass(className, classLoader); // if it is not a String, it has to be an instance of the desired type if (!(cleanValue instanceof String)) { if (clazz.isInstance(cleanValue)) { return cleanValue; } throw new IllegalArgumentException(new StringBuilder().append("Object ").append(cleanValue) .append(" is not a String or an instance of ").append(className).toString()); } String value = (String) cleanValue; try { if (clazz.isAssignableFrom(String.class)) { return value; } else if (clazz.isAssignableFrom(BigDecimal.class)) { return parseBigDecimal(value); } else if (clazz.isAssignableFrom(BigInteger.class)) { return parseBigInteger(value); } else if (clazz.isAssignableFrom(Boolean.class) || clazz.isAssignableFrom(boolean.class)) { return parseBoolean(value); } else if (clazz.isAssignableFrom(Byte.class) || clazz.isAssignableFrom(byte.class)) { return Byte.parseByte(value); } else if (clazz.isAssignableFrom(Character.class) || clazz.isAssignableFrom(char.class)) { return parseChar(value); } else if (clazz.isAssignableFrom(Double.class) || clazz.isAssignableFrom(double.class)) { return Double.parseDouble(cleanStringForNumberParsing(value)); } else if (clazz.isAssignableFrom(Float.class) || clazz.isAssignableFrom(float.class)) { return Float.parseFloat(cleanStringForNumberParsing(value)); } else if (clazz.isAssignableFrom(Integer.class) || clazz.isAssignableFrom(int.class)) { return Integer.parseInt(cleanStringForNumberParsing(value)); } else if (clazz.isAssignableFrom(LocalDate.class)) { return LocalDate.parse(value, DateTimeFormatter.ISO_LOCAL_DATE); } else if (clazz.isAssignableFrom(LocalDateTime.class)) { return LocalDateTime.parse(value, DateTimeFormatter.ISO_LOCAL_DATE_TIME); } else if 
(clazz.isAssignableFrom(LocalTime.class)) { return LocalTime.parse(value, DateTimeFormatter.ISO_LOCAL_TIME); } else if (clazz.isAssignableFrom(Long.class) || clazz.isAssignableFrom(long.class)) { return Long.parseLong(cleanStringForNumberParsing(value)); } else if (clazz.isAssignableFrom(Short.class) || clazz.isAssignableFrom(short.class)) { return Short.parseShort(cleanStringForNumberParsing(value)); } else if (Enum.class.isAssignableFrom(clazz)) { return Enum.valueOf(((Class<? extends Enum>) clazz), value); } } catch (RuntimeException e) { throw new IllegalArgumentException(new StringBuilder().append("Impossible to parse '") .append(value).append("' as ").append(className).append(" [") .append(e.getMessage()).append("]").toString()); } throw new IllegalArgumentException(new StringBuilder().append("Class ").append(className) .append(" is not natively supported. Please use an MVEL expression" + " to use it.").toString()); }
// A non-string value that is not an instance of the target type must be rejected.
@Test
public void convertValueFailNotStringOrTypeTest() {
    assertThatThrownBy(() -> convertValue(RuleScenarioRunnerHelperTest.class.getCanonicalName(), 1, classLoader))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessageStartingWith("Object 1 is not a String or an instance of");
}
// Starts an OIDC session for an app-initiated authentication request:
// 1) fetch service metadata from the dienstencatalogus for the client id,
// 2) validate the request signature against that metadata,
// 3) remote-log the attempt (code 1121) with the legacy webservice id,
// 4) create and persist a new OIDC session,
// 5) start an app session whose return URL points back at this session,
// and return its id as {"app_session_id": ...}.
public Map<String, String> startSessionFromApp(@Valid AuthenticateRequest params) throws IOException, ParseException, JOSEException, InvalidSignatureException, DienstencatalogusException {
    var response = dcClient.retrieveMetadataFromDc(params.getClientId());
    validateSignature(response, params);
    adClient.remoteLog("1121", Map.of("webservice_id", response.getLegacyWebserviceId()));
    var oidcSession = startSession(params, response.getMetadataUrl(), response.getLegacyWebserviceId(), response.getServiceName());
    openIdRepository.save(oidcSession);
    var appSession = appClient.startAppSession(
            issuer + "/return?sessionId=" + oidcSession.getId(),
            response.getServiceName(),
            response.getLegacyWebserviceId(),
            response.getMinimumReliabilityLevel(),
            response.getIconUri(),
            oidcSession
    );
    return Map.of("app_session_id", appSession.get("id"));
}
// Happy path: with valid metadata and signature, starting a session from the
// app must complete without throwing.
@Test
void startSessionFromAppTest() throws DienstencatalogusException, InvalidSignatureException, IOException, ParseException, JOSEException {
    //given
    AuthenticateRequest request = new AuthenticateRequest();
    request.setClientId("PPP");
    request.setRequest(client.generateRequest());
    request.setRedirectUri("redirect_uri");
    DcMetadataResponse dcMetadataResponse = new DcMetadataResponse();
    dcMetadataResponse.setMetadataUrl("testUrl");
    dcMetadataResponse.setLegacyWebserviceId(1L);
    dcMetadataResponse.setAppReturnUrl("redirect_uri");
    dcMetadataResponse.setRequestStatus("STATUS_OK");
    when(dcClient.retrieveMetadataFromDc(request.getClientId())).thenReturn(dcMetadataResponse);
    when(provider.verifySignature(dcMetadataResponse.getMetadataUrl(), signedJwt)).thenReturn(true);
    when(appClient.startAppSession(any(), any(), any(), any(), any(), any())).thenReturn(Map.of("id", "SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS"));
    //when
    openIdService.startSessionFromApp(request);
    //then
}
/**
 * Builds a lightweight excerpt (id, type, title) describing the given output
 * for content-pack listings.
 */
@Override
public EntityExcerpt createExcerpt(Output output) {
    final ModelId outputId = ModelId.of(output.getId());
    return EntityExcerpt.builder()
            .type(ModelTypes.OUTPUT_V1)
            .title(output.getTitle())
            .id(outputId)
            .build();
}
// The excerpt must mirror the output's id and title and carry the OUTPUT_V1 type.
@Test
public void createExcerpt() {
    final ImmutableMap<String, Object> configuration = ImmutableMap.of();
    final OutputImpl output = OutputImpl.create(
            "01234567890",
            "Output Title",
            "org.graylog2.output.SomeOutputClass",
            "admin",
            configuration,
            new Date(0L),
            null
    );
    final EntityExcerpt excerpt = facade.createExcerpt(output);
    assertThat(excerpt.id()).isEqualTo(ModelId.of(output.getId()));
    assertThat(excerpt.type()).isEqualTo(ModelTypes.OUTPUT_V1);
    assertThat(excerpt.title()).isEqualTo(output.getTitle());
}
// Copies the readable region of this buffer, delegating to the ranged
// copy(index, length) overload starting at the current reader index.
@Override
public ChannelBuffer copy() {
    return copy(readerIndex, readableBytes());
}
// A negative start index must be rejected with IndexOutOfBoundsException.
@Test
void copyBoundaryCheck1() {
    Assertions.assertThrows(IndexOutOfBoundsException.class, () -> buffer.copy(-1, 0));
}
/**
 * Filters the given HTML content through a fresh {@link HTMLFilter} instance,
 * returning the sanitised result.
 *
 * @param htmlContent raw HTML to filter
 * @return the filtered content
 */
public static String filter(String htmlContent) {
    final HTMLFilter htmlFilter = new HTMLFilter();
    return htmlFilter.filter(htmlContent);
}
// A disallowed tag pair with no content must be filtered down to an empty string.
@Test
public void filterTest() {
    final String html = "<alert></alert>";
    final String filter = HtmlUtil.filter(html);
    assertEquals("", filter);
}
/**
 * Parses the given schema file, delegating to {@code parse(File, ...)} with a
 * {@code null} second argument (no explicit hint supplied).
 *
 * @param file the schema file to read
 * @return the parse result
 * @throws IOException          if the file cannot be read
 * @throws SchemaParseException if the content cannot be parsed as a schema
 */
public ParseResult parse(File file) throws IOException, SchemaParseException {
    return parse(file, null);
}
// Parsing the JSON schema text must yield a schema equal to the expected one.
@Test
void testParseTextWithFallbackJsonParser() {
    Schema schema = new SchemaParser().parse(SCHEMA_JSON).mainSchema();
    assertEquals(SCHEMA_REAL, schema);
}
// Initialises the token authentication provider from service configuration.
// An optional setting prefix lets several providers coexist with distinct
// keys.  Ordering matters: the public-key algorithm must be resolved before
// the validation key is loaded.  Configuring an audience claim without a
// broker audience is a configuration error.
@Override
public void initialize(ServiceConfiguration config) throws IOException, IllegalArgumentException {
    String prefix = (String) config.getProperty(CONF_TOKEN_SETTING_PREFIX);
    if (null == prefix) {
        prefix = "";
    }
    this.confTokenSecretKeySettingName = prefix + CONF_TOKEN_SECRET_KEY;
    this.confTokenPublicKeySettingName = prefix + CONF_TOKEN_PUBLIC_KEY;
    this.confTokenAuthClaimSettingName = prefix + CONF_TOKEN_AUTH_CLAIM;
    this.confTokenPublicAlgSettingName = prefix + CONF_TOKEN_PUBLIC_ALG;
    this.confTokenAudienceClaimSettingName = prefix + CONF_TOKEN_AUDIENCE_CLAIM;
    this.confTokenAudienceSettingName = prefix + CONF_TOKEN_AUDIENCE;
    this.confTokenAllowedClockSkewSecondsSettingName = prefix + CONF_TOKEN_ALLOWED_CLOCK_SKEW_SECONDS;
    // we need to fetch the algorithm before we fetch the key
    this.publicKeyAlg = getPublicKeyAlgType(config);
    this.validationKey = getValidationKey(config);
    this.roleClaim = getTokenRoleClaim(config);
    this.audienceClaim = getTokenAudienceClaim(config);
    this.audience = getTokenAudience(config);
    long allowedSkew = getConfTokenAllowedClockSkewSeconds(config);
    this.parser = Jwts.parserBuilder()
            .setAllowedClockSkewSeconds(allowedSkew)
            .setSigningKey(this.validationKey)
            .build();
    if (audienceClaim != null && audience == null) {
        throw new IllegalArgumentException("Token Audience Claim [" + audienceClaim
                + "] configured, but Audience stands for this broker not.");
    }
}
// With a setting prefix configured, every token setting must be looked up
// exactly once under its prefixed name.
@Test
public void testTokenSettingPrefix() throws Exception {
    AuthenticationProviderToken provider = new AuthenticationProviderToken();
    KeyPair keyPair = Keys.keyPairFor(SignatureAlgorithm.RS256);
    String publicKeyStr = AuthTokenUtils.encodeKeyBase64(keyPair.getPublic());
    Properties properties = new Properties();
    // Use public key for validation
    properties.setProperty(AuthenticationProviderToken.CONF_TOKEN_PUBLIC_KEY, publicKeyStr);
    ServiceConfiguration conf = new ServiceConfiguration();
    conf.setProperties(properties);
    ServiceConfiguration mockConf = mock(ServiceConfiguration.class);
    String prefix = "test";
    // Answer prefixed lookups by stripping the prefix and delegating to the real conf.
    Mockito.when(mockConf.getProperty(anyString()))
            .thenAnswer(invocationOnMock ->
                    conf.getProperty(((String) invocationOnMock.getArgument(0)).substring(prefix.length()))
            );
    Mockito.when(mockConf.getProperty(AuthenticationProviderToken.CONF_TOKEN_SETTING_PREFIX)).thenReturn(prefix);
    provider.initialize(mockConf);
    // Each property is fetched only once. Prevent multiple fetches.
    Mockito.verify(mockConf, Mockito.times(1)).getProperty(AuthenticationProviderToken.CONF_TOKEN_SETTING_PREFIX);
    Mockito.verify(mockConf, Mockito.times(1))
            .getProperty(prefix + AuthenticationProviderToken.CONF_TOKEN_SECRET_KEY);
    Mockito.verify(mockConf, Mockito.times(1))
            .getProperty(prefix + AuthenticationProviderToken.CONF_TOKEN_PUBLIC_KEY);
    Mockito.verify(mockConf, Mockito.times(1))
            .getProperty(prefix + AuthenticationProviderToken.CONF_TOKEN_AUTH_CLAIM);
    Mockito.verify(mockConf, Mockito.times(1))
            .getProperty(prefix + AuthenticationProviderToken.CONF_TOKEN_PUBLIC_ALG);
    Mockito.verify(mockConf, Mockito.times(1))
            .getProperty(prefix + AuthenticationProviderToken.CONF_TOKEN_AUDIENCE_CLAIM);
    Mockito.verify(mockConf, Mockito.times(1))
            .getProperty(prefix + AuthenticationProviderToken.CONF_TOKEN_AUDIENCE);
}
// Splits a newline-delimited JSON buffer into StreamedRow objects.  Empty
// segments (stray newlines) are skipped, each segment is tidied into a
// well-formed JSON message before deserialisation, and every row is passed
// through addHostInfo before being collected.
public static List<StreamedRow> toRows(
    final Buffer buff,
    final Function<StreamedRow, StreamedRow> addHostInfo
) {
    final List<StreamedRow> rows = new ArrayList<>();
    int begin = 0;
    // Iterate one position past the end so a final segment without a trailing
    // newline is still flushed.
    for (int i = 0; i <= buff.length(); i++) {
        if ((i == buff.length() && (i - begin > 1))
            || (i < buff.length() && buff.getByte(i) == (byte) '\n')) {
            if (begin != i) { // Ignore random newlines - the server can send these
                final Buffer sliced = buff.slice(begin, i);
                final Buffer tidied = toJsonMsg(sliced, true);
                if (tidied.length() > 0) {
                    final StreamedRow row = deserialize(tidied, StreamedRow.class);
                    rows.add(addHostInfo.apply(row));
                }
            }
            begin = i + 1;
        }
    }
    return rows;
}
// A protobuf-schema response must parse into a header row (with proto schema),
// two protobuf-bytes data rows, and a final message.
@Test
public void toRowsProto() {
    // When:
    final List<StreamedRow> rows = KsqlTargetUtil.toRows(Buffer.buffer("[{\"header\":{\"queryId\":\"queryId\","
        + "\"schema\":\"`A` INTEGER KEY, `B` DOUBLE, `C` ARRAY<STRING>\","
        + "\"protoSchema\":"
        + "\"syntax = \\\"proto3\\\";\\n"
        + "\\n"
        + "message ConnectDefault1 {\\n"
        + "  int32 A = 1;\\n"
        + "  double B = 2;\\n"
        + "  repeated string C = 3;\\n"
        + "}\\n"
        + "\"}},\n"
        + "{\"row\":{\"protobufBytes\":\"CHsRAAAAAABAbUAaBWhlbGxv\"}},\n"
        + "{\"row\":{\"protobufBytes\":\"CMgDEQAAAAAAqIhAGgNieWU=\"}},\n"
        + "{\"finalMessage\":\"limit hit!\"}]"), Functions.identity());
    // Then:
    assertThat(rows.size(), is(4));
    final StreamedRow row = rows.get(0);
    assertThat(row.getHeader().isPresent(), is(true));
    assertThat(row.getHeader().get().getQueryId().toString(), is("queryId"));
    assertThat(row.getHeader().get().getSchema().toString(), is("`A` INTEGER KEY, `B` DOUBLE, `C` ARRAY<STRING>"));
    assertThat(row.getHeader().get().getProtoSchema().get(), is("syntax = \"proto3\";\n\nmessage ConnectDefault1 {\n  int32 A = 1;\n  double B = 2;\n  repeated string C = 3;\n}\n"));
    final StreamedRow row2 = rows.get(1);
    assertThat(row2.getRow().isPresent(), is(true));
    assertThat(row2.getRow().get().getProtobufBytes().isPresent(), is(true));
    assertThat(row2.getRow().get().toString(), is("{\"protobufBytes\":\"CHsRAAAAAABAbUAaBWhlbGxv\"}"));
    final StreamedRow row3 = rows.get(2);
    assertThat(row3.getRow().isPresent(), is(true));
    assertThat(row3.getRow().get().getProtobufBytes().isPresent(), is(true));
    assertThat(row3.getRow().get().toString(), is("{\"protobufBytes\":\"CMgDEQAAAAAAqIhAGgNieWU=\"}"));
    final StreamedRow row4 = rows.get(3);
    assertThat(row4.getRow().isPresent(), is(false));
    assertThat(row4.getFinalMessage().isPresent(), is(true));
    assertThat(row4.getFinalMessage().get(), is("limit hit!"));
}
static int compareAddresses(byte[] current, byte[] candidate) { if (candidate == null || candidate.length < EUI48_MAC_ADDRESS_LENGTH) { return 1; } // Must not be filled with only 0 and 1. boolean onlyZeroAndOne = true; for (byte b: candidate) { if (b != 0 && b != 1) { onlyZeroAndOne = false; break; } } if (onlyZeroAndOne) { return 1; } // Must not be a multicast address if ((candidate[0] & 1) != 0) { return 1; } // Prefer globally unique address. if ((candidate[0] & 2) == 0) { if (current.length != 0 && (current[0] & 2) == 0) { // Both current and candidate are globally unique addresses. return 0; } else { // Only candidate is globally unique. return -1; } } else { if (current.length != 0 && (current[0] & 2) == 0) { // Only current is globally unique. return 1; } else { // Both current and candidate are non-unique. return 0; } } }
// Exhaustively checks the preference ordering between current and candidate
// MAC addresses across the globally-unique / locally-administered cases.
@Test
public void testCompareAddresses() {
    // should not prefer empty address when candidate is not globally unique
    assertEquals(
            0,
            MacAddressUtil.compareAddresses(
                    EMPTY_BYTES,
                    new byte[]{(byte) 0x52, (byte) 0x54, (byte) 0x00, (byte) 0xf9, (byte) 0x32, (byte) 0xbd}));
    // only candidate is globally unique
    assertEquals(
            -1,
            MacAddressUtil.compareAddresses(
                    EMPTY_BYTES,
                    new byte[]{(byte) 0x50, (byte) 0x54, (byte) 0x00, (byte) 0xf9, (byte) 0x32, (byte) 0xbd}));
    // only candidate is globally unique
    assertEquals(
            -1,
            MacAddressUtil.compareAddresses(
                    new byte[]{(byte) 0x52, (byte) 0x54, (byte) 0x00, (byte) 0xf9, (byte) 0x32, (byte) 0xbd},
                    new byte[]{(byte) 0x50, (byte) 0x54, (byte) 0x00, (byte) 0xf9, (byte) 0x32, (byte) 0xbd}));
    // only current is globally unique
    assertEquals(
            1,
            MacAddressUtil.compareAddresses(
                    new byte[]{(byte) 0x52, (byte) 0x54, (byte) 0x00, (byte) 0xf9, (byte) 0x32, (byte) 0xbd},
                    EMPTY_BYTES));
    // only current is globally unique
    assertEquals(
            1,
            MacAddressUtil.compareAddresses(
                    new byte[]{(byte) 0x50, (byte) 0x54, (byte) 0x00, (byte) 0xf9, (byte) 0x32, (byte) 0xbd},
                    new byte[]{(byte) 0x52, (byte) 0x54, (byte) 0x00, (byte) 0xf9, (byte) 0x32, (byte) 0xbd}));
    // both are globally unique
    assertEquals(
            0,
            MacAddressUtil.compareAddresses(
                    new byte[]{(byte) 0x50, (byte) 0x54, (byte) 0x00, (byte) 0xf9, (byte) 0x32, (byte) 0xbd},
                    new byte[]{(byte) 0x50, (byte) 0x55, (byte) 0x01, (byte) 0xfa, (byte) 0x33, (byte) 0xbe}));
}
// Sends an UPDATE_AND_CREATE_STATIC_TOPIC command to the given broker,
// carrying the topic config in the header and the encoded queue-mapping
// detail in the body.  A non-success response is surfaced as MQBrokerException.
public void createStaticTopic(final String addr, final String defaultTopic, final TopicConfig topicConfig,
        final TopicQueueMappingDetail topicQueueMappingDetail, boolean force, final long timeoutMillis)
        throws RemotingException, InterruptedException, MQBrokerException {
    CreateTopicRequestHeader requestHeader = new CreateTopicRequestHeader();
    requestHeader.setTopic(topicConfig.getTopicName());
    requestHeader.setDefaultTopic(defaultTopic);
    requestHeader.setReadQueueNums(topicConfig.getReadQueueNums());
    requestHeader.setWriteQueueNums(topicConfig.getWriteQueueNums());
    requestHeader.setPerm(topicConfig.getPerm());
    requestHeader.setTopicFilterType(topicConfig.getTopicFilterType().name());
    requestHeader.setTopicSysFlag(topicConfig.getTopicSysFlag());
    requestHeader.setOrder(topicConfig.isOrder());
    requestHeader.setForce(force);
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.UPDATE_AND_CREATE_STATIC_TOPIC, requestHeader);
    request.setBody(topicQueueMappingDetail.encode());
    RemotingCommand response = this.remotingClient.invokeSync(
            MixAll.brokerVIPChannel(this.clientConfig.isVipChannelEnabled(), addr), request, timeoutMillis);
    assert response != null;
    switch (response.getCode()) {
        case ResponseCode.SUCCESS: {
            return;
        }
        default:
            break;
    }
    throw new MQBrokerException(response.getCode(), response.getRemark());
}
// With a mocked successful sync invocation, creating a static topic must not throw.
@Test
public void testCreateStaticTopic() throws RemotingException, InterruptedException, MQBrokerException {
    mockInvokeSync();
    mqClientAPI.createStaticTopic(defaultBrokerAddr, defaultTopic, new TopicConfig(), new TopicQueueMappingDetail(), false, defaultTimeout);
}
// Default string form: delegates to the parameterised toString with
// non-verbose output, no extra argument, and Bitcoin mainnet as the network.
@Override
public String toString() {
    return toString(false, null, BitcoinNetwork.MAINNET);
}
@Test public void testNonCanonicalSigs() throws Exception { // Tests the noncanonical sigs from Bitcoin Core unit tests InputStream in = getClass().getResourceAsStream("sig_noncanonical.json"); // Poor man's JSON parser (because pulling in a lib for this is overkill) while (in.available() > 0) { while (in.available() > 0 && in.read() != '"') ; if (in.available() < 1) break; StringBuilder sig = new StringBuilder(); int c; while (in.available() > 0 && (c = in.read()) != '"') sig.append((char)c); try { final String sigStr = sig.toString(); assertFalse(TransactionSignature.isEncodingCanonical(ByteUtils.parseHex(sigStr))); } catch (IllegalArgumentException e) { // Expected for non-hex strings in the JSON that we should ignore } } in.close(); }
/**
 * Runs the precondition checker and, when it does not request shutdown,
 * runs the main executable.  A shutdown result from the checker short-circuits
 * the start entirely.
 */
void tryStartApp() throws Exception {
    if (runExecutable(preconditionChecker)) {
        // Precondition phase requested shutdown; do not start the app.
        return;
    }
    runExecutable(executable.get());
}
// When the executable is already shut down, tryStartApp must still complete
// and interact with the executable exactly as recorded.
@Test
public void shouldStopAppOnJoin() throws Exception {
    // Given:
    executable.shutdown();
    expectLastCall();
    replay(executable);
    // When:
    main.tryStartApp();
    // Then:
    verify(executable);
}
/**
 * Asserts that the given collection is non-empty.
 *
 * @param collection the collection to check
 * @param message    the exception message used on failure
 * @throws IllegalArgumentException if the collection is empty (per
 *         {@code CollectionUtil.isEmpty})
 */
public static void notEmpty(Collection<?> collection, String message) {
    if (!CollectionUtil.isEmpty(collection)) {
        return;
    }
    throw new IllegalArgumentException(message);
}
// An empty map must be rejected with IllegalArgumentException.
// NOTE(review): Collections.emptyMap() is a Map, so this resolves to the Map
// overload of notEmpty rather than the Collection one shown nearby.
@Test(expected = IllegalArgumentException.class)
public void assertNotEmptyByMapAndMessageIsNull() {
    Assert.notEmpty(Collections.emptyMap());
}
// Factory method: creates a COS under-file-system instance for the given URI,
// failing fast when COS credentials are missing from the configuration.
// NOTE(review): Throwables.propagate is deprecated in recent Guava — consider
// rethrowing directly; left as-is here to preserve the wrapped exception types.
@Override
public UnderFileSystem create(String path, UnderFileSystemConfiguration conf) {
    Preconditions.checkNotNull(path, "Unable to create UnderFileSystem instance:"
            + " URI path should not be null");
    if (checkCOSCredentials(conf)) {
        try {
            return COSUnderFileSystem.createInstance(new AlluxioURI(path), conf);
        } catch (Exception e) {
            throw Throwables.propagate(e);
        }
    }
    String err = "COS Credentials not available, cannot create COS Under File System.";
    throw Throwables.propagate(new IOException(err));
}
// With valid credentials configured, the factory must produce a COSUnderFileSystem.
@Test
public void createInstanceWithPath() {
    UnderFileSystem ufs = mFactory.create(mCosPath, mConf);
    Assert.assertNotNull(ufs);
    Assert.assertTrue(ufs instanceof COSUnderFileSystem);
}
// Package-private seam: delegates the permission check for the requested
// resource against the owned resource to the configured permission checker.
void checkPerm(PlainAccessResource needCheckedAccess, PlainAccessResource ownedAccess) {
    permissionChecker.check(needCheckedAccess, ownedAccess);
}
// PUB- and SUB-scoped requests must pass against matching owned resources,
// and combined PUB+SUB requests must pass against the any-permission resource.
@Test
public void checkPerm() {
    PlainAccessResource plainAccessResource = new PlainAccessResource();
    plainAccessResource.addResourceAndPerm("topicA", Permission.PUB);
    plainPermissionManager.checkPerm(plainAccessResource, pubPlainAccessResource);
    plainAccessResource.addResourceAndPerm("topicB", Permission.SUB);
    plainPermissionManager.checkPerm(plainAccessResource, anyPlainAccessResource);
    plainAccessResource = new PlainAccessResource();
    plainAccessResource.addResourceAndPerm("topicB", Permission.SUB);
    plainPermissionManager.checkPerm(plainAccessResource, subPlainAccessResource);
    plainAccessResource.addResourceAndPerm("topicA", Permission.PUB);
    plainPermissionManager.checkPerm(plainAccessResource, anyPlainAccessResource);
}
@PostMapping("/selector") public ShenyuAdminResult saveSelector(@RequestBody @Valid @NotNull final DataPermissionDTO dataPermissionDTO) { return ShenyuAdminResult.success(ShenyuResultMessage.SAVE_SUCCESS, dataPermissionService.createSelector(dataPermissionDTO)); }
// Posting a data-permission DTO must return the save-success message with the
// service's count in the data field.
@Test
public void saveSelector() throws Exception {
    DataPermissionDTO dataPermissionDTO = new DataPermissionDTO();
    dataPermissionDTO.setDataId("testDataId");
    dataPermissionDTO.setUserId("testUserId");
    given(this.dataPermissionService.createSelector(dataPermissionDTO)).willReturn(1);
    this.mockMvc.perform(MockMvcRequestBuilders.post("/data-permission/selector")
            .contentType(MediaType.APPLICATION_JSON)
            .content(GsonUtils.getInstance().toJson(dataPermissionDTO)))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$.message", is(ShenyuResultMessage.SAVE_SUCCESS)))
            .andExpect(jsonPath("$.data", is(1)))
            .andReturn();
}
/**
 * Converts a {@link GlobalStatusRequest} into its protobuf representation,
 * nesting the abstract message, transaction request, and global-end request
 * layers.  A null extraData is encoded as the empty string (protobuf fields
 * cannot be null).
 */
@Override
public GlobalStatusRequestProto convert2Proto(GlobalStatusRequest globalStatusRequest) {
    final AbstractMessageProto messageProto = AbstractMessageProto.newBuilder()
            .setMessageType(MessageTypeProto.forNumber(globalStatusRequest.getTypeCode()))
            .build();
    final AbstractTransactionRequestProto transactionRequestProto = AbstractTransactionRequestProto.newBuilder()
            .setAbstractMessage(messageProto)
            .build();
    final String extraData = globalStatusRequest.getExtraData();
    final AbstractGlobalEndRequestProto globalEndRequestProto = AbstractGlobalEndRequestProto.newBuilder()
            .setAbstractTransactionRequest(transactionRequestProto)
            .setXid(globalStatusRequest.getXid())
            .setExtraData(extraData == null ? "" : extraData)
            .build();
    return GlobalStatusRequestProto.newBuilder()
            .setAbstractGlobalEndRequest(globalEndRequestProto)
            .build();
}
// Round-trip: converting to proto and back must preserve type code, xid, and extra data.
@Test
public void convert2Proto() {
    GlobalStatusRequest globalStatusRequest = new GlobalStatusRequest();
    globalStatusRequest.setExtraData("extraData");
    globalStatusRequest.setXid("xid");
    GlobalStatusRequestConvertor convertor = new GlobalStatusRequestConvertor();
    GlobalStatusRequestProto proto = convertor.convert2Proto(globalStatusRequest);
    GlobalStatusRequest real = convertor.convert2Model(proto);
    assertThat((real.getTypeCode())).isEqualTo(globalStatusRequest.getTypeCode());
    assertThat((real.getXid())).isEqualTo(globalStatusRequest.getXid());
    assertThat((real.getExtraData())).isEqualTo(globalStatusRequest.getExtraData());
}
/**
 * Removes all metadata properties recorded for the given column from the
 * segment metadata configuration.
 *
 * @param properties segment metadata properties to mutate in place
 * @param column     name of the column whose metadata entries should be dropped
 */
public static void removeColumnMetadataInfo(PropertiesConfiguration properties, String column) {
    // subset() returns a live view over the parent configuration, so clearing the
    // view deletes every key prefixed with COLUMN_PROPS_KEY_PREFIX + column.
    properties.subset(COLUMN_PROPS_KEY_PREFIX + column).clear();
}
@Test
public void testRemoveColumnMetadataInfo() throws Exception {
    // Seed three column properties and persist them to disk.
    PropertiesConfiguration config = CommonsConfigurationUtils.fromFile(CONFIG_FILE);
    config.setProperty(COLUMN_PROPERTY_KEY_PREFIX + "a", "foo");
    config.setProperty(COLUMN_PROPERTY_KEY_PREFIX + "b", "bar");
    config.setProperty(COLUMN_PROPERTY_KEY_PREFIX + "c", "foobar");
    CommonsConfigurationUtils.saveToFile(config, CONFIG_FILE);

    // Reload and confirm the seeded keys are present.
    config = CommonsConfigurationUtils.fromFile(CONFIG_FILE);
    for (String suffix : new String[] {"a", "b", "c"}) {
        assertTrue(config.containsKey(COLUMN_PROPERTY_KEY_PREFIX + suffix));
    }

    // Removing the column's metadata must drop every matching key in memory...
    SegmentColumnarIndexCreator.removeColumnMetadataInfo(config, COLUMN_NAME);
    for (String suffix : new String[] {"a", "b", "c"}) {
        assertFalse(config.containsKey(COLUMN_PROPERTY_KEY_PREFIX + suffix));
    }

    // ...and the removal must also survive a save/reload round trip.
    CommonsConfigurationUtils.saveToFile(config, CONFIG_FILE);
    config = CommonsConfigurationUtils.fromFile(CONFIG_FILE);
    for (String suffix : new String[] {"a", "b", "c"}) {
        assertFalse(config.containsKey(COLUMN_PROPERTY_KEY_PREFIX + suffix));
    }
}
/**
 * Parses a PostgreSQL-style range literal (e.g. {@code "[1,10)"}, {@code "(,)"} or
 * {@code "empty"}) into an integer {@link Range}, delegating bound parsing to
 * {@link Integer#parseInt(String)}.
 *
 * @param range the textual range representation
 * @return the parsed integer range
 */
public static Range<Integer> integerRange(String range) {
    return ofString(range, Integer::parseInt, Integer.class);
}
@Test
public void emptyInfinityEquality() {
    // Two empty ranges are equal to each other.
    assertEquals(integerRange("empty"), integerRange("empty"));
    // The unbounded range is equal to itself however it is spelled:
    // "(infinity,infinity)" and the shorthand "(,)" denote the same range.
    assertEquals(integerRange("(infinity,infinity)"), integerRange("(infinity,infinity)"));
    assertEquals(integerRange("(,)"), integerRange("(infinity,infinity)"));
    assertEquals(integerRange("(infinity,infinity)"), integerRange("(,)"));
    // The empty range is distinct from the unbounded range and from a
    // degenerate open range like (5,5).
    assertNotEquals(integerRange("empty"), integerRange("(infinity,infinity)"));
    assertNotEquals(integerRange("empty"), integerRange("(,)"));
    assertNotEquals(integerRange("empty"), integerRange("(5,5)"));
}
/**
 * Parses the given expression with Janino and reports whether it is a single,
 * "safe" conditional expression whose variables all pass the supplied validator.
 *
 * @param expression        the expression text to parse
 * @param variableValidator decides which variable names are acceptable
 * @return a {@link ParseResult}; {@code result.ok} is false for anything that is
 *         not exactly one valid conditional expression
 */
static ParseResult parse(String expression, NameValidator variableValidator) {
    ParseResult result = new ParseResult();
    try {
        Parser parser = new Parser(new Scanner("ignore", new StringReader(expression)));
        Java.Atom atom = parser.parseConditionalExpression();
        // Reject trailing tokens: the whole input must be exactly one conditional
        // expression (e.g. "a; b" or ") a (" must not be accepted).
        if (parser.peek().type == TokenType.END_OF_INPUT) {
            result.guessedVariables = new LinkedHashSet<>();
            result.operators = new LinkedHashSet<>();
            ValueExpressionVisitor visitor = new ValueExpressionVisitor(result, variableValidator);
            result.ok = atom.accept(visitor);
            result.invalidMessage = visitor.invalidMessage;
        }
    } catch (Exception ignored) {
        // Deliberate best-effort: any parse/scan failure simply leaves result.ok == false,
        // which callers treat as "not a safe expression".
    }
    return result;
}
@Test
public void protectUsFromStuff() {
    // With a validator that rejects every name, none of these inputs may be accepted:
    // they are either invalid syntax or potentially dangerous constructs
    // (object creation, method calls, class literals, assignments, lambdas, ...).
    NameValidator allNamesInvalid = s -> false;
    for (String toParse : Arrays.asList("", "new Object()", "java.lang.Object", "Test.class",
            "new Object(){}.toString().length", "{ 5}", "{ 5, 7 }", "Object.class",
            "System.out.println(\"\")", "something.newInstance()", "e.getClass ( )",
            "edge.getDistance()*7/*test", "edge.getDistance()//*test", "edge . getClass()",
            "(edge = edge) == edge", ") edge (", "in(area_blup(), edge)", "s -> truevalue")) {
        ParseResult res = parse(toParse, allNamesInvalid);
        assertFalse(res.ok, "should not be simple condition: " + toParse);
        // A rejected expression must not leak any guessed variables either.
        assertTrue(res.guessedVariables == null || res.guessedVariables.isEmpty());
    }
    // Statement sequences (";" separated) must be rejected as well.
    assertFalse(parse("edge; getClass()", allNamesInvalid).ok);
}
public static boolean canChangeState(Function.FunctionMetaData functionMetaData, int instanceId, Function.FunctionState newState) { if (instanceId >= functionMetaData.getFunctionDetails().getParallelism()) { return false; } if (functionMetaData.getInstanceStatesMap() == null || functionMetaData.getInstanceStatesMap().isEmpty()) { // This means that all instances of the functions are running return newState == Function.FunctionState.STOPPED; } if (instanceId >= 0) { if (functionMetaData.getInstanceStatesMap().containsKey(instanceId)) { return functionMetaData.getInstanceStatesMap().get(instanceId) != newState; } else { return false; } } else { // want to change state for all instances for (Function.FunctionState state : functionMetaData.getInstanceStatesMap().values()) { if (state != newState) { return true; } } return false; } }
@Test
public void testCanChangeState() {
    long version = 5;
    // Metadata with parallelism 2 and no per-instance state map,
    // i.e. all instances are implicitly running.
    Function.FunctionMetaData metaData = Function.FunctionMetaData.newBuilder().setFunctionDetails(
            Function.FunctionDetails.newBuilder().setName("func-1").setParallelism(2)).setVersion(version).build();
    // A running instance may be stopped...
    Assert.assertTrue(FunctionMetaDataUtils.canChangeState(metaData, 0, Function.FunctionState.STOPPED));
    // ...but a transition to RUNNING is a no-op and therefore rejected.
    Assert.assertFalse(FunctionMetaDataUtils.canChangeState(metaData, 0, Function.FunctionState.RUNNING));
    // Instance ids at or beyond the parallelism (2) are rejected outright.
    Assert.assertFalse(FunctionMetaDataUtils.canChangeState(metaData, 2, Function.FunctionState.STOPPED));
    Assert.assertFalse(FunctionMetaDataUtils.canChangeState(metaData, 2, Function.FunctionState.RUNNING));
}
/**
 * Validates that {@code newConfig} is a legal update of {@code existingConfig} and
 * returns the merged configuration. Mutable fields (class name, jar, output topic,
 * user config, resources, parallelism, ...) are copied from the new config when set;
 * immutable fields (tenant/namespace/name, input topics, runtime, guarantees, ...)
 * throw {@link IllegalArgumentException} when they differ.
 *
 * <p>NOTE(review): this method mutates {@code newConfig}'s inputSpecs while
 * normalizing input declarations — confirm callers do not reuse the argument.
 *
 * @throws IllegalArgumentException if an immutable field is altered
 */
public static FunctionConfig validateUpdate(FunctionConfig existingConfig, FunctionConfig newConfig) {
    // Start from a full copy of the existing config and overlay updatable fields.
    FunctionConfig mergedConfig = existingConfig.toBuilder().build();
    // Identity fields can never change.
    if (!existingConfig.getTenant().equals(newConfig.getTenant())) {
        throw new IllegalArgumentException("Tenants differ");
    }
    if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) {
        throw new IllegalArgumentException("Namespaces differ");
    }
    if (!existingConfig.getName().equals(newConfig.getName())) {
        throw new IllegalArgumentException("Function Names differ");
    }
    if (!StringUtils.isEmpty(newConfig.getClassName())) {
        mergedConfig.setClassName(newConfig.getClassName());
    }
    if (!StringUtils.isEmpty(newConfig.getJar())) {
        mergedConfig.setJar(newConfig.getJar());
    }
    if (newConfig.getInputSpecs() == null) {
        newConfig.setInputSpecs(new HashMap<>());
    }
    if (mergedConfig.getInputSpecs() == null) {
        mergedConfig.setInputSpecs(new HashMap<>());
    }
    // Normalize every input declaration style (plain inputs, topicsPattern,
    // custom serde/schema inputs) into newConfig's inputSpecs before comparing
    // against the existing input topics.
    if (newConfig.getInputs() != null) {
        newConfig.getInputs().forEach((topicName -> {
            newConfig.getInputSpecs().put(topicName,
                    ConsumerConfig.builder().isRegexPattern(false).build());
        }));
    }
    if (newConfig.getTopicsPattern() != null && !newConfig.getTopicsPattern().isEmpty()) {
        newConfig.getInputSpecs().put(newConfig.getTopicsPattern(), ConsumerConfig.builder()
                .isRegexPattern(true)
                .build());
    }
    if (newConfig.getCustomSerdeInputs() != null) {
        newConfig.getCustomSerdeInputs().forEach((topicName, serdeClassName) -> {
            newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder()
                    .serdeClassName(serdeClassName)
                    .isRegexPattern(false)
                    .build());
        });
    }
    if (newConfig.getCustomSchemaInputs() != null) {
        newConfig.getCustomSchemaInputs().forEach((topicName, schemaClassname) -> {
            newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder()
                    .schemaType(schemaClassname)
                    .isRegexPattern(false)
                    .build());
        });
    }
    if (!newConfig.getInputSpecs().isEmpty()) {
        newConfig.getInputSpecs().forEach((topicName, consumerConfig) -> {
            // New input topics cannot be introduced, and the regex-ness of an
            // existing topic is fixed; other consumer settings may be updated.
            if (!existingConfig.getInputSpecs().containsKey(topicName)) {
                throw new IllegalArgumentException("Input Topics cannot be altered");
            }
            if (consumerConfig.isRegexPattern() != existingConfig.getInputSpecs().get(topicName).isRegexPattern()) {
                throw new IllegalArgumentException(
                        "isRegexPattern for input topic " + topicName + " cannot be altered");
            }
            mergedConfig.getInputSpecs().put(topicName, consumerConfig);
        });
    }
    // Output serde/schema are immutable once set.
    if (!StringUtils.isEmpty(newConfig.getOutputSerdeClassName()) && !newConfig.getOutputSerdeClassName()
            .equals(existingConfig.getOutputSerdeClassName())) {
        throw new IllegalArgumentException("Output Serde mismatch");
    }
    if (!StringUtils.isEmpty(newConfig.getOutputSchemaType()) && !newConfig.getOutputSchemaType()
            .equals(existingConfig.getOutputSchemaType())) {
        throw new IllegalArgumentException("Output Schema mismatch");
    }
    if (!StringUtils.isEmpty(newConfig.getLogTopic())) {
        mergedConfig.setLogTopic(newConfig.getLogTopic());
    }
    // Semantics-affecting knobs cannot be altered after creation.
    if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees()
            .equals(existingConfig.getProcessingGuarantees())) {
        throw new IllegalArgumentException("Processing Guarantees cannot be altered");
    }
    if (newConfig.getRetainOrdering() != null && !newConfig.getRetainOrdering()
            .equals(existingConfig.getRetainOrdering())) {
        throw new IllegalArgumentException("Retain Ordering cannot be altered");
    }
    if (newConfig.getRetainKeyOrdering() != null && !newConfig.getRetainKeyOrdering()
            .equals(existingConfig.getRetainKeyOrdering())) {
        throw new IllegalArgumentException("Retain Key Ordering cannot be altered");
    }
    if (!StringUtils.isEmpty(newConfig.getOutput())) {
        mergedConfig.setOutput(newConfig.getOutput());
    }
    if (newConfig.getUserConfig() != null) {
        mergedConfig.setUserConfig(newConfig.getUserConfig());
    }
    if (newConfig.getSecrets() != null) {
        mergedConfig.setSecrets(newConfig.getSecrets());
    }
    if (newConfig.getRuntime() != null && !newConfig.getRuntime().equals(existingConfig.getRuntime())) {
        throw new IllegalArgumentException("Runtime cannot be altered");
    }
    if (newConfig.getAutoAck() != null && !newConfig.getAutoAck().equals(existingConfig.getAutoAck())) {
        throw new IllegalArgumentException("AutoAck cannot be altered");
    }
    if (newConfig.getMaxMessageRetries() != null) {
        mergedConfig.setMaxMessageRetries(newConfig.getMaxMessageRetries());
    }
    if (!StringUtils.isEmpty(newConfig.getDeadLetterTopic())) {
        mergedConfig.setDeadLetterTopic(newConfig.getDeadLetterTopic());
    }
    if (!StringUtils.isEmpty(newConfig.getSubName()) && !newConfig.getSubName()
            .equals(existingConfig.getSubName())) {
        throw new IllegalArgumentException("Subscription Name cannot be altered");
    }
    if (newConfig.getParallelism() != null) {
        mergedConfig.setParallelism(newConfig.getParallelism());
    }
    // Resources are merged field-by-field rather than replaced wholesale.
    if (newConfig.getResources() != null) {
        mergedConfig
                .setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources()));
    }
    if (newConfig.getWindowConfig() != null) {
        mergedConfig.setWindowConfig(newConfig.getWindowConfig());
    }
    if (newConfig.getTimeoutMs() != null) {
        mergedConfig.setTimeoutMs(newConfig.getTimeoutMs());
    }
    if (newConfig.getCleanupSubscription() != null) {
        mergedConfig.setCleanupSubscription(newConfig.getCleanupSubscription());
    }
    if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) {
        mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags());
    }
    if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) {
        mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions());
    }
    if (newConfig.getProducerConfig() != null) {
        mergedConfig.setProducerConfig(newConfig.getProducerConfig());
    }
    return mergedConfig;
}
@Test
public void testMergeDifferentResources() {
    final FunctionConfig existing = createFunctionConfig();

    final Resources resources = new Resources();
    resources.setCpu(0.3);
    resources.setRam(1232L);
    resources.setDisk(123456L);
    final FunctionConfig update = createUpdatedFunctionConfig("resources", resources);

    final FunctionConfig merged = FunctionConfigUtils.validateUpdate(existing, update);

    // The merge must adopt the updated resources...
    assertEquals(merged.getResources(), resources);

    // ...and leave every other field untouched: restoring the original resources
    // should make the merged config JSON-identical to the existing one.
    merged.setResources(existing.getResources());
    assertEquals(new Gson().toJson(existing), new Gson().toJson(merged));
}
/**
 * Returns the total number of failed verification attempts accumulated across
 * all tracked distro records.
 *
 * @return sum of {@code getFailedVerifyCount()} over every record
 */
public int getFailedVerifyCount() {
    // Sum with a primitive stream instead of mutating an AtomicInteger from
    // inside a forEach lambda (side-effecting accumulator anti-pattern).
    return distroRecords.values().stream().mapToInt(r -> r.getFailedVerifyCount()).sum();
}
@Test
void testGetFailedVerifyCount() {
    // Requesting a record registers it with the singleton holder.
    DistroRecordsHolder.getInstance().getRecord("testGetFailedVerifyCount");
    Optional<DistroRecord> actual = DistroRecordsHolder.getInstance().getRecordIfExist("testGetFailedVerifyCount");
    assertTrue(actual.isPresent());
    // A fresh record contributes no failures to the aggregate count.
    assertEquals(0, DistroRecordsHolder.getInstance().getFailedVerifyCount());
    // One failed verification must be reflected in the holder-wide total.
    actual.get().verifyFail();
    assertEquals(1, DistroRecordsHolder.getInstance().getFailedVerifyCount());
}
/**
 * Recursively deletes the given directory (or file) if it exists.
 *
 * <p>Children are removed before their parents by walking the tree and sorting
 * paths in reverse order.
 *
 * @param path the directory or file to delete; a non-existent path is a no-op
 * @throws UncheckedIOException if walking the tree or deleting an entry fails
 */
public static void deleteDirectory(Path path) {
    try {
        if (Files.exists(path)) {
            try (Stream<Path> walk = Files.walk(path)) {
                // Reverse order guarantees files are deleted before the
                // directories that contain them.
                walk.sorted(Comparator.reverseOrder())
                        .forEach(p -> {
                            try {
                                // Files.delete surfaces failures, unlike
                                // File.delete whose boolean result was ignored.
                                Files.delete(p);
                            } catch (IOException e) {
                                throw new UncheckedIOException(e);
                            }
                        });
            }
        }
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
@Test
public void deleteDirectory() throws IOException {
    // Build a temp directory containing one file.
    final Path dir = Files.createTempDirectory("temp");
    final Path file = Files.createTempFile(dir, "temp", "temp");

    FileUtils.deleteDirectory(dir);

    // Both the directory and its contents must be gone afterwards.
    assertThat(Files.exists(dir)).isFalse();
    assertThat(Files.exists(file)).isFalse();
}
/**
 * Looks up the value for {@code key} from whichever backing store this wrapper
 * was initialized with, adapting versioned records into the timestamped form.
 */
public ValueAndTimestamp<V> get(final K key) {
    // Exactly one of the two stores is expected to be set; delegate accordingly.
    if (timestampedStore != null) {
        return timestampedStore.get(key);
    }
    if (versionedStore == null) {
        throw new IllegalStateException("KeyValueStoreWrapper must be initialized with either timestamped or versioned store");
    }
    final VersionedRecord<V> record = versionedStore.get(key);
    if (record == null) {
        return null;
    }
    // Adapt the versioned record into the timestamped representation callers expect.
    return ValueAndTimestamp.make(record.value(), record.timestamp());
}
@Test
public void shouldGetFromTimestampedStore() {
    // Given a wrapper backed by the timestamped store...
    givenWrapperWithTimestampedStore();
    when(timestampedStore.get(KEY)).thenReturn(VALUE_AND_TIMESTAMP);
    // ...get() should delegate straight through without any conversion.
    assertThat(wrapper.get(KEY), equalTo(VALUE_AND_TIMESTAMP));
}
/**
 * Fetches the next {@link Page} of query results from the Pinot gRPC streaming
 * response. Returns {@code null} once the source is closed or the trailing
 * metadata block has been consumed.
 */
@Override
public Page getNextPage() {
    if (closed) {
        return null;
    }
    // Lazily issue the query on first use.
    if (serverResponseIterator == null) {
        serverResponseIterator = queryPinot(split);
    }
    ByteBuffer byteBuffer = null;
    try {
        // Pinot gRPC server response iterator returns:
        // - n data blocks based on inbound message size;
        // - 1 metadata of the query results.
        // So we need to check ResponseType of each ServerResponse.
        if (serverResponseIterator.hasNext()) {
            // Time only the network fetch, not the decoding below.
            long startTimeNanos = System.nanoTime();
            Server.ServerResponse serverResponse = serverResponseIterator.next();
            readTimeNanos += System.nanoTime() - startTimeNanos;
            final String responseType = serverResponse.getMetadataOrThrow("responseType");
            switch (responseType) {
                case CommonConstants.Query.Response.ResponseType.DATA:
                    estimatedMemoryUsageInBytes = serverResponse.getSerializedSize();
                    // Store each dataTable which will later be constructed into Pages.
                    try {
                        byteBuffer = serverResponse.getPayload().asReadOnlyByteBuffer();
                        DataTable dataTable = DataTableFactory.getDataTable(byteBuffer);
                        checkExceptions(dataTable, split, PinotSessionProperties.isMarkDataFetchExceptionsAsRetriable(session));
                        currentDataTable = new PinotSegmentPageSource.PinotDataTableWithSize(dataTable, serverResponse.getSerializedSize());
                    } catch (IOException e) {
                        throw new PinotException(
                                PINOT_DATA_FETCH_EXCEPTION,
                                split.getSegmentPinotQuery(),
                                String.format("Encountered Pinot exceptions when fetching data table from Split: < %s >", split),
                                e);
                    }
                    break;
                case CommonConstants.Query.Response.ResponseType.METADATA:
                    // The last part of the response is Metadata
                    currentDataTable = null;
                    serverResponseIterator = null;
                    close();
                    return null;
                default:
                    throw new PinotException(
                            PINOT_UNEXPECTED_RESPONSE,
                            split.getSegmentPinotQuery(),
                            String.format("Encountered Pinot exceptions, unknown response type - %s", responseType));
            }
        }
        // NOTE(review): if hasNext() were false here, currentDataTable could be null
        // or stale from an earlier call and getNumberOfRows() would NPE — this seems
        // to assume the stream always ends with a METADATA block; confirm with the
        // Pinot gRPC contract.
        Page page = fillNextPage();
        completedPositions += currentDataTable.getDataTable().getNumberOfRows();
        return page;
    } finally {
        // Release the read-only view of the gRPC payload buffer.
        if (byteBuffer != null) {
            ((Buffer) byteBuffer).clear();
        }
    }
}
@Test
public void testAllDataTypes() {
    PinotSessionProperties pinotSessionProperties = new PinotSessionProperties(pinotConfig);
    ConnectorSession session = new TestingConnectorSession(pinotSessionProperties.getSessionProperties());
    // Three identical data tables, each carrying a column for every supported Pinot type.
    List<DataTable> dataTables = IntStream.range(0, 3).mapToObj(i -> createDataTableWithAllTypes()).collect(toImmutableList());
    List<PinotColumnHandle> pinotColumnHandles = createPinotColumnHandlesWithAllTypes();
    PinotSplit mockPinotSplit = new PinotSplit(pinotConnectorId.toString(), PinotSplit.SplitType.SEGMENT, pinotColumnHandles, Optional.empty(), Optional.of("blah"), ImmutableList.of("seg"), Optional.of("host"), getGrpcPort());
    PinotSegmentPageSource pinotSegmentPageSource = getPinotSegmentPageSource(session, dataTables, mockPinotSplit, pinotColumnHandles);
    // Each data table should surface as one page; every column block must be
    // hashable under its mapped Presto type for all rows.
    for (int i = 0; i < dataTables.size(); ++i) {
        Page page = requireNonNull(pinotSegmentPageSource.getNextPage(), "Expected a valid page");
        for (int j = 0; j < ALL_TYPES.size(); ++j) {
            Block block = page.getBlock(j);
            Type type = PinotColumnUtils.getPrestoTypeFromPinotType(getFieldSpec("dontcare", ALL_TYPES.get(j)), false, false);
            long maxHashCode = Long.MIN_VALUE;
            for (int k = 0; k < NUM_ROWS; ++k) {
                maxHashCode = Math.max(type.hash(block, k), maxHashCode);
            }
            // Sanity check that real values were decoded: an all-zero hash column
            // would indicate empty/default data.
            Assert.assertTrue(maxHashCode != 0, "Not all column values can have hash code 0");
        }
    }
}
/**
 * Converts a xugu column definition into a SeaTunnel {@link Column}, mapping the
 * database type name to the corresponding SeaTunnel data type and filling in
 * length/precision/scale where applicable.
 *
 * @param typeDefine column definition read from xugu metadata
 * @return the equivalent SeaTunnel physical column
 * @throws RuntimeException via {@code CommonError.convertToSeaTunnelTypeError}
 *         when the xugu type is not supported
 */
@Override
public Column convert(BasicTypeDefine typeDefine) {
    // Carry over name/source-type/nullability/default/comment verbatim.
    PhysicalColumn.PhysicalColumnBuilder builder =
            PhysicalColumn.builder()
                    .name(typeDefine.getName())
                    .sourceType(typeDefine.getColumnType())
                    .nullable(typeDefine.isNullable())
                    .defaultValue(typeDefine.getDefaultValue())
                    .comment(typeDefine.getComment());
    // NOTE(review): toUpperCase() uses the default locale — verify this cannot
    // misbehave under locales with special casing rules (e.g. Turkish dotless i).
    String xuguDataType = typeDefine.getDataType().toUpperCase();
    switch (xuguDataType) {
        case XUGU_BOOLEAN:
        case XUGU_BOOL:
            builder.dataType(BasicType.BOOLEAN_TYPE);
            break;
        case XUGU_TINYINT:
            builder.dataType(BasicType.BYTE_TYPE);
            break;
        case XUGU_SMALLINT:
            builder.dataType(BasicType.SHORT_TYPE);
            break;
        case XUGU_INT:
        case XUGU_INTEGER:
            builder.dataType(BasicType.INT_TYPE);
            break;
        case XUGU_BIGINT:
            builder.dataType(BasicType.LONG_TYPE);
            break;
        case XUGU_FLOAT:
            builder.dataType(BasicType.FLOAT_TYPE);
            break;
        case XUGU_DOUBLE:
            builder.dataType(BasicType.DOUBLE_TYPE);
            break;
        case XUGU_NUMBER:
        case XUGU_DECIMAL:
        case XUGU_NUMERIC:
            // Use the declared precision/scale when present, otherwise fall back
            // to the converter-wide defaults.
            DecimalType decimalType;
            if (typeDefine.getPrecision() != null && typeDefine.getPrecision() > 0) {
                decimalType = new DecimalType(
                        typeDefine.getPrecision().intValue(), typeDefine.getScale());
            } else {
                decimalType = new DecimalType(DEFAULT_PRECISION, DEFAULT_SCALE);
            }
            builder.dataType(decimalType);
            builder.columnLength(Long.valueOf(decimalType.getPrecision()));
            builder.scale(decimalType.getScale());
            break;
        case XUGU_CHAR:
        case XUGU_NCHAR:
            builder.dataType(BasicType.STRING_TYPE);
            // Missing length defaults to a single character (in 4-byte units).
            if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) {
                builder.columnLength(TypeDefineUtils.charTo4ByteLength(1L));
            } else {
                builder.columnLength(typeDefine.getLength());
            }
            break;
        case XUGU_VARCHAR:
        case XUGU_VARCHAR2:
            builder.dataType(BasicType.STRING_TYPE);
            // Missing length defaults to the maximum varchar capacity.
            if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) {
                builder.columnLength(TypeDefineUtils.charTo4ByteLength(MAX_VARCHAR_LENGTH));
            } else {
                builder.columnLength(typeDefine.getLength());
            }
            break;
        case XUGU_CLOB:
            builder.dataType(BasicType.STRING_TYPE);
            // CLOBs are capped at 2 GB - 1 bytes.
            builder.columnLength(BYTES_2GB - 1);
            break;
        case XUGU_JSON:
        case XUGU_GUID:
            builder.dataType(BasicType.STRING_TYPE);
            break;
        case XUGU_BINARY:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(MAX_BINARY_LENGTH);
            break;
        case XUGU_BLOB:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(BYTES_2GB - 1);
            break;
        case XUGU_DATE:
            builder.dataType(LocalTimeType.LOCAL_DATE_TYPE);
            break;
        case XUGU_TIME:
        case XUGU_TIME_WITH_TIME_ZONE:
            builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
            break;
        case XUGU_DATETIME:
        case XUGU_DATETIME_WITH_TIME_ZONE:
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            break;
        case XUGU_TIMESTAMP:
        case XUGU_TIMESTAMP_WITH_TIME_ZONE:
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            // Timestamps without an explicit scale get the default fractional precision.
            if (typeDefine.getScale() == null) {
                builder.scale(TIMESTAMP_DEFAULT_SCALE);
            } else {
                builder.scale(typeDefine.getScale());
            }
            break;
        default:
            throw CommonError.convertToSeaTunnelTypeError(
                    DatabaseIdentifier.XUGU, xuguDataType, typeDefine.getName());
    }
    return builder.build();
}
@Test
public void testConvertSmallint() {
    // A xugu SMALLINT column must map to SeaTunnel's SHORT type while keeping
    // the original name and source type intact.
    final BasicTypeDefine<Object> smallintDefine =
            BasicTypeDefine.builder()
                    .name("test")
                    .columnType("smallint")
                    .dataType("smallint")
                    .build();

    final Column converted = XuguTypeConverter.INSTANCE.convert(smallintDefine);

    Assertions.assertEquals(smallintDefine.getName(), converted.getName());
    Assertions.assertEquals(BasicType.SHORT_TYPE, converted.getDataType());
    Assertions.assertEquals(smallintDefine.getColumnType(), converted.getSourceType());
}
/**
 * Installs the given container as the current thread's metrics container and
 * returns the container it replaces (possibly null).
 */
public static @Nullable MetricsContainer setCurrentContainer(
    @Nullable MetricsContainer container) {
  // Swap the holder's container for this thread and hand back whatever was there.
  final MetricsContainerHolder holder = CONTAINER_FOR_THREAD.get();
  final @Nullable MetricsContainer replaced = holder.container;
  holder.container = container;
  return replaced;
}
@Test
public void testUsesAppropriateMetricsContainer() {
    Counter counter = Metrics.counter("ns", "name");
    MetricsContainer c1 = Mockito.mock(MetricsContainer.class);
    MetricsContainer c2 = Mockito.mock(MetricsContainer.class);
    Counter counter1 = Mockito.mock(Counter.class);
    Counter counter2 = Mockito.mock(Counter.class);
    when(c1.getCounter(MetricName.named("ns", "name"))).thenReturn(counter1);
    when(c2.getCounter(MetricName.named("ns", "name"))).thenReturn(counter2);
    // Nothing installed yet, so installing c1 returns null...
    assertNull(MetricsEnvironment.setCurrentContainer(c1));
    counter.inc();
    // ...swapping in c2 returns the previously installed c1...
    assertEquals(c1, MetricsEnvironment.setCurrentContainer(c2));
    counter.dec();
    // ...and clearing returns c2.
    assertEquals(c2, MetricsEnvironment.setCurrentContainer(null));
    // Each update must have landed on the container active at call time.
    verify(counter1).inc(1L);
    verify(counter2).inc(-1L);
    verifyNoMoreInteractions(counter1, counter2);
}